1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Testsuite for BPF interpreter and BPF JIT compiler
4  *
5  * Copyright (c) 2011-2014 PLUMgrid, http://plumgrid.com
6  */
7 
8 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
9 
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/filter.h>
13 #include <linux/bpf.h>
14 #include <linux/skbuff.h>
15 #include <linux/netdevice.h>
16 #include <linux/if_vlan.h>
17 #include <linux/random.h>
18 #include <linux/highmem.h>
19 #include <linux/sched.h>
20 
21 /* General test specific settings */
22 #define MAX_SUBTESTS	3
23 #define MAX_TESTRUNS	1000
24 #define MAX_DATA	128
25 #define MAX_INSNS	512
26 #define MAX_K		0xffffffff
27 
28 /* A few constants used to init the test 'skb' */
29 #define SKB_TYPE	3
30 #define SKB_MARK	0x1234aaaa
31 #define SKB_HASH	0x1234aaab
32 #define SKB_QUEUE_MAP	123
33 #define SKB_VLAN_TCI	0xffff
34 #define SKB_VLAN_PRESENT	1
35 #define SKB_DEV_IFINDEX	577
36 #define SKB_DEV_TYPE	588
37 
38 /* Redefine REGs to make tests less verbose */
39 #define R0		BPF_REG_0
40 #define R1		BPF_REG_1
41 #define R2		BPF_REG_2
42 #define R3		BPF_REG_3
43 #define R4		BPF_REG_4
44 #define R5		BPF_REG_5
45 #define R6		BPF_REG_6
46 #define R7		BPF_REG_7
47 #define R8		BPF_REG_8
48 #define R9		BPF_REG_9
49 #define R10		BPF_REG_10
50 
51 /* Flags that can be passed to test cases */
52 #define FLAG_NO_DATA		BIT(0)
53 #define FLAG_EXPECTED_FAIL	BIT(1)
54 #define FLAG_SKB_FRAG		BIT(2)
55 #define FLAG_VERIFIER_ZEXT	BIT(3)
56 #define FLAG_LARGE_MEM		BIT(4)
57 
58 enum {
59 	CLASSIC  = BIT(6),	/* Old BPF instructions only. */
60 	INTERNAL = BIT(7),	/* Extended instruction set.  */
61 };
62 
63 #define TEST_TYPE_MASK		(CLASSIC | INTERNAL)
64 
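/*
 * A test case either provides its program directly in u.insns (classic
 * BPF) or u.insns_int (eBPF), or supplies a fill_helper() that allocates
 * and fills u.ptr at runtime for large, generated programs.
 */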
65 struct bpf_test {
66 	const char *descr;
67 	union {
68 		struct sock_filter insns[MAX_INSNS];
69 		struct bpf_insn insns_int[MAX_INSNS];
70 		struct {
71 			void *insns;
72 			unsigned int len;
73 		} ptr;
74 	} u;
75 	__u8 aux;
76 	__u8 data[MAX_DATA];
77 	struct {
78 		int data_size;
79 		__u32 result;
80 	} test[MAX_SUBTESTS];
81 	int (*fill_helper)(struct bpf_test *self);
82 	int expected_errcode; /* used when FLAG_EXPECTED_FAIL is set in the aux */
83 	__u8 frag_data[MAX_DATA];
84 	int stack_depth; /* for eBPF only, since tests don't call verifier */
85 	int nr_testruns; /* Custom run count, defaults to MAX_TESTRUNS if 0 */
86 };
87 
88 /* Large test cases need separate allocation and fill handler. */
89 
90 static int bpf_fill_maxinsns1(struct bpf_test *self)
91 {
92 	unsigned int len = BPF_MAXINSNS;
93 	struct sock_filter *insn;
94 	__u32 k = ~0;
95 	int i;
96 
97 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
98 	if (!insn)
99 		return -ENOMEM;
100 
101 	for (i = 0; i < len; i++, k--)
102 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, k);
103 
104 	self->u.ptr.insns = insn;
105 	self->u.ptr.len = len;
106 
107 	return 0;
108 }
109 
110 static int bpf_fill_maxinsns2(struct bpf_test *self)
111 {
112 	unsigned int len = BPF_MAXINSNS;
113 	struct sock_filter *insn;
114 	int i;
115 
116 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
117 	if (!insn)
118 		return -ENOMEM;
119 
120 	for (i = 0; i < len; i++)
121 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
122 
123 	self->u.ptr.insns = insn;
124 	self->u.ptr.len = len;
125 
126 	return 0;
127 }
128 
129 static int bpf_fill_maxinsns3(struct bpf_test *self)
130 {
131 	unsigned int len = BPF_MAXINSNS;
132 	struct sock_filter *insn;
133 	struct rnd_state rnd;
134 	int i;
135 
136 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
137 	if (!insn)
138 		return -ENOMEM;
139 
140 	prandom_seed_state(&rnd, 3141592653589793238ULL);
141 
142 	for (i = 0; i < len - 1; i++) {
143 		__u32 k = prandom_u32_state(&rnd);
144 
145 		insn[i] = __BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, k);
146 	}
147 
148 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
149 
150 	self->u.ptr.insns = insn;
151 	self->u.ptr.len = len;
152 
153 	return 0;
154 }
155 
156 static int bpf_fill_maxinsns4(struct bpf_test *self)
157 {
158 	unsigned int len = BPF_MAXINSNS + 1;
159 	struct sock_filter *insn;
160 	int i;
161 
162 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
163 	if (!insn)
164 		return -ENOMEM;
165 
166 	for (i = 0; i < len; i++)
167 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
168 
169 	self->u.ptr.insns = insn;
170 	self->u.ptr.len = len;
171 
172 	return 0;
173 }
174 
175 static int bpf_fill_maxinsns5(struct bpf_test *self)
176 {
177 	unsigned int len = BPF_MAXINSNS;
178 	struct sock_filter *insn;
179 	int i;
180 
181 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
182 	if (!insn)
183 		return -ENOMEM;
184 
185 	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
186 
187 	for (i = 1; i < len - 1; i++)
188 		insn[i] = __BPF_STMT(BPF_RET | BPF_K, 0xfefefefe);
189 
190 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
191 
192 	self->u.ptr.insns = insn;
193 	self->u.ptr.len = len;
194 
195 	return 0;
196 }
197 
198 static int bpf_fill_maxinsns6(struct bpf_test *self)
199 {
200 	unsigned int len = BPF_MAXINSNS;
201 	struct sock_filter *insn;
202 	int i;
203 
204 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
205 	if (!insn)
206 		return -ENOMEM;
207 
208 	for (i = 0; i < len - 1; i++)
209 		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
210 				     SKF_AD_VLAN_TAG_PRESENT);
211 
212 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
213 
214 	self->u.ptr.insns = insn;
215 	self->u.ptr.len = len;
216 
217 	return 0;
218 }
219 
220 static int bpf_fill_maxinsns7(struct bpf_test *self)
221 {
222 	unsigned int len = BPF_MAXINSNS;
223 	struct sock_filter *insn;
224 	int i;
225 
226 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
227 	if (!insn)
228 		return -ENOMEM;
229 
230 	for (i = 0; i < len - 4; i++)
231 		insn[i] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
232 				     SKF_AD_CPU);
233 
234 	insn[len - 4] = __BPF_STMT(BPF_MISC | BPF_TAX, 0);
235 	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS, SKF_AD_OFF +
236 				   SKF_AD_CPU);
237 	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0);
238 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
239 
240 	self->u.ptr.insns = insn;
241 	self->u.ptr.len = len;
242 
243 	return 0;
244 }
245 
246 static int bpf_fill_maxinsns8(struct bpf_test *self)
247 {
248 	unsigned int len = BPF_MAXINSNS;
249 	struct sock_filter *insn;
250 	int i, jmp_off = len - 3;
251 
252 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
253 	if (!insn)
254 		return -ENOMEM;
255 
256 	insn[0] = __BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff);
257 
258 	for (i = 1; i < len - 1; i++)
259 		insn[i] = __BPF_JUMP(BPF_JMP | BPF_JGT, 0xffffffff, jmp_off--, 0);
260 
261 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
262 
263 	self->u.ptr.insns = insn;
264 	self->u.ptr.len = len;
265 
266 	return 0;
267 }
268 
269 static int bpf_fill_maxinsns9(struct bpf_test *self)
270 {
271 	unsigned int len = BPF_MAXINSNS;
272 	struct bpf_insn *insn;
273 	int i;
274 
275 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
276 	if (!insn)
277 		return -ENOMEM;
278 
279 	insn[0] = BPF_JMP_IMM(BPF_JA, 0, 0, len - 2);
280 	insn[1] = BPF_ALU32_IMM(BPF_MOV, R0, 0xcbababab);
281 	insn[2] = BPF_EXIT_INSN();
282 
283 	for (i = 3; i < len - 2; i++)
284 		insn[i] = BPF_ALU32_IMM(BPF_MOV, R0, 0xfefefefe);
285 
286 	insn[len - 2] = BPF_EXIT_INSN();
287 	insn[len - 1] = BPF_JMP_IMM(BPF_JA, 0, 0, -(len - 1));
288 
289 	self->u.ptr.insns = insn;
290 	self->u.ptr.len = len;
291 
292 	return 0;
293 }
294 
295 static int bpf_fill_maxinsns10(struct bpf_test *self)
296 {
297 	unsigned int len = BPF_MAXINSNS, hlen = len - 2;
298 	struct bpf_insn *insn;
299 	int i;
300 
301 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
302 	if (!insn)
303 		return -ENOMEM;
304 
305 	for (i = 0; i < hlen / 2; i++)
306 		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 2 - 2 * i);
307 	for (i = hlen - 1; i > hlen / 2; i--)
308 		insn[i] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen - 1 - 2 * i);
309 
310 	insn[hlen / 2] = BPF_JMP_IMM(BPF_JA, 0, 0, hlen / 2 - 1);
311 	insn[hlen]     = BPF_ALU32_IMM(BPF_MOV, R0, 0xabababac);
312 	insn[hlen + 1] = BPF_EXIT_INSN();
313 
314 	self->u.ptr.insns = insn;
315 	self->u.ptr.len = len;
316 
317 	return 0;
318 }
319 
320 static int __bpf_fill_ja(struct bpf_test *self, unsigned int len,
321 			 unsigned int plen)
322 {
323 	struct sock_filter *insn;
324 	unsigned int rlen;
325 	int i, j;
326 
327 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
328 	if (!insn)
329 		return -ENOMEM;
330 
331 	rlen = (len % plen) - 1;
332 
333 	for (i = 0; i + plen < len; i += plen)
334 		for (j = 0; j < plen; j++)
335 			insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA,
336 						 plen - 1 - j, 0, 0);
337 	for (j = 0; j < rlen; j++)
338 		insn[i + j] = __BPF_JUMP(BPF_JMP | BPF_JA, rlen - 1 - j,
339 					 0, 0);
340 
341 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xababcbac);
342 
343 	self->u.ptr.insns = insn;
344 	self->u.ptr.len = len;
345 
346 	return 0;
347 }
348 
349 static int bpf_fill_maxinsns11(struct bpf_test *self)
350 {
351 	/* Hits 70 passes on x86_64 and triggers NOPs padding. */
352 	return __bpf_fill_ja(self, BPF_MAXINSNS, 68);
353 }
354 
355 static int bpf_fill_maxinsns12(struct bpf_test *self)
356 {
357 	unsigned int len = BPF_MAXINSNS;
358 	struct sock_filter *insn;
359 	int i = 0;
360 
361 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
362 	if (!insn)
363 		return -ENOMEM;
364 
365 	insn[0] = __BPF_JUMP(BPF_JMP | BPF_JA, len - 2, 0, 0);
366 
367 	for (i = 1; i < len - 1; i++)
368 		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
369 
370 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xabababab);
371 
372 	self->u.ptr.insns = insn;
373 	self->u.ptr.len = len;
374 
375 	return 0;
376 }
377 
378 static int bpf_fill_maxinsns13(struct bpf_test *self)
379 {
380 	unsigned int len = BPF_MAXINSNS;
381 	struct sock_filter *insn;
382 	int i = 0;
383 
384 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
385 	if (!insn)
386 		return -ENOMEM;
387 
388 	for (i = 0; i < len - 3; i++)
389 		insn[i] = __BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0);
390 
391 	insn[len - 3] = __BPF_STMT(BPF_LD | BPF_IMM, 0xabababab);
392 	insn[len - 2] = __BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0);
393 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_A, 0);
394 
395 	self->u.ptr.insns = insn;
396 	self->u.ptr.len = len;
397 
398 	return 0;
399 }
400 
401 static int bpf_fill_ja(struct bpf_test *self)
402 {
403 	/* Hits exactly 11 passes on x86_64 JIT. */
404 	return __bpf_fill_ja(self, 12, 9);
405 }
406 
407 static int bpf_fill_ld_abs_get_processor_id(struct bpf_test *self)
408 {
409 	unsigned int len = BPF_MAXINSNS;
410 	struct sock_filter *insn;
411 	int i;
412 
413 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
414 	if (!insn)
415 		return -ENOMEM;
416 
417 	for (i = 0; i < len - 1; i += 2) {
418 		insn[i] = __BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 0);
419 		insn[i + 1] = __BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
420 					 SKF_AD_OFF + SKF_AD_CPU);
421 	}
422 
423 	insn[len - 1] = __BPF_STMT(BPF_RET | BPF_K, 0xbee);
424 
425 	self->u.ptr.insns = insn;
426 	self->u.ptr.len = len;
427 
428 	return 0;
429 }
430 
431 static int __bpf_fill_stxdw(struct bpf_test *self, int size)
432 {
433 	unsigned int len = BPF_MAXINSNS;
434 	struct bpf_insn *insn;
435 	int i;
436 
437 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
438 	if (!insn)
439 		return -ENOMEM;
440 
441 	insn[0] = BPF_ALU32_IMM(BPF_MOV, R0, 1);
442 	insn[1] = BPF_ST_MEM(size, R10, -40, 42);
443 
444 	for (i = 2; i < len - 2; i++)
445 		insn[i] = BPF_STX_XADD(size, R10, R0, -40);
446 
447 	insn[len - 2] = BPF_LDX_MEM(size, R0, R10, -40);
448 	insn[len - 1] = BPF_EXIT_INSN();
449 
450 	self->u.ptr.insns = insn;
451 	self->u.ptr.len = len;
452 	self->stack_depth = 40;
453 
454 	return 0;
455 }
456 
457 static int bpf_fill_stxw(struct bpf_test *self)
458 {
459 	return __bpf_fill_stxdw(self, BPF_W);
460 }
461 
462 static int bpf_fill_stxdw(struct bpf_test *self)
463 {
464 	return __bpf_fill_stxdw(self, BPF_DW);
465 }
466 
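/*
 * BPF_LD_IMM64() expands to two instructions (the second one carries the
 * upper 32 bits of the immediate), so this helper always returns 2 and
 * callers advance their instruction index by the return value.
 */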
467 static int __bpf_ld_imm64(struct bpf_insn insns[2], u8 reg, s64 imm64)
468 {
469 	struct bpf_insn tmp[] = {BPF_LD_IMM64(reg, imm64)};
470 
471 	memcpy(insns, tmp, sizeof(tmp));
472 	return 2;
473 }
474 
475 /*
476  * Branch conversion tests. Complex operations can expand to a lot
477  * of instructions when JITed. This in turn may cause jump offsets
478  * to overflow the field size of the native instruction, triggering
479  * a branch conversion mechanism in some JITs.
480  */
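/*
 * Roughly, the program generated below is a conditional jump with the
 * maximum 16-bit offset (S16_MAX), followed by ~32k filler ALU operations.
 * A JIT that expands these operations into several native instructions
 * overflows its branch displacement field and must convert the branch.
 */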
481 static int __bpf_fill_max_jmp(struct bpf_test *self, int jmp, int imm)
482 {
483 	struct bpf_insn *insns;
484 	int len = S16_MAX + 5;
485 	int i;
486 
487 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
488 	if (!insns)
489 		return -ENOMEM;
490 
491 	i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
492 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
493 	insns[i++] = BPF_JMP_IMM(jmp, R0, imm, S16_MAX);
494 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 2);
495 	insns[i++] = BPF_EXIT_INSN();
496 
497 	while (i < len - 1) {
498 		static const int ops[] = {
499 			BPF_LSH, BPF_RSH, BPF_ARSH, BPF_ADD,
500 			BPF_SUB, BPF_MUL, BPF_DIV, BPF_MOD,
501 		};
502 		int op = ops[(i >> 1) % ARRAY_SIZE(ops)];
503 
504 		if (i & 1)
505 			insns[i++] = BPF_ALU32_REG(op, R0, R1);
506 		else
507 			insns[i++] = BPF_ALU64_REG(op, R0, R1);
508 	}
509 
510 	insns[i++] = BPF_EXIT_INSN();
511 	self->u.ptr.insns = insns;
512 	self->u.ptr.len = len;
513 	BUG_ON(i != len);
514 
515 	return 0;
516 }
517 
518 /* Branch taken by runtime decision */
519 static int bpf_fill_max_jmp_taken(struct bpf_test *self)
520 {
521 	return __bpf_fill_max_jmp(self, BPF_JEQ, 1);
522 }
523 
524 /* Branch not taken by runtime decision */
525 static int bpf_fill_max_jmp_not_taken(struct bpf_test *self)
526 {
527 	return __bpf_fill_max_jmp(self, BPF_JEQ, 0);
528 }
529 
530 /* Branch always taken, known at JIT time */
531 static int bpf_fill_max_jmp_always_taken(struct bpf_test *self)
532 {
533 	return __bpf_fill_max_jmp(self, BPF_JGE, 0);
534 }
535 
536 /* Branch never taken, known at JIT time */
537 static int bpf_fill_max_jmp_never_taken(struct bpf_test *self)
538 {
539 	return __bpf_fill_max_jmp(self, BPF_JLT, 0);
540 }
541 
542 /* ALU result computation used in tests */
543 static bool __bpf_alu_result(u64 *res, u64 v1, u64 v2, u8 op)
544 {
545 	*res = 0;
546 	switch (op) {
547 	case BPF_MOV:
548 		*res = v2;
549 		break;
550 	case BPF_AND:
551 		*res = v1 & v2;
552 		break;
553 	case BPF_OR:
554 		*res = v1 | v2;
555 		break;
556 	case BPF_XOR:
557 		*res = v1 ^ v2;
558 		break;
559 	case BPF_LSH:
560 		*res = v1 << v2;
561 		break;
562 	case BPF_RSH:
563 		*res = v1 >> v2;
564 		break;
565 	case BPF_ARSH:
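		/*
		 * Arithmetic shift: if the sign bit of v1 is set (v1 > S64_MAX
		 * as unsigned), fill the vacated upper bits with ones.
		 */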
566 		*res = v1 >> v2;
567 		if (v2 > 0 && v1 > S64_MAX)
568 			*res |= ~0ULL << (64 - v2);
569 		break;
570 	case BPF_ADD:
571 		*res = v1 + v2;
572 		break;
573 	case BPF_SUB:
574 		*res = v1 - v2;
575 		break;
576 	case BPF_MUL:
577 		*res = v1 * v2;
578 		break;
579 	case BPF_DIV:
580 		if (v2 == 0)
581 			return false;
582 		*res = div64_u64(v1, v2);
583 		break;
584 	case BPF_MOD:
585 		if (v2 == 0)
586 			return false;
587 		div64_u64_rem(v1, v2, res);
588 		break;
589 	}
590 	return true;
591 }
592 
593 /* Test an ALU shift operation for all valid shift values */
594 static int __bpf_fill_alu_shift(struct bpf_test *self, u8 op,
595 				u8 mode, bool alu32)
596 {
597 	static const s64 regs[] = {
598 		0x0123456789abcdefLL, /* dword > 0, word < 0 */
599 		0xfedcba9876543210LL, /* dword < 0, word > 0 */
600 		0xfedcba0198765432LL, /* dword < 0, word < 0 */
601 		0x0123458967abcdefLL, /* dword > 0, word > 0 */
602 	};
603 	int bits = alu32 ? 32 : 64;
604 	int len = (2 + 7 * bits) * ARRAY_SIZE(regs) + 3;
605 	struct bpf_insn *insn;
606 	int imm, k;
607 	int i = 0;
608 
609 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
610 	if (!insn)
611 		return -ENOMEM;
612 
613 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
614 
615 	for (k = 0; k < ARRAY_SIZE(regs); k++) {
616 		s64 reg = regs[k];
617 
618 		i += __bpf_ld_imm64(&insn[i], R3, reg);
619 
620 		for (imm = 0; imm < bits; imm++) {
621 			u64 val;
622 
623 			/* Perform operation */
624 			insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
625 			insn[i++] = BPF_ALU64_IMM(BPF_MOV, R2, imm);
626 			if (alu32) {
627 				if (mode == BPF_K)
628 					insn[i++] = BPF_ALU32_IMM(op, R1, imm);
629 				else
630 					insn[i++] = BPF_ALU32_REG(op, R1, R2);
631 
632 				if (op == BPF_ARSH)
633 					reg = (s32)reg;
634 				else
635 					reg = (u32)reg;
636 				__bpf_alu_result(&val, reg, imm, op);
637 				val = (u32)val;
638 			} else {
639 				if (mode == BPF_K)
640 					insn[i++] = BPF_ALU64_IMM(op, R1, imm);
641 				else
642 					insn[i++] = BPF_ALU64_REG(op, R1, R2);
643 				__bpf_alu_result(&val, reg, imm, op);
644 			}
645 
646 			/*
647 			 * When debugging a JIT that fails this test, one
648 			 * can write the immediate value to R0 here to find
649 			 * out which operand values fail.
650 			 */
651 
652 			/* Load reference and check the result */
653 			i += __bpf_ld_imm64(&insn[i], R4, val);
654 			insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
655 			insn[i++] = BPF_EXIT_INSN();
656 		}
657 	}
658 
659 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
660 	insn[i++] = BPF_EXIT_INSN();
661 
662 	self->u.ptr.insns = insn;
663 	self->u.ptr.len = len;
664 	BUG_ON(i != len);
665 
666 	return 0;
667 }
668 
669 static int bpf_fill_alu64_lsh_imm(struct bpf_test *self)
670 {
671 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, false);
672 }
673 
674 static int bpf_fill_alu64_rsh_imm(struct bpf_test *self)
675 {
676 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, false);
677 }
678 
679 static int bpf_fill_alu64_arsh_imm(struct bpf_test *self)
680 {
681 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, false);
682 }
683 
684 static int bpf_fill_alu64_lsh_reg(struct bpf_test *self)
685 {
686 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, false);
687 }
688 
689 static int bpf_fill_alu64_rsh_reg(struct bpf_test *self)
690 {
691 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, false);
692 }
693 
694 static int bpf_fill_alu64_arsh_reg(struct bpf_test *self)
695 {
696 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, false);
697 }
698 
699 static int bpf_fill_alu32_lsh_imm(struct bpf_test *self)
700 {
701 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_K, true);
702 }
703 
704 static int bpf_fill_alu32_rsh_imm(struct bpf_test *self)
705 {
706 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_K, true);
707 }
708 
709 static int bpf_fill_alu32_arsh_imm(struct bpf_test *self)
710 {
711 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_K, true);
712 }
713 
714 static int bpf_fill_alu32_lsh_reg(struct bpf_test *self)
715 {
716 	return __bpf_fill_alu_shift(self, BPF_LSH, BPF_X, true);
717 }
718 
719 static int bpf_fill_alu32_rsh_reg(struct bpf_test *self)
720 {
721 	return __bpf_fill_alu_shift(self, BPF_RSH, BPF_X, true);
722 }
723 
724 static int bpf_fill_alu32_arsh_reg(struct bpf_test *self)
725 {
726 	return __bpf_fill_alu_shift(self, BPF_ARSH, BPF_X, true);
727 }
728 
729 /*
730  * Test an ALU register shift operation for all valid shift values
731  * for the case when the source and destination are the same.
732  */
733 static int __bpf_fill_alu_shift_same_reg(struct bpf_test *self, u8 op,
734 					 bool alu32)
735 {
736 	int bits = alu32 ? 32 : 64;
737 	int len = 3 + 6 * bits;
738 	struct bpf_insn *insn;
739 	int i = 0;
740 	u64 val;
741 
742 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
743 	if (!insn)
744 		return -ENOMEM;
745 
746 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
747 
748 	for (val = 0; val < bits; val++) {
749 		u64 res;
750 
751 		/* Perform operation */
752 		insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
753 		if (alu32)
754 			insn[i++] = BPF_ALU32_REG(op, R1, R1);
755 		else
756 			insn[i++] = BPF_ALU64_REG(op, R1, R1);
757 
758 		/* Compute the reference result */
759 		__bpf_alu_result(&res, val, val, op);
760 		if (alu32)
761 			res = (u32)res;
762 		i += __bpf_ld_imm64(&insn[i], R2, res);
763 
764 		/* Check the actual result */
765 		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
766 		insn[i++] = BPF_EXIT_INSN();
767 	}
768 
769 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
770 	insn[i++] = BPF_EXIT_INSN();
771 
772 	self->u.ptr.insns = insn;
773 	self->u.ptr.len = len;
774 	BUG_ON(i != len);
775 
776 	return 0;
777 }
778 
779 static int bpf_fill_alu64_lsh_same_reg(struct bpf_test *self)
780 {
781 	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, false);
782 }
783 
784 static int bpf_fill_alu64_rsh_same_reg(struct bpf_test *self)
785 {
786 	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, false);
787 }
788 
789 static int bpf_fill_alu64_arsh_same_reg(struct bpf_test *self)
790 {
791 	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, false);
792 }
793 
794 static int bpf_fill_alu32_lsh_same_reg(struct bpf_test *self)
795 {
796 	return __bpf_fill_alu_shift_same_reg(self, BPF_LSH, true);
797 }
798 
799 static int bpf_fill_alu32_rsh_same_reg(struct bpf_test *self)
800 {
801 	return __bpf_fill_alu_shift_same_reg(self, BPF_RSH, true);
802 }
803 
804 static int bpf_fill_alu32_arsh_same_reg(struct bpf_test *self)
805 {
806 	return __bpf_fill_alu_shift_same_reg(self, BPF_ARSH, true);
807 }
808 
809 /*
810  * Common operand pattern generator for exhaustive power-of-two magnitudes
811  * tests. The block size parameters can be adjusted to increase/reduce the
812  * number of combinations tested and thereby execution speed and memory
813  * footprint.
814  */
815 
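/* Construct an operand value of the form sign * 2^msb + delta */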
816 static inline s64 value(int msb, int delta, int sign)
817 {
818 	return sign * (1LL << msb) + delta;
819 }
820 
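/*
 * The emit callback is first invoked with a NULL insns pointer and must
 * then return its worst-case instruction count, used to size the buffer.
 * Subsequent calls emit the snippet for one (dst, src) operand pair and
 * return the number of instructions actually written.
 */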
821 static int __bpf_fill_pattern(struct bpf_test *self, void *arg,
822 			      int dbits, int sbits, int block1, int block2,
823 			      int (*emit)(struct bpf_test*, void*,
824 					  struct bpf_insn*, s64, s64))
825 {
826 	static const int sgn[][2] = {{1, 1}, {1, -1}, {-1, 1}, {-1, -1}};
827 	struct bpf_insn *insns;
828 	int di, si, bt, db, sb;
829 	int count, len, k;
830 	int extra = 1 + 2;
831 	int i = 0;
832 
833 	/* Total number of iterations for the two patterns */
834 	count = (dbits - 1) * (sbits - 1) * block1 * block1 * ARRAY_SIZE(sgn);
835 	count += (max(dbits, sbits) - 1) * block2 * block2 * ARRAY_SIZE(sgn);
836 
837 	/* Compute the maximum number of insns and allocate the buffer */
838 	len = extra + count * (*emit)(self, arg, NULL, 0, 0);
839 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
840 	if (!insns)
841 		return -ENOMEM;
842 
843 	/* Add head instruction(s) */
844 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
845 
846 	/*
847 	 * Pattern 1: all combinations of power-of-two magnitudes and sign,
848 	 * and with a block of contiguous values around each magnitude.
849 	 */
850 	for (di = 0; di < dbits - 1; di++)                 /* Dst magnitudes */
851 		for (si = 0; si < sbits - 1; si++)         /* Src magnitudes */
852 			for (k = 0; k < ARRAY_SIZE(sgn); k++) /* Sign combos */
853 				for (db = -(block1 / 2);
854 				     db < (block1 + 1) / 2; db++)
855 					for (sb = -(block1 / 2);
856 					     sb < (block1 + 1) / 2; sb++) {
857 						s64 dst, src;
858 
859 						dst = value(di, db, sgn[k][0]);
860 						src = value(si, sb, sgn[k][1]);
861 						i += (*emit)(self, arg,
862 							     &insns[i],
863 							     dst, src);
864 					}
865 	/*
866 	 * Pattern 2: all combinations for a larger block of values
867 	 * for each power-of-two magnitude and sign, where the magnitude is
868 	 * the same for both operands.
869 	 */
870 	for (bt = 0; bt < max(dbits, sbits) - 1; bt++)        /* Magnitude   */
871 		for (k = 0; k < ARRAY_SIZE(sgn); k++)         /* Sign combos */
872 			for (db = -(block2 / 2); db < (block2 + 1) / 2; db++)
873 				for (sb = -(block2 / 2);
874 				     sb < (block2 + 1) / 2; sb++) {
875 					s64 dst, src;
876 
877 					dst = value(bt % dbits, db, sgn[k][0]);
878 					src = value(bt % sbits, sb, sgn[k][1]);
879 					i += (*emit)(self, arg, &insns[i],
880 						     dst, src);
881 				}
882 
883 	/* Append tail instructions */
884 	insns[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
885 	insns[i++] = BPF_EXIT_INSN();
886 	BUG_ON(i > len);
887 
888 	self->u.ptr.insns = insns;
889 	self->u.ptr.len = i;
890 
891 	return 0;
892 }
893 
894 /*
895  * Block size parameters used in pattern tests below. Tune as needed to
896  * increase/reduce the number of combinations tested, see following examples.
897  *        block   values per operand MSB
898  * ----------------------------------------
899  *           0     none
900  *           1     (1 << MSB)
901  *           2     (1 << MSB) + [-1, 0]
902  *           3     (1 << MSB) + [-1, 0, 1]
903  */
904 #define PATTERN_BLOCK1 1
905 #define PATTERN_BLOCK2 5
906 
907 /* Number of test runs for a pattern test */
908 #define NR_PATTERN_RUNS 1
909 
910 /*
911  * Exhaustive tests of ALU operations for all combinations of power-of-two
912  * magnitudes of the operands, both for positive and negative values. The
913  * test is designed to verify e.g. the ALU and ALU64 operations for JITs that
914  * emit different code depending on the magnitude of the immediate value.
915  */
916 static int __bpf_emit_alu64_imm(struct bpf_test *self, void *arg,
917 				struct bpf_insn *insns, s64 dst, s64 imm)
918 {
919 	int op = *(int *)arg;
920 	int i = 0;
921 	u64 res;
922 
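	/*
	 * Size probe: two ld_imm64 (2 insns each) plus one ALU, one jump
	 * and one exit instruction gives a worst case of 7.
	 */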
923 	if (!insns)
924 		return 7;
925 
926 	if (__bpf_alu_result(&res, dst, (s32)imm, op)) {
927 		i += __bpf_ld_imm64(&insns[i], R1, dst);
928 		i += __bpf_ld_imm64(&insns[i], R3, res);
929 		insns[i++] = BPF_ALU64_IMM(op, R1, imm);
930 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
931 		insns[i++] = BPF_EXIT_INSN();
932 	}
933 
934 	return i;
935 }
936 
937 static int __bpf_emit_alu32_imm(struct bpf_test *self, void *arg,
938 				struct bpf_insn *insns, s64 dst, s64 imm)
939 {
940 	int op = *(int *)arg;
941 	int i = 0;
942 	u64 res;
943 
944 	if (!insns)
945 		return 7;
946 
947 	if (__bpf_alu_result(&res, (u32)dst, (u32)imm, op)) {
948 		i += __bpf_ld_imm64(&insns[i], R1, dst);
949 		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
950 		insns[i++] = BPF_ALU32_IMM(op, R1, imm);
951 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
952 		insns[i++] = BPF_EXIT_INSN();
953 	}
954 
955 	return i;
956 }
957 
958 static int __bpf_emit_alu64_reg(struct bpf_test *self, void *arg,
959 				struct bpf_insn *insns, s64 dst, s64 src)
960 {
961 	int op = *(int *)arg;
962 	int i = 0;
963 	u64 res;
964 
965 	if (!insns)
966 		return 9;
967 
968 	if (__bpf_alu_result(&res, dst, src, op)) {
969 		i += __bpf_ld_imm64(&insns[i], R1, dst);
970 		i += __bpf_ld_imm64(&insns[i], R2, src);
971 		i += __bpf_ld_imm64(&insns[i], R3, res);
972 		insns[i++] = BPF_ALU64_REG(op, R1, R2);
973 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
974 		insns[i++] = BPF_EXIT_INSN();
975 	}
976 
977 	return i;
978 }
979 
980 static int __bpf_emit_alu32_reg(struct bpf_test *self, void *arg,
981 				struct bpf_insn *insns, s64 dst, s64 src)
982 {
983 	int op = *(int *)arg;
984 	int i = 0;
985 	u64 res;
986 
987 	if (!insns)
988 		return 9;
989 
990 	if (__bpf_alu_result(&res, (u32)dst, (u32)src, op)) {
991 		i += __bpf_ld_imm64(&insns[i], R1, dst);
992 		i += __bpf_ld_imm64(&insns[i], R2, src);
993 		i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
994 		insns[i++] = BPF_ALU32_REG(op, R1, R2);
995 		insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
996 		insns[i++] = BPF_EXIT_INSN();
997 	}
998 
999 	return i;
1000 }
1001 
1002 static int __bpf_fill_alu64_imm(struct bpf_test *self, int op)
1003 {
1004 	return __bpf_fill_pattern(self, &op, 64, 32,
1005 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1006 				  &__bpf_emit_alu64_imm);
1007 }
1008 
1009 static int __bpf_fill_alu32_imm(struct bpf_test *self, int op)
1010 {
1011 	return __bpf_fill_pattern(self, &op, 64, 32,
1012 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1013 				  &__bpf_emit_alu32_imm);
1014 }
1015 
1016 static int __bpf_fill_alu64_reg(struct bpf_test *self, int op)
1017 {
1018 	return __bpf_fill_pattern(self, &op, 64, 64,
1019 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1020 				  &__bpf_emit_alu64_reg);
1021 }
1022 
1023 static int __bpf_fill_alu32_reg(struct bpf_test *self, int op)
1024 {
1025 	return __bpf_fill_pattern(self, &op, 64, 64,
1026 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
1027 				  &__bpf_emit_alu32_reg);
1028 }
1029 
1030 /* ALU64 immediate operations */
1031 static int bpf_fill_alu64_mov_imm(struct bpf_test *self)
1032 {
1033 	return __bpf_fill_alu64_imm(self, BPF_MOV);
1034 }
1035 
1036 static int bpf_fill_alu64_and_imm(struct bpf_test *self)
1037 {
1038 	return __bpf_fill_alu64_imm(self, BPF_AND);
1039 }
1040 
1041 static int bpf_fill_alu64_or_imm(struct bpf_test *self)
1042 {
1043 	return __bpf_fill_alu64_imm(self, BPF_OR);
1044 }
1045 
1046 static int bpf_fill_alu64_xor_imm(struct bpf_test *self)
1047 {
1048 	return __bpf_fill_alu64_imm(self, BPF_XOR);
1049 }
1050 
1051 static int bpf_fill_alu64_add_imm(struct bpf_test *self)
1052 {
1053 	return __bpf_fill_alu64_imm(self, BPF_ADD);
1054 }
1055 
1056 static int bpf_fill_alu64_sub_imm(struct bpf_test *self)
1057 {
1058 	return __bpf_fill_alu64_imm(self, BPF_SUB);
1059 }
1060 
1061 static int bpf_fill_alu64_mul_imm(struct bpf_test *self)
1062 {
1063 	return __bpf_fill_alu64_imm(self, BPF_MUL);
1064 }
1065 
1066 static int bpf_fill_alu64_div_imm(struct bpf_test *self)
1067 {
1068 	return __bpf_fill_alu64_imm(self, BPF_DIV);
1069 }
1070 
1071 static int bpf_fill_alu64_mod_imm(struct bpf_test *self)
1072 {
1073 	return __bpf_fill_alu64_imm(self, BPF_MOD);
1074 }
1075 
1076 /* ALU32 immediate operations */
1077 static int bpf_fill_alu32_mov_imm(struct bpf_test *self)
1078 {
1079 	return __bpf_fill_alu32_imm(self, BPF_MOV);
1080 }
1081 
1082 static int bpf_fill_alu32_and_imm(struct bpf_test *self)
1083 {
1084 	return __bpf_fill_alu32_imm(self, BPF_AND);
1085 }
1086 
1087 static int bpf_fill_alu32_or_imm(struct bpf_test *self)
1088 {
1089 	return __bpf_fill_alu32_imm(self, BPF_OR);
1090 }
1091 
1092 static int bpf_fill_alu32_xor_imm(struct bpf_test *self)
1093 {
1094 	return __bpf_fill_alu32_imm(self, BPF_XOR);
1095 }
1096 
1097 static int bpf_fill_alu32_add_imm(struct bpf_test *self)
1098 {
1099 	return __bpf_fill_alu32_imm(self, BPF_ADD);
1100 }
1101 
1102 static int bpf_fill_alu32_sub_imm(struct bpf_test *self)
1103 {
1104 	return __bpf_fill_alu32_imm(self, BPF_SUB);
1105 }
1106 
1107 static int bpf_fill_alu32_mul_imm(struct bpf_test *self)
1108 {
1109 	return __bpf_fill_alu32_imm(self, BPF_MUL);
1110 }
1111 
1112 static int bpf_fill_alu32_div_imm(struct bpf_test *self)
1113 {
1114 	return __bpf_fill_alu32_imm(self, BPF_DIV);
1115 }
1116 
1117 static int bpf_fill_alu32_mod_imm(struct bpf_test *self)
1118 {
1119 	return __bpf_fill_alu32_imm(self, BPF_MOD);
1120 }
1121 
1122 /* ALU64 register operations */
1123 static int bpf_fill_alu64_mov_reg(struct bpf_test *self)
1124 {
1125 	return __bpf_fill_alu64_reg(self, BPF_MOV);
1126 }
1127 
1128 static int bpf_fill_alu64_and_reg(struct bpf_test *self)
1129 {
1130 	return __bpf_fill_alu64_reg(self, BPF_AND);
1131 }
1132 
1133 static int bpf_fill_alu64_or_reg(struct bpf_test *self)
1134 {
1135 	return __bpf_fill_alu64_reg(self, BPF_OR);
1136 }
1137 
1138 static int bpf_fill_alu64_xor_reg(struct bpf_test *self)
1139 {
1140 	return __bpf_fill_alu64_reg(self, BPF_XOR);
1141 }
1142 
1143 static int bpf_fill_alu64_add_reg(struct bpf_test *self)
1144 {
1145 	return __bpf_fill_alu64_reg(self, BPF_ADD);
1146 }
1147 
1148 static int bpf_fill_alu64_sub_reg(struct bpf_test *self)
1149 {
1150 	return __bpf_fill_alu64_reg(self, BPF_SUB);
1151 }
1152 
1153 static int bpf_fill_alu64_mul_reg(struct bpf_test *self)
1154 {
1155 	return __bpf_fill_alu64_reg(self, BPF_MUL);
1156 }
1157 
1158 static int bpf_fill_alu64_div_reg(struct bpf_test *self)
1159 {
1160 	return __bpf_fill_alu64_reg(self, BPF_DIV);
1161 }
1162 
1163 static int bpf_fill_alu64_mod_reg(struct bpf_test *self)
1164 {
1165 	return __bpf_fill_alu64_reg(self, BPF_MOD);
1166 }
1167 
1168 /* ALU32 register operations */
1169 static int bpf_fill_alu32_mov_reg(struct bpf_test *self)
1170 {
1171 	return __bpf_fill_alu32_reg(self, BPF_MOV);
1172 }
1173 
1174 static int bpf_fill_alu32_and_reg(struct bpf_test *self)
1175 {
1176 	return __bpf_fill_alu32_reg(self, BPF_AND);
1177 }
1178 
1179 static int bpf_fill_alu32_or_reg(struct bpf_test *self)
1180 {
1181 	return __bpf_fill_alu32_reg(self, BPF_OR);
1182 }
1183 
1184 static int bpf_fill_alu32_xor_reg(struct bpf_test *self)
1185 {
1186 	return __bpf_fill_alu32_reg(self, BPF_XOR);
1187 }
1188 
1189 static int bpf_fill_alu32_add_reg(struct bpf_test *self)
1190 {
1191 	return __bpf_fill_alu32_reg(self, BPF_ADD);
1192 }
1193 
1194 static int bpf_fill_alu32_sub_reg(struct bpf_test *self)
1195 {
1196 	return __bpf_fill_alu32_reg(self, BPF_SUB);
1197 }
1198 
1199 static int bpf_fill_alu32_mul_reg(struct bpf_test *self)
1200 {
1201 	return __bpf_fill_alu32_reg(self, BPF_MUL);
1202 }
1203 
1204 static int bpf_fill_alu32_div_reg(struct bpf_test *self)
1205 {
1206 	return __bpf_fill_alu32_reg(self, BPF_DIV);
1207 }
1208 
1209 static int bpf_fill_alu32_mod_reg(struct bpf_test *self)
1210 {
1211 	return __bpf_fill_alu32_reg(self, BPF_MOD);
1212 }
1213 
1214 /*
1215  * Test JITs that implement complex ALU operations as function
1216  * calls, and must re-arrange operands for argument passing.
1217  */
1218 static int __bpf_fill_alu_imm_regs(struct bpf_test *self, u8 op, bool alu32)
1219 {
1220 	int len = 2 + 10 * 10;
1221 	struct bpf_insn *insns;
1222 	u64 dst, res;
1223 	int i = 0;
1224 	u32 imm;
1225 	int rd;
1226 
1227 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1228 	if (!insns)
1229 		return -ENOMEM;
1230 
1231 	/* Operand and result values according to operation */
1232 	if (alu32)
1233 		dst = 0x76543210U;
1234 	else
1235 		dst = 0x7edcba9876543210ULL;
1236 	imm = 0x01234567U;
1237 
1238 	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1239 		imm &= 31;
1240 
1241 	__bpf_alu_result(&res, dst, imm, op);
1242 
1243 	if (alu32)
1244 		res = (u32)res;
1245 
1246 	/* Check all operand registers */
1247 	for (rd = R0; rd <= R9; rd++) {
1248 		i += __bpf_ld_imm64(&insns[i], rd, dst);
1249 
1250 		if (alu32)
1251 			insns[i++] = BPF_ALU32_IMM(op, rd, imm);
1252 		else
1253 			insns[i++] = BPF_ALU64_IMM(op, rd, imm);
1254 
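		/*
		 * Check the lower 32 bits with a JMP32 compare, then shift
		 * down and check the upper half separately.
		 */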
1255 		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res, 2);
1256 		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1257 		insns[i++] = BPF_EXIT_INSN();
1258 
1259 		insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1260 		insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, res >> 32, 2);
1261 		insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1262 		insns[i++] = BPF_EXIT_INSN();
1263 	}
1264 
1265 	insns[i++] = BPF_MOV64_IMM(R0, 1);
1266 	insns[i++] = BPF_EXIT_INSN();
1267 
1268 	self->u.ptr.insns = insns;
1269 	self->u.ptr.len = len;
1270 	BUG_ON(i != len);
1271 
1272 	return 0;
1273 }
1274 
1275 /* ALU64 K registers */
1276 static int bpf_fill_alu64_mov_imm_regs(struct bpf_test *self)
1277 {
1278 	return __bpf_fill_alu_imm_regs(self, BPF_MOV, false);
1279 }
1280 
1281 static int bpf_fill_alu64_and_imm_regs(struct bpf_test *self)
1282 {
1283 	return __bpf_fill_alu_imm_regs(self, BPF_AND, false);
1284 }
1285 
1286 static int bpf_fill_alu64_or_imm_regs(struct bpf_test *self)
1287 {
1288 	return __bpf_fill_alu_imm_regs(self, BPF_OR, false);
1289 }
1290 
1291 static int bpf_fill_alu64_xor_imm_regs(struct bpf_test *self)
1292 {
1293 	return __bpf_fill_alu_imm_regs(self, BPF_XOR, false);
1294 }
1295 
1296 static int bpf_fill_alu64_lsh_imm_regs(struct bpf_test *self)
1297 {
1298 	return __bpf_fill_alu_imm_regs(self, BPF_LSH, false);
1299 }
1300 
1301 static int bpf_fill_alu64_rsh_imm_regs(struct bpf_test *self)
1302 {
1303 	return __bpf_fill_alu_imm_regs(self, BPF_RSH, false);
1304 }
1305 
1306 static int bpf_fill_alu64_arsh_imm_regs(struct bpf_test *self)
1307 {
1308 	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, false);
1309 }
1310 
1311 static int bpf_fill_alu64_add_imm_regs(struct bpf_test *self)
1312 {
1313 	return __bpf_fill_alu_imm_regs(self, BPF_ADD, false);
1314 }
1315 
1316 static int bpf_fill_alu64_sub_imm_regs(struct bpf_test *self)
1317 {
1318 	return __bpf_fill_alu_imm_regs(self, BPF_SUB, false);
1319 }
1320 
1321 static int bpf_fill_alu64_mul_imm_regs(struct bpf_test *self)
1322 {
1323 	return __bpf_fill_alu_imm_regs(self, BPF_MUL, false);
1324 }
1325 
1326 static int bpf_fill_alu64_div_imm_regs(struct bpf_test *self)
1327 {
1328 	return __bpf_fill_alu_imm_regs(self, BPF_DIV, false);
1329 }
1330 
1331 static int bpf_fill_alu64_mod_imm_regs(struct bpf_test *self)
1332 {
1333 	return __bpf_fill_alu_imm_regs(self, BPF_MOD, false);
1334 }
1335 
1336 /* ALU32 K registers */
1337 static int bpf_fill_alu32_mov_imm_regs(struct bpf_test *self)
1338 {
1339 	return __bpf_fill_alu_imm_regs(self, BPF_MOV, true);
1340 }
1341 
1342 static int bpf_fill_alu32_and_imm_regs(struct bpf_test *self)
1343 {
1344 	return __bpf_fill_alu_imm_regs(self, BPF_AND, true);
1345 }
1346 
1347 static int bpf_fill_alu32_or_imm_regs(struct bpf_test *self)
1348 {
1349 	return __bpf_fill_alu_imm_regs(self, BPF_OR, true);
1350 }
1351 
1352 static int bpf_fill_alu32_xor_imm_regs(struct bpf_test *self)
1353 {
1354 	return __bpf_fill_alu_imm_regs(self, BPF_XOR, true);
1355 }
1356 
1357 static int bpf_fill_alu32_lsh_imm_regs(struct bpf_test *self)
1358 {
1359 	return __bpf_fill_alu_imm_regs(self, BPF_LSH, true);
1360 }
1361 
1362 static int bpf_fill_alu32_rsh_imm_regs(struct bpf_test *self)
1363 {
1364 	return __bpf_fill_alu_imm_regs(self, BPF_RSH, true);
1365 }
1366 
1367 static int bpf_fill_alu32_arsh_imm_regs(struct bpf_test *self)
1368 {
1369 	return __bpf_fill_alu_imm_regs(self, BPF_ARSH, true);
1370 }
1371 
1372 static int bpf_fill_alu32_add_imm_regs(struct bpf_test *self)
1373 {
1374 	return __bpf_fill_alu_imm_regs(self, BPF_ADD, true);
1375 }
1376 
1377 static int bpf_fill_alu32_sub_imm_regs(struct bpf_test *self)
1378 {
1379 	return __bpf_fill_alu_imm_regs(self, BPF_SUB, true);
1380 }
1381 
1382 static int bpf_fill_alu32_mul_imm_regs(struct bpf_test *self)
1383 {
1384 	return __bpf_fill_alu_imm_regs(self, BPF_MUL, true);
1385 }
1386 
1387 static int bpf_fill_alu32_div_imm_regs(struct bpf_test *self)
1388 {
1389 	return __bpf_fill_alu_imm_regs(self, BPF_DIV, true);
1390 }
1391 
1392 static int bpf_fill_alu32_mod_imm_regs(struct bpf_test *self)
1393 {
1394 	return __bpf_fill_alu_imm_regs(self, BPF_MOD, true);
1395 }
1396 
1397 /*
1398  * Test JITs that implement complex ALU operations as function
1399  * calls, and must re-arrange operands for argument passing.
1400  */
1401 static int __bpf_fill_alu_reg_pairs(struct bpf_test *self, u8 op, bool alu32)
1402 {
1403 	int len = 2 + 10 * 10 * 12;
1404 	u64 dst, src, res, same;
1405 	struct bpf_insn *insns;
1406 	int rd, rs;
1407 	int i = 0;
1408 
1409 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
1410 	if (!insns)
1411 		return -ENOMEM;
1412 
1413 	/* Operand and result values according to operation */
1414 	if (alu32) {
1415 		dst = 0x76543210U;
1416 		src = 0x01234567U;
1417 	} else {
1418 		dst = 0x7edcba9876543210ULL;
1419 		src = 0x0123456789abcdefULL;
1420 	}
1421 
1422 	if (op == BPF_LSH || op == BPF_RSH || op == BPF_ARSH)
1423 		src &= 31;
1424 
1425 	__bpf_alu_result(&res, dst, src, op);
1426 	__bpf_alu_result(&same, src, src, op);
1427 
1428 	if (alu32) {
1429 		res = (u32)res;
1430 		same = (u32)same;
1431 	}
1432 
1433 	/* Check all combinations of operand registers */
1434 	for (rd = R0; rd <= R9; rd++) {
1435 		for (rs = R0; rs <= R9; rs++) {
1436 			u64 val = rd == rs ? same : res;
1437 
1438 			i += __bpf_ld_imm64(&insns[i], rd, dst);
1439 			i += __bpf_ld_imm64(&insns[i], rs, src);
1440 
1441 			if (alu32)
1442 				insns[i++] = BPF_ALU32_REG(op, rd, rs);
1443 			else
1444 				insns[i++] = BPF_ALU64_REG(op, rd, rs);
1445 
1446 			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val, 2);
1447 			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1448 			insns[i++] = BPF_EXIT_INSN();
1449 
1450 			insns[i++] = BPF_ALU64_IMM(BPF_RSH, rd, 32);
1451 			insns[i++] = BPF_JMP32_IMM(BPF_JEQ, rd, val >> 32, 2);
1452 			insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1453 			insns[i++] = BPF_EXIT_INSN();
1454 		}
1455 	}
1456 
1457 	insns[i++] = BPF_MOV64_IMM(R0, 1);
1458 	insns[i++] = BPF_EXIT_INSN();
1459 
1460 	self->u.ptr.insns = insns;
1461 	self->u.ptr.len = len;
1462 	BUG_ON(i != len);
1463 
1464 	return 0;
1465 }
1466 
1467 /* ALU64 X register combinations */
1468 static int bpf_fill_alu64_mov_reg_pairs(struct bpf_test *self)
1469 {
1470 	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, false);
1471 }
1472 
1473 static int bpf_fill_alu64_and_reg_pairs(struct bpf_test *self)
1474 {
1475 	return __bpf_fill_alu_reg_pairs(self, BPF_AND, false);
1476 }
1477 
1478 static int bpf_fill_alu64_or_reg_pairs(struct bpf_test *self)
1479 {
1480 	return __bpf_fill_alu_reg_pairs(self, BPF_OR, false);
1481 }
1482 
1483 static int bpf_fill_alu64_xor_reg_pairs(struct bpf_test *self)
1484 {
1485 	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, false);
1486 }
1487 
1488 static int bpf_fill_alu64_lsh_reg_pairs(struct bpf_test *self)
1489 {
1490 	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, false);
1491 }
1492 
1493 static int bpf_fill_alu64_rsh_reg_pairs(struct bpf_test *self)
1494 {
1495 	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, false);
1496 }
1497 
1498 static int bpf_fill_alu64_arsh_reg_pairs(struct bpf_test *self)
1499 {
1500 	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, false);
1501 }
1502 
1503 static int bpf_fill_alu64_add_reg_pairs(struct bpf_test *self)
1504 {
1505 	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, false);
1506 }
1507 
1508 static int bpf_fill_alu64_sub_reg_pairs(struct bpf_test *self)
1509 {
1510 	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, false);
1511 }
1512 
1513 static int bpf_fill_alu64_mul_reg_pairs(struct bpf_test *self)
1514 {
1515 	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, false);
1516 }
1517 
1518 static int bpf_fill_alu64_div_reg_pairs(struct bpf_test *self)
1519 {
1520 	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, false);
1521 }
1522 
1523 static int bpf_fill_alu64_mod_reg_pairs(struct bpf_test *self)
1524 {
1525 	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, false);
1526 }
1527 
1528 /* ALU32 X register combinations */
1529 static int bpf_fill_alu32_mov_reg_pairs(struct bpf_test *self)
1530 {
1531 	return __bpf_fill_alu_reg_pairs(self, BPF_MOV, true);
1532 }
1533 
1534 static int bpf_fill_alu32_and_reg_pairs(struct bpf_test *self)
1535 {
1536 	return __bpf_fill_alu_reg_pairs(self, BPF_AND, true);
1537 }
1538 
1539 static int bpf_fill_alu32_or_reg_pairs(struct bpf_test *self)
1540 {
1541 	return __bpf_fill_alu_reg_pairs(self, BPF_OR, true);
1542 }
1543 
1544 static int bpf_fill_alu32_xor_reg_pairs(struct bpf_test *self)
1545 {
1546 	return __bpf_fill_alu_reg_pairs(self, BPF_XOR, true);
1547 }
1548 
1549 static int bpf_fill_alu32_lsh_reg_pairs(struct bpf_test *self)
1550 {
1551 	return __bpf_fill_alu_reg_pairs(self, BPF_LSH, true);
1552 }
1553 
1554 static int bpf_fill_alu32_rsh_reg_pairs(struct bpf_test *self)
1555 {
1556 	return __bpf_fill_alu_reg_pairs(self, BPF_RSH, true);
1557 }
1558 
1559 static int bpf_fill_alu32_arsh_reg_pairs(struct bpf_test *self)
1560 {
1561 	return __bpf_fill_alu_reg_pairs(self, BPF_ARSH, true);
1562 }
1563 
1564 static int bpf_fill_alu32_add_reg_pairs(struct bpf_test *self)
1565 {
1566 	return __bpf_fill_alu_reg_pairs(self, BPF_ADD, true);
1567 }
1568 
1569 static int bpf_fill_alu32_sub_reg_pairs(struct bpf_test *self)
1570 {
1571 	return __bpf_fill_alu_reg_pairs(self, BPF_SUB, true);
1572 }
1573 
1574 static int bpf_fill_alu32_mul_reg_pairs(struct bpf_test *self)
1575 {
1576 	return __bpf_fill_alu_reg_pairs(self, BPF_MUL, true);
1577 }
1578 
1579 static int bpf_fill_alu32_div_reg_pairs(struct bpf_test *self)
1580 {
1581 	return __bpf_fill_alu_reg_pairs(self, BPF_DIV, true);
1582 }
1583 
1584 static int bpf_fill_alu32_mod_reg_pairs(struct bpf_test *self)
1585 {
1586 	return __bpf_fill_alu_reg_pairs(self, BPF_MOD, true);
1587 }
1588 
1589 /*
1590  * Exhaustive tests of atomic operations for all power-of-two operand
1591  * magnitudes, both for positive and negative values.
1592  */
1593 
1594 static int __bpf_emit_atomic64(struct bpf_test *self, void *arg,
1595 			       struct bpf_insn *insns, s64 dst, s64 src)
1596 {
1597 	int op = *(int *)arg;
1598 	u64 keep, fetch, res;
1599 	int i = 0;
1600 
1601 	if (!insns)
1602 		return 21;
1603 
1604 	switch (op) {
1605 	case BPF_XCHG:
1606 		res = src;
1607 		break;
1608 	default:
1609 		__bpf_alu_result(&res, dst, src, BPF_OP(op));
1610 	}
1611 
1612 	keep = 0x0123456789abcdefULL;
1613 	if (op & BPF_FETCH)
1614 		fetch = dst;
1615 	else
1616 		fetch = src;
1617 
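	/*
	 * R1/R3 check the value left in memory, R2/R4 check the value
	 * fetched into the source register for BPF_FETCH operations, and
	 * R0/R5 verify that R0 is not clobbered by the JITed sequence.
	 */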
1618 	i += __bpf_ld_imm64(&insns[i], R0, keep);
1619 	i += __bpf_ld_imm64(&insns[i], R1, dst);
1620 	i += __bpf_ld_imm64(&insns[i], R2, src);
1621 	i += __bpf_ld_imm64(&insns[i], R3, res);
1622 	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1623 	i += __bpf_ld_imm64(&insns[i], R5, keep);
1624 
1625 	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1626 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, op, R10, R2, -8);
1627 	insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
1628 
1629 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1630 	insns[i++] = BPF_EXIT_INSN();
1631 
1632 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1633 	insns[i++] = BPF_EXIT_INSN();
1634 
1635 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1636 	insns[i++] = BPF_EXIT_INSN();
1637 
1638 	return i;
1639 }
1640 
1641 static int __bpf_emit_atomic32(struct bpf_test *self, void *arg,
1642 			       struct bpf_insn *insns, s64 dst, s64 src)
1643 {
1644 	int op = *(int *)arg;
1645 	u64 keep, fetch, res;
1646 	int i = 0;
1647 
1648 	if (!insns)
1649 		return 21;
1650 
1651 	switch (op) {
1652 	case BPF_XCHG:
1653 		res = src;
1654 		break;
1655 	default:
1656 		__bpf_alu_result(&res, (u32)dst, (u32)src, BPF_OP(op));
1657 	}
1658 
1659 	keep = 0x0123456789abcdefULL;
1660 	if (op & BPF_FETCH)
1661 		fetch = (u32)dst;
1662 	else
1663 		fetch = src;
1664 
1665 	i += __bpf_ld_imm64(&insns[i], R0, keep);
1666 	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1667 	i += __bpf_ld_imm64(&insns[i], R2, src);
1668 	i += __bpf_ld_imm64(&insns[i], R3, (u32)res);
1669 	i += __bpf_ld_imm64(&insns[i], R4, fetch);
1670 	i += __bpf_ld_imm64(&insns[i], R5, keep);
1671 
1672 	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1673 	insns[i++] = BPF_ATOMIC_OP(BPF_W, op, R10, R2, -4);
1674 	insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
1675 
1676 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
1677 	insns[i++] = BPF_EXIT_INSN();
1678 
1679 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R4, 1);
1680 	insns[i++] = BPF_EXIT_INSN();
1681 
1682 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
1683 	insns[i++] = BPF_EXIT_INSN();
1684 
1685 	return i;
1686 }
1687 
1688 static int __bpf_emit_cmpxchg64(struct bpf_test *self, void *arg,
1689 				struct bpf_insn *insns, s64 dst, s64 src)
1690 {
1691 	int i = 0;
1692 
1693 	if (!insns)
1694 		return 23;
1695 
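	/*
	 * BPF_CMPXCHG compares R0 with the memory location: on a match it
	 * stores the source register, and in either case R0 receives the
	 * old memory value. Start with R0 = ~dst to force a mismatch.
	 */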
1696 	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1697 	i += __bpf_ld_imm64(&insns[i], R1, dst);
1698 	i += __bpf_ld_imm64(&insns[i], R2, src);
1699 
1700 	/* Result unsuccessful */
1701 	insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
1702 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1703 	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1704 
1705 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
1706 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1707 	insns[i++] = BPF_EXIT_INSN();
1708 
1709 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1710 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1711 	insns[i++] = BPF_EXIT_INSN();
1712 
1713 	/* Result successful */
1714 	insns[i++] = BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -8);
1715 	insns[i++] = BPF_LDX_MEM(BPF_DW, R3, R10, -8);
1716 
1717 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R2, R3, 2);
1718 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1719 	insns[i++] = BPF_EXIT_INSN();
1720 
1721 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1722 	insns[i++] = BPF_MOV64_IMM(R0, __LINE__);
1723 	insns[i++] = BPF_EXIT_INSN();
1724 
1725 	return i;
1726 }
1727 
1728 static int __bpf_emit_cmpxchg32(struct bpf_test *self, void *arg,
1729 				struct bpf_insn *insns, s64 dst, s64 src)
1730 {
1731 	int i = 0;
1732 
1733 	if (!insns)
1734 		return 27;
1735 
1736 	i += __bpf_ld_imm64(&insns[i], R0, ~dst);
1737 	i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
1738 	i += __bpf_ld_imm64(&insns[i], R2, src);
1739 
1740 	/* Result unsuccessful */
1741 	insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
1742 	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1743 	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1744 	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1745 
1746 	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
1747 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1748 	insns[i++] = BPF_EXIT_INSN();
1749 
1750 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R3, 2);
1751 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1752 	insns[i++] = BPF_EXIT_INSN();
1753 
1754 	/* Result successful */
1755 	i += __bpf_ld_imm64(&insns[i], R0, dst);
1756 	insns[i++] = BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R2, -4);
1757 	insns[i++] = BPF_ZEXT_REG(R0); /* Zext always inserted by verifier */
1758 	insns[i++] = BPF_LDX_MEM(BPF_W, R3, R10, -4);
1759 
1760 	insns[i++] = BPF_JMP32_REG(BPF_JEQ, R2, R3, 2);
1761 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1762 	insns[i++] = BPF_EXIT_INSN();
1763 
1764 	insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
1765 	insns[i++] = BPF_MOV32_IMM(R0, __LINE__);
1766 	insns[i++] = BPF_EXIT_INSN();
1767 
1768 	return i;
1769 }
1770 
1771 static int __bpf_fill_atomic64(struct bpf_test *self, int op)
1772 {
1773 	return __bpf_fill_pattern(self, &op, 64, 64,
1774 				  0, PATTERN_BLOCK2,
1775 				  &__bpf_emit_atomic64);
1776 }
1777 
1778 static int __bpf_fill_atomic32(struct bpf_test *self, int op)
1779 {
1780 	return __bpf_fill_pattern(self, &op, 64, 64,
1781 				  0, PATTERN_BLOCK2,
1782 				  &__bpf_emit_atomic32);
1783 }
1784 
1785 /* 64-bit atomic operations */
1786 static int bpf_fill_atomic64_add(struct bpf_test *self)
1787 {
1788 	return __bpf_fill_atomic64(self, BPF_ADD);
1789 }
1790 
1791 static int bpf_fill_atomic64_and(struct bpf_test *self)
1792 {
1793 	return __bpf_fill_atomic64(self, BPF_AND);
1794 }
1795 
1796 static int bpf_fill_atomic64_or(struct bpf_test *self)
1797 {
1798 	return __bpf_fill_atomic64(self, BPF_OR);
1799 }
1800 
1801 static int bpf_fill_atomic64_xor(struct bpf_test *self)
1802 {
1803 	return __bpf_fill_atomic64(self, BPF_XOR);
1804 }
1805 
1806 static int bpf_fill_atomic64_add_fetch(struct bpf_test *self)
1807 {
1808 	return __bpf_fill_atomic64(self, BPF_ADD | BPF_FETCH);
1809 }
1810 
1811 static int bpf_fill_atomic64_and_fetch(struct bpf_test *self)
1812 {
1813 	return __bpf_fill_atomic64(self, BPF_AND | BPF_FETCH);
1814 }
1815 
1816 static int bpf_fill_atomic64_or_fetch(struct bpf_test *self)
1817 {
1818 	return __bpf_fill_atomic64(self, BPF_OR | BPF_FETCH);
1819 }
1820 
1821 static int bpf_fill_atomic64_xor_fetch(struct bpf_test *self)
1822 {
1823 	return __bpf_fill_atomic64(self, BPF_XOR | BPF_FETCH);
1824 }
1825 
1826 static int bpf_fill_atomic64_xchg(struct bpf_test *self)
1827 {
1828 	return __bpf_fill_atomic64(self, BPF_XCHG);
1829 }
1830 
1831 static int bpf_fill_cmpxchg64(struct bpf_test *self)
1832 {
1833 	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1834 				  &__bpf_emit_cmpxchg64);
1835 }
1836 
1837 /* 32-bit atomic operations */
1838 static int bpf_fill_atomic32_add(struct bpf_test *self)
1839 {
1840 	return __bpf_fill_atomic32(self, BPF_ADD);
1841 }
1842 
1843 static int bpf_fill_atomic32_and(struct bpf_test *self)
1844 {
1845 	return __bpf_fill_atomic32(self, BPF_AND);
1846 }
1847 
1848 static int bpf_fill_atomic32_or(struct bpf_test *self)
1849 {
1850 	return __bpf_fill_atomic32(self, BPF_OR);
1851 }
1852 
1853 static int bpf_fill_atomic32_xor(struct bpf_test *self)
1854 {
1855 	return __bpf_fill_atomic32(self, BPF_XOR);
1856 }
1857 
1858 static int bpf_fill_atomic32_add_fetch(struct bpf_test *self)
1859 {
1860 	return __bpf_fill_atomic32(self, BPF_ADD | BPF_FETCH);
1861 }
1862 
1863 static int bpf_fill_atomic32_and_fetch(struct bpf_test *self)
1864 {
1865 	return __bpf_fill_atomic32(self, BPF_AND | BPF_FETCH);
1866 }
1867 
1868 static int bpf_fill_atomic32_or_fetch(struct bpf_test *self)
1869 {
1870 	return __bpf_fill_atomic32(self, BPF_OR | BPF_FETCH);
1871 }
1872 
1873 static int bpf_fill_atomic32_xor_fetch(struct bpf_test *self)
1874 {
1875 	return __bpf_fill_atomic32(self, BPF_XOR | BPF_FETCH);
1876 }
1877 
1878 static int bpf_fill_atomic32_xchg(struct bpf_test *self)
1879 {
1880 	return __bpf_fill_atomic32(self, BPF_XCHG);
1881 }
1882 
1883 static int bpf_fill_cmpxchg32(struct bpf_test *self)
1884 {
1885 	return __bpf_fill_pattern(self, NULL, 64, 64, 0, PATTERN_BLOCK2,
1886 				  &__bpf_emit_cmpxchg32);
1887 }
1888 
1889 /*
1890  * Test JITs that implement ATOMIC operations as function calls or other
1891  * primitives and therefore must re-arrange operands for argument passing.
1892  */
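/*
 * For each (rd, rs) register pair the emitted snippet is, in sketch form
 * (the exact checks depend on the operation and on register aliasing):
 *
 *	R0 = mem; *(u64 *)(R10 - 8) = R0     seed the memory word
 *	R0 = ~mem; rs = upd; rd = R10        rd is the address operand
 *	BPF_ATOMIC_OP(width, op, rd, rs, -8)
 *	verify R0, rs, rd and the memory word, exiting with R0 = __LINE__
 *	on the first mismatch
 */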
1893 static int __bpf_fill_atomic_reg_pairs(struct bpf_test *self, u8 width, u8 op)
1894 {
1895 	struct bpf_insn *insn;
1896 	int len = 2 + 34 * 10 * 10;
1897 	u64 mem, upd, res;
1898 	int rd, rs, i = 0;
1899 
1900 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
1901 	if (!insn)
1902 		return -ENOMEM;
1903 
1904 	/* Operand and memory values */
1905 	if (width == BPF_DW) {
1906 		mem = 0x0123456789abcdefULL;
1907 		upd = 0xfedcba9876543210ULL;
1908 	} else { /* BPF_W */
1909 		mem = 0x01234567U;
1910 		upd = 0x76543210U;
1911 	}
1912 
1913 	/* Memory updated according to operation */
1914 	switch (op) {
1915 	case BPF_XCHG:
1916 		res = upd;
1917 		break;
1918 	case BPF_CMPXCHG:
1919 		res = mem;
1920 		break;
1921 	default:
1922 		__bpf_alu_result(&res, mem, upd, BPF_OP(op));
1923 	}
1924 
1925 	/* Test all operand registers */
1926 	for (rd = R0; rd <= R9; rd++) {
1927 		for (rs = R0; rs <= R9; rs++) {
1928 			u64 cmp, src;
1929 
1930 			/* Initialize value in memory */
1931 			i += __bpf_ld_imm64(&insn[i], R0, mem);
1932 			insn[i++] = BPF_STX_MEM(width, R10, R0, -8);
1933 
1934 			/* Initialize registers in order */
1935 			i += __bpf_ld_imm64(&insn[i], R0, ~mem);
1936 			i += __bpf_ld_imm64(&insn[i], rs, upd);
1937 			insn[i++] = BPF_MOV64_REG(rd, R10);
1938 
1939 			/* Perform atomic operation */
1940 			insn[i++] = BPF_ATOMIC_OP(width, op, rd, rs, -8);
1941 			if (op == BPF_CMPXCHG && width == BPF_W)
1942 				insn[i++] = BPF_ZEXT_REG(R0);
1943 
1944 			/* Check R0 register value */
1945 			if (op == BPF_CMPXCHG)
1946 				cmp = mem;  /* Expect value from memory */
1947 			else if (R0 == rd || R0 == rs)
1948 				cmp = 0;    /* Aliased, checked below */
1949 			else
1950 				cmp = ~mem; /* Expect value to be preserved */
1951 			if (cmp) {
1952 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1953 							   (u32)cmp, 2);
1954 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1955 				insn[i++] = BPF_EXIT_INSN();
1956 				insn[i++] = BPF_ALU64_IMM(BPF_RSH, R0, 32);
1957 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, R0,
1958 							   cmp >> 32, 2);
1959 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1960 				insn[i++] = BPF_EXIT_INSN();
1961 			}
1962 
1963 			/* Check source register value */
1964 			if (rs == R0 && op == BPF_CMPXCHG)
1965 				src = 0;   /* Aliased with R0, checked above */
1966 			else if (rs == rd && (op == BPF_CMPXCHG ||
1967 					      !(op & BPF_FETCH)))
1968 				src = 0;   /* Aliased with rd, checked below */
1969 			else if (op == BPF_CMPXCHG)
1970 				src = upd; /* Expect value to be preserved */
1971 			else if (op & BPF_FETCH)
1972 				src = mem; /* Expect fetched value from mem */
1973 			else /* no fetch */
1974 				src = upd; /* Expect value to be preserved */
1975 			if (src) {
1976 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1977 							   (u32)src, 2);
1978 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1979 				insn[i++] = BPF_EXIT_INSN();
1980 				insn[i++] = BPF_ALU64_IMM(BPF_RSH, rs, 32);
1981 				insn[i++] = BPF_JMP32_IMM(BPF_JEQ, rs,
1982 							   src >> 32, 2);
1983 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1984 				insn[i++] = BPF_EXIT_INSN();
1985 			}
1986 
1987 			/* Check destination register value */
1988 			if (!(rd == R0 && op == BPF_CMPXCHG) &&
1989 			    !(rd == rs && (op & BPF_FETCH))) {
1990 				insn[i++] = BPF_JMP_REG(BPF_JEQ, rd, R10, 2);
1991 				insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
1992 				insn[i++] = BPF_EXIT_INSN();
1993 			}
1994 
1995 			/* Check value in memory */
1996 			if (rs != rd) {                  /* No aliasing */
1997 				i += __bpf_ld_imm64(&insn[i], R1, res);
1998 			} else if (op == BPF_XCHG) {     /* Aliased, XCHG */
1999 				insn[i++] = BPF_MOV64_REG(R1, R10);
2000 			} else if (op == BPF_CMPXCHG) {  /* Aliased, CMPXCHG */
2001 				i += __bpf_ld_imm64(&insn[i], R1, mem);
2002 			} else {                        /* Aliased, ALU oper */
2003 				i += __bpf_ld_imm64(&insn[i], R1, mem);
2004 				insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
2005 			}
2006 
2007 			insn[i++] = BPF_LDX_MEM(width, R0, R10, -8);
2008 			if (width == BPF_DW)
2009 				insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
2010 			else /* width == BPF_W */
2011 				insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
2012 			insn[i++] = BPF_MOV32_IMM(R0, __LINE__);
2013 			insn[i++] = BPF_EXIT_INSN();
2014 		}
2015 	}
2016 
2017 	insn[i++] = BPF_MOV64_IMM(R0, 1);
2018 	insn[i++] = BPF_EXIT_INSN();
2019 
2020 	self->u.ptr.insns = insn;
2021 	self->u.ptr.len = i;
2022 	BUG_ON(i > len);
2023 
2024 	return 0;
2025 }
2026 
2027 /* 64-bit atomic register tests */
2028 static int bpf_fill_atomic64_add_reg_pairs(struct bpf_test *self)
2029 {
2030 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD);
2031 }
2032 
2033 static int bpf_fill_atomic64_and_reg_pairs(struct bpf_test *self)
2034 {
2035 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND);
2036 }
2037 
2038 static int bpf_fill_atomic64_or_reg_pairs(struct bpf_test *self)
2039 {
2040 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR);
2041 }
2042 
2043 static int bpf_fill_atomic64_xor_reg_pairs(struct bpf_test *self)
2044 {
2045 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR);
2046 }
2047 
2048 static int bpf_fill_atomic64_add_fetch_reg_pairs(struct bpf_test *self)
2049 {
2050 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_ADD | BPF_FETCH);
2051 }
2052 
2053 static int bpf_fill_atomic64_and_fetch_reg_pairs(struct bpf_test *self)
2054 {
2055 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_AND | BPF_FETCH);
2056 }
2057 
2058 static int bpf_fill_atomic64_or_fetch_reg_pairs(struct bpf_test *self)
2059 {
2060 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_OR | BPF_FETCH);
2061 }
2062 
2063 static int bpf_fill_atomic64_xor_fetch_reg_pairs(struct bpf_test *self)
2064 {
2065 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XOR | BPF_FETCH);
2066 }
2067 
2068 static int bpf_fill_atomic64_xchg_reg_pairs(struct bpf_test *self)
2069 {
2070 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_XCHG);
2071 }
2072 
2073 static int bpf_fill_atomic64_cmpxchg_reg_pairs(struct bpf_test *self)
2074 {
2075 	return __bpf_fill_atomic_reg_pairs(self, BPF_DW, BPF_CMPXCHG);
2076 }
2077 
2078 /* 32-bit atomic register tests */
2079 static int bpf_fill_atomic32_add_reg_pairs(struct bpf_test *self)
2080 {
2081 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD);
2082 }
2083 
2084 static int bpf_fill_atomic32_and_reg_pairs(struct bpf_test *self)
2085 {
2086 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND);
2087 }
2088 
2089 static int bpf_fill_atomic32_or_reg_pairs(struct bpf_test *self)
2090 {
2091 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR);
2092 }
2093 
2094 static int bpf_fill_atomic32_xor_reg_pairs(struct bpf_test *self)
2095 {
2096 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR);
2097 }
2098 
2099 static int bpf_fill_atomic32_add_fetch_reg_pairs(struct bpf_test *self)
2100 {
2101 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_ADD | BPF_FETCH);
2102 }
2103 
2104 static int bpf_fill_atomic32_and_fetch_reg_pairs(struct bpf_test *self)
2105 {
2106 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_AND | BPF_FETCH);
2107 }
2108 
2109 static int bpf_fill_atomic32_or_fetch_reg_pairs(struct bpf_test *self)
2110 {
2111 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_OR | BPF_FETCH);
2112 }
2113 
2114 static int bpf_fill_atomic32_xor_fetch_reg_pairs(struct bpf_test *self)
2115 {
2116 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XOR | BPF_FETCH);
2117 }
2118 
2119 static int bpf_fill_atomic32_xchg_reg_pairs(struct bpf_test *self)
2120 {
2121 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_XCHG);
2122 }
2123 
2124 static int bpf_fill_atomic32_cmpxchg_reg_pairs(struct bpf_test *self)
2125 {
2126 	return __bpf_fill_atomic_reg_pairs(self, BPF_W, BPF_CMPXCHG);
2127 }
2128 
2129 /*
2130  * Test the two-instruction 64-bit immediate load operation for all
2131  * power-of-two magnitudes of the immediate operand. For each MSB, a block
2132  * of immediate values centered around the power-of-two MSB is tested,
2133  * both for positive and negative values. The test is designed to verify
2134  * the operation for JITs that emit different code depending on the magnitude
2135  * of the immediate value. This is often the case if the native instruction
2136  * immediate field width is narrower than 32 bits.
2137  */
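/*
 * Each immediate is verified by loading it with the two-instruction
 * BPF_LD_IMM64 form and comparing against a reference assembled from two
 * 32-bit halves, roughly:
 *
 *	BPF_LD_IMM64(R1, imm)
 *	BPF_ALU32_IMM(BPF_MOV, R2, (u32)imm)
 *	BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32))
 *	BPF_ALU64_IMM(BPF_LSH, R3, 32)
 *	BPF_ALU64_REG(BPF_OR, R2, R3)
 *	BPF_JMP_REG(BPF_JEQ, R1, R2, 1)
 *	BPF_EXIT_INSN()
 */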
2138 static int bpf_fill_ld_imm64_magn(struct bpf_test *self)
2139 {
2140 	int block = 64; /* Increase for more tests per MSB position */
2141 	int len = 3 + 8 * 63 * block * 2;
2142 	struct bpf_insn *insn;
2143 	int bit, adj, sign;
2144 	int i = 0;
2145 
2146 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2147 	if (!insn)
2148 		return -ENOMEM;
2149 
2150 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2151 
2152 	for (bit = 0; bit <= 62; bit++) {
2153 		for (adj = -block / 2; adj < block / 2; adj++) {
2154 			for (sign = -1; sign <= 1; sign += 2) {
2155 				s64 imm = sign * ((1LL << bit) + adj);
2156 
2157 				/* Perform operation */
2158 				i += __bpf_ld_imm64(&insn[i], R1, imm);
2159 
2160 				/* Load reference */
2161 				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2162 				insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3,
2163 							  (u32)(imm >> 32));
2164 				insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2165 				insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2166 
2167 				/* Check result */
2168 				insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2169 				insn[i++] = BPF_EXIT_INSN();
2170 			}
2171 		}
2172 	}
2173 
2174 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2175 	insn[i++] = BPF_EXIT_INSN();
2176 
2177 	self->u.ptr.insns = insn;
2178 	self->u.ptr.len = len;
2179 	BUG_ON(i != len);
2180 
2181 	return 0;
2182 }
2183 
2184 /*
2185  * Test the two-instruction 64-bit immediate load operation for different
2186  * combinations of bytes. Each byte in the 64-bit word is constructed as
2187  * (base & mask) | (rand() & ~mask), where rand() is a deterministic LCG.
2188  * All 256 per-byte selections between (base1, mask1) and (base2, mask2) are tested.
2189  */
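/*
 * For example, with the "checker" parameters below (base1 = 0, mask1 = 0xff,
 * base2 = 0xff, mask2 = 0xff), pattern 0x0f selects a zero byte for the four
 * most significant positions and 0xff for the rest, giving the immediate
 * 0x00000000ffffffff. The other variants leave the bits outside the mask to
 * be filled from the LCG.
 */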
2190 static int __bpf_fill_ld_imm64_bytes(struct bpf_test *self,
2191 				     u8 base1, u8 mask1,
2192 				     u8 base2, u8 mask2)
2193 {
2194 	struct bpf_insn *insn;
2195 	int len = 3 + 8 * BIT(8);
2196 	int pattern, index;
2197 	u32 rand = 1;
2198 	int i = 0;
2199 
2200 	insn = kmalloc_array(len, sizeof(*insn), GFP_KERNEL);
2201 	if (!insn)
2202 		return -ENOMEM;
2203 
2204 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2205 
2206 	for (pattern = 0; pattern < BIT(8); pattern++) {
2207 		u64 imm = 0;
2208 
2209 		for (index = 0; index < 8; index++) {
2210 			int byte;
2211 
2212 			if (pattern & BIT(index))
2213 				byte = (base1 & mask1) | (rand & ~mask1);
2214 			else
2215 				byte = (base2 & mask2) | (rand & ~mask2);
2216 			imm = (imm << 8) | byte;
2217 		}
2218 
2219 		/* Update our LCG */
2220 		rand = rand * 1664525 + 1013904223;
2221 
2222 		/* Perform operation */
2223 		i += __bpf_ld_imm64(&insn[i], R1, imm);
2224 
2225 		/* Load reference */
2226 		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R2, imm);
2227 		insn[i++] = BPF_ALU32_IMM(BPF_MOV, R3, (u32)(imm >> 32));
2228 		insn[i++] = BPF_ALU64_IMM(BPF_LSH, R3, 32);
2229 		insn[i++] = BPF_ALU64_REG(BPF_OR, R2, R3);
2230 
2231 		/* Check result */
2232 		insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
2233 		insn[i++] = BPF_EXIT_INSN();
2234 	}
2235 
2236 	insn[i++] = BPF_ALU64_IMM(BPF_MOV, R0, 1);
2237 	insn[i++] = BPF_EXIT_INSN();
2238 
2239 	self->u.ptr.insns = insn;
2240 	self->u.ptr.len = len;
2241 	BUG_ON(i != len);
2242 
2243 	return 0;
2244 }
2245 
2246 static int bpf_fill_ld_imm64_checker(struct bpf_test *self)
2247 {
2248 	return __bpf_fill_ld_imm64_bytes(self, 0, 0xff, 0xff, 0xff);
2249 }
2250 
2251 static int bpf_fill_ld_imm64_pos_neg(struct bpf_test *self)
2252 {
2253 	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0x80, 0x80);
2254 }
2255 
2256 static int bpf_fill_ld_imm64_pos_zero(struct bpf_test *self)
2257 {
2258 	return __bpf_fill_ld_imm64_bytes(self, 1, 0x81, 0, 0xff);
2259 }
2260 
2261 static int bpf_fill_ld_imm64_neg_zero(struct bpf_test *self)
2262 {
2263 	return __bpf_fill_ld_imm64_bytes(self, 0x80, 0x80, 0, 0xff);
2264 }
2265 
2266 /*
2267  * Exhaustive tests of JMP operations for all combinations of power-of-two
2268  * magnitudes of the operands, both for positive and negative values. The
2269  * test is designed to verify e.g. the JMP and JMP32 operations for JITs that
2270  * emit different code depending on the magnitude of the immediate value.
2271  */
2272 
2273 static bool __bpf_match_jmp_cond(s64 v1, s64 v2, u8 op)
2274 {
2275 	switch (op) {
2276 	case BPF_JSET:
2277 		return !!(v1 & v2);
2278 	case BPF_JEQ:
2279 		return v1 == v2;
2280 	case BPF_JNE:
2281 		return v1 != v2;
2282 	case BPF_JGT:
2283 		return (u64)v1 > (u64)v2;
2284 	case BPF_JGE:
2285 		return (u64)v1 >= (u64)v2;
2286 	case BPF_JLT:
2287 		return (u64)v1 < (u64)v2;
2288 	case BPF_JLE:
2289 		return (u64)v1 <= (u64)v2;
2290 	case BPF_JSGT:
2291 		return v1 > v2;
2292 	case BPF_JSGE:
2293 		return v1 >= v2;
2294 	case BPF_JSLT:
2295 		return v1 < v2;
2296 	case BPF_JSLE:
2297 		return v1 <= v2;
2298 	}
2299 	return false;
2300 }
2301 
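/*
 * The emit helpers below generate one verification fragment per operand
 * pair: load the operands, perform the jump under test and, depending on
 * whether the condition is expected to match, either skip or reach an
 * EXIT, so a JIT that takes the wrong branch terminates the program early.
 * In sketch form for the immediate variants (the 64-bit one additionally
 * presets R0 with the expected match value):
 *
 *	BPF_LD_IMM64(R1, dst)
 *	BPF_JMP_IMM(op, R1, imm, 1)
 *	BPF_JMP_IMM(BPF_JA, 0, 0, 1)	only emitted when no match is expected
 *	BPF_EXIT_INSN()
 */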
2302 static int __bpf_emit_jmp_imm(struct bpf_test *self, void *arg,
2303 			      struct bpf_insn *insns, s64 dst, s64 imm)
2304 {
2305 	int op = *(int *)arg;
2306 
2307 	if (insns) {
2308 		bool match = __bpf_match_jmp_cond(dst, (s32)imm, op);
2309 		int i = 0;
2310 
2311 		insns[i++] = BPF_ALU32_IMM(BPF_MOV, R0, match);
2312 
2313 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2314 		insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
2315 		if (!match)
2316 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2317 		insns[i++] = BPF_EXIT_INSN();
2318 
2319 		return i;
2320 	}
2321 
2322 	return 5 + 1;
2323 }
2324 
2325 static int __bpf_emit_jmp32_imm(struct bpf_test *self, void *arg,
2326 				struct bpf_insn *insns, s64 dst, s64 imm)
2327 {
2328 	int op = *(int *)arg;
2329 
2330 	if (insns) {
2331 		bool match = __bpf_match_jmp_cond((s32)dst, (s32)imm, op);
2332 		int i = 0;
2333 
2334 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2335 		insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
2336 		if (!match)
2337 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2338 		insns[i++] = BPF_EXIT_INSN();
2339 
2340 		return i;
2341 	}
2342 
2343 	return 5;
2344 }
2345 
2346 static int __bpf_emit_jmp_reg(struct bpf_test *self, void *arg,
2347 			      struct bpf_insn *insns, s64 dst, s64 src)
2348 {
2349 	int op = *(int *)arg;
2350 
2351 	if (insns) {
2352 		bool match = __bpf_match_jmp_cond(dst, src, op);
2353 		int i = 0;
2354 
2355 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2356 		i += __bpf_ld_imm64(&insns[i], R2, src);
2357 		insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
2358 		if (!match)
2359 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2360 		insns[i++] = BPF_EXIT_INSN();
2361 
2362 		return i;
2363 	}
2364 
2365 	return 7;
2366 }
2367 
2368 static int __bpf_emit_jmp32_reg(struct bpf_test *self, void *arg,
2369 				struct bpf_insn *insns, s64 dst, s64 src)
2370 {
2371 	int op = *(int *)arg;
2372 
2373 	if (insns) {
2374 		bool match = __bpf_match_jmp_cond((s32)dst, (s32)src, op);
2375 		int i = 0;
2376 
2377 		i += __bpf_ld_imm64(&insns[i], R1, dst);
2378 		i += __bpf_ld_imm64(&insns[i], R2, src);
2379 		insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
2380 		if (!match)
2381 			insns[i++] = BPF_JMP_IMM(BPF_JA, 0, 0, 1);
2382 		insns[i++] = BPF_EXIT_INSN();
2383 
2384 		return i;
2385 	}
2386 
2387 	return 7;
2388 }
2389 
2390 static int __bpf_fill_jmp_imm(struct bpf_test *self, int op)
2391 {
2392 	return __bpf_fill_pattern(self, &op, 64, 32,
2393 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2394 				  &__bpf_emit_jmp_imm);
2395 }
2396 
2397 static int __bpf_fill_jmp32_imm(struct bpf_test *self, int op)
2398 {
2399 	return __bpf_fill_pattern(self, &op, 64, 32,
2400 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2401 				  &__bpf_emit_jmp32_imm);
2402 }
2403 
2404 static int __bpf_fill_jmp_reg(struct bpf_test *self, int op)
2405 {
2406 	return __bpf_fill_pattern(self, &op, 64, 64,
2407 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2408 				  &__bpf_emit_jmp_reg);
2409 }
2410 
2411 static int __bpf_fill_jmp32_reg(struct bpf_test *self, int op)
2412 {
2413 	return __bpf_fill_pattern(self, &op, 64, 64,
2414 				  PATTERN_BLOCK1, PATTERN_BLOCK2,
2415 				  &__bpf_emit_jmp32_reg);
2416 }
2417 
2418 /* JMP immediate tests */
2419 static int bpf_fill_jmp_jset_imm(struct bpf_test *self)
2420 {
2421 	return __bpf_fill_jmp_imm(self, BPF_JSET);
2422 }
2423 
2424 static int bpf_fill_jmp_jeq_imm(struct bpf_test *self)
2425 {
2426 	return __bpf_fill_jmp_imm(self, BPF_JEQ);
2427 }
2428 
2429 static int bpf_fill_jmp_jne_imm(struct bpf_test *self)
2430 {
2431 	return __bpf_fill_jmp_imm(self, BPF_JNE);
2432 }
2433 
2434 static int bpf_fill_jmp_jgt_imm(struct bpf_test *self)
2435 {
2436 	return __bpf_fill_jmp_imm(self, BPF_JGT);
2437 }
2438 
2439 static int bpf_fill_jmp_jge_imm(struct bpf_test *self)
2440 {
2441 	return __bpf_fill_jmp_imm(self, BPF_JGE);
2442 }
2443 
2444 static int bpf_fill_jmp_jlt_imm(struct bpf_test *self)
2445 {
2446 	return __bpf_fill_jmp_imm(self, BPF_JLT);
2447 }
2448 
2449 static int bpf_fill_jmp_jle_imm(struct bpf_test *self)
2450 {
2451 	return __bpf_fill_jmp_imm(self, BPF_JLE);
2452 }
2453 
2454 static int bpf_fill_jmp_jsgt_imm(struct bpf_test *self)
2455 {
2456 	return __bpf_fill_jmp_imm(self, BPF_JSGT);
2457 }
2458 
2459 static int bpf_fill_jmp_jsge_imm(struct bpf_test *self)
2460 {
2461 	return __bpf_fill_jmp_imm(self, BPF_JSGE);
2462 }
2463 
2464 static int bpf_fill_jmp_jslt_imm(struct bpf_test *self)
2465 {
2466 	return __bpf_fill_jmp_imm(self, BPF_JSLT);
2467 }
2468 
2469 static int bpf_fill_jmp_jsle_imm(struct bpf_test *self)
2470 {
2471 	return __bpf_fill_jmp_imm(self, BPF_JSLE);
2472 }
2473 
2474 /* JMP32 immediate tests */
2475 static int bpf_fill_jmp32_jset_imm(struct bpf_test *self)
2476 {
2477 	return __bpf_fill_jmp32_imm(self, BPF_JSET);
2478 }
2479 
2480 static int bpf_fill_jmp32_jeq_imm(struct bpf_test *self)
2481 {
2482 	return __bpf_fill_jmp32_imm(self, BPF_JEQ);
2483 }
2484 
2485 static int bpf_fill_jmp32_jne_imm(struct bpf_test *self)
2486 {
2487 	return __bpf_fill_jmp32_imm(self, BPF_JNE);
2488 }
2489 
2490 static int bpf_fill_jmp32_jgt_imm(struct bpf_test *self)
2491 {
2492 	return __bpf_fill_jmp32_imm(self, BPF_JGT);
2493 }
2494 
2495 static int bpf_fill_jmp32_jge_imm(struct bpf_test *self)
2496 {
2497 	return __bpf_fill_jmp32_imm(self, BPF_JGE);
2498 }
2499 
2500 static int bpf_fill_jmp32_jlt_imm(struct bpf_test *self)
2501 {
2502 	return __bpf_fill_jmp32_imm(self, BPF_JLT);
2503 }
2504 
2505 static int bpf_fill_jmp32_jle_imm(struct bpf_test *self)
2506 {
2507 	return __bpf_fill_jmp32_imm(self, BPF_JLE);
2508 }
2509 
2510 static int bpf_fill_jmp32_jsgt_imm(struct bpf_test *self)
2511 {
2512 	return __bpf_fill_jmp32_imm(self, BPF_JSGT);
2513 }
2514 
2515 static int bpf_fill_jmp32_jsge_imm(struct bpf_test *self)
2516 {
2517 	return __bpf_fill_jmp32_imm(self, BPF_JSGE);
2518 }
2519 
2520 static int bpf_fill_jmp32_jslt_imm(struct bpf_test *self)
2521 {
2522 	return __bpf_fill_jmp32_imm(self, BPF_JSLT);
2523 }
2524 
2525 static int bpf_fill_jmp32_jsle_imm(struct bpf_test *self)
2526 {
2527 	return __bpf_fill_jmp32_imm(self, BPF_JSLE);
2528 }
2529 
2530 /* JMP register tests */
2531 static int bpf_fill_jmp_jset_reg(struct bpf_test *self)
2532 {
2533 	return __bpf_fill_jmp_reg(self, BPF_JSET);
2534 }
2535 
2536 static int bpf_fill_jmp_jeq_reg(struct bpf_test *self)
2537 {
2538 	return __bpf_fill_jmp_reg(self, BPF_JEQ);
2539 }
2540 
2541 static int bpf_fill_jmp_jne_reg(struct bpf_test *self)
2542 {
2543 	return __bpf_fill_jmp_reg(self, BPF_JNE);
2544 }
2545 
2546 static int bpf_fill_jmp_jgt_reg(struct bpf_test *self)
2547 {
2548 	return __bpf_fill_jmp_reg(self, BPF_JGT);
2549 }
2550 
2551 static int bpf_fill_jmp_jge_reg(struct bpf_test *self)
2552 {
2553 	return __bpf_fill_jmp_reg(self, BPF_JGE);
2554 }
2555 
2556 static int bpf_fill_jmp_jlt_reg(struct bpf_test *self)
2557 {
2558 	return __bpf_fill_jmp_reg(self, BPF_JLT);
2559 }
2560 
2561 static int bpf_fill_jmp_jle_reg(struct bpf_test *self)
2562 {
2563 	return __bpf_fill_jmp_reg(self, BPF_JLE);
2564 }
2565 
2566 static int bpf_fill_jmp_jsgt_reg(struct bpf_test *self)
2567 {
2568 	return __bpf_fill_jmp_reg(self, BPF_JSGT);
2569 }
2570 
2571 static int bpf_fill_jmp_jsge_reg(struct bpf_test *self)
2572 {
2573 	return __bpf_fill_jmp_reg(self, BPF_JSGE);
2574 }
2575 
2576 static int bpf_fill_jmp_jslt_reg(struct bpf_test *self)
2577 {
2578 	return __bpf_fill_jmp_reg(self, BPF_JSLT);
2579 }
2580 
2581 static int bpf_fill_jmp_jsle_reg(struct bpf_test *self)
2582 {
2583 	return __bpf_fill_jmp_reg(self, BPF_JSLE);
2584 }
2585 
2586 /* JMP32 register tests */
2587 static int bpf_fill_jmp32_jset_reg(struct bpf_test *self)
2588 {
2589 	return __bpf_fill_jmp32_reg(self, BPF_JSET);
2590 }
2591 
2592 static int bpf_fill_jmp32_jeq_reg(struct bpf_test *self)
2593 {
2594 	return __bpf_fill_jmp32_reg(self, BPF_JEQ);
2595 }
2596 
2597 static int bpf_fill_jmp32_jne_reg(struct bpf_test *self)
2598 {
2599 	return __bpf_fill_jmp32_reg(self, BPF_JNE);
2600 }
2601 
2602 static int bpf_fill_jmp32_jgt_reg(struct bpf_test *self)
2603 {
2604 	return __bpf_fill_jmp32_reg(self, BPF_JGT);
2605 }
2606 
2607 static int bpf_fill_jmp32_jge_reg(struct bpf_test *self)
2608 {
2609 	return __bpf_fill_jmp32_reg(self, BPF_JGE);
2610 }
2611 
2612 static int bpf_fill_jmp32_jlt_reg(struct bpf_test *self)
2613 {
2614 	return __bpf_fill_jmp32_reg(self, BPF_JLT);
2615 }
2616 
2617 static int bpf_fill_jmp32_jle_reg(struct bpf_test *self)
2618 {
2619 	return __bpf_fill_jmp32_reg(self, BPF_JLE);
2620 }
2621 
2622 static int bpf_fill_jmp32_jsgt_reg(struct bpf_test *self)
2623 {
2624 	return __bpf_fill_jmp32_reg(self, BPF_JSGT);
2625 }
2626 
2627 static int bpf_fill_jmp32_jsge_reg(struct bpf_test *self)
2628 {
2629 	return __bpf_fill_jmp32_reg(self, BPF_JSGE);
2630 }
2631 
2632 static int bpf_fill_jmp32_jslt_reg(struct bpf_test *self)
2633 {
2634 	return __bpf_fill_jmp32_reg(self, BPF_JSLT);
2635 }
2636 
2637 static int bpf_fill_jmp32_jsle_reg(struct bpf_test *self)
2638 {
2639 	return __bpf_fill_jmp32_reg(self, BPF_JSLE);
2640 }
2641 
2642 /*
2643  * Set up a sequence of staggered jumps, forwards and backwards with
2644  * increasing offset. This tests the conversion of relative jumps to
2645  * JITed native jumps. On some architectures, for example MIPS, a large
2646  * PC-relative jump offset may overflow the immediate field of the native
2647  * conditional branch instruction, triggering a conversion to use an
2648  * absolute jump instead. Since this changes the jump offsets, another
2649  * offset computation pass is necessary, and that may in turn trigger
2650  * another branch conversion. This jump sequence is particularly nasty
2651  * in that regard.
2652  *
2653  * The sequence generation is parameterized by size and jump type.
2654  * The size must be even, and the expected result is always size + 1.
2655  * Below is an example with size=8 and result=9.
2656  *
2657  *                     ________________________Start
2658  *                     R0 = 0
2659  *                     R1 = r1
2660  *                     R2 = r2
2661  *            ,------- JMP +4 * 3______________Preamble: 4 insns
2662  * ,----------|-ind 0- if R0 != 7 JMP 8 * 3 + 1 <--------------------.
2663  * |          |        R0 = 8                                        |
2664  * |          |        JMP +7 * 3               ------------------------.
2665  * | ,--------|-----1- if R0 != 5 JMP 7 * 3 + 1 <--------------.     |  |
2666  * | |        |        R0 = 6                                  |     |  |
2667  * | |        |        JMP +5 * 3               ------------------.  |  |
2668  * | | ,------|-----2- if R0 != 3 JMP 6 * 3 + 1 <--------.     |  |  |  |
2669  * | | |      |        R0 = 4                            |     |  |  |  |
2670  * | | |      |        JMP +3 * 3               ------------.  |  |  |  |
2671  * | | | ,----|-----3- if R0 != 1 JMP 5 * 3 + 1 <--.     |  |  |  |  |  |
2672  * | | | |    |        R0 = 2                      |     |  |  |  |  |  |
2673  * | | | |    |        JMP +1 * 3               ------.  |  |  |  |  |  |
2674  * | | | | ,--t=====4> if R0 != 0 JMP 4 * 3 + 1    1  2  3  4  5  6  7  8 loc
2675  * | | | | |           R0 = 1                     -1 +2 -3 +4 -5 +6 -7 +8 off
2676  * | | | | |           JMP -2 * 3               ---'  |  |  |  |  |  |  |
2677  * | | | | | ,------5- if R0 != 2 JMP 3 * 3 + 1 <-----'  |  |  |  |  |  |
2678  * | | | | | |         R0 = 3                            |  |  |  |  |  |
2679  * | | | | | |         JMP -4 * 3               ---------'  |  |  |  |  |
2680  * | | | | | | ,----6- if R0 != 4 JMP 2 * 3 + 1 <-----------'  |  |  |  |
2681  * | | | | | | |       R0 = 5                                  |  |  |  |
2682  * | | | | | | |       JMP -6 * 3               ---------------'  |  |  |
2683  * | | | | | | | ,--7- if R0 != 6 JMP 1 * 3 + 1 <-----------------'  |  |
2684  * | | | | | | | |     R0 = 7                                        |  |
2685  * | | Error | | |     JMP -8 * 3               ---------------------'  |
2686  * | | paths | | | ,8- if R0 != 8 JMP 0 * 3 + 1 <-----------------------'
2687  * | | | | | | | | |   R0 = 9__________________Sequence: 3 * size - 1 insns
2688  * `-+-+-+-+-+-+-+-+-> EXIT____________________Return: 1 insn
2689  *
2690  */
2691 
2692 /* The maximum size parameter: s16 jump offset range / 3 insns per slot, kept even */
2693 #define MAX_STAGGERED_JMP_SIZE ((0x7fff / 3) & ~1)
2694 
2695 /* We use a reduced number of iterations to get a reasonable execution time */
2696 #define NR_STAGGERED_JMP_RUNS 10
2697 
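/*
 * Each slot in the generated sequence is three instructions: a guard that
 * branches straight to the final EXIT (leaving a stale R0 as the result)
 * if the previous jump landed in the wrong slot, an update of R0 to the
 * slot's location number, and the jump under test with its offset scaled
 * by three instructions per slot.
 */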
2698 static int __bpf_fill_staggered_jumps(struct bpf_test *self,
2699 				      const struct bpf_insn *jmp,
2700 				      u64 r1, u64 r2)
2701 {
2702 	int size = self->test[0].result - 1;
2703 	int len = 4 + 3 * (size + 1);
2704 	struct bpf_insn *insns;
2705 	int off, ind;
2706 
2707 	insns = kmalloc_array(len, sizeof(*insns), GFP_KERNEL);
2708 	if (!insns)
2709 		return -ENOMEM;
2710 
2711 	/* Preamble */
2712 	insns[0] = BPF_ALU64_IMM(BPF_MOV, R0, 0);
2713 	insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
2714 	insns[2] = BPF_ALU64_IMM(BPF_MOV, R2, r2);
2715 	insns[3] = BPF_JMP_IMM(BPF_JA, 0, 0, 3 * size / 2);
2716 
2717 	/* Sequence */
2718 	for (ind = 0, off = size; ind <= size; ind++, off -= 2) {
2719 		struct bpf_insn *ins = &insns[4 + 3 * ind];
2720 		int loc;
2721 
2722 		if (off == 0)
2723 			off--;
2724 
2725 		loc = abs(off);
2726 		ins[0] = BPF_JMP_IMM(BPF_JNE, R0, loc - 1,
2727 				     3 * (size - ind) + 1);
2728 		ins[1] = BPF_ALU64_IMM(BPF_MOV, R0, loc);
2729 		ins[2] = *jmp;
2730 		ins[2].off = 3 * (off - 1);
2731 	}
2732 
2733 	/* Return */
2734 	insns[len - 1] = BPF_EXIT_INSN();
2735 
2736 	self->u.ptr.insns = insns;
2737 	self->u.ptr.len = len;
2738 
2739 	return 0;
2740 }
2741 
2742 /* 64-bit unconditional jump */
2743 static int bpf_fill_staggered_ja(struct bpf_test *self)
2744 {
2745 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JA, 0, 0, 0);
2746 
2747 	return __bpf_fill_staggered_jumps(self, &jmp, 0, 0);
2748 }
2749 
2750 /* 64-bit immediate jumps */
2751 static int bpf_fill_staggered_jeq_imm(struct bpf_test *self)
2752 {
2753 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
2754 
2755 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2756 }
2757 
2758 static int bpf_fill_staggered_jne_imm(struct bpf_test *self)
2759 {
2760 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
2761 
2762 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2763 }
2764 
2765 static int bpf_fill_staggered_jset_imm(struct bpf_test *self)
2766 {
2767 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
2768 
2769 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2770 }
2771 
2772 static int bpf_fill_staggered_jgt_imm(struct bpf_test *self)
2773 {
2774 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
2775 
2776 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2777 }
2778 
2779 static int bpf_fill_staggered_jge_imm(struct bpf_test *self)
2780 {
2781 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
2782 
2783 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2784 }
2785 
2786 static int bpf_fill_staggered_jlt_imm(struct bpf_test *self)
2787 {
2788 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
2789 
2790 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2791 }
2792 
2793 static int bpf_fill_staggered_jle_imm(struct bpf_test *self)
2794 {
2795 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
2796 
2797 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2798 }
2799 
2800 static int bpf_fill_staggered_jsgt_imm(struct bpf_test *self)
2801 {
2802 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
2803 
2804 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2805 }
2806 
2807 static int bpf_fill_staggered_jsge_imm(struct bpf_test *self)
2808 {
2809 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
2810 
2811 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2812 }
2813 
2814 static int bpf_fill_staggered_jslt_imm(struct bpf_test *self)
2815 {
2816 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
2817 
2818 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2819 }
2820 
2821 static int bpf_fill_staggered_jsle_imm(struct bpf_test *self)
2822 {
2823 	struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
2824 
2825 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2826 }
2827 
2828 /* 64-bit register jumps */
2829 static int bpf_fill_staggered_jeq_reg(struct bpf_test *self)
2830 {
2831 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
2832 
2833 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2834 }
2835 
2836 static int bpf_fill_staggered_jne_reg(struct bpf_test *self)
2837 {
2838 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
2839 
2840 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2841 }
2842 
2843 static int bpf_fill_staggered_jset_reg(struct bpf_test *self)
2844 {
2845 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
2846 
2847 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
2848 }
2849 
2850 static int bpf_fill_staggered_jgt_reg(struct bpf_test *self)
2851 {
2852 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
2853 
2854 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
2855 }
2856 
2857 static int bpf_fill_staggered_jge_reg(struct bpf_test *self)
2858 {
2859 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
2860 
2861 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2862 }
2863 
2864 static int bpf_fill_staggered_jlt_reg(struct bpf_test *self)
2865 {
2866 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
2867 
2868 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
2869 }
2870 
2871 static int bpf_fill_staggered_jle_reg(struct bpf_test *self)
2872 {
2873 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
2874 
2875 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2876 }
2877 
2878 static int bpf_fill_staggered_jsgt_reg(struct bpf_test *self)
2879 {
2880 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
2881 
2882 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
2883 }
2884 
2885 static int bpf_fill_staggered_jsge_reg(struct bpf_test *self)
2886 {
2887 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
2888 
2889 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
2890 }
2891 
2892 static int bpf_fill_staggered_jslt_reg(struct bpf_test *self)
2893 {
2894 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
2895 
2896 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
2897 }
2898 
2899 static int bpf_fill_staggered_jsle_reg(struct bpf_test *self)
2900 {
2901 	struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
2902 
2903 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
2904 }
2905 
2906 /* 32-bit immediate jumps */
2907 static int bpf_fill_staggered_jeq32_imm(struct bpf_test *self)
2908 {
2909 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
2910 
2911 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2912 }
2913 
2914 static int bpf_fill_staggered_jne32_imm(struct bpf_test *self)
2915 {
2916 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
2917 
2918 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 0);
2919 }
2920 
2921 static int bpf_fill_staggered_jset32_imm(struct bpf_test *self)
2922 {
2923 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
2924 
2925 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0);
2926 }
2927 
2928 static int bpf_fill_staggered_jgt32_imm(struct bpf_test *self)
2929 {
2930 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
2931 
2932 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 0);
2933 }
2934 
2935 static int bpf_fill_staggered_jge32_imm(struct bpf_test *self)
2936 {
2937 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
2938 
2939 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2940 }
2941 
2942 static int bpf_fill_staggered_jlt32_imm(struct bpf_test *self)
2943 {
2944 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
2945 
2946 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2947 }
2948 
2949 static int bpf_fill_staggered_jle32_imm(struct bpf_test *self)
2950 {
2951 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
2952 
2953 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0);
2954 }
2955 
2956 static int bpf_fill_staggered_jsgt32_imm(struct bpf_test *self)
2957 {
2958 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
2959 
2960 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2961 }
2962 
2963 static int bpf_fill_staggered_jsge32_imm(struct bpf_test *self)
2964 {
2965 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
2966 
2967 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2968 }
2969 
2970 static int bpf_fill_staggered_jslt32_imm(struct bpf_test *self)
2971 {
2972 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
2973 
2974 	return __bpf_fill_staggered_jumps(self, &jmp, -2, 0);
2975 }
2976 
2977 static int bpf_fill_staggered_jsle32_imm(struct bpf_test *self)
2978 {
2979 	struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
2980 
2981 	return __bpf_fill_staggered_jumps(self, &jmp, -1, 0);
2982 }
2983 
2984 /* 32-bit register jumps */
2985 static int bpf_fill_staggered_jeq32_reg(struct bpf_test *self)
2986 {
2987 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
2988 
2989 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
2990 }
2991 
2992 static int bpf_fill_staggered_jne32_reg(struct bpf_test *self)
2993 {
2994 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
2995 
2996 	return __bpf_fill_staggered_jumps(self, &jmp, 4321, 1234);
2997 }
2998 
2999 static int bpf_fill_staggered_jset32_reg(struct bpf_test *self)
3000 {
3001 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
3002 
3003 	return __bpf_fill_staggered_jumps(self, &jmp, 0x86, 0x82);
3004 }
3005 
3006 static int bpf_fill_staggered_jgt32_reg(struct bpf_test *self)
3007 {
3008 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
3009 
3010 	return __bpf_fill_staggered_jumps(self, &jmp, 0x80000000, 1234);
3011 }
3012 
3013 static int bpf_fill_staggered_jge32_reg(struct bpf_test *self)
3014 {
3015 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
3016 
3017 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3018 }
3019 
3020 static int bpf_fill_staggered_jlt32_reg(struct bpf_test *self)
3021 {
3022 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
3023 
3024 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 0x80000000);
3025 }
3026 
3027 static int bpf_fill_staggered_jle32_reg(struct bpf_test *self)
3028 {
3029 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
3030 
3031 	return __bpf_fill_staggered_jumps(self, &jmp, 1234, 1234);
3032 }
3033 
3034 static int bpf_fill_staggered_jsgt32_reg(struct bpf_test *self)
3035 {
3036 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
3037 
3038 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -2);
3039 }
3040 
3041 static int bpf_fill_staggered_jsge32_reg(struct bpf_test *self)
3042 {
3043 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
3044 
3045 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -2);
3046 }
3047 
3048 static int bpf_fill_staggered_jslt32_reg(struct bpf_test *self)
3049 {
3050 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
3051 
3052 	return __bpf_fill_staggered_jumps(self, &jmp, -2, -1);
3053 }
3054 
3055 static int bpf_fill_staggered_jsle32_reg(struct bpf_test *self)
3056 {
3057 	struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
3058 
3059 	return __bpf_fill_staggered_jumps(self, &jmp, -1, -1);
3060 }
3061 
3062 
3063 static struct bpf_test tests[] = {
3064 	{
3065 		"TAX",
3066 		.u.insns = {
3067 			BPF_STMT(BPF_LD | BPF_IMM, 1),
3068 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3069 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3070 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3071 			BPF_STMT(BPF_ALU | BPF_NEG, 0), /* A == -3 */
3072 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3073 			BPF_STMT(BPF_LD | BPF_LEN, 0),
3074 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3075 			BPF_STMT(BPF_MISC | BPF_TAX, 0), /* X == len - 3 */
3076 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 1),
3077 			BPF_STMT(BPF_RET | BPF_A, 0)
3078 		},
3079 		CLASSIC,
3080 		{ 10, 20, 30, 40, 50 },
3081 		{ { 2, 10 }, { 3, 20 }, { 4, 30 } },
3082 	},
3083 	{
3084 		"TXA",
3085 		.u.insns = {
3086 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3087 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3088 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3089 			BPF_STMT(BPF_RET | BPF_A, 0) /* A == len * 2 */
3090 		},
3091 		CLASSIC,
3092 		{ 10, 20, 30, 40, 50 },
3093 		{ { 1, 2 }, { 3, 6 }, { 4, 8 } },
3094 	},
3095 	{
3096 		"ADD_SUB_MUL_K",
3097 		.u.insns = {
3098 			BPF_STMT(BPF_LD | BPF_IMM, 1),
3099 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 2),
3100 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3101 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3102 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0xffffffff),
3103 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 3),
3104 			BPF_STMT(BPF_RET | BPF_A, 0)
3105 		},
3106 		CLASSIC | FLAG_NO_DATA,
3107 		{ },
3108 		{ { 0, 0xfffffffd } }
3109 	},
3110 	{
3111 		"DIV_MOD_KX",
3112 		.u.insns = {
3113 			BPF_STMT(BPF_LD | BPF_IMM, 8),
3114 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 2),
3115 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3116 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3117 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
3118 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3119 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3120 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x70000000),
3121 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3122 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3123 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
3124 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3125 			BPF_STMT(BPF_LD | BPF_IMM, 0xffffffff),
3126 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x70000000),
3127 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3128 			BPF_STMT(BPF_RET | BPF_A, 0)
3129 		},
3130 		CLASSIC | FLAG_NO_DATA,
3131 		{ },
3132 		{ { 0, 0x20000000 } }
3133 	},
3134 	{
3135 		"AND_OR_LSH_K",
3136 		.u.insns = {
3137 			BPF_STMT(BPF_LD | BPF_IMM, 0xff),
3138 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3139 			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 27),
3140 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3141 			BPF_STMT(BPF_LD | BPF_IMM, 0xf),
3142 			BPF_STMT(BPF_ALU | BPF_OR | BPF_K, 0xf0),
3143 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3144 			BPF_STMT(BPF_RET | BPF_A, 0)
3145 		},
3146 		CLASSIC | FLAG_NO_DATA,
3147 		{ },
3148 		{ { 0, 0x800000ff }, { 1, 0x800000ff } },
3149 	},
3150 	{
3151 		"LD_IMM_0",
3152 		.u.insns = {
3153 			BPF_STMT(BPF_LD | BPF_IMM, 0), /* ld #0 */
3154 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0, 1, 0),
3155 			BPF_STMT(BPF_RET | BPF_K, 0),
3156 			BPF_STMT(BPF_RET | BPF_K, 1),
3157 		},
3158 		CLASSIC,
3159 		{ },
3160 		{ { 1, 1 } },
3161 	},
3162 	{
3163 		"LD_IND",
3164 		.u.insns = {
3165 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3166 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, MAX_K),
3167 			BPF_STMT(BPF_RET | BPF_K, 1)
3168 		},
3169 		CLASSIC,
3170 		{ },
3171 		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3172 	},
3173 	{
3174 		"LD_ABS",
3175 		.u.insns = {
3176 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS, 1000),
3177 			BPF_STMT(BPF_RET | BPF_K, 1)
3178 		},
3179 		CLASSIC,
3180 		{ },
3181 		{ { 1, 0 }, { 10, 0 }, { 60, 0 } },
3182 	},
3183 	{
3184 		"LD_ABS_LL",
3185 		.u.insns = {
3186 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF),
3187 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3188 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_LL_OFF + 1),
3189 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3190 			BPF_STMT(BPF_RET | BPF_A, 0)
3191 		},
3192 		CLASSIC,
3193 		{ 1, 2, 3 },
3194 		{ { 1, 0 }, { 2, 3 } },
3195 	},
3196 	{
3197 		"LD_IND_LL",
3198 		.u.insns = {
3199 			BPF_STMT(BPF_LD | BPF_IMM, SKF_LL_OFF - 1),
3200 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3201 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3202 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3203 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3204 			BPF_STMT(BPF_RET | BPF_A, 0)
3205 		},
3206 		CLASSIC,
3207 		{ 1, 2, 3, 0xff },
3208 		{ { 1, 1 }, { 3, 3 }, { 4, 0xff } },
3209 	},
3210 	{
3211 		"LD_ABS_NET",
3212 		.u.insns = {
3213 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF),
3214 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3215 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, SKF_NET_OFF + 1),
3216 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3217 			BPF_STMT(BPF_RET | BPF_A, 0)
3218 		},
3219 		CLASSIC,
3220 		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3221 		{ { 15, 0 }, { 16, 3 } },
3222 	},
3223 	{
3224 		"LD_IND_NET",
3225 		.u.insns = {
3226 			BPF_STMT(BPF_LD | BPF_IMM, SKF_NET_OFF - 15),
3227 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3228 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
3229 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3230 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 0),
3231 			BPF_STMT(BPF_RET | BPF_A, 0)
3232 		},
3233 		CLASSIC,
3234 		{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3 },
3235 		{ { 14, 0 }, { 15, 1 }, { 17, 3 } },
3236 	},
3237 	{
3238 		"LD_PKTTYPE",
3239 		.u.insns = {
3240 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3241 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3242 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3243 			BPF_STMT(BPF_RET | BPF_K, 1),
3244 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3245 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3246 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3247 			BPF_STMT(BPF_RET | BPF_K, 1),
3248 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3249 				 SKF_AD_OFF + SKF_AD_PKTTYPE),
3250 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, SKB_TYPE, 1, 0),
3251 			BPF_STMT(BPF_RET | BPF_K, 1),
3252 			BPF_STMT(BPF_RET | BPF_A, 0)
3253 		},
3254 		CLASSIC,
3255 		{ },
3256 		{ { 1, 3 }, { 10, 3 } },
3257 	},
3258 	{
3259 		"LD_MARK",
3260 		.u.insns = {
3261 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3262 				 SKF_AD_OFF + SKF_AD_MARK),
3263 			BPF_STMT(BPF_RET | BPF_A, 0)
3264 		},
3265 		CLASSIC,
3266 		{ },
3267 		{ { 1, SKB_MARK}, { 10, SKB_MARK} },
3268 	},
3269 	{
3270 		"LD_RXHASH",
3271 		.u.insns = {
3272 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3273 				 SKF_AD_OFF + SKF_AD_RXHASH),
3274 			BPF_STMT(BPF_RET | BPF_A, 0)
3275 		},
3276 		CLASSIC,
3277 		{ },
3278 		{ { 1, SKB_HASH}, { 10, SKB_HASH} },
3279 	},
3280 	{
3281 		"LD_QUEUE",
3282 		.u.insns = {
3283 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3284 				 SKF_AD_OFF + SKF_AD_QUEUE),
3285 			BPF_STMT(BPF_RET | BPF_A, 0)
3286 		},
3287 		CLASSIC,
3288 		{ },
3289 		{ { 1, SKB_QUEUE_MAP }, { 10, SKB_QUEUE_MAP } },
3290 	},
3291 	{
3292 		"LD_PROTOCOL",
3293 		.u.insns = {
3294 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 1),
3295 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 20, 1, 0),
3296 			BPF_STMT(BPF_RET | BPF_K, 0),
3297 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3298 				 SKF_AD_OFF + SKF_AD_PROTOCOL),
3299 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3300 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3301 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 30, 1, 0),
3302 			BPF_STMT(BPF_RET | BPF_K, 0),
3303 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3304 			BPF_STMT(BPF_RET | BPF_A, 0)
3305 		},
3306 		CLASSIC,
3307 		{ 10, 20, 30 },
3308 		{ { 10, ETH_P_IP }, { 100, ETH_P_IP } },
3309 	},
3310 	{
3311 		"LD_VLAN_TAG",
3312 		.u.insns = {
3313 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3314 				 SKF_AD_OFF + SKF_AD_VLAN_TAG),
3315 			BPF_STMT(BPF_RET | BPF_A, 0)
3316 		},
3317 		CLASSIC,
3318 		{ },
3319 		{
3320 			{ 1, SKB_VLAN_TCI },
3321 			{ 10, SKB_VLAN_TCI }
3322 		},
3323 	},
3324 	{
3325 		"LD_VLAN_TAG_PRESENT",
3326 		.u.insns = {
3327 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3328 				 SKF_AD_OFF + SKF_AD_VLAN_TAG_PRESENT),
3329 			BPF_STMT(BPF_RET | BPF_A, 0)
3330 		},
3331 		CLASSIC,
3332 		{ },
3333 		{
3334 			{ 1, SKB_VLAN_PRESENT },
3335 			{ 10, SKB_VLAN_PRESENT }
3336 		},
3337 	},
3338 	{
3339 		"LD_IFINDEX",
3340 		.u.insns = {
3341 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3342 				 SKF_AD_OFF + SKF_AD_IFINDEX),
3343 			BPF_STMT(BPF_RET | BPF_A, 0)
3344 		},
3345 		CLASSIC,
3346 		{ },
3347 		{ { 1, SKB_DEV_IFINDEX }, { 10, SKB_DEV_IFINDEX } },
3348 	},
3349 	{
3350 		"LD_HATYPE",
3351 		.u.insns = {
3352 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3353 				 SKF_AD_OFF + SKF_AD_HATYPE),
3354 			BPF_STMT(BPF_RET | BPF_A, 0)
3355 		},
3356 		CLASSIC,
3357 		{ },
3358 		{ { 1, SKB_DEV_TYPE }, { 10, SKB_DEV_TYPE } },
3359 	},
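	/* LD_CPU returns the difference of two back-to-back SKF_AD_CPU
	 * reads; only their consistency is checked because the absolute
	 * CPU number depends on where the test happens to run.
	 */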
3360 	{
3361 		"LD_CPU",
3362 		.u.insns = {
3363 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3364 				 SKF_AD_OFF + SKF_AD_CPU),
3365 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3366 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3367 				 SKF_AD_OFF + SKF_AD_CPU),
3368 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3369 			BPF_STMT(BPF_RET | BPF_A, 0)
3370 		},
3371 		CLASSIC,
3372 		{ },
3373 		{ { 1, 0 }, { 10, 0 } },
3374 	},
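	/* For SKF_AD_NLATTR (and the _NEST variant) X holds the attribute
	 * type to look for and A the offset to start from; the result is
	 * the matching nlattr's offset, or 0 when the buffer is too short.
	 * The payload is a hand-built attribute stream with separate
	 * big- and little-endian encodings of the 16-bit len/type fields.
	 */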
3375 	{
3376 		"LD_NLATTR",
3377 		.u.insns = {
3378 			BPF_STMT(BPF_LDX | BPF_IMM, 2),
3379 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3380 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3381 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3382 				 SKF_AD_OFF + SKF_AD_NLATTR),
3383 			BPF_STMT(BPF_RET | BPF_A, 0)
3384 		},
3385 		CLASSIC,
3386 #ifdef __BIG_ENDIAN
3387 		{ 0xff, 0xff, 0, 4, 0, 2, 0, 4, 0, 3 },
3388 #else
3389 		{ 0xff, 0xff, 4, 0, 2, 0, 4, 0, 3, 0 },
3390 #endif
3391 		{ { 4, 0 }, { 20, 6 } },
3392 	},
3393 	{
3394 		"LD_NLATTR_NEST",
3395 		.u.insns = {
3396 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3397 			BPF_STMT(BPF_LDX | BPF_IMM, 3),
3398 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3399 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3400 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3401 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3402 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3403 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3404 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3405 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3406 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3407 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3408 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3409 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3410 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3411 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3412 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3413 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3414 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3415 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3416 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3417 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3418 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3419 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3420 				 SKF_AD_OFF + SKF_AD_NLATTR_NEST),
3421 			BPF_STMT(BPF_RET | BPF_A, 0)
3422 		},
3423 		CLASSIC,
3424 #ifdef __BIG_ENDIAN
3425 		{ 0xff, 0xff, 0, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3 },
3426 #else
3427 		{ 0xff, 0xff, 12, 0, 1, 0, 4, 0, 2, 0, 4, 0, 3, 0 },
3428 #endif
3429 		{ { 4, 0 }, { 20, 10 } },
3430 	},
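	/* SKF_AD_PAY_OFFSET reports the flow-dissector payload offset.  For
	 * the ICMP echo request below that is 14 (ethernet) + 20 (IPv4) +
	 * 8 (ICMP header) = 42; the 30-byte truncated run cannot be
	 * dissected and yields 0.
	 */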
3431 	{
3432 		"LD_PAYLOAD_OFF",
3433 		.u.insns = {
3434 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3435 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3436 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3437 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3438 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3439 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3440 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3441 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3442 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3443 				 SKF_AD_OFF + SKF_AD_PAY_OFFSET),
3444 			BPF_STMT(BPF_RET | BPF_A, 0)
3445 		},
3446 		CLASSIC,
3447 		/* 00:00:00:00:00:00 > 00:00:00:00:00:00, ethertype IPv4 (0x0800),
3448 		 * length 98: 127.0.0.1 > 127.0.0.1: ICMP echo request,
3449 		 * id 9737, seq 1, length 64
3450 		 */
3451 		{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3452 		  0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
3453 		  0x08, 0x00,
3454 		  0x45, 0x00, 0x00, 0x54, 0xac, 0x8b, 0x40, 0x00, 0x40,
3455 		  0x01, 0x90, 0x1b, 0x7f, 0x00, 0x00, 0x01 },
3456 		{ { 30, 0 }, { 100, 42 } },
3457 	},
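	/* SKF_AD_ALU_XOR_X is a pseudo ancillary load computing A = A ^ X,
	 * here 10 ^ 300 = 294 (0x126).
	 */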
3458 	{
3459 		"LD_ANC_XOR",
3460 		.u.insns = {
3461 			BPF_STMT(BPF_LD | BPF_IMM, 10),
3462 			BPF_STMT(BPF_LDX | BPF_IMM, 300),
3463 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
3464 				 SKF_AD_OFF + SKF_AD_ALU_XOR_X),
3465 			BPF_STMT(BPF_RET | BPF_A, 0)
3466 		},
3467 		CLASSIC,
3468 		{ },
3469 		{ { 4, 0xA ^ 300 }, { 20, 0xA ^ 300 } },
3470 	},
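	/* SPILL_FILL round-trips values derived from the packet length
	 * through the classic 16-word scratch memory (BPF_ST/BPF_STX and
	 * BPF_LD/BPF_LDX | BPF_MEM); the final XOR chain only produces
	 * 0x80000000 ^ len if every slot is stored and reloaded intact.
	 */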
3471 	{
3472 		"SPILL_FILL",
3473 		.u.insns = {
3474 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3475 			BPF_STMT(BPF_LD | BPF_IMM, 2),
3476 			BPF_STMT(BPF_ALU | BPF_RSH, 1),
3477 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3478 			BPF_STMT(BPF_ST, 1), /* M1 = 1 ^ len */
3479 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_K, 0x80000000),
3480 			BPF_STMT(BPF_ST, 2), /* M2 = 1 ^ len ^ 0x80000000 */
3481 			BPF_STMT(BPF_STX, 15), /* M3 = len */
3482 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
3483 			BPF_STMT(BPF_LD | BPF_MEM, 2),
3484 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3485 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
3486 			BPF_STMT(BPF_ALU | BPF_XOR | BPF_X, 0),
3487 			BPF_STMT(BPF_RET | BPF_A, 0)
3488 		},
3489 		CLASSIC,
3490 		{ },
3491 		{ { 1, 0x80000001 }, { 2, 0x80000002 }, { 60, 0x80000000 ^ 60 } }
3492 	},
3493 	{
3494 		"JEQ",
3495 		.u.insns = {
3496 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3497 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3498 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 0, 1),
3499 			BPF_STMT(BPF_RET | BPF_K, 1),
3500 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3501 		},
3502 		CLASSIC,
3503 		{ 3, 3, 3, 3, 3 },
3504 		{ { 1, 0 }, { 3, 1 }, { 4, MAX_K } },
3505 	},
3506 	{
3507 		"JGT",
3508 		.u.insns = {
3509 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3510 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3511 			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_X, 0, 0, 1),
3512 			BPF_STMT(BPF_RET | BPF_K, 1),
3513 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3514 		},
3515 		CLASSIC,
3516 		{ 4, 4, 4, 3, 3 },
3517 		{ { 2, 0 }, { 3, 1 }, { 4, MAX_K } },
3518 	},
3519 	{
3520 		"JGE (jt 0), test 1",
3521 		.u.insns = {
3522 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3523 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3524 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3525 			BPF_STMT(BPF_RET | BPF_K, 1),
3526 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3527 		},
3528 		CLASSIC,
3529 		{ 4, 4, 4, 3, 3 },
3530 		{ { 2, 0 }, { 3, 1 }, { 4, 1 } },
3531 	},
3532 	{
3533 		"JGE (jt 0), test 2",
3534 		.u.insns = {
3535 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3536 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 2),
3537 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_X, 0, 0, 1),
3538 			BPF_STMT(BPF_RET | BPF_K, 1),
3539 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3540 		},
3541 		CLASSIC,
3542 		{ 4, 4, 5, 3, 3 },
3543 		{ { 4, 1 }, { 5, 1 }, { 6, MAX_K } },
3544 	},
3545 	{
3546 		"JGE",
3547 		.u.insns = {
3548 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3549 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, MAX_K),
3550 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 1, 1, 0),
3551 			BPF_STMT(BPF_RET | BPF_K, 10),
3552 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 2, 1, 0),
3553 			BPF_STMT(BPF_RET | BPF_K, 20),
3554 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 3, 1, 0),
3555 			BPF_STMT(BPF_RET | BPF_K, 30),
3556 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 4, 1, 0),
3557 			BPF_STMT(BPF_RET | BPF_K, 40),
3558 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3559 		},
3560 		CLASSIC,
3561 		{ 1, 2, 3, 4, 5 },
3562 		{ { 1, 20 }, { 3, 40 }, { 5, MAX_K } },
3563 	},
3564 	{
3565 		"JSET",
3566 		.u.insns = {
3567 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3568 			BPF_JUMP(BPF_JMP | BPF_JA, 1, 1, 1),
3569 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3570 			BPF_JUMP(BPF_JMP | BPF_JA, 0, 0, 0),
3571 			BPF_STMT(BPF_LDX | BPF_LEN, 0),
3572 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3573 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, 4),
3574 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
3575 			BPF_STMT(BPF_LD | BPF_W | BPF_IND, 0),
3576 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 1, 0, 1),
3577 			BPF_STMT(BPF_RET | BPF_K, 10),
3578 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x80000000, 0, 1),
3579 			BPF_STMT(BPF_RET | BPF_K, 20),
3580 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3581 			BPF_STMT(BPF_RET | BPF_K, 30),
3582 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3583 			BPF_STMT(BPF_RET | BPF_K, 30),
3584 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3585 			BPF_STMT(BPF_RET | BPF_K, 30),
3586 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3587 			BPF_STMT(BPF_RET | BPF_K, 30),
3588 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0xffffff, 1, 0),
3589 			BPF_STMT(BPF_RET | BPF_K, 30),
3590 			BPF_STMT(BPF_RET | BPF_K, MAX_K)
3591 		},
3592 		CLASSIC,
3593 		{ 0, 0xAA, 0x55, 1 },
3594 		{ { 4, 10 }, { 5, 20 }, { 6, MAX_K } },
3595 	},
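	/* The two tcpdump tests run filters of the kind emitted by
	 * "tcpdump ... -d" against a captured TCP segment to port 22; the
	 * filter may only accept (return 0xffff) once enough of the packet
	 * is available to reach the TCP ports.
	 */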
3596 	{
3597 		"tcpdump port 22",
3598 		.u.insns = {
3599 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3600 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 0, 8), /* IPv6 */
3601 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 20),
3602 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3603 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3604 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 17),
3605 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 54),
3606 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 14, 0),
3607 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 56),
3608 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 12, 13),
3609 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0800, 0, 12), /* IPv4 */
3610 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3611 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x84, 2, 0),
3612 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 1, 0),
3613 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x11, 0, 8),
3614 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3615 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 6, 0),
3616 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3617 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3618 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3619 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3620 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 1),
3621 			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3622 			BPF_STMT(BPF_RET | BPF_K, 0),
3623 		},
3624 		CLASSIC,
3625 		/* 3c:07:54:43:e5:76 > 10:bf:48:d6:43:d6, ethertype IPv4(0x0800)
3626 		 * length 114: 10.1.1.149.49700 > 10.1.2.10.22: Flags [P.],
3627 		 * seq 1305692979:1305693027, ack 3650467037, win 65535,
3628 		 * options [nop,nop,TS val 2502645400 ecr 3971138], length 48
3629 		 */
3630 		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3631 		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3632 		  0x08, 0x00,
3633 		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3634 		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3635 		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3636 		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3637 		  0xc2, 0x24,
3638 		  0x00, 0x16 /* dst port */ },
3639 		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3640 	},
3641 	{
3642 		"tcpdump complex",
3643 		.u.insns = {
3644 			/* tcpdump -nei eth0 'tcp port 22 and (((ip[2:2] -
3645 			 * ((ip[0]&0xf)<<2)) - ((tcp[12]&0xf0)>>2)) != 0) and
3646 			 * (len > 115 or len < 30000000000)' -d
3647 			 */
3648 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 12),
3649 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x86dd, 30, 0),
3650 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x800, 0, 29),
3651 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 23),
3652 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x6, 0, 27),
3653 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 20),
3654 			BPF_JUMP(BPF_JMP | BPF_JSET | BPF_K, 0x1fff, 25, 0),
3655 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3656 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 14),
3657 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 2, 0),
3658 			BPF_STMT(BPF_LD | BPF_H | BPF_IND, 16),
3659 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 22, 0, 20),
3660 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 16),
3661 			BPF_STMT(BPF_ST, 1),
3662 			BPF_STMT(BPF_LD | BPF_B | BPF_ABS, 14),
3663 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf),
3664 			BPF_STMT(BPF_ALU | BPF_LSH | BPF_K, 2),
3665 			BPF_STMT(BPF_MISC | BPF_TAX, 0x5), /* libpcap emits K on TAX */
3666 			BPF_STMT(BPF_LD | BPF_MEM, 1),
3667 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
3668 			BPF_STMT(BPF_ST, 5),
3669 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 14),
3670 			BPF_STMT(BPF_LD | BPF_B | BPF_IND, 26),
3671 			BPF_STMT(BPF_ALU | BPF_AND | BPF_K, 0xf0),
3672 			BPF_STMT(BPF_ALU | BPF_RSH | BPF_K, 2),
3673 			BPF_STMT(BPF_MISC | BPF_TAX, 0x9), /* libpcap emits K on TAX */
3674 			BPF_STMT(BPF_LD | BPF_MEM, 5),
3675 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0, 4, 0),
3676 			BPF_STMT(BPF_LD | BPF_LEN, 0),
3677 			BPF_JUMP(BPF_JMP | BPF_JGT | BPF_K, 0x73, 1, 0),
3678 			BPF_JUMP(BPF_JMP | BPF_JGE | BPF_K, 0xfc23ac00, 1, 0),
3679 			BPF_STMT(BPF_RET | BPF_K, 0xffff),
3680 			BPF_STMT(BPF_RET | BPF_K, 0),
3681 		},
3682 		CLASSIC,
3683 		{ 0x10, 0xbf, 0x48, 0xd6, 0x43, 0xd6,
3684 		  0x3c, 0x07, 0x54, 0x43, 0xe5, 0x76,
3685 		  0x08, 0x00,
3686 		  0x45, 0x10, 0x00, 0x64, 0x75, 0xb5,
3687 		  0x40, 0x00, 0x40, 0x06, 0xad, 0x2e, /* IP header */
3688 		  0x0a, 0x01, 0x01, 0x95, /* ip src */
3689 		  0x0a, 0x01, 0x02, 0x0a, /* ip dst */
3690 		  0xc2, 0x24,
3691 		  0x00, 0x16 /* dst port */ },
3692 		{ { 10, 0 }, { 30, 0 }, { 100, 65535 } },
3693 	},
3694 	{
3695 		"RET_A",
3696 		.u.insns = {
3697 			/* check that uninitialized X and A contain zeros */
3698 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
3699 			BPF_STMT(BPF_RET | BPF_A, 0)
3700 		},
3701 		CLASSIC,
3702 		{ },
3703 		{ {1, 0}, {2, 0} },
3704 	},
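	/* Tests marked INTERNAL below feed eBPF instructions (.u.insns_int)
	 * straight to the interpreter/JIT.  The expected result is a 32-bit
	 * value, which is why -3 is listed as 0xfffffffd in the first test.
	 */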
3705 	{
3706 		"INT: ADD trivial",
3707 		.u.insns_int = {
3708 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3709 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
3710 			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3711 			BPF_ALU64_REG(BPF_SUB, R1, R2),
3712 			BPF_ALU64_IMM(BPF_ADD, R1, -1),
3713 			BPF_ALU64_IMM(BPF_MUL, R1, 3),
3714 			BPF_ALU64_REG(BPF_MOV, R0, R1),
3715 			BPF_EXIT_INSN(),
3716 		},
3717 		INTERNAL,
3718 		{ },
3719 		{ { 0, 0xfffffffd } }
3720 	},
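	/* MUL_X, MUL_X2 and MUL32_X contrast 64-bit and 32-bit multiply
	 * semantics: 32-bit MOVs zero-extend their operands and a 64-bit
	 * MUL then keeps the full product (>> 8 gives 0x2ffffff), while a
	 * 32-bit MUL truncates it (>> 8 gives 0xffffff).
	 */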
3721 	{
3722 		"INT: MUL_X",
3723 		.u.insns_int = {
3724 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
3725 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3726 			BPF_ALU64_IMM(BPF_MOV, R2, 3),
3727 			BPF_ALU64_REG(BPF_MUL, R1, R2),
3728 			BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
3729 			BPF_EXIT_INSN(),
3730 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
3731 			BPF_EXIT_INSN(),
3732 		},
3733 		INTERNAL,
3734 		{ },
3735 		{ { 0, 1 } }
3736 	},
3737 	{
3738 		"INT: MUL_X2",
3739 		.u.insns_int = {
3740 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3741 			BPF_ALU32_IMM(BPF_MOV, R1, -1),
3742 			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3743 			BPF_ALU64_REG(BPF_MUL, R1, R2),
3744 			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3745 			BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
3746 			BPF_EXIT_INSN(),
3747 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3748 			BPF_EXIT_INSN(),
3749 		},
3750 		INTERNAL,
3751 		{ },
3752 		{ { 0, 1 } }
3753 	},
3754 	{
3755 		"INT: MUL32_X",
3756 		.u.insns_int = {
3757 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
3758 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
3759 			BPF_ALU32_IMM(BPF_MOV, R2, 3),
3760 			BPF_ALU32_REG(BPF_MUL, R1, R2),
3761 			BPF_ALU64_IMM(BPF_RSH, R1, 8),
3762 			BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
3763 			BPF_EXIT_INSN(),
3764 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
3765 			BPF_EXIT_INSN(),
3766 		},
3767 		INTERNAL,
3768 		{ },
3769 		{ { 0, 1 } }
3770 	},
3771 	{
3772 		/* Have to test all register combinations, since
3773 		 * JITing of different registers will produce
3774 		 * different asm code.
3775 		 */
3776 		"INT: ADD 64-bit",
3777 		.u.insns_int = {
3778 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
3779 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
3780 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
3781 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
3782 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
3783 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
3784 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
3785 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
3786 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
3787 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
3788 			BPF_ALU64_IMM(BPF_ADD, R0, 20),
3789 			BPF_ALU64_IMM(BPF_ADD, R1, 20),
3790 			BPF_ALU64_IMM(BPF_ADD, R2, 20),
3791 			BPF_ALU64_IMM(BPF_ADD, R3, 20),
3792 			BPF_ALU64_IMM(BPF_ADD, R4, 20),
3793 			BPF_ALU64_IMM(BPF_ADD, R5, 20),
3794 			BPF_ALU64_IMM(BPF_ADD, R6, 20),
3795 			BPF_ALU64_IMM(BPF_ADD, R7, 20),
3796 			BPF_ALU64_IMM(BPF_ADD, R8, 20),
3797 			BPF_ALU64_IMM(BPF_ADD, R9, 20),
3798 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
3799 			BPF_ALU64_IMM(BPF_SUB, R1, 10),
3800 			BPF_ALU64_IMM(BPF_SUB, R2, 10),
3801 			BPF_ALU64_IMM(BPF_SUB, R3, 10),
3802 			BPF_ALU64_IMM(BPF_SUB, R4, 10),
3803 			BPF_ALU64_IMM(BPF_SUB, R5, 10),
3804 			BPF_ALU64_IMM(BPF_SUB, R6, 10),
3805 			BPF_ALU64_IMM(BPF_SUB, R7, 10),
3806 			BPF_ALU64_IMM(BPF_SUB, R8, 10),
3807 			BPF_ALU64_IMM(BPF_SUB, R9, 10),
3808 			BPF_ALU64_REG(BPF_ADD, R0, R0),
3809 			BPF_ALU64_REG(BPF_ADD, R0, R1),
3810 			BPF_ALU64_REG(BPF_ADD, R0, R2),
3811 			BPF_ALU64_REG(BPF_ADD, R0, R3),
3812 			BPF_ALU64_REG(BPF_ADD, R0, R4),
3813 			BPF_ALU64_REG(BPF_ADD, R0, R5),
3814 			BPF_ALU64_REG(BPF_ADD, R0, R6),
3815 			BPF_ALU64_REG(BPF_ADD, R0, R7),
3816 			BPF_ALU64_REG(BPF_ADD, R0, R8),
3817 			BPF_ALU64_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3818 			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3819 			BPF_EXIT_INSN(),
3820 			BPF_ALU64_REG(BPF_ADD, R1, R0),
3821 			BPF_ALU64_REG(BPF_ADD, R1, R1),
3822 			BPF_ALU64_REG(BPF_ADD, R1, R2),
3823 			BPF_ALU64_REG(BPF_ADD, R1, R3),
3824 			BPF_ALU64_REG(BPF_ADD, R1, R4),
3825 			BPF_ALU64_REG(BPF_ADD, R1, R5),
3826 			BPF_ALU64_REG(BPF_ADD, R1, R6),
3827 			BPF_ALU64_REG(BPF_ADD, R1, R7),
3828 			BPF_ALU64_REG(BPF_ADD, R1, R8),
3829 			BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3830 			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3831 			BPF_EXIT_INSN(),
3832 			BPF_ALU64_REG(BPF_ADD, R2, R0),
3833 			BPF_ALU64_REG(BPF_ADD, R2, R1),
3834 			BPF_ALU64_REG(BPF_ADD, R2, R2),
3835 			BPF_ALU64_REG(BPF_ADD, R2, R3),
3836 			BPF_ALU64_REG(BPF_ADD, R2, R4),
3837 			BPF_ALU64_REG(BPF_ADD, R2, R5),
3838 			BPF_ALU64_REG(BPF_ADD, R2, R6),
3839 			BPF_ALU64_REG(BPF_ADD, R2, R7),
3840 			BPF_ALU64_REG(BPF_ADD, R2, R8),
3841 			BPF_ALU64_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3842 			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3843 			BPF_EXIT_INSN(),
3844 			BPF_ALU64_REG(BPF_ADD, R3, R0),
3845 			BPF_ALU64_REG(BPF_ADD, R3, R1),
3846 			BPF_ALU64_REG(BPF_ADD, R3, R2),
3847 			BPF_ALU64_REG(BPF_ADD, R3, R3),
3848 			BPF_ALU64_REG(BPF_ADD, R3, R4),
3849 			BPF_ALU64_REG(BPF_ADD, R3, R5),
3850 			BPF_ALU64_REG(BPF_ADD, R3, R6),
3851 			BPF_ALU64_REG(BPF_ADD, R3, R7),
3852 			BPF_ALU64_REG(BPF_ADD, R3, R8),
3853 			BPF_ALU64_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
3854 			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
3855 			BPF_EXIT_INSN(),
3856 			BPF_ALU64_REG(BPF_ADD, R4, R0),
3857 			BPF_ALU64_REG(BPF_ADD, R4, R1),
3858 			BPF_ALU64_REG(BPF_ADD, R4, R2),
3859 			BPF_ALU64_REG(BPF_ADD, R4, R3),
3860 			BPF_ALU64_REG(BPF_ADD, R4, R4),
3861 			BPF_ALU64_REG(BPF_ADD, R4, R5),
3862 			BPF_ALU64_REG(BPF_ADD, R4, R6),
3863 			BPF_ALU64_REG(BPF_ADD, R4, R7),
3864 			BPF_ALU64_REG(BPF_ADD, R4, R8),
3865 			BPF_ALU64_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
3866 			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
3867 			BPF_EXIT_INSN(),
3868 			BPF_ALU64_REG(BPF_ADD, R5, R0),
3869 			BPF_ALU64_REG(BPF_ADD, R5, R1),
3870 			BPF_ALU64_REG(BPF_ADD, R5, R2),
3871 			BPF_ALU64_REG(BPF_ADD, R5, R3),
3872 			BPF_ALU64_REG(BPF_ADD, R5, R4),
3873 			BPF_ALU64_REG(BPF_ADD, R5, R5),
3874 			BPF_ALU64_REG(BPF_ADD, R5, R6),
3875 			BPF_ALU64_REG(BPF_ADD, R5, R7),
3876 			BPF_ALU64_REG(BPF_ADD, R5, R8),
3877 			BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
3878 			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
3879 			BPF_EXIT_INSN(),
3880 			BPF_ALU64_REG(BPF_ADD, R6, R0),
3881 			BPF_ALU64_REG(BPF_ADD, R6, R1),
3882 			BPF_ALU64_REG(BPF_ADD, R6, R2),
3883 			BPF_ALU64_REG(BPF_ADD, R6, R3),
3884 			BPF_ALU64_REG(BPF_ADD, R6, R4),
3885 			BPF_ALU64_REG(BPF_ADD, R6, R5),
3886 			BPF_ALU64_REG(BPF_ADD, R6, R6),
3887 			BPF_ALU64_REG(BPF_ADD, R6, R7),
3888 			BPF_ALU64_REG(BPF_ADD, R6, R8),
3889 			BPF_ALU64_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
3890 			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
3891 			BPF_EXIT_INSN(),
3892 			BPF_ALU64_REG(BPF_ADD, R7, R0),
3893 			BPF_ALU64_REG(BPF_ADD, R7, R1),
3894 			BPF_ALU64_REG(BPF_ADD, R7, R2),
3895 			BPF_ALU64_REG(BPF_ADD, R7, R3),
3896 			BPF_ALU64_REG(BPF_ADD, R7, R4),
3897 			BPF_ALU64_REG(BPF_ADD, R7, R5),
3898 			BPF_ALU64_REG(BPF_ADD, R7, R6),
3899 			BPF_ALU64_REG(BPF_ADD, R7, R7),
3900 			BPF_ALU64_REG(BPF_ADD, R7, R8),
3901 			BPF_ALU64_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
3902 			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
3903 			BPF_EXIT_INSN(),
3904 			BPF_ALU64_REG(BPF_ADD, R8, R0),
3905 			BPF_ALU64_REG(BPF_ADD, R8, R1),
3906 			BPF_ALU64_REG(BPF_ADD, R8, R2),
3907 			BPF_ALU64_REG(BPF_ADD, R8, R3),
3908 			BPF_ALU64_REG(BPF_ADD, R8, R4),
3909 			BPF_ALU64_REG(BPF_ADD, R8, R5),
3910 			BPF_ALU64_REG(BPF_ADD, R8, R6),
3911 			BPF_ALU64_REG(BPF_ADD, R8, R7),
3912 			BPF_ALU64_REG(BPF_ADD, R8, R8),
3913 			BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
3914 			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
3915 			BPF_EXIT_INSN(),
3916 			BPF_ALU64_REG(BPF_ADD, R9, R0),
3917 			BPF_ALU64_REG(BPF_ADD, R9, R1),
3918 			BPF_ALU64_REG(BPF_ADD, R9, R2),
3919 			BPF_ALU64_REG(BPF_ADD, R9, R3),
3920 			BPF_ALU64_REG(BPF_ADD, R9, R4),
3921 			BPF_ALU64_REG(BPF_ADD, R9, R5),
3922 			BPF_ALU64_REG(BPF_ADD, R9, R6),
3923 			BPF_ALU64_REG(BPF_ADD, R9, R7),
3924 			BPF_ALU64_REG(BPF_ADD, R9, R8),
3925 			BPF_ALU64_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
3926 			BPF_ALU64_REG(BPF_MOV, R0, R9),
3927 			BPF_EXIT_INSN(),
3928 		},
3929 		INTERNAL,
3930 		{ },
3931 		{ { 0, 2957380 } }
3932 	},
3933 	{
3934 		"INT: ADD 32-bit",
3935 		.u.insns_int = {
3936 			BPF_ALU32_IMM(BPF_MOV, R0, 20),
3937 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
3938 			BPF_ALU32_IMM(BPF_MOV, R2, 2),
3939 			BPF_ALU32_IMM(BPF_MOV, R3, 3),
3940 			BPF_ALU32_IMM(BPF_MOV, R4, 4),
3941 			BPF_ALU32_IMM(BPF_MOV, R5, 5),
3942 			BPF_ALU32_IMM(BPF_MOV, R6, 6),
3943 			BPF_ALU32_IMM(BPF_MOV, R7, 7),
3944 			BPF_ALU32_IMM(BPF_MOV, R8, 8),
3945 			BPF_ALU32_IMM(BPF_MOV, R9, 9),
3946 			BPF_ALU64_IMM(BPF_ADD, R1, 10),
3947 			BPF_ALU64_IMM(BPF_ADD, R2, 10),
3948 			BPF_ALU64_IMM(BPF_ADD, R3, 10),
3949 			BPF_ALU64_IMM(BPF_ADD, R4, 10),
3950 			BPF_ALU64_IMM(BPF_ADD, R5, 10),
3951 			BPF_ALU64_IMM(BPF_ADD, R6, 10),
3952 			BPF_ALU64_IMM(BPF_ADD, R7, 10),
3953 			BPF_ALU64_IMM(BPF_ADD, R8, 10),
3954 			BPF_ALU64_IMM(BPF_ADD, R9, 10),
3955 			BPF_ALU32_REG(BPF_ADD, R0, R1),
3956 			BPF_ALU32_REG(BPF_ADD, R0, R2),
3957 			BPF_ALU32_REG(BPF_ADD, R0, R3),
3958 			BPF_ALU32_REG(BPF_ADD, R0, R4),
3959 			BPF_ALU32_REG(BPF_ADD, R0, R5),
3960 			BPF_ALU32_REG(BPF_ADD, R0, R6),
3961 			BPF_ALU32_REG(BPF_ADD, R0, R7),
3962 			BPF_ALU32_REG(BPF_ADD, R0, R8),
3963 			BPF_ALU32_REG(BPF_ADD, R0, R9), /* R0 == 155 */
3964 			BPF_JMP_IMM(BPF_JEQ, R0, 155, 1),
3965 			BPF_EXIT_INSN(),
3966 			BPF_ALU32_REG(BPF_ADD, R1, R0),
3967 			BPF_ALU32_REG(BPF_ADD, R1, R1),
3968 			BPF_ALU32_REG(BPF_ADD, R1, R2),
3969 			BPF_ALU32_REG(BPF_ADD, R1, R3),
3970 			BPF_ALU32_REG(BPF_ADD, R1, R4),
3971 			BPF_ALU32_REG(BPF_ADD, R1, R5),
3972 			BPF_ALU32_REG(BPF_ADD, R1, R6),
3973 			BPF_ALU32_REG(BPF_ADD, R1, R7),
3974 			BPF_ALU32_REG(BPF_ADD, R1, R8),
3975 			BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
3976 			BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
3977 			BPF_EXIT_INSN(),
3978 			BPF_ALU32_REG(BPF_ADD, R2, R0),
3979 			BPF_ALU32_REG(BPF_ADD, R2, R1),
3980 			BPF_ALU32_REG(BPF_ADD, R2, R2),
3981 			BPF_ALU32_REG(BPF_ADD, R2, R3),
3982 			BPF_ALU32_REG(BPF_ADD, R2, R4),
3983 			BPF_ALU32_REG(BPF_ADD, R2, R5),
3984 			BPF_ALU32_REG(BPF_ADD, R2, R6),
3985 			BPF_ALU32_REG(BPF_ADD, R2, R7),
3986 			BPF_ALU32_REG(BPF_ADD, R2, R8),
3987 			BPF_ALU32_REG(BPF_ADD, R2, R9), /* R2 == 1358 */
3988 			BPF_JMP_IMM(BPF_JEQ, R2, 1358, 1),
3989 			BPF_EXIT_INSN(),
3990 			BPF_ALU32_REG(BPF_ADD, R3, R0),
3991 			BPF_ALU32_REG(BPF_ADD, R3, R1),
3992 			BPF_ALU32_REG(BPF_ADD, R3, R2),
3993 			BPF_ALU32_REG(BPF_ADD, R3, R3),
3994 			BPF_ALU32_REG(BPF_ADD, R3, R4),
3995 			BPF_ALU32_REG(BPF_ADD, R3, R5),
3996 			BPF_ALU32_REG(BPF_ADD, R3, R6),
3997 			BPF_ALU32_REG(BPF_ADD, R3, R7),
3998 			BPF_ALU32_REG(BPF_ADD, R3, R8),
3999 			BPF_ALU32_REG(BPF_ADD, R3, R9), /* R3 == 4063 */
4000 			BPF_JMP_IMM(BPF_JEQ, R3, 4063, 1),
4001 			BPF_EXIT_INSN(),
4002 			BPF_ALU32_REG(BPF_ADD, R4, R0),
4003 			BPF_ALU32_REG(BPF_ADD, R4, R1),
4004 			BPF_ALU32_REG(BPF_ADD, R4, R2),
4005 			BPF_ALU32_REG(BPF_ADD, R4, R3),
4006 			BPF_ALU32_REG(BPF_ADD, R4, R4),
4007 			BPF_ALU32_REG(BPF_ADD, R4, R5),
4008 			BPF_ALU32_REG(BPF_ADD, R4, R6),
4009 			BPF_ALU32_REG(BPF_ADD, R4, R7),
4010 			BPF_ALU32_REG(BPF_ADD, R4, R8),
4011 			BPF_ALU32_REG(BPF_ADD, R4, R9), /* R4 == 12177 */
4012 			BPF_JMP_IMM(BPF_JEQ, R4, 12177, 1),
4013 			BPF_EXIT_INSN(),
4014 			BPF_ALU32_REG(BPF_ADD, R5, R0),
4015 			BPF_ALU32_REG(BPF_ADD, R5, R1),
4016 			BPF_ALU32_REG(BPF_ADD, R5, R2),
4017 			BPF_ALU32_REG(BPF_ADD, R5, R3),
4018 			BPF_ALU32_REG(BPF_ADD, R5, R4),
4019 			BPF_ALU32_REG(BPF_ADD, R5, R5),
4020 			BPF_ALU32_REG(BPF_ADD, R5, R6),
4021 			BPF_ALU32_REG(BPF_ADD, R5, R7),
4022 			BPF_ALU32_REG(BPF_ADD, R5, R8),
4023 			BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
4024 			BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
4025 			BPF_EXIT_INSN(),
4026 			BPF_ALU32_REG(BPF_ADD, R6, R0),
4027 			BPF_ALU32_REG(BPF_ADD, R6, R1),
4028 			BPF_ALU32_REG(BPF_ADD, R6, R2),
4029 			BPF_ALU32_REG(BPF_ADD, R6, R3),
4030 			BPF_ALU32_REG(BPF_ADD, R6, R4),
4031 			BPF_ALU32_REG(BPF_ADD, R6, R5),
4032 			BPF_ALU32_REG(BPF_ADD, R6, R6),
4033 			BPF_ALU32_REG(BPF_ADD, R6, R7),
4034 			BPF_ALU32_REG(BPF_ADD, R6, R8),
4035 			BPF_ALU32_REG(BPF_ADD, R6, R9), /* R6 == 109540 */
4036 			BPF_JMP_IMM(BPF_JEQ, R6, 109540, 1),
4037 			BPF_EXIT_INSN(),
4038 			BPF_ALU32_REG(BPF_ADD, R7, R0),
4039 			BPF_ALU32_REG(BPF_ADD, R7, R1),
4040 			BPF_ALU32_REG(BPF_ADD, R7, R2),
4041 			BPF_ALU32_REG(BPF_ADD, R7, R3),
4042 			BPF_ALU32_REG(BPF_ADD, R7, R4),
4043 			BPF_ALU32_REG(BPF_ADD, R7, R5),
4044 			BPF_ALU32_REG(BPF_ADD, R7, R6),
4045 			BPF_ALU32_REG(BPF_ADD, R7, R7),
4046 			BPF_ALU32_REG(BPF_ADD, R7, R8),
4047 			BPF_ALU32_REG(BPF_ADD, R7, R9), /* R7 == 328605 */
4048 			BPF_JMP_IMM(BPF_JEQ, R7, 328605, 1),
4049 			BPF_EXIT_INSN(),
4050 			BPF_ALU32_REG(BPF_ADD, R8, R0),
4051 			BPF_ALU32_REG(BPF_ADD, R8, R1),
4052 			BPF_ALU32_REG(BPF_ADD, R8, R2),
4053 			BPF_ALU32_REG(BPF_ADD, R8, R3),
4054 			BPF_ALU32_REG(BPF_ADD, R8, R4),
4055 			BPF_ALU32_REG(BPF_ADD, R8, R5),
4056 			BPF_ALU32_REG(BPF_ADD, R8, R6),
4057 			BPF_ALU32_REG(BPF_ADD, R8, R7),
4058 			BPF_ALU32_REG(BPF_ADD, R8, R8),
4059 			BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
4060 			BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
4061 			BPF_EXIT_INSN(),
4062 			BPF_ALU32_REG(BPF_ADD, R9, R0),
4063 			BPF_ALU32_REG(BPF_ADD, R9, R1),
4064 			BPF_ALU32_REG(BPF_ADD, R9, R2),
4065 			BPF_ALU32_REG(BPF_ADD, R9, R3),
4066 			BPF_ALU32_REG(BPF_ADD, R9, R4),
4067 			BPF_ALU32_REG(BPF_ADD, R9, R5),
4068 			BPF_ALU32_REG(BPF_ADD, R9, R6),
4069 			BPF_ALU32_REG(BPF_ADD, R9, R7),
4070 			BPF_ALU32_REG(BPF_ADD, R9, R8),
4071 			BPF_ALU32_REG(BPF_ADD, R9, R9), /* R9 == 2957380 */
4072 			BPF_ALU32_REG(BPF_MOV, R0, R9),
4073 			BPF_EXIT_INSN(),
4074 		},
4075 		INTERNAL,
4076 		{ },
4077 		{ { 0, 2957380 } }
4078 	},
4079 	{	/* Mainly checking JIT here. */
4080 		"INT: SUB",
4081 		.u.insns_int = {
4082 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4083 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4084 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4085 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4086 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4087 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4088 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4089 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4090 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4091 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4092 			BPF_ALU64_REG(BPF_SUB, R0, R0),
4093 			BPF_ALU64_REG(BPF_SUB, R0, R1),
4094 			BPF_ALU64_REG(BPF_SUB, R0, R2),
4095 			BPF_ALU64_REG(BPF_SUB, R0, R3),
4096 			BPF_ALU64_REG(BPF_SUB, R0, R4),
4097 			BPF_ALU64_REG(BPF_SUB, R0, R5),
4098 			BPF_ALU64_REG(BPF_SUB, R0, R6),
4099 			BPF_ALU64_REG(BPF_SUB, R0, R7),
4100 			BPF_ALU64_REG(BPF_SUB, R0, R8),
4101 			BPF_ALU64_REG(BPF_SUB, R0, R9),
4102 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4103 			BPF_JMP_IMM(BPF_JEQ, R0, -55, 1),
4104 			BPF_EXIT_INSN(),
4105 			BPF_ALU64_REG(BPF_SUB, R1, R0),
4106 			BPF_ALU64_REG(BPF_SUB, R1, R2),
4107 			BPF_ALU64_REG(BPF_SUB, R1, R3),
4108 			BPF_ALU64_REG(BPF_SUB, R1, R4),
4109 			BPF_ALU64_REG(BPF_SUB, R1, R5),
4110 			BPF_ALU64_REG(BPF_SUB, R1, R6),
4111 			BPF_ALU64_REG(BPF_SUB, R1, R7),
4112 			BPF_ALU64_REG(BPF_SUB, R1, R8),
4113 			BPF_ALU64_REG(BPF_SUB, R1, R9),
4114 			BPF_ALU64_IMM(BPF_SUB, R1, 10),
4115 			BPF_ALU64_REG(BPF_SUB, R2, R0),
4116 			BPF_ALU64_REG(BPF_SUB, R2, R1),
4117 			BPF_ALU64_REG(BPF_SUB, R2, R3),
4118 			BPF_ALU64_REG(BPF_SUB, R2, R4),
4119 			BPF_ALU64_REG(BPF_SUB, R2, R5),
4120 			BPF_ALU64_REG(BPF_SUB, R2, R6),
4121 			BPF_ALU64_REG(BPF_SUB, R2, R7),
4122 			BPF_ALU64_REG(BPF_SUB, R2, R8),
4123 			BPF_ALU64_REG(BPF_SUB, R2, R9),
4124 			BPF_ALU64_IMM(BPF_SUB, R2, 10),
4125 			BPF_ALU64_REG(BPF_SUB, R3, R0),
4126 			BPF_ALU64_REG(BPF_SUB, R3, R1),
4127 			BPF_ALU64_REG(BPF_SUB, R3, R2),
4128 			BPF_ALU64_REG(BPF_SUB, R3, R4),
4129 			BPF_ALU64_REG(BPF_SUB, R3, R5),
4130 			BPF_ALU64_REG(BPF_SUB, R3, R6),
4131 			BPF_ALU64_REG(BPF_SUB, R3, R7),
4132 			BPF_ALU64_REG(BPF_SUB, R3, R8),
4133 			BPF_ALU64_REG(BPF_SUB, R3, R9),
4134 			BPF_ALU64_IMM(BPF_SUB, R3, 10),
4135 			BPF_ALU64_REG(BPF_SUB, R4, R0),
4136 			BPF_ALU64_REG(BPF_SUB, R4, R1),
4137 			BPF_ALU64_REG(BPF_SUB, R4, R2),
4138 			BPF_ALU64_REG(BPF_SUB, R4, R3),
4139 			BPF_ALU64_REG(BPF_SUB, R4, R5),
4140 			BPF_ALU64_REG(BPF_SUB, R4, R6),
4141 			BPF_ALU64_REG(BPF_SUB, R4, R7),
4142 			BPF_ALU64_REG(BPF_SUB, R4, R8),
4143 			BPF_ALU64_REG(BPF_SUB, R4, R9),
4144 			BPF_ALU64_IMM(BPF_SUB, R4, 10),
4145 			BPF_ALU64_REG(BPF_SUB, R5, R0),
4146 			BPF_ALU64_REG(BPF_SUB, R5, R1),
4147 			BPF_ALU64_REG(BPF_SUB, R5, R2),
4148 			BPF_ALU64_REG(BPF_SUB, R5, R3),
4149 			BPF_ALU64_REG(BPF_SUB, R5, R4),
4150 			BPF_ALU64_REG(BPF_SUB, R5, R6),
4151 			BPF_ALU64_REG(BPF_SUB, R5, R7),
4152 			BPF_ALU64_REG(BPF_SUB, R5, R8),
4153 			BPF_ALU64_REG(BPF_SUB, R5, R9),
4154 			BPF_ALU64_IMM(BPF_SUB, R5, 10),
4155 			BPF_ALU64_REG(BPF_SUB, R6, R0),
4156 			BPF_ALU64_REG(BPF_SUB, R6, R1),
4157 			BPF_ALU64_REG(BPF_SUB, R6, R2),
4158 			BPF_ALU64_REG(BPF_SUB, R6, R3),
4159 			BPF_ALU64_REG(BPF_SUB, R6, R4),
4160 			BPF_ALU64_REG(BPF_SUB, R6, R5),
4161 			BPF_ALU64_REG(BPF_SUB, R6, R7),
4162 			BPF_ALU64_REG(BPF_SUB, R6, R8),
4163 			BPF_ALU64_REG(BPF_SUB, R6, R9),
4164 			BPF_ALU64_IMM(BPF_SUB, R6, 10),
4165 			BPF_ALU64_REG(BPF_SUB, R7, R0),
4166 			BPF_ALU64_REG(BPF_SUB, R7, R1),
4167 			BPF_ALU64_REG(BPF_SUB, R7, R2),
4168 			BPF_ALU64_REG(BPF_SUB, R7, R3),
4169 			BPF_ALU64_REG(BPF_SUB, R7, R4),
4170 			BPF_ALU64_REG(BPF_SUB, R7, R5),
4171 			BPF_ALU64_REG(BPF_SUB, R7, R6),
4172 			BPF_ALU64_REG(BPF_SUB, R7, R8),
4173 			BPF_ALU64_REG(BPF_SUB, R7, R9),
4174 			BPF_ALU64_IMM(BPF_SUB, R7, 10),
4175 			BPF_ALU64_REG(BPF_SUB, R8, R0),
4176 			BPF_ALU64_REG(BPF_SUB, R8, R1),
4177 			BPF_ALU64_REG(BPF_SUB, R8, R2),
4178 			BPF_ALU64_REG(BPF_SUB, R8, R3),
4179 			BPF_ALU64_REG(BPF_SUB, R8, R4),
4180 			BPF_ALU64_REG(BPF_SUB, R8, R5),
4181 			BPF_ALU64_REG(BPF_SUB, R8, R6),
4182 			BPF_ALU64_REG(BPF_SUB, R8, R7),
4183 			BPF_ALU64_REG(BPF_SUB, R8, R9),
4184 			BPF_ALU64_IMM(BPF_SUB, R8, 10),
4185 			BPF_ALU64_REG(BPF_SUB, R9, R0),
4186 			BPF_ALU64_REG(BPF_SUB, R9, R1),
4187 			BPF_ALU64_REG(BPF_SUB, R9, R2),
4188 			BPF_ALU64_REG(BPF_SUB, R9, R3),
4189 			BPF_ALU64_REG(BPF_SUB, R9, R4),
4190 			BPF_ALU64_REG(BPF_SUB, R9, R5),
4191 			BPF_ALU64_REG(BPF_SUB, R9, R6),
4192 			BPF_ALU64_REG(BPF_SUB, R9, R7),
4193 			BPF_ALU64_REG(BPF_SUB, R9, R8),
4194 			BPF_ALU64_IMM(BPF_SUB, R9, 10),
4195 			BPF_ALU64_IMM(BPF_SUB, R0, 10),
4196 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
4197 			BPF_ALU64_REG(BPF_SUB, R0, R1),
4198 			BPF_ALU64_REG(BPF_SUB, R0, R2),
4199 			BPF_ALU64_REG(BPF_SUB, R0, R3),
4200 			BPF_ALU64_REG(BPF_SUB, R0, R4),
4201 			BPF_ALU64_REG(BPF_SUB, R0, R5),
4202 			BPF_ALU64_REG(BPF_SUB, R0, R6),
4203 			BPF_ALU64_REG(BPF_SUB, R0, R7),
4204 			BPF_ALU64_REG(BPF_SUB, R0, R8),
4205 			BPF_ALU64_REG(BPF_SUB, R0, R9),
4206 			BPF_EXIT_INSN(),
4207 		},
4208 		INTERNAL,
4209 		{ },
4210 		{ { 0, 11 } }
4211 	},
4212 	{	/* Mainly checking JIT here. */
4213 		"INT: XOR",
4214 		.u.insns_int = {
4215 			BPF_ALU64_REG(BPF_SUB, R0, R0),
4216 			BPF_ALU64_REG(BPF_XOR, R1, R1),
4217 			BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
4218 			BPF_EXIT_INSN(),
4219 			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4220 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4221 			BPF_ALU64_REG(BPF_SUB, R1, R1),
4222 			BPF_ALU64_REG(BPF_XOR, R2, R2),
4223 			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
4224 			BPF_EXIT_INSN(),
4225 			BPF_ALU64_REG(BPF_SUB, R2, R2),
4226 			BPF_ALU64_REG(BPF_XOR, R3, R3),
4227 			BPF_ALU64_IMM(BPF_MOV, R0, 10),
4228 			BPF_ALU64_IMM(BPF_MOV, R1, -1),
4229 			BPF_JMP_REG(BPF_JEQ, R2, R3, 1),
4230 			BPF_EXIT_INSN(),
4231 			BPF_ALU64_REG(BPF_SUB, R3, R3),
4232 			BPF_ALU64_REG(BPF_XOR, R4, R4),
4233 			BPF_ALU64_IMM(BPF_MOV, R2, 1),
4234 			BPF_ALU64_IMM(BPF_MOV, R5, -1),
4235 			BPF_JMP_REG(BPF_JEQ, R3, R4, 1),
4236 			BPF_EXIT_INSN(),
4237 			BPF_ALU64_REG(BPF_SUB, R4, R4),
4238 			BPF_ALU64_REG(BPF_XOR, R5, R5),
4239 			BPF_ALU64_IMM(BPF_MOV, R3, 1),
4240 			BPF_ALU64_IMM(BPF_MOV, R7, -1),
4241 			BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
4242 			BPF_EXIT_INSN(),
4243 			BPF_ALU64_IMM(BPF_MOV, R5, 1),
4244 			BPF_ALU64_REG(BPF_SUB, R5, R5),
4245 			BPF_ALU64_REG(BPF_XOR, R6, R6),
4246 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4247 			BPF_ALU64_IMM(BPF_MOV, R8, -1),
4248 			BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
4249 			BPF_EXIT_INSN(),
4250 			BPF_ALU64_REG(BPF_SUB, R6, R6),
4251 			BPF_ALU64_REG(BPF_XOR, R7, R7),
4252 			BPF_JMP_REG(BPF_JEQ, R7, R6, 1),
4253 			BPF_EXIT_INSN(),
4254 			BPF_ALU64_REG(BPF_SUB, R7, R7),
4255 			BPF_ALU64_REG(BPF_XOR, R8, R8),
4256 			BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
4257 			BPF_EXIT_INSN(),
4258 			BPF_ALU64_REG(BPF_SUB, R8, R8),
4259 			BPF_ALU64_REG(BPF_XOR, R9, R9),
4260 			BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
4261 			BPF_EXIT_INSN(),
4262 			BPF_ALU64_REG(BPF_SUB, R9, R9),
4263 			BPF_ALU64_REG(BPF_XOR, R0, R0),
4264 			BPF_JMP_REG(BPF_JEQ, R9, R0, 1),
4265 			BPF_EXIT_INSN(),
4266 			BPF_ALU64_REG(BPF_SUB, R1, R1),
4267 			BPF_ALU64_REG(BPF_XOR, R0, R0),
4268 			BPF_JMP_REG(BPF_JEQ, R9, R0, 2),
4269 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4270 			BPF_EXIT_INSN(),
4271 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
4272 			BPF_EXIT_INSN(),
4273 		},
4274 		INTERNAL,
4275 		{ },
4276 		{ { 0, 1 } }
4277 	},
4278 	{	/* Mainly checking JIT here. */
4279 		"INT: MUL",
4280 		.u.insns_int = {
4281 			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4282 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
4283 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4284 			BPF_ALU64_IMM(BPF_MOV, R3, 3),
4285 			BPF_ALU64_IMM(BPF_MOV, R4, 4),
4286 			BPF_ALU64_IMM(BPF_MOV, R5, 5),
4287 			BPF_ALU64_IMM(BPF_MOV, R6, 6),
4288 			BPF_ALU64_IMM(BPF_MOV, R7, 7),
4289 			BPF_ALU64_IMM(BPF_MOV, R8, 8),
4290 			BPF_ALU64_IMM(BPF_MOV, R9, 9),
4291 			BPF_ALU64_REG(BPF_MUL, R0, R0),
4292 			BPF_ALU64_REG(BPF_MUL, R0, R1),
4293 			BPF_ALU64_REG(BPF_MUL, R0, R2),
4294 			BPF_ALU64_REG(BPF_MUL, R0, R3),
4295 			BPF_ALU64_REG(BPF_MUL, R0, R4),
4296 			BPF_ALU64_REG(BPF_MUL, R0, R5),
4297 			BPF_ALU64_REG(BPF_MUL, R0, R6),
4298 			BPF_ALU64_REG(BPF_MUL, R0, R7),
4299 			BPF_ALU64_REG(BPF_MUL, R0, R8),
4300 			BPF_ALU64_REG(BPF_MUL, R0, R9),
4301 			BPF_ALU64_IMM(BPF_MUL, R0, 10),
4302 			BPF_JMP_IMM(BPF_JEQ, R0, 439084800, 1),
4303 			BPF_EXIT_INSN(),
4304 			BPF_ALU64_REG(BPF_MUL, R1, R0),
4305 			BPF_ALU64_REG(BPF_MUL, R1, R2),
4306 			BPF_ALU64_REG(BPF_MUL, R1, R3),
4307 			BPF_ALU64_REG(BPF_MUL, R1, R4),
4308 			BPF_ALU64_REG(BPF_MUL, R1, R5),
4309 			BPF_ALU64_REG(BPF_MUL, R1, R6),
4310 			BPF_ALU64_REG(BPF_MUL, R1, R7),
4311 			BPF_ALU64_REG(BPF_MUL, R1, R8),
4312 			BPF_ALU64_REG(BPF_MUL, R1, R9),
4313 			BPF_ALU64_IMM(BPF_MUL, R1, 10),
4314 			BPF_ALU64_REG(BPF_MOV, R2, R1),
4315 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4316 			BPF_JMP_IMM(BPF_JEQ, R2, 0x5a924, 1),
4317 			BPF_EXIT_INSN(),
4318 			BPF_ALU64_IMM(BPF_LSH, R1, 32),
4319 			BPF_ALU64_IMM(BPF_ARSH, R1, 32),
4320 			BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
4321 			BPF_EXIT_INSN(),
4322 			BPF_ALU64_REG(BPF_MUL, R2, R0),
4323 			BPF_ALU64_REG(BPF_MUL, R2, R1),
4324 			BPF_ALU64_REG(BPF_MUL, R2, R3),
4325 			BPF_ALU64_REG(BPF_MUL, R2, R4),
4326 			BPF_ALU64_REG(BPF_MUL, R2, R5),
4327 			BPF_ALU64_REG(BPF_MUL, R2, R6),
4328 			BPF_ALU64_REG(BPF_MUL, R2, R7),
4329 			BPF_ALU64_REG(BPF_MUL, R2, R8),
4330 			BPF_ALU64_REG(BPF_MUL, R2, R9),
4331 			BPF_ALU64_IMM(BPF_MUL, R2, 10),
4332 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4333 			BPF_ALU64_REG(BPF_MOV, R0, R2),
4334 			BPF_EXIT_INSN(),
4335 		},
4336 		INTERNAL,
4337 		{ },
4338 		{ { 0, 0x35d97ef2 } }
4339 	},
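	/* MOV REG64, MOV REG32 and LD IMM64 seed every register with
	 * all-ones, clear them with 64-bit MOVs, 32-bit MOVs and 64-bit
	 * immediate loads respectively, and then sum everything into R0;
	 * any register the JIT fails to clear would disturb the expected
	 * 0xfefe.
	 */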
4340 	{	/* Mainly checking JIT here. */
4341 		"MOV REG64",
4342 		.u.insns_int = {
4343 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4344 			BPF_MOV64_REG(R1, R0),
4345 			BPF_MOV64_REG(R2, R1),
4346 			BPF_MOV64_REG(R3, R2),
4347 			BPF_MOV64_REG(R4, R3),
4348 			BPF_MOV64_REG(R5, R4),
4349 			BPF_MOV64_REG(R6, R5),
4350 			BPF_MOV64_REG(R7, R6),
4351 			BPF_MOV64_REG(R8, R7),
4352 			BPF_MOV64_REG(R9, R8),
4353 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4354 			BPF_ALU64_IMM(BPF_MOV, R1, 0),
4355 			BPF_ALU64_IMM(BPF_MOV, R2, 0),
4356 			BPF_ALU64_IMM(BPF_MOV, R3, 0),
4357 			BPF_ALU64_IMM(BPF_MOV, R4, 0),
4358 			BPF_ALU64_IMM(BPF_MOV, R5, 0),
4359 			BPF_ALU64_IMM(BPF_MOV, R6, 0),
4360 			BPF_ALU64_IMM(BPF_MOV, R7, 0),
4361 			BPF_ALU64_IMM(BPF_MOV, R8, 0),
4362 			BPF_ALU64_IMM(BPF_MOV, R9, 0),
4363 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4364 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4365 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4366 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4367 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4368 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4369 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4370 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4371 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4372 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4373 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4374 			BPF_EXIT_INSN(),
4375 		},
4376 		INTERNAL,
4377 		{ },
4378 		{ { 0, 0xfefe } }
4379 	},
4380 	{	/* Mainly checking JIT here. */
4381 		"MOV REG32",
4382 		.u.insns_int = {
4383 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4384 			BPF_MOV64_REG(R1, R0),
4385 			BPF_MOV64_REG(R2, R1),
4386 			BPF_MOV64_REG(R3, R2),
4387 			BPF_MOV64_REG(R4, R3),
4388 			BPF_MOV64_REG(R5, R4),
4389 			BPF_MOV64_REG(R6, R5),
4390 			BPF_MOV64_REG(R7, R6),
4391 			BPF_MOV64_REG(R8, R7),
4392 			BPF_MOV64_REG(R9, R8),
4393 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4394 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
4395 			BPF_ALU32_IMM(BPF_MOV, R2, 0),
4396 			BPF_ALU32_IMM(BPF_MOV, R3, 0),
4397 			BPF_ALU32_IMM(BPF_MOV, R4, 0),
4398 			BPF_ALU32_IMM(BPF_MOV, R5, 0),
4399 			BPF_ALU32_IMM(BPF_MOV, R6, 0),
4400 			BPF_ALU32_IMM(BPF_MOV, R7, 0),
4401 			BPF_ALU32_IMM(BPF_MOV, R8, 0),
4402 			BPF_ALU32_IMM(BPF_MOV, R9, 0),
4403 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4404 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4405 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4406 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4407 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4408 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4409 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4410 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4411 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4412 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4413 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4414 			BPF_EXIT_INSN(),
4415 		},
4416 		INTERNAL,
4417 		{ },
4418 		{ { 0, 0xfefe } }
4419 	},
4420 	{	/* Mainly checking JIT here. */
4421 		"LD IMM64",
4422 		.u.insns_int = {
4423 			BPF_LD_IMM64(R0, 0xffffffffffffffffLL),
4424 			BPF_MOV64_REG(R1, R0),
4425 			BPF_MOV64_REG(R2, R1),
4426 			BPF_MOV64_REG(R3, R2),
4427 			BPF_MOV64_REG(R4, R3),
4428 			BPF_MOV64_REG(R5, R4),
4429 			BPF_MOV64_REG(R6, R5),
4430 			BPF_MOV64_REG(R7, R6),
4431 			BPF_MOV64_REG(R8, R7),
4432 			BPF_MOV64_REG(R9, R8),
4433 			BPF_LD_IMM64(R0, 0x0LL),
4434 			BPF_LD_IMM64(R1, 0x0LL),
4435 			BPF_LD_IMM64(R2, 0x0LL),
4436 			BPF_LD_IMM64(R3, 0x0LL),
4437 			BPF_LD_IMM64(R4, 0x0LL),
4438 			BPF_LD_IMM64(R5, 0x0LL),
4439 			BPF_LD_IMM64(R6, 0x0LL),
4440 			BPF_LD_IMM64(R7, 0x0LL),
4441 			BPF_LD_IMM64(R8, 0x0LL),
4442 			BPF_LD_IMM64(R9, 0x0LL),
4443 			BPF_ALU64_REG(BPF_ADD, R0, R0),
4444 			BPF_ALU64_REG(BPF_ADD, R0, R1),
4445 			BPF_ALU64_REG(BPF_ADD, R0, R2),
4446 			BPF_ALU64_REG(BPF_ADD, R0, R3),
4447 			BPF_ALU64_REG(BPF_ADD, R0, R4),
4448 			BPF_ALU64_REG(BPF_ADD, R0, R5),
4449 			BPF_ALU64_REG(BPF_ADD, R0, R6),
4450 			BPF_ALU64_REG(BPF_ADD, R0, R7),
4451 			BPF_ALU64_REG(BPF_ADD, R0, R8),
4452 			BPF_ALU64_REG(BPF_ADD, R0, R9),
4453 			BPF_ALU64_IMM(BPF_ADD, R0, 0xfefe),
4454 			BPF_EXIT_INSN(),
4455 		},
4456 		INTERNAL,
4457 		{ },
4458 		{ { 0, 0xfefe } }
4459 	},
4460 	{
4461 		"INT: ALU MIX",
4462 		.u.insns_int = {
4463 			BPF_ALU64_IMM(BPF_MOV, R0, 11),
4464 			BPF_ALU64_IMM(BPF_ADD, R0, -1),
4465 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
4466 			BPF_ALU64_IMM(BPF_XOR, R2, 3),
4467 			BPF_ALU64_REG(BPF_DIV, R0, R2),
4468 			BPF_JMP_IMM(BPF_JEQ, R0, 10, 1),
4469 			BPF_EXIT_INSN(),
4470 			BPF_ALU64_IMM(BPF_MOD, R0, 3),
4471 			BPF_JMP_IMM(BPF_JEQ, R0, 1, 1),
4472 			BPF_EXIT_INSN(),
4473 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
4474 			BPF_EXIT_INSN(),
4475 		},
4476 		INTERNAL,
4477 		{ },
4478 		{ { 0, -1 } }
4479 	},
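	/* Register-operand shifts: the 32-bit RSH of -1234 must be a
	 * logical shift (0x7ffffd97), LSH/ARSH must use the full 64-bit
	 * value, and a register may be shifted by itself (R4 <<= R4).
	 */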
4480 	{
4481 		"INT: shifts by register",
4482 		.u.insns_int = {
4483 			BPF_MOV64_IMM(R0, -1234),
4484 			BPF_MOV64_IMM(R1, 1),
4485 			BPF_ALU32_REG(BPF_RSH, R0, R1),
4486 			BPF_JMP_IMM(BPF_JEQ, R0, 0x7ffffd97, 1),
4487 			BPF_EXIT_INSN(),
4488 			BPF_MOV64_IMM(R2, 1),
4489 			BPF_ALU64_REG(BPF_LSH, R0, R2),
4490 			BPF_MOV32_IMM(R4, -1234),
4491 			BPF_JMP_REG(BPF_JEQ, R0, R4, 1),
4492 			BPF_EXIT_INSN(),
4493 			BPF_ALU64_IMM(BPF_AND, R4, 63),
4494 			BPF_ALU64_REG(BPF_LSH, R0, R4), /* R0 <= 46 */
4495 			BPF_MOV64_IMM(R3, 47),
4496 			BPF_ALU64_REG(BPF_ARSH, R0, R3),
4497 			BPF_JMP_IMM(BPF_JEQ, R0, -617, 1),
4498 			BPF_EXIT_INSN(),
4499 			BPF_MOV64_IMM(R2, 1),
4500 			BPF_ALU64_REG(BPF_LSH, R4, R2), /* R4 = 46 << 1 */
4501 			BPF_JMP_IMM(BPF_JEQ, R4, 92, 1),
4502 			BPF_EXIT_INSN(),
4503 			BPF_MOV64_IMM(R4, 4),
4504 			BPF_ALU64_REG(BPF_LSH, R4, R4), /* R4 = 4 << 4 */
4505 			BPF_JMP_IMM(BPF_JEQ, R4, 64, 1),
4506 			BPF_EXIT_INSN(),
4507 			BPF_MOV64_IMM(R4, 5),
4508 			BPF_ALU32_REG(BPF_LSH, R4, R4), /* R4 = 5 << 5 */
4509 			BPF_JMP_IMM(BPF_JEQ, R4, 160, 1),
4510 			BPF_EXIT_INSN(),
4511 			BPF_MOV64_IMM(R0, -1),
4512 			BPF_EXIT_INSN(),
4513 		},
4514 		INTERNAL,
4515 		{ },
4516 		{ { 0, -1 } }
4517 	},
4518 #ifdef CONFIG_32BIT
4519 	{
4520 		"INT: 32-bit context pointer word order and zero-extension",
4521 		.u.insns_int = {
4522 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
4523 			BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
4524 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
4525 			BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
4526 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
4527 			BPF_EXIT_INSN(),
4528 		},
4529 		INTERNAL,
4530 		{ },
4531 		{ { 0, 1 } }
4532 	},
4533 #endif
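	/* The "check:" entries are negative tests: FLAG_EXPECTED_FAIL means
	 * the classic BPF checker must reject the program with -EINVAL
	 * (missing RET, division by constant zero, seccomp-only loads,
	 * out-of-range M[] access, RET X) before it can run.
	 */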
4534 	{
4535 		"check: missing ret",
4536 		.u.insns = {
4537 			BPF_STMT(BPF_LD | BPF_IMM, 1),
4538 		},
4539 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4540 		{ },
4541 		{ },
4542 		.fill_helper = NULL,
4543 		.expected_errcode = -EINVAL,
4544 	},
4545 	{
4546 		"check: div_k_0",
4547 		.u.insns = {
4548 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0),
4549 			BPF_STMT(BPF_RET | BPF_K, 0)
4550 		},
4551 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4552 		{ },
4553 		{ },
4554 		.fill_helper = NULL,
4555 		.expected_errcode = -EINVAL,
4556 	},
4557 	{
4558 		"check: unknown insn",
4559 		.u.insns = {
4560 			/* seccomp insn, rejected in socket filter */
4561 			BPF_STMT(BPF_LDX | BPF_W | BPF_ABS, 0),
4562 			BPF_STMT(BPF_RET | BPF_K, 0)
4563 		},
4564 		CLASSIC | FLAG_EXPECTED_FAIL,
4565 		{ },
4566 		{ },
4567 		.fill_helper = NULL,
4568 		.expected_errcode = -EINVAL,
4569 	},
4570 	{
4571 		"check: out of range spill/fill",
4572 		.u.insns = {
4573 			BPF_STMT(BPF_STX, 16),
4574 			BPF_STMT(BPF_RET | BPF_K, 0)
4575 		},
4576 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4577 		{ },
4578 		{ },
4579 		.fill_helper = NULL,
4580 		.expected_errcode = -EINVAL,
4581 	},
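	/* JUMPS + HOLES stresses branch-offset handling in JITs: long runs
	 * of half-word loads sit between conditional jumps with varying
	 * target distances, and the program must still return the first
	 * half-word of the frame (0x001b).
	 */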
4582 	{
4583 		"JUMPS + HOLES",
4584 		.u.insns = {
4585 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4586 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 15),
4587 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4588 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4589 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4590 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4591 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4592 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4593 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4594 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4595 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4596 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4597 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4598 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4599 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4600 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 3, 4),
4601 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4602 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90c2894d, 1, 2),
4603 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4604 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4605 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4606 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4607 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4608 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4609 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4610 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4611 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4612 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4613 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4614 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4615 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4616 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4617 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4618 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4619 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 2, 3),
4620 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x2ac28349, 1, 2),
4621 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4622 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 14, 15),
4623 			BPF_JUMP(BPF_JMP | BPF_JGE, 0, 13, 14),
4624 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4625 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4626 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4627 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4628 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4629 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4630 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4631 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4632 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4633 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4634 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4635 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4636 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4637 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 2, 3),
4638 			BPF_JUMP(BPF_JMP | BPF_JEQ, 0x90d2ff41, 1, 2),
4639 			BPF_STMT(BPF_LD | BPF_H | BPF_ABS, 0),
4640 			BPF_STMT(BPF_RET | BPF_A, 0),
4641 			BPF_STMT(BPF_RET | BPF_A, 0),
4642 		},
4643 		CLASSIC,
4644 		{ 0x00, 0x1b, 0x21, 0x3c, 0x9d, 0xf8,
4645 		  0x90, 0xe2, 0xba, 0x0a, 0x56, 0xb4,
4646 		  0x08, 0x00,
4647 		  0x45, 0x00, 0x00, 0x28, 0x00, 0x00,
4648 		  0x20, 0x00, 0x40, 0x11, 0x00, 0x00, /* IP header */
4649 		  0xc0, 0xa8, 0x33, 0x01,
4650 		  0xc0, 0xa8, 0x33, 0x02,
4651 		  0xbb, 0xb6,
4652 		  0xa9, 0xfa,
4653 		  0x00, 0x14, 0x00, 0x00,
4654 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4655 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4656 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4657 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4658 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4659 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4660 		  0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
4661 		  0xcc, 0xcc, 0xcc, 0xcc },
4662 		{ { 88, 0x001b } }
4663 	},
4664 	{
4665 		"check: RET X",
4666 		.u.insns = {
4667 			BPF_STMT(BPF_RET | BPF_X, 0),
4668 		},
4669 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4670 		{ },
4671 		{ },
4672 		.fill_helper = NULL,
4673 		.expected_errcode = -EINVAL,
4674 	},
4675 	{
4676 		"check: LDX + RET X",
4677 		.u.insns = {
4678 			BPF_STMT(BPF_LDX | BPF_IMM, 42),
4679 			BPF_STMT(BPF_RET | BPF_X, 0),
4680 		},
4681 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4682 		{ },
4683 		{ },
4684 		.fill_helper = NULL,
4685 		.expected_errcode = -EINVAL,
4686 	},
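	/* The M[] tests cover all 16 classic scratch slots: the "alt"
	 * variant bumps a counter once per slot (100 + 16 = 116), the
	 * "full" variant stores 16 distinct constants and sums them back
	 * to 0x2a5a5e5.
	 */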
4687 	{	/* Mainly checking JIT here. */
4688 		"M[]: alt STX + LDX",
4689 		.u.insns = {
4690 			BPF_STMT(BPF_LDX | BPF_IMM, 100),
4691 			BPF_STMT(BPF_STX, 0),
4692 			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4693 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4694 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4695 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4696 			BPF_STMT(BPF_STX, 1),
4697 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4698 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4699 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4700 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4701 			BPF_STMT(BPF_STX, 2),
4702 			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4703 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4704 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4705 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4706 			BPF_STMT(BPF_STX, 3),
4707 			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4708 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4709 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4710 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4711 			BPF_STMT(BPF_STX, 4),
4712 			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4713 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4714 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4715 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4716 			BPF_STMT(BPF_STX, 5),
4717 			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4718 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4719 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4720 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4721 			BPF_STMT(BPF_STX, 6),
4722 			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4723 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4724 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4725 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4726 			BPF_STMT(BPF_STX, 7),
4727 			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4728 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4729 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4730 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4731 			BPF_STMT(BPF_STX, 8),
4732 			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4733 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4734 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4735 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4736 			BPF_STMT(BPF_STX, 9),
4737 			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4738 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4739 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4740 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4741 			BPF_STMT(BPF_STX, 10),
4742 			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4743 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4744 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4745 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4746 			BPF_STMT(BPF_STX, 11),
4747 			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4748 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4749 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4750 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4751 			BPF_STMT(BPF_STX, 12),
4752 			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4753 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4754 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4755 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4756 			BPF_STMT(BPF_STX, 13),
4757 			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4758 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4759 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4760 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4761 			BPF_STMT(BPF_STX, 14),
4762 			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4763 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4764 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4765 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4766 			BPF_STMT(BPF_STX, 15),
4767 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4768 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4769 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 1),
4770 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
4771 			BPF_STMT(BPF_RET | BPF_A, 0),
4772 		},
4773 		CLASSIC | FLAG_NO_DATA,
4774 		{ },
4775 		{ { 0, 116 } },
4776 	},
4777 	{	/* Mainly checking JIT here. */
4778 		"M[]: full STX + full LDX",
4779 		.u.insns = {
4780 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbadfeedb),
4781 			BPF_STMT(BPF_STX, 0),
4782 			BPF_STMT(BPF_LDX | BPF_IMM, 0xecabedae),
4783 			BPF_STMT(BPF_STX, 1),
4784 			BPF_STMT(BPF_LDX | BPF_IMM, 0xafccfeaf),
4785 			BPF_STMT(BPF_STX, 2),
4786 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbffdcedc),
4787 			BPF_STMT(BPF_STX, 3),
4788 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbbbdccb),
4789 			BPF_STMT(BPF_STX, 4),
4790 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfbabcbda),
4791 			BPF_STMT(BPF_STX, 5),
4792 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaedecbdb),
4793 			BPF_STMT(BPF_STX, 6),
4794 			BPF_STMT(BPF_LDX | BPF_IMM, 0xadebbade),
4795 			BPF_STMT(BPF_STX, 7),
4796 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfcfcfaec),
4797 			BPF_STMT(BPF_STX, 8),
4798 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbcdddbdc),
4799 			BPF_STMT(BPF_STX, 9),
4800 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfeefdfac),
4801 			BPF_STMT(BPF_STX, 10),
4802 			BPF_STMT(BPF_LDX | BPF_IMM, 0xcddcdeea),
4803 			BPF_STMT(BPF_STX, 11),
4804 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaccfaebb),
4805 			BPF_STMT(BPF_STX, 12),
4806 			BPF_STMT(BPF_LDX | BPF_IMM, 0xbdcccdcf),
4807 			BPF_STMT(BPF_STX, 13),
4808 			BPF_STMT(BPF_LDX | BPF_IMM, 0xaaedecde),
4809 			BPF_STMT(BPF_STX, 14),
4810 			BPF_STMT(BPF_LDX | BPF_IMM, 0xfaeacdad),
4811 			BPF_STMT(BPF_STX, 15),
4812 			BPF_STMT(BPF_LDX | BPF_MEM, 0),
4813 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
4814 			BPF_STMT(BPF_LDX | BPF_MEM, 1),
4815 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4816 			BPF_STMT(BPF_LDX | BPF_MEM, 2),
4817 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4818 			BPF_STMT(BPF_LDX | BPF_MEM, 3),
4819 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4820 			BPF_STMT(BPF_LDX | BPF_MEM, 4),
4821 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4822 			BPF_STMT(BPF_LDX | BPF_MEM, 5),
4823 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4824 			BPF_STMT(BPF_LDX | BPF_MEM, 6),
4825 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4826 			BPF_STMT(BPF_LDX | BPF_MEM, 7),
4827 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4828 			BPF_STMT(BPF_LDX | BPF_MEM, 8),
4829 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4830 			BPF_STMT(BPF_LDX | BPF_MEM, 9),
4831 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4832 			BPF_STMT(BPF_LDX | BPF_MEM, 10),
4833 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4834 			BPF_STMT(BPF_LDX | BPF_MEM, 11),
4835 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4836 			BPF_STMT(BPF_LDX | BPF_MEM, 12),
4837 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4838 			BPF_STMT(BPF_LDX | BPF_MEM, 13),
4839 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4840 			BPF_STMT(BPF_LDX | BPF_MEM, 14),
4841 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4842 			BPF_STMT(BPF_LDX | BPF_MEM, 15),
4843 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
4844 			BPF_STMT(BPF_RET | BPF_A, 0),
4845 		},
4846 		CLASSIC | FLAG_NO_DATA,
4847 		{ },
4848 		{ { 0, 0x2a5a5e5 } },
4849 	},
4850 	{
4851 		"check: SKF_AD_MAX",
4852 		.u.insns = {
4853 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4854 				 SKF_AD_OFF + SKF_AD_MAX),
4855 			BPF_STMT(BPF_RET | BPF_A, 0),
4856 		},
4857 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
4858 		{ },
4859 		{ },
4860 		.fill_helper = NULL,
4861 		.expected_errcode = -EINVAL,
4862 	},
4863 	{	/* Passes checker but fails during runtime. */
4864 		"LD [SKF_AD_OFF-1]",
4865 		.u.insns = {
4866 			BPF_STMT(BPF_LD | BPF_W | BPF_ABS,
4867 				 SKF_AD_OFF - 1),
4868 			BPF_STMT(BPF_RET | BPF_K, 1),
4869 		},
4870 		CLASSIC,
4871 		{ },
4872 		{ { 1, 0 } },
4873 	},
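	/* BPF_LD_IMM64 occupies two instruction slots; splitting the loaded
	 * constant back into its upper (0x5678) and lower (0x1234) halves
	 * verifies that both halves of the double insn were emitted.
	 */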
4874 	{
4875 		"load 64-bit immediate",
4876 		.u.insns_int = {
4877 			BPF_LD_IMM64(R1, 0x567800001234LL),
4878 			BPF_MOV64_REG(R2, R1),
4879 			BPF_MOV64_REG(R3, R2),
4880 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
4881 			BPF_ALU64_IMM(BPF_LSH, R3, 32),
4882 			BPF_ALU64_IMM(BPF_RSH, R3, 32),
4883 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
4884 			BPF_JMP_IMM(BPF_JEQ, R2, 0x5678, 1),
4885 			BPF_EXIT_INSN(),
4886 			BPF_JMP_IMM(BPF_JEQ, R3, 0x1234, 1),
4887 			BPF_EXIT_INSN(),
4888 			BPF_LD_IMM64(R0, 0x1ffffffffLL),
4889 			BPF_ALU64_IMM(BPF_RSH, R0, 32), /* R0 = 1 */
4890 			BPF_EXIT_INSN(),
4891 		},
4892 		INTERNAL,
4893 		{ },
4894 		{ { 0, 1 } }
4895 	},
4896 	/* BPF_ALU | BPF_MOV | BPF_X */
4897 	{
4898 		"ALU_MOV_X: dst = 2",
4899 		.u.insns_int = {
4900 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4901 			BPF_ALU32_REG(BPF_MOV, R0, R1),
4902 			BPF_EXIT_INSN(),
4903 		},
4904 		INTERNAL,
4905 		{ },
4906 		{ { 0, 2 } },
4907 	},
4908 	{
4909 		"ALU_MOV_X: dst = 4294967295",
4910 		.u.insns_int = {
4911 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4912 			BPF_ALU32_REG(BPF_MOV, R0, R1),
4913 			BPF_EXIT_INSN(),
4914 		},
4915 		INTERNAL,
4916 		{ },
4917 		{ { 0, 4294967295U } },
4918 	},
4919 	{
4920 		"ALU64_MOV_X: dst = 2",
4921 		.u.insns_int = {
4922 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
4923 			BPF_ALU64_REG(BPF_MOV, R0, R1),
4924 			BPF_EXIT_INSN(),
4925 		},
4926 		INTERNAL,
4927 		{ },
4928 		{ { 0, 2 } },
4929 	},
4930 	{
4931 		"ALU64_MOV_X: dst = 4294967295",
4932 		.u.insns_int = {
4933 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
4934 			BPF_ALU64_REG(BPF_MOV, R0, R1),
4935 			BPF_EXIT_INSN(),
4936 		},
4937 		INTERNAL,
4938 		{ },
4939 		{ { 0, 4294967295U } },
4940 	},
4941 	/* BPF_ALU | BPF_MOV | BPF_K */
4942 	{
4943 		"ALU_MOV_K: dst = 2",
4944 		.u.insns_int = {
4945 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
4946 			BPF_EXIT_INSN(),
4947 		},
4948 		INTERNAL,
4949 		{ },
4950 		{ { 0, 2 } },
4951 	},
4952 	{
4953 		"ALU_MOV_K: dst = 4294967295",
4954 		.u.insns_int = {
4955 			BPF_ALU32_IMM(BPF_MOV, R0, 4294967295U),
4956 			BPF_EXIT_INSN(),
4957 		},
4958 		INTERNAL,
4959 		{ },
4960 		{ { 0, 4294967295U } },
4961 	},
4962 	{
4963 		"ALU_MOV_K: 0x0000ffffffff0000 = 0x00000000ffffffff",
4964 		.u.insns_int = {
4965 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
4966 			BPF_LD_IMM64(R3, 0x00000000ffffffffLL),
4967 			BPF_ALU32_IMM(BPF_MOV, R2, 0xffffffff),
4968 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
4969 			BPF_MOV32_IMM(R0, 2),
4970 			BPF_EXIT_INSN(),
4971 			BPF_MOV32_IMM(R0, 1),
4972 			BPF_EXIT_INSN(),
4973 		},
4974 		INTERNAL,
4975 		{ },
4976 		{ { 0, 0x1 } },
4977 	},
4978 	{
4979 		"ALU_MOV_K: small negative",
4980 		.u.insns_int = {
4981 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4982 			BPF_EXIT_INSN(),
4983 		},
4984 		INTERNAL,
4985 		{ },
4986 		{ { 0, -123 } }
4987 	},
4988 	{
4989 		"ALU_MOV_K: small negative zero extension",
4990 		.u.insns_int = {
4991 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
4992 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
4993 			BPF_EXIT_INSN(),
4994 		},
4995 		INTERNAL,
4996 		{ },
4997 		{ { 0, 0 } }
4998 	},
4999 	{
5000 		"ALU_MOV_K: large negative",
5001 		.u.insns_int = {
5002 			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5003 			BPF_EXIT_INSN(),
5004 		},
5005 		INTERNAL,
5006 		{ },
5007 		{ { 0, -123456789 } }
5008 	},
5009 	{
5010 		"ALU_MOV_K: large negative zero extension",
5011 		.u.insns_int = {
5012 			BPF_ALU32_IMM(BPF_MOV, R0, -123456789),
5013 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5014 			BPF_EXIT_INSN(),
5015 		},
5016 		INTERNAL,
5017 		{ },
5018 		{ { 0, 0 } }
5019 	},
5020 	{
5021 		"ALU64_MOV_K: dst = 2",
5022 		.u.insns_int = {
5023 			BPF_ALU64_IMM(BPF_MOV, R0, 2),
5024 			BPF_EXIT_INSN(),
5025 		},
5026 		INTERNAL,
5027 		{ },
5028 		{ { 0, 2 } },
5029 	},
5030 	{
5031 		"ALU64_MOV_K: dst = 2147483647",
5032 		.u.insns_int = {
5033 			BPF_ALU64_IMM(BPF_MOV, R0, 2147483647),
5034 			BPF_EXIT_INSN(),
5035 		},
5036 		INTERNAL,
5037 		{ },
5038 		{ { 0, 2147483647 } },
5039 	},
5040 	{
5041 		"ALU64_MOV_K: dst = 0x0",
5042 		.u.insns_int = {
5043 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5044 			BPF_LD_IMM64(R3, 0x0),
5045 			BPF_ALU64_IMM(BPF_MOV, R2, 0x0),
5046 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5047 			BPF_MOV32_IMM(R0, 2),
5048 			BPF_EXIT_INSN(),
5049 			BPF_MOV32_IMM(R0, 1),
5050 			BPF_EXIT_INSN(),
5051 		},
5052 		INTERNAL,
5053 		{ },
5054 		{ { 0, 0x1 } },
5055 	},
5056 	{
5057 		"ALU64_MOV_K: dst = -1",
5058 		.u.insns_int = {
5059 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
5060 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5061 			BPF_ALU64_IMM(BPF_MOV, R2, 0xffffffff),
5062 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5063 			BPF_MOV32_IMM(R0, 2),
5064 			BPF_EXIT_INSN(),
5065 			BPF_MOV32_IMM(R0, 1),
5066 			BPF_EXIT_INSN(),
5067 		},
5068 		INTERNAL,
5069 		{ },
5070 		{ { 0, 0x1 } },
5071 	},
5072 	{
5073 		"ALU64_MOV_K: small negative",
5074 		.u.insns_int = {
5075 			BPF_ALU64_IMM(BPF_MOV, R0, -123),
5076 			BPF_EXIT_INSN(),
5077 		},
5078 		INTERNAL,
5079 		{ },
5080 		{ { 0, -123 } }
5081 	},
5082 	{
5083 		"ALU64_MOV_K: small negative sign extension",
5084 		.u.insns_int = {
5085 			BPF_ALU64_IMM(BPF_MOV, R0, -123),
5086 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5087 			BPF_EXIT_INSN(),
5088 		},
5089 		INTERNAL,
5090 		{ },
5091 		{ { 0, 0xffffffff } }
5092 	},
5093 	{
5094 		"ALU64_MOV_K: large negative",
5095 		.u.insns_int = {
5096 			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5097 			BPF_EXIT_INSN(),
5098 		},
5099 		INTERNAL,
5100 		{ },
5101 		{ { 0, -123456789 } }
5102 	},
5103 	{
5104 		"ALU64_MOV_K: large negative sign extension",
5105 		.u.insns_int = {
5106 			BPF_ALU64_IMM(BPF_MOV, R0, -123456789),
5107 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5108 			BPF_EXIT_INSN(),
5109 		},
5110 		INTERNAL,
5111 		{ },
5112 		{ { 0, 0xffffffff } }
5113 	},
5114 	/* MOVSX32 */
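	/*
	 * 32-bit MOVSX sign extends the low 8 or 16 bits of the source into
	 * a 32-bit result, which is then zero extended into the 64-bit
	 * destination, hence the expected 0x00000000ffff... values below.
	 */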
5115 	{
5116 		"ALU_MOVSX | BPF_B",
5117 		.u.insns_int = {
5118 			BPF_LD_IMM64(R2, 0x00000000ffffffefLL),
5119 			BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5120 			BPF_MOVSX32_REG(R1, R3, 8),
5121 			BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5122 			BPF_MOV32_IMM(R0, 2),
5123 			BPF_EXIT_INSN(),
5124 			BPF_MOV32_IMM(R0, 1),
5125 			BPF_EXIT_INSN(),
5126 		},
5127 		INTERNAL,
5128 		{ },
5129 		{ { 0, 0x1 } },
5130 	},
5131 	{
5132 		"ALU_MOVSX | BPF_H",
5133 		.u.insns_int = {
5134 			BPF_LD_IMM64(R2, 0x00000000ffffbeefLL),
5135 			BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5136 			BPF_MOVSX32_REG(R1, R3, 16),
5137 			BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5138 			BPF_MOV32_IMM(R0, 2),
5139 			BPF_EXIT_INSN(),
5140 			BPF_MOV32_IMM(R0, 1),
5141 			BPF_EXIT_INSN(),
5142 		},
5143 		INTERNAL,
5144 		{ },
5145 		{ { 0, 0x1 } },
5146 	},
5147 	/* MOVSX64 REG */
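	/*
	 * 64-bit MOVSX sign extends the low 8, 16 or 32 bits of the source
	 * all the way into the 64-bit destination, so the expected values
	 * below carry the sign through the upper word as well.
	 */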
5148 	{
5149 		"ALU64_MOVSX | BPF_B",
5150 		.u.insns_int = {
5151 			BPF_LD_IMM64(R2, 0xffffffffffffffefLL),
5152 			BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5153 			BPF_MOVSX64_REG(R1, R3, 8),
5154 			BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5155 			BPF_MOV32_IMM(R0, 2),
5156 			BPF_EXIT_INSN(),
5157 			BPF_MOV32_IMM(R0, 1),
5158 			BPF_EXIT_INSN(),
5159 		},
5160 		INTERNAL,
5161 		{ },
5162 		{ { 0, 0x1 } },
5163 	},
5164 	{
5165 		"ALU64_MOVSX | BPF_H",
5166 		.u.insns_int = {
5167 			BPF_LD_IMM64(R2, 0xffffffffffffbeefLL),
5168 			BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5169 			BPF_MOVSX64_REG(R1, R3, 16),
5170 			BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5171 			BPF_MOV32_IMM(R0, 2),
5172 			BPF_EXIT_INSN(),
5173 			BPF_MOV32_IMM(R0, 1),
5174 			BPF_EXIT_INSN(),
5175 		},
5176 		INTERNAL,
5177 		{ },
5178 		{ { 0, 0x1 } },
5179 	},
5180 	{
5181 		"ALU64_MOVSX | BPF_W",
5182 		.u.insns_int = {
5183 			BPF_LD_IMM64(R2, 0xffffffffdeadbeefLL),
5184 			BPF_LD_IMM64(R3, 0xdeadbeefdeadbeefLL),
5185 			BPF_MOVSX64_REG(R1, R3, 32),
5186 			BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
5187 			BPF_MOV32_IMM(R0, 2),
5188 			BPF_EXIT_INSN(),
5189 			BPF_MOV32_IMM(R0, 1),
5190 			BPF_EXIT_INSN(),
5191 		},
5192 		INTERNAL,
5193 		{ },
5194 		{ { 0, 0x1 } },
5195 	},
5196 	/* BPF_ALU | BPF_ADD | BPF_X */
5197 	{
5198 		"ALU_ADD_X: 1 + 2 = 3",
5199 		.u.insns_int = {
5200 			BPF_LD_IMM64(R0, 1),
5201 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5202 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5203 			BPF_EXIT_INSN(),
5204 		},
5205 		INTERNAL,
5206 		{ },
5207 		{ { 0, 3 } },
5208 	},
5209 	{
5210 		"ALU_ADD_X: 1 + 4294967294 = 4294967295",
5211 		.u.insns_int = {
5212 			BPF_LD_IMM64(R0, 1),
5213 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5214 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5215 			BPF_EXIT_INSN(),
5216 		},
5217 		INTERNAL,
5218 		{ },
5219 		{ { 0, 4294967295U } },
5220 	},
5221 	{
5222 		"ALU_ADD_X: 2 + 4294967294 = 0",
5223 		.u.insns_int = {
5224 			BPF_LD_IMM64(R0, 2),
5225 			BPF_LD_IMM64(R1, 4294967294U),
5226 			BPF_ALU32_REG(BPF_ADD, R0, R1),
5227 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5228 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5229 			BPF_EXIT_INSN(),
5230 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5231 			BPF_EXIT_INSN(),
5232 		},
5233 		INTERNAL,
5234 		{ },
5235 		{ { 0, 1 } },
5236 	},
5237 	{
5238 		"ALU64_ADD_X: 1 + 2 = 3",
5239 		.u.insns_int = {
5240 			BPF_LD_IMM64(R0, 1),
5241 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5242 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5243 			BPF_EXIT_INSN(),
5244 		},
5245 		INTERNAL,
5246 		{ },
5247 		{ { 0, 3 } },
5248 	},
5249 	{
5250 		"ALU64_ADD_X: 1 + 4294967294 = 4294967295",
5251 		.u.insns_int = {
5252 			BPF_LD_IMM64(R0, 1),
5253 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5254 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5255 			BPF_EXIT_INSN(),
5256 		},
5257 		INTERNAL,
5258 		{ },
5259 		{ { 0, 4294967295U } },
5260 	},
5261 	{
5262 		"ALU64_ADD_X: 2 + 4294967294 = 4294967296",
5263 		.u.insns_int = {
5264 			BPF_LD_IMM64(R0, 2),
5265 			BPF_LD_IMM64(R1, 4294967294U),
5266 			BPF_LD_IMM64(R2, 4294967296ULL),
5267 			BPF_ALU64_REG(BPF_ADD, R0, R1),
5268 			BPF_JMP_REG(BPF_JEQ, R0, R2, 2),
5269 			BPF_MOV32_IMM(R0, 0),
5270 			BPF_EXIT_INSN(),
5271 			BPF_MOV32_IMM(R0, 1),
5272 			BPF_EXIT_INSN(),
5273 		},
5274 		INTERNAL,
5275 		{ },
5276 		{ { 0, 1 } },
5277 	},
5278 	/* BPF_ALU | BPF_ADD | BPF_K */
5279 	{
5280 		"ALU_ADD_K: 1 + 2 = 3",
5281 		.u.insns_int = {
5282 			BPF_LD_IMM64(R0, 1),
5283 			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5284 			BPF_EXIT_INSN(),
5285 		},
5286 		INTERNAL,
5287 		{ },
5288 		{ { 0, 3 } },
5289 	},
5290 	{
5291 		"ALU_ADD_K: 3 + 0 = 3",
5292 		.u.insns_int = {
5293 			BPF_LD_IMM64(R0, 3),
5294 			BPF_ALU32_IMM(BPF_ADD, R0, 0),
5295 			BPF_EXIT_INSN(),
5296 		},
5297 		INTERNAL,
5298 		{ },
5299 		{ { 0, 3 } },
5300 	},
5301 	{
5302 		"ALU_ADD_K: 1 + 4294967294 = 4294967295",
5303 		.u.insns_int = {
5304 			BPF_LD_IMM64(R0, 1),
5305 			BPF_ALU32_IMM(BPF_ADD, R0, 4294967294U),
5306 			BPF_EXIT_INSN(),
5307 		},
5308 		INTERNAL,
5309 		{ },
5310 		{ { 0, 4294967295U } },
5311 	},
5312 	{
5313 		"ALU_ADD_K: 4294967294 + 2 = 0",
5314 		.u.insns_int = {
5315 			BPF_LD_IMM64(R0, 4294967294U),
5316 			BPF_ALU32_IMM(BPF_ADD, R0, 2),
5317 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
5318 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5319 			BPF_EXIT_INSN(),
5320 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5321 			BPF_EXIT_INSN(),
5322 		},
5323 		INTERNAL,
5324 		{ },
5325 		{ { 0, 1 } },
5326 	},
5327 	{
5328 		"ALU_ADD_K: 0 + (-1) = 0x00000000ffffffff",
5329 		.u.insns_int = {
5330 			BPF_LD_IMM64(R2, 0x0),
5331 			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5332 			BPF_ALU32_IMM(BPF_ADD, R2, 0xffffffff),
5333 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5334 			BPF_MOV32_IMM(R0, 2),
5335 			BPF_EXIT_INSN(),
5336 			BPF_MOV32_IMM(R0, 1),
5337 			BPF_EXIT_INSN(),
5338 		},
5339 		INTERNAL,
5340 		{ },
5341 		{ { 0, 0x1 } },
5342 	},
5343 	{
5344 		"ALU_ADD_K: 0 + 0xffff = 0xffff",
5345 		.u.insns_int = {
5346 			BPF_LD_IMM64(R2, 0x0),
5347 			BPF_LD_IMM64(R3, 0xffff),
5348 			BPF_ALU32_IMM(BPF_ADD, R2, 0xffff),
5349 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5350 			BPF_MOV32_IMM(R0, 2),
5351 			BPF_EXIT_INSN(),
5352 			BPF_MOV32_IMM(R0, 1),
5353 			BPF_EXIT_INSN(),
5354 		},
5355 		INTERNAL,
5356 		{ },
5357 		{ { 0, 0x1 } },
5358 	},
5359 	{
5360 		"ALU_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5361 		.u.insns_int = {
5362 			BPF_LD_IMM64(R2, 0x0),
5363 			BPF_LD_IMM64(R3, 0x7fffffff),
5364 			BPF_ALU32_IMM(BPF_ADD, R2, 0x7fffffff),
5365 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5366 			BPF_MOV32_IMM(R0, 2),
5367 			BPF_EXIT_INSN(),
5368 			BPF_MOV32_IMM(R0, 1),
5369 			BPF_EXIT_INSN(),
5370 		},
5371 		INTERNAL,
5372 		{ },
5373 		{ { 0, 0x1 } },
5374 	},
5375 	{
5376 		"ALU_ADD_K: 0 + 0x80000000 = 0x80000000",
5377 		.u.insns_int = {
5378 			BPF_LD_IMM64(R2, 0x0),
5379 			BPF_LD_IMM64(R3, 0x80000000),
5380 			BPF_ALU32_IMM(BPF_ADD, R2, 0x80000000),
5381 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5382 			BPF_MOV32_IMM(R0, 2),
5383 			BPF_EXIT_INSN(),
5384 			BPF_MOV32_IMM(R0, 1),
5385 			BPF_EXIT_INSN(),
5386 		},
5387 		INTERNAL,
5388 		{ },
5389 		{ { 0, 0x1 } },
5390 	},
5391 	{
5392 		"ALU_ADD_K: 0 + 0x80008000 = 0x80008000",
5393 		.u.insns_int = {
5394 			BPF_LD_IMM64(R2, 0x0),
5395 			BPF_LD_IMM64(R3, 0x80008000),
5396 			BPF_ALU32_IMM(BPF_ADD, R2, 0x80008000),
5397 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5398 			BPF_MOV32_IMM(R0, 2),
5399 			BPF_EXIT_INSN(),
5400 			BPF_MOV32_IMM(R0, 1),
5401 			BPF_EXIT_INSN(),
5402 		},
5403 		INTERNAL,
5404 		{ },
5405 		{ { 0, 0x1 } },
5406 	},
5407 	{
5408 		"ALU64_ADD_K: 1 + 2 = 3",
5409 		.u.insns_int = {
5410 			BPF_LD_IMM64(R0, 1),
5411 			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5412 			BPF_EXIT_INSN(),
5413 		},
5414 		INTERNAL,
5415 		{ },
5416 		{ { 0, 3 } },
5417 	},
5418 	{
5419 		"ALU64_ADD_K: 3 + 0 = 3",
5420 		.u.insns_int = {
5421 			BPF_LD_IMM64(R0, 3),
5422 			BPF_ALU64_IMM(BPF_ADD, R0, 0),
5423 			BPF_EXIT_INSN(),
5424 		},
5425 		INTERNAL,
5426 		{ },
5427 		{ { 0, 3 } },
5428 	},
5429 	{
5430 		"ALU64_ADD_K: 1 + 2147483646 = 2147483647",
5431 		.u.insns_int = {
5432 			BPF_LD_IMM64(R0, 1),
5433 			BPF_ALU64_IMM(BPF_ADD, R0, 2147483646),
5434 			BPF_EXIT_INSN(),
5435 		},
5436 		INTERNAL,
5437 		{ },
5438 		{ { 0, 2147483647 } },
5439 	},
5440 	{
5441 		"ALU64_ADD_K: 4294967294 + 2 = 4294967296",
5442 		.u.insns_int = {
5443 			BPF_LD_IMM64(R0, 4294967294U),
5444 			BPF_LD_IMM64(R1, 4294967296ULL),
5445 			BPF_ALU64_IMM(BPF_ADD, R0, 2),
5446 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
5447 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
5448 			BPF_EXIT_INSN(),
5449 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
5450 			BPF_EXIT_INSN(),
5451 		},
5452 		INTERNAL,
5453 		{ },
5454 		{ { 0, 1 } },
5455 	},
5456 	{
5457 		"ALU64_ADD_K: 2147483646 + -2147483647 = -1",
5458 		.u.insns_int = {
5459 			BPF_LD_IMM64(R0, 2147483646),
5460 			BPF_ALU64_IMM(BPF_ADD, R0, -2147483647),
5461 			BPF_EXIT_INSN(),
5462 		},
5463 		INTERNAL,
5464 		{ },
5465 		{ { 0, -1 } },
5466 	},
5467 	{
5468 		"ALU64_ADD_K: 1 + 0 = 1",
5469 		.u.insns_int = {
5470 			BPF_LD_IMM64(R2, 0x1),
5471 			BPF_LD_IMM64(R3, 0x1),
5472 			BPF_ALU64_IMM(BPF_ADD, R2, 0x0),
5473 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5474 			BPF_MOV32_IMM(R0, 2),
5475 			BPF_EXIT_INSN(),
5476 			BPF_MOV32_IMM(R0, 1),
5477 			BPF_EXIT_INSN(),
5478 		},
5479 		INTERNAL,
5480 		{ },
5481 		{ { 0, 0x1 } },
5482 	},
5483 	{
5484 		"ALU64_ADD_K: 0 + (-1) = 0xffffffffffffffff",
5485 		.u.insns_int = {
5486 			BPF_LD_IMM64(R2, 0x0),
5487 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5488 			BPF_ALU64_IMM(BPF_ADD, R2, 0xffffffff),
5489 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5490 			BPF_MOV32_IMM(R0, 2),
5491 			BPF_EXIT_INSN(),
5492 			BPF_MOV32_IMM(R0, 1),
5493 			BPF_EXIT_INSN(),
5494 		},
5495 		INTERNAL,
5496 		{ },
5497 		{ { 0, 0x1 } },
5498 	},
5499 	{
5500 		"ALU64_ADD_K: 0 + 0xffff = 0xffff",
5501 		.u.insns_int = {
5502 			BPF_LD_IMM64(R2, 0x0),
5503 			BPF_LD_IMM64(R3, 0xffff),
5504 			BPF_ALU64_IMM(BPF_ADD, R2, 0xffff),
5505 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5506 			BPF_MOV32_IMM(R0, 2),
5507 			BPF_EXIT_INSN(),
5508 			BPF_MOV32_IMM(R0, 1),
5509 			BPF_EXIT_INSN(),
5510 		},
5511 		INTERNAL,
5512 		{ },
5513 		{ { 0, 0x1 } },
5514 	},
5515 	{
5516 		"ALU64_ADD_K: 0 + 0x7fffffff = 0x7fffffff",
5517 		.u.insns_int = {
5518 			BPF_LD_IMM64(R2, 0x0),
5519 			BPF_LD_IMM64(R3, 0x7fffffff),
5520 			BPF_ALU64_IMM(BPF_ADD, R2, 0x7fffffff),
5521 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5522 			BPF_MOV32_IMM(R0, 2),
5523 			BPF_EXIT_INSN(),
5524 			BPF_MOV32_IMM(R0, 1),
5525 			BPF_EXIT_INSN(),
5526 		},
5527 		INTERNAL,
5528 		{ },
5529 		{ { 0, 0x1 } },
5530 	},
5531 	{
5532 		"ALU64_ADD_K: 0 + 0x80000000 = 0xffffffff80000000",
5533 		.u.insns_int = {
5534 			BPF_LD_IMM64(R2, 0x0),
5535 			BPF_LD_IMM64(R3, 0xffffffff80000000LL),
5536 			BPF_ALU64_IMM(BPF_ADD, R2, 0x80000000),
5537 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5538 			BPF_MOV32_IMM(R0, 2),
5539 			BPF_EXIT_INSN(),
5540 			BPF_MOV32_IMM(R0, 1),
5541 			BPF_EXIT_INSN(),
5542 		},
5543 		INTERNAL,
5544 		{ },
5545 		{ { 0, 0x1 } },
5546 	},
5547 	{
5548 		"ALU64_ADD_K: 0 + 0x80008000 = 0xffffffff80008000",
5549 		.u.insns_int = {
5550 			BPF_LD_IMM64(R2, 0x0),
5551 			BPF_LD_IMM64(R3, 0xffffffff80008000LL),
5552 			BPF_ALU64_IMM(BPF_ADD, R2, 0x80008000),
5553 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5554 			BPF_MOV32_IMM(R0, 2),
5555 			BPF_EXIT_INSN(),
5556 			BPF_MOV32_IMM(R0, 1),
5557 			BPF_EXIT_INSN(),
5558 		},
5559 		INTERNAL,
5560 		{ },
5561 		{ { 0, 0x1 } },
5562 	},
5563 	/* BPF_ALU | BPF_SUB | BPF_X */
5564 	{
5565 		"ALU_SUB_X: 3 - 1 = 2",
5566 		.u.insns_int = {
5567 			BPF_LD_IMM64(R0, 3),
5568 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5569 			BPF_ALU32_REG(BPF_SUB, R0, R1),
5570 			BPF_EXIT_INSN(),
5571 		},
5572 		INTERNAL,
5573 		{ },
5574 		{ { 0, 2 } },
5575 	},
5576 	{
5577 		"ALU_SUB_X: 4294967295 - 4294967294 = 1",
5578 		.u.insns_int = {
5579 			BPF_LD_IMM64(R0, 4294967295U),
5580 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5581 			BPF_ALU32_REG(BPF_SUB, R0, R1),
5582 			BPF_EXIT_INSN(),
5583 		},
5584 		INTERNAL,
5585 		{ },
5586 		{ { 0, 1 } },
5587 	},
5588 	{
5589 		"ALU64_SUB_X: 3 - 1 = 2",
5590 		.u.insns_int = {
5591 			BPF_LD_IMM64(R0, 3),
5592 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
5593 			BPF_ALU64_REG(BPF_SUB, R0, R1),
5594 			BPF_EXIT_INSN(),
5595 		},
5596 		INTERNAL,
5597 		{ },
5598 		{ { 0, 2 } },
5599 	},
5600 	{
5601 		"ALU64_SUB_X: 4294967295 - 4294967294 = 1",
5602 		.u.insns_int = {
5603 			BPF_LD_IMM64(R0, 4294967295U),
5604 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
5605 			BPF_ALU64_REG(BPF_SUB, R0, R1),
5606 			BPF_EXIT_INSN(),
5607 		},
5608 		INTERNAL,
5609 		{ },
5610 		{ { 0, 1 } },
5611 	},
5612 	/* BPF_ALU | BPF_SUB | BPF_K */
5613 	{
5614 		"ALU_SUB_K: 3 - 1 = 2",
5615 		.u.insns_int = {
5616 			BPF_LD_IMM64(R0, 3),
5617 			BPF_ALU32_IMM(BPF_SUB, R0, 1),
5618 			BPF_EXIT_INSN(),
5619 		},
5620 		INTERNAL,
5621 		{ },
5622 		{ { 0, 2 } },
5623 	},
5624 	{
5625 		"ALU_SUB_K: 3 - 0 = 3",
5626 		.u.insns_int = {
5627 			BPF_LD_IMM64(R0, 3),
5628 			BPF_ALU32_IMM(BPF_SUB, R0, 0),
5629 			BPF_EXIT_INSN(),
5630 		},
5631 		INTERNAL,
5632 		{ },
5633 		{ { 0, 3 } },
5634 	},
5635 	{
5636 		"ALU_SUB_K: 4294967295 - 4294967294 = 1",
5637 		.u.insns_int = {
5638 			BPF_LD_IMM64(R0, 4294967295U),
5639 			BPF_ALU32_IMM(BPF_SUB, R0, 4294967294U),
5640 			BPF_EXIT_INSN(),
5641 		},
5642 		INTERNAL,
5643 		{ },
5644 		{ { 0, 1 } },
5645 	},
5646 	{
5647 		"ALU64_SUB_K: 3 - 1 = 2",
5648 		.u.insns_int = {
5649 			BPF_LD_IMM64(R0, 3),
5650 			BPF_ALU64_IMM(BPF_SUB, R0, 1),
5651 			BPF_EXIT_INSN(),
5652 		},
5653 		INTERNAL,
5654 		{ },
5655 		{ { 0, 2 } },
5656 	},
5657 	{
5658 		"ALU64_SUB_K: 3 - 0 = 3",
5659 		.u.insns_int = {
5660 			BPF_LD_IMM64(R0, 3),
5661 			BPF_ALU64_IMM(BPF_SUB, R0, 0),
5662 			BPF_EXIT_INSN(),
5663 		},
5664 		INTERNAL,
5665 		{ },
5666 		{ { 0, 3 } },
5667 	},
5668 	{
5669 		"ALU64_SUB_K: 4294967294 - 4294967295 = -1",
5670 		.u.insns_int = {
5671 			BPF_LD_IMM64(R0, 4294967294U),
5672 			BPF_ALU64_IMM(BPF_SUB, R0, 4294967295U),
5673 			BPF_EXIT_INSN(),
5674 		},
5675 		INTERNAL,
5676 		{ },
5677 		{ { 0, -1 } },
5678 	},
5679 	{
5680 		"ALU64_SUB_K: 2147483646 - 2147483647 = -1",
5681 		.u.insns_int = {
5682 			BPF_LD_IMM64(R0, 2147483646),
5683 			BPF_ALU64_IMM(BPF_SUB, R0, 2147483647),
5684 			BPF_EXIT_INSN(),
5685 		},
5686 		INTERNAL,
5687 		{ },
5688 		{ { 0, -1 } },
5689 	},
5690 	/* BPF_ALU | BPF_MUL | BPF_X */
5691 	{
5692 		"ALU_MUL_X: 2 * 3 = 6",
5693 		.u.insns_int = {
5694 			BPF_LD_IMM64(R0, 2),
5695 			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5696 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5697 			BPF_EXIT_INSN(),
5698 		},
5699 		INTERNAL,
5700 		{ },
5701 		{ { 0, 6 } },
5702 	},
5703 	{
5704 		"ALU_MUL_X: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5705 		.u.insns_int = {
5706 			BPF_LD_IMM64(R0, 2),
5707 			BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
5708 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5709 			BPF_EXIT_INSN(),
5710 		},
5711 		INTERNAL,
5712 		{ },
5713 		{ { 0, 0xFFFFFFF0 } },
5714 	},
5715 	{
5716 		"ALU_MUL_X: -1 * -1 = 1",
5717 		.u.insns_int = {
5718 			BPF_LD_IMM64(R0, -1),
5719 			BPF_ALU32_IMM(BPF_MOV, R1, -1),
5720 			BPF_ALU32_REG(BPF_MUL, R0, R1),
5721 			BPF_EXIT_INSN(),
5722 		},
5723 		INTERNAL,
5724 		{ },
5725 		{ { 0, 1 } },
5726 	},
5727 	{
5728 		"ALU64_MUL_X: 2 * 3 = 6",
5729 		.u.insns_int = {
5730 			BPF_LD_IMM64(R0, 2),
5731 			BPF_ALU32_IMM(BPF_MOV, R1, 3),
5732 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5733 			BPF_EXIT_INSN(),
5734 		},
5735 		INTERNAL,
5736 		{ },
5737 		{ { 0, 6 } },
5738 	},
5739 	{
5740 		"ALU64_MUL_X: 1 * 2147483647 = 2147483647",
5741 		.u.insns_int = {
5742 			BPF_LD_IMM64(R0, 1),
5743 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5744 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5745 			BPF_EXIT_INSN(),
5746 		},
5747 		INTERNAL,
5748 		{ },
5749 		{ { 0, 2147483647 } },
5750 	},
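	/*
	 * A test run can only return 32 bits, so the full 64-bit product is
	 * checked in two steps: once returning the low word directly, and
	 * once after shifting the high word down by 32.
	 */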
5751 	{
5752 		"ALU64_MUL_X: 64x64 multiply, low word",
5753 		.u.insns_int = {
5754 			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5755 			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5756 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5757 			BPF_EXIT_INSN(),
5758 		},
5759 		INTERNAL,
5760 		{ },
5761 		{ { 0, 0xe5618cf0 } }
5762 	},
5763 	{
5764 		"ALU64_MUL_X: 64x64 multiply, high word",
5765 		.u.insns_int = {
5766 			BPF_LD_IMM64(R0, 0x0fedcba987654321LL),
5767 			BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
5768 			BPF_ALU64_REG(BPF_MUL, R0, R1),
5769 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5770 			BPF_EXIT_INSN(),
5771 		},
5772 		INTERNAL,
5773 		{ },
5774 		{ { 0, 0x2236d88f } }
5775 	},
5776 	/* BPF_ALU | BPF_MUL | BPF_K */
5777 	{
5778 		"ALU_MUL_K: 2 * 3 = 6",
5779 		.u.insns_int = {
5780 			BPF_LD_IMM64(R0, 2),
5781 			BPF_ALU32_IMM(BPF_MUL, R0, 3),
5782 			BPF_EXIT_INSN(),
5783 		},
5784 		INTERNAL,
5785 		{ },
5786 		{ { 0, 6 } },
5787 	},
5788 	{
5789 		"ALU_MUL_K: 3 * 1 = 3",
5790 		.u.insns_int = {
5791 			BPF_LD_IMM64(R0, 3),
5792 			BPF_ALU32_IMM(BPF_MUL, R0, 1),
5793 			BPF_EXIT_INSN(),
5794 		},
5795 		INTERNAL,
5796 		{ },
5797 		{ { 0, 3 } },
5798 	},
5799 	{
5800 		"ALU_MUL_K: 2 * 0x7FFFFFF8 = 0xFFFFFFF0",
5801 		.u.insns_int = {
5802 			BPF_LD_IMM64(R0, 2),
5803 			BPF_ALU32_IMM(BPF_MUL, R0, 0x7FFFFFF8),
5804 			BPF_EXIT_INSN(),
5805 		},
5806 		INTERNAL,
5807 		{ },
5808 		{ { 0, 0xFFFFFFF0 } },
5809 	},
5810 	{
5811 		"ALU_MUL_K: 1 * (-1) = 0x00000000ffffffff",
5812 		.u.insns_int = {
5813 			BPF_LD_IMM64(R2, 0x1),
5814 			BPF_LD_IMM64(R3, 0x00000000ffffffff),
5815 			BPF_ALU32_IMM(BPF_MUL, R2, 0xffffffff),
5816 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5817 			BPF_MOV32_IMM(R0, 2),
5818 			BPF_EXIT_INSN(),
5819 			BPF_MOV32_IMM(R0, 1),
5820 			BPF_EXIT_INSN(),
5821 		},
5822 		INTERNAL,
5823 		{ },
5824 		{ { 0, 0x1 } },
5825 	},
5826 	{
5827 		"ALU64_MUL_K: 2 * 3 = 6",
5828 		.u.insns_int = {
5829 			BPF_LD_IMM64(R0, 2),
5830 			BPF_ALU64_IMM(BPF_MUL, R0, 3),
5831 			BPF_EXIT_INSN(),
5832 		},
5833 		INTERNAL,
5834 		{ },
5835 		{ { 0, 6 } },
5836 	},
5837 	{
5838 		"ALU64_MUL_K: 3 * 1 = 3",
5839 		.u.insns_int = {
5840 			BPF_LD_IMM64(R0, 3),
5841 			BPF_ALU64_IMM(BPF_MUL, R0, 1),
5842 			BPF_EXIT_INSN(),
5843 		},
5844 		INTERNAL,
5845 		{ },
5846 		{ { 0, 3 } },
5847 	},
5848 	{
5849 		"ALU64_MUL_K: 1 * 2147483647 = 2147483647",
5850 		.u.insns_int = {
5851 			BPF_LD_IMM64(R0, 1),
5852 			BPF_ALU64_IMM(BPF_MUL, R0, 2147483647),
5853 			BPF_EXIT_INSN(),
5854 		},
5855 		INTERNAL,
5856 		{ },
5857 		{ { 0, 2147483647 } },
5858 	},
5859 	{
5860 		"ALU64_MUL_K: 1 * -2147483647 = -2147483647",
5861 		.u.insns_int = {
5862 			BPF_LD_IMM64(R0, 1),
5863 			BPF_ALU64_IMM(BPF_MUL, R0, -2147483647),
5864 			BPF_EXIT_INSN(),
5865 		},
5866 		INTERNAL,
5867 		{ },
5868 		{ { 0, -2147483647 } },
5869 	},
5870 	{
5871 		"ALU64_MUL_K: 1 * (-1) = 0xffffffffffffffff",
5872 		.u.insns_int = {
5873 			BPF_LD_IMM64(R2, 0x1),
5874 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
5875 			BPF_ALU64_IMM(BPF_MUL, R2, 0xffffffff),
5876 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5877 			BPF_MOV32_IMM(R0, 2),
5878 			BPF_EXIT_INSN(),
5879 			BPF_MOV32_IMM(R0, 1),
5880 			BPF_EXIT_INSN(),
5881 		},
5882 		INTERNAL,
5883 		{ },
5884 		{ { 0, 0x1 } },
5885 	},
5886 	{
5887 		"ALU64_MUL_K: 64x32 multiply, low word",
5888 		.u.insns_int = {
5889 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5890 			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5891 			BPF_EXIT_INSN(),
5892 		},
5893 		INTERNAL,
5894 		{ },
5895 		{ { 0, 0xe242d208 } }
5896 	},
5897 	{
5898 		"ALU64_MUL_K: 64x32 multiply, high word",
5899 		.u.insns_int = {
5900 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
5901 			BPF_ALU64_IMM(BPF_MUL, R0, 0x12345678),
5902 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
5903 			BPF_EXIT_INSN(),
5904 		},
5905 		INTERNAL,
5906 		{ },
5907 		{ { 0, 0xc28f5c28 } }
5908 	},
5909 	/* BPF_ALU | BPF_DIV | BPF_X */
5910 	{
5911 		"ALU_DIV_X: 6 / 2 = 3",
5912 		.u.insns_int = {
5913 			BPF_LD_IMM64(R0, 6),
5914 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5915 			BPF_ALU32_REG(BPF_DIV, R0, R1),
5916 			BPF_EXIT_INSN(),
5917 		},
5918 		INTERNAL,
5919 		{ },
5920 		{ { 0, 3 } },
5921 	},
5922 	{
5923 		"ALU_DIV_X: 4294967295 / 4294967295 = 1",
5924 		.u.insns_int = {
5925 			BPF_LD_IMM64(R0, 4294967295U),
5926 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
5927 			BPF_ALU32_REG(BPF_DIV, R0, R1),
5928 			BPF_EXIT_INSN(),
5929 		},
5930 		INTERNAL,
5931 		{ },
5932 		{ { 0, 1 } },
5933 	},
5934 	{
5935 		"ALU64_DIV_X: 6 / 2 = 3",
5936 		.u.insns_int = {
5937 			BPF_LD_IMM64(R0, 6),
5938 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
5939 			BPF_ALU64_REG(BPF_DIV, R0, R1),
5940 			BPF_EXIT_INSN(),
5941 		},
5942 		INTERNAL,
5943 		{ },
5944 		{ { 0, 3 } },
5945 	},
5946 	{
5947 		"ALU64_DIV_X: 2147483647 / 2147483647 = 1",
5948 		.u.insns_int = {
5949 			BPF_LD_IMM64(R0, 2147483647),
5950 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
5951 			BPF_ALU64_REG(BPF_DIV, R0, R1),
5952 			BPF_EXIT_INSN(),
5953 		},
5954 		INTERNAL,
5955 		{ },
5956 		{ { 0, 1 } },
5957 	},
5958 	{
5959 		"ALU64_DIV_X: 0xffffffffffffffff / (-1) = 0x0000000000000001",
5960 		.u.insns_int = {
5961 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
5962 			BPF_LD_IMM64(R4, 0xffffffffffffffffLL),
5963 			BPF_LD_IMM64(R3, 0x0000000000000001LL),
5964 			BPF_ALU64_REG(BPF_DIV, R2, R4),
5965 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
5966 			BPF_MOV32_IMM(R0, 2),
5967 			BPF_EXIT_INSN(),
5968 			BPF_MOV32_IMM(R0, 1),
5969 			BPF_EXIT_INSN(),
5970 		},
5971 		INTERNAL,
5972 		{ },
5973 		{ { 0, 0x1 } },
5974 	},
5975 	/* BPF_ALU | BPF_DIV | BPF_K */
5976 	{
5977 		"ALU_DIV_K: 6 / 2 = 3",
5978 		.u.insns_int = {
5979 			BPF_LD_IMM64(R0, 6),
5980 			BPF_ALU32_IMM(BPF_DIV, R0, 2),
5981 			BPF_EXIT_INSN(),
5982 		},
5983 		INTERNAL,
5984 		{ },
5985 		{ { 0, 3 } },
5986 	},
5987 	{
5988 		"ALU_DIV_K: 3 / 1 = 3",
5989 		.u.insns_int = {
5990 			BPF_LD_IMM64(R0, 3),
5991 			BPF_ALU32_IMM(BPF_DIV, R0, 1),
5992 			BPF_EXIT_INSN(),
5993 		},
5994 		INTERNAL,
5995 		{ },
5996 		{ { 0, 3 } },
5997 	},
5998 	{
5999 		"ALU_DIV_K: 4294967295 / 4294967295 = 1",
6000 		.u.insns_int = {
6001 			BPF_LD_IMM64(R0, 4294967295U),
6002 			BPF_ALU32_IMM(BPF_DIV, R0, 4294967295U),
6003 			BPF_EXIT_INSN(),
6004 		},
6005 		INTERNAL,
6006 		{ },
6007 		{ { 0, 1 } },
6008 	},
6009 	{
6010 		"ALU_DIV_K: 0xffffffffffffffff / (-1) = 0x1",
6011 		.u.insns_int = {
6012 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6013 			BPF_LD_IMM64(R3, 0x1UL),
6014 			BPF_ALU32_IMM(BPF_DIV, R2, 0xffffffff),
6015 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6016 			BPF_MOV32_IMM(R0, 2),
6017 			BPF_EXIT_INSN(),
6018 			BPF_MOV32_IMM(R0, 1),
6019 			BPF_EXIT_INSN(),
6020 		},
6021 		INTERNAL,
6022 		{ },
6023 		{ { 0, 0x1 } },
6024 	},
6025 	{
6026 		"ALU64_DIV_K: 6 / 2 = 3",
6027 		.u.insns_int = {
6028 			BPF_LD_IMM64(R0, 6),
6029 			BPF_ALU64_IMM(BPF_DIV, R0, 2),
6030 			BPF_EXIT_INSN(),
6031 		},
6032 		INTERNAL,
6033 		{ },
6034 		{ { 0, 3 } },
6035 	},
6036 	{
6037 		"ALU64_DIV_K: 3 / 1 = 3",
6038 		.u.insns_int = {
6039 			BPF_LD_IMM64(R0, 3),
6040 			BPF_ALU64_IMM(BPF_DIV, R0, 1),
6041 			BPF_EXIT_INSN(),
6042 		},
6043 		INTERNAL,
6044 		{ },
6045 		{ { 0, 3 } },
6046 	},
6047 	{
6048 		"ALU64_DIV_K: 2147483647 / 2147483647 = 1",
6049 		.u.insns_int = {
6050 			BPF_LD_IMM64(R0, 2147483647),
6051 			BPF_ALU64_IMM(BPF_DIV, R0, 2147483647),
6052 			BPF_EXIT_INSN(),
6053 		},
6054 		INTERNAL,
6055 		{ },
6056 		{ { 0, 1 } },
6057 	},
6058 	{
6059 		"ALU64_DIV_K: 0xffffffffffffffff / (-1) = 0x0000000000000001",
6060 		.u.insns_int = {
6061 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6062 			BPF_LD_IMM64(R3, 0x0000000000000001LL),
6063 			BPF_ALU64_IMM(BPF_DIV, R2, 0xffffffff),
6064 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6065 			BPF_MOV32_IMM(R0, 2),
6066 			BPF_EXIT_INSN(),
6067 			BPF_MOV32_IMM(R0, 1),
6068 			BPF_EXIT_INSN(),
6069 		},
6070 		INTERNAL,
6071 		{ },
6072 		{ { 0, 0x1 } },
6073 	},
6074 	/* BPF_ALU | BPF_MOD | BPF_X */
6075 	{
6076 		"ALU_MOD_X: 3 % 2 = 1",
6077 		.u.insns_int = {
6078 			BPF_LD_IMM64(R0, 3),
6079 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6080 			BPF_ALU32_REG(BPF_MOD, R0, R1),
6081 			BPF_EXIT_INSN(),
6082 		},
6083 		INTERNAL,
6084 		{ },
6085 		{ { 0, 1 } },
6086 	},
6087 	{
6088 		"ALU_MOD_X: 4294967295 % 4294967293 = 2",
6089 		.u.insns_int = {
6090 			BPF_LD_IMM64(R0, 4294967295U),
6091 			BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
6092 			BPF_ALU32_REG(BPF_MOD, R0, R1),
6093 			BPF_EXIT_INSN(),
6094 		},
6095 		INTERNAL,
6096 		{ },
6097 		{ { 0, 2 } },
6098 	},
6099 	{
6100 		"ALU64_MOD_X: 3 % 2 = 1",
6101 		.u.insns_int = {
6102 			BPF_LD_IMM64(R0, 3),
6103 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6104 			BPF_ALU64_REG(BPF_MOD, R0, R1),
6105 			BPF_EXIT_INSN(),
6106 		},
6107 		INTERNAL,
6108 		{ },
6109 		{ { 0, 1 } },
6110 	},
6111 	{
6112 		"ALU64_MOD_X: 2147483647 % 2147483645 = 2",
6113 		.u.insns_int = {
6114 			BPF_LD_IMM64(R0, 2147483647),
6115 			BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
6116 			BPF_ALU64_REG(BPF_MOD, R0, R1),
6117 			BPF_EXIT_INSN(),
6118 		},
6119 		INTERNAL,
6120 		{ },
6121 		{ { 0, 2 } },
6122 	},
6123 	/* BPF_ALU | BPF_MOD | BPF_K */
6124 	{
6125 		"ALU_MOD_K: 3 % 2 = 1",
6126 		.u.insns_int = {
6127 			BPF_LD_IMM64(R0, 3),
6128 			BPF_ALU32_IMM(BPF_MOD, R0, 2),
6129 			BPF_EXIT_INSN(),
6130 		},
6131 		INTERNAL,
6132 		{ },
6133 		{ { 0, 1 } },
6134 	},
6135 	{
6136 		"ALU_MOD_K: 3 % 1 = 0",
6137 		.u.insns_int = {
6138 			BPF_LD_IMM64(R0, 3),
6139 			BPF_ALU32_IMM(BPF_MOD, R0, 1),
6140 			BPF_EXIT_INSN(),
6141 		},
6142 		INTERNAL,
6143 		{ },
6144 		{ { 0, 0 } },
6145 	},
6146 	{
6147 		"ALU_MOD_K: 4294967295 % 4294967293 = 2",
6148 		.u.insns_int = {
6149 			BPF_LD_IMM64(R0, 4294967295U),
6150 			BPF_ALU32_IMM(BPF_MOD, R0, 4294967293U),
6151 			BPF_EXIT_INSN(),
6152 		},
6153 		INTERNAL,
6154 		{ },
6155 		{ { 0, 2 } },
6156 	},
6157 	{
6158 		"ALU64_MOD_K: 3 % 2 = 1",
6159 		.u.insns_int = {
6160 			BPF_LD_IMM64(R0, 3),
6161 			BPF_ALU64_IMM(BPF_MOD, R0, 2),
6162 			BPF_EXIT_INSN(),
6163 		},
6164 		INTERNAL,
6165 		{ },
6166 		{ { 0, 1 } },
6167 	},
6168 	{
6169 		"ALU64_MOD_K: 3 % 1 = 0",
6170 		.u.insns_int = {
6171 			BPF_LD_IMM64(R0, 3),
6172 			BPF_ALU64_IMM(BPF_MOD, R0, 1),
6173 			BPF_EXIT_INSN(),
6174 		},
6175 		INTERNAL,
6176 		{ },
6177 		{ { 0, 0 } },
6178 	},
6179 	{
6180 		"ALU64_MOD_K: 2147483647 % 2147483645 = 2",
6181 		.u.insns_int = {
6182 			BPF_LD_IMM64(R0, 2147483647),
6183 			BPF_ALU64_IMM(BPF_MOD, R0, 2147483645),
6184 			BPF_EXIT_INSN(),
6185 		},
6186 		INTERNAL,
6187 		{ },
6188 		{ { 0, 2 } },
6189 	},
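	/*
	 * With offset 1, BPF_DIV and BPF_MOD select their signed variants.
	 * Signed division truncates towards zero, so the matching modulo
	 * keeps the sign of the dividend (-7 % 2 = -1), as the tests below
	 * expect.
	 */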
6190 	/* BPF_ALU | BPF_DIV | BPF_X off=1 (SDIV) */
6191 	{
6192 		"ALU_SDIV_X: -6 / 2 = -3",
6193 		.u.insns_int = {
6194 			BPF_LD_IMM64(R0, -6),
6195 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6196 			BPF_ALU32_REG_OFF(BPF_DIV, R0, R1, 1),
6197 			BPF_EXIT_INSN(),
6198 		},
6199 		INTERNAL,
6200 		{ },
6201 		{ { 0, -3 } },
6202 	},
6203 	/* BPF_ALU | BPF_DIV | BPF_K off=1 (SDIV) */
6204 	{
6205 		"ALU_SDIV_K: -6 / 2 = -3",
6206 		.u.insns_int = {
6207 			BPF_LD_IMM64(R0, -6),
6208 			BPF_ALU32_IMM_OFF(BPF_DIV, R0, 2, 1),
6209 			BPF_EXIT_INSN(),
6210 		},
6211 		INTERNAL,
6212 		{ },
6213 		{ { 0, -3 } },
6214 	},
6215 	/* BPF_ALU64 | BPF_DIV | BPF_X off=1 (SDIV64) */
6216 	{
6217 		"ALU64_SDIV_X: -6 / 2 = -3",
6218 		.u.insns_int = {
6219 			BPF_LD_IMM64(R0, -6),
6220 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6221 			BPF_ALU64_REG_OFF(BPF_DIV, R0, R1, 1),
6222 			BPF_EXIT_INSN(),
6223 		},
6224 		INTERNAL,
6225 		{ },
6226 		{ { 0, -3 } },
6227 	},
6228 	/* BPF_ALU64 | BPF_DIV | BPF_K off=1 (SDIV64) */
6229 	{
6230 		"ALU64_SDIV_K: -6 / 2 = -3",
6231 		.u.insns_int = {
6232 			BPF_LD_IMM64(R0, -6),
6233 			BPF_ALU64_IMM_OFF(BPF_DIV, R0, 2, 1),
6234 			BPF_EXIT_INSN(),
6235 		},
6236 		INTERNAL,
6237 		{ },
6238 		{ { 0, -3 } },
6239 	},
6240 	/* BPF_ALU | BPF_MOD | BPF_X off=1 (SMOD) */
6241 	{
6242 		"ALU_SMOD_X: -7 % 2 = -1",
6243 		.u.insns_int = {
6244 			BPF_LD_IMM64(R0, -7),
6245 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6246 			BPF_ALU32_REG_OFF(BPF_MOD, R0, R1, 1),
6247 			BPF_EXIT_INSN(),
6248 		},
6249 		INTERNAL,
6250 		{ },
6251 		{ { 0, -1 } },
6252 	},
6253 	/* BPF_ALU | BPF_MOD | BPF_K off=1 (SMOD) */
6254 	{
6255 		"ALU_SMOD_K: -7 % 2 = -1",
6256 		.u.insns_int = {
6257 			BPF_LD_IMM64(R0, -7),
6258 			BPF_ALU32_IMM_OFF(BPF_MOD, R0, 2, 1),
6259 			BPF_EXIT_INSN(),
6260 		},
6261 		INTERNAL,
6262 		{ },
6263 		{ { 0, -1 } },
6264 	},
6265 	/* BPF_ALU64 | BPF_MOD | BPF_X off=1 (SMOD64) */
6266 	{
6267 		"ALU64_SMOD_X: -7 % 2 = -1",
6268 		.u.insns_int = {
6269 			BPF_LD_IMM64(R0, -7),
6270 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6271 			BPF_ALU64_REG_OFF(BPF_MOD, R0, R1, 1),
6272 			BPF_EXIT_INSN(),
6273 		},
6274 		INTERNAL,
6275 		{ },
6276 		{ { 0, -1 } },
6277 	},
6278 	/* BPF_ALU64 | BPF_MOD | BPF_K off=1 (SMOD64) */
6279 	{
6280 		"ALU64_SMOD_K: -7 % 2 = -1",
6281 		.u.insns_int = {
6282 			BPF_LD_IMM64(R0, -7),
6283 			BPF_ALU64_IMM_OFF(BPF_MOD, R0, 2, 1),
6284 			BPF_EXIT_INSN(),
6285 		},
6286 		INTERNAL,
6287 		{ },
6288 		{ { 0, -1 } },
6289 	},
6290 	/* BPF_ALU | BPF_AND | BPF_X */
6291 	{
6292 		"ALU_AND_X: 3 & 2 = 2",
6293 		.u.insns_int = {
6294 			BPF_LD_IMM64(R0, 3),
6295 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6296 			BPF_ALU32_REG(BPF_AND, R0, R1),
6297 			BPF_EXIT_INSN(),
6298 		},
6299 		INTERNAL,
6300 		{ },
6301 		{ { 0, 2 } },
6302 	},
6303 	{
6304 		"ALU_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6305 		.u.insns_int = {
6306 			BPF_LD_IMM64(R0, 0xffffffff),
6307 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6308 			BPF_ALU32_REG(BPF_AND, R0, R1),
6309 			BPF_EXIT_INSN(),
6310 		},
6311 		INTERNAL,
6312 		{ },
6313 		{ { 0, 0xffffffff } },
6314 	},
6315 	{
6316 		"ALU64_AND_X: 3 & 2 = 2",
6317 		.u.insns_int = {
6318 			BPF_LD_IMM64(R0, 3),
6319 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6320 			BPF_ALU64_REG(BPF_AND, R0, R1),
6321 			BPF_EXIT_INSN(),
6322 		},
6323 		INTERNAL,
6324 		{ },
6325 		{ { 0, 2 } },
6326 	},
6327 	{
6328 		"ALU64_AND_X: 0xffffffff & 0xffffffff = 0xffffffff",
6329 		.u.insns_int = {
6330 			BPF_LD_IMM64(R0, 0xffffffff),
6331 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6332 			BPF_ALU64_REG(BPF_AND, R0, R1),
6333 			BPF_EXIT_INSN(),
6334 		},
6335 		INTERNAL,
6336 		{ },
6337 		{ { 0, 0xffffffff } },
6338 	},
6339 	/* BPF_ALU | BPF_AND | BPF_K */
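	/*
	 * For the K forms, ALU32 ops use the 32-bit immediate as-is and zero
	 * extend the 32-bit result, whereas ALU64 ops first sign extend the
	 * immediate to 64 bits; the "Zero extension" and "Sign extension"
	 * subtests in this group cover both cases.
	 */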
6340 	{
6341 		"ALU_AND_K: 3 & 2 = 2",
6342 		.u.insns_int = {
6343 			BPF_LD_IMM64(R0, 3),
6344 			BPF_ALU32_IMM(BPF_AND, R0, 2),
6345 			BPF_EXIT_INSN(),
6346 		},
6347 		INTERNAL,
6348 		{ },
6349 		{ { 0, 2 } },
6350 	},
6351 	{
6352 		"ALU_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6353 		.u.insns_int = {
6354 			BPF_LD_IMM64(R0, 0xffffffff),
6355 			BPF_ALU32_IMM(BPF_AND, R0, 0xffffffff),
6356 			BPF_EXIT_INSN(),
6357 		},
6358 		INTERNAL,
6359 		{ },
6360 		{ { 0, 0xffffffff } },
6361 	},
6362 	{
6363 		"ALU_AND_K: Small immediate",
6364 		.u.insns_int = {
6365 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6366 			BPF_ALU32_IMM(BPF_AND, R0, 15),
6367 			BPF_EXIT_INSN(),
6368 		},
6369 		INTERNAL,
6370 		{ },
6371 		{ { 0, 4 } }
6372 	},
6373 	{
6374 		"ALU_AND_K: Large immediate",
6375 		.u.insns_int = {
6376 			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6377 			BPF_ALU32_IMM(BPF_AND, R0, 0xafbfcfdf),
6378 			BPF_EXIT_INSN(),
6379 		},
6380 		INTERNAL,
6381 		{ },
6382 		{ { 0, 0xa1b2c3d4 } }
6383 	},
6384 	{
6385 		"ALU_AND_K: Zero extension",
6386 		.u.insns_int = {
6387 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6388 			BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
6389 			BPF_ALU32_IMM(BPF_AND, R0, 0xf0f0f0f0),
6390 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6391 			BPF_MOV32_IMM(R0, 2),
6392 			BPF_EXIT_INSN(),
6393 			BPF_MOV32_IMM(R0, 1),
6394 			BPF_EXIT_INSN(),
6395 		},
6396 		INTERNAL,
6397 		{ },
6398 		{ { 0, 1 } }
6399 	},
6400 	{
6401 		"ALU64_AND_K: 3 & 2 = 2",
6402 		.u.insns_int = {
6403 			BPF_LD_IMM64(R0, 3),
6404 			BPF_ALU64_IMM(BPF_AND, R0, 2),
6405 			BPF_EXIT_INSN(),
6406 		},
6407 		INTERNAL,
6408 		{ },
6409 		{ { 0, 2 } },
6410 	},
6411 	{
6412 		"ALU64_AND_K: 0xffffffff & 0xffffffff = 0xffffffff",
6413 		.u.insns_int = {
6414 			BPF_LD_IMM64(R0, 0xffffffff),
6415 			BPF_ALU64_IMM(BPF_AND, R0, 0xffffffff),
6416 			BPF_EXIT_INSN(),
6417 		},
6418 		INTERNAL,
6419 		{ },
6420 		{ { 0, 0xffffffff } },
6421 	},
6422 	{
6423 		"ALU64_AND_K: 0x0000ffffffff0000 & 0x0 = 0x0000000000000000",
6424 		.u.insns_int = {
6425 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6426 			BPF_LD_IMM64(R3, 0x0000000000000000LL),
6427 			BPF_ALU64_IMM(BPF_AND, R2, 0x0),
6428 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6429 			BPF_MOV32_IMM(R0, 2),
6430 			BPF_EXIT_INSN(),
6431 			BPF_MOV32_IMM(R0, 1),
6432 			BPF_EXIT_INSN(),
6433 		},
6434 		INTERNAL,
6435 		{ },
6436 		{ { 0, 0x1 } },
6437 	},
6438 	{
6439 		"ALU64_AND_K: 0x0000ffffffff0000 & -1 = 0x0000ffffffff0000",
6440 		.u.insns_int = {
6441 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6442 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6443 			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6444 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6445 			BPF_MOV32_IMM(R0, 2),
6446 			BPF_EXIT_INSN(),
6447 			BPF_MOV32_IMM(R0, 1),
6448 			BPF_EXIT_INSN(),
6449 		},
6450 		INTERNAL,
6451 		{ },
6452 		{ { 0, 0x1 } },
6453 	},
6454 	{
6455 		"ALU64_AND_K: 0xffffffffffffffff & -1 = 0xffffffffffffffff",
6456 		.u.insns_int = {
6457 			BPF_LD_IMM64(R2, 0xffffffffffffffffLL),
6458 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6459 			BPF_ALU64_IMM(BPF_AND, R2, 0xffffffff),
6460 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6461 			BPF_MOV32_IMM(R0, 2),
6462 			BPF_EXIT_INSN(),
6463 			BPF_MOV32_IMM(R0, 1),
6464 			BPF_EXIT_INSN(),
6465 		},
6466 		INTERNAL,
6467 		{ },
6468 		{ { 0, 0x1 } },
6469 	},
6470 	{
6471 		"ALU64_AND_K: Sign extension 1",
6472 		.u.insns_int = {
6473 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6474 			BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
6475 			BPF_ALU64_IMM(BPF_AND, R0, 0x0f0f0f0f),
6476 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6477 			BPF_MOV32_IMM(R0, 2),
6478 			BPF_EXIT_INSN(),
6479 			BPF_MOV32_IMM(R0, 1),
6480 			BPF_EXIT_INSN(),
6481 		},
6482 		INTERNAL,
6483 		{ },
6484 		{ { 0, 1 } }
6485 	},
6486 	{
6487 		"ALU64_AND_K: Sign extension 2",
6488 		.u.insns_int = {
6489 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6490 			BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
6491 			BPF_ALU64_IMM(BPF_AND, R0, 0xf0f0f0f0),
6492 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6493 			BPF_MOV32_IMM(R0, 2),
6494 			BPF_EXIT_INSN(),
6495 			BPF_MOV32_IMM(R0, 1),
6496 			BPF_EXIT_INSN(),
6497 		},
6498 		INTERNAL,
6499 		{ },
6500 		{ { 0, 1 } }
6501 	},
6502 	/* BPF_ALU | BPF_OR | BPF_X */
6503 	{
6504 		"ALU_OR_X: 1 | 2 = 3",
6505 		.u.insns_int = {
6506 			BPF_LD_IMM64(R0, 1),
6507 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6508 			BPF_ALU32_REG(BPF_OR, R0, R1),
6509 			BPF_EXIT_INSN(),
6510 		},
6511 		INTERNAL,
6512 		{ },
6513 		{ { 0, 3 } },
6514 	},
6515 	{
6516 		"ALU_OR_X: 0x0 | 0xffffffff = 0xffffffff",
6517 		.u.insns_int = {
6518 			BPF_LD_IMM64(R0, 0),
6519 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6520 			BPF_ALU32_REG(BPF_OR, R0, R1),
6521 			BPF_EXIT_INSN(),
6522 		},
6523 		INTERNAL,
6524 		{ },
6525 		{ { 0, 0xffffffff } },
6526 	},
6527 	{
6528 		"ALU64_OR_X: 1 | 2 = 3",
6529 		.u.insns_int = {
6530 			BPF_LD_IMM64(R0, 1),
6531 			BPF_ALU32_IMM(BPF_MOV, R1, 2),
6532 			BPF_ALU64_REG(BPF_OR, R0, R1),
6533 			BPF_EXIT_INSN(),
6534 		},
6535 		INTERNAL,
6536 		{ },
6537 		{ { 0, 3 } },
6538 	},
6539 	{
6540 		"ALU64_OR_X: 0 | 0xffffffff = 0xffffffff",
6541 		.u.insns_int = {
6542 			BPF_LD_IMM64(R0, 0),
6543 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6544 			BPF_ALU64_REG(BPF_OR, R0, R1),
6545 			BPF_EXIT_INSN(),
6546 		},
6547 		INTERNAL,
6548 		{ },
6549 		{ { 0, 0xffffffff } },
6550 	},
6551 	/* BPF_ALU | BPF_OR | BPF_K */
6552 	{
6553 		"ALU_OR_K: 1 | 2 = 3",
6554 		.u.insns_int = {
6555 			BPF_LD_IMM64(R0, 1),
6556 			BPF_ALU32_IMM(BPF_OR, R0, 2),
6557 			BPF_EXIT_INSN(),
6558 		},
6559 		INTERNAL,
6560 		{ },
6561 		{ { 0, 3 } },
6562 	},
6563 	{
6564 		"ALU_OR_K: 0 | 0xffffffff = 0xffffffff",
6565 		.u.insns_int = {
6566 			BPF_LD_IMM64(R0, 0),
6567 			BPF_ALU32_IMM(BPF_OR, R0, 0xffffffff),
6568 			BPF_EXIT_INSN(),
6569 		},
6570 		INTERNAL,
6571 		{ },
6572 		{ { 0, 0xffffffff } },
6573 	},
6574 	{
6575 		"ALU_OR_K: Small immediate",
6576 		.u.insns_int = {
6577 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6578 			BPF_ALU32_IMM(BPF_OR, R0, 1),
6579 			BPF_EXIT_INSN(),
6580 		},
6581 		INTERNAL,
6582 		{ },
6583 		{ { 0, 0x01020305 } }
6584 	},
6585 	{
6586 		"ALU_OR_K: Large immediate",
6587 		.u.insns_int = {
6588 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6589 			BPF_ALU32_IMM(BPF_OR, R0, 0xa0b0c0d0),
6590 			BPF_EXIT_INSN(),
6591 		},
6592 		INTERNAL,
6593 		{ },
6594 		{ { 0, 0xa1b2c3d4 } }
6595 	},
6596 	{
6597 		"ALU_OR_K: Zero extension",
6598 		.u.insns_int = {
6599 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6600 			BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
6601 			BPF_ALU32_IMM(BPF_OR, R0, 0xf0f0f0f0),
6602 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6603 			BPF_MOV32_IMM(R0, 2),
6604 			BPF_EXIT_INSN(),
6605 			BPF_MOV32_IMM(R0, 1),
6606 			BPF_EXIT_INSN(),
6607 		},
6608 		INTERNAL,
6609 		{ },
6610 		{ { 0, 1 } }
6611 	},
6612 	{
6613 		"ALU64_OR_K: 1 | 2 = 3",
6614 		.u.insns_int = {
6615 			BPF_LD_IMM64(R0, 1),
6616 			BPF_ALU64_IMM(BPF_OR, R0, 2),
6617 			BPF_EXIT_INSN(),
6618 		},
6619 		INTERNAL,
6620 		{ },
6621 		{ { 0, 3 } },
6622 	},
6623 	{
6624 		"ALU64_OR_K: 0 | 0xffffffff = 0xffffffff",
6625 		.u.insns_int = {
6626 			BPF_LD_IMM64(R0, 0),
6627 			BPF_ALU64_IMM(BPF_OR, R0, 0xffffffff),
6628 			BPF_EXIT_INSN(),
6629 		},
6630 		INTERNAL,
6631 		{ },
6632 		{ { 0, 0xffffffff } },
6633 	},
6634 	{
6635 		"ALU64_OR_K: 0x0000ffffffff0000 | 0x0 = 0x0000ffffffff0000",
6636 		.u.insns_int = {
6637 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6638 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6639 			BPF_ALU64_IMM(BPF_OR, R2, 0x0),
6640 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6641 			BPF_MOV32_IMM(R0, 2),
6642 			BPF_EXIT_INSN(),
6643 			BPF_MOV32_IMM(R0, 1),
6644 			BPF_EXIT_INSN(),
6645 		},
6646 		INTERNAL,
6647 		{ },
6648 		{ { 0, 0x1 } },
6649 	},
6650 	{
6651 		"ALU64_OR_K: 0x0000ffffffff0000 | -1 = 0xffffffffffffffff",
6652 		.u.insns_int = {
6653 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6654 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6655 			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6656 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6657 			BPF_MOV32_IMM(R0, 2),
6658 			BPF_EXIT_INSN(),
6659 			BPF_MOV32_IMM(R0, 1),
6660 			BPF_EXIT_INSN(),
6661 		},
6662 		INTERNAL,
6663 		{ },
6664 		{ { 0, 0x1 } },
6665 	},
6666 	{
6667 		"ALU64_OR_K: 0x0000000000000000 | -1 = 0xffffffffffffffff",
6668 		.u.insns_int = {
6669 			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6670 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6671 			BPF_ALU64_IMM(BPF_OR, R2, 0xffffffff),
6672 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6673 			BPF_MOV32_IMM(R0, 2),
6674 			BPF_EXIT_INSN(),
6675 			BPF_MOV32_IMM(R0, 1),
6676 			BPF_EXIT_INSN(),
6677 		},
6678 		INTERNAL,
6679 		{ },
6680 		{ { 0, 0x1 } },
6681 	},
6682 	{
6683 		"ALU64_OR_K: Sign extension 1",
6684 		.u.insns_int = {
6685 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6686 			BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
6687 			BPF_ALU64_IMM(BPF_OR, R0, 0x0f0f0f0f),
6688 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6689 			BPF_MOV32_IMM(R0, 2),
6690 			BPF_EXIT_INSN(),
6691 			BPF_MOV32_IMM(R0, 1),
6692 			BPF_EXIT_INSN(),
6693 		},
6694 		INTERNAL,
6695 		{ },
6696 		{ { 0, 1 } }
6697 	},
6698 	{
6699 		"ALU64_OR_K: Sign extension 2",
6700 		.u.insns_int = {
6701 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6702 			BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
6703 			BPF_ALU64_IMM(BPF_OR, R0, 0xf0f0f0f0),
6704 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6705 			BPF_MOV32_IMM(R0, 2),
6706 			BPF_EXIT_INSN(),
6707 			BPF_MOV32_IMM(R0, 1),
6708 			BPF_EXIT_INSN(),
6709 		},
6710 		INTERNAL,
6711 		{ },
6712 		{ { 0, 1 } }
6713 	},
6714 	/* BPF_ALU | BPF_XOR | BPF_X */
6715 	{
6716 		"ALU_XOR_X: 5 ^ 6 = 3",
6717 		.u.insns_int = {
6718 			BPF_LD_IMM64(R0, 5),
6719 			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6720 			BPF_ALU32_REG(BPF_XOR, R0, R1),
6721 			BPF_EXIT_INSN(),
6722 		},
6723 		INTERNAL,
6724 		{ },
6725 		{ { 0, 3 } },
6726 	},
6727 	{
6728 		"ALU_XOR_X: 0x1 ^ 0xffffffff = 0xfffffffe",
6729 		.u.insns_int = {
6730 			BPF_LD_IMM64(R0, 1),
6731 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6732 			BPF_ALU32_REG(BPF_XOR, R0, R1),
6733 			BPF_EXIT_INSN(),
6734 		},
6735 		INTERNAL,
6736 		{ },
6737 		{ { 0, 0xfffffffe } },
6738 	},
6739 	{
6740 		"ALU64_XOR_X: 5 ^ 6 = 3",
6741 		.u.insns_int = {
6742 			BPF_LD_IMM64(R0, 5),
6743 			BPF_ALU32_IMM(BPF_MOV, R1, 6),
6744 			BPF_ALU64_REG(BPF_XOR, R0, R1),
6745 			BPF_EXIT_INSN(),
6746 		},
6747 		INTERNAL,
6748 		{ },
6749 		{ { 0, 3 } },
6750 	},
6751 	{
6752 		"ALU64_XOR_X: 1 ^ 0xffffffff = 0xfffffffe",
6753 		.u.insns_int = {
6754 			BPF_LD_IMM64(R0, 1),
6755 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
6756 			BPF_ALU64_REG(BPF_XOR, R0, R1),
6757 			BPF_EXIT_INSN(),
6758 		},
6759 		INTERNAL,
6760 		{ },
6761 		{ { 0, 0xfffffffe } },
6762 	},
6763 	/* BPF_ALU | BPF_XOR | BPF_K */
6764 	{
6765 		"ALU_XOR_K: 5 ^ 6 = 3",
6766 		.u.insns_int = {
6767 			BPF_LD_IMM64(R0, 5),
6768 			BPF_ALU32_IMM(BPF_XOR, R0, 6),
6769 			BPF_EXIT_INSN(),
6770 		},
6771 		INTERNAL,
6772 		{ },
6773 		{ { 0, 3 } },
6774 	},
6775 	{
6776 		"ALU_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6777 		.u.insns_int = {
6778 			BPF_LD_IMM64(R0, 1),
6779 			BPF_ALU32_IMM(BPF_XOR, R0, 0xffffffff),
6780 			BPF_EXIT_INSN(),
6781 		},
6782 		INTERNAL,
6783 		{ },
6784 		{ { 0, 0xfffffffe } },
6785 	},
6786 	{
6787 		"ALU_XOR_K: Small immediate",
6788 		.u.insns_int = {
6789 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01020304),
6790 			BPF_ALU32_IMM(BPF_XOR, R0, 15),
6791 			BPF_EXIT_INSN(),
6792 		},
6793 		INTERNAL,
6794 		{ },
6795 		{ { 0, 0x0102030b } }
6796 	},
6797 	{
6798 		"ALU_XOR_K: Large immediate",
6799 		.u.insns_int = {
6800 			BPF_ALU32_IMM(BPF_MOV, R0, 0xf1f2f3f4),
6801 			BPF_ALU32_IMM(BPF_XOR, R0, 0xafbfcfdf),
6802 			BPF_EXIT_INSN(),
6803 		},
6804 		INTERNAL,
6805 		{ },
6806 		{ { 0, 0x5e4d3c2b } }
6807 	},
6808 	{
6809 		"ALU_XOR_K: Zero extension",
6810 		.u.insns_int = {
6811 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6812 			BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
6813 			BPF_ALU32_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6814 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6815 			BPF_MOV32_IMM(R0, 2),
6816 			BPF_EXIT_INSN(),
6817 			BPF_MOV32_IMM(R0, 1),
6818 			BPF_EXIT_INSN(),
6819 		},
6820 		INTERNAL,
6821 		{ },
6822 		{ { 0, 1 } }
6823 	},
6824 	{
6825 		"ALU64_XOR_K: 5 ^ 6 = 3",
6826 		.u.insns_int = {
6827 			BPF_LD_IMM64(R0, 5),
6828 			BPF_ALU64_IMM(BPF_XOR, R0, 6),
6829 			BPF_EXIT_INSN(),
6830 		},
6831 		INTERNAL,
6832 		{ },
6833 		{ { 0, 3 } },
6834 	},
6835 	{
6836 		"ALU64_XOR_K: 1 ^ 0xffffffff = 0xfffffffe",
6837 		.u.insns_int = {
6838 			BPF_LD_IMM64(R0, 1),
6839 			BPF_ALU64_IMM(BPF_XOR, R0, 0xffffffff),
6840 			BPF_EXIT_INSN(),
6841 		},
6842 		INTERNAL,
6843 		{ },
6844 		{ { 0, 0xfffffffe } },
6845 	},
6846 	{
6847 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ 0x0 = 0x0000ffffffff0000",
6848 		.u.insns_int = {
6849 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6850 			BPF_LD_IMM64(R3, 0x0000ffffffff0000LL),
6851 			BPF_ALU64_IMM(BPF_XOR, R2, 0x0),
6852 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6853 			BPF_MOV32_IMM(R0, 2),
6854 			BPF_EXIT_INSN(),
6855 			BPF_MOV32_IMM(R0, 1),
6856 			BPF_EXIT_INSN(),
6857 		},
6858 		INTERNAL,
6859 		{ },
6860 		{ { 0, 0x1 } },
6861 	},
6862 	{
6863 		"ALU64_XOR_K: 0x0000ffffffff0000 ^ -1 = 0xffff00000000ffff",
6864 		.u.insns_int = {
6865 			BPF_LD_IMM64(R2, 0x0000ffffffff0000LL),
6866 			BPF_LD_IMM64(R3, 0xffff00000000ffffLL),
6867 			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6868 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6869 			BPF_MOV32_IMM(R0, 2),
6870 			BPF_EXIT_INSN(),
6871 			BPF_MOV32_IMM(R0, 1),
6872 			BPF_EXIT_INSN(),
6873 		},
6874 		INTERNAL,
6875 		{ },
6876 		{ { 0, 0x1 } },
6877 	},
6878 	{
6879 		"ALU64_XOR_K: 0x0000000000000000 ^ -1 = 0xffffffffffffffff",
6880 		.u.insns_int = {
6881 			BPF_LD_IMM64(R2, 0x0000000000000000LL),
6882 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
6883 			BPF_ALU64_IMM(BPF_XOR, R2, 0xffffffff),
6884 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
6885 			BPF_MOV32_IMM(R0, 2),
6886 			BPF_EXIT_INSN(),
6887 			BPF_MOV32_IMM(R0, 1),
6888 			BPF_EXIT_INSN(),
6889 		},
6890 		INTERNAL,
6891 		{ },
6892 		{ { 0, 0x1 } },
6893 	},
6894 	{
6895 		"ALU64_XOR_K: Sign extension 1",
6896 		.u.insns_int = {
6897 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6898 			BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
6899 			BPF_ALU64_IMM(BPF_XOR, R0, 0x0f0f0f0f),
6900 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6901 			BPF_MOV32_IMM(R0, 2),
6902 			BPF_EXIT_INSN(),
6903 			BPF_MOV32_IMM(R0, 1),
6904 			BPF_EXIT_INSN(),
6905 		},
6906 		INTERNAL,
6907 		{ },
6908 		{ { 0, 1 } }
6909 	},
6910 	{
6911 		"ALU64_XOR_K: Sign extension 2",
6912 		.u.insns_int = {
6913 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6914 			BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
6915 			BPF_ALU64_IMM(BPF_XOR, R0, 0xf0f0f0f0),
6916 			BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
6917 			BPF_MOV32_IMM(R0, 2),
6918 			BPF_EXIT_INSN(),
6919 			BPF_MOV32_IMM(R0, 1),
6920 			BPF_EXIT_INSN(),
6921 		},
6922 		INTERNAL,
6923 		{ },
6924 		{ { 0, 1 } }
6925 	},
6926 	/* BPF_ALU | BPF_LSH | BPF_X */
6927 	{
6928 		"ALU_LSH_X: 1 << 1 = 2",
6929 		.u.insns_int = {
6930 			BPF_LD_IMM64(R0, 1),
6931 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6932 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6933 			BPF_EXIT_INSN(),
6934 		},
6935 		INTERNAL,
6936 		{ },
6937 		{ { 0, 2 } },
6938 	},
6939 	{
6940 		"ALU_LSH_X: 1 << 31 = 0x80000000",
6941 		.u.insns_int = {
6942 			BPF_LD_IMM64(R0, 1),
6943 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6944 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6945 			BPF_EXIT_INSN(),
6946 		},
6947 		INTERNAL,
6948 		{ },
6949 		{ { 0, 0x80000000 } },
6950 	},
6951 	{
6952 		"ALU_LSH_X: 0x12345678 << 12 = 0x45678000",
6953 		.u.insns_int = {
6954 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
6955 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6956 			BPF_ALU32_REG(BPF_LSH, R0, R1),
6957 			BPF_EXIT_INSN(),
6958 		},
6959 		INTERNAL,
6960 		{ },
6961 		{ { 0, 0x45678000 } }
6962 	},
6963 	{
6964 		"ALU64_LSH_X: 1 << 1 = 2",
6965 		.u.insns_int = {
6966 			BPF_LD_IMM64(R0, 1),
6967 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
6968 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6969 			BPF_EXIT_INSN(),
6970 		},
6971 		INTERNAL,
6972 		{ },
6973 		{ { 0, 2 } },
6974 	},
6975 	{
6976 		"ALU64_LSH_X: 1 << 31 = 0x80000000",
6977 		.u.insns_int = {
6978 			BPF_LD_IMM64(R0, 1),
6979 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
6980 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6981 			BPF_EXIT_INSN(),
6982 		},
6983 		INTERNAL,
6984 		{ },
6985 		{ { 0, 0x80000000 } },
6986 	},
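	/*
	 * The 64-bit shift tests probe the boundary cases around a 32-bit
	 * shift amount (below, equal to and above 32, plus a zero shift),
	 * checking the low and high words of the result separately.
	 */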
6987 	{
6988 		"ALU64_LSH_X: Shift < 32, low word",
6989 		.u.insns_int = {
6990 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
6991 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
6992 			BPF_ALU64_REG(BPF_LSH, R0, R1),
6993 			BPF_EXIT_INSN(),
6994 		},
6995 		INTERNAL,
6996 		{ },
6997 		{ { 0, 0xbcdef000 } }
6998 	},
6999 	{
7000 		"ALU64_LSH_X: Shift < 32, high word",
7001 		.u.insns_int = {
7002 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7003 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7004 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7005 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7006 			BPF_EXIT_INSN(),
7007 		},
7008 		INTERNAL,
7009 		{ },
7010 		{ { 0, 0x3456789a } }
7011 	},
7012 	{
7013 		"ALU64_LSH_X: Shift > 32, low word",
7014 		.u.insns_int = {
7015 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7016 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7017 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7018 			BPF_EXIT_INSN(),
7019 		},
7020 		INTERNAL,
7021 		{ },
7022 		{ { 0, 0 } }
7023 	},
7024 	{
7025 		"ALU64_LSH_X: Shift > 32, high word",
7026 		.u.insns_int = {
7027 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7028 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7029 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7030 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7031 			BPF_EXIT_INSN(),
7032 		},
7033 		INTERNAL,
7034 		{ },
7035 		{ { 0, 0x9abcdef0 } }
7036 	},
7037 	{
7038 		"ALU64_LSH_X: Shift == 32, low word",
7039 		.u.insns_int = {
7040 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7041 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7042 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7043 			BPF_EXIT_INSN(),
7044 		},
7045 		INTERNAL,
7046 		{ },
7047 		{ { 0, 0 } }
7048 	},
7049 	{
7050 		"ALU64_LSH_X: Shift == 32, high word",
7051 		.u.insns_int = {
7052 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7053 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7054 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7055 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7056 			BPF_EXIT_INSN(),
7057 		},
7058 		INTERNAL,
7059 		{ },
7060 		{ { 0, 0x89abcdef } }
7061 	},
7062 	{
7063 		"ALU64_LSH_X: Zero shift, low word",
7064 		.u.insns_int = {
7065 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7066 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7067 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7068 			BPF_EXIT_INSN(),
7069 		},
7070 		INTERNAL,
7071 		{ },
7072 		{ { 0, 0x89abcdef } }
7073 	},
7074 	{
7075 		"ALU64_LSH_X: Zero shift, high word",
7076 		.u.insns_int = {
7077 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7078 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7079 			BPF_ALU64_REG(BPF_LSH, R0, R1),
7080 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7081 			BPF_EXIT_INSN(),
7082 		},
7083 		INTERNAL,
7084 		{ },
7085 		{ { 0, 0x01234567 } }
7086 	},
7087 	/* BPF_ALU | BPF_LSH | BPF_K */
7088 	{
7089 		"ALU_LSH_K: 1 << 1 = 2",
7090 		.u.insns_int = {
7091 			BPF_LD_IMM64(R0, 1),
7092 			BPF_ALU32_IMM(BPF_LSH, R0, 1),
7093 			BPF_EXIT_INSN(),
7094 		},
7095 		INTERNAL,
7096 		{ },
7097 		{ { 0, 2 } },
7098 	},
7099 	{
7100 		"ALU_LSH_K: 1 << 31 = 0x80000000",
7101 		.u.insns_int = {
7102 			BPF_LD_IMM64(R0, 1),
7103 			BPF_ALU32_IMM(BPF_LSH, R0, 31),
7104 			BPF_EXIT_INSN(),
7105 		},
7106 		INTERNAL,
7107 		{ },
7108 		{ { 0, 0x80000000 } },
7109 	},
7110 	{
7111 		"ALU_LSH_K: 0x12345678 << 12 = 0x45678000",
7112 		.u.insns_int = {
7113 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7114 			BPF_ALU32_IMM(BPF_LSH, R0, 12),
7115 			BPF_EXIT_INSN(),
7116 		},
7117 		INTERNAL,
7118 		{ },
7119 		{ { 0, 0x45678000 } }
7120 	},
7121 	{
7122 		"ALU_LSH_K: 0x12345678 << 0 = 0x12345678",
7123 		.u.insns_int = {
7124 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7125 			BPF_ALU32_IMM(BPF_LSH, R0, 0),
7126 			BPF_EXIT_INSN(),
7127 		},
7128 		INTERNAL,
7129 		{ },
7130 		{ { 0, 0x12345678 } }
7131 	},
7132 	{
7133 		"ALU64_LSH_K: 1 << 1 = 2",
7134 		.u.insns_int = {
7135 			BPF_LD_IMM64(R0, 1),
7136 			BPF_ALU64_IMM(BPF_LSH, R0, 1),
7137 			BPF_EXIT_INSN(),
7138 		},
7139 		INTERNAL,
7140 		{ },
7141 		{ { 0, 2 } },
7142 	},
7143 	{
7144 		"ALU64_LSH_K: 1 << 31 = 0x80000000",
7145 		.u.insns_int = {
7146 			BPF_LD_IMM64(R0, 1),
7147 			BPF_ALU64_IMM(BPF_LSH, R0, 31),
7148 			BPF_EXIT_INSN(),
7149 		},
7150 		INTERNAL,
7151 		{ },
7152 		{ { 0, 0x80000000 } },
7153 	},
7154 	{
7155 		"ALU64_LSH_K: Shift < 32, low word",
7156 		.u.insns_int = {
7157 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7158 			BPF_ALU64_IMM(BPF_LSH, R0, 12),
7159 			BPF_EXIT_INSN(),
7160 		},
7161 		INTERNAL,
7162 		{ },
7163 		{ { 0, 0xbcdef000 } }
7164 	},
7165 	{
7166 		"ALU64_LSH_K: Shift < 32, high word",
7167 		.u.insns_int = {
7168 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7169 			BPF_ALU64_IMM(BPF_LSH, R0, 12),
7170 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7171 			BPF_EXIT_INSN(),
7172 		},
7173 		INTERNAL,
7174 		{ },
7175 		{ { 0, 0x3456789a } }
7176 	},
7177 	{
7178 		"ALU64_LSH_K: Shift > 32, low word",
7179 		.u.insns_int = {
7180 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7181 			BPF_ALU64_IMM(BPF_LSH, R0, 36),
7182 			BPF_EXIT_INSN(),
7183 		},
7184 		INTERNAL,
7185 		{ },
7186 		{ { 0, 0 } }
7187 	},
7188 	{
7189 		"ALU64_LSH_K: Shift > 32, high word",
7190 		.u.insns_int = {
7191 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7192 			BPF_ALU64_IMM(BPF_LSH, R0, 36),
7193 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7194 			BPF_EXIT_INSN(),
7195 		},
7196 		INTERNAL,
7197 		{ },
7198 		{ { 0, 0x9abcdef0 } }
7199 	},
7200 	{
7201 		"ALU64_LSH_K: Shift == 32, low word",
7202 		.u.insns_int = {
7203 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7204 			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7205 			BPF_EXIT_INSN(),
7206 		},
7207 		INTERNAL,
7208 		{ },
7209 		{ { 0, 0 } }
7210 	},
7211 	{
7212 		"ALU64_LSH_K: Shift == 32, high word",
7213 		.u.insns_int = {
7214 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7215 			BPF_ALU64_IMM(BPF_LSH, R0, 32),
7216 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7217 			BPF_EXIT_INSN(),
7218 		},
7219 		INTERNAL,
7220 		{ },
7221 		{ { 0, 0x89abcdef } }
7222 	},
7223 	{
7224 		"ALU64_LSH_K: Zero shift",
7225 		.u.insns_int = {
7226 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7227 			BPF_ALU64_IMM(BPF_LSH, R0, 0),
7228 			BPF_EXIT_INSN(),
7229 		},
7230 		INTERNAL,
7231 		{ },
7232 		{ { 0, 0x89abcdef } }
7233 	},
7234 	/* BPF_ALU | BPF_RSH | BPF_X */
7235 	{
7236 		"ALU_RSH_X: 2 >> 1 = 1",
7237 		.u.insns_int = {
7238 			BPF_LD_IMM64(R0, 2),
7239 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7240 			BPF_ALU32_REG(BPF_RSH, R0, R1),
7241 			BPF_EXIT_INSN(),
7242 		},
7243 		INTERNAL,
7244 		{ },
7245 		{ { 0, 1 } },
7246 	},
7247 	{
7248 		"ALU_RSH_X: 0x80000000 >> 31 = 1",
7249 		.u.insns_int = {
7250 			BPF_LD_IMM64(R0, 0x80000000),
7251 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7252 			BPF_ALU32_REG(BPF_RSH, R0, R1),
7253 			BPF_EXIT_INSN(),
7254 		},
7255 		INTERNAL,
7256 		{ },
7257 		{ { 0, 1 } },
7258 	},
7259 	{
7260 		"ALU_RSH_X: 0x12345678 >> 20 = 0x123",
7261 		.u.insns_int = {
7262 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7263 			BPF_ALU32_IMM(BPF_MOV, R1, 20),
7264 			BPF_ALU32_REG(BPF_RSH, R0, R1),
7265 			BPF_EXIT_INSN(),
7266 		},
7267 		INTERNAL,
7268 		{ },
7269 		{ { 0, 0x123 } }
7270 	},
7271 	{
7272 		"ALU64_RSH_X: 2 >> 1 = 1",
7273 		.u.insns_int = {
7274 			BPF_LD_IMM64(R0, 2),
7275 			BPF_ALU32_IMM(BPF_MOV, R1, 1),
7276 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7277 			BPF_EXIT_INSN(),
7278 		},
7279 		INTERNAL,
7280 		{ },
7281 		{ { 0, 1 } },
7282 	},
7283 	{
7284 		"ALU64_RSH_X: 0x80000000 >> 31 = 1",
7285 		.u.insns_int = {
7286 			BPF_LD_IMM64(R0, 0x80000000),
7287 			BPF_ALU32_IMM(BPF_MOV, R1, 31),
7288 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7289 			BPF_EXIT_INSN(),
7290 		},
7291 		INTERNAL,
7292 		{ },
7293 		{ { 0, 1 } },
7294 	},
7295 	{
7296 		"ALU64_RSH_X: Shift < 32, low word",
7297 		.u.insns_int = {
7298 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7299 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7300 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7301 			BPF_EXIT_INSN(),
7302 		},
7303 		INTERNAL,
7304 		{ },
7305 		{ { 0, 0x56789abc } }
7306 	},
7307 	{
7308 		"ALU64_RSH_X: Shift < 32, high word",
7309 		.u.insns_int = {
7310 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7311 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7312 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7313 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7314 			BPF_EXIT_INSN(),
7315 		},
7316 		INTERNAL,
7317 		{ },
7318 		{ { 0, 0x00081234 } }
7319 	},
7320 	{
7321 		"ALU64_RSH_X: Shift > 32, low word",
7322 		.u.insns_int = {
7323 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7324 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7325 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7326 			BPF_EXIT_INSN(),
7327 		},
7328 		INTERNAL,
7329 		{ },
7330 		{ { 0, 0x08123456 } }
7331 	},
7332 	{
7333 		"ALU64_RSH_X: Shift > 32, high word",
7334 		.u.insns_int = {
7335 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7336 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7337 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7338 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7339 			BPF_EXIT_INSN(),
7340 		},
7341 		INTERNAL,
7342 		{ },
7343 		{ { 0, 0 } }
7344 	},
7345 	{
7346 		"ALU64_RSH_X: Shift == 32, low word",
7347 		.u.insns_int = {
7348 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7349 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7350 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7351 			BPF_EXIT_INSN(),
7352 		},
7353 		INTERNAL,
7354 		{ },
7355 		{ { 0, 0x81234567 } }
7356 	},
7357 	{
7358 		"ALU64_RSH_X: Shift == 32, high word",
7359 		.u.insns_int = {
7360 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7361 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7362 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7363 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7364 			BPF_EXIT_INSN(),
7365 		},
7366 		INTERNAL,
7367 		{ },
7368 		{ { 0, 0 } }
7369 	},
7370 	{
7371 		"ALU64_RSH_X: Zero shift, low word",
7372 		.u.insns_int = {
7373 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7374 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7375 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7376 			BPF_EXIT_INSN(),
7377 		},
7378 		INTERNAL,
7379 		{ },
7380 		{ { 0, 0x89abcdef } }
7381 	},
7382 	{
7383 		"ALU64_RSH_X: Zero shift, high word",
7384 		.u.insns_int = {
7385 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7386 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7387 			BPF_ALU64_REG(BPF_RSH, R0, R1),
7388 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7389 			BPF_EXIT_INSN(),
7390 		},
7391 		INTERNAL,
7392 		{ },
7393 		{ { 0, 0x81234567 } }
7394 	},
7395 	/* BPF_ALU | BPF_RSH | BPF_K */
7396 	{
7397 		"ALU_RSH_K: 2 >> 1 = 1",
7398 		.u.insns_int = {
7399 			BPF_LD_IMM64(R0, 2),
7400 			BPF_ALU32_IMM(BPF_RSH, R0, 1),
7401 			BPF_EXIT_INSN(),
7402 		},
7403 		INTERNAL,
7404 		{ },
7405 		{ { 0, 1 } },
7406 	},
7407 	{
7408 		"ALU_RSH_K: 0x80000000 >> 31 = 1",
7409 		.u.insns_int = {
7410 			BPF_LD_IMM64(R0, 0x80000000),
7411 			BPF_ALU32_IMM(BPF_RSH, R0, 31),
7412 			BPF_EXIT_INSN(),
7413 		},
7414 		INTERNAL,
7415 		{ },
7416 		{ { 0, 1 } },
7417 	},
7418 	{
7419 		"ALU_RSH_K: 0x12345678 >> 20 = 0x123",
7420 		.u.insns_int = {
7421 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7422 			BPF_ALU32_IMM(BPF_RSH, R0, 20),
7423 			BPF_EXIT_INSN(),
7424 		},
7425 		INTERNAL,
7426 		{ },
7427 		{ { 0, 0x123 } }
7428 	},
7429 	{
7430 		"ALU_RSH_K: 0x12345678 >> 0 = 0x12345678",
7431 		.u.insns_int = {
7432 			BPF_ALU32_IMM(BPF_MOV, R0, 0x12345678),
7433 			BPF_ALU32_IMM(BPF_RSH, R0, 0),
7434 			BPF_EXIT_INSN(),
7435 		},
7436 		INTERNAL,
7437 		{ },
7438 		{ { 0, 0x12345678 } }
7439 	},
7440 	{
7441 		"ALU64_RSH_K: 2 >> 1 = 1",
7442 		.u.insns_int = {
7443 			BPF_LD_IMM64(R0, 2),
7444 			BPF_ALU64_IMM(BPF_RSH, R0, 1),
7445 			BPF_EXIT_INSN(),
7446 		},
7447 		INTERNAL,
7448 		{ },
7449 		{ { 0, 1 } },
7450 	},
7451 	{
7452 		"ALU64_RSH_K: 0x80000000 >> 31 = 1",
7453 		.u.insns_int = {
7454 			BPF_LD_IMM64(R0, 0x80000000),
7455 			BPF_ALU64_IMM(BPF_RSH, R0, 31),
7456 			BPF_EXIT_INSN(),
7457 		},
7458 		INTERNAL,
7459 		{ },
7460 		{ { 0, 1 } },
7461 	},
7462 	{
7463 		"ALU64_RSH_K: Shift < 32, low word",
7464 		.u.insns_int = {
7465 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7466 			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7467 			BPF_EXIT_INSN(),
7468 		},
7469 		INTERNAL,
7470 		{ },
7471 		{ { 0, 0x56789abc } }
7472 	},
7473 	{
7474 		"ALU64_RSH_K: Shift < 32, high word",
7475 		.u.insns_int = {
7476 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7477 			BPF_ALU64_IMM(BPF_RSH, R0, 12),
7478 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7479 			BPF_EXIT_INSN(),
7480 		},
7481 		INTERNAL,
7482 		{ },
7483 		{ { 0, 0x00081234 } }
7484 	},
7485 	{
7486 		"ALU64_RSH_K: Shift > 32, low word",
7487 		.u.insns_int = {
7488 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7489 			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7490 			BPF_EXIT_INSN(),
7491 		},
7492 		INTERNAL,
7493 		{ },
7494 		{ { 0, 0x08123456 } }
7495 	},
7496 	{
7497 		"ALU64_RSH_K: Shift > 32, high word",
7498 		.u.insns_int = {
7499 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7500 			BPF_ALU64_IMM(BPF_RSH, R0, 36),
7501 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7502 			BPF_EXIT_INSN(),
7503 		},
7504 		INTERNAL,
7505 		{ },
7506 		{ { 0, 0 } }
7507 	},
7508 	{
7509 		"ALU64_RSH_K: Shift == 32, low word",
7510 		.u.insns_int = {
7511 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7512 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7513 			BPF_EXIT_INSN(),
7514 		},
7515 		INTERNAL,
7516 		{ },
7517 		{ { 0, 0x81234567 } }
7518 	},
7519 	{
7520 		"ALU64_RSH_K: Shift == 32, high word",
7521 		.u.insns_int = {
7522 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7523 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7524 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7525 			BPF_EXIT_INSN(),
7526 		},
7527 		INTERNAL,
7528 		{ },
7529 		{ { 0, 0 } }
7530 	},
7531 	{
7532 		"ALU64_RSH_K: Zero shift",
7533 		.u.insns_int = {
7534 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7535 			BPF_ALU64_IMM(BPF_RSH, R0, 0),
7536 			BPF_EXIT_INSN(),
7537 		},
7538 		INTERNAL,
7539 		{ },
7540 		{ { 0, 0x89abcdef } }
7541 	},
7542 	/* BPF_ALU | BPF_ARSH | BPF_X */
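	/*
	 * BPF_ARSH is the arithmetic right shift: the sign bit is copied
	 * into the vacated high bits, hence the 0xfff81234 and -1
	 * expectations for the negative 64-bit test values.
	 */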
7543 	{
7544 		"ALU32_ARSH_X: -1234 >> 7 = -10",
7545 		.u.insns_int = {
7546 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7547 			BPF_ALU32_IMM(BPF_MOV, R1, 7),
7548 			BPF_ALU32_REG(BPF_ARSH, R0, R1),
7549 			BPF_EXIT_INSN(),
7550 		},
7551 		INTERNAL,
7552 		{ },
7553 		{ { 0, -10 } }
7554 	},
7555 	{
7556 		"ALU64_ARSH_X: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7557 		.u.insns_int = {
7558 			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7559 			BPF_ALU32_IMM(BPF_MOV, R1, 40),
7560 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7561 			BPF_EXIT_INSN(),
7562 		},
7563 		INTERNAL,
7564 		{ },
7565 		{ { 0, 0xffff00ff } },
7566 	},
7567 	{
7568 		"ALU64_ARSH_X: Shift < 32, low word",
7569 		.u.insns_int = {
7570 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7571 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7572 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7573 			BPF_EXIT_INSN(),
7574 		},
7575 		INTERNAL,
7576 		{ },
7577 		{ { 0, 0x56789abc } }
7578 	},
7579 	{
7580 		"ALU64_ARSH_X: Shift < 32, high word",
7581 		.u.insns_int = {
7582 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7583 			BPF_ALU32_IMM(BPF_MOV, R1, 12),
7584 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7585 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7586 			BPF_EXIT_INSN(),
7587 		},
7588 		INTERNAL,
7589 		{ },
7590 		{ { 0, 0xfff81234 } }
7591 	},
7592 	{
7593 		"ALU64_ARSH_X: Shift > 32, low word",
7594 		.u.insns_int = {
7595 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7596 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7597 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7598 			BPF_EXIT_INSN(),
7599 		},
7600 		INTERNAL,
7601 		{ },
7602 		{ { 0, 0xf8123456 } }
7603 	},
7604 	{
7605 		"ALU64_ARSH_X: Shift > 32, high word",
7606 		.u.insns_int = {
7607 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7608 			BPF_ALU32_IMM(BPF_MOV, R1, 36),
7609 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7610 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7611 			BPF_EXIT_INSN(),
7612 		},
7613 		INTERNAL,
7614 		{ },
7615 		{ { 0, -1 } }
7616 	},
7617 	{
7618 		"ALU64_ARSH_X: Shift == 32, low word",
7619 		.u.insns_int = {
7620 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7621 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7622 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7623 			BPF_EXIT_INSN(),
7624 		},
7625 		INTERNAL,
7626 		{ },
7627 		{ { 0, 0x81234567 } }
7628 	},
7629 	{
7630 		"ALU64_ARSH_X: Shift == 32, high word",
7631 		.u.insns_int = {
7632 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7633 			BPF_ALU32_IMM(BPF_MOV, R1, 32),
7634 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7635 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7636 			BPF_EXIT_INSN(),
7637 		},
7638 		INTERNAL,
7639 		{ },
7640 		{ { 0, -1 } }
7641 	},
7642 	{
7643 		"ALU64_ARSH_X: Zero shift, low word",
7644 		.u.insns_int = {
7645 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7646 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7647 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7648 			BPF_EXIT_INSN(),
7649 		},
7650 		INTERNAL,
7651 		{ },
7652 		{ { 0, 0x89abcdef } }
7653 	},
7654 	{
7655 		"ALU64_ARSH_X: Zero shift, high word",
7656 		.u.insns_int = {
7657 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7658 			BPF_ALU32_IMM(BPF_MOV, R1, 0),
7659 			BPF_ALU64_REG(BPF_ARSH, R0, R1),
7660 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7661 			BPF_EXIT_INSN(),
7662 		},
7663 		INTERNAL,
7664 		{ },
7665 		{ { 0, 0x81234567 } }
7666 	},
7667 	/* BPF_ALU | BPF_ARSH | BPF_K */
7668 	{
7669 		"ALU32_ARSH_K: -1234 >> 7 = -10",
7670 		.u.insns_int = {
7671 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7672 			BPF_ALU32_IMM(BPF_ARSH, R0, 7),
7673 			BPF_EXIT_INSN(),
7674 		},
7675 		INTERNAL,
7676 		{ },
7677 		{ { 0, -10 } }
7678 	},
7679 	{
7680 		"ALU32_ARSH_K: -1234 >> 0 = -1234",
7681 		.u.insns_int = {
7682 			BPF_ALU32_IMM(BPF_MOV, R0, -1234),
7683 			BPF_ALU32_IMM(BPF_ARSH, R0, 0),
7684 			BPF_EXIT_INSN(),
7685 		},
7686 		INTERNAL,
7687 		{ },
7688 		{ { 0, -1234 } }
7689 	},
7690 	{
7691 		"ALU64_ARSH_K: 0xff00ff0000000000 >> 40 = 0xffffffffffff00ff",
7692 		.u.insns_int = {
7693 			BPF_LD_IMM64(R0, 0xff00ff0000000000LL),
7694 			BPF_ALU64_IMM(BPF_ARSH, R0, 40),
7695 			BPF_EXIT_INSN(),
7696 		},
7697 		INTERNAL,
7698 		{ },
7699 		{ { 0, 0xffff00ff } },
7700 	},
7701 	{
7702 		"ALU64_ARSH_K: Shift < 32, low word",
7703 		.u.insns_int = {
7704 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7705 			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7706 			BPF_EXIT_INSN(),
7707 		},
7708 		INTERNAL,
7709 		{ },
7710 		{ { 0, 0x56789abc } }
7711 	},
7712 	{
7713 		"ALU64_ARSH_K: Shift < 32, high word",
7714 		.u.insns_int = {
7715 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7716 			BPF_ALU64_IMM(BPF_ARSH, R0, 12),
7717 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7718 			BPF_EXIT_INSN(),
7719 		},
7720 		INTERNAL,
7721 		{ },
7722 		{ { 0, 0xfff81234 } }
7723 	},
7724 	{
7725 		"ALU64_ARSH_K: Shift > 32, low word",
7726 		.u.insns_int = {
7727 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7728 			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7729 			BPF_EXIT_INSN(),
7730 		},
7731 		INTERNAL,
7732 		{ },
7733 		{ { 0, 0xf8123456 } }
7734 	},
7735 	{
7736 		"ALU64_ARSH_K: Shift > 32, high word",
7737 		.u.insns_int = {
7738 			BPF_LD_IMM64(R0, 0xf123456789abcdefLL),
7739 			BPF_ALU64_IMM(BPF_ARSH, R0, 36),
7740 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7741 			BPF_EXIT_INSN(),
7742 		},
7743 		INTERNAL,
7744 		{ },
7745 		{ { 0, -1 } }
7746 	},
7747 	{
7748 		"ALU64_ARSH_K: Shift == 32, low word",
7749 		.u.insns_int = {
7750 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7751 			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7752 			BPF_EXIT_INSN(),
7753 		},
7754 		INTERNAL,
7755 		{ },
7756 		{ { 0, 0x81234567 } }
7757 	},
7758 	{
7759 		"ALU64_ARSH_K: Shift == 32, high word",
7760 		.u.insns_int = {
7761 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7762 			BPF_ALU64_IMM(BPF_ARSH, R0, 32),
7763 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7764 			BPF_EXIT_INSN(),
7765 		},
7766 		INTERNAL,
7767 		{ },
7768 		{ { 0, -1 } }
7769 	},
7770 	{
7771 		"ALU64_ARSH_K: Zero shift",
7772 		.u.insns_int = {
7773 			BPF_LD_IMM64(R0, 0x8123456789abcdefLL),
7774 			BPF_ALU64_IMM(BPF_ARSH, R0, 0),
7775 			BPF_EXIT_INSN(),
7776 		},
7777 		INTERNAL,
7778 		{ },
7779 		{ { 0, 0x89abcdef } }
7780 	},
7781 	/* BPF_ALU | BPF_NEG */
7782 	{
7783 		"ALU_NEG: -(3) = -3",
7784 		.u.insns_int = {
7785 			BPF_ALU32_IMM(BPF_MOV, R0, 3),
7786 			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7787 			BPF_EXIT_INSN(),
7788 		},
7789 		INTERNAL,
7790 		{ },
7791 		{ { 0, -3 } },
7792 	},
7793 	{
7794 		"ALU_NEG: -(-3) = 3",
7795 		.u.insns_int = {
7796 			BPF_ALU32_IMM(BPF_MOV, R0, -3),
7797 			BPF_ALU32_IMM(BPF_NEG, R0, 0),
7798 			BPF_EXIT_INSN(),
7799 		},
7800 		INTERNAL,
7801 		{ },
7802 		{ { 0, 3 } },
7803 	},
7804 	{
7805 		"ALU64_NEG: -(3) = -3",
7806 		.u.insns_int = {
7807 			BPF_LD_IMM64(R0, 3),
7808 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7809 			BPF_EXIT_INSN(),
7810 		},
7811 		INTERNAL,
7812 		{ },
7813 		{ { 0, -3 } },
7814 	},
7815 	{
7816 		"ALU64_NEG: -(-3) = 3",
7817 		.u.insns_int = {
7818 			BPF_LD_IMM64(R0, -3),
7819 			BPF_ALU64_IMM(BPF_NEG, R0, 0),
7820 			BPF_EXIT_INSN(),
7821 		},
7822 		INTERNAL,
7823 		{ },
7824 		{ { 0, 3 } },
7825 	},
7826 	/* BPF_ALU | BPF_END | BPF_FROM_BE */
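	/*
	 * BPF_FROM_BE byte-swaps on little-endian hosts and, for the 16 and
	 * 32 bit forms, merely zero-extends on big-endian hosts, so the
	 * expected values are wrapped in cpu_to_be*() to hold for either
	 * host byte order. The 32-bit cases also fold the (expectedly zero)
	 * high word back into R0 to catch a missing zero-extension.
	 */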
7827 	{
7828 		"ALU_END_FROM_BE 16: 0x0123456789abcdef -> 0xcdef",
7829 		.u.insns_int = {
7830 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7831 			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7832 			BPF_EXIT_INSN(),
7833 		},
7834 		INTERNAL,
7835 		{ },
7836 		{ { 0, cpu_to_be16(0xcdef) } },
7837 	},
7838 	{
7839 		"ALU_END_FROM_BE 32: 0x0123456789abcdef -> 0x89abcdef",
7840 		.u.insns_int = {
7841 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7842 			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7843 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7844 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7845 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7846 			BPF_EXIT_INSN(),
7847 		},
7848 		INTERNAL,
7849 		{ },
7850 		{ { 0, cpu_to_be32(0x89abcdef) } },
7851 	},
7852 	{
7853 		"ALU_END_FROM_BE 64: 0x0123456789abcdef -> 0x89abcdef",
7854 		.u.insns_int = {
7855 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7856 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7857 			BPF_EXIT_INSN(),
7858 		},
7859 		INTERNAL,
7860 		{ },
7861 		{ { 0, (u32) cpu_to_be64(0x0123456789abcdefLL) } },
7862 	},
7863 	{
7864 		"ALU_END_FROM_BE 64: 0x0123456789abcdef >> 32 -> 0x01234567",
7865 		.u.insns_int = {
7866 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7867 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7868 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7869 			BPF_EXIT_INSN(),
7870 		},
7871 		INTERNAL,
7872 		{ },
7873 		{ { 0, (u32) (cpu_to_be64(0x0123456789abcdefLL) >> 32) } },
7874 	},
7875 	/* BPF_ALU | BPF_END | BPF_FROM_BE, reversed */
7876 	{
7877 		"ALU_END_FROM_BE 16: 0xfedcba9876543210 -> 0x3210",
7878 		.u.insns_int = {
7879 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7880 			BPF_ENDIAN(BPF_FROM_BE, R0, 16),
7881 			BPF_EXIT_INSN(),
7882 		},
7883 		INTERNAL,
7884 		{ },
7885 		{ { 0, cpu_to_be16(0x3210) } },
7886 	},
7887 	{
7888 		"ALU_END_FROM_BE 32: 0xfedcba9876543210 -> 0x76543210",
7889 		.u.insns_int = {
7890 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7891 			BPF_ENDIAN(BPF_FROM_BE, R0, 32),
7892 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7893 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7894 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7895 			BPF_EXIT_INSN(),
7896 		},
7897 		INTERNAL,
7898 		{ },
7899 		{ { 0, cpu_to_be32(0x76543210) } },
7900 	},
7901 	{
7902 		"ALU_END_FROM_BE 64: 0xfedcba9876543210 -> 0x76543210",
7903 		.u.insns_int = {
7904 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7905 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7906 			BPF_EXIT_INSN(),
7907 		},
7908 		INTERNAL,
7909 		{ },
7910 		{ { 0, (u32) cpu_to_be64(0xfedcba9876543210ULL) } },
7911 	},
7912 	{
7913 		"ALU_END_FROM_BE 64: 0xfedcba9876543210 >> 32 -> 0xfedcba98",
7914 		.u.insns_int = {
7915 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7916 			BPF_ENDIAN(BPF_FROM_BE, R0, 64),
7917 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7918 			BPF_EXIT_INSN(),
7919 		},
7920 		INTERNAL,
7921 		{ },
7922 		{ { 0, (u32) (cpu_to_be64(0xfedcba9876543210ULL) >> 32) } },
7923 	},
7924 	/* BPF_ALU | BPF_END | BPF_FROM_LE */
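	/*
	 * BPF_FROM_LE is the converse: it swaps on big-endian hosts, so the
	 * cpu_to_le*() wrappers give the right expectation either way.
	 */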
7925 	{
7926 		"ALU_END_FROM_LE 16: 0x0123456789abcdef -> 0xefcd",
7927 		.u.insns_int = {
7928 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7929 			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7930 			BPF_EXIT_INSN(),
7931 		},
7932 		INTERNAL,
7933 		{ },
7934 		{ { 0, cpu_to_le16(0xcdef) } },
7935 	},
7936 	{
7937 		"ALU_END_FROM_LE 32: 0x0123456789abcdef -> 0xefcdab89",
7938 		.u.insns_int = {
7939 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7940 			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7941 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7942 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7943 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7944 			BPF_EXIT_INSN(),
7945 		},
7946 		INTERNAL,
7947 		{ },
7948 		{ { 0, cpu_to_le32(0x89abcdef) } },
7949 	},
7950 	{
7951 		"ALU_END_FROM_LE 64: 0x0123456789abcdef -> 0x67452301",
7952 		.u.insns_int = {
7953 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7954 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7955 			BPF_EXIT_INSN(),
7956 		},
7957 		INTERNAL,
7958 		{ },
7959 		{ { 0, (u32) cpu_to_le64(0x0123456789abcdefLL) } },
7960 	},
7961 	{
7962 		"ALU_END_FROM_LE 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
7963 		.u.insns_int = {
7964 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
7965 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
7966 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
7967 			BPF_EXIT_INSN(),
7968 		},
7969 		INTERNAL,
7970 		{ },
7971 		{ { 0, (u32) (cpu_to_le64(0x0123456789abcdefLL) >> 32) } },
7972 	},
7973 	/* BPF_ALU | BPF_END | BPF_FROM_LE, reversed */
7974 	{
7975 		"ALU_END_FROM_LE 16: 0xfedcba9876543210 -> 0x1032",
7976 		.u.insns_int = {
7977 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7978 			BPF_ENDIAN(BPF_FROM_LE, R0, 16),
7979 			BPF_EXIT_INSN(),
7980 		},
7981 		INTERNAL,
7982 		{ },
7983 		{ { 0, cpu_to_le16(0x3210) } },
7984 	},
7985 	{
7986 		"ALU_END_FROM_LE 32: 0xfedcba9876543210 -> 0x10325476",
7987 		.u.insns_int = {
7988 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
7989 			BPF_ENDIAN(BPF_FROM_LE, R0, 32),
7990 			BPF_ALU64_REG(BPF_MOV, R1, R0),
7991 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
7992 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
7993 			BPF_EXIT_INSN(),
7994 		},
7995 		INTERNAL,
7996 		{ },
7997 		{ { 0, cpu_to_le32(0x76543210) } },
7998 	},
7999 	{
8000 		"ALU_END_FROM_LE 64: 0xfedcba9876543210 -> 0x10325476",
8001 		.u.insns_int = {
8002 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8003 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8004 			BPF_EXIT_INSN(),
8005 		},
8006 		INTERNAL,
8007 		{ },
8008 		{ { 0, (u32) cpu_to_le64(0xfedcba9876543210ULL) } },
8009 	},
8010 	{
8011 		"ALU_END_FROM_LE 64: 0xfedcba9876543210 >> 32 -> 0x98badcfe",
8012 		.u.insns_int = {
8013 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8014 			BPF_ENDIAN(BPF_FROM_LE, R0, 64),
8015 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
8016 			BPF_EXIT_INSN(),
8017 		},
8018 		INTERNAL,
8019 		{ },
8020 		{ { 0, (u32) (cpu_to_le64(0xfedcba9876543210ULL) >> 32) } },
8021 	},
8022 	/* BSWAP */
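	/*
	 * BSWAP (BPF_ALU64 | BPF_END) is the unconditional byte swap added
	 * with the cpu v4 instruction set; unlike the conditional
	 * FROM_BE/FROM_LE forms, the expected values here are plain
	 * constants, independent of host endianness.
	 */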
8023 	{
8024 		"BSWAP 16: 0x0123456789abcdef -> 0xefcd",
8025 		.u.insns_int = {
8026 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8027 			BPF_BSWAP(R0, 16),
8028 			BPF_EXIT_INSN(),
8029 		},
8030 		INTERNAL,
8031 		{ },
8032 		{ { 0, 0xefcd } },
8033 	},
8034 	{
8035 		"BSWAP 32: 0x0123456789abcdef -> 0xefcdab89",
8036 		.u.insns_int = {
8037 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8038 			BPF_BSWAP(R0, 32),
8039 			BPF_ALU64_REG(BPF_MOV, R1, R0),
8040 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
8041 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8042 			BPF_EXIT_INSN(),
8043 		},
8044 		INTERNAL,
8045 		{ },
8046 		{ { 0, 0xefcdab89 } },
8047 	},
8048 	{
8049 		"BSWAP 64: 0x0123456789abcdef -> 0x67452301",
8050 		.u.insns_int = {
8051 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8052 			BPF_BSWAP(R0, 64),
8053 			BPF_EXIT_INSN(),
8054 		},
8055 		INTERNAL,
8056 		{ },
8057 		{ { 0, 0x67452301 } },
8058 	},
8059 	{
8060 		"BSWAP 64: 0x0123456789abcdef >> 32 -> 0xefcdab89",
8061 		.u.insns_int = {
8062 			BPF_LD_IMM64(R0, 0x0123456789abcdefLL),
8063 			BPF_BSWAP(R0, 64),
8064 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
8065 			BPF_EXIT_INSN(),
8066 		},
8067 		INTERNAL,
8068 		{ },
8069 		{ { 0, 0xefcdab89 } },
8070 	},
8071 	/* BSWAP, reversed */
8072 	{
8073 		"BSWAP 16: 0xfedcba9876543210 -> 0x1032",
8074 		.u.insns_int = {
8075 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8076 			BPF_BSWAP(R0, 16),
8077 			BPF_EXIT_INSN(),
8078 		},
8079 		INTERNAL,
8080 		{ },
8081 		{ { 0, 0x1032 } },
8082 	},
8083 	{
8084 		"BSWAP 32: 0xfedcba9876543210 -> 0x10325476",
8085 		.u.insns_int = {
8086 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8087 			BPF_BSWAP(R0, 32),
8088 			BPF_ALU64_REG(BPF_MOV, R1, R0),
8089 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
8090 			BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
8091 			BPF_EXIT_INSN(),
8092 		},
8093 		INTERNAL,
8094 		{ },
8095 		{ { 0, 0x10325476 } },
8096 	},
8097 	{
8098 		"BSWAP 64: 0xfedcba9876543210 -> 0x98badcfe",
8099 		.u.insns_int = {
8100 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8101 			BPF_BSWAP(R0, 64),
8102 			BPF_EXIT_INSN(),
8103 		},
8104 		INTERNAL,
8105 		{ },
8106 		{ { 0, 0x98badcfe } },
8107 	},
8108 	{
8109 		"BSWAP 64: 0xfedcba9876543210 >> 32 -> 0x10325476",
8110 		.u.insns_int = {
8111 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
8112 			BPF_BSWAP(R0, 64),
8113 			BPF_ALU64_IMM(BPF_RSH, R0, 32),
8114 			BPF_EXIT_INSN(),
8115 		},
8116 		INTERNAL,
8117 		{ },
8118 		{ { 0, 0x10325476 } },
8119 	},
8120 	/* BPF_LDX_MEM B/H/W/DW */
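	/*
	 * Each test spills a known double word to the stack and loads a
	 * sub-word back. The #ifdef __BIG_ENDIAN offsets pick the address
	 * of the least significant bytes, which depends on host byte order.
	 * The "MSB set" variants verify that these plain loads zero-extend
	 * rather than sign-extend.
	 */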
8121 	{
8122 		"BPF_LDX_MEM | BPF_B, base",
8123 		.u.insns_int = {
8124 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8125 			BPF_LD_IMM64(R2, 0x0000000000000008ULL),
8126 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8127 #ifdef __BIG_ENDIAN
8128 			BPF_LDX_MEM(BPF_B, R0, R10, -1),
8129 #else
8130 			BPF_LDX_MEM(BPF_B, R0, R10, -8),
8131 #endif
8132 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8133 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8134 			BPF_EXIT_INSN(),
8135 		},
8136 		INTERNAL,
8137 		{ },
8138 		{ { 0, 0 } },
8139 		.stack_depth = 8,
8140 	},
8141 	{
8142 		"BPF_LDX_MEM | BPF_B, MSB set",
8143 		.u.insns_int = {
8144 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8145 			BPF_LD_IMM64(R2, 0x0000000000000088ULL),
8146 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8147 #ifdef __BIG_ENDIAN
8148 			BPF_LDX_MEM(BPF_B, R0, R10, -1),
8149 #else
8150 			BPF_LDX_MEM(BPF_B, R0, R10, -8),
8151 #endif
8152 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8153 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8154 			BPF_EXIT_INSN(),
8155 		},
8156 		INTERNAL,
8157 		{ },
8158 		{ { 0, 0 } },
8159 		.stack_depth = 8,
8160 	},
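	/*
	 * The FLAG_LARGE_MEM tests below use R1, the buffer handed to the
	 * program, as writable memory instead of the stack; the data_size
	 * value in test[] (512, 4096 + 16, ...) presumably sizes that
	 * buffer so the offsets used here stay in bounds.
	 */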
8161 	{
8162 		"BPF_LDX_MEM | BPF_B, negative offset",
8163 		.u.insns_int = {
8164 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8165 			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8166 			BPF_ALU64_IMM(BPF_ADD, R1, 512),
8167 			BPF_STX_MEM(BPF_B, R1, R2, -256),
8168 			BPF_LDX_MEM(BPF_B, R0, R1, -256),
8169 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8170 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8171 			BPF_EXIT_INSN(),
8172 		},
8173 		INTERNAL | FLAG_LARGE_MEM,
8174 		{ },
8175 		{ { 512, 0 } },
8176 		.stack_depth = 0,
8177 	},
8178 	{
8179 		"BPF_LDX_MEM | BPF_B, small positive offset",
8180 		.u.insns_int = {
8181 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8182 			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8183 			BPF_STX_MEM(BPF_B, R1, R2, 256),
8184 			BPF_LDX_MEM(BPF_B, R0, R1, 256),
8185 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8186 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8187 			BPF_EXIT_INSN(),
8188 		},
8189 		INTERNAL | FLAG_LARGE_MEM,
8190 		{ },
8191 		{ { 512, 0 } },
8192 		.stack_depth = 0,
8193 	},
8194 	{
8195 		"BPF_LDX_MEM | BPF_B, large positive offset",
8196 		.u.insns_int = {
8197 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8198 			BPF_LD_IMM64(R3, 0x0000000000000088ULL),
8199 			BPF_STX_MEM(BPF_B, R1, R2, 4096),
8200 			BPF_LDX_MEM(BPF_B, R0, R1, 4096),
8201 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8202 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8203 			BPF_EXIT_INSN(),
8204 		},
8205 		INTERNAL | FLAG_LARGE_MEM,
8206 		{ },
8207 		{ { 4096 + 16, 0 } },
8208 		.stack_depth = 0,
8209 	},
8210 	{
8211 		"BPF_LDX_MEM | BPF_H, base",
8212 		.u.insns_int = {
8213 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8214 			BPF_LD_IMM64(R2, 0x0000000000000708ULL),
8215 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8216 #ifdef __BIG_ENDIAN
8217 			BPF_LDX_MEM(BPF_H, R0, R10, -2),
8218 #else
8219 			BPF_LDX_MEM(BPF_H, R0, R10, -8),
8220 #endif
8221 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8222 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8223 			BPF_EXIT_INSN(),
8224 		},
8225 		INTERNAL,
8226 		{ },
8227 		{ { 0, 0 } },
8228 		.stack_depth = 8,
8229 	},
8230 	{
8231 		"BPF_LDX_MEM | BPF_H, MSB set",
8232 		.u.insns_int = {
8233 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8234 			BPF_LD_IMM64(R2, 0x0000000000008788ULL),
8235 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8236 #ifdef __BIG_ENDIAN
8237 			BPF_LDX_MEM(BPF_H, R0, R10, -2),
8238 #else
8239 			BPF_LDX_MEM(BPF_H, R0, R10, -8),
8240 #endif
8241 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8242 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8243 			BPF_EXIT_INSN(),
8244 		},
8245 		INTERNAL,
8246 		{ },
8247 		{ { 0, 0 } },
8248 		.stack_depth = 8,
8249 	},
8250 	{
8251 		"BPF_LDX_MEM | BPF_H, negative offset",
8252 		.u.insns_int = {
8253 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8254 			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8255 			BPF_ALU64_IMM(BPF_ADD, R1, 512),
8256 			BPF_STX_MEM(BPF_H, R1, R2, -256),
8257 			BPF_LDX_MEM(BPF_H, R0, R1, -256),
8258 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8259 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8260 			BPF_EXIT_INSN(),
8261 		},
8262 		INTERNAL | FLAG_LARGE_MEM,
8263 		{ },
8264 		{ { 512, 0 } },
8265 		.stack_depth = 0,
8266 	},
8267 	{
8268 		"BPF_LDX_MEM | BPF_H, small positive offset",
8269 		.u.insns_int = {
8270 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8271 			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8272 			BPF_STX_MEM(BPF_H, R1, R2, 256),
8273 			BPF_LDX_MEM(BPF_H, R0, R1, 256),
8274 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8275 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8276 			BPF_EXIT_INSN(),
8277 		},
8278 		INTERNAL | FLAG_LARGE_MEM,
8279 		{ },
8280 		{ { 512, 0 } },
8281 		.stack_depth = 0,
8282 	},
8283 	{
8284 		"BPF_LDX_MEM | BPF_H, large positive offset",
8285 		.u.insns_int = {
8286 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8287 			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8288 			BPF_STX_MEM(BPF_H, R1, R2, 8192),
8289 			BPF_LDX_MEM(BPF_H, R0, R1, 8192),
8290 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8291 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8292 			BPF_EXIT_INSN(),
8293 		},
8294 		INTERNAL | FLAG_LARGE_MEM,
8295 		{ },
8296 		{ { 8192 + 16, 0 } },
8297 		.stack_depth = 0,
8298 	},
8299 	{
8300 		"BPF_LDX_MEM | BPF_H, unaligned positive offset",
8301 		.u.insns_int = {
8302 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8303 			BPF_LD_IMM64(R3, 0x0000000000008788ULL),
8304 			BPF_STX_MEM(BPF_H, R1, R2, 13),
8305 			BPF_LDX_MEM(BPF_H, R0, R1, 13),
8306 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8307 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8308 			BPF_EXIT_INSN(),
8309 		},
8310 		INTERNAL | FLAG_LARGE_MEM,
8311 		{ },
8312 		{ { 32, 0 } },
8313 		.stack_depth = 0,
8314 	},
8315 	{
8316 		"BPF_LDX_MEM | BPF_W, base",
8317 		.u.insns_int = {
8318 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8319 			BPF_LD_IMM64(R2, 0x0000000005060708ULL),
8320 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8321 #ifdef __BIG_ENDIAN
8322 			BPF_LDX_MEM(BPF_W, R0, R10, -4),
8323 #else
8324 			BPF_LDX_MEM(BPF_W, R0, R10, -8),
8325 #endif
8326 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8327 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8328 			BPF_EXIT_INSN(),
8329 		},
8330 		INTERNAL,
8331 		{ },
8332 		{ { 0, 0 } },
8333 		.stack_depth = 8,
8334 	},
8335 	{
8336 		"BPF_LDX_MEM | BPF_W, MSB set",
8337 		.u.insns_int = {
8338 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8339 			BPF_LD_IMM64(R2, 0x0000000085868788ULL),
8340 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8341 #ifdef __BIG_ENDIAN
8342 			BPF_LDX_MEM(BPF_W, R0, R10, -4),
8343 #else
8344 			BPF_LDX_MEM(BPF_W, R0, R10, -8),
8345 #endif
8346 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8347 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8348 			BPF_EXIT_INSN(),
8349 		},
8350 		INTERNAL,
8351 		{ },
8352 		{ { 0, 0 } },
8353 		.stack_depth = 8,
8354 	},
8355 	{
8356 		"BPF_LDX_MEM | BPF_W, negative offset",
8357 		.u.insns_int = {
8358 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8359 			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8360 			BPF_ALU64_IMM(BPF_ADD, R1, 512),
8361 			BPF_STX_MEM(BPF_W, R1, R2, -256),
8362 			BPF_LDX_MEM(BPF_W, R0, R1, -256),
8363 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8364 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8365 			BPF_EXIT_INSN(),
8366 		},
8367 		INTERNAL | FLAG_LARGE_MEM,
8368 		{ },
8369 		{ { 512, 0 } },
8370 		.stack_depth = 0,
8371 	},
8372 	{
8373 		"BPF_LDX_MEM | BPF_W, small positive offset",
8374 		.u.insns_int = {
8375 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8376 			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8377 			BPF_STX_MEM(BPF_W, R1, R2, 256),
8378 			BPF_LDX_MEM(BPF_W, R0, R1, 256),
8379 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8380 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8381 			BPF_EXIT_INSN(),
8382 		},
8383 		INTERNAL | FLAG_LARGE_MEM,
8384 		{ },
8385 		{ { 512, 0 } },
8386 		.stack_depth = 0,
8387 	},
8388 	{
8389 		"BPF_LDX_MEM | BPF_W, large positive offset",
8390 		.u.insns_int = {
8391 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8392 			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8393 			BPF_STX_MEM(BPF_W, R1, R2, 16384),
8394 			BPF_LDX_MEM(BPF_W, R0, R1, 16384),
8395 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8396 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8397 			BPF_EXIT_INSN(),
8398 		},
8399 		INTERNAL | FLAG_LARGE_MEM,
8400 		{ },
8401 		{ { 16384 + 16, 0 } },
8402 		.stack_depth = 0,
8403 	},
8404 	{
8405 		"BPF_LDX_MEM | BPF_W, unaligned positive offset",
8406 		.u.insns_int = {
8407 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8408 			BPF_LD_IMM64(R3, 0x0000000085868788ULL),
8409 			BPF_STX_MEM(BPF_W, R1, R2, 13),
8410 			BPF_LDX_MEM(BPF_W, R0, R1, 13),
8411 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8412 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8413 			BPF_EXIT_INSN(),
8414 		},
8415 		INTERNAL | FLAG_LARGE_MEM,
8416 		{ },
8417 		{ { 32, 0 } },
8418 		.stack_depth = 0,
8419 	},
8420 	{
8421 		"BPF_LDX_MEM | BPF_DW, base",
8422 		.u.insns_int = {
8423 			BPF_LD_IMM64(R1, 0x0102030405060708ULL),
8424 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8425 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8426 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8427 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8428 			BPF_EXIT_INSN(),
8429 		},
8430 		INTERNAL,
8431 		{ },
8432 		{ { 0, 0 } },
8433 		.stack_depth = 8,
8434 	},
8435 	{
8436 		"BPF_LDX_MEM | BPF_DW, MSB set",
8437 		.u.insns_int = {
8438 			BPF_LD_IMM64(R1, 0x8182838485868788ULL),
8439 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8440 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8441 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
8442 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8443 			BPF_EXIT_INSN(),
8444 		},
8445 		INTERNAL,
8446 		{ },
8447 		{ { 0, 0 } },
8448 		.stack_depth = 8,
8449 	},
8450 	{
8451 		"BPF_LDX_MEM | BPF_DW, negative offset",
8452 		.u.insns_int = {
8453 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8454 			BPF_ALU64_IMM(BPF_ADD, R1, 512),
8455 			BPF_STX_MEM(BPF_DW, R1, R2, -256),
8456 			BPF_LDX_MEM(BPF_DW, R0, R1, -256),
8457 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8458 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8459 			BPF_EXIT_INSN(),
8460 		},
8461 		INTERNAL | FLAG_LARGE_MEM,
8462 		{ },
8463 		{ { 512, 0 } },
8464 		.stack_depth = 0,
8465 	},
8466 	{
8467 		"BPF_LDX_MEM | BPF_DW, small positive offset",
8468 		.u.insns_int = {
8469 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8470 			BPF_STX_MEM(BPF_DW, R1, R2, 256),
8471 			BPF_LDX_MEM(BPF_DW, R0, R1, 256),
8472 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8473 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8474 			BPF_EXIT_INSN(),
8475 		},
8476 		INTERNAL | FLAG_LARGE_MEM,
8477 		{ },
8478 		{ { 512, 0 } },
8479 		.stack_depth = 8,
8480 	},
8481 	{
8482 		"BPF_LDX_MEM | BPF_DW, large positive offset",
8483 		.u.insns_int = {
8484 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8485 			BPF_STX_MEM(BPF_DW, R1, R2, 32760),
8486 			BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
8487 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8488 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8489 			BPF_EXIT_INSN(),
8490 		},
8491 		INTERNAL | FLAG_LARGE_MEM,
8492 		{ },
8493 		{ { 32768, 0 } },
8494 		.stack_depth = 0,
8495 	},
8496 	{
8497 		"BPF_LDX_MEM | BPF_DW, unaligned positive offset",
8498 		.u.insns_int = {
8499 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8500 			BPF_STX_MEM(BPF_DW, R1, R2, 13),
8501 			BPF_LDX_MEM(BPF_DW, R0, R1, 13),
8502 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8503 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8504 			BPF_EXIT_INSN(),
8505 		},
8506 		INTERNAL | FLAG_LARGE_MEM,
8507 		{ },
8508 		{ { 32, 0 } },
8509 		.stack_depth = 0,
8510 	},
8511 	/* BPF_LDX_MEMSX B/H/W */
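	/*
	 * BPF_LDX_MEMSX is the sign-extending load introduced with cpu v4;
	 * each case stores a value whose sign bit is set and expects the
	 * upper bits of the result to come back as all ones.
	 */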
8512 	{
8513 		"BPF_LDX_MEMSX | BPF_B",
8514 		.u.insns_int = {
8515 			BPF_LD_IMM64(R1, 0xdead0000000000f0ULL),
8516 			BPF_LD_IMM64(R2, 0xfffffffffffffff0ULL),
8517 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8518 #ifdef __BIG_ENDIAN
8519 			BPF_LDX_MEMSX(BPF_B, R0, R10, -1),
8520 #else
8521 			BPF_LDX_MEMSX(BPF_B, R0, R10, -8),
8522 #endif
8523 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8524 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8525 			BPF_EXIT_INSN(),
8526 		},
8527 		INTERNAL,
8528 		{ },
8529 		{ { 0, 0 } },
8530 		.stack_depth = 8,
8531 	},
8532 	{
8533 		"BPF_LDX_MEMSX | BPF_H",
8534 		.u.insns_int = {
8535 			BPF_LD_IMM64(R1, 0xdead00000000f123ULL),
8536 			BPF_LD_IMM64(R2, 0xfffffffffffff123ULL),
8537 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8538 #ifdef __BIG_ENDIAN
8539 			BPF_LDX_MEMSX(BPF_H, R0, R10, -2),
8540 #else
8541 			BPF_LDX_MEMSX(BPF_H, R0, R10, -8),
8542 #endif
8543 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8544 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8545 			BPF_EXIT_INSN(),
8546 		},
8547 		INTERNAL,
8548 		{ },
8549 		{ { 0, 0 } },
8550 		.stack_depth = 8,
8551 	},
8552 	{
8553 		"BPF_LDX_MEMSX | BPF_W",
8554 		.u.insns_int = {
8555 			BPF_LD_IMM64(R1, 0x00000000deadbeefULL),
8556 			BPF_LD_IMM64(R2, 0xffffffffdeadbeefULL),
8557 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8558 #ifdef __BIG_ENDIAN
8559 			BPF_LDX_MEMSX(BPF_W, R0, R10, -4),
8560 #else
8561 			BPF_LDX_MEMSX(BPF_W, R0, R10, -8),
8562 #endif
8563 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
8564 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8565 			BPF_EXIT_INSN(),
8566 		},
8567 		INTERNAL,
8568 		{ },
8569 		{ { 0, 0 } },
8570 		.stack_depth = 8,
8571 	},
8572 	/* BPF_STX_MEM B/H/W/DW */
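	/*
	 * Technique: preload a full double word pattern, overwrite a single
	 * byte/half/word of it with BPF_STX, then read the whole double
	 * word back to prove that only the addressed bytes were modified.
	 */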
8573 	{
8574 		"BPF_STX_MEM | BPF_B",
8575 		.u.insns_int = {
8576 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8577 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8578 			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e008ULL),
8579 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8580 #ifdef __BIG_ENDIAN
8581 			BPF_STX_MEM(BPF_B, R10, R2, -1),
8582 #else
8583 			BPF_STX_MEM(BPF_B, R10, R2, -8),
8584 #endif
8585 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8586 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8587 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8588 			BPF_EXIT_INSN(),
8589 		},
8590 		INTERNAL,
8591 		{ },
8592 		{ { 0, 0 } },
8593 		.stack_depth = 8,
8594 	},
8595 	{
8596 		"BPF_STX_MEM | BPF_B, MSB set",
8597 		.u.insns_int = {
8598 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8599 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8600 			BPF_LD_IMM64(R3, 0x8090a0b0c0d0e088ULL),
8601 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8602 #ifdef __BIG_ENDIAN
8603 			BPF_STX_MEM(BPF_B, R10, R2, -1),
8604 #else
8605 			BPF_STX_MEM(BPF_B, R10, R2, -8),
8606 #endif
8607 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8608 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8609 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8610 			BPF_EXIT_INSN(),
8611 		},
8612 		INTERNAL,
8613 		{ },
8614 		{ { 0, 0 } },
8615 		.stack_depth = 8,
8616 	},
8617 	{
8618 		"BPF_STX_MEM | BPF_H",
8619 		.u.insns_int = {
8620 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8621 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8622 			BPF_LD_IMM64(R3, 0x8090a0b0c0d00708ULL),
8623 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8624 #ifdef __BIG_ENDIAN
8625 			BPF_STX_MEM(BPF_H, R10, R2, -2),
8626 #else
8627 			BPF_STX_MEM(BPF_H, R10, R2, -8),
8628 #endif
8629 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8630 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8631 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8632 			BPF_EXIT_INSN(),
8633 		},
8634 		INTERNAL,
8635 		{ },
8636 		{ { 0, 0 } },
8637 		.stack_depth = 8,
8638 	},
8639 	{
8640 		"BPF_STX_MEM | BPF_H, MSB set",
8641 		.u.insns_int = {
8642 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8643 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8644 			BPF_LD_IMM64(R3, 0x8090a0b0c0d08788ULL),
8645 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8646 #ifdef __BIG_ENDIAN
8647 			BPF_STX_MEM(BPF_H, R10, R2, -2),
8648 #else
8649 			BPF_STX_MEM(BPF_H, R10, R2, -8),
8650 #endif
8651 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8652 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8653 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8654 			BPF_EXIT_INSN(),
8655 		},
8656 		INTERNAL,
8657 		{ },
8658 		{ { 0, 0 } },
8659 		.stack_depth = 8,
8660 	},
8661 	{
8662 		"BPF_STX_MEM | BPF_W",
8663 		.u.insns_int = {
8664 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8665 			BPF_LD_IMM64(R2, 0x0102030405060708ULL),
8666 			BPF_LD_IMM64(R3, 0x8090a0b005060708ULL),
8667 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8668 #ifdef __BIG_ENDIAN
8669 			BPF_STX_MEM(BPF_W, R10, R2, -4),
8670 #else
8671 			BPF_STX_MEM(BPF_W, R10, R2, -8),
8672 #endif
8673 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8674 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8675 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8676 			BPF_EXIT_INSN(),
8677 		},
8678 		INTERNAL,
8679 		{ },
8680 		{ { 0, 0 } },
8681 		.stack_depth = 8,
8682 	},
8683 	{
8684 		"BPF_STX_MEM | BPF_W, MSB set",
8685 		.u.insns_int = {
8686 			BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
8687 			BPF_LD_IMM64(R2, 0x8182838485868788ULL),
8688 			BPF_LD_IMM64(R3, 0x8090a0b085868788ULL),
8689 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
8690 #ifdef __BIG_ENDIAN
8691 			BPF_STX_MEM(BPF_W, R10, R2, -4),
8692 #else
8693 			BPF_STX_MEM(BPF_W, R10, R2, -8),
8694 #endif
8695 			BPF_LDX_MEM(BPF_DW, R0, R10, -8),
8696 			BPF_JMP_REG(BPF_JNE, R0, R3, 1),
8697 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
8698 			BPF_EXIT_INSN(),
8699 		},
8700 		INTERNAL,
8701 		{ },
8702 		{ { 0, 0 } },
8703 		.stack_depth = 8,
8704 	},
8705 	/* BPF_ST(X) | BPF_MEM | BPF_B/H/W/DW */
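	/*
	 * Store an immediate (BPF_ST) or a register (BPF_STX) and load the
	 * value back. Note that BPF_ST_MEM with BPF_DW sign-extends its
	 * 32-bit immediate, which the "max negative 2" case relies on when
	 * it expects 0xffffffffffffffff in memory.
	 */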
8706 	{
8707 		"ST_MEM_B: Store/Load byte: max negative",
8708 		.u.insns_int = {
8709 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8710 			BPF_ST_MEM(BPF_B, R10, -40, 0xff),
8711 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8712 			BPF_EXIT_INSN(),
8713 		},
8714 		INTERNAL,
8715 		{ },
8716 		{ { 0, 0xff } },
8717 		.stack_depth = 40,
8718 	},
8719 	{
8720 		"ST_MEM_B: Store/Load byte: max positive",
8721 		.u.insns_int = {
8722 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8723 			BPF_ST_MEM(BPF_B, R10, -40, 0x7f),
8724 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8725 			BPF_EXIT_INSN(),
8726 		},
8727 		INTERNAL,
8728 		{ },
8729 		{ { 0, 0x7f } },
8730 		.stack_depth = 40,
8731 	},
8732 	{
8733 		"STX_MEM_B: Store/Load byte: max negative",
8734 		.u.insns_int = {
8735 			BPF_LD_IMM64(R0, 0),
8736 			BPF_LD_IMM64(R1, 0xffLL),
8737 			BPF_STX_MEM(BPF_B, R10, R1, -40),
8738 			BPF_LDX_MEM(BPF_B, R0, R10, -40),
8739 			BPF_EXIT_INSN(),
8740 		},
8741 		INTERNAL,
8742 		{ },
8743 		{ { 0, 0xff } },
8744 		.stack_depth = 40,
8745 	},
8746 	{
8747 		"ST_MEM_H: Store/Load half word: max negative",
8748 		.u.insns_int = {
8749 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8750 			BPF_ST_MEM(BPF_H, R10, -40, 0xffff),
8751 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8752 			BPF_EXIT_INSN(),
8753 		},
8754 		INTERNAL,
8755 		{ },
8756 		{ { 0, 0xffff } },
8757 		.stack_depth = 40,
8758 	},
8759 	{
8760 		"ST_MEM_H: Store/Load half word: max positive",
8761 		.u.insns_int = {
8762 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8763 			BPF_ST_MEM(BPF_H, R10, -40, 0x7fff),
8764 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8765 			BPF_EXIT_INSN(),
8766 		},
8767 		INTERNAL,
8768 		{ },
8769 		{ { 0, 0x7fff } },
8770 		.stack_depth = 40,
8771 	},
8772 	{
8773 		"STX_MEM_H: Store/Load half word: max negative",
8774 		.u.insns_int = {
8775 			BPF_LD_IMM64(R0, 0),
8776 			BPF_LD_IMM64(R1, 0xffffLL),
8777 			BPF_STX_MEM(BPF_H, R10, R1, -40),
8778 			BPF_LDX_MEM(BPF_H, R0, R10, -40),
8779 			BPF_EXIT_INSN(),
8780 		},
8781 		INTERNAL,
8782 		{ },
8783 		{ { 0, 0xffff } },
8784 		.stack_depth = 40,
8785 	},
8786 	{
8787 		"ST_MEM_W: Store/Load word: max negative",
8788 		.u.insns_int = {
8789 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8790 			BPF_ST_MEM(BPF_W, R10, -40, 0xffffffff),
8791 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8792 			BPF_EXIT_INSN(),
8793 		},
8794 		INTERNAL,
8795 		{ },
8796 		{ { 0, 0xffffffff } },
8797 		.stack_depth = 40,
8798 	},
8799 	{
8800 		"ST_MEM_W: Store/Load word: max positive",
8801 		.u.insns_int = {
8802 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8803 			BPF_ST_MEM(BPF_W, R10, -40, 0x7fffffff),
8804 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8805 			BPF_EXIT_INSN(),
8806 		},
8807 		INTERNAL,
8808 		{ },
8809 		{ { 0, 0x7fffffff } },
8810 		.stack_depth = 40,
8811 	},
8812 	{
8813 		"STX_MEM_W: Store/Load word: max negative",
8814 		.u.insns_int = {
8815 			BPF_LD_IMM64(R0, 0),
8816 			BPF_LD_IMM64(R1, 0xffffffffLL),
8817 			BPF_STX_MEM(BPF_W, R10, R1, -40),
8818 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8819 			BPF_EXIT_INSN(),
8820 		},
8821 		INTERNAL,
8822 		{ },
8823 		{ { 0, 0xffffffff } },
8824 		.stack_depth = 40,
8825 	},
8826 	{
8827 		"ST_MEM_DW: Store/Load double word: max negative",
8828 		.u.insns_int = {
8829 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8830 			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8831 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8832 			BPF_EXIT_INSN(),
8833 		},
8834 		INTERNAL,
8835 		{ },
8836 		{ { 0, 0xffffffff } },
8837 		.stack_depth = 40,
8838 	},
8839 	{
8840 		"ST_MEM_DW: Store/Load double word: max negative 2",
8841 		.u.insns_int = {
8842 			BPF_LD_IMM64(R2, 0xffff00000000ffffLL),
8843 			BPF_LD_IMM64(R3, 0xffffffffffffffffLL),
8844 			BPF_ST_MEM(BPF_DW, R10, -40, 0xffffffff),
8845 			BPF_LDX_MEM(BPF_DW, R2, R10, -40),
8846 			BPF_JMP_REG(BPF_JEQ, R2, R3, 2),
8847 			BPF_MOV32_IMM(R0, 2),
8848 			BPF_EXIT_INSN(),
8849 			BPF_MOV32_IMM(R0, 1),
8850 			BPF_EXIT_INSN(),
8851 		},
8852 		INTERNAL,
8853 		{ },
8854 		{ { 0, 0x1 } },
8855 		.stack_depth = 40,
8856 	},
8857 	{
8858 		"ST_MEM_DW: Store/Load double word: max positive",
8859 		.u.insns_int = {
8860 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
8861 			BPF_ST_MEM(BPF_DW, R10, -40, 0x7fffffff),
8862 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8863 			BPF_EXIT_INSN(),
8864 		},
8865 		INTERNAL,
8866 		{ },
8867 		{ { 0, 0x7fffffff } },
8868 		.stack_depth = 40,
8869 	},
8870 	{
8871 		"STX_MEM_DW: Store/Load double word: max negative",
8872 		.u.insns_int = {
8873 			BPF_LD_IMM64(R0, 0),
8874 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
8875 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8876 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
8877 			BPF_EXIT_INSN(),
8878 		},
8879 		INTERNAL,
8880 		{ },
8881 		{ { 0, 0xffffffff } },
8882 		.stack_depth = 40,
8883 	},
8884 	{
8885 		"STX_MEM_DW: Store double word: first word in memory",
8886 		.u.insns_int = {
8887 			BPF_LD_IMM64(R0, 0),
8888 			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8889 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8890 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
8891 			BPF_EXIT_INSN(),
8892 		},
8893 		INTERNAL,
8894 		{ },
8895 #ifdef __BIG_ENDIAN
8896 		{ { 0, 0x01234567 } },
8897 #else
8898 		{ { 0, 0x89abcdef } },
8899 #endif
8900 		.stack_depth = 40,
8901 	},
8902 	{
8903 		"STX_MEM_DW: Store double word: second word in memory",
8904 		.u.insns_int = {
8905 			BPF_LD_IMM64(R0, 0),
8906 			BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
8907 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
8908 			BPF_LDX_MEM(BPF_W, R0, R10, -36),
8909 			BPF_EXIT_INSN(),
8910 		},
8911 		INTERNAL,
8912 		{ },
8913 #ifdef __BIG_ENDIAN
8914 		{ { 0, 0x89abcdef } },
8915 #else
8916 		{ { 0, 0x01234567 } },
8917 #endif
8918 		.stack_depth = 40,
8919 	},
8920 	/* BPF_STX | BPF_ATOMIC | BPF_W/DW */
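	/*
	 * These programs are too long to write inline; the bpf_fill_stxw()
	 * and bpf_fill_stxdw() helpers defined earlier generate them and,
	 * per the test names, repeatedly add 1 to a word/double word in
	 * memory using BPF_ATOMIC (the former BPF_XADD).
	 */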
8921 	{
8922 		"STX_XADD_W: X + 1 + 1 + 1 + ...",
8923 		{ },
8924 		INTERNAL,
8925 		{ },
8926 		{ { 0, 4134 } },
8927 		.fill_helper = bpf_fill_stxw,
8928 	},
8929 	{
8930 		"STX_XADD_DW: X + 1 + 1 + 1 + ...",
8931 		{ },
8932 		INTERNAL,
8933 		{ },
8934 		{ { 0, 4134 } },
8935 		.fill_helper = bpf_fill_stxdw,
8936 	},
8937 	/*
8938 	 * Exhaustive tests of atomic operation variants.
8939 	 * Individual tests are expanded from template macros for all
8940 	 * combinations of ALU operation, word size and fetching.
8941 	 */
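	/*
	 * For BPF_W operations, BPF_ATOMIC_POISON() plants a known garbage
	 * pattern in the upper half of the source register. A correct
	 * implementation must ignore it, so these tests catch JITs that
	 * handle a 32-bit atomic as a 64-bit access.
	 */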
8942 #define BPF_ATOMIC_POISON(width) ((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)
8943 
8944 #define BPF_ATOMIC_OP_TEST1(width, op, logic, old, update, result)	\
8945 {									\
8946 	"BPF_ATOMIC | " #width ", " #op ": Test: "			\
8947 		#old " " #logic " " #update " = " #result,		\
8948 	.u.insns_int = {						\
8949 		BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)),	\
8950 		BPF_ST_MEM(width, R10, -40, old),			\
8951 		BPF_ATOMIC_OP(width, op, R10, R5, -40),			\
8952 		BPF_LDX_MEM(width, R0, R10, -40),			\
8953 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8954 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8955 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8956 		BPF_EXIT_INSN(),					\
8957 	},								\
8958 	INTERNAL,							\
8959 	{ },								\
8960 	{ { 0, result } },						\
8961 	.stack_depth = 40,						\
8962 }
8963 #define BPF_ATOMIC_OP_TEST2(width, op, logic, old, update, result)	\
8964 {									\
8965 	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r10: "	\
8966 		#old " " #logic " " #update " = " #result,		\
8967 	.u.insns_int = {						\
8968 		BPF_ALU64_REG(BPF_MOV, R1, R10),			\
8969 		BPF_LD_IMM64(R0, (update) | BPF_ATOMIC_POISON(width)),	\
8970 		BPF_ST_MEM(width, R10, -40, old),			\
8971 		BPF_ATOMIC_OP(width, op, R10, R0, -40),			\
8972 		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8973 		BPF_ALU64_REG(BPF_SUB, R0, R1),				\
8974 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8975 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8976 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8977 		BPF_EXIT_INSN(),					\
8978 	},								\
8979 	INTERNAL,							\
8980 	{ },								\
8981 	{ { 0, 0 } },							\
8982 	.stack_depth = 40,						\
8983 }
8984 #define BPF_ATOMIC_OP_TEST3(width, op, logic, old, update, result)	\
8985 {									\
8986 	"BPF_ATOMIC | " #width ", " #op ": Test side effects, r0: "	\
8987 		#old " " #logic " " #update " = " #result,		\
8988 	.u.insns_int = {						\
8989 		BPF_ALU64_REG(BPF_MOV, R0, R10),			\
8990 		BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)),	\
8991 		BPF_ST_MEM(width, R10, -40, old),			\
8992 		BPF_ATOMIC_OP(width, op, R10, R1, -40),			\
8993 		BPF_ALU64_REG(BPF_SUB, R0, R10),			\
8994 		BPF_ALU64_REG(BPF_MOV, R1, R0),				\
8995 		BPF_ALU64_IMM(BPF_RSH, R1, 32),				\
8996 		BPF_ALU64_REG(BPF_OR, R0, R1),				\
8997 		BPF_EXIT_INSN(),					\
8998 	},								\
8999 	INTERNAL,                                                       \
9000 	{ },                                                            \
9001 	{ { 0, 0 } },                                                   \
9002 	.stack_depth = 40,                                              \
9003 }
9004 #define BPF_ATOMIC_OP_TEST4(width, op, logic, old, update, result)	\
9005 {									\
9006 	"BPF_ATOMIC | " #width ", " #op ": Test fetch: "		\
9007 		#old " " #logic " " #update " = " #result,		\
9008 	.u.insns_int = {						\
9009 		BPF_LD_IMM64(R3, (update) | BPF_ATOMIC_POISON(width)),	\
9010 		BPF_ST_MEM(width, R10, -40, old),			\
9011 		BPF_ATOMIC_OP(width, op, R10, R3, -40),			\
9012 		BPF_ALU32_REG(BPF_MOV, R0, R3),                         \
9013 		BPF_EXIT_INSN(),					\
9014 	},								\
9015 	INTERNAL,                                                       \
9016 	{ },                                                            \
9017 	{ { 0, (op) & BPF_FETCH ? old : update } },			\
9018 	.stack_depth = 40,                                              \
9019 }
9020 	/* BPF_ATOMIC | BPF_W: BPF_ADD */
9021 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9022 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9023 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9024 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD, +, 0x12, 0xab, 0xbd),
9025 	/* BPF_ATOMIC | BPF_W: BPF_ADD | BPF_FETCH */
9026 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9027 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9028 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9029 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9030 	/* BPF_ATOMIC | BPF_DW: BPF_ADD */
9031 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9032 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9033 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9034 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD, +, 0x12, 0xab, 0xbd),
9035 	/* BPF_ATOMIC | BPF_DW: BPF_ADD | BPF_FETCH */
9036 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9037 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9038 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9039 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_ADD | BPF_FETCH, +, 0x12, 0xab, 0xbd),
9040 	/* BPF_ATOMIC | BPF_W: BPF_AND */
9041 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9042 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9043 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9044 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND, &, 0x12, 0xab, 0x02),
9045 	/* BPF_ATOMIC | BPF_W: BPF_AND | BPF_FETCH */
9046 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9047 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9048 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9049 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9050 	/* BPF_ATOMIC | BPF_DW: BPF_AND */
9051 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9052 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9053 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9054 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND, &, 0x12, 0xab, 0x02),
9055 	/* BPF_ATOMIC | BPF_DW: BPF_AND | BPF_FETCH */
9056 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9057 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9058 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9059 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_AND | BPF_FETCH, &, 0x12, 0xab, 0x02),
9060 	/* BPF_ATOMIC | BPF_W: BPF_OR */
9061 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9062 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9063 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9064 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR, |, 0x12, 0xab, 0xbb),
9065 	/* BPF_ATOMIC | BPF_W: BPF_OR | BPF_FETCH */
9066 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9067 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9068 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9069 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9070 	/* BPF_ATOMIC | BPF_DW: BPF_OR */
9071 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9072 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9073 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9074 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR, |, 0x12, 0xab, 0xbb),
9075 	/* BPF_ATOMIC | BPF_DW: BPF_OR | BPF_FETCH */
9076 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9077 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9078 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9079 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_OR | BPF_FETCH, |, 0x12, 0xab, 0xbb),
9080 	/* BPF_ATOMIC | BPF_W: BPF_XOR */
9081 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9082 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9083 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9084 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9085 	/* BPF_ATOMIC | BPF_W: BPF_XOR | BPF_FETCH */
9086 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9087 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9088 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9089 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9090 	/* BPF_ATOMIC | BPF_DW: BPF_XOR */
9091 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9092 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9093 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9094 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR, ^, 0x12, 0xab, 0xb9),
9095 	/* BPF_ATOMIC | BPF_DW: BPF_XOR | BPF_FETCH */
9096 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9097 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9098 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9099 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XOR | BPF_FETCH, ^, 0x12, 0xab, 0xb9),
9100 	/* BPF_ATOMIC | BPF_W: BPF_XCHG */
9101 	BPF_ATOMIC_OP_TEST1(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9102 	BPF_ATOMIC_OP_TEST2(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9103 	BPF_ATOMIC_OP_TEST3(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9104 	BPF_ATOMIC_OP_TEST4(BPF_W, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9105 	/* BPF_ATOMIC | BPF_DW: BPF_XCHG */
9106 	BPF_ATOMIC_OP_TEST1(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9107 	BPF_ATOMIC_OP_TEST2(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9108 	BPF_ATOMIC_OP_TEST3(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9109 	BPF_ATOMIC_OP_TEST4(BPF_DW, BPF_XCHG, xchg, 0x12, 0xab, 0xab),
9110 #undef BPF_ATOMIC_POISON
9111 #undef BPF_ATOMIC_OP_TEST1
9112 #undef BPF_ATOMIC_OP_TEST2
9113 #undef BPF_ATOMIC_OP_TEST3
9114 #undef BPF_ATOMIC_OP_TEST4
9115 	/* BPF_ATOMIC | BPF_W, BPF_CMPXCHG */
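	/*
	 * BPF_CMPXCHG compares R0 with the memory word at dst + off and
	 * stores the source register there on a match; R0 always receives
	 * the old memory value. The "return" cases check R0, while the
	 * "store" cases reload the word itself.
	 */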
9116 	{
9117 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful return",
9118 		.u.insns_int = {
9119 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9120 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9121 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9122 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9123 			BPF_EXIT_INSN(),
9124 		},
9125 		INTERNAL,
9126 		{ },
9127 		{ { 0, 0x01234567 } },
9128 		.stack_depth = 40,
9129 	},
9130 	{
9131 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test successful store",
9132 		.u.insns_int = {
9133 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9134 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9135 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9136 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9137 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
9138 			BPF_EXIT_INSN(),
9139 		},
9140 		INTERNAL,
9141 		{ },
9142 		{ { 0, 0x89abcdef } },
9143 		.stack_depth = 40,
9144 	},
9145 	{
9146 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure return",
9147 		.u.insns_int = {
9148 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9149 			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
9150 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9151 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9152 			BPF_EXIT_INSN(),
9153 		},
9154 		INTERNAL,
9155 		{ },
9156 		{ { 0, 0x01234567 } },
9157 		.stack_depth = 40,
9158 	},
9159 	{
9160 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test failure store",
9161 		.u.insns_int = {
9162 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9163 			BPF_ALU32_IMM(BPF_MOV, R0, 0x76543210),
9164 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9165 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9166 			BPF_LDX_MEM(BPF_W, R0, R10, -40),
9167 			BPF_EXIT_INSN(),
9168 		},
9169 		INTERNAL,
9170 		{ },
9171 		{ { 0, 0x01234567 } },
9172 		.stack_depth = 40,
9173 	},
9174 	{
9175 		"BPF_ATOMIC | BPF_W, BPF_CMPXCHG: Test side effects",
9176 		.u.insns_int = {
9177 			BPF_ST_MEM(BPF_W, R10, -40, 0x01234567),
9178 			BPF_ALU32_IMM(BPF_MOV, R0, 0x01234567),
9179 			BPF_ALU32_IMM(BPF_MOV, R3, 0x89abcdef),
9180 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9181 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R3, -40),
9182 			BPF_ALU32_REG(BPF_MOV, R0, R3),
9183 			BPF_EXIT_INSN(),
9184 		},
9185 		INTERNAL,
9186 		{ },
9187 		{ { 0, 0x89abcdef } },
9188 		.stack_depth = 40,
9189 	},
9190 	/* BPF_ATOMIC | BPF_DW, BPF_CMPXCHG */
9191 	{
9192 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful return",
9193 		.u.insns_int = {
9194 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9195 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9196 			BPF_ALU64_REG(BPF_MOV, R0, R1),
9197 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
9198 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9199 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9200 			BPF_ALU64_REG(BPF_SUB, R0, R1),
9201 			BPF_EXIT_INSN(),
9202 		},
9203 		INTERNAL,
9204 		{ },
9205 		{ { 0, 0 } },
9206 		.stack_depth = 40,
9207 	},
9208 	{
9209 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test successful store",
9210 		.u.insns_int = {
9211 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9212 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9213 			BPF_ALU64_REG(BPF_MOV, R0, R1),
9214 			BPF_STX_MEM(BPF_DW, R10, R0, -40),
9215 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9216 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
9217 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
9218 			BPF_ALU64_REG(BPF_SUB, R0, R2),
9219 			BPF_EXIT_INSN(),
9220 		},
9221 		INTERNAL,
9222 		{ },
9223 		{ { 0, 0 } },
9224 		.stack_depth = 40,
9225 	},
9226 	{
9227 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure return",
9228 		.u.insns_int = {
9229 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9230 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9231 			BPF_ALU64_REG(BPF_MOV, R0, R1),
9232 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
9233 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
9234 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9235 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9236 			BPF_ALU64_REG(BPF_SUB, R0, R1),
9237 			BPF_EXIT_INSN(),
9238 		},
9239 		INTERNAL,
9240 		{ },
9241 		{ { 0, 0 } },
9242 		.stack_depth = 40,
9243 	},
9244 	{
9245 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test failure store",
9246 		.u.insns_int = {
9247 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9248 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9249 			BPF_ALU64_REG(BPF_MOV, R0, R1),
9250 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
9251 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
9252 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9253 			BPF_LDX_MEM(BPF_DW, R0, R10, -40),
9254 			BPF_JMP_REG(BPF_JNE, R0, R1, 1),
9255 			BPF_ALU64_REG(BPF_SUB, R0, R1),
9256 			BPF_EXIT_INSN(),
9257 		},
9258 		INTERNAL,
9259 		{ },
9260 		{ { 0, 0 } },
9261 		.stack_depth = 40,
9262 	},
9263 	{
9264 		"BPF_ATOMIC | BPF_DW, BPF_CMPXCHG: Test side effects",
9265 		.u.insns_int = {
9266 			BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
9267 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),
9268 			BPF_ALU64_REG(BPF_MOV, R0, R1),
9269 			BPF_STX_MEM(BPF_DW, R10, R1, -40),
9270 			BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, R10, R2, -40),
9271 			BPF_LD_IMM64(R0, 0xfedcba9876543210ULL),
9272 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
9273 			BPF_ALU64_REG(BPF_SUB, R0, R2),
9274 			BPF_EXIT_INSN(),
9275 		},
9276 		INTERNAL,
9277 		{ },
9278 		{ { 0, 0 } },
9279 		.stack_depth = 40,
9280 	},
9281 	/* BPF_JMP32 | BPF_JEQ | BPF_K */
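	/*
	 * The BPF_JMP32 conditionals compare only the low 32 bits of
	 * their operands, so the immediates here are 32-bit quantities
	 * and any upper register bits are ignored by the comparison.
	 */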
9282 	{
9283 		"JMP32_JEQ_K: Small immediate",
9284 		.u.insns_int = {
9285 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9286 			BPF_JMP32_IMM(BPF_JEQ, R0, 321, 1),
9287 			BPF_JMP32_IMM(BPF_JEQ, R0, 123, 1),
9288 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9289 			BPF_EXIT_INSN(),
9290 		},
9291 		INTERNAL,
9292 		{ },
9293 		{ { 0, 123 } }
9294 	},
9295 	{
9296 		"JMP32_JEQ_K: Large immediate",
9297 		.u.insns_int = {
9298 			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
9299 			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678 & 0xffff, 1),
9300 			BPF_JMP32_IMM(BPF_JEQ, R0, 12345678, 1),
9301 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9302 			BPF_EXIT_INSN(),
9303 		},
9304 		INTERNAL,
9305 		{ },
9306 		{ { 0, 12345678 } }
9307 	},
9308 	{
9309 		"JMP32_JEQ_K: negative immediate",
9310 		.u.insns_int = {
9311 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9312 			BPF_JMP32_IMM(BPF_JEQ, R0,  123, 1),
9313 			BPF_JMP32_IMM(BPF_JEQ, R0, -123, 1),
9314 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9315 			BPF_EXIT_INSN(),
9316 		},
9317 		INTERNAL,
9318 		{ },
9319 		{ { 0, -123 } }
9320 	},
9321 	/* BPF_JMP32 | BPF_JEQ | BPF_X */
9322 	{
9323 		"JMP32_JEQ_X",
9324 		.u.insns_int = {
9325 			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
9326 			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
9327 			BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
9328 			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
9329 			BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
9330 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9331 			BPF_EXIT_INSN(),
9332 		},
9333 		INTERNAL,
9334 		{ },
9335 		{ { 0, 1234 } }
9336 	},
9337 	/* BPF_JMP32 | BPF_JNE | BPF_K */
9338 	{
9339 		"JMP32_JNE_K: Small immediate",
9340 		.u.insns_int = {
9341 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9342 			BPF_JMP32_IMM(BPF_JNE, R0, 123, 1),
9343 			BPF_JMP32_IMM(BPF_JNE, R0, 321, 1),
9344 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9345 			BPF_EXIT_INSN(),
9346 		},
9347 		INTERNAL,
9348 		{ },
9349 		{ { 0, 123 } }
9350 	},
9351 	{
9352 		"JMP32_JNE_K: Large immediate",
9353 		.u.insns_int = {
9354 			BPF_ALU32_IMM(BPF_MOV, R0, 12345678),
9355 			BPF_JMP32_IMM(BPF_JNE, R0, 12345678, 1),
9356 			BPF_JMP32_IMM(BPF_JNE, R0, 12345678 & 0xffff, 1),
9357 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9358 			BPF_EXIT_INSN(),
9359 		},
9360 		INTERNAL,
9361 		{ },
9362 		{ { 0, 12345678 } }
9363 	},
9364 	{
9365 		"JMP32_JNE_K: negative immediate",
9366 		.u.insns_int = {
9367 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9368 			BPF_JMP32_IMM(BPF_JNE, R0, -123, 1),
9369 			BPF_JMP32_IMM(BPF_JNE, R0,  123, 1),
9370 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9371 			BPF_EXIT_INSN(),
9372 		},
9373 		INTERNAL,
9374 		{ },
9375 		{ { 0, -123 } }
9376 	},
9377 	/* BPF_JMP32 | BPF_JNE | BPF_X */
9378 	{
9379 		"JMP32_JNE_X",
9380 		.u.insns_int = {
9381 			BPF_ALU32_IMM(BPF_MOV, R0, 1234),
9382 			BPF_ALU32_IMM(BPF_MOV, R1, 1234),
9383 			BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
9384 			BPF_ALU32_IMM(BPF_MOV, R1, 4321),
9385 			BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
9386 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9387 			BPF_EXIT_INSN(),
9388 		},
9389 		INTERNAL,
9390 		{ },
9391 		{ { 0, 1234 } }
9392 	},
9393 	/* BPF_JMP32 | BPF_JSET | BPF_K */
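	/*
	 * BPF_JSET branches when (dst & src/imm) is non-zero; in the
	 * JMP32 form both the AND and the zero test are done on 32 bits.
	 */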
9394 	{
9395 		"JMP32_JSET_K: Small immediate",
9396 		.u.insns_int = {
9397 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9398 			BPF_JMP32_IMM(BPF_JSET, R0, 2, 1),
9399 			BPF_JMP32_IMM(BPF_JSET, R0, 3, 1),
9400 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9401 			BPF_EXIT_INSN(),
9402 		},
9403 		INTERNAL,
9404 		{ },
9405 		{ { 0, 1 } }
9406 	},
9407 	{
9408 		"JMP32_JSET_K: Large immediate",
9409 		.u.insns_int = {
9410 			BPF_ALU32_IMM(BPF_MOV, R0, 0x40000000),
9411 			BPF_JMP32_IMM(BPF_JSET, R0, 0x3fffffff, 1),
9412 			BPF_JMP32_IMM(BPF_JSET, R0, 0x60000000, 1),
9413 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9414 			BPF_EXIT_INSN(),
9415 		},
9416 		INTERNAL,
9417 		{ },
9418 		{ { 0, 0x40000000 } }
9419 	},
9420 	{
9421 		"JMP32_JSET_K: negative immediate",
9422 		.u.insns_int = {
9423 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9424 			BPF_JMP32_IMM(BPF_JSET, R0, -1, 1),
9425 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9426 			BPF_EXIT_INSN(),
9427 		},
9428 		INTERNAL,
9429 		{ },
9430 		{ { 0, -123 } }
9431 	},
9432 	/* BPF_JMP32 | BPF_JSET | BPF_X */
9433 	{
9434 		"JMP32_JSET_X",
9435 		.u.insns_int = {
9436 			BPF_ALU32_IMM(BPF_MOV, R0, 8),
9437 			BPF_ALU32_IMM(BPF_MOV, R1, 7),
9438 			BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
9439 			BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
			BPF_JMP32_REG(BPF_JSET, R0, R1, 1),
9441 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9442 			BPF_EXIT_INSN(),
9443 		},
9444 		INTERNAL,
9445 		{ },
9446 		{ { 0, 8 } }
9447 	},
9448 	/* BPF_JMP32 | BPF_JGT | BPF_K */
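	/*
	 * JGT/JGE/JLT/JLE are unsigned comparisons, so values such as
	 * 0xfffffffe below are large 32-bit quantities; the signed JS*
	 * variants are exercised further down.
	 */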
9449 	{
9450 		"JMP32_JGT_K: Small immediate",
9451 		.u.insns_int = {
9452 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9453 			BPF_JMP32_IMM(BPF_JGT, R0, 123, 1),
9454 			BPF_JMP32_IMM(BPF_JGT, R0, 122, 1),
9455 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9456 			BPF_EXIT_INSN(),
9457 		},
9458 		INTERNAL,
9459 		{ },
9460 		{ { 0, 123 } }
9461 	},
9462 	{
9463 		"JMP32_JGT_K: Large immediate",
9464 		.u.insns_int = {
9465 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9466 			BPF_JMP32_IMM(BPF_JGT, R0, 0xffffffff, 1),
9467 			BPF_JMP32_IMM(BPF_JGT, R0, 0xfffffffd, 1),
9468 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9469 			BPF_EXIT_INSN(),
9470 		},
9471 		INTERNAL,
9472 		{ },
9473 		{ { 0, 0xfffffffe } }
9474 	},
9475 	/* BPF_JMP32 | BPF_JGT | BPF_X */
9476 	{
9477 		"JMP32_JGT_X",
9478 		.u.insns_int = {
9479 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9480 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9481 			BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
9482 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9483 			BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
9484 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9485 			BPF_EXIT_INSN(),
9486 		},
9487 		INTERNAL,
9488 		{ },
9489 		{ { 0, 0xfffffffe } }
9490 	},
9491 	/* BPF_JMP32 | BPF_JGE | BPF_K */
9492 	{
9493 		"JMP32_JGE_K: Small immediate",
9494 		.u.insns_int = {
9495 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9496 			BPF_JMP32_IMM(BPF_JGE, R0, 124, 1),
9497 			BPF_JMP32_IMM(BPF_JGE, R0, 123, 1),
9498 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9499 			BPF_EXIT_INSN(),
9500 		},
9501 		INTERNAL,
9502 		{ },
9503 		{ { 0, 123 } }
9504 	},
9505 	{
9506 		"JMP32_JGE_K: Large immediate",
9507 		.u.insns_int = {
9508 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9509 			BPF_JMP32_IMM(BPF_JGE, R0, 0xffffffff, 1),
9510 			BPF_JMP32_IMM(BPF_JGE, R0, 0xfffffffe, 1),
9511 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9512 			BPF_EXIT_INSN(),
9513 		},
9514 		INTERNAL,
9515 		{ },
9516 		{ { 0, 0xfffffffe } }
9517 	},
9518 	/* BPF_JMP32 | BPF_JGE | BPF_X */
9519 	{
9520 		"JMP32_JGE_X",
9521 		.u.insns_int = {
9522 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9523 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9524 			BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
9525 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9526 			BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
9527 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9528 			BPF_EXIT_INSN(),
9529 		},
9530 		INTERNAL,
9531 		{ },
9532 		{ { 0, 0xfffffffe } }
9533 	},
9534 	/* BPF_JMP32 | BPF_JLT | BPF_K */
9535 	{
9536 		"JMP32_JLT_K: Small immediate",
9537 		.u.insns_int = {
9538 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9539 			BPF_JMP32_IMM(BPF_JLT, R0, 123, 1),
9540 			BPF_JMP32_IMM(BPF_JLT, R0, 124, 1),
9541 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9542 			BPF_EXIT_INSN(),
9543 		},
9544 		INTERNAL,
9545 		{ },
9546 		{ { 0, 123 } }
9547 	},
9548 	{
9549 		"JMP32_JLT_K: Large immediate",
9550 		.u.insns_int = {
9551 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9552 			BPF_JMP32_IMM(BPF_JLT, R0, 0xfffffffd, 1),
9553 			BPF_JMP32_IMM(BPF_JLT, R0, 0xffffffff, 1),
9554 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9555 			BPF_EXIT_INSN(),
9556 		},
9557 		INTERNAL,
9558 		{ },
9559 		{ { 0, 0xfffffffe } }
9560 	},
9561 	/* BPF_JMP32 | BPF_JLT | BPF_X */
9562 	{
9563 		"JMP32_JLT_X",
9564 		.u.insns_int = {
9565 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9566 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9567 			BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
9568 			BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
9569 			BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
9570 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9571 			BPF_EXIT_INSN(),
9572 		},
9573 		INTERNAL,
9574 		{ },
9575 		{ { 0, 0xfffffffe } }
9576 	},
9577 	/* BPF_JMP32 | BPF_JLE | BPF_K */
9578 	{
9579 		"JMP32_JLE_K: Small immediate",
9580 		.u.insns_int = {
9581 			BPF_ALU32_IMM(BPF_MOV, R0, 123),
9582 			BPF_JMP32_IMM(BPF_JLE, R0, 122, 1),
9583 			BPF_JMP32_IMM(BPF_JLE, R0, 123, 1),
9584 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9585 			BPF_EXIT_INSN(),
9586 		},
9587 		INTERNAL,
9588 		{ },
9589 		{ { 0, 123 } }
9590 	},
9591 	{
9592 		"JMP32_JLE_K: Large immediate",
9593 		.u.insns_int = {
9594 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9595 			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffd, 1),
9596 			BPF_JMP32_IMM(BPF_JLE, R0, 0xfffffffe, 1),
9597 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9598 			BPF_EXIT_INSN(),
9599 		},
9600 		INTERNAL,
9601 		{ },
9602 		{ { 0, 0xfffffffe } }
9603 	},
9604 	/* BPF_JMP32 | BPF_JLE | BPF_X */
9605 	{
9606 		"JMP32_JLE_X",
9607 		.u.insns_int = {
9608 			BPF_ALU32_IMM(BPF_MOV, R0, 0xfffffffe),
9609 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
9610 			BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
9611 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
9612 			BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
9613 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9614 			BPF_EXIT_INSN(),
9615 		},
9616 		INTERNAL,
9617 		{ },
9618 		{ { 0, 0xfffffffe } }
9619 	},
9620 	/* BPF_JMP32 | BPF_JSGT | BPF_K */
9621 	{
9622 		"JMP32_JSGT_K: Small immediate",
9623 		.u.insns_int = {
9624 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9625 			BPF_JMP32_IMM(BPF_JSGT, R0, -123, 1),
9626 			BPF_JMP32_IMM(BPF_JSGT, R0, -124, 1),
9627 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9628 			BPF_EXIT_INSN(),
9629 		},
9630 		INTERNAL,
9631 		{ },
9632 		{ { 0, -123 } }
9633 	},
9634 	{
9635 		"JMP32_JSGT_K: Large immediate",
9636 		.u.insns_int = {
9637 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9638 			BPF_JMP32_IMM(BPF_JSGT, R0, -12345678, 1),
9639 			BPF_JMP32_IMM(BPF_JSGT, R0, -12345679, 1),
9640 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9641 			BPF_EXIT_INSN(),
9642 		},
9643 		INTERNAL,
9644 		{ },
9645 		{ { 0, -12345678 } }
9646 	},
9647 	/* BPF_JMP32 | BPF_JSGT | BPF_X */
9648 	{
9649 		"JMP32_JSGT_X",
9650 		.u.insns_int = {
9651 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9652 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9653 			BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
9654 			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9655 			BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
9656 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9657 			BPF_EXIT_INSN(),
9658 		},
9659 		INTERNAL,
9660 		{ },
9661 		{ { 0, -12345678 } }
9662 	},
9663 	/* BPF_JMP32 | BPF_JSGE | BPF_K */
9664 	{
9665 		"JMP32_JSGE_K: Small immediate",
9666 		.u.insns_int = {
9667 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9668 			BPF_JMP32_IMM(BPF_JSGE, R0, -122, 1),
9669 			BPF_JMP32_IMM(BPF_JSGE, R0, -123, 1),
9670 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9671 			BPF_EXIT_INSN(),
9672 		},
9673 		INTERNAL,
9674 		{ },
9675 		{ { 0, -123 } }
9676 	},
9677 	{
9678 		"JMP32_JSGE_K: Large immediate",
9679 		.u.insns_int = {
9680 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9681 			BPF_JMP32_IMM(BPF_JSGE, R0, -12345677, 1),
9682 			BPF_JMP32_IMM(BPF_JSGE, R0, -12345678, 1),
9683 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9684 			BPF_EXIT_INSN(),
9685 		},
9686 		INTERNAL,
9687 		{ },
9688 		{ { 0, -12345678 } }
9689 	},
9690 	/* BPF_JMP32 | BPF_JSGE | BPF_X */
9691 	{
9692 		"JMP32_JSGE_X",
9693 		.u.insns_int = {
9694 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9695 			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9696 			BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
9697 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9698 			BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
9699 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9700 			BPF_EXIT_INSN(),
9701 		},
9702 		INTERNAL,
9703 		{ },
9704 		{ { 0, -12345678 } }
9705 	},
9706 	/* BPF_JMP32 | BPF_JSLT | BPF_K */
9707 	{
9708 		"JMP32_JSLT_K: Small immediate",
9709 		.u.insns_int = {
9710 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9711 			BPF_JMP32_IMM(BPF_JSLT, R0, -123, 1),
9712 			BPF_JMP32_IMM(BPF_JSLT, R0, -122, 1),
9713 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9714 			BPF_EXIT_INSN(),
9715 		},
9716 		INTERNAL,
9717 		{ },
9718 		{ { 0, -123 } }
9719 	},
9720 	{
9721 		"JMP32_JSLT_K: Large immediate",
9722 		.u.insns_int = {
9723 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9724 			BPF_JMP32_IMM(BPF_JSLT, R0, -12345678, 1),
9725 			BPF_JMP32_IMM(BPF_JSLT, R0, -12345677, 1),
9726 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9727 			BPF_EXIT_INSN(),
9728 		},
9729 		INTERNAL,
9730 		{ },
9731 		{ { 0, -12345678 } }
9732 	},
9733 	/* BPF_JMP32 | BPF_JSLT | BPF_X */
9734 	{
9735 		"JMP32_JSLT_X",
9736 		.u.insns_int = {
9737 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9738 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9739 			BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
9740 			BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
9741 			BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
9742 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9743 			BPF_EXIT_INSN(),
9744 		},
9745 		INTERNAL,
9746 		{ },
9747 		{ { 0, -12345678 } }
9748 	},
9749 	/* BPF_JMP32 | BPF_JSLE | BPF_K */
9750 	{
9751 		"JMP32_JSLE_K: Small immediate",
9752 		.u.insns_int = {
9753 			BPF_ALU32_IMM(BPF_MOV, R0, -123),
9754 			BPF_JMP32_IMM(BPF_JSLE, R0, -124, 1),
9755 			BPF_JMP32_IMM(BPF_JSLE, R0, -123, 1),
9756 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9757 			BPF_EXIT_INSN(),
9758 		},
9759 		INTERNAL,
9760 		{ },
9761 		{ { 0, -123 } }
9762 	},
9763 	{
9764 		"JMP32_JSLE_K: Large immediate",
9765 		.u.insns_int = {
9766 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9767 			BPF_JMP32_IMM(BPF_JSLE, R0, -12345679, 1),
9768 			BPF_JMP32_IMM(BPF_JSLE, R0, -12345678, 1),
9769 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9770 			BPF_EXIT_INSN(),
9771 		},
9772 		INTERNAL,
9773 		{ },
9774 		{ { 0, -12345678 } }
9775 	},
	/* BPF_JMP32 | BPF_JSLE | BPF_X */
9777 	{
9778 		"JMP32_JSLE_X",
9779 		.u.insns_int = {
9780 			BPF_ALU32_IMM(BPF_MOV, R0, -12345678),
9781 			BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
9782 			BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
9783 			BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
9784 			BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
9785 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9786 			BPF_EXIT_INSN(),
9787 		},
9788 		INTERNAL,
9789 		{ },
9790 		{ { 0, -12345678 } }
9791 	},
9792 	/* BPF_JMP | BPF_EXIT */
9793 	{
9794 		"JMP_EXIT",
9795 		.u.insns_int = {
9796 			BPF_ALU32_IMM(BPF_MOV, R0, 0x4711),
9797 			BPF_EXIT_INSN(),
9798 			BPF_ALU32_IMM(BPF_MOV, R0, 0x4712),
9799 		},
9800 		INTERNAL,
9801 		{ },
9802 		{ { 0, 0x4711 } },
9803 	},
9804 	/* BPF_JMP | BPF_JA */
9805 	{
9806 		"JMP_JA: Unconditional jump: if (true) return 1",
9807 		.u.insns_int = {
9808 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9809 			BPF_JMP_IMM(BPF_JA, 0, 0, 1),
9810 			BPF_EXIT_INSN(),
9811 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9812 			BPF_EXIT_INSN(),
9813 		},
9814 		INTERNAL,
9815 		{ },
9816 		{ { 0, 1 } },
9817 	},
9818 	/* BPF_JMP32 | BPF_JA */
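	/*
	 * For BPF_JMP32 | BPF_JA (gotol) the branch offset is carried in
	 * the 32-bit imm field instead of the 16-bit off field, which is
	 * why the offset is passed in the imm slot of BPF_JMP32_IMM()
	 * below.
	 */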
9819 	{
9820 		"JMP32_JA: Unconditional jump: if (true) return 1",
9821 		.u.insns_int = {
9822 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9823 			BPF_JMP32_IMM(BPF_JA, 0, 1, 0),
9824 			BPF_EXIT_INSN(),
9825 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9826 			BPF_EXIT_INSN(),
9827 		},
9828 		INTERNAL,
9829 		{ },
9830 		{ { 0, 1 } },
9831 	},
9832 	/* BPF_JMP | BPF_JSLT | BPF_K */
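	/*
	 * The 64-bit signed jumps below load their negative operands via
	 * BPF_LD_IMM64 so that all upper bits are set, checking that the
	 * JS* comparisons treat the full 64-bit value as signed.
	 */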
9833 	{
9834 		"JMP_JSLT_K: Signed jump: if (-2 < -1) return 1",
9835 		.u.insns_int = {
9836 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9837 			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9838 			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9839 			BPF_EXIT_INSN(),
9840 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9841 			BPF_EXIT_INSN(),
9842 		},
9843 		INTERNAL,
9844 		{ },
9845 		{ { 0, 1 } },
9846 	},
9847 	{
9848 		"JMP_JSLT_K: Signed jump: if (-1 < -1) return 0",
9849 		.u.insns_int = {
9850 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9851 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9852 			BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
9853 			BPF_EXIT_INSN(),
9854 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9855 			BPF_EXIT_INSN(),
9856 		},
9857 		INTERNAL,
9858 		{ },
9859 		{ { 0, 1 } },
9860 	},
9861 	/* BPF_JMP | BPF_JSGT | BPF_K */
9862 	{
9863 		"JMP_JSGT_K: Signed jump: if (-1 > -2) return 1",
9864 		.u.insns_int = {
9865 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9866 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9867 			BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
9868 			BPF_EXIT_INSN(),
9869 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9870 			BPF_EXIT_INSN(),
9871 		},
9872 		INTERNAL,
9873 		{ },
9874 		{ { 0, 1 } },
9875 	},
9876 	{
9877 		"JMP_JSGT_K: Signed jump: if (-1 > -1) return 0",
9878 		.u.insns_int = {
9879 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9880 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9881 			BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
9882 			BPF_EXIT_INSN(),
9883 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9884 			BPF_EXIT_INSN(),
9885 		},
9886 		INTERNAL,
9887 		{ },
9888 		{ { 0, 1 } },
9889 	},
9890 	/* BPF_JMP | BPF_JSLE | BPF_K */
9891 	{
9892 		"JMP_JSLE_K: Signed jump: if (-2 <= -1) return 1",
9893 		.u.insns_int = {
9894 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9895 			BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
9896 			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9897 			BPF_EXIT_INSN(),
9898 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9899 			BPF_EXIT_INSN(),
9900 		},
9901 		INTERNAL,
9902 		{ },
9903 		{ { 0, 1 } },
9904 	},
9905 	{
9906 		"JMP_JSLE_K: Signed jump: if (-1 <= -1) return 1",
9907 		.u.insns_int = {
9908 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9909 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9910 			BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
9911 			BPF_EXIT_INSN(),
9912 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9913 			BPF_EXIT_INSN(),
9914 		},
9915 		INTERNAL,
9916 		{ },
9917 		{ { 0, 1 } },
9918 	},
9919 	{
9920 		"JMP_JSLE_K: Signed jump: value walk 1",
9921 		.u.insns_int = {
9922 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9923 			BPF_LD_IMM64(R1, 3),
9924 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
9925 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9926 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9927 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9928 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9929 			BPF_ALU64_IMM(BPF_SUB, R1, 1),
9930 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9931 			BPF_EXIT_INSN(),		/* bad exit */
9932 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9933 			BPF_EXIT_INSN(),
9934 		},
9935 		INTERNAL,
9936 		{ },
9937 		{ { 0, 1 } },
9938 	},
9939 	{
9940 		"JMP_JSLE_K: Signed jump: value walk 2",
9941 		.u.insns_int = {
9942 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9943 			BPF_LD_IMM64(R1, 3),
9944 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
9945 			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9946 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
9947 			BPF_ALU64_IMM(BPF_SUB, R1, 2),
9948 			BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
9949 			BPF_EXIT_INSN(),		/* bad exit */
9950 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
9951 			BPF_EXIT_INSN(),
9952 		},
9953 		INTERNAL,
9954 		{ },
9955 		{ { 0, 1 } },
9956 	},
9957 	/* BPF_JMP | BPF_JSGE | BPF_K */
9958 	{
9959 		"JMP_JSGE_K: Signed jump: if (-1 >= -2) return 1",
9960 		.u.insns_int = {
9961 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9962 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9963 			BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
9964 			BPF_EXIT_INSN(),
9965 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9966 			BPF_EXIT_INSN(),
9967 		},
9968 		INTERNAL,
9969 		{ },
9970 		{ { 0, 1 } },
9971 	},
9972 	{
9973 		"JMP_JSGE_K: Signed jump: if (-1 >= -1) return 1",
9974 		.u.insns_int = {
9975 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9976 			BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
9977 			BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
9978 			BPF_EXIT_INSN(),
9979 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
9980 			BPF_EXIT_INSN(),
9981 		},
9982 		INTERNAL,
9983 		{ },
9984 		{ { 0, 1 } },
9985 	},
9986 	{
9987 		"JMP_JSGE_K: Signed jump: value walk 1",
9988 		.u.insns_int = {
9989 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
9990 			BPF_LD_IMM64(R1, -3),
9991 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
9992 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9993 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
9994 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9995 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
9996 			BPF_ALU64_IMM(BPF_ADD, R1, 1),
9997 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
9998 			BPF_EXIT_INSN(),		/* bad exit */
9999 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
10000 			BPF_EXIT_INSN(),
10001 		},
10002 		INTERNAL,
10003 		{ },
10004 		{ { 0, 1 } },
10005 	},
10006 	{
10007 		"JMP_JSGE_K: Signed jump: value walk 2",
10008 		.u.insns_int = {
10009 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10010 			BPF_LD_IMM64(R1, -3),
10011 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
10012 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
10013 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
10014 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
10015 			BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
10016 			BPF_EXIT_INSN(),		/* bad exit */
10017 			BPF_ALU32_IMM(BPF_MOV, R0, 1),	/* good exit */
10018 			BPF_EXIT_INSN(),
10019 		},
10020 		INTERNAL,
10021 		{ },
10022 		{ { 0, 1 } },
10023 	},
10024 	/* BPF_JMP | BPF_JGT | BPF_K */
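	/*
	 * JGT/JGE/JLT/JLE on BPF_JMP are unsigned 64-bit comparisons, so
	 * the "Unsigned jump" cases expect -1 (all bits set) to compare
	 * as the largest possible value.
	 */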
10025 	{
10026 		"JMP_JGT_K: if (3 > 2) return 1",
10027 		.u.insns_int = {
10028 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10029 			BPF_LD_IMM64(R1, 3),
10030 			BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
10031 			BPF_EXIT_INSN(),
10032 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10033 			BPF_EXIT_INSN(),
10034 		},
10035 		INTERNAL,
10036 		{ },
10037 		{ { 0, 1 } },
10038 	},
10039 	{
10040 		"JMP_JGT_K: Unsigned jump: if (-1 > 1) return 1",
10041 		.u.insns_int = {
10042 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10043 			BPF_LD_IMM64(R1, -1),
10044 			BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
10045 			BPF_EXIT_INSN(),
10046 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10047 			BPF_EXIT_INSN(),
10048 		},
10049 		INTERNAL,
10050 		{ },
10051 		{ { 0, 1 } },
10052 	},
10053 	/* BPF_JMP | BPF_JLT | BPF_K */
10054 	{
10055 		"JMP_JLT_K: if (2 < 3) return 1",
10056 		.u.insns_int = {
10057 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10058 			BPF_LD_IMM64(R1, 2),
10059 			BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
10060 			BPF_EXIT_INSN(),
10061 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10062 			BPF_EXIT_INSN(),
10063 		},
10064 		INTERNAL,
10065 		{ },
10066 		{ { 0, 1 } },
10067 	},
10068 	{
10069 		"JMP_JGT_K: Unsigned jump: if (1 < -1) return 1",
10070 		.u.insns_int = {
10071 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10072 			BPF_LD_IMM64(R1, 1),
10073 			BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
10074 			BPF_EXIT_INSN(),
10075 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10076 			BPF_EXIT_INSN(),
10077 		},
10078 		INTERNAL,
10079 		{ },
10080 		{ { 0, 1 } },
10081 	},
10082 	/* BPF_JMP | BPF_JGE | BPF_K */
10083 	{
10084 		"JMP_JGE_K: if (3 >= 2) return 1",
10085 		.u.insns_int = {
10086 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10087 			BPF_LD_IMM64(R1, 3),
10088 			BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
10089 			BPF_EXIT_INSN(),
10090 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10091 			BPF_EXIT_INSN(),
10092 		},
10093 		INTERNAL,
10094 		{ },
10095 		{ { 0, 1 } },
10096 	},
10097 	/* BPF_JMP | BPF_JLE | BPF_K */
10098 	{
10099 		"JMP_JLE_K: if (2 <= 3) return 1",
10100 		.u.insns_int = {
10101 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10102 			BPF_LD_IMM64(R1, 2),
10103 			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10104 			BPF_EXIT_INSN(),
10105 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10106 			BPF_EXIT_INSN(),
10107 		},
10108 		INTERNAL,
10109 		{ },
10110 		{ { 0, 1 } },
10111 	},
10112 	/* BPF_JMP | BPF_JGT | BPF_K jump backwards */
10113 	{
10114 		"JMP_JGT_K: if (3 > 2) return 1 (jump backwards)",
10115 		.u.insns_int = {
10116 			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10117 			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10118 			BPF_EXIT_INSN(),
10119 			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10120 			BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
10121 			BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
10122 			BPF_EXIT_INSN(),
10123 		},
10124 		INTERNAL,
10125 		{ },
10126 		{ { 0, 1 } },
10127 	},
10128 	{
10129 		"JMP_JGE_K: if (3 >= 3) return 1",
10130 		.u.insns_int = {
10131 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10132 			BPF_LD_IMM64(R1, 3),
10133 			BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
10134 			BPF_EXIT_INSN(),
10135 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10136 			BPF_EXIT_INSN(),
10137 		},
10138 		INTERNAL,
10139 		{ },
10140 		{ { 0, 1 } },
10141 	},
10142 	/* BPF_JMP | BPF_JLT | BPF_K jump backwards */
10143 	{
10144 		"JMP_JGT_K: if (2 < 3) return 1 (jump backwards)",
10145 		.u.insns_int = {
10146 			BPF_JMP_IMM(BPF_JA, 0, 0, 2), /* goto start */
10147 			BPF_ALU32_IMM(BPF_MOV, R0, 1), /* out: */
10148 			BPF_EXIT_INSN(),
10149 			BPF_ALU32_IMM(BPF_MOV, R0, 0), /* start: */
10150 			BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
10151 			BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
10152 			BPF_EXIT_INSN(),
10153 		},
10154 		INTERNAL,
10155 		{ },
10156 		{ { 0, 1 } },
10157 	},
10158 	{
10159 		"JMP_JLE_K: if (3 <= 3) return 1",
10160 		.u.insns_int = {
10161 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10162 			BPF_LD_IMM64(R1, 3),
10163 			BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
10164 			BPF_EXIT_INSN(),
10165 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10166 			BPF_EXIT_INSN(),
10167 		},
10168 		INTERNAL,
10169 		{ },
10170 		{ { 0, 1 } },
10171 	},
10172 	/* BPF_JMP | BPF_JNE | BPF_K */
10173 	{
10174 		"JMP_JNE_K: if (3 != 2) return 1",
10175 		.u.insns_int = {
10176 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10177 			BPF_LD_IMM64(R1, 3),
10178 			BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
10179 			BPF_EXIT_INSN(),
10180 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10181 			BPF_EXIT_INSN(),
10182 		},
10183 		INTERNAL,
10184 		{ },
10185 		{ { 0, 1 } },
10186 	},
10187 	/* BPF_JMP | BPF_JEQ | BPF_K */
10188 	{
10189 		"JMP_JEQ_K: if (3 == 3) return 1",
10190 		.u.insns_int = {
10191 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10192 			BPF_LD_IMM64(R1, 3),
10193 			BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
10194 			BPF_EXIT_INSN(),
10195 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10196 			BPF_EXIT_INSN(),
10197 		},
10198 		INTERNAL,
10199 		{ },
10200 		{ { 0, 1 } },
10201 	},
10202 	/* BPF_JMP | BPF_JSET | BPF_K */
10203 	{
10204 		"JMP_JSET_K: if (0x3 & 0x2) return 1",
10205 		.u.insns_int = {
10206 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10207 			BPF_LD_IMM64(R1, 3),
10208 			BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
10209 			BPF_EXIT_INSN(),
10210 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10211 			BPF_EXIT_INSN(),
10212 		},
10213 		INTERNAL,
10214 		{ },
10215 		{ { 0, 1 } },
10216 	},
10217 	{
10218 		"JMP_JSET_K: if (0x3 & 0xffffffff) return 1",
10219 		.u.insns_int = {
10220 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10221 			BPF_LD_IMM64(R1, 3),
10222 			BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
10223 			BPF_EXIT_INSN(),
10224 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10225 			BPF_EXIT_INSN(),
10226 		},
10227 		INTERNAL,
10228 		{ },
10229 		{ { 0, 1 } },
10230 	},
10231 	/* BPF_JMP | BPF_JSGT | BPF_X */
10232 	{
10233 		"JMP_JSGT_X: Signed jump: if (-1 > -2) return 1",
10234 		.u.insns_int = {
10235 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10236 			BPF_LD_IMM64(R1, -1),
10237 			BPF_LD_IMM64(R2, -2),
10238 			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10239 			BPF_EXIT_INSN(),
10240 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10241 			BPF_EXIT_INSN(),
10242 		},
10243 		INTERNAL,
10244 		{ },
10245 		{ { 0, 1 } },
10246 	},
10247 	{
10248 		"JMP_JSGT_X: Signed jump: if (-1 > -1) return 0",
10249 		.u.insns_int = {
10250 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10251 			BPF_LD_IMM64(R1, -1),
10252 			BPF_LD_IMM64(R2, -1),
10253 			BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
10254 			BPF_EXIT_INSN(),
10255 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10256 			BPF_EXIT_INSN(),
10257 		},
10258 		INTERNAL,
10259 		{ },
10260 		{ { 0, 1 } },
10261 	},
10262 	/* BPF_JMP | BPF_JSLT | BPF_X */
10263 	{
10264 		"JMP_JSLT_X: Signed jump: if (-2 < -1) return 1",
10265 		.u.insns_int = {
10266 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10267 			BPF_LD_IMM64(R1, -1),
10268 			BPF_LD_IMM64(R2, -2),
10269 			BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
10270 			BPF_EXIT_INSN(),
10271 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10272 			BPF_EXIT_INSN(),
10273 		},
10274 		INTERNAL,
10275 		{ },
10276 		{ { 0, 1 } },
10277 	},
10278 	{
10279 		"JMP_JSLT_X: Signed jump: if (-1 < -1) return 0",
10280 		.u.insns_int = {
10281 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10282 			BPF_LD_IMM64(R1, -1),
10283 			BPF_LD_IMM64(R2, -1),
10284 			BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
10285 			BPF_EXIT_INSN(),
10286 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10287 			BPF_EXIT_INSN(),
10288 		},
10289 		INTERNAL,
10290 		{ },
10291 		{ { 0, 1 } },
10292 	},
10293 	/* BPF_JMP | BPF_JSGE | BPF_X */
10294 	{
10295 		"JMP_JSGE_X: Signed jump: if (-1 >= -2) return 1",
10296 		.u.insns_int = {
10297 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10298 			BPF_LD_IMM64(R1, -1),
10299 			BPF_LD_IMM64(R2, -2),
10300 			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10301 			BPF_EXIT_INSN(),
10302 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10303 			BPF_EXIT_INSN(),
10304 		},
10305 		INTERNAL,
10306 		{ },
10307 		{ { 0, 1 } },
10308 	},
10309 	{
10310 		"JMP_JSGE_X: Signed jump: if (-1 >= -1) return 1",
10311 		.u.insns_int = {
10312 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10313 			BPF_LD_IMM64(R1, -1),
10314 			BPF_LD_IMM64(R2, -1),
10315 			BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
10316 			BPF_EXIT_INSN(),
10317 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10318 			BPF_EXIT_INSN(),
10319 		},
10320 		INTERNAL,
10321 		{ },
10322 		{ { 0, 1 } },
10323 	},
10324 	/* BPF_JMP | BPF_JSLE | BPF_X */
10325 	{
10326 		"JMP_JSLE_X: Signed jump: if (-2 <= -1) return 1",
10327 		.u.insns_int = {
10328 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10329 			BPF_LD_IMM64(R1, -1),
10330 			BPF_LD_IMM64(R2, -2),
10331 			BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
10332 			BPF_EXIT_INSN(),
10333 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10334 			BPF_EXIT_INSN(),
10335 		},
10336 		INTERNAL,
10337 		{ },
10338 		{ { 0, 1 } },
10339 	},
10340 	{
10341 		"JMP_JSLE_X: Signed jump: if (-1 <= -1) return 1",
10342 		.u.insns_int = {
10343 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10344 			BPF_LD_IMM64(R1, -1),
10345 			BPF_LD_IMM64(R2, -1),
10346 			BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
10347 			BPF_EXIT_INSN(),
10348 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10349 			BPF_EXIT_INSN(),
10350 		},
10351 		INTERNAL,
10352 		{ },
10353 		{ { 0, 1 } },
10354 	},
10355 	/* BPF_JMP | BPF_JGT | BPF_X */
10356 	{
10357 		"JMP_JGT_X: if (3 > 2) return 1",
10358 		.u.insns_int = {
10359 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10360 			BPF_LD_IMM64(R1, 3),
10361 			BPF_LD_IMM64(R2, 2),
10362 			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10363 			BPF_EXIT_INSN(),
10364 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10365 			BPF_EXIT_INSN(),
10366 		},
10367 		INTERNAL,
10368 		{ },
10369 		{ { 0, 1 } },
10370 	},
10371 	{
10372 		"JMP_JGT_X: Unsigned jump: if (-1 > 1) return 1",
10373 		.u.insns_int = {
10374 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10375 			BPF_LD_IMM64(R1, -1),
10376 			BPF_LD_IMM64(R2, 1),
10377 			BPF_JMP_REG(BPF_JGT, R1, R2, 1),
10378 			BPF_EXIT_INSN(),
10379 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10380 			BPF_EXIT_INSN(),
10381 		},
10382 		INTERNAL,
10383 		{ },
10384 		{ { 0, 1 } },
10385 	},
10386 	/* BPF_JMP | BPF_JLT | BPF_X */
10387 	{
10388 		"JMP_JLT_X: if (2 < 3) return 1",
10389 		.u.insns_int = {
10390 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10391 			BPF_LD_IMM64(R1, 3),
10392 			BPF_LD_IMM64(R2, 2),
10393 			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10394 			BPF_EXIT_INSN(),
10395 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10396 			BPF_EXIT_INSN(),
10397 		},
10398 		INTERNAL,
10399 		{ },
10400 		{ { 0, 1 } },
10401 	},
10402 	{
10403 		"JMP_JLT_X: Unsigned jump: if (1 < -1) return 1",
10404 		.u.insns_int = {
10405 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10406 			BPF_LD_IMM64(R1, -1),
10407 			BPF_LD_IMM64(R2, 1),
10408 			BPF_JMP_REG(BPF_JLT, R2, R1, 1),
10409 			BPF_EXIT_INSN(),
10410 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10411 			BPF_EXIT_INSN(),
10412 		},
10413 		INTERNAL,
10414 		{ },
10415 		{ { 0, 1 } },
10416 	},
10417 	/* BPF_JMP | BPF_JGE | BPF_X */
10418 	{
10419 		"JMP_JGE_X: if (3 >= 2) return 1",
10420 		.u.insns_int = {
10421 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10422 			BPF_LD_IMM64(R1, 3),
10423 			BPF_LD_IMM64(R2, 2),
10424 			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10425 			BPF_EXIT_INSN(),
10426 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10427 			BPF_EXIT_INSN(),
10428 		},
10429 		INTERNAL,
10430 		{ },
10431 		{ { 0, 1 } },
10432 	},
10433 	{
10434 		"JMP_JGE_X: if (3 >= 3) return 1",
10435 		.u.insns_int = {
10436 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10437 			BPF_LD_IMM64(R1, 3),
10438 			BPF_LD_IMM64(R2, 3),
10439 			BPF_JMP_REG(BPF_JGE, R1, R2, 1),
10440 			BPF_EXIT_INSN(),
10441 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10442 			BPF_EXIT_INSN(),
10443 		},
10444 		INTERNAL,
10445 		{ },
10446 		{ { 0, 1 } },
10447 	},
10448 	/* BPF_JMP | BPF_JLE | BPF_X */
10449 	{
10450 		"JMP_JLE_X: if (2 <= 3) return 1",
10451 		.u.insns_int = {
10452 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10453 			BPF_LD_IMM64(R1, 3),
10454 			BPF_LD_IMM64(R2, 2),
10455 			BPF_JMP_REG(BPF_JLE, R2, R1, 1),
10456 			BPF_EXIT_INSN(),
10457 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10458 			BPF_EXIT_INSN(),
10459 		},
10460 		INTERNAL,
10461 		{ },
10462 		{ { 0, 1 } },
10463 	},
10464 	{
10465 		"JMP_JLE_X: if (3 <= 3) return 1",
10466 		.u.insns_int = {
10467 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10468 			BPF_LD_IMM64(R1, 3),
10469 			BPF_LD_IMM64(R2, 3),
10470 			BPF_JMP_REG(BPF_JLE, R1, R2, 1),
10471 			BPF_EXIT_INSN(),
10472 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10473 			BPF_EXIT_INSN(),
10474 		},
10475 		INTERNAL,
10476 		{ },
10477 		{ { 0, 1 } },
10478 	},
10479 	{
10480 		/* Mainly testing JIT + imm64 here. */
10481 		"JMP_JGE_X: ldimm64 test 1",
10482 		.u.insns_int = {
10483 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10484 			BPF_LD_IMM64(R1, 3),
10485 			BPF_LD_IMM64(R2, 2),
10486 			BPF_JMP_REG(BPF_JGE, R1, R2, 2),
10487 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10488 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10489 			BPF_EXIT_INSN(),
10490 		},
10491 		INTERNAL,
10492 		{ },
10493 		{ { 0, 0xeeeeeeeeU } },
10494 	},
10495 	{
10496 		"JMP_JGE_X: ldimm64 test 2",
10497 		.u.insns_int = {
10498 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10499 			BPF_LD_IMM64(R1, 3),
10500 			BPF_LD_IMM64(R2, 2),
10501 			BPF_JMP_REG(BPF_JGE, R1, R2, 0),
10502 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10503 			BPF_EXIT_INSN(),
10504 		},
10505 		INTERNAL,
10506 		{ },
10507 		{ { 0, 0xffffffffU } },
10508 	},
10509 	{
10510 		"JMP_JGE_X: ldimm64 test 3",
10511 		.u.insns_int = {
10512 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10513 			BPF_LD_IMM64(R1, 3),
10514 			BPF_LD_IMM64(R2, 2),
10515 			BPF_JMP_REG(BPF_JGE, R1, R2, 4),
10516 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10517 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10518 			BPF_EXIT_INSN(),
10519 		},
10520 		INTERNAL,
10521 		{ },
10522 		{ { 0, 1 } },
10523 	},
10524 	{
10525 		"JMP_JLE_X: ldimm64 test 1",
10526 		.u.insns_int = {
10527 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10528 			BPF_LD_IMM64(R1, 3),
10529 			BPF_LD_IMM64(R2, 2),
10530 			BPF_JMP_REG(BPF_JLE, R2, R1, 2),
10531 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10532 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10533 			BPF_EXIT_INSN(),
10534 		},
10535 		INTERNAL,
10536 		{ },
10537 		{ { 0, 0xeeeeeeeeU } },
10538 	},
10539 	{
10540 		"JMP_JLE_X: ldimm64 test 2",
10541 		.u.insns_int = {
10542 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10543 			BPF_LD_IMM64(R1, 3),
10544 			BPF_LD_IMM64(R2, 2),
10545 			BPF_JMP_REG(BPF_JLE, R2, R1, 0),
10546 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10547 			BPF_EXIT_INSN(),
10548 		},
10549 		INTERNAL,
10550 		{ },
10551 		{ { 0, 0xffffffffU } },
10552 	},
10553 	{
10554 		"JMP_JLE_X: ldimm64 test 3",
10555 		.u.insns_int = {
10556 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10557 			BPF_LD_IMM64(R1, 3),
10558 			BPF_LD_IMM64(R2, 2),
10559 			BPF_JMP_REG(BPF_JLE, R2, R1, 4),
10560 			BPF_LD_IMM64(R0, 0xffffffffffffffffULL),
10561 			BPF_LD_IMM64(R0, 0xeeeeeeeeeeeeeeeeULL),
10562 			BPF_EXIT_INSN(),
10563 		},
10564 		INTERNAL,
10565 		{ },
10566 		{ { 0, 1 } },
10567 	},
10568 	/* BPF_JMP | BPF_JNE | BPF_X */
10569 	{
10570 		"JMP_JNE_X: if (3 != 2) return 1",
10571 		.u.insns_int = {
10572 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10573 			BPF_LD_IMM64(R1, 3),
10574 			BPF_LD_IMM64(R2, 2),
10575 			BPF_JMP_REG(BPF_JNE, R1, R2, 1),
10576 			BPF_EXIT_INSN(),
10577 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10578 			BPF_EXIT_INSN(),
10579 		},
10580 		INTERNAL,
10581 		{ },
10582 		{ { 0, 1 } },
10583 	},
10584 	/* BPF_JMP | BPF_JEQ | BPF_X */
10585 	{
10586 		"JMP_JEQ_X: if (3 == 3) return 1",
10587 		.u.insns_int = {
10588 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10589 			BPF_LD_IMM64(R1, 3),
10590 			BPF_LD_IMM64(R2, 3),
10591 			BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
10592 			BPF_EXIT_INSN(),
10593 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10594 			BPF_EXIT_INSN(),
10595 		},
10596 		INTERNAL,
10597 		{ },
10598 		{ { 0, 1 } },
10599 	},
10600 	/* BPF_JMP | BPF_JSET | BPF_X */
10601 	{
10602 		"JMP_JSET_X: if (0x3 & 0x2) return 1",
10603 		.u.insns_int = {
10604 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10605 			BPF_LD_IMM64(R1, 3),
10606 			BPF_LD_IMM64(R2, 2),
10607 			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10608 			BPF_EXIT_INSN(),
10609 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10610 			BPF_EXIT_INSN(),
10611 		},
10612 		INTERNAL,
10613 		{ },
10614 		{ { 0, 1 } },
10615 	},
10616 	{
10617 		"JMP_JSET_X: if (0x3 & 0xffffffff) return 1",
10618 		.u.insns_int = {
10619 			BPF_ALU32_IMM(BPF_MOV, R0, 0),
10620 			BPF_LD_IMM64(R1, 3),
10621 			BPF_LD_IMM64(R2, 0xffffffff),
10622 			BPF_JMP_REG(BPF_JSET, R1, R2, 1),
10623 			BPF_EXIT_INSN(),
10624 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
10625 			BPF_EXIT_INSN(),
10626 		},
10627 		INTERNAL,
10628 		{ },
10629 		{ { 0, 1 } },
10630 	},
10631 	{
10632 		"JMP_JA: Jump, gap, jump, ...",
10633 		{ },
10634 		CLASSIC | FLAG_NO_DATA,
10635 		{ },
10636 		{ { 0, 0xababcbac } },
10637 		.fill_helper = bpf_fill_ja,
10638 	},
10639 	{	/* Mainly checking JIT here. */
10640 		"BPF_MAXINSNS: Maximum possible literals",
10641 		{ },
10642 		CLASSIC | FLAG_NO_DATA,
10643 		{ },
10644 		{ { 0, 0xffffffff } },
10645 		.fill_helper = bpf_fill_maxinsns1,
10646 	},
10647 	{	/* Mainly checking JIT here. */
10648 		"BPF_MAXINSNS: Single literal",
10649 		{ },
10650 		CLASSIC | FLAG_NO_DATA,
10651 		{ },
10652 		{ { 0, 0xfefefefe } },
10653 		.fill_helper = bpf_fill_maxinsns2,
10654 	},
10655 	{	/* Mainly checking JIT here. */
10656 		"BPF_MAXINSNS: Run/add until end",
10657 		{ },
10658 		CLASSIC | FLAG_NO_DATA,
10659 		{ },
10660 		{ { 0, 0x947bf368 } },
10661 		.fill_helper = bpf_fill_maxinsns3,
10662 	},
10663 	{
10664 		"BPF_MAXINSNS: Too many instructions",
10665 		{ },
10666 		CLASSIC | FLAG_NO_DATA | FLAG_EXPECTED_FAIL,
10667 		{ },
10668 		{ },
10669 		.fill_helper = bpf_fill_maxinsns4,
10670 		.expected_errcode = -EINVAL,
10671 	},
10672 	{	/* Mainly checking JIT here. */
10673 		"BPF_MAXINSNS: Very long jump",
10674 		{ },
10675 		CLASSIC | FLAG_NO_DATA,
10676 		{ },
10677 		{ { 0, 0xabababab } },
10678 		.fill_helper = bpf_fill_maxinsns5,
10679 	},
10680 	{	/* Mainly checking JIT here. */
10681 		"BPF_MAXINSNS: Ctx heavy transformations",
10682 		{ },
10683 		CLASSIC,
10684 		{ },
10685 		{
10686 			{  1, SKB_VLAN_PRESENT },
10687 			{ 10, SKB_VLAN_PRESENT }
10688 		},
10689 		.fill_helper = bpf_fill_maxinsns6,
10690 	},
10691 	{	/* Mainly checking JIT here. */
10692 		"BPF_MAXINSNS: Call heavy transformations",
10693 		{ },
10694 		CLASSIC | FLAG_NO_DATA,
10695 		{ },
10696 		{ { 1, 0 }, { 10, 0 } },
10697 		.fill_helper = bpf_fill_maxinsns7,
10698 	},
10699 	{	/* Mainly checking JIT here. */
10700 		"BPF_MAXINSNS: Jump heavy test",
10701 		{ },
10702 		CLASSIC | FLAG_NO_DATA,
10703 		{ },
10704 		{ { 0, 0xffffffff } },
10705 		.fill_helper = bpf_fill_maxinsns8,
10706 	},
10707 	{	/* Mainly checking JIT here. */
10708 		"BPF_MAXINSNS: Very long jump backwards",
10709 		{ },
10710 		INTERNAL | FLAG_NO_DATA,
10711 		{ },
10712 		{ { 0, 0xcbababab } },
10713 		.fill_helper = bpf_fill_maxinsns9,
10714 	},
10715 	{	/* Mainly checking JIT here. */
10716 		"BPF_MAXINSNS: Edge hopping nuthouse",
10717 		{ },
10718 		INTERNAL | FLAG_NO_DATA,
10719 		{ },
10720 		{ { 0, 0xabababac } },
10721 		.fill_helper = bpf_fill_maxinsns10,
10722 	},
10723 	{
10724 		"BPF_MAXINSNS: Jump, gap, jump, ...",
10725 		{ },
10726 		CLASSIC | FLAG_NO_DATA,
10727 		{ },
10728 		{ { 0, 0xababcbac } },
10729 		.fill_helper = bpf_fill_maxinsns11,
10730 	},
10731 	{
10732 		"BPF_MAXINSNS: jump over MSH",
10733 		{ },
10734 		CLASSIC | FLAG_EXPECTED_FAIL,
10735 		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10736 		{ { 4, 0xabababab } },
10737 		.fill_helper = bpf_fill_maxinsns12,
10738 		.expected_errcode = -EINVAL,
10739 	},
10740 	{
10741 		"BPF_MAXINSNS: exec all MSH",
10742 		{ },
10743 		CLASSIC,
10744 		{ 0xfa, 0xfb, 0xfc, 0xfd, },
10745 		{ { 4, 0xababab83 } },
10746 		.fill_helper = bpf_fill_maxinsns13,
10747 	},
10748 	{
10749 		"BPF_MAXINSNS: ld_abs+get_processor_id",
10750 		{ },
10751 		CLASSIC,
10752 		{ },
10753 		{ { 1, 0xbee } },
10754 		.fill_helper = bpf_fill_ld_abs_get_processor_id,
10755 	},
10756 	/*
10757 	 * LD_IND / LD_ABS on fragmented SKBs
10758 	 */
10759 	{
10760 		"LD_IND byte frag",
10761 		.u.insns = {
10762 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10763 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x0),
10764 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10765 		},
10766 		CLASSIC | FLAG_SKB_FRAG,
10767 		{ },
10768 		{ {0x40, 0x42} },
10769 		.frag_data = {
10770 			0x42, 0x00, 0x00, 0x00,
10771 			0x43, 0x44, 0x00, 0x00,
10772 			0x21, 0x07, 0x19, 0x83,
10773 		},
10774 	},
10775 	{
10776 		"LD_IND halfword frag",
10777 		.u.insns = {
10778 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10779 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x4),
10780 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10781 		},
10782 		CLASSIC | FLAG_SKB_FRAG,
10783 		{ },
10784 		{ {0x40, 0x4344} },
10785 		.frag_data = {
10786 			0x42, 0x00, 0x00, 0x00,
10787 			0x43, 0x44, 0x00, 0x00,
10788 			0x21, 0x07, 0x19, 0x83,
10789 		},
10790 	},
10791 	{
10792 		"LD_IND word frag",
10793 		.u.insns = {
10794 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10795 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x8),
10796 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10797 		},
10798 		CLASSIC | FLAG_SKB_FRAG,
10799 		{ },
10800 		{ {0x40, 0x21071983} },
10801 		.frag_data = {
10802 			0x42, 0x00, 0x00, 0x00,
10803 			0x43, 0x44, 0x00, 0x00,
10804 			0x21, 0x07, 0x19, 0x83,
10805 		},
10806 	},
10807 	{
10808 		"LD_IND halfword mixed head/frag",
10809 		.u.insns = {
10810 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10811 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
10812 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10813 		},
10814 		CLASSIC | FLAG_SKB_FRAG,
10815 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10816 		{ {0x40, 0x0519} },
10817 		.frag_data = { 0x19, 0x82 },
10818 	},
10819 	{
10820 		"LD_IND word mixed head/frag",
10821 		.u.insns = {
10822 			BPF_STMT(BPF_LDX | BPF_IMM, 0x40),
10823 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
10824 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10825 		},
10826 		CLASSIC | FLAG_SKB_FRAG,
10827 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10828 		{ {0x40, 0x25051982} },
10829 		.frag_data = { 0x19, 0x82 },
10830 	},
10831 	{
10832 		"LD_ABS byte frag",
10833 		.u.insns = {
10834 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x40),
10835 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10836 		},
10837 		CLASSIC | FLAG_SKB_FRAG,
10838 		{ },
10839 		{ {0x40, 0x42} },
10840 		.frag_data = {
10841 			0x42, 0x00, 0x00, 0x00,
10842 			0x43, 0x44, 0x00, 0x00,
10843 			0x21, 0x07, 0x19, 0x83,
10844 		},
10845 	},
10846 	{
10847 		"LD_ABS halfword frag",
10848 		.u.insns = {
10849 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x44),
10850 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10851 		},
10852 		CLASSIC | FLAG_SKB_FRAG,
10853 		{ },
10854 		{ {0x40, 0x4344} },
10855 		.frag_data = {
10856 			0x42, 0x00, 0x00, 0x00,
10857 			0x43, 0x44, 0x00, 0x00,
10858 			0x21, 0x07, 0x19, 0x83,
10859 		},
10860 	},
10861 	{
10862 		"LD_ABS word frag",
10863 		.u.insns = {
10864 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x48),
10865 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10866 		},
10867 		CLASSIC | FLAG_SKB_FRAG,
10868 		{ },
10869 		{ {0x40, 0x21071983} },
10870 		.frag_data = {
10871 			0x42, 0x00, 0x00, 0x00,
10872 			0x43, 0x44, 0x00, 0x00,
10873 			0x21, 0x07, 0x19, 0x83,
10874 		},
10875 	},
10876 	{
10877 		"LD_ABS halfword mixed head/frag",
10878 		.u.insns = {
10879 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
10880 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10881 		},
10882 		CLASSIC | FLAG_SKB_FRAG,
10883 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10884 		{ {0x40, 0x0519} },
10885 		.frag_data = { 0x19, 0x82 },
10886 	},
10887 	{
10888 		"LD_ABS word mixed head/frag",
10889 		.u.insns = {
10890 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3e),
10891 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10892 		},
10893 		CLASSIC | FLAG_SKB_FRAG,
10894 		{ [0x3e] = 0x25, [0x3f] = 0x05, },
10895 		{ {0x40, 0x25051982} },
10896 		.frag_data = { 0x19, 0x82 },
10897 	},
10898 	/*
10899 	 * LD_IND / LD_ABS on non fragmented SKBs
10900 	 */
10901 	{
10902 		/*
10903 		 * this tests that the JIT/interpreter correctly resets X
10904 		 * before using it in an LD_IND instruction.
10905 		 */
10906 		"LD_IND byte default X",
10907 		.u.insns = {
10908 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10909 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10910 		},
10911 		CLASSIC,
10912 		{ [0x1] = 0x42 },
10913 		{ {0x40, 0x42 } },
10914 	},
10915 	{
10916 		"LD_IND byte positive offset",
10917 		.u.insns = {
10918 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10919 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10920 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10921 		},
10922 		CLASSIC,
10923 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10924 		{ {0x40, 0x82 } },
10925 	},
10926 	{
10927 		"LD_IND byte negative offset",
10928 		.u.insns = {
10929 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10930 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x1),
10931 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10932 		},
10933 		CLASSIC,
10934 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10935 		{ {0x40, 0x05 } },
10936 	},
10937 	{
10938 		"LD_IND byte positive offset, all ff",
10939 		.u.insns = {
10940 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10941 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10942 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10943 		},
10944 		CLASSIC,
10945 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
10946 		{ {0x40, 0xff } },
10947 	},
10948 	{
10949 		"LD_IND byte positive offset, out of bounds",
10950 		.u.insns = {
10951 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10952 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, 0x1),
10953 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10954 		},
10955 		CLASSIC,
10956 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10957 		{ {0x3f, 0 }, },
10958 	},
10959 	{
10960 		"LD_IND byte negative offset, out of bounds",
10961 		.u.insns = {
10962 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
10963 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, -0x3f),
10964 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10965 		},
10966 		CLASSIC,
10967 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10968 		{ {0x3f, 0 } },
10969 	},
10970 	{
10971 		"LD_IND byte negative offset, multiple calls",
10972 		.u.insns = {
10973 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
10974 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 1),
10975 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 2),
10976 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 3),
10977 			BPF_STMT(BPF_LD | BPF_IND | BPF_B, SKF_LL_OFF + 4),
10978 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10979 		},
10980 		CLASSIC,
10981 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
10982 		{ {0x40, 0x82 }, },
10983 	},
10984 	{
10985 		"LD_IND halfword positive offset",
10986 		.u.insns = {
10987 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
10988 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x2),
10989 			BPF_STMT(BPF_RET | BPF_A, 0x0),
10990 		},
10991 		CLASSIC,
10992 		{
10993 			[0x1c] = 0xaa, [0x1d] = 0x55,
10994 			[0x1e] = 0xbb, [0x1f] = 0x66,
10995 			[0x20] = 0xcc, [0x21] = 0x77,
10996 			[0x22] = 0xdd, [0x23] = 0x88,
10997 		},
10998 		{ {0x40, 0xdd88 } },
10999 	},
11000 	{
11001 		"LD_IND halfword negative offset",
11002 		.u.insns = {
11003 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11004 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x2),
11005 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11006 		},
11007 		CLASSIC,
11008 		{
11009 			[0x1c] = 0xaa, [0x1d] = 0x55,
11010 			[0x1e] = 0xbb, [0x1f] = 0x66,
11011 			[0x20] = 0xcc, [0x21] = 0x77,
11012 			[0x22] = 0xdd, [0x23] = 0x88,
11013 		},
11014 		{ {0x40, 0xbb66 } },
11015 	},
11016 	{
11017 		"LD_IND halfword unaligned",
11018 		.u.insns = {
11019 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11020 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x1),
11021 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11022 		},
11023 		CLASSIC,
11024 		{
11025 			[0x1c] = 0xaa, [0x1d] = 0x55,
11026 			[0x1e] = 0xbb, [0x1f] = 0x66,
11027 			[0x20] = 0xcc, [0x21] = 0x77,
11028 			[0x22] = 0xdd, [0x23] = 0x88,
11029 		},
11030 		{ {0x40, 0x66cc } },
11031 	},
11032 	{
11033 		"LD_IND halfword positive offset, all ff",
11034 		.u.insns = {
11035 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3d),
11036 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11037 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11038 		},
11039 		CLASSIC,
11040 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
11041 		{ {0x40, 0xffff } },
11042 	},
11043 	{
11044 		"LD_IND halfword positive offset, out of bounds",
11045 		.u.insns = {
11046 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11047 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, 0x1),
11048 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11049 		},
11050 		CLASSIC,
11051 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11052 		{ {0x3f, 0 }, },
11053 	},
11054 	{
11055 		"LD_IND halfword negative offset, out of bounds",
11056 		.u.insns = {
11057 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11058 			BPF_STMT(BPF_LD | BPF_IND | BPF_H, -0x3f),
11059 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11060 		},
11061 		CLASSIC,
11062 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11063 		{ {0x3f, 0 } },
11064 	},
11065 	{
11066 		"LD_IND word positive offset",
11067 		.u.insns = {
11068 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11069 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x4),
11070 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11071 		},
11072 		CLASSIC,
11073 		{
11074 			[0x1c] = 0xaa, [0x1d] = 0x55,
11075 			[0x1e] = 0xbb, [0x1f] = 0x66,
11076 			[0x20] = 0xcc, [0x21] = 0x77,
11077 			[0x22] = 0xdd, [0x23] = 0x88,
11078 			[0x24] = 0xee, [0x25] = 0x99,
11079 			[0x26] = 0xff, [0x27] = 0xaa,
11080 		},
11081 		{ {0x40, 0xee99ffaa } },
11082 	},
11083 	{
11084 		"LD_IND word negative offset",
11085 		.u.insns = {
11086 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11087 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x4),
11088 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11089 		},
11090 		CLASSIC,
11091 		{
11092 			[0x1c] = 0xaa, [0x1d] = 0x55,
11093 			[0x1e] = 0xbb, [0x1f] = 0x66,
11094 			[0x20] = 0xcc, [0x21] = 0x77,
11095 			[0x22] = 0xdd, [0x23] = 0x88,
11096 			[0x24] = 0xee, [0x25] = 0x99,
11097 			[0x26] = 0xff, [0x27] = 0xaa,
11098 		},
11099 		{ {0x40, 0xaa55bb66 } },
11100 	},
11101 	{
11102 		"LD_IND word unaligned (addr & 3 == 2)",
11103 		.u.insns = {
11104 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11105 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x2),
11106 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11107 		},
11108 		CLASSIC,
11109 		{
11110 			[0x1c] = 0xaa, [0x1d] = 0x55,
11111 			[0x1e] = 0xbb, [0x1f] = 0x66,
11112 			[0x20] = 0xcc, [0x21] = 0x77,
11113 			[0x22] = 0xdd, [0x23] = 0x88,
11114 			[0x24] = 0xee, [0x25] = 0x99,
11115 			[0x26] = 0xff, [0x27] = 0xaa,
11116 		},
11117 		{ {0x40, 0xbb66cc77 } },
11118 	},
11119 	{
11120 		"LD_IND word unaligned (addr & 3 == 1)",
11121 		.u.insns = {
11122 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11123 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3),
11124 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11125 		},
11126 		CLASSIC,
11127 		{
11128 			[0x1c] = 0xaa, [0x1d] = 0x55,
11129 			[0x1e] = 0xbb, [0x1f] = 0x66,
11130 			[0x20] = 0xcc, [0x21] = 0x77,
11131 			[0x22] = 0xdd, [0x23] = 0x88,
11132 			[0x24] = 0xee, [0x25] = 0x99,
11133 			[0x26] = 0xff, [0x27] = 0xaa,
11134 		},
11135 		{ {0x40, 0x55bb66cc } },
11136 	},
11137 	{
11138 		"LD_IND word unaligned (addr & 3 == 3)",
11139 		.u.insns = {
11140 			BPF_STMT(BPF_LDX | BPF_IMM, 0x20),
11141 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x1),
11142 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11143 		},
11144 		CLASSIC,
11145 		{
11146 			[0x1c] = 0xaa, [0x1d] = 0x55,
11147 			[0x1e] = 0xbb, [0x1f] = 0x66,
11148 			[0x20] = 0xcc, [0x21] = 0x77,
11149 			[0x22] = 0xdd, [0x23] = 0x88,
11150 			[0x24] = 0xee, [0x25] = 0x99,
11151 			[0x26] = 0xff, [0x27] = 0xaa,
11152 		},
11153 		{ {0x40, 0x66cc77dd } },
11154 	},
11155 	{
11156 		"LD_IND word positive offset, all ff",
11157 		.u.insns = {
11158 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3b),
11159 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11160 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11161 		},
11162 		CLASSIC,
11163 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
11164 		{ {0x40, 0xffffffff } },
11165 	},
11166 	{
11167 		"LD_IND word positive offset, out of bounds",
11168 		.u.insns = {
11169 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11170 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, 0x1),
11171 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11172 		},
11173 		CLASSIC,
11174 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11175 		{ {0x3f, 0 }, },
11176 	},
11177 	{
11178 		"LD_IND word negative offset, out of bounds",
11179 		.u.insns = {
11180 			BPF_STMT(BPF_LDX | BPF_IMM, 0x3e),
11181 			BPF_STMT(BPF_LD | BPF_IND | BPF_W, -0x3f),
11182 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11183 		},
11184 		CLASSIC,
11185 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11186 		{ {0x3f, 0 } },
11187 	},
11188 	{
11189 		"LD_ABS byte",
11190 		.u.insns = {
11191 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x20),
11192 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11193 		},
11194 		CLASSIC,
11195 		{
11196 			[0x1c] = 0xaa, [0x1d] = 0x55,
11197 			[0x1e] = 0xbb, [0x1f] = 0x66,
11198 			[0x20] = 0xcc, [0x21] = 0x77,
11199 			[0x22] = 0xdd, [0x23] = 0x88,
11200 			[0x24] = 0xee, [0x25] = 0x99,
11201 			[0x26] = 0xff, [0x27] = 0xaa,
11202 		},
11203 		{ {0x40, 0xcc } },
11204 	},
11205 	{
11206 		"LD_ABS byte positive offset, all ff",
11207 		.u.insns = {
11208 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11209 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11210 		},
11211 		CLASSIC,
11212 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
11213 		{ {0x40, 0xff } },
11214 	},
11215 	{
11216 		"LD_ABS byte positive offset, out of bounds",
11217 		.u.insns = {
11218 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, 0x3f),
11219 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11220 		},
11221 		CLASSIC,
11222 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11223 		{ {0x3f, 0 }, },
11224 	},
11225 	{
11226 		"LD_ABS byte negative offset, out of bounds load",
11227 		.u.insns = {
11228 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, -1),
11229 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11230 		},
11231 		CLASSIC | FLAG_EXPECTED_FAIL,
11232 		.expected_errcode = -EINVAL,
11233 	},
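	/*
	 * A bare negative offset such as -1 above is rejected when the
	 * program is loaded (-EINVAL); negative offsets are only meaningful
	 * relative to the special SKF_* bases. SKF_LL_OFF makes the offset
	 * relative to the link-layer header, which for this test skb lines
	 * up with the start of the test data, so SKF_LL_OFF + 0x3f reads the
	 * last byte (0x82) of a 0x40-byte frame and falls out of bounds when
	 * only 0x3f bytes are present.
	 */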
11234 	{
11235 		"LD_ABS byte negative offset, in bounds",
11236 		.u.insns = {
11237 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11238 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11239 		},
11240 		CLASSIC,
11241 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11242 		{ {0x40, 0x82 }, },
11243 	},
11244 	{
11245 		"LD_ABS byte negative offset, out of bounds",
11246 		.u.insns = {
11247 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11248 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11249 		},
11250 		CLASSIC,
11251 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11252 		{ {0x3f, 0 }, },
11253 	},
11254 	{
11255 		"LD_ABS byte negative offset, multiple calls",
11256 		.u.insns = {
11257 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3c),
11258 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3d),
11259 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3e),
11260 			BPF_STMT(BPF_LD | BPF_ABS | BPF_B, SKF_LL_OFF + 0x3f),
11261 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11262 		},
11263 		CLASSIC,
11264 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11265 		{ {0x40, 0x82 }, },
11266 	},
11267 	{
11268 		"LD_ABS halfword",
11269 		.u.insns = {
11270 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x22),
11271 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11272 		},
11273 		CLASSIC,
11274 		{
11275 			[0x1c] = 0xaa, [0x1d] = 0x55,
11276 			[0x1e] = 0xbb, [0x1f] = 0x66,
11277 			[0x20] = 0xcc, [0x21] = 0x77,
11278 			[0x22] = 0xdd, [0x23] = 0x88,
11279 			[0x24] = 0xee, [0x25] = 0x99,
11280 			[0x26] = 0xff, [0x27] = 0xaa,
11281 		},
11282 		{ {0x40, 0xdd88 } },
11283 	},
11284 	{
11285 		"LD_ABS halfword unaligned",
11286 		.u.insns = {
11287 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x25),
11288 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11289 		},
11290 		CLASSIC,
11291 		{
11292 			[0x1c] = 0xaa, [0x1d] = 0x55,
11293 			[0x1e] = 0xbb, [0x1f] = 0x66,
11294 			[0x20] = 0xcc, [0x21] = 0x77,
11295 			[0x22] = 0xdd, [0x23] = 0x88,
11296 			[0x24] = 0xee, [0x25] = 0x99,
11297 			[0x26] = 0xff, [0x27] = 0xaa,
11298 		},
11299 		{ {0x40, 0x99ff } },
11300 	},
11301 	{
11302 		"LD_ABS halfword positive offset, all ff",
11303 		.u.insns = {
11304 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3e),
11305 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11306 		},
11307 		CLASSIC,
11308 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
11309 		{ {0x40, 0xffff } },
11310 	},
11311 	{
11312 		"LD_ABS halfword positive offset, out of bounds",
11313 		.u.insns = {
11314 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, 0x3f),
11315 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11316 		},
11317 		CLASSIC,
11318 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11319 		{ {0x3f, 0 }, },
11320 	},
11321 	{
11322 		"LD_ABS halfword negative offset, out of bounds load",
11323 		.u.insns = {
11324 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, -1),
11325 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11326 		},
11327 		CLASSIC | FLAG_EXPECTED_FAIL,
11328 		.expected_errcode = -EINVAL,
11329 	},
11330 	{
11331 		"LD_ABS halfword negative offset, in bounds",
11332 		.u.insns = {
11333 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11334 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11335 		},
11336 		CLASSIC,
11337 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11338 		{ {0x40, 0x1982 }, },
11339 	},
11340 	{
11341 		"LD_ABS halfword negative offset, out of bounds",
11342 		.u.insns = {
11343 			BPF_STMT(BPF_LD | BPF_ABS | BPF_H, SKF_LL_OFF + 0x3e),
11344 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11345 		},
11346 		CLASSIC,
11347 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11348 		{ {0x3f, 0 }, },
11349 	},
11350 	{
11351 		"LD_ABS word",
11352 		.u.insns = {
11353 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x1c),
11354 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11355 		},
11356 		CLASSIC,
11357 		{
11358 			[0x1c] = 0xaa, [0x1d] = 0x55,
11359 			[0x1e] = 0xbb, [0x1f] = 0x66,
11360 			[0x20] = 0xcc, [0x21] = 0x77,
11361 			[0x22] = 0xdd, [0x23] = 0x88,
11362 			[0x24] = 0xee, [0x25] = 0x99,
11363 			[0x26] = 0xff, [0x27] = 0xaa,
11364 		},
11365 		{ {0x40, 0xaa55bb66 } },
11366 	},
11367 	{
11368 		"LD_ABS word unaligned (addr & 3 == 2)",
11369 		.u.insns = {
11370 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x22),
11371 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11372 		},
11373 		CLASSIC,
11374 		{
11375 			[0x1c] = 0xaa, [0x1d] = 0x55,
11376 			[0x1e] = 0xbb, [0x1f] = 0x66,
11377 			[0x20] = 0xcc, [0x21] = 0x77,
11378 			[0x22] = 0xdd, [0x23] = 0x88,
11379 			[0x24] = 0xee, [0x25] = 0x99,
11380 			[0x26] = 0xff, [0x27] = 0xaa,
11381 		},
11382 		{ {0x40, 0xdd88ee99 } },
11383 	},
11384 	{
11385 		"LD_ABS word unaligned (addr & 3 == 1)",
11386 		.u.insns = {
11387 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x21),
11388 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11389 		},
11390 		CLASSIC,
11391 		{
11392 			[0x1c] = 0xaa, [0x1d] = 0x55,
11393 			[0x1e] = 0xbb, [0x1f] = 0x66,
11394 			[0x20] = 0xcc, [0x21] = 0x77,
11395 			[0x22] = 0xdd, [0x23] = 0x88,
11396 			[0x24] = 0xee, [0x25] = 0x99,
11397 			[0x26] = 0xff, [0x27] = 0xaa,
11398 		},
11399 		{ {0x40, 0x77dd88ee } },
11400 	},
11401 	{
11402 		"LD_ABS word unaligned (addr & 3 == 3)",
11403 		.u.insns = {
11404 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x23),
11405 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11406 		},
11407 		CLASSIC,
11408 		{
11409 			[0x1c] = 0xaa, [0x1d] = 0x55,
11410 			[0x1e] = 0xbb, [0x1f] = 0x66,
11411 			[0x20] = 0xcc, [0x21] = 0x77,
11412 			[0x22] = 0xdd, [0x23] = 0x88,
11413 			[0x24] = 0xee, [0x25] = 0x99,
11414 			[0x26] = 0xff, [0x27] = 0xaa,
11415 		},
11416 		{ {0x40, 0x88ee99ff } },
11417 	},
11418 	{
11419 		"LD_ABS word positive offset, all ff",
11420 		.u.insns = {
11421 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3c),
11422 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11423 		},
11424 		CLASSIC,
11425 		{ [0x3c] = 0xff, [0x3d] = 0xff,  [0x3e] = 0xff, [0x3f] = 0xff },
11426 		{ {0x40, 0xffffffff } },
11427 	},
11428 	{
11429 		"LD_ABS word positive offset, out of bounds",
11430 		.u.insns = {
11431 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, 0x3f),
11432 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11433 		},
11434 		CLASSIC,
11435 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11436 		{ {0x3f, 0 }, },
11437 	},
11438 	{
11439 		"LD_ABS word negative offset, out of bounds load",
11440 		.u.insns = {
11441 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, -1),
11442 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11443 		},
11444 		CLASSIC | FLAG_EXPECTED_FAIL,
11445 		.expected_errcode = -EINVAL,
11446 	},
11447 	{
11448 		"LD_ABS word negative offset, in bounds",
11449 		.u.insns = {
11450 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11451 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11452 		},
11453 		CLASSIC,
11454 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11455 		{ {0x40, 0x25051982 }, },
11456 	},
11457 	{
11458 		"LD_ABS word negative offset, out of bounds",
11459 		.u.insns = {
11460 			BPF_STMT(BPF_LD | BPF_ABS | BPF_W, SKF_LL_OFF + 0x3c),
11461 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11462 		},
11463 		CLASSIC,
11464 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11465 		{ {0x3f, 0 }, },
11466 	},
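	/*
	 * BPF_LDX | BPF_B | BPF_MSH loads X = 4 * (pkt[k] & 0xf), the classic
	 * BPF idiom for fetching the IP header length. With pkt[0x3c] = 0x25
	 * this gives X = 4 * 5 = 0x14, and with pkt[0x3e] = 0x19 it gives
	 * X = 4 * 9 = 0x24, matching the "test result" cases below. The
	 * "preserved A" cases check that A is left untouched, a plain
	 * negative or out-of-bounds offset makes the program return 0, and
	 * the SKF_LL_OFF-relative offset still resolves within the test data.
	 */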
11467 	{
11468 		"LDX_MSH standalone, preserved A",
11469 		.u.insns = {
11470 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11471 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11472 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11473 		},
11474 		CLASSIC,
11475 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11476 		{ {0x40, 0xffeebbaa }, },
11477 	},
11478 	{
11479 		"LDX_MSH standalone, preserved A 2",
11480 		.u.insns = {
11481 			BPF_STMT(BPF_LD | BPF_IMM, 0x175e9d63),
11482 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11483 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3d),
11484 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11485 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3f),
11486 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11487 		},
11488 		CLASSIC,
11489 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11490 		{ {0x40, 0x175e9d63 }, },
11491 	},
11492 	{
11493 		"LDX_MSH standalone, test result 1",
11494 		.u.insns = {
11495 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11496 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3c),
11497 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
11498 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11499 		},
11500 		CLASSIC,
11501 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11502 		{ {0x40, 0x14 }, },
11503 	},
11504 	{
11505 		"LDX_MSH standalone, test result 2",
11506 		.u.insns = {
11507 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11508 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x3e),
11509 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
11510 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11511 		},
11512 		CLASSIC,
11513 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11514 		{ {0x40, 0x24 }, },
11515 	},
11516 	{
11517 		"LDX_MSH standalone, negative offset",
11518 		.u.insns = {
11519 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11520 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, -1),
11521 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
11522 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11523 		},
11524 		CLASSIC,
11525 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11526 		{ {0x40, 0 }, },
11527 	},
11528 	{
11529 		"LDX_MSH standalone, negative offset 2",
11530 		.u.insns = {
11531 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11532 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, SKF_LL_OFF + 0x3e),
11533 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
11534 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11535 		},
11536 		CLASSIC,
11537 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11538 		{ {0x40, 0x24 }, },
11539 	},
11540 	{
11541 		"LDX_MSH standalone, out of bounds",
11542 		.u.insns = {
11543 			BPF_STMT(BPF_LD | BPF_IMM, 0xffeebbaa),
11544 			BPF_STMT(BPF_LDX | BPF_B | BPF_MSH, 0x40),
11545 			BPF_STMT(BPF_MISC | BPF_TXA, 0),
11546 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11547 		},
11548 		CLASSIC,
11549 		{ [0x3c] = 0x25, [0x3d] = 0x05,  [0x3e] = 0x19, [0x3f] = 0x82 },
11550 		{ {0x40, 0 }, },
11551 	},
11552 	/*
11553 	 * verify that the interpreter or JIT correctly sets A and X
11554 	 * to 0.
11555 	 */
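	/*
	 * Worked examples for the default-value tests below: with X == 0,
	 * "ADD default X" returns 0x42 + 0 = 0x42 and "MUL default X"
	 * returns 0x42 * 0 = 0. "DIV default X" and "MOD default X" divide
	 * by X == 0, which terminates the program before the trailing
	 * "ret 0x42", so the expected result is 0. With A == 0,
	 * "SUB default A" computes 0 - (-0x66) = 0x66.
	 */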
11556 	{
11557 		"ADD default X",
11558 		.u.insns = {
11559 			/*
11560 			 * A = 0x42
11561 			 * A = A + X
11562 			 * ret A
11563 			 */
11564 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11565 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_X, 0),
11566 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11567 		},
11568 		CLASSIC | FLAG_NO_DATA,
11569 		{},
11570 		{ {0x1, 0x42 } },
11571 	},
11572 	{
11573 		"ADD default A",
11574 		.u.insns = {
11575 			/*
11576 			 * A = A + 0x42
11577 			 * ret A
11578 			 */
11579 			BPF_STMT(BPF_ALU | BPF_ADD | BPF_K, 0x42),
11580 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11581 		},
11582 		CLASSIC | FLAG_NO_DATA,
11583 		{},
11584 		{ {0x1, 0x42 } },
11585 	},
11586 	{
11587 		"SUB default X",
11588 		.u.insns = {
11589 			/*
11590 			 * A = 0x66
11591 			 * A = A - X
11592 			 * ret A
11593 			 */
11594 			BPF_STMT(BPF_LD | BPF_IMM, 0x66),
11595 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_X, 0),
11596 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11597 		},
11598 		CLASSIC | FLAG_NO_DATA,
11599 		{},
11600 		{ {0x1, 0x66 } },
11601 	},
11602 	{
11603 		"SUB default A",
11604 		.u.insns = {
11605 			/*
11606 			 * A = A - -0x66
11607 			 * ret A
11608 			 */
11609 			BPF_STMT(BPF_ALU | BPF_SUB | BPF_K, -0x66),
11610 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11611 		},
11612 		CLASSIC | FLAG_NO_DATA,
11613 		{},
11614 		{ {0x1, 0x66 } },
11615 	},
11616 	{
11617 		"MUL default X",
11618 		.u.insns = {
11619 			/*
11620 			 * A = 0x42
11621 			 * A = A * X
11622 			 * ret A
11623 			 */
11624 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11625 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_X, 0),
11626 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11627 		},
11628 		CLASSIC | FLAG_NO_DATA,
11629 		{},
11630 		{ {0x1, 0x0 } },
11631 	},
11632 	{
11633 		"MUL default A",
11634 		.u.insns = {
11635 			/*
11636 			 * A = A * 0x66
11637 			 * ret A
11638 			 */
11639 			BPF_STMT(BPF_ALU | BPF_MUL | BPF_K, 0x66),
11640 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11641 		},
11642 		CLASSIC | FLAG_NO_DATA,
11643 		{},
11644 		{ {0x1, 0x0 } },
11645 	},
11646 	{
11647 		"DIV default X",
11648 		.u.insns = {
11649 			/*
11650 			 * A = 0x42
11651 			 * A = A / X ; this halts the filter execution if X is 0
11652 			 * ret 0x42
11653 			 */
11654 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11655 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_X, 0),
11656 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11657 		},
11658 		CLASSIC | FLAG_NO_DATA,
11659 		{},
11660 		{ {0x1, 0x0 } },
11661 	},
11662 	{
11663 		"DIV default A",
11664 		.u.insns = {
11665 			/*
11666 			 * A = A / 1
11667 			 * ret A
11668 			 */
11669 			BPF_STMT(BPF_ALU | BPF_DIV | BPF_K, 0x1),
11670 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11671 		},
11672 		CLASSIC | FLAG_NO_DATA,
11673 		{},
11674 		{ {0x1, 0x0 } },
11675 	},
11676 	{
11677 		"MOD default X",
11678 		.u.insns = {
11679 			/*
11680 			 * A = 0x42
11681 			 * A = A mod X ; this halts the filter execution if X is 0
11682 			 * ret 0x42
11683 			 */
11684 			BPF_STMT(BPF_LD | BPF_IMM, 0x42),
11685 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_X, 0),
11686 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11687 		},
11688 		CLASSIC | FLAG_NO_DATA,
11689 		{},
11690 		{ {0x1, 0x0 } },
11691 	},
11692 	{
11693 		"MOD default A",
11694 		.u.insns = {
11695 			/*
11696 			 * A = A mod 1
11697 			 * ret A
11698 			 */
11699 			BPF_STMT(BPF_ALU | BPF_MOD | BPF_K, 0x1),
11700 			BPF_STMT(BPF_RET | BPF_A, 0x0),
11701 		},
11702 		CLASSIC | FLAG_NO_DATA,
11703 		{},
11704 		{ {0x1, 0x0 } },
11705 	},
11706 	{
11707 		"JMP EQ default A",
11708 		.u.insns = {
11709 			/*
11710 			 * cmp A, 0x0, 0, 1
11711 			 * ret 0x42
11712 			 * ret 0x66
11713 			 */
11714 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0x0, 0, 1),
11715 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11716 			BPF_STMT(BPF_RET | BPF_K, 0x66),
11717 		},
11718 		CLASSIC | FLAG_NO_DATA,
11719 		{},
11720 		{ {0x1, 0x42 } },
11721 	},
11722 	{
11723 		"JMP EQ default X",
11724 		.u.insns = {
11725 			/*
11726 			 * A = 0x0
11727 			 * cmp A, X, 0, 1
11728 			 * ret 0x42
11729 			 * ret 0x66
11730 			 */
11731 			BPF_STMT(BPF_LD | BPF_IMM, 0x0),
11732 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_X, 0x0, 0, 1),
11733 			BPF_STMT(BPF_RET | BPF_K, 0x42),
11734 			BPF_STMT(BPF_RET | BPF_K, 0x66),
11735 		},
11736 		CLASSIC | FLAG_NO_DATA,
11737 		{},
11738 		{ {0x1, 0x42 } },
11739 	},
11740 	/* Checking interpreter vs JIT wrt sign-extended immediates. */
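	/*
	 * The 32-bit immediate of a 64-bit JMP_IMM compare is sign-extended,
	 * while results produced by ALU32 operations are zero-extended. In
	 * test 1 below, R2 ends up as 0x00000000fefb0000 but the immediate
	 * -17104896 extends to 0xfffffffffefb0000, so the JNE branch is
	 * taken and the program returns 1. Interpreter and JIT must agree on
	 * this extension behaviour.
	 */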
11741 	{
11742 		"JNE signed compare, test 1",
11743 		.u.insns_int = {
11744 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11745 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11746 			BPF_MOV64_REG(R2, R1),
11747 			BPF_ALU64_REG(BPF_AND, R2, R3),
11748 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11749 			BPF_JMP_IMM(BPF_JNE, R2, -17104896, 1),
11750 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11751 			BPF_EXIT_INSN(),
11752 		},
11753 		INTERNAL,
11754 		{ },
11755 		{ { 0, 1 } },
11756 	},
11757 	{
11758 		"JNE signed compare, test 2",
11759 		.u.insns_int = {
11760 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11761 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11762 			BPF_MOV64_REG(R2, R1),
11763 			BPF_ALU64_REG(BPF_AND, R2, R3),
11764 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11765 			BPF_JMP_IMM(BPF_JNE, R2, 0xfefb0000, 1),
11766 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11767 			BPF_EXIT_INSN(),
11768 		},
11769 		INTERNAL,
11770 		{ },
11771 		{ { 0, 1 } },
11772 	},
11773 	{
11774 		"JNE signed compare, test 3",
11775 		.u.insns_int = {
11776 			BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
11777 			BPF_ALU32_IMM(BPF_MOV, R3, 0xffff0000),
11778 			BPF_ALU32_IMM(BPF_MOV, R4, 0xfefb0000),
11779 			BPF_MOV64_REG(R2, R1),
11780 			BPF_ALU64_REG(BPF_AND, R2, R3),
11781 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11782 			BPF_JMP_REG(BPF_JNE, R2, R4, 1),
11783 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11784 			BPF_EXIT_INSN(),
11785 		},
11786 		INTERNAL,
11787 		{ },
11788 		{ { 0, 2 } },
11789 	},
11790 	{
11791 		"JNE signed compare, test 4",
11792 		.u.insns_int = {
11793 			BPF_LD_IMM64(R1, -17104896),
11794 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11795 			BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
11796 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11797 			BPF_EXIT_INSN(),
11798 		},
11799 		INTERNAL,
11800 		{ },
11801 		{ { 0, 2 } },
11802 	},
11803 	{
11804 		"JNE signed compare, test 5",
11805 		.u.insns_int = {
11806 			BPF_LD_IMM64(R1, 0xfefb0000),
11807 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11808 			BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
11809 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11810 			BPF_EXIT_INSN(),
11811 		},
11812 		INTERNAL,
11813 		{ },
11814 		{ { 0, 1 } },
11815 	},
11816 	{
11817 		"JNE signed compare, test 6",
11818 		.u.insns_int = {
11819 			BPF_LD_IMM64(R1, 0x7efb0000),
11820 			BPF_ALU32_IMM(BPF_MOV, R0, 1),
11821 			BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
11822 			BPF_ALU32_IMM(BPF_MOV, R0, 2),
11823 			BPF_EXIT_INSN(),
11824 		},
11825 		INTERNAL,
11826 		{ },
11827 		{ { 0, 2 } },
11828 	},
11829 	{
11830 		"JNE signed compare, test 7",
11831 		.u.insns = {
11832 			BPF_STMT(BPF_LD | BPF_IMM, 0xffff0000),
11833 			BPF_STMT(BPF_MISC | BPF_TAX, 0),
11834 			BPF_STMT(BPF_LD | BPF_IMM, 0xfefbbc12),
11835 			BPF_STMT(BPF_ALU | BPF_AND | BPF_X, 0),
11836 			BPF_JUMP(BPF_JMP | BPF_JEQ | BPF_K, 0xfefb0000, 1, 0),
11837 			BPF_STMT(BPF_RET | BPF_K, 1),
11838 			BPF_STMT(BPF_RET | BPF_K, 2),
11839 		},
11840 		CLASSIC | FLAG_NO_DATA,
11841 		{},
11842 		{ { 0, 2 } },
11843 	},
11844 	/* BPF_LDX_MEM with operand aliasing */
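	/*
	 * In the tests below the load uses the same register as both address
	 * base and destination, e.g. R0 = *(u8 *)(R0 - 8). A JIT that
	 * overwrites the destination before it is done using it as the base
	 * would read from the wrong location, so the expected values check
	 * that the value stored on the stack is read back intact.
	 */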
11845 	{
11846 		"LDX_MEM_B: operand register aliasing",
11847 		.u.insns_int = {
11848 			BPF_ST_MEM(BPF_B, R10, -8, 123),
11849 			BPF_MOV64_REG(R0, R10),
11850 			BPF_LDX_MEM(BPF_B, R0, R0, -8),
11851 			BPF_EXIT_INSN(),
11852 		},
11853 		INTERNAL,
11854 		{ },
11855 		{ { 0, 123 } },
11856 		.stack_depth = 8,
11857 	},
11858 	{
11859 		"LDX_MEM_H: operand register aliasing",
11860 		.u.insns_int = {
11861 			BPF_ST_MEM(BPF_H, R10, -8, 12345),
11862 			BPF_MOV64_REG(R0, R10),
11863 			BPF_LDX_MEM(BPF_H, R0, R0, -8),
11864 			BPF_EXIT_INSN(),
11865 		},
11866 		INTERNAL,
11867 		{ },
11868 		{ { 0, 12345 } },
11869 		.stack_depth = 8,
11870 	},
11871 	{
11872 		"LDX_MEM_W: operand register aliasing",
11873 		.u.insns_int = {
11874 			BPF_ST_MEM(BPF_W, R10, -8, 123456789),
11875 			BPF_MOV64_REG(R0, R10),
11876 			BPF_LDX_MEM(BPF_W, R0, R0, -8),
11877 			BPF_EXIT_INSN(),
11878 		},
11879 		INTERNAL,
11880 		{ },
11881 		{ { 0, 123456789 } },
11882 		.stack_depth = 8,
11883 	},
11884 	{
11885 		"LDX_MEM_DW: operand register aliasing",
11886 		.u.insns_int = {
11887 			BPF_LD_IMM64(R1, 0x123456789abcdefULL),
11888 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
11889 			BPF_MOV64_REG(R0, R10),
11890 			BPF_LDX_MEM(BPF_DW, R0, R0, -8),
11891 			BPF_ALU64_REG(BPF_SUB, R0, R1),
11892 			BPF_MOV64_REG(R1, R0),
11893 			BPF_ALU64_IMM(BPF_RSH, R1, 32),
11894 			BPF_ALU64_REG(BPF_OR, R0, R1),
11895 			BPF_EXIT_INSN(),
11896 		},
11897 		INTERNAL,
11898 		{ },
11899 		{ { 0, 0 } },
11900 		.stack_depth = 8,
11901 	},
11902 	/*
11903 	 * Register (non-)clobbering tests for the case where a JIT implements
11904 	 * complex ALU or ATOMIC operations via function calls. If so, the
11905 	 * function call must be transparent to the eBPF registers. The JIT
11906 	 * must therefore save and restore relevant registers across the call.
11907 	 * The following tests check that the eBPF registers retain their
11908 	 * values after such an operation. Mainly intended for complex ALU
11909 	 * and atomic operations, but we run it for all of them. You never know...
11910 	 *
11911 	 * Note that each operation should be tested twice with different
11912 	 * destinations, to check preservation for all registers.
11913 	 */
11914 #define BPF_TEST_CLOBBER_ALU(alu, op, dst, src)			\
11915 	{							\
11916 		#alu "_" #op " to " #dst ": no clobbering",	\
11917 		.u.insns_int = {				\
11918 			BPF_ALU64_IMM(BPF_MOV, R0, R0),		\
11919 			BPF_ALU64_IMM(BPF_MOV, R1, R1),		\
11920 			BPF_ALU64_IMM(BPF_MOV, R2, R2),		\
11921 			BPF_ALU64_IMM(BPF_MOV, R3, R3),		\
11922 			BPF_ALU64_IMM(BPF_MOV, R4, R4),		\
11923 			BPF_ALU64_IMM(BPF_MOV, R5, R5),		\
11924 			BPF_ALU64_IMM(BPF_MOV, R6, R6),		\
11925 			BPF_ALU64_IMM(BPF_MOV, R7, R7),		\
11926 			BPF_ALU64_IMM(BPF_MOV, R8, R8),		\
11927 			BPF_ALU64_IMM(BPF_MOV, R9, R9),		\
11928 			BPF_##alu(BPF_ ##op, dst, src),		\
11929 			BPF_ALU32_IMM(BPF_MOV, dst, dst),	\
11930 			BPF_JMP_IMM(BPF_JNE, R0, R0, 10),	\
11931 			BPF_JMP_IMM(BPF_JNE, R1, R1, 9),	\
11932 			BPF_JMP_IMM(BPF_JNE, R2, R2, 8),	\
11933 			BPF_JMP_IMM(BPF_JNE, R3, R3, 7),	\
11934 			BPF_JMP_IMM(BPF_JNE, R4, R4, 6),	\
11935 			BPF_JMP_IMM(BPF_JNE, R5, R5, 5),	\
11936 			BPF_JMP_IMM(BPF_JNE, R6, R6, 4),	\
11937 			BPF_JMP_IMM(BPF_JNE, R7, R7, 3),	\
11938 			BPF_JMP_IMM(BPF_JNE, R8, R8, 2),	\
11939 			BPF_JMP_IMM(BPF_JNE, R9, R9, 1),	\
11940 			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
11941 			BPF_EXIT_INSN(),			\
11942 		},						\
11943 		INTERNAL,					\
11944 		{ },						\
11945 		{ { 0, 1 } }					\
11946 	}
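	/*
	 * For reference, an instantiation such as
	 * BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1) from the list below
	 * yields a test named "ALU64_REG_DIV to R8: no clobbering" that seeds
	 * R0..R9 with their own register numbers, runs
	 * BPF_ALU64_REG(BPF_DIV, R8, R1), rewrites R8 with its known value
	 * and then checks every register against its seed; the final
	 * "R0 = 1; exit" is only reached if every check passes.
	 */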
11947 	/* ALU64 operations, register clobbering */
11948 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
11949 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R9, 123456789),
11950 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
11951 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R9, 123456789),
11952 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
11953 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R9, 123456789),
11954 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
11955 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R9, 12),
11956 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
11957 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R9, 12),
11958 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
11959 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R9, 12),
11960 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
11961 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R9, 123456789),
11962 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
11963 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R9, 123456789),
11964 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
11965 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R9, 123456789),
11966 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
11967 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R9, 123456789),
11968 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
11969 	BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R9, 123456789),
11970 	/* ALU32 immediate operations, register clobbering */
11971 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
11972 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R9, 123456789),
11973 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
11974 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R9, 123456789),
11975 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
11976 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R9, 123456789),
11977 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
11978 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R9, 12),
11979 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
11980 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R9, 12),
11981 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
11982 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R9, 12),
11983 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
11984 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R9, 123456789),
11985 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
11986 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R9, 123456789),
11987 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
11988 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R9, 123456789),
11989 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
11990 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R9, 123456789),
11991 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
11992 	BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R9, 123456789),
11993 	/* ALU64 register operations, register clobbering */
11994 	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
11995 	BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
11996 	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
11997 	BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
11998 	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
11999 	BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
12000 	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
12001 	BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
12002 	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
12003 	BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
12004 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
12005 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
12006 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
12007 	BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
12008 	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
12009 	BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
12010 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
12011 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
12012 	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
12013 	BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
12014 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
12015 	BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
12016 	/* ALU32 register operations, register clobbering */
12017 	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
12018 	BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
12019 	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
12020 	BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
12021 	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
12022 	BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
12023 	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
12024 	BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
12025 	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
12026 	BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
12027 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
12028 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
12029 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
12030 	BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
12031 	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
12032 	BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
12033 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
12034 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
12035 	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
12036 	BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
12037 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
12038 	BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
12039 #undef BPF_TEST_CLOBBER_ALU
12040 #define BPF_TEST_CLOBBER_ATOMIC(width, op)			\
12041 	{							\
12042 		"Atomic_" #width " " #op ": no clobbering",	\
12043 		.u.insns_int = {				\
12044 			BPF_ALU64_IMM(BPF_MOV, R0, 0),		\
12045 			BPF_ALU64_IMM(BPF_MOV, R1, 1),		\
12046 			BPF_ALU64_IMM(BPF_MOV, R2, 2),		\
12047 			BPF_ALU64_IMM(BPF_MOV, R3, 3),		\
12048 			BPF_ALU64_IMM(BPF_MOV, R4, 4),		\
12049 			BPF_ALU64_IMM(BPF_MOV, R5, 5),		\
12050 			BPF_ALU64_IMM(BPF_MOV, R6, 6),		\
12051 			BPF_ALU64_IMM(BPF_MOV, R7, 7),		\
12052 			BPF_ALU64_IMM(BPF_MOV, R8, 8),		\
12053 			BPF_ALU64_IMM(BPF_MOV, R9, 9),		\
12054 			BPF_ST_MEM(width, R10, -8,		\
12055 				   (op) == BPF_CMPXCHG ? 0 :	\
12056 				   (op) & BPF_FETCH ? 1 : 0),	\
12057 			BPF_ATOMIC_OP(width, op, R10, R1, -8),	\
12058 			BPF_JMP_IMM(BPF_JNE, R0, 0, 10),	\
12059 			BPF_JMP_IMM(BPF_JNE, R1, 1, 9),		\
12060 			BPF_JMP_IMM(BPF_JNE, R2, 2, 8),		\
12061 			BPF_JMP_IMM(BPF_JNE, R3, 3, 7),		\
12062 			BPF_JMP_IMM(BPF_JNE, R4, 4, 6),		\
12063 			BPF_JMP_IMM(BPF_JNE, R5, 5, 5),		\
12064 			BPF_JMP_IMM(BPF_JNE, R6, 6, 4),		\
12065 			BPF_JMP_IMM(BPF_JNE, R7, 7, 3),		\
12066 			BPF_JMP_IMM(BPF_JNE, R8, 8, 2),		\
12067 			BPF_JMP_IMM(BPF_JNE, R9, 9, 1),		\
12068 			BPF_ALU64_IMM(BPF_MOV, R0, 1),		\
12069 			BPF_EXIT_INSN(),			\
12070 		},						\
12071 		INTERNAL,					\
12072 		{ },						\
12073 		{ { 0, 1 } },					\
12074 		.stack_depth = 8,				\
12075 	}
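	/*
	 * The BPF_ST_MEM() preset above is chosen so that the register which
	 * receives the old memory value keeps the value it already holds:
	 * fetch-style operations (including XCHG) load the old value into the
	 * source register R1, seeded with 1, so memory is preset to 1, while
	 * CMPXCHG compares R0 against memory and loads the old value into R0,
	 * seeded with 0, so memory is preset to 0. The register checks that
	 * follow therefore remain valid for every operation.
	 */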
12076 	/* 64-bit atomic operations, register clobbering */
12077 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD),
12078 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND),
12079 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR),
12080 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR),
12081 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_ADD | BPF_FETCH),
12082 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_AND | BPF_FETCH),
12083 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_OR | BPF_FETCH),
12084 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XOR | BPF_FETCH),
12085 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_XCHG),
12086 	BPF_TEST_CLOBBER_ATOMIC(BPF_DW, BPF_CMPXCHG),
12087 	/* 32-bit atomic operations, register clobbering */
12088 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD),
12089 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND),
12090 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR),
12091 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR),
12092 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_ADD | BPF_FETCH),
12093 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_AND | BPF_FETCH),
12094 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_OR | BPF_FETCH),
12095 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XOR | BPF_FETCH),
12096 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_XCHG),
12097 	BPF_TEST_CLOBBER_ATOMIC(BPF_W, BPF_CMPXCHG),
12098 #undef BPF_TEST_CLOBBER_ATOMIC
12099 	/* Checking that ALU32 src is not zero extended in place */
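	/*
	 * An ALU32 operation must zero-extend only its destination. A JIT
	 * that zero-extends the 32-bit source operand in place would corrupt
	 * the upper half of the source register. The macro below copies R1
	 * to R0, runs the ALU32 operation with R1 as source, then computes
	 * R0 - R1 and folds the high 32 bits into the low 32 bits, so any
	 * change to R1 shows up as a nonzero result.
	 */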
12100 #define BPF_ALU32_SRC_ZEXT(op)					\
12101 	{							\
12102 		"ALU32_" #op "_X: src preserved in zext",	\
12103 		.u.insns_int = {				\
12104 			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
12105 			BPF_LD_IMM64(R2, 0xfedcba9876543210ULL),\
12106 			BPF_ALU64_REG(BPF_MOV, R0, R1),		\
12107 			BPF_ALU32_REG(BPF_##op, R2, R1),	\
12108 			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
12109 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
12110 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
12111 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
12112 			BPF_EXIT_INSN(),			\
12113 		},						\
12114 		INTERNAL,					\
12115 		{ },						\
12116 		{ { 0, 0 } },					\
12117 	}
12118 	BPF_ALU32_SRC_ZEXT(MOV),
12119 	BPF_ALU32_SRC_ZEXT(AND),
12120 	BPF_ALU32_SRC_ZEXT(OR),
12121 	BPF_ALU32_SRC_ZEXT(XOR),
12122 	BPF_ALU32_SRC_ZEXT(ADD),
12123 	BPF_ALU32_SRC_ZEXT(SUB),
12124 	BPF_ALU32_SRC_ZEXT(MUL),
12125 	BPF_ALU32_SRC_ZEXT(DIV),
12126 	BPF_ALU32_SRC_ZEXT(MOD),
12127 #undef BPF_ALU32_SRC_ZEXT
12128 	/* Checking that ATOMIC32 src is not zero extended in place */
12129 #define BPF_ATOMIC32_SRC_ZEXT(op)					\
12130 	{								\
12131 		"ATOMIC_W_" #op ": src preserved in zext",		\
12132 		.u.insns_int = {					\
12133 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),	\
12134 			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
12135 			BPF_ST_MEM(BPF_W, R10, -4, 0),			\
12136 			BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4),	\
12137 			BPF_ALU64_REG(BPF_SUB, R0, R1),			\
12138 			BPF_ALU64_REG(BPF_MOV, R1, R0),			\
12139 			BPF_ALU64_IMM(BPF_RSH, R1, 32),			\
12140 			BPF_ALU64_REG(BPF_OR, R0, R1),			\
12141 			BPF_EXIT_INSN(),				\
12142 		},							\
12143 		INTERNAL,						\
12144 		{ },							\
12145 		{ { 0, 0 } },						\
12146 		.stack_depth = 8,					\
12147 	}
12148 	BPF_ATOMIC32_SRC_ZEXT(ADD),
12149 	BPF_ATOMIC32_SRC_ZEXT(AND),
12150 	BPF_ATOMIC32_SRC_ZEXT(OR),
12151 	BPF_ATOMIC32_SRC_ZEXT(XOR),
12152 #undef BPF_ATOMIC32_SRC_ZEXT
12153 	/* Checking that CMPXCHG32 src is not zero extended in place */
12154 	{
12155 		"ATOMIC_W_CMPXCHG: src preserved in zext",
12156 		.u.insns_int = {
12157 			BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
12158 			BPF_ALU64_REG(BPF_MOV, R2, R1),
12159 			BPF_ALU64_REG(BPF_MOV, R0, 0),
12160 			BPF_ST_MEM(BPF_W, R10, -4, 0),
12161 			BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
12162 			BPF_ALU64_REG(BPF_SUB, R1, R2),
12163 			BPF_ALU64_REG(BPF_MOV, R2, R1),
12164 			BPF_ALU64_IMM(BPF_RSH, R2, 32),
12165 			BPF_ALU64_REG(BPF_OR, R1, R2),
12166 			BPF_ALU64_REG(BPF_MOV, R0, R1),
12167 			BPF_EXIT_INSN(),
12168 		},
12169 		INTERNAL,
12170 		{ },
12171 		{ { 0, 0 } },
12172 		.stack_depth = 8,
12173 	},
12174 	/* Checking that JMP32 immediate src is not zero extended in place */
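	/*
	 * JMP32 compares only the low 32 bits of its operands and must not
	 * modify the registers themselves. The tests below save a copy of
	 * the operand, execute the 32-bit conditional jump (both outcomes
	 * fall through past the nop) and then verify that the operand is
	 * bit-for-bit unchanged, using the same subtract-and-fold check as
	 * above.
	 */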
12175 #define BPF_JMP32_IMM_ZEXT(op)					\
12176 	{							\
12177 		"JMP32_" #op "_K: operand preserved in zext",	\
12178 		.u.insns_int = {				\
12179 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12180 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
12181 			BPF_JMP32_IMM(BPF_##op, R0, 1234, 1),	\
12182 			BPF_JMP_A(0), /* Nop */			\
12183 			BPF_ALU64_REG(BPF_SUB, R0, R1),		\
12184 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
12185 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
12186 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
12187 			BPF_EXIT_INSN(),			\
12188 		},						\
12189 		INTERNAL,					\
12190 		{ },						\
12191 		{ { 0, 0 } },					\
12192 	}
12193 	BPF_JMP32_IMM_ZEXT(JEQ),
12194 	BPF_JMP32_IMM_ZEXT(JNE),
12195 	BPF_JMP32_IMM_ZEXT(JSET),
12196 	BPF_JMP32_IMM_ZEXT(JGT),
12197 	BPF_JMP32_IMM_ZEXT(JGE),
12198 	BPF_JMP32_IMM_ZEXT(JLT),
12199 	BPF_JMP32_IMM_ZEXT(JLE),
12200 	BPF_JMP32_IMM_ZEXT(JSGT),
12201 	BPF_JMP32_IMM_ZEXT(JSGE),
12202 	BPF_JMP32_IMM_ZEXT(JSLT),
12203 	BPF_JMP32_IMM_ZEXT(JSLE),
12204 #undef BPF_JMP32_IMM_ZEXT
12205 	/* Checking that JMP32 dst & src are not zero extended in place */
12206 #define BPF_JMP32_REG_ZEXT(op)					\
12207 	{							\
12208 		"JMP32_" #op "_X: operands preserved in zext",	\
12209 		.u.insns_int = {				\
12210 			BPF_LD_IMM64(R0, 0x0123456789acbdefULL),\
12211 			BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
12212 			BPF_ALU64_REG(BPF_MOV, R2, R0),		\
12213 			BPF_ALU64_REG(BPF_MOV, R3, R1),		\
12214 			BPF_JMP32_REG(BPF_##op, R0, R1, 1),	\
12215 			BPF_JMP_A(0), /* Nop */			\
12216 			BPF_ALU64_REG(BPF_SUB, R0, R2),		\
12217 			BPF_ALU64_REG(BPF_SUB, R1, R3),		\
12218 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
12219 			BPF_ALU64_REG(BPF_MOV, R1, R0),		\
12220 			BPF_ALU64_IMM(BPF_RSH, R1, 32),		\
12221 			BPF_ALU64_REG(BPF_OR, R0, R1),		\
12222 			BPF_EXIT_INSN(),			\
12223 		},						\
12224 		INTERNAL,					\
12225 		{ },						\
12226 		{ { 0, 0 } },					\
12227 	}
12228 	BPF_JMP32_REG_ZEXT(JEQ),
12229 	BPF_JMP32_REG_ZEXT(JNE),
12230 	BPF_JMP32_REG_ZEXT(JSET),
12231 	BPF_JMP32_REG_ZEXT(JGT),
12232 	BPF_JMP32_REG_ZEXT(JGE),
12233 	BPF_JMP32_REG_ZEXT(JLT),
12234 	BPF_JMP32_REG_ZEXT(JLE),
12235 	BPF_JMP32_REG_ZEXT(JSGT),
12236 	BPF_JMP32_REG_ZEXT(JSGE),
12237 	BPF_JMP32_REG_ZEXT(JSLT),
12238 	BPF_JMP32_REG_ZEXT(JSLE),
12239 #undef BPF_JMP32_REG_ZEXT
12240 	/* ALU64 K register combinations */
12241 	{
12242 		"ALU64_MOV_K: registers",
12243 		{ },
12244 		INTERNAL,
12245 		{ },
12246 		{ { 0, 1 } },
12247 		.fill_helper = bpf_fill_alu64_mov_imm_regs,
12248 	},
12249 	{
12250 		"ALU64_AND_K: registers",
12251 		{ },
12252 		INTERNAL,
12253 		{ },
12254 		{ { 0, 1 } },
12255 		.fill_helper = bpf_fill_alu64_and_imm_regs,
12256 	},
12257 	{
12258 		"ALU64_OR_K: registers",
12259 		{ },
12260 		INTERNAL,
12261 		{ },
12262 		{ { 0, 1 } },
12263 		.fill_helper = bpf_fill_alu64_or_imm_regs,
12264 	},
12265 	{
12266 		"ALU64_XOR_K: registers",
12267 		{ },
12268 		INTERNAL,
12269 		{ },
12270 		{ { 0, 1 } },
12271 		.fill_helper = bpf_fill_alu64_xor_imm_regs,
12272 	},
12273 	{
12274 		"ALU64_LSH_K: registers",
12275 		{ },
12276 		INTERNAL,
12277 		{ },
12278 		{ { 0, 1 } },
12279 		.fill_helper = bpf_fill_alu64_lsh_imm_regs,
12280 	},
12281 	{
12282 		"ALU64_RSH_K: registers",
12283 		{ },
12284 		INTERNAL,
12285 		{ },
12286 		{ { 0, 1 } },
12287 		.fill_helper = bpf_fill_alu64_rsh_imm_regs,
12288 	},
12289 	{
12290 		"ALU64_ARSH_K: registers",
12291 		{ },
12292 		INTERNAL,
12293 		{ },
12294 		{ { 0, 1 } },
12295 		.fill_helper = bpf_fill_alu64_arsh_imm_regs,
12296 	},
12297 	{
12298 		"ALU64_ADD_K: registers",
12299 		{ },
12300 		INTERNAL,
12301 		{ },
12302 		{ { 0, 1 } },
12303 		.fill_helper = bpf_fill_alu64_add_imm_regs,
12304 	},
12305 	{
12306 		"ALU64_SUB_K: registers",
12307 		{ },
12308 		INTERNAL,
12309 		{ },
12310 		{ { 0, 1 } },
12311 		.fill_helper = bpf_fill_alu64_sub_imm_regs,
12312 	},
12313 	{
12314 		"ALU64_MUL_K: registers",
12315 		{ },
12316 		INTERNAL,
12317 		{ },
12318 		{ { 0, 1 } },
12319 		.fill_helper = bpf_fill_alu64_mul_imm_regs,
12320 	},
12321 	{
12322 		"ALU64_DIV_K: registers",
12323 		{ },
12324 		INTERNAL,
12325 		{ },
12326 		{ { 0, 1 } },
12327 		.fill_helper = bpf_fill_alu64_div_imm_regs,
12328 	},
12329 	{
12330 		"ALU64_MOD_K: registers",
12331 		{ },
12332 		INTERNAL,
12333 		{ },
12334 		{ { 0, 1 } },
12335 		.fill_helper = bpf_fill_alu64_mod_imm_regs,
12336 	},
12337 	/* ALU32 K register combinations */
12338 	{
12339 		"ALU32_MOV_K: registers",
12340 		{ },
12341 		INTERNAL,
12342 		{ },
12343 		{ { 0, 1 } },
12344 		.fill_helper = bpf_fill_alu32_mov_imm_regs,
12345 	},
12346 	{
12347 		"ALU32_AND_K: registers",
12348 		{ },
12349 		INTERNAL,
12350 		{ },
12351 		{ { 0, 1 } },
12352 		.fill_helper = bpf_fill_alu32_and_imm_regs,
12353 	},
12354 	{
12355 		"ALU32_OR_K: registers",
12356 		{ },
12357 		INTERNAL,
12358 		{ },
12359 		{ { 0, 1 } },
12360 		.fill_helper = bpf_fill_alu32_or_imm_regs,
12361 	},
12362 	{
12363 		"ALU32_XOR_K: registers",
12364 		{ },
12365 		INTERNAL,
12366 		{ },
12367 		{ { 0, 1 } },
12368 		.fill_helper = bpf_fill_alu32_xor_imm_regs,
12369 	},
12370 	{
12371 		"ALU32_LSH_K: registers",
12372 		{ },
12373 		INTERNAL,
12374 		{ },
12375 		{ { 0, 1 } },
12376 		.fill_helper = bpf_fill_alu32_lsh_imm_regs,
12377 	},
12378 	{
12379 		"ALU32_RSH_K: registers",
12380 		{ },
12381 		INTERNAL,
12382 		{ },
12383 		{ { 0, 1 } },
12384 		.fill_helper = bpf_fill_alu32_rsh_imm_regs,
12385 	},
12386 	{
12387 		"ALU32_ARSH_K: registers",
12388 		{ },
12389 		INTERNAL,
12390 		{ },
12391 		{ { 0, 1 } },
12392 		.fill_helper = bpf_fill_alu32_arsh_imm_regs,
12393 	},
12394 	{
12395 		"ALU32_ADD_K: registers",
12396 		{ },
12397 		INTERNAL,
12398 		{ },
12399 		{ { 0, 1 } },
12400 		.fill_helper = bpf_fill_alu32_add_imm_regs,
12401 	},
12402 	{
12403 		"ALU32_SUB_K: registers",
12404 		{ },
12405 		INTERNAL,
12406 		{ },
12407 		{ { 0, 1 } },
12408 		.fill_helper = bpf_fill_alu32_sub_imm_regs,
12409 	},
12410 	{
12411 		"ALU32_MUL_K: registers",
12412 		{ },
12413 		INTERNAL,
12414 		{ },
12415 		{ { 0, 1 } },
12416 		.fill_helper = bpf_fill_alu32_mul_imm_regs,
12417 	},
12418 	{
12419 		"ALU32_DIV_K: registers",
12420 		{ },
12421 		INTERNAL,
12422 		{ },
12423 		{ { 0, 1 } },
12424 		.fill_helper = bpf_fill_alu32_div_imm_regs,
12425 	},
12426 	{
12427 		"ALU32_MOD_K: registers",
12428 		{ },
12429 		INTERNAL,
12430 		{ },
12431 		{ { 0, 1 } },
12432 		.fill_helper = bpf_fill_alu32_mod_imm_regs,
12433 	},
12434 	/* ALU64 X register combinations */
12435 	{
12436 		"ALU64_MOV_X: register combinations",
12437 		{ },
12438 		INTERNAL,
12439 		{ },
12440 		{ { 0, 1 } },
12441 		.fill_helper = bpf_fill_alu64_mov_reg_pairs,
12442 	},
12443 	{
12444 		"ALU64_AND_X: register combinations",
12445 		{ },
12446 		INTERNAL,
12447 		{ },
12448 		{ { 0, 1 } },
12449 		.fill_helper = bpf_fill_alu64_and_reg_pairs,
12450 	},
12451 	{
12452 		"ALU64_OR_X: register combinations",
12453 		{ },
12454 		INTERNAL,
12455 		{ },
12456 		{ { 0, 1 } },
12457 		.fill_helper = bpf_fill_alu64_or_reg_pairs,
12458 	},
12459 	{
12460 		"ALU64_XOR_X: register combinations",
12461 		{ },
12462 		INTERNAL,
12463 		{ },
12464 		{ { 0, 1 } },
12465 		.fill_helper = bpf_fill_alu64_xor_reg_pairs,
12466 	},
12467 	{
12468 		"ALU64_LSH_X: register combinations",
12469 		{ },
12470 		INTERNAL,
12471 		{ },
12472 		{ { 0, 1 } },
12473 		.fill_helper = bpf_fill_alu64_lsh_reg_pairs,
12474 	},
12475 	{
12476 		"ALU64_RSH_X: register combinations",
12477 		{ },
12478 		INTERNAL,
12479 		{ },
12480 		{ { 0, 1 } },
12481 		.fill_helper = bpf_fill_alu64_rsh_reg_pairs,
12482 	},
12483 	{
12484 		"ALU64_ARSH_X: register combinations",
12485 		{ },
12486 		INTERNAL,
12487 		{ },
12488 		{ { 0, 1 } },
12489 		.fill_helper = bpf_fill_alu64_arsh_reg_pairs,
12490 	},
12491 	{
12492 		"ALU64_ADD_X: register combinations",
12493 		{ },
12494 		INTERNAL,
12495 		{ },
12496 		{ { 0, 1 } },
12497 		.fill_helper = bpf_fill_alu64_add_reg_pairs,
12498 	},
12499 	{
12500 		"ALU64_SUB_X: register combinations",
12501 		{ },
12502 		INTERNAL,
12503 		{ },
12504 		{ { 0, 1 } },
12505 		.fill_helper = bpf_fill_alu64_sub_reg_pairs,
12506 	},
12507 	{
12508 		"ALU64_MUL_X: register combinations",
12509 		{ },
12510 		INTERNAL,
12511 		{ },
12512 		{ { 0, 1 } },
12513 		.fill_helper = bpf_fill_alu64_mul_reg_pairs,
12514 	},
12515 	{
12516 		"ALU64_DIV_X: register combinations",
12517 		{ },
12518 		INTERNAL,
12519 		{ },
12520 		{ { 0, 1 } },
12521 		.fill_helper = bpf_fill_alu64_div_reg_pairs,
12522 	},
12523 	{
12524 		"ALU64_MOD_X: register combinations",
12525 		{ },
12526 		INTERNAL,
12527 		{ },
12528 		{ { 0, 1 } },
12529 		.fill_helper = bpf_fill_alu64_mod_reg_pairs,
12530 	},
12531 	/* ALU32 X register combinations */
12532 	{
12533 		"ALU32_MOV_X: register combinations",
12534 		{ },
12535 		INTERNAL,
12536 		{ },
12537 		{ { 0, 1 } },
12538 		.fill_helper = bpf_fill_alu32_mov_reg_pairs,
12539 	},
12540 	{
12541 		"ALU32_AND_X: register combinations",
12542 		{ },
12543 		INTERNAL,
12544 		{ },
12545 		{ { 0, 1 } },
12546 		.fill_helper = bpf_fill_alu32_and_reg_pairs,
12547 	},
12548 	{
12549 		"ALU32_OR_X: register combinations",
12550 		{ },
12551 		INTERNAL,
12552 		{ },
12553 		{ { 0, 1 } },
12554 		.fill_helper = bpf_fill_alu32_or_reg_pairs,
12555 	},
12556 	{
12557 		"ALU32_XOR_X: register combinations",
12558 		{ },
12559 		INTERNAL,
12560 		{ },
12561 		{ { 0, 1 } },
12562 		.fill_helper = bpf_fill_alu32_xor_reg_pairs,
12563 	},
12564 	{
12565 		"ALU32_LSH_X: register combinations",
12566 		{ },
12567 		INTERNAL,
12568 		{ },
12569 		{ { 0, 1 } },
12570 		.fill_helper = bpf_fill_alu32_lsh_reg_pairs,
12571 	},
12572 	{
12573 		"ALU32_RSH_X: register combinations",
12574 		{ },
12575 		INTERNAL,
12576 		{ },
12577 		{ { 0, 1 } },
12578 		.fill_helper = bpf_fill_alu32_rsh_reg_pairs,
12579 	},
12580 	{
12581 		"ALU32_ARSH_X: register combinations",
12582 		{ },
12583 		INTERNAL,
12584 		{ },
12585 		{ { 0, 1 } },
12586 		.fill_helper = bpf_fill_alu32_arsh_reg_pairs,
12587 	},
12588 	{
12589 		"ALU32_ADD_X: register combinations",
12590 		{ },
12591 		INTERNAL,
12592 		{ },
12593 		{ { 0, 1 } },
12594 		.fill_helper = bpf_fill_alu32_add_reg_pairs,
12595 	},
12596 	{
12597 		"ALU32_SUB_X: register combinations",
12598 		{ },
12599 		INTERNAL,
12600 		{ },
12601 		{ { 0, 1 } },
12602 		.fill_helper = bpf_fill_alu32_sub_reg_pairs,
12603 	},
12604 	{
12605 		"ALU32_MUL_X: register combinations",
12606 		{ },
12607 		INTERNAL,
12608 		{ },
12609 		{ { 0, 1 } },
12610 		.fill_helper = bpf_fill_alu32_mul_reg_pairs,
12611 	},
12612 	{
12613 		"ALU32_DIV_X: register combinations",
12614 		{ },
12615 		INTERNAL,
12616 		{ },
12617 		{ { 0, 1 } },
12618 		.fill_helper = bpf_fill_alu32_div_reg_pairs,
12619 	},
12620 	{
12621 		"ALU32_MOD_X: register combinations",
12622 		{ },
12623 		INTERNAL,
12624 		{ },
12625 		{ { 0, 1 } },
12626 		.fill_helper = bpf_fill_alu32_mod_reg_pairs,
12627 	},
12628 	/* Exhaustive test of ALU64 shift operations */
12629 	{
12630 		"ALU64_LSH_K: all shift values",
12631 		{ },
12632 		INTERNAL | FLAG_NO_DATA,
12633 		{ },
12634 		{ { 0, 1 } },
12635 		.fill_helper = bpf_fill_alu64_lsh_imm,
12636 	},
12637 	{
12638 		"ALU64_RSH_K: all shift values",
12639 		{ },
12640 		INTERNAL | FLAG_NO_DATA,
12641 		{ },
12642 		{ { 0, 1 } },
12643 		.fill_helper = bpf_fill_alu64_rsh_imm,
12644 	},
12645 	{
12646 		"ALU64_ARSH_K: all shift values",
12647 		{ },
12648 		INTERNAL | FLAG_NO_DATA,
12649 		{ },
12650 		{ { 0, 1 } },
12651 		.fill_helper = bpf_fill_alu64_arsh_imm,
12652 	},
12653 	{
12654 		"ALU64_LSH_X: all shift values",
12655 		{ },
12656 		INTERNAL | FLAG_NO_DATA,
12657 		{ },
12658 		{ { 0, 1 } },
12659 		.fill_helper = bpf_fill_alu64_lsh_reg,
12660 	},
12661 	{
12662 		"ALU64_RSH_X: all shift values",
12663 		{ },
12664 		INTERNAL | FLAG_NO_DATA,
12665 		{ },
12666 		{ { 0, 1 } },
12667 		.fill_helper = bpf_fill_alu64_rsh_reg,
12668 	},
12669 	{
12670 		"ALU64_ARSH_X: all shift values",
12671 		{ },
12672 		INTERNAL | FLAG_NO_DATA,
12673 		{ },
12674 		{ { 0, 1 } },
12675 		.fill_helper = bpf_fill_alu64_arsh_reg,
12676 	},
12677 	/* Exhaustive test of ALU32 shift operations */
12678 	{
12679 		"ALU32_LSH_K: all shift values",
12680 		{ },
12681 		INTERNAL | FLAG_NO_DATA,
12682 		{ },
12683 		{ { 0, 1 } },
12684 		.fill_helper = bpf_fill_alu32_lsh_imm,
12685 	},
12686 	{
12687 		"ALU32_RSH_K: all shift values",
12688 		{ },
12689 		INTERNAL | FLAG_NO_DATA,
12690 		{ },
12691 		{ { 0, 1 } },
12692 		.fill_helper = bpf_fill_alu32_rsh_imm,
12693 	},
12694 	{
12695 		"ALU32_ARSH_K: all shift values",
12696 		{ },
12697 		INTERNAL | FLAG_NO_DATA,
12698 		{ },
12699 		{ { 0, 1 } },
12700 		.fill_helper = bpf_fill_alu32_arsh_imm,
12701 	},
12702 	{
12703 		"ALU32_LSH_X: all shift values",
12704 		{ },
12705 		INTERNAL | FLAG_NO_DATA,
12706 		{ },
12707 		{ { 0, 1 } },
12708 		.fill_helper = bpf_fill_alu32_lsh_reg,
12709 	},
12710 	{
12711 		"ALU32_RSH_X: all shift values",
12712 		{ },
12713 		INTERNAL | FLAG_NO_DATA,
12714 		{ },
12715 		{ { 0, 1 } },
12716 		.fill_helper = bpf_fill_alu32_rsh_reg,
12717 	},
12718 	{
12719 		"ALU32_ARSH_X: all shift values",
12720 		{ },
12721 		INTERNAL | FLAG_NO_DATA,
12722 		{ },
12723 		{ { 0, 1 } },
12724 		.fill_helper = bpf_fill_alu32_arsh_reg,
12725 	},
12726 	/*
12727 	 * Exhaustive test of ALU64 shift operations when
12728 	 * source and destination register are the same.
12729 	 */
12730 	{
12731 		"ALU64_LSH_X: all shift values with the same register",
12732 		{ },
12733 		INTERNAL | FLAG_NO_DATA,
12734 		{ },
12735 		{ { 0, 1 } },
12736 		.fill_helper = bpf_fill_alu64_lsh_same_reg,
12737 	},
12738 	{
12739 		"ALU64_RSH_X: all shift values with the same register",
12740 		{ },
12741 		INTERNAL | FLAG_NO_DATA,
12742 		{ },
12743 		{ { 0, 1 } },
12744 		.fill_helper = bpf_fill_alu64_rsh_same_reg,
12745 	},
12746 	{
12747 		"ALU64_ARSH_X: all shift values with the same register",
12748 		{ },
12749 		INTERNAL | FLAG_NO_DATA,
12750 		{ },
12751 		{ { 0, 1 } },
12752 		.fill_helper = bpf_fill_alu64_arsh_same_reg,
12753 	},
12754 	/*
12755 	 * Exhaustive test of ALU32 shift operations when
12756 	 * source and destination register are the same.
12757 	 */
12758 	{
12759 		"ALU32_LSH_X: all shift values with the same register",
12760 		{ },
12761 		INTERNAL | FLAG_NO_DATA,
12762 		{ },
12763 		{ { 0, 1 } },
12764 		.fill_helper = bpf_fill_alu32_lsh_same_reg,
12765 	},
12766 	{
12767 		"ALU32_RSH_X: all shift values with the same register",
12768 		{ },
12769 		INTERNAL | FLAG_NO_DATA,
12770 		{ },
12771 		{ { 0, 1 } },
12772 		.fill_helper = bpf_fill_alu32_rsh_same_reg,
12773 	},
12774 	{
12775 		"ALU32_ARSH_X: all shift values with the same register",
12776 		{ },
12777 		INTERNAL | FLAG_NO_DATA,
12778 		{ },
12779 		{ { 0, 1 } },
12780 		.fill_helper = bpf_fill_alu32_arsh_same_reg,
12781 	},
12782 	/* ALU64 immediate magnitudes */
12783 	{
12784 		"ALU64_MOV_K: all immediate value magnitudes",
12785 		{ },
12786 		INTERNAL | FLAG_NO_DATA,
12787 		{ },
12788 		{ { 0, 1 } },
12789 		.fill_helper = bpf_fill_alu64_mov_imm,
12790 		.nr_testruns = NR_PATTERN_RUNS,
12791 	},
12792 	{
12793 		"ALU64_AND_K: all immediate value magnitudes",
12794 		{ },
12795 		INTERNAL | FLAG_NO_DATA,
12796 		{ },
12797 		{ { 0, 1 } },
12798 		.fill_helper = bpf_fill_alu64_and_imm,
12799 		.nr_testruns = NR_PATTERN_RUNS,
12800 	},
12801 	{
12802 		"ALU64_OR_K: all immediate value magnitudes",
12803 		{ },
12804 		INTERNAL | FLAG_NO_DATA,
12805 		{ },
12806 		{ { 0, 1 } },
12807 		.fill_helper = bpf_fill_alu64_or_imm,
12808 		.nr_testruns = NR_PATTERN_RUNS,
12809 	},
12810 	{
12811 		"ALU64_XOR_K: all immediate value magnitudes",
12812 		{ },
12813 		INTERNAL | FLAG_NO_DATA,
12814 		{ },
12815 		{ { 0, 1 } },
12816 		.fill_helper = bpf_fill_alu64_xor_imm,
12817 		.nr_testruns = NR_PATTERN_RUNS,
12818 	},
12819 	{
12820 		"ALU64_ADD_K: all immediate value magnitudes",
12821 		{ },
12822 		INTERNAL | FLAG_NO_DATA,
12823 		{ },
12824 		{ { 0, 1 } },
12825 		.fill_helper = bpf_fill_alu64_add_imm,
12826 		.nr_testruns = NR_PATTERN_RUNS,
12827 	},
12828 	{
12829 		"ALU64_SUB_K: all immediate value magnitudes",
12830 		{ },
12831 		INTERNAL | FLAG_NO_DATA,
12832 		{ },
12833 		{ { 0, 1 } },
12834 		.fill_helper = bpf_fill_alu64_sub_imm,
12835 		.nr_testruns = NR_PATTERN_RUNS,
12836 	},
12837 	{
12838 		"ALU64_MUL_K: all immediate value magnitudes",
12839 		{ },
12840 		INTERNAL | FLAG_NO_DATA,
12841 		{ },
12842 		{ { 0, 1 } },
12843 		.fill_helper = bpf_fill_alu64_mul_imm,
12844 		.nr_testruns = NR_PATTERN_RUNS,
12845 	},
12846 	{
12847 		"ALU64_DIV_K: all immediate value magnitudes",
12848 		{ },
12849 		INTERNAL | FLAG_NO_DATA,
12850 		{ },
12851 		{ { 0, 1 } },
12852 		.fill_helper = bpf_fill_alu64_div_imm,
12853 		.nr_testruns = NR_PATTERN_RUNS,
12854 	},
12855 	{
12856 		"ALU64_MOD_K: all immediate value magnitudes",
12857 		{ },
12858 		INTERNAL | FLAG_NO_DATA,
12859 		{ },
12860 		{ { 0, 1 } },
12861 		.fill_helper = bpf_fill_alu64_mod_imm,
12862 		.nr_testruns = NR_PATTERN_RUNS,
12863 	},
12864 	/* ALU32 immediate magnitudes */
12865 	{
12866 		"ALU32_MOV_K: all immediate value magnitudes",
12867 		{ },
12868 		INTERNAL | FLAG_NO_DATA,
12869 		{ },
12870 		{ { 0, 1 } },
12871 		.fill_helper = bpf_fill_alu32_mov_imm,
12872 		.nr_testruns = NR_PATTERN_RUNS,
12873 	},
12874 	{
12875 		"ALU32_AND_K: all immediate value magnitudes",
12876 		{ },
12877 		INTERNAL | FLAG_NO_DATA,
12878 		{ },
12879 		{ { 0, 1 } },
12880 		.fill_helper = bpf_fill_alu32_and_imm,
12881 		.nr_testruns = NR_PATTERN_RUNS,
12882 	},
12883 	{
12884 		"ALU32_OR_K: all immediate value magnitudes",
12885 		{ },
12886 		INTERNAL | FLAG_NO_DATA,
12887 		{ },
12888 		{ { 0, 1 } },
12889 		.fill_helper = bpf_fill_alu32_or_imm,
12890 		.nr_testruns = NR_PATTERN_RUNS,
12891 	},
12892 	{
12893 		"ALU32_XOR_K: all immediate value magnitudes",
12894 		{ },
12895 		INTERNAL | FLAG_NO_DATA,
12896 		{ },
12897 		{ { 0, 1 } },
12898 		.fill_helper = bpf_fill_alu32_xor_imm,
12899 		.nr_testruns = NR_PATTERN_RUNS,
12900 	},
12901 	{
12902 		"ALU32_ADD_K: all immediate value magnitudes",
12903 		{ },
12904 		INTERNAL | FLAG_NO_DATA,
12905 		{ },
12906 		{ { 0, 1 } },
12907 		.fill_helper = bpf_fill_alu32_add_imm,
12908 		.nr_testruns = NR_PATTERN_RUNS,
12909 	},
12910 	{
12911 		"ALU32_SUB_K: all immediate value magnitudes",
12912 		{ },
12913 		INTERNAL | FLAG_NO_DATA,
12914 		{ },
12915 		{ { 0, 1 } },
12916 		.fill_helper = bpf_fill_alu32_sub_imm,
12917 		.nr_testruns = NR_PATTERN_RUNS,
12918 	},
12919 	{
12920 		"ALU32_MUL_K: all immediate value magnitudes",
12921 		{ },
12922 		INTERNAL | FLAG_NO_DATA,
12923 		{ },
12924 		{ { 0, 1 } },
12925 		.fill_helper = bpf_fill_alu32_mul_imm,
12926 		.nr_testruns = NR_PATTERN_RUNS,
12927 	},
12928 	{
12929 		"ALU32_DIV_K: all immediate value magnitudes",
12930 		{ },
12931 		INTERNAL | FLAG_NO_DATA,
12932 		{ },
12933 		{ { 0, 1 } },
12934 		.fill_helper = bpf_fill_alu32_div_imm,
12935 		.nr_testruns = NR_PATTERN_RUNS,
12936 	},
12937 	{
12938 		"ALU32_MOD_K: all immediate value magnitudes",
12939 		{ },
12940 		INTERNAL | FLAG_NO_DATA,
12941 		{ },
12942 		{ { 0, 1 } },
12943 		.fill_helper = bpf_fill_alu32_mod_imm,
12944 		.nr_testruns = NR_PATTERN_RUNS,
12945 	},
12946 	/* ALU64 register magnitudes */
12947 	{
12948 		"ALU64_MOV_X: all register value magnitudes",
12949 		{ },
12950 		INTERNAL | FLAG_NO_DATA,
12951 		{ },
12952 		{ { 0, 1 } },
12953 		.fill_helper = bpf_fill_alu64_mov_reg,
12954 		.nr_testruns = NR_PATTERN_RUNS,
12955 	},
12956 	{
12957 		"ALU64_AND_X: all register value magnitudes",
12958 		{ },
12959 		INTERNAL | FLAG_NO_DATA,
12960 		{ },
12961 		{ { 0, 1 } },
12962 		.fill_helper = bpf_fill_alu64_and_reg,
12963 		.nr_testruns = NR_PATTERN_RUNS,
12964 	},
12965 	{
12966 		"ALU64_OR_X: all register value magnitudes",
12967 		{ },
12968 		INTERNAL | FLAG_NO_DATA,
12969 		{ },
12970 		{ { 0, 1 } },
12971 		.fill_helper = bpf_fill_alu64_or_reg,
12972 		.nr_testruns = NR_PATTERN_RUNS,
12973 	},
12974 	{
12975 		"ALU64_XOR_X: all register value magnitudes",
12976 		{ },
12977 		INTERNAL | FLAG_NO_DATA,
12978 		{ },
12979 		{ { 0, 1 } },
12980 		.fill_helper = bpf_fill_alu64_xor_reg,
12981 		.nr_testruns = NR_PATTERN_RUNS,
12982 	},
12983 	{
12984 		"ALU64_ADD_X: all register value magnitudes",
12985 		{ },
12986 		INTERNAL | FLAG_NO_DATA,
12987 		{ },
12988 		{ { 0, 1 } },
12989 		.fill_helper = bpf_fill_alu64_add_reg,
12990 		.nr_testruns = NR_PATTERN_RUNS,
12991 	},
12992 	{
12993 		"ALU64_SUB_X: all register value magnitudes",
12994 		{ },
12995 		INTERNAL | FLAG_NO_DATA,
12996 		{ },
12997 		{ { 0, 1 } },
12998 		.fill_helper = bpf_fill_alu64_sub_reg,
12999 		.nr_testruns = NR_PATTERN_RUNS,
13000 	},
13001 	{
13002 		"ALU64_MUL_X: all register value magnitudes",
13003 		{ },
13004 		INTERNAL | FLAG_NO_DATA,
13005 		{ },
13006 		{ { 0, 1 } },
13007 		.fill_helper = bpf_fill_alu64_mul_reg,
13008 		.nr_testruns = NR_PATTERN_RUNS,
13009 	},
13010 	{
13011 		"ALU64_DIV_X: all register value magnitudes",
13012 		{ },
13013 		INTERNAL | FLAG_NO_DATA,
13014 		{ },
13015 		{ { 0, 1 } },
13016 		.fill_helper = bpf_fill_alu64_div_reg,
13017 		.nr_testruns = NR_PATTERN_RUNS,
13018 	},
13019 	{
13020 		"ALU64_MOD_X: all register value magnitudes",
13021 		{ },
13022 		INTERNAL | FLAG_NO_DATA,
13023 		{ },
13024 		{ { 0, 1 } },
13025 		.fill_helper = bpf_fill_alu64_mod_reg,
13026 		.nr_testruns = NR_PATTERN_RUNS,
13027 	},
13028 	/* ALU32 register magnitudes */
13029 	{
13030 		"ALU32_MOV_X: all register value magnitudes",
13031 		{ },
13032 		INTERNAL | FLAG_NO_DATA,
13033 		{ },
13034 		{ { 0, 1 } },
13035 		.fill_helper = bpf_fill_alu32_mov_reg,
13036 		.nr_testruns = NR_PATTERN_RUNS,
13037 	},
13038 	{
13039 		"ALU32_AND_X: all register value magnitudes",
13040 		{ },
13041 		INTERNAL | FLAG_NO_DATA,
13042 		{ },
13043 		{ { 0, 1 } },
13044 		.fill_helper = bpf_fill_alu32_and_reg,
13045 		.nr_testruns = NR_PATTERN_RUNS,
13046 	},
13047 	{
13048 		"ALU32_OR_X: all register value magnitudes",
13049 		{ },
13050 		INTERNAL | FLAG_NO_DATA,
13051 		{ },
13052 		{ { 0, 1 } },
13053 		.fill_helper = bpf_fill_alu32_or_reg,
13054 		.nr_testruns = NR_PATTERN_RUNS,
13055 	},
13056 	{
13057 		"ALU32_XOR_X: all register value magnitudes",
13058 		{ },
13059 		INTERNAL | FLAG_NO_DATA,
13060 		{ },
13061 		{ { 0, 1 } },
13062 		.fill_helper = bpf_fill_alu32_xor_reg,
13063 		.nr_testruns = NR_PATTERN_RUNS,
13064 	},
13065 	{
13066 		"ALU32_ADD_X: all register value magnitudes",
13067 		{ },
13068 		INTERNAL | FLAG_NO_DATA,
13069 		{ },
13070 		{ { 0, 1 } },
13071 		.fill_helper = bpf_fill_alu32_add_reg,
13072 		.nr_testruns = NR_PATTERN_RUNS,
13073 	},
13074 	{
13075 		"ALU32_SUB_X: all register value magnitudes",
13076 		{ },
13077 		INTERNAL | FLAG_NO_DATA,
13078 		{ },
13079 		{ { 0, 1 } },
13080 		.fill_helper = bpf_fill_alu32_sub_reg,
13081 		.nr_testruns = NR_PATTERN_RUNS,
13082 	},
13083 	{
13084 		"ALU32_MUL_X: all register value magnitudes",
13085 		{ },
13086 		INTERNAL | FLAG_NO_DATA,
13087 		{ },
13088 		{ { 0, 1 } },
13089 		.fill_helper = bpf_fill_alu32_mul_reg,
13090 		.nr_testruns = NR_PATTERN_RUNS,
13091 	},
13092 	{
13093 		"ALU32_DIV_X: all register value magnitudes",
13094 		{ },
13095 		INTERNAL | FLAG_NO_DATA,
13096 		{ },
13097 		{ { 0, 1 } },
13098 		.fill_helper = bpf_fill_alu32_div_reg,
13099 		.nr_testruns = NR_PATTERN_RUNS,
13100 	},
13101 	{
13102 		"ALU32_MOD_X: all register value magnitudes",
13103 		{ },
13104 		INTERNAL | FLAG_NO_DATA,
13105 		{ },
13106 		{ { 0, 1 } },
13107 		.fill_helper = bpf_fill_alu32_mod_reg,
13108 		.nr_testruns = NR_PATTERN_RUNS,
13109 	},
13110 	/* LD_IMM64 immediate magnitudes and byte patterns */
13111 	{
13112 		"LD_IMM64: all immediate value magnitudes",
13113 		{ },
13114 		INTERNAL | FLAG_NO_DATA,
13115 		{ },
13116 		{ { 0, 1 } },
13117 		.fill_helper = bpf_fill_ld_imm64_magn,
13118 	},
13119 	{
13120 		"LD_IMM64: checker byte patterns",
13121 		{ },
13122 		INTERNAL | FLAG_NO_DATA,
13123 		{ },
13124 		{ { 0, 1 } },
13125 		.fill_helper = bpf_fill_ld_imm64_checker,
13126 	},
13127 	{
13128 		"LD_IMM64: random positive and zero byte patterns",
13129 		{ },
13130 		INTERNAL | FLAG_NO_DATA,
13131 		{ },
13132 		{ { 0, 1 } },
13133 		.fill_helper = bpf_fill_ld_imm64_pos_zero,
13134 	},
13135 	{
13136 		"LD_IMM64: random negative and zero byte patterns",
13137 		{ },
13138 		INTERNAL | FLAG_NO_DATA,
13139 		{ },
13140 		{ { 0, 1 } },
13141 		.fill_helper = bpf_fill_ld_imm64_neg_zero,
13142 	},
13143 	{
13144 		"LD_IMM64: random positive and negative byte patterns",
13145 		{ },
13146 		INTERNAL | FLAG_NO_DATA,
13147 		{ },
13148 		{ { 0, 1 } },
13149 		.fill_helper = bpf_fill_ld_imm64_pos_neg,
13150 	},
13151 	/* 64-bit ATOMIC register combinations */
13152 	{
13153 		"ATOMIC_DW_ADD: register combinations",
13154 		{ },
13155 		INTERNAL,
13156 		{ },
13157 		{ { 0, 1 } },
13158 		.fill_helper = bpf_fill_atomic64_add_reg_pairs,
13159 		.stack_depth = 8,
13160 	},
13161 	{
13162 		"ATOMIC_DW_AND: register combinations",
13163 		{ },
13164 		INTERNAL,
13165 		{ },
13166 		{ { 0, 1 } },
13167 		.fill_helper = bpf_fill_atomic64_and_reg_pairs,
13168 		.stack_depth = 8,
13169 	},
13170 	{
13171 		"ATOMIC_DW_OR: register combinations",
13172 		{ },
13173 		INTERNAL,
13174 		{ },
13175 		{ { 0, 1 } },
13176 		.fill_helper = bpf_fill_atomic64_or_reg_pairs,
13177 		.stack_depth = 8,
13178 	},
13179 	{
13180 		"ATOMIC_DW_XOR: register combinations",
13181 		{ },
13182 		INTERNAL,
13183 		{ },
13184 		{ { 0, 1 } },
13185 		.fill_helper = bpf_fill_atomic64_xor_reg_pairs,
13186 		.stack_depth = 8,
13187 	},
13188 	{
13189 		"ATOMIC_DW_ADD_FETCH: register combinations",
13190 		{ },
13191 		INTERNAL,
13192 		{ },
13193 		{ { 0, 1 } },
13194 		.fill_helper = bpf_fill_atomic64_add_fetch_reg_pairs,
13195 		.stack_depth = 8,
13196 	},
13197 	{
13198 		"ATOMIC_DW_AND_FETCH: register combinations",
13199 		{ },
13200 		INTERNAL,
13201 		{ },
13202 		{ { 0, 1 } },
13203 		.fill_helper = bpf_fill_atomic64_and_fetch_reg_pairs,
13204 		.stack_depth = 8,
13205 	},
13206 	{
13207 		"ATOMIC_DW_OR_FETCH: register combinations",
13208 		{ },
13209 		INTERNAL,
13210 		{ },
13211 		{ { 0, 1 } },
13212 		.fill_helper = bpf_fill_atomic64_or_fetch_reg_pairs,
13213 		.stack_depth = 8,
13214 	},
13215 	{
13216 		"ATOMIC_DW_XOR_FETCH: register combinations",
13217 		{ },
13218 		INTERNAL,
13219 		{ },
13220 		{ { 0, 1 } },
13221 		.fill_helper = bpf_fill_atomic64_xor_fetch_reg_pairs,
13222 		.stack_depth = 8,
13223 	},
13224 	{
13225 		"ATOMIC_DW_XCHG: register combinations",
13226 		{ },
13227 		INTERNAL,
13228 		{ },
13229 		{ { 0, 1 } },
13230 		.fill_helper = bpf_fill_atomic64_xchg_reg_pairs,
13231 		.stack_depth = 8,
13232 	},
13233 	{
13234 		"ATOMIC_DW_CMPXCHG: register combinations",
13235 		{ },
13236 		INTERNAL,
13237 		{ },
13238 		{ { 0, 1 } },
13239 		.fill_helper = bpf_fill_atomic64_cmpxchg_reg_pairs,
13240 		.stack_depth = 8,
13241 	},
13242 	/* 32-bit ATOMIC register combinations */
13243 	{
13244 		"ATOMIC_W_ADD: register combinations",
13245 		{ },
13246 		INTERNAL,
13247 		{ },
13248 		{ { 0, 1 } },
13249 		.fill_helper = bpf_fill_atomic32_add_reg_pairs,
13250 		.stack_depth = 8,
13251 	},
13252 	{
13253 		"ATOMIC_W_AND: register combinations",
13254 		{ },
13255 		INTERNAL,
13256 		{ },
13257 		{ { 0, 1 } },
13258 		.fill_helper = bpf_fill_atomic32_and_reg_pairs,
13259 		.stack_depth = 8,
13260 	},
13261 	{
13262 		"ATOMIC_W_OR: register combinations",
13263 		{ },
13264 		INTERNAL,
13265 		{ },
13266 		{ { 0, 1 } },
13267 		.fill_helper = bpf_fill_atomic32_or_reg_pairs,
13268 		.stack_depth = 8,
13269 	},
13270 	{
13271 		"ATOMIC_W_XOR: register combinations",
13272 		{ },
13273 		INTERNAL,
13274 		{ },
13275 		{ { 0, 1 } },
13276 		.fill_helper = bpf_fill_atomic32_xor_reg_pairs,
13277 		.stack_depth = 8,
13278 	},
13279 	{
13280 		"ATOMIC_W_ADD_FETCH: register combinations",
13281 		{ },
13282 		INTERNAL,
13283 		{ },
13284 		{ { 0, 1 } },
13285 		.fill_helper = bpf_fill_atomic32_add_fetch_reg_pairs,
13286 		.stack_depth = 8,
13287 	},
13288 	{
13289 		"ATOMIC_W_AND_FETCH: register combinations",
13290 		{ },
13291 		INTERNAL,
13292 		{ },
13293 		{ { 0, 1 } },
13294 		.fill_helper = bpf_fill_atomic32_and_fetch_reg_pairs,
13295 		.stack_depth = 8,
13296 	},
13297 	{
13298 		"ATOMIC_W_OR_FETCH: register combinations",
13299 		{ },
13300 		INTERNAL,
13301 		{ },
13302 		{ { 0, 1 } },
13303 		.fill_helper = bpf_fill_atomic32_or_fetch_reg_pairs,
13304 		.stack_depth = 8,
13305 	},
13306 	{
13307 		"ATOMIC_W_XOR_FETCH: register combinations",
13308 		{ },
13309 		INTERNAL,
13310 		{ },
13311 		{ { 0, 1 } },
13312 		.fill_helper = bpf_fill_atomic32_xor_fetch_reg_pairs,
13313 		.stack_depth = 8,
13314 	},
13315 	{
13316 		"ATOMIC_W_XCHG: register combinations",
13317 		{ },
13318 		INTERNAL,
13319 		{ },
13320 		{ { 0, 1 } },
13321 		.fill_helper = bpf_fill_atomic32_xchg_reg_pairs,
13322 		.stack_depth = 8,
13323 	},
13324 	{
13325 		"ATOMIC_W_CMPXCHG: register combinations",
13326 		{ },
13327 		INTERNAL,
13328 		{ },
13329 		{ { 0, 1 } },
13330 		.fill_helper = bpf_fill_atomic32_cmpxchg_reg_pairs,
13331 		.stack_depth = 8,
13332 	},
13333 	/* 64-bit ATOMIC magnitudes */
13334 	{
13335 		"ATOMIC_DW_ADD: all operand magnitudes",
13336 		{ },
13337 		INTERNAL | FLAG_NO_DATA,
13338 		{ },
13339 		{ { 0, 1 } },
13340 		.fill_helper = bpf_fill_atomic64_add,
13341 		.stack_depth = 8,
13342 		.nr_testruns = NR_PATTERN_RUNS,
13343 	},
13344 	{
13345 		"ATOMIC_DW_AND: all operand magnitudes",
13346 		{ },
13347 		INTERNAL | FLAG_NO_DATA,
13348 		{ },
13349 		{ { 0, 1 } },
13350 		.fill_helper = bpf_fill_atomic64_and,
13351 		.stack_depth = 8,
13352 		.nr_testruns = NR_PATTERN_RUNS,
13353 	},
13354 	{
13355 		"ATOMIC_DW_OR: all operand magnitudes",
13356 		{ },
13357 		INTERNAL | FLAG_NO_DATA,
13358 		{ },
13359 		{ { 0, 1 } },
13360 		.fill_helper = bpf_fill_atomic64_or,
13361 		.stack_depth = 8,
13362 		.nr_testruns = NR_PATTERN_RUNS,
13363 	},
13364 	{
13365 		"ATOMIC_DW_XOR: all operand magnitudes",
13366 		{ },
13367 		INTERNAL | FLAG_NO_DATA,
13368 		{ },
13369 		{ { 0, 1 } },
13370 		.fill_helper = bpf_fill_atomic64_xor,
13371 		.stack_depth = 8,
13372 		.nr_testruns = NR_PATTERN_RUNS,
13373 	},
13374 	{
13375 		"ATOMIC_DW_ADD_FETCH: all operand magnitudes",
13376 		{ },
13377 		INTERNAL | FLAG_NO_DATA,
13378 		{ },
13379 		{ { 0, 1 } },
13380 		.fill_helper = bpf_fill_atomic64_add_fetch,
13381 		.stack_depth = 8,
13382 		.nr_testruns = NR_PATTERN_RUNS,
13383 	},
13384 	{
13385 		"ATOMIC_DW_AND_FETCH: all operand magnitudes",
13386 		{ },
13387 		INTERNAL | FLAG_NO_DATA,
13388 		{ },
13389 		{ { 0, 1 } },
13390 		.fill_helper = bpf_fill_atomic64_and_fetch,
13391 		.stack_depth = 8,
13392 		.nr_testruns = NR_PATTERN_RUNS,
13393 	},
13394 	{
13395 		"ATOMIC_DW_OR_FETCH: all operand magnitudes",
13396 		{ },
13397 		INTERNAL | FLAG_NO_DATA,
13398 		{ },
13399 		{ { 0, 1 } },
13400 		.fill_helper = bpf_fill_atomic64_or_fetch,
13401 		.stack_depth = 8,
13402 		.nr_testruns = NR_PATTERN_RUNS,
13403 	},
13404 	{
13405 		"ATOMIC_DW_XOR_FETCH: all operand magnitudes",
13406 		{ },
13407 		INTERNAL | FLAG_NO_DATA,
13408 		{ },
13409 		{ { 0, 1 } },
13410 		.fill_helper = bpf_fill_atomic64_xor_fetch,
13411 		.stack_depth = 8,
13412 		.nr_testruns = NR_PATTERN_RUNS,
13413 	},
13414 	{
13415 		"ATOMIC_DW_XCHG: all operand magnitudes",
13416 		{ },
13417 		INTERNAL | FLAG_NO_DATA,
13418 		{ },
13419 		{ { 0, 1 } },
13420 		.fill_helper = bpf_fill_atomic64_xchg,
13421 		.stack_depth = 8,
13422 		.nr_testruns = NR_PATTERN_RUNS,
13423 	},
13424 	{
13425 		"ATOMIC_DW_CMPXCHG: all operand magnitudes",
13426 		{ },
13427 		INTERNAL | FLAG_NO_DATA,
13428 		{ },
13429 		{ { 0, 1 } },
13430 		.fill_helper = bpf_fill_cmpxchg64,
13431 		.stack_depth = 8,
13432 		.nr_testruns = NR_PATTERN_RUNS,
13433 	},
13434 	/* 32-bit ATOMIC magnitudes */
13435 	{
13436 		"ATOMIC_W_ADD: all operand magnitudes",
13437 		{ },
13438 		INTERNAL | FLAG_NO_DATA,
13439 		{ },
13440 		{ { 0, 1 } },
13441 		.fill_helper = bpf_fill_atomic32_add,
13442 		.stack_depth = 8,
13443 		.nr_testruns = NR_PATTERN_RUNS,
13444 	},
13445 	{
13446 		"ATOMIC_W_AND: all operand magnitudes",
13447 		{ },
13448 		INTERNAL | FLAG_NO_DATA,
13449 		{ },
13450 		{ { 0, 1 } },
13451 		.fill_helper = bpf_fill_atomic32_and,
13452 		.stack_depth = 8,
13453 		.nr_testruns = NR_PATTERN_RUNS,
13454 	},
13455 	{
13456 		"ATOMIC_W_OR: all operand magnitudes",
13457 		{ },
13458 		INTERNAL | FLAG_NO_DATA,
13459 		{ },
13460 		{ { 0, 1 } },
13461 		.fill_helper = bpf_fill_atomic32_or,
13462 		.stack_depth = 8,
13463 		.nr_testruns = NR_PATTERN_RUNS,
13464 	},
13465 	{
13466 		"ATOMIC_W_XOR: all operand magnitudes",
13467 		{ },
13468 		INTERNAL | FLAG_NO_DATA,
13469 		{ },
13470 		{ { 0, 1 } },
13471 		.fill_helper = bpf_fill_atomic32_xor,
13472 		.stack_depth = 8,
13473 		.nr_testruns = NR_PATTERN_RUNS,
13474 	},
13475 	{
13476 		"ATOMIC_W_ADD_FETCH: all operand magnitudes",
13477 		{ },
13478 		INTERNAL | FLAG_NO_DATA,
13479 		{ },
13480 		{ { 0, 1 } },
13481 		.fill_helper = bpf_fill_atomic32_add_fetch,
13482 		.stack_depth = 8,
13483 		.nr_testruns = NR_PATTERN_RUNS,
13484 	},
13485 	{
13486 		"ATOMIC_W_AND_FETCH: all operand magnitudes",
13487 		{ },
13488 		INTERNAL | FLAG_NO_DATA,
13489 		{ },
13490 		{ { 0, 1 } },
13491 		.fill_helper = bpf_fill_atomic32_and_fetch,
13492 		.stack_depth = 8,
13493 		.nr_testruns = NR_PATTERN_RUNS,
13494 	},
13495 	{
13496 		"ATOMIC_W_OR_FETCH: all operand magnitudes",
13497 		{ },
13498 		INTERNAL | FLAG_NO_DATA,
13499 		{ },
13500 		{ { 0, 1 } },
13501 		.fill_helper = bpf_fill_atomic32_or_fetch,
13502 		.stack_depth = 8,
13503 		.nr_testruns = NR_PATTERN_RUNS,
13504 	},
13505 	{
13506 		"ATOMIC_W_XOR_FETCH: all operand magnitudes",
13507 		{ },
13508 		INTERNAL | FLAG_NO_DATA,
13509 		{ },
13510 		{ { 0, 1 } },
13511 		.fill_helper = bpf_fill_atomic32_xor_fetch,
13512 		.stack_depth = 8,
13513 		.nr_testruns = NR_PATTERN_RUNS,
13514 	},
13515 	{
13516 		"ATOMIC_W_XCHG: all operand magnitudes",
13517 		{ },
13518 		INTERNAL | FLAG_NO_DATA,
13519 		{ },
13520 		{ { 0, 1 } },
13521 		.fill_helper = bpf_fill_atomic32_xchg,
13522 		.stack_depth = 8,
13523 		.nr_testruns = NR_PATTERN_RUNS,
13524 	},
13525 	{
13526 		"ATOMIC_W_CMPXCHG: all operand magnitudes",
13527 		{ },
13528 		INTERNAL | FLAG_NO_DATA,
13529 		{ },
13530 		{ { 0, 1 } },
13531 		.fill_helper = bpf_fill_cmpxchg32,
13532 		.stack_depth = 8,
13533 		.nr_testruns = NR_PATTERN_RUNS,
13534 	},
13535 	/* JMP immediate magnitudes */
13536 	{
13537 		"JMP_JSET_K: all immediate value magnitudes",
13538 		{ },
13539 		INTERNAL | FLAG_NO_DATA,
13540 		{ },
13541 		{ { 0, 1 } },
13542 		.fill_helper = bpf_fill_jmp_jset_imm,
13543 		.nr_testruns = NR_PATTERN_RUNS,
13544 	},
13545 	{
13546 		"JMP_JEQ_K: all immediate value magnitudes",
13547 		{ },
13548 		INTERNAL | FLAG_NO_DATA,
13549 		{ },
13550 		{ { 0, 1 } },
13551 		.fill_helper = bpf_fill_jmp_jeq_imm,
13552 		.nr_testruns = NR_PATTERN_RUNS,
13553 	},
13554 	{
13555 		"JMP_JNE_K: all immediate value magnitudes",
13556 		{ },
13557 		INTERNAL | FLAG_NO_DATA,
13558 		{ },
13559 		{ { 0, 1 } },
13560 		.fill_helper = bpf_fill_jmp_jne_imm,
13561 		.nr_testruns = NR_PATTERN_RUNS,
13562 	},
13563 	{
13564 		"JMP_JGT_K: all immediate value magnitudes",
13565 		{ },
13566 		INTERNAL | FLAG_NO_DATA,
13567 		{ },
13568 		{ { 0, 1 } },
13569 		.fill_helper = bpf_fill_jmp_jgt_imm,
13570 		.nr_testruns = NR_PATTERN_RUNS,
13571 	},
13572 	{
13573 		"JMP_JGE_K: all immediate value magnitudes",
13574 		{ },
13575 		INTERNAL | FLAG_NO_DATA,
13576 		{ },
13577 		{ { 0, 1 } },
13578 		.fill_helper = bpf_fill_jmp_jge_imm,
13579 		.nr_testruns = NR_PATTERN_RUNS,
13580 	},
13581 	{
13582 		"JMP_JLT_K: all immediate value magnitudes",
13583 		{ },
13584 		INTERNAL | FLAG_NO_DATA,
13585 		{ },
13586 		{ { 0, 1 } },
13587 		.fill_helper = bpf_fill_jmp_jlt_imm,
13588 		.nr_testruns = NR_PATTERN_RUNS,
13589 	},
13590 	{
13591 		"JMP_JLE_K: all immediate value magnitudes",
13592 		{ },
13593 		INTERNAL | FLAG_NO_DATA,
13594 		{ },
13595 		{ { 0, 1 } },
13596 		.fill_helper = bpf_fill_jmp_jle_imm,
13597 		.nr_testruns = NR_PATTERN_RUNS,
13598 	},
13599 	{
13600 		"JMP_JSGT_K: all immediate value magnitudes",
13601 		{ },
13602 		INTERNAL | FLAG_NO_DATA,
13603 		{ },
13604 		{ { 0, 1 } },
13605 		.fill_helper = bpf_fill_jmp_jsgt_imm,
13606 		.nr_testruns = NR_PATTERN_RUNS,
13607 	},
13608 	{
13609 		"JMP_JSGE_K: all immediate value magnitudes",
13610 		{ },
13611 		INTERNAL | FLAG_NO_DATA,
13612 		{ },
13613 		{ { 0, 1 } },
13614 		.fill_helper = bpf_fill_jmp_jsge_imm,
13615 		.nr_testruns = NR_PATTERN_RUNS,
13616 	},
13617 	{
13618 		"JMP_JSLT_K: all immediate value magnitudes",
13619 		{ },
13620 		INTERNAL | FLAG_NO_DATA,
13621 		{ },
13622 		{ { 0, 1 } },
13623 		.fill_helper = bpf_fill_jmp_jslt_imm,
13624 		.nr_testruns = NR_PATTERN_RUNS,
13625 	},
13626 	{
13627 		"JMP_JSLE_K: all immediate value magnitudes",
13628 		{ },
13629 		INTERNAL | FLAG_NO_DATA,
13630 		{ },
13631 		{ { 0, 1 } },
13632 		.fill_helper = bpf_fill_jmp_jsle_imm,
13633 		.nr_testruns = NR_PATTERN_RUNS,
13634 	},
13635 	/* JMP register magnitudes */
13636 	{
13637 		"JMP_JSET_X: all register value magnitudes",
13638 		{ },
13639 		INTERNAL | FLAG_NO_DATA,
13640 		{ },
13641 		{ { 0, 1 } },
13642 		.fill_helper = bpf_fill_jmp_jset_reg,
13643 		.nr_testruns = NR_PATTERN_RUNS,
13644 	},
13645 	{
13646 		"JMP_JEQ_X: all register value magnitudes",
13647 		{ },
13648 		INTERNAL | FLAG_NO_DATA,
13649 		{ },
13650 		{ { 0, 1 } },
13651 		.fill_helper = bpf_fill_jmp_jeq_reg,
13652 		.nr_testruns = NR_PATTERN_RUNS,
13653 	},
13654 	{
13655 		"JMP_JNE_X: all register value magnitudes",
13656 		{ },
13657 		INTERNAL | FLAG_NO_DATA,
13658 		{ },
13659 		{ { 0, 1 } },
13660 		.fill_helper = bpf_fill_jmp_jne_reg,
13661 		.nr_testruns = NR_PATTERN_RUNS,
13662 	},
13663 	{
13664 		"JMP_JGT_X: all register value magnitudes",
13665 		{ },
13666 		INTERNAL | FLAG_NO_DATA,
13667 		{ },
13668 		{ { 0, 1 } },
13669 		.fill_helper = bpf_fill_jmp_jgt_reg,
13670 		.nr_testruns = NR_PATTERN_RUNS,
13671 	},
13672 	{
13673 		"JMP_JGE_X: all register value magnitudes",
13674 		{ },
13675 		INTERNAL | FLAG_NO_DATA,
13676 		{ },
13677 		{ { 0, 1 } },
13678 		.fill_helper = bpf_fill_jmp_jge_reg,
13679 		.nr_testruns = NR_PATTERN_RUNS,
13680 	},
13681 	{
13682 		"JMP_JLT_X: all register value magnitudes",
13683 		{ },
13684 		INTERNAL | FLAG_NO_DATA,
13685 		{ },
13686 		{ { 0, 1 } },
13687 		.fill_helper = bpf_fill_jmp_jlt_reg,
13688 		.nr_testruns = NR_PATTERN_RUNS,
13689 	},
13690 	{
13691 		"JMP_JLE_X: all register value magnitudes",
13692 		{ },
13693 		INTERNAL | FLAG_NO_DATA,
13694 		{ },
13695 		{ { 0, 1 } },
13696 		.fill_helper = bpf_fill_jmp_jle_reg,
13697 		.nr_testruns = NR_PATTERN_RUNS,
13698 	},
13699 	{
13700 		"JMP_JSGT_X: all register value magnitudes",
13701 		{ },
13702 		INTERNAL | FLAG_NO_DATA,
13703 		{ },
13704 		{ { 0, 1 } },
13705 		.fill_helper = bpf_fill_jmp_jsgt_reg,
13706 		.nr_testruns = NR_PATTERN_RUNS,
13707 	},
13708 	{
13709 		"JMP_JSGE_X: all register value magnitudes",
13710 		{ },
13711 		INTERNAL | FLAG_NO_DATA,
13712 		{ },
13713 		{ { 0, 1 } },
13714 		.fill_helper = bpf_fill_jmp_jsge_reg,
13715 		.nr_testruns = NR_PATTERN_RUNS,
13716 	},
13717 	{
13718 		"JMP_JSLT_X: all register value magnitudes",
13719 		{ },
13720 		INTERNAL | FLAG_NO_DATA,
13721 		{ },
13722 		{ { 0, 1 } },
13723 		.fill_helper = bpf_fill_jmp_jslt_reg,
13724 		.nr_testruns = NR_PATTERN_RUNS,
13725 	},
13726 	{
13727 		"JMP_JSLE_X: all register value magnitudes",
13728 		{ },
13729 		INTERNAL | FLAG_NO_DATA,
13730 		{ },
13731 		{ { 0, 1 } },
13732 		.fill_helper = bpf_fill_jmp_jsle_reg,
13733 		.nr_testruns = NR_PATTERN_RUNS,
13734 	},
13735 	/* JMP32 immediate magnitudes */
13736 	{
13737 		"JMP32_JSET_K: all immediate value magnitudes",
13738 		{ },
13739 		INTERNAL | FLAG_NO_DATA,
13740 		{ },
13741 		{ { 0, 1 } },
13742 		.fill_helper = bpf_fill_jmp32_jset_imm,
13743 		.nr_testruns = NR_PATTERN_RUNS,
13744 	},
13745 	{
13746 		"JMP32_JEQ_K: all immediate value magnitudes",
13747 		{ },
13748 		INTERNAL | FLAG_NO_DATA,
13749 		{ },
13750 		{ { 0, 1 } },
13751 		.fill_helper = bpf_fill_jmp32_jeq_imm,
13752 		.nr_testruns = NR_PATTERN_RUNS,
13753 	},
13754 	{
13755 		"JMP32_JNE_K: all immediate value magnitudes",
13756 		{ },
13757 		INTERNAL | FLAG_NO_DATA,
13758 		{ },
13759 		{ { 0, 1 } },
13760 		.fill_helper = bpf_fill_jmp32_jne_imm,
13761 		.nr_testruns = NR_PATTERN_RUNS,
13762 	},
13763 	{
13764 		"JMP32_JGT_K: all immediate value magnitudes",
13765 		{ },
13766 		INTERNAL | FLAG_NO_DATA,
13767 		{ },
13768 		{ { 0, 1 } },
13769 		.fill_helper = bpf_fill_jmp32_jgt_imm,
13770 		.nr_testruns = NR_PATTERN_RUNS,
13771 	},
13772 	{
13773 		"JMP32_JGE_K: all immediate value magnitudes",
13774 		{ },
13775 		INTERNAL | FLAG_NO_DATA,
13776 		{ },
13777 		{ { 0, 1 } },
13778 		.fill_helper = bpf_fill_jmp32_jge_imm,
13779 		.nr_testruns = NR_PATTERN_RUNS,
13780 	},
13781 	{
13782 		"JMP32_JLT_K: all immediate value magnitudes",
13783 		{ },
13784 		INTERNAL | FLAG_NO_DATA,
13785 		{ },
13786 		{ { 0, 1 } },
13787 		.fill_helper = bpf_fill_jmp32_jlt_imm,
13788 		.nr_testruns = NR_PATTERN_RUNS,
13789 	},
13790 	{
13791 		"JMP32_JLE_K: all immediate value magnitudes",
13792 		{ },
13793 		INTERNAL | FLAG_NO_DATA,
13794 		{ },
13795 		{ { 0, 1 } },
13796 		.fill_helper = bpf_fill_jmp32_jle_imm,
13797 		.nr_testruns = NR_PATTERN_RUNS,
13798 	},
13799 	{
13800 		"JMP32_JSGT_K: all immediate value magnitudes",
13801 		{ },
13802 		INTERNAL | FLAG_NO_DATA,
13803 		{ },
13804 		{ { 0, 1 } },
13805 		.fill_helper = bpf_fill_jmp32_jsgt_imm,
13806 		.nr_testruns = NR_PATTERN_RUNS,
13807 	},
13808 	{
13809 		"JMP32_JSGE_K: all immediate value magnitudes",
13810 		{ },
13811 		INTERNAL | FLAG_NO_DATA,
13812 		{ },
13813 		{ { 0, 1 } },
13814 		.fill_helper = bpf_fill_jmp32_jsge_imm,
13815 		.nr_testruns = NR_PATTERN_RUNS,
13816 	},
13817 	{
13818 		"JMP32_JSLT_K: all immediate value magnitudes",
13819 		{ },
13820 		INTERNAL | FLAG_NO_DATA,
13821 		{ },
13822 		{ { 0, 1 } },
13823 		.fill_helper = bpf_fill_jmp32_jslt_imm,
13824 		.nr_testruns = NR_PATTERN_RUNS,
13825 	},
13826 	{
13827 		"JMP32_JSLE_K: all immediate value magnitudes",
13828 		{ },
13829 		INTERNAL | FLAG_NO_DATA,
13830 		{ },
13831 		{ { 0, 1 } },
13832 		.fill_helper = bpf_fill_jmp32_jsle_imm,
13833 		.nr_testruns = NR_PATTERN_RUNS,
13834 	},
13835 	/* JMP32 register magnitudes */
13836 	{
13837 		"JMP32_JSET_X: all register value magnitudes",
13838 		{ },
13839 		INTERNAL | FLAG_NO_DATA,
13840 		{ },
13841 		{ { 0, 1 } },
13842 		.fill_helper = bpf_fill_jmp32_jset_reg,
13843 		.nr_testruns = NR_PATTERN_RUNS,
13844 	},
13845 	{
13846 		"JMP32_JEQ_X: all register value magnitudes",
13847 		{ },
13848 		INTERNAL | FLAG_NO_DATA,
13849 		{ },
13850 		{ { 0, 1 } },
13851 		.fill_helper = bpf_fill_jmp32_jeq_reg,
13852 		.nr_testruns = NR_PATTERN_RUNS,
13853 	},
13854 	{
13855 		"JMP32_JNE_X: all register value magnitudes",
13856 		{ },
13857 		INTERNAL | FLAG_NO_DATA,
13858 		{ },
13859 		{ { 0, 1 } },
13860 		.fill_helper = bpf_fill_jmp32_jne_reg,
13861 		.nr_testruns = NR_PATTERN_RUNS,
13862 	},
13863 	{
13864 		"JMP32_JGT_X: all register value magnitudes",
13865 		{ },
13866 		INTERNAL | FLAG_NO_DATA,
13867 		{ },
13868 		{ { 0, 1 } },
13869 		.fill_helper = bpf_fill_jmp32_jgt_reg,
13870 		.nr_testruns = NR_PATTERN_RUNS,
13871 	},
13872 	{
13873 		"JMP32_JGE_X: all register value magnitudes",
13874 		{ },
13875 		INTERNAL | FLAG_NO_DATA,
13876 		{ },
13877 		{ { 0, 1 } },
13878 		.fill_helper = bpf_fill_jmp32_jge_reg,
13879 		.nr_testruns = NR_PATTERN_RUNS,
13880 	},
13881 	{
13882 		"JMP32_JLT_X: all register value magnitudes",
13883 		{ },
13884 		INTERNAL | FLAG_NO_DATA,
13885 		{ },
13886 		{ { 0, 1 } },
13887 		.fill_helper = bpf_fill_jmp32_jlt_reg,
13888 		.nr_testruns = NR_PATTERN_RUNS,
13889 	},
13890 	{
13891 		"JMP32_JLE_X: all register value magnitudes",
13892 		{ },
13893 		INTERNAL | FLAG_NO_DATA,
13894 		{ },
13895 		{ { 0, 1 } },
13896 		.fill_helper = bpf_fill_jmp32_jle_reg,
13897 		.nr_testruns = NR_PATTERN_RUNS,
13898 	},
13899 	{
13900 		"JMP32_JSGT_X: all register value magnitudes",
13901 		{ },
13902 		INTERNAL | FLAG_NO_DATA,
13903 		{ },
13904 		{ { 0, 1 } },
13905 		.fill_helper = bpf_fill_jmp32_jsgt_reg,
13906 		.nr_testruns = NR_PATTERN_RUNS,
13907 	},
13908 	{
13909 		"JMP32_JSGE_X: all register value magnitudes",
13910 		{ },
13911 		INTERNAL | FLAG_NO_DATA,
13912 		{ },
13913 		{ { 0, 1 } },
13914 		.fill_helper = bpf_fill_jmp32_jsge_reg,
13915 		.nr_testruns = NR_PATTERN_RUNS,
13916 	},
13917 	{
13918 		"JMP32_JSLT_X: all register value magnitudes",
13919 		{ },
13920 		INTERNAL | FLAG_NO_DATA,
13921 		{ },
13922 		{ { 0, 1 } },
13923 		.fill_helper = bpf_fill_jmp32_jslt_reg,
13924 		.nr_testruns = NR_PATTERN_RUNS,
13925 	},
13926 	{
13927 		"JMP32_JSLE_X: all register value magnitudes",
13928 		{ },
13929 		INTERNAL | FLAG_NO_DATA,
13930 		{ },
13931 		{ { 0, 1 } },
13932 		.fill_helper = bpf_fill_jmp32_jsle_reg,
13933 		.nr_testruns = NR_PATTERN_RUNS,
13934 	},
13935 	/* Conditional jumps with constant decision */
13936 	{
13937 		"JMP_JSET_K: imm = 0 -> never taken",
13938 		.u.insns_int = {
13939 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13940 			BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
13941 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13942 			BPF_EXIT_INSN(),
13943 		},
13944 		INTERNAL | FLAG_NO_DATA,
13945 		{ },
13946 		{ { 0, 0 } },
13947 	},
13948 	{
13949 		"JMP_JLT_K: imm = 0 -> never taken",
13950 		.u.insns_int = {
13951 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13952 			BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
13953 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13954 			BPF_EXIT_INSN(),
13955 		},
13956 		INTERNAL | FLAG_NO_DATA,
13957 		{ },
13958 		{ { 0, 0 } },
13959 	},
13960 	{
13961 		"JMP_JGE_K: imm = 0 -> always taken",
13962 		.u.insns_int = {
13963 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13964 			BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
13965 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13966 			BPF_EXIT_INSN(),
13967 		},
13968 		INTERNAL | FLAG_NO_DATA,
13969 		{ },
13970 		{ { 0, 1 } },
13971 	},
13972 	{
13973 		"JMP_JGT_K: imm = 0xffffffff -> never taken",
13974 		.u.insns_int = {
13975 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13976 			BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
13977 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13978 			BPF_EXIT_INSN(),
13979 		},
13980 		INTERNAL | FLAG_NO_DATA,
13981 		{ },
13982 		{ { 0, 0 } },
13983 	},
13984 	{
13985 		"JMP_JLE_K: imm = 0xffffffff -> always taken",
13986 		.u.insns_int = {
13987 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
13988 			BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
13989 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
13990 			BPF_EXIT_INSN(),
13991 		},
13992 		INTERNAL | FLAG_NO_DATA,
13993 		{ },
13994 		{ { 0, 1 } },
13995 	},
13996 	{
13997 		"JMP32_JSGT_K: imm = 0x7fffffff -> never taken",
13998 		.u.insns_int = {
13999 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14000 			BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
14001 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14002 			BPF_EXIT_INSN(),
14003 		},
14004 		INTERNAL | FLAG_NO_DATA,
14005 		{ },
14006 		{ { 0, 0 } },
14007 	},
14008 	{
14009 		"JMP32_JSGE_K: imm = -0x80000000 -> always taken",
14010 		.u.insns_int = {
14011 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14012 			BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
14013 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14014 			BPF_EXIT_INSN(),
14015 		},
14016 		INTERNAL | FLAG_NO_DATA,
14017 		{ },
14018 		{ { 0, 1 } },
14019 	},
14020 	{
14021 		"JMP32_JSLT_K: imm = -0x80000000 -> never taken",
14022 		.u.insns_int = {
14023 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14024 			BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
14025 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14026 			BPF_EXIT_INSN(),
14027 		},
14028 		INTERNAL | FLAG_NO_DATA,
14029 		{ },
14030 		{ { 0, 0 } },
14031 	},
14032 	{
14033 		"JMP32_JSLE_K: imm = 0x7fffffff -> always taken",
14034 		.u.insns_int = {
14035 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14036 			BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
14037 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14038 			BPF_EXIT_INSN(),
14039 		},
14040 		INTERNAL | FLAG_NO_DATA,
14041 		{ },
14042 		{ { 0, 1 } },
14043 	},
14044 	{
14045 		"JMP_JEQ_X: dst = src -> always taken",
14046 		.u.insns_int = {
14047 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14048 			BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
14049 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14050 			BPF_EXIT_INSN(),
14051 		},
14052 		INTERNAL | FLAG_NO_DATA,
14053 		{ },
14054 		{ { 0, 1 } },
14055 	},
14056 	{
14057 		"JMP_JGE_X: dst = src -> always taken",
14058 		.u.insns_int = {
14059 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14060 			BPF_JMP_REG(BPF_JGE, R1, R1, 1),
14061 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14062 			BPF_EXIT_INSN(),
14063 		},
14064 		INTERNAL | FLAG_NO_DATA,
14065 		{ },
14066 		{ { 0, 1 } },
14067 	},
14068 	{
14069 		"JMP_JLE_X: dst = src -> always taken",
14070 		.u.insns_int = {
14071 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14072 			BPF_JMP_REG(BPF_JLE, R1, R1, 1),
14073 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14074 			BPF_EXIT_INSN(),
14075 		},
14076 		INTERNAL | FLAG_NO_DATA,
14077 		{ },
14078 		{ { 0, 1 } },
14079 	},
14080 	{
14081 		"JMP_JSGE_X: dst = src -> always taken",
14082 		.u.insns_int = {
14083 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14084 			BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
14085 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14086 			BPF_EXIT_INSN(),
14087 		},
14088 		INTERNAL | FLAG_NO_DATA,
14089 		{ },
14090 		{ { 0, 1 } },
14091 	},
14092 	{
14093 		"JMP_JSLE_X: dst = src -> always taken",
14094 		.u.insns_int = {
14095 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14096 			BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
14097 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14098 			BPF_EXIT_INSN(),
14099 		},
14100 		INTERNAL | FLAG_NO_DATA,
14101 		{ },
14102 		{ { 0, 1 } },
14103 	},
14104 	{
14105 		"JMP_JNE_X: dst = src -> never taken",
14106 		.u.insns_int = {
14107 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14108 			BPF_JMP_REG(BPF_JNE, R1, R1, 1),
14109 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14110 			BPF_EXIT_INSN(),
14111 		},
14112 		INTERNAL | FLAG_NO_DATA,
14113 		{ },
14114 		{ { 0, 0 } },
14115 	},
14116 	{
14117 		"JMP_JGT_X: dst = src -> never taken",
14118 		.u.insns_int = {
14119 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14120 			BPF_JMP_REG(BPF_JGT, R1, R1, 1),
14121 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14122 			BPF_EXIT_INSN(),
14123 		},
14124 		INTERNAL | FLAG_NO_DATA,
14125 		{ },
14126 		{ { 0, 0 } },
14127 	},
14128 	{
14129 		"JMP_JLT_X: dst = src -> never taken",
14130 		.u.insns_int = {
14131 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14132 			BPF_JMP_REG(BPF_JLT, R1, R1, 1),
14133 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14134 			BPF_EXIT_INSN(),
14135 		},
14136 		INTERNAL | FLAG_NO_DATA,
14137 		{ },
14138 		{ { 0, 0 } },
14139 	},
14140 	{
14141 		"JMP_JSGT_X: dst = src -> never taken",
14142 		.u.insns_int = {
14143 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14144 			BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
14145 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14146 			BPF_EXIT_INSN(),
14147 		},
14148 		INTERNAL | FLAG_NO_DATA,
14149 		{ },
14150 		{ { 0, 0 } },
14151 	},
14152 	{
14153 		"JMP_JSLT_X: dst = src -> never taken",
14154 		.u.insns_int = {
14155 			BPF_ALU64_IMM(BPF_MOV, R0, 1),
14156 			BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
14157 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14158 			BPF_EXIT_INSN(),
14159 		},
14160 		INTERNAL | FLAG_NO_DATA,
14161 		{ },
14162 		{ { 0, 0 } },
14163 	},
14164 	/* Short relative jumps */
14165 	{
14166 		"Short relative jump: offset=0",
14167 		.u.insns_int = {
14168 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14169 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 0),
14170 			BPF_EXIT_INSN(),
14171 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
14172 		},
14173 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14174 		{ },
14175 		{ { 0, 0 } },
14176 	},
14177 	{
14178 		"Short relative jump: offset=1",
14179 		.u.insns_int = {
14180 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14181 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 1),
14182 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14183 			BPF_EXIT_INSN(),
14184 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
14185 		},
14186 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14187 		{ },
14188 		{ { 0, 0 } },
14189 	},
14190 	{
14191 		"Short relative jump: offset=2",
14192 		.u.insns_int = {
14193 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14194 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 2),
14195 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14196 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14197 			BPF_EXIT_INSN(),
14198 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
14199 		},
14200 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14201 		{ },
14202 		{ { 0, 0 } },
14203 	},
14204 	{
14205 		"Short relative jump: offset=3",
14206 		.u.insns_int = {
14207 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14208 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 3),
14209 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14210 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14211 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14212 			BPF_EXIT_INSN(),
14213 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
14214 		},
14215 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14216 		{ },
14217 		{ { 0, 0 } },
14218 	},
14219 	{
14220 		"Short relative jump: offset=4",
14221 		.u.insns_int = {
14222 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
14223 			BPF_JMP_IMM(BPF_JEQ, R0, 0, 4),
14224 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14225 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14226 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14227 			BPF_ALU32_IMM(BPF_ADD, R0, 1),
14228 			BPF_EXIT_INSN(),
14229 			BPF_ALU32_IMM(BPF_MOV, R0, -1),
14230 		},
14231 		INTERNAL | FLAG_NO_DATA | FLAG_VERIFIER_ZEXT,
14232 		{ },
14233 		{ { 0, 0 } },
14234 	},
14235 	/* Conditional branch conversions */
14236 	{
14237 		"Long conditional jump: taken at runtime",
14238 		{ },
14239 		INTERNAL | FLAG_NO_DATA,
14240 		{ },
14241 		{ { 0, 1 } },
14242 		.fill_helper = bpf_fill_max_jmp_taken,
14243 	},
14244 	{
14245 		"Long conditional jump: not taken at runtime",
14246 		{ },
14247 		INTERNAL | FLAG_NO_DATA,
14248 		{ },
14249 		{ { 0, 2 } },
14250 		.fill_helper = bpf_fill_max_jmp_not_taken,
14251 	},
14252 	{
14253 		"Long conditional jump: always taken, known at JIT time",
14254 		{ },
14255 		INTERNAL | FLAG_NO_DATA,
14256 		{ },
14257 		{ { 0, 1 } },
14258 		.fill_helper = bpf_fill_max_jmp_always_taken,
14259 	},
14260 	{
14261 		"Long conditional jump: never taken, known at JIT time",
14262 		{ },
14263 		INTERNAL | FLAG_NO_DATA,
14264 		{ },
14265 		{ { 0, 2 } },
14266 		.fill_helper = bpf_fill_max_jmp_never_taken,
14267 	},
14268 	/* Staggered jump sequences, immediate */
14269 	{
14270 		"Staggered jumps: JMP_JA",
14271 		{ },
14272 		INTERNAL | FLAG_NO_DATA,
14273 		{ },
14274 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14275 		.fill_helper = bpf_fill_staggered_ja,
14276 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14277 	},
14278 	{
14279 		"Staggered jumps: JMP_JEQ_K",
14280 		{ },
14281 		INTERNAL | FLAG_NO_DATA,
14282 		{ },
14283 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14284 		.fill_helper = bpf_fill_staggered_jeq_imm,
14285 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14286 	},
14287 	{
14288 		"Staggered jumps: JMP_JNE_K",
14289 		{ },
14290 		INTERNAL | FLAG_NO_DATA,
14291 		{ },
14292 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14293 		.fill_helper = bpf_fill_staggered_jne_imm,
14294 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14295 	},
14296 	{
14297 		"Staggered jumps: JMP_JSET_K",
14298 		{ },
14299 		INTERNAL | FLAG_NO_DATA,
14300 		{ },
14301 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14302 		.fill_helper = bpf_fill_staggered_jset_imm,
14303 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14304 	},
14305 	{
14306 		"Staggered jumps: JMP_JGT_K",
14307 		{ },
14308 		INTERNAL | FLAG_NO_DATA,
14309 		{ },
14310 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14311 		.fill_helper = bpf_fill_staggered_jgt_imm,
14312 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14313 	},
14314 	{
14315 		"Staggered jumps: JMP_JGE_K",
14316 		{ },
14317 		INTERNAL | FLAG_NO_DATA,
14318 		{ },
14319 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14320 		.fill_helper = bpf_fill_staggered_jge_imm,
14321 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14322 	},
14323 	{
14324 		"Staggered jumps: JMP_JLT_K",
14325 		{ },
14326 		INTERNAL | FLAG_NO_DATA,
14327 		{ },
14328 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14329 		.fill_helper = bpf_fill_staggered_jlt_imm,
14330 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14331 	},
14332 	{
14333 		"Staggered jumps: JMP_JLE_K",
14334 		{ },
14335 		INTERNAL | FLAG_NO_DATA,
14336 		{ },
14337 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14338 		.fill_helper = bpf_fill_staggered_jle_imm,
14339 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14340 	},
14341 	{
14342 		"Staggered jumps: JMP_JSGT_K",
14343 		{ },
14344 		INTERNAL | FLAG_NO_DATA,
14345 		{ },
14346 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14347 		.fill_helper = bpf_fill_staggered_jsgt_imm,
14348 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14349 	},
14350 	{
14351 		"Staggered jumps: JMP_JSGE_K",
14352 		{ },
14353 		INTERNAL | FLAG_NO_DATA,
14354 		{ },
14355 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14356 		.fill_helper = bpf_fill_staggered_jsge_imm,
14357 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14358 	},
14359 	{
14360 		"Staggered jumps: JMP_JSLT_K",
14361 		{ },
14362 		INTERNAL | FLAG_NO_DATA,
14363 		{ },
14364 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14365 		.fill_helper = bpf_fill_staggered_jslt_imm,
14366 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14367 	},
14368 	{
14369 		"Staggered jumps: JMP_JSLE_K",
14370 		{ },
14371 		INTERNAL | FLAG_NO_DATA,
14372 		{ },
14373 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14374 		.fill_helper = bpf_fill_staggered_jsle_imm,
14375 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14376 	},
14377 	/* Staggered jump sequences, register */
14378 	{
14379 		"Staggered jumps: JMP_JEQ_X",
14380 		{ },
14381 		INTERNAL | FLAG_NO_DATA,
14382 		{ },
14383 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14384 		.fill_helper = bpf_fill_staggered_jeq_reg,
14385 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14386 	},
14387 	{
14388 		"Staggered jumps: JMP_JNE_X",
14389 		{ },
14390 		INTERNAL | FLAG_NO_DATA,
14391 		{ },
14392 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14393 		.fill_helper = bpf_fill_staggered_jne_reg,
14394 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14395 	},
14396 	{
14397 		"Staggered jumps: JMP_JSET_X",
14398 		{ },
14399 		INTERNAL | FLAG_NO_DATA,
14400 		{ },
14401 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14402 		.fill_helper = bpf_fill_staggered_jset_reg,
14403 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14404 	},
14405 	{
14406 		"Staggered jumps: JMP_JGT_X",
14407 		{ },
14408 		INTERNAL | FLAG_NO_DATA,
14409 		{ },
14410 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14411 		.fill_helper = bpf_fill_staggered_jgt_reg,
14412 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14413 	},
14414 	{
14415 		"Staggered jumps: JMP_JGE_X",
14416 		{ },
14417 		INTERNAL | FLAG_NO_DATA,
14418 		{ },
14419 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14420 		.fill_helper = bpf_fill_staggered_jge_reg,
14421 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14422 	},
14423 	{
14424 		"Staggered jumps: JMP_JLT_X",
14425 		{ },
14426 		INTERNAL | FLAG_NO_DATA,
14427 		{ },
14428 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14429 		.fill_helper = bpf_fill_staggered_jlt_reg,
14430 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14431 	},
14432 	{
14433 		"Staggered jumps: JMP_JLE_X",
14434 		{ },
14435 		INTERNAL | FLAG_NO_DATA,
14436 		{ },
14437 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14438 		.fill_helper = bpf_fill_staggered_jle_reg,
14439 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14440 	},
14441 	{
14442 		"Staggered jumps: JMP_JSGT_X",
14443 		{ },
14444 		INTERNAL | FLAG_NO_DATA,
14445 		{ },
14446 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14447 		.fill_helper = bpf_fill_staggered_jsgt_reg,
14448 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14449 	},
14450 	{
14451 		"Staggered jumps: JMP_JSGE_X",
14452 		{ },
14453 		INTERNAL | FLAG_NO_DATA,
14454 		{ },
14455 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14456 		.fill_helper = bpf_fill_staggered_jsge_reg,
14457 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14458 	},
14459 	{
14460 		"Staggered jumps: JMP_JSLT_X",
14461 		{ },
14462 		INTERNAL | FLAG_NO_DATA,
14463 		{ },
14464 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14465 		.fill_helper = bpf_fill_staggered_jslt_reg,
14466 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14467 	},
14468 	{
14469 		"Staggered jumps: JMP_JSLE_X",
14470 		{ },
14471 		INTERNAL | FLAG_NO_DATA,
14472 		{ },
14473 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14474 		.fill_helper = bpf_fill_staggered_jsle_reg,
14475 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14476 	},
14477 	/* Staggered jump sequences, JMP32 immediate */
14478 	{
14479 		"Staggered jumps: JMP32_JEQ_K",
14480 		{ },
14481 		INTERNAL | FLAG_NO_DATA,
14482 		{ },
14483 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14484 		.fill_helper = bpf_fill_staggered_jeq32_imm,
14485 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14486 	},
14487 	{
14488 		"Staggered jumps: JMP32_JNE_K",
14489 		{ },
14490 		INTERNAL | FLAG_NO_DATA,
14491 		{ },
14492 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14493 		.fill_helper = bpf_fill_staggered_jne32_imm,
14494 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14495 	},
14496 	{
14497 		"Staggered jumps: JMP32_JSET_K",
14498 		{ },
14499 		INTERNAL | FLAG_NO_DATA,
14500 		{ },
14501 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14502 		.fill_helper = bpf_fill_staggered_jset32_imm,
14503 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14504 	},
14505 	{
14506 		"Staggered jumps: JMP32_JGT_K",
14507 		{ },
14508 		INTERNAL | FLAG_NO_DATA,
14509 		{ },
14510 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14511 		.fill_helper = bpf_fill_staggered_jgt32_imm,
14512 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14513 	},
14514 	{
14515 		"Staggered jumps: JMP32_JGE_K",
14516 		{ },
14517 		INTERNAL | FLAG_NO_DATA,
14518 		{ },
14519 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14520 		.fill_helper = bpf_fill_staggered_jge32_imm,
14521 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14522 	},
14523 	{
14524 		"Staggered jumps: JMP32_JLT_K",
14525 		{ },
14526 		INTERNAL | FLAG_NO_DATA,
14527 		{ },
14528 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14529 		.fill_helper = bpf_fill_staggered_jlt32_imm,
14530 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14531 	},
14532 	{
14533 		"Staggered jumps: JMP32_JLE_K",
14534 		{ },
14535 		INTERNAL | FLAG_NO_DATA,
14536 		{ },
14537 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14538 		.fill_helper = bpf_fill_staggered_jle32_imm,
14539 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14540 	},
14541 	{
14542 		"Staggered jumps: JMP32_JSGT_K",
14543 		{ },
14544 		INTERNAL | FLAG_NO_DATA,
14545 		{ },
14546 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14547 		.fill_helper = bpf_fill_staggered_jsgt32_imm,
14548 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14549 	},
14550 	{
14551 		"Staggered jumps: JMP32_JSGE_K",
14552 		{ },
14553 		INTERNAL | FLAG_NO_DATA,
14554 		{ },
14555 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14556 		.fill_helper = bpf_fill_staggered_jsge32_imm,
14557 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14558 	},
14559 	{
14560 		"Staggered jumps: JMP32_JSLT_K",
14561 		{ },
14562 		INTERNAL | FLAG_NO_DATA,
14563 		{ },
14564 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14565 		.fill_helper = bpf_fill_staggered_jslt32_imm,
14566 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14567 	},
14568 	{
14569 		"Staggered jumps: JMP32_JSLE_K",
14570 		{ },
14571 		INTERNAL | FLAG_NO_DATA,
14572 		{ },
14573 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14574 		.fill_helper = bpf_fill_staggered_jsle32_imm,
14575 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14576 	},
14577 	/* Staggered jump sequences, JMP32 register */
14578 	{
14579 		"Staggered jumps: JMP32_JEQ_X",
14580 		{ },
14581 		INTERNAL | FLAG_NO_DATA,
14582 		{ },
14583 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14584 		.fill_helper = bpf_fill_staggered_jeq32_reg,
14585 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14586 	},
14587 	{
14588 		"Staggered jumps: JMP32_JNE_X",
14589 		{ },
14590 		INTERNAL | FLAG_NO_DATA,
14591 		{ },
14592 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14593 		.fill_helper = bpf_fill_staggered_jne32_reg,
14594 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14595 	},
14596 	{
14597 		"Staggered jumps: JMP32_JSET_X",
14598 		{ },
14599 		INTERNAL | FLAG_NO_DATA,
14600 		{ },
14601 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14602 		.fill_helper = bpf_fill_staggered_jset32_reg,
14603 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14604 	},
14605 	{
14606 		"Staggered jumps: JMP32_JGT_X",
14607 		{ },
14608 		INTERNAL | FLAG_NO_DATA,
14609 		{ },
14610 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14611 		.fill_helper = bpf_fill_staggered_jgt32_reg,
14612 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14613 	},
14614 	{
14615 		"Staggered jumps: JMP32_JGE_X",
14616 		{ },
14617 		INTERNAL | FLAG_NO_DATA,
14618 		{ },
14619 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14620 		.fill_helper = bpf_fill_staggered_jge32_reg,
14621 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14622 	},
14623 	{
14624 		"Staggered jumps: JMP32_JLT_X",
14625 		{ },
14626 		INTERNAL | FLAG_NO_DATA,
14627 		{ },
14628 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14629 		.fill_helper = bpf_fill_staggered_jlt32_reg,
14630 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14631 	},
14632 	{
14633 		"Staggered jumps: JMP32_JLE_X",
14634 		{ },
14635 		INTERNAL | FLAG_NO_DATA,
14636 		{ },
14637 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14638 		.fill_helper = bpf_fill_staggered_jle32_reg,
14639 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14640 	},
14641 	{
14642 		"Staggered jumps: JMP32_JSGT_X",
14643 		{ },
14644 		INTERNAL | FLAG_NO_DATA,
14645 		{ },
14646 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14647 		.fill_helper = bpf_fill_staggered_jsgt32_reg,
14648 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14649 	},
14650 	{
14651 		"Staggered jumps: JMP32_JSGE_X",
14652 		{ },
14653 		INTERNAL | FLAG_NO_DATA,
14654 		{ },
14655 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14656 		.fill_helper = bpf_fill_staggered_jsge32_reg,
14657 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14658 	},
14659 	{
14660 		"Staggered jumps: JMP32_JSLT_X",
14661 		{ },
14662 		INTERNAL | FLAG_NO_DATA,
14663 		{ },
14664 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14665 		.fill_helper = bpf_fill_staggered_jslt32_reg,
14666 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14667 	},
14668 	{
14669 		"Staggered jumps: JMP32_JSLE_X",
14670 		{ },
14671 		INTERNAL | FLAG_NO_DATA,
14672 		{ },
14673 		{ { 0, MAX_STAGGERED_JMP_SIZE + 1 } },
14674 		.fill_helper = bpf_fill_staggered_jsle32_reg,
14675 		.nr_testruns = NR_STAGGERED_JMP_RUNS,
14676 	},
14677 };
14678 
14679 static struct net_device dev;
14680 
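/*
 * Build a dummy skb around the test data and fill in its metadata
 * (pkt_type, mark, hash, queue_mapping, VLAN and device fields) with
 * the SKB_* constants, so that ancillary loads in the test programs
 * observe known values.
 */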
14681 static struct sk_buff *populate_skb(char *buf, int size)
14682 {
14683 	struct sk_buff *skb;
14684 
14685 	if (size >= MAX_DATA)
14686 		return NULL;
14687 
14688 	skb = alloc_skb(MAX_DATA, GFP_KERNEL);
14689 	if (!skb)
14690 		return NULL;
14691 
14692 	__skb_put_data(skb, buf, size);
14693 
14694 	/* Initialize a fake skb with test pattern. */
14695 	skb_reset_mac_header(skb);
14696 	skb->protocol = htons(ETH_P_IP);
14697 	skb->pkt_type = SKB_TYPE;
14698 	skb->mark = SKB_MARK;
14699 	skb->hash = SKB_HASH;
14700 	skb->queue_mapping = SKB_QUEUE_MAP;
14701 	skb->vlan_tci = SKB_VLAN_TCI;
14702 	skb->vlan_proto = htons(ETH_P_IP);
14703 	dev_net_set(&dev, &init_net);
14704 	skb->dev = &dev;
14705 	skb->dev->ifindex = SKB_DEV_IFINDEX;
14706 	skb->dev->type = SKB_DEV_TYPE;
14707 	skb_set_network_header(skb, min(size, ETH_HLEN));
14708 
14709 	return skb;
14710 }
14711 
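/*
 * Per-subtest data generation: FLAG_NO_DATA tests run with a NULL
 * context, FLAG_LARGE_MEM tests get a plain kmalloc() buffer of
 * data_size bytes, and everything else gets an skb built from
 * test->data, plus one page fragment when FLAG_SKB_FRAG is set.
 */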
14712 static void *generate_test_data(struct bpf_test *test, int sub)
14713 {
14714 	struct sk_buff *skb;
14715 	struct page *page;
14716 
14717 	if (test->aux & FLAG_NO_DATA)
14718 		return NULL;
14719 
14720 	if (test->aux & FLAG_LARGE_MEM)
14721 		return kmalloc(test->test[sub].data_size, GFP_KERNEL);
14722 
14723 	/* Test case expects an skb, so populate one. Various
14724 	 * subtests generate skbs of different sizes based on
14725 	 * the same data.
14726 	 */
14727 	skb = populate_skb(test->data, test->test[sub].data_size);
14728 	if (!skb)
14729 		return NULL;
14730 
14731 	if (test->aux & FLAG_SKB_FRAG) {
14732 		/*
14733 		 * when the test requires a fragmented skb, add a
14734 		 * single fragment to the skb, filled with
14735 		 * test->frag_data.
14736 		 */
14737 		page = alloc_page(GFP_KERNEL);
14738 		if (!page)
14739 			goto err_kfree_skb;
14740 
14741 		memcpy(page_address(page), test->frag_data, MAX_DATA);
14742 		skb_add_rx_frag(skb, 0, page, 0, MAX_DATA, MAX_DATA);
14743 	}
14744 
14745 	return skb;
14746 err_kfree_skb:
14747 	kfree_skb(skb);
14748 	return NULL;
14749 }
14750 
14751 static void release_test_data(const struct bpf_test *test, void *data)
14752 {
14753 	if (test->aux & FLAG_NO_DATA)
14754 		return;
14755 
14756 	if (test->aux & FLAG_LARGE_MEM)
14757 		kfree(data);
14758 	else
14759 		kfree_skb(data);
14760 }
14761 
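/*
 * For dynamically filled tests the program length comes straight from
 * the fill handler; statically initialized classic programs are trimmed
 * by dropping trailing all-zero sock_filter slots from the fixed-size
 * insns array.
 */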
14762 static int filter_length(int which)
14763 {
14764 	struct sock_filter *fp;
14765 	int len;
14766 
14767 	if (tests[which].fill_helper)
14768 		return tests[which].u.ptr.len;
14769 
14770 	fp = tests[which].u.insns;
14771 	for (len = MAX_INSNS - 1; len > 0; --len)
14772 		if (fp[len].code != 0 || fp[len].k != 0)
14773 			break;
14774 
14775 	return len + 1;
14776 }
14777 
14778 static void *filter_pointer(int which)
14779 {
14780 	if (tests[which].fill_helper)
14781 		return tests[which].u.ptr.insns;
14782 	else
14783 		return tests[which].u.insns;
14784 }
14785 
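/*
 * CLASSIC tests are attached via bpf_prog_create(), which runs the
 * classic BPF checker (hence the FLAG_EXPECTED_FAIL handling below).
 * INTERNAL tests are copied into a raw bpf_prog and handed directly to
 * bpf_prog_select_runtime(); the eBPF verifier is not involved here.
 */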
14786 static struct bpf_prog *generate_filter(int which, int *err)
14787 {
14788 	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14789 	unsigned int flen = filter_length(which);
14790 	void *fptr = filter_pointer(which);
14791 	struct sock_fprog_kern fprog;
14792 	struct bpf_prog *fp;
14793 
14794 	switch (test_type) {
14795 	case CLASSIC:
14796 		fprog.filter = fptr;
14797 		fprog.len = flen;
14798 
14799 		*err = bpf_prog_create(&fp, &fprog);
14800 		if (tests[which].aux & FLAG_EXPECTED_FAIL) {
14801 			if (*err == tests[which].expected_errcode) {
14802 				pr_cont("PASS\n");
14803 				/* Verifier rejected filter as expected. */
14804 				*err = 0;
14805 				return NULL;
14806 			} else {
14807 				pr_cont("UNEXPECTED_PASS\n");
14808 				/* The verifier failed to reject a filter that
14809 				 * was expected to fail; flag an error and return.
14810 				 */
14811 				*err = -EINVAL;
14812 				return NULL;
14813 			}
14814 		}
14815 		if (*err) {
14816 			pr_cont("FAIL to prog_create err=%d len=%d\n",
14817 				*err, fprog.len);
14818 			return NULL;
14819 		}
14820 		break;
14821 
14822 	case INTERNAL:
14823 		fp = bpf_prog_alloc(bpf_prog_size(flen), 0);
14824 		if (fp == NULL) {
14825 			pr_cont("UNEXPECTED_FAIL no memory left\n");
14826 			*err = -ENOMEM;
14827 			return NULL;
14828 		}
14829 
14830 		fp->len = flen;
14831 		/* Type doesn't really matter here as long as it's not unspec. */
14832 		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
14833 		memcpy(fp->insnsi, fptr, fp->len * sizeof(struct bpf_insn));
14834 		fp->aux->stack_depth = tests[which].stack_depth;
14835 		fp->aux->verifier_zext = !!(tests[which].aux &
14836 					    FLAG_VERIFIER_ZEXT);
14837 
14838 		/* We cannot error here as we don't need type compatibility
14839 		 * checks.
14840 		 */
14841 		fp = bpf_prog_select_runtime(fp, err);
14842 		if (*err) {
14843 			pr_cont("FAIL to select_runtime err=%d\n", *err);
14844 			return NULL;
14845 		}
14846 		break;
14847 	}
14848 
14849 	*err = 0;
14850 	return fp;
14851 }
14852 
14853 static void release_filter(struct bpf_prog *fp, int which)
14854 {
14855 	__u8 test_type = tests[which].aux & TEST_TYPE_MASK;
14856 
14857 	switch (test_type) {
14858 	case CLASSIC:
14859 		bpf_prog_destroy(fp);
14860 		break;
14861 	case INTERNAL:
14862 		bpf_prog_free(fp);
14863 		break;
14864 	}
14865 }
14866 
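/*
 * Time one test program: run it 'runs' times back to back with
 * migration disabled, report the mean duration per run in nanoseconds
 * via *duration, and hand back the last return value for checking.
 */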
14867 static int __run_one(const struct bpf_prog *fp, const void *data,
14868 		     int runs, u64 *duration)
14869 {
14870 	u64 start, finish;
14871 	int ret = 0, i;
14872 
14873 	migrate_disable();
14874 	start = ktime_get_ns();
14875 
14876 	for (i = 0; i < runs; i++)
14877 		ret = bpf_prog_run(fp, data);
14878 
14879 	finish = ktime_get_ns();
14880 	migrate_enable();
14881 
14882 	*duration = finish - start;
14883 	do_div(*duration, runs);
14884 
14885 	return ret;
14886 }
14887 
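/* Run all subtests of one test case; a custom nr_testruns is clamped to MAX_TESTRUNS. */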
14888 static int run_one(const struct bpf_prog *fp, struct bpf_test *test)
14889 {
14890 	int err_cnt = 0, i, runs = MAX_TESTRUNS;
14891 
14892 	if (test->nr_testruns)
14893 		runs = min(test->nr_testruns, MAX_TESTRUNS);
14894 
14895 	for (i = 0; i < MAX_SUBTESTS; i++) {
14896 		void *data;
14897 		u64 duration;
14898 		u32 ret;
14899 
14900 		/*
14901 		 * NOTE: Several sub-tests may be present, in which case
14902 		 * a zero {data_size, result} tuple indicates the end of
14903 		 * the sub-test array. The first test is always run,
14904 		 * even if both data_size and result happen to be zero.
14905 		 */
14906 		if (i > 0 &&
14907 		    test->test[i].data_size == 0 &&
14908 		    test->test[i].result == 0)
14909 			break;
14910 
14911 		data = generate_test_data(test, i);
14912 		if (!data && !(test->aux & FLAG_NO_DATA)) {
14913 			pr_cont("data generation failed ");
14914 			err_cnt++;
14915 			break;
14916 		}
14917 		ret = __run_one(fp, data, runs, &duration);
14918 		release_test_data(test, data);
14919 
14920 		if (ret == test->test[i].result) {
14921 			pr_cont("%llu ", duration);
14922 		} else {
14923 			s32 res = test->test[i].result;
14924 
14925 			pr_cont("ret %d != %d (%#x != %#x)",
14926 				ret, res, ret, res);
14927 			err_cnt++;
14928 		}
14929 	}
14930 
14931 	return err_cnt;
14932 }
14933 
14934 static char test_name[64];
14935 module_param_string(test_name, test_name, sizeof(test_name), 0);
14936 
14937 static int test_id = -1;
14938 module_param(test_id, int, 0);
14939 
14940 static int test_range[2] = { 0, INT_MAX };
14941 module_param_array(test_range, int, NULL, 0);
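/*
 * Module parameters narrow down which tests run: test_range selects an
 * inclusive index range (see exclude_test() below), while test_name and
 * test_id are presumably matched against a single test elsewhere in the
 * module. Typical usage (assumed):
 *   modprobe test_bpf test_range=0,15
 */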
14942 
14943 static bool exclude_test(int test_id)
14944 {
14945 	return test_id < test_range[0] || test_id > test_range[1];
14946 }
14947 
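/*
 * Build a GSO skb for skb_segment() testing: two small skbs allocated
 * with dev_alloc_skb(), each carrying a 64-byte rx page fragment, with
 * skb[1] linked onto skb[0]'s frag_list and SKB_GSO_TCPV4 | SKB_GSO_DODGY
 * metadata set on the head skb.
 */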
14948 static __init struct sk_buff *build_test_skb(void)
14949 {
14950 	u32 headroom = NET_SKB_PAD + NET_IP_ALIGN + ETH_HLEN;
14951 	struct sk_buff *skb[2];
14952 	struct page *page[2];
14953 	int i, data_size = 8;
14954 
14955 	for (i = 0; i < 2; i++) {
14956 		page[i] = alloc_page(GFP_KERNEL);
14957 		if (!page[i]) {
14958 			if (i == 0)
14959 				goto err_page0;
14960 			else
14961 				goto err_page1;
14962 		}
14963 
14964 		/* this will set skb[i]->head_frag */
14965 		skb[i] = dev_alloc_skb(headroom + data_size);
14966 		if (!skb[i]) {
14967 			if (i == 0)
14968 				goto err_skb0;
14969 			else
14970 				goto err_skb1;
14971 		}
14972 
14973 		skb_reserve(skb[i], headroom);
14974 		skb_put(skb[i], data_size);
14975 		skb[i]->protocol = htons(ETH_P_IP);
14976 		skb_reset_network_header(skb[i]);
14977 		skb_set_mac_header(skb[i], -ETH_HLEN);
14978 
14979 		skb_add_rx_frag(skb[i], 0, page[i], 0, 64, 64);
14980 		/* skb_headlen(skb[i]): 8, skb[i]->head_frag = 1 */
14981 	}
14982 
14983 	/* setup shinfo */
14984 	skb_shinfo(skb[0])->gso_size = 1448;
14985 	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV4;
14986 	skb_shinfo(skb[0])->gso_type |= SKB_GSO_DODGY;
14987 	skb_shinfo(skb[0])->gso_segs = 0;
14988 	skb_shinfo(skb[0])->frag_list = skb[1];
14989 	skb_shinfo(skb[0])->hwtstamps.hwtstamp = 1000;
14990 
14991 	/* adjust skb[0]'s len */
14992 	skb[0]->len += skb[1]->len;
14993 	skb[0]->data_len += skb[1]->data_len;
14994 	skb[0]->truesize += skb[1]->truesize;
14995 
14996 	return skb[0];
14997 
14998 err_skb1:
14999 	__free_page(page[1]);
15000 err_page1:
15001 	kfree_skb(skb[0]);
15002 err_skb0:
15003 	__free_page(page[0]);
15004 err_page0:
15005 	return NULL;
15006 }
15007 
15008 static __init struct sk_buff *build_test_skb_linear_no_head_frag(void)
15009 {
15010 	unsigned int alloc_size = 2000;
15011 	unsigned int headroom = 102, doffset = 72, data_size = 1308;
15012 	struct sk_buff *skb[2];
15013 	int i;
15014 
15015 	/* skbs linked in a frag_list, both with linear data, with head_frag=0
15016 	 * (data allocated by kmalloc), both have tcp data of 1308 bytes
15017 	 * (total payload is 2616 bytes).
15018 	 * Data offset is 72 bytes (40 ipv6 hdr, 32 tcp hdr). Some headroom.
15019 	 */
15020 	for (i = 0; i < 2; i++) {
15021 		skb[i] = alloc_skb(alloc_size, GFP_KERNEL);
15022 		if (!skb[i]) {
15023 			if (i == 0)
15024 				goto err_skb0;
15025 			else
15026 				goto err_skb1;
15027 		}
15028 
15029 		skb[i]->protocol = htons(ETH_P_IPV6);
15030 		skb_reserve(skb[i], headroom);
15031 		skb_put(skb[i], doffset + data_size);
15032 		skb_reset_network_header(skb[i]);
15033 		if (i == 0)
15034 			skb_reset_mac_header(skb[i]);
15035 		else
15036 			skb_set_mac_header(skb[i], -ETH_HLEN);
15037 		__skb_pull(skb[i], doffset);
15038 	}
15039 
15040 	/* setup shinfo.
15041 	 * mimic bpf_skb_proto_4_to_6, which resets gso_segs and assigns a
15042 	 * reduced gso_size.
15043 	 */
15044 	skb_shinfo(skb[0])->gso_size = 1288;
15045 	skb_shinfo(skb[0])->gso_type = SKB_GSO_TCPV6 | SKB_GSO_DODGY;
15046 	skb_shinfo(skb[0])->gso_segs = 0;
15047 	skb_shinfo(skb[0])->frag_list = skb[1];
15048 
15049 	/* adjust skb[0]'s len */
15050 	skb[0]->len += skb[1]->len;
15051 	skb[0]->data_len += skb[1]->len;
15052 	skb[0]->truesize += skb[1]->truesize;
15053 
15054 	return skb[0];
15055 
15056 err_skb1:
15057 	kfree_skb(skb[0]);
15058 err_skb0:
15059 	return NULL;
15060 }
15061 
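/* One skb_segment() test case: an skb builder paired with the netdev features to segment against. */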
15062 struct skb_segment_test {
15063 	const char *descr;
15064 	struct sk_buff *(*build_skb)(void);
15065 	netdev_features_t features;
15066 };
15067 
15068 static struct skb_segment_test skb_segment_tests[] __initconst = {
15069 	{
15070 		.descr = "gso_with_rx_frags",
15071 		.build_skb = build_test_skb,
15072 		.features = NETIF_F_SG | NETIF_F_GSO_PARTIAL | NETIF_F_IP_CSUM |
15073 			    NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM
15074 	},
15075 	{
15076 		.descr = "gso_linear_no_head_frag",
15077 		.build_skb = build_test_skb_linear_no_head_frag,
15078 		.features = NETIF_F_SG | NETIF_F_FRAGLIST |
15079 			    NETIF_F_HW_VLAN_CTAG_TX | NETIF_F_GSO |
15080 			    NETIF_F_GRO | NETIF_F_IPV6_CSUM | NETIF_F_RXCSUM |
15081 			    NETIF_F_HW_VLAN_STAG_TX
15082 	}
15083 };
15084 
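/*
 * Segment the constructed skb with the given feature set; any non-error
 * return from skb_segment() counts as a pass, and the resulting segment
 * list is simply freed again.
 */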
15085 static __init int test_skb_segment_single(const struct skb_segment_test *test)
15086 {
15087 	struct sk_buff *skb, *segs;
15088 	int ret = -1;
15089 
15090 	skb = test->build_skb();
15091 	if (!skb) {
15092 		pr_info("%s: failed to build test skb\n", __func__);
15093 		goto done;
15094 	}
15095 
15096 	segs = skb_segment(skb, test->features);
15097 	if (!IS_ERR(segs)) {
15098 		kfree_skb_list(segs);
15099 		ret = 0;
15100 	}
15101 	kfree_skb(skb);
15102 done:
15103 	return ret;
15104 }
15105 
15106 static __init int test_skb_segment(void)
15107 {
15108 	int i, err_cnt = 0, pass_cnt = 0;
15109 
15110 	for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15111 		const struct skb_segment_test *test = &skb_segment_tests[i];
15112 
15113 		cond_resched();
15114 		if (exclude_test(i))
15115 			continue;
15116 
15117 		pr_info("#%d %s ", i, test->descr);
15118 
15119 		if (test_skb_segment_single(test)) {
15120 			pr_cont("FAIL\n");
15121 			err_cnt++;
15122 		} else {
15123 			pr_cont("PASS\n");
15124 			pass_cnt++;
15125 		}
15126 	}
15127 
15128 	pr_info("%s: Summary: %d PASSED, %d FAILED\n", __func__,
15129 		pass_cnt, err_cnt);
15130 	return err_cnt ? -EINVAL : 0;
15131 }
15132 
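/*
 * Main BPF test loop: fill dynamically generated programs, build (and
 * possibly JIT) each filter, run it against its subtests, release it,
 * and print a PASS/FAIL summary including how many programs were JITed.
 */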
15133 static __init int test_bpf(void)
15134 {
15135 	int i, err_cnt = 0, pass_cnt = 0;
15136 	int jit_cnt = 0, run_cnt = 0;
15137 
15138 	for (i = 0; i < ARRAY_SIZE(tests); i++) {
15139 		struct bpf_prog *fp;
15140 		int err;
15141 
15142 		cond_resched();
15143 		if (exclude_test(i))
15144 			continue;
15145 
15146 		pr_info("#%d %s ", i, tests[i].descr);
15147 
15148 		if (tests[i].fill_helper &&
15149 		    tests[i].fill_helper(&tests[i]) < 0) {
15150 			pr_cont("FAIL to prog_fill\n");
15151 			continue;
15152 		}
15153 
15154 		fp = generate_filter(i, &err);
15155 
15156 		if (tests[i].fill_helper) {
15157 			kfree(tests[i].u.ptr.insns);
15158 			tests[i].u.ptr.insns = NULL;
15159 		}
15160 
15161 		if (fp == NULL) {
15162 			if (err == 0) {
15163 				pass_cnt++;
15164 				continue;
15165 			}
15166 			err_cnt++;
15167 			continue;
15168 		}
15169 
15170 		pr_cont("jited:%u ", fp->jited);
15171 
15172 		run_cnt++;
15173 		if (fp->jited)
15174 			jit_cnt++;
15175 
15176 		err = run_one(fp, &tests[i]);
15177 		release_filter(fp, i);
15178 
15179 		if (err) {
15180 			pr_cont("FAIL (%d times)\n", err);
15181 			err_cnt++;
15182 		} else {
15183 			pr_cont("PASS\n");
15184 			pass_cnt++;
15185 		}
15186 	}
15187 
15188 	pr_info("Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15189 		pass_cnt, err_cnt, jit_cnt, run_cnt);
15190 
15191 	return err_cnt ? -EINVAL : 0;
15192 }
15193 
15194 struct tail_call_test {
15195 	const char *descr;
15196 	struct bpf_insn insns[MAX_INSNS];
15197 	int flags;
15198 	int result;
15199 	int stack_depth;
15200 	bool has_tail_call;
15201 };
15202 
15203 /* Flags that can be passed to tail call test cases */
15204 #define FLAG_NEED_STATE		BIT(0)
15205 #define FLAG_RESULT_IN_STATE	BIT(1)
15206 
15207 /*
15208  * Magic marker used in test snippets for tail calls below.
15209  * A BPF_LD_IMM64 into R2 or a BPF_MOV into R3 carrying this immediate
15210  * value is patched with the proper values by the test runner.
15211  */
15212 #define TAIL_CALL_MARKER 0x7a11ca11
15213 
15214 /* Special offset to indicate a NULL call target */
15215 #define TAIL_CALL_NULL 0x7fff
15216 
15217 /* Special offset to indicate an out-of-range index */
15218 #define TAIL_CALL_INVALID 0x7ffe
15219 
15220 #define TAIL_CALL(offset)			       \
15221 	BPF_LD_IMM64(R2, TAIL_CALL_MARKER),	       \
15222 	BPF_RAW_INSN(BPF_ALU | BPF_MOV | BPF_K, R3, 0, \
15223 		     offset, TAIL_CALL_MARKER),	       \
15224 	BPF_JMP_IMM(BPF_TAIL_CALL, 0, 0, 0)
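/*
 * TAIL_CALL(offset) expands to three instructions: a BPF_LD_IMM64 loading
 * TAIL_CALL_MARKER into R2 (later patched to point at the bpf_array of test
 * programs), a 32-bit MOV of 'offset' into R3 carrying the marker as its
 * immediate (later patched to the absolute call index), and the tail call.
 */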
15225 
15226 /*
15227  * A test function to be called from a BPF program, clobbering a lot of
15228  * CPU registers in the process. A JITed BPF program calling this function
15229  * must save and restore any caller-saved registers it uses for internal
15230  * state, for example the current tail call count.
15231  */
15232 BPF_CALL_1(bpf_test_func, u64, arg)
15233 {
15234 	char buf[64];
15235 	long a = 0;
15236 	long b = 1;
15237 	long c = 2;
15238 	long d = 3;
15239 	long e = 4;
15240 	long f = 5;
15241 	long g = 6;
15242 	long h = 7;
15243 
15244 	return snprintf(buf, sizeof(buf),
15245 			"%ld %lu %lx %ld %lu %lx %ld %lu %x",
15246 			a, b, c, d, e, f, g, h, (int)arg);
15247 }
15248 #define BPF_FUNC_test_func __BPF_FUNC_MAX_ID
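/*
 * BPF_FUNC_test_func is a fake helper ID one past the last real helper;
 * the relocation loop in prepare_tail_call_tests() resolves it to
 * bpf_test_func() above.
 */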
15249 
15250 /*
15251  * Tail call tests. Each test case may call any other test in the table,
15252  * including itself, specified as a relative index offset from the calling
15253  * test. The index TAIL_CALL_NULL can be used to specify a NULL target
15254  * function to test the JIT error path. Similarly, the index TAIL_CALL_INVALID
15255  * results in a target index that is out of range.
15256  */
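/*
 * For example, with a NULL context R1 starts out as 0: "Tail call 4" adds 4,
 * chains through "Tail call 3" (+3) and "Tail call 2" (+2) into the leaf
 * (+1), so the expected result is 10.
 */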
15257 static struct tail_call_test tail_call_tests[] = {
15258 	{
15259 		"Tail call leaf",
15260 		.insns = {
15261 			BPF_ALU64_REG(BPF_MOV, R0, R1),
15262 			BPF_ALU64_IMM(BPF_ADD, R0, 1),
15263 			BPF_EXIT_INSN(),
15264 		},
15265 		.result = 1,
15266 	},
15267 	{
15268 		"Tail call 2",
15269 		.insns = {
15270 			BPF_ALU64_IMM(BPF_ADD, R1, 2),
15271 			TAIL_CALL(-1),
15272 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
15273 			BPF_EXIT_INSN(),
15274 		},
15275 		.result = 3,
15276 		.has_tail_call = true,
15277 	},
15278 	{
15279 		"Tail call 3",
15280 		.insns = {
15281 			BPF_ALU64_IMM(BPF_ADD, R1, 3),
15282 			TAIL_CALL(-1),
15283 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
15284 			BPF_EXIT_INSN(),
15285 		},
15286 		.result = 6,
15287 		.has_tail_call = true,
15288 	},
15289 	{
15290 		"Tail call 4",
15291 		.insns = {
15292 			BPF_ALU64_IMM(BPF_ADD, R1, 4),
15293 			TAIL_CALL(-1),
15294 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
15295 			BPF_EXIT_INSN(),
15296 		},
15297 		.result = 10,
15298 		.has_tail_call = true,
15299 	},
15300 	{
15301 		"Tail call load/store leaf",
15302 		.insns = {
15303 			BPF_ALU64_IMM(BPF_MOV, R1, 1),
15304 			BPF_ALU64_IMM(BPF_MOV, R2, 2),
15305 			BPF_ALU64_REG(BPF_MOV, R3, BPF_REG_FP),
15306 			BPF_STX_MEM(BPF_DW, R3, R1, -8),
15307 			BPF_STX_MEM(BPF_DW, R3, R2, -16),
15308 			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -8),
15309 			BPF_JMP_REG(BPF_JNE, R0, R1, 3),
15310 			BPF_LDX_MEM(BPF_DW, R0, BPF_REG_FP, -16),
15311 			BPF_JMP_REG(BPF_JNE, R0, R2, 1),
15312 			BPF_ALU64_IMM(BPF_MOV, R0, 0),
15313 			BPF_EXIT_INSN(),
15314 		},
15315 		.result = 0,
15316 		.stack_depth = 32,
15317 	},
15318 	{
15319 		"Tail call load/store",
15320 		.insns = {
15321 			BPF_ALU64_IMM(BPF_MOV, R0, 3),
15322 			BPF_STX_MEM(BPF_DW, BPF_REG_FP, R0, -8),
15323 			TAIL_CALL(-1),
15324 			BPF_ALU64_IMM(BPF_MOV, R0, -1),
15325 			BPF_EXIT_INSN(),
15326 		},
15327 		.result = 0,
15328 		.stack_depth = 16,
15329 		.has_tail_call = true,
15330 	},
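	/*
	 * The following test tail-calls itself until the tail call limit is
	 * reached. The counter behind R1 is bumped on every entry, so a
	 * single run counts the initial entry plus MAX_TAIL_CALL_CNT tail
	 * calls, and MAX_TESTRUNS runs give the expected result below.
	 */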
15331 	{
15332 		"Tail call error path, max count reached",
15333 		.insns = {
15334 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
15335 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
15336 			BPF_STX_MEM(BPF_W, R1, R2, 0),
15337 			TAIL_CALL(0),
15338 			BPF_EXIT_INSN(),
15339 		},
15340 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15341 		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
15342 		.has_tail_call = true,
15343 	},
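	/*
	 * Same counting scheme as above, but with several helper calls made
	 * before each tail call: if the JIT failed to preserve the tail call
	 * count across those calls, the chain would not stop at the limit
	 * and the resulting count would differ.
	 */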
15344 	{
15345 		"Tail call count preserved across function calls",
15346 		.insns = {
15347 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
15348 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
15349 			BPF_STX_MEM(BPF_W, R1, R2, 0),
15350 			BPF_STX_MEM(BPF_DW, R10, R1, -8),
15351 			BPF_CALL_REL(BPF_FUNC_get_numa_node_id),
15352 			BPF_CALL_REL(BPF_FUNC_ktime_get_ns),
15353 			BPF_CALL_REL(BPF_FUNC_ktime_get_boot_ns),
15354 			BPF_CALL_REL(BPF_FUNC_ktime_get_coarse_ns),
15355 			BPF_CALL_REL(BPF_FUNC_jiffies64),
15356 			BPF_CALL_REL(BPF_FUNC_test_func),
15357 			BPF_LDX_MEM(BPF_DW, R1, R10, -8),
15358 			BPF_ALU32_REG(BPF_MOV, R0, R1),
15359 			TAIL_CALL(0),
15360 			BPF_EXIT_INSN(),
15361 		},
15362 		.stack_depth = 8,
15363 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15364 		.result = (MAX_TAIL_CALL_CNT + 1) * MAX_TESTRUNS,
15365 		.has_tail_call = true,
15366 	},
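	/*
	 * TAIL_CALL_NULL is relocated to the NULL slot at the end of the
	 * program array, so the tail call falls through to the BPF_EXIT and
	 * each run enters the program exactly once: result is MAX_TESTRUNS.
	 */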
15367 	{
15368 		"Tail call error path, NULL target",
15369 		.insns = {
15370 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
15371 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
15372 			BPF_STX_MEM(BPF_W, R1, R2, 0),
15373 			TAIL_CALL(TAIL_CALL_NULL),
15374 			BPF_EXIT_INSN(),
15375 		},
15376 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15377 		.result = MAX_TESTRUNS,
15378 		.has_tail_call = true,
15379 	},
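	/*
	 * TAIL_CALL_INVALID is relocated to an index beyond max_entries, so
	 * the tail call is rejected and falls through to the exit, just like
	 * the NULL-target case above.
	 */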
15380 	{
15381 		"Tail call error path, index out of range",
15382 		.insns = {
15383 			BPF_LDX_MEM(BPF_W, R2, R1, 0),
15384 			BPF_ALU64_IMM(BPF_ADD, R2, 1),
15385 			BPF_STX_MEM(BPF_W, R1, R2, 0),
15386 			TAIL_CALL(TAIL_CALL_INVALID),
15387 			BPF_EXIT_INSN(),
15388 		},
15389 		.flags = FLAG_NEED_STATE | FLAG_RESULT_IN_STATE,
15390 		.result = MAX_TESTRUNS,
15391 		.has_tail_call = true,
15392 	},
15393 };
15394 
15395 static void __init destroy_tail_call_tests(struct bpf_array *progs)
15396 {
15397 	int i;
15398 
15399 	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++)
15400 		if (progs->ptrs[i])
15401 			bpf_prog_free(progs->ptrs[i]);
15402 	kfree(progs);
15403 }
15404 
15405 static __init int prepare_tail_call_tests(struct bpf_array **pprogs)
15406 {
15407 	int ntests = ARRAY_SIZE(tail_call_tests);
15408 	struct bpf_array *progs;
15409 	int which, err;
15410 
15411 	/* Allocate the table of programs to be used for tail calls */
15412 	progs = kzalloc(struct_size(progs, ptrs, ntests + 1), GFP_KERNEL);
15413 	if (!progs)
15414 		goto out_nomem;
15415 
15416 	/* Create all eBPF programs and populate the table */
15417 	for (which = 0; which < ntests; which++) {
15418 		struct tail_call_test *test = &tail_call_tests[which];
15419 		struct bpf_prog *fp;
15420 		int len, i;
15421 
15422 		/* Compute the number of program instructions; BPF_LD_IMM64 occupies two slots */
15423 		for (len = 0; len < MAX_INSNS; len++) {
15424 			struct bpf_insn *insn = &test->insns[len];
15425 
15426 			if (len < MAX_INSNS - 1 &&
15427 			    insn->code == (BPF_LD | BPF_DW | BPF_IMM))
15428 				len++;
15429 			if (insn->code == 0)
15430 				break;
15431 		}
15432 
15433 		/* Allocate and initialize the program */
15434 		fp = bpf_prog_alloc(bpf_prog_size(len), 0);
15435 		if (!fp)
15436 			goto out_nomem;
15437 
15438 		fp->len = len;
15439 		fp->type = BPF_PROG_TYPE_SOCKET_FILTER;
15440 		fp->aux->stack_depth = test->stack_depth;
15441 		fp->aux->tail_call_reachable = test->has_tail_call;
15442 		memcpy(fp->insnsi, test->insns, len * sizeof(struct bpf_insn));
15443 
15444 		/* Relocate runtime tail call offsets and addresses */
15445 		for (i = 0; i < len; i++) {
15446 			struct bpf_insn *insn = &fp->insnsi[i];
15447 			long addr = 0;
15448 
15449 			switch (insn->code) {
15450 			case BPF_LD | BPF_DW | BPF_IMM:
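				/*
				 * Patch the 64-bit load of the marker with
				 * the address of the program array, split
				 * across the two halves of the insn pair.
				 */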
15451 				if (insn->imm != TAIL_CALL_MARKER)
15452 					break;
15453 				insn[0].imm = (u32)(long)progs;
15454 				insn[1].imm = ((u64)(long)progs) >> 32;
15455 				break;
15456 
15457 			case BPF_ALU | BPF_MOV | BPF_K:
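				/*
				 * Turn the marker MOV into the absolute call
				 * index: relative offsets are rebased on this
				 * test's index, while the NULL and INVALID
				 * sentinels select the NULL slot and an
				 * out-of-range slot, respectively.
				 */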
15458 				if (insn->imm != TAIL_CALL_MARKER)
15459 					break;
15460 				if (insn->off == TAIL_CALL_NULL)
15461 					insn->imm = ntests;
15462 				else if (insn->off == TAIL_CALL_INVALID)
15463 					insn->imm = ntests + 1;
15464 				else
15465 					insn->imm = which + insn->off;
15466 				insn->off = 0;
15467 				break;
15468 
15469 			case BPF_JMP | BPF_CALL:
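				/*
				 * Resolve pseudo calls to real kernel
				 * function addresses; if the target cannot be
				 * encoded as a relative call immediate, fall
				 * back to a NOP jump instead.
				 */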
15470 				if (insn->src_reg != BPF_PSEUDO_CALL)
15471 					break;
15472 				switch (insn->imm) {
15473 				case BPF_FUNC_get_numa_node_id:
15474 					addr = (long)&numa_node_id;
15475 					break;
15476 				case BPF_FUNC_ktime_get_ns:
15477 					addr = (long)&ktime_get_ns;
15478 					break;
15479 				case BPF_FUNC_ktime_get_boot_ns:
15480 					addr = (long)&ktime_get_boot_fast_ns;
15481 					break;
15482 				case BPF_FUNC_ktime_get_coarse_ns:
15483 					addr = (long)&ktime_get_coarse_ns;
15484 					break;
15485 				case BPF_FUNC_jiffies64:
15486 					addr = (long)&get_jiffies_64;
15487 					break;
15488 				case BPF_FUNC_test_func:
15489 					addr = (long)&bpf_test_func;
15490 					break;
15491 				default:
15492 					err = -EFAULT;
15493 					goto out_err;
15494 				}
15495 				*insn = BPF_EMIT_CALL(addr);
15496 				if ((long)__bpf_call_base + insn->imm != addr)
15497 					*insn = BPF_JMP_A(0); /* Skip: NOP */
15498 				break;
15499 			}
15500 		}
15501 
15502 		fp = bpf_prog_select_runtime(fp, &err);
15503 		if (err)
15504 			goto out_err;
15505 
15506 		progs->ptrs[which] = fp;
15507 	}
15508 
15509 	/* The last entry contains a NULL program pointer */
15510 	progs->map.max_entries = ntests + 1;
15511 	*pprogs = progs;
15512 	return 0;
15513 
15514 out_nomem:
15515 	err = -ENOMEM;
15516 
15517 out_err:
15518 	if (progs)
15519 		destroy_tail_call_tests(progs);
15520 	return err;
15521 }
15522 
15523 static __init int test_tail_calls(struct bpf_array *progs)
15524 {
15525 	int i, err_cnt = 0, pass_cnt = 0;
15526 	int jit_cnt = 0, run_cnt = 0;
15527 
15528 	for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
15529 		struct tail_call_test *test = &tail_call_tests[i];
15530 		struct bpf_prog *fp = progs->ptrs[i];
15531 		int *data = NULL;
15532 		int state = 0;
15533 		u64 duration;
15534 		int ret;
15535 
15536 		cond_resched();
15537 		if (exclude_test(i))
15538 			continue;
15539 
15540 		pr_info("#%d %s ", i, test->descr);
15541 		if (!fp) {
15542 			err_cnt++;
15543 			continue;
15544 		}
15545 		pr_cont("jited:%u ", fp->jited);
15546 
15547 		run_cnt++;
15548 		if (fp->jited)
15549 			jit_cnt++;
15550 
15551 		if (test->flags & FLAG_NEED_STATE)
15552 			data = &state;
15553 		ret = __run_one(fp, data, MAX_TESTRUNS, &duration);
15554 		if (test->flags & FLAG_RESULT_IN_STATE)
15555 			ret = state;
15556 		if (ret == test->result) {
15557 			pr_cont("%lld PASS\n", duration);
15558 			pass_cnt++;
15559 		} else {
15560 			pr_cont("ret %d != %d FAIL\n", ret, test->result);
15561 			err_cnt++;
15562 		}
15563 	}
15564 
15565 	pr_info("%s: Summary: %d PASSED, %d FAILED, [%d/%d JIT'ed]\n",
15566 		__func__, pass_cnt, err_cnt, jit_cnt, run_cnt);
15567 
15568 	return err_cnt ? -EINVAL : 0;
15569 }
15570 
15571 static char test_suite[32];
15572 module_param_string(test_suite, test_suite, sizeof(test_suite), 0);
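/*
 * Selects which test suite to run, e.g.:
 *   insmod test_bpf.ko test_suite=test_tail_calls
 * The test_id, test_name and test_range parameters defined earlier in this
 * file can further narrow the run to a single test or a range of tests.
 */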
15573 
15574 static __init int find_test_index(const char *test_name)
15575 {
15576 	int i;
15577 
15578 	if (!strcmp(test_suite, "test_bpf")) {
15579 		for (i = 0; i < ARRAY_SIZE(tests); i++) {
15580 			if (!strcmp(tests[i].descr, test_name))
15581 				return i;
15582 		}
15583 	}
15584 
15585 	if (!strcmp(test_suite, "test_tail_calls")) {
15586 		for (i = 0; i < ARRAY_SIZE(tail_call_tests); i++) {
15587 			if (!strcmp(tail_call_tests[i].descr, test_name))
15588 				return i;
15589 		}
15590 	}
15591 
15592 	if (!strcmp(test_suite, "test_skb_segment")) {
15593 		for (i = 0; i < ARRAY_SIZE(skb_segment_tests); i++) {
15594 			if (!strcmp(skb_segment_tests[i].descr, test_name))
15595 				return i;
15596 		}
15597 	}
15598 
15599 	return -1;
15600 }
15601 
15602 static __init int prepare_test_range(void)
15603 {
15604 	int valid_range;
15605 
15606 	if (!strcmp(test_suite, "test_bpf"))
15607 		valid_range = ARRAY_SIZE(tests);
15608 	else if (!strcmp(test_suite, "test_tail_calls"))
15609 		valid_range = ARRAY_SIZE(tail_call_tests);
15610 	else if (!strcmp(test_suite, "test_skb_segment"))
15611 		valid_range = ARRAY_SIZE(skb_segment_tests);
15612 	else
15613 		return 0;
15614 
15615 	if (test_id >= 0) {
15616 		/*
15617 		 * if a test_id was specified, use test_range to
15618 		 * cover only that test.
15619 		 */
15620 		if (test_id >= valid_range) {
15621 			pr_err("test_bpf: invalid test_id specified for '%s' suite.\n",
15622 			       test_suite);
15623 			return -EINVAL;
15624 		}
15625 
15626 		test_range[0] = test_id;
15627 		test_range[1] = test_id;
15628 	} else if (*test_name) {
15629 		/*
15630 		 * if a test_name was specified, find it and setup
15631 		 * test_range to cover only that test.
15632 		 */
15633 		int idx = find_test_index(test_name);
15634 
15635 		if (idx < 0) {
15636 			pr_err("test_bpf: no test named '%s' found for '%s' suite.\n",
15637 			       test_name, test_suite);
15638 			return -EINVAL;
15639 		}
15640 		test_range[0] = idx;
15641 		test_range[1] = idx;
15642 	} else if (test_range[0] != 0 || test_range[1] != INT_MAX) {
15643 		/*
15644 		 * check that the supplied test_range is valid.
15645 		 */
15646 		if (test_range[0] < 0 || test_range[1] >= valid_range) {
15647 			pr_err("test_bpf: test_range is out of bounds for '%s' suite.\n",
15648 			       test_suite);
15649 			return -EINVAL;
15650 		}
15651 
15652 		if (test_range[1] < test_range[0]) {
15653 			pr_err("test_bpf: test_range ends before it starts.\n");
15654 			return -EINVAL;
15655 		}
15656 	}
15657 
15658 	return 0;
15659 }
15660 
15661 static int __init test_bpf_init(void)
15662 {
15663 	struct bpf_array *progs = NULL;
15664 	int ret;
15665 
15666 	if (strlen(test_suite) &&
15667 	    strcmp(test_suite, "test_bpf") &&
15668 	    strcmp(test_suite, "test_tail_calls") &&
15669 	    strcmp(test_suite, "test_skb_segment")) {
15670 		pr_err("test_bpf: invalid test_suite '%s' specified.\n", test_suite);
15671 		return -EINVAL;
15672 	}
15673 
15674 	/*
15675 	 * if test_suite is not specified, but test_id, test_name or test_range
15676 	 * is specified, set 'test_bpf' as the default test suite.
15677 	 */
15678 	if (!strlen(test_suite) &&
15679 	    (test_id != -1 || strlen(test_name) ||
15680 	    (test_range[0] != 0 || test_range[1] != INT_MAX))) {
15681 		pr_info("test_bpf: set 'test_bpf' as the default test_suite.\n");
15682 		strscpy(test_suite, "test_bpf", sizeof(test_suite));
15683 	}
15684 
15685 	ret = prepare_test_range();
15686 	if (ret < 0)
15687 		return ret;
15688 
15689 	if (!strlen(test_suite) || !strcmp(test_suite, "test_bpf")) {
15690 		ret = test_bpf();
15691 		if (ret)
15692 			return ret;
15693 	}
15694 
15695 	if (!strlen(test_suite) || !strcmp(test_suite, "test_tail_calls")) {
15696 		ret = prepare_tail_call_tests(&progs);
15697 		if (ret)
15698 			return ret;
15699 		ret = test_tail_calls(progs);
15700 		destroy_tail_call_tests(progs);
15701 		if (ret)
15702 			return ret;
15703 	}
15704 
15705 	if (!strlen(test_suite) || !strcmp(test_suite, "test_skb_segment"))
15706 		return test_skb_segment();
15707 
15708 	return 0;
15709 }
15710 
15711 static void __exit test_bpf_exit(void)
15712 {
15713 }
15714 
15715 module_init(test_bpf_init);
15716 module_exit(test_bpf_exit);
15717 
15718 MODULE_DESCRIPTION("Testsuite for BPF interpreter and BPF JIT compiler");
15719 MODULE_LICENSE("GPL");
15720