diff --git a/tools/testing/selftests/bpf/test_verifier.c b/tools/testing/selftests/bpf/test_verifier.c
index 33f7d38849b8279355bbcfc86c8af8bd23456db3..10d44446e8013a19d8c01e5e15a94a919bae94cb 100644
--- a/tools/testing/selftests/bpf/test_verifier.c
+++ b/tools/testing/selftests/bpf/test_verifier.c
@@ -23,6 +23,7 @@
 #include <stdbool.h>
 #include <sched.h>
 #include <limits.h>
+#include <assert.h>
 
 #include <sys/capability.h>
 
@@ -2577,6 +2578,7 @@ static struct bpf_test tests[] = {
 		},
 		.result = REJECT,
 		.errstr = "invalid stack off=-79992 size=8",
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
 	},
 	{
 		"PTR_TO_STACK store/load - out of bounds high",
@@ -3104,6 +3106,8 @@ static struct bpf_test tests[] = {
 			BPF_STX_MEM(BPF_DW, BPF_REG_1, BPF_REG_0, -8),
 			BPF_EXIT_INSN(),
 		},
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.result_unpriv = REJECT,
 		.result = ACCEPT,
 	},
 	{
@@ -3205,6 +3209,243 @@ static struct bpf_test tests[] = {
 		/* Verifier rewrite for unpriv skips tail call here. */
 		.retval_unpriv = 2,
 	},
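+	/*
+	 * PTR_TO_STACK tests: direct fp-based accesses must land within the
+	 * 512-byte stack, i.e. in [fp-512, fp-1]. The "high" tests probe the
+	 * fp boundary, the "low" tests probe the -512 limit, and the mixed
+	 * reg/k variants build the offset from immediates and registers.
+	 */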
+	{
+		"PTR_TO_STACK check high 1",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -1),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK check high 2",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, -1, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, -1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK check high 3",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, -1, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, -1),
+			BPF_EXIT_INSN(),
+		},
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.result_unpriv = REJECT,
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK check high 4",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, 0),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.errstr = "invalid stack off=0 size=1",
+		.result = REJECT,
+	},
+	{
+		"PTR_TO_STACK check high 5",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, (1 << 29) - 1),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr = "invalid stack off",
+	},
+	{
+		"PTR_TO_STACK check high 6",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, (1 << 29) - 1),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, SHRT_MAX, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, SHRT_MAX),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr = "invalid stack off",
+	},
+	{
+		"PTR_TO_STACK check high 7",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, (1 << 29) - 1),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, (1 << 29) - 1),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, SHRT_MAX, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, SHRT_MAX),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.errstr = "fp pointer offset",
+	},
+	{
+		"PTR_TO_STACK check low 1",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -512),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK check low 2",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -513),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 1, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 1),
+			BPF_EXIT_INSN(),
+		},
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK check low 3",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -513),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.errstr = "invalid stack off=-513 size=1",
+		.result = REJECT,
+	},
+	{
+		"PTR_TO_STACK check low 4",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, INT_MIN),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr = "math between fp pointer",
+	},
+	{
+		"PTR_TO_STACK check low 5",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -((1 << 29) - 1)),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr = "invalid stack off",
+	},
+	{
+		"PTR_TO_STACK check low 6",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -((1 << 29) - 1)),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, SHRT_MIN, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, SHRT_MIN),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr = "invalid stack off",
+	},
+	{
+		"PTR_TO_STACK check low 7",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -((1 << 29) - 1)),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -((1 << 29) - 1)),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, SHRT_MIN, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, SHRT_MIN),
+			BPF_EXIT_INSN(),
+		},
+		.result = REJECT,
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
+		.errstr = "fp pointer offset",
+	},
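+	/*
+	 * mixed reg/k: the -6 byte offset is built half from an immediate and
+	 * half from a register, ending at fp-6; test 2 reads the byte back via
+	 * a second fp-based pointer and test 3 returns the scalar -3 itself.
+	 */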
+	{
+		"PTR_TO_STACK mixed reg/k, 1",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -3),
+			BPF_MOV64_IMM(BPF_REG_2, -3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK mixed reg/k, 2",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -16, 0),
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -3),
+			BPF_MOV64_IMM(BPF_REG_2, -3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_MOV64_REG(BPF_REG_5, BPF_REG_10),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_5, -6),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 42,
+	},
+	{
+		"PTR_TO_STACK mixed reg/k, 3",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_1, -3),
+			BPF_MOV64_IMM(BPF_REG_2, -3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_2),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = -3,
+	},
+	{
+		"PTR_TO_STACK reg",
+		.insns = {
+			BPF_MOV64_REG(BPF_REG_1, BPF_REG_10),
+			BPF_MOV64_IMM(BPF_REG_2, -3),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_2),
+			BPF_ST_MEM(BPF_B, BPF_REG_1, 0, 42),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "invalid stack off=0 size=1",
+		.result = ACCEPT,
+		.retval = 42,
+	},
 	{
 		"stack pointer arithmetic",
 		.insns = {
@@ -6610,7 +6851,446 @@ static struct bpf_test tests[] = {
 		.prog_type = BPF_PROG_TYPE_TRACEPOINT,
 	},
 	{
-		"map access: known scalar += value_ptr",
+		"map access: known scalar += value_ptr from different maps",
+		.insns = {
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1,
+				    offsetof(struct __sk_buff, len)),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 1, 3),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 1, 2),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_hash_16b = { 5 },
+		.fixup_map_array_48b = { 8 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R1 tried to add from different maps",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr -= known scalar from different maps",
+		.insns = {
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1,
+				    offsetof(struct __sk_buff, len)),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 1, 3),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 1, 2),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_hash_16b = { 5 },
+		.fixup_map_array_48b = { 8 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 min value is outside of the array range",
+		.retval = 1,
+	},
+	{
+		"map access: known scalar += value_ptr from different maps, but same value properties",
+		.insns = {
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1,
+				    offsetof(struct __sk_buff, len)),
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 1, 3),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 1, 2),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_hash_48b = { 5 },
+		.fixup_map_array_48b = { 8 },
+		.result = ACCEPT,
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, upper oob arith, test 1",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_MOV64_IMM(BPF_REG_1, 48),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, upper oob arith, test 2",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_MOV64_IMM(BPF_REG_1, 49),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, upper oob arith, test 3",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_MOV64_IMM(BPF_REG_1, 47),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr -= known scalar, lower oob arith, test 1",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 5),
+			BPF_MOV64_IMM(BPF_REG_1, 47),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 48),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = REJECT,
+		.errstr = "R0 min value is outside of the array range",
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+	},
+	{
+		"map access: value_ptr -= known scalar, lower oob arith, test 2",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
+			BPF_MOV64_IMM(BPF_REG_1, 47),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 48),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr -= known scalar, lower oob arith, test 3",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 5),
+			BPF_MOV64_IMM(BPF_REG_1, 47),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 47),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: known scalar += value_ptr",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, 1",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, 2",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, 49),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = REJECT,
+		.errstr = "invalid access to map value",
+	},
+	{
+		"map access: value_ptr += known scalar, 3",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, -1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = REJECT,
+		.errstr = "invalid access to map value",
+	},
+	{
+		"map access: value_ptr += known scalar, 4",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 7),
+			BPF_MOV64_IMM(BPF_REG_1, 5),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, -2),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, -1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 1,
+	},
+	{
+		"map access: value_ptr += known scalar, 5",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
+			BPF_MOV64_IMM(BPF_REG_1, (6 + 1) * sizeof(int)),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 0xabcdef12,
+	},
+	{
+		"map access: value_ptr += known scalar, 6",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 5),
+			BPF_MOV64_IMM(BPF_REG_1, (3 + 1) * sizeof(int)),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 3 * sizeof(int)),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_0, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 0xabcdef12,
+	},
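+	/*
+	 * The "unknown scalar" variants derive the offset from map data at run
+	 * time and clamp it with an AND mask, so the verifier only knows a
+	 * bounded range for it rather than a constant.
+	 */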
+	{
+		"map access: unknown scalar += value_ptr, 1",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 0xf),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 1,
+	},
+	{
+		"map access: unknown scalar += value_ptr, 2",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_LDX_MEM(BPF_W, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 31),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.retval = 0xabcdef12,
+	},
+	{
+		"map access: unknown scalar += value_ptr, 3",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
+			BPF_MOV64_IMM(BPF_REG_1, -1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_MOV64_IMM(BPF_REG_1, 1),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 31),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_array_48b = { 3 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
+		.retval = 0xabcdef12,
+	},
+	{
+		"map access: unknown scalar += value_ptr, 4",
 		.insns = {
 			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
 			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
@@ -6618,19 +7298,22 @@ static struct bpf_test tests[] = {
 			BPF_LD_MAP_FD(BPF_REG_1, 0),
 			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
 				     BPF_FUNC_map_lookup_elem),
-			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
-			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
+			BPF_MOV64_IMM(BPF_REG_1, 19),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 31),
 			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
-			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
-			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_1, 0),
 			BPF_EXIT_INSN(),
 		},
 		.fixup_map_array_48b = { 3 },
-		.result = ACCEPT,
-		.retval = 1,
+		.result = REJECT,
+		.errstr = "R1 max value is outside of the array range",
+		.errstr_unpriv = "R1 pointer arithmetic of map value goes out of range",
 	},
 	{
-		"map access: value_ptr += known scalar",
+		"map access: value_ptr += unknown scalar, 1",
 		.insns = {
 			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
 			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
@@ -6638,8 +7321,9 @@ static struct bpf_test tests[] = {
 			BPF_LD_MAP_FD(BPF_REG_1, 0),
 			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
 				     BPF_FUNC_map_lookup_elem),
-			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 3),
-			BPF_MOV64_IMM(BPF_REG_1, 4),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
+			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 0xf),
 			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
 			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
 			BPF_MOV64_IMM(BPF_REG_0, 1),
@@ -6650,7 +7334,7 @@ static struct bpf_test tests[] = {
 		.retval = 1,
 	},
 	{
-		"map access: unknown scalar += value_ptr",
+		"map access: value_ptr += unknown scalar, 2",
 		.insns = {
 			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
 			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
@@ -6659,19 +7343,18 @@ static struct bpf_test tests[] = {
 			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
 				     BPF_FUNC_map_lookup_elem),
 			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
-			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
-			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 0xf),
-			BPF_ALU64_REG(BPF_ADD, BPF_REG_1, BPF_REG_0),
-			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_1, 0),
-			BPF_MOV64_IMM(BPF_REG_0, 1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 31),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			BPF_LDX_MEM(BPF_W, BPF_REG_0, BPF_REG_0, 0),
 			BPF_EXIT_INSN(),
 		},
 		.fixup_map_array_48b = { 3 },
 		.result = ACCEPT,
-		.retval = 1,
+		.retval = 0xabcdef12,
 	},
 	{
-		"map access: value_ptr += unknown scalar",
+		"map access: value_ptr += unknown scalar, 3",
 		.insns = {
 			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
 			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
@@ -6679,13 +7362,20 @@ static struct bpf_test tests[] = {
 			BPF_LD_MAP_FD(BPF_REG_1, 0),
 			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
 				     BPF_FUNC_map_lookup_elem),
-			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 4),
-			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 11),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_0, 0),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_2, BPF_REG_0, 8),
+			BPF_LDX_MEM(BPF_DW, BPF_REG_3, BPF_REG_0, 16),
 			BPF_ALU64_IMM(BPF_AND, BPF_REG_1, 0xf),
-			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
-			BPF_LDX_MEM(BPF_B, BPF_REG_1, BPF_REG_0, 0),
+			BPF_ALU64_IMM(BPF_AND, BPF_REG_3, 1),
+			BPF_ALU64_IMM(BPF_OR, BPF_REG_3, 1),
+			BPF_JMP_REG(BPF_JGT, BPF_REG_2, BPF_REG_3, 4),
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_3),
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
 			BPF_MOV64_IMM(BPF_REG_0, 1),
 			BPF_EXIT_INSN(),
+			BPF_MOV64_IMM(BPF_REG_0, 2),
+			BPF_JMP_IMM(BPF_JA, 0, 0, -3),
 		},
 		.fixup_map_array_48b = { 3 },
 		.result = ACCEPT,
@@ -6770,6 +7460,8 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_array_48b = { 3 },
 		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
 		.retval = 1,
 	},
 	{
@@ -6837,6 +7529,8 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_array_48b = { 3 },
 		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
 		.retval = 1,
 	},
 	{
@@ -8376,6 +9070,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8400,6 +9095,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8426,6 +9122,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R8 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8451,6 +9148,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R8 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8499,6 +9197,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8570,6 +9269,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8621,6 +9321,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8648,6 +9349,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8674,6 +9376,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8703,6 +9406,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R7 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8733,6 +9437,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 4 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
 	{
@@ -8761,6 +9466,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "unbounded min value",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 		.result_unpriv = REJECT,
 	},
@@ -8813,8 +9519,38 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "R0 min value is negative, either use unsigned index or do a if (index >=0) check.",
+		.errstr_unpriv = "R1 has unknown scalar with mixed signed bounds",
 		.result = REJECT,
 	},
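+	/*
+	 * fp minus a map value pointer yields a scalar that encodes the
+	 * pointer difference; this is fine for privileged programs but must be
+	 * rejected for unpriv to avoid leaking pointer values.
+	 */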
+	{
+		"check subtraction on pointers for unpriv",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_0, 0),
+			BPF_LD_MAP_FD(BPF_REG_ARG1, 0),
+			BPF_MOV64_REG(BPF_REG_ARG2, BPF_REG_FP),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_ARG2, -8),
+			BPF_ST_MEM(BPF_DW, BPF_REG_ARG2, 0, 9),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_MOV64_REG(BPF_REG_9, BPF_REG_FP),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_9, BPF_REG_0),
+			BPF_LD_MAP_FD(BPF_REG_ARG1, 0),
+			BPF_MOV64_REG(BPF_REG_ARG2, BPF_REG_FP),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_ARG2, -8),
+			BPF_ST_MEM(BPF_DW, BPF_REG_ARG2, 0, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 1),
+			BPF_EXIT_INSN(),
+			BPF_STX_MEM(BPF_DW, BPF_REG_0, BPF_REG_9, 0),
+			BPF_MOV64_IMM(BPF_REG_0, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_hash_8b = { 1, 9 },
+		.result = ACCEPT,
+		.result_unpriv = REJECT,
+		.errstr_unpriv = "R9 pointer -= pointer prohibited",
+	},
 	{
 		"bounds check based on zero-extended MOV",
 		.insns = {
@@ -9145,6 +9881,36 @@ static struct bpf_test tests[] = {
 		.errstr = "R0 unbounded memory access",
 		.result = REJECT
 	},
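+	/*
+	 * A 32-bit ALU right shift must operate on the truncated lower 32
+	 * bits; if the verifier shifted the full 64-bit value it would track
+	 * r1 as 2 instead of 0 below and miss the out-of-bounds pointer.
+	 */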
+	{
+		"bounds check after 32-bit right shift with 64-bit input",
+		.insns = {
+			BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
+			BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
+			BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
+			BPF_LD_MAP_FD(BPF_REG_1, 0),
+			BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0,
+				     BPF_FUNC_map_lookup_elem),
+			BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 6),
+			/* r1 = 2 */
+			BPF_MOV64_IMM(BPF_REG_1, 2),
+			/* r1 = 1<<32 */
+			BPF_ALU64_IMM(BPF_LSH, BPF_REG_1, 31),
+			/* r1 = 0 (NOT 2!) */
+			BPF_ALU32_IMM(BPF_RSH, BPF_REG_1, 31),
+			/* r1 = 0xffff'fffe (NOT 0!) */
+			BPF_ALU32_IMM(BPF_SUB, BPF_REG_1, 2),
+			/* computes OOB pointer */
+			BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
+			/* OOB access */
+			BPF_LDX_MEM(BPF_B, BPF_REG_0, BPF_REG_0, 0),
+			/* exit */
+			BPF_MOV64_IMM(BPF_REG_0, 0),
+			BPF_EXIT_INSN(),
+		},
+		.fixup_map_hash_8b = { 3 },
+		.errstr = "R0 invalid mem access",
+		.result = REJECT,
+	},
 	{
 		"bounds check map access with off+size signed 32bit overflow. test1",
 		.insns = {
@@ -9185,6 +9951,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "pointer offset 1073741822",
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
 		.result = REJECT
 	},
 	{
@@ -9206,6 +9973,7 @@ static struct bpf_test tests[] = {
 		},
 		.fixup_map_hash_8b = { 3 },
 		.errstr = "pointer offset -1073741822",
+		.errstr_unpriv = "R0 pointer arithmetic of map value goes out of range",
 		.result = REJECT
 	},
 	{
@@ -9377,6 +10145,7 @@ static struct bpf_test tests[] = {
 			BPF_EXIT_INSN()
 		},
 		.errstr = "fp pointer offset 1073741822",
+		.errstr_unpriv = "R1 stack pointer arithmetic goes out of range",
 		.result = REJECT
 	},
 	{
@@ -13718,6 +14487,328 @@ static struct bpf_test tests[] = {
 		.result_unpriv = ACCEPT,
 		.insn_processed = 15,
 	},
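+	/*
+	 * The masking tests exercise the branchless bounds-clamping idiom
+	 * mask = -((bound - 1 - idx) | idx) >> 63 (arithmetic shift): ANDing
+	 * the mask into idx leaves an in-bounds index unchanged and forces an
+	 * out-of-range (or negative) one to 0.
+	 */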
+	{
+		"masking, test out of bounds 1",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 5),
+			BPF_MOV32_IMM(BPF_REG_2, 5 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 2",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 1),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 3",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0xffffffff),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 4",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0xffffffff),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 5",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 6",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
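+	/*
+	 * Tests 7-12 repeat 1-6 with BPF_MOV64_IMM for the index, so negative
+	 * constants are sign-extended to 64 bit instead of zero-extended; the
+	 * mask must still collapse them to 0.
+	 */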
+	{
+		"masking, test out of bounds 7",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, 5),
+			BPF_MOV32_IMM(BPF_REG_2, 5 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 8",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, 1),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 9",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, 0xffffffff),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 10",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, 0xffffffff),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 11",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test out of bounds 12",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_1, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test in bounds 1",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 4),
+			BPF_MOV32_IMM(BPF_REG_2, 5 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 4,
+	},
+	{
+		"masking, test in bounds 2",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test in bounds 3",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0xfffffffe),
+			BPF_MOV32_IMM(BPF_REG_2, 0xffffffff - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0xfffffffe,
+	},
+	{
+		"masking, test in bounds 4",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0xabcde),
+			BPF_MOV32_IMM(BPF_REG_2, 0xabcdef - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0xabcde,
+	},
+	{
+		"masking, test in bounds 5",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 0),
+			BPF_MOV32_IMM(BPF_REG_2, 1 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
+	{
+		"masking, test in bounds 6",
+		.insns = {
+			BPF_MOV32_IMM(BPF_REG_1, 46),
+			BPF_MOV32_IMM(BPF_REG_2, 47 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_1),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_1, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_1),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 46,
+	},
+	{
+		"masking, test in bounds 7",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_3, -46),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_3, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 47 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_3, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_3),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 46,
+	},
+	{
+		"masking, test in bounds 8",
+		.insns = {
+			BPF_MOV64_IMM(BPF_REG_3, -47),
+			BPF_ALU64_IMM(BPF_MUL, BPF_REG_3, -1),
+			BPF_MOV32_IMM(BPF_REG_2, 47 - 1),
+			BPF_ALU64_REG(BPF_SUB, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_REG(BPF_OR, BPF_REG_2, BPF_REG_3),
+			BPF_ALU64_IMM(BPF_NEG, BPF_REG_2, 0),
+			BPF_ALU64_IMM(BPF_ARSH, BPF_REG_2, 63),
+			BPF_ALU64_REG(BPF_AND, BPF_REG_3, BPF_REG_2),
+			BPF_MOV64_REG(BPF_REG_0, BPF_REG_3),
+			BPF_EXIT_INSN(),
+		},
+		.result = ACCEPT,
+		.retval = 0,
+	},
 	{
 		"reference tracking in call: free reference in subprog and outside",
 		.insns = {
@@ -14413,6 +15504,16 @@ static int create_map(uint32_t type, uint32_t size_key,
 	return fd;
 }
 
+static void update_map(int fd, int index)
+{
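+	/*
+	 * Seed the 48-byte array map used by the value_ptr arithmetic tests:
+	 * .index (== 28) doubles as a scalar loaded from map data and
+	 * .foo[6], at byte offset 28, holds the 0xabcdef12 marker that those
+	 * tests expect as their return value.
+	 */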
+	struct test_val value = {
+		.index = (6 + 1) * sizeof(int),
+		.foo[6] = 0xabcdef12,
+	};
+
+	assert(!bpf_map_update_elem(fd, &index, &value, 0));
+}
+
 static int create_prog_dummy1(enum bpf_prog_type prog_type)
 {
 	struct bpf_insn prog[] = {
@@ -14564,6 +15665,7 @@ static void do_test_fixup(struct bpf_test *test, enum bpf_prog_type prog_type,
 	if (*fixup_map_array_48b) {
 		map_fds[3] = create_map(BPF_MAP_TYPE_ARRAY, sizeof(int),
 					sizeof(struct test_val), 1);
+		update_map(map_fds[3], 0);
 		do {
 			prog[*fixup_map_array_48b].imm = map_fds[3];
 			fixup_map_array_48b++;