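/*
 * The two "pointer/scalar confusion" tests leave either a scalar loaded
 * from the map value (lookup succeeded) or the frame pointer (lookup
 * failed) in R0 before returning it. Both paths converge before the exit,
 * so state pruning must not treat the pointer state and the scalar state
 * as equivalent; unprivileged loads must still be rejected for returning
 * a kernel address.
 */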
{
	"pointer/scalar confusion in state equality check (way 1)",
	.insns = {
	BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 2),
	BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 0),
	BPF_JMP_A(1),
	BPF_MOV64_REG(BPF_REG_0, BPF_REG_10),
	BPF_JMP_A(0),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_8b = { 3 },
	.result = ACCEPT,
	.retval = POINTER_VALUE,
	.result_unpriv = REJECT,
	.errstr_unpriv = "R0 leaks addr as return value"
},
{
	"pointer/scalar confusion in state equality check (way 2)",
	.insns = {
	BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 2),
	BPF_MOV64_REG(BPF_REG_0, BPF_REG_10),
	BPF_JMP_A(1),
	BPF_LDX_MEM(BPF_DW, BPF_REG_0, BPF_REG_0, 0),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_8b = { 3 },
	.result = ACCEPT,
	.retval = POINTER_VALUE,
	.result_unpriv = REJECT,
	.errstr_unpriv = "R0 leaks addr as return value"
},
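/*
 * On the path where both conditional jumps below are taken, R0 is never
 * written before the exit, so the program must be rejected with
 * "R0 !read_ok"; liveness tracking and write screening must not let the
 * writes on the other paths mask that.
 */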
{
	"liveness pruning and write screening",
	.insns = {
	/* Get an unknown value */
	BPF_LDX_MEM(BPF_W, BPF_REG_2, BPF_REG_1, 0),
	/* branch conditions teach us nothing about R2 */
	BPF_JMP_IMM(BPF_JGE, BPF_REG_2, 0, 1),
	BPF_MOV64_IMM(BPF_REG_0, 0),
	BPF_JMP_IMM(BPF_JGE, BPF_REG_2, 0, 1),
	BPF_MOV64_IMM(BPF_REG_0, 0),
	BPF_EXIT_INSN(),
	},
	.errstr = "R0 !read_ok",
	.result = REJECT,
	.prog_type = BPF_PROG_TYPE_LWT_IN,
},
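/*
 * In the test below, one path clears R1 entirely while the other only
 * establishes a signed upper bound via BPF_JSGT; both converge at the
 * BPF_JA ahead of the store through R0. Pruning at the convergence point
 * must not hide the insufficiently bounded path, hence the expected
 * "R0 unbounded memory access" rejection.
 */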
{
	"varlen_map_value_access pruning",
	.insns = {
	BPF_ST_MEM(BPF_DW, BPF_REG_10, -8, 0),
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
	BPF_LDX_MEM(BPF_DW, BPF_REG_1, BPF_REG_0, 0),
	BPF_MOV32_IMM(BPF_REG_2, MAX_ENTRIES),
	BPF_JMP_REG(BPF_JSGT, BPF_REG_2, BPF_REG_1, 1),
	BPF_MOV32_IMM(BPF_REG_1, 0),
	BPF_ALU32_IMM(BPF_LSH, BPF_REG_1, 2),
	BPF_ALU64_REG(BPF_ADD, BPF_REG_0, BPF_REG_1),
	BPF_JMP_IMM(BPF_JA, 0, 0, 0),
	BPF_ST_MEM(BPF_DW, BPF_REG_0, 0, offsetof(struct test_val, foo)),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_48b = { 3 },
	.errstr_unpriv = "R0 leaks addr",
	.errstr = "R0 unbounded memory access",
	.result_unpriv = REJECT,
	.result = REJECT,
	.flags = F_NEEDS_EFFICIENT_UNALIGNED_ACCESS,
},
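/*
 * The two "search pruning" tests below take different actions on the
 * 0xbeef comparison (storing different values in the first test, storing
 * to different stack slots in the second) before the branches converge.
 * The verifier must keep exploring both branches rather than pruning one,
 * so that it still reaches the invalid access that only exists on one of
 * them.
 */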
{
	"search pruning: all branches should be verified (nop operation)",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 11),
	BPF_LDX_MEM(BPF_DW, BPF_REG_3, BPF_REG_0, 0),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_3, 0xbeef, 2),
	BPF_MOV64_IMM(BPF_REG_4, 0),
	BPF_JMP_A(1),
	BPF_MOV64_IMM(BPF_REG_4, 1),
	BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_4, -16),
	BPF_EMIT_CALL(BPF_FUNC_ktime_get_ns),
	BPF_LDX_MEM(BPF_DW, BPF_REG_5, BPF_REG_10, -16),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_5, 0, 2),
	BPF_MOV64_IMM(BPF_REG_6, 0),
	BPF_ST_MEM(BPF_DW, BPF_REG_6, 0, 0xdead),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_8b = { 3 },
	.errstr = "R6 invalid mem access 'inv'",
	.result = REJECT,
	.prog_type = BPF_PROG_TYPE_TRACEPOINT,
},
{
	"search pruning: all branches should be verified (invalid stack access)",
	.insns = {
	BPF_MOV64_REG(BPF_REG_2, BPF_REG_10),
	BPF_ALU64_IMM(BPF_ADD, BPF_REG_2, -8),
	BPF_ST_MEM(BPF_DW, BPF_REG_2, 0, 0),
	BPF_LD_MAP_FD(BPF_REG_1, 0),
	BPF_EMIT_CALL(BPF_FUNC_map_lookup_elem),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 8),
	BPF_LDX_MEM(BPF_DW, BPF_REG_3, BPF_REG_0, 0),
	BPF_MOV64_IMM(BPF_REG_4, 0),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_3, 0xbeef, 2),
	BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_4, -16),
	BPF_JMP_A(1),
	BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_4, -24),
	BPF_EMIT_CALL(BPF_FUNC_ktime_get_ns),
	BPF_LDX_MEM(BPF_DW, BPF_REG_5, BPF_REG_10, -16),
	BPF_EXIT_INSN(),
	},
	.fixup_map_hash_8b = { 3 },
	.errstr = "invalid read from stack off -16+0 size 8",
	.result = REJECT,
	.prog_type = BPF_PROG_TYPE_TRACEPOINT,
},
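/*
 * "allocated_stack": the fall-through path spills R6/R7 to fp-8 and fp-9
 * and reloads them, while the branch that skips to the no-op jumps never
 * touches the stack. The .insn_processed = 15 count only works out if the
 * jump path is pruned at the first no-op jump, even though its allocated
 * stack is smaller than in the cached state.
 */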
{
	"allocated_stack",
	.insns = {
	BPF_ALU64_REG(BPF_MOV, BPF_REG_6, BPF_REG_1),
	BPF_RAW_INSN(BPF_JMP | BPF_CALL, 0, 0, 0, BPF_FUNC_get_prandom_u32),
	BPF_ALU64_REG(BPF_MOV, BPF_REG_7, BPF_REG_0),
	BPF_JMP_IMM(BPF_JEQ, BPF_REG_0, 0, 5),
	BPF_MOV64_IMM(BPF_REG_0, 0),
	BPF_STX_MEM(BPF_DW, BPF_REG_10, BPF_REG_6, -8),
	BPF_LDX_MEM(BPF_DW, BPF_REG_6, BPF_REG_10, -8),
	BPF_STX_MEM(BPF_B, BPF_REG_10, BPF_REG_7, -9),
	BPF_LDX_MEM(BPF_B, BPF_REG_7, BPF_REG_10, -9),
	BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 0),
	BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 0),
	BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 0),
	BPF_JMP_IMM(BPF_JNE, BPF_REG_0, 0, 0),
	BPF_EXIT_INSN(),
	},
	.result = ACCEPT,
	.result_unpriv = ACCEPT,
	.insn_processed = 15,
},