// SPDX-License-Identifier: GPL-2.0
/*
 * KCSAN short boot-time selftests.
 *
 * Copyright (C) 2019, Google LLC.
*/
#define pr_fmt(fmt) "kcsan: " fmt

#include <linux/atomic.h>
#include <linux/bitops.h>
#include <linux/init.h>
#include <linux/kcsan-checks.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/random.h>
#include <linux/sched.h>
#include <linux/spinlock.h>
#include <linux/types.h>

#include "encoding.h"

#define ITERS_PER_TEST 2000

/*
 * Test watchpoint encode and decode: check that encoding some access's info,
 * and then subsequent decode preserves the access's info.
*/
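/*
 * Note: only the masked address (addr & WATCHPOINT_ADDR_MASK) is expected to
 * survive the round trip, since the encoded word must also pack the size and
 * is_write bits; the exact bit layout is defined in encoding.h.
 */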
static bool __init test_encode_decode(void)
{
	int i;

	for (i = 0; i < ITERS_PER_TEST; ++i) {
		size_t size = get_random_u32_inclusive(1, MAX_ENCODABLE_SIZE);
		bool is_write = !!get_random_u32_below(2);
		unsigned long verif_masked_addr;
		long encoded_watchpoint;
		bool verif_is_write;
		unsigned long addr;
		size_t verif_size;
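
		/*
		 * Pick a random address; clamp to at least PAGE_SIZE, since
		 * check_encodable() below presumably treats lower addresses
		 * as not encodable.
		 */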
		get_random_bytes(&addr, sizeof(addr));
		if (addr < PAGE_SIZE)
			addr = PAGE_SIZE;

		if (WARN_ON(!check_encodable(addr, size)))
			return false;

		encoded_watchpoint = encode_watchpoint(addr, size, is_write);

		/* Check special watchpoints: they must never decode successfully. */
		if (WARN_ON(decode_watchpoint(INVALID_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(decode_watchpoint(CONSUMED_WATCHPOINT, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;

		/* Check that decoding the watchpoint returns the same data. */
		if (WARN_ON(!decode_watchpoint(encoded_watchpoint, &verif_masked_addr, &verif_size, &verif_is_write)))
			return false;
		if (WARN_ON(verif_masked_addr != (addr & WATCHPOINT_ADDR_MASK)))
			goto fail;
		if (WARN_ON(verif_size != size))
			goto fail;
		if (WARN_ON(is_write != verif_is_write))
			goto fail;

		continue;
fail:
		pr_err("%s fail: %s %zu bytes @ %lx -> encoded: %lx -> %s %zu bytes @ %lx\n",
		       __func__, is_write ? "write" : "read", size, addr, encoded_watchpoint,
		       verif_is_write ? "write" : "read", verif_size, verif_masked_addr);
		return false;
	}

	return true;
}

/* Test access matching function. */
static bool __init test_matching_access(void)
{
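	/*
	 * An illustrative reading of the cases below: matching_access()
	 * reports whether two byte ranges [addr, addr + size) overlap.
	 * Checked in order: exact match, partial overlaps from either
	 * side, then adjacent but non-overlapping ranges.
	 */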
	if (WARN_ON(!matching_access(10, 1, 10, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 2, 11, 1)))
		return false;
	if (WARN_ON(!matching_access(10, 1, 9, 2)))
		return false;
	if (WARN_ON(matching_access(10, 1, 11, 1)))
		return false;
	if (WARN_ON(matching_access(9, 1, 10, 1)))
		return false;

	/*
	 * An access of size 0 could match another access, as demonstrated here.
	 * Rather than add more comparisons to 'matching_access()', which would
	 * end up in the fast-path for *all* checks, check_access() simply
	 * returns for all accesses of size 0.
	 */
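	/*
	 * A plausible form of the overlap predicate (an illustrative
	 * assumption; see encoding.h for the real definition):
	 *
	 *   addr1 <= addr2 + size2 - 1 && addr2 <= addr1 + size1 - 1
	 *
	 * Under it, the zero-size access at 12 still falls within [8, 15].
	 */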
	if (WARN_ON(!matching_access(8, 8, 12, 0)))
		return false;

	return true;
}

/*
 * Correct memory barrier instrumentation is critical to avoiding false
 * positives: a simple boot-time test to check that certain barriers are
 * always properly instrumented. See kcsan_test for a more complete test.
 */
static DEFINE_SPINLOCK(test_spinlock);

static bool __init test_barrier(void)
{
#ifdef CONFIG_KCSAN_WEAK_MEMORY
	struct kcsan_scoped_access *reorder_access = &current->kcsan_ctx.reorder_access;
#else
	struct kcsan_scoped_access *reorder_access = NULL;
#endif
	bool ret = true;
	arch_spinlock_t arch_spinlock = __ARCH_SPIN_LOCK_UNLOCKED;
	atomic_t dummy;
	long test_var;

	if (!reorder_access || !IS_ENABLED(CONFIG_SMP))
		return true;
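
	/*
	 * The macro below plants a fake scoped "reorder access" of size 1,
	 * executes the barrier, and then checks whether instrumentation
	 * reset the size to 0; if it did not, the barrier is not properly
	 * instrumented.
	 */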
#define __KCSAN_CHECK_BARRIER(access_type, barrier, name)					\
	do {											\
		reorder_access->type = (access_type) | KCSAN_ACCESS_SCOPED;			\
		reorder_access->size = 1;							\
		barrier;									\
		if (reorder_access->size != 0) {						\
			pr_err("improperly instrumented type=(" #access_type "): " name "\n");	\
			ret = false;								\
		}										\
	} while (0)
#define KCSAN_CHECK_READ_BARRIER(b)  __KCSAN_CHECK_BARRIER(0, b, #b)
#define KCSAN_CHECK_WRITE_BARRIER(b) __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE, b, #b)
#define KCSAN_CHECK_RW_BARRIER(b)    __KCSAN_CHECK_BARRIER(KCSAN_ACCESS_WRITE | KCSAN_ACCESS_COMPOUND, b, #b)
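
	/*
	 * Each class of planted access (plain read, write, compound
	 * read-write) is checked only against the barriers expected to
	 * order it; e.g. rmb() appears in the read and read-write lists
	 * below, but not in the write list.
	 */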

	kcsan_nestable_atomic_begin(); /* No watchpoints in called functions. */

	KCSAN_CHECK_READ_BARRIER(mb());
	KCSAN_CHECK_READ_BARRIER(rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb());
	KCSAN_CHECK_READ_BARRIER(smp_rmb());
	KCSAN_CHECK_READ_BARRIER(dma_rmb());
	KCSAN_CHECK_READ_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_READ_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_READ_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_READ_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_READ_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_READ_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_READ_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_WRITE_BARRIER(mb());
	KCSAN_CHECK_WRITE_BARRIER(wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb());
	KCSAN_CHECK_WRITE_BARRIER(smp_wmb());
	KCSAN_CHECK_WRITE_BARRIER(dma_wmb());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_WRITE_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_WRITE_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_WRITE_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_WRITE_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(mb());
	KCSAN_CHECK_RW_BARRIER(wmb());
	KCSAN_CHECK_RW_BARRIER(rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb());
	KCSAN_CHECK_RW_BARRIER(smp_wmb());
	KCSAN_CHECK_RW_BARRIER(smp_rmb());
	KCSAN_CHECK_RW_BARRIER(dma_wmb());
	KCSAN_CHECK_RW_BARRIER(dma_rmb());
	KCSAN_CHECK_RW_BARRIER(smp_mb__before_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_atomic());
	KCSAN_CHECK_RW_BARRIER(smp_mb__after_spinlock());
	KCSAN_CHECK_RW_BARRIER(smp_store_mb(test_var, 0));
	KCSAN_CHECK_RW_BARRIER(smp_store_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(xchg_release(&test_var, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(cmpxchg_release(&test_var, 0, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_set_release(&dummy, 0));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_add_return_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(atomic_fetch_add_release(1, &dummy));
	KCSAN_CHECK_RW_BARRIER(test_and_set_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_clear_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(test_and_change_bit(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(clear_bit_unlock(0, &test_var));
	KCSAN_CHECK_RW_BARRIER(__clear_bit_unlock(0, &test_var));
	arch_spin_lock(&arch_spinlock);
	KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock));
	spin_lock(&test_spinlock);
	KCSAN_CHECK_RW_BARRIER(spin_unlock(&test_spinlock));

	KCSAN_CHECK_RW_BARRIER(xor_unlock_is_negative_byte(1, &test_var));
	KCSAN_CHECK_READ_BARRIER(xor_unlock_is_negative_byte(1, &test_var));
	KCSAN_CHECK_WRITE_BARRIER(xor_unlock_is_negative_byte(1, &test_var));

	kcsan_nestable_atomic_end();

	return ret;
}

static int __init kcsan_selftest(void)
{
	int passed = 0;
	int total = 0;

#define RUN_TEST(do_test)                                                      \
	do {                                                                   \
		++total;                                                       \
		if (do_test())                                                 \
			++passed;                                              \
		else                                                           \
			pr_err("selftest: " #do_test " failed");               \
	} while (0)

	RUN_TEST(test_encode_decode);
	RUN_TEST(test_matching_access);
	RUN_TEST(test_barrier);

	pr_info("selftest: %d/%d tests passed\n", passed, total);
	if (passed != total)
		panic("selftests failed");

	return 0;
}
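
/* postcore_initcall: runs early during boot, after core initcalls complete. */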
postcore_initcall(kcsan_selftest);