/* SPDX-License-Identifier: (LGPL-2.1 OR BSD-2-Clause) */
/* Copyright (c) 2019 Facebook */

#ifndef __LIBBPF_LIBBPF_UTIL_H
#define __LIBBPF_LIBBPF_UTIL_H

#include <stdbool.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Use these barrier functions instead of smp_[rw]mb() when they are
 * used in a libbpf header file. That way they can be built into the
 * application that uses libbpf.
 *
 * __asm__ (rather than the bare "asm" keyword) is used so that the
 * macros also expand cleanly when the including application is built
 * in a strict ISO C mode (e.g. -std=c11), where "asm" is not a
 * keyword.
 */
#if defined(__i386__) || defined(__x86_64__)
/* x86 is strongly ordered (TSO): loads are not reordered with loads
 * and stores are not reordered with stores, so the read and write
 * barriers only need to be compiler barriers.
 */
# define libbpf_smp_rmb() __asm__ volatile("" : : : "memory")
# define libbpf_smp_wmb() __asm__ volatile("" : : : "memory")
/* Full barrier: a locked read-modify-write on the stack orders all
 * prior loads and stores. Use the stack pointer register that actually
 * exists for the mode being compiled: %rsp in 64-bit, %esp in 32-bit
 * (%rsp does not assemble for __i386__).
 */
# if defined(__x86_64__)
#  define libbpf_smp_mb() \
	__asm__ volatile("lock; addl $0,-4(%%rsp)" : : : "memory", "cc")
# else
#  define libbpf_smp_mb() \
	__asm__ volatile("lock; addl $0,-4(%%esp)" : : : "memory", "cc")
# endif
/* Hinders stores to be observed before older loads. */
# define libbpf_smp_rwmb() __asm__ volatile("" : : : "memory")
#elif defined(__aarch64__)
# define libbpf_smp_rmb() __asm__ volatile("dmb ishld" : : : "memory")
# define libbpf_smp_wmb() __asm__ volatile("dmb ishst" : : : "memory")
# define libbpf_smp_mb() __asm__ volatile("dmb ish" : : : "memory")
# define libbpf_smp_rwmb() libbpf_smp_mb()
#elif defined(__arm__)
/* These are only valid for armv7 and above */
# define libbpf_smp_rmb() __asm__ volatile("dmb ish" : : : "memory")
# define libbpf_smp_wmb() __asm__ volatile("dmb ishst" : : : "memory")
# define libbpf_smp_mb() __asm__ volatile("dmb ish" : : : "memory")
# define libbpf_smp_rwmb() libbpf_smp_mb()
#else
/* Architecture missing native barrier functions; fall back to the
 * compiler's full-barrier builtin.
 */
# define libbpf_smp_rmb() __sync_synchronize()
# define libbpf_smp_wmb() __sync_synchronize()
# define libbpf_smp_mb() __sync_synchronize()
# define libbpf_smp_rwmb() __sync_synchronize()
#endif

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif /* __LIBBPF_LIBBPF_UTIL_H */