/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>
#include <asm/perf_event.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

#include <linux/acpi.h>
#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>
/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types.
 */

/* Required events. */
#define ARMV8_PMUV3_PERFCTR_SW_INCR			0x00
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL		0x03
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE			0x04
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED			0x10
#define ARMV8_PMUV3_PERFCTR_CPU_CYCLES			0x11
#define ARMV8_PMUV3_PERFCTR_BR_PRED			0x12

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INST_RETIRED		0x08
#define ARMV8_PMUV3_PERFCTR_INST_SPEC			0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_LD_RETIRED			0x06
#define ARMV8_PMUV3_PERFCTR_ST_RETIRED			0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN			0x09
#define ARMV8_PMUV3_PERFCTR_EXC_RETURN			0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED		0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED		0x0C
#define ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED		0x0D
#define ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED		0x0E
#define ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED	0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED		0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN			0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED			0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL		0x01
#define ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL		0x02
#define ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL		0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS			0x13
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE			0x14
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB		0x15
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE			0x16
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL		0x17
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB		0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS			0x19
#define ARMV8_PMUV3_PERFCTR_MEMORY_ERROR		0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES			0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE		0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE		0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED		0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND		0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND		0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB			0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB			0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE			0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL		0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE		0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL		0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE			0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB		0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL		0x2D
#define ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL		0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB			0x2F
#define ARMV8_PMUV3_PERFCTR_L2I_TLB			0x30
/* ARMv8 recommended implementation defined event types. */
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD		0x40
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR		0x41
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD	0x42
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR	0x43
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_INNER	0x44
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_OUTER	0x45
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_VICTIM	0x46
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_CLEAN		0x47
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_INVAL		0x48
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD		0x4C
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR		0x4D
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD			0x4E
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR			0x4F
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_RD		0x50
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WR		0x51
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_RD	0x52
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_WR	0x53
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_VICTIM	0x56
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_CLEAN		0x57
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_INVAL		0x58
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_RD		0x5C
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_WR		0x5D
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_RD			0x5E
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_WR			0x5F
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD		0x60
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR		0x61
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_SHARED		0x62
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NOT_SHARED	0x63
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NORMAL		0x64
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_PERIPH		0x65
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_RD		0x66
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_WR		0x67
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LD_SPEC		0x68
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_ST_SPEC		0x69
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LDST_SPEC	0x6A
#define ARMV8_IMPDEF_PERFCTR_LDREX_SPEC			0x6C
#define ARMV8_IMPDEF_PERFCTR_STREX_PASS_SPEC		0x6D
#define ARMV8_IMPDEF_PERFCTR_STREX_FAIL_SPEC		0x6E
#define ARMV8_IMPDEF_PERFCTR_STREX_SPEC			0x6F
#define ARMV8_IMPDEF_PERFCTR_LD_SPEC			0x70
#define ARMV8_IMPDEF_PERFCTR_ST_SPEC			0x71
#define ARMV8_IMPDEF_PERFCTR_LDST_SPEC			0x72
#define ARMV8_IMPDEF_PERFCTR_DP_SPEC			0x73
#define ARMV8_IMPDEF_PERFCTR_ASE_SPEC			0x74
#define ARMV8_IMPDEF_PERFCTR_VFP_SPEC			0x75
#define ARMV8_IMPDEF_PERFCTR_PC_WRITE_SPEC		0x76
#define ARMV8_IMPDEF_PERFCTR_CRYPTO_SPEC		0x77
#define ARMV8_IMPDEF_PERFCTR_BR_IMMED_SPEC		0x78
#define ARMV8_IMPDEF_PERFCTR_BR_RETURN_SPEC		0x79
#define ARMV8_IMPDEF_PERFCTR_BR_INDIRECT_SPEC		0x7A
#define ARMV8_IMPDEF_PERFCTR_ISB_SPEC			0x7C
#define ARMV8_IMPDEF_PERFCTR_DSB_SPEC			0x7D
#define ARMV8_IMPDEF_PERFCTR_DMB_SPEC			0x7E
#define ARMV8_IMPDEF_PERFCTR_EXC_UNDEF			0x81
#define ARMV8_IMPDEF_PERFCTR_EXC_SVC			0x82
#define ARMV8_IMPDEF_PERFCTR_EXC_PABORT			0x83
#define ARMV8_IMPDEF_PERFCTR_EXC_DABORT			0x84
#define ARMV8_IMPDEF_PERFCTR_EXC_IRQ			0x86
#define ARMV8_IMPDEF_PERFCTR_EXC_FIQ			0x87
#define ARMV8_IMPDEF_PERFCTR_EXC_SMC			0x88
#define ARMV8_IMPDEF_PERFCTR_EXC_HVC			0x8A
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_PABORT		0x8B
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_DABORT		0x8C
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_OTHER		0x8D
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_IRQ		0x8E
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_FIQ		0x8F
#define ARMV8_IMPDEF_PERFCTR_RC_LD_SPEC			0x90
#define ARMV8_IMPDEF_PERFCTR_RC_ST_SPEC			0x91
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_RD		0xA0
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WR		0xA1
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_RD	0xA2
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_WR	0xA3
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_VICTIM	0xA6
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_CLEAN		0xA7
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_INVAL		0xA8

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREF_LINEFILL			0xC2

/* ARMv8 Cavium ThunderX specific event types. */
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST		0xE9
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS	0xEA
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS	0xEB
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS	0xEC
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS	0xED
/* PMUv3 HW events mapping. */

/*
 * ARMv8 architecturally defined events; not all of these may be supported
 * on any given implementation. Undefined events will be disabled at
 * run-time.
 */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

/* Broadcom Vulcan events mapping. */
static const unsigned armv8_vulcan_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_BR_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};
static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						[PERF_COUNT_HW_CACHE_OP_MAX]
						[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL,
	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREF_LINEFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};
static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						  [PERF_COUNT_HW_CACHE_OP_MAX]
						  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_vulcan_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						 [PERF_COUNT_HW_CACHE_OP_MAX]
						 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,

	[C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD,
	[C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR,
};
static ssize_t
armv8pmu_events_sysfs_show(struct device *dev,
			   struct device_attribute *attr, char *page)
{
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr);

	return sprintf(page, "event=0x%03llx\n", pmu_attr->id);
}
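
/*
 * For example, an attribute defined below with id 0x11
 * (ARMV8_PMUV3_PERFCTR_CPU_CYCLES) reads back from sysfs as "event=0x011".
 */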
#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR(name, armv8_event_attr_##name, \
		       config, armv8pmu_events_sysfs_show)
ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1D_CACHE);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_LD_RETIRED);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_ST_RETIRED);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INST_RETIRED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_RETURN);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CPU_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_BR_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1I_CACHE);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2D_CACHE);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEMORY_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_INST_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
/* Don't expose the chain event in /sys, since it's useless in isolation */
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l2i_tlb_refill, ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l2i_tlb, ARMV8_PMUV3_PERFCTR_L2I_TLB);
static struct attribute *armv8_pmuv3_event_attrs[] = {
	&armv8_event_attr_sw_incr.attr.attr,
	&armv8_event_attr_l1i_cache_refill.attr.attr,
	&armv8_event_attr_l1i_tlb_refill.attr.attr,
	&armv8_event_attr_l1d_cache_refill.attr.attr,
	&armv8_event_attr_l1d_cache.attr.attr,
	&armv8_event_attr_l1d_tlb_refill.attr.attr,
	&armv8_event_attr_ld_retired.attr.attr,
	&armv8_event_attr_st_retired.attr.attr,
	&armv8_event_attr_inst_retired.attr.attr,
	&armv8_event_attr_exc_taken.attr.attr,
	&armv8_event_attr_exc_return.attr.attr,
	&armv8_event_attr_cid_write_retired.attr.attr,
	&armv8_event_attr_pc_write_retired.attr.attr,
	&armv8_event_attr_br_immed_retired.attr.attr,
	&armv8_event_attr_br_return_retired.attr.attr,
	&armv8_event_attr_unaligned_ldst_retired.attr.attr,
	&armv8_event_attr_br_mis_pred.attr.attr,
	&armv8_event_attr_cpu_cycles.attr.attr,
	&armv8_event_attr_br_pred.attr.attr,
	&armv8_event_attr_mem_access.attr.attr,
	&armv8_event_attr_l1i_cache.attr.attr,
	&armv8_event_attr_l1d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_cache.attr.attr,
	&armv8_event_attr_l2d_cache_refill.attr.attr,
	&armv8_event_attr_l2d_cache_wb.attr.attr,
	&armv8_event_attr_bus_access.attr.attr,
	&armv8_event_attr_memory_error.attr.attr,
	&armv8_event_attr_inst_spec.attr.attr,
	&armv8_event_attr_ttbr_write_retired.attr.attr,
	&armv8_event_attr_bus_cycles.attr.attr,
	&armv8_event_attr_l1d_cache_allocate.attr.attr,
	&armv8_event_attr_l2d_cache_allocate.attr.attr,
	&armv8_event_attr_br_retired.attr.attr,
	&armv8_event_attr_br_mis_pred_retired.attr.attr,
	&armv8_event_attr_stall_frontend.attr.attr,
	&armv8_event_attr_stall_backend.attr.attr,
	&armv8_event_attr_l1d_tlb.attr.attr,
	&armv8_event_attr_l1i_tlb.attr.attr,
	&armv8_event_attr_l2i_cache.attr.attr,
	&armv8_event_attr_l2i_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache_allocate.attr.attr,
	&armv8_event_attr_l3d_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache.attr.attr,
	&armv8_event_attr_l3d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_tlb_refill.attr.attr,
	&armv8_event_attr_l2i_tlb_refill.attr.attr,
	&armv8_event_attr_l2d_tlb.attr.attr,
	&armv8_event_attr_l2i_tlb.attr.attr,
	NULL,
};
static umode_t
armv8pmu_event_attr_is_visible(struct kobject *kobj,
			       struct attribute *attr, int unused)
{
	struct device *dev = kobj_to_dev(kobj);
	struct pmu *pmu = dev_get_drvdata(dev);
	struct arm_pmu *cpu_pmu = container_of(pmu, struct arm_pmu, pmu);
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr.attr);

	if (test_bit(pmu_attr->id, cpu_pmu->pmceid_bitmap))
		return attr->mode;

	return 0;
}

static struct attribute_group armv8_pmuv3_events_attr_group = {
	.name = "events",
	.attrs = armv8_pmuv3_event_attrs,
	.is_visible = armv8pmu_event_attr_is_visible,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
	.name = "format",
	.attrs = armv8_pmuv3_format_attrs,
};
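
/*
 * Together, the "events" and "format" groups let the perf tool request
 * counters by name or by raw event number, e.g. (illustrative invocations,
 * PMU instance name depends on the probed CPU):
 *
 *   perf stat -e armv8_pmuv3/cpu_cycles/ ...
 *   perf stat -e armv8_pmuv3/event=0x11/ ...
 *
 * The "config:0-9" format string restricts raw event numbers to ten bits.
 */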
/*
 * Perf Events' indices
 */
#define	ARMV8_IDX_CYCLE_COUNTER	0
#define	ARMV8_IDX_COUNTER0	1
#define	ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define	ARMV8_IDX_TO_COUNTER(x)	\
	(((x) - ARMV8_IDX_COUNTER0) & ARMV8_PMU_COUNTER_MASK)
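
/*
 * For example, perf index ARMV8_IDX_COUNTER0 (1) maps to hardware event
 * counter 0, the value written to PMSELR_EL0 by armv8pmu_select_counter();
 * index 0 is reserved for the dedicated cycle counter (PMCCNTR_EL0).
 */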
static inline u32 armv8pmu_pmcr_read(void)
{
	return read_sysreg(pmcr_el0);
}

static inline void armv8pmu_pmcr_write(u32 val)
{
	val &= ARMV8_PMU_PMCR_MASK;
	isb();
	write_sysreg(val, pmcr_el0);
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
	return pmovsr & ARMV8_PMU_OVERFLOWED_MASK;
}
static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
	return idx >= ARMV8_IDX_CYCLE_COUNTER &&
		idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}

static inline int armv8pmu_select_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(counter, pmselr_el0);
	isb();

	return idx;
}
static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		value = read_sysreg(pmccntr_el0);
	else if (armv8pmu_select_counter(idx) == idx)
		value = read_sysreg(pmxevcntr_el0);

	return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
		/*
		 * Set the upper 32 bits as this is a 64 bit counter but we only
		 * count using the lower 32 bits and we want an interrupt when
		 * it overflows.
		 */
		u64 value64 = 0xffffffff00000000ULL | value;

		write_sysreg(value64, pmccntr_el0);
	} else if (armv8pmu_select_counter(idx) == idx)
		write_sysreg(value, pmxevcntr_el0);
}
static inline void armv8pmu_write_evtype(int idx, u32 val)
{
	if (armv8pmu_select_counter(idx) == idx) {
		val &= ARMV8_PMU_EVTYPE_MASK;
		write_sysreg(val, pmxevtyper_el0);
	}
}

static inline int armv8pmu_enable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmcntenset_el0);
	return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmcntenclr_el0);
	return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmintenset_el1);
	return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);
	write_sysreg(BIT(counter), pmintenclr_el1);
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	write_sysreg(BIT(counter), pmovsclr_el0);
	isb();

	return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
	u32 value;

	/* Read */
	value = read_sysreg(pmovsclr_el0);

	/* Write to clear flags */
	value &= ARMV8_PMU_OVSR_MASK;
	write_sysreg(value, pmovsclr_el0);

	return value;
}
static void armv8pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 */
	armv8pmu_write_evtype(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv8pmu_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv8pmu_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv8pmu_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmovsr;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmovsr = armv8pmu_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv8pmu_has_overflowed(pmovsr))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV8_PMUV3_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV8_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
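
/*
 * For example, on an implementation with six event counters num_events is 7,
 * so the loop above hands out indices 1..6 while index 0 stays reserved for
 * the cycle counter.
 */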
/*
 * Add an event filter to a given event. This will only work for PMUv2 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;
	if (is_kernel_in_hyp_mode() &&
	    attr->exclude_kernel != attr->exclude_hv)
		return -EINVAL;
	if (attr->exclude_user)
		config_base |= ARMV8_PMU_EXCLUDE_EL0;
	if (!is_kernel_in_hyp_mode() && attr->exclude_kernel)
		config_base |= ARMV8_PMU_EXCLUDE_EL1;
	if (!attr->exclude_hv)
		config_base |= ARMV8_PMU_INCLUDE_EL2;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
static void armv8pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv8pmu_disable_counter(idx);
		armv8pmu_disable_intens(idx);
	}

	/*
	 * Initialize & Reset PMNC. Request overflow interrupt for
	 * 64 bit cycle counter but cheat in armv8pmu_write_counter().
	 */
	armv8pmu_pmcr_write(ARMV8_PMU_PMCR_P | ARMV8_PMU_PMCR_C |
			    ARMV8_PMU_PMCR_LC);
}
static int armv8_pmuv3_map_event(struct perf_event *event)
{
	int hw_event_id;
	struct arm_pmu *armpmu = to_arm_pmu(event->pmu);

	hw_event_id = armpmu_map_event(event, &armv8_pmuv3_perf_map,
				       &armv8_pmuv3_perf_cache_map,
				       ARMV8_PMU_EVTYPE_EVENT);
	if (hw_event_id < 0)
		return hw_event_id;

	/* disable micro/arch events not supported by this PMU */
	if ((hw_event_id < ARMV8_PMUV3_MAX_COMMON_EVENTS) &&
	    !test_bit(hw_event_id, armpmu->pmceid_bitmap)) {
		return -EOPNOTSUPP;
	}

	return hw_event_id;
}

static int armv8_a53_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a53_perf_map,
				&armv8_a53_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a57_perf_map,
				&armv8_a57_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_thunder_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_thunder_perf_map,
				&armv8_thunder_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_vulcan_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_vulcan_perf_map,
				&armv8_vulcan_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}
static void __armv8pmu_probe_pmu(void *info)
{
	struct arm_pmu *cpu_pmu = info;
	u32 pmceid[2];

	/* Read the nb of CNTx counters supported from PMNC */
	cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
		& ARMV8_PMU_PMCR_N_MASK;

	/* Add the CPU cycles counter */
	cpu_pmu->num_events += 1;

	pmceid[0] = read_sysreg(pmceid0_el0);
	pmceid[1] = read_sysreg(pmceid1_el0);

	bitmap_from_u32array(cpu_pmu->pmceid_bitmap,
			     ARMV8_PMUV3_MAX_COMMON_EVENTS, pmceid,
			     ARRAY_SIZE(pmceid));
}
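
/*
 * For example, a core reporting PMCR_EL0.N == 6 ends up with num_events == 7
 * (six programmable event counters plus the cycle counter), and the
 * PMCEID0/1 registers populate pmceid_bitmap with the common events this
 * implementation actually supports.
 */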
static int armv8pmu_probe_pmu(struct arm_pmu *cpu_pmu)
{
	return smp_call_function_any(&cpu_pmu->supported_cpus,
				     __armv8pmu_probe_pmu,
				     cpu_pmu, 1);
}
static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
	cpu_pmu->handle_irq		= armv8pmu_handle_irq,
	cpu_pmu->enable			= armv8pmu_enable_event,
	cpu_pmu->disable		= armv8pmu_disable_event,
	cpu_pmu->read_counter		= armv8pmu_read_counter,
	cpu_pmu->write_counter		= armv8pmu_write_counter,
	cpu_pmu->get_event_idx		= armv8pmu_get_event_idx,
	cpu_pmu->start			= armv8pmu_start,
	cpu_pmu->stop			= armv8pmu_stop,
	cpu_pmu->reset			= armv8pmu_reset,
	cpu_pmu->max_period		= (1LLU << 32) - 1,
	cpu_pmu->set_event_filter	= armv8pmu_set_event_filter;
}
static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_pmuv3";
	cpu_pmu->map_event		= armv8_pmuv3_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cortex_a53";
	cpu_pmu->map_event		= armv8_a53_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cortex_a57";
	cpu_pmu->map_event		= armv8_a57_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cortex_a72";
	cpu_pmu->map_event		= armv8_a57_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_cavium_thunder";
	cpu_pmu->map_event		= armv8_thunder_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_vulcan_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name			= "armv8_brcm_vulcan";
	cpu_pmu->map_event		= armv8_vulcan_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}
static const struct of_device_id armv8_pmu_of_device_ids[] = {
	{.compatible = "arm,armv8-pmuv3",	.data = armv8_pmuv3_init},
	{.compatible = "arm,cortex-a53-pmu",	.data = armv8_a53_pmu_init},
	{.compatible = "arm,cortex-a57-pmu",	.data = armv8_a57_pmu_init},
	{.compatible = "arm,cortex-a72-pmu",	.data = armv8_a72_pmu_init},
	{.compatible = "cavium,thunder-pmu",	.data = armv8_thunder_pmu_init},
	{.compatible = "brcm,vulcan-pmu",	.data = armv8_vulcan_pmu_init},
	{},
};

/*
 * Non-DT systems have their micro/arch events probed at run-time.
 * A fairly complete list of generic events is provided, and events that
 * aren't supported by the current PMU are disabled.
 */
static const struct pmu_probe_info armv8_pmu_probe_table[] = {
	PMU_PROBE(0, 0, armv8_pmuv3_init), /* enable all defined counters */
	{ /* sentinel value */ }
};
static int armv8_pmu_device_probe(struct platform_device *pdev)
{
	if (acpi_disabled)
		return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
					    NULL);

	return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
				    armv8_pmu_probe_table);
}

static struct platform_driver armv8_pmu_driver = {
	.driver		= {
		.name	= "armv8-pmu",
		.of_match_table = armv8_pmu_of_device_ids,
	},
	.probe		= armv8_pmu_device_probe,
};

builtin_platform_driver(armv8_pmu_driver);