arch/arm/include/asm/arm_pmuv3.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_PMUV3_H
#define __ASM_PMUV3_H

#include <asm/cp15.h>
#include <asm/cputype.h>

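/*
 * CP15 definitions for the AArch32 PMUv3 registers. Each register is
 * addressed by its (CRn, opc1, CRm, opc2) encoding; PMCCNTR is the
 * 64-bit cycle counter and uses the 64-bit (MRRC/MCRR) access form.
 */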
#define PMCCNTR                 __ACCESS_CP15_64(0, c9)

#define PMCR                    __ACCESS_CP15(c9,  0, c12, 0)
#define PMCNTENSET              __ACCESS_CP15(c9,  0, c12, 1)
#define PMCNTENCLR              __ACCESS_CP15(c9,  0, c12, 2)
#define PMOVSR                  __ACCESS_CP15(c9,  0, c12, 3)
#define PMSELR                  __ACCESS_CP15(c9,  0, c12, 5)
#define PMCEID0                 __ACCESS_CP15(c9,  0, c12, 6)
#define PMCEID1                 __ACCESS_CP15(c9,  0, c12, 7)
#define PMXEVTYPER              __ACCESS_CP15(c9,  0, c13, 1)
#define PMXEVCNTR               __ACCESS_CP15(c9,  0, c13, 2)
#define PMUSERENR               __ACCESS_CP15(c9,  0, c14, 0)
#define PMINTENSET              __ACCESS_CP15(c9,  0, c14, 1)
#define PMINTENCLR              __ACCESS_CP15(c9,  0, c14, 2)
#define PMMIR                   __ACCESS_CP15(c9,  0, c14, 6)
#define PMCCFILTR               __ACCESS_CP15(c14, 0, c15, 7)

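/* Per-event counter registers PMEVCNTR<n>, n = 0..30 */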
#define PMEVCNTR0               __ACCESS_CP15(c14, 0, c8, 0)
#define PMEVCNTR1               __ACCESS_CP15(c14, 0, c8, 1)
#define PMEVCNTR2               __ACCESS_CP15(c14, 0, c8, 2)
#define PMEVCNTR3               __ACCESS_CP15(c14, 0, c8, 3)
#define PMEVCNTR4               __ACCESS_CP15(c14, 0, c8, 4)
#define PMEVCNTR5               __ACCESS_CP15(c14, 0, c8, 5)
#define PMEVCNTR6               __ACCESS_CP15(c14, 0, c8, 6)
#define PMEVCNTR7               __ACCESS_CP15(c14, 0, c8, 7)
#define PMEVCNTR8               __ACCESS_CP15(c14, 0, c9, 0)
#define PMEVCNTR9               __ACCESS_CP15(c14, 0, c9, 1)
#define PMEVCNTR10              __ACCESS_CP15(c14, 0, c9, 2)
#define PMEVCNTR11              __ACCESS_CP15(c14, 0, c9, 3)
#define PMEVCNTR12              __ACCESS_CP15(c14, 0, c9, 4)
#define PMEVCNTR13              __ACCESS_CP15(c14, 0, c9, 5)
#define PMEVCNTR14              __ACCESS_CP15(c14, 0, c9, 6)
#define PMEVCNTR15              __ACCESS_CP15(c14, 0, c9, 7)
#define PMEVCNTR16              __ACCESS_CP15(c14, 0, c10, 0)
#define PMEVCNTR17              __ACCESS_CP15(c14, 0, c10, 1)
#define PMEVCNTR18              __ACCESS_CP15(c14, 0, c10, 2)
#define PMEVCNTR19              __ACCESS_CP15(c14, 0, c10, 3)
#define PMEVCNTR20              __ACCESS_CP15(c14, 0, c10, 4)
#define PMEVCNTR21              __ACCESS_CP15(c14, 0, c10, 5)
#define PMEVCNTR22              __ACCESS_CP15(c14, 0, c10, 6)
#define PMEVCNTR23              __ACCESS_CP15(c14, 0, c10, 7)
#define PMEVCNTR24              __ACCESS_CP15(c14, 0, c11, 0)
#define PMEVCNTR25              __ACCESS_CP15(c14, 0, c11, 1)
#define PMEVCNTR26              __ACCESS_CP15(c14, 0, c11, 2)
#define PMEVCNTR27              __ACCESS_CP15(c14, 0, c11, 3)
#define PMEVCNTR28              __ACCESS_CP15(c14, 0, c11, 4)
#define PMEVCNTR29              __ACCESS_CP15(c14, 0, c11, 5)
#define PMEVCNTR30              __ACCESS_CP15(c14, 0, c11, 6)

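/* Per-event type registers PMEVTYPER<n>, n = 0..30 */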
#define PMEVTYPER0              __ACCESS_CP15(c14, 0, c12, 0)
#define PMEVTYPER1              __ACCESS_CP15(c14, 0, c12, 1)
#define PMEVTYPER2              __ACCESS_CP15(c14, 0, c12, 2)
#define PMEVTYPER3              __ACCESS_CP15(c14, 0, c12, 3)
#define PMEVTYPER4              __ACCESS_CP15(c14, 0, c12, 4)
#define PMEVTYPER5              __ACCESS_CP15(c14, 0, c12, 5)
#define PMEVTYPER6              __ACCESS_CP15(c14, 0, c12, 6)
#define PMEVTYPER7              __ACCESS_CP15(c14, 0, c12, 7)
#define PMEVTYPER8              __ACCESS_CP15(c14, 0, c13, 0)
#define PMEVTYPER9              __ACCESS_CP15(c14, 0, c13, 1)
#define PMEVTYPER10             __ACCESS_CP15(c14, 0, c13, 2)
#define PMEVTYPER11             __ACCESS_CP15(c14, 0, c13, 3)
#define PMEVTYPER12             __ACCESS_CP15(c14, 0, c13, 4)
#define PMEVTYPER13             __ACCESS_CP15(c14, 0, c13, 5)
#define PMEVTYPER14             __ACCESS_CP15(c14, 0, c13, 6)
#define PMEVTYPER15             __ACCESS_CP15(c14, 0, c13, 7)
#define PMEVTYPER16             __ACCESS_CP15(c14, 0, c14, 0)
#define PMEVTYPER17             __ACCESS_CP15(c14, 0, c14, 1)
#define PMEVTYPER18             __ACCESS_CP15(c14, 0, c14, 2)
#define PMEVTYPER19             __ACCESS_CP15(c14, 0, c14, 3)
#define PMEVTYPER20             __ACCESS_CP15(c14, 0, c14, 4)
#define PMEVTYPER21             __ACCESS_CP15(c14, 0, c14, 5)
#define PMEVTYPER22             __ACCESS_CP15(c14, 0, c14, 6)
#define PMEVTYPER23             __ACCESS_CP15(c14, 0, c14, 7)
#define PMEVTYPER24             __ACCESS_CP15(c14, 0, c15, 0)
#define PMEVTYPER25             __ACCESS_CP15(c14, 0, c15, 1)
#define PMEVTYPER26             __ACCESS_CP15(c14, 0, c15, 2)
#define PMEVTYPER27             __ACCESS_CP15(c14, 0, c15, 3)
#define PMEVTYPER28             __ACCESS_CP15(c14, 0, c15, 4)
#define PMEVTYPER29             __ACCESS_CP15(c14, 0, c15, 5)
#define PMEVTYPER30             __ACCESS_CP15(c14, 0, c15, 6)

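/*
 * The PMEVCNTR<n>/PMEVTYPER<n> register number cannot be selected at
 * run time, so PMEVN_SWITCH() (provided by the common PMUv3 code)
 * expands to a switch statement that applies the case macro below to
 * the matching register.
 */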
#define RETURN_READ_PMEVCNTRN(n) \
        return read_sysreg(PMEVCNTR##n)
static inline unsigned long read_pmevcntrn(int n)
{
        PMEVN_SWITCH(n, RETURN_READ_PMEVCNTRN);
        return 0;
}

#define WRITE_PMEVCNTRN(n) \
        write_sysreg(val, PMEVCNTR##n)
static inline void write_pmevcntrn(int n, unsigned long val)
{
        PMEVN_SWITCH(n, WRITE_PMEVCNTRN);
}

#define WRITE_PMEVTYPERN(n) \
        write_sysreg(val, PMEVTYPER##n)
static inline void write_pmevtypern(int n, unsigned long val)
{
        PMEVN_SWITCH(n, WRITE_PMEVTYPERN);
}

static inline unsigned long read_pmmir(void)
{
        return read_sysreg(PMMIR);
}

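/* The PMU version is reported in ID_DFR0.PerfMon, bits [27:24] */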
static inline u32 read_pmuver(void)
{
        /* PMUVers is not a signed field */
        u32 dfr0 = read_cpuid_ext(CPUID_EXT_DFR0);

        return (dfr0 >> 24) & 0xf;
}

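/*
 * Plain read/write accessors with the names expected by the shared
 * PMUv3 driver code; they mirror the arm64 sysreg-based versions.
 */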
static inline void write_pmcr(u32 val)
{
        write_sysreg(val, PMCR);
}

static inline u32 read_pmcr(void)
{
        return read_sysreg(PMCR);
}

static inline void write_pmselr(u32 val)
{
        write_sysreg(val, PMSELR);
}

static inline void write_pmccntr(u64 val)
{
        write_sysreg(val, PMCCNTR);
}

static inline u64 read_pmccntr(void)
{
        return read_sysreg(PMCCNTR);
}

static inline void write_pmxevcntr(u32 val)
{
        write_sysreg(val, PMXEVCNTR);
}

static inline u32 read_pmxevcntr(void)
{
        return read_sysreg(PMXEVCNTR);
}

static inline void write_pmxevtyper(u32 val)
{
        write_sysreg(val, PMXEVTYPER);
}

static inline void write_pmcntenset(u32 val)
{
        write_sysreg(val, PMCNTENSET);
}

static inline void write_pmcntenclr(u32 val)
{
        write_sysreg(val, PMCNTENCLR);
}

static inline void write_pmintenset(u32 val)
{
        write_sysreg(val, PMINTENSET);
}

static inline void write_pmintenclr(u32 val)
{
        write_sysreg(val, PMINTENCLR);
}

static inline void write_pmccfiltr(u32 val)
{
        write_sysreg(val, PMCCFILTR);
}

static inline void write_pmovsclr(u32 val)
{
        write_sysreg(val, PMOVSR);
}

static inline u32 read_pmovsclr(void)
{
        return read_sysreg(PMOVSR);
}

static inline void write_pmuserenr(u32 val)
{
        write_sysreg(val, PMUSERENR);
}

static inline u32 read_pmceid0(void)
{
        return read_sysreg(PMCEID0);
}

static inline u32 read_pmceid1(void)
{
        return read_sysreg(PMCEID1);
}

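/*
 * 32-bit ARM has no KVM host support, so the KVM hooks used by the
 * shared PMUv3 driver are empty stubs here.
 */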
static inline void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr) {}
static inline void kvm_clr_pmu_events(u32 clr) {}
static inline bool kvm_pmu_counter_deferred(struct perf_event_attr *attr)
{
        return false;
}

static inline bool kvm_set_pmuserenr(u64 val)
{
        return false;
}

static inline void kvm_vcpu_pmu_resync_el0(void) {}

/* PMU Version in DFR Register */
#define ARMV8_PMU_DFR_VER_NI        0
#define ARMV8_PMU_DFR_VER_V3P4      0x5
#define ARMV8_PMU_DFR_VER_V3P5      0x6
#define ARMV8_PMU_DFR_VER_IMP_DEF   0xF

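/*
 * PMUv3 is present unless PerfMon reads as 0 (not implemented) or
 * 0xF (IMPLEMENTATION DEFINED).
 */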
static inline bool pmuv3_implemented(int pmuver)
{
        return !(pmuver == ARMV8_PMU_DFR_VER_IMP_DEF ||
                 pmuver == ARMV8_PMU_DFR_VER_NI);
}

static inline bool is_pmuv3p4(int pmuver)
{
        return pmuver >= ARMV8_PMU_DFR_VER_V3P4;
}

static inline bool is_pmuv3p5(int pmuver)
{
        return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
}

#endif