ARM: perf: index ARMv7 event counters starting from zero
The current ARMv7 PMU backend indexes event counters from two, with index zero being reserved and index one being used to represent the cycle counter. This patch tidies up the code by indexing from one instead (with zero for the cycle counter). This allows us to remove many of the accessor macros along with the counter enumeration and makes the code much more readable.

Acked-by: Jamie Iles <jamie@jamieiles.com>
Reviewed-by: Jean Pihet <j-pihet@ti.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
parent 25e29c7c0f
commit c691bb6249

1 changed file with 90 additions and 153 deletions
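For readers who want the net effect of the re-indexing at a glance, here is a stand-alone sketch (plain C, using a hypothetical num_events of 7 in place of the driver's armpmu->num_events) of how a perf event index now maps onto a hardware counter; it is an illustration only, not code from the patch:

#include <stdio.h>

/* Indices used by the perf backend after this patch. */
#define ARMV7_IDX_CYCLE_COUNTER	0
#define ARMV7_IDX_COUNTER0	1

/* Mapping from a perf index to the underlying hardware event counter. */
#define ARMV7_MAX_COUNTERS	32
#define ARMV7_COUNTER_MASK	(ARMV7_MAX_COUNTERS - 1)
#define ARMV7_IDX_TO_COUNTER(x)	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)

int main(void)
{
	/* Hypothetical PMU with a cycle counter plus six event counters. */
	int num_events = 7;

	printf("perf idx %d -> cycle counter (PMCCNTR)\n", ARMV7_IDX_CYCLE_COUNTER);

	/*
	 * Event counters now occupy perf indices 1..num_events-1 and map
	 * onto hardware event counters 0..num_events-2.
	 */
	for (int idx = ARMV7_IDX_COUNTER0; idx < num_events; ++idx)
		printf("perf idx %d -> hw event counter %d\n",
		       idx, ARMV7_IDX_TO_COUNTER(idx));

	return 0;
}

Under the old scheme the same loop would have started at ARMV7_COUNTER0 == 2 and needed ARMV7_EVENT_CNT_TO_CNTx to recover the hardware counter number; the new macros keep that offset in one place.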
@@ -676,24 +676,25 @@ static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
 };
 
 /*
- * Perf Events counters
+ * Perf Events' indices
  */
-enum armv7_counters {
-	ARMV7_CYCLE_COUNTER	= 1,	/* Cycle counter */
-	ARMV7_COUNTER0		= 2,	/* First event counter */
-};
+#define ARMV7_IDX_CYCLE_COUNTER	0
+#define ARMV7_IDX_COUNTER0	1
+#define ARMV7_IDX_COUNTER_LAST	(ARMV7_IDX_CYCLE_COUNTER + armpmu->num_events - 1)
 
-/*
- * The cycle counter is ARMV7_CYCLE_COUNTER.
- * The first event counter is ARMV7_COUNTER0.
- * The last event counter is (ARMV7_COUNTER0 + armpmu->num_events - 1).
- */
-#define ARMV7_COUNTER_LAST	(ARMV7_COUNTER0 + armpmu->num_events - 1)
+#define ARMV7_MAX_COUNTERS	32
+#define ARMV7_COUNTER_MASK	(ARMV7_MAX_COUNTERS - 1)
 
 /*
  * ARMv7 low level PMNC access
  */
 
+/*
+ * Perf Event to low level counters mapping
+ */
+#define ARMV7_IDX_TO_COUNTER(x)	\
+	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
+
 /*
  * Per-CPU PMNC: config reg
  */
@@ -707,54 +708,14 @@ enum armv7_counters {
 #define ARMV7_PMNC_N_MASK	0x1f
 #define ARMV7_PMNC_MASK	0x3f	/* Mask for writable bits */
 
-/*
- * Available counters
- */
-#define ARMV7_CNT0		0	/* First event counter */
-#define ARMV7_CCNT		31	/* Cycle counter */
-
-/* Perf Event to low level counters mapping */
-#define ARMV7_EVENT_CNT_TO_CNTx	(ARMV7_COUNTER0 - ARMV7_CNT0)
-
-/*
- * CNTENS: counters enable reg
- */
-#define ARMV7_CNTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_CNTENS_C		(1 << ARMV7_CCNT)
-
-/*
- * CNTENC: counters disable reg
- */
-#define ARMV7_CNTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_CNTENC_C		(1 << ARMV7_CCNT)
-
-/*
- * INTENS: counters overflow interrupt enable reg
- */
-#define ARMV7_INTENS_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_INTENS_C		(1 << ARMV7_CCNT)
-
-/*
- * INTENC: counters overflow interrupt disable reg
- */
-#define ARMV7_INTENC_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_INTENC_C		(1 << ARMV7_CCNT)
-
 /*
  * EVTSEL: Event selection reg
  */
 #define ARMV7_EVTSEL_MASK	0xff	/* Mask for writable bits */
 
-/*
- * SELECT: Counter selection reg
- */
-#define ARMV7_SELECT_MASK	0x1f	/* Mask for writable bits */
-
 /*
  * FLAG: counters overflow flag status reg
  */
-#define ARMV7_FLAG_P(idx)	(1 << (idx - ARMV7_EVENT_CNT_TO_CNTx))
-#define ARMV7_FLAG_C		(1 << ARMV7_CCNT)
 #define ARMV7_FLAG_MASK	0xffffffff	/* Mask for writable bits */
 #define ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK
 
@@ -777,34 +738,39 @@ static inline int armv7_pmnc_has_overflowed(u32 pmnc)
 	return pmnc & ARMV7_OVERFLOWED_MASK;
 }
 
-static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc,
-					enum armv7_counters counter)
+static inline int armv7_pmnc_counter_valid(int idx)
+{
+	return idx >= ARMV7_IDX_CYCLE_COUNTER && idx <= ARMV7_IDX_COUNTER_LAST;
+}
+
+static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
 {
 	int ret = 0;
+	u32 counter;
 
-	if (counter == ARMV7_CYCLE_COUNTER)
-		ret = pmnc & ARMV7_FLAG_C;
-	else if ((counter >= ARMV7_COUNTER0) && (counter <= ARMV7_COUNTER_LAST))
-		ret = pmnc & ARMV7_FLAG_P(counter);
-	else
+	if (!armv7_pmnc_counter_valid(idx)) {
 		pr_err("CPU%u checking wrong counter %d overflow status\n",
-			smp_processor_id(), counter);
+			smp_processor_id(), idx);
+	} else {
+		counter = ARMV7_IDX_TO_COUNTER(idx);
+		ret = pmnc & BIT(counter);
+	}
 
 	return ret;
 }
 
 static inline int armv7_pmnc_select_counter(int idx)
 {
-	u32 val;
+	u32 counter;
 
-	if ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST)) {
-		pr_err("CPU%u selecting wrong PMNC counter"
-			" %d\n", smp_processor_id(), idx);
-		return -1;
+	if (!armv7_pmnc_counter_valid(idx)) {
+		pr_err("CPU%u selecting wrong PMNC counter %d\n",
+			smp_processor_id(), idx);
+		return -EINVAL;
 	}
 
-	val = (idx - ARMV7_EVENT_CNT_TO_CNTx) & ARMV7_SELECT_MASK;
-	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (val));
+	counter = ARMV7_IDX_TO_COUNTER(idx);
+	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
 	isb();
 
 	return idx;
@@ -814,30 +780,26 @@ static inline u32 armv7pmu_read_counter(int idx)
 {
 	u32 value = 0;
 
-	if (idx == ARMV7_CYCLE_COUNTER)
-		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
-	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
-		if (armv7_pmnc_select_counter(idx) == idx)
-			asm volatile("mrc p15, 0, %0, c9, c13, 2"
-				: "=r" (value));
-	} else
+	if (!armv7_pmnc_counter_valid(idx))
 		pr_err("CPU%u reading wrong counter %d\n",
 			smp_processor_id(), idx);
+	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
+		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
+	else if (armv7_pmnc_select_counter(idx) == idx)
+		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
 
 	return value;
 }
 
 static inline void armv7pmu_write_counter(int idx, u32 value)
 {
-	if (idx == ARMV7_CYCLE_COUNTER)
-		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
-	else if ((idx >= ARMV7_COUNTER0) && (idx <= ARMV7_COUNTER_LAST)) {
-		if (armv7_pmnc_select_counter(idx) == idx)
-			asm volatile("mcr p15, 0, %0, c9, c13, 2"
-				: : "r" (value));
-	} else
+	if (!armv7_pmnc_counter_valid(idx))
 		pr_err("CPU%u writing wrong counter %d\n",
 			smp_processor_id(), idx);
+	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
+		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
+	else if (armv7_pmnc_select_counter(idx) == idx)
+		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
 }
 
 static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
@@ -850,86 +812,61 @@ static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
 
 static inline int armv7_pmnc_enable_counter(int idx)
 {
-	u32 val;
+	u32 counter;
 
-	if ((idx != ARMV7_CYCLE_COUNTER) &&
-	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-		pr_err("CPU%u enabling wrong PMNC counter"
-			" %d\n", smp_processor_id(), idx);
-		return -1;
+	if (!armv7_pmnc_counter_valid(idx)) {
+		pr_err("CPU%u enabling wrong PMNC counter %d\n",
+			smp_processor_id(), idx);
+		return -EINVAL;
 	}
 
-	if (idx == ARMV7_CYCLE_COUNTER)
-		val = ARMV7_CNTENS_C;
-	else
-		val = ARMV7_CNTENS_P(idx);
-
-	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (val));
-
+	counter = ARMV7_IDX_TO_COUNTER(idx);
+	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
 	return idx;
 }
 
 static inline int armv7_pmnc_disable_counter(int idx)
 {
-	u32 val;
+	u32 counter;
 
-	if ((idx != ARMV7_CYCLE_COUNTER) &&
-	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-		pr_err("CPU%u disabling wrong PMNC counter"
-			" %d\n", smp_processor_id(), idx);
-		return -1;
+	if (!armv7_pmnc_counter_valid(idx)) {
+		pr_err("CPU%u disabling wrong PMNC counter %d\n",
+			smp_processor_id(), idx);
+		return -EINVAL;
 	}
 
-	if (idx == ARMV7_CYCLE_COUNTER)
-		val = ARMV7_CNTENC_C;
-	else
-		val = ARMV7_CNTENC_P(idx);
-
-	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (val));
-
+	counter = ARMV7_IDX_TO_COUNTER(idx);
+	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
 	return idx;
 }
 
 static inline int armv7_pmnc_enable_intens(int idx)
 {
-	u32 val;
+	u32 counter;
 
-	if ((idx != ARMV7_CYCLE_COUNTER) &&
-	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-		pr_err("CPU%u enabling wrong PMNC counter"
-			" interrupt enable %d\n", smp_processor_id(), idx);
-		return -1;
+	if (!armv7_pmnc_counter_valid(idx)) {
+		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
+			smp_processor_id(), idx);
+		return -EINVAL;
 	}
 
-	if (idx == ARMV7_CYCLE_COUNTER)
-		val = ARMV7_INTENS_C;
-	else
-		val = ARMV7_INTENS_P(idx);
-
-	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (val));
-
+	counter = ARMV7_IDX_TO_COUNTER(idx);
+	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
 	return idx;
 }
 
 static inline int armv7_pmnc_disable_intens(int idx)
 {
-	u32 val;
+	u32 counter;
 
-	if ((idx != ARMV7_CYCLE_COUNTER) &&
-	    ((idx < ARMV7_COUNTER0) || (idx > ARMV7_COUNTER_LAST))) {
-		pr_err("CPU%u disabling wrong PMNC counter"
-			" interrupt enable %d\n", smp_processor_id(), idx);
-		return -1;
+	if (!armv7_pmnc_counter_valid(idx)) {
+		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
+			smp_processor_id(), idx);
+		return -EINVAL;
 	}
 
-	if (idx == ARMV7_CYCLE_COUNTER)
-		val = ARMV7_INTENC_C;
-	else
-		val = ARMV7_INTENC_P(idx);
-
-	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (val));
-
+	counter = ARMV7_IDX_TO_COUNTER(idx);
+	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
 	return idx;
 }
 
@@ -973,14 +910,14 @@ static void armv7_pmnc_dump_regs(void)
 	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
 	printk(KERN_INFO "CCNT =0x%08x\n", val);
 
-	for (cnt = ARMV7_COUNTER0; cnt < ARMV7_COUNTER_LAST; cnt++) {
+	for (cnt = ARMV7_IDX_COUNTER0; cnt <= ARMV7_IDX_COUNTER_LAST; cnt++) {
 		armv7_pmnc_select_counter(cnt);
 		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
 		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
-			cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
+			ARMV7_IDX_TO_COUNTER(cnt), val);
 		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
 		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
-			cnt-ARMV7_EVENT_CNT_TO_CNTx, val);
+			ARMV7_IDX_TO_COUNTER(cnt), val);
 	}
 }
 #endif
@@ -1004,7 +941,7 @@ static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
 	 * Set event (if destined for PMNx counters)
 	 * We don't need to set the event if it's a cycle count
 	 */
-	if (idx != ARMV7_CYCLE_COUNTER)
+	if (idx != ARMV7_IDX_CYCLE_COUNTER)
 		armv7_pmnc_write_evtsel(idx, hwc->config_base);
 
 	/*
@@ -1069,7 +1006,7 @@ static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
 	perf_sample_data_init(&data, 0);
 
 	cpuc = &__get_cpu_var(cpu_hw_events);
-	for (idx = 0; idx <= armpmu->num_events; ++idx) {
+	for (idx = 0; idx < armpmu->num_events; ++idx) {
 		struct perf_event *event = cpuc->events[idx];
 		struct hw_perf_event *hwc;
 
@@ -1132,23 +1069,23 @@ static int armv7pmu_get_event_idx(struct cpu_hw_events *cpuc,
 
 	/* Always place a cycle counter into the cycle counter. */
 	if (event->config_base == ARMV7_PERFCTR_CPU_CYCLES) {
-		if (test_and_set_bit(ARMV7_CYCLE_COUNTER, cpuc->used_mask))
+		if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
 			return -EAGAIN;
 
-		return ARMV7_CYCLE_COUNTER;
-	} else {
-		/*
-		 * For anything other than a cycle counter, try and use
-		 * the events counters
-		 */
-		for (idx = ARMV7_COUNTER0; idx <= armpmu->num_events; ++idx) {
-			if (!test_and_set_bit(idx, cpuc->used_mask))
-				return idx;
-		}
-
-		/* The counters are all in use. */
-		return -EAGAIN;
-	}
+		return ARMV7_IDX_CYCLE_COUNTER;
+	}
+
+	/*
+	 * For anything other than a cycle counter, try and use
+	 * the events counters
+	 */
+	for (idx = ARMV7_IDX_COUNTER0; idx < armpmu->num_events; ++idx) {
+		if (!test_and_set_bit(idx, cpuc->used_mask))
+			return idx;
+	}
+
+	/* The counters are all in use. */
+	return -EAGAIN;
 }
 
 static void armv7pmu_reset(void *info)
@@ -1156,7 +1093,7 @@ static void armv7pmu_reset(void *info)
 	u32 idx, nb_cnt = armpmu->num_events;
 
 	/* The counter and interrupt enable registers are unknown at reset. */
-	for (idx = 1; idx < nb_cnt; ++idx)
+	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx)
 		armv7pmu_disable_event(NULL, idx);
 
 	/* Initialize & Reset PMNC: C and P bits */