Lines matching refs: ARMV8_PMU_CYCLE_IDX
86 return (pmc->idx == ARMV8_PMU_CYCLE_IDX || in kvm_pmc_is_64bit()
94 return (pmc->idx < ARMV8_PMU_CYCLE_IDX && (val & ARMV8_PMU_PMCR_LP)) || in kvm_pmc_has_64bit_overflow()
95 (pmc->idx == ARMV8_PMU_CYCLE_IDX && (val & ARMV8_PMU_PMCR_LC)); in kvm_pmc_has_64bit_overflow()
100 return (!(pmc->idx & 1) && (pmc->idx + 1) < ARMV8_PMU_CYCLE_IDX && in kvm_pmu_counter_can_chain()
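The chaining hit at line 100 is truncated by the listing, but the visible part already captures the indexing rule: only an even-numbered counter can chain, and only if its odd-numbered partner is still an event counter, i.e. sits below the cycle counter slot. A sketch of just that visible test (the kernel condition continues past what the listing shows):

    #include <stdbool.h>

    #define ARMV8_PMU_CYCLE_IDX  31

    /* Even counter n chains into counter n + 1; the pair must lie entirely
     * below index 31, since the cycle counter never takes part in chaining. */
    static bool pmu_counter_can_chain_idx(unsigned int idx)
    {
        return !(idx & 1) && (idx + 1) < ARMV8_PMU_CYCLE_IDX;
    }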
106 return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCNTR_EL0 : PMEVCNTR0_EL0 + idx; in counter_index_to_reg()
111 return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCFILTR_EL0 : PMEVTYPER0_EL0 + idx; in counter_index_to_evtreg()
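Lines 106 and 111 are the two index-to-register maps: index 31 selects the dedicated cycle counter registers, every other index selects the matching slot in the PMEVCNTRn_EL0/PMEVTYPERn_EL0 arrays. A toy model with hypothetical register identifiers (the real ones come from KVM's vcpu_sysreg enum):

    #define ARMV8_PMU_CYCLE_IDX  31

    /* Hypothetical identifiers for illustration only. */
    enum pmu_reg {
        PMEVCNTR0_EL0  = 0,    /* ..PMEVCNTR30_EL0 follow contiguously */
        PMCCNTR_EL0    = 100,
        PMEVTYPER0_EL0 = 200,  /* ..PMEVTYPER30_EL0 follow contiguously */
        PMCCFILTR_EL0  = 300,
    };

    static enum pmu_reg counter_index_to_reg(unsigned int idx)
    {
        return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCNTR_EL0 : PMEVCNTR0_EL0 + idx;
    }

    static enum pmu_reg counter_index_to_evtreg(unsigned int idx)
    {
        return (idx == ARMV8_PMU_CYCLE_IDX) ? PMCCFILTR_EL0 : PMEVTYPER0_EL0 + idx;
    }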
158 if (vcpu_mode_is_32bit(vcpu) && pmc->idx != ARMV8_PMU_CYCLE_IDX && in kvm_pmu_set_pmc_value()
273 return BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
275 return GENMASK(val - 1, 0) | BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_valid_counter_mask()
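Lines 273 and 275 derive the valid-counter mask from PMCR_EL0.N: the low N bits cover the implemented event counters, and bit 31 for the cycle counter is always valid, even when N is zero. A self-contained sketch, assuming the architectural N field at bits [15:11] and user-space stand-ins for the kernel's BIT()/GENMASK():

    #include <stdint.h>

    #define ARMV8_PMU_CYCLE_IDX  31
    #define BIT(n)               (1ULL << (n))
    #define GENMASK(h, l)        (((1ULL << ((h) - (l) + 1)) - 1) << (l))

    static uint64_t pmu_valid_counter_mask(uint64_t pmcr)
    {
        unsigned int n = (pmcr >> 11) & 0x1f;  /* PMCR_EL0.N: implemented event counters */

        if (n == 0)                            /* no event counters: cycle counter only */
            return BIT(ARMV8_PMU_CYCLE_IDX);
        return GENMASK(n - 1, 0) | BIT(ARMV8_PMU_CYCLE_IDX);
    }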
451 for_each_set_bit(i, &mask, ARMV8_PMU_CYCLE_IDX) { in kvm_pmu_counter_increment()
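At line 451, ARMV8_PMU_CYCLE_IDX is used as the bound of for_each_set_bit(), so the walk covers bits 0..30 only: increment propagation never touches the cycle counter. A plain-loop equivalent of that bounded iteration:

    #include <stdint.h>

    #define ARMV8_PMU_CYCLE_IDX  31

    /* Visit only the event counters set in mask; bounding the loop at
     * index 31 excludes the cycle counter, as the bounded
     * for_each_set_bit() does in the kernel. */
    static void for_each_event_counter(uint64_t mask, void (*fn)(unsigned int))
    {
        for (unsigned int i = 0; i < ARMV8_PMU_CYCLE_IDX; i++)
            if (mask & (1ULL << i))
                fn(i);
    }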
574 kvm_pmu_set_counter_value(vcpu, ARMV8_PMU_CYCLE_IDX, 0); in kvm_pmu_handle_pmcr()
578 mask &= ~BIT(ARMV8_PMU_CYCLE_IDX); in kvm_pmu_handle_pmcr()
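The two PMCR hits at lines 574 and 578 implement the architectural reset semantics: PMCR_EL0.C clears only the cycle counter, while PMCR_EL0.P clears the event counters and explicitly leaves the cycle counter alone, hence the masking-out of bit 31. A sketch under those assumptions:

    #include <stdint.h>

    #define ARMV8_PMU_CYCLE_IDX  31
    #define ARMV8_PMU_PMCR_P     (1 << 1)  /* reset all event counters */
    #define ARMV8_PMU_PMCR_C     (1 << 2)  /* reset the cycle counter */
    #define BIT(n)               (1ULL << (n))

    static void pmu_handle_pmcr_resets(uint64_t pmcr, uint64_t counters[32],
                                       uint64_t valid_mask)
    {
        if (pmcr & ARMV8_PMU_PMCR_C)
            counters[ARMV8_PMU_CYCLE_IDX] = 0;                       /* line 574 */

        if (pmcr & ARMV8_PMU_PMCR_P) {
            uint64_t mask = valid_mask & ~BIT(ARMV8_PMU_CYCLE_IDX);  /* line 578 */
            for (unsigned int i = 0; i < ARMV8_PMU_CYCLE_IDX; i++)
                if (mask & BIT(i))
                    counters[i] = 0;
        }
    }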
609 if (pmc->idx == ARMV8_PMU_CYCLE_IDX) in kvm_pmu_create_perf_event()
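Finally, line 609 is where the special-casing matters for the host side: when KVM backs a guest counter with a perf event, the cycle counter has no guest-programmable event selector, so its config is forced to the fixed CPU_CYCLES event (architectural event 0x0011). A hedged paraphrase of that selection, not the actual kernel function, with the continuation after line 609 assumed:

    #include <stdint.h>

    #define ARMV8_PMU_CYCLE_IDX             31
    #define ARMV8_PMUV3_PERFCTR_CPU_CYCLES  0x0011  /* architectural event number */

    /* The cycle counter always counts CPU cycles; event counters use the
     * event number programmed in PMEVTYPERn_EL0, filtered through the
     * mask of events the PMU actually implements. */
    static uint64_t pmc_perf_event_config(unsigned int idx, uint64_t evtyper,
                                          uint64_t event_mask)
    {
        if (idx == ARMV8_PMU_CYCLE_IDX)
            return ARMV8_PMUV3_PERFCTR_CPU_CYCLES;
        return evtyper & event_mask;
    }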