Lines matching refs:ctr_mask
259 unsigned long ctr_mask) in kvm_pmu_validate_counter_mask() argument
262 if (!ctr_mask || (ctr_base + __fls(ctr_mask) >= kvm_pmu_num_counters(kvpmu))) in kvm_pmu_validate_counter_mask()
481 unsigned long ctr_mask, unsigned long flags, u64 ival, in kvm_riscv_vcpu_pmu_ctr_start() argument
490 if (kvm_pmu_validate_counter_mask(kvpmu, ctr_base, ctr_mask) < 0) { in kvm_riscv_vcpu_pmu_ctr_start()
508 for_each_set_bit(i, &ctr_mask, RISCV_MAX_COUNTERS) { in kvm_riscv_vcpu_pmu_ctr_start()
557 unsigned long ctr_mask, unsigned long flags, in kvm_riscv_vcpu_pmu_ctr_stop() argument
568 if (kvm_pmu_validate_counter_mask(kvpmu, ctr_base, ctr_mask) < 0) { in kvm_riscv_vcpu_pmu_ctr_stop()
579 for_each_set_bit(i, &ctr_mask, RISCV_MAX_COUNTERS) { in kvm_riscv_vcpu_pmu_ctr_stop()
654 unsigned long ctr_mask, unsigned long flags, in kvm_riscv_vcpu_pmu_ctr_cfg_match() argument
680 if (kvm_pmu_validate_counter_mask(kvpmu, ctr_base, ctr_mask) < 0) { in kvm_riscv_vcpu_pmu_ctr_cfg_match()
697 if (!test_bit(ctr_base + __ffs(ctr_mask), kvpmu->pmc_in_use)) { in kvm_riscv_vcpu_pmu_ctr_cfg_match()
701 ctr_idx = ctr_base + __ffs(ctr_mask); in kvm_riscv_vcpu_pmu_ctr_cfg_match()
703 ctr_idx = pmu_get_pmc_index(kvpmu, eidx, ctr_base, ctr_mask); in kvm_riscv_vcpu_pmu_ctr_cfg_match()