Home
last modified time | relevance | path

Searched refs:X86_PMC_IDX_MAX (Results 1 – 13 of 13) sorted by relevance

/linux-6.12.1/arch/x86/events/
perf_event.h:53 unsigned long idxmsk[BITS_TO_LONGS(X86_PMC_IDX_MAX)];
121 struct perf_event *owners[X86_PMC_IDX_MAX];
122 struct event_constraint event_constraints[X86_PMC_IDX_MAX];
197 enum intel_excl_state_type state[X86_PMC_IDX_MAX];
240 struct perf_event *events[X86_PMC_IDX_MAX]; /* in counter order */
241 unsigned long active_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)];
242 unsigned long dirty[BITS_TO_LONGS(X86_PMC_IDX_MAX)];
252 int assign[X86_PMC_IDX_MAX]; /* event to counter assignment */
253 u64 tags[X86_PMC_IDX_MAX];
255 struct perf_event *event_list[X86_PMC_IDX_MAX]; /* in enabled order */
[all …]
core.c:213 for_each_set_bit(i, (unsigned long *)&cntr_mask, X86_PMC_IDX_MAX) { in reserve_pmc_hardware()
218 for_each_set_bit(i, (unsigned long *)&cntr_mask, X86_PMC_IDX_MAX) { in reserve_pmc_hardware()
229 i = X86_PMC_IDX_MAX; in reserve_pmc_hardware()
244 for_each_set_bit(i, (unsigned long *)&cntr_mask, X86_PMC_IDX_MAX) { in release_pmc_hardware()
269 for_each_set_bit(i, cntr_mask, X86_PMC_IDX_MAX) { in check_hw_exists()
288 for_each_set_bit(i, fixed_cntr_mask, X86_PMC_IDX_MAX) { in check_hw_exists()
689 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in x86_pmu_disable_all()
746 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in x86_pmu_enable_all()
883 for_each_set_bit_from(idx, c->idxmsk, X86_PMC_IDX_MAX) { in __perf_sched_find_counter()
1004 for (i = 0, wmin = X86_PMC_IDX_MAX, wmax = 0; i < n; i++) { in x86_schedule_events()
[all …]
/linux-6.12.1/arch/x86/kvm/
pmu.c:497 DECLARE_BITMAP(bitmap, X86_PMC_IDX_MAX); in kvm_pmu_handle_event()
502 bitmap_copy(bitmap, pmu->reprogram_pmi, X86_PMC_IDX_MAX); in kvm_pmu_handle_event()
730 bitmap_zero(pmu->reprogram_pmi, X86_PMC_IDX_MAX); in kvm_pmu_reset()
776 bitmap_zero(pmu->all_valid_pmc_idx, X86_PMC_IDX_MAX); in kvm_pmu_refresh()
808 DECLARE_BITMAP(bitmask, X86_PMC_IDX_MAX); in kvm_pmu_cleanup()
814 pmu->pmc_in_use, X86_PMC_IDX_MAX); in kvm_pmu_cleanup()
823 bitmap_zero(pmu->pmc_in_use, X86_PMC_IDX_MAX); in kvm_pmu_cleanup()
866 DECLARE_BITMAP(bitmap, X86_PMC_IDX_MAX); in kvm_pmu_trigger_event()
871 BUILD_BUG_ON(sizeof(pmu->global_ctrl) * BITS_PER_BYTE != X86_PMC_IDX_MAX); in kvm_pmu_trigger_event()
874 bitmap_copy(bitmap, pmu->all_valid_pmc_idx, X86_PMC_IDX_MAX); in kvm_pmu_trigger_event()
[all …]
pmu.h:93 for_each_set_bit(i, bitmap, X86_PMC_IDX_MAX) \
243 for_each_set_bit(bit, (unsigned long *)&diff, X86_PMC_IDX_MAX) in reprogram_counters()
/linux-6.12.1/arch/x86/events/amd/
core.c:305 static unsigned int event_offsets[X86_PMC_IDX_MAX] __read_mostly;
306 static unsigned int count_offsets[X86_PMC_IDX_MAX] __read_mostly;
435 for_each_set_bit(i, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in __amd_put_nb_event_constraints()
547 for_each_set_bit(i, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in amd_alloc_nb()
740 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in amd_pmu_check_overflow()
760 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in amd_pmu_enable_all()
983 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in amd_pmu_v2_handle_irq()
/linux-6.12.1/arch/x86/events/intel/
p4.c:922 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in p4_pmu_disable_all()
986 static DEFINE_PER_CPU(unsigned long [BITS_TO_LONGS(X86_PMC_IDX_MAX)], p4_running);
1001 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in p4_pmu_enable_all()
1043 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in p4_pmu_handle_irq()
1242 unsigned long used_mask[BITS_TO_LONGS(X86_PMC_IDX_MAX)]; in p4_pmu_schedule_events()
1252 bitmap_zero(used_mask, X86_PMC_IDX_MAX); in p4_pmu_schedule_events()
1398 for_each_set_bit(i, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in p4_pmu_init()
knc.c:242 for_each_set_bit(bit, (unsigned long *)&status, X86_PMC_IDX_MAX) { in knc_pmu_handle_irq()
core.c:3003 for_each_set_bit(bit, (unsigned long *)&guest_pebs_idxs, X86_PMC_IDX_MAX) { in x86_pmu_handle_guest_pebs()
3104 for_each_set_bit(bit, (unsigned long *)&status, X86_PMC_IDX_MAX) { in handle_pmi_common()
3623 for_each_set_bit(i, c->idxmsk, X86_PMC_IDX_MAX) { in intel_get_excl_constraints()
3675 bitmap_copy(c1->idxmsk, c2->idxmsk, X86_PMC_IDX_MAX); in intel_get_event_constraints()
4265 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in core_guest_get_msrs()
4298 for_each_set_bit(idx, x86_pmu.cntr_mask, X86_PMC_IDX_MAX) { in core_pmu_enable_all()
4768 size_t sz = X86_PMC_IDX_MAX * sizeof(struct event_constraint); in intel_cpuc_prepare()
ds.c:2407 intel_pmu_pebs_event_update_no_drain(cpuc, X86_PMC_IDX_MAX); in intel_pmu_drain_pebs_icl()
2417 for_each_set_bit(bit, (unsigned long *)&pebs_status, X86_PMC_IDX_MAX) in intel_pmu_drain_pebs_icl()
2421 for_each_set_bit(bit, (unsigned long *)&mask, X86_PMC_IDX_MAX) { in intel_pmu_drain_pebs_icl()
lbr.c:943 int i, j, pos = 0, order[X86_PMC_IDX_MAX]; in intel_pmu_lbr_counters_reorder()
/linux-6.12.1/arch/x86/include/asm/
perf_event.h:15 #define X86_PMC_IDX_MAX 64 macro
kvm_host.h:568 DECLARE_BITMAP(reprogram_pmi, X86_PMC_IDX_MAX);
571 DECLARE_BITMAP(all_valid_pmc_idx, X86_PMC_IDX_MAX);
572 DECLARE_BITMAP(pmc_in_use, X86_PMC_IDX_MAX);
/linux-6.12.1/arch/x86/events/zhaoxin/
core.c:386 for_each_set_bit(bit, (unsigned long *)&status, X86_PMC_IDX_MAX) { in zhaoxin_pmu_handle_irq()