/linux-6.12.1/arch/powerpc/perf/
D | hv-24x7.c |
    690  size_t event_idx,  in catalog_event_len_validate() argument
    701  if (event_idx >= event_entry_count) {  in catalog_event_len_validate()
    709  event_idx);  in catalog_event_len_validate()
    717  event_idx, ev_len, event);  in catalog_event_len_validate()
    722  event_idx, ev_len, ev_end, end,  in catalog_event_len_validate()
    730  event_idx, event_data_bytes, event, end,  in catalog_event_len_validate()
    737  event_idx, event, ev_end, offset, calc_ev_end);  in catalog_event_len_validate()
    761  event_data_bytes, junk_events, event_idx, event_attr_ct, i,  in create_events_from_catalog() local
    866  for (junk_events = 0, event = event_data, event_idx = 0, attr_max = 0;  in create_events_from_catalog()
    868  event_idx++, event = (void *)event + ev_len) {  in create_events_from_catalog()
    [all …]
|
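The hv-24x7.c matches above outline a validator that walks variable-length event records, checking each entry's index and declared length against the catalog bounds before advancing by `ev_len`. A minimal standalone sketch of that walk, with a hypothetical record layout (the real structures live in the hv-24x7 catalog headers):

```c
#include <stddef.h>
#include <stdint.h>

/* Hypothetical variable-length record: a length prefix followed by
 * payload, mirroring the walk in catalog_event_len_validate() /
 * create_events_from_catalog() (event = (void *)event + ev_len). */
struct record {
	uint16_t len;		/* total record length, header included */
	uint8_t data[];
};

/* Walk 'count' records in [buf, buf + size), rejecting any record
 * whose declared length would run past the end of the buffer.
 * Returns 0 on success, -1 on the first malformed entry. */
static int validate_records(const uint8_t *buf, size_t size, size_t count)
{
	const uint8_t *end = buf + size;
	const uint8_t *cur = buf;
	size_t idx;

	for (idx = 0; idx < count; idx++) {
		const struct record *rec = (const void *)cur;

		if ((size_t)(end - cur) < sizeof(*rec))
			return -1;	/* header itself out of bounds */
		if (rec->len < sizeof(*rec) || (size_t)(end - cur) < rec->len)
			return -1;	/* declared length is bogus */
		cur += rec->len;	/* advance by ev_len, as the kernel does */
	}
	return 0;
}
```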
D | core-book3s.c | 2218 .event_idx = power_pmu_event_idx,
|
/linux-6.12.1/arch/riscv/kvm/ |
D | vcpu_pmu.c |
    222  fevent_code = get_event_code(pmc->event_idx);  in pmu_fw_ctr_read_hi()
    246  fevent_code = get_event_code(pmc->event_idx);  in pmu_ctr_read()
    523  fevent_code = get_event_code(pmc->event_idx);  in kvm_riscv_vcpu_pmu_ctr_start()
    585  fevent_code = get_event_code(pmc->event_idx);  in kvm_riscv_vcpu_pmu_ctr_stop()
    629  pmc->event_idx = SBI_PMU_EVENT_IDX_INVALID;  in kvm_riscv_vcpu_pmu_ctr_stop()
    725  pmc->event_idx = eidx;  in kvm_riscv_vcpu_pmu_ctr_cfg_match()
    800  pmc->event_idx = SBI_PMU_EVENT_IDX_INVALID;  in kvm_riscv_vcpu_pmu_init()
    838  pmc->event_idx = SBI_PMU_EVENT_IDX_INVALID;  in kvm_riscv_vcpu_pmu_deinit()
|
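In the RISC-V KVM PMU code above, `pmc->event_idx` holds the SBI event index for the counter, and a counter is marked free by writing `SBI_PMU_EVENT_IDX_INVALID`. Per the SBI PMU extension, the 20-bit event index packs an event type in bits [19:16] and an event code in bits [15:0]; a hedged sketch of the decode that `get_event_code()` performs (mask names here are illustrative, not the kernel's):

```c
#include <stdint.h>

/* SBI PMU event_idx layout (per the RISC-V SBI PMU extension):
 * bits [19:16] = event type, bits [15:0] = event code. */
#define PMU_EVENT_IDX_TYPE_MASK	0xF0000UL
#define PMU_EVENT_IDX_CODE_MASK	0x0FFFFUL

static inline uint32_t get_event_type(unsigned long eidx)
{
	return (eidx & PMU_EVENT_IDX_TYPE_MASK) >> 16;
}

static inline uint32_t get_event_code(unsigned long eidx)
{
	return eidx & PMU_EVENT_IDX_CODE_MASK;
}
```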
/linux-6.12.1/include/uapi/linux/ |
D | virtio_ring.h |
    222  static inline int vring_need_event(__u16 event_idx, __u16 new_idx, __u16 old)  in vring_need_event() argument
    229  return (__u16)(new_idx - event_idx - 1) < (__u16)(new_idx - old);  in vring_need_event()
|
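`vring_need_event()` is the heart of virtio's event-index interrupt suppression: a kick is needed only if the device's requested event index falls inside the window of descriptors published since the last kick, i.e. in the half-open interval (old, new_idx]. The unsigned 16-bit subtractions make the comparison safe across index wrap-around. A self-contained check of the arithmetic:

```c
#include <assert.h>
#include <stdint.h>

/* Same comparison as vring_need_event(): true iff event_idx lies in
 * the half-open window (old, new_idx], evaluated modulo 2^16. */
static int need_event(uint16_t event_idx, uint16_t new_idx, uint16_t old)
{
	return (uint16_t)(new_idx - event_idx - 1) < (uint16_t)(new_idx - old);
}

int main(void)
{
	/* Device asked to be kicked at index 5; we moved from 3 to 7. */
	assert(need_event(5, 7, 3));
	/* Device asked for index 10; it is outside (3, 7], so no kick. */
	assert(!need_event(10, 7, 3));
	/* Wrap-around: moving from 0xFFFE to 0x0002 crosses event 0x0000. */
	assert(need_event(0x0000, 0x0002, 0xFFFE));
	return 0;
}
```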
/linux-6.12.1/drivers/perf/ |
D | cxl_pmu.c |
     89  int event_idx; /* configurable counters */  member
    203  pmu_ev->event_idx = j;  in cxl_pmu_parse_caps()
    515  int *event_idx)  in cxl_pmu_get_event_idx() argument
    732  int event_idx = 0;  in cxl_pmu_event_add() local
    736  rc = cxl_pmu_get_event_idx(event, &idx, &event_idx);  in cxl_pmu_event_add()
    743  hwc->event_base = event_idx;  in cxl_pmu_event_add()
|
D | marvell_cn10k_tad_pmu.c |
     83  u32 event_idx = event->attr.config;  in tad_pmu_event_counter_start() local
     99  reg_val = event_idx & 0xFF;  in tad_pmu_event_counter_start()
|
D | riscv_pmu_sbi.c |
    118  uint32_t event_idx;  member
    306  0, cmask, 0, edata->event_idx, 0, 0);  in pmu_sbi_check_event()
    312  edata->event_idx = -ENOENT;  in pmu_sbi_check_event()
    490  ret = pmu_cache_event_map[cache_type][cache_op][cache_result].event_idx;  in pmu_event_find_cache()
    523  ret = pmu_hw_event_map[event->attr.config].event_idx;  in pmu_sbi_event_map()
|
D | riscv_pmu.c | 413 .event_idx = riscv_pmu_event_idx, in riscv_pmu_alloc()
|
D | arm_pmuv3.c | 1312 cpu_pmu->pmu.event_idx = armv8pmu_user_event_idx; in armv8_pmu_init()
|
/linux-6.12.1/arch/riscv/include/asm/ |
D | kvm_vcpu_pmu.h | 39 unsigned long event_idx; member
|
/linux-6.12.1/include/net/ |
D | amt.h | 369 u8 event_idx; member
|
D | cfg80211.h |
    7886  int approxlen, int event_idx, gfp_t gfp)  in cfg80211_vendor_event_alloc() argument
    7890  0, event_idx, approxlen, gfp);  in cfg80211_vendor_event_alloc()
    7921  int event_idx, gfp_t gfp)  in cfg80211_vendor_event_alloc_ucast() argument
    7925  portid, event_idx, approxlen, gfp);  in cfg80211_vendor_event_alloc_ucast()
|
/linux-6.12.1/drivers/net/ |
D | amt.c |
     904  index = (amt->event_idx + amt->nr_events) % AMT_MAX_EVENTS;  in amt_queue_event()
     908  amt->event_idx %= AMT_MAX_EVENTS;  in amt_queue_event()
    2877  event = amt->events[amt->event_idx].event;  in amt_event_work()
    2878  skb = amt->events[amt->event_idx].skb;  in amt_event_work()
    2879  amt->events[amt->event_idx].event = AMT_EVENT_NONE;  in amt_event_work()
    2880  amt->events[amt->event_idx].skb = NULL;  in amt_event_work()
    2882  amt->event_idx++;  in amt_event_work()
    2883  amt->event_idx %= AMT_MAX_EVENTS;  in amt_event_work()
    2990  amt->event_idx = 0;  in amt_dev_open()
|
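The amt.c matches above implement a small circular event queue: `event_idx` is the read position, `nr_events` the fill count, and the write slot is `(event_idx + nr_events) % AMT_MAX_EVENTS`. A minimal standalone sketch of the same head/count ring discipline (names hypothetical, locking omitted):

```c
#include <stdbool.h>

#define MAX_EVENTS 16

struct ring {
	int events[MAX_EVENTS];
	int event_idx;	/* read position (head) */
	int nr_events;	/* number of queued entries */
};

/* Enqueue at head + count, as amt_queue_event() does. */
static bool ring_push(struct ring *r, int ev)
{
	int index;

	if (r->nr_events >= MAX_EVENTS)
		return false;	/* queue full, drop the event */
	index = (r->event_idx + r->nr_events) % MAX_EVENTS;
	r->events[index] = ev;
	r->nr_events++;
	return true;
}

/* Dequeue from head, as amt_event_work() does. */
static bool ring_pop(struct ring *r, int *ev)
{
	if (!r->nr_events)
		return false;
	*ev = r->events[r->event_idx];
	r->event_idx = (r->event_idx + 1) % MAX_EVENTS;
	r->nr_events--;
	return true;
}
```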
/linux-6.12.1/arch/x86/events/intel/ |
D | p4.c |
    731  static bool p4_event_match_cpu_model(unsigned int event_idx)  in p4_event_match_cpu_model() argument
    734  if (event_idx == P4_EVENT_INSTR_COMPLETED) {  in p4_event_match_cpu_model()
|
/linux-6.12.1/drivers/nvme/host/ |
D | pci.c |
    346  static inline int nvme_dbbuf_need_event(u16 event_idx, u16 new_idx, u16 old)  in nvme_dbbuf_need_event() argument
    348  return (u16)(new_idx - event_idx - 1) < (u16)(new_idx - old);  in nvme_dbbuf_need_event()
    356  u16 old_value, event_idx;  in nvme_dbbuf_update_and_check_event() local
    375  event_idx = le32_to_cpu(*dbbuf_ei);  in nvme_dbbuf_update_and_check_event()
    376  if (!nvme_dbbuf_need_event(event_idx, value, old_value))  in nvme_dbbuf_update_and_check_event()
|
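NVMe's shadow doorbell support reuses the exact window test from virtio: `nvme_dbbuf_need_event()` is `vring_need_event()` with `u16` spelled differently. The surrounding code publishes the new doorbell value to the shadow buffer, then reads the controller's EventIdx and rings the real MMIO doorbell only if the controller asked for an event in the covered window. A simplified sketch of that sequence (barriers and little-endian conversion elided):

```c
#include <stdbool.h>
#include <stdint.h>

/* Same wrap-safe window test as nvme_dbbuf_need_event(). */
static bool need_event(uint16_t event_idx, uint16_t new_idx, uint16_t old)
{
	return (uint16_t)(new_idx - event_idx - 1) < (uint16_t)(new_idx - old);
}

/* Simplified shadow-doorbell sequence: publish the new doorbell value,
 * then read the controller's EventIdx and decide whether the real MMIO
 * doorbell write can be skipped. The real driver uses le32 shadow
 * buffers and a full barrier between the store and the load.
 * Returns true when the doorbell must actually ring. */
static bool dbbuf_update_and_check(volatile uint32_t *shadow_db,
				   const volatile uint32_t *shadow_ei,
				   uint16_t new_value)
{
	uint16_t old_value = (uint16_t)*shadow_db;

	*shadow_db = new_value;
	/* mb() here in the kernel: order the store before the load. */
	return need_event((uint16_t)*shadow_ei, new_value, old_value);
}
```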
/linux-6.12.1/drivers/virtio/ |
D | virtio_ring.c |
    1556  u16 new, old, off_wrap, flags, wrap_counter, event_idx;  in virtqueue_kick_prepare_packed() local
    1592  event_idx = off_wrap & ~(1 << VRING_PACKED_EVENT_F_WRAP_CTR);  in virtqueue_kick_prepare_packed()
    1594  event_idx -= vq->packed.vring.num;  in virtqueue_kick_prepare_packed()
    1596  needs_kick = vring_need_event(event_idx, new, old);  in virtqueue_kick_prepare_packed()
|
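For packed virtqueues, the device's event suppression word carries a wrap-counter bit alongside the descriptor offset. The code above strips bit `VRING_PACKED_EVENT_F_WRAP_CTR` (bit 15 in the virtio 1.1 spec) and, when the event's wrap counter differs from the driver's, biases the index down by the ring size so the flat-index comparison in `vring_need_event()` still applies. A sketch of just the decode step, simplified from virtqueue_kick_prepare_packed():

```c
#include <stdint.h>

#define VRING_PACKED_EVENT_F_WRAP_CTR	15	/* per the virtio 1.1 spec */

/* Decode the device's event suppression word into a flat index that
 * can be compared against the driver's running descriptor count.
 * ring_size and used_wrap_counter come from the driver's own state. */
static uint16_t packed_event_to_flat_idx(uint16_t off_wrap, uint16_t ring_size,
					 int used_wrap_counter)
{
	uint16_t event_idx = off_wrap & ~(1 << VRING_PACKED_EVENT_F_WRAP_CTR);
	int wrap_counter = off_wrap >> VRING_PACKED_EVENT_F_WRAP_CTR;

	/* If the event's wrap counter differs from the driver's, the
	 * offset refers to the previous lap of the ring. */
	if (wrap_counter != used_wrap_counter)
		event_idx -= ring_size;

	return event_idx;
}
```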
/linux-6.12.1/include/linux/ |
D | perf_event.h | 471 int (*event_idx) (struct perf_event *event); /*optional */ member
|
/linux-6.12.1/drivers/gpu/drm/i915/ |
D | i915_pmu.c | 1301 pmu->base.event_idx = i915_pmu_event_event_idx; in i915_pmu_register()
|
/linux-6.12.1/arch/x86/events/ |
D | core.c | 2702 .event_idx = x86_pmu_event_idx,
|
/linux-6.12.1/kernel/events/ |
D | core.c |
     6216  return event->pmu->event_idx(event);  in perf_event_index()
    11809  if (!pmu->event_idx)  in perf_pmu_register()
    11810  pmu->event_idx = perf_event_idx_default;  in perf_pmu_register()
|
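The last two entries show the contract around `pmu::event_idx`: the op is optional (`/*optional */` in perf_event.h), and `perf_pmu_register()` patches in `perf_event_idx_default` when a driver leaves it NULL, so `perf_event_index()` can call through the pointer unconditionally. A minimal sketch of that fill-in-defaults-at-registration idiom (struct and function names here are illustrative):

```c
#include <stddef.h>

struct event;

struct pmu_ops {
	/* Optional: drivers may leave this NULL. */
	int (*event_idx)(struct event *ev);
};

/* The kernel's default reports index 0, meaning "not user-mapped". */
static int event_idx_default(struct event *ev)
{
	return 0;
}

/* Registration patches in defaults once, so every later call site can
 * invoke ops->event_idx() without a NULL check, exactly as
 * perf_event_index() does. */
static void pmu_register(struct pmu_ops *ops)
{
	if (!ops->event_idx)
		ops->event_idx = event_idx_default;
}
```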