
Searched refs: event_list (Results 1 – 25 of 58), sorted by relevance


/linux-6.12.1/drivers/usb/usbip/
usbip_event.c
20 static LIST_HEAD(event_list);
47 if (!list_empty(&event_list)) { in get_event()
48 ue = list_first_entry(&event_list, struct usbip_event, node); in get_event()
156 list_for_each_entry_reverse(ue, &event_list, node) { in usbip_event_add()
167 list_add_tail(&ue->node, &event_list); in usbip_event_add()
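The usbip hits above show the canonical kernel event-queue idiom: a static LIST_HEAD(event_list) that producers append to with list_add_tail() and a consumer drains with list_first_entry(). Below is a minimal, self-contained userspace C sketch of the same intrusive-list pattern; the list helpers mirror <linux/list.h> semantics, and struct usbip_like_event is a hypothetical stand-in, not the real usbip type.

    #include <stddef.h>
    #include <stdio.h>

    /* Minimal intrusive doubly linked list, mirroring <linux/list.h> semantics. */
    struct list_head { struct list_head *next, *prev; };

    #define LIST_HEAD_INIT(name) { &(name), &(name) }

    static void list_add_tail(struct list_head *new, struct list_head *head)
    {
        new->prev = head->prev;
        new->next = head;
        head->prev->next = new;
        head->prev = new;
    }

    static void list_del(struct list_head *entry)
    {
        entry->prev->next = entry->next;
        entry->next->prev = entry->prev;
    }

    static int list_empty(const struct list_head *head)
    {
        return head->next == head;
    }

    struct usbip_like_event {   /* hypothetical stand-in for struct usbip_event */
        int type;
        struct list_head node;
    };

    static struct list_head event_list = LIST_HEAD_INIT(event_list);

    int main(void)
    {
        struct usbip_like_event a = { .type = 1 }, b = { .type = 2 };

        list_add_tail(&a.node, &event_list);   /* producer side, cf. usbip_event_add() */
        list_add_tail(&b.node, &event_list);

        while (!list_empty(&event_list)) {     /* consumer side, cf. get_event() */
            struct list_head *first = event_list.next;
            struct usbip_like_event *ue = (struct usbip_like_event *)
                ((char *)first - offsetof(struct usbip_like_event, node));
            list_del(first);
            printf("event type %d\n", ue->type);
        }
        return 0;
    }

In the kernel the producer and consumer run concurrently, so the real code brackets these list operations with a spinlock; the single-threaded sketch omits that.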
/linux-6.12.1/tools/perf/scripts/python/
netdev-times.py
122 event_list = hunk['event_list']
123 for i in range(len(event_list)):
124 event = event_list[i]
129 if i == len(event_list) - 1:
348 event_list = 0
354 event_list = net_rx_dic[cpu]['event_list']
356 if irq_list == [] or event_list == 0:
359 'irq_list':irq_list, 'event_list':event_list}
367 event_list = net_rx_dic[cpu]['event_list']
371 event_list.append(rec_data)
[all …]
/linux-6.12.1/drivers/acpi/
evged.c
45 struct list_head event_list; member
137 list_add_tail(&event->node, &geddev->event_list); in acpi_ged_request_interrupt()
151 INIT_LIST_HEAD(&geddev->event_list); in ged_probe()
168 list_for_each_entry_safe(event, next, &geddev->event_list, node) { in ged_shutdown()
/linux-6.12.1/drivers/dma/idxd/
perfmon.c
69 idxd_pmu->event_list[n] = leader; in perfmon_collect_events()
70 idxd_pmu->event_list[n]->hw.idx = n; in perfmon_collect_events()
85 idxd_pmu->event_list[n] = event; in perfmon_collect_events()
86 idxd_pmu->event_list[n]->hw.idx = n; in perfmon_collect_events()
154 event = fake_pmu->event_list[i]; in perfmon_validate_group()
254 event = idxd->idxd_pmu->event_list[i]; in perfmon_counter_overflow()
352 if (event != idxd->idxd_pmu->event_list[i]) in perfmon_pmu_event_stop()
356 idxd->idxd_pmu->event_list[i - 1] = idxd->idxd_pmu->event_list[i]; in perfmon_pmu_event_stop()
/linux-6.12.1/drivers/gpu/drm/
drm_file.c
154 INIT_LIST_HEAD(&file->event_list); in drm_file_alloc()
205 list_for_each_entry_safe(e, et, &file_priv->event_list, link) { in drm_events_release()
258 WARN_ON(!list_empty(&file->event_list)); in drm_file_free()
538 if (!list_empty(&file_priv->event_list)) { in drm_read()
539 e = list_first_entry(&file_priv->event_list, in drm_read()
557 !list_empty(&file_priv->event_list)); in drm_read()
569 list_add(&e->link, &file_priv->event_list); in drm_read()
614 if (!list_empty(&file_priv->event_list)) in drm_poll()
755 &e->file_priv->event_list); in drm_send_event_helper()
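One detail worth noting in drm_read() above: it dequeues the first event (line 539), and if it cannot deliver it to the caller it re-queues the event with list_add() at the head (line 569) rather than list_add_tail(), so delivery order is preserved for the next read. A sketch of that pop/push-back step, reusing the list helpers from the usbip sketch above; deliver_one and copy_out are illustrative names, not DRM API.

    /* list_add() pushes at the head instead of the tail. */
    static void list_add(struct list_head *new, struct list_head *head)
    {
        new->next = head->next;
        new->prev = head;
        head->next->prev = new;
        head->next = new;
    }

    /* Pop one event; on a failed copy, put it back at the head so the
     * next read sees it first (cf. drm_read() above). */
    static int deliver_one(struct list_head *event_list,
                           int (*copy_out)(struct list_head *e))
    {
        struct list_head *e;

        if (list_empty(event_list))
            return 0;
        e = event_list->next;
        list_del(e);
        if (copy_out(e) != 0) {
            list_add(e, event_list);   /* re-queue at head, preserving order */
            return -1;
        }
        return 1;
    }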
/linux-6.12.1/drivers/infiniband/hw/mlx5/
devx.c
67 struct list_head event_list; /* headed in ev_file->event_list or in member
86 struct list_head event_list; member
1677 struct list_head event_list; member
1691 INIT_LIST_HEAD(&ev_queue->event_list); in devx_init_event_queue()
1735 INIT_LIST_HEAD(&ev_file->event_list); in UVERBS_HANDLER()
1759 list_add_tail(&async_data->list, &ev_queue->event_list); in devx_query_callback()
2104 list_add_tail(&event_sub->event_list, &sub_list); in UVERBS_HANDLER()
2129 list_for_each_entry_safe(event_sub, tmp_sub, &sub_list, event_list) { in UVERBS_HANDLER()
2133 list_del_init(&event_sub->event_list); in UVERBS_HANDLER()
2162 list_for_each_entry_safe(event_sub, tmp_sub, &sub_list, event_list) { in UVERBS_HANDLER()
[all …]
/linux-6.12.1/drivers/gpu/drm/exynos/
exynos_drm_g2d.c
228 struct list_head event_list; member
375 list_add_tail(&node->event->base.link, &file_priv->event_list); in g2d_add_cmdlist_to_inuse()
919 if (list_empty(&runqueue_node->event_list)) in g2d_finish_event()
922 e = list_first_entry(&runqueue_node->event_list, in g2d_finish_event()
1307 struct list_head *event_list; in exynos_g2d_exec_ioctl() local
1314 event_list = &runqueue_node->event_list; in exynos_g2d_exec_ioctl()
1316 INIT_LIST_HEAD(event_list); in exynos_g2d_exec_ioctl()
1321 list_splice_init(&file_priv->event_list, event_list); in exynos_g2d_exec_ioctl()
1353 INIT_LIST_HEAD(&file_priv->event_list); in g2d_open()
exynos_drm_drv.h
188 struct list_head event_list; member
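exynos_g2d_exec_ioctl() above moves every pending event from file_priv->event_list onto the runqueue node with list_splice_init() (line 1321), an O(1) transfer that also re-initializes the source list. A sketch of what that helper does, again assuming the struct list_head helpers from the first sketch:

    /* O(1) transfer of one whole list onto the front of another, then
     * reinit the source; mirrors list_splice_init() semantics. */
    static void list_splice_init(struct list_head *list, struct list_head *head)
    {
        if (!list_empty(list)) {
            struct list_head *first = list->next, *last = list->prev;

            first->prev = head;
            last->next = head->next;
            head->next->prev = last;
            head->next = first;

            list->next = list;   /* source becomes an empty list again */
            list->prev = list;
        }
    }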
/linux-6.12.1/drivers/infiniband/core/
uverbs_main.c
170 list_for_each_entry_safe(evt, tmp, &uobj->event_list, obj_list) { in ib_uverbs_release_uevent()
235 while (list_empty(&ev_queue->event_list)) { in ib_uverbs_event_read()
246 (!list_empty(&ev_queue->event_list) || in ib_uverbs_event_read()
253 event = list_entry(ev_queue->event_list.next, struct ib_uverbs_event, list); in ib_uverbs_event_read()
259 list_del(ev_queue->event_list.next); in ib_uverbs_event_read()
309 if (!list_empty(&ev_queue->event_list)) in ib_uverbs_event_poll()
393 list_add_tail(&entry->list, &ev_queue->event_list); in ib_uverbs_comp_handler()
428 list_add_tail(&entry->list, &async_file->ev_queue.event_list); in ib_uverbs_async_handler()
442 &eobj->event_list, &eobj->events_reported); in uverbs_uobj_event()
481 INIT_LIST_HEAD(&ev_queue->event_list); in ib_uverbs_init_event_queue()
ucma.c
80 struct list_head event_list; member
319 list_add_tail(&uevent->list, &ctx->file->event_list); in ucma_connect_event_handler()
354 list_add_tail(&uevent->list, &ctx->file->event_list); in ucma_event_handler()
386 while (list_empty(&file->event_list)) { in ucma_get_event()
393 !list_empty(&file->event_list))) in ucma_get_event()
399 uevent = list_first_entry(&file->event_list, struct ucma_event, list); in ucma_get_event()
510 list_for_each_entry_safe(uevent, tmp, &mc->ctx->file->event_list, list) { in ucma_cleanup_mc_events()
529 list_for_each_entry_safe(uevent, tmp, &ctx->file->event_list, list) { in ucma_cleanup_ctx_events()
1617 LIST_HEAD(event_list); in ucma_migrate_id()
1663 list_for_each_entry_safe(uevent, tmp, &cur_file->event_list, list) in ucma_migrate_id()
[all …]
uverbs.h
122 struct list_head event_list; member
185 struct list_head event_list; member
uverbs_std_types_cq.c
113 INIT_LIST_HEAD(&obj->uevent.event_list); in UVERBS_HANDLER()
uverbs_std_types_wq.c
69 INIT_LIST_HEAD(&obj->uevent.event_list); in UVERBS_HANDLER()
/linux-6.12.1/include/media/i2c/
saa6588.h
22 poll_table *event_list; member
/linux-6.12.1/arch/x86/events/intel/
uncore.c
409 box->event_list[n] = leader; in uncore_collect_events()
424 box->event_list[n] = event; in uncore_collect_events()
472 c = uncore_get_event_constraint(box, box->event_list[i]); in uncore_assign_events()
480 hwc = &box->event_list[i]->hw; in uncore_assign_events()
506 uncore_put_event_constraint(box, box->event_list[i]); in uncore_assign_events()
619 event = box->event_list[i]; in uncore_pmu_event_add()
637 event = box->event_list[i]; in uncore_pmu_event_add()
672 if (event == box->event_list[i]) { in uncore_pmu_event_del()
676 box->event_list[i - 1] = box->event_list[i]; in uncore_pmu_event_del()
/linux-6.12.1/drivers/net/wireless/ath/ath10k/
qmi.h
97 struct list_head event_list; member
qmi.c
888 list_add_tail(&event->list, &qmi->event_list); in ath10k_qmi_driver_event_post()
1034 while (!list_empty(&qmi->event_list)) { in ath10k_qmi_driver_event_work()
1035 event = list_first_entry(&qmi->event_list, in ath10k_qmi_driver_event_work()
1104 INIT_LIST_HEAD(&qmi->event_list); in ath10k_qmi_init()
/linux-6.12.1/drivers/firewire/
core-cdev.c
57 struct list_head event_list; member
300 INIT_LIST_HEAD(&client->event_list); in fw_device_op_open()
324 list_add_tail(&event->link, &client->event_list); in queue_event()
338 !list_empty(&client->event_list) || in dequeue_event()
343 if (list_empty(&client->event_list) && in dequeue_event()
348 event = list_first_entry(&client->event_list, struct event, link); in dequeue_event()
1889 list_for_each_entry_safe(event, next_event, &client->event_list, link) in fw_device_op_release()
1906 if (!list_empty(&client->event_list)) in fw_device_op_poll()
/linux-6.12.1/include/drm/
drm_file.h
371 struct list_head event_list; member
/linux-6.12.1/net/vmw_vsock/
virtio_transport.c
62 struct virtio_vsock_event event_list[8]; member
401 for (i = 0; i < ARRAY_SIZE(vsock->event_list); i++) { in virtio_vsock_event_fill()
402 struct virtio_vsock_event *event = &vsock->event_list[i]; in virtio_vsock_event_fill()
/linux-6.12.1/drivers/scsi/
virtio_scsi.c
81 struct virtio_scsi_event_node event_list[VIRTIO_SCSI_EVENT_LEN]; member
260 vscsi->event_list[i].vscsi = vscsi; in virtscsi_kick_event_all()
261 virtscsi_kick_event(vscsi, &vscsi->event_list[i]); in virtscsi_kick_event_all()
277 cancel_work_sync(&vscsi->event_list[i].work); in virtscsi_cancel_event_work()
/linux-6.12.1/arch/x86/events/
core.c
1019 c = static_call(x86_pmu_get_event_constraints)(cpuc, i, cpuc->event_list[i]); in x86_schedule_events()
1033 hwc = &cpuc->event_list[i]->hw; in x86_schedule_events()
1104 e = cpuc->event_list[i]; in x86_schedule_events()
1151 cpuc->event_list[n] = event; in collect_event()
1308 event = cpuc->event_list[i]; in x86_pmu_enable()
1335 event = cpuc->event_list[i]; in x86_pmu_enable()
1635 if (event == cpuc->event_list[i]) in x86_pmu_del()
1650 cpuc->event_list[i-1] = cpuc->event_list[i]; in x86_pmu_del()
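The x86 PMU code here (like the uncore, idxd, and Intel IOMMU perfmon hits above) keeps scheduled events in a fixed array rather than a linked list: collect_event() appends at index n, and x86_pmu_del() deletes by shifting the array tail left by one (line 1650). A self-contained sketch of that add/delete scheme, with ints standing in for the kernel's struct perf_event pointers and illustrative names throughout:

    #include <stdio.h>

    #define MAX_EVENTS 8   /* stand-in for the PMU's counter budget */

    struct pmu_like {
        int n_events;
        int event_list[MAX_EVENTS];
    };

    static int collect_event(struct pmu_like *p, int ev)
    {
        if (p->n_events >= MAX_EVENTS)
            return -1;                /* no room left, cf. collect_event() */
        p->event_list[p->n_events++] = ev;
        return 0;
    }

    static void pmu_del(struct pmu_like *p, int ev)
    {
        int i;

        for (i = 0; i < p->n_events; i++)
            if (p->event_list[i] == ev)
                break;
        if (i == p->n_events)
            return;                   /* not scheduled */
        for (i++; i < p->n_events; i++)
            p->event_list[i - 1] = p->event_list[i];  /* shift tail left, cf. x86_pmu_del() */
        p->n_events--;
    }

    int main(void)
    {
        struct pmu_like p = { 0 };

        collect_event(&p, 10);
        collect_event(&p, 20);
        collect_event(&p, 30);
        pmu_del(&p, 20);
        for (int i = 0; i < p.n_events; i++)
            printf("%d\n", p.event_list[i]);   /* prints 10 then 30 */
        return 0;
    }

The array keeps events densely packed in scheduling order, which is what lets the constraint-assignment loops above index events by counter slot.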
/linux-6.12.1/drivers/iommu/intel/
perfmon.c
410 iommu_pmu->event_list[idx] = event; in iommu_pmu_assign_event()
466 iommu_pmu->event_list[idx] = NULL; in iommu_pmu_del()
505 event = iommu_pmu->event_list[i]; in iommu_pmu_counter_overflow()
/linux-6.12.1/kernel/events/
internal.h
36 struct list_head event_list; member
/linux-6.12.1/net/sctp/
ulpqueue.c
742 struct sk_buff_head *event_list; in sctp_ulpq_retrieve_ordered() local
751 event_list = (struct sk_buff_head *) sctp_event2skb(event)->prev; in sctp_ulpq_retrieve_ordered()
776 __skb_queue_tail(event_list, pos); in sctp_ulpq_retrieve_ordered()
