/linux-6.12.1/drivers/perf/

marvell_cn10k_ddr_pmu.c
    410  u64 prev_count, new_count, mask;  in cn10k_ddr_perf_event_update() local
    413  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_perf_event_update()
    415  } while (local64_xchg(&hwc->prev_count, new_count) != prev_count);  in cn10k_ddr_perf_event_update()
    419  local64_add((new_count - prev_count) & mask, &event->count);  in cn10k_ddr_perf_event_update()
    428  local64_set(&hwc->prev_count, 0);  in cn10k_ddr_perf_event_start()
    546  local64_set(&hwc->prev_count, 0);  in cn10k_ddr_perf_event_update_all()
    554  u64 prev_count, new_count;  in cn10k_ddr_pmu_overflow_handler() local
    561  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_pmu_overflow_handler()
    567  if (new_count < prev_count)  in cn10k_ddr_pmu_overflow_handler()
    574  prev_count = local64_read(&hwc->prev_count);  in cn10k_ddr_pmu_overflow_handler()
    [all …]

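The hits above are the local64_xchg() flavour of the usual perf counter-update idiom: re-read the hardware counter until prev_count can be swapped for the fresh raw value, then accumulate the masked difference so a wrapped counter still yields the right delta. A minimal sketch of that pattern follows; read_hw_counter() and the counter width are assumptions for illustration, not the driver's actual helpers.

#include <linux/bits.h>
#include <linux/perf_event.h>

/* Hypothetical helper: returns the current raw value of the hardware counter. */
u64 read_hw_counter(struct perf_event *event);

static void example_xchg_update(struct perf_event *event, unsigned int width)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 prev_count, new_count, mask;

	do {
		prev_count = local64_read(&hwc->prev_count);
		new_count = read_hw_counter(event);
	} while (local64_xchg(&hwc->prev_count, new_count) != prev_count);

	/* Unsigned subtraction under the counter mask absorbs wrap-around. */
	mask = GENMASK_ULL(width - 1, 0);
	local64_add((new_count - prev_count) & mask, &event->count);
}
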
arm_dsu_pmu.c
    334  u64 delta, prev_count, new_count;  in dsu_pmu_event_update() local
    338  prev_count = local64_read(&hwc->prev_count);  in dsu_pmu_event_update()
    340  } while (local64_cmpxchg(&hwc->prev_count, prev_count, new_count) !=  in dsu_pmu_event_update()
    341  prev_count);  in dsu_pmu_event_update()
    342  delta = (new_count - prev_count) & DSU_PMU_COUNTER_MASK(hwc->idx);  in dsu_pmu_event_update()
    368  local64_set(&event->hw.prev_count, val);  in dsu_pmu_set_event_period()

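arm_dsu_pmu.c, like most of the drivers below it, uses the local64_cmpxchg() variant instead: the swap only takes effect if nobody else updated prev_count in the meantime, otherwise the loop re-reads and retries. A sketch under the same assumptions (hypothetical read_hw_counter(), driver-specific counter mask):

#include <linux/perf_event.h>

/* Hypothetical helper, as in the sketch above. */
u64 read_hw_counter(struct perf_event *event);

static void example_cmpxchg_update(struct perf_event *event, u64 counter_mask)
{
	struct hw_perf_event *hwc = &event->hw;
	u64 delta, prev_count, new_count;

	do {
		prev_count = local64_read(&hwc->prev_count);
		new_count = read_hw_counter(event);
	} while (local64_cmpxchg(&hwc->prev_count, prev_count, new_count) !=
		 prev_count);

	delta = (new_count - prev_count) & counter_mask;
	local64_add(delta, &event->count);
}
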
arm_dmc620_pmu.c
    327  u64 delta, prev_count, new_count;  in dmc620_pmu_event_update() local
    331  prev_count = local64_read(&hwc->prev_count);  in dmc620_pmu_event_update()
    333  } while (local64_cmpxchg(&hwc->prev_count,  in dmc620_pmu_event_update()
    334  prev_count, new_count) != prev_count);  in dmc620_pmu_event_update()
    335  delta = (new_count - prev_count) & DMC620_CNT_MAX_PERIOD;  in dmc620_pmu_event_update()
    343  local64_set(&event->hw.prev_count, DMC620_CNT_INIT);  in dmc620_pmu_event_set_period()

qcom_l3_pmu.c
    206  local64_set(&event->hw.prev_count, 0);  in qcom_l3_cache__64bit_counter_start()
    247  prev = local64_read(&event->hw.prev_count);  in qcom_l3_cache__64bit_counter_update()
    253  } while (local64_cmpxchg(&event->hw.prev_count, prev, new) != prev);  in qcom_l3_cache__64bit_counter_update()
    285  local64_set(&event->hw.prev_count, 0);  in qcom_l3_cache__32bit_counter_start()
    323  prev = local64_read(&event->hw.prev_count);  in qcom_l3_cache__32bit_counter_update()
    325  } while (local64_cmpxchg(&event->hw.prev_count, prev, new) != prev);  in qcom_l3_cache__32bit_counter_update()

riscv_pmu.c
    174  prev_raw_count = local64_read(&hwc->prev_count);  in riscv_pmu_event_update()
    176  oldval = local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in riscv_pmu_event_update()
    233  local64_set(&hwc->prev_count, (u64)-left);  in riscv_pmu_event_set_period()
    252  init_val = local64_read(&hwc->prev_count) & max_period;  in riscv_pmu_start()

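riscv_pmu.c also shows the companion set-period step: for a sampling event the counter is started at -left so the hardware overflows after `left` more events, and prev_count is seeded with the same start value so the next update only measures what happened since. A hedged sketch of the idea; the real riscv_pmu_event_set_period() additionally clamps `left` against max_period and handles a negative or overlarge period_left.

#include <linux/perf_event.h>

/* Hypothetical helper: programs the raw hardware counter value. */
void write_hw_counter(struct perf_event *event, u64 val);

static void example_set_period(struct perf_event *event, u64 max_period)
{
	struct hw_perf_event *hwc = &event->hw;
	s64 left = local64_read(&hwc->period_left);

	/*
	 * Start the counter at -left (truncated to the counter width) so it
	 * overflows after "left" events, and remember that start value in
	 * prev_count for the next delta computation.
	 */
	local64_set(&hwc->prev_count, (u64)-left);
	write_hw_counter(event, (u64)-left & max_period);
}
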
alibaba_uncore_drw_pmu.c
    303  prev = local64_read(&hwc->prev_count);  in ali_drw_pmu_event_update()
    305  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in ali_drw_pmu_event_update()
    328  local64_set(&event->hw.prev_count, pre_val);  in ali_drw_pmu_event_set_period()
    587  local64_read(&event->hw.prev_count);  in ali_drw_pmu_start()

thunderx2_pmu.c
    379  local64_set(&hwc->prev_count, 0);  in uncore_start_event_l3c()
    409  local64_set(&hwc->prev_count, 0);  in uncore_start_event_dmc()
    452  local64_set(&event->hw.prev_count, 0ULL);  in uncore_start_event_ccpi2()
    484  prev = local64_xchg(&hwc->prev_count, new);  in tx2_uncore_event_update()
    488  prev = local64_xchg(&hwc->prev_count, new);  in tx2_uncore_event_update()

qcom_l2_pmu.c
    304  prev = local64_read(&hwc->prev_count);  in l2_cache_event_update()
    306  } while (local64_cmpxchg(&hwc->prev_count, prev, now) != prev);  in l2_cache_event_update()
    335  local64_set(&hwc->prev_count, new);  in l2_cache_cluster_set_period()
    604  local64_set(&hwc->prev_count, 0);  in l2_cache_event_add()

/linux-6.12.1/arch/arm/mm/

cache-l2x0-pmu.c
    120  u64 prev_count, new_count, mask;  in l2x0_pmu_event_read() local
    123  prev_count = local64_read(&hw->prev_count);  in l2x0_pmu_event_read()
    125  } while (local64_xchg(&hw->prev_count, new_count) != prev_count);  in l2x0_pmu_event_read()
    128  local64_add((new_count - prev_count) & mask, &event->count);  in l2x0_pmu_event_read()
    147  local64_set(&hw->prev_count, 0);  in l2x0_pmu_event_configure()

/linux-6.12.1/arch/powerpc/perf/

8xx-pmu.c
    115  local64_set(&event->hw.prev_count, val);  in mpc8xx_pmu_add()
    128  prev = local64_read(&event->hw.prev_count);  in mpc8xx_pmu_read()
    149  } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);  in mpc8xx_pmu_read()

core-fsl-emb.c
    175  prev = local64_read(&event->hw.prev_count);  in fsl_emb_pmu_read()
    178  } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);  in fsl_emb_pmu_read()
    314  local64_set(&event->hw.prev_count, val);  in fsl_emb_pmu_add()
    602  prev = local64_read(&event->hw.prev_count);  in record_and_restart()
    625  local64_set(&event->hw.prev_count, val);  in record_and_restart()

core-book3s.c
    1162  local64_set(&event->hw.prev_count, val);  in power_pmu_read()
    1172  prev = local64_read(&event->hw.prev_count);  in power_pmu_read()
    1178  } while (local64_cmpxchg(&event->hw.prev_count, prev, val) != prev);  in power_pmu_read()
    1222  prev = local64_read(&event->hw.prev_count);  in freeze_limited_counters()
    1241  prev = local64_read(&event->hw.prev_count);  in thaw_limited_counters()
    1243  local64_set(&event->hw.prev_count, val);  in thaw_limited_counters()
    1541  val = local64_read(&event->hw.prev_count);  in power_pmu_enable()
    1549  local64_set(&event->hw.prev_count, val);  in power_pmu_enable()
    2168  local64_set(&event->hw.prev_count, 0);  in power_pmu_event_init()
    2243  prev = local64_read(&event->hw.prev_count);  in record_and_restart()
    [all …]

/linux-6.12.1/drivers/misc/

ntsync.c
    77  __u32 prev_count;  in ntsync_sem_post() local
    89  prev_count = sem->u.sem.count;  in ntsync_sem_post()
    94  if (!ret && put_user(prev_count, user_args))  in ntsync_sem_post()

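ntsync.c is a different prev_count altogether: ntsync_sem_post() snapshots the semaphore count before posting to it and, on success, copies that previous value back to userspace with put_user(). A condensed sketch of just that step, with a simplified semaphore structure standing in for the driver's; the real ioctl also copies the argument in and serializes against other waiters.

#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>

/* Simplified stand-in for the driver's semaphore object; not the real layout. */
struct example_sem {
	__u32 count;
	__u32 max;
};

static int example_sem_post(struct example_sem *sem, __u32 __user *user_args,
			    __u32 to_add)
{
	__u32 prev_count = sem->count;
	int ret = 0;

	if (sem->max - sem->count < to_add)
		ret = -EOVERFLOW;
	else
		sem->count += to_add;

	/* On success, hand the pre-post count back to userspace. */
	if (!ret && put_user(prev_count, user_args))
		ret = -EFAULT;
	return ret;
}
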
/linux-6.12.1/fs/gfs2/

file.c
    780  size_t *prev_count,  in should_fault_in_pages() argument
    797  if (*prev_count != count) {  in should_fault_in_pages()
    805  *prev_count = count;  in should_fault_in_pages()
    815  size_t prev_count = 0, window_size = 0;  in gfs2_file_direct_read() local
    856  if (should_fault_in_pages(to, iocb, &prev_count, &window_size)) {  in gfs2_file_direct_read()
    879  size_t prev_count = 0, window_size = 0;  in gfs2_file_direct_write() local
    925  enough_retries = prev_count == iov_iter_count(from) &&  in gfs2_file_direct_write()
    927  if (should_fault_in_pages(from, iocb, &prev_count, &window_size)) {  in gfs2_file_direct_write()
    952  size_t prev_count = 0, window_size = 0;  in gfs2_file_read_iter() local
    995  if (should_fault_in_pages(to, iocb, &prev_count, &window_size)) {  in gfs2_file_read_iter()
    [all …]

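The fs/gfs2/file.c hits are plain retry bookkeeping for direct I/O: should_fault_in_pages() compares the bytes still pending in the iterator against the value it recorded on the previous pass, so the callers can tell a transient fault from being stuck on the same range (see the enough_retries test at line 925). A loose illustration of that progress check, using a hypothetical helper rather than GFS2's actual policy:

#include <linux/uio.h>

/*
 * Loose illustration of the bookkeeping above: prev_count remembers how many
 * bytes were still pending on the previous pass, so a repeated fault with an
 * unchanged count means no forward progress was made. Hypothetical helper,
 * not GFS2's actual fault-in window logic.
 */
static bool example_made_progress(struct iov_iter *i, size_t *prev_count)
{
	size_t count = iov_iter_count(i);
	bool progress = (*prev_count != count);

	*prev_count = count;
	return progress;
}
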
/linux-6.12.1/arch/x86/events/

msr.c
    247  prev = local64_read(&event->hw.prev_count);  in msr_event_update()
    250  } while (!local64_try_cmpxchg(&event->hw.prev_count, &prev, now));  in msr_event_update()
    269  local64_set(&event->hw.prev_count, now);  in msr_event_start()

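arch/x86/events/msr.c uses the newer local64_try_cmpxchg() form: on failure it refreshes `prev` with the value it found, so the loop body only has to re-read the hardware counter. A sketch under the same hypothetical read_hw_counter() assumption:

#include <linux/perf_event.h>

/* Hypothetical helper, as in the sketches above. */
u64 read_hw_counter(struct perf_event *event);

static void example_try_cmpxchg_update(struct perf_event *event)
{
	u64 prev, now;

	prev = local64_read(&event->hw.prev_count);
	do {
		now = read_hw_counter(event);
		/* On failure, prev is refreshed with the current value. */
	} while (!local64_try_cmpxchg(&event->hw.prev_count, &prev, now));

	local64_add(now - prev, &event->count);
}
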
/linux-6.12.1/drivers/iommu/intel/

perfmon.c
    305  u64 prev_count, new_count, delta;  in iommu_pmu_event_update() local
    309  prev_count = local64_read(&hwc->prev_count);  in iommu_pmu_event_update()
    311  if (local64_xchg(&hwc->prev_count, new_count) != prev_count)  in iommu_pmu_event_update()
    318  delta = (new_count << shift) - (prev_count << shift);  in iommu_pmu_event_update()
    344  local64_set((&hwc->prev_count), count);  in iommu_pmu_start()

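drivers/iommu/intel/perfmon.c combines the xchg retry with a shift trick for counters narrower than 64 bits: shifting both raw values up by (64 - width) puts the counter's top bit at bit 63, so the subtraction plus a final arithmetic shift handle wrap-around without an explicit mask. A sketch of the idea, not the driver's exact control flow; read_hw_counter() and `width` are again assumptions:

#include <linux/perf_event.h>

/* Hypothetical helper, as in the sketches above. */
u64 read_hw_counter(struct perf_event *event);

static void example_narrow_counter_update(struct perf_event *event, int width)
{
	struct hw_perf_event *hwc = &event->hw;
	int shift = 64 - width;
	u64 prev_count, new_count;
	s64 delta;

	do {
		prev_count = local64_read(&hwc->prev_count);
		new_count = read_hw_counter(event);
	} while (local64_xchg(&hwc->prev_count, new_count) != prev_count);

	/* Sign-extend the width-bit difference via the shift pair. */
	delta = (new_count << shift) - (prev_count << shift);
	delta >>= shift;
	local64_add(delta, &event->count);
}
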
/linux-6.12.1/kernel/

softirq.c
    544  int prev_count;  in handle_softirqs() local
    549  prev_count = preempt_count();  in handle_softirqs()
    556  if (unlikely(prev_count != preempt_count())) {  in handle_softirqs()
    559  prev_count, preempt_count());  in handle_softirqs()
    560  preempt_count_set(prev_count);  in handle_softirqs()

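kernel/softirq.c uses prev_count for a sanity check rather than a counter delta: handle_softirqs() snapshots preempt_count() before calling each softirq action, and if the handler returns with it unbalanced it complains and restores the saved value so the rest of softirq processing stays consistent. A condensed sketch of that check; the real message also names the offending vector and handler:

#include <linux/preempt.h>
#include <linux/printk.h>

/* Hypothetical wrapper around one softirq action invocation. */
static void example_run_action(void (*action)(void))
{
	int prev_count = preempt_count();

	action();

	if (unlikely(prev_count != preempt_count())) {
		pr_err("softirq handler left preempt count unbalanced: %08x -> %08x\n",
		       prev_count, preempt_count());
		/* Repair it so subsequent softirq processing stays sane. */
		preempt_count_set(prev_count);
	}
}
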
/linux-6.12.1/drivers/perf/hisilicon/

hisi_uncore_pmu.c
    272  local64_set(&hwc->prev_count, val);  in hisi_uncore_pmu_set_event_period()
    287  prev_raw_count = local64_read(&hwc->prev_count);  in hisi_uncore_pmu_event_update()
    288  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in hisi_uncore_pmu_event_update()
    312  u64 prev_raw_count = local64_read(&hwc->prev_count);  in hisi_uncore_pmu_start()

hisi_pcie_pmu.c
    452  prev_cnt = local64_read(&hwc->prev_count);  in hisi_pcie_pmu_event_update()
    454  } while (local64_cmpxchg(&hwc->prev_count, prev_cnt,  in hisi_pcie_pmu_event_update()
    475  local64_set(&hwc->prev_count, HISI_PCIE_INIT_VAL);  in hisi_pcie_pmu_set_period()
    488  local64_set(&hwc->prev_count, cnt);  in hisi_pcie_pmu_set_period()
    550  prev_cnt = local64_read(&hwc->prev_count);  in hisi_pcie_pmu_start()

/linux-6.12.1/arch/arm/mach-imx/

mmdc.c
    314  prev_raw_count = local64_read(&hwc->prev_count);  in mmdc_pmu_event_update()
    317  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in mmdc_pmu_event_update()
    342  local64_set(&hwc->prev_count, 0);  in mmdc_pmu_event_start()
    377  local64_set(&hwc->prev_count, mmdc_pmu_read_counter(pmu_mmdc, cfg));  in mmdc_pmu_event_add()

/linux-6.12.1/tools/perf/arch/x86/util/

iostat.c
    438  struct perf_counts_values *prev_count =  in iostat_print_metric() local
    441  prev_count_val = prev_count->val;  in iostat_print_metric()
    442  prev_count->val = count->val;  in iostat_print_metric()

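tools/perf/arch/x86/util/iostat.c is userspace: when printing a metric it stashes the previously printed raw value, overwrites it with the current one, and reports the per-interval difference. A simplified stand-alone sketch of that rotation, with a stand-in struct instead of perf's perf_counts_values:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for perf's per-event counter storage. */
struct example_counts {
	uint64_t val;		/* latest raw value read for the event */
	uint64_t prev_val;	/* raw value at the previous print interval */
};

/* Print the per-interval delta and roll the current value into prev_val. */
static void example_print_interval(const char *name, struct example_counts *c)
{
	uint64_t prev_count_val = c->prev_val;

	c->prev_val = c->val;
	printf("%s: %" PRIu64 "\n", name, c->val - prev_count_val);
}
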
/linux-6.12.1/arch/xtensa/kernel/

perf_event.c
    152  prev_raw_count = local64_read(&hwc->prev_count);  in xtensa_perf_event_update()
    154  } while (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in xtensa_perf_event_update()
    190  local64_set(&hwc->prev_count, -left);  in xtensa_perf_event_set_period()

/linux-6.12.1/arch/sh/kernel/

perf_event.c
    178  prev_raw_count = local64_read(&hwc->prev_count);  in sh_perf_event_update()
    181  if (local64_cmpxchg(&hwc->prev_count, prev_raw_count,  in sh_perf_event_update()

/linux-6.12.1/arch/arc/kernel/

perf_event.c
    283  u64 prev_raw_count = local64_read(&hwc->prev_count);  in arc_perf_event_update()
    291  local64_set(&hwc->prev_count, new_raw_count);  in arc_perf_event_update()
    432  local64_set(&hwc->prev_count, value);  in arc_pmu_event_set_period()
    550  local64_set(&hwc->prev_count, 0);  in arc_pmu_add()

/linux-6.12.1/drivers/dma/idxd/

perfmon.c
    217  prev_raw_count = local64_read(&hwc->prev_count);  in perfmon_pmu_event_update()
    220  } while (!local64_try_cmpxchg(&hwc->prev_count,  in perfmon_pmu_event_update()
    330  local64_set(&event->hw.prev_count, cntrdata);  in perfmon_pmu_event_start()