Searched refs:event_base (Results 1 – 25 of 32) sorted by relevance

/linux-6.12.1/drivers/clocksource/
timer-qcom.c
34 static void __iomem *event_base; variable
42 u32 ctrl = readl_relaxed(event_base + TIMER_ENABLE); in msm_timer_interrupt()
44 writel_relaxed(ctrl, event_base + TIMER_ENABLE); in msm_timer_interrupt()
53 u32 ctrl = readl_relaxed(event_base + TIMER_ENABLE); in msm_timer_set_next_event()
56 writel_relaxed(ctrl, event_base + TIMER_ENABLE); in msm_timer_set_next_event()
58 writel_relaxed(ctrl, event_base + TIMER_CLEAR); in msm_timer_set_next_event()
59 writel_relaxed(cycles, event_base + TIMER_MATCH_VAL); in msm_timer_set_next_event()
65 writel_relaxed(ctrl | TIMER_ENABLE_EN, event_base + TIMER_ENABLE); in msm_timer_set_next_event()
73 ctrl = readl_relaxed(event_base + TIMER_ENABLE); in msm_timer_shutdown()
75 writel_relaxed(ctrl, event_base + TIMER_ENABLE); in msm_timer_shutdown()
[all …]
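In timer-qcom.c, event_base is an ioremap()'d MMIO base, and every timer register is addressed as event_base + offset through the relaxed accessors. A minimal sketch of the one-shot programming sequence visible above; the register offsets, the enable-bit value, and the function name are assumptions for illustration, not copied from the driver:

#include <linux/io.h>
#include <linux/bits.h>

/* Offsets and the enable bit below are placeholders for illustration only. */
#define TIMER_MATCH_VAL		0x0000
#define TIMER_ENABLE		0x0008
#define TIMER_CLEAR		0x000c
#define TIMER_ENABLE_EN		BIT(0)

static void __iomem *event_base;	/* ioremap()'d in the driver's init path */

/* Mirrors the msm_timer_set_next_event() sequence in the hits above:
 * stop the timer, clear the count, arm the match value, re-enable. */
static void qcom_timer_arm_oneshot_sketch(unsigned long cycles)
{
	u32 ctrl = readl_relaxed(event_base + TIMER_ENABLE);

	writel_relaxed(ctrl & ~TIMER_ENABLE_EN, event_base + TIMER_ENABLE);
	writel_relaxed(ctrl, event_base + TIMER_CLEAR);
	writel_relaxed(cycles, event_base + TIMER_MATCH_VAL);
	writel_relaxed(ctrl | TIMER_ENABLE_EN, event_base + TIMER_ENABLE);
}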
/linux-6.12.1/arch/x86/events/
msr.c
223 event->hw.event_base = msr[cfg].msr; in msr_event_init()
233 if (event->hw.event_base) in msr_read_counter()
234 rdmsrl(event->hw.event_base, now); in msr_read_counter()
253 if (unlikely(event->hw.event_base == MSR_SMI_COUNT)) { in msr_event_update()
256 } else if (unlikely(event->hw.event_base == MSR_IA32_THERM_STATUS)) { in msr_event_update()
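For the x86 msr PMU above, the same hw.event_base field is not an address at all but the raw MSR number selected in msr_event_init(), so reading the counter is a single rdmsrl(). A minimal sketch of that read path (the function name is hypothetical):

#include <linux/perf_event.h>
#include <asm/msr.h>

/* Sketch of the msr.c read path: event_base == 0 means "no backing MSR". */
static u64 msr_counter_read_sketch(struct perf_event *event)
{
	u64 now = 0;

	if (event->hw.event_base)
		rdmsrl(event->hw.event_base, now);
	return now;
}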
rapl.c
186 rdmsrl(event->hw.event_base, raw); in rapl_read_counter()
214 rdmsrl(event->hw.event_base, new_raw_count); in rapl_event_update()
394 event->hw.event_base = rapl_msrs[bit].msr; in rapl_pmu_event_init()
core.c
124 if (unlikely(!hwc->event_base)) in x86_perf_event_update()
1232 hwc->event_base = 0; in x86_assign_hw_event()
1241 hwc->event_base = x86_pmu_fixed_ctr_addr(idx - INTEL_PMC_IDX_FIXED); in x86_assign_hw_event()
1248 hwc->event_base = x86_pmu_event_addr(hwc->idx); in x86_assign_hw_event()
1375 if (unlikely(!hwc->event_base)) in x86_perf_event_set_period()
1413 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in x86_perf_event_set_period()
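In the generic x86 core PMU, x86_assign_hw_event() resolves hwc->event_base to the counter MSR for the scheduled index (or 0 when there is no counter MSR to program), and x86_perf_event_set_period() then arms it by writing the negated period. A hedged sketch of that final write; the 48-bit counter mask and the function name are assumptions standing in for x86_pmu.cntval_mask and the real helper:

#include <linux/perf_event.h>
#include <asm/msr.h>

/* Sketch of the tail of x86_perf_event_set_period(): the counter counts up
 * towards overflow, so "-left" cycles are written, truncated to counter width. */
static void x86_write_period_sketch(struct hw_perf_event *hwc, s64 left)
{
	u64 cntval_mask = (1ULL << 48) - 1;	/* assumed width for illustration */

	if (!hwc->event_base)			/* no counter MSR assigned */
		return;
	wrmsrl(hwc->event_base, (u64)(-left) & cntval_mask);
}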
/linux-6.12.1/drivers/perf/
thunderx2_pmu.c
334 hwc->event_base = (unsigned long)tx2_pmu->base in init_cntr_base_l3c()
350 hwc->event_base = (unsigned long)tx2_pmu->base in init_cntr_base_dmc()
364 hwc->event_base = (unsigned long)tx2_pmu->base; in init_cntr_base_ccpi2()
380 reg_writel(0, hwc->event_base); in uncore_start_event_l3c()
410 reg_writel(0, hwc->event_base); in uncore_start_event_dmc()
451 hwc->event_base + CCPI2_PERF_CTL); in uncore_start_event_ccpi2()
460 reg_writel(0, hwc->event_base + CCPI2_PERF_CTL); in uncore_stop_event_ccpi2()
480 hwc->event_base + CCPI2_COUNTER_SEL); in tx2_uncore_event_update()
481 new = reg_readl(hwc->event_base + CCPI2_COUNTER_DATA_H); in tx2_uncore_event_update()
483 reg_readl(hwc->event_base + CCPI2_COUNTER_DATA_L); in tx2_uncore_event_update()
[all …]
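The ThunderX2 uncore driver stores the counter's full MMIO address in event_base (cast to unsigned long) and accesses it through its local reg_readl()/reg_writel() wrappers. A sketch of the 64-bit CCPI2 counter read assembled from two 32-bit loads, using plain readl() and placeholder offsets in place of the driver's CCPI2_COUNTER_DATA_{H,L} constants:

#include <linux/io.h>
#include <linux/perf_event.h>

/* Offsets are placeholders; the real CCPI2_COUNTER_DATA_{H,L} values live in
 * thunderx2_pmu.c. The point is the idiom: event_base carries the counter's
 * full MMIO address, so a 64-bit count is built from two 32-bit reads. */
#define CCPI2_COUNTER_DATA_H	0x0
#define CCPI2_COUNTER_DATA_L	0x4

static u64 tx2_read_ccpi2_counter_sketch(struct hw_perf_event *hwc)
{
	void __iomem *base = (void __iomem *)hwc->event_base;
	u64 hi = readl(base + CCPI2_COUNTER_DATA_H);
	u64 lo = readl(base + CCPI2_COUNTER_DATA_L);

	return (hi << 32) | lo;
}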
arm-ccn.c
893 dt_cfg = hw->event_base; in arm_ccn_pmu_xp_dt_config()
947 hw->event_base = CCN_XP_DT_CONFIG__DT_CFG__WATCHPOINT(wp); in arm_ccn_pmu_xp_watchpoint_config()
990 hw->event_base = CCN_XP_DT_CONFIG__DT_CFG__XP_PMU_EVENT(hw->config_base); in arm_ccn_pmu_xp_event_config()
1013 hw->event_base = CCN_XP_DT_CONFIG__DT_CFG__DEVICE_PMU_EVENT(port, in arm_ccn_pmu_node_event_config()
riscv_pmu_sbi.c
432 cmask, cflags, hwc->event_base, hwc->config, in pmu_sbi_ctr_get_idx()
436 cmask, cflags, hwc->event_base, hwc->config, 0); in pmu_sbi_ctr_get_idx()
440 hwc->event_base, hwc->config); in pmu_sbi_ctr_get_idx()
riscv_pmu.c
332 hwc->event_base = mapped_event; in riscv_pmu_event_init()
cxl_pmu.c
650 hwc->event_base); in cxl_pmu_event_start()
743 hwc->event_base = event_idx; in cxl_pmu_event_add()
/linux-6.12.1/arch/alpha/kernel/
perf_event.c
351 evtype[n] = group->hw.event_base; in collect_events()
359 evtype[n] = pe->hw.event_base; in collect_events()
459 cpuc->evtype[n0] = event->hw.event_base; in alpha_pmu_add()
642 hwc->event_base = ev; in __hw_perf_event_init()
656 evtypes[n] = hwc->event_base; in __hw_perf_event_init()
/linux-6.12.1/arch/x86/events/intel/
uncore_discovery.c
493 hwc->event_base = uncore_pci_perf_ctr(box, hwc->idx); in intel_generic_uncore_assign_hw_event()
504 hwc->event_base = box_ctl + uncore_pci_perf_ctr(box, hwc->idx); in intel_generic_uncore_assign_hw_event()
509 hwc->event_base = box_ctl + box->pmu->type->perf_ctr + hwc->idx; in intel_generic_uncore_assign_hw_event()
569 pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count); in intel_generic_uncore_pci_read_counter()
570 pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1); in intel_generic_uncore_pci_read_counter()
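For discovered PCI uncore units, event_base ends up being an offset into the device's config space rather than an MSR, and the 64-bit count is read as two dwords. A minimal sketch of that read path (the function name is hypothetical):

#include <linux/pci.h>
#include <linux/perf_event.h>

/* Sketch of intel_generic_uncore_pci_read_counter(): low dword at event_base,
 * high dword at event_base + 4, assembled into one 64-bit count. */
static u64 uncore_pci_read_counter_sketch(struct pci_dev *pdev,
					  struct hw_perf_event *hwc)
{
	u64 count = 0;

	pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
	pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);
	return count;
}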
cstate.c
295 event->hw.event_base = core_msr[cfg].msr; in cstate_pmu_event_init()
302 event->hw.event_base = pkg_msr[cfg].msr; in cstate_pmu_event_init()
309 event->hw.event_base = module_msr[cfg].msr; in cstate_pmu_event_init()
323 rdmsrl(event->hw.event_base, val); in cstate_pmu_read_counter()
uncore.c
153 rdmsrl(event->hw.event_base, count); in uncore_msr_read_counter()
170 if (!uncore_mmio_is_valid_offset(box, event->hw.event_base)) in uncore_mmio_read_counter()
173 return readq(box->io_addr + event->hw.event_base); in uncore_mmio_read_counter()
262 hwc->event_base = uncore_fixed_ctr(box); in uncore_assign_hw_event()
271 hwc->event_base = uncore_perf_ctr(box, hwc->idx); in uncore_assign_hw_event()
798 event->hw.event_base = uncore_freerunning_counter(box, event); in uncore_pmu_event_init()
p4.c
874 rdmsrl(hwc->event_base, v); in p4_pmu_clear_cccr_ovf()
1017 if (hwc->event_base) { in p4_pmu_set_period()
1026 wrmsrl(hwc->event_base, (u64)(-left) & x86_pmu.cntval_mask); in p4_pmu_set_period()
/linux-6.12.1/arch/s390/include/asm/
pai.h
78 #define PAI_SAVE_AREA(x) ((x)->hw.event_base)
/linux-6.12.1/arch/mips/kernel/
perf_event_mipsxx.c
325 cntr_mask = (hwc->event_base >> 10) & 0xffff; in mipsxx_pmu_alloc_counter()
327 cntr_mask = (hwc->event_base >> 8) & 0xffff; in mipsxx_pmu_alloc_counter()
352 unsigned int range = evt->event_base >> 24; in mipsxx_pmu_enable_event()
357 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0x3ff) | in mipsxx_pmu_enable_event()
362 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base & 0xff) | in mipsxx_pmu_enable_event()
440 M_PERFCTL_EVENT(hwc->event_base & 0x3ff)); in mipspmu_event_set_period()
1506 hwc->event_base = mipspmu_perf_event_encode(pev); in __hw_perf_event_init()
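On MIPS the field is a software encoding, not an address: mipspmu_perf_event_encode() packs the event number, the usable-counter mask, and a range field into one word, which the allocate/enable paths unpack with the shifts shown above. A sketch of that decoding; the exact field layout differs per CPU type, so this only mirrors the 0x3ff / >>10 / >>24 variant visible in the hits, and the helper name is hypothetical:

#include <linux/perf_event.h>

/* Unpack the encoded event word stored in hwc->event_base. */
static void mips_decode_event_base_sketch(struct hw_perf_event *hwc,
					  unsigned int *event,
					  unsigned int *cntr_mask,
					  unsigned int *range)
{
	*event     = hwc->event_base & 0x3ff;		/* event number */
	*cntr_mask = (hwc->event_base >> 10) & 0xffff;	/* usable counters */
	*range     = hwc->event_base >> 24;		/* global/VPE range */
}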
/linux-6.12.1/arch/loongarch/kernel/
perf_event.c
274 cpuc->saved_ctrl[idx] = M_PERFCTL_EVENT(evt->event_base) | in loongarch_pmu_enable_event()
548 event->hw.event_base = 0xffffffff; in loongarch_pmu_event_init()
772 hwc->event_base = loongarch_pmu_perf_event_encode(pev); in __hw_perf_event_init()
/linux-6.12.1/drivers/perf/hisilicon/
hisi_pcie_pmu.c
391 hwc->event_base = HISI_PCIE_EXT_CNT; in hisi_pcie_pmu_event_init()
393 hwc->event_base = HISI_PCIE_CNT; in hisi_pcie_pmu_event_init()
415 return hisi_pcie_pmu_readq(pcie_pmu, event->hw.event_base, idx); in hisi_pcie_pmu_read_counter()
551 hisi_pcie_pmu_writeq(pcie_pmu, hwc->event_base, idx, prev_cnt); in hisi_pcie_pmu_start()
hns3_pmu.c
1214 return hns3_pmu_readq(hns3_pmu, event->hw.event_base, event->hw.idx); in hns3_pmu_read_counter()
1271 hwc->event_base = HNS3_PMU_REG_EVENT_EXT_COUNTER; in hns3_pmu_event_init()
1273 hwc->event_base = HNS3_PMU_REG_EVENT_COUNTER; in hns3_pmu_event_init()
/linux-6.12.1/arch/sparc/kernel/
perf_event.c
1356 events[n] = group->hw.event_base; in collect_events()
1365 events[n] = event->hw.event_base; in collect_events()
1385 cpuc->events[n0] = event->hw.event_base; in sparc_pmu_add()
1455 hwc->event_base = perf_event_encode(pmap); in sparc_pmu_event_init()
1461 hwc->event_base = attr->config; in sparc_pmu_event_init()
1481 events[n] = hwc->event_base; in sparc_pmu_event_init()
/linux-6.12.1/drivers/fpga/
dfl-fme-perf.c
788 struct fme_perf_event_ops *ops = get_event_ops(event->hw.event_base); in fme_perf_event_destroy()
826 hwc->event_base = evtype; in fme_perf_event_init()
844 struct fme_perf_event_ops *ops = get_event_ops(event->hw.event_base); in fme_perf_event_update()
858 struct fme_perf_event_ops *ops = get_event_ops(event->hw.event_base); in fme_perf_event_start()
/linux-6.12.1/arch/x86/events/amd/
uncore.c
109 rdmsrl(hwc->event_base, new); in amd_uncore_read()
124 wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count)); in amd_uncore_start()
178 hwc->event_base = pmu->msr_base + 1 + (2 * hwc->idx); in amd_uncore_add()
886 wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count)); in amd_uncore_umc_start()
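In the AMD uncore PMU, control and counter MSRs are interleaved, so the counter MSR for slot idx is computed as msr_base + 1 + 2 * idx and stored in event_base; restarting an event then just rewrites the saved prev_count into it. A minimal sketch combining those two steps (the function name is hypothetical):

#include <linux/perf_event.h>
#include <asm/msr.h>

/* Resolve the counter MSR for this slot and reload the saved count. */
static void amd_uncore_restart_sketch(struct hw_perf_event *hwc,
				      unsigned int msr_base)
{
	hwc->event_base = msr_base + 1 + (2 * hwc->idx);
	wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count));
}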
/linux-6.12.1/drivers/dma/idxd/
perfmon.c
101 hwc->event_base = ioread64(CNTRCFG_REG(idxd, idx)); in perfmon_assign_hw_event()
189 event->hw.event_base = ioread64(PERFMON_TABLE_OFFSET(idxd)); in perfmon_pmu_event_init()
/linux-6.12.1/arch/powerpc/perf/
imc-pmu.c
562 event->hw.event_base = (u64)pcni->vbase + l_config; in nest_imc_event_init()
894 event->hw.event_base = (u64)pcmi->vbase + (config & IMC_EVENT_OFFSET_MASK); in core_imc_event_init()
1043 return (__be64 *)event->hw.event_base; in get_event_base_addr()
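The powerpc IMC PMUs take yet another view: event_base holds a kernel virtual address inside the in-memory counter region (vbase plus a per-event offset), and get_event_base_addr() simply casts it back to a __be64 pointer. A sketch of the resulting counter read, assuming the value is stored big-endian as that cast suggests (the function name is hypothetical):

#include <linux/compiler.h>
#include <linux/perf_event.h>
#include <asm/byteorder.h>

/* Counter lives in memory updated outside the kernel's control, so read it
 * once and convert from big-endian. */
static u64 imc_read_counter_sketch(struct perf_event *event)
{
	__be64 *addr = (__be64 *)event->hw.event_base;

	return be64_to_cpu(READ_ONCE(*addr));
}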
core-book3s.c
1597 flags[n] = group->hw.event_base; in collect_events()
1606 flags[n] = event->hw.event_base; in collect_events()
1639 cpuhw->flags[n0] = event->hw.event_base; in power_pmu_add()
2159 event->hw.event_base = cflags[n]; in power_pmu_event_init()
