Searched refs:sample_period (Results 1 – 25 of 167) sorted by relevance

/linux-6.12.1/tools/testing/selftests/powerpc/pmu/ebb/
lost_exception_test.c
49 orig_period = max_period = sample_period = 400; in test_body()
51 mtspr(SPRN_PMC4, pmc_sample_period(sample_period)); in test_body()
66 if (sample_period >= (orig_period + 200)) in test_body()
67 sample_period = orig_period; in test_body()
69 sample_period++; in test_body()
71 if (sample_period > max_period) in test_body()
72 max_period = sample_period; in test_body()
multi_counter_test.c
58 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in multi_counter()
59 mtspr(SPRN_PMC2, pmc_sample_period(sample_period)); in multi_counter()
60 mtspr(SPRN_PMC3, pmc_sample_period(sample_period)); in multi_counter()
61 mtspr(SPRN_PMC4, pmc_sample_period(sample_period)); in multi_counter()
62 mtspr(SPRN_PMC5, pmc_sample_period(sample_period)); in multi_counter()
63 mtspr(SPRN_PMC6, pmc_sample_period(sample_period)); in multi_counter()
ebb.c
31 u64 sample_period = 0x40000000ull; variable
71 bool ebb_check_count(int pmc, u64 sample_period, int fudge) in ebb_check_count() argument
77 lower = ebb_state.stats.ebb_count * (sample_period - fudge); in ebb_check_count()
85 upper = ebb_state.stats.ebb_count * (sample_period + fudge); in ebb_check_count()
119 found += count_pmc(i, sample_period); in standard_ebb_callee()
252 int count_pmc(int pmc, uint32_t sample_period) in count_pmc() argument
258 start_value = pmc_sample_period(sample_period); in count_pmc()
381 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in ebb_child()
ebb.h
46 bool ebb_check_count(int pmc, u64 sample_period, int fudge);
58 int count_pmc(int pmc, uint32_t sample_period);
69 extern u64 sample_period;
back_to_back_ebbs_test.c
41 count_pmc(1, sample_period); in ebb_callee()
81 sample_period = 5; in back_to_back_ebbs()
84 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in back_to_back_ebbs()
no_handler_test.c
38 sample_period = 1000; in no_handler_test()
39 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in no_handler_test()
cycles_test.c
35 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in cycles()
50 FAIL_IF(!ebb_check_count(1, sample_period, 100)); in cycles()
instruction_count_test.c
43 count_pmc(4, sample_period); in do_count_loop()
101 count_pmc(4, sample_period); in pmc4_ebb_callee()
125 sample_period = COUNTER_OVERFLOW; in instruction_count()
pmae_handling_test.c
42 count_pmc(1, sample_period); in syscall_ebb_callee()
77 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in test_body()
pmc56_overflow_test.c
30 count_pmc(2, sample_period); in ebb_callee()
69 mtspr(SPRN_PMC2, pmc_sample_period(sample_period)); in pmc56_overflow()
cycles_with_freeze_test.c
48 count_pmc(1, sample_period); in ebb_callee()
74 mtspr(SPRN_PMC1, pmc_sample_period(sample_period)); in cycles_with_freeze()
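
The EBB selftests above arm a Power PMC so that it overflows, and so raises an event-based branch, after roughly sample_period events; pmc_sample_period() converts the desired period into the counter's start value. Below is a minimal sketch of that conversion, assuming the PMCs are 32-bit counters whose overflow condition is bit 31 becoming set; the helper name is illustrative, not copied from ebb.h.

#include <stdint.h>

/* Start the counter `period` events short of bit 31 so the overflow
 * (and hence the EBB) fires after approximately `period` events. */
static inline uint32_t pmc_start_value(uint32_t period)
{
        return 0x80000000u - period;
}
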
/linux-6.12.1/tools/testing/selftests/bpf/verifier/
perf_event_sample_period.c
7 offsetof(struct bpf_perf_event_data, sample_period)),
10 offsetof(struct bpf_perf_event_data, sample_period) + 7),
23 offsetof(struct bpf_perf_event_data, sample_period)),
26 offsetof(struct bpf_perf_event_data, sample_period) + 6),
39 offsetof(struct bpf_perf_event_data, sample_period)),
42 offsetof(struct bpf_perf_event_data, sample_period) + 4),
54 offsetof(struct bpf_perf_event_data, sample_period)),
/linux-6.12.1/tools/testing/selftests/bpf/progs/
verifier_unpriv_perf.c
28 -(__s32) offsetof(struct bpf_perf_event_data, sample_period) - 8), in fill_of_different_pointers_ldx()
29 __imm_const(sample_period, in fill_of_different_pointers_ldx()
30 offsetof(struct bpf_perf_event_data, sample_period)) in fill_of_different_pointers_ldx()
/linux-6.12.1/samples/bpf/
tracex6_user.c
104 .sample_period = SAMPLE_PERIOD, in test_bpf_perf_event()
113 .sample_period = SAMPLE_PERIOD, in test_bpf_perf_event()
122 .sample_period = SAMPLE_PERIOD, in test_bpf_perf_event()
132 .sample_period = SAMPLE_PERIOD, in test_bpf_perf_event()
144 .sample_period = SAMPLE_PERIOD, in test_bpf_perf_event()
156 .sample_period = 0, in test_bpf_perf_event()
trace_event_kern.c
50 if (ctx->sample_period < 10000) in bpf_prog1()
57 bpf_trace_printk(fmt, sizeof(fmt), cpu, ctx->sample_period, in bpf_prog1()
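
In this group, tracex6_user.c fills struct perf_event_attr with a fixed sample_period before calling perf_event_open(), while trace_event_kern.c reads the period back from the bpf_perf_event_data context. A minimal userspace sketch of the attr setup follows; the hardware-cycles event type and the per-CPU, all-tasks attachment are assumptions for illustration, not the sample's exact configuration.

#include <linux/perf_event.h>
#include <string.h>
#include <sys/syscall.h>
#include <unistd.h>

/* Open a sampling event that interrupts every `period` CPU cycles. */
static int open_cycles_sampler(unsigned long long period, int cpu)
{
        struct perf_event_attr attr;

        memset(&attr, 0, sizeof(attr));
        attr.size = sizeof(attr);
        attr.type = PERF_TYPE_HARDWARE;
        attr.config = PERF_COUNT_HW_CPU_CYCLES;
        attr.freq = 0;                  /* period-based sampling ... */
        attr.sample_period = period;    /* ... one sample per `period` events */

        /* pid = -1 with a specific cpu: count all tasks on that CPU. */
        return syscall(__NR_perf_event_open, &attr, -1, cpu, -1, 0);
}
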
/linux-6.12.1/tools/perf/arch/arm64/util/
arm-spe.c
119 static __u64 sample_period; in arm_spe_pmu__sample_period() local
121 if (sample_period) in arm_spe_pmu__sample_period()
122 return sample_period; in arm_spe_pmu__sample_period()
129 &sample_period) != 1) { in arm_spe_pmu__sample_period()
131 sample_period = 4096; in arm_spe_pmu__sample_period()
133 return sample_period; in arm_spe_pmu__sample_period()
141 evsel->core.attr.sample_period = arm_spe_pmu__sample_period(evsel->pmu); in arm_spe_setup_evsel()
274 tracking_evsel->core.attr.sample_period = 1; in arm_spe_recording_options()
536 attr->sample_period = arm_spe_pmu__sample_period(arm_spe_pmu); in arm_spe_pmu_default_config()
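
arm-spe.c caches a default sampling interval: it queries the SPE PMU's minimum supported interval once, falls back to 4096 if the read fails, and reuses the cached value on later calls. A sketch of the same pattern in plain C; the sysfs path is illustrative only, the real helper resolves it through the pmu object.

#include <stdio.h>

static unsigned long long spe_default_sample_period(void)
{
        static unsigned long long cached;
        FILE *f;

        if (cached)
                return cached;          /* computed on an earlier call */

        /* Illustrative location of the capability file. */
        f = fopen("/sys/bus/event_source/devices/arm_spe_0/caps/min_interval", "r");
        if (!f || fscanf(f, "%llu", &cached) != 1)
                cached = 4096;          /* same fallback as arm-spe.c */
        if (f)
                fclose(f);

        return cached;
}
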
/linux-6.12.1/drivers/thermal/intel/int340x_thermal/
acpi_thermal_rel.h
49 u64 sample_period; member
64 u64 sample_period; member
110 u64 sample_period; member
121 u64 sample_period; member
/linux-6.12.1/drivers/media/rc/
ene_ir.c
30 static int sample_period; variable
103 if (sample_period != ENE_DEFAULT_SAMPLE_PERIOD) in ene_hw_detect()
392 if (sample_period == ENE_DEFAULT_SAMPLE_PERIOD) in ene_rx_setup()
397 (sample_period + sample_period_adjust) | in ene_rx_setup()
443 dev->rdev->rx_resolution = sample_period; in ene_rx_setup()
450 dev->rdev->min_timeout = 127 * sample_period; in ene_rx_setup()
635 dev->tx_sample = DIV_ROUND_CLOSEST(sample, sample_period); in ene_tx_sample()
644 dbg("TX: sample %8d (%s)", raw_tx * sample_period, in ene_tx_sample()
783 hw_sample = hw_value * sample_period; in ene_isr()
1025 if (sample_period < 5 || sample_period > 0x7F) in ene_probe()
[all …]
/linux-6.12.1/tools/perf/tests/attr/
test-record-group2
10 sample_period=1234000
20 sample_period=6789000
/linux-6.12.1/drivers/gpu/drm/xe/
xe_gt_sriov_pf_policy.c
277 &gt->sriov.pf.policy.guc.sample_period, value); in pf_provision_sample_period()
285 return pf_provision_sample_period(gt, gt->sriov.pf.policy.guc.sample_period); in pf_reprovision_sample_period()
293 gt->sriov.pf.policy.guc.sample_period = 0; in pf_sanitize_sample_period()
331 value = gt->sriov.pf.policy.guc.sample_period; in xe_gt_sriov_pf_policy_get_sample_period()
397 policy->sample_period, policy->sample_period ? "ms" : "(disabled)"); in print_guc_policies()
/linux-6.12.1/drivers/iio/adc/
dln2-adc.c
65 unsigned int sample_period; member
357 if (dln2->sample_period) { in dln2_adc_read_raw()
358 microhertz = 1000000000 / dln2->sample_period; in dln2_adc_read_raw()
389 dln2->sample_period = in dln2_adc_write_raw()
391 if (dln2->sample_period > 65535) { in dln2_adc_write_raw()
392 dln2->sample_period = 65535; in dln2_adc_write_raw()
406 dln2->trigger_chan, dln2->sample_period); in dln2_adc_write_raw()
546 dln2->sample_period); in dln2_adc_triggered_buffer_postenable()
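
In dln2-adc.c the stored sample_period looks like a period in milliseconds that has to fit a 16-bit device field, which would explain the clamp to 65535, and reads convert it back to a sampling rate in microhertz. The arithmetic in isolation:

/* period in ms -> rate in microhertz: 1e9 uHz*ms / period_ms. */
static unsigned int period_ms_to_microhertz(unsigned int period_ms)
{
        return 1000000000u / period_ms; /* e.g. 100 ms -> 10000000 uHz = 10 Hz */
}
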
/linux-6.12.1/arch/x86/events/amd/
ibs.c
104 s64 period = hwc->sample_period; in perf_event_set_period()
297 if (hwc->sample_period) { in perf_ibs_init()
301 if (!event->attr.sample_freq && hwc->sample_period & 0x0f) in perf_ibs_init()
308 hwc->sample_period &= ~0x0FULL; in perf_ibs_init()
309 if (!hwc->sample_period) in perf_ibs_init()
310 hwc->sample_period = 0x10; in perf_ibs_init()
314 event->attr.sample_period = max_cnt << 4; in perf_ibs_init()
315 hwc->sample_period = event->attr.sample_period; in perf_ibs_init()
318 if (!hwc->sample_period) in perf_ibs_init()
325 hwc->last_period = hwc->sample_period; in perf_ibs_init()
[all …]
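
perf_ibs_init() in ibs.c constrains the requested period before programming IBS: the low four bits of sample_period are cleared, which keeps the period a multiple of 16, and a floor of 0x10 is enforced. The adjustment on its own, using a fixed-width type instead of the kernel's u64:

#include <stdint.h>

/* Clamp a requested IBS period to the hardware's granularity of 16. */
static uint64_t ibs_adjust_period(uint64_t sample_period)
{
        sample_period &= ~(uint64_t)0x0F;       /* low four bits are cleared */
        if (!sample_period)
                sample_period = 0x10;           /* smallest usable period */
        return sample_period;
}
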
/linux-6.12.1/arch/powerpc/perf/
core-fsl-emb.c
309 if (event->hw.sample_period) { in fsl_emb_pmu_add()
380 if (event->hw.idx < 0 || !event->hw.sample_period) in fsl_emb_pmu_start()
408 if (event->hw.idx < 0 || !event->hw.sample_period) in fsl_emb_pmu_stop()
546 event->hw.last_period = event->hw.sample_period; in fsl_emb_pmu_event_init()
592 u64 period = event->hw.sample_period; in record_and_restart()
618 event->hw.last_period = event->hw.sample_period; in record_and_restart()
/linux-6.12.1/kernel/
watchdog.c
312 static u64 __read_mostly sample_period; variable
391 u16 sample_period_16 = get_16bit_precision(sample_period); in update_cpustat()
410 u64 sample_period_second = sample_period; in print_cpustat()
565 sample_period = get_softlockup_thresh() * ((u64)NSEC_PER_SEC / NUM_SAMPLE_PERIODS); in set_sample_period()
566 watchdog_update_hrtimer_threshold(sample_period); in set_sample_period()
698 hrtimer_forward_now(hrtimer, ns_to_ktime(sample_period)); in watchdog_timer_fn()
794 hrtimer_start(hrtimer, ns_to_ktime(sample_period), in watchdog_enable()
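
set_sample_period() in kernel/watchdog.c derives the hrtimer interval from the softlockup threshold: the threshold window is divided into NUM_SAMPLE_PERIODS slices so that the per-CPU timer fires several times before a lockup would be declared. A worked example, assuming the common defaults of watchdog_thresh = 10 s (a 20 s softlockup threshold) and five sample periods:

#include <stdint.h>

#define NSEC_PER_SEC 1000000000ULL

/* 20 s threshold split into 5 slices: the watchdog hrtimer fires every 4 s. */
static uint64_t watchdog_sample_period(uint64_t softlockup_thresh_sec,
                                       uint64_t num_sample_periods)
{
        return softlockup_thresh_sec * (NSEC_PER_SEC / num_sample_periods);
}
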
/linux-6.12.1/arch/riscv/kvm/
vcpu_pmu.c
40 u64 sample_period; in kvm_pmu_get_sample_period() local
43 sample_period = counter_val_mask; in kvm_pmu_get_sample_period()
45 sample_period = (-pmc->counter_val) & counter_val_mask; in kvm_pmu_get_sample_period()
47 return sample_period; in kvm_pmu_get_sample_period()
300 perf_event->attr.sample_period = period; in kvm_riscv_pmu_overflow()
301 perf_event->hw.sample_period = period; in kvm_riscv_pmu_overflow()
326 attr->sample_period = kvm_pmu_get_sample_period(pmc); in kvm_pmu_create_perf_event()
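
kvm_pmu_get_sample_period() above sizes the host perf event so that it overflows exactly when the guest's virtual counter would wrap: the period is the distance from the current counter value to overflow, masked to the counter's width, with a full counter's worth of events used when the value is zero. The same computation in isolation:

#include <stdint.h>

/* Events remaining until a counter of width mask+1 wraps, starting from val. */
static uint64_t guest_overflow_period(uint64_t val, uint64_t mask)
{
        if (!val)
                return mask;    /* mirrors the zero-value special case above */

        return (-val) & mask;   /* (2^64 - val) reduced to the counter width */
}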
