/linux-6.12.1/arch/powerpc/platforms/cell/spufs/ |
D | backing_ops.c |
    40 ch0_cnt = ctx->csa.spu_chnlcnt_RW[0]; in gen_spu_event()
    41 ch0_data = ctx->csa.spu_chnldata_RW[0]; in gen_spu_event()
    42 ch1_data = ctx->csa.spu_chnldata_RW[1]; in gen_spu_event()
    43 ctx->csa.spu_chnldata_RW[0] |= event; in gen_spu_event()
    45 ctx->csa.spu_chnlcnt_RW[0] = 1; in gen_spu_event()
    54 spin_lock(&ctx->csa.register_lock); in spu_backing_mbox_read()
    55 mbox_stat = ctx->csa.prob.mb_stat_R; in spu_backing_mbox_read()
    61 *data = ctx->csa.prob.pu_mb_R; in spu_backing_mbox_read()
    62 ctx->csa.prob.mb_stat_R &= ~(0x0000ff); in spu_backing_mbox_read()
    63 ctx->csa.spu_chnlcnt_RW[28] = 1; in spu_backing_mbox_read()
    [all …]
|
D | switch.c |
    81 static inline int check_spu_isolate(struct spu_state *csa, struct spu *spu) in check_spu_isolate() argument
    97 static inline void disable_interrupts(struct spu_state *csa, struct spu *spu) in disable_interrupts() argument
    111 if (csa) { in disable_interrupts()
    112 csa->priv1.int_mask_class0_RW = spu_int_mask_get(spu, 0); in disable_interrupts()
    113 csa->priv1.int_mask_class1_RW = spu_int_mask_get(spu, 1); in disable_interrupts()
    114 csa->priv1.int_mask_class2_RW = spu_int_mask_get(spu, 2); in disable_interrupts()
    134 static inline void set_watchdog_timer(struct spu_state *csa, struct spu *spu) in set_watchdog_timer() argument
    147 static inline void inhibit_user_access(struct spu_state *csa, struct spu *spu) in inhibit_user_access() argument
    158 static inline void set_switch_pending(struct spu_state *csa, struct spu *spu) in set_switch_pending() argument
    167 static inline void save_mfc_cntl(struct spu_state *csa, struct spu *spu) in save_mfc_cntl() argument
    [all …]
|
D | lscsa_alloc.c |
    21 int spu_alloc_lscsa(struct spu_state *csa) in spu_alloc_lscsa() argument
    29 csa->lscsa = lscsa; in spu_alloc_lscsa()
    38 void spu_free_lscsa(struct spu_state *csa) in spu_free_lscsa() argument
    43 if (csa->lscsa == NULL) in spu_free_lscsa()
    46 for (p = csa->lscsa->ls; p < csa->lscsa->ls + LS_SIZE; p += PAGE_SIZE) in spu_free_lscsa()
    49 vfree(csa->lscsa); in spu_free_lscsa()
|
D | fault.c |
    55 unsigned long stat = ctx->csa.class_0_pending & CLASS0_INTR_MASK; in spufs_handle_class0()
    61 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
    65 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
    69 spufs_handle_event(ctx, ctx->csa.class_0_dar, in spufs_handle_class0()
    72 ctx->csa.class_0_pending = 0; in spufs_handle_class0()
    102 ea = ctx->csa.class_1_dar; in spufs_handle_class1()
    103 dsisr = ctx->csa.class_1_dsisr; in spufs_handle_class1()
    141 ctx->csa.class_1_dar = ctx->csa.class_1_dsisr = 0; in spufs_handle_class1()
|
D | file.c |
    249 pfn = vmalloc_to_pfn(ctx->csa.lscsa->ls + offset); in spufs_mem_mmap_fault()
    470 return spufs_dump_emit(cprm, ctx->csa.lscsa->gprs, in spufs_regs_dump()
    471 sizeof(ctx->csa.lscsa->gprs)); in spufs_regs_dump()
    483 if (*pos >= sizeof(ctx->csa.lscsa->gprs)) in spufs_regs_read()
    489 ret = simple_read_from_buffer(buffer, size, pos, ctx->csa.lscsa->gprs, in spufs_regs_read()
    490 sizeof(ctx->csa.lscsa->gprs)); in spufs_regs_read()
    500 struct spu_lscsa *lscsa = ctx->csa.lscsa; in spufs_regs_write()
    527 return spufs_dump_emit(cprm, &ctx->csa.lscsa->fpcr, in spufs_fpcr_dump()
    528 sizeof(ctx->csa.lscsa->fpcr)); in spufs_fpcr_dump()
    541 ret = simple_read_from_buffer(buffer, size, pos, &ctx->csa.lscsa->fpcr, in spufs_fpcr_read()
    [all …]
|
D | run.c |
    30 ctx->csa.class_0_pending = spu->class_0_pending; in spufs_stop_callback()
    31 ctx->csa.class_0_dar = spu->class_0_dar; in spufs_stop_callback()
    34 ctx->csa.class_1_dsisr = spu->class_1_dsisr; in spufs_stop_callback()
    35 ctx->csa.class_1_dar = spu->class_1_dar; in spufs_stop_callback()
    72 dsisr = ctx->csa.class_1_dsisr; in spu_stopped()
    76 if (ctx->csa.class_0_pending) in spu_stopped()
|
D | spufs.h |
    69 struct spu_state csa; /* SPU context save area. */ member
    344 extern int spu_init_csa(struct spu_state *csa);
    345 extern void spu_fini_csa(struct spu_state *csa);
    350 extern int spu_alloc_lscsa(struct spu_state *csa);
    351 extern void spu_free_lscsa(struct spu_state *csa);
|
D | context.c |
    35 if (spu_init_csa(&ctx->csa)) in alloc_spu_context()
    77 spu_fini_csa(&ctx->csa); in destroy_spu_context()
|
D | sched.c |
    237 spu_restore(&ctx->csa, spu); in spu_bind_context()
    439 spu_save(&ctx->csa, spu); in spu_unbind_context()
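
Read together, the spufs hits describe one lifecycle for the SPU context save area: alloc_spu_context() sets it up with spu_init_csa(), the scheduler moves state in and out of hardware with spu_restore()/spu_save() when a context is bound to or unbound from a physical SPU, and destroy_spu_context() tears it down with spu_fini_csa(). A minimal standalone sketch of that ownership pattern; the types and copy-based save/restore bodies below are stubs, not the spufs implementation:

    /* Standalone illustration of the csa lifecycle seen above; all types
     * and bodies are stubs, not the spufs implementation. */
    #include <string.h>

    struct spu_state   { unsigned char area[256]; };  /* stand-in save area */
    struct spu         { unsigned char regs[256]; };  /* stand-in live SPU  */
    struct spu_context { struct spu_state csa; };

    static int  spu_init_csa(struct spu_state *csa) { memset(csa, 0, sizeof(*csa)); return 0; }
    static void spu_fini_csa(struct spu_state *csa) { (void)csa; }
    static void spu_restore(struct spu_state *csa, struct spu *spu) { memcpy(spu->regs, csa->area, sizeof(spu->regs)); }
    static void spu_save(struct spu_state *csa, struct spu *spu)    { memcpy(csa->area, spu->regs, sizeof(csa->area)); }

    int main(void)
    {
        struct spu_context ctx;
        struct spu spu;

        if (spu_init_csa(&ctx.csa))     /* alloc_spu_context() path   */
            return 1;
        spu_restore(&ctx.csa, &spu);    /* spu_bind_context() path    */
        /* ... context runs on the SPU ... */
        spu_save(&ctx.csa, &spu);       /* spu_unbind_context() path  */
        spu_fini_csa(&ctx.csa);         /* destroy_spu_context() path */
        return 0;
    }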
|
/linux-6.12.1/drivers/hwtracing/coresight/ |
D | coresight-etm4x-core.c |
    72 struct csdev_access *csa; member
    159 struct csdev_access *csa) in etm_detect_os_lock() argument
    161 u32 oslsr = etm4x_relaxed_read32(csa, TRCOSLSR); in etm_detect_os_lock()
    167 struct csdev_access *csa, u32 val) in etm_write_os_lock() argument
    173 etm4x_relaxed_write32(csa, val, TRCOSLAR); in etm_write_os_lock()
    189 struct csdev_access *csa) in etm4_os_unlock_csa() argument
    194 etm_write_os_lock(drvdata, csa, 0x0); in etm4_os_unlock_csa()
    214 struct csdev_access *csa) in etm4_cs_lock() argument
    217 if (csa->io_mem) in etm4_cs_lock()
    218 CS_LOCK(csa->base); in etm4_cs_lock()
    [all …]
|
D | coresight-tpiu.c |
    68 static void tpiu_enable_hw(struct csdev_access *csa) in tpiu_enable_hw() argument
    70 CS_UNLOCK(csa->base); in tpiu_enable_hw()
    74 CS_LOCK(csa->base); in tpiu_enable_hw()
    89 static void tpiu_disable_hw(struct csdev_access *csa) in tpiu_disable_hw() argument
    91 CS_UNLOCK(csa->base); in tpiu_disable_hw()
    94 csdev_access_relaxed_write32(csa, FFCR_STOP_FI, TPIU_FFCR); in tpiu_disable_hw()
    96 csdev_access_relaxed_write32(csa, FFCR_STOP_FI | FFCR_FON_MAN, TPIU_FFCR); in tpiu_disable_hw()
    98 coresight_timeout(csa, TPIU_FFCR, FFCR_FON_MAN_BIT, 0); in tpiu_disable_hw()
    100 coresight_timeout(csa, TPIU_FFSR, FFSR_FT_STOPPED_BIT, 1); in tpiu_disable_hw()
    102 CS_LOCK(csa->base); in tpiu_disable_hw()
|
D | coresight-etm4x.h |
    513 #define etm4x_relaxed_read32(csa, offset) \ argument
    514 ((u32)((csa)->io_mem ? \
    515 readl_relaxed((csa)->base + (offset)) : \
    518 #define etm4x_relaxed_read64(csa, offset) \ argument
    519 ((u64)((csa)->io_mem ? \
    520 readq_relaxed((csa)->base + (offset)) : \
    523 #define etm4x_read32(csa, offset) \ argument
    525 u32 __val = etm4x_relaxed_read32((csa), (offset)); \
    530 #define etm4x_read64(csa, offset) \ argument
    532 u64 __val = etm4x_relaxed_read64((csa), (offset)); \
    [all …]
|
D | coresight-tmc-core.c |
    40 struct csdev_access *csa = &csdev->access; in tmc_wait_for_tmcready() local
    43 if (coresight_timeout(csa, TMC_STS, TMC_STS_TMCREADY_BIT, 1)) { in tmc_wait_for_tmcready()
    54 struct csdev_access *csa = &csdev->access; in tmc_flush_and_stop() local
    63 if (coresight_timeout(csa, TMC_FFCR, TMC_FFCR_FLUSHMAN_BIT, 0)) { in tmc_flush_and_stop()
|
D | coresight-etb10.c |
    254 struct csdev_access *csa = &drvdata->csdev->access; in __etb_disable_hw() local
    266 if (coresight_timeout(csa, ETB_FFCR, ETB_FFCR_BIT, 0)) { in __etb_disable_hw()
    274 if (coresight_timeout(csa, ETB_FFSR, ETB_FFSR_BIT, 1)) { in __etb_disable_hw()
|
D | coresight-catu.c |
    395 struct csdev_access *csa = &drvdata->csdev->access; in catu_wait_for_ready() local
    397 return coresight_timeout(csa, CATU_STATUS, CATU_STATUS_READY, 1); in catu_wait_for_ready()
|
D | coresight-stm.c |
    262 struct csdev_access *csa = &csdev->access; in stm_disable() local
    275 coresight_timeout(csa, STMTCSR, STMTCSR_BUSY_BIT, 0); in stm_disable()
|
D | coresight-core.c |
    1030 int coresight_timeout(struct csdev_access *csa, u32 offset, in coresight_timeout() argument
    1037 val = csdev_access_read32(csa, offset); in coresight_timeout()
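
The callers above (TMC, ETB, CATU, STM, TPIU) all use coresight_timeout() the same way: poll one bit of a device status register, addressed through the csa accessor, until it reads back as the wanted value or a retry budget runs out. A hedged userspace sketch of that polling idea; the accessor stub, retry count, and delay below are invented for the example, not the kernel's values:

    /* Illustrative poll loop in the spirit of coresight_timeout(): wait for
     * a single status bit to reach the wanted value.  read_reg() is a
     * stand-in for csdev_access_read32(); the bound and delay are made up. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <unistd.h>

    static uint32_t fake_status;                /* pretend device register */

    static uint32_t read_reg(uint32_t offset)
    {
        (void)offset;
        return fake_status |= 0x4;              /* bit 2 eventually sets   */
    }

    static int wait_for_bit(uint32_t offset, int position, int value)
    {
        for (int i = 0; i < 100; i++) {         /* bounded retries         */
            bool bit = (read_reg(offset) >> position) & 1;

            if (bit == (bool)value)
                return 0;                       /* condition met           */
            usleep(1);                          /* brief back-off          */
        }
        return -1;                              /* timed out               */
    }

    int main(void)
    {
        return wait_for_bit(0x0, 2, 1);         /* e.g. a "ready" bit      */
    }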
|
/linux-6.12.1/include/linux/ |
D | coresight.h |
    414 static inline u32 csdev_access_relaxed_read32(struct csdev_access *csa, in csdev_access_relaxed_read32() argument
    417 if (likely(csa->io_mem)) in csdev_access_relaxed_read32()
    418 return readl_relaxed(csa->base + offset); in csdev_access_relaxed_read32()
    420 return csa->read(offset, true, false); in csdev_access_relaxed_read32()
    470 static inline u32 coresight_get_pid(struct csdev_access *csa) in coresight_get_pid() argument
    475 pid |= csdev_access_relaxed_read32(csa, CORESIGHT_PIDRn(i)) << (i * 8); in coresight_get_pid()
    480 static inline u64 csdev_access_relaxed_read_pair(struct csdev_access *csa, in csdev_access_relaxed_read_pair() argument
    483 if (likely(csa->io_mem)) { in csdev_access_relaxed_read_pair()
    484 return readl_relaxed(csa->base + lo_offset) | in csdev_access_relaxed_read_pair()
    485 ((u64)readl_relaxed(csa->base + hi_offset) << 32); in csdev_access_relaxed_read_pair()
    [all …]
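
These coresight.h helpers are where the csa convention in the CoreSight code comes from: struct csdev_access hides whether a component is reached through a memory-mapped window (the io_mem/base branch, read with readl_relaxed()) or through an indirect read callback (the csa->read(offset, true, false) branch). A reduced userspace model of that dual-path accessor, using only the fields visible in the hits above; the struct name, layout, and main() here are illustrative, not the kernel definition:

    /* Reduced model of the dual-path register accessor behind the csa
     * helpers: MMIO when io_mem is set, an indirect callback otherwise.
     * Mirrors the shape of csdev_access_relaxed_read32() above, but is
     * not the kernel structure. */
    #include <stdbool.h>
    #include <stdint.h>

    struct csdev_access_model {
        bool io_mem;                             /* true: memory mapped      */
        volatile uint32_t *base;                 /* MMIO window base         */
        uint64_t (*read)(uint32_t offset, bool relaxed, bool _64bit);
    };

    static uint32_t access_read32(const struct csdev_access_model *csa, uint32_t offset)
    {
        if (csa->io_mem)
            return csa->base[offset / sizeof(uint32_t)];   /* readl_relaxed() stand-in */
        return (uint32_t)csa->read(offset, true, false);   /* indirect, non-MMIO path  */
    }

    int main(void)
    {
        static volatile uint32_t regs[4] = { 0, 0, 0xcafe, 0 };
        struct csdev_access_model csa = { .io_mem = true, .base = regs };

        return access_read32(&csa, 0x8) == 0xcafe ? 0 : 1; /* reads regs[2] */
    }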
|
/linux-6.12.1/net/mac80211/ |
D | mesh.c |
    953 struct mesh_csa_settings *csa; in ieee80211_mesh_build_beacon() local
    1028 csa = rcu_dereference(ifmsh->csa); in ieee80211_mesh_build_beacon()
    1029 if (csa) { in ieee80211_mesh_build_beacon()
    1040 csa->settings.chandef.chan->center_freq); in ieee80211_mesh_build_beacon()
    1041 bcn->cntdwn_current_counter = csa->settings.count; in ieee80211_mesh_build_beacon()
    1043 *pos++ = csa->settings.count; in ieee80211_mesh_build_beacon()
    1052 *pos++ |= csa->settings.block_tx ? in ieee80211_mesh_build_beacon()
    1059 switch (csa->settings.chandef.width) { in ieee80211_mesh_build_beacon()
    1066 ct = cfg80211_get_chandef_type(&csa->settings.chandef); in ieee80211_mesh_build_beacon()
    1083 chandef = &csa->settings.chandef; in ieee80211_mesh_build_beacon()
    [all …]
|
D | link.c |
    40 wiphy_work_init(&link->csa.finalize_work, in ieee80211_link_init()
    80 &link->csa.finalize_work); in ieee80211_link_stop()
    384 &link->u.mgd.csa.switch_work, in _ieee80211_set_active_links()
    385 link->u.mgd.csa.time - in _ieee80211_set_active_links()
|
D | mlme.c |
    2081 u.mgd.csa.switch_work.work); in ieee80211_csa_switch_work()
    2104 link->conf->chanreq = link->csa.chanreq; in ieee80211_csa_switch_work()
    2105 cfg80211_ch_switch_notify(sdata->dev, &link->csa.chanreq.oper, in ieee80211_csa_switch_work()
    2138 &link->csa.chanreq)) { in ieee80211_csa_switch_work()
    2146 link->u.mgd.csa.waiting_bcn = true; in ieee80211_csa_switch_work()
    2149 if (link->u.mgd.csa.ap_chandef.chan->band == NL80211_BAND_6GHZ && in ieee80211_csa_switch_work()
    2151 ieee80211_rearrange_tpe(&link->u.mgd.csa.tpe, in ieee80211_csa_switch_work()
    2152 &link->u.mgd.csa.ap_chandef, in ieee80211_csa_switch_work()
    2154 if (memcmp(&link->conf->tpe, &link->u.mgd.csa.tpe, in ieee80211_csa_switch_work()
    2155 sizeof(link->u.mgd.csa.tpe))) { in ieee80211_csa_switch_work()
    [all …]
|
D | cfg.c |
    917 const struct ieee80211_csa_settings *csa, in ieee80211_set_probe_resp() argument
    935 if (csa) in ieee80211_set_probe_resp()
    936 memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_presp, in ieee80211_set_probe_resp()
    937 csa->n_counter_offsets_presp * in ieee80211_set_probe_resp()
    1096 const struct ieee80211_csa_settings *csa, in ieee80211_assign_beacon() argument
    1182 if (csa) { in ieee80211_assign_beacon()
    1183 new->cntdwn_current_counter = csa->count; in ieee80211_assign_beacon()
    1184 memcpy(new->cntdwn_counter_offsets, csa->counter_offsets_beacon, in ieee80211_assign_beacon()
    1185 csa->n_counter_offsets_beacon * in ieee80211_assign_beacon()
    1206 params->probe_resp_len, csa, cca, link); in ieee80211_assign_beacon()
    [all …]
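
In mac80211, csa usually means a channel-switch announcement. The mesh.c, mlme.c, and cfg.c hits show its two halves: the template builders stamp the current countdown value into the beacon/probe-response and record the byte offsets of those countdown fields (counter_offsets_beacon/_presp) so they can be updated as frames go out, and the switch worker applies the new chanreq once the countdown completes. A small standalone sketch of the offset-stamping half; the struct and helper names below are invented for illustration and are not the mac80211 API:

    /* Stamp a channel-switch countdown value into a frame template at
     * recorded byte offsets, mirroring the counter_offsets_beacon idea in
     * the hits above.  Names are invented for the example. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct csa_template {
        uint8_t frame[64];            /* pretend beacon/probe-response template */
        size_t  counter_offsets[2];   /* where the countdown bytes live         */
        size_t  n_counter_offsets;
        uint8_t count;                /* beacons remaining before the switch    */
    };

    static void csa_stamp_counters(struct csa_template *t)
    {
        for (size_t i = 0; i < t->n_counter_offsets; i++)
            t->frame[t->counter_offsets[i]] = t->count;
    }

    int main(void)
    {
        struct csa_template t = {
            .counter_offsets = { 10, 42 },
            .n_counter_offsets = 2,
            .count = 5,               /* e.g. five beacons until the switch */
        };

        csa_stamp_counters(&t);
        printf("offset 10 = %u, offset 42 = %u\n", t.frame[10], t.frame[42]);
        return 0;
    }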
|
/linux-6.12.1/kernel/cgroup/ |
D | cpuset.c |
    722 struct cpuset **csa; /* array of all cpuset ptrs */ in generate_sched_domains() local
    736 csa = NULL; in generate_sched_domains()
    757 csa = kmalloc_array(nr_cpusets(), sizeof(cp), GFP_KERNEL); in generate_sched_domains()
    758 if (!csa) in generate_sched_domains()
    764 csa[csn++] = &top_cpuset; in generate_sched_domains()
    789 csa[csn++] = cp; in generate_sched_domains()
    802 csa[csn++] = cp; in generate_sched_domains()
    821 uf_node_init(&csa[i]->node); in generate_sched_domains()
    826 if (cpusets_overlap(csa[i], csa[j])) { in generate_sched_domains()
    832 uf_union(&csa[i]->node, &csa[j]->node); in generate_sched_domains()
    [all …]
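
Here csa has nothing to do with channel switching: generate_sched_domains() gathers candidate cpusets into the csa array, then merges any two whose CPU masks overlap using a union-find pass, so that each resulting group of connected cpusets becomes one scheduling domain. A toy standalone version of that grouping step; the bitmask "cpusets" and the minimal union-find below stand in for the kernel's cpumask and uf_* helpers:

    /* Toy version of the overlap-merging pass in generate_sched_domains():
     * cpusets whose CPU masks intersect end up in the same partition. */
    #include <stdio.h>

    #define NSETS 4

    static int parent[NSETS];

    static int uf_find(int x)
    {
        while (parent[x] != x) {
            parent[x] = parent[parent[x]];    /* path halving */
            x = parent[x];
        }
        return x;
    }

    static void uf_union(int a, int b)
    {
        parent[uf_find(a)] = uf_find(b);
    }

    int main(void)
    {
        /* Each "cpuset" is just a CPU bitmask for the example. */
        unsigned long csa[NSETS] = { 0x3, 0x6, 0x10, 0x20 };

        for (int i = 0; i < NSETS; i++)
            parent[i] = i;                    /* uf_node_init() analogue    */

        for (int i = 0; i < NSETS; i++)
            for (int j = i + 1; j < NSETS; j++)
                if (csa[i] & csa[j])          /* cpusets_overlap() analogue */
                    uf_union(i, j);

        for (int i = 0; i < NSETS; i++)
            printf("cpuset %d -> partition root %d\n", i, uf_find(i));
        return 0;
    }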
|
/linux-6.12.1/drivers/net/wireless/intel/iwlwifi/dvm/ |
D | rx.c |
    47 struct iwl_csa_notification *csa = (void *)pkt->data; in iwlagn_rx_csa() local
    58 if (!le32_to_cpu(csa->status) && csa->channel == priv->switch_channel) { in iwlagn_rx_csa()
    59 rxon->channel = csa->channel; in iwlagn_rx_csa()
    60 ctx->staging.channel = csa->channel; in iwlagn_rx_csa()
    62 le16_to_cpu(csa->channel)); in iwlagn_rx_csa()
    66 le16_to_cpu(csa->channel)); in iwlagn_rx_csa()
|
/linux-6.12.1/drivers/net/wireless/intel/iwlegacy/ |
D | common.c |
    4113 struct il_csa_notification *csa = &(pkt->u.csa_notif); in il_hdl_csa() local
    4119 if (!le32_to_cpu(csa->status) && csa->channel == il->switch_channel) { in il_hdl_csa()
    4120 rxon->channel = csa->channel; in il_hdl_csa()
    4121 il->staging.channel = csa->channel; in il_hdl_csa()
    4122 D_11H("CSA notif: channel %d\n", le16_to_cpu(csa->channel)); in il_hdl_csa()
    4126 le16_to_cpu(csa->channel)); in il_hdl_csa()
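
Both Intel drivers apply the same acceptance rule to the firmware's channel-switch notification: commit the new channel only when the reported status is zero and the channel matches the switch the driver itself initiated, otherwise ignore the event. A simplified standalone sketch of that check; the plain CPU-endian types below stand in for the drivers' little-endian firmware structures:

    /* Simplified acceptance check for a firmware channel-switch
     * notification, mirroring the pattern in iwlagn_rx_csa()/il_hdl_csa().
     * Plain integers stand in for the little-endian wire types. */
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct csa_notif {
        uint32_t status;    /* 0 on success                     */
        uint16_t channel;   /* channel the firmware switched to */
    };

    static bool csa_commit(const struct csa_notif *csa,
                           uint16_t expected, uint16_t *active_channel)
    {
        if (csa->status != 0 || csa->channel != expected)
            return false;              /* failed or unexpected switch: ignore   */
        *active_channel = csa->channel;/* analogue of updating the staging RXON */
        return true;
    }

    int main(void)
    {
        struct csa_notif notif = { .status = 0, .channel = 36 };
        uint16_t active = 1;

        if (csa_commit(&notif, 36, &active))
            printf("switched to channel %u\n", active);
        return 0;
    }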
|