/linux-6.12.1/drivers/gpu/drm/i915/gt/ |
D | gen8_engine_cs.c |
    16  u32 *cs, flags = 0;   in gen8_emit_flush_rcs() local
    58  cs = intel_ring_begin(rq, len);   in gen8_emit_flush_rcs()
    59  if (IS_ERR(cs))   in gen8_emit_flush_rcs()
    60  return PTR_ERR(cs);   in gen8_emit_flush_rcs()
    63  cs = gen8_emit_pipe_control(cs, 0, 0);   in gen8_emit_flush_rcs()
    66  cs = gen8_emit_pipe_control(cs, PIPE_CONTROL_DC_FLUSH_ENABLE,   in gen8_emit_flush_rcs()
    69  cs = gen8_emit_pipe_control(cs, flags, LRC_PPHWSP_SCRATCH_ADDR);   in gen8_emit_flush_rcs()
    72  cs = gen8_emit_pipe_control(cs, PIPE_CONTROL_CS_STALL, 0);   in gen8_emit_flush_rcs()
    74  intel_ring_advance(rq, cs);   in gen8_emit_flush_rcs()
    81  u32 cmd, *cs;   in gen8_emit_flush_xcs() local
    [all …]
|
D | gen6_engine_cs.c |
    60  u32 *cs;   in gen6_emit_post_sync_nonzero_flush() local
    62  cs = intel_ring_begin(rq, 6);   in gen6_emit_post_sync_nonzero_flush()
    63  if (IS_ERR(cs))   in gen6_emit_post_sync_nonzero_flush()
    64  return PTR_ERR(cs);   in gen6_emit_post_sync_nonzero_flush()
    66  *cs++ = GFX_OP_PIPE_CONTROL(5);   in gen6_emit_post_sync_nonzero_flush()
    67  *cs++ = PIPE_CONTROL_CS_STALL | PIPE_CONTROL_STALL_AT_SCOREBOARD;   in gen6_emit_post_sync_nonzero_flush()
    68  *cs++ = scratch_addr | PIPE_CONTROL_GLOBAL_GTT;   in gen6_emit_post_sync_nonzero_flush()
    69  *cs++ = 0; /* low dword */   in gen6_emit_post_sync_nonzero_flush()
    70  *cs++ = 0; /* high dword */   in gen6_emit_post_sync_nonzero_flush()
    71  *cs++ = MI_NOOP;   in gen6_emit_post_sync_nonzero_flush()
    [all …]
|
D | gen2_engine_cs.c |
    19  u32 cmd, *cs;   in gen2_emit_flush() local
    25  cs = intel_ring_begin(rq, 2 + 4 * num_store_dw);   in gen2_emit_flush()
    26  if (IS_ERR(cs))   in gen2_emit_flush()
    27  return PTR_ERR(cs);   in gen2_emit_flush()
    29  *cs++ = cmd;   in gen2_emit_flush()
    31  *cs++ = MI_STORE_DWORD_INDEX;   in gen2_emit_flush()
    32  *cs++ = I915_GEM_HWS_SCRATCH * sizeof(u32);   in gen2_emit_flush()
    33  *cs++ = 0;   in gen2_emit_flush()
    34  *cs++ = MI_FLUSH | MI_NO_WRITE_FLUSH;   in gen2_emit_flush()
    36  *cs++ = cmd;   in gen2_emit_flush()
    [all …]
|
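Note: the gen8/gen6/gen2 matches above all share the same ring-emission idiom — reserve a fixed number of dwords with intel_ring_begin(), fill them through the returned cs pointer, then commit with intel_ring_advance(). A minimal sketch of that idiom, for orientation only (emit_two_noops() is a hypothetical helper, and the header names assume the i915 driver's internal include paths):

    /* Sketch of the intel_ring_begin()/intel_ring_advance() pattern. */
    #include "i915_request.h"           /* struct i915_request */
    #include "gt/intel_ring.h"          /* intel_ring_begin/advance */
    #include "gt/intel_gpu_commands.h"  /* MI_NOOP */

    static int emit_two_noops(struct i915_request *rq)
    {
        u32 *cs;

        /* Reserve two dwords in the request's ring; ERR_PTR() on failure. */
        cs = intel_ring_begin(rq, 2);
        if (IS_ERR(cs))
            return PTR_ERR(cs);

        /* Write exactly the dwords that were reserved. */
        *cs++ = MI_NOOP;
        *cs++ = MI_NOOP;

        /* Commit: advance the ring tail past the emitted dwords. */
        intel_ring_advance(rq, cs);
        return 0;
    }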
D | gen7_renderclear.c |
    102  static u32 batch_offset(const struct batch_chunk *bc, u32 *cs)   in batch_offset() argument
    104  return (cs - bc->start) * sizeof(*bc->start) + bc->offset;   in batch_offset()
    148  u32 *cs = batch_alloc_items(state, 32, 8);   in gen7_fill_surface_state() local
    149  u32 offset = batch_offset(state, cs);   in gen7_fill_surface_state()
    155  *cs++ = SURFACE_2D << 29 |   in gen7_fill_surface_state()
    159  *cs++ = batch_addr(state) + dst_offset;   in gen7_fill_surface_state()
    161  *cs++ = ((surface_h / 4 - 1) << 16) | (surface_w / 4 - 1);   in gen7_fill_surface_state()
    162  *cs++ = surface_w;   in gen7_fill_surface_state()
    163  *cs++ = 0;   in gen7_fill_surface_state()
    164  *cs++ = 0;   in gen7_fill_surface_state()
    [all …]
|
D | intel_migrate.c |
    336  u32 *cs;   in emit_no_arbitration() local
    338  cs = intel_ring_begin(rq, 2);   in emit_no_arbitration()
    339  if (IS_ERR(cs))   in emit_no_arbitration()
    340  return PTR_ERR(cs);   in emit_no_arbitration()
    343  *cs++ = MI_ARB_ON_OFF;   in emit_no_arbitration()
    344  *cs++ = MI_NOOP;   in emit_no_arbitration()
    345  intel_ring_advance(rq, cs);   in emit_no_arbitration()
    376  u32 *hdr, *cs;   in emit_pte() local
    403  cs = intel_ring_begin(rq, I915_EMIT_PTE_NUM_DWORDS);   in emit_pte()
    404  if (IS_ERR(cs))   in emit_pte()
    [all …]
|
D | gen8_engine_cs.h |
    43  u32 *gen8_emit_fini_breadcrumb_xcs(struct i915_request *rq, u32 *cs);
    44  u32 *gen12_emit_fini_breadcrumb_xcs(struct i915_request *rq, u32 *cs);
    46  u32 *gen8_emit_fini_breadcrumb_rcs(struct i915_request *rq, u32 *cs);
    47  u32 *gen11_emit_fini_breadcrumb_rcs(struct i915_request *rq, u32 *cs);
    48  u32 *gen12_emit_fini_breadcrumb_rcs(struct i915_request *rq, u32 *cs);
    50  u32 *gen12_emit_aux_table_inv(struct intel_engine_cs *engine, u32 *cs);
    79  __gen8_emit_write_rcs(u32 *cs, u32 value, u32 offset, u32 flags0, u32 flags1)   in __gen8_emit_write_rcs() argument
    81  *cs++ = GFX_OP_PIPE_CONTROL(6) | flags0;   in __gen8_emit_write_rcs()
    82  *cs++ = flags1 | PIPE_CONTROL_QW_WRITE;   in __gen8_emit_write_rcs()
    83  *cs++ = offset;   in __gen8_emit_write_rcs()
    [all …]
|
D | selftest_engine_pm.c |
    34  static u32 *emit_wait(u32 *cs, u32 offset, int op, u32 value)   in emit_wait() argument
    36  *cs++ = MI_SEMAPHORE_WAIT |   in emit_wait()
    40  *cs++ = value;   in emit_wait()
    41  *cs++ = offset;   in emit_wait()
    42  *cs++ = 0;   in emit_wait()
    44  return cs;   in emit_wait()
    47  static u32 *emit_store(u32 *cs, u32 offset, u32 value)   in emit_store() argument
    49  *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;   in emit_store()
    50  *cs++ = offset;   in emit_store()
    51  *cs++ = 0;   in emit_store()
    [all …]
|
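Note: emit_wait() and emit_store() above illustrate a second common idiom — small helpers that take a u32 *cs cursor, append a fixed command sequence, and return the advanced cursor so callers can chain them between intel_ring_begin() and intel_ring_advance(). A sketch under the same assumptions as the earlier one (emit_store_dw() is hypothetical):

    /* Append one MI_STORE_DWORD_IMM_GEN4 write to a GGTT address and
     * return the advanced cursor; the caller accounts for the 4 dwords.
     */
    static u32 *emit_store_dw(u32 *cs, u32 ggtt_offset, u32 value)
    {
        *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;
        *cs++ = ggtt_offset;   /* low 32 bits of the address */
        *cs++ = 0;             /* high 32 bits */
        *cs++ = value;

        return cs;             /* caller keeps emitting from here */
    }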
D | intel_lrc.c |
    1045  static u32 *setup_predicate_disable_wa(const struct intel_context *ce, u32 *cs)   in setup_predicate_disable_wa() argument
    1048  *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT | (4 - 2);   in setup_predicate_disable_wa()
    1049  *cs++ = lrc_indirect_bb(ce) + DG2_PREDICATE_RESULT_WA;   in setup_predicate_disable_wa()
    1050  *cs++ = 0;   in setup_predicate_disable_wa()
    1051  *cs++ = 0; /* No predication */   in setup_predicate_disable_wa()
    1054  *cs++ = MI_BATCH_BUFFER_END | BIT(15);   in setup_predicate_disable_wa()
    1055  *cs++ = MI_SET_PREDICATE | MI_SET_PREDICATE_DISABLE;   in setup_predicate_disable_wa()
    1058  *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT | (4 - 2);   in setup_predicate_disable_wa()
    1059  *cs++ = lrc_indirect_bb(ce) + DG2_PREDICATE_RESULT_WA;   in setup_predicate_disable_wa()
    1060  *cs++ = 0;   in setup_predicate_disable_wa()
    [all …]
|
D | selftest_lrc.c |
    84  u32 *cs;   in emit_semaphore_signal() local
    90  cs = intel_ring_begin(rq, 4);   in emit_semaphore_signal()
    91  if (IS_ERR(cs)) {   in emit_semaphore_signal()
    93  return PTR_ERR(cs);   in emit_semaphore_signal()
    96  *cs++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;   in emit_semaphore_signal()
    97  *cs++ = offset;   in emit_semaphore_signal()
    98  *cs++ = 0;   in emit_semaphore_signal()
    99  *cs++ = 1;   in emit_semaphore_signal()
    101  intel_ring_advance(rq, cs);   in emit_semaphore_signal()
    414  u32 *cs;   in __live_lrc_state() local
    [all …]
|
/linux-6.12.1/kernel/time/ |
D | clocksource.c |
    23  static noinline u64 cycles_to_nsec_safe(struct clocksource *cs, u64 start, u64 end)   in cycles_to_nsec_safe() argument
    25  u64 delta = clocksource_delta(end, start, cs->mask);   in cycles_to_nsec_safe()
    27  if (likely(delta < cs->max_cycles))   in cycles_to_nsec_safe()
    28  return clocksource_cyc2ns(delta, cs->mult, cs->shift);   in cycles_to_nsec_safe()
    30  return mul_u64_u32_shr(delta, cs->mult, cs->shift);   in cycles_to_nsec_safe()
    174  static void __clocksource_change_rating(struct clocksource *cs, int rating);
    194  static void __clocksource_unstable(struct clocksource *cs)   in __clocksource_unstable() argument
    196  cs->flags &= ~(CLOCK_SOURCE_VALID_FOR_HRES | CLOCK_SOURCE_WATCHDOG);   in __clocksource_unstable()
    197  cs->flags |= CLOCK_SOURCE_UNSTABLE;   in __clocksource_unstable()
    203  if (list_empty(&cs->list)) {   in __clocksource_unstable()
    [all …]
|
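Note: cycles_to_nsec_safe() above converts a counter delta to nanoseconds with the clocksource's mult/shift pair, i.e. ns = (cycles * mult) >> shift, and falls back to an overflow-safe multiply when the delta exceeds max_cycles. A standalone sketch of the fast-path arithmetic (the 24 MHz mult/shift values are made up for illustration):

    #include <stdint.h>
    #include <stdio.h>

    /* Same scaling as clocksource_cyc2ns(): ns = (cycles * mult) >> shift. */
    static uint64_t cyc2ns(uint64_t cycles, uint32_t mult, uint32_t shift)
    {
        return (cycles * mult) >> shift;
    }

    int main(void)
    {
        /* Roughly a 24 MHz counter: one tick is about 41.67 ns. */
        uint32_t mult = 2796202667u, shift = 26;

        /* 24,000,000 cycles -> ~1,000,000,000 ns (one second). */
        printf("%llu ns\n",
               (unsigned long long)cyc2ns(24000000, mult, shift));
        return 0;
    }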
/linux-6.12.1/kernel/cgroup/ |
D | cpuset.c |
    135  struct cpuset *cs = task_cs(p);   in inc_dl_tasks_cs() local
    137  cs->nr_deadline_tasks++;   in inc_dl_tasks_cs()
    142  struct cpuset *cs = task_cs(p);   in dec_dl_tasks_cs() local
    144  cs->nr_deadline_tasks--;   in dec_dl_tasks_cs()
    147  static inline int is_partition_valid(const struct cpuset *cs)   in is_partition_valid() argument
    149  return cs->partition_root_state > 0;   in is_partition_valid()
    152  static inline int is_partition_invalid(const struct cpuset *cs)   in is_partition_invalid() argument
    154  return cs->partition_root_state < 0;   in is_partition_invalid()
    160  static inline void make_partition_invalid(struct cpuset *cs)   in make_partition_invalid() argument
    162  if (cs->partition_root_state > 0)   in make_partition_invalid()
    [all …]
|
D | cpuset-v1.c |
    10  struct cpuset *cs;   member
    147  static int update_relax_domain_level(struct cpuset *cs, s64 val)   in update_relax_domain_level() argument
    154  if (val != cs->relax_domain_level) {   in update_relax_domain_level()
    155  cs->relax_domain_level = val;   in update_relax_domain_level()
    156  if (!cpumask_empty(cs->cpus_allowed) &&   in update_relax_domain_level()
    157  is_sched_load_balance(cs))   in update_relax_domain_level()
    167  struct cpuset *cs = css_cs(css);   in cpuset_write_s64() local
    173  if (!is_cpuset_online(cs))   in cpuset_write_s64()
    178  retval = update_relax_domain_level(cs, val);   in cpuset_write_s64()
    192  struct cpuset *cs = css_cs(css);   in cpuset_read_s64() local
    [all …]
|
D | cpuset-internal.h |
    196  static inline struct cpuset *parent_cs(struct cpuset *cs)   in parent_cs() argument
    198  return css_cs(cs->css.parent);   in parent_cs()
    202  static inline bool is_cpuset_online(struct cpuset *cs)   in is_cpuset_online() argument
    204  return test_bit(CS_ONLINE, &cs->flags) && !css_is_dying(&cs->css);   in is_cpuset_online()
    207  static inline int is_cpu_exclusive(const struct cpuset *cs)   in is_cpu_exclusive() argument
    209  return test_bit(CS_CPU_EXCLUSIVE, &cs->flags);   in is_cpu_exclusive()
    212  static inline int is_mem_exclusive(const struct cpuset *cs)   in is_mem_exclusive() argument
    214  return test_bit(CS_MEM_EXCLUSIVE, &cs->flags);   in is_mem_exclusive()
    217  static inline int is_mem_hardwall(const struct cpuset *cs)   in is_mem_hardwall() argument
    219  return test_bit(CS_MEM_HARDWALL, &cs->flags);   in is_mem_hardwall()
    [all …]
|
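Note: the cpuset-internal.h matches show the usual flag-predicate pattern — each helper simply tests one bit of the cpuset's flags word with test_bit(). A minimal sketch of that pattern with a hypothetical flag and structure (not the real cpuset definitions):

    #include <linux/bitops.h>

    enum { CS_EXAMPLE_ONLINE = 0 };      /* hypothetical flag bit */

    struct example_set {
        unsigned long flags;
    };

    /* Mirrors is_cpuset_online()/is_cpu_exclusive() above: one bit, one test. */
    static inline int is_example_online(const struct example_set *cs)
    {
        return test_bit(CS_EXAMPLE_ONLINE, &cs->flags);
    }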
/linux-6.12.1/drivers/gpu/drm/i915/pxp/ |
D | intel_pxp_cmd.c |
    23  static u32 *pxp_emit_session_selection(u32 *cs, u32 idx)   in pxp_emit_session_selection() argument
    25  *cs++ = MFX_WAIT_PXP;   in pxp_emit_session_selection()
    28  *cs++ = MI_FLUSH_DW;   in pxp_emit_session_selection()
    29  *cs++ = 0;   in pxp_emit_session_selection()
    30  *cs++ = 0;   in pxp_emit_session_selection()
    33  *cs++ = MI_SET_APPID | MI_SET_APPID_SESSION_ID(idx);   in pxp_emit_session_selection()
    35  *cs++ = MFX_WAIT_PXP;   in pxp_emit_session_selection()
    38  *cs++ = MI_FLUSH_DW | MI_FLUSH_DW_PROTECTED_MEM_EN |   in pxp_emit_session_selection()
    40  *cs++ = I915_GEM_HWS_PXP_ADDR | MI_FLUSH_DW_USE_GTT;   in pxp_emit_session_selection()
    41  *cs++ = 0;   in pxp_emit_session_selection()
    [all …]
|
/linux-6.12.1/drivers/scsi/ |
D | myrs.c |
    104  static void myrs_qcmd(struct myrs_hba *cs, struct myrs_cmdblk *cmd_blk)   in myrs_qcmd() argument
    106  void __iomem *base = cs->io_base;   in myrs_qcmd()
    108  union myrs_cmd_mbox *next_mbox = cs->next_cmd_mbox;   in myrs_qcmd()
    110  cs->write_cmd_mbox(next_mbox, mbox);   in myrs_qcmd()
    112  if (cs->prev_cmd_mbox1->words[0] == 0 ||   in myrs_qcmd()
    113  cs->prev_cmd_mbox2->words[0] == 0)   in myrs_qcmd()
    114  cs->get_cmd_mbox(base);   in myrs_qcmd()
    116  cs->prev_cmd_mbox2 = cs->prev_cmd_mbox1;   in myrs_qcmd()
    117  cs->prev_cmd_mbox1 = next_mbox;   in myrs_qcmd()
    119  if (++next_mbox > cs->last_cmd_mbox)   in myrs_qcmd()
    [all …]
|
/linux-6.12.1/sound/core/ |
D | pcm_iec958.c |
    29  int snd_pcm_create_iec958_consumer_default(u8 *cs, size_t len)   in snd_pcm_create_iec958_consumer_default() argument
    34  memset(cs, 0, len);   in snd_pcm_create_iec958_consumer_default()
    36  cs[0] = IEC958_AES0_CON_NOT_COPYRIGHT | IEC958_AES0_CON_EMPHASIS_NONE;   in snd_pcm_create_iec958_consumer_default()
    37  cs[1] = IEC958_AES1_CON_GENERAL;   in snd_pcm_create_iec958_consumer_default()
    38  cs[2] = IEC958_AES2_CON_SOURCE_UNSPEC | IEC958_AES2_CON_CHANNEL_UNSPEC;   in snd_pcm_create_iec958_consumer_default()
    39  cs[3] = IEC958_AES3_CON_CLOCK_1000PPM | IEC958_AES3_CON_FS_NOTID;   in snd_pcm_create_iec958_consumer_default()
    42  cs[4] = IEC958_AES4_CON_WORDLEN_NOTID;   in snd_pcm_create_iec958_consumer_default()
    49  u8 *cs, size_t len)   in fill_iec958_consumer() argument
    54  if ((cs[3] & IEC958_AES3_CON_FS) == IEC958_AES3_CON_FS_NOTID) {   in fill_iec958_consumer()
    83  cs[3] &= ~IEC958_AES3_CON_FS;   in fill_iec958_consumer()
    [all …]
|
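Note: snd_pcm_create_iec958_consumer_default() above fills a caller-supplied buffer with a default IEC958 (S/PDIF) consumer channel-status block: cs[0] copyright/emphasis, cs[1] category code, cs[2] source/channel, cs[3] clock accuracy and sample-rate field, and, when the buffer is large enough, cs[4] word length. A usage sketch (the exact success return value is not shown in the excerpt, so only the error case is checked):

    #include <linux/types.h>
    #include <sound/pcm_iec958.h>

    static int example_fill_channel_status(void)
    {
        u8 cs[5];
        int ret;

        /* Fill cs[] with the library defaults; negative means error. */
        ret = snd_pcm_create_iec958_consumer_default(cs, sizeof(cs));
        if (ret < 0)
            return ret;

        /* cs[] can now be programmed into the transmitter's CS registers. */
        return 0;
    }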
/linux-6.12.1/include/linux/mfd/syscon/ |
D | atmel-smc.h |
    18  #define ATMEL_SMC_SETUP(cs) (((cs) * 0x10))   argument
    19  #define ATMEL_HSMC_SETUP(layout, cs) \   argument
    20  ((layout)->timing_regs_offset + ((cs) * 0x14))
    21  #define ATMEL_SMC_PULSE(cs) (((cs) * 0x10) + 0x4)   argument
    22  #define ATMEL_HSMC_PULSE(layout, cs) \   argument
    23  ((layout)->timing_regs_offset + ((cs) * 0x14) + 0x4)
    24  #define ATMEL_SMC_CYCLE(cs) (((cs) * 0x10) + 0x8)   argument
    25  #define ATMEL_HSMC_CYCLE(layout, cs) \   argument
    26  ((layout)->timing_regs_offset + ((cs) * 0x14) + 0x8)
    32  #define ATMEL_SMC_MODE(cs) (((cs) * 0x10) + 0xc)   argument
    [all …]
|
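Note: the ATMEL_SMC_* macros above encode a fixed per-chip-select register stride — each CS owns a 0x10-byte window with SETUP at +0x0, PULSE at +0x4, CYCLE at +0x8 and MODE at +0xc (the HSMC variants use a 0x14 stride on top of a layout-specific base). A standalone sketch that just reproduces the non-HSMC arithmetic:

    #include <stdio.h>

    #define ATMEL_SMC_SETUP(cs)  ((cs) * 0x10)
    #define ATMEL_SMC_PULSE(cs)  (((cs) * 0x10) + 0x4)
    #define ATMEL_SMC_CYCLE(cs)  (((cs) * 0x10) + 0x8)
    #define ATMEL_SMC_MODE(cs)   (((cs) * 0x10) + 0xc)

    int main(void)
    {
        int cs = 2;

        /* Prints: CS2: SETUP=0x20 PULSE=0x24 CYCLE=0x28 MODE=0x2c */
        printf("CS%d: SETUP=0x%02x PULSE=0x%02x CYCLE=0x%02x MODE=0x%02x\n",
               cs, ATMEL_SMC_SETUP(cs), ATMEL_SMC_PULSE(cs),
               ATMEL_SMC_CYCLE(cs), ATMEL_SMC_MODE(cs));
        return 0;
    }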
/linux-6.12.1/arch/mips/bcm63xx/ |
D | cs.c |
    24  static int is_valid_cs(unsigned int cs)   in is_valid_cs() argument
    26  if (cs > 6)   in is_valid_cs()
    35  int bcm63xx_set_cs_base(unsigned int cs, u32 base, unsigned int size)   in bcm63xx_set_cs_base() argument
    40  if (!is_valid_cs(cs))   in bcm63xx_set_cs_base()
    55  bcm_mpi_writel(val, MPI_CSBASE_REG(cs));   in bcm63xx_set_cs_base()
    66  int bcm63xx_set_cs_timing(unsigned int cs, unsigned int wait,   in bcm63xx_set_cs_timing() argument
    72  if (!is_valid_cs(cs))   in bcm63xx_set_cs_timing()
    76  val = bcm_mpi_readl(MPI_CSCTL_REG(cs));   in bcm63xx_set_cs_timing()
    83  bcm_mpi_writel(val, MPI_CSCTL_REG(cs));   in bcm63xx_set_cs_timing()
    94  int bcm63xx_set_cs_param(unsigned int cs, u32 params)   in bcm63xx_set_cs_param() argument
    [all …]
|
/linux-6.12.1/include/linux/ |
D | clocksource.h |
    101  u64 (*read)(struct clocksource *cs);
    121  int (*enable)(struct clocksource *cs);
    122  void (*disable)(struct clocksource *cs);
    123  void (*suspend)(struct clocksource *cs);
    124  void (*resume)(struct clocksource *cs);
    125  void (*mark_unstable)(struct clocksource *cs);
    126  void (*tick_stable)(struct clocksource *cs);
    218  extern void clocksource_change_rating(struct clocksource *cs, int rating);
    222  extern void clocksource_mark_unstable(struct clocksource *cs);
    224  clocksource_start_suspend_timing(struct clocksource *cs, u64 start_cycles);
    [all …]
|
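Note: the clocksource.h matches list the per-clocksource callbacks (read, enable/disable, suspend/resume, mark_unstable, tick_stable). A hedged sketch of how a driver typically wires a few of them up and registers the source — read_my_counter() and MY_COUNTER_HZ are hypothetical, and a real driver would read an actual free-running hardware counter:

    #include <linux/clocksource.h>
    #include <linux/init.h>

    #define MY_COUNTER_HZ 24000000          /* assumed fixed input clock */

    static u64 read_my_counter(struct clocksource *cs)
    {
        return 0;                           /* stand-in for a counter read */
    }

    static struct clocksource my_clocksource = {
        .name   = "my-counter",
        .rating = 200,
        .read   = read_my_counter,
        .mask   = CLOCKSOURCE_MASK(32),
        .flags  = CLOCK_SOURCE_IS_CONTINUOUS,
    };

    static int __init my_clocksource_init(void)
    {
        /* mult/shift are computed from the frequency at registration. */
        return clocksource_register_hz(&my_clocksource, MY_COUNTER_HZ);
    }
    device_initcall(my_clocksource_init);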
/linux-6.12.1/net/ceph/ |
D | string_table.c |
    13  struct ceph_string *cs, *exist;   in ceph_find_or_create_string() local
    40  cs = kmalloc(sizeof(*cs) + len + 1, GFP_NOFS);   in ceph_find_or_create_string()
    41  if (!cs)   in ceph_find_or_create_string()
    44  kref_init(&cs->kref);   in ceph_find_or_create_string()
    45  cs->len = len;   in ceph_find_or_create_string()
    46  memcpy(cs->str, str, len);   in ceph_find_or_create_string()
    47  cs->str[len] = 0;   in ceph_find_or_create_string()
    68  rb_link_node(&cs->node, parent, p);   in ceph_find_or_create_string()
    69  rb_insert_color(&cs->node, &string_tree);   in ceph_find_or_create_string()
    80  kfree(cs);   in ceph_find_or_create_string()
    [all …]
|
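Note: ceph_find_or_create_string() above allocates the node and its NUL-terminated text in one kmalloc() (struct plus trailing string), initializes a kref, and links the node into an rbtree. A sketch of just the allocation half, with a hypothetical structure in place of struct ceph_string:

    #include <linux/kref.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    struct example_string {
        struct kref kref;
        size_t len;
        char str[];                     /* flexible array holds the text */
    };

    static struct example_string *example_string_alloc(const char *str,
                                                       size_t len)
    {
        struct example_string *cs;

        /* One allocation covers the header and the string plus NUL. */
        cs = kmalloc(sizeof(*cs) + len + 1, GFP_KERNEL);
        if (!cs)
            return NULL;

        kref_init(&cs->kref);           /* refcount starts at 1 */
        cs->len = len;
        memcpy(cs->str, str, len);
        cs->str[len] = 0;
        return cs;
    }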
/linux-6.12.1/fs/fuse/ |
D | dev.c |
    709  static void fuse_copy_init(struct fuse_copy_state *cs, int write,   in fuse_copy_init() argument
    712  memset(cs, 0, sizeof(*cs));   in fuse_copy_init()
    713  cs->write = write;   in fuse_copy_init()
    714  cs->iter = iter;   in fuse_copy_init()
    718  static void fuse_copy_finish(struct fuse_copy_state *cs)   in fuse_copy_finish() argument
    720  if (cs->currbuf) {   in fuse_copy_finish()
    721  struct pipe_buffer *buf = cs->currbuf;   in fuse_copy_finish()
    723  if (cs->write)   in fuse_copy_finish()
    724  buf->len = PAGE_SIZE - cs->len;   in fuse_copy_finish()
    725  cs->currbuf = NULL;   in fuse_copy_finish()
    [all …]
|
/linux-6.12.1/drivers/accel/habanalabs/common/ |
D | command_submission.c |
    250  void cs_get(struct hl_cs *cs)   in cs_get() argument
    252  kref_get(&cs->refcount);   in cs_get()
    255  static int cs_get_unless_zero(struct hl_cs *cs)   in cs_get_unless_zero() argument
    257  return kref_get_unless_zero(&cs->refcount);   in cs_get_unless_zero()
    260  static void cs_put(struct hl_cs *cs)   in cs_put() argument
    262  kref_put(&cs->refcount, cs_do_release);   in cs_put()
    277  bool cs_needs_completion(struct hl_cs *cs)   in cs_needs_completion() argument
    282  if (cs->staged_cs && !cs->staged_last)   in cs_needs_completion()
    288  bool cs_needs_timeout(struct hl_cs *cs)   in cs_needs_timeout() argument
    293  if (cs->staged_cs && !cs->staged_first)   in cs_needs_timeout()
    [all …]
|
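Note: cs_get()/cs_get_unless_zero()/cs_put() above are thin wrappers around the generic kref API; the object is freed from the release callback passed to kref_put(). A sketch of that lifetime pattern with hypothetical names (struct my_cs, my_cs_release()):

    #include <linux/container_of.h>
    #include <linux/kref.h>
    #include <linux/slab.h>

    struct my_cs {
        struct kref refcount;
        /* ... command-submission state ... */
    };

    static void my_cs_release(struct kref *ref)
    {
        struct my_cs *cs = container_of(ref, struct my_cs, refcount);

        kfree(cs);
    }

    static void my_cs_get(struct my_cs *cs)
    {
        kref_get(&cs->refcount);
    }

    /* Non-zero only if the object was still alive (refcount > 0). */
    static int my_cs_get_unless_zero(struct my_cs *cs)
    {
        return kref_get_unless_zero(&cs->refcount);
    }

    static void my_cs_put(struct my_cs *cs)
    {
        kref_put(&cs->refcount, my_cs_release);
    }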
/linux-6.12.1/drivers/memory/ |
D | stm32-fmc2-ebi.c |
    234  const struct stm32_fmc2_prop *prop, int cs);
    235  u32 (*calculate)(struct stm32_fmc2_ebi *ebi, int cs, u32 setup);
    238  int cs, u32 setup);
    243  int cs)   in stm32_fmc2_ebi_check_mux() argument
    248  ret = regmap_read(ebi->regmap, FMC2_BCR(cs), &bcr);   in stm32_fmc2_ebi_check_mux()
    260  int cs)   in stm32_fmc2_ebi_check_waitcfg() argument
    265  ret = regmap_read(ebi->regmap, FMC2_BCR(cs), &bcr);   in stm32_fmc2_ebi_check_waitcfg()
    277  int cs)   in stm32_fmc2_ebi_check_sync_trans() argument
    282  ret = regmap_read(ebi->regmap, FMC2_BCR(cs), &bcr);   in stm32_fmc2_ebi_check_sync_trans()
    294  int cs)   in stm32_fmc2_ebi_mp25_check_cclk() argument
    [all …]
|
D | omap-gpmc.c |
    278  void gpmc_cs_write_reg(int cs, int idx, u32 val)   in gpmc_cs_write_reg() argument
    282  reg_addr = gpmc_base + GPMC_CS0_OFFSET + (cs * GPMC_CS_SIZE) + idx;   in gpmc_cs_write_reg()
    286  static u32 gpmc_cs_read_reg(int cs, int idx)   in gpmc_cs_read_reg() argument
    290  reg_addr = gpmc_base + GPMC_CS0_OFFSET + (cs * GPMC_CS_SIZE) + idx;   in gpmc_cs_read_reg()
    313  static unsigned long gpmc_get_clk_period(int cs, enum gpmc_clk_domain cd)   in gpmc_get_clk_period() argument
    322  l = gpmc_cs_read_reg(cs, GPMC_CS_CONFIG1);   in gpmc_get_clk_period()
    335  static unsigned int gpmc_ns_to_clk_ticks(unsigned int time_ns, int cs,   in gpmc_ns_to_clk_ticks() argument
    341  tick_ps = gpmc_get_clk_period(cs, cd);   in gpmc_ns_to_clk_ticks()
    361  static unsigned int gpmc_clk_ticks_to_ns(unsigned int ticks, int cs,   in gpmc_clk_ticks_to_ns() argument
    364  return ticks * gpmc_get_clk_period(cs, cd) / 1000;   in gpmc_clk_ticks_to_ns()
    [all …]
|
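Note: gpmc_cs_read_reg()/gpmc_cs_write_reg() above address each chip select's registers as a fixed-size window: base + GPMC_CS0_OFFSET + cs * GPMC_CS_SIZE + idx. A sketch of the read side; the two constants and the mapped base are stand-ins, not values taken from the excerpt:

    #include <linux/io.h>

    #define EX_GPMC_CS0_OFFSET  0x60   /* assumed; check the driver/TRM */
    #define EX_GPMC_CS_SIZE     0x30   /* assumed per-CS register stride */

    static void __iomem *gpmc_base;    /* ioremap()ed elsewhere */

    static u32 example_cs_read_reg(int cs, int idx)
    {
        return readl_relaxed(gpmc_base + EX_GPMC_CS0_OFFSET +
                             (cs * EX_GPMC_CS_SIZE) + idx);
    }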
/linux-6.12.1/drivers/net/slip/ |
D | slhc.c |
    232  struct cstate *cs = lcs->next;   in slhc_compress() local
    297  if( ip->saddr == cs->cs_ip.saddr   in slhc_compress()
    298  && ip->daddr == cs->cs_ip.daddr   in slhc_compress()
    299  && th->source == cs->cs_tcp.source   in slhc_compress()
    300  && th->dest == cs->cs_tcp.dest)   in slhc_compress()
    304  if ( cs == ocs )   in slhc_compress()
    306  lcs = cs;   in slhc_compress()
    307  cs = cs->next;   in slhc_compress()
    329  } else if (cs == ocs) {   in slhc_compress()
    334  lcs->next = cs->next;   in slhc_compress()
    [all …]
|