
Searched refs:ms (Results 1 – 25 of 736) sorted by relevance


/linux-6.12.1/drivers/spi/
spi-mpc52xx.c
87 int (*state)(int irq, struct mpc52xx_spi *ms, u8 status, u8 data);
100 static void mpc52xx_spi_chipsel(struct mpc52xx_spi *ms, int value) in mpc52xx_spi_chipsel() argument
104 if (ms->gpio_cs_count > 0) { in mpc52xx_spi_chipsel()
105 cs = spi_get_chipselect(ms->message->spi, 0); in mpc52xx_spi_chipsel()
106 gpiod_set_value(ms->gpio_cs[cs], value); in mpc52xx_spi_chipsel()
108 out_8(ms->regs + SPI_PORTDATA, value ? 0 : 0x08); in mpc52xx_spi_chipsel()
117 static void mpc52xx_spi_start_transfer(struct mpc52xx_spi *ms) in mpc52xx_spi_start_transfer() argument
119 ms->rx_buf = ms->transfer->rx_buf; in mpc52xx_spi_start_transfer()
120 ms->tx_buf = ms->transfer->tx_buf; in mpc52xx_spi_start_transfer()
121 ms->len = ms->transfer->len; in mpc52xx_spi_start_transfer()
[all …]
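
The spi-mpc52xx driver above runs its transfer engine as a state machine: state on line 87 is a function pointer that each handler reassigns to select the next step, while chipselect and start_transfer set up the buffers it works on. A minimal userspace sketch of that function-pointer state-machine pattern, with purely illustrative names (this is not the driver's API):

    #include <stdio.h>

    struct machine;                                     /* forward declaration */
    typedef int (*state_fn)(struct machine *m, int event);

    struct machine {
        state_fn state;                                 /* current handler, like ms->state */
        int      count;
    };

    static int state_done(struct machine *m, int event)
    {
        (void)event;
        printf("done after %d events\n", m->count);
        return 0;                                       /* 0: stop stepping */
    }

    static int state_running(struct machine *m, int event)
    {
        m->count++;
        if (event == 0)
            m->state = state_done;                      /* each handler picks its successor */
        return 1;                                       /* 1: keep stepping */
    }

    int main(void)
    {
        struct machine m = { .state = state_running, .count = 0 };
        int events[] = { 5, 7, 0, 3 };
        for (unsigned i = 0; i < sizeof(events) / sizeof(events[0]); i++)
            if (!m.state(&m, events[i]))                /* dispatch through the pointer */
                break;
        return 0;
    }
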
/linux-6.12.1/drivers/scsi/
mesh.c
186 static void mesh_done(struct mesh_state *ms, int start_next);
187 static void mesh_interrupt(struct mesh_state *ms);
188 static void cmd_complete(struct mesh_state *ms);
189 static void set_dma_cmds(struct mesh_state *ms, struct scsi_cmnd *cmd);
190 static void halt_dma(struct mesh_state *ms);
191 static void phase_mismatch(struct mesh_state *ms);
213 static void dlog(struct mesh_state *ms, char *fmt, int a) in dlog() argument
215 struct mesh_target *tp = &ms->tgts[ms->conn_tgt]; in dlog()
219 slp = &ms->log[ms->log_ix]; in dlog()
222 tlp->phase = (ms->msgphase << 4) + ms->phase; in dlog()
[all …]
/linux-6.12.1/drivers/md/
dm-raid1.c
51 struct mirror_set *ms; member
99 struct mirror_set *ms = context; in wakeup_mirrord() local
101 queue_work(ms->kmirrord_wq, &ms->kmirrord_work); in wakeup_mirrord()
106 struct mirror_set *ms = from_timer(ms, t, timer); in delayed_wake_fn() local
108 clear_bit(0, &ms->timer_pending); in delayed_wake_fn()
109 wakeup_mirrord(ms); in delayed_wake_fn()
112 static void delayed_wake(struct mirror_set *ms) in delayed_wake() argument
114 if (test_and_set_bit(0, &ms->timer_pending)) in delayed_wake()
117 ms->timer.expires = jiffies + HZ / 5; in delayed_wake()
118 add_timer(&ms->timer); in delayed_wake()
[all …]
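
delayed_wake() above coalesces wakeup requests: test_and_set_bit() arms the timer only if it is not already pending, and the callback (delayed_wake_fn, roughly 200 ms later at HZ/5) clears the bit and queues the work, so any number of requests inside the window produce one wakeup. A rough userspace sketch of that debounce idea, using a C11 atomic flag in place of the kernel bitops and firing the "timer" by hand (all names here are illustrative, not the dm-raid1 API):

    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_flag timer_pending = ATOMIC_FLAG_INIT;

    static void wakeup_worker(void)              /* stands in for wakeup_mirrord() */
    {
        printf("worker woken\n");
    }

    static void timer_fires(void)                /* stands in for delayed_wake_fn() */
    {
        atomic_flag_clear(&timer_pending);       /* like clear_bit(0, &ms->timer_pending) */
        wakeup_worker();
    }

    static void delayed_wake(void)
    {
        /* like test_and_set_bit(): if a timer is already armed, do nothing */
        if (atomic_flag_test_and_set(&timer_pending))
            return;
        /* the driver would add_timer() with expires = jiffies + HZ / 5 here;
         * in this sketch the caller invokes timer_fires() later instead */
        printf("timer armed\n");
    }

    int main(void)
    {
        delayed_wake();   /* arms the timer */
        delayed_wake();   /* coalesced: already pending */
        timer_fires();    /* one wakeup for both requests */
        return 0;
    }
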
/linux-6.12.1/drivers/accel/ivpu/
ivpu_ms.c
22 struct ivpu_ms_instance *ms; in get_instance_by_mask() local
26 list_for_each_entry(ms, &file_priv->ms_instance_list, ms_instance_node) in get_instance_by_mask()
27 if (ms->mask == metric_mask) in get_instance_by_mask()
28 return ms; in get_instance_by_mask()
38 struct ivpu_ms_instance *ms; in ivpu_ms_start_ioctl() local
55 ms = kzalloc(sizeof(*ms), GFP_KERNEL); in ivpu_ms_start_ioctl()
56 if (!ms) { in ivpu_ms_start_ioctl()
61 ms->mask = args->metric_group_mask; in ivpu_ms_start_ioctl()
63 ret = ivpu_jsm_metric_streamer_info(vdev, ms->mask, 0, 0, &sample_size, NULL); in ivpu_ms_start_ioctl()
69 ms->bo = ivpu_bo_create_global(vdev, PAGE_ALIGN(single_buff_size * MS_NUM_BUFFERS), in ivpu_ms_start_ioctl()
[all …]
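
get_instance_by_mask() above is a linear scan of file_priv->ms_instance_list keyed on the metric-group mask, and ivpu_ms_start_ioctl() kzalloc()s a fresh instance when the mask is not found. A small lookup-or-create sketch of the same shape in plain C (hypothetical types and names, not the ivpu API):

    #include <stdint.h>
    #include <stdlib.h>
    #include <stdio.h>

    struct ms_instance {
        uint32_t            mask;     /* metric group mask this instance serves */
        struct ms_instance *next;
    };

    /* scan the list for a matching mask, as in get_instance_by_mask() */
    static struct ms_instance *find_by_mask(struct ms_instance *head, uint32_t mask)
    {
        for (struct ms_instance *i = head; i; i = i->next)
            if (i->mask == mask)
                return i;
        return NULL;
    }

    /* create and link an instance if missing, roughly what the start ioctl does */
    static struct ms_instance *get_or_create(struct ms_instance **head, uint32_t mask)
    {
        struct ms_instance *i = find_by_mask(*head, mask);
        if (i)
            return i;
        i = calloc(1, sizeof(*i));    /* calloc stands in for kzalloc(..., GFP_KERNEL) */
        if (!i)
            return NULL;
        i->mask = mask;
        i->next = *head;
        *head   = i;
        return i;
    }

    int main(void)
    {
        struct ms_instance *list = NULL;
        get_or_create(&list, 0x3);
        printf("found: %d\n", find_by_mask(list, 0x3) != NULL);
        return 0;
    }
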
/linux-6.12.1/drivers/gpu/drm/amd/display/dc/dml2/
display_mode_core.c
6220 CalculatePrefetchSchedule_params->DSCDelay = mode_lib->ms.DSCDelayPerState[k]; in set_calculate_prefetch_schedule_params()
6221 …CalculatePrefetchSchedule_params->EnhancedPrefetchScheduleAccelerationFinal = mode_lib->ms.policy.… in set_calculate_prefetch_schedule_params()
6222 …arams->DPPCLKDelaySubtotalPlusCNVCFormater = mode_lib->ms.ip.dppclk_delay_subtotal + mode_lib->ms.… in set_calculate_prefetch_schedule_params()
6223 CalculatePrefetchSchedule_params->DPPCLKDelaySCL = mode_lib->ms.ip.dppclk_delay_scl; in set_calculate_prefetch_schedule_params()
6224 … CalculatePrefetchSchedule_params->DPPCLKDelaySCLLBOnly = mode_lib->ms.ip.dppclk_delay_scl_lb_only; in set_calculate_prefetch_schedule_params()
6225 …CalculatePrefetchSchedule_params->DPPCLKDelayCNVCCursor = mode_lib->ms.ip.dppclk_delay_cnvc_cursor; in set_calculate_prefetch_schedule_params()
6226 CalculatePrefetchSchedule_params->DISPCLKDelaySubtotal = mode_lib->ms.ip.dispclk_delay_subtotal; in set_calculate_prefetch_schedule_params()
6227 …edule_params->DPP_RECOUT_WIDTH = (dml_uint_t)(mode_lib->ms.SwathWidthYThisState[k] / mode_lib->ms.… in set_calculate_prefetch_schedule_params()
6228 …CalculatePrefetchSchedule_params->OutputFormat = mode_lib->ms.cache_display_cfg.output.OutputForma… in set_calculate_prefetch_schedule_params()
6229 …CalculatePrefetchSchedule_params->MaxInterDCNTileRepeaters = mode_lib->ms.ip.max_inter_dcn_tile_re… in set_calculate_prefetch_schedule_params()
[all …]
/linux-6.12.1/Documentation/fb/
viafb.modes
15 # Sync Width 3.813 us 0.064 ms
17 # Front Porch 0.636 us 0.318 ms
19 # Back Porch 1.907 us 1.048 ms
21 # Active Time 25.422 us 15.253 ms
23 # Blank Time 6.356 us 1.430 ms
40 # Sync Width 2.032 us 0.080 ms
42 # Front Porch 0.508 us 0.027 ms
44 # Back Porch 3.810 us 0.427 ms
46 # Active Time 20.317 us 12.800 ms
48 # Blank Time 6.349 us 0.533 ms
[all …]
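
Reading the two columns in these viafb mode comments: the us figures are per-line (horizontal) intervals and the ms figures per-frame (vertical) intervals. For the first mode above, 25.422 us active plus 6.356 us blank gives a 31.778 us line period (about 31.47 kHz), and 15.253 ms active plus 1.430 ms blank gives a 16.683 ms frame period (about 59.94 Hz), consistent with the standard 640x480@60 timing; the 0.064 ms vertical sync likewise works out to two 31.778 us lines.
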
/linux-6.12.1/Documentation/translations/zh_CN/scheduler/
sched-bwc.rst
78 cpu.cfs_period_us=100ms
86 The minimum quota is 1ms. There is also an upper bound of 1s on the period length. When bandwidth limits are used in a hierarchical fashion, additional restrictions apply; these are …
104 /proc/sys/kernel/sched_cfs_bandwidth_slice_us (default=5ms)
144 All but 1ms of the slice may be returned to the global pool. This is configured at compile time by the min_cfs_rq_runtime variable. This …
154 i.e. the amount of unused slice on each cpu the task group is running on (typically at most 1ms per cpu, or as defined by min_cfs_rq_runtime …
157 … a longer time window. This also limits the burst ability to no more than 1ms per cpu. For highly-threaded applications with small quota limits on high core count machines this …
164 … may use up to 1ms of additional quota in some periods, thereby preventing cpu-bound applications from fully using their quota by that same amount …
172 If the period is 250ms and the quota is also 250ms, the group will get 1 CPU worth of runtime every 250ms.
174 # echo 250000 > cpu.cfs_quota_us /* quota = 250ms */
175 # echo 250000 > cpu.cfs_period_us /* period = 250ms */
[all …]
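
Worked out, the quota/period pair is simply a rate: with cpu.cfs_quota_us = 250000 and cpu.cfs_period_us = 250000 as in the snippet, the group may run for 250 ms of CPU time in every 250 ms window, i.e. one full CPU; a quota of 500000 us against a 100000 us period would instead permit 5 CPUs' worth of runtime per period.
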
/linux-6.12.1/lib/zstd/compress/
zstd_lazy.h
25 U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip);
26 void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip);
28 void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip);
33 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
36 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
39 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
42 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
45 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
48 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
51 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
[all …]
zstd_lazy.c
20 ZSTD_updateDUBT(ZSTD_matchState_t* ms, in ZSTD_updateDUBT() argument
24 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_updateDUBT()
25 U32* const hashTable = ms->hashTable; in ZSTD_updateDUBT()
28 U32* const bt = ms->chainTable; in ZSTD_updateDUBT()
32 const BYTE* const base = ms->window.base; in ZSTD_updateDUBT()
34 U32 idx = ms->nextToUpdate; in ZSTD_updateDUBT()
38 idx, target, ms->window.dictLimit); in ZSTD_updateDUBT()
42 assert(idx >= ms->window.dictLimit); /* condition for valid base+idx */ in ZSTD_updateDUBT()
55 ms->nextToUpdate = target; in ZSTD_updateDUBT()
64 ZSTD_insertDUBT1(const ZSTD_matchState_t* ms, in ZSTD_insertDUBT1() argument
[all …]
zstd_fast.c
15 void ZSTD_fillHashTable(ZSTD_matchState_t* ms, in ZSTD_fillHashTable() argument
19 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_fillHashTable()
20 U32* const hashTable = ms->hashTable; in ZSTD_fillHashTable()
23 const BYTE* const base = ms->window.base; in ZSTD_fillHashTable()
24 const BYTE* ip = base + ms->nextToUpdate; in ZSTD_fillHashTable()
94 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_fast_noDict_generic() argument
98 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_compressBlock_fast_noDict_generic()
99 U32* const hashTable = ms->hashTable; in ZSTD_compressBlock_fast_noDict_generic()
103 const BYTE* const base = ms->window.base; in ZSTD_compressBlock_fast_noDict_generic()
106 const U32 prefixStartIndex = ZSTD_getLowestPrefixIndex(ms, endIndex, cParams->windowLog); in ZSTD_compressBlock_fast_noDict_generic()
[all …]
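
ZSTD_fillHashTable() above walks the input from ms->nextToUpdate, hashing a few bytes at each position and recording that position in ms->hashTable so later data can find earlier occurrences of the same bytes. A toy version of the idea in plain C (the real zstd code uses different hash functions, strides and index handling; everything below is illustrative only):

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define HASH_LOG   12                      /* table of 1 << 12 entries */
    #define TABLE_SIZE (1u << HASH_LOG)
    #define MIN_MATCH  4                       /* hash 4 bytes at a time */

    /* multiplicative hash of the 4 bytes at p, reduced to HASH_LOG bits */
    static uint32_t hash4(const uint8_t *p)
    {
        uint32_t v;
        memcpy(&v, p, sizeof(v));
        return (v * 2654435761u) >> (32 - HASH_LOG);
    }

    /* record, for each position, where that 4-byte sequence was last seen */
    static void fill_hash_table(uint32_t table[TABLE_SIZE],
                                const uint8_t *base, size_t len)
    {
        for (size_t pos = 0; pos + MIN_MATCH <= len; pos++)
            table[hash4(base + pos)] = (uint32_t)pos;   /* later entries overwrite earlier */
    }

    int main(void)
    {
        static uint32_t table[TABLE_SIZE];
        const uint8_t *text = (const uint8_t *)"abcdabcd";
        fill_hash_table(table, text, 8);
        /* the second "abcd" (position 4) overwrote the first (position 0) */
        printf("last position of \"abcd\": %u\n", table[hash4(text)]);
        return 0;
    }
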
zstd_opt.c
382 static U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms, in ZSTD_insertAndFindFirstIndexHash3() argument
386 U32* const hashTable3 = ms->hashTable3; in ZSTD_insertAndFindFirstIndexHash3()
387 U32 const hashLog3 = ms->hashLog3; in ZSTD_insertAndFindFirstIndexHash3()
388 const BYTE* const base = ms->window.base; in ZSTD_insertAndFindFirstIndexHash3()
412 const ZSTD_matchState_t* ms, in ZSTD_insertBt1() argument
417 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_insertBt1()
418 U32* const hashTable = ms->hashTable; in ZSTD_insertBt1()
421 U32* const bt = ms->chainTable; in ZSTD_insertBt1()
426 const BYTE* const base = ms->window.base; in ZSTD_insertBt1()
427 const BYTE* const dictBase = ms->window.dictBase; in ZSTD_insertBt1()
[all …]
zstd_double_fast.c
15 void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms, in ZSTD_fillDoubleHashTable() argument
18 const ZSTD_compressionParameters* const cParams = &ms->cParams; in ZSTD_fillDoubleHashTable()
19 U32* const hashLarge = ms->hashTable; in ZSTD_fillDoubleHashTable()
22 U32* const hashSmall = ms->chainTable; in ZSTD_fillDoubleHashTable()
24 const BYTE* const base = ms->window.base; in ZSTD_fillDoubleHashTable()
25 const BYTE* ip = base + ms->nextToUpdate; in ZSTD_fillDoubleHashTable()
52 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], in ZSTD_compressBlock_doubleFast_noDict_generic() argument
55 ZSTD_compressionParameters const* cParams = &ms->cParams; in ZSTD_compressBlock_doubleFast_noDict_generic()
56 U32* const hashLong = ms->hashTable; in ZSTD_compressBlock_doubleFast_noDict_generic()
58 U32* const hashSmall = ms->chainTable; in ZSTD_compressBlock_doubleFast_noDict_generic()
[all …]
zstd_opt.h
18 void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend);
21 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
24 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
27 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
32 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
35 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
39 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
42 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
/linux-6.12.1/kernel/
module_signature.c
21 int mod_check_sig(const struct module_signature *ms, size_t file_len, in mod_check_sig() argument
24 if (be32_to_cpu(ms->sig_len) >= file_len - sizeof(*ms)) in mod_check_sig()
27 if (ms->id_type != PKEY_ID_PKCS7) { in mod_check_sig()
33 if (ms->algo != 0 || in mod_check_sig()
34 ms->hash != 0 || in mod_check_sig()
35 ms->signer_len != 0 || in mod_check_sig()
36 ms->key_id_len != 0 || in mod_check_sig()
37 ms->__pad[0] != 0 || in mod_check_sig()
38 ms->__pad[1] != 0 || in mod_check_sig()
39 ms->__pad[2] != 0) { in mod_check_sig()
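
mod_check_sig() above validates the module_signature trailer appended to a signed module: sig_len must leave room for the rest of the file, the id type must be PKCS#7, and the remaining legacy fields must all be zero. A simplified userspace sketch of those sanity checks, with an abbreviated struct layout and a placeholder id-type constant (not the kernel definitions):

    #include <stdint.h>
    #include <stddef.h>
    #include <stdbool.h>
    #include <string.h>
    #include <stdio.h>

    #define ID_TYPE_PKCS7 2                      /* placeholder for PKEY_ID_PKCS7 */

    struct sig_trailer {                         /* simplified module_signature */
        uint8_t  algo, hash, id_type;
        uint8_t  signer_len, key_id_len;
        uint8_t  pad[3];
        uint32_t sig_len_be;                     /* big-endian length of the signature */
    };

    static uint32_t be32_to_host(uint32_t v)
    {
        const uint8_t *b = (const uint8_t *)&v;
        return ((uint32_t)b[0] << 24) | ((uint32_t)b[1] << 16) |
               ((uint32_t)b[2] << 8)  |  (uint32_t)b[3];
    }

    /* returns true if the trailer describes a plausible PKCS#7 signature */
    static bool check_sig(const struct sig_trailer *t, size_t file_len)
    {
        if (be32_to_host(t->sig_len_be) >= file_len - sizeof(*t))
            return false;                        /* signature cannot cover the whole file */
        if (t->id_type != ID_TYPE_PKCS7)
            return false;                        /* only PKCS#7 signatures accepted */
        if (t->algo || t->hash || t->signer_len || t->key_id_len ||
            t->pad[0] || t->pad[1] || t->pad[2])
            return false;                        /* legacy fields must all be zero */
        return true;
    }

    int main(void)
    {
        struct sig_trailer t = { .id_type = ID_TYPE_PKCS7 };
        uint8_t be_len[4] = { 0, 0, 1, 0 };      /* 256 in big-endian */
        memcpy(&t.sig_len_be, be_len, 4);
        printf("%s\n", check_sig(&t, 4096) ? "ok" : "rejected");
        return 0;
    }
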
/linux-6.12.1/fs/dlm/
requestqueue.c
34 const struct dlm_message *ms) in dlm_add_requestqueue() argument
37 int length = le16_to_cpu(ms->m_header.h_length) - in dlm_add_requestqueue()
48 memcpy(&e->request, ms, sizeof(*ms)); in dlm_add_requestqueue()
49 memcpy(&e->request.m_extra, ms->m_extra, length); in dlm_add_requestqueue()
68 struct dlm_message *ms; in dlm_process_requestqueue() local
80 ms = &e->request; in dlm_process_requestqueue()
84 le32_to_cpu(ms->m_type), in dlm_process_requestqueue()
85 le32_to_cpu(ms->m_header.h_nodeid), in dlm_process_requestqueue()
86 le32_to_cpu(ms->m_lkid), le32_to_cpu(ms->m_remid), in dlm_process_requestqueue()
87 from_dlm_errno(le32_to_cpu(ms->m_result)), in dlm_process_requestqueue()
[all …]
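
dlm_add_requestqueue() above copies a received message into a queue entry in two steps: the fixed struct dlm_message first, then the variable-length m_extra payload whose size is derived from h_length. A condensed sketch of that header-plus-flexible-payload copy (simplified types, not the dlm structures):

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>
    #include <stdio.h>

    struct msg_hdr {                      /* fixed-size part of the message */
        uint16_t h_length;                /* total length: header + extra */
        uint16_t type;
    };

    struct message {                      /* wire format: header then variable payload */
        struct msg_hdr hdr;
        char           extra[];           /* like m_extra */
    };

    struct queue_entry {                  /* queued copy, sized to fit this message */
        struct queue_entry *next;
        struct msg_hdr      hdr;
        char                extra[];
    };

    static struct queue_entry *add_to_queue(const struct message *ms)
    {
        size_t extra_len = ms->hdr.h_length - sizeof(ms->hdr);
        struct queue_entry *e = malloc(sizeof(*e) + extra_len);
        if (!e)
            return NULL;
        e->next = NULL;
        memcpy(&e->hdr, &ms->hdr, sizeof(e->hdr));      /* fixed part first */
        memcpy(e->extra, ms->extra, extra_len);         /* then the payload */
        return e;
    }

    int main(void)
    {
        const char payload[] = "lock name";
        struct message *ms = malloc(sizeof(*ms) + sizeof(payload));
        ms->hdr.h_length = (uint16_t)(sizeof(ms->hdr) + sizeof(payload));
        ms->hdr.type = 1;
        memcpy(ms->extra, payload, sizeof(payload));

        struct queue_entry *e = add_to_queue(ms);
        if (e)
            printf("queued payload: %s\n", e->extra);
        free(ms);
        free(e);
        return 0;
    }
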
lock.c
89 const struct dlm_message *ms, bool local);
90 static int receive_extralen(const struct dlm_message *ms);
1764 const struct dlm_message *ms) in _remove_from_waiters() argument
1802 (lkb->lkb_wait_type == DLM_MSG_CONVERT) && ms && !ms->m_result && in _remove_from_waiters()
1821 lkb->lkb_id, ms ? le32_to_cpu(ms->m_header.h_nodeid) : 0, in _remove_from_waiters()
1868 const struct dlm_message *ms, bool local) in remove_from_waiters_ms() argument
1878 error = _remove_from_waiters(lkb, le32_to_cpu(ms->m_type), ms); in remove_from_waiters_ms()
1967 const struct dlm_message *ms) in set_lvb_lock_pc() argument
1979 int len = receive_extralen(ms); in set_lvb_lock_pc()
1982 memcpy(lkb->lkb_lvbptr, ms->m_extra, len); in set_lvb_lock_pc()
[all …]
/linux-6.12.1/mm/
sparse.c
164 static void __section_mark_present(struct mem_section *ms, in __section_mark_present() argument
170 ms->section_mem_map |= SECTION_MARKED_PRESENT; in __section_mark_present()
199 struct mem_section *ms; in subsection_map_init() local
204 ms = __nr_to_section(nr); in subsection_map_init()
205 subsection_mask_set(ms->usage->subsection_map, pfn, pfns); in subsection_map_init()
230 struct mem_section *ms; in memory_present() local
235 ms = __nr_to_section(section_nr); in memory_present()
236 if (!ms->section_mem_map) { in memory_present()
237 ms->section_mem_map = sparse_encode_early_nid(nid) | in memory_present()
239 __section_mark_present(ms, section_nr); in memory_present()
[all …]
/linux-6.12.1/include/asm-generic/
mmiowb.h
37 struct mmiowb_state *ms = __mmiowb_state(); in mmiowb_set_pending() local
39 if (likely(ms->nesting_count)) in mmiowb_set_pending()
40 ms->mmiowb_pending = ms->nesting_count; in mmiowb_set_pending()
45 struct mmiowb_state *ms = __mmiowb_state(); in mmiowb_spin_lock() local
46 ms->nesting_count++; in mmiowb_spin_lock()
51 struct mmiowb_state *ms = __mmiowb_state(); in mmiowb_spin_unlock() local
53 if (unlikely(ms->mmiowb_pending)) { in mmiowb_spin_unlock()
54 ms->mmiowb_pending = 0; in mmiowb_spin_unlock()
58 ms->nesting_count--; in mmiowb_spin_unlock()
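
The mmiowb helpers above keep two counters: spin_lock bumps nesting_count, an MMIO write performed inside a lock records mmiowb_pending, and the next spin_unlock issues one ordering barrier and clears the flag. A single-threaded sketch of that bookkeeping, with printf standing in for the mmiowb() barrier (global state here, whereas the kernel keeps it per CPU; names mirror the header but this is not kernel code):

    #include <stdio.h>

    struct mmiowb_state {
        unsigned int nesting_count;   /* how many spinlocks are currently held */
        unsigned int mmiowb_pending;  /* non-zero if an MMIO write needs a barrier */
    };

    static struct mmiowb_state state;

    static void io_write(int value)
    {
        printf("MMIO write %d\n", value);
        if (state.nesting_count)                      /* mmiowb_set_pending() */
            state.mmiowb_pending = state.nesting_count;
    }

    static void spin_lock(void)
    {
        state.nesting_count++;                        /* mmiowb_spin_lock() */
    }

    static void spin_unlock(void)
    {
        if (state.mmiowb_pending) {                   /* mmiowb_spin_unlock() */
            state.mmiowb_pending = 0;
            printf("mmiowb barrier\n");
        }
        state.nesting_count--;
    }

    int main(void)
    {
        spin_lock();
        io_write(1);
        io_write(2);       /* still only one barrier, issued at unlock time */
        spin_unlock();
        return 0;
    }
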
/linux-6.12.1/include/trace/events/
dlm.h
393 TP_PROTO(uint32_t dst, uint32_t h_seq, const struct dlm_message *ms,
396 TP_ARGS(dst, h_seq, ms, name, namelen),
425 le16_to_cpu(ms->m_header.h_length) - sizeof(*ms))
432 __entry->h_version = le32_to_cpu(ms->m_header.h_version);
433 __entry->h_lockspace = le32_to_cpu(ms->m_header.u.h_lockspace);
434 __entry->h_nodeid = le32_to_cpu(ms->m_header.h_nodeid);
435 __entry->h_length = le16_to_cpu(ms->m_header.h_length);
436 __entry->h_cmd = ms->m_header.h_cmd;
437 __entry->m_type = le32_to_cpu(ms->m_type);
438 __entry->m_nodeid = le32_to_cpu(ms->m_nodeid);
[all …]
/linux-6.12.1/drivers/hid/
hid-microsoft.c
62 struct ms_data *ms = hid_get_drvdata(hdev); in ms_report_fixup() local
63 unsigned long quirks = ms->quirks; in ms_report_fixup()
186 struct ms_data *ms = hid_get_drvdata(hdev); in ms_input_mapping() local
187 unsigned long quirks = ms->quirks; in ms_input_mapping()
213 struct ms_data *ms = hid_get_drvdata(hdev); in ms_input_mapped() local
214 unsigned long quirks = ms->quirks; in ms_input_mapped()
225 struct ms_data *ms = hid_get_drvdata(hdev); in ms_event() local
226 unsigned long quirks = ms->quirks; in ms_event()
283 struct ms_data *ms = container_of(work, struct ms_data, ff_worker); in ms_ff_worker() local
284 struct hid_device *hdev = ms->hdev; in ms_ff_worker()
[all …]
/linux-6.12.1/drivers/platform/x86/amd/pmf/
cnqf.c
273 struct cnqf_mode_settings *ms; in amd_pmf_update_mode_set() local
276 ms = &config_store.mode_set[idx][CNQF_MODE_QUIET]; in amd_pmf_update_mode_set()
277 ms->power_floor = out->ps[APMF_CNQF_QUIET].pfloor; in amd_pmf_update_mode_set()
278 ms->power_control.fppt = out->ps[APMF_CNQF_QUIET].fppt; in amd_pmf_update_mode_set()
279 ms->power_control.sppt = out->ps[APMF_CNQF_QUIET].sppt; in amd_pmf_update_mode_set()
280 ms->power_control.sppt_apu_only = out->ps[APMF_CNQF_QUIET].sppt_apu_only; in amd_pmf_update_mode_set()
281 ms->power_control.spl = out->ps[APMF_CNQF_QUIET].spl; in amd_pmf_update_mode_set()
282 ms->power_control.stt_min = out->ps[APMF_CNQF_QUIET].stt_min_limit; in amd_pmf_update_mode_set()
283 ms->power_control.stt_skin_temp[STT_TEMP_APU] = in amd_pmf_update_mode_set()
285 ms->power_control.stt_skin_temp[STT_TEMP_HS2] = in amd_pmf_update_mode_set()
[all …]
/linux-6.12.1/tools/perf/arch/loongarch/annotate/
instructions.c
8 static int loongarch_call__parse(struct arch *arch, struct ins_operands *ops, struct map_symbol *ms, in loongarch_call__parse() argument
12 struct map *map = ms->map; in loongarch_call__parse()
14 .ms = { .map = map, }, in loongarch_call__parse()
43 if (maps__find_ams(ms->maps, &target) == 0 && in loongarch_call__parse()
44 map__rip_2objdump(target.ms.map, map__map_ip(target.ms.map, target.addr)) == ops->target.addr) in loongarch_call__parse()
45 ops->target.sym = target.ms.sym; in loongarch_call__parse()
55 static int loongarch_jump__parse(struct arch *arch, struct ins_operands *ops, struct map_symbol *ms, in loongarch_jump__parse() argument
58 struct map *map = ms->map; in loongarch_jump__parse()
59 struct symbol *sym = ms->sym; in loongarch_jump__parse()
61 .ms = { .map = map, }, in loongarch_jump__parse()
[all …]
/linux-6.12.1/drivers/gpu/drm/amd/display/dc/dml2/dml21/src/dml2_core/
dml2_core_shared.c
766 memset(&mode_lib->ms, 0, sizeof(struct dml2_core_internal_mode_support)); in dml2_core_shared_mode_support()
768 mode_lib->ms.num_active_planes = display_cfg->num_planes; in dml2_core_shared_mode_support()
771 mode_lib->ms.state_idx = in_out_params->min_clk_index; in dml2_core_shared_mode_support()
772 mode_lib->ms.SOCCLK = ((double)mode_lib->soc.clk_table.socclk.clk_values_khz[0] / 1000); in dml2_core_shared_mode_support()
773 …mode_lib->ms.DCFCLK = ((double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].… in dml2_core_shared_mode_support()
774 …mode_lib->ms.FabricClock = ((double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_in… in dml2_core_shared_mode_support()
775 mode_lib->ms.MaxDCFCLK = (double)min_clk_table->max_clocks_khz.dcfclk / 1000; in dml2_core_shared_mode_support()
776 mode_lib->ms.MaxFabricClock = (double)min_clk_table->max_clocks_khz.fclk / 1000; in dml2_core_shared_mode_support()
777 mode_lib->ms.max_dispclk_freq_mhz = (double)min_clk_table->max_clocks_khz.dispclk / 1000; in dml2_core_shared_mode_support()
778 mode_lib->ms.max_dscclk_freq_mhz = (double)min_clk_table->max_clocks_khz.dscclk / 1000; in dml2_core_shared_mode_support()
[all …]
dml2_core_dcn4_calcs.c
282 dml_get_per_plane_var_func(num_mcaches_plane0, unsigned int, mode_lib->ms.num_mcaches_l);
283 dml_get_per_plane_var_func(mcache_row_bytes_plane0, unsigned int, mode_lib->ms.mcache_row_bytes_l);
284 dml_get_per_plane_var_func(mcache_shift_granularity_plane0, unsigned int, mode_lib->ms.mcache_shift…
285 dml_get_per_plane_var_func(num_mcaches_plane1, unsigned int, mode_lib->ms.num_mcaches_c);
286 dml_get_per_plane_var_func(mcache_row_bytes_plane1, unsigned int, mode_lib->ms.mcache_row_bytes_c);
287 dml_get_per_plane_var_func(mcache_shift_granularity_plane1, unsigned int, mode_lib->ms.mcache_shift…
288 dml_get_per_plane_var_func(mall_comb_mcache_l, unsigned int, mode_lib->ms.mall_comb_mcache_l);
289 dml_get_per_plane_var_func(mall_comb_mcache_c, unsigned int, mode_lib->ms.mall_comb_mcache_c);
290 dml_get_per_plane_var_func(lc_comb_mcache, unsigned int, mode_lib->ms.lc_comb_mcache);
291 dml_get_per_plane_var_func(subviewport_lines_needed_in_mall, unsigned int, mode_lib->ms.SubViewport…
[all …]
/linux-6.12.1/net/mac80211/
rc80211_minstrel_ht_debugfs.c
22 struct minstrel_debugfs_info *ms; in minstrel_stats_read() local
24 ms = file->private_data; in minstrel_stats_read()
25 return simple_read_from_buffer(buf, len, ppos, ms->buf, ms->len); in minstrel_stats_read()
148 struct minstrel_debugfs_info *ms; in minstrel_ht_stats_open() local
152 ms = kmalloc(32768, GFP_KERNEL); in minstrel_ht_stats_open()
153 if (!ms) in minstrel_ht_stats_open()
156 file->private_data = ms; in minstrel_ht_stats_open()
157 p = ms->buf; in minstrel_ht_stats_open()
179 ms->len = p - ms->buf; in minstrel_ht_stats_open()
180 WARN_ON(ms->len + sizeof(*ms) > 32768); in minstrel_ht_stats_open()
[all …]
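
minstrel_ht_stats_open() above formats the whole rate-statistics table into one 32 KiB buffer at open time, and minstrel_stats_read() then serves slices of it through simple_read_from_buffer(). The heart of that helper is a bounded copy at a moving offset; a userspace sketch of the idea (not the kernel function, and the table contents below are made up):

    #include <stdio.h>
    #include <string.h>

    struct stats_file {
        char   buf[256];   /* filled once at "open" time, like ms->buf */
        size_t len;        /* like ms->len */
    };

    /* copy up to 'want' bytes starting at *pos, advancing *pos; returns bytes copied */
    static size_t read_slice(const struct stats_file *f, size_t *pos,
                             char *dst, size_t want)
    {
        if (*pos >= f->len)
            return 0;                          /* past the end: EOF */
        size_t n = f->len - *pos;
        if (n > want)
            n = want;
        memcpy(dst, f->buf + *pos, n);
        *pos += n;
        return n;
    }

    int main(void)
    {
        struct stats_file f;
        f.len = (size_t)snprintf(f.buf, sizeof(f.buf),
                                 "rate      tries  success\nMCS7      120    97\n");

        size_t pos = 0, n;
        char chunk[8];
        while ((n = read_slice(&f, &pos, chunk, sizeof(chunk))) > 0)
            fwrite(chunk, 1, n, stdout);       /* offset reads come back in order */
        return 0;
    }
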
