
Searched refs:start_idx (Results 1 – 25 of 78) sorted by relevance


/linux-6.12.1/drivers/net/wireless/ath/ath9k/
ar9003_aic.c
312 int16_t start_idx, end_idx; in ar9003_aic_cal_post_process() local
317 start_idx = ar9003_aic_find_valid(cal_sram_valid, 0, i); in ar9003_aic_cal_post_process()
320 if (start_idx < 0) { in ar9003_aic_cal_post_process()
322 start_idx = end_idx; in ar9003_aic_cal_post_process()
323 end_idx = ar9003_aic_find_valid(cal_sram_valid, 1, start_idx); in ar9003_aic_cal_post_process()
331 ((aic_sram[start_idx].dir_path_gain_lin - in ar9003_aic_cal_post_process()
333 (start_idx - i) + ((end_idx - i) >> 1)) / in ar9003_aic_cal_post_process()
335 aic_sram[start_idx].dir_path_gain_lin; in ar9003_aic_cal_post_process()
337 ((aic_sram[start_idx].quad_path_gain_lin - in ar9003_aic_cal_post_process()
339 (start_idx - i) + ((end_idx - i) >> 1)) / in ar9003_aic_cal_post_process()
[all …]
/linux-6.12.1/fs/fuse/
dax.c
182 static int fuse_setup_one_mapping(struct inode *inode, unsigned long start_idx, in fuse_setup_one_mapping() argument
190 loff_t offset = start_idx << FUSE_DAX_SHIFT; in fuse_setup_one_mapping()
221 dmap->itn.start = dmap->itn.last = start_idx; in fuse_setup_one_mapping()
317 unsigned long start_idx = start >> FUSE_DAX_SHIFT; in inode_reclaim_dmap_range() local
322 node = interval_tree_iter_first(&fi->dax->tree, start_idx, in inode_reclaim_dmap_range()
441 unsigned long start_idx = pos >> FUSE_DAX_SHIFT; in fuse_setup_new_dax_mapping() local
477 node = interval_tree_iter_first(&fi->dax->tree, start_idx, start_idx); in fuse_setup_new_dax_mapping()
569 unsigned long start_idx = pos >> FUSE_DAX_SHIFT; in fuse_iomap_begin() local
589 node = interval_tree_iter_first(&fi->dax->tree, start_idx, start_idx); in fuse_iomap_begin()
952 unsigned long start_idx; in inode_inline_reclaim_one_dmap() local
[all …]
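
The fs/fuse/dax.c hits above all derive start_idx from a byte position with the same right shift, and turn it back into a range-aligned offset with the matching left shift. Below is a minimal userspace sketch of that arithmetic, not the kernel code; the FUSE_DAX_SHIFT value of 21 (2 MiB DAX ranges) is an assumption made only for illustration.

/* Offset <-> mapping-index conversion as seen in fuse_setup_one_mapping()
 * and fuse_iomap_begin(): pos >> FUSE_DAX_SHIFT picks the DAX range,
 * start_idx << FUSE_DAX_SHIFT gives that range's starting byte offset.
 */
#include <stdio.h>

#define FUSE_DAX_SHIFT 21	/* assumed value (2 MiB ranges), illustration only */

int main(void)
{
	unsigned long long pos = 5ULL * 1024 * 1024 + 123;	/* arbitrary file position */
	unsigned long start_idx = pos >> FUSE_DAX_SHIFT;	/* index of the covering range */
	unsigned long long offset = (unsigned long long)start_idx << FUSE_DAX_SHIFT;

	printf("pos=%llu -> start_idx=%lu -> range offset=%llu\n", pos, start_idx, offset);
	return 0;
}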
/linux-6.12.1/drivers/net/wireless/silabs/wfx/
scan.c
40 static int send_scan_req(struct wfx_vif *wvif, struct cfg80211_scan_request *req, int start_idx) in send_scan_req() argument
46 for (i = start_idx; i < req->n_channels; i++) { in send_scan_req()
47 ch_start = req->channels[start_idx]; in send_scan_req()
58 ret = wfx_hif_scan(wvif, req, start_idx, i - start_idx); in send_scan_req()
76 } else if (wvif->scan_nb_chan_done > i - start_idx) { in send_scan_req()
81 if (req->channels[start_idx]->max_power != vif->bss_conf.txpower) in send_scan_req()
/linux-6.12.1/drivers/infiniband/hw/irdma/
hmc.c
198 if (info->start_idx >= info->hmc_info->hmc_obj[info->rsrc_type].cnt) in irdma_hmc_finish_add_sd_reg()
201 if ((info->start_idx + info->count) > in irdma_hmc_finish_add_sd_reg()
231 if (info->start_idx >= info->hmc_info->hmc_obj[info->rsrc_type].cnt) in irdma_sc_create_hmc_obj()
234 if ((info->start_idx + info->count) > in irdma_sc_create_hmc_obj()
238 info->rsrc_type, info->start_idx, info->count, in irdma_sc_create_hmc_obj()
244 info->start_idx, info->count, &sd_idx, in irdma_sc_create_hmc_obj()
252 info->start_idx, info->count, &pd_idx, in irdma_sc_create_hmc_obj()
379 if (info->start_idx >= info->hmc_info->hmc_obj[info->rsrc_type].cnt) { in irdma_sc_del_hmc_obj()
382 info->start_idx, info->rsrc_type, in irdma_sc_del_hmc_obj()
387 if ((info->start_idx + info->count) > in irdma_sc_del_hmc_obj()
[all …]
hmc.h
127 u32 start_idx; member
138 u32 start_idx; member
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/diag/
rsc_dump.c
59 int read_size, int start_idx) in mlx5_rsc_dump_read_menu_sgmt() argument
70 if (!start_idx) { in mlx5_rsc_dump_read_menu_sgmt()
79 for (i = 0; start_idx + i < num_of_items; i++) { in mlx5_rsc_dump_read_menu_sgmt()
82 return start_idx + i; in mlx5_rsc_dump_read_menu_sgmt()
189 int start_idx = 0; in mlx5_rsc_dump_menu() local
211 start_idx = mlx5_rsc_dump_read_menu_sgmt(dev->rsc_dump, page, size, start_idx); in mlx5_rsc_dump_menu()
/linux-6.12.1/drivers/infiniband/ulp/opa_vnic/
opa_vnic_vema_iface.c
317 u16 start_idx, num_macs, idx = 0, count = 0; in opa_vnic_query_mcast_macs() local
320 start_idx = be16_to_cpu(macs->start_idx); in opa_vnic_query_mcast_macs()
325 if (start_idx > idx++) in opa_vnic_query_mcast_macs()
349 u16 start_idx, tot_macs, num_macs, idx = 0, count = 0, em_macs = 0; in opa_vnic_query_ucast_macs() local
352 start_idx = be16_to_cpu(macs->start_idx); in opa_vnic_query_ucast_macs()
365 if (start_idx > idx++) in opa_vnic_query_ucast_macs()
377 if (start_idx > idx++) in opa_vnic_query_ucast_macs()
/linux-6.12.1/fs/squashfs/
block.c
85 int start_idx = 0, end_idx = 0; in squashfs_bio_read_cached() local
119 bio_trim(bio, start_idx * PAGE_SECTORS, in squashfs_bio_read_cached()
120 (end_idx - start_idx) * PAGE_SECTORS); in squashfs_bio_read_cached()
126 start_idx = idx; in squashfs_bio_read_cached()
134 bio_trim(bio, start_idx * PAGE_SECTORS, in squashfs_bio_read_cached()
135 (end_idx - start_idx) * PAGE_SECTORS); in squashfs_bio_read_cached()
/linux-6.12.1/drivers/cpuidle/
dt_idle_states.c
150 unsigned int start_idx) in dt_init_idle_driver() argument
157 unsigned int state_idx = start_idx; in dt_init_idle_driver()
222 return state_idx - start_idx; in dt_init_idle_driver()
dt_idle_states.h
7 unsigned int start_idx);
/linux-6.12.1/drivers/net/dsa/microchip/
ksz9477_acl.c
240 int start_idx, end_idx, contiguous_count; in ksz9477_acl_get_cont_entr() local
268 start_idx = find_first_bit((unsigned long *)&val, 16); in ksz9477_acl_get_cont_entr()
272 contiguous_count = end_idx - start_idx + 1; in ksz9477_acl_get_cont_entr()
284 for (i = start_idx; i <= end_idx; i++) { in ksz9477_acl_get_cont_entr()
296 if (i > start_idx) { in ksz9477_acl_get_cont_entr()
470 u16 start_idx, in ksz9477_move_entries_downwards() argument
477 for (i = start_idx; i < end_idx; i++) { in ksz9477_move_entries_downwards()
509 u16 start_idx, u16 num_entries_to_move, in ksz9477_move_entries_upwards() argument
515 for (i = start_idx; i > target_idx; i--) { in ksz9477_move_entries_upwards()
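
The ksz9477_acl.c hits above locate a contiguous block of ACL entries by taking the first set bit of a 16-bit mask as start_idx and counting the consecutive set bits that follow. A small userspace sketch of that pattern, assuming a plain 16-bit mask and using __builtin_ctz as an illustrative stand-in for the kernel's find_first_bit():

/* Find the first set bit (start_idx) and the length of the contiguous
 * run of set bits starting there, as in ksz9477_acl_get_cont_entr().
 */
#include <stdio.h>

static int first_set_bit16(unsigned int val)
{
	return val ? __builtin_ctz(val) : 16;	/* 16 means "no bit set" */
}

int main(void)
{
	unsigned int val = 0x0070;		/* bits 4..6 set */
	int start_idx = first_set_bit16(val);
	int end_idx = start_idx;

	while (end_idx + 1 < 16 && (val & (1u << (end_idx + 1))))
		end_idx++;

	printf("start_idx=%d end_idx=%d contiguous_count=%d\n",
	       start_idx, end_idx, end_idx - start_idx + 1);	/* prints 4 6 3 */
	return 0;
}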
/linux-6.12.1/drivers/net/ethernet/intel/i40e/
i40e_lan_hmc.c
304 if (info->start_idx >= info->hmc_info->hmc_obj[info->rsrc_type].cnt) { in i40e_create_lan_hmc_object()
310 if ((info->start_idx + info->count) > in i40e_create_lan_hmc_object()
320 info->start_idx, info->count, in i40e_create_lan_hmc_object()
329 info->start_idx, info->count, &pd_idx, in i40e_create_lan_hmc_object()
446 info.start_idx = 0; in i40e_configure_lan_hmc()
556 if (info->start_idx >= info->hmc_info->hmc_obj[info->rsrc_type].cnt) { in i40e_delete_lan_hmc_object()
563 if ((info->start_idx + info->count) > in i40e_delete_lan_hmc_object()
572 info->start_idx, info->count, &pd_idx, in i40e_delete_lan_hmc_object()
595 info->start_idx, info->count, in i40e_delete_lan_hmc_object()
639 info.start_idx = 0; in i40e_shutdown_lan_hmc()
i40e_lan_hmc.h
129 u32 start_idx; member
138 u32 start_idx; member
/linux-6.12.1/drivers/infiniband/core/
umem_odp.c
352 int pfn_index, dma_index, ret = 0, start_idx; in ib_umem_odp_map_dma_and_lock() local
406 start_idx = (range.start - ib_umem_start(umem_odp)) >> page_shift; in ib_umem_odp_map_dma_and_lock()
407 dma_index = start_idx; in ib_umem_odp_map_dma_and_lock()
458 ret = dma_index - start_idx; in ib_umem_odp_map_dma_and_lock()
/linux-6.12.1/fs/f2fs/
compress.c
965 unsigned int start_idx = cluster_idx << in __f2fs_cluster_blocks() local
970 ret = f2fs_get_dnode_of_data(&dn, start_idx, LOOKUP_NODE); in __f2fs_cluster_blocks()
1094 pgoff_t start_idx = start_idx_of_cluster(cc); in prepare_compress_overwrite() local
1098 ret = f2fs_is_compressed_cluster(cc->inode, start_idx); in prepare_compress_overwrite()
1108 page = f2fs_pagecache_get_page(mapping, start_idx + i, in prepare_compress_overwrite()
1141 page = find_lock_page(mapping, start_idx + i); in prepare_compress_overwrite()
1214 pgoff_t start_idx = from >> (PAGE_SHIFT + log_cluster_size) << in f2fs_truncate_partial_cluster() local
1218 err = f2fs_is_compressed_cluster(inode, start_idx); in f2fs_truncate_partial_cluster()
1228 start_idx, &fsdata); in f2fs_truncate_partial_cluster()
1253 f2fs_compress_write_end(inode, fsdata, start_idx, true); in f2fs_truncate_partial_cluster()
[all …]
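
In the fs/f2fs/compress.c hits, start_idx is a cluster-aligned page index: either a cluster index shifted up by log_cluster_size, or a byte offset shifted down to a page index and rounded down to its cluster boundary. A worked userspace sketch of the arithmetic seen in f2fs_truncate_partial_cluster(), with an assumed 4 KiB page size and 4-page clusters (illustration only):

/* Byte offset -> first page index of its compression cluster. */
#include <stdio.h>

#define PAGE_SHIFT 12	/* assumed 4 KiB pages */

int main(void)
{
	unsigned int log_cluster_size = 2;	/* assumed 4 pages per cluster */
	unsigned long long from = 123456;	/* byte offset into the file */
	unsigned long start_idx =
		(from >> (PAGE_SHIFT + log_cluster_size)) << log_cluster_size;

	/* page 30 holds offset 123456; its cluster spans pages 28..31 */
	printf("from=%llu -> cluster start page index=%lu\n", from, start_idx);
	return 0;
}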
/linux-6.12.1/drivers/net/ethernet/mellanox/mlx5/core/lib/
dm.c
256 u64 start_idx; in mlx5_dm_sw_icm_dealloc() local
295 start_idx = (addr - icm_start_addr) >> MLX5_LOG_SW_ICM_BLOCK_SIZE(dev); in mlx5_dm_sw_icm_dealloc()
298 start_idx, num_blocks); in mlx5_dm_sw_icm_dealloc()
/linux-6.12.1/drivers/infiniband/hw/qib/
qib_sdma.c
110 idx = txp->start_idx; in clear_sdma_activelist()
345 idx = txp->start_idx; in qib_sdma_make_progress()
377 idx = txp->start_idx; in qib_sdma_make_progress()
496 tx->txreq.start_idx = 0; in complete_sdma_err_req()
563 tx->txreq.start_idx = tail; in qib_sdma_verbs_send()
730 txp->start_idx, txp->next_descq_idx); in dump_sdma_state()
/linux-6.12.1/drivers/net/ethernet/google/gve/
gve_tx.c
337 if (cfg->start_idx + cfg->num_rings > cfg->qcfg->max_queues) { in gve_tx_alloc_rings_gqi()
343 if (cfg->start_idx == 0) { in gve_tx_alloc_rings_gqi()
354 for (i = cfg->start_idx; i < cfg->start_idx + cfg->num_rings; i++) { in gve_tx_alloc_rings_gqi()
370 if (cfg->start_idx == 0) in gve_tx_alloc_rings_gqi()
384 for (i = cfg->start_idx; i < cfg->start_idx + cfg->num_rings; i++) in gve_tx_free_rings_gqi()
387 if (cfg->start_idx == 0) { in gve_tx_free_rings_gqi()
gve_tx_dqo.c
385 if (cfg->start_idx + cfg->num_rings > cfg->qcfg->max_queues) { in gve_tx_alloc_rings_dqo()
391 if (cfg->start_idx == 0) { in gve_tx_alloc_rings_dqo()
402 for (i = cfg->start_idx; i < cfg->start_idx + cfg->num_rings; i++) { in gve_tx_alloc_rings_dqo()
418 if (cfg->start_idx == 0) in gve_tx_alloc_rings_dqo()
432 for (i = cfg->start_idx; i < cfg->start_idx + cfg->num_rings; i++) in gve_tx_free_rings_dqo()
435 if (cfg->start_idx == 0) { in gve_tx_free_rings_dqo()
/linux-6.12.1/drivers/media/test-drivers/vicodec/
codec-v4l2-fwht.c
60 unsigned int start_idx) in v4l2_fwht_find_nth_fmt() argument
69 if (start_idx == 0) in v4l2_fwht_find_nth_fmt()
71 start_idx--; in v4l2_fwht_find_nth_fmt()
codec-v4l2-fwht.h
59 unsigned int start_idx);
/linux-6.12.1/tools/bpf/bpftool/
cfg.c
402 unsigned int start_idx; in draw_bb_node() local
404 start_idx = bb->head - func->start; in draw_bb_node()
405 dump_xlated_for_graph(dd, bb->head, bb->tail, start_idx, in draw_bb_node()
/linux-6.12.1/drivers/perf/
arm_dmc620_pmu.c
296 int idx, start_idx, end_idx; in dmc620_get_event_idx() local
299 start_idx = 0; in dmc620_get_event_idx()
302 start_idx = DMC620_PMU_CLKDIV2_MAX_COUNTERS; in dmc620_get_event_idx()
306 for (idx = start_idx; idx < end_idx; ++idx) { in dmc620_get_event_idx()
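
The arm_dmc620_pmu.c hit shows start_idx selecting which half of the counter space to search: clkdiv2 events scan the low indices, while clk events start after DMC620_PMU_CLKDIV2_MAX_COUNTERS. A hedged sketch of that range-restricted free-slot search follows; the constants and the used[] array are illustrative stand-ins, not the driver's state.

/* First free counter index within the sub-range an event is allowed to use. */
#include <stdbool.h>
#include <stdio.h>

#define CLKDIV2_MAX_COUNTERS 8	/* stand-in for DMC620_PMU_CLKDIV2_MAX_COUNTERS */
#define TOTAL_COUNTERS 10	/* stand-in for the full counter count */

static int get_event_idx(const bool used[TOTAL_COUNTERS], bool is_clk_event)
{
	int start_idx = is_clk_event ? CLKDIV2_MAX_COUNTERS : 0;
	int end_idx = is_clk_event ? TOTAL_COUNTERS : CLKDIV2_MAX_COUNTERS;

	for (int idx = start_idx; idx < end_idx; ++idx)
		if (!used[idx])
			return idx;
	return -1;	/* no free counter in the requested range */
}

int main(void)
{
	bool used[TOTAL_COUNTERS] = { true, true, false };
	printf("clkdiv2 slot: %d\n", get_event_idx(used, false));	/* -> 2 */
	printf("clk slot:     %d\n", get_event_idx(used, true));	/* -> 8 */
	return 0;
}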
/linux-6.12.1/drivers/dma/ti/
k3-udma.c
165 u32 start_idx[3]; member
452 if (chan_id >= tpl_map->start_idx[i]) in udma_get_chan_tpl_index()
1350 start = ud->res##_tpl.start_idx[tpl]; \
1503 chan_id = ud->tchan_tpl.start_idx[ud->tchan_tpl.levels - 1]; in udma_get_chan_pair()
4574 ud->tchan_tpl.start_idx[0] = 8; in udma_setup_resources()
4578 ud->tchan_tpl.start_idx[0] = 2; in udma_setup_resources()
4581 ud->tchan_tpl.start_idx[1] = UDMA_CAP3_UCHAN_CNT(cap3); in udma_setup_resources()
4582 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4585 ud->tchan_tpl.start_idx[0] = UDMA_CAP3_HCHAN_CNT(cap3); in udma_setup_resources()
4591 ud->rchan_tpl.start_idx[0] = ud->tchan_tpl.start_idx[0]; in udma_setup_resources()
[all …]
/linux-6.12.1/sound/hda/ext/
hdac_ext_stream.c
90 int snd_hdac_ext_stream_init_all(struct hdac_bus *bus, int start_idx, in snd_hdac_ext_stream_init_all() argument
96 int i, tag, idx = start_idx; in snd_hdac_ext_stream_init_all()