/linux-6.12.1/drivers/i3c/master/mipi-i3c-hci/ |
D | cmd_v2.c |
     93  struct hci_xfer *xfer,   in hci_cmd_v2_prep_private_xfer() argument
     97  u8 *data = xfer->data;   in hci_cmd_v2_prep_private_xfer()
     98  unsigned int data_len = xfer->data_len;   in hci_cmd_v2_prep_private_xfer()
     99  bool rnw = xfer->rnw;   in hci_cmd_v2_prep_private_xfer()
    101  xfer->cmd_tid = hci_get_tid();   in hci_cmd_v2_prep_private_xfer()
    104  xfer->cmd_desc[0] =   in hci_cmd_v2_prep_private_xfer()
    106  CMD_U0_TID(xfer->cmd_tid) |   in hci_cmd_v2_prep_private_xfer()
    111  xfer->cmd_desc[1] =   in hci_cmd_v2_prep_private_xfer()
    113  xfer->cmd_desc[2] = 0;   in hci_cmd_v2_prep_private_xfer()
    114  xfer->cmd_desc[3] = 0;   in hci_cmd_v2_prep_private_xfer()
    [all …]
|
D | cmd_v1.c |
    146  static void fill_data_bytes(struct hci_xfer *xfer, u8 *data,   in fill_data_bytes() argument
    149  xfer->cmd_desc[1] = 0;   in fill_data_bytes()
    152  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_4(data[3]);   in fill_data_bytes()
    155  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_3(data[2]);   in fill_data_bytes()
    158  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_2(data[1]);   in fill_data_bytes()
    161  xfer->cmd_desc[1] |= CMD_I1_DATA_BYTE_1(data[0]);   in fill_data_bytes()
    167  xfer->data = NULL;   in fill_data_bytes()
    171  struct hci_xfer *xfer,   in hci_cmd_v1_prep_ccc() argument
    176  u8 *data = xfer->data;   in hci_cmd_v1_prep_ccc()
    177  unsigned int data_len = xfer->data_len;   in hci_cmd_v1_prep_ccc()
    [all …]
|
D | pio.c |
    227  static void hci_pio_write_cmd(struct i3c_hci *hci, struct hci_xfer *xfer)   in hci_pio_write_cmd() argument
    229  DBG("cmd_desc[%d] = 0x%08x", 0, xfer->cmd_desc[0]);   in hci_pio_write_cmd()
    230  DBG("cmd_desc[%d] = 0x%08x", 1, xfer->cmd_desc[1]);   in hci_pio_write_cmd()
    231  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[0]);   in hci_pio_write_cmd()
    232  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[1]);   in hci_pio_write_cmd()
    234  DBG("cmd_desc[%d] = 0x%08x", 2, xfer->cmd_desc[2]);   in hci_pio_write_cmd()
    235  DBG("cmd_desc[%d] = 0x%08x", 3, xfer->cmd_desc[3]);   in hci_pio_write_cmd()
    236  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[2]);   in hci_pio_write_cmd()
    237  pio_reg_write(COMMAND_QUEUE_PORT, xfer->cmd_desc[3]);   in hci_pio_write_cmd()
    243  struct hci_xfer *xfer = pio->curr_rx;   in hci_pio_do_rx() local
    [all …]
|
D | core.c |
    194  struct hci_xfer *xfer;   in i3c_hci_send_ccc_cmd() local
    204  xfer = hci_alloc_xfer(nxfers);   in i3c_hci_send_ccc_cmd()
    205  if (!xfer)   in i3c_hci_send_ccc_cmd()
    209  xfer->data = NULL;   in i3c_hci_send_ccc_cmd()
    210  xfer->data_len = 0;   in i3c_hci_send_ccc_cmd()
    211  xfer->rnw = false;   in i3c_hci_send_ccc_cmd()
    212  hci->cmd->prep_ccc(hci, xfer, I3C_BROADCAST_ADDR,   in i3c_hci_send_ccc_cmd()
    214  xfer++;   in i3c_hci_send_ccc_cmd()
    218  xfer[i].data = ccc->dests[i].payload.data;   in i3c_hci_send_ccc_cmd()
    219  xfer[i].data_len = ccc->dests[i].payload.len;   in i3c_hci_send_ccc_cmd()
    [all …]
|
D | dma.c |
    130  void *xfer, *resp, *ibi_status, *ibi_data;   member
    167  if (rh->xfer)   in hci_dma_cleanup()
    170  rh->xfer, rh->xfer_dma);   in hci_dma_cleanup()
    242  rh->xfer = dma_alloc_coherent(&hci->master.dev, xfers_sz,   in hci_dma_init()
    250  if (!rh->xfer || !rh->resp || !rh->src_xfers)   in hci_dma_init()
    345  struct hci_xfer *xfer;   in hci_dma_unmap_xfer() local
    349  xfer = xfer_list + i;   in hci_dma_unmap_xfer()
    350  if (!xfer->data)   in hci_dma_unmap_xfer()
    353  xfer->data_dma, xfer->data_len,   in hci_dma_unmap_xfer()
    354  xfer->rnw ? DMA_FROM_DEVICE : DMA_TO_DEVICE);   in hci_dma_unmap_xfer()
    [all …]
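
Note: the dma.c hits above pair each transfer's data buffer with a streaming DMA mapping whose direction follows the rnw (read-not-write) flag. A minimal sketch of that map/unmap pattern, with a hypothetical my_xfer structure standing in for the driver's struct hci_xfer:

	#include <linux/device.h>
	#include <linux/dma-mapping.h>
	#include <linux/errno.h>

	/* Hypothetical stand-in for the driver's transfer descriptor. */
	struct my_xfer {
		void *data;
		unsigned int data_len;
		bool rnw;		/* true = read from device */
		dma_addr_t data_dma;
	};

	static int my_map_xfer(struct device *dev, struct my_xfer *xfer)
	{
		/* Reads target DMA_FROM_DEVICE, writes DMA_TO_DEVICE. */
		enum dma_data_direction dir =
			xfer->rnw ? DMA_FROM_DEVICE : DMA_TO_DEVICE;

		xfer->data_dma = dma_map_single(dev, xfer->data, xfer->data_len, dir);
		if (dma_mapping_error(dev, xfer->data_dma))
			return -ENOMEM;
		return 0;
	}

	static void my_unmap_xfer(struct device *dev, struct my_xfer *xfer)
	{
		enum dma_data_direction dir =
			xfer->rnw ? DMA_FROM_DEVICE : DMA_TO_DEVICE;

		dma_unmap_single(dev, xfer->data_dma, xfer->data_len, dir);
	}
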
|
/linux-6.12.1/drivers/firmware/arm_scmi/ |
D | msg.c |
     33  static size_t msg_command_size(struct scmi_xfer *xfer)   in msg_command_size() argument
     35  return sizeof(struct scmi_msg_payld) + xfer->tx.len;   in msg_command_size()
     45  static size_t msg_response_size(struct scmi_xfer *xfer)   in msg_response_size() argument
     47  return sizeof(struct scmi_msg_payld) + sizeof(__le32) + xfer->rx.len;   in msg_response_size()
     56  static void msg_tx_prepare(struct scmi_msg_payld *msg, struct scmi_xfer *xfer)   in msg_tx_prepare() argument
     58  msg->msg_header = cpu_to_le32(pack_scmi_header(&xfer->hdr));   in msg_tx_prepare()
     59  if (xfer->tx.buf)   in msg_tx_prepare()
     60  memcpy(msg->msg_payload, xfer->tx.buf, xfer->tx.len);   in msg_tx_prepare()
     83  size_t len, struct scmi_xfer *xfer)   in msg_fetch_response() argument
     87  xfer->hdr.status = le32_to_cpu(msg->msg_payload[0]);   in msg_fetch_response()
    [all …]
|
D | driver.c |
    511  struct scmi_xfer *xfer)   in scmi_xfer_token_set() argument
    523  next_token = (xfer->transfer_id & (MSG_TOKEN_MAX - 1));   in scmi_xfer_token_set()
    549  xfer->hdr.seq = (u16)xfer_id;   in scmi_xfer_token_set()
    561  struct scmi_xfer *xfer)   in scmi_xfer_token_clear() argument
    563  clear_bit(xfer->hdr.seq, minfo->xfer_alloc_table);   in scmi_xfer_token_clear()
    579  scmi_xfer_inflight_register_unlocked(struct scmi_xfer *xfer,   in scmi_xfer_inflight_register_unlocked() argument
    583  set_bit(xfer->hdr.seq, minfo->xfer_alloc_table);   in scmi_xfer_inflight_register_unlocked()
    584  hash_add(minfo->pending_xfers, &xfer->node, xfer->hdr.seq);   in scmi_xfer_inflight_register_unlocked()
    585  xfer->pending = true;   in scmi_xfer_inflight_register_unlocked()
    602  static int scmi_xfer_inflight_register(struct scmi_xfer *xfer,   in scmi_xfer_inflight_register() argument
    [all …]
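
Note: the driver.c hits show SCMI's in-flight bookkeeping: a sequence number is claimed in a bitmap and the xfer is hashed by that number so the response path can find it. A rough standalone sketch of the same bookkeeping, with simplified, hypothetical names (not the SCMI implementation itself):

	#include <linux/bitmap.h>
	#include <linux/hashtable.h>
	#include <linux/spinlock.h>
	#include <linux/types.h>

	#define MY_TOKEN_MAX	(1 << 10)	/* illustrative token space */

	struct my_xfer {
		u16 seq;
		bool pending;
		struct hlist_node node;
	};

	struct my_xfers_info {
		DECLARE_BITMAP(alloc_table, MY_TOKEN_MAX);
		DECLARE_HASHTABLE(pending, 8);
		spinlock_t lock;
	};

	/* Claim the xfer's sequence number and track it as in-flight. */
	static void my_xfer_register(struct my_xfers_info *info, struct my_xfer *xfer)
	{
		unsigned long flags;

		spin_lock_irqsave(&info->lock, flags);
		set_bit(xfer->seq, info->alloc_table);
		hash_add(info->pending, &xfer->node, xfer->seq);
		xfer->pending = true;
		spin_unlock_irqrestore(&info->lock, flags);
	}

	/* Release the token once the response (or a timeout) is handled. */
	static void my_xfer_unregister(struct my_xfers_info *info, struct my_xfer *xfer)
	{
		unsigned long flags;

		spin_lock_irqsave(&info->lock, flags);
		hash_del(&xfer->node);
		xfer->pending = false;
		clear_bit(xfer->seq, info->alloc_table);
		spin_unlock_irqrestore(&info->lock, flags);
	}
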
|
D | raw_mode.c |
    206  struct scmi_xfer *xfer;   member
    336  scmi_xfer_raw_waiter_get(struct scmi_raw_mode_info *raw, struct scmi_xfer *xfer,   in scmi_xfer_raw_waiter_get() argument
    349  xfer->async_done = &rw->async_response;   in scmi_xfer_raw_waiter_get()
    353  rw->xfer = xfer;   in scmi_xfer_raw_waiter_get()
    363  if (rw->xfer) {   in scmi_xfer_raw_waiter_put()
    364  rw->xfer->async_done = NULL;   in scmi_xfer_raw_waiter_put()
    365  rw->xfer = NULL;   in scmi_xfer_raw_waiter_put()
    379  trace_scmi_xfer_response_wait(rw->xfer->transfer_id, rw->xfer->hdr.id,   in scmi_xfer_raw_waiter_enqueue()
    380  rw->xfer->hdr.protocol_id,   in scmi_xfer_raw_waiter_enqueue()
    381  rw->xfer->hdr.seq,   in scmi_xfer_raw_waiter_enqueue()
    [all …]
|
D | shmem.c |
     38  struct scmi_xfer *xfer,   in shmem_tx_prepare() argument
     71  iowrite32(xfer->hdr.poll_completion ? 0 : SCMI_SHMEM_FLAG_INTR_ENABLED,   in shmem_tx_prepare()
     73  iowrite32(sizeof(shmem->msg_header) + xfer->tx.len, &shmem->length);   in shmem_tx_prepare()
     74  iowrite32(pack_scmi_header(&xfer->hdr), &shmem->msg_header);   in shmem_tx_prepare()
     75  if (xfer->tx.buf)   in shmem_tx_prepare()
     76  memcpy_toio(shmem->msg_payload, xfer->tx.buf, xfer->tx.len);   in shmem_tx_prepare()
     85  struct scmi_xfer *xfer)   in shmem_fetch_response() argument
     89  xfer->hdr.status = ioread32(shmem->msg_payload);   in shmem_fetch_response()
     91  xfer->rx.len = min_t(size_t, xfer->rx.len, len > 8 ? len - 8 : 0);   in shmem_fetch_response()
     94  memcpy_fromio(xfer->rx.buf, shmem->msg_payload + 4, xfer->rx.len);   in shmem_fetch_response()
    [all …]
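
Note: the shmem.c hits write the outgoing message into a memory-mapped mailbox region with iowrite32()/memcpy_toio(). A minimal sketch of that TX-prepare step, assuming a purely hypothetical shared-memory layout (field names and sizes are illustrative, not the SCMI layout):

	#include <linux/io.h>
	#include <linux/types.h>

	/* Hypothetical mailbox shared-memory layout, for illustration only. */
	struct my_shmem {
		__le32 status;
		__le32 flags;
		__le32 length;
		__le32 msg_header;
		u8 msg_payload[128];
	};

	static void my_shmem_tx_prepare(struct my_shmem __iomem *shmem,
					u32 header, const void *buf, size_t len)
	{
		/* Length covers the header word plus the payload bytes. */
		iowrite32(sizeof(shmem->msg_header) + len, &shmem->length);
		iowrite32(header, &shmem->msg_header);
		if (buf)
			memcpy_toio(shmem->msg_payload, buf, len);
	}
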
|
/linux-6.12.1/drivers/firmware/ |
D | ti_sci.c |
    236  struct ti_sci_xfer *xfer;   in ti_sci_rx_callback() local
    250  xfer = &minfo->xfer_block[xfer_id];   in ti_sci_rx_callback()
    259  if (mbox_msg->len < xfer->rx_len) {   in ti_sci_rx_callback()
    261  mbox_msg->len, xfer->rx_len);   in ti_sci_rx_callback()
    268  memcpy(xfer->xfer_buf, mbox_msg->buf, xfer->rx_len);   in ti_sci_rx_callback()
    269  complete(&xfer->done);   in ti_sci_rx_callback()
    295  struct ti_sci_xfer *xfer;   in ti_sci_get_one_xfer() local
    335  xfer = &minfo->xfer_block[xfer_id];   in ti_sci_get_one_xfer()
    337  hdr = (struct ti_sci_msg_hdr *)xfer->tx_message.buf;   in ti_sci_get_one_xfer()
    338  xfer->tx_message.len = tx_message_size;   in ti_sci_get_one_xfer()
    [all …]
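
Note: the ti_sci.c hits illustrate the usual mailbox request/response pattern: the RX callback copies the firmware reply into the xfer's buffer and signals a struct completion that the sender is blocked on. A small sketch of that pattern with hypothetical names:

	#include <linux/completion.h>
	#include <linux/errno.h>
	#include <linux/jiffies.h>
	#include <linux/string.h>

	/* Hypothetical transfer tracking a response buffer and a completion. */
	struct my_xfer {
		void *rx_buf;
		size_t rx_len;
		struct completion done;
	};

	/* Called from the mailbox RX path with the received payload. */
	static void my_rx_callback(struct my_xfer *xfer, const void *buf, size_t len)
	{
		if (len > xfer->rx_len)
			len = xfer->rx_len;	/* never overrun the response buffer */
		memcpy(xfer->rx_buf, buf, len);
		complete(&xfer->done);
	}

	/* The sender blocks until the callback signals completion, or times out. */
	static int my_wait_for_response(struct my_xfer *xfer, unsigned long timeout_ms)
	{
		if (!wait_for_completion_timeout(&xfer->done,
						 msecs_to_jiffies(timeout_ms)))
			return -ETIMEDOUT;
		return 0;
	}
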
|
/linux-6.12.1/drivers/iio/imu/ |
D | adis_buffer.c |
     36  adis->xfer = kcalloc(2, sizeof(*adis->xfer), GFP_KERNEL);   in adis_update_scan_mode_burst()
     37  if (!adis->xfer)   in adis_update_scan_mode_burst()
     42  kfree(adis->xfer);   in adis_update_scan_mode_burst()
     43  adis->xfer = NULL;   in adis_update_scan_mode_burst()
     51  adis->xfer[0].tx_buf = tx;   in adis_update_scan_mode_burst()
     52  adis->xfer[0].bits_per_word = 8;   in adis_update_scan_mode_burst()
     53  adis->xfer[0].len = 2;   in adis_update_scan_mode_burst()
     55  adis->xfer[0].speed_hz = adis->data->burst_max_speed_hz;   in adis_update_scan_mode_burst()
     56  adis->xfer[1].rx_buf = adis->buffer;   in adis_update_scan_mode_burst()
     57  adis->xfer[1].bits_per_word = 8;   in adis_update_scan_mode_burst()
    [all …]
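
Note: the adis_buffer.c hits show the classic two-transfer SPI burst: transfer 0 clocks out a short command, transfer 1 clocks in the sample data. A minimal, self-contained sketch of that pattern (function and buffer names are illustrative, not the ADIS driver's API):

	#include <linux/kernel.h>
	#include <linux/spi/spi.h>

	static int my_burst_read(struct spi_device *spi, const u8 *tx_cmd,
				 u8 *rx_buf, size_t rx_len)
	{
		struct spi_transfer xfers[2] = {
			{
				/* burst command goes out first */
				.tx_buf = tx_cmd,
				.len = 2,
				.bits_per_word = 8,
			}, {
				/* then the sample data is clocked in */
				.rx_buf = rx_buf,
				.len = rx_len,
				.bits_per_word = 8,
			},
		};
		struct spi_message msg;

		spi_message_init_with_transfers(&msg, xfers, ARRAY_SIZE(xfers));
		return spi_sync(spi, &msg);
	}
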
|
/linux-6.12.1/drivers/spi/ |
D | spi-atmel.c |
    489  static inline bool atmel_spi_is_vmalloc_xfer(struct spi_transfer *xfer)   in atmel_spi_is_vmalloc_xfer() argument
    491  return is_vmalloc_addr(xfer->tx_buf) || is_vmalloc_addr(xfer->rx_buf);   in atmel_spi_is_vmalloc_xfer()
    495  struct spi_transfer *xfer)   in atmel_spi_use_dma() argument
    497  return as->use_dma && xfer->len >= DMA_MIN_BYTES;   in atmel_spi_use_dma()
    502  struct spi_transfer *xfer)   in atmel_spi_can_dma() argument
    507  return atmel_spi_use_dma(as, xfer) &&   in atmel_spi_can_dma()
    508  !atmel_spi_is_vmalloc_xfer(xfer);   in atmel_spi_can_dma()
    510  return atmel_spi_use_dma(as, xfer);   in atmel_spi_can_dma()
    653  struct spi_transfer *xfer)   in atmel_spi_next_xfer_single() argument
    656  unsigned long xfer_pos = xfer->len - as->current_remaining_bytes;   in atmel_spi_next_xfer_single()
    [all …]
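
Note: the spi-atmel.c hits decide per transfer whether DMA is worthwhile: the transfer must be long enough, and on some platforms vmalloc'ed buffers are excluded because they cannot be DMA-mapped directly. A sketch of a controller ->can_dma()-style check along those lines (the threshold and function name are illustrative):

	#include <linux/mm.h>
	#include <linux/spi/spi.h>

	#define MY_DMA_MIN_BYTES	16	/* illustrative threshold */

	/* Use DMA only for long transfers whose buffers are not vmalloc memory. */
	static bool my_spi_can_dma(struct spi_controller *ctlr, struct spi_device *spi,
				   struct spi_transfer *xfer)
	{
		if (xfer->len < MY_DMA_MIN_BYTES)
			return false;

		if (is_vmalloc_addr(xfer->tx_buf) || is_vmalloc_addr(xfer->rx_buf))
			return false;

		return true;
	}
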
|
D | spi-axi-spi-engine.c |
    150  struct spi_transfer *xfer)   in spi_engine_gen_xfer() argument
    154  if (xfer->bits_per_word <= 8)   in spi_engine_gen_xfer()
    155  len = xfer->len;   in spi_engine_gen_xfer()
    156  else if (xfer->bits_per_word <= 16)   in spi_engine_gen_xfer()
    157  len = xfer->len / 2;   in spi_engine_gen_xfer()
    159  len = xfer->len / 4;   in spi_engine_gen_xfer()
    165  if (xfer->tx_buf)   in spi_engine_gen_xfer()
    167  if (xfer->rx_buf)   in spi_engine_gen_xfer()
    223  struct spi_transfer *xfer;   in spi_engine_precompile_message() local
    225  list_for_each_entry(xfer, &msg->transfers, transfer_list) {   in spi_engine_precompile_message()
    [all …]
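
Note: the spi_engine_gen_xfer() hits convert a transfer's byte length into a count of FIFO words based on its word size. A generic helper doing the same calculation (illustrative only):

	#include <linux/spi/spi.h>

	static unsigned int my_xfer_word_count(const struct spi_transfer *xfer)
	{
		if (xfer->bits_per_word <= 8)
			return xfer->len;	/* one byte per word */
		else if (xfer->bits_per_word <= 16)
			return xfer->len / 2;	/* two bytes per word */
		else
			return xfer->len / 4;	/* four bytes per word */
	}
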
|
D | spi-qcom-qspi.c |
    171  struct qspi_xfer xfer;   member
    203  const struct qspi_xfer *xfer;   in qcom_qspi_pio_xfer_cfg() local
    205  xfer = &ctrl->xfer;   in qcom_qspi_pio_xfer_cfg()
    208  pio_xfer_cfg |= xfer->dir;   in qcom_qspi_pio_xfer_cfg()
    209  if (xfer->is_last)   in qcom_qspi_pio_xfer_cfg()
    214  iomode = qspi_buswidth_to_iomode(ctrl, xfer->buswidth);   in qcom_qspi_pio_xfer_cfg()
    226  pio_xfer_ctrl |= ctrl->xfer.rem_bytes;   in qcom_qspi_pio_xfer_ctrl()
    240  if (ctrl->xfer.dir == QSPI_WRITE)   in qcom_qspi_pio_xfer()
    262  ctrl->xfer.rem_bytes = 0;   in qcom_qspi_handle_err()
    323  virt_cmd_desc->direction = ctrl->xfer.dir;   in qcom_qspi_alloc_desc()
    [all …]
|
D | spi.c |
    314  struct spi_transfer *xfer,   in spi_statistics_add_transfer_stats() argument
    317  int l2len = min(fls(xfer->len), SPI_STATISTICS_HISTO_SIZE) - 1;   in spi_statistics_add_transfer_stats()
    330  u64_stats_add(&stats->bytes, xfer->len);   in spi_statistics_add_transfer_stats()
    331  if (spi_valid_txbuf(msg, xfer))   in spi_statistics_add_transfer_stats()
    332  u64_stats_add(&stats->bytes_tx, xfer->len);   in spi_statistics_add_transfer_stats()
    333  if (spi_valid_rxbuf(msg, xfer))   in spi_statistics_add_transfer_stats()
    334  u64_stats_add(&stats->bytes_rx, xfer->len);   in spi_statistics_add_transfer_stats()
   1228  struct spi_transfer *xfer;   in __spi_map_msg() local
   1249  list_for_each_entry(xfer, &msg->transfers, transfer_list) {   in __spi_map_msg()
   1253  if (!ctlr->can_dma(ctlr, msg->spi, xfer))   in __spi_map_msg()
    [all …]
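
Note: spi_statistics_add_transfer_stats() buckets each transfer length into a power-of-two histogram using fls(). A standalone version of that bucketing (the bucket count is an illustrative assumption, not the core's SPI_STATISTICS_HISTO_SIZE value):

	#include <linux/bitops.h>
	#include <linux/minmax.h>

	#define MY_HISTO_SIZE	17	/* buckets for 2^0 .. 2^16+ byte transfers */

	/* Map a transfer length to its power-of-two histogram bucket. */
	static int my_histo_bucket(unsigned int len)
	{
		if (len == 0)
			return 0;	/* count empty transfers in the first bucket */

		return min(fls(len), MY_HISTO_SIZE) - 1;
	}
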
|
D | spi-slave-mt27xx.c |
    164  struct spi_transfer *xfer)   in mtk_spi_slave_fifo_transfer() argument
    172  if (xfer->rx_buf)   in mtk_spi_slave_fifo_transfer()
    174  if (xfer->tx_buf)   in mtk_spi_slave_fifo_transfer()
    178  cnt = xfer->len / 4;   in mtk_spi_slave_fifo_transfer()
    179  if (xfer->tx_buf)   in mtk_spi_slave_fifo_transfer()
    181  xfer->tx_buf, cnt);   in mtk_spi_slave_fifo_transfer()
    183  remainder = xfer->len % 4;   in mtk_spi_slave_fifo_transfer()
    184  if (xfer->tx_buf && remainder > 0) {   in mtk_spi_slave_fifo_transfer()
    186  memcpy(&reg_val, xfer->tx_buf + cnt * 4, remainder);   in mtk_spi_slave_fifo_transfer()
    201  struct spi_transfer *xfer)   in mtk_spi_slave_dma_transfer() argument
    [all …]
|
D | spi-rockchip.c |
    198  struct spi_transfer *xfer; /* Store xfer temporarily */   member
    370  struct spi_transfer *xfer)   in rockchip_spi_prepare_irq() argument
    372  rs->tx = xfer->tx_buf;   in rockchip_spi_prepare_irq()
    373  rs->rx = xfer->rx_buf;   in rockchip_spi_prepare_irq()
    374  rs->tx_left = rs->tx ? xfer->len / rs->n_bytes : 0;   in rockchip_spi_prepare_irq()
    375  rs->rx_left = xfer->len / rs->n_bytes;   in rockchip_spi_prepare_irq()
    439  struct spi_controller *ctlr, struct spi_transfer *xfer)   in rockchip_spi_prepare_dma() argument
    445  rs->tx = xfer->tx_buf;   in rockchip_spi_prepare_dma()
    446  rs->rx = xfer->rx_buf;   in rockchip_spi_prepare_dma()
    449  if (xfer->rx_buf) {   in rockchip_spi_prepare_dma()
    [all …]
|
/linux-6.12.1/drivers/dma/dw-edma/ |
D | dw-edma-core.c |
    362  dw_edma_device_transfer(struct dw_edma_transfer *xfer)   in dw_edma_device_transfer() argument
    364  struct dw_edma_chan *chan = dchan2dw_edma_chan(xfer->dchan);   in dw_edma_device_transfer()
    365  enum dma_transfer_direction dir = xfer->direction;   in dw_edma_device_transfer()
    414  if (xfer->type == EDMA_XFER_CYCLIC) {   in dw_edma_device_transfer()
    415  if (!xfer->xfer.cyclic.len || !xfer->xfer.cyclic.cnt)   in dw_edma_device_transfer()
    417  } else if (xfer->type == EDMA_XFER_SCATTER_GATHER) {   in dw_edma_device_transfer()
    418  if (xfer->xfer.sg.len < 1)   in dw_edma_device_transfer()
    420  } else if (xfer->type == EDMA_XFER_INTERLEAVED) {   in dw_edma_device_transfer()
    421  if (!xfer->xfer.il->numf || xfer->xfer.il->frame_size < 1)   in dw_edma_device_transfer()
    423  if (!xfer->xfer.il->src_inc || !xfer->xfer.il->dst_inc)   in dw_edma_device_transfer()
    [all …]
|
/linux-6.12.1/include/trace/events/ |
D | spi.h |
    174  #define spi_valid_txbuf(msg, xfer) \   argument
    175  (xfer->tx_buf && xfer->tx_buf != msg->spi->controller->dummy_tx)
    176  #define spi_valid_rxbuf(msg, xfer) \   argument
    177  (xfer->rx_buf && xfer->rx_buf != msg->spi->controller->dummy_rx)
    181  TP_PROTO(struct spi_message *msg, struct spi_transfer *xfer),
    183  TP_ARGS(msg, xfer),
    188  __field( struct spi_transfer *, xfer )
    191  spi_valid_rxbuf(msg, xfer) ?
    192  (xfer->len < 64 ? xfer->len : 64) : 0)
    194  spi_valid_txbuf(msg, xfer) ?
    [all …]
|
/linux-6.12.1/drivers/net/ethernet/micrel/ |
D | ks8851_spi.c |
    120  struct spi_transfer *xfer = &kss->spi_xfer1;   in ks8851_wrreg16_spi() local
    128  xfer->tx_buf = txb;   in ks8851_wrreg16_spi()
    129  xfer->rx_buf = NULL;   in ks8851_wrreg16_spi()
    130  xfer->len = 4;   in ks8851_wrreg16_spi()
    151  struct spi_transfer *xfer;   in ks8851_rdreg() local
    161  xfer = kss->spi_xfer2;   in ks8851_rdreg()
    163  xfer->tx_buf = txb;   in ks8851_rdreg()
    164  xfer->rx_buf = NULL;   in ks8851_rdreg()
    165  xfer->len = 2;   in ks8851_rdreg()
    167  xfer++;   in ks8851_rdreg()
    [all …]
|
/linux-6.12.1/drivers/staging/greybus/ |
D | spilib.c |
    151  static struct spi_transfer *get_next_xfer(struct spi_transfer *xfer,   in get_next_xfer() argument
    154  if (xfer == list_last_entry(&msg->transfers, struct spi_transfer,   in get_next_xfer()
    158  return list_next_entry(xfer, transfer_list);   in get_next_xfer()
    167  struct spi_transfer *xfer;   in gb_spi_operation_create() local
    178  xfer = spi->first_xfer;   in gb_spi_operation_create()
    184  spi->last_xfer = xfer;   in gb_spi_operation_create()
    186  if (!xfer->tx_buf && !xfer->rx_buf) {   in gb_spi_operation_create()
    188  "bufferless transfer, length %u\n", xfer->len);   in gb_spi_operation_create()
    196  if (xfer->tx_buf) {   in gb_spi_operation_create()
    197  len = xfer->len - spi->tx_xfer_offset;   in gb_spi_operation_create()
    [all …]
|
/linux-6.12.1/drivers/gpu/drm/bridge/ |
D | nwl-dsi.c |
    112  struct nwl_dsi_transfer *xfer;   member
    368  struct nwl_dsi_transfer *xfer = dsi->xfer;   in nwl_dsi_read_packet() local
    370  u8 *payload = xfer->msg->rx_buf;   in nwl_dsi_read_packet()
    376  xfer->status = 0;   in nwl_dsi_read_packet()
    378  if (xfer->rx_word_count == 0) {   in nwl_dsi_read_packet()
    385  xfer->status = err;   in nwl_dsi_read_packet()
    390  if (channel != xfer->msg->channel) {   in nwl_dsi_read_packet()
    393  xfer->cmd, channel, xfer->msg->channel);   in nwl_dsi_read_packet()
    394  xfer->status = -EINVAL;   in nwl_dsi_read_packet()
    401  if (xfer->msg->rx_len > 1) {   in nwl_dsi_read_packet()
    [all …]
|
/linux-6.12.1/drivers/i2c/busses/ |
D | i2c-mlxcpld.c |
     74  struct mlxcpld_i2c_curr_xfer xfer;   member
    218  priv->xfer.msg = msgs;   in mlxcpld_i2c_set_transf_data()
    219  priv->xfer.msg_num = num;   in mlxcpld_i2c_set_transf_data()
    227  priv->xfer.cmd = msgs[num - 1].flags & I2C_M_RD;   in mlxcpld_i2c_set_transf_data()
    229  if (priv->xfer.cmd == I2C_M_RD && comm_len != msgs[0].len) {   in mlxcpld_i2c_set_transf_data()
    230  priv->xfer.addr_width = msgs[0].len;   in mlxcpld_i2c_set_transf_data()
    231  priv->xfer.data_len = comm_len - priv->xfer.addr_width;   in mlxcpld_i2c_set_transf_data()
    233  priv->xfer.addr_width = 0;   in mlxcpld_i2c_set_transf_data()
    234  priv->xfer.data_len = comm_len;   in mlxcpld_i2c_set_transf_data()
    304  if (priv->xfer.cmd != I2C_M_RD)   in mlxcpld_i2c_wait_for_tc()
    [all …]
|
/linux-6.12.1/drivers/input/touchscreen/ |
D | ad7877.c |
    147  struct spi_transfer xfer[6];   member
    177  struct spi_transfer xfer[AD7877_NR_SENSE + 2];   member
    212  req->xfer[0].tx_buf = &req->command;   in ad7877_read()
    213  req->xfer[0].len = 2;   in ad7877_read()
    214  req->xfer[0].cs_change = 1;   in ad7877_read()
    216  req->xfer[1].rx_buf = &req->sample;   in ad7877_read()
    217  req->xfer[1].len = 2;   in ad7877_read()
    219  spi_message_add_tail(&req->xfer[0], &req->msg);   in ad7877_read()
    220  spi_message_add_tail(&req->xfer[1], &req->msg);   in ad7877_read()
    242  req->xfer[0].tx_buf = &req->command;   in ad7877_write()
    [all …]
|
/linux-6.12.1/drivers/net/can/spi/mcp251xfd/ |
D | mcp251xfd-regmap.c |
     31  struct spi_transfer xfer[] = {   in mcp251xfd_regmap_nocrc_gather_write() local
     47  return spi_sync_transfer(spi, xfer, ARRAY_SIZE(xfer));   in mcp251xfd_regmap_nocrc_gather_write()
    106  struct spi_transfer xfer[2] = { };   in mcp251xfd_regmap_nocrc_update_bits() local
    110  spi_message_add_tail(&xfer[0], &msg);   in mcp251xfd_regmap_nocrc_update_bits()
    113  xfer[0].tx_buf = buf_tx;   in mcp251xfd_regmap_nocrc_update_bits()
    114  xfer[0].len = sizeof(buf_tx->cmd);   in mcp251xfd_regmap_nocrc_update_bits()
    116  xfer[1].rx_buf = buf_rx->data;   in mcp251xfd_regmap_nocrc_update_bits()
    117  xfer[1].len = len;   in mcp251xfd_regmap_nocrc_update_bits()
    118  spi_message_add_tail(&xfer[1], &msg);   in mcp251xfd_regmap_nocrc_update_bits()
    120  xfer[0].tx_buf = buf_tx;   in mcp251xfd_regmap_nocrc_update_bits()
    [all …]
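
Note: the mcp251xfd gather-write hits send the command bytes and the register payload as two back-to-back transfers submitted with spi_sync_transfer(), so both go out under one chip-select assertion. A minimal sketch of that pattern (buffer and function names are illustrative, not the regmap bus implementation):

	#include <linux/kernel.h>
	#include <linux/spi/spi.h>

	static int my_gather_write(struct spi_device *spi,
				   const void *cmd, size_t cmd_len,
				   const void *val, size_t val_len)
	{
		struct spi_transfer xfer[] = {
			{
				/* command / register-address bytes */
				.tx_buf = cmd,
				.len = cmd_len,
			}, {
				/* register payload, same chip-select assertion */
				.tx_buf = val,
				.len = val_len,
			},
		};

		return spi_sync_transfer(spi, xfer, ARRAY_SIZE(xfer));
	}
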
|