Lines matching refs:txqs (uses of trans_pcie->txqs in the iwlwifi PCIe gen2 TX path; the leading number on each line is the source line in the driver)
27 page_ptr = (void *)((u8 *)skb->cb + trans_pcie->txqs.page_offs); in get_workaround_page()
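
The get_workaround_page() hit above shows the driver's pattern of carving a per-packet pointer slot out of skb->cb at a runtime-configured byte offset (txqs.page_offs). A minimal userspace sketch of that arithmetic, with the struct, helper name, and offset value all assumed:

    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    struct fake_skb { unsigned char cb[48]; };  /* stand-in for sk_buff */

    /* Reinterpret cb + page_offs as a pointer-sized slot, as the
     * listing's (u8 *)skb->cb + trans_pcie->txqs.page_offs does. */
    static void **page_slot(struct fake_skb *skb, size_t page_offs)
    {
        return (void **)((unsigned char *)skb->cb + page_offs);
    }

    int main(void)
    {
        struct fake_skb skb;
        int page;                   /* stand-in for a real page */
        size_t page_offs = 16;      /* assumed; the driver picks this */

        memset(&skb, 0, sizeof(skb));
        *page_slot(&skb, page_offs) = &page;
        printf("stored %p, read %p\n", (void *)&page,
               *page_slot(&skb, page_offs));
        return 0;
    }
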
587 WARN_ON(trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
595 WARN_ON(!trans_pcie->txqs.bc_table_dword); in iwl_pcie_gen2_update_byte_tbl()
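
The paired WARN_ONs above (587/595) encode one invariant: the bc_table_dword flag must agree with how the hardware family interprets a byte-count table entry, length in bytes on one path and in dwords on the other. A hedged model of that check; the family split and the rounding are assumptions inferred from the two warnings:

    #include <assert.h>
    #include <stdint.h>

    struct cfg { int bc_table_dword; };

    static uint16_t bc_entry(const struct cfg *cfg, uint32_t len,
                             int dword_family)
    {
        if (dword_family) {
            assert(cfg->bc_table_dword);        /* ~ WARN_ON(!...) at 595 */
            return (uint16_t)((len + 3) / 4);   /* HW consumes dwords */
        }
        assert(!cfg->bc_table_dword);           /* ~ WARN_ON(...) at 587 */
        return (uint16_t)len;                   /* HW consumes bytes */
    }
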
630 if (le16_to_cpu(tfd->num_tbs) >= trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_set_tb()
632 trans_pcie->txqs.tfd.max_tbs); in iwl_txq_gen2_set_tb()
654 if (num_tbs > trans_pcie->txqs.tfd.max_tbs) { in iwl_txq_gen2_tfd_unmap()
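
Both hits above guard the same limit: a TFD can hold at most txqs.tfd.max_tbs transfer buffers, checked when adding a TB (630/632) and again defensively when unmapping (654). A small self-contained version of the add-side bounds check:

    #include <errno.h>
    #include <stdint.h>

    struct tfd {
        uint16_t num_tbs;
        uint64_t addr[32];      /* capacity; caller keeps max_tbs <= 32 */
    };

    /* Append one transfer buffer, refusing to overflow the descriptor,
     * as iwl_txq_gen2_set_tb() does against txqs.tfd.max_tbs. */
    static int tfd_set_tb(struct tfd *tfd, uint16_t max_tbs, uint64_t dma_addr)
    {
        if (tfd->num_tbs >= max_tbs)
            return -EINVAL;             /* mirrors the warning path */
        tfd->addr[tfd->num_tbs] = dma_addr;
        return tfd->num_tbs++;          /* index of the new TB */
    }
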
729 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_tx()
738 if (WARN_ONCE(!test_bit(txq_id, trans_pcie->txqs.queue_used), in iwl_txq_gen2_tx()
757 trans_pcie->txqs.dev_cmd_offs); in iwl_txq_gen2_tx()
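
The TX path above fetches the queue by id, refuses to transmit on a queue whose bit is not set in queue_used (the WARN_ONCE at 738), and stashes the device command pointer in skb->cb at dev_cmd_offs, the same cb-offset trick seen earlier. A runnable model of the bitmap gate; sizes and names are assumptions:

    #include <limits.h>
    #include <stdio.h>

    #define MAX_QUEUES 512
    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    struct txq { int id; };

    struct trans {
        unsigned long queue_used[MAX_QUEUES / BITS_PER_LONG];
        struct txq *txq[MAX_QUEUES];
    };

    static int queue_is_used(const struct trans *t, unsigned int q)
    {
        return (t->queue_used[q / BITS_PER_LONG] >>
                (q % BITS_PER_LONG)) & 1;
    }

    static int tx(struct trans *t, unsigned int txq_id)
    {
        struct txq *txq = t->txq[txq_id];

        if (!queue_is_used(t, txq_id) || !txq) {
            fprintf(stderr, "TX on unused queue %u\n", txq_id);
            return -1;                  /* mirrors the WARN_ONCE */
        }
        /* ... build the TFD, advance the write pointer ... */
        return 0;
    }

    int main(void)
    {
        static struct trans t;
        static struct txq q5 = { .id = 5 };

        t.txq[5] = &q5;
        t.queue_used[5 / BITS_PER_LONG] |= 1UL << (5 % BITS_PER_LONG);
        tx(&t, 5);                      /* ok */
        tx(&t, 7);                      /* rejected: bit never set */
        return 0;
    }
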
825 struct iwl_txq *txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_unmap()
833 if (txq_id != trans_pcie->txqs.cmd.q_id) { in iwl_txq_gen2_unmap()
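
The unmap path above special-cases the command queue: only data queues hold skbs that need to be handed back, so everything except txqs.cmd.q_id takes the reclaim branch. A sketch, with the queue id assumed:

    #include <stdio.h>

    static void unmap_one(int txq_id, int cmd_q_id)
    {
        if (txq_id != cmd_q_id)
            printf("q%d: data queue, free the skb\n", txq_id);
        else
            printf("q%d: command queue, no skb to free\n", txq_id);
    }

    int main(void)
    {
        int cmd_q_id = 0;       /* assumed command queue id */

        unmap_one(0, cmd_q_id);
        unmap_one(5, cmd_q_id);
        return 0;
    }
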
867 trans_pcie->txqs.tfd.size * txq->n_window, in iwl_txq_gen2_free_memory()
876 dma_pool_free(trans_pcie->txqs.bc_pool, in iwl_txq_gen2_free_memory()
899 txq = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_free()
907 if (txq_id == trans_pcie->txqs.cmd.q_id) in iwl_txq_gen2_free()
916 trans_pcie->txqs.txq[txq_id] = NULL; in iwl_txq_gen2_free()
918 clear_bit(txq_id, trans_pcie->txqs.queue_used); in iwl_txq_gen2_free()
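
Lines 899-918 spell out the per-queue teardown order: look the queue up, release its memory, clear the pointer slot, and only then clear the used bit so the id can be handed out again. A simplified, non-atomic model:

    #include <stdlib.h>

    #define MAX_QUEUES 512

    struct txq { int id; };

    struct trans {
        struct txq *txq[MAX_QUEUES];
        unsigned char used[MAX_QUEUES]; /* byte-per-queue stand-in for the bitmap */
    };

    static void txq_free(struct trans *t, unsigned int txq_id)
    {
        struct txq *q = t->txq[txq_id];

        if (!q)
            return;
        /* the command queue would also drop pending host commands here */
        free(q);                        /* the driver frees ring + bc table */
        t->txq[txq_id] = NULL;
        t->used[txq_id] = 0;            /* clear_bit(txq_id, queue_used) */
    }
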
929 WARN_ON(!trans_pcie->txqs.bc_tbl_size); in iwl_txq_dyn_alloc_dma()
931 bc_tbl_size = trans_pcie->txqs.bc_tbl_size; in iwl_txq_dyn_alloc_dma()
941 txq->bc_tbl.addr = dma_pool_alloc(trans_pcie->txqs.bc_pool, GFP_KERNEL, in iwl_txq_dyn_alloc_dma()
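
Every byte-count table has the same size (txqs.bc_tbl_size, asserted non-zero at 929), which is why the driver can draw them all from one dma_pool; the dma_pool_alloc here pairs with the dma_pool_free at 876 above. A userspace stand-in for that fixed-size pool:

    #include <stdlib.h>

    struct bc_pool { size_t bc_tbl_size; };     /* ~ struct dma_pool */

    static void *bc_alloc(struct bc_pool *p)
    {
        /* WARN_ON(!bc_tbl_size) in the driver; refuse a zero-size pool */
        return p->bc_tbl_size ? calloc(1, p->bc_tbl_size) : NULL;
    }

    static void bc_free(struct bc_pool *p, void *tbl)
    {
        (void)p;                /* real dma_pool_free needs the pool too */
        free(tbl);
    }
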
988 if (qid >= ARRAY_SIZE(trans_pcie->txqs.txq)) { in iwl_pcie_txq_alloc_response()
994 if (test_and_set_bit(qid, trans_pcie->txqs.queue_used)) { in iwl_pcie_txq_alloc_response()
1000 if (WARN_ONCE(trans_pcie->txqs.txq[qid], in iwl_pcie_txq_alloc_response()
1007 trans_pcie->txqs.txq[qid] = txq; in iwl_pcie_txq_alloc_response()
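
Lines 988-1007 are the bookkeeping when firmware answers a queue-allocation command: the firmware-chosen qid must be in range, its used bit must not already be set (test_and_set_bit doubles as check and claim), and the pointer slot must still be empty before the queue is installed. A non-atomic sketch of the same three checks:

    #include <errno.h>

    #define MAX_QUEUES 512

    struct txq { int id; };

    static struct txq *txq_tbl[MAX_QUEUES];
    static unsigned char used_tbl[MAX_QUEUES];  /* simplified bitmap */

    static int adopt_fw_queue(unsigned int qid, struct txq *txq)
    {
        if (qid >= MAX_QUEUES)
            return -EIO;        /* fw returned an out-of-range id */
        if (used_tbl[qid])
            return -EIO;        /* id already claimed */
        if (txq_tbl[qid])
            return -EIO;        /* stale pointer, like the WARN_ONCE */
        used_tbl[qid] = 1;      /* test_and_set_bit in the driver */
        txq_tbl[qid] = txq;
        txq->id = qid;
        return 0;
    }
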
1040 size = min_t(u32, size, trans_pcie->txqs.bc_tbl_size / sizeof(u16)); in iwl_txq_dyn_alloc()
1065 if (trans_pcie->txqs.queue_alloc_cmd_ver == 0) { in iwl_txq_dyn_alloc()
1082 } else if (trans_pcie->txqs.queue_alloc_cmd_ver == 3) { in iwl_txq_dyn_alloc()
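
The allocation path clamps the requested ring size so a u16 byte-count entry exists for every slot, then builds a different host-command layout depending on queue_alloc_cmd_ver (0 or 3 in the listing). A sketch of that dispatch; the error handling for other versions is an assumption:

    #include <errno.h>
    #include <stdint.h>

    static int build_alloc_cmd(uint32_t size, unsigned int cmd_ver,
                               uint32_t bc_tbl_size)
    {
        uint32_t max = bc_tbl_size / sizeof(uint16_t);

        if (size > max)
            size = max;         /* the min_t(u32, ...) clamp at 1040 */

        switch (cmd_ver) {
        case 0:                 /* legacy command layout */
        case 3:                 /* newer command layout */
            /* ... fill the version-specific struct here ... */
            return (int)size;   /* ring entries actually requested */
        default:
            return -EOPNOTSUPP; /* assumed handling of unknown versions */
        }
    }
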
1125 if (!test_and_clear_bit(queue, trans_pcie->txqs.queue_used)) { in iwl_txq_dyn_free()
1141 memset(trans_pcie->txqs.queue_used, 0, in iwl_txq_gen2_tx_free()
1142 sizeof(trans_pcie->txqs.queue_used)); in iwl_txq_gen2_tx_free()
1145 for (i = 0; i < ARRAY_SIZE(trans_pcie->txqs.txq); i++) { in iwl_txq_gen2_tx_free()
1146 if (!trans_pcie->txqs.txq[i]) in iwl_txq_gen2_tx_free()
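
Bulk teardown (1141-1146) is the inverse of the piecemeal paths: wipe the whole queue_used bitmap in one memset, then walk the txq array and free only the slots that were populated. The single-queue iwl_txq_dyn_free() at 1125 instead uses test_and_clear_bit, so freeing an id twice is caught. A model of the bulk path:

    #include <stdlib.h>
    #include <string.h>

    #define MAX_QUEUES 512

    struct txq { int id; };

    struct trans {
        unsigned char used[MAX_QUEUES];
        struct txq *txq[MAX_QUEUES];
    };

    static void tx_free_all(struct trans *t)
    {
        size_t i;

        memset(t->used, 0, sizeof(t->used));    /* drop every used bit */
        for (i = 0; i < MAX_QUEUES; i++) {
            if (!t->txq[i])
                continue;                       /* never allocated */
            free(t->txq[i]);
            t->txq[i] = NULL;
        }
    }
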
1160 if (!trans_pcie->txqs.txq[txq_id]) { in iwl_txq_gen2_init()
1166 trans_pcie->txqs.txq[txq_id] = queue; in iwl_txq_gen2_init()
1173 queue = trans_pcie->txqs.txq[txq_id]; in iwl_txq_gen2_init()
1177 (txq_id == trans_pcie->txqs.cmd.q_id)); in iwl_txq_gen2_init()
1182 trans_pcie->txqs.txq[txq_id]->id = txq_id; in iwl_txq_gen2_init()
1183 set_bit(txq_id, trans_pcie->txqs.queue_used); in iwl_txq_gen2_init()
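
Init (1160-1183) allocates the statically owned command queue on first use, records whether the queue being set up is the command queue (the comparison against txqs.cmd.q_id at 1177), stamps the id, and marks the bit used last. A simplified version of that ordering:

    #include <stdlib.h>

    #define MAX_QUEUES 512

    struct txq { int id; int is_cmd; };

    struct trans {
        unsigned int cmd_q_id;
        unsigned char used[MAX_QUEUES];
        struct txq *txq[MAX_QUEUES];
    };

    static int init_queue(struct trans *t, unsigned int txq_id)
    {
        if (!t->txq[txq_id]) {
            t->txq[txq_id] = calloc(1, sizeof(*t->txq[txq_id]));
            if (!t->txq[txq_id])
                return -1;      /* the driver logs and bails out too */
        }
        t->txq[txq_id]->is_cmd = (txq_id == t->cmd_q_id);
        t->txq[txq_id]->id = txq_id;
        t->used[txq_id] = 1;    /* set_bit(txq_id, queue_used) */
        return 0;
    }
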
1207 struct iwl_txq *txq = trans_pcie->txqs.txq[trans_pcie->txqs.cmd.q_id]; in iwl_pcie_gen2_enqueue_hcmd()
1322 cpu_to_le16(QUEUE_TO_SEQ(trans_pcie->txqs.cmd.q_id) | in iwl_pcie_gen2_enqueue_hcmd()
1370 cmd_size, txq->write_ptr, idx, trans_pcie->txqs.cmd.q_id); in iwl_pcie_gen2_enqueue_hcmd()
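
Host commands always go through txqs.cmd.q_id, and the frame's sequence field (1322) encodes where the command sits so its completion can be matched back: queue id in the upper bits, ring index in the low byte. The macro widths below follow the driver's QUEUE_TO_SEQ/INDEX_TO_SEQ definitions but should be treated as assumptions here:

    #include <stdint.h>
    #include <stdio.h>

    #define QUEUE_TO_SEQ(q)     (((q) & 0x1f) << 8)
    #define INDEX_TO_SEQ(i)     ((i) & 0xff)

    int main(void)
    {
        unsigned int cmd_q_id = 0, write_ptr = 42;  /* assumed values */
        uint16_t seq = (uint16_t)(QUEUE_TO_SEQ(cmd_q_id) |
                                  INDEX_TO_SEQ(write_ptr));

        printf("sequence = 0x%04x\n", seq);     /* queue 0, index 42 */
        return 0;
    }
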