Lines matching refs: trans_pcie
42 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_regs() local
43 struct pci_dev *pdev = trans_pcie->pci_dev; in iwl_trans_pcie_dump_regs()
47 if (trans_pcie->pcie_dbg_dumped_once) in iwl_trans_pcie_dump_regs()
126 trans_pcie->pcie_dbg_dumped_once = 1; in iwl_trans_pcie_dump_regs()
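Every function in this listing begins by recovering the PCIe-private state from the generic transport with IWL_TRANS_GET_PCIE_TRANS(); the dump_regs hits additionally latch a once-only flag so the register dump cannot fire repeatedly. A minimal sketch of that guard, using the driver fields shown above (the dump body itself is a placeholder):

    /* Sketch: once-only debug dump guard. iwl_trans_pcie and
     * IWL_TRANS_GET_PCIE_TRANS() are the driver's; the dump body
     * here is a placeholder. */
    static void example_dump_regs(struct iwl_trans *trans)
    {
            struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans);
            struct pci_dev *pdev = trans_pcie->pci_dev;

            if (trans_pcie->pcie_dbg_dumped_once)
                    return;         /* already dumped once for this device */

            /* ... walk and print pdev's PCI config space here ... */

            trans_pcie->pcie_dbg_dumped_once = 1;
    }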
257 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_apm_config() local
268 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_LNKCTL, &lctl); in iwl_pcie_apm_config()
271 pcie_capability_read_word(trans_pcie->pci_dev, PCI_EXP_DEVCTL2, &cap); in iwl_pcie_apm_config()
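pcie_capability_read_word() is the PCI core helper behind both reads above: it fetches a 16-bit register from the device's PCIe capability block and returns 0 on success. A hedged standalone example that reads Link Control to test whether ASPM L1 is enabled (macros from <uapi/linux/pci_regs.h>):

    #include <linux/pci.h>
    #include <uapi/linux/pci_regs.h>

    static bool example_aspm_l1_enabled(struct pci_dev *pdev)
    {
            u16 lctl = 0;

            /* returns 0 on success, a PCIBIOS_* error code otherwise */
            if (pcie_capability_read_word(pdev, PCI_EXP_LNKCTL, &lctl))
                    return false;

            return !!(lctl & PCI_EXP_LNKCTL_ASPM_L1);
    }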
541 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_nic_init() local
545 spin_lock_bh(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
547 spin_unlock_bh(&trans_pcie->irq_lock); in iwl_pcie_nic_init()
690 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_load_firmware_chunk() local
693 trans_pcie->ucode_write_complete = false; in iwl_pcie_load_firmware_chunk()
702 ret = wait_event_timeout(trans_pcie->ucode_write_waitq, in iwl_pcie_load_firmware_chunk()
703 trans_pcie->ucode_write_complete, 5 * HZ); in iwl_pcie_load_firmware_chunk()
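Lines 693-703 are the classic flag-plus-waitqueue handshake: clear the completion flag, start the transfer, then sleep on the waitqueue until the interrupt handler sets the flag, bounded by a 5 second timeout. A generic sketch of both halves (all names here are illustrative, not the driver's):

    #include <linux/wait.h>
    #include <linux/jiffies.h>
    #include <linux/errno.h>

    static DECLARE_WAIT_QUEUE_HEAD(example_waitq);
    static bool example_done;

    /* submitter: arm the flag, kick the hardware, wait up to 5 s */
    static int example_submit_and_wait(void)
    {
            example_done = false;
            /* ... program the DMA engine here ... */

            /* wait_event_timeout() returns 0 on timeout, > 0 otherwise */
            if (!wait_event_timeout(example_waitq, example_done, 5 * HZ))
                    return -ETIMEDOUT;
            return 0;
    }

    /* interrupt handler side: mark completion and wake the sleeper */
    static void example_irq_complete(void)
    {
            example_done = true;
            wake_up(&example_waitq);
    }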
1067 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_check_hw_rf_kill() local
1077 if (trans_pcie->opmode_down) in iwl_pcie_check_hw_rf_kill()
1146 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_non_rx_causes() local
1147 int val = trans_pcie->def_irq | MSIX_NON_AUTO_CLEAR_CAUSE; in iwl_pcie_map_non_rx_causes()
1165 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_map_rx_causes() local
1167 trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 1 : 0; in iwl_pcie_map_rx_causes()
1185 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_NON_RX) in iwl_pcie_map_rx_causes()
1189 if (trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS) in iwl_pcie_map_rx_causes()
1193 void iwl_pcie_conf_msix_hw(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_conf_msix_hw() argument
1195 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_conf_msix_hw()
1197 if (!trans_pcie->msix_enabled) { in iwl_pcie_conf_msix_hw()
1224 static void iwl_pcie_init_msix(struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix() argument
1226 struct iwl_trans *trans = trans_pcie->trans; in iwl_pcie_init_msix()
1228 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_pcie_init_msix()
1230 if (!trans_pcie->msix_enabled) in iwl_pcie_init_msix()
1233 trans_pcie->fh_init_mask = ~iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD); in iwl_pcie_init_msix()
1234 trans_pcie->fh_mask = trans_pcie->fh_init_mask; in iwl_pcie_init_msix()
1235 trans_pcie->hw_init_mask = ~iwl_read32(trans, CSR_MSIX_HW_INT_MASK_AD); in iwl_pcie_init_msix()
1236 trans_pcie->hw_mask = trans_pcie->hw_init_mask; in iwl_pcie_init_msix()
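iwl_pcie_init_msix() snapshots the reset-time interrupt configuration by reading the two MSI-X mask registers and inverting them: the CSR registers store masked causes, so the complement is the initially-enabled set. Keeping *_init_mask separate from the working *_mask lets later enable/disable paths restore the power-on state. A sketch of just that inversion, assuming the driver's iwl_read32() and CSR offsets:

    /* Sketch: cache the power-on cause masks. The registers hold
     * "masked" bits, so the complement is the initially-enabled set. */
    static void example_cache_msix_masks(struct iwl_trans_pcie *trans_pcie)
    {
            struct iwl_trans *trans = trans_pcie->trans;

            trans_pcie->fh_init_mask =
                    ~iwl_read32(trans, CSR_MSIX_FH_INT_MASK_AD);
            trans_pcie->fh_mask = trans_pcie->fh_init_mask;
            trans_pcie->hw_init_mask =
                    ~iwl_read32(trans, CSR_MSIX_HW_INT_MASK_AD);
            trans_pcie->hw_mask = trans_pcie->hw_init_mask;
    }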
1241 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_stop_device() local
1243 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_stop_device()
1245 if (trans_pcie->is_down) in _iwl_trans_pcie_stop_device()
1248 trans_pcie->is_down = true; in _iwl_trans_pcie_stop_device()
1301 iwl_pcie_conf_msix_hw(trans_pcie); in _iwl_trans_pcie_stop_device()
1326 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_synchronize_irqs() local
1328 if (trans_pcie->msix_enabled) { in iwl_pcie_synchronize_irqs()
1331 for (i = 0; i < trans_pcie->alloc_vecs; i++) in iwl_pcie_synchronize_irqs()
1332 synchronize_irq(trans_pcie->msix_entries[i].vector); in iwl_pcie_synchronize_irqs()
1334 synchronize_irq(trans_pcie->pci_dev->irq); in iwl_pcie_synchronize_irqs()
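The synchronize path must cover both interrupt modes: with MSI-X enabled every allocated vector is synchronized individually, otherwise the single MSI/legacy line suffices. synchronize_irq() blocks until any handler currently running on that line has returned. A hedged sketch:

    #include <linux/interrupt.h>

    /* Sketch: wait out all in-flight handlers in either IRQ mode. */
    static void example_sync_irqs(struct iwl_trans_pcie *trans_pcie)
    {
            if (trans_pcie->msix_enabled) {
                    int i;

                    for (i = 0; i < trans_pcie->alloc_vecs; i++)
                            synchronize_irq(trans_pcie->msix_entries[i].vector);
            } else {
                    synchronize_irq(trans_pcie->pci_dev->irq);
            }
    }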
1341 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_fw() local
1365 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1375 if (trans_pcie->is_down) { in iwl_trans_pcie_start_fw()
1421 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_fw()
1462 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_stop_device() local
1469 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1470 trans_pcie->opmode_down = true; in iwl_trans_pcie_stop_device()
1474 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_stop_device()
1479 struct iwl_trans_pcie __maybe_unused *trans_pcie = in iwl_trans_pcie_rf_kill() local
1482 lockdep_assert_held(&trans_pcie->mutex); in iwl_trans_pcie_rf_kill()
1533 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_d3_handshake() local
1547 ret = wait_event_timeout(trans_pcie->sx_waitq, in iwl_pcie_d3_handshake()
1548 trans_pcie->sx_complete, 2 * HZ); in iwl_pcie_d3_handshake()
1551 trans_pcie->sx_complete = false; in iwl_pcie_d3_handshake()
1584 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_d3_resume() local
1613 iwl_pcie_conf_msix_hw(trans_pcie); in iwl_trans_pcie_d3_resume()
1614 if (!trans_pcie->msix_enabled) in iwl_trans_pcie_d3_resume()
1655 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_set_interrupt_capa() local
1668 trans_pcie->msix_entries[i].entry = i; in iwl_pcie_set_interrupt_capa()
1670 num_irqs = pci_enable_msix_range(pdev, trans_pcie->msix_entries, in iwl_pcie_set_interrupt_capa()
1679 trans_pcie->def_irq = (num_irqs == max_irqs) ? num_irqs - 1 : 0; in iwl_pcie_set_interrupt_capa()
1693 trans_pcie->trans->num_rx_queues = num_irqs + 1; in iwl_pcie_set_interrupt_capa()
1694 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX | in iwl_pcie_set_interrupt_capa()
1697 trans_pcie->trans->num_rx_queues = num_irqs; in iwl_pcie_set_interrupt_capa()
1698 trans_pcie->shared_vec_mask = IWL_SHARED_IRQ_NON_RX; in iwl_pcie_set_interrupt_capa()
1700 trans_pcie->trans->num_rx_queues = num_irqs - 1; in iwl_pcie_set_interrupt_capa()
1705 trans_pcie->trans->num_rx_queues, trans_pcie->shared_vec_mask); in iwl_pcie_set_interrupt_capa()
1707 WARN_ON(trans_pcie->trans->num_rx_queues > IWL_MAX_RX_HW_QUEUES); in iwl_pcie_set_interrupt_capa()
1709 trans_pcie->alloc_vecs = num_irqs; in iwl_pcie_set_interrupt_capa()
1710 trans_pcie->msix_enabled = true; in iwl_pcie_set_interrupt_capa()
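iwl_pcie_set_interrupt_capa() is built around pci_enable_msix_range(), which grants anywhere between a minimum and maximum vector count and returns how many were actually allocated (or a negative errno). The branches at lines 1693-1700 then derive num_rx_queues and shared_vec_mask from that grant, so the driver degrades gracefully when fewer vectors are available. A reduced sketch of the allocate-then-adapt pattern (the minimum of 1 is an assumption here, not necessarily the driver's value):

    #include <linux/pci.h>

    /* Sketch: request up to max_irqs MSI-X vectors, accept fewer, and
     * let the caller size its queue set from the returned count. */
    static int example_enable_msix(struct pci_dev *pdev,
                                   struct msix_entry *entries, int max_irqs)
    {
            int num_irqs, i;

            for (i = 0; i < max_irqs; i++)
                    entries[i].entry = i;

            /* number of vectors granted, or a negative errno */
            num_irqs = pci_enable_msix_range(pdev, entries, 1, max_irqs);
            if (num_irqs < 0)
                    return num_irqs;        /* caller falls back to MSI */

            return num_irqs;        /* may be less than max_irqs */
    }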
1730 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_pcie_irq_set_affinity() local
1732 i = trans_pcie->shared_vec_mask & IWL_SHARED_IRQ_FIRST_RSS ? 0 : 1; in iwl_pcie_irq_set_affinity()
1733 iter_rx_q = trans_pcie->trans->num_rx_queues - 1 + i; in iwl_pcie_irq_set_affinity()
1741 cpumask_set_cpu(cpu, &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1742 ret = irq_set_affinity_hint(trans_pcie->msix_entries[i].vector, in iwl_pcie_irq_set_affinity()
1743 &trans_pcie->affinity_mask[i]); in iwl_pcie_irq_set_affinity()
1745 IWL_ERR(trans_pcie->trans, in iwl_pcie_irq_set_affinity()
1747 trans_pcie->msix_entries[i].vector); in iwl_pcie_irq_set_affinity()
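irq_set_affinity_hint() pins each RX vector to one CPU so per-queue processing stays cache-local; note that the hint cpumask must outlive the hint itself, which is why the driver stores it in trans_pcie->affinity_mask[] rather than on the stack. A minimal standalone sketch spreading vectors round-robin over online CPUs (names are illustrative):

    #include <linux/interrupt.h>
    #include <linux/cpumask.h>
    #include <linux/printk.h>

    /* Sketch: bind each vector to one online CPU. masks[] must stay
     * valid until the hints are cleared again (set a NULL mask). */
    static void example_spread_vectors(struct msix_entry *entries, int nvec,
                                       struct cpumask *masks)
    {
            unsigned int cpu = cpumask_first(cpu_online_mask);
            int i;

            for (i = 0; i < nvec; i++) {
                    cpumask_clear(&masks[i]);
                    cpumask_set_cpu(cpu, &masks[i]);
                    if (irq_set_affinity_hint(entries[i].vector, &masks[i]))
                            pr_warn("affinity hint failed for vector %d\n",
                                    entries[i].vector);
                    cpu = cpumask_next(cpu, cpu_online_mask);
                    if (cpu >= nr_cpu_ids)
                            cpu = cpumask_first(cpu_online_mask);
            }
    }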
1753 struct iwl_trans_pcie *trans_pcie) in iwl_pcie_init_msix_handler() argument
1757 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_pcie_init_msix_handler()
1760 const char *qname = queue_name(&pdev->dev, trans_pcie, i); in iwl_pcie_init_msix_handler()
1765 msix_entry = &trans_pcie->msix_entries[i]; in iwl_pcie_init_msix_handler()
1769 (i == trans_pcie->def_irq) ? in iwl_pcie_init_msix_handler()
1776 IWL_ERR(trans_pcie->trans, in iwl_pcie_init_msix_handler()
1782 iwl_pcie_irq_set_affinity(trans_pcie->trans); in iwl_pcie_init_msix_handler()
1841 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in _iwl_trans_pcie_start_hw() local
1844 lockdep_assert_held(&trans_pcie->mutex); in _iwl_trans_pcie_start_hw()
1871 iwl_pcie_init_msix(trans_pcie); in _iwl_trans_pcie_start_hw()
1876 trans_pcie->opmode_down = false; in _iwl_trans_pcie_start_hw()
1879 trans_pcie->is_down = false; in _iwl_trans_pcie_start_hw()
1889 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_start_hw() local
1892 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1894 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_start_hw()
1901 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_op_mode_leave() local
1903 mutex_lock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1914 mutex_unlock(&trans_pcie->mutex); in iwl_trans_pcie_op_mode_leave()
1963 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_configure() local
1968 trans_pcie->txqs.cmd.q_id = trans_cfg->cmd_queue; in iwl_trans_pcie_configure()
1969 trans_pcie->txqs.cmd.fifo = trans_cfg->cmd_fifo; in iwl_trans_pcie_configure()
1970 trans_pcie->txqs.page_offs = trans_cfg->cb_data_offs; in iwl_trans_pcie_configure()
1971 trans_pcie->txqs.dev_cmd_offs = trans_cfg->cb_data_offs + sizeof(void *); in iwl_trans_pcie_configure()
1972 trans_pcie->txqs.queue_alloc_cmd_ver = trans_cfg->queue_alloc_cmd_ver; in iwl_trans_pcie_configure()
1975 trans_pcie->n_no_reclaim_cmds = 0; in iwl_trans_pcie_configure()
1977 trans_pcie->n_no_reclaim_cmds = trans_cfg->n_no_reclaim_cmds; in iwl_trans_pcie_configure()
1978 if (trans_pcie->n_no_reclaim_cmds) in iwl_trans_pcie_configure()
1979 memcpy(trans_pcie->no_reclaim_cmds, trans_cfg->no_reclaim_cmds, in iwl_trans_pcie_configure()
1980 trans_pcie->n_no_reclaim_cmds * sizeof(u8)); in iwl_trans_pcie_configure()
1982 trans_pcie->rx_buf_size = trans_cfg->rx_buf_size; in iwl_trans_pcie_configure()
1983 trans_pcie->rx_page_order = in iwl_trans_pcie_configure()
1984 iwl_trans_get_rb_size_order(trans_pcie->rx_buf_size); in iwl_trans_pcie_configure()
1985 trans_pcie->rx_buf_bytes = in iwl_trans_pcie_configure()
1986 iwl_trans_get_rb_size(trans_pcie->rx_buf_size); in iwl_trans_pcie_configure()
1987 trans_pcie->supported_dma_mask = DMA_BIT_MASK(12); in iwl_trans_pcie_configure()
1989 trans_pcie->supported_dma_mask = DMA_BIT_MASK(11); in iwl_trans_pcie_configure()
1991 trans_pcie->txqs.bc_table_dword = trans_cfg->bc_table_dword; in iwl_trans_pcie_configure()
1992 trans_pcie->scd_set_active = trans_cfg->scd_set_active; in iwl_trans_pcie_configure()
1998 trans_pcie->fw_reset_handshake = trans_cfg->fw_reset_handshake; in iwl_trans_pcie_configure()
2050 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_free() local
2061 if (trans_pcie->rba.alloc_wq) { in iwl_trans_pcie_free()
2062 destroy_workqueue(trans_pcie->rba.alloc_wq); in iwl_trans_pcie_free()
2063 trans_pcie->rba.alloc_wq = NULL; in iwl_trans_pcie_free()
2066 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_free()
2067 for (i = 0; i < trans_pcie->alloc_vecs; i++) { in iwl_trans_pcie_free()
2069 trans_pcie->msix_entries[i].vector, in iwl_trans_pcie_free()
2073 trans_pcie->msix_enabled = false; in iwl_trans_pcie_free()
2078 free_netdev(trans_pcie->napi_dev); in iwl_trans_pcie_free()
2084 iwl_trans_pcie_free_pnvm_dram_regions(&trans_pcie->pnvm_data, in iwl_trans_pcie_free()
2086 iwl_trans_pcie_free_pnvm_dram_regions(&trans_pcie->reduced_tables_data, in iwl_trans_pcie_free()
2089 mutex_destroy(&trans_pcie->mutex); in iwl_trans_pcie_free()
2091 if (trans_pcie->txqs.tso_hdr_page) { in iwl_trans_pcie_free()
2094 per_cpu_ptr(trans_pcie->txqs.tso_hdr_page, i); in iwl_trans_pcie_free()
2100 free_percpu(trans_pcie->txqs.tso_hdr_page); in iwl_trans_pcie_free()
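The teardown at lines 2091-2100 walks a per-CPU allocation: tso_hdr_page came from alloc_percpu(), so each CPU's private instance must be visited with per_cpu_ptr() to release the page it owns before free_percpu() drops the container. A self-contained sketch of that lifecycle, with a hypothetical payload struct standing in for iwl_tso_hdr_page:

    #include <linux/percpu.h>
    #include <linux/cpumask.h>
    #include <linux/gfp.h>

    struct example_pcpu {           /* hypothetical payload */
            struct page *page;
    };

    static struct example_pcpu __percpu *example_alloc(void)
    {
            return alloc_percpu(struct example_pcpu);
    }

    static void example_free(struct example_pcpu __percpu *p)
    {
            int cpu;

            if (!p)
                    return;

            /* visit every CPU's copy and drop what it holds */
            for_each_possible_cpu(cpu) {
                    struct example_pcpu *e = per_cpu_ptr(p, cpu);

                    if (e->page)
                            __free_page(e->page);
            }
            free_percpu(p);
    }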
2195 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in __iwl_trans_pcie_grab_nic_access() local
2204 spin_lock(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2206 if (trans_pcie->cmd_hold_nic_awake) in __iwl_trans_pcie_grab_nic_access()
2256 spin_unlock(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2265 __release(&trans_pcie->reg_lock); in __iwl_trans_pcie_grab_nic_access()
2285 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_release_nic_access() local
2287 lockdep_assert_held(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2293 __acquire(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
2295 if (trans_pcie->cmd_hold_nic_awake) in iwl_trans_pcie_release_nic_access()
2310 spin_unlock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_release_nic_access()
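The grab/release pair splits ownership of reg_lock across two functions, which sparse cannot track; the __release() and __acquire() annotations at lines 2265 and 2293 are no-ops at runtime that keep sparse's lock balance consistent on each side. A hedged sketch of the same trick (names illustrative; the real functions also manage device wakeup state):

    #include <linux/spinlock.h>

    static DEFINE_SPINLOCK(example_lock);

    /* Sketch: lock taken here, released in example_put(). sparse cannot
     * follow cross-function ownership, so each side carries a balancing
     * annotation that does nothing at runtime. */
    static void example_get(void)
    {
            spin_lock_bh(&example_lock);
            /* ... state now protected until example_put() ... */
            __release(&example_lock);       /* annotation only */
    }

    static void example_put(void)
    {
            __acquire(&example_lock);       /* annotation only */
            lockdep_assert_held(&example_lock);
            spin_unlock_bh(&example_lock);
    }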
2393 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_rxq_dma_data() local
2395 if (queue >= trans->num_rx_queues || !trans_pcie->rxq) in iwl_trans_pcie_rxq_dma_data()
2398 data->fr_bd_cb = trans_pcie->rxq[queue].bd_dma; in iwl_trans_pcie_rxq_dma_data()
2399 data->urbd_stts_wrptr = trans_pcie->rxq[queue].rb_stts_dma; in iwl_trans_pcie_rxq_dma_data()
2400 data->ur_bd_cb = trans_pcie->rxq[queue].used_bd_dma; in iwl_trans_pcie_rxq_dma_data()
2408 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_wait_txq_empty() local
2418 if (!test_bit(txq_idx, trans_pcie->txqs.queue_used)) in iwl_trans_pcie_wait_txq_empty()
2422 txq = trans_pcie->txqs.txq[txq_idx]; in iwl_trans_pcie_wait_txq_empty()
2470 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_wait_txqs_empty() local
2479 if (cnt == trans_pcie->txqs.cmd.q_id) in iwl_trans_pcie_wait_txqs_empty()
2481 if (!test_bit(cnt, trans_pcie->txqs.queue_used)) in iwl_trans_pcie_wait_txqs_empty()
2497 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_set_bits_mask() local
2499 spin_lock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_set_bits_mask()
2501 spin_unlock_bh(&trans_pcie->reg_lock); in iwl_trans_pcie_set_bits_mask()
2654 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_tx_queue_seq_show() local
2655 struct iwl_txq *txq = trans_pcie->txqs.txq[state->pos]; in iwl_dbgfs_tx_queue_seq_show()
2659 !!test_bit(state->pos, trans_pcie->txqs.queue_used), in iwl_dbgfs_tx_queue_seq_show()
2660 !!test_bit(state->pos, trans_pcie->txqs.queue_stopped)); in iwl_dbgfs_tx_queue_seq_show()
2670 if (state->pos == trans_pcie->txqs.cmd.q_id) in iwl_dbgfs_tx_queue_seq_show()
2703 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rx_queue_read() local
2710 if (!trans_pcie->rxq) in iwl_dbgfs_rx_queue_read()
2718 struct iwl_rxq *rxq = &trans_pcie->rxq[i]; in iwl_dbgfs_rx_queue_read()
2752 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_read() local
2753 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_read()
2810 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_interrupt_write() local
2811 struct isr_statistics *isr_stats = &trans_pcie->isr_stats; in iwl_dbgfs_interrupt_write()
2858 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_read() local
2863 trans_pcie->debug_rfkill, in iwl_dbgfs_rfkill_read()
2875 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rfkill_write() local
2882 if (new_value == trans_pcie->debug_rfkill) in iwl_dbgfs_rfkill_write()
2885 trans_pcie->debug_rfkill, new_value); in iwl_dbgfs_rfkill_write()
2886 trans_pcie->debug_rfkill = new_value; in iwl_dbgfs_rfkill_write()
2896 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_monitor_data_open() local
2904 if (trans_pcie->fw_mon_data.state != IWL_FW_MON_DBGFS_STATE_CLOSED) in iwl_dbgfs_monitor_data_open()
2907 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_OPEN; in iwl_dbgfs_monitor_data_open()
2914 struct iwl_trans_pcie *trans_pcie = in iwl_dbgfs_monitor_data_release() local
2917 if (trans_pcie->fw_mon_data.state == IWL_FW_MON_DBGFS_STATE_OPEN) in iwl_dbgfs_monitor_data_release()
2918 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_CLOSED; in iwl_dbgfs_monitor_data_release()
2945 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_monitor_data_read() local
2947 struct cont_rec *data = &trans_pcie->fw_mon_data; in iwl_dbgfs_monitor_data_read()
3028 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_dbgfs_rf_read() local
3030 if (!trans_pcie->rf_name[0]) in iwl_dbgfs_rf_read()
3034 trans_pcie->rf_name, in iwl_dbgfs_rf_read()
3035 strlen(trans_pcie->rf_name)); in iwl_dbgfs_rf_read()
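The rf debugfs handler leans on simple_read_from_buffer(), the stock VFS helper that copies from a kernel buffer to userspace while honouring *ppos and count, returning the number of bytes copied. A hedged minimal .read implementation using it (buffer contents illustrative):

    #include <linux/fs.h>
    #include <linux/string.h>

    static const char example_rf_name[] = "example-rf\n";

    /* Sketch: a debugfs .read handler; the helper does all the *ppos
     * bookkeeping and handles short reads for us. */
    static ssize_t example_rf_read(struct file *file, char __user *ubuf,
                                   size_t count, loff_t *ppos)
    {
            return simple_read_from_buffer(ubuf, count, ppos,
                                           example_rf_name,
                                           strlen(example_rf_name));
    }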
3076 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_debugfs_cleanup() local
3077 struct cont_rec *data = &trans_pcie->fw_mon_data; in iwl_trans_pcie_debugfs_cleanup()
3087 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_get_cmdlen() local
3091 for (i = 0; i < trans_pcie->txqs.tfd.max_tbs; i++) in iwl_trans_pcie_get_cmdlen()
3101 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_rbs() local
3102 int max_len = trans_pcie->rx_buf_bytes; in iwl_trans_pcie_dump_rbs()
3104 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_rbs()
3357 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_dump_data() local
3359 struct iwl_txq *cmdq = trans_pcie->txqs.txq[trans_pcie->txqs.cmd.q_id]; in iwl_trans_pcie_dump_data()
3402 struct iwl_rxq *rxq = &trans_pcie->rxq[0]; in iwl_trans_pcie_dump_data()
3408 (PAGE_SIZE << trans_pcie->rx_page_order)); in iwl_trans_pcie_dump_data()
3426 u16 tfd_size = trans_pcie->txqs.tfd.size; in iwl_trans_pcie_dump_data()
3513 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_sync_nmi() local
3515 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_sync_nmi()
3533 struct iwl_trans_pcie *trans_pcie, **priv; in iwl_trans_pcie_alloc() local
3556 trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_alloc()
3559 trans_pcie->txqs.tfd.addr_size = 64; in iwl_trans_pcie_alloc()
3560 trans_pcie->txqs.tfd.max_tbs = IWL_TFH_NUM_TBS; in iwl_trans_pcie_alloc()
3561 trans_pcie->txqs.tfd.size = sizeof(struct iwl_tfh_tfd); in iwl_trans_pcie_alloc()
3563 trans_pcie->txqs.tfd.addr_size = 36; in iwl_trans_pcie_alloc()
3564 trans_pcie->txqs.tfd.max_tbs = IWL_NUM_OF_TBS; in iwl_trans_pcie_alloc()
3565 trans_pcie->txqs.tfd.size = sizeof(struct iwl_tfd); in iwl_trans_pcie_alloc()
3567 trans->max_skb_frags = IWL_TRANS_PCIE_MAX_FRAGS(trans_pcie); in iwl_trans_pcie_alloc()
3570 trans_pcie->txqs.cmd.wdg_timeout = IWL_DEF_WD_TIMEOUT; in iwl_trans_pcie_alloc()
3572 trans_pcie->txqs.tso_hdr_page = alloc_percpu(struct iwl_tso_hdr_page); in iwl_trans_pcie_alloc()
3573 if (!trans_pcie->txqs.tso_hdr_page) { in iwl_trans_pcie_alloc()
3579 trans_pcie->txqs.bc_tbl_size = in iwl_trans_pcie_alloc()
3582 trans_pcie->txqs.bc_tbl_size = in iwl_trans_pcie_alloc()
3585 trans_pcie->txqs.bc_tbl_size = sizeof(struct iwlagn_scd_bc_tbl); in iwl_trans_pcie_alloc()
3592 trans_pcie->txqs.bc_pool = in iwl_trans_pcie_alloc()
3594 trans_pcie->txqs.bc_tbl_size, in iwl_trans_pcie_alloc()
3596 if (!trans_pcie->txqs.bc_pool) { in iwl_trans_pcie_alloc()
3603 WARN_ON(trans_pcie->txqs.tfd.addr_size != in iwl_trans_pcie_alloc()
3609 trans_pcie->napi_dev = alloc_netdev_dummy(sizeof(struct iwl_trans_pcie *)); in iwl_trans_pcie_alloc()
3610 if (!trans_pcie->napi_dev) { in iwl_trans_pcie_alloc()
3615 priv = netdev_priv(trans_pcie->napi_dev); in iwl_trans_pcie_alloc()
3616 *priv = trans_pcie; in iwl_trans_pcie_alloc()
3618 trans_pcie->trans = trans; in iwl_trans_pcie_alloc()
3619 trans_pcie->opmode_down = true; in iwl_trans_pcie_alloc()
3620 spin_lock_init(&trans_pcie->irq_lock); in iwl_trans_pcie_alloc()
3621 spin_lock_init(&trans_pcie->reg_lock); in iwl_trans_pcie_alloc()
3622 spin_lock_init(&trans_pcie->alloc_page_lock); in iwl_trans_pcie_alloc()
3623 mutex_init(&trans_pcie->mutex); in iwl_trans_pcie_alloc()
3624 init_waitqueue_head(&trans_pcie->ucode_write_waitq); in iwl_trans_pcie_alloc()
3625 init_waitqueue_head(&trans_pcie->fw_reset_waitq); in iwl_trans_pcie_alloc()
3626 init_waitqueue_head(&trans_pcie->imr_waitq); in iwl_trans_pcie_alloc()
3628 trans_pcie->rba.alloc_wq = alloc_workqueue("rb_allocator", in iwl_trans_pcie_alloc()
3630 if (!trans_pcie->rba.alloc_wq) { in iwl_trans_pcie_alloc()
3634 INIT_WORK(&trans_pcie->rba.rx_alloc, iwl_pcie_rx_allocator_work); in iwl_trans_pcie_alloc()
3636 trans_pcie->debug_rfkill = -1; in iwl_trans_pcie_alloc()
3651 addr_size = trans_pcie->txqs.tfd.addr_size; in iwl_trans_pcie_alloc()
3675 trans_pcie->hw_base = table[0]; in iwl_trans_pcie_alloc()
3676 if (!trans_pcie->hw_base) { in iwl_trans_pcie_alloc()
3686 trans_pcie->pci_dev = pdev; in iwl_trans_pcie_alloc()
3714 init_waitqueue_head(&trans_pcie->sx_waitq); in iwl_trans_pcie_alloc()
3720 if (trans_pcie->msix_enabled) { in iwl_trans_pcie_alloc()
3721 ret = iwl_pcie_init_msix_handler(pdev, trans_pcie); in iwl_trans_pcie_alloc()
3740 trans_pcie->fw_mon_data.state = IWL_FW_MON_DBGFS_STATE_CLOSED; in iwl_trans_pcie_alloc()
3741 mutex_init(&trans_pcie->fw_mon_data.mutex); in iwl_trans_pcie_alloc()
3751 destroy_workqueue(trans_pcie->rba.alloc_wq); in iwl_trans_pcie_alloc()
3753 free_netdev(trans_pcie->napi_dev); in iwl_trans_pcie_alloc()
3755 free_percpu(trans_pcie->txqs.tso_hdr_page); in iwl_trans_pcie_alloc()
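The failure labels at lines 3750-3755 are the kernel's canonical goto unwind: each acquisition gets a label, and any later failure jumps to the label that releases everything obtained so far, strictly in reverse order (workqueue, then the dummy netdev, then the per-CPU pages, mirroring the allocations earlier in the function). A compact sketch of the shape with placeholder resources:

    #include <linux/slab.h>
    #include <linux/errno.h>

    struct example_ctx {
            void *a, *b, *c;
    };

    static int example_alloc(struct example_ctx *ctx)
    {
            ctx->a = kzalloc(64, GFP_KERNEL);
            if (!ctx->a)
                    return -ENOMEM;         /* nothing to unwind yet */

            ctx->b = kzalloc(64, GFP_KERNEL);
            if (!ctx->b)
                    goto out_free_a;

            ctx->c = kzalloc(64, GFP_KERNEL);
            if (!ctx->c)
                    goto out_free_b;

            return 0;

            /* unwind in reverse order of acquisition */
    out_free_b:
            kfree(ctx->b);
    out_free_a:
            kfree(ctx->a);
            return -ENOMEM;
    }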
3782 struct iwl_trans_pcie *trans_pcie = IWL_TRANS_GET_PCIE_TRANS(trans); in iwl_trans_pcie_copy_imr() local
3785 trans_pcie->imr_status = IMR_D2S_REQUESTED; in iwl_trans_pcie_copy_imr()
3787 ret = wait_event_timeout(trans_pcie->imr_waitq, in iwl_trans_pcie_copy_imr()
3788 trans_pcie->imr_status != in iwl_trans_pcie_copy_imr()
3790 if (!ret || trans_pcie->imr_status == IMR_D2S_ERROR) { in iwl_trans_pcie_copy_imr()
3795 trans_pcie->imr_status = IMR_D2S_IDLE; in iwl_trans_pcie_copy_imr()