Lines Matching refs:vport

This is a cross-reference listing for the identifier vport: each entry gives the source line number, the matching line of code, the enclosing function, and how vport is used there (function argument or local variable). The function names suggest the matches come from the Intel idpf Ethernet driver's TX/RX queue and interrupt code in the Linux kernel.

171 static void idpf_tx_desc_rel_all(struct idpf_vport *vport)  in idpf_tx_desc_rel_all()  argument
175 if (!vport->txq_grps) in idpf_tx_desc_rel_all()
178 for (i = 0; i < vport->num_txq_grp; i++) { in idpf_tx_desc_rel_all()
179 struct idpf_txq_group *txq_grp = &vport->txq_grps[i]; in idpf_tx_desc_rel_all()
184 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_tx_desc_rel_all()
242 static int idpf_tx_desc_alloc(const struct idpf_vport *vport, in idpf_tx_desc_alloc() argument
284 static int idpf_compl_desc_alloc(const struct idpf_vport *vport, in idpf_compl_desc_alloc() argument
308 static int idpf_tx_desc_alloc_all(struct idpf_vport *vport) in idpf_tx_desc_alloc_all() argument
316 for (i = 0; i < vport->num_txq_grp; i++) { in idpf_tx_desc_alloc_all()
317 for (j = 0; j < vport->txq_grps[i].num_txq; j++) { in idpf_tx_desc_alloc_all()
318 struct idpf_tx_queue *txq = vport->txq_grps[i].txqs[j]; in idpf_tx_desc_alloc_all()
322 err = idpf_tx_desc_alloc(vport, txq); in idpf_tx_desc_alloc_all()
324 pci_err(vport->adapter->pdev, in idpf_tx_desc_alloc_all()
330 if (!idpf_is_queue_model_split(vport->txq_model)) in idpf_tx_desc_alloc_all()
359 if (!idpf_is_queue_model_split(vport->txq_model)) in idpf_tx_desc_alloc_all()
363 err = idpf_compl_desc_alloc(vport, vport->txq_grps[i].complq); in idpf_tx_desc_alloc_all()
365 pci_err(vport->adapter->pdev, in idpf_tx_desc_alloc_all()
374 idpf_tx_desc_rel_all(vport); in idpf_tx_desc_alloc_all()
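
The matches at source lines 308-374 outline idpf_tx_desc_alloc_all(): it walks every TX queue group, allocates a descriptor ring for each queue, allocates the group's completion ring when the split queue model is active, and on any failure unwinds through idpf_tx_desc_rel_all() (source line 171), which NULL-checks vport->txq_grps first so a partial setup is safe to release. A minimal sketch of that shape (error logging and fields beyond those shown above are elided or assumed):

    /* Sketch of the alloc-all/unwind pattern traced by the matches above. */
    static int idpf_tx_desc_alloc_all(struct idpf_vport *vport)
    {
        int err, i, j;

        for (i = 0; i < vport->num_txq_grp; i++) {
            for (j = 0; j < vport->txq_grps[i].num_txq; j++) {
                err = idpf_tx_desc_alloc(vport, vport->txq_grps[i].txqs[j]);
                if (err)
                    goto err_out;
            }

            if (!idpf_is_queue_model_split(vport->txq_model))
                continue;   /* no completion queue in the single-queue model */

            err = idpf_compl_desc_alloc(vport, vport->txq_grps[i].complq);
            if (err)
                goto err_out;
        }

        return 0;

    err_out:
        idpf_tx_desc_rel_all(vport);    /* releases whatever was allocated so far */
        return err;
    }
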
524 static void idpf_rx_desc_rel_all(struct idpf_vport *vport) in idpf_rx_desc_rel_all() argument
526 struct device *dev = &vport->adapter->pdev->dev; in idpf_rx_desc_rel_all()
531 if (!vport->rxq_grps) in idpf_rx_desc_rel_all()
534 for (i = 0; i < vport->num_rxq_grp; i++) { in idpf_rx_desc_rel_all()
535 rx_qgrp = &vport->rxq_grps[i]; in idpf_rx_desc_rel_all()
537 if (!idpf_is_queue_model_split(vport->rxq_model)) { in idpf_rx_desc_rel_all()
552 for (j = 0; j < vport->num_bufqs_per_qgrp; j++) { in idpf_rx_desc_rel_all()
809 int idpf_rx_bufs_init_all(struct idpf_vport *vport) in idpf_rx_bufs_init_all() argument
811 bool split = idpf_is_queue_model_split(vport->rxq_model); in idpf_rx_bufs_init_all()
814 for (i = 0; i < vport->num_rxq_grp; i++) { in idpf_rx_bufs_init_all()
815 struct idpf_rxq_group *rx_qgrp = &vport->rxq_grps[i]; in idpf_rx_bufs_init_all()
835 for (j = 0; j < vport->num_bufqs_per_qgrp; j++) { in idpf_rx_bufs_init_all()
862 static int idpf_rx_desc_alloc(const struct idpf_vport *vport, in idpf_rx_desc_alloc() argument
865 struct device *dev = &vport->adapter->pdev->dev; in idpf_rx_desc_alloc()
894 static int idpf_bufq_desc_alloc(const struct idpf_vport *vport, in idpf_bufq_desc_alloc() argument
897 struct device *dev = &vport->adapter->pdev->dev; in idpf_bufq_desc_alloc()
921 static int idpf_rx_desc_alloc_all(struct idpf_vport *vport) in idpf_rx_desc_alloc_all() argument
927 for (i = 0; i < vport->num_rxq_grp; i++) { in idpf_rx_desc_alloc_all()
928 rx_qgrp = &vport->rxq_grps[i]; in idpf_rx_desc_alloc_all()
929 if (idpf_is_queue_model_split(vport->rxq_model)) in idpf_rx_desc_alloc_all()
937 if (idpf_is_queue_model_split(vport->rxq_model)) in idpf_rx_desc_alloc_all()
942 err = idpf_rx_desc_alloc(vport, q); in idpf_rx_desc_alloc_all()
944 pci_err(vport->adapter->pdev, in idpf_rx_desc_alloc_all()
951 if (!idpf_is_queue_model_split(vport->rxq_model)) in idpf_rx_desc_alloc_all()
954 for (j = 0; j < vport->num_bufqs_per_qgrp; j++) { in idpf_rx_desc_alloc_all()
959 err = idpf_bufq_desc_alloc(vport, q); in idpf_rx_desc_alloc_all()
961 pci_err(vport->adapter->pdev, in idpf_rx_desc_alloc_all()
972 idpf_rx_desc_rel_all(vport); in idpf_rx_desc_alloc_all()
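
The RX path at source lines 921-972 mirrors this, with one extra branch on vport->rxq_model: in the split model each group carries RX queues plus vport->num_bufqs_per_qgrp buffer queues, while the single-queue model skips buffer-queue allocation entirely (the early continue at source line 951). A condensed sketch of the loop body (queue lookup details elided):

    /* Sketch of the RX alloc loop implied by the matches above. */
    for (i = 0; i < vport->num_rxq_grp; i++) {
        /* ... idpf_rx_desc_alloc(vport, q) for each RX queue in group i ... */

        if (!idpf_is_queue_model_split(vport->rxq_model))
            continue;   /* single-queue model: no separate buffer queues */

        for (j = 0; j < vport->num_bufqs_per_qgrp; j++) {
            /* ... idpf_bufq_desc_alloc(vport, q) for each buffer queue ... */
        }
    }
    /* on any failure: idpf_rx_desc_rel_all(vport), mirroring the TX unwind */
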
981 static void idpf_txq_group_rel(struct idpf_vport *vport) in idpf_txq_group_rel() argument
986 if (!vport->txq_grps) in idpf_txq_group_rel()
989 split = idpf_is_queue_model_split(vport->txq_model); in idpf_txq_group_rel()
990 flow_sch_en = !idpf_is_cap_ena(vport->adapter, IDPF_OTHER_CAPS, in idpf_txq_group_rel()
993 for (i = 0; i < vport->num_txq_grp; i++) { in idpf_txq_group_rel()
994 struct idpf_txq_group *txq_grp = &vport->txq_grps[i]; in idpf_txq_group_rel()
1010 kfree(vport->txq_grps); in idpf_txq_group_rel()
1011 vport->txq_grps = NULL; in idpf_txq_group_rel()
1022 for (i = 0; i < rx_qgrp->vport->num_bufqs_per_qgrp; i++) { in idpf_rxq_sw_queue_rel()
1038 static void idpf_rxq_group_rel(struct idpf_vport *vport) in idpf_rxq_group_rel() argument
1042 if (!vport->rxq_grps) in idpf_rxq_group_rel()
1045 for (i = 0; i < vport->num_rxq_grp; i++) { in idpf_rxq_group_rel()
1046 struct idpf_rxq_group *rx_qgrp = &vport->rxq_grps[i]; in idpf_rxq_group_rel()
1050 if (idpf_is_queue_model_split(vport->rxq_model)) { in idpf_rxq_group_rel()
1068 kfree(vport->rxq_grps); in idpf_rxq_group_rel()
1069 vport->rxq_grps = NULL; in idpf_rxq_group_rel()
1076 static void idpf_vport_queue_grp_rel_all(struct idpf_vport *vport) in idpf_vport_queue_grp_rel_all() argument
1078 idpf_txq_group_rel(vport); in idpf_vport_queue_grp_rel_all()
1079 idpf_rxq_group_rel(vport); in idpf_vport_queue_grp_rel_all()
1088 void idpf_vport_queues_rel(struct idpf_vport *vport) in idpf_vport_queues_rel() argument
1090 idpf_tx_desc_rel_all(vport); in idpf_vport_queues_rel()
1091 idpf_rx_desc_rel_all(vport); in idpf_vport_queues_rel()
1092 idpf_vport_queue_grp_rel_all(vport); in idpf_vport_queues_rel()
1094 kfree(vport->txqs); in idpf_vport_queues_rel()
1095 vport->txqs = NULL; in idpf_vport_queues_rel()
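
idpf_vport_queues_rel() (source line 1088) is the top-level teardown and, as the matches show, runs in the reverse order of allocation: descriptor rings first, then the queue-group structures, then the flat vport->txqs lookup array, with the pointer NULLed so a second release is harmless. This follows almost verbatim from the lines above:

    /* Teardown ordering traced by sources 1090-1095. */
    void idpf_vport_queues_rel(struct idpf_vport *vport)
    {
        idpf_tx_desc_rel_all(vport);         /* TX/completion descriptor rings */
        idpf_rx_desc_rel_all(vport);         /* RX/buffer descriptor rings */
        idpf_vport_queue_grp_rel_all(vport); /* the group structures themselves */

        kfree(vport->txqs);                  /* flat fast-path lookup array */
        vport->txqs = NULL;                  /* guard against double release */
    }
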
1108 static int idpf_vport_init_fast_path_txqs(struct idpf_vport *vport) in idpf_vport_init_fast_path_txqs() argument
1112 vport->txqs = kcalloc(vport->num_txq, sizeof(*vport->txqs), in idpf_vport_init_fast_path_txqs()
1115 if (!vport->txqs) in idpf_vport_init_fast_path_txqs()
1118 for (i = 0; i < vport->num_txq_grp; i++) { in idpf_vport_init_fast_path_txqs()
1119 struct idpf_txq_group *tx_grp = &vport->txq_grps[i]; in idpf_vport_init_fast_path_txqs()
1122 vport->txqs[k] = tx_grp->txqs[j]; in idpf_vport_init_fast_path_txqs()
1123 vport->txqs[k]->idx = k; in idpf_vport_init_fast_path_txqs()
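
idpf_vport_init_fast_path_txqs() (source line 1108) flattens the two-level group/queue layout into one vport->txqs[] array so the transmit hot path can index a queue directly by its skb queue mapping (see idpf_tx_start() at source line 2837) instead of walking groups. A sketch of the flattening, grounded in sources 1112-1123 (the running index k is implied by the matches):

    /* Sketch: flatten the txq groups into one array indexed by queue id. */
    int i, j, k;

    vport->txqs = kcalloc(vport->num_txq, sizeof(*vport->txqs), GFP_KERNEL);
    if (!vport->txqs)
        return -ENOMEM;

    for (i = 0, k = 0; i < vport->num_txq_grp; i++) {
        struct idpf_txq_group *tx_grp = &vport->txq_grps[i];

        for (j = 0; j < tx_grp->num_txq; j++, k++) {
            vport->txqs[k] = tx_grp->txqs[j];
            vport->txqs[k]->idx = k;    /* queue remembers its flat index */
        }
    }
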
1135 void idpf_vport_init_num_qs(struct idpf_vport *vport, in idpf_vport_init_num_qs() argument
1139 u16 idx = vport->idx; in idpf_vport_init_num_qs()
1141 config_data = &vport->adapter->vport_config[idx]->user_config; in idpf_vport_init_num_qs()
1142 vport->num_txq = le16_to_cpu(vport_msg->num_tx_q); in idpf_vport_init_num_qs()
1143 vport->num_rxq = le16_to_cpu(vport_msg->num_rx_q); in idpf_vport_init_num_qs()
1152 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_vport_init_num_qs()
1153 vport->num_complq = le16_to_cpu(vport_msg->num_tx_complq); in idpf_vport_init_num_qs()
1154 if (idpf_is_queue_model_split(vport->rxq_model)) in idpf_vport_init_num_qs()
1155 vport->num_bufq = le16_to_cpu(vport_msg->num_rx_bufq); in idpf_vport_init_num_qs()
1158 if (!idpf_is_queue_model_split(vport->rxq_model)) { in idpf_vport_init_num_qs()
1159 vport->num_bufqs_per_qgrp = 0; in idpf_vport_init_num_qs()
1164 vport->num_bufqs_per_qgrp = IDPF_MAX_BUFQS_PER_RXQ_GRP; in idpf_vport_init_num_qs()
1171 void idpf_vport_calc_num_q_desc(struct idpf_vport *vport) in idpf_vport_calc_num_q_desc() argument
1174 int num_bufqs = vport->num_bufqs_per_qgrp; in idpf_vport_calc_num_q_desc()
1176 u16 idx = vport->idx; in idpf_vport_calc_num_q_desc()
1179 config_data = &vport->adapter->vport_config[idx]->user_config; in idpf_vport_calc_num_q_desc()
1183 vport->complq_desc_count = 0; in idpf_vport_calc_num_q_desc()
1185 vport->txq_desc_count = num_req_txq_desc; in idpf_vport_calc_num_q_desc()
1186 if (idpf_is_queue_model_split(vport->txq_model)) { in idpf_vport_calc_num_q_desc()
1187 vport->complq_desc_count = num_req_txq_desc; in idpf_vport_calc_num_q_desc()
1188 if (vport->complq_desc_count < IDPF_MIN_TXQ_COMPLQ_DESC) in idpf_vport_calc_num_q_desc()
1189 vport->complq_desc_count = in idpf_vport_calc_num_q_desc()
1193 vport->txq_desc_count = IDPF_DFLT_TX_Q_DESC_COUNT; in idpf_vport_calc_num_q_desc()
1194 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_vport_calc_num_q_desc()
1195 vport->complq_desc_count = in idpf_vport_calc_num_q_desc()
1200 vport->rxq_desc_count = num_req_rxq_desc; in idpf_vport_calc_num_q_desc()
1202 vport->rxq_desc_count = IDPF_DFLT_RX_Q_DESC_COUNT; in idpf_vport_calc_num_q_desc()
1205 if (!vport->bufq_desc_count[i]) in idpf_vport_calc_num_q_desc()
1206 vport->bufq_desc_count[i] = in idpf_vport_calc_num_q_desc()
1207 IDPF_RX_BUFQ_DESC_COUNT(vport->rxq_desc_count, in idpf_vport_calc_num_q_desc()
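
idpf_vport_calc_num_q_desc() (source line 1171) applies a simple precedence: ring sizes requested via user_config win, defaults apply otherwise; in the split TX model the completion ring tracks the TX ring size but is clamped up to IDPF_MIN_TXQ_COMPLQ_DESC, and any buffer-queue ring left at zero is derived from the RX ring size through IDPF_RX_BUFQ_DESC_COUNT(). A condensed sketch of those rules (max_t() stands in for the explicit clamp at sources 1188-1189):

    /* Sketch of the descriptor-count rules traced above. */
    if (num_req_txq_desc) {
        vport->txq_desc_count = num_req_txq_desc;
        if (idpf_is_queue_model_split(vport->txq_model))
            vport->complq_desc_count = max_t(u32, num_req_txq_desc,
                                             IDPF_MIN_TXQ_COMPLQ_DESC);
    } else {
        vport->txq_desc_count = IDPF_DFLT_TX_Q_DESC_COUNT;
        /* ... default completion-ring count in the split model ... */
    }

    vport->rxq_desc_count = num_req_rxq_desc ?: IDPF_DFLT_RX_Q_DESC_COUNT;

    for (i = 0; i < num_bufqs; i++)
        if (!vport->bufq_desc_count[i])
            vport->bufq_desc_count[i] =
                IDPF_RX_BUFQ_DESC_COUNT(vport->rxq_desc_count, num_bufqs);
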
1285 void idpf_vport_calc_num_q_groups(struct idpf_vport *vport) in idpf_vport_calc_num_q_groups() argument
1287 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_vport_calc_num_q_groups()
1288 vport->num_txq_grp = vport->num_txq; in idpf_vport_calc_num_q_groups()
1290 vport->num_txq_grp = IDPF_DFLT_SINGLEQ_TX_Q_GROUPS; in idpf_vport_calc_num_q_groups()
1292 if (idpf_is_queue_model_split(vport->rxq_model)) in idpf_vport_calc_num_q_groups()
1293 vport->num_rxq_grp = vport->num_rxq; in idpf_vport_calc_num_q_groups()
1295 vport->num_rxq_grp = IDPF_DFLT_SINGLEQ_RX_Q_GROUPS; in idpf_vport_calc_num_q_groups()
1304 static void idpf_vport_calc_numq_per_grp(struct idpf_vport *vport, in idpf_vport_calc_numq_per_grp() argument
1307 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_vport_calc_numq_per_grp()
1310 *num_txq = vport->num_txq; in idpf_vport_calc_numq_per_grp()
1312 if (idpf_is_queue_model_split(vport->rxq_model)) in idpf_vport_calc_numq_per_grp()
1315 *num_rxq = vport->num_rxq; in idpf_vport_calc_numq_per_grp()
1324 static void idpf_rxq_set_descids(const struct idpf_vport *vport, in idpf_rxq_set_descids() argument
1327 if (idpf_is_queue_model_split(vport->rxq_model)) { in idpf_rxq_set_descids()
1330 if (vport->base_rxd) in idpf_rxq_set_descids()
1344 static int idpf_txq_group_alloc(struct idpf_vport *vport, u16 num_txq) in idpf_txq_group_alloc() argument
1349 vport->txq_grps = kcalloc(vport->num_txq_grp, in idpf_txq_group_alloc()
1350 sizeof(*vport->txq_grps), GFP_KERNEL); in idpf_txq_group_alloc()
1351 if (!vport->txq_grps) in idpf_txq_group_alloc()
1354 split = idpf_is_queue_model_split(vport->txq_model); in idpf_txq_group_alloc()
1355 flow_sch_en = !idpf_is_cap_ena(vport->adapter, IDPF_OTHER_CAPS, in idpf_txq_group_alloc()
1358 for (i = 0; i < vport->num_txq_grp; i++) { in idpf_txq_group_alloc()
1359 struct idpf_txq_group *tx_qgrp = &vport->txq_grps[i]; in idpf_txq_group_alloc()
1360 struct idpf_adapter *adapter = vport->adapter; in idpf_txq_group_alloc()
1364 tx_qgrp->vport = vport; in idpf_txq_group_alloc()
1387 q->desc_count = vport->txq_desc_count; in idpf_txq_group_alloc()
1390 q->netdev = vport->netdev; in idpf_txq_group_alloc()
1394 q->clean_budget = vport->compln_clean_budget; in idpf_txq_group_alloc()
1396 vport->crc_enable); in idpf_txq_group_alloc()
1419 tx_qgrp->complq->desc_count = vport->complq_desc_count; in idpf_txq_group_alloc()
1421 tx_qgrp->complq->netdev = vport->netdev; in idpf_txq_group_alloc()
1422 tx_qgrp->complq->clean_budget = vport->compln_clean_budget; in idpf_txq_group_alloc()
1431 idpf_txq_group_rel(vport); in idpf_txq_group_alloc()
1443 static int idpf_rxq_group_alloc(struct idpf_vport *vport, u16 num_rxq) in idpf_rxq_group_alloc() argument
1448 vport->rxq_grps = kcalloc(vport->num_rxq_grp, in idpf_rxq_group_alloc()
1450 if (!vport->rxq_grps) in idpf_rxq_group_alloc()
1453 hs = idpf_vport_get_hsplit(vport) == ETHTOOL_TCP_DATA_SPLIT_ENABLED; in idpf_rxq_group_alloc()
1455 for (i = 0; i < vport->num_rxq_grp; i++) { in idpf_rxq_group_alloc()
1456 struct idpf_rxq_group *rx_qgrp = &vport->rxq_grps[i]; in idpf_rxq_group_alloc()
1459 rx_qgrp->vport = vport; in idpf_rxq_group_alloc()
1460 if (!idpf_is_queue_model_split(vport->rxq_model)) { in idpf_rxq_group_alloc()
1485 rx_qgrp->splitq.bufq_sets = kcalloc(vport->num_bufqs_per_qgrp, in idpf_rxq_group_alloc()
1493 for (j = 0; j < vport->num_bufqs_per_qgrp; j++) { in idpf_rxq_group_alloc()
1500 q->desc_count = vport->bufq_desc_count[j]; in idpf_rxq_group_alloc()
1517 vport->bufq_desc_count[j]; in idpf_rxq_group_alloc()
1534 if (!idpf_is_queue_model_split(vport->rxq_model)) { in idpf_rxq_group_alloc()
1541 if (vport->num_bufqs_per_qgrp > IDPF_SINGLE_BUFQ_PER_RXQ_GRP) in idpf_rxq_group_alloc()
1548 q->desc_count = vport->rxq_desc_count; in idpf_rxq_group_alloc()
1549 q->rx_ptype_lkup = vport->rx_ptype_lkup; in idpf_rxq_group_alloc()
1550 q->netdev = vport->netdev; in idpf_rxq_group_alloc()
1554 q->rx_max_pkt_size = vport->netdev->mtu + in idpf_rxq_group_alloc()
1556 idpf_rxq_set_descids(vport, q); in idpf_rxq_group_alloc()
1562 idpf_rxq_group_rel(vport); in idpf_rxq_group_alloc()
1573 static int idpf_vport_queue_grp_alloc_all(struct idpf_vport *vport) in idpf_vport_queue_grp_alloc_all() argument
1578 idpf_vport_calc_numq_per_grp(vport, &num_txq, &num_rxq); in idpf_vport_queue_grp_alloc_all()
1580 err = idpf_txq_group_alloc(vport, num_txq); in idpf_vport_queue_grp_alloc_all()
1584 err = idpf_rxq_group_alloc(vport, num_rxq); in idpf_vport_queue_grp_alloc_all()
1591 idpf_vport_queue_grp_rel_all(vport); in idpf_vport_queue_grp_alloc_all()
1603 int idpf_vport_queues_alloc(struct idpf_vport *vport) in idpf_vport_queues_alloc() argument
1607 err = idpf_vport_queue_grp_alloc_all(vport); in idpf_vport_queues_alloc()
1611 err = idpf_tx_desc_alloc_all(vport); in idpf_vport_queues_alloc()
1615 err = idpf_rx_desc_alloc_all(vport); in idpf_vport_queues_alloc()
1619 err = idpf_vport_init_fast_path_txqs(vport); in idpf_vport_queues_alloc()
1626 idpf_vport_queues_rel(vport); in idpf_vport_queues_alloc()
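
idpf_vport_queues_alloc() (source line 1603) composes the pieces in dependency order: queue groups first, then TX and RX descriptor rings, then the fast-path txqs array. Every failure funnels into idpf_vport_queues_rel(), which is safe to call on a partial setup because each release routine NULL-checks its state (sources 175, 531, 986, 1042). Sketch of the flow:

    /* Sketch of the top-level allocation sequence traced above. */
    int idpf_vport_queues_alloc(struct idpf_vport *vport)
    {
        int err;

        err = idpf_vport_queue_grp_alloc_all(vport);
        if (err)
            goto err_out;

        err = idpf_tx_desc_alloc_all(vport);
        if (err)
            goto err_out;

        err = idpf_rx_desc_alloc_all(vport);
        if (err)
            goto err_out;

        err = idpf_vport_init_fast_path_txqs(vport);
        if (err)
            goto err_out;

        return 0;

    err_out:
        idpf_vport_queues_rel(vport);   /* NULL checks make a partial unwind safe */
        return err;
    }
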
1638 struct idpf_vport *vport = priv->vport; in idpf_tx_handle_sw_marker() local
1645 for (i = 0; i < vport->num_txq; i++) in idpf_tx_handle_sw_marker()
1649 if (idpf_queue_has(SW_MARKER, vport->txqs[i])) in idpf_tx_handle_sw_marker()
1653 set_bit(IDPF_VPORT_SW_MARKER, vport->flags); in idpf_tx_handle_sw_marker()
1654 wake_up(&vport->sw_marker_wq); in idpf_tx_handle_sw_marker()
2837 struct idpf_vport *vport = idpf_netdev_to_vport(netdev); in idpf_tx_start() local
2840 if (unlikely(skb_get_queue_mapping(skb) >= vport->num_txq)) { in idpf_tx_start()
2846 tx_q = vport->txqs[skb_get_queue_mapping(skb)]; in idpf_tx_start()
2857 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_tx_start()
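
idpf_tx_start() (source line 2837) is where the flat txqs array pays off: bounds-check the skb's queue mapping against num_txq, index the queue directly, then dispatch on the queue model. A sketch of that dispatch; the drop behavior and the two frame routines are assumptions, not shown in the matches:

    /* Sketch of the transmit dispatch traced above (simplified). */
    if (unlikely(skb_get_queue_mapping(skb) >= vport->num_txq)) {
        dev_kfree_skb_any(skb);         /* assumption: drop on a bad mapping */
        return NETDEV_TX_OK;
    }

    tx_q = vport->txqs[skb_get_queue_mapping(skb)];

    if (idpf_is_queue_model_split(vport->txq_model))
        return idpf_tx_splitq_frame(skb, tx_q); /* assumed splitq routine */

    return idpf_tx_singleq_frame(skb, tx_q);    /* assumed singleq routine */
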
3517 static void idpf_vport_intr_napi_del_all(struct idpf_vport *vport) in idpf_vport_intr_napi_del_all() argument
3521 for (v_idx = 0; v_idx < vport->num_q_vectors; v_idx++) in idpf_vport_intr_napi_del_all()
3522 netif_napi_del(&vport->q_vectors[v_idx].napi); in idpf_vport_intr_napi_del_all()
3529 static void idpf_vport_intr_napi_dis_all(struct idpf_vport *vport) in idpf_vport_intr_napi_dis_all() argument
3533 for (v_idx = 0; v_idx < vport->num_q_vectors; v_idx++) in idpf_vport_intr_napi_dis_all()
3534 napi_disable(&vport->q_vectors[v_idx].napi); in idpf_vport_intr_napi_dis_all()
3543 void idpf_vport_intr_rel(struct idpf_vport *vport) in idpf_vport_intr_rel() argument
3545 for (u32 v_idx = 0; v_idx < vport->num_q_vectors; v_idx++) { in idpf_vport_intr_rel()
3546 struct idpf_q_vector *q_vector = &vport->q_vectors[v_idx]; in idpf_vport_intr_rel()
3560 kfree(vport->q_vectors); in idpf_vport_intr_rel()
3561 vport->q_vectors = NULL; in idpf_vport_intr_rel()
3568 static void idpf_vport_intr_rel_irq(struct idpf_vport *vport) in idpf_vport_intr_rel_irq() argument
3570 struct idpf_adapter *adapter = vport->adapter; in idpf_vport_intr_rel_irq()
3573 for (vector = 0; vector < vport->num_q_vectors; vector++) { in idpf_vport_intr_rel_irq()
3574 struct idpf_q_vector *q_vector = &vport->q_vectors[vector]; in idpf_vport_intr_rel_irq()
3581 vidx = vport->q_vector_idxs[vector]; in idpf_vport_intr_rel_irq()
3594 static void idpf_vport_intr_dis_irq_all(struct idpf_vport *vport) in idpf_vport_intr_dis_irq_all() argument
3596 struct idpf_q_vector *q_vector = vport->q_vectors; in idpf_vport_intr_dis_irq_all()
3599 for (q_idx = 0; q_idx < vport->num_q_vectors; q_idx++) in idpf_vport_intr_dis_irq_all()
3729 static int idpf_vport_intr_req_irq(struct idpf_vport *vport) in idpf_vport_intr_req_irq() argument
3731 struct idpf_adapter *adapter = vport->adapter; in idpf_vport_intr_req_irq()
3736 if_name = netdev_name(vport->netdev); in idpf_vport_intr_req_irq()
3738 for (vector = 0; vector < vport->num_q_vectors; vector++) { in idpf_vport_intr_req_irq()
3739 struct idpf_q_vector *q_vector = &vport->q_vectors[vector]; in idpf_vport_intr_req_irq()
3742 vidx = vport->q_vector_idxs[vector]; in idpf_vport_intr_req_irq()
3760 netdev_err(vport->netdev, in idpf_vport_intr_req_irq()
3772 vidx = vport->q_vector_idxs[vector]; in idpf_vport_intr_req_irq()
3774 kfree(free_irq(irq_num, &vport->q_vectors[vector])); in idpf_vport_intr_req_irq()
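
idpf_vport_intr_req_irq() (source line 3729) requests one IRQ per q_vector, building each name from netdev_name(); on failure it walks back over the vectors already wired and releases them. The kfree(free_irq(...)) at source line 3774 is deliberate: free_irq() returns the devname string given to request_irq(), which this driver allocates dynamically, so it must be freed. A sketch with the IRQ-number lookup, handler, and name format as assumptions:

    /* Sketch of the request/unwind loop implied above; irq_num and
     * handler are placeholders resolved per vector (not shown in the
     * matches), and vector is a signed int so the unwind terminates.
     */
    for (vector = 0; vector < vport->num_q_vectors; vector++) {
        struct idpf_q_vector *q_vector = &vport->q_vectors[vector];
        u16 vidx = vport->q_vector_idxs[vector];
        char *name;

        /* assumption: name format; the driver derives it from if_name/vidx */
        name = kasprintf(GFP_KERNEL, "%s-TxRx-%u", if_name, vidx);

        err = request_irq(irq_num, handler, 0, name, q_vector);
        if (err)
            goto free_q_irqs;
    }

    return 0;

    free_q_irqs:
    while (--vector >= 0) {
        /* free_irq() returns the devname passed to request_irq();
         * it was kasprintf()-allocated above, hence the kfree().
         */
        kfree(free_irq(irq_num, &vport->q_vectors[vector]));
    }
    return err;
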
3804 static void idpf_vport_intr_ena_irq_all(struct idpf_vport *vport) in idpf_vport_intr_ena_irq_all() argument
3810 for (q_idx = 0; q_idx < vport->num_q_vectors; q_idx++) { in idpf_vport_intr_ena_irq_all()
3811 struct idpf_q_vector *qv = &vport->q_vectors[q_idx]; in idpf_vport_intr_ena_irq_all()
3816 itr = vport->tx_itr_profile[qv->tx_dim.profile_ix]; in idpf_vport_intr_ena_irq_all()
3824 itr = vport->rx_itr_profile[qv->rx_dim.profile_ix]; in idpf_vport_intr_ena_irq_all()
3839 void idpf_vport_intr_deinit(struct idpf_vport *vport) in idpf_vport_intr_deinit() argument
3841 idpf_vport_intr_dis_irq_all(vport); in idpf_vport_intr_deinit()
3842 idpf_vport_intr_napi_dis_all(vport); in idpf_vport_intr_deinit()
3843 idpf_vport_intr_napi_del_all(vport); in idpf_vport_intr_deinit()
3844 idpf_vport_intr_rel_irq(vport); in idpf_vport_intr_deinit()
3854 struct idpf_vport *vport; in idpf_tx_dim_work() local
3860 vport = q_vector->vport; in idpf_tx_dim_work()
3862 if (dim->profile_ix >= ARRAY_SIZE(vport->tx_itr_profile)) in idpf_tx_dim_work()
3863 dim->profile_ix = ARRAY_SIZE(vport->tx_itr_profile) - 1; in idpf_tx_dim_work()
3866 itr = vport->tx_itr_profile[dim->profile_ix]; in idpf_tx_dim_work()
3880 struct idpf_vport *vport; in idpf_rx_dim_work() local
3886 vport = q_vector->vport; in idpf_rx_dim_work()
3888 if (dim->profile_ix >= ARRAY_SIZE(vport->rx_itr_profile)) in idpf_rx_dim_work()
3889 dim->profile_ix = ARRAY_SIZE(vport->rx_itr_profile) - 1; in idpf_rx_dim_work()
3892 itr = vport->rx_itr_profile[dim->profile_ix]; in idpf_rx_dim_work()
3918 static void idpf_vport_intr_napi_ena_all(struct idpf_vport *vport) in idpf_vport_intr_napi_ena_all() argument
3922 for (q_idx = 0; q_idx < vport->num_q_vectors; q_idx++) { in idpf_vport_intr_napi_ena_all()
3923 struct idpf_q_vector *q_vector = &vport->q_vectors[q_idx]; in idpf_vport_intr_napi_ena_all()
4053 static void idpf_vport_intr_map_vector_to_qs(struct idpf_vport *vport) in idpf_vport_intr_map_vector_to_qs() argument
4055 bool split = idpf_is_queue_model_split(vport->rxq_model); in idpf_vport_intr_map_vector_to_qs()
4056 u16 num_txq_grp = vport->num_txq_grp; in idpf_vport_intr_map_vector_to_qs()
4061 for (i = 0, qv_idx = 0; i < vport->num_rxq_grp; i++) { in idpf_vport_intr_map_vector_to_qs()
4064 if (qv_idx >= vport->num_q_vectors) in idpf_vport_intr_map_vector_to_qs()
4067 rx_qgrp = &vport->rxq_grps[i]; in idpf_vport_intr_map_vector_to_qs()
4080 q->q_vector = &vport->q_vectors[qv_idx]; in idpf_vport_intr_map_vector_to_qs()
4090 for (u32 j = 0; j < vport->num_bufqs_per_qgrp; j++) { in idpf_vport_intr_map_vector_to_qs()
4094 bufq->q_vector = &vport->q_vectors[qv_idx]; in idpf_vport_intr_map_vector_to_qs()
4104 split = idpf_is_queue_model_split(vport->txq_model); in idpf_vport_intr_map_vector_to_qs()
4109 if (qv_idx >= vport->num_q_vectors) in idpf_vport_intr_map_vector_to_qs()
4112 tx_qgrp = &vport->txq_grps[i]; in idpf_vport_intr_map_vector_to_qs()
4119 q->q_vector = &vport->q_vectors[qv_idx]; in idpf_vport_intr_map_vector_to_qs()
4126 q->q_vector = &vport->q_vectors[qv_idx]; in idpf_vport_intr_map_vector_to_qs()
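
idpf_vport_intr_map_vector_to_qs() (source line 4053) spreads queues over the available q_vectors round-robin: it walks the RX groups first, pointing each RX queue (and, in the split model, each buffer queue) at a vector, then walks the TX groups the same way, resetting qv_idx to zero whenever it reaches num_q_vectors. The exact point where qv_idx advances is not visible in these matches; the sketch below advances it once per group:

    /* Sketch of the round-robin vector assignment traced above
     * (per-queue iteration elided; increment placement is assumed).
     */
    for (i = 0, qv_idx = 0; i < vport->num_rxq_grp; i++, qv_idx++) {
        if (qv_idx >= vport->num_q_vectors)
            qv_idx = 0;     /* wrap: more groups than vectors */

        /* for each rxq in group i:  q->q_vector = &vport->q_vectors[qv_idx]; */
        /* split model, each bufq:   bufq->q_vector = &vport->q_vectors[qv_idx]; */
    }
    /* the TX groups are then walked with the same wrap-around scheme */
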
4140 static int idpf_vport_intr_init_vec_idx(struct idpf_vport *vport) in idpf_vport_intr_init_vec_idx() argument
4142 struct idpf_adapter *adapter = vport->adapter; in idpf_vport_intr_init_vec_idx()
4149 for (i = 0; i < vport->num_q_vectors; i++) in idpf_vport_intr_init_vec_idx()
4150 vport->q_vectors[i].v_idx = vport->q_vector_idxs[i]; in idpf_vport_intr_init_vec_idx()
4162 for (i = 0; i < vport->num_q_vectors; i++) in idpf_vport_intr_init_vec_idx()
4163 vport->q_vectors[i].v_idx = vecids[vport->q_vector_idxs[i]]; in idpf_vport_intr_init_vec_idx()
4174 static void idpf_vport_intr_napi_add_all(struct idpf_vport *vport) in idpf_vport_intr_napi_add_all() argument
4179 if (idpf_is_queue_model_split(vport->txq_model)) in idpf_vport_intr_napi_add_all()
4184 for (v_idx = 0; v_idx < vport->num_q_vectors; v_idx++) { in idpf_vport_intr_napi_add_all()
4185 struct idpf_q_vector *q_vector = &vport->q_vectors[v_idx]; in idpf_vport_intr_napi_add_all()
4187 netif_napi_add(vport->netdev, &q_vector->napi, napi_poll); in idpf_vport_intr_napi_add_all()
4202 int idpf_vport_intr_alloc(struct idpf_vport *vport) in idpf_vport_intr_alloc() argument
4208 vport->q_vectors = kcalloc(vport->num_q_vectors, in idpf_vport_intr_alloc()
4210 if (!vport->q_vectors) in idpf_vport_intr_alloc()
4213 txqs_per_vector = DIV_ROUND_UP(vport->num_txq_grp, in idpf_vport_intr_alloc()
4214 vport->num_q_vectors); in idpf_vport_intr_alloc()
4215 rxqs_per_vector = DIV_ROUND_UP(vport->num_rxq_grp, in idpf_vport_intr_alloc()
4216 vport->num_q_vectors); in idpf_vport_intr_alloc()
4217 bufqs_per_vector = vport->num_bufqs_per_qgrp * in idpf_vport_intr_alloc()
4218 DIV_ROUND_UP(vport->num_rxq_grp, in idpf_vport_intr_alloc()
4219 vport->num_q_vectors); in idpf_vport_intr_alloc()
4220 complqs_per_vector = DIV_ROUND_UP(vport->num_txq_grp, in idpf_vport_intr_alloc()
4221 vport->num_q_vectors); in idpf_vport_intr_alloc()
4223 for (v_idx = 0; v_idx < vport->num_q_vectors; v_idx++) { in idpf_vport_intr_alloc()
4224 q_vector = &vport->q_vectors[v_idx]; in idpf_vport_intr_alloc()
4225 q_vector->vport = vport; in idpf_vport_intr_alloc()
4248 if (!idpf_is_queue_model_split(vport->rxq_model)) in idpf_vport_intr_alloc()
4267 idpf_vport_intr_rel(vport); in idpf_vport_intr_alloc()
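
idpf_vport_intr_alloc() (source line 4202) sizes the per-vector queue arrays with DIV_ROUND_UP() so capacity covers an uneven spread: with, say, 10 RX groups over 4 vectors, each vector gets ceil(10/4) = 3 RX slots, and buffer-queue slots scale that by num_bufqs_per_qgrp. The math, as traced by sources 4213-4221:

    /* Per-vector capacity: ceil(groups / vectors), per the matches above. */
    txqs_per_vector    = DIV_ROUND_UP(vport->num_txq_grp, vport->num_q_vectors);
    rxqs_per_vector    = DIV_ROUND_UP(vport->num_rxq_grp, vport->num_q_vectors);
    bufqs_per_vector   = vport->num_bufqs_per_qgrp *
                         DIV_ROUND_UP(vport->num_rxq_grp, vport->num_q_vectors);
    complqs_per_vector = DIV_ROUND_UP(vport->num_txq_grp, vport->num_q_vectors);
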
4278 int idpf_vport_intr_init(struct idpf_vport *vport) in idpf_vport_intr_init() argument
4282 err = idpf_vport_intr_init_vec_idx(vport); in idpf_vport_intr_init()
4286 idpf_vport_intr_map_vector_to_qs(vport); in idpf_vport_intr_init()
4287 idpf_vport_intr_napi_add_all(vport); in idpf_vport_intr_init()
4289 err = vport->adapter->dev_ops.reg_ops.intr_reg_init(vport); in idpf_vport_intr_init()
4293 err = idpf_vport_intr_req_irq(vport); in idpf_vport_intr_init()
4300 idpf_vport_intr_napi_del_all(vport); in idpf_vport_intr_init()
4305 void idpf_vport_intr_ena(struct idpf_vport *vport) in idpf_vport_intr_ena() argument
4307 idpf_vport_intr_napi_ena_all(vport); in idpf_vport_intr_ena()
4308 idpf_vport_intr_ena_irq_all(vport); in idpf_vport_intr_ena()
4317 int idpf_config_rss(struct idpf_vport *vport) in idpf_config_rss() argument
4321 err = idpf_send_get_set_rss_key_msg(vport, false); in idpf_config_rss()
4325 return idpf_send_get_set_rss_lut_msg(vport, false); in idpf_config_rss()
4332 static void idpf_fill_dflt_rss_lut(struct idpf_vport *vport) in idpf_fill_dflt_rss_lut() argument
4334 struct idpf_adapter *adapter = vport->adapter; in idpf_fill_dflt_rss_lut()
4335 u16 num_active_rxq = vport->num_rxq; in idpf_fill_dflt_rss_lut()
4339 rss_data = &adapter->vport_config[vport->idx]->user_config.rss_data; in idpf_fill_dflt_rss_lut()
4353 int idpf_init_rss(struct idpf_vport *vport) in idpf_init_rss() argument
4355 struct idpf_adapter *adapter = vport->adapter; in idpf_init_rss()
4359 rss_data = &adapter->vport_config[vport->idx]->user_config.rss_data; in idpf_init_rss()
4375 idpf_fill_dflt_rss_lut(vport); in idpf_init_rss()
4377 return idpf_config_rss(vport); in idpf_init_rss()
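
idpf_init_rss() (source line 4353) reads per-vport RSS state out of adapter->vport_config[idx]->user_config.rss_data, fills a default lookup table when none was configured, then calls idpf_config_rss() (source line 4317), which sends the key and LUT to the device via the two virtchnl messages (the false argument presumably selecting "set" rather than "get"). The default fill at idpf_fill_dflt_rss_lut() (source line 4332) spreads LUT entries round-robin over the active RX queues; a sketch, with the lut field names assumed from common RSS layouts:

    /* Sketch of the default-LUT fill implied by sources 4332-4339;
     * rss_lut/rss_lut_size are assumed field names.
     */
    for (i = 0; i < rss_data->rss_lut_size; i++)
        rss_data->rss_lut[i] = i % num_active_rxq;  /* round-robin spread */
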
4384 void idpf_deinit_rss(struct idpf_vport *vport) in idpf_deinit_rss() argument
4386 struct idpf_adapter *adapter = vport->adapter; in idpf_deinit_rss()
4389 rss_data = &adapter->vport_config[vport->idx]->user_config.rss_data; in idpf_deinit_rss()