Lines Matching refs:vmdq

22 struct ixgbe_ring_feature *vmdq = &adapter->ring_feature[RING_F_VMDQ]; in ixgbe_cache_ring_dcb_sriov() local
36 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
39 if ((reg_idx & ~vmdq->mask) >= tcs) { in ixgbe_cache_ring_dcb_sriov()
41 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
47 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
50 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
51 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
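
The hits above (source lines 22-51) come from ixgbe_cache_ring_dcb_sriov() in drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c, which assigns hardware queue register indices to the Rx/Tx rings when SR-IOV and DCB are enabled together. The arithmetic is built on the kernel's __ALIGN_MASK() macro: __ALIGN_MASK(1, ~vmdq->mask) yields the queue stride of one VMDq pool, vmdq->offset times that stride is the first queue of the first PF-owned pool, and whenever the in-pool index (reg_idx & ~vmdq->mask) reaches the traffic-class count, __ALIGN_MASK(reg_idx, ~vmdq->mask) rounds reg_idx up to the next pool boundary. A minimal userspace sketch of that walk, assuming a 0x7C mask (four queues per pool, as with the driver's 82599 VMDQ_4Q mask) and made-up offset/TC values:

    #include <stdio.h>

    /* Same definition as the kernel's __ALIGN_MASK() macro. */
    #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))

    int main(void)
    {
        unsigned int vmdq_mask = 0x7C; /* four queues per pool (illustrative) */
        unsigned int vmdq_offset = 2;  /* first PF-owned pool (illustrative) */
        unsigned int tcs = 2;          /* traffic classes in use (illustrative) */
        unsigned int queues_per_pool = __ALIGN_MASK(1, ~vmdq_mask); /* -> 4 */
        unsigned int reg_idx = vmdq_offset * queues_per_pool;       /* -> 8 */
        int i;

        /* After 'tcs' queues in a pool, jump to the next pool boundary. */
        for (i = 0; i < 6; i++, reg_idx++) {
            if ((reg_idx & ~vmdq_mask) >= tcs)
                reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask);
            printf("ring %d -> hw queue %u\n", i, reg_idx);
        }
        return 0;
    }

Run, this prints hw queues 8, 9, 12, 13, 16, 17: each four-queue pool contributes only its first tcs queues, which is exactly the skip the driver performs.
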
66 u16 queues_per_pool = __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
69 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
71 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
77 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
79 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
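
Source lines 66-79 are the FCoE tail of the same function: (vmdq->offset + vmdq->indices) * queues_per_pool is the first hardware queue past the last VMDq pool, and each dedicated FCoE ring is then rounded up to a pool boundary and offset by the FCoE traffic class (obtained via ixgbe_fcoe_get_tc() in the driver). A sketch of that placement, again with illustrative values rather than anything read from hardware:

    #include <stdio.h>

    #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))

    int main(void)
    {
        unsigned int vmdq_mask = 0x7C;  /* four queues per pool (illustrative) */
        unsigned int queues_per_pool = __ALIGN_MASK(1, ~vmdq_mask);   /* -> 4 */
        unsigned int vmdq_offset = 2, vmdq_indices = 4; /* pools 2..5 hold VMDq */
        unsigned int fcoe_tc = 3;       /* hypothetical FCoE traffic class */
        unsigned int reg_idx = (vmdq_offset + vmdq_indices) * queues_per_pool;
        int i;

        /* Each FCoE ring lands on queue 'fcoe_tc' of the next free pool. */
        for (i = 0; i < 4; i++) {
            reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask) + fcoe_tc;
            printf("fcoe ring %d -> hw queue %u\n", i, reg_idx);
            reg_idx++;
        }
        return 0;
    }

With these numbers the FCoE rings land on queue 3 of pools 6 through 9 (hw queues 27, 31, 35, 39).
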
188 struct ixgbe_ring_feature *vmdq = &adapter->ring_feature[RING_F_VMDQ]; in ixgbe_cache_ring_sriov() local
199 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
207 if ((reg_idx & ~vmdq->mask) >= rss->indices) { in ixgbe_cache_ring_sriov()
209 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
223 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
232 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
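
The second group of hits (source lines 188-232) is ixgbe_cache_ring_sriov(), the plain SR-IOV path without DCB. The walk is the same as in the DCB case, except the in-pool index wraps at rss->indices, the number of RSS queues actually used per pool, instead of at the traffic-class count. A sketch of that variant, assuming an eight-queues-per-pool mask (0x78) and four RSS queues per pool:

    #include <stdio.h>

    #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))

    int main(void)
    {
        unsigned int vmdq_mask = 0x78;  /* eight queues per pool (illustrative) */
        unsigned int rss_indices = 4;   /* RSS queues used per pool (illustrative) */
        unsigned int vmdq_offset = 1;   /* first PF-owned pool (illustrative) */
        unsigned int reg_idx = vmdq_offset * __ALIGN_MASK(1, ~vmdq_mask);
        int i;

        /* Same skip logic as the DCB path, but bounded by rss->indices. */
        for (i = 0; i < 8; i++, reg_idx++) {
            if ((reg_idx & ~vmdq_mask) >= rss_indices)
                reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask);
            printf("ring %d -> hw queue %u\n", i, reg_idx);
        }
        return 0;
    }

This prints hw queues 8-11 and then 16-19: the unused upper half of each eight-queue pool is skipped.
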