Searched refs:op_own (Results 1 – 13 of 13) sorted by relevance
777 __be32 op_own, bool bf_ok, in mlx4_en_tx_write_desc() argument
783 op_own |= htonl((bf_index & 0xffff) << 8); in mlx4_en_tx_write_desc()
788 tx_desc->ctrl.owner_opcode = op_own; in mlx4_en_tx_write_desc()
803 tx_desc->ctrl.owner_opcode = op_own; in mlx4_en_tx_write_desc()
891 __be32 op_own; in mlx4_en_xmit() local
1023 op_own = cpu_to_be32(MLX4_OPCODE_LSO | (1 << 6)) | in mlx4_en_xmit()
1059 op_own = cpu_to_be32(MLX4_OPCODE_SEND) | in mlx4_en_xmit()
1083 op_own |= cpu_to_be32(MLX4_WQE_CTRL_IIP | MLX4_WQE_CTRL_ILP); in mlx4_en_xmit()
1085 op_own |= cpu_to_be32(MLX4_WQE_CTRL_IIP); in mlx4_en_xmit()
1117 op_own, bf_ok, send_doorbell); in mlx4_en_xmit()
[all …]
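The hits above show mlx4's TX path composing the 32-bit owner_opcode word: an opcode, optional offload flags, and a descriptor-ownership bit that flips on every pass over the ring, stored last into tx_desc->ctrl.owner_opcode. Below is a minimal userspace sketch of that composition; the constant values and the simplified checksum condition are assumptions standing in for the mlx4 header definitions (MLX4_OPCODE_SEND, MLX4_EN_BIT_DESC_OWN, MLX4_WQE_CTRL_IIP/ILP), so treat it as illustrative, not driver code.

/*
 * Illustrative userspace sketch, not driver code: build an op_own word the
 * way the mlx4_en_xmit() hits above suggest.  Constant values are assumed
 * stand-ins for the mlx4 header definitions.
 */
#include <stdint.h>
#include <stdio.h>
#include <arpa/inet.h>                      /* htonl()/ntohl() */

#define OPCODE_SEND   0x0a                  /* assumed: MLX4_OPCODE_SEND */
#define BIT_DESC_OWN  0x80000000u           /* assumed: MLX4_EN_BIT_DESC_OWN */
#define WQE_CTRL_IIP  (1u << 28)            /* assumed: MLX4_WQE_CTRL_IIP */
#define WQE_CTRL_ILP  (1u << 27)            /* assumed: MLX4_WQE_CTRL_ILP */

static uint32_t build_op_own(uint32_t prod, uint32_t ring_size,
                             int csum_ip, int csum_l4)
{
	/* Opcode plus the ownership bit, which toggles on each wrap of the
	 * power-of-two sized ring, as in the line-1059 hit above. */
	uint32_t op_own = htonl(OPCODE_SEND) |
			  ((prod & ring_size) ? htonl(BIT_DESC_OWN) : 0);

	/* Checksum-offload flags, mirroring lines 1083/1085; the driver's
	 * real condition (encapsulation handling) is simplified here. */
	if (csum_ip && csum_l4)
		op_own |= htonl(WQE_CTRL_IIP | WQE_CTRL_ILP);
	else if (csum_ip)
		op_own |= htonl(WQE_CTRL_IIP);

	return op_own;  /* the driver writes this last, into ctrl.owner_opcode */
}

int main(void)
{
	printf("op_own = 0x%08x\n", ntohl(build_op_own(17, 16, 1, 1)));
	return 0;
}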
90 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & (cq->ibcq.cqe + 1)))) { in get_sw_cqe()
883 cqe64->op_own = MLX5_CQE_INVALID << 4; in init_cq_frag_buf()
1114 owner_bit = dest64->op_own & MLX5_CQE_OWNER_MASK; in __mlx5_ib_cq_clean()
1116 dest64->op_own = owner_bit | in __mlx5_ib_cq_clean()
1117 (dest64->op_own & ~MLX5_CQE_OWNER_MASK); in __mlx5_ib_cq_clean()
1251 dcqe64->op_own = (dcqe64->op_own & ~MLX5_CQE_OWNER_MASK) | sw_own; in copy_resize_cqes()
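During CQ cleanup and resize (__mlx5_ib_cq_clean() and copy_resize_cqes() above), a CQE that gets rewritten keeps the ownership bit of its destination slot, so the hardware/software handshake on that slot is not disturbed. A small standalone sketch of that pattern follows; the structure and helper names are simplified stand-ins (the real CQE is struct mlx5_cqe64 and the mask is MLX5_CQE_OWNER_MASK).

/* Standalone sketch of the owner-bit-preserving copy; names simplified. */
#include <stdint.h>
#include <stdio.h>

#define CQE_OWNER_MASK 1                    /* MLX5_CQE_OWNER_MASK */

struct cqe { uint8_t op_own; };

/* Copy src over dest but keep dest's current ownership bit, as in
 * lines 1114-1117 above. */
static void copy_cqe_keep_owner(struct cqe *dest, const struct cqe *src)
{
	uint8_t owner_bit = dest->op_own & CQE_OWNER_MASK;

	*dest = *src;
	dest->op_own = owner_bit | (dest->op_own & ~CQE_OWNER_MASK);
}

int main(void)
{
	struct cqe dest = { .op_own = 0x01 };   /* slot's owner bit currently 1 */
	struct cqe src  = { .op_own = 0x20 };   /* some opcode, owner bit 0 */

	copy_cqe_keep_owner(&dest, &src);
	printf("dest.op_own = 0x%02x\n", dest.op_own);  /* 0x21: payload from src, owner kept */
	return 0;
}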
818 u8 op_own; member
869 u8 op_own; member
909 return (cqe->op_own >> 2) & 0x3; in mlx5_get_cqe_format()
914 return cqe->op_own >> 4; in get_cqe_opcode()
1066 u8 op_own; member
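These member declarations and accessors imply the layout of the op_own byte in an mlx5 CQE: the completion opcode in bits 7:4, the CQE format in bits 3:2, and the ownership bit in bit 0. Since MLX5_CQE_OWNER_MASK is 1 and MLX5_CQE_INVALID is 0xf, the "invalid, hardware-owned" init value seen in several results below works out to 0xf1. A self-contained sketch of those accessors, using simplified names:

/* Standalone sketch of the mlx5 CQE op_own accessors; names simplified. */
#include <stdint.h>
#include <stdio.h>

#define CQE_OWNER_MASK 1                    /* MLX5_CQE_OWNER_MASK */
#define CQE_INVALID    0xf                  /* MLX5_CQE_INVALID */

struct cqe { uint8_t op_own; };

static uint8_t cqe_opcode(const struct cqe *c) { return c->op_own >> 4; }        /* get_cqe_opcode() */
static uint8_t cqe_format(const struct cqe *c) { return (c->op_own >> 2) & 0x3; } /* mlx5_get_cqe_format() */
static uint8_t cqe_owner(const struct cqe *c)  { return c->op_own & CQE_OWNER_MASK; }

int main(void)
{
	struct cqe c = { .op_own = (CQE_INVALID << 4) | CQE_OWNER_MASK };  /* 0xf1 */

	printf("opcode=0x%x format=%u owner=%u\n",
	       cqe_opcode(&c), cqe_format(&c), cqe_owner(&c));
	return 0;
}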
132 u8 op_own = mlx5_cqwq_get_ctr_wrap_cnt(wq, cqcc) & 1; in mlx5e_cqes_update_owner() local
140 cqe->op_own = op_own; in mlx5e_cqes_update_owner()
144 op_own = !op_own; in mlx5e_cqes_update_owner()
148 cqe->op_own = op_own; in mlx5e_cqes_update_owner()
163 title->op_own &= 0xf0; in mlx5e_decompress_cqe()
164 title->op_own |= 0x01 & (cqcc >> wq->fbc.log_sz); in mlx5e_decompress_cqe()
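In the mlx5e CQE-compression path, the owner bit written into each expanded CQE is simply the parity of the consumer counter's wrap count over the CQ ring (line 164 above). A tiny sketch of that calculation, using illustrative names (cqcc, cq_log_sz) rather than the driver's structures:

/* Sketch of the ownership-parity calculation; names are illustrative. */
#include <stdint.h>
#include <stdio.h>

static uint8_t expected_owner(uint32_t cqcc, uint8_t cq_log_sz)
{
	/* Same expression as line 164 above: low bit of the wrap count. */
	return (cqcc >> cq_log_sz) & 1;
}

int main(void)
{
	uint8_t log_sz = 3;                     /* 8-entry CQ for the demo */

	for (uint32_t cqcc = 6; cqcc < 11; cqcc++)
		printf("cqcc=%u -> owner bit should be %u\n",
		       cqcc, expected_owner(cqcc, log_sz));
	return 0;
}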
234 u8 cqe_ownership_bit = cqe->op_own & MLX5_CQE_OWNER_MASK; in mlx5_cqwq_get_cqe()
62 cqe->op_own = 0xf1; in mlx5_wc_create_cqwq()
2116 cqe->op_own = 0xf1; in mlx5e_alloc_cq_common()
67 cqe->op_own = 0xf1; in mlx5_aso_alloc_cq()
1020 cqe64->op_own = MLX5_CQE_INVALID << 4; in init_cq_frag_buf()
1677 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & (cq->ncqe)))) { in get_sw_cqe()
437 cqe->op_own = MLX5_CQE_INVALID << 4 | MLX5_CQE_OWNER_MASK; in mlx5_fpga_conn_create_cq()
783 cqe->op_own = 0xf1; in hws_send_ring_alloc_cq()
1090 cqe->op_own = MLX5_CQE_INVALID << 4 | MLX5_CQE_OWNER_MASK; in dr_create_cq()
352 cqe64->op_own = MLX5_CQE_INVALID << 4; in cq_frag_buf_init()
361 !((cqe64->op_own & MLX5_CQE_OWNER_MASK) ^ !!(n & cq->cqe))) in get_sw_cqe()
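The get_sw_cqe() hits, here and in the mlx5_ib results above, all test the same thing: the CQE at consumer index n is consumable by software only if its opcode is not MLX5_CQE_INVALID and its owner bit equals the wrap parity of n over the power-of-two CQ size. A self-contained sketch of that test, with simplified names standing in for the driver's structures:

/* Standalone sketch of the software-ownership test; names simplified. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CQE_OWNER_MASK 1                    /* MLX5_CQE_OWNER_MASK */
#define CQE_INVALID    0xf                  /* MLX5_CQE_INVALID */

struct cqe { uint8_t op_own; };

static bool cqe_is_sw_owned(const struct cqe *c, uint32_t n, uint32_t cq_size)
{
	uint8_t wrap_parity = !!(n & cq_size);  /* cq_size is a power of two */

	return (c->op_own >> 4) != CQE_INVALID &&
	       !((c->op_own & CQE_OWNER_MASK) ^ wrap_parity);
}

int main(void)
{
	struct cqe hw_owned = { .op_own = (CQE_INVALID << 4) | CQE_OWNER_MASK }; /* 0xf1 init */
	struct cqe done     = { .op_own = 0x00 };  /* opcode 0, owner bit 0 */

	printf("freshly initialised CQE ready? %d\n", cqe_is_sw_owned(&hw_owned, 0, 8));
	printf("completed CQE ready on pass 0? %d\n", cqe_is_sw_owned(&done, 0, 8));
	return 0;
}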