Searched refs:cs_flags (Results 1 – 16 of 16) sorted by relevance
125 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
128 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
131 eseg->cs_flags |= MLX5_ETH_WQE_L4_INNER_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
135 eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM; in mlx5e_ipsec_txwqe_build_eseg_csum()
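The mlx5 hits above (and the mlx5e_txwqe_build_eseg_csum() results further down the list) compose eseg->cs_flags the same way: an outer L3 bit is set first, then either the inner-checksum bits or the outer L4 bit is OR-ed in depending on encapsulation. A minimal standalone sketch of that pattern follows; the flag values, struct layout, and the skb stand-in are assumptions for illustration, not the real mlx5 definitions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder bit values; the real ones live in the mlx5 hardware headers. */
#define WQE_L3_CSUM        (1u << 6)
#define WQE_L4_CSUM        (1u << 7)
#define WQE_L3_INNER_CSUM  (1u << 4)
#define WQE_L4_INNER_CSUM  (1u << 5)

struct eth_seg { uint8_t cs_flags; };

struct fake_skb {
	bool csum_partial;   /* stands in for skb->ip_summed == CHECKSUM_PARTIAL */
	bool encapsulated;   /* stands in for skb->encapsulation */
};

static void build_eseg_csum(struct eth_seg *eseg, const struct fake_skb *skb)
{
	if (!skb->csum_partial)
		return;                          /* no checksum offload requested */

	eseg->cs_flags = WQE_L3_CSUM;            /* outer L3 always checksummed */
	if (skb->encapsulated)
		eseg->cs_flags |= WQE_L3_INNER_CSUM | WQE_L4_INNER_CSUM;
	else
		eseg->cs_flags |= WQE_L4_CSUM;
}

int main(void)
{
	struct eth_seg eseg = { 0 };
	struct fake_skb skb = { .csum_partial = true, .encapsulated = false };

	build_eseg_csum(&eseg, &skb);
	printf("cs_flags = 0x%02x\n", eseg.cs_flags);
	return 0;
}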
194 if (p->cs_flags & RADEON_CS_USE_VM) in radeon_cs_parser_relocs()
301 p->cs_flags = 0; in radeon_cs_parser_init()
360 p->cs_flags = p->chunks[i].kdata[0]; in radeon_cs_parser_init()
370 if ((p->cs_flags & RADEON_CS_USE_VM) && in radeon_cs_parser_init()
380 if ((p->cs_flags & RADEON_CS_USE_VM) == 0) { in radeon_cs_parser_init()
473 if (parser->cs_flags & RADEON_CS_USE_VM) in radeon_cs_ib_chunk()
560 if ((parser->cs_flags & RADEON_CS_USE_VM) == 0) in radeon_cs_ib_vm_chunk()
622 if (parser->cs_flags & RADEON_CS_USE_VM) { in radeon_cs_ib_fill()
220 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r200_packet0_check()
292 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r200_packet0_check()
714 if (p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS) { in r300_packet0_check()
778 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r300_packet0_check()
863 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r300_packet0_check()
1172 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_cs_handle_reg()
1360 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_cs_handle_reg()
1378 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_cs_handle_reg()
1446 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_cs_handle_reg()
1474 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_cs_handle_reg()
2361 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in evergreen_packet3_check()
1030 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS) && in r600_cs_check_reg()
1131 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS) && in r600_cs_check_reg()
1495 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r600_check_texture_resource()
1966 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r600_packet3_check()
1311 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r100_reloc_pitch_offset()
1653 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r100_packet0_check()
1734 if (!(p->cs_flags & RADEON_CS_KEEP_TILING_FLAGS)) { in r100_packet0_check()
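The radeon results above all test two bits of the parser's cs_flags, which radeon_cs_parser_init() copies out of the flags chunk (p->chunks[i].kdata[0]): RADEON_CS_USE_VM selects the VM versus legacy IB path, and RADEON_CS_KEEP_TILING_FLAGS tells the checkers to trust userspace-supplied tiling bits. A small self-contained sketch of that gating pattern; the flag values, struct, helper names, and the register mask are illustrative stand-ins, not the real radeon code.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder flag values; the real ones are defined in the radeon uapi header. */
#define CS_KEEP_TILING_FLAGS (1u << 0)
#define CS_USE_VM            (1u << 1)

struct cs_parser {
	uint32_t cs_flags;   /* copied from the first dword of the flags chunk */
};

/* The VM bit picks which IB-processing path the submission takes. */
static bool cs_uses_vm(const struct cs_parser *p)
{
	return (p->cs_flags & CS_USE_VM) != 0;
}

/* The tiling bit decides whether a submitted register value is kept as-is
 * or has driver-managed tiling state patched in (mask here is invented). */
static uint32_t cs_fixup_reg(const struct cs_parser *p, uint32_t submitted,
			     uint32_t driver_tiling)
{
	if (p->cs_flags & CS_KEEP_TILING_FLAGS)
		return submitted;
	return (submitted & 0x0fffffffu) | driver_tiling;
}

int main(void)
{
	struct cs_parser p = { .cs_flags = CS_USE_VM };

	printf("vm=%d reg=0x%08x\n", cs_uses_vm(&p),
	       cs_fixup_reg(&p, 0xdeadbeefu, 0x30000000u));
	return 0;
}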
1043 u32 cs_flags; member
218 static int rseq_need_restart(struct task_struct *t, u32 cs_flags) in rseq_need_restart() argument
223 if (rseq_warn_flags("rseq_cs", cs_flags)) in rseq_need_restart()
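The rseq hit shows cs_flags being handed to a flag-sanity helper before the kernel decides whether a restart is needed. A rough sketch of that "warn on unexpected flag bits" idiom; the mask, helper name, and return convention here are assumptions for illustration, not the kernel's actual rseq policy.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define KNOWN_CS_FLAGS 0x7u   /* placeholder: bits the checker accepts */

static bool warn_on_bad_flags(const char *what, uint32_t flags)
{
	uint32_t unknown = flags & ~KNOWN_CS_FLAGS;

	if (unknown) {
		fprintf(stderr, "%s: unexpected flag bits 0x%x\n", what, unknown);
		return true;    /* caller treats this as a fatal/restart condition */
	}
	return false;
}

int main(void)
{
	printf("%d\n", warn_on_bad_flags("rseq_cs", 0x10)); /* 1: unknown bit set */
	printf("%d\n", warn_on_bad_flags("rseq_cs", 0x3));  /* 0: all bits known */
	return 0;
}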
127 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM; in mlx5e_txwqe_build_eseg_csum()
129 eseg->cs_flags |= MLX5_ETH_WQE_L3_INNER_CSUM | in mlx5e_txwqe_build_eseg_csum()
133 eseg->cs_flags |= MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
138 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | MLX5_ETH_WQE_L4_CSUM; in mlx5e_txwqe_build_eseg_csum()
1366 if ((args->in.cs_flags & HL_CS_FLAGS_STAGED_SUBMISSION) && in hl_cs_sanity_checks()
1372 cs_type_flags = args->in.cs_flags & HL_CS_FLAGS_TYPE_MASK; in hl_cs_sanity_checks()
1702 if (do_ctx_switch || (args->in.cs_flags & HL_CS_FLAGS_FORCE_RESTORE)) { in hl_cs_ctx_switch()
2577 cs_type = hl_cs_get_cs_type(args->in.cs_flags & in hl_cs_ioctl()
2581 flags = args->in.cs_flags; in hl_cs_ioctl()
2597 &cs_seq, args->in.cs_flags, timeout, in hl_cs_ioctl()
2623 args->in.cs_flags, in hl_cs_ioctl()
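In the habanalabs results, cs_flags arrives from userspace in the ioctl arguments and a submission type is extracted from it with HL_CS_FLAGS_TYPE_MASK before dispatch. A hedged sketch of that mask-and-dispatch pattern; the mask, flag bits, type names, and validation rule are invented for illustration.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CS_FLAGS_TYPE_MASK  0x0ffu          /* placeholder for the type bits */
#define CS_FLAG_SIGNAL      (1u << 0)       /* placeholder type bits */
#define CS_FLAG_WAIT        (1u << 1)

enum cs_type { CS_TYPE_DEFAULT, CS_TYPE_SIGNAL, CS_TYPE_WAIT };

/* True when at most one bit is set (zero or a power of two). */
static bool one_or_zero_bits(uint32_t v)
{
	return (v & (v - 1)) == 0;
}

static int get_cs_type(uint32_t cs_flags, enum cs_type *type)
{
	uint32_t type_flags = cs_flags & CS_FLAGS_TYPE_MASK;

	if (!one_or_zero_bits(type_flags))
		return -1;                   /* more than one type requested */

	if (type_flags & CS_FLAG_SIGNAL)
		*type = CS_TYPE_SIGNAL;
	else if (type_flags & CS_FLAG_WAIT)
		*type = CS_TYPE_WAIT;
	else
		*type = CS_TYPE_DEFAULT;
	return 0;
}

int main(void)
{
	enum cs_type t;
	int ok = get_cs_type(CS_FLAG_WAIT, &t);

	printf("ok=%d type=%d\n", ok, t);
	return 0;
}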
266 u8 cs_flags; member
1706 __u32 cs_flags; member
302 eseg->cs_flags |= MLX5_ETH_WQE_L3_CSUM | MLX5_ETH_WQE_L4_CSUM; in mlx5e_xsk_request_checksum()
62 eseg->cs_flags = MLX5_ETH_WQE_L3_CSUM | in set_eth_seg()
4401 enum chg_state_flags cs_flags; in receive_state() local
4551 cs_flags = CS_VERBOSE + (os.conn < C_CONNECTED && ns.conn >= C_CONNECTED ? 0 : CS_HARD); in receive_state()
4564 rv = _drbd_set_state(device, ns, cs_flags, NULL); in receive_state()
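The DRBD hit is a different cs_flags altogether: a local enum chg_state_flags value where CS_VERBOSE is always set and CS_HARD is added unless the transition brings the connection up, before the result is passed to _drbd_set_state(). A small illustration of that conditional flag selection; the flag values and helper name are placeholders, not DRBD internals.

#include <stdint.h>
#include <stdio.h>

#define CS_VERBOSE (1u << 0)   /* placeholder value */
#define CS_HARD    (1u << 1)   /* placeholder value */

enum conn_state { C_STANDALONE, C_CONNECTED, C_SYNC_SOURCE };

static uint32_t pick_cs_flags(enum conn_state old_conn, enum conn_state new_conn)
{
	uint32_t flags = CS_VERBOSE;

	/* Only a transition into the connected range skips the "hard" flag. */
	if (!(old_conn < C_CONNECTED && new_conn >= C_CONNECTED))
		flags |= CS_HARD;
	return flags;
}

int main(void)
{
	printf("0x%x\n", pick_cs_flags(C_STANDALONE, C_CONNECTED));  /* verbose only */
	printf("0x%x\n", pick_cs_flags(C_CONNECTED, C_SYNC_SOURCE)); /* verbose|hard */
	return 0;
}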