Lines Matching refs:emad
82 } emad; member
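The closing brace at line 82 ends the per-core EMAD state embedded in struct mlxsw_core. Pieced together purely from the field accesses that appear later in this listing (tid, trans_list, trans_list_lock, use_emad and the two TLV enable flags), that member plausibly looks like the sketch below; the ordering and comments are guesses, only the field names and kinds come from the matches.

#include <linux/atomic.h>
#include <linux/list.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Reconstruction of the EMAD state closed by "} emad;", built only from
 * the field accesses shown in this listing, not copied from the source.
 */
struct mlxsw_core_emad_sketch {
	atomic64_t tid;			/* transaction id counter, see tid_get */
	struct list_head trans_list;	/* in-flight EMAD transactions */
	spinlock_t trans_list_lock;	/* protects trans_list modifications */
	bool use_emad;			/* EMAD enabled for register access */
	bool enable_string_tlv;		/* include the optional string TLV */
	bool enable_latency_tlv;	/* include the optional latency TLV */
};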
285 MLXSW_ITEM_BUF(emad, eth_hdr, dmac, 0x00, 6);
291 MLXSW_ITEM_BUF(emad, eth_hdr, smac, 0x06, 6);
297 MLXSW_ITEM32(emad, eth_hdr, ethertype, 0x0C, 16, 16);
303 MLXSW_ITEM32(emad, eth_hdr, mlx_proto, 0x0C, 8, 8);
309 MLXSW_ITEM32(emad, eth_hdr, ver, 0x0C, 4, 4);
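The five eth_hdr items describe the EMAD Ethernet encapsulation: destination and source MAC buffers, then an ethertype, a Mellanox protocol field and a version nibble all packed into the 32-bit word at offset 0x0C. As a rough, self-contained model of what each MLXSW_ITEM32() line gives the driver (the real helpers are generated by the macros in item.h and are assumed here to operate on big-endian words), the (offset, shift, size) triple reduces to a shift-and-mask:

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>

/* Standalone model of one MLXSW_ITEM32(emad, ...) entry: a get/set pair
 * over a 32-bit big-endian word at a fixed byte offset, masked to <size>
 * bits starting at bit <shift>.  This only mirrors the arithmetic implied
 * by the triples above; the real generated helpers live in item.h.
 */
static uint32_t emad_item32_get(const char *buf, unsigned int offset,
				unsigned int shift, unsigned int size)
{
	uint32_t be_word;
	uint32_t mask = size < 32 ? (1U << size) - 1 : ~0U;

	memcpy(&be_word, buf + offset, sizeof(be_word));
	return (ntohl(be_word) >> shift) & mask;
}

static void emad_item32_set(char *buf, unsigned int offset,
			    unsigned int shift, unsigned int size,
			    uint32_t val)
{
	uint32_t be_word, word;
	uint32_t mask = (size < 32 ? (1U << size) - 1 : ~0U) << shift;

	memcpy(&be_word, buf + offset, sizeof(be_word));
	word = ntohl(be_word);
	word = (word & ~mask) | ((val << shift) & mask);
	be_word = htonl(word);
	memcpy(buf + offset, &be_word, sizeof(be_word));
}

With the triples from the listing, emad_item32_get(eth_hdr, 0x0C, 16, 16) would read the ethertype and emad_item32_get(eth_hdr, 0x0C, 4, 4) the version.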
315 MLXSW_ITEM32(emad, op_tlv, type, 0x00, 27, 5);
321 MLXSW_ITEM32(emad, op_tlv, len, 0x00, 16, 11);
329 MLXSW_ITEM32(emad, op_tlv, dr, 0x00, 15, 1);
346 MLXSW_ITEM32(emad, op_tlv, status, 0x00, 8, 7);
351 MLXSW_ITEM32(emad, op_tlv, register_id, 0x04, 16, 16);
356 MLXSW_ITEM32(emad, op_tlv, r, 0x04, 15, 1);
365 MLXSW_ITEM32(emad, op_tlv, method, 0x04, 8, 7);
370 MLXSW_ITEM32(emad, op_tlv, class, 0x04, 0, 8);
375 MLXSW_ITEM64(emad, op_tlv, tid, 0x08, 0, 64);
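Together the op_tlv items define the EMAD operation header: TLV type and length, a direct-route bit, a status code, the id of the register being accessed, a response bit, method, class and a 64-bit transaction id. Building on the emad_item32_set() helper sketched above, and with placeholder values for type, method and class (they are not the driver's constants), a request header could be filled like this:

#include <stdint.h>

/* Hypothetical request packing for the operation TLV, using the
 * emad_item32_set() sketch above and the (offset, shift, size) triples
 * from the op_tlv items.  The numeric type/method/class codes are
 * placeholders; only the field positions come from the listing.
 */
static void emad_fill_op_tlv_sketch(char *op_tlv, uint16_t reg_id,
				    int is_write)
{
	emad_item32_set(op_tlv, 0x00, 27, 5, 1);	/* type: op TLV (placeholder code) */
	emad_item32_set(op_tlv, 0x00, 16, 11, 4);	/* len, assumed to count 32-bit words */
	emad_item32_set(op_tlv, 0x00, 15, 1, 0);	/* dr: not direct-routed */
	emad_item32_set(op_tlv, 0x00, 8, 7, 0);		/* status: cleared in a request */
	emad_item32_set(op_tlv, 0x04, 16, 16, reg_id);	/* register_id */
	emad_item32_set(op_tlv, 0x04, 15, 1, 0);	/* r: request, not response */
	emad_item32_set(op_tlv, 0x04, 8, 7, is_write ? 2 : 1);	/* method (placeholder codes) */
	emad_item32_set(op_tlv, 0x04, 0, 8, 1);		/* class (placeholder code) */
	/* tid is the 64-bit item at offset 0x08; filling it needs a 64-bit
	 * variant of the setter spanning two big-endian words. */
}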
381 MLXSW_ITEM32(emad, string_tlv, type, 0x00, 27, 5);
386 MLXSW_ITEM32(emad, string_tlv, len, 0x00, 16, 11);
393 MLXSW_ITEM_BUF(emad, string_tlv, string, 0x04,
400 MLXSW_ITEM32(emad, latency_tlv, type, 0x00, 27, 5);
405 MLXSW_ITEM32(emad, latency_tlv, len, 0x00, 16, 11);
410 MLXSW_ITEM32(emad, latency_tlv, latency_time, 0x04, 0, 32);
416 MLXSW_ITEM32(emad, reg_tlv, type, 0x00, 27, 5);
421 MLXSW_ITEM32(emad, reg_tlv, len, 0x00, 16, 11);
427 MLXSW_ITEM32(emad, end_tlv, type, 0x00, 27, 5);
433 MLXSW_ITEM32(emad, end_tlv, len, 0x00, 16, 11);
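Every TLV in the listing, op, string, latency, reg and end alike, starts with the same first word: a 5-bit type at bit 27 and an 11-bit length at bit 16. That shared header is what lets a receiver walk an EMAD payload TLV by TLV. A sketch of such a walk, reusing emad_item32_get() from the earlier block and assuming, for illustration only, that len counts 32-bit words:

#include <stddef.h>

/* Walk a chain of TLVs via the shared first word (5-bit type at bit 27,
 * 11-bit length at bit 16), reusing emad_item32_get() from the sketch
 * above.  Treating len as a count of 32-bit words is an assumption made
 * for this sketch.
 */
static void emad_walk_tlvs_sketch(const char *tlvs, size_t size)
{
	size_t off = 0;

	while (off + 4 <= size) {
		unsigned int type = emad_item32_get(tlvs + off, 0x00, 27, 5);
		unsigned int len = emad_item32_get(tlvs + off, 0x00, 16, 11);

		if (!len)
			break;			/* malformed TLV, stop */
		/* dispatch on type: op, string, latency, reg or end ... */
		(void)type;
		off += (size_t)len * 4;
	}
}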
530 if (mlxsw_core->emad.enable_latency_tlv) { in mlxsw_emad_construct()
535 if (mlxsw_core->emad.enable_string_tlv) { in mlxsw_emad_construct()
759 spin_lock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_trans_finish()
761 spin_unlock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_trans_finish()
839 list_for_each_entry_rcu(trans, &mlxsw_core->emad.trans_list, list) { in mlxsw_emad_rx_listener_func()
866 mlxsw_core->emad.enable_string_tlv = string_tlv; in mlxsw_emad_tlv_enable()
869 mlxsw_core->emad.enable_latency_tlv = latency_tlv; in mlxsw_emad_tlv_enable()
876 mlxsw_core->emad.enable_latency_tlv = false; in mlxsw_emad_tlv_disable()
877 mlxsw_core->emad.enable_string_tlv = false; in mlxsw_emad_tlv_disable()
900 atomic64_set(&mlxsw_core->emad.tid, tid); in mlxsw_emad_init()
902 INIT_LIST_HEAD(&mlxsw_core->emad.trans_list); in mlxsw_emad_init()
903 spin_lock_init(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_init()
914 mlxsw_core->emad.use_emad = true; in mlxsw_emad_init()
932 mlxsw_core->emad.use_emad = false; in mlxsw_emad_fini()
948 if (mlxsw_core->emad.enable_string_tlv) in mlxsw_emad_alloc()
950 if (mlxsw_core->emad.enable_latency_tlv) in mlxsw_emad_alloc()
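The two checks in mlxsw_emad_alloc(), like the matching ones in mlxsw_emad_construct() earlier in the listing, show that the string and latency TLVs are optional: space is reserved and the TLV emitted only when the corresponding enable flag is set. A sizing sketch of that idea, with every length constant a made-up placeholder rather than a value from the driver:

#include <stdbool.h>
#include <stddef.h>

/* How the optional TLVs would grow the buffer reserved for one EMAD,
 * mirroring the enable_string_tlv / enable_latency_tlv checks in
 * mlxsw_emad_alloc() and mlxsw_emad_construct().  Every *_LEN value here
 * is a made-up placeholder in bytes.
 */
enum {
	EMAD_ETH_HDR_LEN_SKETCH		= 16,
	EMAD_OP_TLV_LEN_SKETCH		= 16,
	EMAD_STRING_TLV_LEN_SKETCH	= 132,
	EMAD_LATENCY_TLV_LEN_SKETCH	= 8,
	EMAD_REG_TLV_HDR_LEN_SKETCH	= 4,
	EMAD_END_TLV_LEN_SKETCH		= 4,
};

static size_t emad_frame_len_sketch(bool string_tlv, bool latency_tlv,
				    size_t reg_payload_len)
{
	size_t len = EMAD_ETH_HDR_LEN_SKETCH + EMAD_OP_TLV_LEN_SKETCH +
		     EMAD_REG_TLV_HDR_LEN_SKETCH + reg_payload_len +
		     EMAD_END_TLV_LEN_SKETCH;

	if (string_tlv)		/* room for the firmware status string TLV */
		len += EMAD_STRING_TLV_LEN_SKETCH;
	if (latency_tlv)	/* room for the latency measurement TLV */
		len += EMAD_LATENCY_TLV_LEN_SKETCH;
	return len;
}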
1000 spin_lock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_reg_access()
1001 list_add_tail_rcu(&trans->list, &mlxsw_core->emad.trans_list); in mlxsw_emad_reg_access()
1002 spin_unlock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_reg_access()
1009 spin_lock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_reg_access()
1011 spin_unlock_bh(&mlxsw_core->emad.trans_list_lock); in mlxsw_emad_reg_access()
2673 return atomic64_inc_return(&mlxsw_core->emad.tid); in mlxsw_core_tid_get()
2914 if (!mlxsw_core->emad.use_emad) in mlxsw_core_reg_access()
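The remaining references sketch the transaction flow: mlxsw_core_tid_get() hands out a 64-bit tid from an atomic counter, mlxsw_emad_reg_access() links the transaction into emad.trans_list under trans_list_lock before sending, the RX listener walks that list under RCU to match a response to its request by tid, and mlxsw_core_reg_access() only takes the EMAD path at all when emad.use_emad is set, falling back to a non-EMAD access path otherwise. A pattern sketch of the RCU-side matching, with illustrative types rather than the driver's own:

#include <linux/rculist.h>
#include <linux/types.h>

/* Pattern sketch of the response matching implied by the references above:
 * requests carry a tid taken from atomic64_inc_return(&emad.tid), sit on
 * emad.trans_list (added with list_add_tail_rcu() under trans_list_lock)
 * and are looked up by tid when a response arrives.  The structure and
 * helper below are illustrative, not the driver's definitions.
 */
struct emad_trans_sketch {
	struct list_head list;
	u64 tid;
	/* completion, response buffer, status, ... */
};

static bool emad_match_response_sketch(struct list_head *trans_list,
				       u64 resp_tid)
{
	struct emad_trans_sketch *trans;
	bool found = false;

	rcu_read_lock();
	list_for_each_entry_rcu(trans, trans_list, list) {
		if (trans->tid != resp_tid)
			continue;
		/* Process the response for this transaction while still in
		 * the RCU read-side section, e.g. copy the reg TLV payload
		 * and complete the waiter. */
		found = true;
		break;
	}
	rcu_read_unlock();
	return found;
}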