Lines Matching refs:bat_iv

152 spin_lock_init(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_orig_get()
197 mutex_lock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_enable()
201 atomic_set(&hard_iface->bat_iv.ogm_seqno, random_seqno); in batadv_iv_ogm_iface_enable()
203 hard_iface->bat_iv.ogm_buff_len = BATADV_OGM_HLEN; in batadv_iv_ogm_iface_enable()
204 ogm_buff = kmalloc(hard_iface->bat_iv.ogm_buff_len, GFP_ATOMIC); in batadv_iv_ogm_iface_enable()
206 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_enable()
210 hard_iface->bat_iv.ogm_buff = ogm_buff; in batadv_iv_ogm_iface_enable()
220 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_enable()
227 mutex_lock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_disable()
229 kfree(hard_iface->bat_iv.ogm_buff); in batadv_iv_ogm_iface_disable()
230 hard_iface->bat_iv.ogm_buff = NULL; in batadv_iv_ogm_iface_disable()
232 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_disable()
240 mutex_lock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_update_mac()
242 ogm_buff = hard_iface->bat_iv.ogm_buff; in batadv_iv_ogm_iface_update_mac()
253 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_iface_update_mac()
262 mutex_lock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_primary_iface_set()
264 ogm_buff = hard_iface->bat_iv.ogm_buff; in batadv_iv_ogm_primary_iface_set()
272 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_primary_iface_set()
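The matches in batadv_iv_ogm_iface_enable() and batadv_iv_ogm_iface_disable() above show the ogm_buff lifecycle: the buffer is allocated, updated and torn down only while ogm_buff_mutex is held, and the pointer is cleared before the lock is dropped. A minimal userspace sketch of that pattern, assuming a pthread mutex and malloc() in place of the kernel's mutex and kmalloc() (the struct and function names here are illustrative, not the kernel's):

    #include <pthread.h>
    #include <stdlib.h>
    #include <string.h>

    #define OGM_HLEN 24   /* stand-in for BATADV_OGM_HLEN */

    struct iface_bat_iv {
        unsigned char *ogm_buff;            /* mirrors hard_iface->bat_iv.ogm_buff */
        int ogm_buff_len;                   /* mirrors hard_iface->bat_iv.ogm_buff_len */
        pthread_mutex_t ogm_buff_mutex;     /* stands in for bat_iv.ogm_buff_mutex */
    };

    /* allocate the per-interface OGM buffer, as in batadv_iv_ogm_iface_enable() */
    static int iface_enable(struct iface_bat_iv *iv)
    {
        unsigned char *buff;

        pthread_mutex_lock(&iv->ogm_buff_mutex);

        iv->ogm_buff_len = OGM_HLEN;
        buff = malloc(iv->ogm_buff_len);
        if (!buff) {
            pthread_mutex_unlock(&iv->ogm_buff_mutex);
            return -1;
        }
        memset(buff, 0, iv->ogm_buff_len);
        iv->ogm_buff = buff;

        pthread_mutex_unlock(&iv->ogm_buff_mutex);
        return 0;
    }

    /* free the buffer and clear the pointer under the same lock, as in ..._iface_disable() */
    static void iface_disable(struct iface_bat_iv *iv)
    {
        pthread_mutex_lock(&iv->ogm_buff_mutex);
        free(iv->ogm_buff);
        iv->ogm_buff = NULL;
        iv->ogm_buff_len = 0;
        pthread_mutex_unlock(&iv->ogm_buff_mutex);
    }

    int main(void)
    {
        struct iface_bat_iv iv = { 0 };

        pthread_mutex_init(&iv.ogm_buff_mutex, NULL);
        if (iface_enable(&iv) == 0)
            iface_disable(&iv);
        pthread_mutex_destroy(&iv.ogm_buff_mutex);
        return 0;
    }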
762 spin_lock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_slide_own_bcast_window()
763 word = orig_ifinfo->bat_iv.bcast_own; in batadv_iv_ogm_slide_own_bcast_window()
765 w = &orig_ifinfo->bat_iv.bcast_own_sum; in batadv_iv_ogm_slide_own_bcast_window()
768 spin_unlock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_slide_own_bcast_window()
782 unsigned char **ogm_buff = &hard_iface->bat_iv.ogm_buff; in batadv_iv_ogm_schedule_buff()
785 int *ogm_buff_len = &hard_iface->bat_iv.ogm_buff_len; in batadv_iv_ogm_schedule_buff()
790 lockdep_assert_held(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_schedule_buff()
821 seqno = (u32)atomic_read(&hard_iface->bat_iv.ogm_seqno); in batadv_iv_ogm_schedule_buff()
823 atomic_inc(&hard_iface->bat_iv.ogm_seqno); in batadv_iv_ogm_schedule_buff()
867 mutex_lock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_schedule()
869 mutex_unlock(&hard_iface->bat_iv.ogm_buff_mutex); in batadv_iv_ogm_schedule()
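In batadv_iv_ogm_schedule_buff() the interface's ogm_seqno is read atomically, stamped into the outgoing OGM, and then incremented, while batadv_iv_ogm_schedule() wraps the whole rebuild in ogm_buff_mutex. A rough sketch of the seqno part using C11 atomics; the packet struct and the initial value are invented here for illustration:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    /* illustrative stand-in for the seqno field of the on-wire OGM header */
    struct ogm_packet {
        uint32_t seqno;
    };

    static atomic_uint ogm_seqno;   /* mirrors hard_iface->bat_iv.ogm_seqno */

    /* stamp the next OGM with the current sequence number, then advance it,
     * as the matched lines in batadv_iv_ogm_schedule_buff() do */
    static void schedule_ogm(struct ogm_packet *pkt)
    {
        uint32_t seqno = atomic_load(&ogm_seqno);

        pkt->seqno = seqno;             /* the kernel stores it in network byte order */
        atomic_fetch_add(&ogm_seqno, 1);
    }

    int main(void)
    {
        struct ogm_packet pkt;

        atomic_init(&ogm_seqno, 42);    /* the kernel seeds this with a random value */
        schedule_ogm(&pkt);
        printf("sent OGM with seqno %u\n", (unsigned int)pkt.seqno);
        return 0;
    }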
892 spin_lock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_orig_ifinfo_sum()
893 sum = orig_ifinfo->bat_iv.bcast_own_sum; in batadv_iv_orig_ifinfo_sum()
894 spin_unlock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_orig_ifinfo_sum()
959 batadv_ring_buffer_set(neigh_ifinfo->bat_iv.tq_recv, in batadv_iv_ogm_orig_update()
960 &neigh_ifinfo->bat_iv.tq_index, 0); in batadv_iv_ogm_orig_update()
961 tq_avg = batadv_ring_buffer_avg(neigh_ifinfo->bat_iv.tq_recv); in batadv_iv_ogm_orig_update()
962 neigh_ifinfo->bat_iv.tq_avg = tq_avg; in batadv_iv_ogm_orig_update()
996 batadv_ring_buffer_set(neigh_ifinfo->bat_iv.tq_recv, in batadv_iv_ogm_orig_update()
997 &neigh_ifinfo->bat_iv.tq_index, in batadv_iv_ogm_orig_update()
999 tq_avg = batadv_ring_buffer_avg(neigh_ifinfo->bat_iv.tq_recv); in batadv_iv_ogm_orig_update()
1000 neigh_ifinfo->bat_iv.tq_avg = tq_avg; in batadv_iv_ogm_orig_update()
1023 if (router_ifinfo->bat_iv.tq_avg > neigh_ifinfo->bat_iv.tq_avg) in batadv_iv_ogm_orig_update()
1031 neigh_ifinfo->bat_iv.tq_avg == router_ifinfo->bat_iv.tq_avg) { in batadv_iv_ogm_orig_update()
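The batadv_iv_ogm_orig_update() matches show the per-neighbour TQ bookkeeping: each received TQ value goes into the tq_recv ring buffer via batadv_ring_buffer_set(), the average is recomputed with batadv_ring_buffer_avg(), and the result is cached in tq_avg for the router comparisons at lines 1023 and 1031. A self-contained sketch of that sliding-average idea; the window size and helper names are chosen here for illustration, and the real batadv_ring_buffer_avg() skips empty slots rather than taking a plain mean:

    #include <stdint.h>
    #include <stdio.h>

    #define TQ_WINDOW 5   /* illustrative; batman-adv uses its own window size constant */

    struct neigh_tq {
        uint8_t tq_recv[TQ_WINDOW];   /* ring buffer of recently received TQ values */
        uint8_t tq_index;             /* next slot to overwrite */
        uint8_t tq_avg;               /* cached average used for route decisions */
    };

    /* push a new sample into the ring buffer, like batadv_ring_buffer_set() */
    static void tq_ring_set(struct neigh_tq *n, uint8_t value)
    {
        n->tq_recv[n->tq_index] = value;
        n->tq_index = (n->tq_index + 1) % TQ_WINDOW;
    }

    /* recompute a simple mean over the window (the kernel helper ignores empty slots) */
    static uint8_t tq_ring_avg(const struct neigh_tq *n)
    {
        unsigned int sum = 0, i;

        for (i = 0; i < TQ_WINDOW; i++)
            sum += n->tq_recv[i];

        return sum / TQ_WINDOW;
    }

    int main(void)
    {
        struct neigh_tq n = { 0 };
        uint8_t samples[] = { 255, 200, 180, 220, 240 };

        for (unsigned int i = 0; i < sizeof(samples); i++) {
            tq_ring_set(&n, samples[i]);
            n.tq_avg = tq_ring_avg(&n);   /* mirrors neigh_ifinfo->bat_iv.tq_avg = tq_avg */
        }
        printf("tq_avg = %u\n", (unsigned int)n.tq_avg);
        return 0;
    }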
1117 neigh_rq_count = neigh_ifinfo->bat_iv.real_packet_count; in batadv_iv_ogm_calc_tq()
1235 spin_lock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_update_seqnos()
1255 is_dup = batadv_test_bit(neigh_ifinfo->bat_iv.real_bits, in batadv_iv_ogm_update_seqnos()
1271 bitmap = neigh_ifinfo->bat_iv.real_bits; in batadv_iv_ogm_update_seqnos()
1277 neigh_ifinfo->bat_iv.real_packet_count = packet_count; in batadv_iv_ogm_update_seqnos()
1291 spin_unlock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_update_seqnos()
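batadv_iv_ogm_update_seqnos() does its work under orig_node->bat_iv.ogm_cnt_lock: it tests real_bits for a duplicate sequence number, slides the window when a newer seqno arrives, and recounts real_packet_count from the bitmap. A reduced userspace analogue of that sliding-window bookkeeping, assuming a single 64-bit word instead of the kernel's bitmap and a GCC/Clang popcount builtin in place of bitmap_weight(); locking is omitted to keep the sketch short:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define SEQ_WINDOW 64   /* illustrative window size */

    struct seq_window {
        uint64_t real_bits;          /* one bit per seqno inside the window */
        uint8_t real_packet_count;   /* popcount of real_bits */
        uint32_t newest_seqno;       /* newest sequence number seen so far */
    };

    /* record a received seqno; returns true if it was already marked (a duplicate) */
    static bool window_mark(struct seq_window *w, uint32_t seqno)
    {
        int32_t behind = (int32_t)(w->newest_seqno - seqno);
        bool is_dup = false;

        if (behind < 0) {
            /* newer seqno: slide the whole window forward, like batadv_bit_get_packet() */
            int32_t shift = -behind;

            w->real_bits = (shift >= SEQ_WINDOW) ? 0 : w->real_bits << shift;
            w->real_bits |= 1ULL;            /* bit 0 = newest seqno */
            w->newest_seqno = seqno;
        } else if (behind < SEQ_WINDOW) {
            /* seqno inside the window: check for a duplicate, then mark it */
            is_dup = w->real_bits & (1ULL << behind);
            w->real_bits |= 1ULL << behind;
        }

        /* recount received packets from the bitmap, as the kernel does with bitmap_weight() */
        w->real_packet_count = (uint8_t)__builtin_popcountll(w->real_bits);
        return is_dup;
    }

    int main(void)
    {
        struct seq_window w = { 0 };

        window_mark(&w, 10);              /* newest seqno */
        window_mark(&w, 8);               /* older, still inside the window */
        bool dup = window_mark(&w, 8);    /* same seqno again: duplicate */
        printf("dup=%d count=%u\n", dup, (unsigned int)w.real_packet_count);
        return 0;
    }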
1372 if ((router_ifinfo && router_ifinfo->bat_iv.tq_avg != 0) && in batadv_iv_ogm_process_per_outif()
1531 spin_lock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_process_reply()
1534 batadv_set_bit(orig_ifinfo->bat_iv.bcast_own, bit_pos); in batadv_iv_ogm_process_reply()
1535 weight = &orig_ifinfo->bat_iv.bcast_own_sum; in batadv_iv_ogm_process_reply()
1536 *weight = bitmap_weight(orig_ifinfo->bat_iv.bcast_own, in batadv_iv_ogm_process_reply()
1538 spin_unlock_bh(&orig_node->bat_iv.ogm_cnt_lock); in batadv_iv_ogm_process_reply()
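batadv_iv_ogm_process_reply() marks an echoed own OGM in bcast_own with batadv_set_bit() and refreshes bcast_own_sum via bitmap_weight(), all under the same ogm_cnt_lock. A short sketch of that recount-under-lock pattern, assuming a pthread mutex in place of the kernel spinlock and one 64-bit word in place of the kernel bitmap:

    #include <pthread.h>
    #include <stdint.h>

    struct orig_ifinfo_bat_iv {
        uint64_t bcast_own;       /* bitmap of own OGMs echoed back on this interface */
        uint8_t bcast_own_sum;    /* cached popcount, feeds the TQ calculation */
    };

    /* stands in for orig_node->bat_iv.ogm_cnt_lock */
    static pthread_mutex_t ogm_cnt_lock = PTHREAD_MUTEX_INITIALIZER;

    /* mark one echoed OGM and refresh the cached sum, as in batadv_iv_ogm_process_reply() */
    static void mark_own_echo(struct orig_ifinfo_bat_iv *ifinfo, unsigned int bit_pos)
    {
        pthread_mutex_lock(&ogm_cnt_lock);

        ifinfo->bcast_own |= 1ULL << (bit_pos % 64);   /* batadv_set_bit() on bcast_own */
        /* recount instead of incrementing, which is what bitmap_weight() is used for */
        ifinfo->bcast_own_sum = (uint8_t)__builtin_popcountll(ifinfo->bcast_own);

        pthread_mutex_unlock(&ogm_cnt_lock);
    }

    int main(void)
    {
        struct orig_ifinfo_bat_iv ifinfo = { 0 };

        mark_own_echo(&ifinfo, 3);
        mark_own_echo(&ifinfo, 7);
        return ifinfo.bcast_own_sum == 2 ? 0 : 1;
    }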
1582 if_incoming_seqno = atomic_read(&if_incoming->bat_iv.ogm_seqno); in batadv_iv_ogm_process()
1791 *tq_avg = n_ifinfo->bat_iv.tq_avg; in batadv_iv_ogm_neigh_get_tq_avg()
2033 tq1 = neigh1_ifinfo->bat_iv.tq_avg; in batadv_iv_ogm_neigh_diff()
2034 tq2 = neigh2_ifinfo->bat_iv.tq_avg; in batadv_iv_ogm_neigh_diff()
2274 tq_avg = router_ifinfo->bat_iv.tq_avg; in batadv_iv_gw_get_best_gw_node()
2361 gw_tq_avg = router_gw_ifinfo->bat_iv.tq_avg; in batadv_iv_gw_is_eligible()
2362 orig_tq_avg = router_orig_ifinfo->bat_iv.tq_avg; in batadv_iv_gw_is_eligible()
2440 nla_put_u8(msg, BATADV_ATTR_TQ, router_ifinfo->bat_iv.tq_avg) || in batadv_iv_gw_dump_entry()