Lines Matching refs:peer
(Identifier cross-reference for 'peer' in WireGuard's transmit path, drivers/net/wireguard/send.c in the Linux tree. The number at the start of each entry is the source line in that file; the trailing "in <function>()" names the enclosing function, and entries where 'peer' is declared additionally note whether it is a function argument or a local variable.)

21 static void wg_packet_send_handshake_initiation(struct wg_peer *peer)  in wg_packet_send_handshake_initiation()  argument
25 if (!wg_birthdate_has_expired(atomic64_read(&peer->last_sent_handshake), in wg_packet_send_handshake_initiation()
29 atomic64_set(&peer->last_sent_handshake, ktime_get_coarse_boottime_ns()); in wg_packet_send_handshake_initiation()
31 peer->device->dev->name, peer->internal_id, in wg_packet_send_handshake_initiation()
32 &peer->endpoint.addr); in wg_packet_send_handshake_initiation()
34 if (wg_noise_handshake_create_initiation(&packet, &peer->handshake)) { in wg_packet_send_handshake_initiation()
35 wg_cookie_add_mac_to_packet(&packet, sizeof(packet), peer); in wg_packet_send_handshake_initiation()
36 wg_timers_any_authenticated_packet_traversal(peer); in wg_packet_send_handshake_initiation()
37 wg_timers_any_authenticated_packet_sent(peer); in wg_packet_send_handshake_initiation()
38 atomic64_set(&peer->last_sent_handshake, in wg_packet_send_handshake_initiation()
40 wg_socket_send_buffer_to_peer(peer, &packet, sizeof(packet), in wg_packet_send_handshake_initiation()
42 wg_timers_handshake_initiated(peer); in wg_packet_send_handshake_initiation()
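Read together, the matches above cover nearly all of wg_packet_send_handshake_initiation(): it rate-limits itself against peer->last_sent_handshake, stamps the new send time, builds an initiation message, adds the cookie MACs, pokes the timers, and transmits. The condensed sketch below shows how those lines plausibly fit together; the local packet variable, the REKEY_TIMEOUT window, the DSCP value, and the skipped debug print are not in the listing and are filled in as assumptions.

    static void wg_packet_send_handshake_initiation(struct wg_peer *peer)
    {
        struct message_handshake_initiation packet;  /* assumed local */

        /* Rate limit: skip if an initiation went out within REKEY_TIMEOUT. */
        if (!wg_birthdate_has_expired(atomic64_read(&peer->last_sent_handshake),
                                      REKEY_TIMEOUT))
            return;
        atomic64_set(&peer->last_sent_handshake, ktime_get_coarse_boottime_ns());

        if (wg_noise_handshake_create_initiation(&packet, &peer->handshake)) {
            wg_cookie_add_mac_to_packet(&packet, sizeof(packet), peer);
            wg_timers_any_authenticated_packet_traversal(peer);
            wg_timers_any_authenticated_packet_sent(peer);
            atomic64_set(&peer->last_sent_handshake,
                         ktime_get_coarse_boottime_ns());
            wg_socket_send_buffer_to_peer(peer, &packet, sizeof(packet),
                                          HANDSHAKE_DSCP);
            wg_timers_handshake_initiated(peer);
        }
    }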
48 struct wg_peer *peer = container_of(work, struct wg_peer, in wg_packet_handshake_send_worker() local
51 wg_packet_send_handshake_initiation(peer); in wg_packet_handshake_send_worker()
52 wg_peer_put(peer); in wg_packet_handshake_send_worker()
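The worker itself is tiny: recover the peer from its work_struct, send the initiation, then drop the reference that was taken when the work was queued. Sketch (the work_struct parameter is assumed; only the container_of(), the call, and the wg_peer_put() appear in the listing):

    void wg_packet_handshake_send_worker(struct work_struct *work)
    {
        struct wg_peer *peer = container_of(work, struct wg_peer,
                                            transmit_handshake_work);

        wg_packet_send_handshake_initiation(peer);
        wg_peer_put(peer);  /* ref taken by whoever queued this work */
    }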
55 void wg_packet_send_queued_handshake_initiation(struct wg_peer *peer, in wg_packet_send_queued_handshake_initiation() argument
59 peer->timer_handshake_attempts = 0; in wg_packet_send_queued_handshake_initiation()
66 if (!wg_birthdate_has_expired(atomic64_read(&peer->last_sent_handshake), in wg_packet_send_queued_handshake_initiation()
68 unlikely(READ_ONCE(peer->is_dead))) in wg_packet_send_queued_handshake_initiation()
71 wg_peer_get(peer); in wg_packet_send_queued_handshake_initiation()
75 if (!queue_work(peer->device->handshake_send_wq, in wg_packet_send_queued_handshake_initiation()
76 &peer->transmit_handshake_work)) in wg_packet_send_queued_handshake_initiation()
80 wg_peer_put(peer); in wg_packet_send_queued_handshake_initiation()
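This is the queued variant used from timers and the data path: a fresh (non-retry) attempt resets the retry counter, the work is skipped entirely if a handshake already went out inside the rekey window or the peer is being torn down, and otherwise a peer reference is taken and the handshake worker is queued on the device's handshake_send_wq. If queue_work() reports the work already pending, the extra reference is dropped on the spot. Sketch under those assumptions (the is_retry parameter name, the out label, and the BH RCU read section are not visible in the listing):

    void wg_packet_send_queued_handshake_initiation(struct wg_peer *peer,
                                                    bool is_retry)
    {
        if (!is_retry)
            peer->timer_handshake_attempts = 0;

        rcu_read_lock_bh();
        if (!wg_birthdate_has_expired(atomic64_read(&peer->last_sent_handshake),
                                      REKEY_TIMEOUT) ||
            unlikely(READ_ONCE(peer->is_dead)))
            goto out;

        wg_peer_get(peer);  /* dropped by the worker */
        if (!queue_work(peer->device->handshake_send_wq,
                        &peer->transmit_handshake_work))
            wg_peer_put(peer);  /* already queued: drop the extra ref */
    out:
        rcu_read_unlock_bh();
    }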
85 void wg_packet_send_handshake_response(struct wg_peer *peer) in wg_packet_send_handshake_response() argument
89 atomic64_set(&peer->last_sent_handshake, ktime_get_coarse_boottime_ns()); in wg_packet_send_handshake_response()
91 peer->device->dev->name, peer->internal_id, in wg_packet_send_handshake_response()
92 &peer->endpoint.addr); in wg_packet_send_handshake_response()
94 if (wg_noise_handshake_create_response(&packet, &peer->handshake)) { in wg_packet_send_handshake_response()
95 wg_cookie_add_mac_to_packet(&packet, sizeof(packet), peer); in wg_packet_send_handshake_response()
96 if (wg_noise_handshake_begin_session(&peer->handshake, in wg_packet_send_handshake_response()
97 &peer->keypairs)) { in wg_packet_send_handshake_response()
98 wg_timers_session_derived(peer); in wg_packet_send_handshake_response()
99 wg_timers_any_authenticated_packet_traversal(peer); in wg_packet_send_handshake_response()
100 wg_timers_any_authenticated_packet_sent(peer); in wg_packet_send_handshake_response()
101 atomic64_set(&peer->last_sent_handshake, in wg_packet_send_handshake_response()
103 wg_socket_send_buffer_to_peer(peer, &packet, in wg_packet_send_handshake_response()
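The response path mirrors the initiation path, with one extra step: after the response message is built and MACed, wg_noise_handshake_begin_session() must succeed in deriving the transport keypair before the timers are armed and the buffer is sent. Sketch; the packet local and the DSCP constant are assumptions, and the skipped debug print is omitted.

    void wg_packet_send_handshake_response(struct wg_peer *peer)
    {
        struct message_handshake_response packet;  /* assumed local */

        atomic64_set(&peer->last_sent_handshake, ktime_get_coarse_boottime_ns());

        if (wg_noise_handshake_create_response(&packet, &peer->handshake)) {
            wg_cookie_add_mac_to_packet(&packet, sizeof(packet), peer);
            /* Only transmit if the transport session could be derived. */
            if (wg_noise_handshake_begin_session(&peer->handshake,
                                                 &peer->keypairs)) {
                wg_timers_session_derived(peer);
                wg_timers_any_authenticated_packet_traversal(peer);
                wg_timers_any_authenticated_packet_sent(peer);
                atomic64_set(&peer->last_sent_handshake,
                             ktime_get_coarse_boottime_ns());
                wg_socket_send_buffer_to_peer(peer, &packet,
                                              sizeof(packet),
                                              HANDSHAKE_DSCP);
            }
        }
    }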
124 static void keep_key_fresh(struct wg_peer *peer) in keep_key_fresh() argument
130 keypair = rcu_dereference_bh(peer->keypairs.current_keypair); in keep_key_fresh()
138 wg_packet_send_queued_handshake_initiation(peer, false); in keep_key_fresh()
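Only three lines of keep_key_fresh() mention peer, so most of this one is inference: the current keypair is peeked under the BH RCU read lock and, if the sending key looks due for rotation, a handshake initiation is queued. The exact rekey condition below (message-count and initiator-side age checks against REKEY_AFTER_MESSAGES / REKEY_AFTER_TIME, and the keypair field names) is an assumption based on WireGuard's rekey rules, not something the listing shows.

    static void keep_key_fresh(struct wg_peer *peer)
    {
        struct noise_keypair *keypair;
        bool send;

        rcu_read_lock_bh();
        keypair = rcu_dereference_bh(peer->keypairs.current_keypair);
        send = keypair && READ_ONCE(keypair->sending.is_valid) &&
               (atomic64_read(&keypair->sending_counter) > REKEY_AFTER_MESSAGES ||
                (keypair->i_am_the_initiator &&
                 wg_birthdate_has_expired(keypair->sending.birthdate,
                                          REKEY_AFTER_TIME)));
        rcu_read_unlock_bh();

        if (unlikely(send))
            wg_packet_send_queued_handshake_initiation(peer, false);
    }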
221 void wg_packet_send_keepalive(struct wg_peer *peer) in wg_packet_send_keepalive() argument
225 if (skb_queue_empty_lockless(&peer->staged_packet_queue)) { in wg_packet_send_keepalive()
231 skb->dev = peer->device->dev; in wg_packet_send_keepalive()
233 skb_queue_tail(&peer->staged_packet_queue, skb); in wg_packet_send_keepalive()
235 peer->device->dev->name, peer->internal_id, in wg_packet_send_keepalive()
236 &peer->endpoint.addr); in wg_packet_send_keepalive()
239 wg_packet_send_staged_packets(peer); in wg_packet_send_keepalive()
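A keepalive is just an empty data packet: if nothing is already staged for this peer, allocate a zero-payload skb, point it at the WireGuard netdev, stage it, and then push the staged queue through the normal data path. The allocation details (alloc_skb() sizing with DATA_PACKET_HEAD_ROOM and MESSAGE_MINIMUM_LENGTH, GFP_ATOMIC) are not in the listing and are assumptions.

    void wg_packet_send_keepalive(struct wg_peer *peer)
    {
        struct sk_buff *skb;

        if (skb_queue_empty_lockless(&peer->staged_packet_queue)) {
            /* Assumed: allocate an empty data packet with TX headroom. */
            skb = alloc_skb(DATA_PACKET_HEAD_ROOM + MESSAGE_MINIMUM_LENGTH,
                            GFP_ATOMIC);
            if (unlikely(!skb))
                return;
            skb_reserve(skb, DATA_PACKET_HEAD_ROOM);
            skb->dev = peer->device->dev;
            skb_queue_tail(&peer->staged_packet_queue, skb);
        }

        wg_packet_send_staged_packets(peer);
    }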
242 static void wg_packet_create_data_done(struct wg_peer *peer, struct sk_buff *first) in wg_packet_create_data_done() argument
247 wg_timers_any_authenticated_packet_traversal(peer); in wg_packet_create_data_done()
248 wg_timers_any_authenticated_packet_sent(peer); in wg_packet_create_data_done()
251 if (likely(!wg_socket_send_skb_to_peer(peer, skb, in wg_packet_create_data_done()
257 wg_timers_data_sent(peer); in wg_packet_create_data_done()
259 keep_key_fresh(peer); in wg_packet_create_data_done()
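Runs once a batch has been encrypted: bump the "any authenticated packet" timers, transmit every skb in the list, remember whether any real (non-keepalive) data actually went out so the data-sent timer can be armed, and finally let keep_key_fresh() decide whether a rekey is due. The list walk and the keepalive test (a payload of length message_data_len(0)) are filled in as assumptions around the matched lines.

    static void wg_packet_create_data_done(struct wg_peer *peer,
                                           struct sk_buff *first)
    {
        struct sk_buff *skb, *next;
        bool is_keepalive, data_sent = false;

        wg_timers_any_authenticated_packet_traversal(peer);
        wg_timers_any_authenticated_packet_sent(peer);
        skb_list_walk_safe(first, skb, next) {
            is_keepalive = skb->len == message_data_len(0);
            if (likely(!wg_socket_send_skb_to_peer(peer, skb,
                                                   PACKET_CB(skb)->ds) &&
                       !is_keepalive))
                data_sent = true;
        }

        if (likely(data_sent))
            wg_timers_data_sent(peer);

        keep_key_fresh(peer);
    }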
264 struct wg_peer *peer = container_of(work, struct wg_peer, transmit_packet_work); in wg_packet_tx_worker() local
269 while ((first = wg_prev_queue_peek(&peer->tx_queue)) != NULL && in wg_packet_tx_worker()
272 wg_prev_queue_drop_peeked(&peer->tx_queue); in wg_packet_tx_worker()
276 wg_packet_create_data_done(peer, first); in wg_packet_tx_worker()
281 wg_peer_put(peer); in wg_packet_tx_worker()
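The transmit worker drains the peer's tx_queue in order: keep peeking at the head while its encryption has completed (successfully or not), drop it from the queue, pass encrypted lists on to wg_packet_create_data_done(), free failed ones, and release the per-batch peer reference each iteration. The packet-state machinery (PACKET_CB state values, acquire-ordered read) and the failure branch are assumptions; only the peek, drop, done and put calls appear in the listing.

    void wg_packet_tx_worker(struct work_struct *work)
    {
        struct wg_peer *peer = container_of(work, struct wg_peer,
                                            transmit_packet_work);
        enum packet_state state;
        struct sk_buff *first;

        while ((first = wg_prev_queue_peek(&peer->tx_queue)) != NULL &&
               (state = atomic_read_acquire(&PACKET_CB(first)->state)) !=
                    PACKET_STATE_UNCRYPTED) {
            wg_prev_queue_drop_peeked(&peer->tx_queue);

            if (likely(state == PACKET_STATE_CRYPTED))
                wg_packet_create_data_done(peer, first);
            else
                kfree_skb_list(first);  /* encryption failed */

            /* keypair reference release elided */
            wg_peer_put(peer);  /* ref taken when the batch was queued */
        }
    }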
311 static void wg_packet_create_data(struct wg_peer *peer, struct sk_buff *first) in wg_packet_create_data() argument
313 struct wg_device *wg = peer->device; in wg_packet_create_data()
317 if (unlikely(READ_ONCE(peer->is_dead))) in wg_packet_create_data()
320 ret = wg_queue_enqueue_per_device_and_peer(&wg->encrypt_queue, &peer->tx_queue, first, in wg_packet_create_data()
329 wg_peer_put(peer); in wg_packet_create_data()
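wg_packet_create_data() is the hand-off into the parallel encryption machinery: under the BH RCU lock it refuses dead peers, then enqueues the skb list on both the device-wide encrypt_queue and this peer's tx_queue so the crypt workqueue can pick it up. The cleanup at the end (dropping the keypair and peer references and freeing the list when the enqueue fails) is assumed from the trailing wg_peer_put() match; the real error handling is more nuanced.

    static void wg_packet_create_data(struct wg_peer *peer, struct sk_buff *first)
    {
        struct wg_device *wg = peer->device;
        int ret = -EINVAL;

        rcu_read_lock_bh();
        if (unlikely(READ_ONCE(peer->is_dead)))
            goto err;

        ret = wg_queue_enqueue_per_device_and_peer(&wg->encrypt_queue,
                                                   &peer->tx_queue, first,
                                                   wg->packet_crypt_wq);
    err:
        rcu_read_unlock_bh();
        if (likely(!ret))
            return;

        /* Assumed cleanup when the batch could not be queued at all. */
        wg_noise_keypair_put(PACKET_CB(first)->keypair, false);
        wg_peer_put(peer);
        kfree_skb_list(first);
    }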
333 void wg_packet_purge_staged_packets(struct wg_peer *peer) in wg_packet_purge_staged_packets() argument
335 spin_lock_bh(&peer->staged_packet_queue.lock); in wg_packet_purge_staged_packets()
336 DEV_STATS_ADD(peer->device->dev, tx_dropped, in wg_packet_purge_staged_packets()
337 peer->staged_packet_queue.qlen); in wg_packet_purge_staged_packets()
338 __skb_queue_purge(&peer->staged_packet_queue); in wg_packet_purge_staged_packets()
339 spin_unlock_bh(&peer->staged_packet_queue.lock); in wg_packet_purge_staged_packets()
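The purge helper is visible almost in full: under the staged queue's own lock it accounts every staged skb as a tx_dropped device statistic, then frees them all. Sketch, close to literal:

    void wg_packet_purge_staged_packets(struct wg_peer *peer)
    {
        spin_lock_bh(&peer->staged_packet_queue.lock);
        DEV_STATS_ADD(peer->device->dev, tx_dropped,
                      peer->staged_packet_queue.qlen);
        __skb_queue_purge(&peer->staged_packet_queue);
        spin_unlock_bh(&peer->staged_packet_queue.lock);
    }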
342 void wg_packet_send_staged_packets(struct wg_peer *peer) in wg_packet_send_staged_packets() argument
350 spin_lock_bh(&peer->staged_packet_queue.lock); in wg_packet_send_staged_packets()
351 skb_queue_splice_init(&peer->staged_packet_queue, &packets); in wg_packet_send_staged_packets()
352 spin_unlock_bh(&peer->staged_packet_queue.lock); in wg_packet_send_staged_packets()
359 rcu_dereference_bh(peer->keypairs.current_keypair)); in wg_packet_send_staged_packets()
386 wg_peer_get(keypair->entry.peer); in wg_packet_send_staged_packets()
388 wg_packet_create_data(peer, packets.next); in wg_packet_send_staged_packets()
406 spin_lock_bh(&peer->staged_packet_queue.lock); in wg_packet_send_staged_packets()
407 skb_queue_splice(&packets, &peer->staged_packet_queue); in wg_packet_send_staged_packets()
408 spin_unlock_bh(&peer->staged_packet_queue.lock); in wg_packet_send_staged_packets()
413 wg_packet_send_queued_handshake_initiation(peer, false); in wg_packet_send_staged_packets()
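wg_packet_send_staged_packets() is the longest function here and the filter hides most of it: it splices the staged queue into a local list under the lock, takes a reference on the current keypair, stamps a nonce on every packet, and hands the batch to wg_packet_create_data(); on any key problem it re-splices the packets back onto the staged queue and falls back to queueing a handshake initiation, which is why the function ends on that call. The sketch below compresses the keypair validity and nonce-exhaustion checks into comments; the skb_queue_walk() loop, the PACKET_CB fields, and the early return on an empty queue are assumptions around the matched lines.

    void wg_packet_send_staged_packets(struct wg_peer *peer)
    {
        struct noise_keypair *keypair;
        struct sk_buff_head packets;
        struct sk_buff *skb;

        /* Steal the whole staged queue under its lock. */
        __skb_queue_head_init(&packets);
        spin_lock_bh(&peer->staged_packet_queue.lock);
        skb_queue_splice_init(&peer->staged_packet_queue, &packets);
        spin_unlock_bh(&peer->staged_packet_queue.lock);
        if (unlikely(skb_queue_empty(&packets)))
            return;

        /* Take a reference to the current keypair (validity checks elided). */
        rcu_read_lock_bh();
        keypair = wg_noise_keypair_get(
            rcu_dereference_bh(peer->keypairs.current_keypair));
        rcu_read_unlock_bh();
        if (unlikely(!keypair))
            goto out_requeue;

        skb_queue_walk(&packets, skb) {
            /* Assign a per-packet nonce; exhaustion handling elided. */
            PACKET_CB(skb)->nonce =
                atomic64_inc_return(&keypair->sending_counter) - 1;
        }

        packets.prev->next = NULL;  /* turn the queue into a plain skb list */
        wg_peer_get(keypair->entry.peer);
        PACKET_CB(packets.next)->keypair = keypair;
        wg_packet_create_data(peer, packets.next);
        return;

    out_requeue:
        /* No usable key: put everything back and ask for a handshake. */
        spin_lock_bh(&peer->staged_packet_queue.lock);
        skb_queue_splice(&packets, &peer->staged_packet_queue);
        spin_unlock_bh(&peer->staged_packet_queue.lock);
        wg_packet_send_queued_handshake_initiation(peer, false);
    }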