Lines matching refs: sg_in
60 struct scatterlist sg_in[3]; in tls_enc_record() local
89 sg_init_table(sg_in, ARRAY_SIZE(sg_in)); in tls_enc_record()
91 sg_set_buf(sg_in, aad, TLS_AAD_SPACE_SIZE); in tls_enc_record()
93 chain_to_walk(sg_in + 1, in); in tls_enc_record()
120 aead_request_set_crypt(aead_req, sg_in, sg_out, len, iv); in tls_enc_record()
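The hits above come from tls_enc_record() in the kernel's TLS device-offload fallback path (net/tls/tls_device_fallback.c in mainline trees): a three-entry source scatterlist is built with the TLS AAD in slot 0 (line 91) and the record payload chained in behind it (line 93), and the whole table is then handed to the AEAD request (line 120). Below is a minimal sketch of that layout, assuming a flat payload buffer in place of the scatter_walk that chain_to_walk() splices in; example_build_src() and its parameters are illustrative, not kernel API.

#include <linux/scatterlist.h>

/* Sketch of the sg_in layout built at lines 89-93 above: slot 0 holds
 * the additional authenticated data, slot 1 the record payload. The
 * real code reserves slot 2 so chain_to_walk() can continue the walk;
 * here it simply remains the end marker that sg_init_table() set.
 */
static void example_build_src(struct scatterlist sg_in[3],
                              void *aad, unsigned int aad_len,
                              void *payload, unsigned int payload_len)
{
    sg_init_table(sg_in, 3);
    sg_set_buf(&sg_in[0], aad, aad_len);
    sg_set_buf(&sg_in[1], payload, payload_len);
}

With a matching sg_out table, the call at line 120 then points the request at both halves; len there is the plaintext length, while the AAD length travels separately through aead_request_set_ad().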
148 struct crypto_aead *aead, struct scatterlist *sg_in, in tls_enc_records() argument
155 scatterwalk_start(&in, sg_in); in tls_enc_records()
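tls_enc_records() drives that per-record helper across the whole payload: it opens scatter walks on the source and destination tables (line 155) and loops until the requested length is consumed. A sketch of the loop shape follows; encrypt_one_record() is a hypothetical stand-in for tls_enc_record() and its longer argument list.

#include <crypto/scatterwalk.h>

/* Hypothetical stand-in for tls_enc_record(): encrypts one TLS record
 * from the walk and decrements *len by the amount consumed.
 */
int encrypt_one_record(struct scatter_walk *in, struct scatter_walk *out,
                       int *len);

static int example_enc_records(struct scatterlist *sg_in,
                               struct scatterlist *sg_out, int len)
{
    struct scatter_walk in, out;
    int rc;

    scatterwalk_start(&in, sg_in);      /* as at line 155 above */
    scatterwalk_start(&out, sg_out);

    do {
        rc = encrypt_one_record(&in, &out, &len);
    } while (rc == 0 && len);

    return rc;
}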
235 static int fill_sg_in(struct scatterlist *sg_in, in fill_sg_in() argument
282 sg_set_page(sg_in + i, skb_frag_page(frag), in fill_sg_in()
288 sg_in[i].length += remaining; in fill_sg_in()
293 if (skb_to_sgvec(skb, &sg_in[i], tcp_payload_offset, payload_len) < 0) in fill_sg_in()
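fill_sg_in() populates the table from two sources: page fragments of already-transmitted record data, each taken with a reference (which is why tls_sw_fallback() must call put_page() at line 423), and the live skb's TCP payload mapped in with skb_to_sgvec() (line 293). A sketch of those two steps; it assumes the skb_frag_off()/skb_frag_size() accessors of v5.4+ kernels, and i, frag, tcp_payload_offset and payload_len mirror the names in the hits above.

#include <linux/errno.h>
#include <linux/skbuff.h>
#include <linux/scatterlist.h>

/* Sketch of the two fill steps seen at lines 282 and 293 above. */
static int example_fill(struct scatterlist *sg_in, int i,
                        skb_frag_t *frag, struct sk_buff *skb,
                        int tcp_payload_offset, int payload_len)
{
    /* One page fragment of previously sent record data. */
    sg_set_page(&sg_in[i], skb_frag_page(frag),
                skb_frag_size(frag), skb_frag_off(frag));
    i++;

    /* The skb's TCP payload fills the remaining entries; a negative
     * return means the table was sized too small for the skb.
     */
    if (skb_to_sgvec(skb, &sg_in[i], tcp_payload_offset, payload_len) < 0)
        return -EINVAL;

    return 0;
}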
319 struct scatterlist *sg_in, in tls_enc_skb() argument
360 if (tls_enc_records(aead_req, ctx->aead_send, sg_in, sg_out, aad, iv, in tls_enc_skb()
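tls_enc_skb() assembles the output skb, AAD, IV and AEAD request, then runs tls_enc_records() over the tables (line 360). Stripped of the skb plumbing, the crypto call reduces to the pattern below. This is a simplified synchronous sketch: a real transform may complete asynchronously (crypto_aead_encrypt() returning -EINPROGRESS), which is not handled here, and example_encrypt() is illustrative rather than the driver's own code.

#include <crypto/aead.h>
#include <linux/errno.h>
#include <linux/slab.h>

/* Announce the AAD length with set_ad(), the src/dst tables and the
 * plaintext length with set_crypt(), then encrypt into sg_out.
 */
static int example_encrypt(struct crypto_aead *aead,
                           struct scatterlist *sg_in,
                           struct scatterlist *sg_out,
                           unsigned int data_len, u8 *iv,
                           unsigned int aad_len)
{
    struct aead_request *req;
    int rc;

    req = aead_request_alloc(aead, GFP_ATOMIC);
    if (!req)
        return -ENOMEM;

    aead_request_set_ad(req, aad_len);
    aead_request_set_crypt(req, sg_in, sg_out, data_len, iv);
    rc = crypto_aead_encrypt(req);

    aead_request_free(req);
    return rc;
}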
389 struct scatterlist *sg_in, sg_out[3]; in tls_sw_fallback() local
405 sg_in = kmalloc_array(sg_in_max_elements, sizeof(*sg_in), GFP_ATOMIC); in tls_sw_fallback()
406 if (!sg_in) in tls_sw_fallback()
409 sg_init_table(sg_in, sg_in_max_elements); in tls_sw_fallback()
412 if (fill_sg_in(sg_in, skb, ctx, &rcd_sn, &sync_size, &resync_sgs)) { in tls_sw_fallback()
419 nskb = tls_enc_skb(tls_ctx, sg_out, sg_in, skb, sync_size, rcd_sn); in tls_sw_fallback()
423 put_page(sg_page(&sg_in[--resync_sgs])); in tls_sw_fallback()
424 kfree(sg_in); in tls_sw_fallback()
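Finally, tls_sw_fallback() owns the table's lifetime: sg_in is heap-allocated because the element count depends on the skb's geometry (lines 405-409), filled by fill_sg_in() (line 412), consumed by tls_enc_skb() (line 419), and torn down by dropping the page references fill_sg_in() took before freeing the array (lines 423-424). A sketch of that teardown, with resync_sgs counting the page-backed entries as in the hits above:

#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Release an sg_in table built by fill_sg_in(): the first resync_sgs
 * entries each hold a page reference, so drop those before freeing
 * the array itself, mirroring lines 423-424 above.
 */
static void example_release(struct scatterlist *sg_in, int resync_sgs)
{
    while (resync_sgs)
        put_page(sg_page(&sg_in[--resync_sgs]));
    kfree(sg_in);
}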