/*
 * esp_encrypt.c : IPSec ESP encrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>

#include <vnet/crypto/crypto.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/ipsec_tun.h>
#include <vnet/ipsec/ipsec.api_enum.h>
#include <vnet/ipsec/esp.h>
#include <vnet/tunnel/tunnel_dp.h>

#define foreach_esp_encrypt_next                                              \
  _ (DROP4, "ip4-drop")                                                       \
  _ (DROP6, "ip6-drop")                                                       \
  _ (DROP_MPLS, "mpls-drop")                                                  \
  _ (HANDOFF4, "handoff4")                                                    \
  _ (HANDOFF6, "handoff6")                                                    \
  _ (HANDOFF_MPLS, "handoff-mpls")                                            \
  _ (INTERFACE_OUTPUT, "interface-output")

#define _(v, s) ESP_ENCRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_encrypt_next
#undef _
    ESP_ENCRYPT_N_NEXT,
} esp_encrypt_next_t;

typedef struct
{
  u32 sa_index;
  u32 spi;
  u32 seq;
  u32 sa_seq_hi;
  u8 udp_encap;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_encrypt_trace_t;

typedef struct
{
  u32 next_index;
} esp_encrypt_post_trace_t;

typedef vl_counter_esp_encrypt_enum_t esp_encrypt_error_t;

/* packet trace format function */
static u8 *
format_esp_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_trace_t *t = va_arg (*args, esp_encrypt_trace_t *);

  s =
    format (s,
            "esp: sa-index %d spi %u (0x%08x) seq %u sa-seq-hi %u crypto %U integrity %U%s",
            t->sa_index, t->spi, t->spi, t->seq, t->sa_seq_hi,
            format_ipsec_crypto_alg,
            t->crypto_alg, format_ipsec_integ_alg, t->integ_alg,
            t->udp_encap ? " udp-encap-enabled" : "");
  return s;
}

static u8 *
format_esp_post_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_post_trace_t *t = va_arg (*args, esp_encrypt_post_trace_t *);

  s = format (s, "esp-post: next node index %u", t->next_index);
  return s;
}

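/* ESP trailer layout appended at the tail of the (possibly chained) packet:
 *   <pad bytes 1,2,3,...> <pad_length> <next_header> <room for ICV>
 * esp_add_footer_and_icv() below reserves this space, chaining a fresh
 * buffer when the tail does not fit in the last one, and returns a pointer
 * to the footer's next_header field (0 on buffer allocation failure). */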
/* pad packet in input buffer */
static_always_inline u8 *
esp_add_footer_and_icv (vlib_main_t *vm, vlib_buffer_t **last, u8 esp_align,
                        u8 icv_sz, u16 buffer_data_size, uword total_len)
{
  static const u8 pad_data[ESP_MAX_BLOCK_SIZE] = {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00,
  };

  u16 min_length = total_len + sizeof (esp_footer_t);
  u16 new_length = round_pow2 (min_length, esp_align);
  u8 pad_bytes = new_length - min_length;
  esp_footer_t *f = (esp_footer_t *) (vlib_buffer_get_current (last[0]) +
                                      last[0]->current_length + pad_bytes);
  u16 tail_sz = sizeof (esp_footer_t) + pad_bytes + icv_sz;

  if (last[0]->current_data + last[0]->current_length + tail_sz >
      buffer_data_size)
    {
      u32 tmp_bi = 0;
      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
        return 0;

      vlib_buffer_t *tmp = vlib_get_buffer (vm, tmp_bi);
      last[0]->next_buffer = tmp_bi;
      last[0]->flags |= VLIB_BUFFER_NEXT_PRESENT;
      f = (esp_footer_t *) (vlib_buffer_get_current (tmp) + pad_bytes);
      tmp->current_length += tail_sz;
      last[0] = tmp;
    }
  else
    last[0]->current_length += tail_sz;

  f->pad_length = pad_bytes;
  if (pad_bytes)
    {
      ASSERT (pad_bytes <= ESP_MAX_BLOCK_SIZE);
      pad_bytes = clib_min (ESP_MAX_BLOCK_SIZE, pad_bytes);
      clib_memcpy_fast ((u8 *) f - pad_bytes, pad_data, pad_bytes);
    }

  return &f->next_header;
}

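/* Fix up the outer IP4 header after encapsulation: write the new total
 * length and, in transport mode, the new protocol, updating the header
 * checksum incrementally instead of recomputing it. */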
static_always_inline void
esp_update_ip4_hdr (ip4_header_t * ip4, u16 len, int is_transport, int is_udp)
{
  ip_csum_t sum;
  u16 old_len;

  len = clib_net_to_host_u16 (len);
  old_len = ip4->length;

  if (is_transport)
    {
      u8 prot = is_udp ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;

      sum = ip_csum_update (ip4->checksum, ip4->protocol,
                            prot, ip4_header_t, protocol);
      ip4->protocol = prot;

      sum = ip_csum_update (sum, old_len, len, ip4_header_t, length);
    }
  else
    sum = ip_csum_update (ip4->checksum, old_len, len, ip4_header_t, length);

  ip4->length = len;
  ip4->checksum = ip_csum_fold (sum);
}

static_always_inline void
esp_fill_udp_hdr (ipsec_sa_t * sa, udp_header_t * udp, u16 len)
{
  clib_memcpy_fast (udp, &sa->udp_hdr, sizeof (udp_header_t));
  udp->length = clib_net_to_host_u16 (len);
}

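/* IPv6 transport-mode helpers: hop-by-hop, routing and fragment extension
 * headers are kept in front of the ESP header (i.e. in clear text);
 * esp_get_ip6_hdr_len() returns how many header bytes precede the part to
 * be encrypted and points ext_hdr at the last such extension header. */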
static_always_inline u8
ext_hdr_is_pre_esp (u8 nexthdr)
{
#ifdef CLIB_HAVE_VEC128
  static const u8x16 ext_hdr_types = {
    IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS,
    IP_PROTOCOL_IPV6_ROUTE,
    IP_PROTOCOL_IPV6_FRAGMENTATION,
  };

  return !u8x16_is_all_zero (ext_hdr_types == u8x16_splat (nexthdr));
#else
  return (!(nexthdr ^ IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS) ||
          !(nexthdr ^ IP_PROTOCOL_IPV6_ROUTE) ||
          !(nexthdr ^ IP_PROTOCOL_IPV6_FRAGMENTATION));
#endif
}

static_always_inline u8
esp_get_ip6_hdr_len (ip6_header_t * ip6, ip6_ext_header_t ** ext_hdr)
{
  /* this code assumes that HbH, route and frag headers will be before
     others, if that is not the case, they will end up encrypted */
  u8 len = sizeof (ip6_header_t);
  ip6_ext_header_t *p;

  /* if next packet doesn't have ext header */
  if (ext_hdr_is_pre_esp (ip6->protocol) == 0)
    {
      *ext_hdr = NULL;
      return len;
    }

  p = ip6_next_header (ip6);
  len += ip6_ext_header_len (p);
  while (ext_hdr_is_pre_esp (p->next_hdr))
    {
      len += ip6_ext_header_len (p);
      p = ip6_ext_next_header (p);
    }

  *ext_hdr = p;
  return len;
}

/* IPsec IV generation: IV requirements differ depending on the
 * encryption mode: IVs must be unpredictable for AES-CBC, whereas they can
 * be predictable but must never be reused with the same key material
 * for CTR and GCM.
 * To avoid reusing the same IVs between multiple VPP instances and between
 * restarts, we use a properly seeded PRNG to generate IVs. To ensure the IV
 * is unpredictable for CBC, it is then encrypted using the same key as the
 * message. You can refer to NIST SP800-38a and NIST SP800-38d for more
 * details. */
static_always_inline void *
esp_generate_iv (ipsec_sa_t *sa, void *payload, int iv_sz)
{
  ASSERT (iv_sz >= sizeof (u64));
  u64 *iv = (u64 *) (payload - iv_sz);
  clib_memset_u8 (iv, 0, iv_sz);
  *iv = clib_pcg64i_random_r (&sa->iv_prng);
  return iv;
}

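/* Run the queued synchronous crypto/integrity operations (simple and
 * chained variants) and route any buffer whose operation did not complete
 * to the drop next; op->user_data carries the buffer's index into the
 * sync arrays. */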
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks,
                         u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          esp_encrypt_set_next_index (b[bi], node, vm->thread_index,
                                      ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR,
                                      bi, nexts, drop_next,
                                      vnet_buffer (b[bi])->ipsec.sad_index);
          n_fail--;
        }
      op++;
    }
}

static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          esp_encrypt_set_next_index (b[bi], node, vm->thread_index,
                                      ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR,
                                      bi, nexts, drop_next,
                                      vnet_buffer (b[bi])->ipsec.sad_index);
          n_fail--;
        }
      op++;
    }
}

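/* Build the scatter-gather chunk vector used for chained buffers: one chunk
 * per buffer in the chain, with the ICV bytes excluded from the last one.
 * Both helpers return the total length covered; the integ variant also
 * appends the high 32 bits of the sequence number to the data to be
 * authenticated when ESN is enabled. */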
static_always_inline u32
esp_encrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          ipsec_sa_t * sa0, vlib_buffer_t * b,
                          vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                          u32 start_len, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
        total_len += ch->len = cb->current_length - icv_sz;
      else
        total_len += ch->len = cb->current_length;
      ch->src = ch->dst = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

static_always_inline u32
esp_encrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         ipsec_sa_t * sa0, vlib_buffer_t * b,
                         vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                         u32 start_len, u8 * digest, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
        {
          total_len += ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
              clib_memcpy_fast (digest, &seq_hi, sizeof (seq_hi));
              ch->len += sizeof (seq_hi);
              total_len += sizeof (seq_hi);
            }
        }
      else
        total_len += ch->len = cb->current_length;
      ch->src = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

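/* Queue a synchronous cipher operation, plus a separate integrity operation
 * when the SA uses one, for a single packet. The nonce/AAD scratch space
 * lives in front of the encap headers; for CTR/AEAD modes the freshly
 * generated IV travels inside the nonce, while for block ciphers a zero IV
 * is used for the operation and the random IV bytes in front of the payload
 * are encrypted together with the message. */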
always_inline void
esp_prepare_sync_op (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
                     vnet_crypto_op_t **crypto_ops,
                     vnet_crypto_op_t **integ_ops, ipsec_sa_t *sa0, u32 seq_hi,
                     u8 *payload, u16 payload_len, u8 iv_sz, u8 icv_sz, u32 bi,
                     vlib_buffer_t **b, vlib_buffer_t *lb, u32 hdr_len,
                     esp_header_t *esp)
{
  if (sa0->crypto_enc_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (crypto_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->crypto_enc_op_id);
      u8 *crypto_start = payload;
      /* esp_add_footer_and_icv() in esp_encrypt_inline() makes sure we always
       * have enough space for ESP header and footer which includes ICV */
      ASSERT (payload_len > icv_sz);
      u16 crypto_len = payload_len - icv_sz;

      /* generate the IV in front of the payload */
      void *pkt_iv = esp_generate_iv (sa0, payload, iv_sz);

      op->key_index = sa0->crypto_key_index;
      op->user_data = bi;

      if (ipsec_sa_is_set_IS_CTR (sa0))
        {
          /* construct nonce in a scratch space in front of the IP header */
          esp_ctr_nonce_t *nonce =
            (esp_ctr_nonce_t *) (pkt_iv - hdr_len - sizeof (*nonce));
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              /* construct aad in a scratch space in front of the nonce */
              op->aad = (u8 *) nonce - sizeof (esp_aead_t);
              op->aad_len = esp_aad_fill (op->aad, esp, sa0, seq_hi);
              op->tag = payload + crypto_len;
              op->tag_len = 16;
              if (PREDICT_FALSE (ipsec_sa_is_set_IS_NULL_GMAC (sa0)))
                {
                  /* RFC-4543 ENCR_NULL_AUTH_AES_GMAC: IV is part of AAD */
                  crypto_start -= iv_sz;
                  crypto_len += iv_sz;
                }
            }
          else
            {
              nonce->ctr = clib_host_to_net_u32 (1);
            }

          nonce->salt = sa0->salt;
          nonce->iv = *(u64 *) pkt_iv;
          op->iv = (u8 *) nonce;
        }
      else
        {
          /* construct zero iv in front of the IP header */
          op->iv = pkt_iv - hdr_len - iv_sz;
          clib_memset_u8 (op->iv, 0, iv_sz);
          /* include iv field in crypto */
          crypto_start -= iv_sz;
          crypto_len += iv_sz;
        }

      if (PREDICT_FALSE (lb != b[0]))
        {
          /* is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->tag = vlib_buffer_get_tail (lb) - icv_sz;
          esp_encrypt_chain_crypto (vm, ptd, sa0, b[0], lb, icv_sz,
                                    crypto_start, crypto_len + icv_sz,
                                    &op->n_chunks);
        }
      else
        {
          /* not chained */
          op->src = op->dst = crypto_start;
          op->len = crypto_len;
        }
    }

  if (sa0->integ_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (integ_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->src = payload - iv_sz - sizeof (esp_header_t);
      op->digest = payload + payload_len - icv_sz;
      op->key_index = sa0->integ_key_index;
      op->digest_len = icv_sz;
      op->len = payload_len - icv_sz + iv_sz + sizeof (esp_header_t);
      op->user_data = bi;

      if (lb != b[0])
        {
          /* is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->digest = vlib_buffer_get_tail (lb) - icv_sz;

          esp_encrypt_chain_integ (vm, ptd, sa0, b[0], lb, icv_sz,
                                   payload - iv_sz - sizeof (esp_header_t),
                                   payload_len + iv_sz +
                                   sizeof (esp_header_t), op->digest,
                                   &op->n_chunks);
        }
      else if (ipsec_sa_is_set_USE_ESN (sa0))
        {
          u32 tmp = clib_net_to_host_u32 (seq_hi);
          clib_memcpy_fast (op->digest, &tmp, sizeof (seq_hi));
          op->len += sizeof (seq_hi);
        }
    }
}

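/* Async variant of esp_prepare_sync_op(): the same work is described as an
 * entry in a vnet crypto async frame (buffer-relative offsets instead of
 * pointers) and the chosen next node is stashed in the buffer's post data
 * for the post node to use once the engine completes. */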
static_always_inline void
esp_prepare_async_frame (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
                         vnet_crypto_async_frame_t *async_frame,
                         ipsec_sa_t *sa, vlib_buffer_t *b, esp_header_t *esp,
                         u8 *payload, u32 payload_len, u8 iv_sz, u8 icv_sz,
                         u32 bi, u16 next, u32 hdr_len, u16 async_next,
                         vlib_buffer_t *lb)
{
  esp_post_data_t *post = esp_post_data (b);
  u8 *tag, *iv, *aad = 0;
  u8 flag = 0;
  const u32 key_index = sa->crypto_key_index;
  i16 crypto_start_offset, integ_start_offset;
  u16 crypto_total_len, integ_total_len;

  post->next_index = next;

  /* crypto */
  crypto_start_offset = integ_start_offset = payload - b->data;
  crypto_total_len = integ_total_len = payload_len - icv_sz;
  tag = payload + crypto_total_len;

  /* generate the IV in front of the payload */
  void *pkt_iv = esp_generate_iv (sa, payload, iv_sz);

  if (ipsec_sa_is_set_IS_CTR (sa))
    {
      /* construct nonce in a scratch space in front of the IP header */
      esp_ctr_nonce_t *nonce =
        (esp_ctr_nonce_t *) (pkt_iv - hdr_len - sizeof (*nonce));
      if (ipsec_sa_is_set_IS_AEAD (sa))
        {
          /* construct aad in a scratch space in front of the nonce */
          aad = (u8 *) nonce - sizeof (esp_aead_t);
          esp_aad_fill (aad, esp, sa, sa->seq_hi);
          if (PREDICT_FALSE (ipsec_sa_is_set_IS_NULL_GMAC (sa)))
            {
              /* RFC-4543 ENCR_NULL_AUTH_AES_GMAC: IV is part of AAD */
              crypto_start_offset -= iv_sz;
              crypto_total_len += iv_sz;
            }
        }
      else
        {
          nonce->ctr = clib_host_to_net_u32 (1);
        }

      nonce->salt = sa->salt;
      nonce->iv = *(u64 *) pkt_iv;
      iv = (u8 *) nonce;
    }
  else
    {
      /* construct zero iv in front of the IP header */
      iv = pkt_iv - hdr_len - iv_sz;
      clib_memset_u8 (iv, 0, iv_sz);
      /* include iv field in crypto */
      crypto_start_offset -= iv_sz;
      crypto_total_len += iv_sz;
    }

  if (lb != b)
    {
      /* chain */
      flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
      tag = vlib_buffer_get_tail (lb) - icv_sz;
      crypto_total_len = esp_encrypt_chain_crypto (
        vm, ptd, sa, b, lb, icv_sz, b->data + crypto_start_offset,
        crypto_total_len + icv_sz, 0);
    }

  if (sa->integ_op_id)
    {
      integ_start_offset -= iv_sz + sizeof (esp_header_t);
      integ_total_len += iv_sz + sizeof (esp_header_t);

      if (b != lb)
        {
          integ_total_len = esp_encrypt_chain_integ (
            vm, ptd, sa, b, lb, icv_sz,
            payload - iv_sz - sizeof (esp_header_t),
            payload_len + iv_sz + sizeof (esp_header_t), tag, 0);
        }
      else if (ipsec_sa_is_set_USE_ESN (sa))
        {
          u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
          clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
          integ_total_len += sizeof (seq_hi);
        }
    }

  /* this always succeeds because we know the frame is not full */
  vnet_crypto_async_add_to_frame (vm, async_frame, key_index, crypto_total_len,
                                  integ_total_len - crypto_total_len,
                                  crypto_start_offset, integ_start_offset, bi,
                                  async_next, iv, tag, aad, flag);
}

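/* Shared worker for all esp-encrypt nodes. For every packet: resolve the
 * SA (from the tunnel protection or the buffer metadata), hand off to the
 * SA's owning thread if needed, advance the sequence number, build the
 * tunnel encapsulation or rewrite the transport-mode headers, then queue
 * the packet on the synchronous or asynchronous crypto path. */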
always_inline uword
esp_encrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
                    vlib_frame_t *frame, vnet_link_t lt, int is_tun,
                    u16 async_next_node)
{
  ipsec_main_t *im = &ipsec_main;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, vm->thread_index);
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u32 thread_index = vm->thread_index;
  u16 buffer_data_size = vlib_buffer_get_default_data_size (vm);
  u32 current_sa_index = ~0, current_sa_packets = 0;
  u32 current_sa_bytes = 0, spi = 0;
  u8 esp_align = 4, iv_sz = 0, icv_sz = 0;
  ipsec_sa_t *sa0 = 0;
  u8 sa_drop_no_crypto = 0;
  vlib_buffer_t *lb;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;
  vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  int is_async = im->async_mode;
  vnet_crypto_async_op_id_t async_op = ~0;
  u16 drop_next =
    (lt == VNET_LINK_IP6 ? ESP_ENCRYPT_NEXT_DROP6 :
     (lt == VNET_LINK_IP4 ? ESP_ENCRYPT_NEXT_DROP4 :
      ESP_ENCRYPT_NEXT_DROP_MPLS));
  u16 handoff_next = (lt == VNET_LINK_IP6 ?
                      ESP_ENCRYPT_NEXT_HANDOFF6 :
                      (lt == VNET_LINK_IP4 ? ESP_ENCRYPT_NEXT_HANDOFF4 :
                       ESP_ENCRYPT_NEXT_HANDOFF_MPLS));
  vlib_buffer_t *sync_bufs[VLIB_FRAME_SIZE];
  u16 sync_nexts[VLIB_FRAME_SIZE], *sync_next = sync_nexts, n_sync = 0;
  u16 n_async = 0;
  u16 noop_nexts[VLIB_FRAME_SIZE], n_noop = 0;
  u32 sync_bi[VLIB_FRAME_SIZE];
  u32 noop_bi[VLIB_FRAME_SIZE];
  esp_encrypt_error_t err;

  vlib_get_buffers (vm, from, b, n_left);

  vec_reset_length (ptd->crypto_ops);
  vec_reset_length (ptd->integ_ops);
  vec_reset_length (ptd->chained_crypto_ops);
  vec_reset_length (ptd->chained_integ_ops);
  vec_reset_length (ptd->async_frames);
  vec_reset_length (ptd->chunks);
  clib_memset (async_frames, 0, sizeof (async_frames));

  while (n_left > 0)
    {
      u32 sa_index0;
      dpo_id_t *dpo;
      esp_header_t *esp;
      u8 *payload, *next_hdr_ptr;
      u16 payload_len, payload_len_total, n_bufs;
      u32 hdr_len;

      err = ESP_ENCRYPT_ERROR_RX_PKTS;

      if (n_left > 2)
        {
          u8 *p;
          vlib_prefetch_buffer_header (b[2], LOAD);
          p = vlib_buffer_get_current (b[1]);
          clib_prefetch_load (p);
          p -= CLIB_CACHE_LINE_BYTES;
          clib_prefetch_load (p);
          /* speculate that the trailer goes in the first buffer */
          CLIB_PREFETCH (vlib_buffer_get_tail (b[1]),
                         CLIB_CACHE_LINE_BYTES, LOAD);
        }

      if (is_tun)
        {
          /* we are on an ipsec tunnel's feature arc */
          vnet_buffer (b[0])->ipsec.sad_index =
            sa_index0 = ipsec_tun_protect_get_sa_out
            (vnet_buffer (b[0])->ip.adj_index[VLIB_TX]);

          if (PREDICT_FALSE (INDEX_INVALID == sa_index0))
            {
              err = ESP_ENCRYPT_ERROR_NO_PROTECTION;
              noop_nexts[n_noop] = drop_next;
              b[0]->error = node->errors[err];
              goto trace;
            }
        }
      else
        sa_index0 = vnet_buffer (b[0])->ipsec.sad_index;

      if (sa_index0 != current_sa_index)
        {
          if (current_sa_packets)
            vlib_increment_combined_counter (
              &ipsec_sa_counters, thread_index, current_sa_index,
              current_sa_packets, current_sa_bytes);
          current_sa_packets = current_sa_bytes = 0;

          sa0 = ipsec_sa_get (sa_index0);
          current_sa_index = sa_index0;

          sa_drop_no_crypto = ((sa0->crypto_alg == IPSEC_CRYPTO_ALG_NONE &&
                                sa0->integ_alg == IPSEC_INTEG_ALG_NONE) &&
                               !ipsec_sa_is_set_NO_ALGO_NO_DROP (sa0));

          vlib_prefetch_combined_counter (&ipsec_sa_counters, thread_index,
                                          current_sa_index);

          /* fetch the second cacheline ASAP */
          clib_prefetch_load (sa0->cacheline1);

          spi = clib_net_to_host_u32 (sa0->spi);
          esp_align = sa0->esp_block_align;
          icv_sz = sa0->integ_icv_size;
          iv_sz = sa0->crypto_iv_size;
          is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
        }

      if (PREDICT_FALSE (sa_drop_no_crypto != 0))
        {
          err = ESP_ENCRYPT_ERROR_NO_ENCRYPTION;
          esp_encrypt_set_next_index (b[0], node, thread_index, err, n_noop,
                                      noop_nexts, drop_next, sa_index0);
          goto trace;
        }

      if (PREDICT_FALSE ((u16) ~0 == sa0->thread_index))
        {
          /* this is the first packet to use this SA, claim the SA
           * for this thread. this could happen simultaneously on
           * another thread */
          clib_atomic_cmp_and_swap (&sa0->thread_index, ~0,
                                    ipsec_sa_assign_thread (thread_index));
        }

      if (PREDICT_FALSE (thread_index != sa0->thread_index))
        {
          vnet_buffer (b[0])->ipsec.thread_index = sa0->thread_index;
          err = ESP_ENCRYPT_ERROR_HANDOFF;
          esp_encrypt_set_next_index (b[0], node, thread_index, err, n_noop,
                                      noop_nexts, handoff_next,
                                      current_sa_index);
          goto trace;
        }

      lb = b[0];
      n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
          esp_encrypt_set_next_index (b[0], node, thread_index, err, n_noop,
                                      noop_nexts, drop_next, current_sa_index);
          goto trace;
        }

      if (n_bufs > 1)
        {
          /* find last buffer in the chain */
          while (lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            lb = vlib_get_buffer (vm, lb->next_buffer);
        }

      if (PREDICT_FALSE (esp_seq_advance (sa0)))
        {
          err = ESP_ENCRYPT_ERROR_SEQ_CYCLED;
          esp_encrypt_set_next_index (b[0], node, thread_index, err, n_noop,
                                      noop_nexts, drop_next, current_sa_index);
          goto trace;
        }

      /* space for IV */
      hdr_len = iv_sz;

      if (ipsec_sa_is_set_IS_TUNNEL (sa0))
        {
          payload = vlib_buffer_get_current (b[0]);
          next_hdr_ptr = esp_add_footer_and_icv (
            vm, &lb, esp_align, icv_sz, buffer_data_size,
            vlib_buffer_length_in_chain (vm, b[0]));
          if (!next_hdr_ptr)
            {
              err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
              esp_encrypt_set_next_index (b[0], node, thread_index, err,
                                          n_noop, noop_nexts, drop_next,
                                          current_sa_index);
              goto trace;
            }
          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              esp_fill_udp_hdr (sa0, (udp_header_t *) (payload - hdr_len),
                                payload_len_total + hdr_len);
            }

          /* IP header */
          if (ipsec_sa_is_set_IS_TUNNEL_V6 (sa0))
            {
              ip6_header_t *ip6;
              u16 len = sizeof (ip6_header_t);
              hdr_len += len;
              ip6 = (ip6_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip6, &sa0->ip6_hdr, sizeof (ip6_header_t));

              if (VNET_LINK_IP6 == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_IPV6;
                  tunnel_encap_fixup_6o6 (sa0->tunnel_flags,
                                          (const ip6_header_t *) payload,
                                          ip6);
                }
              else if (VNET_LINK_IP4 == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_IP_IN_IP;
                  tunnel_encap_fixup_4o6 (sa0->tunnel_flags, b[0],
                                          (const ip4_header_t *) payload, ip6);
                }
              else if (VNET_LINK_MPLS == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_MPLS_IN_IP;
                  tunnel_encap_fixup_mplso6 (
                    sa0->tunnel_flags, b[0],
                    (const mpls_unicast_header_t *) payload, ip6);
                }
              else
                ASSERT (0);

              len = payload_len_total + hdr_len - len;
              ip6->payload_length = clib_net_to_host_u16 (len);
              b[0]->flags |= VNET_BUFFER_F_LOCALLY_ORIGINATED;
            }
          else
            {
              ip4_header_t *ip4;
              u16 len = sizeof (ip4_header_t);
              hdr_len += len;
              ip4 = (ip4_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip4, &sa0->ip4_hdr, sizeof (ip4_header_t));

              if (VNET_LINK_IP6 == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_IPV6;
                  tunnel_encap_fixup_6o4_w_chksum (sa0->tunnel_flags,
                                                   (const ip6_header_t *)
                                                   payload, ip4);
                }
              else if (VNET_LINK_IP4 == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_IP_IN_IP;
                  tunnel_encap_fixup_4o4_w_chksum (sa0->tunnel_flags,
                                                   (const ip4_header_t *)
                                                   payload, ip4);
                }
              else if (VNET_LINK_MPLS == lt)
                {
                  *next_hdr_ptr = IP_PROTOCOL_MPLS_IN_IP;
                  tunnel_encap_fixup_mplso4_w_chksum (
                    sa0->tunnel_flags, (const mpls_unicast_header_t *) payload,
                    ip4);
                }
              else
                ASSERT (0);

              len = payload_len_total + hdr_len;
              esp_update_ip4_hdr (ip4, len, /* is_transport */ 0, 0);
            }

          dpo = &sa0->dpo;
          if (!is_tun)
            {
              sync_next[0] = dpo->dpoi_next_node;
              vnet_buffer (b[0])->ip.adj_index[VLIB_TX] = dpo->dpoi_index;
            }
          else
            sync_next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
          b[0]->flags |= VNET_BUFFER_F_LOCALLY_ORIGINATED;
        }
      else /* transport mode */
        {
          u8 *l2_hdr, l2_len, *ip_hdr;
          u16 ip_len;
          ip6_ext_header_t *ext_hdr;
          udp_header_t *udp = 0;
          u16 udp_len = 0;
          u8 *old_ip_hdr = vlib_buffer_get_current (b[0]);

          /*
           * Get extension header chain length. It might be longer than the
           * buffer's pre_data area.
           */
          ip_len =
            (VNET_LINK_IP6 == lt ?
             esp_get_ip6_hdr_len ((ip6_header_t *) old_ip_hdr, &ext_hdr) :
             ip4_header_bytes ((ip4_header_t *) old_ip_hdr));
          if ((old_ip_hdr - ip_len) < &b[0]->pre_data[0])
            {
              err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
              esp_encrypt_set_next_index (b[0], node, thread_index, err,
                                          n_noop, noop_nexts, drop_next,
                                          current_sa_index);
              goto trace;
            }

          vlib_buffer_advance (b[0], ip_len);
          payload = vlib_buffer_get_current (b[0]);
          next_hdr_ptr = esp_add_footer_and_icv (
            vm, &lb, esp_align, icv_sz, buffer_data_size,
            vlib_buffer_length_in_chain (vm, b[0]));
          if (!next_hdr_ptr)
            {
              err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
              esp_encrypt_set_next_index (b[0], node, thread_index, err,
                                          n_noop, noop_nexts, drop_next,
                                          current_sa_index);
              goto trace;
            }

          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              udp = (udp_header_t *) (payload - hdr_len);
            }

          /* IP header */
          hdr_len += ip_len;
          ip_hdr = payload - hdr_len;

          /* L2 header */
          if (!is_tun)
            {
              l2_len = vnet_buffer (b[0])->ip.save_rewrite_length;
              hdr_len += l2_len;
              l2_hdr = payload - hdr_len;

              /* copy l2 and ip header */
              clib_memcpy_le32 (l2_hdr, old_ip_hdr - l2_len, l2_len);
            }
          else
            l2_len = 0;

          u16 len;
          len = payload_len_total + hdr_len - l2_len;

          if (VNET_LINK_IP6 == lt)
            {
              ip6_header_t *ip6 = (ip6_header_t *) (old_ip_hdr);
              if (PREDICT_TRUE (NULL == ext_hdr))
                {
                  *next_hdr_ptr = ip6->protocol;
                  ip6->protocol =
                    (udp) ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;
                }
              else
                {
                  *next_hdr_ptr = ext_hdr->next_hdr;
                  ext_hdr->next_hdr =
                    (udp) ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;
                }
              ip6->payload_length =
                clib_host_to_net_u16 (len - sizeof (ip6_header_t));
            }
          else if (VNET_LINK_IP4 == lt)
            {
              ip4_header_t *ip4 = (ip4_header_t *) (old_ip_hdr);
              *next_hdr_ptr = ip4->protocol;
              esp_update_ip4_hdr (ip4, len, /* is_transport */ 1,
                                  (udp != NULL));
            }

          clib_memcpy_le64 (ip_hdr, old_ip_hdr, ip_len);

          if (udp)
            {
              udp_len = len - ip_len;
              esp_fill_udp_hdr (sa0, udp, udp_len);
            }

          sync_next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
        }

      if (lb != b[0])
        {
          crypto_ops = &ptd->chained_crypto_ops;
          integ_ops = &ptd->chained_integ_ops;
        }
      else
        {
          crypto_ops = &ptd->crypto_ops;
          integ_ops = &ptd->integ_ops;
        }

      esp->spi = spi;
      esp->seq = clib_net_to_host_u32 (sa0->seq);

      if (is_async)
        {
          async_op = sa0->crypto_async_enc_op_id;

          /* get a frame for this op if we don't yet have one or it's full
           */
          if (NULL == async_frames[async_op] ||
              vnet_crypto_async_frame_is_full (async_frames[async_op]))
            {
              async_frames[async_op] =
                vnet_crypto_async_get_frame (vm, async_op);

              if (PREDICT_FALSE (!async_frames[async_op]))
                {
                  err = ESP_ENCRYPT_ERROR_NO_AVAIL_FRAME;
                  esp_encrypt_set_next_index (b[0], node, thread_index, err,
                                              n_noop, noop_nexts, drop_next,
                                              current_sa_index);
                  goto trace;
                }

              /* Save the frame to the list we'll submit at the end */
              vec_add1 (ptd->async_frames, async_frames[async_op]);
            }

          esp_prepare_async_frame (vm, ptd, async_frames[async_op], sa0, b[0],
                                   esp, payload, payload_len, iv_sz, icv_sz,
                                   from[b - bufs], sync_next[0], hdr_len,
                                   async_next_node, lb);
        }
      else
        esp_prepare_sync_op (vm, ptd, crypto_ops, integ_ops, sa0, sa0->seq_hi,
                             payload, payload_len, iv_sz, icv_sz, n_sync, b,
                             lb, hdr_len, esp);

      vlib_buffer_advance (b[0], 0LL - hdr_len);

      current_sa_packets += 1;
      current_sa_bytes += payload_len_total;

    trace:
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_encrypt_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                    sizeof (*tr));
          if (INDEX_INVALID == sa_index0)
            clib_memset_u8 (tr, 0xff, sizeof (*tr));
          else
            {
              tr->sa_index = sa_index0;
              tr->spi = sa0->spi;
              tr->seq = sa0->seq;
              tr->sa_seq_hi = sa0->seq_hi;
              tr->udp_encap = ipsec_sa_is_set_UDP_ENCAP (sa0);
              tr->crypto_alg = sa0->crypto_alg;
              tr->integ_alg = sa0->integ_alg;
            }
        }

      /* next */
      if (ESP_ENCRYPT_ERROR_RX_PKTS != err)
        {
          noop_bi[n_noop] = from[b - bufs];
          n_noop++;
        }
      else if (!is_async)
        {
          sync_bi[n_sync] = from[b - bufs];
          sync_bufs[n_sync] = b[0];
          n_sync++;
          sync_next++;
        }
      else
        {
          n_async++;
        }
      n_left -= 1;
      b += 1;
    }

  if (INDEX_INVALID != current_sa_index)
    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                     current_sa_index, current_sa_packets,
                                     current_sa_bytes);
  if (n_sync)
    {
      esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
                       drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, sync_bufs,
                               sync_nexts, ptd->chunks, drop_next);

      esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
                       drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_integ_ops, sync_bufs,
                               sync_nexts, ptd->chunks, drop_next);

      vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);
    }
  if (n_async)
    {
      /* submit all of the open frames */
      vnet_crypto_async_frame_t **async_frame;

      vec_foreach (async_frame, ptd->async_frames)
        {
          if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
            {
              n_noop += esp_async_recycle_failed_submit (
                vm, *async_frame, node, ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR,
                IPSEC_SA_ERROR_CRYPTO_ENGINE_ERROR, n_noop, noop_bi,
                noop_nexts, drop_next, true);
              vnet_crypto_async_reset_frame (*async_frame);
              vnet_crypto_async_free_frame (vm, *async_frame);
            }
        }
    }
  if (n_noop)
    vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);

  vlib_node_increment_counter (vm, node->node_index, ESP_ENCRYPT_ERROR_RX_PKTS,
                               frame->n_vectors);

  return frame->n_vectors;
}

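/* Post node shared by the async variants: packets arrive here after the
 * crypto engine has finished and are forwarded to the next index that
 * esp_prepare_async_frame() stored in the buffer's post data. */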
always_inline uword
esp_encrypt_post_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vlib_frame_t * frame)
{
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;

  vlib_get_buffers (vm, from, b, n_left);

  if (n_left >= 4)
    {
      vlib_prefetch_buffer_header (b[0], LOAD);
      vlib_prefetch_buffer_header (b[1], LOAD);
      vlib_prefetch_buffer_header (b[2], LOAD);
      vlib_prefetch_buffer_header (b[3], LOAD);
    }

  while (n_left > 8)
    {
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      next[0] = (esp_post_data (b[0]))->next_index;
      next[1] = (esp_post_data (b[1]))->next_index;
      next[2] = (esp_post_data (b[2]))->next_index;
      next[3] = (esp_post_data (b[3]))->next_index;

      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
        {
          if (b[0]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                             sizeof (*tr));
              tr->next_index = next[0];
            }
          if (b[1]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[1],
                                                             sizeof (*tr));
              tr->next_index = next[1];
            }
          if (b[2]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[2],
                                                             sizeof (*tr));
              tr->next_index = next[2];
            }
          if (b[3]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[3],
                                                             sizeof (*tr));
              tr->next_index = next[3];
            }
        }

      b += 4;
      next += 4;
      n_left -= 4;
    }

  while (n_left > 0)
    {
      next[0] = (esp_post_data (b[0]))->next_index;
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                         sizeof (*tr));
          tr->next_index = next[0];
        }

      b += 1;
      next += 1;
      n_left -= 1;
    }

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_ENCRYPT_ERROR_POST_RX_PKTS,
                               frame->n_vectors);
  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}

Klement Sekerab8f35442018-10-29 13:38:19 +01001216VLIB_NODE_FN (esp4_encrypt_node) (vlib_main_t * vm,
1217 vlib_node_runtime_t * node,
1218 vlib_frame_t * from_frame)
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001219{
Neale Ranns4a58e492020-12-21 13:19:10 +00001220 return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP4, 0,
Fan Zhangf5395782020-04-29 14:00:03 +01001221 esp_encrypt_async_next.esp4_post_next);
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001222}
Ed Warnickecb9cada2015-12-08 15:45:58 -07001223
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001224VLIB_REGISTER_NODE (esp4_encrypt_node) = {
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001225 .name = "esp4-encrypt",
Ed Warnickecb9cada2015-12-08 15:45:58 -07001226 .vector_size = sizeof (u32),
1227 .format_trace = format_esp_encrypt_trace,
1228 .type = VLIB_NODE_TYPE_INTERNAL,
1229
Neale Ranns93688d72022-08-09 03:34:51 +00001230 .n_errors = ESP_ENCRYPT_N_ERROR,
1231 .error_counters = esp_encrypt_error_counters,
Ed Warnickecb9cada2015-12-08 15:45:58 -07001232
1233 .n_next_nodes = ESP_ENCRYPT_N_NEXT,
Neale Ranns4a58e492020-12-21 13:19:10 +00001234 .next_nodes = { [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
1235 [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
1236 [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
1237 [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-handoff",
1238 [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-handoff",
1239 [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "error-drop",
1240 [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "interface-output" },
Ed Warnickecb9cada2015-12-08 15:45:58 -07001241};
1242
Fan Zhangf5395782020-04-29 14:00:03 +01001243VLIB_NODE_FN (esp4_encrypt_post_node) (vlib_main_t * vm,
1244 vlib_node_runtime_t * node,
1245 vlib_frame_t * from_frame)
1246{
1247 return esp_encrypt_post_inline (vm, node, from_frame);
1248}
1249
Fan Zhangf5395782020-04-29 14:00:03 +01001250VLIB_REGISTER_NODE (esp4_encrypt_post_node) = {
1251 .name = "esp4-encrypt-post",
1252 .vector_size = sizeof (u32),
1253 .format_trace = format_esp_post_encrypt_trace,
1254 .type = VLIB_NODE_TYPE_INTERNAL,
1255 .sibling_of = "esp4-encrypt",
1256
Neale Ranns93688d72022-08-09 03:34:51 +00001257 .n_errors = ESP_ENCRYPT_N_ERROR,
1258 .error_counters = esp_encrypt_error_counters,
Fan Zhangf5395782020-04-29 14:00:03 +01001259};
Fan Zhangf5395782020-04-29 14:00:03 +01001260
VLIB_NODE_FN (esp6_encrypt_node) (vlib_main_t * vm,
				  vlib_node_runtime_t * node,
				  vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP6, 0,
			     esp_encrypt_async_next.esp6_post_next);
}

VLIB_REGISTER_NODE (esp6_encrypt_node) = {
  .name = "esp6-encrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

VLIB_NODE_FN (esp6_encrypt_post_node) (vlib_main_t * vm,
				       vlib_node_runtime_t * node,
				       vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

VLIB_REGISTER_NODE (esp6_encrypt_post_node) = {
  .name = "esp6-encrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

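/*
 * esp4-encrypt-tun: ESP encryption of IPv4 packets sent through a protected
 * tunnel interface.  Handoffs use the tunnel-specific handoff nodes and
 * output goes via adj-midchain-tx rather than interface-output.
 */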
VLIB_NODE_FN (esp4_encrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP4, 1,
			     esp_encrypt_async_next.esp4_tun_post_next);
}

VLIB_REGISTER_NODE (esp4_encrypt_tun_node) = {
  .name = "esp4-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

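/* esp4-encrypt-tun-post: async completion node for the IPv4 tunnel path. */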
VLIB_NODE_FN (esp4_encrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

VLIB_REGISTER_NODE (esp4_encrypt_tun_post_node) = {
  .name = "esp4-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

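/*
 * esp6-encrypt-tun / esp6-encrypt-tun-post: IPv6 variants of the
 * tunnel-interface encrypt path.
 */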
VLIB_NODE_FN (esp6_encrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP6, 1,
			     esp_encrypt_async_next.esp6_tun_post_next);
}

VLIB_REGISTER_NODE (esp6_encrypt_tun_node) = {
  .name = "esp6-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

VLIB_NODE_FN (esp6_encrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

VLIB_REGISTER_NODE (esp6_encrypt_tun_post_node) = {
  .name = "esp6-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp-mpls-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

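/*
 * esp-mpls-encrypt-tun: ESP encryption of MPLS payloads sent through a
 * protected tunnel interface.
 */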
VLIB_NODE_FN (esp_mpls_encrypt_tun_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_MPLS, 1,
			     esp_encrypt_async_next.esp_mpls_tun_post_next);
}

VLIB_REGISTER_NODE (esp_mpls_encrypt_tun_node) = {
  .name = "esp-mpls-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

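/* esp-mpls-encrypt-tun-post: async completion node for the MPLS tunnel path. */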
VLIB_NODE_FN (esp_mpls_encrypt_tun_post_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

VLIB_REGISTER_NODE (esp_mpls_encrypt_tun_post_node) = {
  .name = "esp-mpls-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp-mpls-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

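/*
 * Create a frame queue per encrypt node so the corresponding handoff nodes
 * can pass packets to the worker thread that owns the SA.
 */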
#ifndef CLIB_MARCH_VARIANT

static clib_error_t *
esp_encrypt_init (vlib_main_t *vm)
{
  ipsec_main_t *im = &ipsec_main;

  im->esp4_enc_fq_index =
    vlib_frame_queue_main_init (esp4_encrypt_node.index, 0);
  im->esp6_enc_fq_index =
    vlib_frame_queue_main_init (esp6_encrypt_node.index, 0);
  im->esp4_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp4_encrypt_tun_node.index, 0);
  im->esp6_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp6_encrypt_tun_node.index, 0);
  im->esp_mpls_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp_mpls_encrypt_tun_node.index, 0);

  return 0;
}

VLIB_INIT_FUNCTION (esp_encrypt_init);

#endif

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */