/*
 * esp_encrypt.c : IPSec ESP encrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>

#include <vnet/crypto/crypto.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/ipsec_tun.h>
#include <vnet/ipsec/ipsec.api_enum.h>
#include <vnet/ipsec/esp.h>
#include <vnet/tunnel/tunnel_dp.h>

#define foreach_esp_encrypt_next                                              \
  _ (DROP4, "ip4-drop")                                                       \
  _ (DROP6, "ip6-drop")                                                       \
  _ (DROP_MPLS, "mpls-drop")                                                  \
  _ (HANDOFF4, "handoff4")                                                    \
  _ (HANDOFF6, "handoff6")                                                    \
  _ (HANDOFF_MPLS, "handoff-mpls")                                            \
  _ (INTERFACE_OUTPUT, "interface-output")

#define _(v, s) ESP_ENCRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_encrypt_next
#undef _
    ESP_ENCRYPT_N_NEXT,
} esp_encrypt_next_t;

typedef struct
{
  u32 sa_index;
  u32 spi;
  u32 seq;
  u32 sa_seq_hi;
  u8 udp_encap;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_encrypt_trace_t;

typedef struct
{
  u32 next_index;
} esp_encrypt_post_trace_t;

typedef vl_counter_esp_encrypt_enum_t esp_encrypt_error_t;

/* packet trace format function */
static u8 *
format_esp_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_trace_t *t = va_arg (*args, esp_encrypt_trace_t *);

  s =
    format (s,
	    "esp: sa-index %d spi %u (0x%08x) seq %u sa-seq-hi %u crypto %U integrity %U%s",
	    t->sa_index, t->spi, t->spi, t->seq, t->sa_seq_hi,
	    format_ipsec_crypto_alg,
	    t->crypto_alg, format_ipsec_integ_alg, t->integ_alg,
	    t->udp_encap ? " udp-encap-enabled" : "");
  return s;
}

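/* packet trace format function for the post-encrypt node */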
static u8 *
format_esp_post_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_post_trace_t *t = va_arg (*args, esp_encrypt_post_trace_t *);

  s = format (s, "esp-post: next node index %u", t->next_index);
  return s;
}

/* pad packet in input buffer */
static_always_inline u8 *
esp_add_footer_and_icv (vlib_main_t *vm, vlib_buffer_t **last, u8 esp_align,
			u8 icv_sz, vlib_node_runtime_t *node,
			u16 buffer_data_size, uword total_len)
{
  static const u8 pad_data[ESP_MAX_BLOCK_SIZE] = {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00,
  };

  u16 min_length = total_len + sizeof (esp_footer_t);
  u16 new_length = round_pow2 (min_length, esp_align);
  u8 pad_bytes = new_length - min_length;
  esp_footer_t *f = (esp_footer_t *) (vlib_buffer_get_current (last[0]) +
				      last[0]->current_length + pad_bytes);
  u16 tail_sz = sizeof (esp_footer_t) + pad_bytes + icv_sz;

  if (last[0]->current_data + last[0]->current_length + tail_sz >
      buffer_data_size)
    {
      u32 tmp_bi = 0;
      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
	return 0;

      vlib_buffer_t *tmp = vlib_get_buffer (vm, tmp_bi);
      last[0]->next_buffer = tmp_bi;
      last[0]->flags |= VLIB_BUFFER_NEXT_PRESENT;
      f = (esp_footer_t *) (vlib_buffer_get_current (tmp) + pad_bytes);
      tmp->current_length += tail_sz;
      last[0] = tmp;
    }
  else
    last[0]->current_length += tail_sz;

  f->pad_length = pad_bytes;
  if (pad_bytes)
    {
      ASSERT (pad_bytes <= ESP_MAX_BLOCK_SIZE);
      pad_bytes = clib_min (ESP_MAX_BLOCK_SIZE, pad_bytes);
      clib_memcpy_fast ((u8 *) f - pad_bytes, pad_data, pad_bytes);
    }

  return &f->next_header;
}

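/* rewrite the outer IPv4 header: set the new total length (and, in
   transport mode, the next-protocol field) and patch the checksum
   incrementally instead of recomputing it */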
static_always_inline void
esp_update_ip4_hdr (ip4_header_t * ip4, u16 len, int is_transport, int is_udp)
{
  ip_csum_t sum;
  u16 old_len;

  len = clib_net_to_host_u16 (len);
  old_len = ip4->length;

  if (is_transport)
    {
      u8 prot = is_udp ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;

      sum = ip_csum_update (ip4->checksum, ip4->protocol,
			    prot, ip4_header_t, protocol);
      ip4->protocol = prot;

      sum = ip_csum_update (sum, old_len, len, ip4_header_t, length);
    }
  else
    sum = ip_csum_update (ip4->checksum, old_len, len, ip4_header_t, length);

  ip4->length = len;
  ip4->checksum = ip_csum_fold (sum);
}

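/* fill in the UDP encapsulation header from the template kept in the SA */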
static_always_inline void
esp_fill_udp_hdr (ipsec_sa_t * sa, udp_header_t * udp, u16 len)
{
  clib_memcpy_fast (udp, &sa->udp_hdr, sizeof (udp_header_t));
  udp->length = clib_net_to_host_u16 (len);
}

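/* returns non-zero if this IPv6 extension header must stay in front of
   the ESP header (hop-by-hop, routing and fragment headers) */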
static_always_inline u8
ext_hdr_is_pre_esp (u8 nexthdr)
{
#ifdef CLIB_HAVE_VEC128
  static const u8x16 ext_hdr_types = {
    IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS,
    IP_PROTOCOL_IPV6_ROUTE,
    IP_PROTOCOL_IPV6_FRAGMENTATION,
  };

  return !u8x16_is_all_zero (ext_hdr_types == u8x16_splat (nexthdr));
#else
  return (nexthdr == IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS ||
	  nexthdr == IP_PROTOCOL_IPV6_ROUTE ||
	  nexthdr == IP_PROTOCOL_IPV6_FRAGMENTATION);
#endif
}

static_always_inline u8
esp_get_ip6_hdr_len (ip6_header_t * ip6, ip6_ext_header_t ** ext_hdr)
{
  /* this code assumes that HbH, route and frag headers will be before
     others, if that is not the case, they will end up encrypted */
  u8 len = sizeof (ip6_header_t);
  ip6_ext_header_t *p;

  /* if the packet doesn't have an ext header */
  if (ext_hdr_is_pre_esp (ip6->protocol) == 0)
    {
      *ext_hdr = NULL;
      return len;
    }

  p = ip6_next_header (ip6);
  len += ip6_ext_header_len (p);
  while (ext_hdr_is_pre_esp (p->next_hdr))
    {
      p = ip6_ext_next_header (p);
      len += ip6_ext_header_len (p);
    }

  *ext_hdr = p;
  return len;
}

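/* process queued chained-buffer crypto ops synchronously; buffers whose
   op failed are redirected to the drop next */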
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
			 vnet_crypto_op_t * ops, vlib_buffer_t * b[],
			 u16 * nexts, vnet_crypto_op_chunk_t * chunks,
			 u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  u32 bi = op->user_data;
	  b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
	  nexts[bi] = drop_next;
	  n_fail--;
	}
      op++;
    }
}

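/* as above, for single-buffer crypto ops */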
static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
		 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
		 u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
	{
	  u32 bi = op->user_data;
	  b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
	  nexts[bi] = drop_next;
	  n_fail--;
	}
      op++;
    }
}

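/* build the scatter-gather chunk vector for encrypting across a buffer
   chain; returns the total number of bytes to encrypt */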
static_always_inline u32
esp_encrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
			  ipsec_sa_t * sa0, vlib_buffer_t * b,
			  vlib_buffer_t * lb, u8 icv_sz, u8 * start,
			  u32 start_len, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
	total_len += ch->len = cb->current_length - icv_sz;
      else
	total_len += ch->len = cb->current_length;
      ch->src = ch->dst = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
	break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

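/* build the chunk vector for authenticating across a buffer chain; with
   ESN the high sequence number is appended to the authenticated data */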
static_always_inline u32
esp_encrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
			 ipsec_sa_t * sa0, vlib_buffer_t * b,
			 vlib_buffer_t * lb, u8 icv_sz, u8 * start,
			 u32 start_len, u8 * digest, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
	{
	  total_len += ch->len = cb->current_length - icv_sz;
	  if (ipsec_sa_is_set_USE_ESN (sa0))
	    {
	      u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
	      clib_memcpy_fast (digest, &seq_hi, sizeof (seq_hi));
	      ch->len += sizeof (seq_hi);
	      total_len += sizeof (seq_hi);
	    }
	}
      else
	total_len += ch->len = cb->current_length;
      ch->src = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
	break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

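/* queue synchronous encrypt and integrity ops for one packet; for
   CTR/AEAD the nonce (and AAD) are built in scratch space in front of
   the packet headers */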
always_inline void
esp_prepare_sync_op (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
		     vnet_crypto_op_t **crypto_ops,
		     vnet_crypto_op_t **integ_ops, ipsec_sa_t *sa0, u32 seq_hi,
		     u8 *payload, u16 payload_len, u8 iv_sz, u8 icv_sz, u32 bi,
		     vlib_buffer_t **b, vlib_buffer_t *lb, u32 hdr_len,
		     esp_header_t *esp)
{
  if (sa0->crypto_enc_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (crypto_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->crypto_enc_op_id);

      op->src = op->dst = payload;
      op->key_index = sa0->crypto_key_index;
      op->len = payload_len - icv_sz;
      op->user_data = bi;

      if (ipsec_sa_is_set_IS_CTR (sa0))
	{
	  ASSERT (sizeof (u64) == iv_sz);
	  /* construct nonce in a scratch space in front of the IP header */
	  esp_ctr_nonce_t *nonce =
	    (esp_ctr_nonce_t *) (payload - sizeof (u64) - hdr_len -
				 sizeof (*nonce));
	  u64 *pkt_iv = (u64 *) (payload - sizeof (u64));

	  if (ipsec_sa_is_set_IS_AEAD (sa0))
	    {
	      /* construct aad in a scratch space in front of the nonce */
	      op->aad = (u8 *) nonce - sizeof (esp_aead_t);
	      op->aad_len = esp_aad_fill (op->aad, esp, sa0, seq_hi);
	      op->tag = payload + op->len;
	      op->tag_len = 16;
	    }
	  else
	    {
	      nonce->ctr = clib_host_to_net_u32 (1);
	    }

	  nonce->salt = sa0->salt;
	  nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa0->ctr_iv_counter++);
	  op->iv = (u8 *) nonce;
	}
      else
	{
	  op->iv = payload - iv_sz;
	  op->flags = VNET_CRYPTO_OP_FLAG_INIT_IV;
	}

      if (lb != b[0])
	{
	  /* is chained */
	  op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
	  op->chunk_index = vec_len (ptd->chunks);
	  op->tag = vlib_buffer_get_tail (lb) - icv_sz;
	  esp_encrypt_chain_crypto (vm, ptd, sa0, b[0], lb, icv_sz, payload,
				    payload_len, &op->n_chunks);
	}
    }

  if (sa0->integ_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (integ_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->src = payload - iv_sz - sizeof (esp_header_t);
      op->digest = payload + payload_len - icv_sz;
      op->key_index = sa0->integ_key_index;
      op->digest_len = icv_sz;
      op->len = payload_len - icv_sz + iv_sz + sizeof (esp_header_t);
      op->user_data = bi;

      if (lb != b[0])
	{
	  /* is chained */
	  op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
	  op->chunk_index = vec_len (ptd->chunks);
	  op->digest = vlib_buffer_get_tail (lb) - icv_sz;

	  esp_encrypt_chain_integ (vm, ptd, sa0, b[0], lb, icv_sz,
				   payload - iv_sz - sizeof (esp_header_t),
				   payload_len + iv_sz +
				   sizeof (esp_header_t), op->digest,
				   &op->n_chunks);
	}
      else if (ipsec_sa_is_set_USE_ESN (sa0))
	{
	  u32 tmp = clib_net_to_host_u32 (seq_hi);
	  clib_memcpy_fast (op->digest, &tmp, sizeof (seq_hi));
	  op->len += sizeof (seq_hi);
	}
    }
}

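/* add one packet to an async crypto frame; mirrors the sync path above
   but passes buffer-relative offsets instead of pointers */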
static_always_inline void
esp_prepare_async_frame (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
			 vnet_crypto_async_frame_t *async_frame,
			 ipsec_sa_t *sa, vlib_buffer_t *b, esp_header_t *esp,
			 u8 *payload, u32 payload_len, u8 iv_sz, u8 icv_sz,
			 u32 bi, u16 next, u32 hdr_len, u16 async_next,
			 vlib_buffer_t *lb)
{
  esp_post_data_t *post = esp_post_data (b);
  u8 *tag, *iv, *aad = 0;
  u8 flag = 0;
  u32 key_index;
  i16 crypto_start_offset, integ_start_offset = 0;
  u16 crypto_total_len, integ_total_len;

  post->next_index = next;

  /* crypto */
  crypto_start_offset = payload - b->data;
  crypto_total_len = integ_total_len = payload_len - icv_sz;
  tag = payload + crypto_total_len;

  key_index = sa->linked_key_index;

  if (ipsec_sa_is_set_IS_CTR (sa))
    {
      ASSERT (sizeof (u64) == iv_sz);
      /* construct nonce in a scratch space in front of the IP header */
      esp_ctr_nonce_t *nonce = (esp_ctr_nonce_t *) (payload - sizeof (u64) -
						    hdr_len - sizeof (*nonce));
      u64 *pkt_iv = (u64 *) (payload - sizeof (u64));

      if (ipsec_sa_is_set_IS_AEAD (sa))
	{
	  /* construct aad in a scratch space in front of the nonce */
	  aad = (u8 *) nonce - sizeof (esp_aead_t);
	  esp_aad_fill (aad, esp, sa, sa->seq_hi);
	  key_index = sa->crypto_key_index;
	}
      else
	{
	  nonce->ctr = clib_host_to_net_u32 (1);
	}

      nonce->salt = sa->salt;
      nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa->ctr_iv_counter++);
      iv = (u8 *) nonce;
    }
  else
    {
      iv = payload - iv_sz;
      flag |= VNET_CRYPTO_OP_FLAG_INIT_IV;
    }

  if (lb != b)
    {
      /* chain */
      flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
      tag = vlib_buffer_get_tail (lb) - icv_sz;
      crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb, icv_sz,
						   payload, payload_len, 0);
    }

  if (sa->integ_op_id)
    {
      integ_start_offset = crypto_start_offset - iv_sz - sizeof (esp_header_t);
      integ_total_len += iv_sz + sizeof (esp_header_t);

      if (b != lb)
	{
	  integ_total_len = esp_encrypt_chain_integ (
	    vm, ptd, sa, b, lb, icv_sz,
	    payload - iv_sz - sizeof (esp_header_t),
	    payload_len + iv_sz + sizeof (esp_header_t), tag, 0);
	}
      else if (ipsec_sa_is_set_USE_ESN (sa))
	{
	  u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
	  clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
	  integ_total_len += sizeof (seq_hi);
	}
    }

  /* this always succeeds because we know the frame is not full */
  vnet_crypto_async_add_to_frame (vm, async_frame, key_index, crypto_total_len,
				  integ_total_len - crypto_total_len,
				  crypto_start_offset, integ_start_offset, bi,
				  async_next, iv, tag, aad, flag);
}

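/* shared encrypt path for the ip4/ip6/mpls, tunnel/transport and
   sync/async node variants */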
always_inline uword
esp_encrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
		    vlib_frame_t *frame, vnet_link_t lt, int is_tun,
		    u16 async_next_node)
{
  ipsec_main_t *im = &ipsec_main;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, vm->thread_index);
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u32 thread_index = vm->thread_index;
  u16 buffer_data_size = vlib_buffer_get_default_data_size (vm);
  u32 current_sa_index = ~0, current_sa_packets = 0;
  u32 current_sa_bytes = 0, spi = 0;
  u8 esp_align = 4, iv_sz = 0, icv_sz = 0;
  ipsec_sa_t *sa0 = 0;
  vlib_buffer_t *lb;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;
  vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  int is_async = im->async_mode;
  vnet_crypto_async_op_id_t async_op = ~0;
  u16 drop_next =
    (lt == VNET_LINK_IP6 ? ESP_ENCRYPT_NEXT_DROP6 :
     (lt == VNET_LINK_IP4 ? ESP_ENCRYPT_NEXT_DROP4 :
      ESP_ENCRYPT_NEXT_DROP_MPLS));
  u16 handoff_next = (lt == VNET_LINK_IP6 ?
		      ESP_ENCRYPT_NEXT_HANDOFF6 :
		      (lt == VNET_LINK_IP4 ? ESP_ENCRYPT_NEXT_HANDOFF4 :
		       ESP_ENCRYPT_NEXT_HANDOFF_MPLS));
  vlib_buffer_t *sync_bufs[VLIB_FRAME_SIZE];
  u16 sync_nexts[VLIB_FRAME_SIZE], *sync_next = sync_nexts, n_sync = 0;
  u16 n_async = 0;
  u16 noop_nexts[VLIB_FRAME_SIZE], n_noop = 0;
  u32 sync_bi[VLIB_FRAME_SIZE];
  u32 noop_bi[VLIB_FRAME_SIZE];
  esp_encrypt_error_t err;

  vlib_get_buffers (vm, from, b, n_left);

  vec_reset_length (ptd->crypto_ops);
  vec_reset_length (ptd->integ_ops);
  vec_reset_length (ptd->chained_crypto_ops);
  vec_reset_length (ptd->chained_integ_ops);
  vec_reset_length (ptd->async_frames);
  vec_reset_length (ptd->chunks);
  clib_memset (async_frames, 0, sizeof (async_frames));

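  /* per-packet loop: resolve the SA, build the ESP encapsulation in the
     buffer's headroom and queue the crypto work (sync ops or async frame) */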
  while (n_left > 0)
    {
      u32 sa_index0;
      dpo_id_t *dpo;
      esp_header_t *esp;
      u8 *payload, *next_hdr_ptr;
      u16 payload_len, payload_len_total, n_bufs;
      u32 hdr_len;

      err = ESP_ENCRYPT_ERROR_RX_PKTS;

      if (n_left > 2)
	{
	  u8 *p;
	  vlib_prefetch_buffer_header (b[2], LOAD);
	  p = vlib_buffer_get_current (b[1]);
	  clib_prefetch_load (p);
	  p -= CLIB_CACHE_LINE_BYTES;
	  clib_prefetch_load (p);
	  /* speculate that the trailer goes in the first buffer */
	  CLIB_PREFETCH (vlib_buffer_get_tail (b[1]),
			 CLIB_CACHE_LINE_BYTES, LOAD);
	}

      if (is_tun)
	{
	  /* we are on an ipsec tunnel's feature arc */
	  vnet_buffer (b[0])->ipsec.sad_index =
	    sa_index0 = ipsec_tun_protect_get_sa_out
	    (vnet_buffer (b[0])->ip.adj_index[VLIB_TX]);

	  if (PREDICT_FALSE (INDEX_INVALID == sa_index0))
	    {
	      err = ESP_ENCRYPT_ERROR_NO_PROTECTION;
	      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				  drop_next);
	      goto trace;
	    }
	}
      else
	sa_index0 = vnet_buffer (b[0])->ipsec.sad_index;

      if (sa_index0 != current_sa_index)
	{
	  if (current_sa_packets)
	    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
					     current_sa_index,
					     current_sa_packets,
					     current_sa_bytes);
	  current_sa_packets = current_sa_bytes = 0;

	  sa0 = ipsec_sa_get (sa_index0);

	  if (PREDICT_FALSE ((sa0->crypto_alg == IPSEC_CRYPTO_ALG_NONE &&
			      sa0->integ_alg == IPSEC_INTEG_ALG_NONE) &&
			     !ipsec_sa_is_set_NO_ALGO_NO_DROP (sa0)))
	    {
	      err = ESP_ENCRYPT_ERROR_NO_ENCRYPTION;
	      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				  drop_next);
	      goto trace;
	    }
	  /* fetch the second cacheline ASAP */
	  clib_prefetch_load (sa0->cacheline1);

	  current_sa_index = sa_index0;
	  spi = clib_net_to_host_u32 (sa0->spi);
	  esp_align = sa0->esp_block_align;
	  icv_sz = sa0->integ_icv_size;
	  iv_sz = sa0->crypto_iv_size;
	  is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
	}

      if (PREDICT_FALSE (~0 == sa0->thread_index))
	{
	  /* this is the first packet to use this SA, claim the SA
	   * for this thread. this could happen simultaneously on
	   * another thread */
	  clib_atomic_cmp_and_swap (&sa0->thread_index, ~0,
				    ipsec_sa_assign_thread (thread_index));
	}

      if (PREDICT_FALSE (thread_index != sa0->thread_index))
	{
	  vnet_buffer (b[0])->ipsec.thread_index = sa0->thread_index;
	  err = ESP_ENCRYPT_ERROR_HANDOFF;
	  esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
			      handoff_next);
	  goto trace;
	}

      lb = b[0];
      n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
	{
	  err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
	  esp_set_next_index (b[0], node, err, n_noop, noop_nexts, drop_next);
	  goto trace;
	}

      if (n_bufs > 1)
	{
	  /* find last buffer in the chain */
	  while (lb->flags & VLIB_BUFFER_NEXT_PRESENT)
	    lb = vlib_get_buffer (vm, lb->next_buffer);
	}

      if (PREDICT_FALSE (esp_seq_advance (sa0)))
	{
	  err = ESP_ENCRYPT_ERROR_SEQ_CYCLED;
	  esp_set_next_index (b[0], node, err, n_noop, noop_nexts, drop_next);
	  goto trace;
	}

      /* space for IV */
      hdr_len = iv_sz;

      if (ipsec_sa_is_set_IS_TUNNEL (sa0))
	{
	  payload = vlib_buffer_get_current (b[0]);
	  next_hdr_ptr = esp_add_footer_and_icv (
	    vm, &lb, esp_align, icv_sz, node, buffer_data_size,
	    vlib_buffer_length_in_chain (vm, b[0]));
	  if (!next_hdr_ptr)
	    {
	      err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
	      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				  drop_next);
	      goto trace;
	    }
	  b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
	  payload_len = b[0]->current_length;
	  payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

	  /* ESP header */
	  hdr_len += sizeof (*esp);
	  esp = (esp_header_t *) (payload - hdr_len);

	  /* optional UDP header */
	  if (ipsec_sa_is_set_UDP_ENCAP (sa0))
	    {
	      hdr_len += sizeof (udp_header_t);
	      esp_fill_udp_hdr (sa0, (udp_header_t *) (payload - hdr_len),
				payload_len_total + hdr_len);
	    }

	  /* IP header */
	  if (ipsec_sa_is_set_IS_TUNNEL_V6 (sa0))
	    {
	      ip6_header_t *ip6;
	      u16 len = sizeof (ip6_header_t);
	      hdr_len += len;
	      ip6 = (ip6_header_t *) (payload - hdr_len);
	      clib_memcpy_fast (ip6, &sa0->ip6_hdr, sizeof (ip6_header_t));

	      if (VNET_LINK_IP6 == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_IPV6;
		  tunnel_encap_fixup_6o6 (sa0->tunnel_flags,
					  (const ip6_header_t *) payload,
					  ip6);
		}
	      else if (VNET_LINK_IP4 == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_IP_IN_IP;
		  tunnel_encap_fixup_4o6 (sa0->tunnel_flags, b[0],
					  (const ip4_header_t *) payload, ip6);
		}
	      else if (VNET_LINK_MPLS == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_MPLS_IN_IP;
		  tunnel_encap_fixup_mplso6 (
		    sa0->tunnel_flags, b[0],
		    (const mpls_unicast_header_t *) payload, ip6);
		}
	      else
		ASSERT (0);

	      len = payload_len_total + hdr_len - len;
	      ip6->payload_length = clib_net_to_host_u16 (len);
	      b[0]->flags |= VNET_BUFFER_F_LOCALLY_ORIGINATED;
	    }
	  else
	    {
	      ip4_header_t *ip4;
	      u16 len = sizeof (ip4_header_t);
	      hdr_len += len;
	      ip4 = (ip4_header_t *) (payload - hdr_len);
	      clib_memcpy_fast (ip4, &sa0->ip4_hdr, sizeof (ip4_header_t));

	      if (VNET_LINK_IP6 == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_IPV6;
		  tunnel_encap_fixup_6o4_w_chksum (sa0->tunnel_flags,
						   (const ip6_header_t *)
						   payload, ip4);
		}
	      else if (VNET_LINK_IP4 == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_IP_IN_IP;
		  tunnel_encap_fixup_4o4_w_chksum (sa0->tunnel_flags,
						   (const ip4_header_t *)
						   payload, ip4);
		}
	      else if (VNET_LINK_MPLS == lt)
		{
		  *next_hdr_ptr = IP_PROTOCOL_MPLS_IN_IP;
		  tunnel_encap_fixup_mplso4_w_chksum (
		    sa0->tunnel_flags, (const mpls_unicast_header_t *) payload,
		    ip4);
		}
	      else
		ASSERT (0);

	      len = payload_len_total + hdr_len;
	      esp_update_ip4_hdr (ip4, len, /* is_transport */ 0, 0);
	    }

	  dpo = &sa0->dpo;
	  if (!is_tun)
	    {
	      sync_next[0] = dpo->dpoi_next_node;
	      vnet_buffer (b[0])->ip.adj_index[VLIB_TX] = dpo->dpoi_index;
	    }
	  else
	    sync_next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
	  b[0]->flags |= VNET_BUFFER_F_LOCALLY_ORIGINATED;
	}
      else /* transport mode */
	{
	  u8 *l2_hdr, l2_len, *ip_hdr;
	  u16 ip_len;
	  ip6_ext_header_t *ext_hdr;
	  udp_header_t *udp = 0;
	  u16 udp_len = 0;
	  u8 *old_ip_hdr = vlib_buffer_get_current (b[0]);

	  /*
	   * Get extension header chain length. It might be longer than the
	   * buffer's pre_data area.
	   */
	  ip_len =
	    (VNET_LINK_IP6 == lt ?
	     esp_get_ip6_hdr_len ((ip6_header_t *) old_ip_hdr, &ext_hdr) :
	     ip4_header_bytes ((ip4_header_t *) old_ip_hdr));
	  if ((old_ip_hdr - ip_len) < &b[0]->pre_data[0])
	    {
	      err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
	      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				  drop_next);
	      goto trace;
	    }

	  vlib_buffer_advance (b[0], ip_len);
	  payload = vlib_buffer_get_current (b[0]);
	  next_hdr_ptr = esp_add_footer_and_icv (
	    vm, &lb, esp_align, icv_sz, node, buffer_data_size,
	    vlib_buffer_length_in_chain (vm, b[0]));
	  if (!next_hdr_ptr)
	    {
	      err = ESP_ENCRYPT_ERROR_NO_BUFFERS;
	      esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
				  drop_next);
	      goto trace;
	    }

	  b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
	  payload_len = b[0]->current_length;
	  payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

	  /* ESP header */
	  hdr_len += sizeof (*esp);
	  esp = (esp_header_t *) (payload - hdr_len);

	  /* optional UDP header */
	  if (ipsec_sa_is_set_UDP_ENCAP (sa0))
	    {
	      hdr_len += sizeof (udp_header_t);
	      udp = (udp_header_t *) (payload - hdr_len);
	    }

	  /* IP header */
	  hdr_len += ip_len;
	  ip_hdr = payload - hdr_len;

	  /* L2 header */
	  if (!is_tun)
	    {
	      l2_len = vnet_buffer (b[0])->ip.save_rewrite_length;
	      hdr_len += l2_len;
	      l2_hdr = payload - hdr_len;

	      /* copy l2 and ip header */
	      clib_memcpy_le32 (l2_hdr, old_ip_hdr - l2_len, l2_len);
	    }
	  else
	    l2_len = 0;

	  u16 len;
	  len = payload_len_total + hdr_len - l2_len;

	  if (VNET_LINK_IP6 == lt)
	    {
	      ip6_header_t *ip6 = (ip6_header_t *) (old_ip_hdr);
	      if (PREDICT_TRUE (NULL == ext_hdr))
		{
		  *next_hdr_ptr = ip6->protocol;
		  ip6->protocol =
		    (udp) ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;
		}
	      else
		{
		  *next_hdr_ptr = ext_hdr->next_hdr;
		  ext_hdr->next_hdr =
		    (udp) ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;
		}
	      ip6->payload_length =
		clib_host_to_net_u16 (len - sizeof (ip6_header_t));
	    }
	  else if (VNET_LINK_IP4 == lt)
	    {
	      ip4_header_t *ip4 = (ip4_header_t *) (old_ip_hdr);
	      *next_hdr_ptr = ip4->protocol;
	      esp_update_ip4_hdr (ip4, len, /* is_transport */ 1,
				  (udp != NULL));
	    }

	  clib_memcpy_le64 (ip_hdr, old_ip_hdr, ip_len);

	  if (udp)
	    {
	      udp_len = len - ip_len;
	      esp_fill_udp_hdr (sa0, udp, udp_len);
	    }

	  sync_next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
	}

      if (lb != b[0])
	{
	  crypto_ops = &ptd->chained_crypto_ops;
	  integ_ops = &ptd->chained_integ_ops;
	}
      else
	{
	  crypto_ops = &ptd->crypto_ops;
	  integ_ops = &ptd->integ_ops;
	}

      esp->spi = spi;
      esp->seq = clib_net_to_host_u32 (sa0->seq);

      if (is_async)
	{
	  async_op = sa0->crypto_async_enc_op_id;

	  /* get a frame for this op if we don't yet have one or it's full */
	  if (NULL == async_frames[async_op] ||
	      vnet_crypto_async_frame_is_full (async_frames[async_op]))
	    {
	      async_frames[async_op] =
		vnet_crypto_async_get_frame (vm, async_op);
	      /* Save the frame to the list we'll submit at the end */
	      vec_add1 (ptd->async_frames, async_frames[async_op]);
	    }

	  esp_prepare_async_frame (vm, ptd, async_frames[async_op], sa0, b[0],
				   esp, payload, payload_len, iv_sz, icv_sz,
				   from[b - bufs], sync_next[0], hdr_len,
				   async_next_node, lb);
	}
      else
	esp_prepare_sync_op (vm, ptd, crypto_ops, integ_ops, sa0, sa0->seq_hi,
			     payload, payload_len, iv_sz, icv_sz, n_sync, b,
			     lb, hdr_len, esp);

      vlib_buffer_advance (b[0], 0LL - hdr_len);

      current_sa_packets += 1;
      current_sa_bytes += payload_len_total;

    trace:
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
	{
	  esp_encrypt_trace_t *tr = vlib_add_trace (vm, node, b[0],
						    sizeof (*tr));
	  if (INDEX_INVALID == sa_index0)
	    clib_memset_u8 (tr, 0xff, sizeof (*tr));
	  else
	    {
	      tr->sa_index = sa_index0;
	      tr->spi = sa0->spi;
	      tr->seq = sa0->seq;
	      tr->sa_seq_hi = sa0->seq_hi;
	      tr->udp_encap = ipsec_sa_is_set_UDP_ENCAP (sa0);
	      tr->crypto_alg = sa0->crypto_alg;
	      tr->integ_alg = sa0->integ_alg;
	    }
	}

      /* next */
      if (ESP_ENCRYPT_ERROR_RX_PKTS != err)
	{
	  noop_bi[n_noop] = from[b - bufs];
	  n_noop++;
	}
      else if (!is_async)
	{
	  sync_bi[n_sync] = from[b - bufs];
	  sync_bufs[n_sync] = b[0];
	  n_sync++;
	  sync_next++;
	}
      else
	{
	  n_async++;
	}
      n_left -= 1;
      b += 1;
    }

  if (INDEX_INVALID != current_sa_index)
    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
				     current_sa_index, current_sa_packets,
				     current_sa_bytes);
  if (n_sync)
    {
      esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
		       drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, sync_bufs,
			       sync_nexts, ptd->chunks, drop_next);

      esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
		       drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_integ_ops, sync_bufs,
			       sync_nexts, ptd->chunks, drop_next);

      vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);
    }
  if (n_async)
    {
      /* submit all of the open frames */
      vnet_crypto_async_frame_t **async_frame;

      vec_foreach (async_frame, ptd->async_frames)
	{
	  if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
	    {
	      n_noop += esp_async_recycle_failed_submit (
		vm, *async_frame, node, ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR,
		n_noop, noop_bi, noop_nexts, drop_next);
	      vnet_crypto_async_reset_frame (*async_frame);
	      vnet_crypto_async_free_frame (vm, *async_frame);
	    }
	}
    }
  if (n_noop)
    vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);

  vlib_node_increment_counter (vm, node->node_index, ESP_ENCRYPT_ERROR_RX_PKTS,
			       frame->n_vectors);

  return frame->n_vectors;
}

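/* post node: packets return here once async crypto completes and are
   forwarded to the next index recorded when the frame was prepared */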
always_inline uword
esp_encrypt_post_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
			 vlib_frame_t * frame)
{
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;

  vlib_get_buffers (vm, from, b, n_left);

  if (n_left >= 4)
    {
      vlib_prefetch_buffer_header (b[0], LOAD);
      vlib_prefetch_buffer_header (b[1], LOAD);
      vlib_prefetch_buffer_header (b[2], LOAD);
      vlib_prefetch_buffer_header (b[3], LOAD);
    }

  while (n_left > 8)
    {
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      next[0] = (esp_post_data (b[0]))->next_index;
      next[1] = (esp_post_data (b[1]))->next_index;
      next[2] = (esp_post_data (b[2]))->next_index;
      next[3] = (esp_post_data (b[3]))->next_index;

      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
	{
	  if (b[0]->flags & VLIB_BUFFER_IS_TRACED)
	    {
	      esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
							     sizeof (*tr));
	      tr->next_index = next[0];
	    }
	  if (b[1]->flags & VLIB_BUFFER_IS_TRACED)
	    {
	      esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[1],
							     sizeof (*tr));
	      tr->next_index = next[1];
	    }
	  if (b[2]->flags & VLIB_BUFFER_IS_TRACED)
	    {
	      esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[2],
							     sizeof (*tr));
	      tr->next_index = next[2];
	    }
	  if (b[3]->flags & VLIB_BUFFER_IS_TRACED)
	    {
	      esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[3],
							     sizeof (*tr));
	      tr->next_index = next[3];
	    }
	}

      b += 4;
      next += 4;
      n_left -= 4;
    }

  while (n_left > 0)
    {
      next[0] = (esp_post_data (b[0]))->next_index;
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
	{
	  esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
							 sizeof (*tr));
	  tr->next_index = next[0];
	}

      b += 1;
      next += 1;
      n_left -= 1;
    }

  vlib_node_increment_counter (vm, node->node_index,
			       ESP_ENCRYPT_ERROR_POST_RX_PKTS,
			       frame->n_vectors);
  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}

VLIB_NODE_FN (esp4_encrypt_node) (vlib_main_t * vm,
				  vlib_node_runtime_t * node,
				  vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP4, 0,
			     esp_encrypt_async_next.esp4_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_node) = {
  .name = "esp4-encrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = { [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
		  [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
		  [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
		  [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-handoff",
		  [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-handoff",
		  [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "error-drop",
		  [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "interface-output" },
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp4_encrypt_post_node) (vlib_main_t * vm,
				       vlib_node_runtime_t * node,
				       vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_post_node) = {
  .name = "esp4-encrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_node) (vlib_main_t * vm,
				  vlib_node_runtime_t * node,
				  vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP6, 0,
			     esp_encrypt_async_next.esp6_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_node) = {
  .name = "esp6-encrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_post_node) (vlib_main_t * vm,
				       vlib_node_runtime_t * node,
				       vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_post_node) = {
  .name = "esp6-encrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp4_encrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP4, 1,
			     esp_encrypt_async_next.esp4_tun_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_tun_node) = {
  .name = "esp4-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

VLIB_NODE_FN (esp4_encrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_tun_post_node) = {
  .name = "esp4-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_IP6, 1,
			     esp_encrypt_async_next.esp6_tun_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_tun_node) = {
  .name = "esp6-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_tun_post_node) = {
  .name = "esp6-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp-mpls-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp_mpls_encrypt_tun_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, VNET_LINK_MPLS, 1,
			     esp_encrypt_async_next.esp_mpls_tun_post_next);
}

VLIB_REGISTER_NODE (esp_mpls_encrypt_tun_node) = {
  .name = "esp-mpls-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_DROP_MPLS] = "mpls-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF_MPLS] = "esp-mpls-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

VLIB_NODE_FN (esp_mpls_encrypt_tun_post_node)
(vlib_main_t *vm, vlib_node_runtime_t *node, vlib_frame_t *from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

VLIB_REGISTER_NODE (esp_mpls_encrypt_tun_post_node) = {
  .name = "esp-mpls-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp-mpls-encrypt-tun",

  .n_errors = ESP_ENCRYPT_N_ERROR,
  .error_counters = esp_encrypt_error_counters,
};

#ifndef CLIB_MARCH_VARIANT

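/* allocate the frame queues used to hand packets off to the thread
   that owns a given SA */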
static clib_error_t *
esp_encrypt_init (vlib_main_t *vm)
{
  ipsec_main_t *im = &ipsec_main;

  im->esp4_enc_fq_index =
    vlib_frame_queue_main_init (esp4_encrypt_node.index, 0);
  im->esp6_enc_fq_index =
    vlib_frame_queue_main_init (esp6_encrypt_node.index, 0);
  im->esp4_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp4_encrypt_tun_node.index, 0);
  im->esp6_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp6_encrypt_tun_node.index, 0);
  im->esp_mpls_enc_tun_fq_index =
    vlib_frame_queue_main_init (esp_mpls_encrypt_tun_node.index, 0);

  return 0;
}

VLIB_INIT_FUNCTION (esp_encrypt_init);

#endif

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */