/*
 * esp_encrypt.c : IPSec ESP encrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>

#include <vnet/crypto/crypto.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/ipsec_tun.h>
#include <vnet/ipsec/esp.h>

#define foreach_esp_encrypt_next                   \
_(DROP4, "ip4-drop")                               \
_(DROP6, "ip6-drop")                               \
_(HANDOFF4, "handoff4")                            \
_(HANDOFF6, "handoff6")                            \
_(INTERFACE_OUTPUT, "interface-output")

#define _(v, s) ESP_ENCRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_encrypt_next
#undef _
    ESP_ENCRYPT_N_NEXT,
} esp_encrypt_next_t;

#define foreach_esp_encrypt_error                               \
 _(RX_PKTS, "ESP pkts received")                                \
 _(POST_RX_PKTS, "ESP-post pkts received")                      \
 _(SEQ_CYCLED, "sequence number cycled (packet dropped)")       \
 _(CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)") \
 _(CRYPTO_QUEUE_FULL, "crypto queue full (packet dropped)")     \
 _(NO_BUFFERS, "no buffers (packet dropped)")

typedef enum
{
#define _(sym,str) ESP_ENCRYPT_ERROR_##sym,
  foreach_esp_encrypt_error
#undef _
    ESP_ENCRYPT_N_ERROR,
} esp_encrypt_error_t;

static char *esp_encrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_encrypt_error
#undef _
};

typedef struct
{
  u32 sa_index;
  u32 spi;
  u32 seq;
  u32 sa_seq_hi;
  u8 udp_encap;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_encrypt_trace_t;

typedef struct
{
  u32 next_index;
} esp_encrypt_post_trace_t;

/* packet trace format function */
static u8 *
format_esp_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_trace_t *t = va_arg (*args, esp_encrypt_trace_t *);

  s =
    format (s,
            "esp: sa-index %d spi %u (0x%08x) seq %u sa-seq-hi %u crypto %U integrity %U%s",
            t->sa_index, t->spi, t->spi, t->seq, t->sa_seq_hi,
            format_ipsec_crypto_alg,
            t->crypto_alg, format_ipsec_integ_alg, t->integ_alg,
            t->udp_encap ? " udp-encap-enabled" : "");
  return s;
}

static u8 *
format_esp_post_encrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_encrypt_post_trace_t *t = va_arg (*args, esp_encrypt_post_trace_t *);

  s = format (s, "esp-post: next node index %u", t->next_index);
  return s;
}

/* pad packet in input buffer */
static_always_inline u8 *
esp_add_footer_and_icv (vlib_main_t * vm, vlib_buffer_t ** last,
                        u8 esp_align, u8 icv_sz,
                        u16 * next, vlib_node_runtime_t * node,
                        u16 buffer_data_size, uword total_len)
{
  static const u8 pad_data[ESP_MAX_BLOCK_SIZE] = {
    0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
    0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x00,
  };

  u16 min_length = total_len + sizeof (esp_footer_t);
  u16 new_length = round_pow2 (min_length, esp_align);
  u8 pad_bytes = new_length - min_length;
  esp_footer_t *f = (esp_footer_t *) (vlib_buffer_get_current (last[0]) +
                                      last[0]->current_length + pad_bytes);
  u16 tail_sz = sizeof (esp_footer_t) + pad_bytes + icv_sz;

  if (last[0]->current_length + tail_sz > buffer_data_size)
    {
      u32 tmp_bi = 0;
      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
        return 0;

      vlib_buffer_t *tmp = vlib_get_buffer (vm, tmp_bi);
      last[0]->next_buffer = tmp_bi;
      last[0]->flags |= VLIB_BUFFER_NEXT_PRESENT;
      f = (esp_footer_t *) (vlib_buffer_get_current (tmp) + pad_bytes);
      tmp->current_length += tail_sz;
      last[0] = tmp;
    }
  else
    last[0]->current_length += tail_sz;

  f->pad_length = pad_bytes;
  if (pad_bytes)
    {
      ASSERT (pad_bytes <= ESP_MAX_BLOCK_SIZE);
      pad_bytes = clib_min (ESP_MAX_BLOCK_SIZE, pad_bytes);
      clib_memcpy_fast ((u8 *) f - pad_bytes, pad_data, pad_bytes);
    }

  return &f->next_header;
}

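/* set the new total length in the outer IPv4 header (and, in transport
   mode, the protocol field) and patch the checksum incrementally */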
static_always_inline void
esp_update_ip4_hdr (ip4_header_t * ip4, u16 len, int is_transport, int is_udp)
{
  ip_csum_t sum;
  u16 old_len;

  len = clib_net_to_host_u16 (len);
  old_len = ip4->length;

  if (is_transport)
    {
      u8 prot = is_udp ? IP_PROTOCOL_UDP : IP_PROTOCOL_IPSEC_ESP;

      sum = ip_csum_update (ip4->checksum, ip4->protocol,
                            prot, ip4_header_t, protocol);
      ip4->protocol = prot;

      sum = ip_csum_update (sum, old_len, len, ip4_header_t, length);
    }
  else
    sum = ip_csum_update (ip4->checksum, old_len, len, ip4_header_t, length);

  ip4->length = len;
  ip4->checksum = ip_csum_fold (sum);
}

static_always_inline void
esp_fill_udp_hdr (ipsec_sa_t * sa, udp_header_t * udp, u16 len)
{
  clib_memcpy_fast (udp, &sa->udp_hdr, sizeof (udp_header_t));
  udp->length = clib_net_to_host_u16 (len);
}

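/* returns non-zero if this IPv6 extension header type must stay in front of
   the ESP header (hop-by-hop, routing, fragment) */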
static_always_inline u8
ext_hdr_is_pre_esp (u8 nexthdr)
{
#ifdef CLIB_HAVE_VEC128
  static const u8x16 ext_hdr_types = {
    IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS,
    IP_PROTOCOL_IPV6_ROUTE,
    IP_PROTOCOL_IPV6_FRAGMENTATION,
  };

  return !u8x16_is_all_zero (ext_hdr_types == u8x16_splat (nexthdr));
#else
  return ((nexthdr == IP_PROTOCOL_IP6_HOP_BY_HOP_OPTIONS) ||
          (nexthdr == IP_PROTOCOL_IPV6_ROUTE) ||
          (nexthdr == IP_PROTOCOL_IPV6_FRAGMENTATION));
#endif
}

static_always_inline u8
esp_get_ip6_hdr_len (ip6_header_t * ip6, ip6_ext_header_t ** ext_hdr)
{
  /* this code assumes that HbH, route and frag headers will be before
     others, if that is not the case, they will end up encrypted */
  u8 len = sizeof (ip6_header_t);
  ip6_ext_header_t *p;

  /* if the packet doesn't have an ext header */
  if (ext_hdr_is_pre_esp (ip6->protocol) == 0)
    {
      *ext_hdr = NULL;
      return len;
    }

  p = (void *) (ip6 + 1);
  len += ip6_ext_header_len (p);

  while (ext_hdr_is_pre_esp (p->next_hdr))
    {
      len += ip6_ext_header_len (p);
      p = ip6_ext_next_header (p);
    }

  *ext_hdr = p;
  return len;
}

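/* process the queued chained-buffer crypto ops synchronously; packets whose
   op did not complete are routed to the drop next index */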
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks,
                         u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
          nexts[bi] = drop_next;
          n_fail--;
        }
      op++;
    }
}

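/* same as above, for ops that operate on a single contiguous buffer */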
static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 u16 drop_next)
{
  u32 n_fail, n_ops = vec_len (ops);
  vnet_crypto_op_t *op = ops;

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);

      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 bi = op->user_data;
          b[bi]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
          nexts[bi] = drop_next;
          n_fail--;
        }
      op++;
    }
}

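/* 96-bit AES-GCM nonce: 32-bit salt from the SA followed by the 64-bit
   per-packet IV */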
typedef struct
{
  u32 salt;
  u64 iv;
} __clib_packed esp_gcm_nonce_t;

STATIC_ASSERT_SIZEOF (esp_gcm_nonce_t, 12);

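/* walk a chained buffer and build the scatter-gather chunk list for the
   cipher; returns the total number of bytes covered */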
static_always_inline u32
esp_encrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          ipsec_sa_t * sa0, vlib_buffer_t * b,
                          vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                          u32 start_len, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
        total_len += ch->len = cb->current_length - icv_sz;
      else
        total_len += ch->len = cb->current_length;
      ch->src = ch->dst = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

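/* build the chunk list for the integrity op over a chained buffer; when ESN
   is in use the high sequence number bits are appended after the last chunk */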
static_always_inline u32
esp_encrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         ipsec_sa_t * sa0, vlib_buffer_t * b,
                         vlib_buffer_t * lb, u8 icv_sz, u8 * start,
                         u32 start_len, u8 * digest, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u32 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      if (lb == cb)
        {
          total_len += ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
              clib_memcpy_fast (digest, &seq_hi, sizeof (seq_hi));
              ch->len += sizeof (seq_hi);
              total_len += sizeof (seq_hi);
            }
        }
      else
        total_len += ch->len = cb->current_length;
      ch->src = vlib_buffer_get_current (cb);

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

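/* queue the synchronous cipher and integrity ops for one packet on the
   per-thread op vectors */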
always_inline void
esp_prepare_sync_op (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                     vnet_crypto_op_t ** crypto_ops,
                     vnet_crypto_op_t ** integ_ops, ipsec_sa_t * sa0,
                     u8 * payload, u16 payload_len, u8 iv_sz, u8 icv_sz,
                     vlib_buffer_t ** bufs, vlib_buffer_t ** b,
                     vlib_buffer_t * lb, u32 hdr_len, esp_header_t * esp,
                     esp_gcm_nonce_t * nonce)
{
  if (sa0->crypto_enc_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (crypto_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->crypto_enc_op_id);

      op->src = op->dst = payload;
      op->key_index = sa0->crypto_key_index;
      op->len = payload_len - icv_sz;
      op->user_data = b - bufs;

      if (ipsec_sa_is_set_IS_AEAD (sa0))
        {
          /*
           * construct the AAD in a scratch space in front
           * of the IP header.
           */
          op->aad = payload - hdr_len - sizeof (esp_aead_t);
          op->aad_len = esp_aad_fill (op->aad, esp, sa0);

          op->tag = payload + op->len;
          op->tag_len = 16;

          u64 *iv = (u64 *) (payload - iv_sz);
          nonce->salt = sa0->salt;
          nonce->iv = *iv = clib_host_to_net_u64 (sa0->gcm_iv_counter++);
          op->iv = (u8 *) nonce;
        }
      else
        {
          op->iv = payload - iv_sz;
          op->flags = VNET_CRYPTO_OP_FLAG_INIT_IV;
        }

      if (lb != b[0])
        {
          /* is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->tag = vlib_buffer_get_tail (lb) - icv_sz;
          esp_encrypt_chain_crypto (vm, ptd, sa0, b[0], lb, icv_sz, payload,
                                    payload_len, &op->n_chunks);
        }
    }

  if (sa0->integ_op_id)
    {
      vnet_crypto_op_t *op;
      vec_add2_aligned (integ_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->src = payload - iv_sz - sizeof (esp_header_t);
      op->digest = payload + payload_len - icv_sz;
      op->key_index = sa0->integ_key_index;
      op->digest_len = icv_sz;
      op->len = payload_len - icv_sz + iv_sz + sizeof (esp_header_t);
      op->user_data = b - bufs;

      if (lb != b[0])
        {
          /* is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          op->digest = vlib_buffer_get_tail (lb) - icv_sz;

          esp_encrypt_chain_integ (vm, ptd, sa0, b[0], lb, icv_sz,
                                   payload - iv_sz - sizeof (esp_header_t),
                                   payload_len + iv_sz +
                                   sizeof (esp_header_t), op->digest,
                                   &op->n_chunks);
        }
      else if (ipsec_sa_is_set_USE_ESN (sa0))
        {
          u32 seq_hi = clib_net_to_host_u32 (sa0->seq_hi);
          clib_memcpy_fast (op->digest, &seq_hi, sizeof (seq_hi));
          op->len += sizeof (seq_hi);
        }
    }
}

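/* add one packet to the currently open async crypto frame; the post-crypto
   next index is stashed in the buffer's esp_post_data */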
static_always_inline int
esp_prepare_async_frame (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         vnet_crypto_async_frame_t ** async_frame,
                         ipsec_sa_t * sa, vlib_buffer_t * b,
                         esp_header_t * esp, u8 * payload, u32 payload_len,
                         u8 iv_sz, u8 icv_sz, u32 bi, u16 next, u32 hdr_len,
                         u16 async_next, vlib_buffer_t * lb)
{
  esp_post_data_t *post = esp_post_data (b);
  u8 *tag, *iv, *aad = 0;
  u8 flag = 0;
  u32 key_index;
  i16 crypto_start_offset, integ_start_offset = 0;
  u16 crypto_total_len, integ_total_len;

  post->next_index = next;

  /* crypto */
  crypto_start_offset = payload - b->data;
  crypto_total_len = integ_total_len = payload_len - icv_sz;
  tag = payload + crypto_total_len;

  /* aead */
  if (ipsec_sa_is_set_IS_AEAD (sa))
    {
      esp_gcm_nonce_t *nonce;
      u64 *pkt_iv = (u64 *) (payload - iv_sz);

      aad = payload - hdr_len - sizeof (esp_aead_t);
      esp_aad_fill (aad, esp, sa);
      nonce = (esp_gcm_nonce_t *) (aad - sizeof (*nonce));
      nonce->salt = sa->salt;
      nonce->iv = *pkt_iv = clib_host_to_net_u64 (sa->gcm_iv_counter++);
      iv = (u8 *) nonce;
      key_index = sa->crypto_key_index;

      if (lb != b)
        {
          /* chain */
          flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          tag = vlib_buffer_get_tail (lb) - icv_sz;
          crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb,
                                                       icv_sz, payload,
                                                       payload_len, 0);
        }
      goto out;
    }

  /* cipher then hash */
  iv = payload - iv_sz;
  integ_start_offset = crypto_start_offset - iv_sz - sizeof (esp_header_t);
  integ_total_len += iv_sz + sizeof (esp_header_t);
  flag |= VNET_CRYPTO_OP_FLAG_INIT_IV;
  key_index = sa->linked_key_index;

  if (b != lb)
    {
      flag |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
      crypto_total_len = esp_encrypt_chain_crypto (vm, ptd, sa, b, lb,
                                                   icv_sz, payload,
                                                   payload_len, 0);
      tag = vlib_buffer_get_tail (lb) - icv_sz;
      integ_total_len = esp_encrypt_chain_integ (vm, ptd, sa, b, lb, icv_sz,
                                                 payload - iv_sz -
                                                 sizeof (esp_header_t),
                                                 payload_len + iv_sz +
                                                 sizeof (esp_header_t),
                                                 tag, 0);
    }
  else if (ipsec_sa_is_set_USE_ESN (sa) && !ipsec_sa_is_set_IS_AEAD (sa))
    {
      u32 seq_hi = clib_net_to_host_u32 (sa->seq_hi);
      clib_memcpy_fast (tag, &seq_hi, sizeof (seq_hi));
      integ_total_len += sizeof (seq_hi);
    }

out:
  return vnet_crypto_async_add_to_frame (vm, async_frame, key_index,
                                         crypto_total_len,
                                         integ_total_len - crypto_total_len,
                                         crypto_start_offset,
                                         integ_start_offset, bi, async_next,
                                         iv, tag, aad, flag);
}

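/* main encrypt path shared by the ip4/ip6 and tunnel/transport nodes: adds
   padding, footer and the ESP/UDP/IP headers, then queues each packet for
   synchronous or asynchronous crypto */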
always_inline uword
esp_encrypt_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                    vlib_frame_t * frame, int is_ip6, int is_tun,
                    u16 async_next)
{
  ipsec_main_t *im = &ipsec_main;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, vm->thread_index);
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  esp_gcm_nonce_t nonces[VLIB_FRAME_SIZE], *nonce = nonces;
  u32 thread_index = vm->thread_index;
  u16 buffer_data_size = vlib_buffer_get_default_data_size (vm);
  u32 current_sa_index = ~0, current_sa_packets = 0;
  u32 current_sa_bytes = 0, spi = 0;
  u8 esp_align = 4, iv_sz = 0, icv_sz = 0;
  ipsec_sa_t *sa0 = 0;
  vlib_buffer_t *lb;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;
  vnet_crypto_async_frame_t *async_frame = 0;
  int is_async = im->async_mode;
  vnet_crypto_async_op_id_t last_async_op = ~0;
  u16 drop_next = (is_ip6 ? ESP_ENCRYPT_NEXT_DROP6 : ESP_ENCRYPT_NEXT_DROP4);
  u16 n_async_drop = 0;

  vlib_get_buffers (vm, from, b, n_left);
  if (!is_async)
    {
      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->integ_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chained_integ_ops);
    }
  vec_reset_length (ptd->chunks);

  while (n_left > 0)
    {
      u32 sa_index0;
      dpo_id_t *dpo;
      esp_header_t *esp;
      u8 *payload, *next_hdr_ptr;
      u16 payload_len, payload_len_total, n_bufs;
      u32 hdr_len;

      if (n_left > 2)
        {
          u8 *p;
          vlib_prefetch_buffer_header (b[2], LOAD);
          p = vlib_buffer_get_current (b[1]);
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
          p -= CLIB_CACHE_LINE_BYTES;
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
          /* speculate that the trailer goes in the first buffer */
          CLIB_PREFETCH (vlib_buffer_get_tail (b[1]),
                         CLIB_CACHE_LINE_BYTES, LOAD);
        }

      if (is_tun)
        {
          /* we are on an ipsec tunnel's feature arc */
          vnet_buffer (b[0])->ipsec.sad_index =
            sa_index0 = ipsec_tun_protect_get_sa_out
            (vnet_buffer (b[0])->ip.adj_index[VLIB_TX]);
        }
      else
        sa_index0 = vnet_buffer (b[0])->ipsec.sad_index;

      if (sa_index0 != current_sa_index)
        {
          if (current_sa_packets)
            vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                             current_sa_index,
                                             current_sa_packets,
                                             current_sa_bytes);
          current_sa_packets = current_sa_bytes = 0;

          sa0 = pool_elt_at_index (im->sad, sa_index0);

          /* fetch the second cacheline ASAP */
          CLIB_PREFETCH (sa0->cacheline1, CLIB_CACHE_LINE_BYTES, LOAD);

          current_sa_index = sa_index0;
          spi = clib_net_to_host_u32 (sa0->spi);
          esp_align = sa0->esp_block_align;
          icv_sz = sa0->integ_icv_size;
          iv_sz = sa0->crypto_iv_size;

          /* submit the frame when the op_id differs from the previous one */
          if (is_async && sa0->crypto_async_enc_op_id != last_async_op)
            {
              if (async_frame && async_frame->n_elts)
                {
                  if (vnet_crypto_async_submit_open_frame (vm, async_frame))
                    esp_async_recycle_failed_submit (async_frame, b, from,
                                                     nexts, &n_async_drop,
                                                     drop_next,
                                                     ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR);
                }
              async_frame =
                vnet_crypto_async_get_frame (vm, sa0->crypto_async_enc_op_id);
              last_async_op = sa0->crypto_async_enc_op_id;
            }
        }

      if (PREDICT_FALSE (~0 == sa0->encrypt_thread_index))
        {
          /* this is the first packet to use this SA, claim the SA
           * for this thread. this could happen simultaneously on
           * another thread */
          clib_atomic_cmp_and_swap (&sa0->encrypt_thread_index, ~0,
                                    ipsec_sa_assign_thread (thread_index));
        }

      if (PREDICT_TRUE (thread_index != sa0->encrypt_thread_index))
        {
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop,
                              (is_ip6 ? ESP_ENCRYPT_NEXT_HANDOFF6 :
                               ESP_ENCRYPT_NEXT_HANDOFF4), next);
          goto trace;
        }

      lb = b[0];
      n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          b[0]->error = node->errors[ESP_ENCRYPT_ERROR_NO_BUFFERS];
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, drop_next, next);
          goto trace;
        }

      if (n_bufs > 1)
        {
          /* find last buffer in the chain */
          while (lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            lb = vlib_get_buffer (vm, lb->next_buffer);
        }

      if (PREDICT_FALSE (esp_seq_advance (sa0)))
        {
          b[0]->error = node->errors[ESP_ENCRYPT_ERROR_SEQ_CYCLED];
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, drop_next, next);
          goto trace;
        }

      /* space for IV */
      hdr_len = iv_sz;

      if (ipsec_sa_is_set_IS_TUNNEL (sa0))
        {
          payload = vlib_buffer_get_current (b[0]);
          next_hdr_ptr = esp_add_footer_and_icv (vm, &lb, esp_align, icv_sz,
                                                 next, node,
                                                 buffer_data_size,
                                                 vlib_buffer_length_in_chain
                                                 (vm, b[0]));
          if (!next_hdr_ptr)
            {
              b[0]->error = node->errors[ESP_ENCRYPT_ERROR_NO_BUFFERS];
              esp_set_next_index (is_async, from, nexts, from[b - bufs],
                                  &n_async_drop, drop_next, next);
              goto trace;
            }
          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              esp_fill_udp_hdr (sa0, (udp_header_t *) (payload - hdr_len),
                                payload_len_total + hdr_len);
            }

          /* IP header */
          if (ipsec_sa_is_set_IS_TUNNEL_V6 (sa0))
            {
              ip6_header_t *ip6;
              u16 len = sizeof (ip6_header_t);
              hdr_len += len;
              ip6 = (ip6_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip6, &sa0->ip6_hdr, len);
              *next_hdr_ptr = (is_ip6 ?
                               IP_PROTOCOL_IPV6 : IP_PROTOCOL_IP_IN_IP);
              len = payload_len_total + hdr_len - len;
              ip6->payload_length = clib_net_to_host_u16 (len);
            }
          else
            {
              ip4_header_t *ip4;
              u16 len = sizeof (ip4_header_t);
              hdr_len += len;
              ip4 = (ip4_header_t *) (payload - hdr_len);
              clib_memcpy_fast (ip4, &sa0->ip4_hdr, len);
              *next_hdr_ptr = (is_ip6 ?
                               IP_PROTOCOL_IPV6 : IP_PROTOCOL_IP_IN_IP);
              len = payload_len_total + hdr_len;
              esp_update_ip4_hdr (ip4, len, /* is_transport */ 0, 0);
            }

          dpo = &sa0->dpo;
          if (!is_tun)
            {
              next[0] = dpo->dpoi_next_node;
              vnet_buffer (b[0])->ip.adj_index[VLIB_TX] = dpo->dpoi_index;
            }
          else
            next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
        }
      else                      /* transport mode */
        {
          u8 *l2_hdr, l2_len, *ip_hdr, ip_len;
          ip6_ext_header_t *ext_hdr;
          udp_header_t *udp = 0;
          u16 udp_len = 0;
          u8 *old_ip_hdr = vlib_buffer_get_current (b[0]);

          ip_len = is_ip6 ?
            esp_get_ip6_hdr_len ((ip6_header_t *) old_ip_hdr, &ext_hdr) :
            ip4_header_bytes ((ip4_header_t *) old_ip_hdr);

          vlib_buffer_advance (b[0], ip_len);
          payload = vlib_buffer_get_current (b[0]);
          next_hdr_ptr = esp_add_footer_and_icv (vm, &lb, esp_align, icv_sz,
                                                 next, node,
                                                 buffer_data_size,
                                                 vlib_buffer_length_in_chain
                                                 (vm, b[0]));
          if (!next_hdr_ptr)
            {
              esp_set_next_index (is_async, from, nexts, from[b - bufs],
                                  &n_async_drop, drop_next, next);
              goto trace;
            }

          b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
          payload_len = b[0]->current_length;
          payload_len_total = vlib_buffer_length_in_chain (vm, b[0]);

          /* ESP header */
          hdr_len += sizeof (*esp);
          esp = (esp_header_t *) (payload - hdr_len);

          /* optional UDP header */
          if (ipsec_sa_is_set_UDP_ENCAP (sa0))
            {
              hdr_len += sizeof (udp_header_t);
              udp = (udp_header_t *) (payload - hdr_len);
            }

          /* IP header */
          hdr_len += ip_len;
          ip_hdr = payload - hdr_len;

          /* L2 header */
          if (!is_tun)
            {
              l2_len = vnet_buffer (b[0])->ip.save_rewrite_length;
              hdr_len += l2_len;
              l2_hdr = payload - hdr_len;

              /* copy l2 and ip header */
              clib_memcpy_le32 (l2_hdr, old_ip_hdr - l2_len, l2_len);
            }
          else
            l2_len = 0;

          if (is_ip6)
            {
              ip6_header_t *ip6 = (ip6_header_t *) (old_ip_hdr);
              if (PREDICT_TRUE (NULL == ext_hdr))
                {
                  *next_hdr_ptr = ip6->protocol;
                  ip6->protocol = IP_PROTOCOL_IPSEC_ESP;
                }
              else
                {
                  *next_hdr_ptr = ext_hdr->next_hdr;
                  ext_hdr->next_hdr = IP_PROTOCOL_IPSEC_ESP;
                }
              ip6->payload_length =
                clib_host_to_net_u16 (payload_len_total + hdr_len - l2_len -
                                      sizeof (ip6_header_t));
            }
          else
            {
              u16 len;
              ip4_header_t *ip4 = (ip4_header_t *) (old_ip_hdr);
              *next_hdr_ptr = ip4->protocol;
              len = payload_len_total + hdr_len - l2_len;
              if (udp)
                {
                  esp_update_ip4_hdr (ip4, len, /* is_transport */ 1, 1);
                  udp_len = len - ip_len;
                }
              else
                esp_update_ip4_hdr (ip4, len, /* is_transport */ 1, 0);
            }

          clib_memcpy_le64 (ip_hdr, old_ip_hdr, ip_len);

          if (udp)
            {
              esp_fill_udp_hdr (sa0, udp, udp_len);
            }

          next[0] = ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT;
        }

      if (lb != b[0])
        {
          crypto_ops = &ptd->chained_crypto_ops;
          integ_ops = &ptd->chained_integ_ops;
        }
      else
        {
          crypto_ops = &ptd->crypto_ops;
          integ_ops = &ptd->integ_ops;
        }

      esp->spi = spi;
      esp->seq = clib_net_to_host_u32 (sa0->seq);

      if (is_async)
        {
          if (PREDICT_FALSE (sa0->crypto_async_enc_op_id == 0))
            {
              esp_set_next_index (is_async, from, nexts, from[b - bufs],
                                  &n_async_drop, drop_next, next);
              goto trace;
            }

          if (esp_prepare_async_frame (vm, ptd, &async_frame, sa0, b[0], esp,
                                       payload, payload_len, iv_sz,
                                       icv_sz, from[b - bufs], next[0],
                                       hdr_len, async_next, lb))
            {
              /* the failure can only come from frame submission, so free the
                 whole frame */
              if (async_frame->n_elts)
                esp_async_recycle_failed_submit (async_frame, b, from, nexts,
                                                 &n_async_drop, drop_next,
                                                 ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR);
              b[0]->error = node->errors[ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR];
              esp_set_next_index (1, from, nexts, from[b - bufs],
                                  &n_async_drop, drop_next, next);
              goto trace;
            }
        }
      else
        {
          esp_prepare_sync_op (vm, ptd, crypto_ops, integ_ops, sa0, payload,
                               payload_len, iv_sz, icv_sz, bufs, b, lb,
                               hdr_len, esp, nonce++);
        }

      vlib_buffer_advance (b[0], 0LL - hdr_len);

      current_sa_packets += 1;
      current_sa_bytes += payload_len_total;

    trace:
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_encrypt_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                    sizeof (*tr));
          tr->sa_index = sa_index0;
          tr->spi = sa0->spi;
          tr->seq = sa0->seq;
          tr->sa_seq_hi = sa0->seq_hi;
          tr->udp_encap = ipsec_sa_is_set_UDP_ENCAP (sa0);
          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
        }
      /* next */
      n_left -= 1;
      next += 1;
      b += 1;
    }

  vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                   current_sa_index, current_sa_packets,
                                   current_sa_bytes);
  if (!is_async)
    {
      esp_process_ops (vm, node, ptd->crypto_ops, bufs, nexts, drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, bufs, nexts,
                               ptd->chunks, drop_next);

      esp_process_ops (vm, node, ptd->integ_ops, bufs, nexts, drop_next);
      esp_process_chained_ops (vm, node, ptd->chained_integ_ops, bufs, nexts,
                               ptd->chunks, drop_next);
    }
  else
    {
      if (async_frame && async_frame->n_elts)
        {
          if (vnet_crypto_async_submit_open_frame (vm, async_frame) < 0)
            esp_async_recycle_failed_submit (async_frame, b, from, nexts,
                                             &n_async_drop, drop_next,
                                             ESP_ENCRYPT_ERROR_CRYPTO_ENGINE_ERROR);
        }
      vlib_node_increment_counter (vm, node->node_index,
                                   ESP_ENCRYPT_ERROR_RX_PKTS,
                                   frame->n_vectors);
      if (n_async_drop)
        vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_async_drop);

      return frame->n_vectors;
    }

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_ENCRYPT_ERROR_RX_PKTS, frame->n_vectors);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}

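/* post node run once asynchronous crypto has completed; forwards each buffer
   to the next index saved by esp_prepare_async_frame () */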
always_inline uword
esp_encrypt_post_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vlib_frame_t * frame)
{
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;

  vlib_get_buffers (vm, from, b, n_left);

  if (n_left >= 4)
    {
      vlib_prefetch_buffer_header (b[0], LOAD);
      vlib_prefetch_buffer_header (b[1], LOAD);
      vlib_prefetch_buffer_header (b[2], LOAD);
      vlib_prefetch_buffer_header (b[3], LOAD);
    }

  while (n_left > 8)
    {
      vlib_prefetch_buffer_header (b[4], LOAD);
      vlib_prefetch_buffer_header (b[5], LOAD);
      vlib_prefetch_buffer_header (b[6], LOAD);
      vlib_prefetch_buffer_header (b[7], LOAD);

      next[0] = (esp_post_data (b[0]))->next_index;
      next[1] = (esp_post_data (b[1]))->next_index;
      next[2] = (esp_post_data (b[2]))->next_index;
      next[3] = (esp_post_data (b[3]))->next_index;

      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE))
        {
          if (b[0]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                             sizeof (*tr));
              tr->next_index = next[0];
            }
          if (b[1]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[1],
                                                             sizeof (*tr));
              tr->next_index = next[1];
            }
          if (b[2]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[2],
                                                             sizeof (*tr));
              tr->next_index = next[2];
            }
          if (b[3]->flags & VLIB_BUFFER_IS_TRACED)
            {
              esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[3],
                                                             sizeof (*tr));
              tr->next_index = next[3];
            }
        }

      b += 4;
      next += 4;
      n_left -= 4;
    }

  while (n_left > 0)
    {
      next[0] = (esp_post_data (b[0]))->next_index;
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_encrypt_post_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                         sizeof (*tr));
          tr->next_index = next[0];
        }

      b += 1;
      next += 1;
      n_left -= 1;
    }

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_ENCRYPT_ERROR_POST_RX_PKTS,
                               frame->n_vectors);
  vlib_buffer_enqueue_to_next (vm, node, from, nexts, frame->n_vectors);
  return frame->n_vectors;
}

VLIB_NODE_FN (esp4_encrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, 0 /* is_ip6 */ , 0,
                             esp_encrypt_async_next.esp4_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_node) = {
  .name = "esp4-encrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "interface-output"
  },
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp4_encrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_post_node) = {
  .name = "esp4-encrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, 1 /* is_ip6 */ , 0,
                             esp_encrypt_async_next.esp6_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_node) = {
  .name = "esp6-encrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_post_node) = {
  .name = "esp6-encrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt",

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp4_encrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, 0 /* is_ip6 */ , 1,
                             esp_encrypt_async_next.esp4_tun_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_tun_node) = {
  .name = "esp4-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "esp4-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "error-drop",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

VLIB_NODE_FN (esp4_encrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_encrypt_tun_post_node) = {
  .name = "esp4-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp4-encrypt-tun",

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,
};
/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_encrypt_inline (vm, node, from_frame, 1 /* is_ip6 */ , 1,
                             esp_encrypt_async_next.esp6_tun_post_next);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_tun_node) = {
  .name = "esp6-encrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,

  .n_next_nodes = ESP_ENCRYPT_N_NEXT,
  .next_nodes = {
    [ESP_ENCRYPT_NEXT_DROP4] = "ip4-drop",
    [ESP_ENCRYPT_NEXT_DROP6] = "ip6-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF4] = "error-drop",
    [ESP_ENCRYPT_NEXT_HANDOFF6] = "esp6-encrypt-tun-handoff",
    [ESP_ENCRYPT_NEXT_INTERFACE_OUTPUT] = "adj-midchain-tx",
  },
};

/* *INDENT-ON* */

VLIB_NODE_FN (esp6_encrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_encrypt_post_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_encrypt_tun_post_node) = {
  .name = "esp6-encrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_post_encrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .sibling_of = "esp6-encrypt-tun",

  .n_errors = ARRAY_LEN(esp_encrypt_error_strings),
  .error_strings = esp_encrypt_error_strings,
};
/* *INDENT-ON* */

typedef struct
{
  u32 sa_index;
} esp_no_crypto_trace_t;

static u8 *
format_esp_no_crypto_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_no_crypto_trace_t *t = va_arg (*args, esp_no_crypto_trace_t *);

  s = format (s, "esp-no-crypto: sa-index %u", t->sa_index);

  return s;
}

enum
{
  ESP_NO_CRYPTO_NEXT_DROP,
  ESP_NO_CRYPTO_N_NEXT,
};

enum
{
  ESP_NO_CRYPTO_ERROR_RX_PKTS,
};

static char *esp_no_crypto_error_strings[] = {
  "Outbound ESP packets received",
};

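/* drop path used when an outbound tunnel is protected by an SA that has no
   crypto configured: count the packets and send them to the drop next */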
always_inline uword
esp_no_crypto_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
                      vlib_frame_t * frame)
{
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u32 *from = vlib_frame_vector_args (frame);
  u32 n_left = frame->n_vectors;

  vlib_get_buffers (vm, from, b, n_left);

  while (n_left > 0)
    {
      u32 sa_index0;

      /* packets are always going to be dropped, but get the sa_index */
      sa_index0 = ipsec_tun_protect_get_sa_out
        (vnet_buffer (b[0])->ip.adj_index[VLIB_TX]);

      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_no_crypto_trace_t *tr = vlib_add_trace (vm, node, b[0],
                                                      sizeof (*tr));
          tr->sa_index = sa_index0;
        }

      n_left -= 1;
      b += 1;
    }

  vlib_node_increment_counter (vm, node->node_index,
                               ESP_NO_CRYPTO_ERROR_RX_PKTS, frame->n_vectors);

  vlib_buffer_enqueue_to_single_next (vm, node, from,
                                      ESP_NO_CRYPTO_NEXT_DROP,
                                      frame->n_vectors);

  return frame->n_vectors;
}

VLIB_NODE_FN (esp4_no_crypto_tun_node) (vlib_main_t * vm,
                                        vlib_node_runtime_t * node,
                                        vlib_frame_t * from_frame)
{
  return esp_no_crypto_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_no_crypto_tun_node) =
{
  .name = "esp4-no-crypto",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_no_crypto_trace,
  .n_errors = ARRAY_LEN(esp_no_crypto_error_strings),
  .error_strings = esp_no_crypto_error_strings,
  .n_next_nodes = ESP_NO_CRYPTO_N_NEXT,
  .next_nodes = {
    [ESP_NO_CRYPTO_NEXT_DROP] = "ip4-drop",
  },
};

VLIB_NODE_FN (esp6_no_crypto_tun_node) (vlib_main_t * vm,
                                        vlib_node_runtime_t * node,
                                        vlib_frame_t * from_frame)
{
  return esp_no_crypto_inline (vm, node, from_frame);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp6_no_crypto_tun_node) =
{
  .name = "esp6-no-crypto",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_no_crypto_trace,
  .n_errors = ARRAY_LEN(esp_no_crypto_error_strings),
  .error_strings = esp_no_crypto_error_strings,
  .n_next_nodes = ESP_NO_CRYPTO_N_NEXT,
  .next_nodes = {
    [ESP_NO_CRYPTO_NEXT_DROP] = "ip6-drop",
  },
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */