/*
 * esp_decrypt.c : IPSec ESP decrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>
#include <vnet/l2/l2_input.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/esp.h>
#include <vnet/ipsec/ipsec_io.h>
#include <vnet/ipsec/ipsec_tun.h>

#include <vnet/gre/packet.h>

#define foreach_esp_decrypt_next                \
_(DROP, "error-drop")                           \
_(IP4_INPUT, "ip4-input-no-checksum")           \
_(IP6_INPUT, "ip6-input")                       \
_(L2_INPUT, "l2-input")                         \
_(HANDOFF, "handoff")

#define _(v, s) ESP_DECRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_next
#undef _
    ESP_DECRYPT_N_NEXT,
} esp_decrypt_next_t;

#define foreach_esp_decrypt_post_next           \
_(DROP, "error-drop")                           \
_(IP4_INPUT, "ip4-input-no-checksum")           \
_(IP6_INPUT, "ip6-input")                       \
_(L2_INPUT, "l2-input")

#define _(v, s) ESP_DECRYPT_POST_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_post_next
#undef _
    ESP_DECRYPT_POST_N_NEXT,
} esp_decrypt_post_next_t;

#define foreach_esp_decrypt_error                               \
 _(RX_PKTS, "ESP pkts received")                                \
 _(RX_POST_PKTS, "ESP-POST pkts received")                      \
 _(DECRYPTION_FAILED, "ESP decryption failed")                  \
 _(INTEG_ERROR, "Integrity check failed")                       \
 _(CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)") \
 _(REPLAY, "SA replayed packet")                                \
 _(RUNT, "undersized packet")                                   \
 _(NO_BUFFERS, "no buffers (packet dropped)")                   \
 _(OVERSIZED_HEADER, "buffer with oversized header (dropped)")  \
 _(NO_TAIL_SPACE, "not enough buffer tail space (dropped)")     \
 _(TUN_NO_PROTO, "no tunnel protocol")                          \
 _(UNSUP_PAYLOAD, "unsupported payload")

typedef enum
{
#define _(sym,str) ESP_DECRYPT_ERROR_##sym,
  foreach_esp_decrypt_error
#undef _
    ESP_DECRYPT_N_ERROR,
} esp_decrypt_error_t;

static char *esp_decrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_decrypt_error
#undef _
};

typedef struct
{
  u32 seq;
  u32 sa_seq;
  u32 sa_seq_hi;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_decrypt_trace_t;

/* packet trace format function */
static u8 *
format_esp_decrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_decrypt_trace_t *t = va_arg (*args, esp_decrypt_trace_t *);

  s =
    format (s,
            "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u",
            format_ipsec_crypto_alg, t->crypto_alg, format_ipsec_integ_alg,
            t->integ_alg, t->seq, t->sa_seq, t->sa_seq_hi);
  return s;
}

#define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)

static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

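/* Example: with a frame of 4 ops where op[2] fails its HMAC check,
 * vnet_crypto_process_ops() returns 3, so n_fail = 1.  Each op's user_data
 * was set to its buffer's index within the frame when the op was prepared,
 * so only b[op->user_data] is steered to ESP_DECRYPT_NEXT_DROP while the
 * rest of the frame proceeds. */
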
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks, int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

always_inline void
esp_remove_tail (vlib_main_t * vm, vlib_buffer_t * b, vlib_buffer_t * last,
                 u16 tail)
{
  vlib_buffer_t *before_last = b;

  if (last->current_length > tail)
    {
      last->current_length -= tail;
      return;
    }
  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = b;
      b = vlib_get_buffer (vm, b->next_buffer);
    }
  before_last->current_length -= tail - last->current_length;
  vlib_buffer_free_one (vm, before_last->next_buffer);
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
}

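/* Example: a two-buffer chain whose first buffer holds 100 bytes and whose
 * last holds 4, with tail = 10 (footer + ICV).  The last buffer is shorter
 * than the tail, so the remaining 6 bytes are trimmed from the first
 * buffer, the last buffer is freed and the chain is unlinked. */
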
/* The ICV is split across the last two buffers, so move it to the last
   buffer and return a pointer to it */
static_always_inline u8 *
esp_move_icv (vlib_main_t * vm, vlib_buffer_t * first,
              esp_decrypt_packet_data2_t * pd2, u16 icv_sz, u16 * dif)
{
  vlib_buffer_t *before_last, *bp;
  u16 last_sz = pd2->lb->current_length;
  u16 first_sz = icv_sz - last_sz;

  bp = before_last = first;
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = bp;
      bp = vlib_get_buffer (vm, bp->next_buffer);
    }

  u8 *lb_curr = vlib_buffer_get_current (pd2->lb);
  memmove (lb_curr + first_sz, lb_curr, last_sz);
  clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
                    first_sz);
  before_last->current_length -= first_sz;
  clib_memset (vlib_buffer_get_tail (before_last), 0, first_sz);
  if (dif)
    dif[0] = first_sz;
  pd2->lb = before_last;
  pd2->icv_removed = 1;
  pd2->free_buffer_index = before_last->next_buffer;
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return lb_curr;
}

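/* Sketch of the move, e.g. for icv_sz = 16 with the ICV split 6/10 across
 * the last two buffers:
 *
 *   before: ... | payload + ICV[0..5] | ICV[6..15] |    (chained)
 *   after:  ... | payload |             ICV[0..15]      (ICV buffer unlinked)
 *
 * The unlinked buffer's index is parked in pd2->free_buffer_index so it can
 * be freed later, and pd2->lb now points at the new last buffer. */
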
static_always_inline i16
esp_insert_esn (vlib_main_t * vm, ipsec_sa_t * sa,
                esp_decrypt_packet_data2_t * pd2, u32 * data_len,
                u8 ** digest, u16 * len, vlib_buffer_t * b, u8 * payload)
{
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 0;

  /* shift ICV by 4 bytes to insert ESN */
  u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
  u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa->seq_hi);

  if (pd2->icv_removed)
    {
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi, sz);
          *data_len += sz;
        }
      else
        return sz;

      len[0] = b->current_length;
    }
  else
    {
      clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
      clib_memcpy_fast (payload + len[0], &seq_hi, sz);
      clib_memcpy_fast (payload + len[0] + sz, tmp, ESP_MAX_ICV_SIZE);
      *data_len += sz;
      *digest += sz;
    }
  return sz;
}

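/* With extended sequence numbers (RFC 4303, Appendix A) the high 32 bits of
 * the sequence number are included in the ICV computation but never
 * transmitted.  To verify, the receiver splices seq_hi back in between the
 * encrypted payload and the ICV:
 *
 *   wire:     | payload | ICV |
 *   checked:  | payload | seq_hi (4B) | ICV |
 */
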
static_always_inline u8 *
esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
                  esp_decrypt_packet_data2_t * pd2, u16 icv_sz,
                  ipsec_sa_t * sa, u8 * extra_esn, u32 * len)
{
  u16 dif = 0;
  u8 *digest = esp_move_icv (vm, first, pd2, icv_sz, &dif);
  if (dif)
    *len -= dif;

  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u8 sz = sizeof (sa->seq_hi);
      u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);

      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi, sz);
          *len += sz;
        }
      else
        {
          /* no space for ESN at the tail, use the next buffer
           * (with ICV data) */
          ASSERT (pd2->icv_removed);
          vlib_buffer_t *tmp = vlib_get_buffer (vm, pd2->free_buffer_index);
          clib_memcpy_fast (vlib_buffer_get_current (tmp) - sz, &seq_hi, sz);
          extra_esn[0] = 1;
        }
    }
  return digest;
}

static_always_inline int
esp_decrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         esp_decrypt_packet_data2_t * pd2,
                         ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
                         u8 * start_src, u32 start_len,
                         u8 ** digest, u16 * n_ch, u32 * integ_total_len)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = vlib_get_buffer (vm, b->next_buffer);
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start_src;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (pd2->icv_removed)
            ch->len = cb->current_length;
          else
            ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_host_to_net_u32 (sa0->seq_hi);
              u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa0->seq_hi);
              u8 *esn;
              vlib_buffer_t *tmp_b;
              u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
              if (space_left < sz)
                {
                  if (pd2->icv_removed)
                    {
                      /* use the pre-data area of the last buffer
                         that was removed from the chain */
                      tmp_b = vlib_get_buffer (vm, pd2->free_buffer_index);
                      esn = tmp_b->data - sz;
                    }
                  else
                    {
                      /* no space, need to allocate a new buffer */
                      u32 tmp_bi = 0;
                      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
                        return -1;
                      tmp_b = vlib_get_buffer (vm, tmp_bi);
                      esn = tmp_b->data;
                      pd2->free_buffer_index = tmp_bi;
                    }
                  clib_memcpy_fast (esn, &seq_hi, sz);

                  vec_add2 (ptd->chunks, ch, 1);
                  n_chunks += 1;
                  ch->src = esn;
                  ch->len = sz;
                }
              else
                {
                  if (pd2->icv_removed)
                    {
                      clib_memcpy_fast (vlib_buffer_get_tail
                                        (pd2->lb), &seq_hi, sz);
                    }
                  else
                    {
                      clib_memcpy_fast (tmp, *digest, ESP_MAX_ICV_SIZE);
                      clib_memcpy_fast (*digest, &seq_hi, sz);
                      clib_memcpy_fast (*digest + sz, tmp, ESP_MAX_ICV_SIZE);
                      *digest += sz;
                    }
                  ch->len += sz;
                }
            }
          total_len += ch->len;
          break;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;
  if (integ_total_len)
    *integ_total_len = total_len;

  return 0;
}

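/* The chunk vector describes the integrity region as a scatter-gather list:
 * one chunk for the first buffer (starting at the ESP header), one per
 * intermediate buffer, and a final chunk for the last buffer minus the ICV,
 * with the ESN high word appended wherever room could be found.  E.g. a
 * 3-buffer packet typically yields 3 chunks, or 4 when seq_hi needs a chunk
 * of its own. */
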
static_always_inline u32
esp_decrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          esp_decrypt_packet_data2_t * pd2,
                          ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
                          u8 * start, u32 start_len, u8 ** tag, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);
  n_chunks = 1;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = ch->dst = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              if (pd2->lb->current_length < icv_sz)
                {
                  u16 dif = 0;
                  *tag = esp_move_icv (vm, b, pd2, icv_sz, &dif);

                  /* this chunk does not contain crypto data */
                  n_chunks -= 1;
                  /* and fix the previous chunk's length as it might have
                     been changed */
                  ASSERT (n_chunks > 0);
                  if (pd2->lb == b)
                    {
                      total_len -= dif;
                      ch[-1].len -= dif;
                    }
                  else
                    {
                      total_len = total_len + pd2->lb->current_length -
                        ch[-1].len;
                      ch[-1].len = pd2->lb->current_length;
                    }
                  break;
                }
              else
                *tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;
            }

          if (pd2->icv_removed)
            total_len += ch->len = cb->current_length;
          else
            total_len += ch->len = cb->current_length - icv_sz;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}

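/* Unlike the integrity chain, the crypto chunks are in-place (src == dst),
 * since decryption overwrites the ciphertext.  For AEAD SAs the tag pointer
 * must reference the full, contiguous ICV, hence the esp_move_icv() call
 * when the last buffer holds only part of it. */
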
static_always_inline void
esp_decrypt_prepare_sync_op (vlib_main_t * vm, vlib_node_runtime_t * node,
                             ipsec_per_thread_data_t * ptd,
                             vnet_crypto_op_t *** crypto_ops,
                             vnet_crypto_op_t *** integ_ops,
                             vnet_crypto_op_t * op,
                             ipsec_sa_t * sa0, u8 * payload,
                             u16 len, u8 icv_sz, u8 iv_sz,
                             esp_decrypt_packet_data_t * pd,
                             esp_decrypt_packet_data2_t * pd2,
                             vlib_buffer_t * b, u16 * next, u32 index)
{
  const u8 esp_sz = sizeof (esp_header_t);

  if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
    {
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->key_index = sa0->integ_key_index;
      op->src = payload;
      op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
      op->user_data = index;
      op->digest = payload + len;
      op->digest_len = icv_sz;
      op->len = len;

      if (pd->is_chain)
        {
          /* buffer is chained */
          op->len = pd->current_length;

          /* special case when the ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that ESN needs to be added after the encrypted data and may or
           * may not fit in the tail. */
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              op->digest =
                esp_move_icv_esn (vm, b, pd2, icv_sz, sa0,
                                  &extra_esn, &op->len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  op->len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      *integ_ops = &ptd->integ_ops;
                      *crypto_ops = &ptd->crypto_ops;
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            op->digest = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          if (esp_decrypt_chain_integ (vm, ptd, pd2, sa0, b, icv_sz,
                                       payload, pd->current_length,
                                       &op->digest, &op->n_chunks, 0) < 0)
            {
              b->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
              next[0] = ESP_DECRYPT_NEXT_DROP;
              return;
            }
        }
      else
        esp_insert_esn (vm, sa0, pd2, &op->len, &op->digest, &len, b,
                        payload);
    out:
      vec_add_aligned (*(integ_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }

  payload += esp_sz;
  len -= esp_sz;

  if (sa0->crypto_dec_op_id != VNET_CRYPTO_OP_NONE)
    {
      vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
      op->key_index = sa0->crypto_key_index;
      op->iv = payload;

      if (ipsec_sa_is_set_IS_AEAD (sa0))
        {
          esp_header_t *esp0;
          esp_aead_t *aad;
          u8 *scratch;

          /*
           * construct the AAD and the nonce (Salt || IV) in a scratch
           * space in front of the IP header.
           */
          scratch = payload - esp_sz;
          esp0 = (esp_header_t *) (scratch);

          scratch -= (sizeof (*aad) + pd->hdr_sz);
          op->aad = scratch;

          op->aad_len = esp_aad_fill (op->aad, esp0, sa0);

          /*
           * we don't need to refer to the ESP header anymore so we
           * can overwrite it with the salt and use the IV where it is
           * to form the nonce = (Salt + IV)
           */
          op->iv -= sizeof (sa0->salt);
          clib_memcpy_fast (op->iv, &sa0->salt, sizeof (sa0->salt));

          op->tag = payload + len;
          op->tag_len = 16;
        }
      op->src = op->dst = payload += iv_sz;
      op->len = len - iv_sz;
      op->user_data = index;

      if (pd->is_chain && (pd2->lb != b))
        {
          /* buffer is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          esp_decrypt_chain_crypto (vm, ptd, pd2, sa0, b, icv_sz,
                                    payload, len - pd->iv_sz + pd->icv_sz,
                                    &op->tag, &op->n_chunks);
        }

      vec_add_aligned (*(crypto_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }
}

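/* For an AEAD SA (e.g. AES-GCM) the scratch area in front of the packet
 * ends up laid out roughly as:
 *
 *   | AAD (spi, seq[, seq_hi]) | ... | salt | IV | ciphertext | tag |
 *                                    \___ nonce __/
 *
 * The salt overwrites the now-unneeded ESP header bytes, so the nonce
 * (salt || IV) is contiguous without copying the IV anywhere. */
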
static_always_inline int
esp_decrypt_prepare_async_frame (vlib_main_t * vm,
                                 vlib_node_runtime_t * node,
                                 ipsec_per_thread_data_t * ptd,
                                 vnet_crypto_async_frame_t ** f,
                                 ipsec_sa_t * sa0, u8 * payload, u16 len,
                                 u8 icv_sz, u8 iv_sz,
                                 esp_decrypt_packet_data_t * pd,
                                 esp_decrypt_packet_data2_t * pd2, u32 bi,
                                 vlib_buffer_t * b, u16 * next,
                                 u16 async_next)
{
  const u8 esp_sz = sizeof (esp_header_t);
  u32 current_protect_index = vnet_buffer (b)->ipsec.protect_index;
  esp_decrypt_packet_data_t *async_pd = &(esp_post_data (b))->decrypt_data;
  esp_decrypt_packet_data2_t *async_pd2 = esp_post_data2 (b);
  u8 *tag = payload + len, *iv = payload + esp_sz, *aad = 0;
  u32 key_index;
  u32 crypto_len, integ_len = 0;
  i16 crypto_start_offset, integ_start_offset = 0;
  u8 flags = 0;

  if (!ipsec_sa_is_set_IS_AEAD (sa0))
    {
      /* linked algs */
      key_index = sa0->linked_key_index;
      integ_start_offset = payload - b->data;
      integ_len = len;

      if (pd->is_chain)
        {
          /* buffer is chained */
          integ_len = pd->current_length;

          /* special case when the ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that ESN needs to be added after the encrypted data and may or
           * may not fit in the tail. */
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              tag = esp_move_icv_esn (vm, b, pd2, icv_sz, sa0,
                                      &extra_esn, &integ_len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  integ_len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          if (esp_decrypt_chain_integ (vm, ptd, pd2, sa0, b, icv_sz, payload,
                                       pd->current_length, &tag,
                                       0, &integ_len) < 0)
            {
              /* buffer allocation failed, do not add to frame and drop */
              b->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
              next[0] = ESP_DECRYPT_NEXT_DROP;
              return -1;
            }
        }
      else
        esp_insert_esn (vm, sa0, pd2, &integ_len, &tag, &len, b, payload);
    }
  else
    key_index = sa0->crypto_key_index;

out:
  /* crypto */
  payload += esp_sz;
  len -= esp_sz;
  iv = payload;

  if (ipsec_sa_is_set_IS_AEAD (sa0))
    {
      esp_header_t *esp0;
      u8 *scratch;

      /*
       * construct the AAD and the nonce (Salt || IV) in a scratch
       * space in front of the IP header.
       */
      scratch = payload - esp_sz;
      esp0 = (esp_header_t *) (scratch);

      scratch -= (sizeof (esp_aead_t) + pd->hdr_sz);
      aad = scratch;

      esp_aad_fill (aad, esp0, sa0);

      /*
       * we don't need to refer to the ESP header anymore so we
       * can overwrite it with the salt and use the IV where it is
       * to form the nonce = (Salt + IV)
       */
      iv -= sizeof (sa0->salt);
      clib_memcpy_fast (iv, &sa0->salt, sizeof (sa0->salt));

      tag = payload + len;
    }

  crypto_start_offset = (payload += iv_sz) - b->data;
  crypto_len = len - iv_sz;

  if (pd->is_chain && (pd2->lb != b))
    {
      /* buffer is chained */
      flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;

      crypto_len = esp_decrypt_chain_crypto (vm, ptd, pd2, sa0, b, icv_sz,
                                             payload,
                                             len - pd->iv_sz + pd->icv_sz,
                                             &tag, 0);
    }

  *async_pd = *pd;
  *async_pd2 = *pd2;
  pd->protect_index = current_protect_index;

  /* for AEAD, integ_len - crypto_len will be negative; that is ok since it
   * is ignored by the engine. */
  return vnet_crypto_async_add_to_frame (vm, f, key_index, crypto_len,
                                         integ_len - crypto_len,
                                         crypto_start_offset,
                                         integ_start_offset,
                                         bi, async_next, iv, tag, aad, flags);
}

static_always_inline void
esp_decrypt_post_crypto (vlib_main_t * vm, vlib_node_runtime_t * node,
                         esp_decrypt_packet_data_t * pd,
                         esp_decrypt_packet_data2_t * pd2, vlib_buffer_t * b,
                         u16 * next, int is_ip6, int is_tun, int is_async)
{
  ipsec_main_t *im = &ipsec_main;
  ipsec_sa_t *sa0 = vec_elt_at_index (im->sad, pd->sa_index);
  vlib_buffer_t *lb = b;
  const u8 esp_sz = sizeof (esp_header_t);
  const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
  u8 pad_length = 0, next_header = 0;
  u16 icv_sz;

  /*
   * redo the anti-replay check.
   * Say this frame contains the sequence numbers s, s+1, s+1, s+1,
   * and s and s+1 are in the window. When we did the anti-replay
   * check above we did so against the state of the window (W)
   * after packet s-1, so each of the packets in the sequence is
   * accepted.
   * This time s is checked against Ws-1 and s+1 is checked against Ws
   * (i.e. the window state is updated/advanced), so this time the
   * successive duplicate s+1 packets are dropped.
   * This is a consequence of batching the decrypts. If the
   * check-decrypt-advance process were done per packet it would
   * be fine. But we batch the decrypts because it's much more efficient
   * to do so in SW, and unavoidable if we offload to HW where the
   * process is async.
   *
   * You're probably thinking that this means an attacker can send the
   * above sequence and cause VPP to perform decrypts that will fail,
   * and that's true. But if the attacker can determine s (a valid
   * sequence number in the window), which is non-trivial, it can generate
   * a sequence s, s+1, s+2, s+3, ... s+n and nothing will prevent any
   * implementation, sequential or batching, from decrypting these.
   */
  if (ipsec_sa_anti_replay_check (sa0, pd->seq))
    {
      b->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
      next[0] = ESP_DECRYPT_NEXT_DROP;
      return;
    }

  ipsec_sa_anti_replay_advance (sa0, pd->seq);

  if (pd->is_chain)
    {
      lb = pd2->lb;
      icv_sz = pd2->icv_removed ? 0 : pd->icv_sz;
      if (pd2->free_buffer_index)
        {
          vlib_buffer_free_one (vm, pd2->free_buffer_index);
          lb->next_buffer = 0;
        }
      if (lb->current_length < sizeof (esp_footer_t) + icv_sz)
        {
          /* the ESP footer is either split across the last two buffers
           * or is entirely in the next-to-last buffer */

          vlib_buffer_t *before_last = b, *bp = b;
          while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
            {
              before_last = bp;
              bp = vlib_get_buffer (vm, bp->next_buffer);
            }
          u8 *bt = vlib_buffer_get_tail (before_last);

          if (lb->current_length == icv_sz)
            {
              esp_footer_t *f = (esp_footer_t *) (bt - sizeof (*f));
              pad_length = f->pad_length;
              next_header = f->next_header;
            }
          else
            {
              pad_length = (bt - 1)[0];
              next_header = ((u8 *) vlib_buffer_get_current (lb))[0];
            }
        }
      else
        {
          esp_footer_t *f =
            (esp_footer_t *) (lb->data + lb->current_data +
                              lb->current_length - sizeof (esp_footer_t) -
                              icv_sz);
          pad_length = f->pad_length;
          next_header = f->next_header;
        }
    }
  else
    {
      icv_sz = pd->icv_sz;
      esp_footer_t *f =
        (esp_footer_t *) (lb->data + lb->current_data + lb->current_length -
                          sizeof (esp_footer_t) - icv_sz);
      pad_length = f->pad_length;
      next_header = f->next_header;
    }

  u16 adv = pd->iv_sz + esp_sz;
  u16 tail = sizeof (esp_footer_t) + pad_length + icv_sz;
  u16 tail_orig = sizeof (esp_footer_t) + pad_length + pd->icv_sz;
  b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;

  if ((pd->flags & tun_flags) == 0 && !is_tun)	/* transport mode */
    {
      u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
        sizeof (udp_header_t) : 0;
      u16 ip_hdr_sz = pd->hdr_sz - udp_sz;
      u8 *old_ip = b->data + pd->current_data - ip_hdr_sz - udp_sz;
      u8 *ip = old_ip + adv + udp_sz;

      if (is_ip6 && ip_hdr_sz > 64)
        memmove (ip, old_ip, ip_hdr_sz);
      else
        clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);

      b->current_data = pd->current_data + adv - ip_hdr_sz;
      b->current_length += ip_hdr_sz - adv;
      esp_remove_tail (vm, b, lb, tail);

      if (is_ip6)
        {
          ip6_header_t *ip6 = (ip6_header_t *) ip;
          u16 len = clib_net_to_host_u16 (ip6->payload_length);
          len -= adv + tail_orig;
          ip6->payload_length = clib_host_to_net_u16 (len);
          ip6->protocol = next_header;
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
        }
      else
        {
          ip4_header_t *ip4 = (ip4_header_t *) ip;
          ip_csum_t sum = ip4->checksum;
          u16 len = clib_net_to_host_u16 (ip4->length);
          len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
          sum = ip_csum_update (sum, ip4->protocol, next_header,
                                ip4_header_t, protocol);
          sum = ip_csum_update (sum, ip4->length, len, ip4_header_t, length);
          ip4->checksum = ip_csum_fold (sum);
          ip4->protocol = next_header;
          ip4->length = len;
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
        }
    }
  else
    {
      if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
        {
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else if (next_header == IP_PROTOCOL_IPV6)
        {
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else
        {
          if (is_tun && next_header == IP_PROTOCOL_GRE)
            {
              gre_header_t *gre;

              b->current_data = pd->current_data + adv;
              b->current_length = pd->current_length - adv - tail;

              gre = vlib_buffer_get_current (b);

              vlib_buffer_advance (b, sizeof (*gre));

              switch (clib_net_to_host_u16 (gre->protocol))
                {
                case GRE_PROTOCOL_teb:
                  vnet_update_l2_len (b);
                  next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
                  break;
                case GRE_PROTOCOL_ip4:
                  next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
                  break;
                case GRE_PROTOCOL_ip6:
                  next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
                  break;
                default:
                  b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
                  next[0] = ESP_DECRYPT_NEXT_DROP;
                  break;
                }
            }
          else
            {
              next[0] = ESP_DECRYPT_NEXT_DROP;
              b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
              return;
            }
        }
      if (is_tun)
        {
          if (ipsec_sa_is_set_IS_PROTECT (sa0))
            {
              /*
               * There are two encap possibilities:
               * 1) the tunnel and the SA are both providing encap, i.e. it's
               *   MAC | SA-IP | TUN-IP | ESP | PAYLOAD
               * implying the SA is in tunnel mode (on a tunnel interface)
               * 2) only the tunnel provides encap
               *   MAC | TUN-IP | ESP | PAYLOAD
               * implying the SA is in transport mode.
               *
               * For 2) we need only strip the tunnel encap and we're good,
               * since the tunnel and crypto encap (in the tun-protect
               * object) are the same and we verified above that these
               * match.
               * For 1) we need to strip the SA-IP outer headers to
               * reveal the tunnel IP and then check that this matches
               * the configured tunnel.
               */
              const ipsec_tun_protect_t *itp;

              if (is_async)
                itp = ipsec_tun_protect_get (pd->protect_index);
              else
                itp =
                  ipsec_tun_protect_get (vnet_buffer (b)->
                                         ipsec.protect_index);

              if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
                {
                  const ip4_header_t *ip4;

                  ip4 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v4 (&itp->itp_tun.src,
                                                 &ip4->dst_address) ||
                      !ip46_address_is_equal_v4 (&itp->itp_tun.dst,
                                                 &ip4->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
              else if (next_header == IP_PROTOCOL_IPV6)
                {
                  const ip6_header_t *ip6;

                  ip6 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v6 (&itp->itp_tun.src,
                                                 &ip6->dst_address) ||
                      !ip46_address_is_equal_v6 (&itp->itp_tun.dst,
                                                 &ip6->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
            }
        }
    }
}

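/* Rough picture of the trims applied above for a transport-mode packet:
 *
 *   before: | IP | (UDP) | ESP | IV | payload | footer | ICV |
 *   after:  | IP' | payload |
 *
 * where adv = esp_sz + iv_sz is consumed at the front (the IP header is
 * copied forward over it) and tail = footer + padding + ICV is removed
 * from the end via esp_remove_tail(). */
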
always_inline uword
esp_decrypt_inline (vlib_main_t * vm,
                    vlib_node_runtime_t * node, vlib_frame_t * from_frame,
                    int is_ip6, int is_tun, u16 async_next)
{
  ipsec_main_t *im = &ipsec_main;
  u32 thread_index = vm->thread_index;
  u16 len;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, thread_index);
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  esp_decrypt_packet_data_t pkt_data[VLIB_FRAME_SIZE], *pd = pkt_data;
  esp_decrypt_packet_data2_t pkt_data2[VLIB_FRAME_SIZE], *pd2 = pkt_data2;
  esp_decrypt_packet_data_t cpd = { };
  u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
  const u8 esp_sz = sizeof (esp_header_t);
  ipsec_sa_t *sa0 = 0;
  vnet_crypto_op_t _op, *op = &_op;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;
  vnet_crypto_async_frame_t *async_frame = 0;
  int is_async = im->async_mode;
  vnet_crypto_async_op_id_t last_async_op = ~0;
  u16 n_async_drop = 0;

  vlib_get_buffers (vm, from, b, n_left);
  if (!is_async)
    {
      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->integ_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chained_integ_ops);
    }
  vec_reset_length (ptd->chunks);
  clib_memset_u16 (nexts, -1, n_left);

  while (n_left > 0)
    {
      u8 *payload;

      if (n_left > 2)
        {
          u8 *p;
          vlib_prefetch_buffer_header (b[2], LOAD);
          p = vlib_buffer_get_current (b[1]);
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
          p -= CLIB_CACHE_LINE_BYTES;
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
        }

      u32 n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
          next[0] = ESP_DECRYPT_NEXT_DROP;
          goto next;
        }

      if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
        {
          if (current_sa_pkts)
            vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                             current_sa_index,
                                             current_sa_pkts,
                                             current_sa_bytes);
          current_sa_bytes = current_sa_pkts = 0;

          current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
          sa0 = pool_elt_at_index (im->sad, current_sa_index);

          /* fetch the second cacheline ASAP */
          CLIB_PREFETCH (sa0->cacheline1, CLIB_CACHE_LINE_BYTES, LOAD);
          cpd.icv_sz = sa0->integ_icv_size;
          cpd.iv_sz = sa0->crypto_iv_size;
          cpd.flags = sa0->flags;
          cpd.sa_index = current_sa_index;

          /* submit the frame when op_id is different than the old one */
          if (is_async && last_async_op != sa0->crypto_async_dec_op_id)
            {
              if (async_frame && async_frame->n_elts)
                {
                  if (vnet_crypto_async_submit_open_frame (vm, async_frame))
                    esp_async_recycle_failed_submit (async_frame, b, from,
                                                     nexts, &n_async_drop,
                                                     ESP_DECRYPT_NEXT_DROP,
                                                     ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
                }
              async_frame =
                vnet_crypto_async_get_frame (vm, sa0->crypto_async_dec_op_id);
              last_async_op = sa0->crypto_async_dec_op_id;
            }
        }

      if (PREDICT_FALSE (~0 == sa0->decrypt_thread_index))
        {
          /* this is the first packet to use this SA, claim the SA
           * for this thread. this could happen simultaneously on
           * another thread */
          clib_atomic_cmp_and_swap (&sa0->decrypt_thread_index, ~0,
                                    ipsec_sa_assign_thread (thread_index));
        }

      if (PREDICT_FALSE (thread_index != sa0->decrypt_thread_index))
        {
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, ESP_DECRYPT_NEXT_HANDOFF, next);
          next[0] = ESP_DECRYPT_NEXT_HANDOFF;
          goto next;
        }

      /* store packet data for next round for easier prefetch */
      pd->sa_data = cpd.sa_data;
      pd->current_data = b[0]->current_data;
      pd->hdr_sz = pd->current_data - vnet_buffer (b[0])->l3_hdr_offset;
      payload = b[0]->data + pd->current_data;
      pd->seq = clib_host_to_net_u32 (((esp_header_t *) payload)->seq);
      pd->is_chain = 0;
      pd2->lb = b[0];
      pd2->free_buffer_index = 0;
      pd2->icv_removed = 0;

      if (n_bufs > 1)
        {
          pd->is_chain = 1;
          /* find the last buffer in the chain */
          while (pd2->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            pd2->lb = vlib_get_buffer (vm, pd2->lb->next_buffer);

          crypto_ops = &ptd->chained_crypto_ops;
          integ_ops = &ptd->chained_integ_ops;
        }

      pd->current_length = b[0]->current_length;

      /* anti-replay check */
      if (ipsec_sa_anti_replay_check (sa0, pd->seq))
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
          goto next;
        }

      if (pd->current_length < cpd.icv_sz + esp_sz + cpd.iv_sz)
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_RUNT];
          esp_set_next_index (is_async, from, nexts, from[b - bufs],
                              &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
          goto next;
        }

      len = pd->current_length - cpd.icv_sz;
      current_sa_pkts += 1;
      current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);

      if (is_async)
        {
          int ret = esp_decrypt_prepare_async_frame (vm, node, ptd,
                                                     &async_frame,
                                                     sa0, payload, len,
                                                     cpd.icv_sz,
                                                     cpd.iv_sz,
                                                     pd, pd2,
                                                     from[b - bufs],
                                                     b[0], next, async_next);
          if (PREDICT_FALSE (ret < 0))
            {
              b[0]->error =
                node->errors[ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR];
              esp_set_next_index (1, from, nexts, from[b - bufs],
                                  &n_async_drop, ESP_DECRYPT_NEXT_DROP, next);
              /* when next[0] is ESP_DECRYPT_NEXT_DROP we only have to drop
               * the current packet. Otherwise it is a frame submission
               * error and we have to drop the whole frame.
               */
              if (next[0] != ESP_DECRYPT_NEXT_DROP && async_frame->n_elts)
                esp_async_recycle_failed_submit (async_frame, b, from,
                                                 nexts, &n_async_drop,
                                                 ESP_DECRYPT_NEXT_DROP,
                                                 ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
              goto next;
            }
        }
      else
        esp_decrypt_prepare_sync_op (vm, node, ptd, &crypto_ops, &integ_ops,
                                     op, sa0, payload, len, cpd.icv_sz,
                                     cpd.iv_sz, pd, pd2, b[0], next,
                                     b - bufs);
      /* next */
    next:
      n_left -= 1;
      next += 1;
      pd += 1;
      pd2 += 1;
      b += 1;
    }

  if (PREDICT_TRUE (~0 != current_sa_index))
    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                     current_sa_index, current_sa_pkts,
                                     current_sa_bytes);

  if (is_async)
    {
      if (async_frame && async_frame->n_elts)
        {
          if (vnet_crypto_async_submit_open_frame (vm, async_frame) < 0)
            esp_async_recycle_failed_submit (async_frame, b, from, nexts,
                                             &n_async_drop,
                                             ESP_DECRYPT_NEXT_DROP,
                                             ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR);
        }

      /* no post processing in async */
      vlib_node_increment_counter (vm, node->node_index,
                                   ESP_DECRYPT_ERROR_RX_PKTS, n_left);
      if (n_async_drop)
        vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_async_drop);

      return n_left;
    }
  else
    {
      esp_process_ops (vm, node, ptd->integ_ops, bufs, nexts,
                       ESP_DECRYPT_ERROR_INTEG_ERROR);
      esp_process_chained_ops (vm, node, ptd->chained_integ_ops, bufs, nexts,
                               ptd->chunks, ESP_DECRYPT_ERROR_INTEG_ERROR);

      esp_process_ops (vm, node, ptd->crypto_ops, bufs, nexts,
                       ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
      esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, bufs, nexts,
                               ptd->chunks,
                               ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
    }

  /* Post-decryption round - adjust packet data start and length, and pick
     the next node */

  n_left = from_frame->n_vectors;
  next = nexts;
  pd = pkt_data;
  pd2 = pkt_data2;
  b = bufs;

  while (n_left)
    {
      if (n_left >= 2)
        {
          void *data = b[1]->data + pd[1].current_data;

          /* buffer metadata */
          vlib_prefetch_buffer_header (b[1], LOAD);

          /* esp_footer_t */
          CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
                         CLIB_CACHE_LINE_BYTES, LOAD);

          /* packet headers */
          CLIB_PREFETCH (data - CLIB_CACHE_LINE_BYTES,
                         CLIB_CACHE_LINE_BYTES * 2, LOAD);
        }

      /* save the sa_index as GRE_teb post_crypto changes the L2 opaque */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;

      if (next[0] >= ESP_DECRYPT_N_NEXT)
        esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], next, is_ip6,
                                 is_tun, 0);

      /* trace: */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_decrypt_trace_t *tr;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = pool_elt_at_index (im->sad, current_sa_index);
          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->last_seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      /* next */
      n_left -= 1;
      next += 1;
      pd += 1;
      pd2 += 1;
      b += 1;
    }

  n_left = from_frame->n_vectors;
  vlib_node_increment_counter (vm, node->node_index,
                               ESP_DECRYPT_ERROR_RX_PKTS, n_left);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);

  return n_left;
}

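/* The sync path makes two passes over the frame: the first pass builds the
 * integ/crypto op vectors per packet, the crypto engine processes them all
 * at once, then the second pass fixes up headers and picks next-nodes.
 * The async path ends after the first pass; post-processing happens in the
 * esp*-decrypt-post nodes below once the crypto frames complete. */
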
always_inline uword
esp_decrypt_post_inline (vlib_main_t * vm,
                         vlib_node_runtime_t * node,
                         vlib_frame_t * from_frame, int is_ip6, int is_tun)
{
  ipsec_main_t *im = &ipsec_main;
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  vlib_get_buffers (vm, from, b, n_left);

  while (n_left > 0)
    {
      esp_decrypt_packet_data_t *pd = &(esp_post_data (b[0]))->decrypt_data;

      if (n_left > 2)
        {
          vlib_prefetch_buffer_header (b[2], LOAD);
          vlib_prefetch_buffer_header (b[1], LOAD);
        }

      if (!pd->is_chain)
        esp_decrypt_post_crypto (vm, node, pd, 0, b[0], next, is_ip6, is_tun,
                                 1);
      else
        {
          esp_decrypt_packet_data2_t *pd2 = esp_post_data2 (b[0]);
          esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], next, is_ip6,
                                   is_tun, 1);
        }

      /* trace: */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          ipsec_sa_t *sa0 = pool_elt_at_index (im->sad, pd->sa_index);
          esp_decrypt_trace_t *tr;
          esp_decrypt_packet_data_t *async_pd =
            &(esp_post_data (b[0]))->decrypt_data;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = pool_elt_at_index (im->sad, async_pd->sa_index);

          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->last_seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      n_left--;
      next++;
      b++;
    }

  n_left = from_frame->n_vectors;
  vlib_node_increment_counter (vm, node->node_index,
                               ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);

  return n_left;
}

VLIB_NODE_FN (esp4_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 0,
                             esp_decrypt_async_next.esp4_post_next);
}

VLIB_NODE_FN (esp4_decrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 0);
}

VLIB_NODE_FN (esp4_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 1,
                             esp_decrypt_async_next.esp4_tun_post_next);
}

VLIB_NODE_FN (esp4_decrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 1);
}

VLIB_NODE_FN (esp6_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 0,
                             esp_decrypt_async_next.esp6_post_next);
}

VLIB_NODE_FN (esp6_decrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 0);
}

VLIB_NODE_FN (esp6_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 1,
                             esp_decrypt_async_next.esp6_tun_post_next);
}

VLIB_NODE_FN (esp6_decrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 1);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_decrypt_node) = {
  .name = "esp4-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_post_node) = {
  .name = "esp4-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt",
};

VLIB_REGISTER_NODE (esp6_decrypt_node) = {
  .name = "esp6-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_post_node) = {
  .name = "esp6-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt",
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
  .name = "esp4-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node) = {
  .name = "esp4-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt-tun",
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
  .name = "esp6-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node) = {
  .name = "esp6-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt-tun",
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */