/*
 * esp_decrypt.c : IPSec ESP decrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>
#include <vnet/l2/l2_input.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/esp.h>
#include <vnet/ipsec/ipsec_io.h>
#include <vnet/ipsec/ipsec_tun.h>

#include <vnet/gre/packet.h>

#define foreach_esp_decrypt_next                                              \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (L2_INPUT, "l2-input")                                                    \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (HANDOFF, "handoff")

#define _(v, s) ESP_DECRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_next
#undef _
    ESP_DECRYPT_N_NEXT,
} esp_decrypt_next_t;

#define foreach_esp_decrypt_post_next                                         \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (L2_INPUT, "l2-input")

#define _(v, s) ESP_DECRYPT_POST_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_post_next
#undef _
    ESP_DECRYPT_POST_N_NEXT,
} esp_decrypt_post_next_t;

#define foreach_esp_decrypt_error                                             \
  _ (RX_PKTS, "ESP pkts received")                                            \
  _ (RX_POST_PKTS, "ESP-POST pkts received")                                  \
  _ (HANDOFF, "hand-off")                                                     \
  _ (DECRYPTION_FAILED, "ESP decryption failed")                              \
  _ (INTEG_ERROR, "Integrity check failed")                                   \
  _ (CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)")             \
  _ (REPLAY, "SA replayed packet")                                            \
  _ (RUNT, "undersized packet")                                               \
  _ (NO_BUFFERS, "no buffers (packet dropped)")                               \
  _ (OVERSIZED_HEADER, "buffer with oversized header (dropped)")              \
  _ (NO_TAIL_SPACE, "not enough buffer tail space (dropped)")                 \
  _ (TUN_NO_PROTO, "no tunnel protocol")                                      \
  _ (UNSUP_PAYLOAD, "unsupported payload")

typedef enum
{
#define _(sym,str) ESP_DECRYPT_ERROR_##sym,
  foreach_esp_decrypt_error
#undef _
    ESP_DECRYPT_N_ERROR,
} esp_decrypt_error_t;

static char *esp_decrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_decrypt_error
#undef _
};

typedef struct
{
  u32 seq;
  u32 sa_seq;
  u32 sa_seq_hi;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_decrypt_trace_t;

/* packet trace format function */
static u8 *
format_esp_decrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_decrypt_trace_t *t = va_arg (*args, esp_decrypt_trace_t *);

  s =
    format (s,
            "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u",
            format_ipsec_crypto_alg, t->crypto_alg, format_ipsec_integ_alg,
            t->integ_alg, t->seq, t->sa_seq, t->sa_seq_hi);
  return s;
}

#define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)

static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}
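
/* Contract assumed above: vnet_crypto_process_ops() runs every op and
 * returns how many completed successfully, so n_fail failures remain to
 * be located by scanning op->status.  op->user_data was set to the op's
 * index into b[]/nexts[] when the op was queued, which is what lets a
 * failed op reroute exactly its own buffer to the drop next node. */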

static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks, int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (PREDICT_TRUE (n_ops == 0))
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

always_inline void
esp_remove_tail (vlib_main_t * vm, vlib_buffer_t * b, vlib_buffer_t * last,
                 u16 tail)
{
  vlib_buffer_t *before_last = b;

  if (last->current_length > tail)
    {
      last->current_length -= tail;
      return;
    }
  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = b;
      b = vlib_get_buffer (vm, b->next_buffer);
    }
  before_last->current_length -= tail - last->current_length;
  vlib_buffer_free_one (vm, before_last->next_buffer);
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
}
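
/* Worked example of the chained case above (sizes are illustrative):
 * chain b(60 bytes) -> last(4 bytes) with tail = 10.  Only 4 of the 10
 * tail bytes live in the last buffer, so the remaining 6 are trimmed
 * from before_last and the now-empty last buffer is freed and unlinked
 * from the chain. */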

/* The ICV is split across the last two buffers, so move it to the last
   buffer and return a pointer to it */
static_always_inline u8 *
esp_move_icv (vlib_main_t * vm, vlib_buffer_t * first,
              esp_decrypt_packet_data_t * pd,
              esp_decrypt_packet_data2_t * pd2, u16 icv_sz, u16 * dif)
{
  vlib_buffer_t *before_last, *bp;
  u16 last_sz = pd2->lb->current_length;
  u16 first_sz = icv_sz - last_sz;

  bp = before_last = first;
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = bp;
      bp = vlib_get_buffer (vm, bp->next_buffer);
    }

  u8 *lb_curr = vlib_buffer_get_current (pd2->lb);
  memmove (lb_curr + first_sz, lb_curr, last_sz);
  clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
                    first_sz);
  before_last->current_length -= first_sz;
  if (before_last == first)
    pd->current_length -= first_sz;
  clib_memset (vlib_buffer_get_tail (before_last), 0, first_sz);
  if (dif)
    dif[0] = first_sz;
  pd2->lb = before_last;
  pd2->icv_removed = 1;
  pd2->free_buffer_index = before_last->next_buffer;
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return lb_curr;
}
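
/* Illustrative layout for the move above, assuming icv_sz = 16 and a
 * 6-byte last buffer: first_sz = 10 ICV bytes end before_last and
 * last_sz = 6 start lb.  lb's 6 bytes are shifted right by 10, the 10
 * bytes are copied in front of them, before_last shrinks by 10, and lb
 * (now holding the whole ICV) is unlinked; before_last becomes the new
 * pd2->lb. */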

static_always_inline i16
esp_insert_esn (vlib_main_t * vm, ipsec_sa_t * sa,
                esp_decrypt_packet_data2_t * pd2, u32 * data_len,
                u8 ** digest, u16 * len, vlib_buffer_t * b, u8 * payload)
{
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 0;

  /* shift ICV by 4 bytes to insert ESN */
  u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
  u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa->seq_hi);

  if (pd2->icv_removed)
    {
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi, sz);
          *data_len += sz;
        }
      else
        return sz;

      len[0] = b->current_length;
    }
  else
    {
      clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
      clib_memcpy_fast (payload + len[0], &seq_hi, sz);
      clib_memcpy_fast (payload + len[0] + sz, tmp, ESP_MAX_ICV_SIZE);
      *data_len += sz;
      *digest += sz;
    }
  return sz;
}
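
/* With extended sequence numbers (per RFC 4303) the high 32 bits of the
 * 64-bit sequence number are never transmitted but are covered by the
 * ICV, so seq_hi must be appended after the ciphertext (shifting the ICV
 * out of the way) before the integrity check can run. */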

static_always_inline u8 *
esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
                  esp_decrypt_packet_data_t * pd,
                  esp_decrypt_packet_data2_t * pd2, u16 icv_sz,
                  ipsec_sa_t * sa, u8 * extra_esn, u32 * len)
{
  u16 dif = 0;
  u8 *digest = esp_move_icv (vm, first, pd, pd2, icv_sz, &dif);
  if (dif)
    *len -= dif;

  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u8 sz = sizeof (sa->seq_hi);
      u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);

      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi, sz);
          *len += sz;
        }
      else
        {
          /* no space for ESN at the tail, use the next buffer
           * (with ICV data) */
          ASSERT (pd2->icv_removed);
          vlib_buffer_t *tmp = vlib_get_buffer (vm, pd2->free_buffer_index);
          clib_memcpy_fast (vlib_buffer_get_current (tmp) - sz, &seq_hi, sz);
          extra_esn[0] = 1;
        }
    }
  return digest;
}

static_always_inline int
esp_decrypt_chain_integ (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                         esp_decrypt_packet_data2_t * pd2,
                         ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
                         u8 * start_src, u32 start_len,
                         u8 ** digest, u16 * n_ch, u32 * integ_total_len)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = vlib_get_buffer (vm, b->next_buffer);
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start_src;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (pd2->icv_removed)
            ch->len = cb->current_length;
          else
            ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_host_to_net_u32 (sa0->seq_hi);
              u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa0->seq_hi);
              u8 *esn;
              vlib_buffer_t *tmp_b;
              u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
              if (space_left < sz)
                {
                  if (pd2->icv_removed)
                    {
                      /* use pre-data area from the last buffer
                         that was removed from the chain */
                      tmp_b = vlib_get_buffer (vm, pd2->free_buffer_index);
                      esn = tmp_b->data - sz;
                    }
                  else
                    {
                      /* no space, need to allocate new buffer */
                      u32 tmp_bi = 0;
                      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
                        return -1;
                      tmp_b = vlib_get_buffer (vm, tmp_bi);
                      esn = tmp_b->data;
                      pd2->free_buffer_index = tmp_bi;
                    }
                  clib_memcpy_fast (esn, &seq_hi, sz);

                  vec_add2 (ptd->chunks, ch, 1);
                  n_chunks += 1;
                  ch->src = esn;
                  ch->len = sz;
                }
              else
                {
                  if (pd2->icv_removed)
                    {
                      clib_memcpy_fast (vlib_buffer_get_tail
                                        (pd2->lb), &seq_hi, sz);
                    }
                  else
                    {
                      clib_memcpy_fast (tmp, *digest, ESP_MAX_ICV_SIZE);
                      clib_memcpy_fast (*digest, &seq_hi, sz);
                      clib_memcpy_fast (*digest + sz, tmp, ESP_MAX_ICV_SIZE);
                      *digest += sz;
                    }
                  ch->len += sz;
                }
            }
          total_len += ch->len;
          break;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;
  if (integ_total_len)
    *integ_total_len = total_len;

  return 0;
}
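
/* The chunks appended above form one scatter-gather list for the integ
 * op: chunk 0 is start_src/start_len (ESP header, IV and the first
 * buffer's payload), then one chunk per chained buffer with the last
 * trimmed of the ICV, plus an optional trailing 4-byte chunk holding
 * seq_hi when ESN is in use and the tail had no room for it. */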

static_always_inline u32
esp_decrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          esp_decrypt_packet_data_t * pd,
                          esp_decrypt_packet_data2_t * pd2,
                          ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
                          u8 * start, u32 start_len, u8 ** tag, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);
  n_chunks = 1;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = ch->dst = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              if (pd2->lb->current_length < icv_sz)
                {
                  u16 dif = 0;
                  *tag = esp_move_icv (vm, b, pd, pd2, icv_sz, &dif);

                  /* this chunk does not contain crypto data */
                  n_chunks -= 1;
                  /* and fix previous chunk's length as it might have
                     been changed */
                  ASSERT (n_chunks > 0);
                  if (pd2->lb == b)
                    {
                      total_len -= dif;
                      ch[-1].len -= dif;
                    }
                  else
                    {
                      total_len = total_len + pd2->lb->current_length -
                        ch[-1].len;
                      ch[-1].len = pd2->lb->current_length;
                    }
                  break;
                }
              else
                *tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;
            }

          if (pd2->icv_removed)
            total_len += ch->len = cb->current_length;
          else
            total_len += ch->len = cb->current_length - icv_sz;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}
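
/* Unlike the integ chain, the crypto chunks are in-place (src == dst)
 * and never include the ESN: only ciphertext is handed to the cipher,
 * while for AEAD the ICV doubles as the tag and is located (or
 * reassembled via esp_move_icv) rather than added as a chunk. */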

static_always_inline void
esp_decrypt_prepare_sync_op (vlib_main_t * vm, vlib_node_runtime_t * node,
                             ipsec_per_thread_data_t * ptd,
                             vnet_crypto_op_t *** crypto_ops,
                             vnet_crypto_op_t *** integ_ops,
                             vnet_crypto_op_t * op,
                             ipsec_sa_t * sa0, u8 * payload,
                             u16 len, u8 icv_sz, u8 iv_sz,
                             esp_decrypt_packet_data_t * pd,
                             esp_decrypt_packet_data2_t * pd2,
                             vlib_buffer_t * b, u16 * next, u32 index)
{
  const u8 esp_sz = sizeof (esp_header_t);

  if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
    {
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->key_index = sa0->integ_key_index;
      op->src = payload;
      op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
      op->user_data = index;
      op->digest = payload + len;
      op->digest_len = icv_sz;
      op->len = len;

      if (pd->is_chain)
        {
          /* buffer is chained */
          op->len = pd->current_length;

          /* special case when ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that ESN needs to be added after encrypted data and may or
           * may not fit in the tail.*/
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              op->digest =
                esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
                                  &extra_esn, &op->len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  op->len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      *integ_ops = &ptd->integ_ops;
                      *crypto_ops = &ptd->crypto_ops;
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            op->digest = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          if (esp_decrypt_chain_integ (vm, ptd, pd2, sa0, b, icv_sz,
                                       payload, pd->current_length,
                                       &op->digest, &op->n_chunks, 0) < 0)
            {
              b->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
              next[0] = ESP_DECRYPT_NEXT_DROP;
              return;
            }
        }
      else
        esp_insert_esn (vm, sa0, pd2, &op->len, &op->digest, &len, b,
                        payload);
    out:
      vec_add_aligned (*(integ_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }

  payload += esp_sz;
  len -= esp_sz;

  if (sa0->crypto_dec_op_id != VNET_CRYPTO_OP_NONE)
    {
      vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
      op->key_index = sa0->crypto_key_index;
      op->iv = payload;

      if (ipsec_sa_is_set_IS_CTR (sa0))
        {
          /* construct nonce in a scratch space in front of the IP header */
          esp_ctr_nonce_t *nonce =
            (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz -
                                 sizeof (*nonce));
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              /* construct aad in a scratch space in front of the nonce */
              esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
              op->aad = (u8 *) nonce - sizeof (esp_aead_t);
              op->aad_len = esp_aad_fill (op->aad, esp0, sa0);
              op->tag = payload + len;
              op->tag_len = 16;
            }
          else
            {
              nonce->ctr = clib_host_to_net_u32 (1);
            }
          nonce->salt = sa0->salt;
          ASSERT (sizeof (u64) == iv_sz);
          nonce->iv = *(u64 *) op->iv;
          op->iv = (u8 *) nonce;
        }
      op->src = op->dst = payload += iv_sz;
      op->len = len - iv_sz;
      op->user_data = index;

      if (pd->is_chain && (pd2->lb != b))
        {
          /* buffer is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                    payload, len - pd->iv_sz + pd->icv_sz,
                                    &op->tag, &op->n_chunks);
        }

      vec_add_aligned (*(crypto_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }
}
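
/* Scratch layout assumed by the CTR/AEAD setup above, built in the
 * buffer headroom that used to hold the IP/UDP headers:
 *   ... | aad (esp_aead_t) | nonce (esp_ctr_nonce_t: salt, iv, ctr) | ESP ...
 * The 8-byte on-wire IV is folded into the nonce, so the op's iv pointer
 * refers to the constructed nonce rather than into the packet itself. */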

static_always_inline esp_decrypt_error_t
esp_decrypt_prepare_async_frame (vlib_main_t *vm, vlib_node_runtime_t *node,
                                 ipsec_per_thread_data_t *ptd,
                                 vnet_crypto_async_frame_t *f, ipsec_sa_t *sa0,
                                 u8 *payload, u16 len, u8 icv_sz, u8 iv_sz,
                                 esp_decrypt_packet_data_t *pd,
                                 esp_decrypt_packet_data2_t *pd2, u32 bi,
                                 vlib_buffer_t *b, u16 *next, u16 async_next)
{
  const u8 esp_sz = sizeof (esp_header_t);
  u32 current_protect_index = vnet_buffer (b)->ipsec.protect_index;
  esp_decrypt_packet_data_t *async_pd = &(esp_post_data (b))->decrypt_data;
  esp_decrypt_packet_data2_t *async_pd2 = esp_post_data2 (b);
  u8 *tag = payload + len, *iv = payload + esp_sz, *aad = 0;
  u32 key_index;
  u32 crypto_len, integ_len = 0;
  i16 crypto_start_offset, integ_start_offset = 0;
  u8 flags = 0;

  if (!ipsec_sa_is_set_IS_AEAD (sa0))
    {
      /* linked algs */
      key_index = sa0->linked_key_index;
      integ_start_offset = payload - b->data;
      integ_len = len;

      if (pd->is_chain)
        {
          /* buffer is chained */
          integ_len = pd->current_length;

          /* special case when ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that ESN needs to be added after encrypted data and may or
           * may not fit in the tail.*/
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              tag = esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
                                      &extra_esn, &integ_len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  integ_len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          if (esp_decrypt_chain_integ (vm, ptd, pd2, sa0, b, icv_sz, payload,
                                       pd->current_length, &tag,
                                       0, &integ_len) < 0)
            {
              /* allocate buffer failed, will not add to frame and drop */
              return (ESP_DECRYPT_ERROR_NO_BUFFERS);
            }
        }
      else
        esp_insert_esn (vm, sa0, pd2, &integ_len, &tag, &len, b, payload);
    }
  else
    key_index = sa0->crypto_key_index;

out:
  /* crypto */
  payload += esp_sz;
  len -= esp_sz;
  iv = payload;

  if (ipsec_sa_is_set_IS_CTR (sa0))
    {
      /* construct nonce in a scratch space in front of the IP header */
      esp_ctr_nonce_t *nonce =
        (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz - sizeof (*nonce));
      if (ipsec_sa_is_set_IS_AEAD (sa0))
        {
          /* construct aad in a scratch space in front of the nonce */
          esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
          aad = (u8 *) nonce - sizeof (esp_aead_t);
          esp_aad_fill (aad, esp0, sa0);
          tag = payload + len;
        }
      else
        {
          nonce->ctr = clib_host_to_net_u32 (1);
        }
      nonce->salt = sa0->salt;
      ASSERT (sizeof (u64) == iv_sz);
      nonce->iv = *(u64 *) iv;
      iv = (u8 *) nonce;
    }

  crypto_start_offset = (payload += iv_sz) - b->data;
  crypto_len = len - iv_sz;

  if (pd->is_chain && (pd2->lb != b))
    {
      /* buffer is chained */
      flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;

      crypto_len = esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                             payload,
                                             len - pd->iv_sz + pd->icv_sz,
                                             &tag, 0);
    }

  *async_pd = *pd;
  *async_pd2 = *pd2;
  pd->protect_index = current_protect_index;

  /* for AEAD integ_len - crypto_len will be negative, it is ok since it
   * is ignored by the engine. */
  vnet_crypto_async_add_to_frame (
    vm, f, key_index, crypto_len, integ_len - crypto_len, crypto_start_offset,
    integ_start_offset, bi, async_next, iv, tag, aad, flags);

  return (ESP_DECRYPT_ERROR_RX_PKTS);
}
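
/* Nothing is decrypted synchronously above: per-packet offsets and
 * lengths are only appended to the shared async frame, while pd/pd2 are
 * snapshotted into the buffer's post data so esp_decrypt_post_crypto()
 * can finish the packet once the crypto engine completes the frame. */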

static_always_inline void
esp_decrypt_post_crypto (vlib_main_t * vm, vlib_node_runtime_t * node,
                         esp_decrypt_packet_data_t * pd,
                         esp_decrypt_packet_data2_t * pd2, vlib_buffer_t * b,
                         u16 * next, int is_ip6, int is_tun, int is_async)
{
  ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
  vlib_buffer_t *lb = b;
  const u8 esp_sz = sizeof (esp_header_t);
  const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
  u8 pad_length = 0, next_header = 0;
  u16 icv_sz;

  /*
   * redo the anti-replay check.
   * in this frame say we have sequence numbers, s, s+1, s+1, s+1
   * and s and s+1 are in the window. When we did the anti-replay
   * check above we did so against the state of the window (W),
   * after packet s-1. So each of the packets in the sequence will be
   * accepted.
   * This time s will be checked against Ws-1, s+1 checked against Ws
   * (i.e. the window state is updated/advanced)
   * so this time the successive s+1 packet will be dropped.
   * This is a consequence of batching the decrypts. If the
   * check-decrypt-advance process was done for each packet it would
   * be fine. But we batch the decrypts because it's much more efficient
   * to do so in SW and if we offload to HW and the process is async.
   *
   * You're probably thinking, but this means an attacker can send the
   * above sequence and cause VPP to perform decrypts that will fail,
   * and that's true. But if the attacker can determine s (a valid
   * sequence number in the window) which is non-trivial, it can generate
   * a sequence s, s+1, s+2, s+3, ... s+n and nothing will prevent any
   * implementation, sequential or batching, from decrypting these.
   */
  if (ipsec_sa_anti_replay_check (sa0, pd->seq))
    {
      b->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
      next[0] = ESP_DECRYPT_NEXT_DROP;
      return;
    }

  ipsec_sa_anti_replay_advance (sa0, pd->seq);

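  /* Illustrative trace of the re-check, assuming a batch [s, s+1, s+1, s+1]
   * with s and s+1 inside the window: the first s and s+1 pass and advance
   * the window here, so the duplicate s+1 packets fail the re-check and
   * are dropped, exactly as a sequential implementation would have dropped
   * them. */
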
786 if (pd->is_chain)
787 {
788 lb = pd2->lb;
789 icv_sz = pd2->icv_removed ? 0 : pd->icv_sz;
790 if (pd2->free_buffer_index)
791 {
792 vlib_buffer_free_one (vm, pd2->free_buffer_index);
793 lb->next_buffer = 0;
794 }
795 if (lb->current_length < sizeof (esp_footer_t) + icv_sz)
796 {
797 /* esp footer is either splitted in two buffers or in the before
798 * last buffer */
799
800 vlib_buffer_t *before_last = b, *bp = b;
801 while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
802 {
803 before_last = bp;
804 bp = vlib_get_buffer (vm, bp->next_buffer);
805 }
806 u8 *bt = vlib_buffer_get_tail (before_last);
807
808 if (lb->current_length == icv_sz)
809 {
810 esp_footer_t *f = (esp_footer_t *) (bt - sizeof (*f));
811 pad_length = f->pad_length;
812 next_header = f->next_header;
813 }
814 else
815 {
816 pad_length = (bt - 1)[0];
817 next_header = ((u8 *) vlib_buffer_get_current (lb))[0];
818 }
819 }
820 else
821 {
822 esp_footer_t *f =
823 (esp_footer_t *) (lb->data + lb->current_data +
824 lb->current_length - sizeof (esp_footer_t) -
825 icv_sz);
826 pad_length = f->pad_length;
827 next_header = f->next_header;
828 }
829 }
830 else
831 {
832 icv_sz = pd->icv_sz;
833 esp_footer_t *f =
834 (esp_footer_t *) (lb->data + lb->current_data + lb->current_length -
835 sizeof (esp_footer_t) - icv_sz);
836 pad_length = f->pad_length;
837 next_header = f->next_header;
838 }
839
840 u16 adv = pd->iv_sz + esp_sz;
841 u16 tail = sizeof (esp_footer_t) + pad_length + icv_sz;
842 u16 tail_orig = sizeof (esp_footer_t) + pad_length + pd->icv_sz;
843 b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
844
  if ((pd->flags & tun_flags) == 0 && !is_tun)	/* transport mode */
    {
      u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
        sizeof (udp_header_t) : 0;
      u16 ip_hdr_sz = pd->hdr_sz - udp_sz;
      u8 *old_ip = b->data + pd->current_data - ip_hdr_sz - udp_sz;
      u8 *ip = old_ip + adv + udp_sz;

      if (is_ip6 && ip_hdr_sz > 64)
        memmove (ip, old_ip, ip_hdr_sz);
      else
        clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);

      b->current_data = pd->current_data + adv - ip_hdr_sz;
      b->current_length += ip_hdr_sz - adv;
      esp_remove_tail (vm, b, lb, tail);

      if (is_ip6)
        {
          ip6_header_t *ip6 = (ip6_header_t *) ip;
          u16 len = clib_net_to_host_u16 (ip6->payload_length);
          len -= adv + tail_orig;
          ip6->payload_length = clib_host_to_net_u16 (len);
          ip6->protocol = next_header;
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
        }
      else
        {
          ip4_header_t *ip4 = (ip4_header_t *) ip;
          ip_csum_t sum = ip4->checksum;
          u16 len = clib_net_to_host_u16 (ip4->length);
          len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
          sum = ip_csum_update (sum, ip4->protocol, next_header,
                                ip4_header_t, protocol);
          sum = ip_csum_update (sum, ip4->length, len, ip4_header_t, length);
          ip4->checksum = ip_csum_fold (sum);
          ip4->protocol = next_header;
          ip4->length = len;
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
        }
    }
  else
    {
      if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
        {
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else if (next_header == IP_PROTOCOL_IPV6)
        {
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else if (next_header == IP_PROTOCOL_MPLS_IN_IP)
        {
          next[0] = ESP_DECRYPT_NEXT_MPLS_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else
        {
          if (is_tun && next_header == IP_PROTOCOL_GRE)
            {
              gre_header_t *gre;

              b->current_data = pd->current_data + adv;
              b->current_length = pd->current_length - adv - tail;

              gre = vlib_buffer_get_current (b);

              vlib_buffer_advance (b, sizeof (*gre));

              switch (clib_net_to_host_u16 (gre->protocol))
                {
                case GRE_PROTOCOL_teb:
                  vnet_update_l2_len (b);
                  next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
                  break;
                case GRE_PROTOCOL_ip4:
                  next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
                  break;
                case GRE_PROTOCOL_ip6:
                  next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
                  break;
                default:
                  b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
                  next[0] = ESP_DECRYPT_NEXT_DROP;
                  break;
                }
            }
          else
            {
              next[0] = ESP_DECRYPT_NEXT_DROP;
              b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
              return;
            }
        }
      if (is_tun)
        {
          if (ipsec_sa_is_set_IS_PROTECT (sa0))
            {
              /*
               * There are two encap possibilities
               * 1) the tunnel and the SA are providing encap, i.e. it's
               *   MAC | SA-IP | TUN-IP | ESP | PAYLOAD
               * implying the SA is in tunnel mode (on a tunnel interface)
               * 2) only the tunnel provides encap
               *   MAC | TUN-IP | ESP | PAYLOAD
               * implying the SA is in transport mode.
               *
               * For 2) we need only strip the tunnel encap and we're good,
               *  since the tunnel and crypto encap (in the tun-protect
               *  object) are the same and we verified above that these match.
               * for 1) we need to strip the SA-IP outer headers, to
               *  reveal the tunnel IP and then check that this matches
               *  the configured tunnel.
               */
              const ipsec_tun_protect_t *itp;

              if (is_async)
                itp = ipsec_tun_protect_get (pd->protect_index);
              else
                itp =
                  ipsec_tun_protect_get (vnet_buffer (b)->
                                         ipsec.protect_index);

              if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
                {
                  const ip4_header_t *ip4;

                  ip4 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v4 (&itp->itp_tun.src,
                                                 &ip4->dst_address) ||
                      !ip46_address_is_equal_v4 (&itp->itp_tun.dst,
                                                 &ip4->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
              else if (next_header == IP_PROTOCOL_IPV6)
                {
                  const ip6_header_t *ip6;

                  ip6 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v6 (&itp->itp_tun.src,
                                                 &ip6->dst_address) ||
                      !ip46_address_is_equal_v6 (&itp->itp_tun.dst,
                                                 &ip6->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
            }
        }
    }
}
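
/* After decryption each packet ends in exactly one of three classes used
 * by esp_decrypt_inline() below: "sync" (decrypted inline and enqueued to
 * its next node), "async" (queued into a crypto frame and finished later
 * in the post node) or "noop" (dropped or handed off without crypto). */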

always_inline uword
esp_decrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
                    vlib_frame_t *from_frame, int is_ip6, int is_tun,
                    u16 async_next_node)
{
  ipsec_main_t *im = &ipsec_main;
  u32 thread_index = vm->thread_index;
  u16 len;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, thread_index);
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  vlib_buffer_t *sync_bufs[VLIB_FRAME_SIZE];
  u16 sync_nexts[VLIB_FRAME_SIZE], *sync_next = sync_nexts, n_sync = 0;
  u16 async_nexts[VLIB_FRAME_SIZE], *async_next = async_nexts, n_async = 0;
  u16 noop_nexts[VLIB_FRAME_SIZE], *noop_next = noop_nexts, n_noop = 0;
  u32 sync_bi[VLIB_FRAME_SIZE];
  u32 noop_bi[VLIB_FRAME_SIZE];
  esp_decrypt_packet_data_t pkt_data[VLIB_FRAME_SIZE], *pd = pkt_data;
  esp_decrypt_packet_data2_t pkt_data2[VLIB_FRAME_SIZE], *pd2 = pkt_data2;
  esp_decrypt_packet_data_t cpd = { };
  u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
  const u8 esp_sz = sizeof (esp_header_t);
  ipsec_sa_t *sa0 = 0;
  vnet_crypto_op_t _op, *op = &_op;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;
  int is_async = im->async_mode;
  vnet_crypto_async_op_id_t async_op = ~0;
  vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
  esp_decrypt_error_t err;

  vlib_get_buffers (vm, from, b, n_left);
  if (!is_async)
    {
      vec_reset_length (ptd->crypto_ops);
      vec_reset_length (ptd->integ_ops);
      vec_reset_length (ptd->chained_crypto_ops);
      vec_reset_length (ptd->chained_integ_ops);
    }
  vec_reset_length (ptd->async_frames);
  vec_reset_length (ptd->chunks);
  clib_memset (sync_nexts, -1, sizeof (sync_nexts));
  clib_memset (async_frames, 0, sizeof (async_frames));

  while (n_left > 0)
    {
      u8 *payload;

      err = ESP_DECRYPT_ERROR_RX_PKTS;
      if (n_left > 2)
        {
          u8 *p;
          vlib_prefetch_buffer_header (b[2], LOAD);
          p = vlib_buffer_get_current (b[1]);
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
          p -= CLIB_CACHE_LINE_BYTES;
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
        }

      u32 n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          err = ESP_DECRYPT_ERROR_NO_BUFFERS;
          esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
                              ESP_DECRYPT_NEXT_DROP);
          goto next;
        }

      if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
        {
          if (current_sa_pkts)
            vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                             current_sa_index,
                                             current_sa_pkts,
                                             current_sa_bytes);
          current_sa_bytes = current_sa_pkts = 0;

          current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
          sa0 = ipsec_sa_get (current_sa_index);

          /* fetch the second cacheline ASAP */
          CLIB_PREFETCH (sa0->cacheline1, CLIB_CACHE_LINE_BYTES, LOAD);
          cpd.icv_sz = sa0->integ_icv_size;
          cpd.iv_sz = sa0->crypto_iv_size;
          cpd.flags = sa0->flags;
          cpd.sa_index = current_sa_index;
          is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
        }

      if (is_async)
        {
          async_op = sa0->crypto_async_dec_op_id;

          /* get a frame for this op if we don't yet have one or it's full
           */
          if (NULL == async_frames[async_op] ||
              vnet_crypto_async_frame_is_full (async_frames[async_op]))
            {
              async_frames[async_op] =
                vnet_crypto_async_get_frame (vm, async_op);
              /* Save the frame to the list we'll submit at the end */
              vec_add1 (ptd->async_frames, async_frames[async_op]);
            }
        }

      if (PREDICT_FALSE (~0 == sa0->thread_index))
        {
          /* this is the first packet to use this SA, claim the SA
           * for this thread. this could happen simultaneously on
           * another thread */
          clib_atomic_cmp_and_swap (&sa0->thread_index, ~0,
                                    ipsec_sa_assign_thread (thread_index));
        }

      if (PREDICT_FALSE (thread_index != sa0->thread_index))
        {
          vnet_buffer (b[0])->ipsec.thread_index = sa0->thread_index;
          err = ESP_DECRYPT_ERROR_HANDOFF;
          esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
                              ESP_DECRYPT_NEXT_HANDOFF);
          goto next;
        }

      /* store packet data for next round for easier prefetch */
      pd->sa_data = cpd.sa_data;
      pd->current_data = b[0]->current_data;
      pd->hdr_sz = pd->current_data - vnet_buffer (b[0])->l3_hdr_offset;
      payload = b[0]->data + pd->current_data;
      pd->seq = clib_host_to_net_u32 (((esp_header_t *) payload)->seq);
      pd->is_chain = 0;
      pd2->lb = b[0];
      pd2->free_buffer_index = 0;
      pd2->icv_removed = 0;

      if (n_bufs > 1)
        {
          pd->is_chain = 1;
          /* find last buffer in the chain */
          while (pd2->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            pd2->lb = vlib_get_buffer (vm, pd2->lb->next_buffer);

          crypto_ops = &ptd->chained_crypto_ops;
          integ_ops = &ptd->chained_integ_ops;
        }

      pd->current_length = b[0]->current_length;

      /* anti-replay check */
      if (ipsec_sa_anti_replay_check (sa0, pd->seq))
        {
          err = ESP_DECRYPT_ERROR_REPLAY;
          esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
                              ESP_DECRYPT_NEXT_DROP);
          goto next;
        }

      if (pd->current_length < cpd.icv_sz + esp_sz + cpd.iv_sz)
        {
          err = ESP_DECRYPT_ERROR_RUNT;
          esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
                              ESP_DECRYPT_NEXT_DROP);
          goto next;
        }

      len = pd->current_length - cpd.icv_sz;
      current_sa_pkts += 1;
      current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);

      if (is_async)
        {
          err = esp_decrypt_prepare_async_frame (
            vm, node, ptd, async_frames[async_op], sa0, payload, len,
            cpd.icv_sz, cpd.iv_sz, pd, pd2, from[b - bufs], b[0], async_next,
            async_next_node);
          if (ESP_DECRYPT_ERROR_RX_PKTS != err)
            {
              esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
                                  ESP_DECRYPT_NEXT_DROP);
            }
        }
      else
        esp_decrypt_prepare_sync_op (
          vm, node, ptd, &crypto_ops, &integ_ops, op, sa0, payload, len,
          cpd.icv_sz, cpd.iv_sz, pd, pd2, b[0], sync_next, b - bufs);
      /* next */
    next:
      if (ESP_DECRYPT_ERROR_RX_PKTS != err)
        {
          noop_bi[n_noop] = from[b - bufs];
          n_noop++;
          noop_next++;
        }
      else if (!is_async)
        {
          sync_bi[n_sync] = from[b - bufs];
          sync_bufs[n_sync] = b[0];
          n_sync++;
          sync_next++;
          pd += 1;
          pd2 += 1;
        }
      else
        {
          n_async++;
          async_next++;
        }
      n_left -= 1;
      b += 1;
    }

  if (PREDICT_TRUE (~0 != current_sa_index))
    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                     current_sa_index, current_sa_pkts,
                                     current_sa_bytes);

  if (n_async)
    {
      /* submit all of the open frames */
      vnet_crypto_async_frame_t **async_frame;

      vec_foreach (async_frame, ptd->async_frames)
        {
          if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
            {
              n_noop += esp_async_recycle_failed_submit (
                vm, *async_frame, node, ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR,
                n_sync, noop_bi, noop_nexts, ESP_DECRYPT_NEXT_DROP);
              vnet_crypto_async_reset_frame (*async_frame);
              vnet_crypto_async_free_frame (vm, *async_frame);
            }
        }
    }

  if (n_sync)
    {
      esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
                       ESP_DECRYPT_ERROR_INTEG_ERROR);
      esp_process_chained_ops (vm, node, ptd->chained_integ_ops, sync_bufs,
                               sync_nexts, ptd->chunks,
                               ESP_DECRYPT_ERROR_INTEG_ERROR);

      esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
                       ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
      esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, sync_bufs,
                               sync_nexts, ptd->chunks,
                               ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
    }

  /* Post decryption round - adjust packet data start and length and next
     node */

  n_left = n_sync;
  sync_next = sync_nexts;
  pd = pkt_data;
  pd2 = pkt_data2;
  b = sync_bufs;

  while (n_left)
    {
      if (n_left >= 2)
        {
          void *data = b[1]->data + pd[1].current_data;

          /* buffer metadata */
          vlib_prefetch_buffer_header (b[1], LOAD);

          /* esp_footer_t */
          CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
                         CLIB_CACHE_LINE_BYTES, LOAD);

          /* packet headers */
          CLIB_PREFETCH (data - CLIB_CACHE_LINE_BYTES,
                         CLIB_CACHE_LINE_BYTES * 2, LOAD);
        }

      /* save the sa_index as GRE_teb post_crypto changes L2 opaque */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;

      if (sync_next[0] >= ESP_DECRYPT_N_NEXT)
        esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], sync_next, is_ip6,
                                 is_tun, 0);

      /* trace: */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_decrypt_trace_t *tr;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = ipsec_sa_get (current_sa_index);
          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->last_seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      /* next */
      n_left -= 1;
      sync_next += 1;
      pd += 1;
      pd2 += 1;
      b += 1;
    }

  vlib_node_increment_counter (vm, node->node_index, ESP_DECRYPT_ERROR_RX_PKTS,
                               from_frame->n_vectors);

  if (n_sync)
    vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);

  if (n_noop)
    vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);

  return (from_frame->n_vectors);
}

always_inline uword
esp_decrypt_post_inline (vlib_main_t * vm,
                         vlib_node_runtime_t * node,
                         vlib_frame_t * from_frame, int is_ip6, int is_tun)
{
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  vlib_get_buffers (vm, from, b, n_left);

  while (n_left > 0)
    {
      esp_decrypt_packet_data_t *pd = &(esp_post_data (b[0]))->decrypt_data;

      if (n_left > 2)
        {
          vlib_prefetch_buffer_header (b[2], LOAD);
          vlib_prefetch_buffer_header (b[1], LOAD);
        }

      if (!pd->is_chain)
        esp_decrypt_post_crypto (vm, node, pd, 0, b[0], next, is_ip6, is_tun,
                                 1);
      else
        {
          esp_decrypt_packet_data2_t *pd2 = esp_post_data2 (b[0]);
          esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], next, is_ip6,
                                   is_tun, 1);
        }

      /* trace: */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
          esp_decrypt_trace_t *tr;
          esp_decrypt_packet_data_t *async_pd =
            &(esp_post_data (b[0]))->decrypt_data;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = ipsec_sa_get (async_pd->sa_index);

          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->last_seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      n_left--;
      next++;
      b++;
    }

  n_left = from_frame->n_vectors;
  vlib_node_increment_counter (vm, node->node_index,
                               ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);

  return n_left;
}

VLIB_NODE_FN (esp4_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 0,
                             esp_decrypt_async_next.esp4_post_next);
}

VLIB_NODE_FN (esp4_decrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 0);
}

VLIB_NODE_FN (esp4_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 1,
                             esp_decrypt_async_next.esp4_tun_post_next);
}

VLIB_NODE_FN (esp4_decrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 1);
}

VLIB_NODE_FN (esp6_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 0,
                             esp_decrypt_async_next.esp6_post_next);
}

VLIB_NODE_FN (esp6_decrypt_post_node) (vlib_main_t * vm,
                                       vlib_node_runtime_t * node,
                                       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 0);
}

VLIB_NODE_FN (esp6_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 1,
                             esp_decrypt_async_next.esp6_tun_post_next);
}

VLIB_NODE_FN (esp6_decrypt_tun_post_node) (vlib_main_t * vm,
                                           vlib_node_runtime_t * node,
                                           vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 1);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_decrypt_node) = {
  .name = "esp4-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_post_node) = {
  .name = "esp4-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt",
};

VLIB_REGISTER_NODE (esp6_decrypt_node) = {
  .name = "esp6-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF]= "esp6-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_post_node) = {
  .name = "esp6-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt",
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
  .name = "esp4-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node) = {
  .name = "esp4-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt-tun",
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
  .name = "esp6-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF]= "esp6-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node) = {
  .name = "esp6-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt-tun",
};
1576
Neale Ranns2d498302021-02-25 08:38:58 +00001577#ifndef CLIB_MARCH_VARIANT
1578
1579static clib_error_t *
1580esp_decrypt_init (vlib_main_t *vm)
1581{
1582 ipsec_main_t *im = &ipsec_main;
1583
1584 im->esp4_dec_fq_index =
1585 vlib_frame_queue_main_init (esp4_decrypt_node.index, 0);
1586 im->esp6_dec_fq_index =
1587 vlib_frame_queue_main_init (esp6_decrypt_node.index, 0);
1588 im->esp4_dec_tun_fq_index =
1589 vlib_frame_queue_main_init (esp4_decrypt_tun_node.index, 0);
1590 im->esp6_dec_tun_fq_index =
1591 vlib_frame_queue_main_init (esp6_decrypt_tun_node.index, 0);
1592
1593 return 0;
1594}
1595
1596VLIB_INIT_FUNCTION (esp_decrypt_init);
1597
1598#endif

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */