/*
 * esp_decrypt.c : IPSec ESP decrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>
#include <vnet/l2/l2_input.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/esp.h>
#include <vnet/ipsec/ipsec_io.h>
#include <vnet/ipsec/ipsec_tun.h>

#include <vnet/gre/packet.h>

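/*
 * ESP decrypt node: packets are integrity-checked and decrypted in
 * batches, either synchronously through vnet_crypto ops or
 * asynchronously through crypto frames that re-enter via the *-post
 * nodes, and are then dispatched to one of the next nodes below.
 */
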
#define foreach_esp_decrypt_next                                              \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (L2_INPUT, "l2-input")                                                    \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (HANDOFF, "handoff")

#define _(v, s) ESP_DECRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_next
#undef _
    ESP_DECRYPT_N_NEXT,
} esp_decrypt_next_t;

#define foreach_esp_decrypt_post_next                                         \
  _ (DROP, "error-drop")                                                      \
  _ (IP4_INPUT, "ip4-input-no-checksum")                                      \
  _ (IP6_INPUT, "ip6-input")                                                  \
  _ (MPLS_INPUT, "mpls-input")                                                \
  _ (L2_INPUT, "l2-input")

#define _(v, s) ESP_DECRYPT_POST_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_post_next
#undef _
    ESP_DECRYPT_POST_N_NEXT,
} esp_decrypt_post_next_t;

#define foreach_esp_decrypt_error                                             \
  _ (RX_PKTS, "ESP pkts received")                                            \
  _ (RX_POST_PKTS, "ESP-POST pkts received")                                  \
  _ (HANDOFF, "hand-off")                                                     \
  _ (DECRYPTION_FAILED, "ESP decryption failed")                              \
  _ (INTEG_ERROR, "Integrity check failed")                                   \
  _ (CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)")             \
  _ (REPLAY, "SA replayed packet")                                            \
  _ (RUNT, "undersized packet")                                               \
  _ (NO_BUFFERS, "no buffers (packet dropped)")                               \
  _ (OVERSIZED_HEADER, "buffer with oversized header (dropped)")              \
  _ (NO_TAIL_SPACE, "not enough buffer tail space (dropped)")                 \
  _ (TUN_NO_PROTO, "no tunnel protocol")                                      \
  _ (UNSUP_PAYLOAD, "unsupported payload")

typedef enum
{
#define _(sym,str) ESP_DECRYPT_ERROR_##sym,
  foreach_esp_decrypt_error
#undef _
    ESP_DECRYPT_N_ERROR,
} esp_decrypt_error_t;

static char *esp_decrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_decrypt_error
#undef _
};

typedef struct
{
  u32 seq;
  u32 sa_seq;
  u32 sa_seq_hi;
  u32 pkt_seq_hi;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_decrypt_trace_t;

/* The number of bytes in the high-order (ESN) part of the sequence number */
#define N_HI_ESN_BYTES 4

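/*
 * Illustrative note (the authoritative anti-replay logic lives in
 * ipsec_sa.h): with ESN in use, the receiver reconstructs the 64-bit
 * sequence number from the 32-bit value on the wire plus the SA's
 * tracked high-order word, roughly
 *
 *   u64 full_seq = ((u64) seq_hi << 32) | pkt_seq;
 *
 * and those N_HI_ESN_BYTES of seq_hi are covered by the ICV even
 * though they are never transmitted.
 */
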
/* packet trace format function */
static u8 *
format_esp_decrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_decrypt_trace_t *t = va_arg (*args, esp_decrypt_trace_t *);

  s = format (s,
              "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u "
              "pkt-seq-hi %u",
              format_ipsec_crypto_alg, t->crypto_alg, format_ipsec_integ_alg,
              t->integ_alg, t->seq, t->sa_seq, t->sa_seq_hi, t->pkt_seq_hi);
  return s;
}

#define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)

static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks, int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (PREDICT_TRUE (n_ops == 0))
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}
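
/* Both helpers above depend on vnet_crypto_process_ops() /
 * vnet_crypto_process_chained_ops() returning the number of completed
 * ops: the status scan only runs while failures remain (n_fail > 0),
 * so the fast path never walks the op vector a second time. */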

always_inline void
esp_remove_tail (vlib_main_t * vm, vlib_buffer_t * b, vlib_buffer_t * last,
                 u16 tail)
{
  vlib_buffer_t *before_last = b;

  if (last->current_length > tail)
    {
      last->current_length -= tail;
      return;
    }
  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = b;
      b = vlib_get_buffer (vm, b->next_buffer);
    }
  before_last->current_length -= tail - last->current_length;
  vlib_buffer_free_one (vm, before_last->next_buffer);
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
}
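
/* Example: with tail = 20 bytes to strip and a last buffer holding
 * only 12 of them, the last buffer is freed outright and the
 * remaining 8 bytes (20 - 12) are trimmed from the buffer before it. */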

/* The ICV is split across the last two buffers, so move it to the last
   buffer and return a pointer to it */
static_always_inline u8 *
esp_move_icv (vlib_main_t * vm, vlib_buffer_t * first,
              esp_decrypt_packet_data_t * pd,
              esp_decrypt_packet_data2_t * pd2, u16 icv_sz, u16 * dif)
{
  vlib_buffer_t *before_last, *bp;
  u16 last_sz = pd2->lb->current_length;
  u16 first_sz = icv_sz - last_sz;

  bp = before_last = first;
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = bp;
      bp = vlib_get_buffer (vm, bp->next_buffer);
    }

  u8 *lb_curr = vlib_buffer_get_current (pd2->lb);
  memmove (lb_curr + first_sz, lb_curr, last_sz);
  clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
                    first_sz);
  before_last->current_length -= first_sz;
  if (before_last == first)
    pd->current_length -= first_sz;
  clib_memset (vlib_buffer_get_tail (before_last), 0, first_sz);
  if (dif)
    dif[0] = first_sz;
  pd2->lb = before_last;
  pd2->icv_removed = 1;
  pd2->free_buffer_index = before_last->next_buffer;
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return lb_curr;
}
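
/* Sketch of what esp_move_icv() does, assuming a 16-byte ICV of which
 * 6 bytes spilled into the last buffer:
 *
 *   before: [ ... payload | ICV bytes 0-9 ] -> [ ICV bytes 10-15 ]
 *   after:  [ ... payload ] (chain)            [ ICV bytes 0-15  ] (unlinked)
 *
 * The trailing buffer ends up holding the whole ICV, is unlinked from
 * the chain (its index saved in pd2->free_buffer_index), and the
 * returned pointer addresses the consolidated ICV inside it. */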

static_always_inline u16
esp_insert_esn (vlib_main_t *vm, ipsec_sa_t *sa, esp_decrypt_packet_data_t *pd,
                esp_decrypt_packet_data2_t *pd2, u32 *data_len, u8 **digest,
                u16 *len, vlib_buffer_t *b, u8 *payload)
{
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 0;
  /* shift ICV by 4 bytes to insert ESN */
  u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
  u8 tmp[ESP_MAX_ICV_SIZE];

  if (pd2->icv_removed)
    {
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
      if (space_left >= N_HI_ESN_BYTES)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi,
                            N_HI_ESN_BYTES);
          *data_len += N_HI_ESN_BYTES;
        }
      else
        return N_HI_ESN_BYTES;

      len[0] = b->current_length;
    }
  else
    {
      clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
      clib_memcpy_fast (payload + len[0], &seq_hi, N_HI_ESN_BYTES);
      clib_memcpy_fast (payload + len[0] + N_HI_ESN_BYTES, tmp,
                        ESP_MAX_ICV_SIZE);
      *data_len += N_HI_ESN_BYTES;
      *digest += N_HI_ESN_BYTES;
    }
  return N_HI_ESN_BYTES;
}

static_always_inline u8 *
esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
                  esp_decrypt_packet_data_t * pd,
                  esp_decrypt_packet_data2_t * pd2, u16 icv_sz,
                  ipsec_sa_t * sa, u8 * extra_esn, u32 * len)
{
  u16 dif = 0;
  u8 *digest = esp_move_icv (vm, first, pd, pd2, icv_sz, &dif);
  if (dif)
    *len -= dif;

  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);

      if (space_left >= N_HI_ESN_BYTES)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb), &seq_hi,
                            N_HI_ESN_BYTES);
          *len += N_HI_ESN_BYTES;
        }
      else
        {
          /* no space for ESN at the tail, use the next buffer
           * (with ICV data) */
          ASSERT (pd2->icv_removed);
          vlib_buffer_t *tmp = vlib_get_buffer (vm, pd2->free_buffer_index);
          clib_memcpy_fast (vlib_buffer_get_current (tmp) - N_HI_ESN_BYTES,
                            &seq_hi, N_HI_ESN_BYTES);
          extra_esn[0] = 1;
        }
    }
  return digest;
}

static_always_inline int
esp_decrypt_chain_integ (vlib_main_t *vm, ipsec_per_thread_data_t *ptd,
                         const esp_decrypt_packet_data_t *pd,
                         esp_decrypt_packet_data2_t *pd2, ipsec_sa_t *sa0,
                         vlib_buffer_t *b, u8 icv_sz, u8 *start_src,
                         u32 start_len, u8 **digest, u16 *n_ch,
                         u32 *integ_total_len)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = vlib_get_buffer (vm, b->next_buffer);
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = start_src;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (pd2->icv_removed)
            ch->len = cb->current_length;
          else
            ch->len = cb->current_length - icv_sz;
          if (ipsec_sa_is_set_USE_ESN (sa0))
            {
              u32 seq_hi = clib_host_to_net_u32 (pd->seq_hi);
              u8 tmp[ESP_MAX_ICV_SIZE];
              u8 *esn;
              vlib_buffer_t *tmp_b;
              u16 space_left = vlib_buffer_space_left_at_end (vm, pd2->lb);
              if (space_left < N_HI_ESN_BYTES)
                {
                  if (pd2->icv_removed)
                    {
                      /* use pre-data area from the last buffer
                         that was removed from the chain */
                      tmp_b = vlib_get_buffer (vm, pd2->free_buffer_index);
                      esn = tmp_b->data - N_HI_ESN_BYTES;
                    }
                  else
                    {
                      /* no space, need to allocate new buffer */
                      u32 tmp_bi = 0;
                      if (vlib_buffer_alloc (vm, &tmp_bi, 1) != 1)
                        return -1;
                      tmp_b = vlib_get_buffer (vm, tmp_bi);
                      esn = tmp_b->data;
                      pd2->free_buffer_index = tmp_bi;
                    }
                  clib_memcpy_fast (esn, &seq_hi, N_HI_ESN_BYTES);

                  vec_add2 (ptd->chunks, ch, 1);
                  n_chunks += 1;
                  ch->src = esn;
                  ch->len = N_HI_ESN_BYTES;
                }
              else
                {
                  if (pd2->icv_removed)
                    {
                      clib_memcpy_fast (vlib_buffer_get_tail (pd2->lb),
                                        &seq_hi, N_HI_ESN_BYTES);
                    }
                  else
                    {
                      clib_memcpy_fast (tmp, *digest, ESP_MAX_ICV_SIZE);
                      clib_memcpy_fast (*digest, &seq_hi, N_HI_ESN_BYTES);
                      clib_memcpy_fast (*digest + N_HI_ESN_BYTES, tmp,
                                        ESP_MAX_ICV_SIZE);
                      *digest += N_HI_ESN_BYTES;
                    }
                  ch->len += N_HI_ESN_BYTES;
                }
            }
          total_len += ch->len;
          break;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;
  if (integ_total_len)
    *integ_total_len = total_len;

  return 0;
}
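
/* The integrity chunk walk mirrors the buffer chain: one
 * vnet_crypto_op_chunk_t per buffer, with the last chunk shortened by
 * the ICV (unless it was already removed) and possibly followed by an
 * extra N_HI_ESN_BYTES chunk carrying the ESN high-order word. */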

static_always_inline u32
esp_decrypt_chain_crypto (vlib_main_t * vm, ipsec_per_thread_data_t * ptd,
                          esp_decrypt_packet_data_t * pd,
                          esp_decrypt_packet_data2_t * pd2,
                          ipsec_sa_t * sa0, vlib_buffer_t * b, u8 icv_sz,
                          u8 * start, u32 start_len, u8 ** tag, u16 * n_ch)
{
  vnet_crypto_op_chunk_t *ch;
  vlib_buffer_t *cb = b;
  u16 n_chunks = 1;
  u32 total_len;
  vec_add2 (ptd->chunks, ch, 1);
  total_len = ch->len = start_len;
  ch->src = ch->dst = start;
  cb = vlib_get_buffer (vm, cb->next_buffer);
  n_chunks = 1;

  while (1)
    {
      vec_add2 (ptd->chunks, ch, 1);
      n_chunks += 1;
      ch->src = ch->dst = vlib_buffer_get_current (cb);
      if (pd2->lb == cb)
        {
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              if (pd2->lb->current_length < icv_sz)
                {
                  u16 dif = 0;
                  *tag = esp_move_icv (vm, b, pd, pd2, icv_sz, &dif);

                  /* this chunk does not contain crypto data */
                  n_chunks -= 1;
                  /* and fix previous chunk's length as it might have
                     been changed */
                  ASSERT (n_chunks > 0);
                  if (pd2->lb == b)
                    {
                      total_len -= dif;
                      ch[-1].len -= dif;
                    }
                  else
                    {
                      total_len = total_len + pd2->lb->current_length -
                        ch[-1].len;
                      ch[-1].len = pd2->lb->current_length;
                    }
                  break;
                }
              else
                *tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;
            }

          if (pd2->icv_removed)
            total_len += ch->len = cb->current_length;
          else
            total_len += ch->len = cb->current_length - icv_sz;
        }
      else
        total_len += ch->len = cb->current_length;

      if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
        break;

      cb = vlib_get_buffer (vm, cb->next_buffer);
    }

  if (n_ch)
    *n_ch = n_chunks;

  return total_len;
}
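
/* Unlike the integrity walk, the crypto walk is in-place (ch->src ==
 * ch->dst) and, for AEAD SAs, also locates the tag: either at the tail
 * of the last buffer or, when the ICV straddled two buffers, after
 * esp_move_icv() has consolidated it. */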

static_always_inline void
esp_decrypt_prepare_sync_op (vlib_main_t * vm, vlib_node_runtime_t * node,
                             ipsec_per_thread_data_t * ptd,
                             vnet_crypto_op_t *** crypto_ops,
                             vnet_crypto_op_t *** integ_ops,
                             vnet_crypto_op_t * op,
                             ipsec_sa_t * sa0, u8 * payload,
                             u16 len, u8 icv_sz, u8 iv_sz,
                             esp_decrypt_packet_data_t * pd,
                             esp_decrypt_packet_data2_t * pd2,
                             vlib_buffer_t * b, u16 * next, u32 index)
{
  const u8 esp_sz = sizeof (esp_header_t);

  if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
    {
      vnet_crypto_op_init (op, sa0->integ_op_id);
      op->key_index = sa0->integ_key_index;
      op->src = payload;
      op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
      op->user_data = index;
      op->digest = payload + len;
      op->digest_len = icv_sz;
      op->len = len;

      if (pd->is_chain)
        {
          /* buffer is chained */
          op->len = pd->current_length;

          /* special case when the ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that the ESN needs to be added after the encrypted data and
           * may or may not fit in the tail. */
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              op->digest =
                esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
                                  &extra_esn, &op->len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  op->len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      *integ_ops = &ptd->integ_ops;
                      *crypto_ops = &ptd->crypto_ops;
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            op->digest = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          if (esp_decrypt_chain_integ (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                       payload, pd->current_length,
                                       &op->digest, &op->n_chunks, 0) < 0)
            {
              b->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
              next[0] = ESP_DECRYPT_NEXT_DROP;
              return;
            }
        }
      else
        esp_insert_esn (vm, sa0, pd, pd2, &op->len, &op->digest, &len, b,
                        payload);
    out:
      vec_add_aligned (*(integ_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }

  payload += esp_sz;
  len -= esp_sz;

  if (sa0->crypto_dec_op_id != VNET_CRYPTO_OP_NONE)
    {
      vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
      op->key_index = sa0->crypto_key_index;
      op->iv = payload;

      if (ipsec_sa_is_set_IS_CTR (sa0))
        {
          /* construct nonce in a scratch space in front of the IP header */
          esp_ctr_nonce_t *nonce =
            (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz -
                                 sizeof (*nonce));
          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              /* construct aad in a scratch space in front of the nonce */
              esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
              op->aad = (u8 *) nonce - sizeof (esp_aead_t);
              op->aad_len = esp_aad_fill (op->aad, esp0, sa0, pd->seq_hi);
              op->tag = payload + len;
              op->tag_len = 16;
            }
          else
            {
              nonce->ctr = clib_host_to_net_u32 (1);
            }
          nonce->salt = sa0->salt;
          ASSERT (sizeof (u64) == iv_sz);
          nonce->iv = *(u64 *) op->iv;
          op->iv = (u8 *) nonce;
        }
      op->src = op->dst = payload += iv_sz;
      op->len = len - iv_sz;
      op->user_data = index;

      if (pd->is_chain && (pd2->lb != b))
        {
          /* buffer is chained */
          op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          op->chunk_index = vec_len (ptd->chunks);
          esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                    payload, len - pd->iv_sz + pd->icv_sz,
                                    &op->tag, &op->n_chunks);
        }

      vec_add_aligned (*(crypto_ops[0]), op, 1, CLIB_CACHE_LINE_BYTES);
    }
}
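
/* Sketch of the scratch-space layout assembled above for CTR/AEAD modes
 * (field order per esp.h's esp_ctr_nonce_t and esp_aead_t definitions,
 * which are authoritative):
 *
 *   | aad | nonce (salt.iv.ctr) | IP (+UDP) hdr | ESP hdr | IV | payload |
 *
 * Both the AAD and the nonce live in the headroom in front of the
 * packet's own headers, so no per-packet buffer is allocated. */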

static_always_inline esp_decrypt_error_t
esp_decrypt_prepare_async_frame (vlib_main_t *vm, vlib_node_runtime_t *node,
                                 ipsec_per_thread_data_t *ptd,
                                 vnet_crypto_async_frame_t *f, ipsec_sa_t *sa0,
                                 u8 *payload, u16 len, u8 icv_sz, u8 iv_sz,
                                 esp_decrypt_packet_data_t *pd,
                                 esp_decrypt_packet_data2_t *pd2, u32 bi,
                                 vlib_buffer_t *b, u16 *next, u16 async_next)
{
  const u8 esp_sz = sizeof (esp_header_t);
  esp_decrypt_packet_data_t *async_pd = &(esp_post_data (b))->decrypt_data;
  esp_decrypt_packet_data2_t *async_pd2 = esp_post_data2 (b);
  u8 *tag = payload + len, *iv = payload + esp_sz, *aad = 0;
  u32 key_index;
  u32 crypto_len, integ_len = 0;
  i16 crypto_start_offset, integ_start_offset = 0;
  u8 flags = 0;

  if (!ipsec_sa_is_set_IS_AEAD (sa0))
    {
      /* linked algs */
      key_index = sa0->linked_key_index;
      integ_start_offset = payload - b->data;
      integ_len = len;
      if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
        flags |= VNET_CRYPTO_OP_FLAG_HMAC_CHECK;

      if (pd->is_chain)
        {
          /* buffer is chained */
          integ_len = pd->current_length;

          /* special case when the ICV is split and needs to be reassembled
           * first -> move it to the last buffer. Also take into account
           * that the ESN needs to be added after the encrypted data and
           * may or may not fit in the tail. */
          if (pd2->lb->current_length < icv_sz)
            {
              u8 extra_esn = 0;
              tag = esp_move_icv_esn (vm, b, pd, pd2, icv_sz, sa0,
                                      &extra_esn, &integ_len);

              if (extra_esn)
                {
                  /* esn is in the last buffer, that was unlinked from
                   * the chain */
                  integ_len = b->current_length;
                }
              else
                {
                  if (pd2->lb == b)
                    {
                      /* we now have a single buffer of crypto data, adjust
                       * the length (second buffer contains only ICV) */
                      len = b->current_length;
                      goto out;
                    }
                }
            }
          else
            tag = vlib_buffer_get_tail (pd2->lb) - icv_sz;

          flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
          if (esp_decrypt_chain_integ (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                       payload, pd->current_length, &tag, 0,
                                       &integ_len) < 0)
            {
              /* allocate buffer failed, will not add to frame and drop */
              return (ESP_DECRYPT_ERROR_NO_BUFFERS);
            }
        }
      else
        esp_insert_esn (vm, sa0, pd, pd2, &integ_len, &tag, &len, b, payload);
    }
  else
    key_index = sa0->crypto_key_index;

out:
  /* crypto */
  payload += esp_sz;
  len -= esp_sz;
  iv = payload;

  if (ipsec_sa_is_set_IS_CTR (sa0))
    {
      /* construct nonce in a scratch space in front of the IP header */
      esp_ctr_nonce_t *nonce =
        (esp_ctr_nonce_t *) (payload - esp_sz - pd->hdr_sz - sizeof (*nonce));
      if (ipsec_sa_is_set_IS_AEAD (sa0))
        {
          /* construct aad in a scratch space in front of the nonce */
          esp_header_t *esp0 = (esp_header_t *) (payload - esp_sz);
          aad = (u8 *) nonce - sizeof (esp_aead_t);
          esp_aad_fill (aad, esp0, sa0, pd->seq_hi);
          tag = payload + len;
        }
      else
        {
          nonce->ctr = clib_host_to_net_u32 (1);
        }
      nonce->salt = sa0->salt;
      ASSERT (sizeof (u64) == iv_sz);
      nonce->iv = *(u64 *) iv;
      iv = (u8 *) nonce;
    }

  crypto_start_offset = (payload += iv_sz) - b->data;
  crypto_len = len - iv_sz;

  if (pd->is_chain && (pd2->lb != b))
    {
      /* buffer is chained */
      flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;

      crypto_len = esp_decrypt_chain_crypto (vm, ptd, pd, pd2, sa0, b, icv_sz,
                                             payload,
                                             len - pd->iv_sz + pd->icv_sz,
                                             &tag, 0);
    }

  *async_pd = *pd;
  *async_pd2 = *pd2;

  /* for AEAD integ_len - crypto_len will be negative, it is ok since it
   * is ignored by the engine. */
  vnet_crypto_async_add_to_frame (
    vm, f, key_index, crypto_len, integ_len - crypto_len, crypto_start_offset,
    integ_start_offset, bi, async_next, iv, tag, aad, flags);

  return (ESP_DECRYPT_ERROR_RX_PKTS);
}
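
/* Frames filled here are not submitted one by one: esp_decrypt_inline()
 * keeps one open frame per async op id and submits (or recycles) them
 * all after the whole vector has been walked. */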

static_always_inline void
esp_decrypt_post_crypto (vlib_main_t *vm, const vlib_node_runtime_t *node,
                         const esp_decrypt_packet_data_t *pd,
                         const esp_decrypt_packet_data2_t *pd2,
                         vlib_buffer_t *b, u16 *next, int is_ip6, int is_tun,
                         int is_async)
{
  ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
  vlib_buffer_t *lb = b;
  const u8 esp_sz = sizeof (esp_header_t);
  const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL | IPSEC_SA_FLAG_IS_TUNNEL_V6;
  u8 pad_length = 0, next_header = 0;
  u16 icv_sz;

  /*
   * redo the anti-replay check.
   * in this frame say we have sequence numbers s, s+1, s+1, s+1
   * and s and s+1 are in the window. When we did the anti-replay
   * check above we did so against the state of the window (W),
   * after packet s-1. So each of the packets in the sequence will be
   * accepted.
   * This time s will be checked against Ws-1, s+1 checked against Ws
   * (i.e. the window state is updated/advanced),
   * so this time the successive s+1 packet will be dropped.
   * This is a consequence of batching the decrypts. If the
   * check-decrypt-advance process was done for each packet it would
   * be fine. But we batch the decrypts because it's much more efficient
   * to do so in SW and if we offload to HW and the process is async.
   *
   * You're probably thinking, but this means an attacker can send the
   * above sequence and cause VPP to perform decrypts that will fail,
   * and that's true. But if the attacker can determine s (a valid
   * sequence number in the window) which is non-trivial, it can generate
   * a sequence s, s+1, s+2, s+3, ... s+n and nothing will prevent any
   * implementation, sequential or batching, from decrypting these.
   */
  if (ipsec_sa_anti_replay_and_sn_advance (sa0, pd->seq, pd->seq_hi, true,
                                           NULL))
    {
      b->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
      next[0] = ESP_DECRYPT_NEXT_DROP;
      return;
    }

  u64 n_lost =
    ipsec_sa_anti_replay_advance (sa0, vm->thread_index, pd->seq, pd->seq_hi);

  vlib_prefetch_simple_counter (&ipsec_sa_lost_counters, vm->thread_index,
                                pd->sa_index);

  if (pd->is_chain)
    {
      lb = pd2->lb;
      icv_sz = pd2->icv_removed ? 0 : pd->icv_sz;
      if (pd2->free_buffer_index)
        {
          vlib_buffer_free_one (vm, pd2->free_buffer_index);
          lb->next_buffer = 0;
        }
      if (lb->current_length < sizeof (esp_footer_t) + icv_sz)
        {
          /* the ESP footer is either split across two buffers or sits in
           * the before-last buffer */

          vlib_buffer_t *before_last = b, *bp = b;
          while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
            {
              before_last = bp;
              bp = vlib_get_buffer (vm, bp->next_buffer);
            }
          u8 *bt = vlib_buffer_get_tail (before_last);

          if (lb->current_length == icv_sz)
            {
              esp_footer_t *f = (esp_footer_t *) (bt - sizeof (*f));
              pad_length = f->pad_length;
              next_header = f->next_header;
            }
          else
            {
              pad_length = (bt - 1)[0];
              next_header = ((u8 *) vlib_buffer_get_current (lb))[0];
            }
        }
      else
        {
          esp_footer_t *f =
            (esp_footer_t *) (lb->data + lb->current_data +
                              lb->current_length - sizeof (esp_footer_t) -
                              icv_sz);
          pad_length = f->pad_length;
          next_header = f->next_header;
        }
    }
  else
    {
      icv_sz = pd->icv_sz;
      esp_footer_t *f =
        (esp_footer_t *) (lb->data + lb->current_data + lb->current_length -
                          sizeof (esp_footer_t) - icv_sz);
      pad_length = f->pad_length;
      next_header = f->next_header;
    }

  u16 adv = pd->iv_sz + esp_sz;
  u16 tail = sizeof (esp_footer_t) + pad_length + icv_sz;
  u16 tail_orig = sizeof (esp_footer_t) + pad_length + pd->icv_sz;
  b->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;
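
  /* Worked example (illustrative figures, not fixed by the code): with
   * AES-GCM-128 (iv_sz 8, icv_sz 16) and pad_length 2, adv = 8 + 8 = 16
   * bytes are stripped from the front and tail = 2 + 2 + 16 = 20 bytes
   * from the rear, the footer itself (pad_length + next_header)
   * accounting for the first 2 bytes. */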

  if ((pd->flags & tun_flags) == 0 && !is_tun) /* transport mode */
    {
      u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
        sizeof (udp_header_t) : 0;
      u16 ip_hdr_sz = pd->hdr_sz - udp_sz;
      u8 *old_ip = b->data + pd->current_data - ip_hdr_sz - udp_sz;
      u8 *ip = old_ip + adv + udp_sz;

      if (is_ip6 && ip_hdr_sz > 64)
        memmove (ip, old_ip, ip_hdr_sz);
      else
        clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);

      b->current_data = pd->current_data + adv - ip_hdr_sz;
      b->current_length += ip_hdr_sz - adv;
      esp_remove_tail (vm, b, lb, tail);

      if (is_ip6)
        {
          ip6_header_t *ip6 = (ip6_header_t *) ip;
          u16 len = clib_net_to_host_u16 (ip6->payload_length);
          len -= adv + tail_orig;
          ip6->payload_length = clib_host_to_net_u16 (len);
          ip6->protocol = next_header;
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
        }
      else
        {
          ip4_header_t *ip4 = (ip4_header_t *) ip;
          ip_csum_t sum = ip4->checksum;
          u16 len = clib_net_to_host_u16 (ip4->length);
          len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
          sum = ip_csum_update (sum, ip4->protocol, next_header,
                                ip4_header_t, protocol);
          sum = ip_csum_update (sum, ip4->length, len, ip4_header_t, length);
          ip4->checksum = ip_csum_fold (sum);
          ip4->protocol = next_header;
          ip4->length = len;
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
        }
    }
  else
    {
      if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
        {
          next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else if (next_header == IP_PROTOCOL_IPV6)
        {
          next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else if (next_header == IP_PROTOCOL_MPLS_IN_IP)
        {
          next[0] = ESP_DECRYPT_NEXT_MPLS_INPUT;
          b->current_data = pd->current_data + adv;
          b->current_length = pd->current_length - adv;
          esp_remove_tail (vm, b, lb, tail);
        }
      else
        {
          if (is_tun && next_header == IP_PROTOCOL_GRE)
            {
              gre_header_t *gre;

              b->current_data = pd->current_data + adv;
              b->current_length = pd->current_length - adv - tail;

              gre = vlib_buffer_get_current (b);

              vlib_buffer_advance (b, sizeof (*gre));

              switch (clib_net_to_host_u16 (gre->protocol))
                {
                case GRE_PROTOCOL_teb:
                  vnet_update_l2_len (b);
                  next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
                  break;
                case GRE_PROTOCOL_ip4:
                  next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
                  break;
                case GRE_PROTOCOL_ip6:
                  next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
                  break;
                default:
                  b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
                  next[0] = ESP_DECRYPT_NEXT_DROP;
                  break;
                }
            }
          else
            {
              next[0] = ESP_DECRYPT_NEXT_DROP;
              b->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
              return;
            }
        }
      if (is_tun)
        {
          if (ipsec_sa_is_set_IS_PROTECT (sa0))
            {
              /*
               * There are two encap possibilities
               * 1) the tunnel and the SA are providing encap, i.e. it's
               *   MAC | SA-IP | TUN-IP | ESP | PAYLOAD
               * implying the SA is in tunnel mode (on a tunnel interface)
               * 2) only the tunnel provides encap
               *   MAC | TUN-IP | ESP | PAYLOAD
               * implying the SA is in transport mode.
               *
               * For 2) we need only strip the tunnel encap and we're good,
               * since the tunnel and crypto encap (in the tun-protect
               * object) are the same and we verified above that these match.
               * For 1) we need to strip the SA-IP outer headers, to
               * reveal the tunnel IP and then check that this matches
               * the configured tunnel.
               */
              const ipsec_tun_protect_t *itp;

              itp =
                ipsec_tun_protect_get (vnet_buffer (b)->ipsec.protect_index);

              if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
                {
                  const ip4_header_t *ip4;

                  ip4 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v4 (&itp->itp_tun.src,
                                                 &ip4->dst_address) ||
                      !ip46_address_is_equal_v4 (&itp->itp_tun.dst,
                                                 &ip4->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
              else if (next_header == IP_PROTOCOL_IPV6)
                {
                  const ip6_header_t *ip6;

                  ip6 = vlib_buffer_get_current (b);

                  if (!ip46_address_is_equal_v6 (&itp->itp_tun.src,
                                                 &ip6->dst_address) ||
                      !ip46_address_is_equal_v6 (&itp->itp_tun.dst,
                                                 &ip6->src_address))
                    {
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      b->error = node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                    }
                }
            }
        }
    }

  if (PREDICT_FALSE (n_lost))
    vlib_increment_simple_counter (&ipsec_sa_lost_counters, vm->thread_index,
                                   pd->sa_index, n_lost);
}
1024
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001025always_inline uword
Neale Rannsf16e9a52021-02-25 19:09:24 +00001026esp_decrypt_inline (vlib_main_t *vm, vlib_node_runtime_t *node,
1027 vlib_frame_t *from_frame, int is_ip6, int is_tun,
1028 u16 async_next_node)
Ed Warnickecb9cada2015-12-08 15:45:58 -07001029{
Ed Warnickecb9cada2015-12-08 15:45:58 -07001030 ipsec_main_t *im = &ipsec_main;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001031 u32 thread_index = vm->thread_index;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001032 u16 len;
1033 ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, thread_index);
Damjan Marionc98275f2019-03-06 14:05:01 +01001034 u32 *from = vlib_frame_vector_args (from_frame);
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001035 u32 n_left = from_frame->n_vectors;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001036 vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001037 vlib_buffer_t *sync_bufs[VLIB_FRAME_SIZE];
1038 u16 sync_nexts[VLIB_FRAME_SIZE], *sync_next = sync_nexts, n_sync = 0;
Matthew Smith51d56ba2021-06-04 09:18:37 -05001039 u16 async_nexts[VLIB_FRAME_SIZE], *async_next = async_nexts;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001040 u16 noop_nexts[VLIB_FRAME_SIZE], *noop_next = noop_nexts, n_noop = 0;
1041 u32 sync_bi[VLIB_FRAME_SIZE];
1042 u32 noop_bi[VLIB_FRAME_SIZE];
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001043 esp_decrypt_packet_data_t pkt_data[VLIB_FRAME_SIZE], *pd = pkt_data;
Fan Zhangf5395782020-04-29 14:00:03 +01001044 esp_decrypt_packet_data2_t pkt_data2[VLIB_FRAME_SIZE], *pd2 = pkt_data2;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001045 esp_decrypt_packet_data_t cpd = { };
1046 u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
1047 const u8 esp_sz = sizeof (esp_header_t);
1048 ipsec_sa_t *sa0 = 0;
Filip Tehlare4e8c6b2020-02-13 07:49:30 +00001049 vnet_crypto_op_t _op, *op = &_op;
Benoît Gannee631ece2021-05-27 18:49:42 +02001050 vnet_crypto_op_t **crypto_ops;
1051 vnet_crypto_op_t **integ_ops;
Fan Zhangf5395782020-04-29 14:00:03 +01001052 int is_async = im->async_mode;
Neale Rannsfc811342021-02-26 10:35:33 +00001053 vnet_crypto_async_op_id_t async_op = ~0;
Neale Rannsfc811342021-02-26 10:35:33 +00001054 vnet_crypto_async_frame_t *async_frames[VNET_CRYPTO_ASYNC_OP_N_IDS];
Neale Rannsf16e9a52021-02-25 19:09:24 +00001055 esp_decrypt_error_t err;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001056
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001057 vlib_get_buffers (vm, from, b, n_left);
Fan Zhangf5395782020-04-29 14:00:03 +01001058 if (!is_async)
1059 {
1060 vec_reset_length (ptd->crypto_ops);
1061 vec_reset_length (ptd->integ_ops);
1062 vec_reset_length (ptd->chained_crypto_ops);
1063 vec_reset_length (ptd->chained_integ_ops);
1064 }
Neale Rannsfc811342021-02-26 10:35:33 +00001065 vec_reset_length (ptd->async_frames);
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001066 vec_reset_length (ptd->chunks);
Neale Rannsf16e9a52021-02-25 19:09:24 +00001067 clib_memset (sync_nexts, -1, sizeof (sync_nexts));
Neale Rannsfc811342021-02-26 10:35:33 +00001068 clib_memset (async_frames, 0, sizeof (async_frames));
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001069
1070 while (n_left > 0)
Keith Burns (alagalah)166a9d42016-08-06 11:00:56 -07001071 {
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001072 u8 *payload;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001073
Neale Rannsf16e9a52021-02-25 19:09:24 +00001074 err = ESP_DECRYPT_ERROR_RX_PKTS;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001075 if (n_left > 2)
Keith Burns (alagalah)166a9d42016-08-06 11:00:56 -07001076 {
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001077 u8 *p;
1078 vlib_prefetch_buffer_header (b[2], LOAD);
1079 p = vlib_buffer_get_current (b[1]);
Damjan Marionaf7fb042021-07-15 11:54:41 +02001080 clib_prefetch_load (p);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001081 p -= CLIB_CACHE_LINE_BYTES;
Damjan Marionaf7fb042021-07-15 11:54:41 +02001082 clib_prefetch_load (p);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001083 }
1084
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001085 u32 n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
1086 if (n_bufs == 0)
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001087 {
Neale Rannsf16e9a52021-02-25 19:09:24 +00001088 err = ESP_DECRYPT_ERROR_NO_BUFFERS;
1089 esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
1090 ESP_DECRYPT_NEXT_DROP);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001091 goto next;
Keith Burns (alagalah)166a9d42016-08-06 11:00:56 -07001092 }
Damjan Marionc98275f2019-03-06 14:05:01 +01001093
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001094 if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
Damjan Marionc98275f2019-03-06 14:05:01 +01001095 {
Damjan Marion867dfdd2019-06-05 15:42:54 +02001096 if (current_sa_pkts)
1097 vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
1098 current_sa_index,
1099 current_sa_pkts,
1100 current_sa_bytes);
1101 current_sa_bytes = current_sa_pkts = 0;
1102
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001103 current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
Neale Rannsc5fe57d2021-02-25 16:01:28 +00001104 sa0 = ipsec_sa_get (current_sa_index);
Neale Ranns123b5eb2020-10-16 14:03:55 +00001105
1106 /* fetch the second cacheline ASAP */
Damjan Marionaf7fb042021-07-15 11:54:41 +02001107 clib_prefetch_load (sa0->cacheline1);
Damjan Marion7c22ff72019-04-04 12:25:44 +02001108 cpd.icv_sz = sa0->integ_icv_size;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001109 cpd.iv_sz = sa0->crypto_iv_size;
1110 cpd.flags = sa0->flags;
1111 cpd.sa_index = current_sa_index;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001112 is_async = im->async_mode | ipsec_sa_is_set_IS_ASYNC (sa0);
Neale Rannsfc811342021-02-26 10:35:33 +00001113 }
Fan Zhangf5395782020-04-29 14:00:03 +01001114
Neale Ranns1a52d372021-02-04 11:33:32 +00001115 if (PREDICT_FALSE (~0 == sa0->thread_index))
Neale Rannsf62a8c02019-04-02 08:13:33 +00001116 {
1117 /* this is the first packet to use this SA, claim the SA
1118 * for this thread. this could happen simultaneously on
1119 * another thread */
Neale Ranns1a52d372021-02-04 11:33:32 +00001120 clib_atomic_cmp_and_swap (&sa0->thread_index, ~0,
Neale Rannsf62a8c02019-04-02 08:13:33 +00001121 ipsec_sa_assign_thread (thread_index));
1122 }
1123
Neale Ranns1a52d372021-02-04 11:33:32 +00001124 if (PREDICT_FALSE (thread_index != sa0->thread_index))
Neale Rannsf62a8c02019-04-02 08:13:33 +00001125 {
Neale Rannsaa7d7662021-02-10 08:42:49 +00001126 vnet_buffer (b[0])->ipsec.thread_index = sa0->thread_index;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001127 err = ESP_DECRYPT_ERROR_HANDOFF;
1128 esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
1129 ESP_DECRYPT_NEXT_HANDOFF);
Neale Rannsf62a8c02019-04-02 08:13:33 +00001130 goto next;
1131 }
1132
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001133 /* store packet data for next round for easier prefetch */
1134 pd->sa_data = cpd.sa_data;
1135 pd->current_data = b[0]->current_data;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001136 pd->hdr_sz = pd->current_data - vnet_buffer (b[0])->l3_hdr_offset;
1137 payload = b[0]->data + pd->current_data;
Neale Ranns6afaae12019-07-17 15:07:14 +00001138 pd->seq = clib_host_to_net_u32 (((esp_header_t *) payload)->seq);
Fan Zhangf5395782020-04-29 14:00:03 +01001139 pd->is_chain = 0;
1140 pd2->lb = b[0];
1141 pd2->free_buffer_index = 0;
1142 pd2->icv_removed = 0;
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001143
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001144 if (n_bufs > 1)
1145 {
Fan Zhangf5395782020-04-29 14:00:03 +01001146 pd->is_chain = 1;
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001147 /* find last buffer in the chain */
Fan Zhangf5395782020-04-29 14:00:03 +01001148 while (pd2->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
1149 pd2->lb = vlib_get_buffer (vm, pd2->lb->next_buffer);
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001150
1151 crypto_ops = &ptd->chained_crypto_ops;
1152 integ_ops = &ptd->chained_integ_ops;
1153 }
Benoît Gannee631ece2021-05-27 18:49:42 +02001154 else
1155 {
1156 crypto_ops = &ptd->crypto_ops;
1157 integ_ops = &ptd->integ_ops;
1158 }
Fan Zhangf5395782020-04-29 14:00:03 +01001159
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001160 pd->current_length = b[0]->current_length;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001161
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001162 /* anti-reply check */
Neale Ranns5b891102021-06-28 13:31:28 +00001163 if (ipsec_sa_anti_replay_and_sn_advance (sa0, pd->seq, ~0, false,
1164 &pd->seq_hi))
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001165 {
Neale Rannsf16e9a52021-02-25 19:09:24 +00001166 err = ESP_DECRYPT_ERROR_REPLAY;
1167 esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
1168 ESP_DECRYPT_NEXT_DROP);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001169 goto next;
1170 }
1171
Damjan Mariona829b132019-04-24 23:39:16 +02001172 if (pd->current_length < cpd.icv_sz + esp_sz + cpd.iv_sz)
1173 {
Neale Rannsf16e9a52021-02-25 19:09:24 +00001174 err = ESP_DECRYPT_ERROR_RUNT;
1175 esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
1176 ESP_DECRYPT_NEXT_DROP);
Damjan Mariona829b132019-04-24 23:39:16 +02001177 goto next;
1178 }
1179
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001180 len = pd->current_length - cpd.icv_sz;
1181 current_sa_pkts += 1;
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001182 current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001183
Fan Zhangf5395782020-04-29 14:00:03 +01001184 if (is_async)
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001185 {
Matthew Smith51d56ba2021-06-04 09:18:37 -05001186 async_op = sa0->crypto_async_dec_op_id;
1187
1188 /* get a frame for this op if we don't yet have one or it's full
1189 */
1190 if (NULL == async_frames[async_op] ||
1191 vnet_crypto_async_frame_is_full (async_frames[async_op]))
1192 {
1193 async_frames[async_op] =
1194 vnet_crypto_async_get_frame (vm, async_op);
1195 /* Save the frame to the list we'll submit at the end */
1196 vec_add1 (ptd->async_frames, async_frames[async_op]);
1197 }
Neale Rannsfc811342021-02-26 10:35:33 +00001198
1199 err = esp_decrypt_prepare_async_frame (
1200 vm, node, ptd, async_frames[async_op], sa0, payload, len,
Neale Rannsf16e9a52021-02-25 19:09:24 +00001201 cpd.icv_sz, cpd.iv_sz, pd, pd2, from[b - bufs], b[0], async_next,
1202 async_next_node);
Neale Rannsfc811342021-02-26 10:35:33 +00001203 if (ESP_DECRYPT_ERROR_RX_PKTS != err)
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001204 {
Neale Rannsf16e9a52021-02-25 19:09:24 +00001205 esp_set_next_index (b[0], node, err, n_noop, noop_nexts,
1206 ESP_DECRYPT_NEXT_DROP);
Filip Tehlarefcad1a2020-02-04 09:36:04 +00001207 }
Damjan Marionc98275f2019-03-06 14:05:01 +01001208 }
Fan Zhangf5395782020-04-29 14:00:03 +01001209 else
Neale Rannsf16e9a52021-02-25 19:09:24 +00001210 esp_decrypt_prepare_sync_op (
1211 vm, node, ptd, &crypto_ops, &integ_ops, op, sa0, payload, len,
1212 cpd.icv_sz, cpd.iv_sz, pd, pd2, b[0], sync_next, b - bufs);
Damjan Marionc98275f2019-03-06 14:05:01 +01001213 /* next */
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001214 next:
Neale Rannsf16e9a52021-02-25 19:09:24 +00001215 if (ESP_DECRYPT_ERROR_RX_PKTS != err)
1216 {
1217 noop_bi[n_noop] = from[b - bufs];
1218 n_noop++;
1219 noop_next++;
1220 }
1221 else if (!is_async)
1222 {
1223 sync_bi[n_sync] = from[b - bufs];
1224 sync_bufs[n_sync] = b[0];
1225 n_sync++;
1226 sync_next++;
1227 pd += 1;
1228 pd2 += 1;
1229 }
1230 else
Matthew Smith51d56ba2021-06-04 09:18:37 -05001231 async_next++;
1232
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001233 n_left -= 1;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001234 b += 1;
Ed Warnickecb9cada2015-12-08 15:45:58 -07001235 }
Damjan Marionc98275f2019-03-06 14:05:01 +01001236
Neale Ranns02950402019-12-20 00:54:57 +00001237 if (PREDICT_TRUE (~0 != current_sa_index))
1238 vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
1239 current_sa_index, current_sa_pkts,
1240 current_sa_bytes);
Klement Sekerabe5a5dd2018-10-09 16:05:48 +02001241
Matthew Smith51d56ba2021-06-04 09:18:37 -05001242 /* submit or free all of the open frames */
1243 vnet_crypto_async_frame_t **async_frame;
Neale Rannsfc811342021-02-26 10:35:33 +00001244
Matthew Smith51d56ba2021-06-04 09:18:37 -05001245 vec_foreach (async_frame, ptd->async_frames)
1246 {
1247 /* free frame and move on if no ops were successfully added */
1248 if (PREDICT_FALSE (!(*async_frame)->n_elts))
Fan Zhangf5395782020-04-29 14:00:03 +01001249 {
Matthew Smith51d56ba2021-06-04 09:18:37 -05001250 vnet_crypto_async_free_frame (vm, *async_frame);
1251 continue;
1252 }
1253 if (vnet_crypto_async_submit_open_frame (vm, *async_frame) < 0)
1254 {
1255 n_noop += esp_async_recycle_failed_submit (
1256 vm, *async_frame, node, ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR,
Matthew Smithc14b8cf2021-12-01 20:02:35 +00001257 n_noop, noop_bi, noop_nexts, ESP_DECRYPT_NEXT_DROP);
Matthew Smith51d56ba2021-06-04 09:18:37 -05001258 vnet_crypto_async_reset_frame (*async_frame);
1259 vnet_crypto_async_free_frame (vm, *async_frame);
Fan Zhangf5395782020-04-29 14:00:03 +01001260 }
Fan Zhangf5395782020-04-29 14:00:03 +01001261 }
Fan Zhangf5395782020-04-29 14:00:03 +01001262
Neale Rannsf16e9a52021-02-25 19:09:24 +00001263 if (n_sync)
1264 {
1265 esp_process_ops (vm, node, ptd->integ_ops, sync_bufs, sync_nexts,
1266 ESP_DECRYPT_ERROR_INTEG_ERROR);
1267 esp_process_chained_ops (vm, node, ptd->chained_integ_ops, sync_bufs,
1268 sync_nexts, ptd->chunks,
1269 ESP_DECRYPT_ERROR_INTEG_ERROR);
1270
1271 esp_process_ops (vm, node, ptd->crypto_ops, sync_bufs, sync_nexts,
Fan Zhangf5395782020-04-29 14:00:03 +01001272 ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
Neale Rannsf16e9a52021-02-25 19:09:24 +00001273 esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, sync_bufs,
1274 sync_nexts, ptd->chunks,
Fan Zhangf5395782020-04-29 14:00:03 +01001275 ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
1276 }
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001277
1278 /* Post decryption ronud - adjust packet data start and length and next
1279 node */
1280
Neale Rannsf16e9a52021-02-25 19:09:24 +00001281 n_left = n_sync;
1282 sync_next = sync_nexts;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001283 pd = pkt_data;
Fan Zhangf5395782020-04-29 14:00:03 +01001284 pd2 = pkt_data2;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001285 b = sync_bufs;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001286
1287 while (n_left)
1288 {
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001289 if (n_left >= 2)
1290 {
1291 void *data = b[1]->data + pd[1].current_data;
1292
1293 /* buffer metadata */
1294 vlib_prefetch_buffer_header (b[1], LOAD);
1295
1296 /* esp_footer_t */
1297 CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
1298 CLIB_CACHE_LINE_BYTES, LOAD);
1299
1300 /* packet headers */
1301 CLIB_PREFETCH (data - CLIB_CACHE_LINE_BYTES,
1302 CLIB_CACHE_LINE_BYTES * 2, LOAD);
1303 }
1304
Christian Hoppsd570e532020-08-25 12:40:40 -04001305 /* save the sa_index as GRE_teb post_crypto changes L2 opaque */
1306 if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
1307 current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
1308
Neale Rannsf16e9a52021-02-25 19:09:24 +00001309 if (sync_next[0] >= ESP_DECRYPT_N_NEXT)
1310 esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], sync_next, is_ip6,
Fan Zhangf5395782020-04-29 14:00:03 +01001311 is_tun, 0);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001312
Fan Zhangf5395782020-04-29 14:00:03 +01001313 /* trace: */
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001314 if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
1315 {
1316 esp_decrypt_trace_t *tr;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001317 tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
Neale Rannsc5fe57d2021-02-25 16:01:28 +00001318 sa0 = ipsec_sa_get (current_sa_index);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001319 tr->crypto_alg = sa0->crypto_alg;
1320 tr->integ_alg = sa0->integ_alg;
Neale Ranns6afaae12019-07-17 15:07:14 +00001321 tr->seq = pd->seq;
Neale Ranns5b891102021-06-28 13:31:28 +00001322 tr->sa_seq = sa0->seq;
Neale Ranns6afaae12019-07-17 15:07:14 +00001323 tr->sa_seq_hi = sa0->seq_hi;
Neale Ranns5b891102021-06-28 13:31:28 +00001324 tr->pkt_seq_hi = pd->seq_hi;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001325 }
1326
1327 /* next */
1328 n_left -= 1;
Neale Rannsf16e9a52021-02-25 19:09:24 +00001329 sync_next += 1;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001330 pd += 1;
Fan Zhangf5395782020-04-29 14:00:03 +01001331 pd2 += 1;
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001332 b += 1;
1333 }
1334
Neale Rannsf16e9a52021-02-25 19:09:24 +00001335 vlib_node_increment_counter (vm, node->node_index, ESP_DECRYPT_ERROR_RX_PKTS,
1336 from_frame->n_vectors);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001337
Neale Rannsf16e9a52021-02-25 19:09:24 +00001338 if (n_sync)
1339 vlib_buffer_enqueue_to_next (vm, node, sync_bi, sync_nexts, n_sync);
Damjan Marionb4fff3a2019-03-25 15:54:40 +01001340
Neale Rannsf16e9a52021-02-25 19:09:24 +00001341 if (n_noop)
1342 vlib_buffer_enqueue_to_next (vm, node, noop_bi, noop_nexts, n_noop);
1343
1344 return (from_frame->n_vectors);
Ed Warnickecb9cada2015-12-08 15:45:58 -07001345}
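
/* Buffers leave esp_decrypt_inline() on one of three paths: "noop"
 * (drops and handoffs, enqueued immediately), "sync" (decrypted inline
 * and post-processed above), or async (re-injected into the *-post
 * nodes by the crypto engine once its frame completes). */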

always_inline uword
esp_decrypt_post_inline (vlib_main_t * vm,
                         vlib_node_runtime_t * node,
                         vlib_frame_t * from_frame, int is_ip6, int is_tun)
{
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  vlib_get_buffers (vm, from, b, n_left);

  while (n_left > 0)
    {
      esp_decrypt_packet_data_t *pd = &(esp_post_data (b[0]))->decrypt_data;

      if (n_left > 2)
        {
          vlib_prefetch_buffer_header (b[2], LOAD);
          vlib_prefetch_buffer_header (b[1], LOAD);
        }

      if (!pd->is_chain)
        esp_decrypt_post_crypto (vm, node, pd, 0, b[0], next, is_ip6, is_tun,
                                 1);
      else
        {
          esp_decrypt_packet_data2_t *pd2 = esp_post_data2 (b[0]);
          esp_decrypt_post_crypto (vm, node, pd, pd2, b[0], next, is_ip6,
                                   is_tun, 1);
        }

      /* trace: */
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          ipsec_sa_t *sa0 = ipsec_sa_get (pd->sa_index);
          esp_decrypt_trace_t *tr;
          esp_decrypt_packet_data_t *async_pd =
            &(esp_post_data (b[0]))->decrypt_data;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = ipsec_sa_get (async_pd->sa_index);

          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      n_left--;
      next++;
      b++;
    }

  n_left = from_frame->n_vectors;
  vlib_node_increment_counter (vm, node->node_index,
                               ESP_DECRYPT_ERROR_RX_POST_PKTS, n_left);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);

  return n_left;
}
1408
VLIB_NODE_FN (esp4_decrypt_node) (vlib_main_t * vm,
				  vlib_node_runtime_t * node,
				  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 0,
			     esp_decrypt_async_next.esp4_post_next);
}

VLIB_NODE_FN (esp4_decrypt_post_node) (vlib_main_t * vm,
				       vlib_node_runtime_t * node,
				       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 0);
}

VLIB_NODE_FN (esp4_decrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 1,
			     esp_decrypt_async_next.esp4_tun_post_next);
}

VLIB_NODE_FN (esp4_decrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 0, 1);
}

VLIB_NODE_FN (esp6_decrypt_node) (vlib_main_t * vm,
				  vlib_node_runtime_t * node,
				  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 0,
			     esp_decrypt_async_next.esp6_post_next);
}

VLIB_NODE_FN (esp6_decrypt_post_node) (vlib_main_t * vm,
				       vlib_node_runtime_t * node,
				       vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 0);
}

VLIB_NODE_FN (esp6_decrypt_tun_node) (vlib_main_t * vm,
				      vlib_node_runtime_t * node,
				      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 1,
			     esp_decrypt_async_next.esp6_tun_post_next);
}

VLIB_NODE_FN (esp6_decrypt_tun_post_node) (vlib_main_t * vm,
					   vlib_node_runtime_t * node,
					   vlib_frame_t * from_frame)
{
  return esp_decrypt_post_inline (vm, node, from_frame, 1, 1);
}

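/*
 * Node registrations.  The non-tunnel nodes send drops to the
 * address-family specific drop node and do not accept MPLS payloads
 * (MPLS_INPUT maps onto mpls-drop); the tunnel variants further down
 * forward MPLS to mpls-input, i.e. MPLS-over-ESP is only accepted on
 * the tunnel paths.
 */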
/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_decrypt_node) = {
  .name = "esp4-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
  },
};

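/*
 * The "-post" nodes are registered as siblings of their base node, so
 * they share the same next-node arcs without re-declaring them.
 */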
VLIB_REGISTER_NODE (esp4_decrypt_post_node) = {
  .name = "esp4-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt",
};

VLIB_REGISTER_NODE (esp6_decrypt_node) = {
  .name = "esp6-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-drop",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_post_node) = {
  .name = "esp6-decrypt-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt",
};

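/*
 * Tunnel-interface variants: same arc layout, but MPLS goes to
 * mpls-input and hand-off goes to the tun-specific handoff nodes.
 */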
VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
  .name = "esp4-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_post_node) = {
  .name = "esp4-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp4-decrypt-tun",
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
  .name = "esp6-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_MPLS_INPUT] = "mpls-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_post_node) = {
  .name = "esp6-decrypt-tun-post",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .sibling_of = "esp6-decrypt-tun",
};
/* *INDENT-ON* */

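/*
 * Allocate a frame queue per decrypt node for worker hand-off; the
 * indices are stored in ipsec_main so the hand-off path can find them.
 */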
#ifndef CLIB_MARCH_VARIANT

static clib_error_t *
esp_decrypt_init (vlib_main_t *vm)
{
  ipsec_main_t *im = &ipsec_main;

  im->esp4_dec_fq_index =
    vlib_frame_queue_main_init (esp4_decrypt_node.index, 0);
  im->esp6_dec_fq_index =
    vlib_frame_queue_main_init (esp6_decrypt_node.index, 0);
  im->esp4_dec_tun_fq_index =
    vlib_frame_queue_main_init (esp4_decrypt_tun_node.index, 0);
  im->esp6_dec_tun_fq_index =
    vlib_frame_queue_main_init (esp6_decrypt_tun_node.index, 0);

  return 0;
}

VLIB_INIT_FUNCTION (esp_decrypt_init);

#endif

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */