/*
 * esp_decrypt.c : IPSec ESP decrypt node
 *
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/vnet.h>
#include <vnet/api_errno.h>
#include <vnet/ip/ip.h>
#include <vnet/l2/l2_input.h>

#include <vnet/ipsec/ipsec.h>
#include <vnet/ipsec/esp.h>
#include <vnet/ipsec/ipsec_io.h>
#include <vnet/ipsec/ipsec_tun.h>

#include <vnet/gre/gre.h>

#define foreach_esp_decrypt_next                \
_(DROP, "error-drop")                           \
_(IP4_INPUT, "ip4-input-no-checksum")           \
_(IP6_INPUT, "ip6-input")                       \
_(L2_INPUT, "l2-input")                         \
_(HANDOFF, "handoff")

#define _(v, s) ESP_DECRYPT_NEXT_##v,
typedef enum
{
  foreach_esp_decrypt_next
#undef _
    ESP_DECRYPT_N_NEXT,
} esp_decrypt_next_t;

#define foreach_esp_decrypt_error                               \
 _(RX_PKTS, "ESP pkts received")                                \
 _(DECRYPTION_FAILED, "ESP decryption failed")                  \
 _(INTEG_ERROR, "Integrity check failed")                       \
 _(CRYPTO_ENGINE_ERROR, "crypto engine error (packet dropped)") \
 _(REPLAY, "SA replayed packet")                                \
 _(RUNT, "undersized packet")                                   \
 _(NO_BUFFERS, "no buffers (packet dropped)")                   \
 _(OVERSIZED_HEADER, "buffer with oversized header (dropped)")  \
 _(NO_TAIL_SPACE, "not enough buffer tail space (dropped)")     \
 _(TUN_NO_PROTO, "no tunnel protocol")                          \
 _(UNSUP_PAYLOAD, "unsupported payload")

typedef enum
{
#define _(sym,str) ESP_DECRYPT_ERROR_##sym,
  foreach_esp_decrypt_error
#undef _
    ESP_DECRYPT_N_ERROR,
} esp_decrypt_error_t;

static char *esp_decrypt_error_strings[] = {
#define _(sym,string) string,
  foreach_esp_decrypt_error
#undef _
};

typedef struct
{
  u32 seq;
  u32 sa_seq;
  u32 sa_seq_hi;
  ipsec_crypto_alg_t crypto_alg;
  ipsec_integ_alg_t integ_alg;
} esp_decrypt_trace_t;

/* packet trace format function */
static u8 *
format_esp_decrypt_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  esp_decrypt_trace_t *t = va_arg (*args, esp_decrypt_trace_t *);

  s =
    format (s,
            "esp: crypto %U integrity %U pkt-seq %d sa-seq %u sa-seq-hi %u",
            format_ipsec_crypto_alg, t->crypto_alg, format_ipsec_integ_alg,
            t->integ_alg, t->seq, t->sa_seq, t->sa_seq_hi);
  return s;
}

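/* Per-packet state captured in the first pass and consumed in the
 * post-decrypt pass; kept at 32 bytes (see the static assert below),
 * presumably so it stays cheap to copy and prefetch between passes. */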
typedef struct
{
  vlib_buffer_t *lb;
  union
  {
    struct
    {
      u8 icv_sz;
      u8 iv_sz;
      ipsec_sa_flags_t flags;
      u32 sa_index;
    };
    u64 sa_data;
  };

  u32 seq;
  u32 free_buffer_index;
  i16 current_data;
  i16 current_length;
  u16 hdr_sz;
  u8 icv_removed;
  u8 __unused;
} esp_decrypt_packet_data_t;

STATIC_ASSERT_SIZEOF (esp_decrypt_packet_data_t, 4 * sizeof (u64));

#define ESP_ENCRYPT_PD_F_FD_TRANSPORT (1 << 2)

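/* Process a vector of non-chained crypto ops synchronously; any buffer
 * whose op did not complete gets the caller-supplied error 'e' on a bad
 * HMAC (or a crypto-engine error otherwise) and is rerouted to drop. */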
static_always_inline void
esp_process_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                 vnet_crypto_op_t * ops, vlib_buffer_t * b[], u16 * nexts,
                 int e)
{
  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_ops (vm, op, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

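/* Same as esp_process_ops (), but for ops whose data is described by
 * chained-buffer chunks. */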
static_always_inline void
esp_process_chained_ops (vlib_main_t * vm, vlib_node_runtime_t * node,
                         vnet_crypto_op_t * ops, vlib_buffer_t * b[],
                         u16 * nexts, vnet_crypto_op_chunk_t * chunks, int e)
{

  vnet_crypto_op_t *op = ops;
  u32 n_fail, n_ops = vec_len (ops);

  if (n_ops == 0)
    return;

  n_fail = n_ops - vnet_crypto_process_chained_ops (vm, op, chunks, n_ops);

  while (n_fail)
    {
      ASSERT (op - ops < n_ops);
      if (op->status != VNET_CRYPTO_OP_STATUS_COMPLETED)
        {
          u32 err, bi = op->user_data;
          if (op->status == VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC)
            err = e;
          else
            err = ESP_DECRYPT_ERROR_CRYPTO_ENGINE_ERROR;
          b[bi]->error = node->errors[err];
          nexts[bi] = ESP_DECRYPT_NEXT_DROP;
          n_fail--;
        }
      op++;
    }
}

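/* Trim 'tail' bytes (pad, footer and ICV) from the end of a buffer
 * chain; when the tail covers the whole last buffer, the remainder is
 * trimmed from the previous buffer and the last buffer is freed. */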
always_inline void
esp_remove_tail (vlib_main_t * vm, vlib_buffer_t * b, vlib_buffer_t * last,
                 u16 tail)
{
  vlib_buffer_t *before_last = b;

  if (last->current_length > tail)
    {
      last->current_length -= tail;
      return;
    }
  ASSERT (b->flags & VLIB_BUFFER_NEXT_PRESENT);

  while (b->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = b;
      b = vlib_get_buffer (vm, b->next_buffer);
    }
  before_last->current_length -= tail - last->current_length;
  vlib_buffer_free_one (vm, before_last->next_buffer);
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
}

/* The ICV is split across the last two buffers, so move it to the last
   buffer and return a pointer to it */
static_always_inline u8 *
esp_move_icv (vlib_main_t * vm, vlib_buffer_t * first,
              esp_decrypt_packet_data_t * pd, u16 icv_sz, u16 * dif)
{
  vlib_buffer_t *before_last, *bp;
  u16 last_sz = pd->lb->current_length;
  u16 first_sz = icv_sz - last_sz;

  bp = before_last = first;
  while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
    {
      before_last = bp;
      bp = vlib_get_buffer (vm, bp->next_buffer);
    }

  u8 *lb_curr = vlib_buffer_get_current (pd->lb);
  memmove (lb_curr + first_sz, lb_curr, last_sz);
  clib_memcpy_fast (lb_curr, vlib_buffer_get_tail (before_last) - first_sz,
                    first_sz);
  before_last->current_length -= first_sz;
  if (dif)
    dif[0] = first_sz;
  pd->lb = before_last;
  pd->icv_removed = 1;
  pd->free_buffer_index = before_last->next_buffer;
  before_last->flags &= ~VLIB_BUFFER_NEXT_PRESENT;
  return lb_curr;
}

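/* For SAs using extended sequence numbers, append the high 32 bits of
 * the sequence number after the payload so they are covered by the
 * integrity check; returns 0 only when the ICV was already moved and
 * the last buffer has no tail space left. */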
static_always_inline int
esp_insert_esn (vlib_main_t * vm, ipsec_sa_t * sa,
                esp_decrypt_packet_data_t * pd, vnet_crypto_op_t * op,
                u16 * len, vlib_buffer_t * b, u8 * payload)
{
  if (!ipsec_sa_is_set_USE_ESN (sa))
    return 1;

  /* shift ICV by 4 bytes to insert ESN */
  u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
  u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa->seq_hi);

  if (pd->icv_removed)
    {
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd->lb);
      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd->lb), &seq_hi, sz);
          op->len += sz;
        }
      else
        return 0;

      len[0] = b->current_length;
    }
  else
    {
      clib_memcpy_fast (tmp, payload + len[0], ESP_MAX_ICV_SIZE);
      clib_memcpy_fast (payload + len[0], &seq_hi, sz);
      clib_memcpy_fast (payload + len[0] + sz, tmp, ESP_MAX_ICV_SIZE);
      op->len += sz;
      op->digest += sz;
    }
  return 1;
}

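/* Move a split ICV to the last buffer (see esp_move_icv ()) and, for
 * ESN SAs, place the high sequence-number bits where the integrity
 * check expects them; 'extra_esn' is set when they had to be stashed in
 * the pre-data of the unlinked ICV buffer. */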
static_always_inline u8 *
esp_move_icv_esn (vlib_main_t * vm, vlib_buffer_t * first,
                  esp_decrypt_packet_data_t * pd, u16 icv_sz, ipsec_sa_t * sa,
                  u8 * extra_esn, vnet_crypto_op_t * op)
{
  u16 dif = 0;
  u8 *digest = esp_move_icv (vm, first, pd, icv_sz, &dif);
  if (dif)
    op->len -= dif;

  if (ipsec_sa_is_set_USE_ESN (sa))
    {
      u8 sz = sizeof (sa->seq_hi);
      u32 seq_hi = clib_host_to_net_u32 (sa->seq_hi);
      u16 space_left = vlib_buffer_space_left_at_end (vm, pd->lb);

      if (space_left >= sz)
        {
          clib_memcpy_fast (vlib_buffer_get_tail (pd->lb), &seq_hi, sz);
          op->len += sz;
        }
      else
        {
          /* no space for ESN at the tail, use the next buffer
           * (with ICV data) */
          ASSERT (pd->icv_removed);
          vlib_buffer_t *tmp = vlib_get_buffer (vm, pd->free_buffer_index);
          clib_memcpy_fast (vlib_buffer_get_current (tmp) - sz, &seq_hi, sz);
          extra_esn[0] = 1;
        }
    }
  return digest;
}

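/* Two-pass decrypt: pass 1 runs the anti-replay check and builds
 * integrity and decrypt crypto ops (chained variants for multi-buffer
 * packets), the ops are then processed in bulk, and pass 2 advances the
 * replay window, strips the ESP encap and selects the next node from
 * the inner protocol. */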
always_inline uword
esp_decrypt_inline (vlib_main_t * vm,
                    vlib_node_runtime_t * node, vlib_frame_t * from_frame,
                    int is_ip6, int is_tun)
{
  ipsec_main_t *im = &ipsec_main;
  u32 thread_index = vm->thread_index;
  u16 len;
  ipsec_per_thread_data_t *ptd = vec_elt_at_index (im->ptd, thread_index);
  u32 *from = vlib_frame_vector_args (from_frame);
  u32 n_left = from_frame->n_vectors;
  vlib_buffer_t *bufs[VLIB_FRAME_SIZE], **b = bufs;
  u16 nexts[VLIB_FRAME_SIZE], *next = nexts;
  esp_decrypt_packet_data_t pkt_data[VLIB_FRAME_SIZE], *pd = pkt_data;
  esp_decrypt_packet_data_t cpd = { };
  u32 current_sa_index = ~0, current_sa_bytes = 0, current_sa_pkts = 0;
  const u8 esp_sz = sizeof (esp_header_t);
  ipsec_sa_t *sa0 = 0;
  vnet_crypto_op_t _op, *op = &_op;
  vnet_crypto_op_chunk_t *ch;
  vnet_crypto_op_t **crypto_ops = &ptd->crypto_ops;
  vnet_crypto_op_t **integ_ops = &ptd->integ_ops;

  vlib_get_buffers (vm, from, b, n_left);
  vec_reset_length (ptd->crypto_ops);
  vec_reset_length (ptd->integ_ops);
  vec_reset_length (ptd->chained_crypto_ops);
  vec_reset_length (ptd->chained_integ_ops);
  vec_reset_length (ptd->chunks);
  clib_memset_u16 (nexts, -1, n_left);

  while (n_left > 0)
    {
      u8 *payload;

      if (n_left > 2)
        {
          u8 *p;
          vlib_prefetch_buffer_header (b[2], LOAD);
          p = vlib_buffer_get_current (b[1]);
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
          p -= CLIB_CACHE_LINE_BYTES;
          CLIB_PREFETCH (p, CLIB_CACHE_LINE_BYTES, LOAD);
        }

      u32 n_bufs = vlib_buffer_chain_linearize (vm, b[0]);
      if (n_bufs == 0)
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_NO_BUFFERS];
          next[0] = ESP_DECRYPT_NEXT_DROP;
          goto next;
        }

      if (vnet_buffer (b[0])->ipsec.sad_index != current_sa_index)
        {
          if (current_sa_pkts)
            vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                             current_sa_index,
                                             current_sa_pkts,
                                             current_sa_bytes);
          current_sa_bytes = current_sa_pkts = 0;

          current_sa_index = vnet_buffer (b[0])->ipsec.sad_index;
          sa0 = pool_elt_at_index (im->sad, current_sa_index);
          cpd.icv_sz = sa0->integ_icv_size;
          cpd.iv_sz = sa0->crypto_iv_size;
          cpd.flags = sa0->flags;
          cpd.sa_index = current_sa_index;
        }

      if (PREDICT_FALSE (~0 == sa0->decrypt_thread_index))
        {
          /* this is the first packet to use this SA, claim the SA
           * for this thread. this could happen simultaneously on
           * another thread */
          clib_atomic_cmp_and_swap (&sa0->decrypt_thread_index, ~0,
                                    ipsec_sa_assign_thread (thread_index));
        }

      if (PREDICT_TRUE (thread_index != sa0->decrypt_thread_index))
        {
          next[0] = ESP_DECRYPT_NEXT_HANDOFF;
          goto next;
        }

      /* store packet data for next round for easier prefetch */
      pd->sa_data = cpd.sa_data;
      pd->current_data = b[0]->current_data;
      pd->hdr_sz = pd->current_data - vnet_buffer (b[0])->l3_hdr_offset;
      payload = b[0]->data + pd->current_data;
      pd->seq = clib_host_to_net_u32 (((esp_header_t *) payload)->seq);
      pd->free_buffer_index = 0;
      pd->icv_removed = 0;

      pd->lb = b[0];
      if (n_bufs > 1)
        {
          /* find last buffer in the chain */
          while (pd->lb->flags & VLIB_BUFFER_NEXT_PRESENT)
            pd->lb = vlib_get_buffer (vm, pd->lb->next_buffer);

          crypto_ops = &ptd->chained_crypto_ops;
          integ_ops = &ptd->chained_integ_ops;
        }
      pd->current_length = b[0]->current_length;

      /* anti-replay check */
      if (ipsec_sa_anti_replay_check (sa0, pd->seq))
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
          next[0] = ESP_DECRYPT_NEXT_DROP;
          goto next;
        }

      if (pd->current_length < cpd.icv_sz + esp_sz + cpd.iv_sz)
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_RUNT];
          next[0] = ESP_DECRYPT_NEXT_DROP;
          goto next;
        }

      len = pd->current_length - cpd.icv_sz;
      current_sa_pkts += 1;
      current_sa_bytes += vlib_buffer_length_in_chain (vm, b[0]);

      if (PREDICT_TRUE (sa0->integ_op_id != VNET_CRYPTO_OP_NONE))
        {
          vnet_crypto_op_init (op, sa0->integ_op_id);
          op->key_index = sa0->integ_key_index;
          op->src = payload;
          op->flags = VNET_CRYPTO_OP_FLAG_HMAC_CHECK;
          op->user_data = b - bufs;
          op->digest = payload + len;
          op->digest_len = cpd.icv_sz;
          op->len = len;

          if (pd->lb != b[0])
            {
              /* buffer is chained */
              vlib_buffer_t *cb = b[0];
              op->len = pd->current_length;

              /* special case when the ICV is split and needs to be
               * reassembled first -> move it to the last buffer. Also take
               * into account that ESN needs to be added after the encrypted
               * data and may or may not fit in the tail. */
              if (pd->lb->current_length < cpd.icv_sz)
                {
                  u8 extra_esn = 0;
                  op->digest =
                    esp_move_icv_esn (vm, b[0], pd, cpd.icv_sz, sa0,
                                      &extra_esn, op);

                  if (extra_esn)
                    {
                      /* esn is in the last buffer, that was unlinked from
                       * the chain */
                      op->len = b[0]->current_length;
                    }
                  else
                    {
                      if (pd->lb == b[0])
                        {
                          /* we now have a single buffer of crypto data,
                           * adjust the length (second buffer contains only
                           * ICV) */
                          integ_ops = &ptd->integ_ops;
                          crypto_ops = &ptd->crypto_ops;
                          len = b[0]->current_length;
                          goto out;
                        }
                    }
                }
              else
                op->digest = vlib_buffer_get_tail (pd->lb) - cpd.icv_sz;

              op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
              op->chunk_index = vec_len (ptd->chunks);
              vec_add2 (ptd->chunks, ch, 1);
              ch->len = pd->current_length;
              ch->src = payload;
              cb = vlib_get_buffer (vm, cb->next_buffer);
              op->n_chunks = 1;
              while (1)
                {
                  vec_add2 (ptd->chunks, ch, 1);
                  op->n_chunks += 1;
                  ch->src = vlib_buffer_get_current (cb);
                  if (pd->lb == cb)
                    {
                      if (pd->icv_removed)
                        ch->len = cb->current_length;
                      else
                        ch->len = cb->current_length - cpd.icv_sz;
                      if (ipsec_sa_is_set_USE_ESN (sa0))
                        {
                          u32 seq_hi = clib_host_to_net_u32 (sa0->seq_hi);
                          u8 tmp[ESP_MAX_ICV_SIZE], sz = sizeof (sa0->seq_hi);
                          u8 *esn;
                          vlib_buffer_t *tmp_b;
                          u16 space_left = vlib_buffer_space_left_at_end
                            (vm, pd->lb);
                          if (space_left < sz)
                            {
                              if (pd->icv_removed)
                                {
                                  /* use the pre-data area of the last buffer
                                     that was removed from the chain */
                                  tmp_b =
                                    vlib_get_buffer (vm,
                                                     pd->free_buffer_index);
                                  esn = tmp_b->data - sz;
                                }
                              else
                                {
                                  /* no space, need to allocate new buffer */
                                  u32 tmp_bi = 0;
                                  vlib_buffer_alloc (vm, &tmp_bi, 1);
                                  tmp_b = vlib_get_buffer (vm, tmp_bi);
                                  esn = tmp_b->data;
                                  pd->free_buffer_index = tmp_bi;
                                }
                              clib_memcpy_fast (esn, &seq_hi, sz);

                              vec_add2 (ptd->chunks, ch, 1);
                              op->n_chunks += 1;
                              ch->src = esn;
                              ch->len = sz;
                            }
                          else
                            {
                              if (pd->icv_removed)
                                {
                                  clib_memcpy_fast (vlib_buffer_get_tail
                                                    (pd->lb), &seq_hi, sz);
                                }
                              else
                                {
                                  clib_memcpy_fast (tmp, op->digest,
                                                    ESP_MAX_ICV_SIZE);
                                  clib_memcpy_fast (op->digest, &seq_hi, sz);
                                  clib_memcpy_fast (op->digest + sz, tmp,
                                                    ESP_MAX_ICV_SIZE);
                                  op->digest += sz;
                                }
                              ch->len += sz;
                            }
                        }
                      break;
                    }
                  else
                    ch->len = cb->current_length;

                  if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
                    break;

                  cb = vlib_get_buffer (vm, cb->next_buffer);
                }
            }
          else
            esp_insert_esn (vm, sa0, pd, op, &len, b[0], payload);
        out:
          vec_add_aligned (integ_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
        }

      payload += esp_sz;
      len -= esp_sz;

      if (sa0->crypto_dec_op_id != VNET_CRYPTO_OP_NONE)
        {
          vnet_crypto_op_init (op, sa0->crypto_dec_op_id);
          op->key_index = sa0->crypto_key_index;
          op->iv = payload;

          if (ipsec_sa_is_set_IS_AEAD (sa0))
            {
              esp_header_t *esp0;
              esp_aead_t *aad;
              u8 *scratch;

              /*
               * construct the AAD and the nonce (Salt || IV) in a scratch
               * space in front of the IP header.
               */
              scratch = payload - esp_sz;
              esp0 = (esp_header_t *) (scratch);

              scratch -= (sizeof (*aad) + pd->hdr_sz);
              op->aad = scratch;

              esp_aad_fill (op, esp0, sa0);

              /*
               * we don't need to refer to the ESP header anymore so we
               * can overwrite it with the salt and use the IV where it is
               * to form the nonce = (Salt + IV)
               */
              op->iv -= sizeof (sa0->salt);
              clib_memcpy_fast (op->iv, &sa0->salt, sizeof (sa0->salt));

              op->tag = payload + len;
              op->tag_len = 16;
            }
          op->src = op->dst = payload += cpd.iv_sz;
          op->len = len - cpd.iv_sz;
          op->user_data = b - bufs;

          if (pd->lb != b[0])
            {
              /* buffer is chained */
              vlib_buffer_t *cb = b[0];
              op->flags |= VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS;
              op->chunk_index = vec_len (ptd->chunks);
              vec_add2 (ptd->chunks, ch, 1);
              ch->len = len - cpd.iv_sz + cpd.icv_sz;
              ch->src = ch->dst = payload;
              cb = vlib_get_buffer (vm, cb->next_buffer);
              op->n_chunks = 1;

              while (1)
                {
                  vec_add2 (ptd->chunks, ch, 1);
                  op->n_chunks += 1;
                  ch->src = ch->dst = vlib_buffer_get_current (cb);
                  if (pd->lb == cb)
                    {
                      if (ipsec_sa_is_set_IS_AEAD (sa0))
                        {
                          if (pd->lb->current_length < cpd.icv_sz)
                            {
                              u16 dif = 0;
                              op->tag =
                                esp_move_icv (vm, b[0], pd, cpd.icv_sz, &dif);

                              /* this chunk does not contain crypto data */
                              op->n_chunks -= 1;

                              /* and fix previous chunk's length as it might
                                 have been changed */
                              ASSERT (op->n_chunks > 0);
                              if (pd->lb == b[0])
                                ch[-1].len -= dif;
                              else
                                ch[-1].len = pd->lb->current_length;
                              break;
                            }
                          else
                            op->tag =
                              vlib_buffer_get_tail (pd->lb) - cpd.icv_sz;
                        }

                      if (pd->icv_removed)
                        ch->len = cb->current_length;
                      else
                        ch->len = cb->current_length - cpd.icv_sz;
                    }
                  else
                    ch->len = cb->current_length;

                  if (!(cb->flags & VLIB_BUFFER_NEXT_PRESENT))
                    break;

                  cb = vlib_get_buffer (vm, cb->next_buffer);
                }
            }

          vec_add_aligned (crypto_ops[0], op, 1, CLIB_CACHE_LINE_BYTES);
        }

      /* next */
    next:
      n_left -= 1;
      next += 1;
      pd += 1;
      b += 1;
    }

  if (PREDICT_TRUE (~0 != current_sa_index))
    vlib_increment_combined_counter (&ipsec_sa_counters, thread_index,
                                     current_sa_index, current_sa_pkts,
                                     current_sa_bytes);

  esp_process_ops (vm, node, ptd->integ_ops, bufs, nexts,
                   ESP_DECRYPT_ERROR_INTEG_ERROR);
  esp_process_chained_ops (vm, node, ptd->chained_integ_ops, bufs, nexts,
                           ptd->chunks, ESP_DECRYPT_ERROR_INTEG_ERROR);

  esp_process_ops (vm, node, ptd->crypto_ops, bufs, nexts,
                   ESP_DECRYPT_ERROR_DECRYPTION_FAILED);
  esp_process_chained_ops (vm, node, ptd->chained_crypto_ops, bufs, nexts,
                           ptd->chunks, ESP_DECRYPT_ERROR_DECRYPTION_FAILED);

  /* Post decryption round - adjust packet data start and length and next
     node */

  n_left = from_frame->n_vectors;
  next = nexts;
  pd = pkt_data;
  b = bufs;

  while (n_left)
    {
      const u8 tun_flags = IPSEC_SA_FLAG_IS_TUNNEL |
        IPSEC_SA_FLAG_IS_TUNNEL_V6;

      if (n_left >= 2)
        {
          void *data = b[1]->data + pd[1].current_data;

          /* buffer metadata */
          vlib_prefetch_buffer_header (b[1], LOAD);

          /* esp_footer_t */
          CLIB_PREFETCH (data + pd[1].current_length - pd[1].icv_sz - 2,
                         CLIB_CACHE_LINE_BYTES, LOAD);

          /* packet headers */
          CLIB_PREFETCH (data - CLIB_CACHE_LINE_BYTES,
                         CLIB_CACHE_LINE_BYTES * 2, LOAD);
        }

      if (next[0] < ESP_DECRYPT_N_NEXT)
        goto trace;

      sa0 = vec_elt_at_index (im->sad, pd->sa_index);

      /*
       * redo the anti-replay check
       * in this frame say we have sequence numbers, s, s+1, s+1, s+1
       * and s and s+1 are in the window. When we did the anti-replay
       * check above we did so against the state of the window (W),
       * after packet s-1. So each of the packets in the sequence will be
       * accepted.
       * This time s will be checked against Ws-1, s+1 checked against Ws
       * (i.e. the window state is updated/advanced)
       * so this time the successive s+1 packet will be dropped.
       * This is a consequence of batching the decrypts. If the
       * check-decrypt-advance process was done for each packet it would
       * be fine. But we batch the decrypts because it's much more efficient
       * to do so in SW and if we offload to HW and the process is async.
       *
       * You're probably thinking, but this means an attacker can send the
       * above sequence and cause VPP to perform decrypts that will fail,
       * and that's true. But if the attacker can determine s (a valid
       * sequence number in the window) which is non-trivial, it can generate
       * a sequence s, s+1, s+2, s+3, ... s+n and nothing will prevent any
       * implementation, sequential or batching, from decrypting these.
       */
      if (ipsec_sa_anti_replay_check (sa0, pd->seq))
        {
          b[0]->error = node->errors[ESP_DECRYPT_ERROR_REPLAY];
          next[0] = ESP_DECRYPT_NEXT_DROP;
          goto trace;
        }

      ipsec_sa_anti_replay_advance (sa0, pd->seq);

      u8 pad_length = 0, next_header = 0;
      u16 icv_sz = pd->icv_removed ? 0 : pd->icv_sz;

      if (pd->free_buffer_index)
        vlib_buffer_free_one (vm, pd->free_buffer_index);

      if (pd->lb->current_length < sizeof (esp_footer_t) + icv_sz)
        {
          /* the esp footer is either split across two buffers or is
           * entirely in the second-to-last buffer */

          vlib_buffer_t *before_last = b[0], *bp = b[0];
          while (bp->flags & VLIB_BUFFER_NEXT_PRESENT)
            {
              before_last = bp;
              bp = vlib_get_buffer (vm, bp->next_buffer);
            }
          u8 *bt = vlib_buffer_get_tail (before_last);

          if (pd->lb->current_length == icv_sz)
            {
              esp_footer_t *f = (esp_footer_t *) (bt - sizeof (*f));
              pad_length = f->pad_length;
              next_header = f->next_header;
            }
          else
            {
              pad_length = (bt - 1)[0];
              next_header = ((u8 *) vlib_buffer_get_current (pd->lb))[0];
            }
        }
      else
        {
          esp_footer_t *f =
            (esp_footer_t *) (pd->lb->data + pd->lb->current_data +
                              pd->lb->current_length - sizeof (esp_footer_t) -
                              icv_sz);
          pad_length = f->pad_length;
          next_header = f->next_header;
        }

      u16 adv = pd->iv_sz + esp_sz;
      u16 tail = sizeof (esp_footer_t) + pad_length + icv_sz;
      u16 tail_orig = sizeof (esp_footer_t) + pad_length + pd->icv_sz;
      b[0]->flags &= ~VLIB_BUFFER_TOTAL_LENGTH_VALID;

      if ((pd->flags & tun_flags) == 0 && !is_tun)	/* transport mode */
        {
          u8 udp_sz = (is_ip6 == 0 && pd->flags & IPSEC_SA_FLAG_UDP_ENCAP) ?
            sizeof (udp_header_t) : 0;
          u16 ip_hdr_sz = pd->hdr_sz - udp_sz;
          u8 *old_ip = b[0]->data + pd->current_data - ip_hdr_sz - udp_sz;
          u8 *ip = old_ip + adv + udp_sz;

          if (is_ip6 && ip_hdr_sz > 64)
            memmove (ip, old_ip, ip_hdr_sz);
          else
            clib_memcpy_le64 (ip, old_ip, ip_hdr_sz);

          b[0]->current_data = pd->current_data + adv - ip_hdr_sz;
          b[0]->current_length += ip_hdr_sz - adv;
          esp_remove_tail (vm, b[0], pd->lb, tail);

          if (is_ip6)
            {
              ip6_header_t *ip6 = (ip6_header_t *) ip;
              u16 len = clib_net_to_host_u16 (ip6->payload_length);
              len -= adv + tail_orig;
              ip6->payload_length = clib_host_to_net_u16 (len);
              ip6->protocol = next_header;
              next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
            }
          else
            {
              ip4_header_t *ip4 = (ip4_header_t *) ip;
              ip_csum_t sum = ip4->checksum;
              u16 len = clib_net_to_host_u16 (ip4->length);
              len = clib_host_to_net_u16 (len - adv - tail_orig - udp_sz);
              sum = ip_csum_update (sum, ip4->protocol, next_header,
                                    ip4_header_t, protocol);
              sum = ip_csum_update (sum, ip4->length, len,
                                    ip4_header_t, length);
              ip4->checksum = ip_csum_fold (sum);
              ip4->protocol = next_header;
              ip4->length = len;
              next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
            }
        }
      else
        {
          if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
            {
              next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
              b[0]->current_data = pd->current_data + adv;
              b[0]->current_length = pd->current_length - adv;
              esp_remove_tail (vm, b[0], pd->lb, tail);
            }
          else if (next_header == IP_PROTOCOL_IPV6)
            {
              next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
              b[0]->current_data = pd->current_data + adv;
              b[0]->current_length = pd->current_length - adv;
              esp_remove_tail (vm, b[0], pd->lb, tail);
            }
          else
            {
              if (is_tun && next_header == IP_PROTOCOL_GRE)
                {
                  gre_header_t *gre;

                  b[0]->current_data = pd->current_data + adv;
                  b[0]->current_length = pd->current_length - adv - tail;

                  gre = vlib_buffer_get_current (b[0]);

                  vlib_buffer_advance (b[0], sizeof (*gre));

                  switch (clib_net_to_host_u16 (gre->protocol))
                    {
                    case GRE_PROTOCOL_teb:
                      vnet_update_l2_len (b[0]);
                      next[0] = ESP_DECRYPT_NEXT_L2_INPUT;
                      break;
                    case GRE_PROTOCOL_ip4:
                      next[0] = ESP_DECRYPT_NEXT_IP4_INPUT;
                      break;
                    case GRE_PROTOCOL_ip6:
                      next[0] = ESP_DECRYPT_NEXT_IP6_INPUT;
                      break;
                    default:
                      b[0]->error =
                        node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
                      next[0] = ESP_DECRYPT_NEXT_DROP;
                      break;
                    }
                }
              else
                {
                  next[0] = ESP_DECRYPT_NEXT_DROP;
                  b[0]->error = node->errors[ESP_DECRYPT_ERROR_UNSUP_PAYLOAD];
                  goto trace;
                }
            }
          if (is_tun)
            {
              if (ipsec_sa_is_set_IS_PROTECT (sa0))
                {
                  /*
                   * There are two encap possibilities
                   * 1) the tunnel and the SA are providing encap, i.e. it's
                   *   MAC | SA-IP | TUN-IP | ESP | PAYLOAD
                   * implying the SA is in tunnel mode (on a tunnel interface)
                   * 2) only the tunnel provides encap
                   *   MAC | TUN-IP | ESP | PAYLOAD
                   * implying the SA is in transport mode.
                   *
                   * For 2) we need only strip the tunnel encap and we're
                   * good, since the tunnel and crypto encap (in the
                   * tun-protect object) are the same and we verified above
                   * that these match.
                   * For 1) we need to strip the SA-IP outer headers to
                   * reveal the tunnel IP and then check that this matches
                   * the configured tunnel.
                   */
                  const ipsec_tun_protect_t *itp;

                  itp = ipsec_tun_protect_get
                    (vnet_buffer (b[0])->ipsec.protect_index);

                  if (PREDICT_TRUE (next_header == IP_PROTOCOL_IP_IN_IP))
                    {
                      const ip4_header_t *ip4;

                      ip4 = vlib_buffer_get_current (b[0]);

                      if (!ip46_address_is_equal_v4 (&itp->itp_tun.src,
                                                     &ip4->dst_address) ||
                          !ip46_address_is_equal_v4 (&itp->itp_tun.dst,
                                                     &ip4->src_address))
                        {
                          next[0] = ESP_DECRYPT_NEXT_DROP;
                          b[0]->error =
                            node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                        }
                    }
                  else if (next_header == IP_PROTOCOL_IPV6)
                    {
                      const ip6_header_t *ip6;

                      ip6 = vlib_buffer_get_current (b[0]);

                      if (!ip46_address_is_equal_v6 (&itp->itp_tun.src,
                                                     &ip6->dst_address) ||
                          !ip46_address_is_equal_v6 (&itp->itp_tun.dst,
                                                     &ip6->src_address))
                        {
                          next[0] = ESP_DECRYPT_NEXT_DROP;
                          b[0]->error =
                            node->errors[ESP_DECRYPT_ERROR_TUN_NO_PROTO];
                        }
                    }
                }
            }
        }

    trace:
      if (PREDICT_FALSE (b[0]->flags & VLIB_BUFFER_IS_TRACED))
        {
          esp_decrypt_trace_t *tr;
          tr = vlib_add_trace (vm, node, b[0], sizeof (*tr));
          sa0 = pool_elt_at_index (im->sad,
                                   vnet_buffer (b[0])->ipsec.sad_index);
          tr->crypto_alg = sa0->crypto_alg;
          tr->integ_alg = sa0->integ_alg;
          tr->seq = pd->seq;
          tr->sa_seq = sa0->last_seq;
          tr->sa_seq_hi = sa0->seq_hi;
        }

      /* next */
      n_left -= 1;
      next += 1;
      pd += 1;
      b += 1;
    }

  n_left = from_frame->n_vectors;
  vlib_node_increment_counter (vm, node->node_index,
                               ESP_DECRYPT_ERROR_RX_PKTS, n_left);

  vlib_buffer_enqueue_to_next (vm, node, from, nexts, n_left);

  return n_left;
}

VLIB_NODE_FN (esp4_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 0);
}

VLIB_NODE_FN (esp4_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 0, 1);
}

VLIB_NODE_FN (esp6_decrypt_node) (vlib_main_t * vm,
                                  vlib_node_runtime_t * node,
                                  vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 0);
}

VLIB_NODE_FN (esp6_decrypt_tun_node) (vlib_main_t * vm,
                                      vlib_node_runtime_t * node,
                                      vlib_frame_t * from_frame)
{
  return esp_decrypt_inline (vm, node, from_frame, 1, 1);
}

/* *INDENT-OFF* */
VLIB_REGISTER_NODE (esp4_decrypt_node) = {
  .name = "esp4-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_node) = {
  .name = "esp6-decrypt",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,

  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-handoff",
  },
};

VLIB_REGISTER_NODE (esp4_decrypt_tun_node) = {
  .name = "esp4-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip4-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp4-decrypt-tun-handoff",
  },
};

VLIB_REGISTER_NODE (esp6_decrypt_tun_node) = {
  .name = "esp6-decrypt-tun",
  .vector_size = sizeof (u32),
  .format_trace = format_esp_decrypt_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,
  .n_errors = ARRAY_LEN(esp_decrypt_error_strings),
  .error_strings = esp_decrypt_error_strings,
  .n_next_nodes = ESP_DECRYPT_N_NEXT,
  .next_nodes = {
    [ESP_DECRYPT_NEXT_DROP] = "ip6-drop",
    [ESP_DECRYPT_NEXT_IP4_INPUT] = "ip4-input-no-checksum",
    [ESP_DECRYPT_NEXT_IP6_INPUT] = "ip6-input",
    [ESP_DECRYPT_NEXT_L2_INPUT] = "l2-input",
    [ESP_DECRYPT_NEXT_HANDOFF] = "esp6-decrypt-tun-handoff",
  },
};
/* *INDENT-ON* */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */