Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 1 | /* |
| 2 | * ipsec_output.c : IPSec output node |
| 3 | * |
| 4 | * Copyright (c) 2015 Cisco and/or its affiliates. |
| 5 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 6 | * you may not use this file except in compliance with the License. |
| 7 | * You may obtain a copy of the License at: |
| 8 | * |
| 9 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 10 | * |
| 11 | * Unless required by applicable law or agreed to in writing, software |
| 12 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | * See the License for the specific language governing permissions and |
| 15 | * limitations under the License. |
| 16 | */ |
| 17 | |
| 18 | #include <vnet/vnet.h> |
| 19 | #include <vnet/api_errno.h> |
| 20 | #include <vnet/ip/ip.h> |
| 21 | |
| 22 | #include <vnet/ipsec/ipsec.h> |
Neale Ranns | 918c161 | 2019-02-21 23:34:59 -0800 | [diff] [blame] | 23 | #include <vnet/ipsec/ipsec_io.h> |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 24 | |
/* Error/counter table for the IPsec output nodes: each _() entry expands
   to one error enum value (IPSEC_OUTPUT_ERROR_<sym>) and its display
   string, via the X-macro pattern below. */
#define foreach_ipsec_output_error    \
 _(RX_PKTS, "IPSec pkts received")    \
 _(POLICY_DISCARD, "IPSec policy discard")    \
 _(POLICY_NO_MATCH, "IPSec policy (no match)")    \
 _(POLICY_PROTECT, "IPSec policy protect")    \
 _(POLICY_BYPASS, "IPSec policy bypass")    \
 _(ENCAPS_FAILED, "IPSec encapsulation failed")

typedef enum
{
#define _(sym,str) IPSEC_OUTPUT_ERROR_##sym,
  foreach_ipsec_output_error
#undef _
    /* NOTE(review): name says DECAP although this is the output (encap)
       path — kept as-is since external code may reference it. */
    IPSEC_DECAP_N_ERROR,
} ipsec_output_error_t;

/* Display strings indexed by ipsec_output_error_t. */
static char *ipsec_output_error_strings[] = {
#define _(sym,string) string,
  foreach_ipsec_output_error
#undef _
};
| 46 | |
/* Per-packet trace record captured by the output nodes. */
typedef struct
{
  u32 spd_id;			/* id of the SPD consulted for this packet */
  u32 policy_id;		/* index into im->policies; ~0 if no policy matched */
} ipsec_output_trace_t;
| 52 | |
| 53 | /* packet trace format function */ |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 54 | static u8 * |
| 55 | format_ipsec_output_trace (u8 * s, va_list * args) |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 56 | { |
| 57 | CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *); |
| 58 | CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *); |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 59 | ipsec_output_trace_t *t = va_arg (*args, ipsec_output_trace_t *); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 60 | |
Neale Ranns | a09c1ff | 2019-02-04 01:10:30 -0800 | [diff] [blame] | 61 | s = format (s, "spd %u policy %d", t->spd_id, t->policy_id); |
| 62 | |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 63 | return s; |
| 64 | } |
| 65 | |
/* Insert (or overwrite) the IPv4 outbound flow-cache entry for the given
   5-tuple.  Addresses and ports are expected in network byte order (the
   caller, ipsec_output_policy_match, converts with clib_host_to_net_*).
   The cached value packs the matched policy pool index in the upper 32
   bits and the current epoch count in the lower 32 bits; lookups treat a
   mismatched epoch as a stale entry.  Also maintains the count of active
   flow-cache entries. */
always_inline void
ipsec4_out_spd_add_flow_cache_entry (ipsec_main_t *im, u8 pr, u32 la, u32 ra,
				     u16 lp, u16 rp, u32 pol_id)
{
  u64 hash;
  u8 overwrite = 0, stale_overwrite = 0;
  ipsec4_spd_5tuple_t ip4_5tuple = { .ip4_addr = { (ip4_address_t) la,
						   (ip4_address_t) ra },
				     .port = { lp, rp },
				     .proto = pr };

  /* value = (policy index << 32) | epoch; the epoch lets lookups detect
     entries written before the last policy change. */
  ip4_5tuple.kv_16_8.value = (((u64) pol_id) << 32) | ((u64) im->epoch_count);

  hash = ipsec4_hash_16_8 (&ip4_5tuple.kv_16_8);
  /* NOTE(review): mask-based bucket selection assumes
     ipsec4_out_spd_hash_num_buckets is a power of two — confirm at init. */
  hash &= (im->ipsec4_out_spd_hash_num_buckets - 1);

  ipsec_spinlock_lock (&im->ipsec4_out_spd_hash_tbl[hash].bucket_lock);
  /* Check if we are overwriting an existing entry so we know
     whether to increment the flow cache counter. Since flow
     cache counter is reset on any policy add/remove, but
     hash table values are not, we also need to check if the entry
     we are overwriting is stale or not. If it's a stale entry
     overwrite, we still want to increment flow cache counter */
  overwrite = (im->ipsec4_out_spd_hash_tbl[hash].value != 0);
  /* Check for stale entry by comparing with current epoch count */
  if (PREDICT_FALSE (overwrite))
    stale_overwrite =
      (im->epoch_count !=
       ((u32) (im->ipsec4_out_spd_hash_tbl[hash].value & 0xFFFFFFFF)));
  /* Publish key+value together while holding the bucket lock. */
  clib_memcpy_fast (&im->ipsec4_out_spd_hash_tbl[hash], &ip4_5tuple.kv_16_8,
		    sizeof (ip4_5tuple.kv_16_8));
  ipsec_spinlock_unlock (&im->ipsec4_out_spd_hash_tbl[hash].bucket_lock);

  /* Increment the counter to track active flow cache entries
     when entering a fresh entry or overwriting a stale one */
  if (!overwrite || stale_overwrite)
    clib_atomic_fetch_add_relax (&im->ipsec4_out_spd_flow_cache_entries, 1);

  return;
}
| 106 | |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 107 | always_inline ipsec_policy_t * |
Govindarajan Mohandoss | 6d7dfcb | 2021-03-19 19:20:49 +0000 | [diff] [blame] | 108 | ipsec4_out_spd_find_flow_cache_entry (ipsec_main_t *im, u8 pr, u32 la, u32 ra, |
| 109 | u16 lp, u16 rp) |
| 110 | { |
| 111 | ipsec_policy_t *p = NULL; |
| 112 | ipsec4_hash_kv_16_8_t kv_result; |
| 113 | u64 hash; |
| 114 | |
| 115 | if (PREDICT_FALSE ((pr != IP_PROTOCOL_TCP) && (pr != IP_PROTOCOL_UDP) && |
| 116 | (pr != IP_PROTOCOL_SCTP))) |
| 117 | { |
| 118 | lp = 0; |
| 119 | rp = 0; |
| 120 | } |
| 121 | ipsec4_spd_5tuple_t ip4_5tuple = { .ip4_addr = { (ip4_address_t) la, |
| 122 | (ip4_address_t) ra }, |
| 123 | .port = { lp, rp }, |
| 124 | .proto = pr }; |
| 125 | |
| 126 | hash = ipsec4_hash_16_8 (&ip4_5tuple.kv_16_8); |
| 127 | hash &= (im->ipsec4_out_spd_hash_num_buckets - 1); |
| 128 | |
| 129 | ipsec_spinlock_lock (&im->ipsec4_out_spd_hash_tbl[hash].bucket_lock); |
| 130 | kv_result = im->ipsec4_out_spd_hash_tbl[hash]; |
| 131 | ipsec_spinlock_unlock (&im->ipsec4_out_spd_hash_tbl[hash].bucket_lock); |
| 132 | |
| 133 | if (ipsec4_hash_key_compare_16_8 ((u64 *) &ip4_5tuple.kv_16_8, |
| 134 | (u64 *) &kv_result)) |
| 135 | { |
| 136 | if (im->epoch_count == ((u32) (kv_result.value & 0xFFFFFFFF))) |
| 137 | { |
| 138 | /* Get the policy based on the index */ |
| 139 | p = |
| 140 | pool_elt_at_index (im->policies, ((u32) (kv_result.value >> 32))); |
| 141 | } |
| 142 | } |
| 143 | |
| 144 | return p; |
| 145 | } |
| 146 | |
/* Linear search of the SPD's IPv4 outbound policy list for the first
   policy matching (proto, src/dst address, src/dst port).  Addresses
   and ports are in HOST byte order here.  For protocols that do not
   carry ports (anything other than TCP/UDP/SCTP) the port checks are
   skipped.  On a match, if the flow cache is enabled the result is also
   inserted into the cache (converted back to network byte order).
   Returns NULL when spd is NULL or nothing matches. */
always_inline ipsec_policy_t *
ipsec_output_policy_match (ipsec_spd_t *spd, u8 pr, u32 la, u32 ra, u16 lp,
			   u16 rp, u8 flow_cache_enabled)
{
  ipsec_main_t *im = &ipsec_main;
  ipsec_policy_t *p;
  u32 *i;

  if (!spd)
    return 0;

  /* Policies are walked in list order; first match wins. */
  vec_foreach (i, spd->policies[IPSEC_SPD_POLICY_IP4_OUTBOUND])
    {
      p = pool_elt_at_index (im->policies, *i);
      /* protocol 0 in the policy acts as a wildcard */
      if (PREDICT_FALSE (p->protocol && (p->protocol != pr)))
	continue;

      /* inclusive remote-address range check */
      if (ra < clib_net_to_host_u32 (p->raddr.start.ip4.as_u32))
	continue;

      if (ra > clib_net_to_host_u32 (p->raddr.stop.ip4.as_u32))
	continue;

      /* inclusive local-address range check */
      if (la < clib_net_to_host_u32 (p->laddr.start.ip4.as_u32))
	continue;

      if (la > clib_net_to_host_u32 (p->laddr.stop.ip4.as_u32))
	continue;

      /* Port-less protocols match on addresses alone; zero the ports so
	 the flow-cache key is canonical, then record the match. */
      if (PREDICT_FALSE ((pr != IP_PROTOCOL_TCP) && (pr != IP_PROTOCOL_UDP) &&
			 (pr != IP_PROTOCOL_SCTP)))
	{
	  lp = 0;
	  rp = 0;
	  goto add_flow_cache;
	}

      if (lp < p->lport.start)
	continue;

      if (lp > p->lport.stop)
	continue;

      if (rp < p->rport.start)
	continue;

      if (rp > p->rport.stop)
	continue;

    add_flow_cache:
      if (flow_cache_enabled)
	{
	  /* Add an Entry in Flow cache */
	  ipsec4_out_spd_add_flow_cache_entry (
	    im, pr, clib_host_to_net_u32 (la), clib_host_to_net_u32 (ra),
	    clib_host_to_net_u16 (lp), clib_host_to_net_u16 (rp), *i);
	}

      return p;
    }
  return 0;
}
| 209 | |
| 210 | always_inline uword |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 211 | ip6_addr_match_range (ip6_address_t * a, ip6_address_t * la, |
| 212 | ip6_address_t * ua) |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 213 | { |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 214 | if ((memcmp (a->as_u64, la->as_u64, 2 * sizeof (u64)) >= 0) && |
| 215 | (memcmp (a->as_u64, ua->as_u64, 2 * sizeof (u64)) <= 0)) |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 216 | return 1; |
| 217 | return 0; |
| 218 | } |
| 219 | |
| 220 | always_inline ipsec_policy_t * |
Klement Sekera | be5a5dd | 2018-10-09 16:05:48 +0200 | [diff] [blame] | 221 | ipsec6_output_policy_match (ipsec_spd_t * spd, |
| 222 | ip6_address_t * la, |
| 223 | ip6_address_t * ra, u16 lp, u16 rp, u8 pr) |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 224 | { |
Neale Ranns | a09c1ff | 2019-02-04 01:10:30 -0800 | [diff] [blame] | 225 | ipsec_main_t *im = &ipsec_main; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 226 | ipsec_policy_t *p; |
| 227 | u32 *i; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 228 | |
Damjan Marion | 3f54b18 | 2016-08-16 11:27:02 +0200 | [diff] [blame] | 229 | if (!spd) |
| 230 | return 0; |
| 231 | |
Neale Ranns | a09c1ff | 2019-02-04 01:10:30 -0800 | [diff] [blame] | 232 | vec_foreach (i, spd->policies[IPSEC_SPD_POLICY_IP6_OUTBOUND]) |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 233 | { |
Neale Ranns | a09c1ff | 2019-02-04 01:10:30 -0800 | [diff] [blame] | 234 | p = pool_elt_at_index (im->policies, *i); |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 235 | if (PREDICT_FALSE (p->protocol && (p->protocol != pr))) |
| 236 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 237 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 238 | if (!ip6_addr_match_range (ra, &p->raddr.start.ip6, &p->raddr.stop.ip6)) |
| 239 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 240 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 241 | if (!ip6_addr_match_range (la, &p->laddr.start.ip6, &p->laddr.stop.ip6)) |
| 242 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 243 | |
Marco Varlese | 191a594 | 2017-10-30 18:17:21 +0100 | [diff] [blame] | 244 | if (PREDICT_FALSE |
| 245 | ((pr != IP_PROTOCOL_TCP) && (pr != IP_PROTOCOL_UDP) |
| 246 | && (pr != IP_PROTOCOL_SCTP))) |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 247 | return p; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 248 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 249 | if (lp < p->lport.start) |
| 250 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 251 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 252 | if (lp > p->lport.stop) |
| 253 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 254 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 255 | if (rp < p->rport.start) |
| 256 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 257 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 258 | if (rp > p->rport.stop) |
| 259 | continue; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 260 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 261 | return p; |
| 262 | } |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 263 | |
| 264 | return 0; |
| 265 | } |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 266 | |
/* Shared worker for the ipsec4/ipsec6-output-feature nodes.  For each
   buffer in the frame: find the SPD bound to the TX interface, match
   the packet against the outbound policies (optionally via the IPv4
   flow cache), then route the buffer to the matching action's node:
   ESP/AH encrypt (PROTECT), the next output feature (BYPASS), or
   error-drop (DISCARD / no match).  Buffers are batched into
   per-next-node frames by hand rather than using the usual
   vlib_buffer_enqueue helpers.  Returns the number of packets
   processed. */
static inline uword
ipsec_output_inline (vlib_main_t * vm, vlib_node_runtime_t * node,
		     vlib_frame_t * from_frame, int is_ipv6)
{
  ipsec_main_t *im = &ipsec_main;

  u32 *from, *to_next = 0, thread_index;
  u32 n_left_from, sw_if_index0, last_sw_if_index = (u32) ~ 0;
  u32 next_node_index = (u32) ~ 0, last_next_node_index = (u32) ~ 0;
  vlib_frame_t *f = 0;			/* frame being filled for the current next node */
  u32 spd_index0 = ~0;
  ipsec_spd_t *spd0 = 0;
  int bogus;
  /* per-action packet tallies, flushed to node counters at the end */
  u64 nc_protect = 0, nc_bypass = 0, nc_discard = 0, nc_nomatch = 0;
  u8 flow_cache_enabled = im->output_flow_cache_flag;

  from = vlib_frame_vector_args (from_frame);
  n_left_from = from_frame->n_vectors;
  thread_index = vm->thread_index;

  while (n_left_from > 0)
    {
      u32 bi0, pi0, bi1;
      vlib_buffer_t *b0, *b1;
      ipsec_policy_t *p0 = NULL;
      ip4_header_t *ip0;
      ip6_header_t *ip6_0 = 0;
      udp_header_t *udp0;
      u32 iph_offset = 0;
      tcp_header_t *tcp0;
      u64 bytes0;

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      /* prefetch the next buffer's metadata and data one iteration ahead */
      if (n_left_from > 1)
	{
	  bi1 = from[1];
	  b1 = vlib_get_buffer (vm, bi1);
	  CLIB_PREFETCH (b1, CLIB_CACHE_LINE_BYTES * 2, STORE);
	  vlib_prefetch_buffer_data (b1, LOAD);
	}
      sw_if_index0 = vnet_buffer (b0)->sw_if_index[VLIB_TX];
      /* the IP header sits past the already-written L2 rewrite */
      iph_offset = vnet_buffer (b0)->ip.save_rewrite_length;
      ip0 = (ip4_header_t *) ((u8 *) vlib_buffer_get_current (b0)
			      + iph_offset);

      /* lookup for SPD only if sw_if_index is changed */
      if (PREDICT_FALSE (last_sw_if_index != sw_if_index0))
	{
	  uword *p = hash_get (im->spd_index_by_sw_if_index, sw_if_index0);
	  ALWAYS_ASSERT (p);
	  spd_index0 = p[0];
	  spd0 = pool_elt_at_index (im->spds, spd_index0);
	  last_sw_if_index = sw_if_index0;
	}

      if (is_ipv6)
	{
	  ip6_0 = (ip6_header_t *) ((u8 *) vlib_buffer_get_current (b0)
				    + iph_offset);

	  /* NOTE(review): assumes the L4 header immediately follows the
	     fixed IPv6 header (no extension headers) — confirm. */
	  udp0 = ip6_next_header (ip6_0);
#if 0
	  clib_warning
	    ("packet received from %U port %u to %U port %u spd_id %u",
	     format_ip6_address, &ip6_0->src_address,
	     clib_net_to_host_u16 (udp0->src_port), format_ip6_address,
	     &ip6_0->dst_address, clib_net_to_host_u16 (udp0->dst_port),
	     spd0->id);
#endif

	  p0 = ipsec6_output_policy_match (spd0,
					   &ip6_0->src_address,
					   &ip6_0->dst_address,
					   clib_net_to_host_u16
					   (udp0->src_port),
					   clib_net_to_host_u16
					   (udp0->dst_port), ip6_0->protocol);
	}
      else
	{
	  udp0 = (udp_header_t *) ((u8 *) ip0 + ip4_header_bytes (ip0));

#if 0
	  clib_warning ("packet received from %U to %U port %u",
			format_ip4_address, ip0->src_address.as_u8,
			format_ip4_address, ip0->dst_address.as_u8,
			clib_net_to_host_u16 (udp0->dst_port));
	  clib_warning ("sw_if_index0 %u spd_index0 %u spd_id %u",
			sw_if_index0, spd_index0, spd0->id);
#endif

	  /*
	   * Check whether flow cache is enabled.
	   */
	  if (flow_cache_enabled)
	    {
	      /* cache lookup uses network byte order fields directly */
	      p0 = ipsec4_out_spd_find_flow_cache_entry (
		im, ip0->protocol, ip0->src_address.as_u32,
		ip0->dst_address.as_u32, udp0->src_port, udp0->dst_port);
	    }

	  /* Fall back to linear search if flow cache lookup fails */
	  if (p0 == NULL)
	    {
	      /* the linear matcher takes host byte order */
	      p0 = ipsec_output_policy_match (
		spd0, ip0->protocol,
		clib_net_to_host_u32 (ip0->src_address.as_u32),
		clib_net_to_host_u32 (ip0->dst_address.as_u32),
		clib_net_to_host_u16 (udp0->src_port),
		clib_net_to_host_u16 (udp0->dst_port), flow_cache_enabled);
	    }
	}
      /* TCP and UDP headers start with the same src/dst port layout, so
	 tcp0 aliases the same L4 location for checksum fixups below */
      tcp0 = (void *) udp0;

      if (PREDICT_TRUE (p0 != NULL))
	{
	  pi0 = p0 - im->policies;

	  vlib_prefetch_combined_counter (&ipsec_spd_policy_counters,
					  thread_index, pi0);

	  /* byte count for the policy counter: full IP packet length */
	  if (is_ipv6)
	    {
	      bytes0 = clib_net_to_host_u16 (ip6_0->payload_length);
	      bytes0 += sizeof (ip6_header_t);
	    }
	  else
	    {
	      bytes0 = clib_net_to_host_u16 (ip0->length);
	    }

	  if (p0->policy == IPSEC_POLICY_ACTION_PROTECT)
	    {
	      ipsec_sa_t *sa = 0;
	      nc_protect++;
	      sa = ipsec_sa_get (p0->sa_index);
	      /* NOTE(review): unbraced nested if/else — the first `else`
		 binds to `if (is_ipv6)` as intended, but braces would
		 make the ESP/AH selection unambiguous to readers. */
	      if (sa->protocol == IPSEC_PROTOCOL_ESP)
		if (is_ipv6)
		  next_node_index = im->esp6_encrypt_node_index;
		else
		  next_node_index = im->esp4_encrypt_node_index;
	      else if (is_ipv6)
		next_node_index = im->ah6_encrypt_node_index;
	      else
		next_node_index = im->ah4_encrypt_node_index;
	      vnet_buffer (b0)->ipsec.sad_index = p0->sa_index;

	      /* resolve any pending checksum offloads in software before
		 the packet is encrypted (the NIC can't fix them later) */
	      if (PREDICT_FALSE (b0->flags & VNET_BUFFER_F_OFFLOAD))
		{
		  vnet_buffer_oflags_t oflags = vnet_buffer (b0)->oflags;

		  /*
		   * Clearing offload flags before checksum is computed
		   * It guarantees the cache hit!
		   */
		  vnet_buffer_offload_flags_clear (b0, oflags);

		  if (is_ipv6)
		    {
		      if (PREDICT_FALSE (oflags &
					 VNET_BUFFER_OFFLOAD_F_TCP_CKSUM))
			{
			  tcp0->checksum = ip6_tcp_udp_icmp_compute_checksum (
			    vm, b0, ip6_0, &bogus);
			}
		      if (PREDICT_FALSE (oflags &
					 VNET_BUFFER_OFFLOAD_F_UDP_CKSUM))
			{
			  udp0->checksum = ip6_tcp_udp_icmp_compute_checksum (
			    vm, b0, ip6_0, &bogus);
			}
		    }
		  else
		    {
		      if (PREDICT_FALSE (oflags &
					 VNET_BUFFER_OFFLOAD_F_IP_CKSUM))
			{
			  ip0->checksum = ip4_header_checksum (ip0);
			}
		      if (PREDICT_FALSE (oflags &
					 VNET_BUFFER_OFFLOAD_F_TCP_CKSUM))
			{
			  tcp0->checksum =
			    ip4_tcp_udp_compute_checksum (vm, b0, ip0);
			}
		      if (PREDICT_FALSE (oflags &
					 VNET_BUFFER_OFFLOAD_F_UDP_CKSUM))
			{
			  udp0->checksum =
			    ip4_tcp_udp_compute_checksum (vm, b0, ip0);
			}
		    }
		}
	      /* strip the L2 rewrite so the encrypt node sees the IP header */
	      vlib_buffer_advance (b0, iph_offset);
	    }
	  else if (p0->policy == IPSEC_POLICY_ACTION_BYPASS)
	    {
	      nc_bypass++;
	      next_node_index = get_next_output_feature_node_index (b0, node);
	    }
	  else
	    {
	      nc_discard++;
	      next_node_index = im->error_drop_node_index;
	    }
	  vlib_increment_combined_counter
	    (&ipsec_spd_policy_counters, thread_index, pi0, 1, bytes0);
	}
      else
	{
	  pi0 = ~0;
	  nc_nomatch++;
	  next_node_index = im->error_drop_node_index;
	}

      from += 1;
      n_left_from -= 1;

      /* flush the pending frame whenever the destination node changes */
      if (PREDICT_FALSE ((last_next_node_index != next_node_index) || f == 0))
	{
	  /* if this is not 1st frame */
	  if (f)
	    vlib_put_frame_to_node (vm, last_next_node_index, f);

	  last_next_node_index = next_node_index;

	  f = vlib_get_frame_to_node (vm, next_node_index);

	  /* frame->frame_flags, copy it from node */
	  /* Copy trace flag from next_frame and from runtime. */
	  f->frame_flags |= node->flags & VLIB_NODE_FLAG_TRACE;

	  to_next = vlib_frame_vector_args (f);
	}

      to_next[0] = bi0;
      to_next += 1;
      f->n_vectors++;

      if (PREDICT_FALSE (node->flags & VLIB_NODE_FLAG_TRACE) &&
	  PREDICT_FALSE (b0->flags & VLIB_BUFFER_IS_TRACED))
	{
	  ipsec_output_trace_t *tr =
	    vlib_add_trace (vm, node, b0, sizeof (*tr));
	  if (spd0)
	    tr->spd_id = spd0->id;
	  tr->policy_id = pi0;
	}
    }

  /* NOTE(review): relies on the frame being non-empty (f != 0 here);
     vlib does not dispatch nodes with empty frames — confirm. */
  vlib_put_frame_to_node (vm, next_node_index, f);
  vlib_node_increment_counter (vm, node->node_index,
			       IPSEC_OUTPUT_ERROR_POLICY_PROTECT, nc_protect);
  vlib_node_increment_counter (vm, node->node_index,
			       IPSEC_OUTPUT_ERROR_POLICY_BYPASS, nc_bypass);
  vlib_node_increment_counter (vm, node->node_index,
			       IPSEC_OUTPUT_ERROR_POLICY_DISCARD, nc_discard);
  vlib_node_increment_counter (vm, node->node_index,
			       IPSEC_OUTPUT_ERROR_POLICY_NO_MATCH,
			       nc_nomatch);
  return from_frame->n_vectors;
}
| 530 | |
/* IPv4 entry point: dispatch to the shared worker with is_ipv6 = 0. */
VLIB_NODE_FN (ipsec4_output_node) (vlib_main_t * vm,
				   vlib_node_runtime_t * node,
				   vlib_frame_t * frame)
{
  return ipsec_output_inline (vm, node, frame, 0);
}
| 537 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 538 | /* *INDENT-OFF* */ |
/* Node registration for the IPv4 output feature; next nodes come from
   the shared foreach_ipsec_output_next list. */
VLIB_REGISTER_NODE (ipsec4_output_node) = {
  .name = "ipsec4-output-feature",
  .vector_size = sizeof (u32),
  .format_trace = format_ipsec_output_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(ipsec_output_error_strings),
  .error_strings = ipsec_output_error_strings,

  .n_next_nodes = IPSEC_OUTPUT_N_NEXT,
  .next_nodes = {
#define _(s,n) [IPSEC_OUTPUT_NEXT_##s] = n,
    foreach_ipsec_output_next
#undef _
  },
};
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 555 | /* *INDENT-ON* */ |
Damjan Marion | e936bbe | 2016-02-25 23:17:38 +0100 | [diff] [blame] | 556 | |
/* IPv6 entry point: dispatch to the shared worker with is_ipv6 = 1. */
VLIB_NODE_FN (ipsec6_output_node) (vlib_main_t * vm,
				   vlib_node_runtime_t * node,
				   vlib_frame_t * frame)
{
  return ipsec_output_inline (vm, node, frame, 1);
}
| 563 | |
/* Node registration for the IPv6 output feature; mirrors the IPv4
   registration (same errors, trace formatter, and next-node list). */
VLIB_REGISTER_NODE (ipsec6_output_node) = {
  .name = "ipsec6-output-feature",
  .vector_size = sizeof (u32),
  .format_trace = format_ipsec_output_trace,
  .type = VLIB_NODE_TYPE_INTERNAL,

  .n_errors = ARRAY_LEN(ipsec_output_error_strings),
  .error_strings = ipsec_output_error_strings,

  .n_next_nodes = IPSEC_OUTPUT_N_NEXT,
  .next_nodes = {
#define _(s,n) [IPSEC_OUTPUT_NEXT_##s] = n,
    foreach_ipsec_output_next
#undef _
  },
};
Matus Fabian | 08a6f01 | 2016-11-15 06:08:51 -0800 | [diff] [blame] | 580 | |