/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef __ESP_H__
#define __ESP_H__

#include <vnet/ip/ip.h>
#include <vnet/ipsec/ipsec.h>

#include <openssl/hmac.h>
#include <openssl/rand.h>
#include <openssl/evp.h>

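/* ESP header as it appears on the wire (RFC 4303): Security Parameters
 * Index and sequence number, followed by the variable-length payload. */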
typedef struct
{
  u32 spi;
  u32 seq;
  u8 data[0];
} esp_header_t;

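/* ESP trailer fields placed just before the ICV: padding length and the
 * next-header protocol number (RFC 4303). */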
typedef struct
{
  u8 pad_length;
  u8 next_header;
} esp_footer_t;

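/* Packed overlays used to parse or build the outer headers in one step:
 * plain IPv4/IPv6 + ESP, and IPv4 + UDP + ESP for UDP-encapsulated ESP. */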
/* *INDENT-OFF* */
typedef CLIB_PACKED (struct {
  ip4_header_t ip4;
  esp_header_t esp;
}) ip4_and_esp_header_t;
/* *INDENT-ON* */

/* *INDENT-OFF* */
typedef CLIB_PACKED (struct {
  ip4_header_t ip4;
  udp_header_t udp;
  esp_header_t esp;
}) ip4_and_udp_and_esp_header_t;
/* *INDENT-ON* */

/* *INDENT-OFF* */
typedef CLIB_PACKED (struct {
  ip6_header_t ip6;
  esp_header_t esp;
}) ip6_and_esp_header_t;
/* *INDENT-ON* */

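/* OpenSSL cipher descriptor plus IV and block sizes for one crypto
 * algorithm; the table is indexed by ipsec_crypto_alg_t. */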
typedef struct
{
  const EVP_CIPHER *type;
  u8 iv_size;
  u8 block_size;
} ipsec_proto_main_crypto_alg_t;

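/* OpenSSL digest and truncated ICV length for one integrity algorithm;
 * the table is indexed by ipsec_integ_alg_t. */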
typedef struct
{
  const EVP_MD *md;
  u8 trunc_size;
} ipsec_proto_main_integ_alg_t;

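/* Per-worker OpenSSL contexts, each on its own cache line to avoid false
 * sharing.  OpenSSL 1.1.0 made these context types opaque, hence the
 * pointer vs. by-value variants selected on OPENSSL_VERSION_NUMBER. */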
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  EVP_CIPHER_CTX *encrypt_ctx;
#else
  EVP_CIPHER_CTX encrypt_ctx;
#endif
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline1);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  EVP_CIPHER_CTX *decrypt_ctx;
#else
  EVP_CIPHER_CTX decrypt_ctx;
#endif
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline2);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  HMAC_CTX *hmac_ctx;
#else
  HMAC_CTX hmac_ctx;
#endif
  ipsec_crypto_alg_t last_encrypt_alg;
  ipsec_crypto_alg_t last_decrypt_alg;
  ipsec_integ_alg_t last_integ_alg;
} ipsec_proto_main_per_thread_data_t;

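/* Global state of the OpenSSL-based ESP implementation: the algorithm
 * tables and the vector of per-thread contexts. */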
typedef struct
{
  ipsec_proto_main_crypto_alg_t *ipsec_proto_main_crypto_algs;
  ipsec_proto_main_integ_alg_t *ipsec_proto_main_integ_algs;
  ipsec_proto_main_per_thread_data_t *per_thread_data;
} ipsec_proto_main_t;

extern ipsec_proto_main_t ipsec_proto_main;

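/* Anti-replay window width in bits and the largest 32-bit sequence number. */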
#define ESP_WINDOW_SIZE (64)
#define ESP_SEQ_MAX (4294967295UL)

u8 *format_esp_header (u8 * s, va_list * args);

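/* Anti-replay check for 32-bit sequence numbers: returns 1 if the packet
 * is a replay (window bit already set) or falls behind the window, and 0
 * if it is acceptable. */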
always_inline int
esp_replay_check (ipsec_sa_t * sa, u32 seq)
{
  u32 diff;

  if (PREDICT_TRUE (seq > sa->last_seq))
    return 0;

  diff = sa->last_seq - seq;

  if (ESP_WINDOW_SIZE > diff)
    return (sa->replay_window & (1ULL << diff)) ? 1 : 0;
  else
    return 1;

  return 0;
}

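/* Anti-replay check for extended (64-bit) sequence numbers, following the
 * windowing scheme of RFC 4303 Appendix A: decide which high-order half
 * the received low 32 bits belong to, record it in sa->seq_hi, then test
 * the window bit.  Returns 1 for a replay, 0 otherwise. */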
always_inline int
esp_replay_check_esn (ipsec_sa_t * sa, u32 seq)
{
  u32 tl = sa->last_seq;
  u32 th = sa->last_seq_hi;
  u32 diff = tl - seq;

  if (PREDICT_TRUE (tl >= (ESP_WINDOW_SIZE - 1)))
    {
      if (seq >= (tl - ESP_WINDOW_SIZE + 1))
	{
	  sa->seq_hi = th;
	  if (seq <= tl)
	    return (sa->replay_window & (1ULL << diff)) ? 1 : 0;
	  else
	    return 0;
	}
      else
	{
	  sa->seq_hi = th + 1;
	  return 0;
	}
    }
  else
    {
      if (seq >= (tl - ESP_WINDOW_SIZE + 1))
	{
	  sa->seq_hi = th - 1;
	  return (sa->replay_window & (1ULL << diff)) ? 1 : 0;
	}
      else
	{
	  sa->seq_hi = th;
	  if (seq <= tl)
	    return (sa->replay_window & (1ULL << diff)) ? 1 : 0;
	  else
	    return 0;
	}
    }

  return 0;
}

/* TODO seq increment should be atomic to be accessed by multiple workers */
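/* Slide the anti-replay window forward when a verified packet advances
 * last_seq, or mark the corresponding bit when it lies inside the window. */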
always_inline void
esp_replay_advance (ipsec_sa_t * sa, u32 seq)
{
  u32 pos;

  if (seq > sa->last_seq)
    {
      pos = seq - sa->last_seq;
      if (pos < ESP_WINDOW_SIZE)
	sa->replay_window = ((sa->replay_window) << pos) | 1;
      else
	sa->replay_window = 1;
      sa->last_seq = seq;
    }
  else
    {
      pos = sa->last_seq - seq;
      sa->replay_window |= (1ULL << pos);
    }
}

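/* ESN variant of the window advance: wrap is nonzero when sa->seq_hi moved
 * relative to last_seq_hi, i.e. the low 32 bits wrapped between the last
 * accepted packet and this one. */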
always_inline void
esp_replay_advance_esn (ipsec_sa_t * sa, u32 seq)
{
  int wrap = sa->seq_hi - sa->last_seq_hi;
  u32 pos;

  if (wrap == 0 && seq > sa->last_seq)
    {
      pos = seq - sa->last_seq;
      if (pos < ESP_WINDOW_SIZE)
	sa->replay_window = ((sa->replay_window) << pos) | 1;
      else
	sa->replay_window = 1;
      sa->last_seq = seq;
    }
  else if (wrap > 0)
    {
      pos = ~seq + sa->last_seq + 1;
      if (pos < ESP_WINDOW_SIZE)
	sa->replay_window = ((sa->replay_window) << pos) | 1;
      else
	sa->replay_window = 1;
      sa->last_seq = seq;
      sa->last_seq_hi = sa->seq_hi;
    }
  else if (wrap < 0)
    {
      pos = ~seq + sa->last_seq + 1;
      sa->replay_window |= (1ULL << pos);
    }
  else
    {
      pos = sa->last_seq - seq;
      sa->replay_window |= (1ULL << pos);
    }
}

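/* Allocate the next outbound sequence number.  Returns 1 when the sequence
 * space is exhausted and anti-replay forbids wrapping, 0 otherwise. */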
always_inline int
esp_seq_advance (ipsec_sa_t * sa)
{
  if (PREDICT_TRUE (sa->use_esn))
    {
      if (PREDICT_FALSE (sa->seq == ESP_SEQ_MAX))
	{
	  if (PREDICT_FALSE
	      (sa->use_anti_replay && sa->seq_hi == ESP_SEQ_MAX))
	    return 1;
	  sa->seq_hi++;
	}
      sa->seq++;
    }
  else
    {
      if (PREDICT_FALSE (sa->use_anti_replay && sa->seq == ESP_SEQ_MAX))
	return 1;
      sa->seq++;
    }

  return 0;
}

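/* One-time setup of the OpenSSL backend: fill in the cipher and digest
 * tables and create (or init) one set of OpenSSL contexts per VPP thread. */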
always_inline void
ipsec_proto_init ()
{
  ipsec_proto_main_t *em = &ipsec_proto_main;
  vlib_thread_main_t *tm = vlib_get_thread_main ();

  memset (em, 0, sizeof (em[0]));

  vec_validate (em->ipsec_proto_main_crypto_algs, IPSEC_CRYPTO_N_ALG - 1);
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128].type =
    EVP_aes_128_cbc ();
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192].type =
    EVP_aes_192_cbc ();
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256].type =
    EVP_aes_256_cbc ();
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128].iv_size = 16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192].iv_size = 16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256].iv_size = 16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128].block_size =
    16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192].block_size =
    16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256].block_size =
    16;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_DES_CBC].type =
    EVP_des_cbc ();
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_3DES_CBC].type =
    EVP_des_ede3_cbc ();
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_DES_CBC].block_size = 8;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_3DES_CBC].block_size = 8;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_DES_CBC].iv_size = 8;
  em->ipsec_proto_main_crypto_algs[IPSEC_CRYPTO_ALG_3DES_CBC].iv_size = 8;

  vec_validate (em->ipsec_proto_main_integ_algs, IPSEC_INTEG_N_ALG - 1);
  ipsec_proto_main_integ_alg_t *i;

  i = &em->ipsec_proto_main_integ_algs[IPSEC_INTEG_ALG_SHA1_96];
  i->md = EVP_sha1 ();
  i->trunc_size = 12;

  i = &em->ipsec_proto_main_integ_algs[IPSEC_INTEG_ALG_SHA_256_96];
  i->md = EVP_sha256 ();
  i->trunc_size = 12;

  i = &em->ipsec_proto_main_integ_algs[IPSEC_INTEG_ALG_SHA_256_128];
  i->md = EVP_sha256 ();
  i->trunc_size = 16;

  i = &em->ipsec_proto_main_integ_algs[IPSEC_INTEG_ALG_SHA_384_192];
  i->md = EVP_sha384 ();
  i->trunc_size = 24;

  i = &em->ipsec_proto_main_integ_algs[IPSEC_INTEG_ALG_SHA_512_256];
  i->md = EVP_sha512 ();
  i->trunc_size = 32;

  vec_validate_aligned (em->per_thread_data, tm->n_vlib_mains - 1,
			CLIB_CACHE_LINE_BYTES);
  int thread_id;

  for (thread_id = 0; thread_id < tm->n_vlib_mains; thread_id++)
    {
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
      em->per_thread_data[thread_id].encrypt_ctx = EVP_CIPHER_CTX_new ();
      em->per_thread_data[thread_id].decrypt_ctx = EVP_CIPHER_CTX_new ();
      em->per_thread_data[thread_id].hmac_ctx = HMAC_CTX_new ();
#else
      EVP_CIPHER_CTX_init (&(em->per_thread_data[thread_id].encrypt_ctx));
      EVP_CIPHER_CTX_init (&(em->per_thread_data[thread_id].decrypt_ctx));
      HMAC_CTX_init (&(em->per_thread_data[thread_id].hmac_ctx));
#endif
    }
}

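/* Compute the HMAC ICV over data with the calling thread's HMAC context;
 * for ESN the high 32 bits of the sequence number are appended as implicit
 * trailer data (RFC 4303).  Returns the truncated ICV size in bytes, or 0
 * if no digest is configured for the algorithm. */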
always_inline unsigned int
hmac_calc (ipsec_integ_alg_t alg,
	   u8 * key,
	   int key_len,
	   u8 * data, int data_len, u8 * signature, u8 use_esn, u32 seq_hi)
{
  ipsec_proto_main_t *em = &ipsec_proto_main;
  u32 thread_index = vlib_get_thread_index ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  HMAC_CTX *ctx = em->per_thread_data[thread_index].hmac_ctx;
#else
  HMAC_CTX *ctx = &(em->per_thread_data[thread_index].hmac_ctx);
#endif
  const EVP_MD *md = NULL;
  unsigned int len;

  ASSERT (alg < IPSEC_INTEG_N_ALG);

  if (PREDICT_FALSE (em->ipsec_proto_main_integ_algs[alg].md == 0))
    return 0;

  if (PREDICT_FALSE (alg != em->per_thread_data[thread_index].last_integ_alg))
    {
      md = em->ipsec_proto_main_integ_algs[alg].md;
      em->per_thread_data[thread_index].last_integ_alg = alg;
    }

  HMAC_Init_ex (ctx, key, key_len, md, NULL);

  HMAC_Update (ctx, data, data_len);

  if (PREDICT_TRUE (use_esn))
    HMAC_Update (ctx, (u8 *) & seq_hi, sizeof (seq_hi));
  HMAC_Final (ctx, signature, &len);

  return em->ipsec_proto_main_integ_algs[alg].trunc_size;
}

#endif /* __ESP_H__ */

/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */