Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (c) 2015 Cisco and/or its affiliates. |
| 3 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 4 | * you may not use this file except in compliance with the License. |
| 5 | * You may obtain a copy of the License at: |
| 6 | * |
| 7 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | * |
| 9 | * Unless required by applicable law or agreed to in writing, software |
| 10 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 11 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 12 | * See the License for the specific language governing permissions and |
| 13 | * limitations under the License. |
| 14 | */ |
Sergio Gonzalez Monroy | a10f62b | 2016-11-25 13:36:12 +0000 | [diff] [blame] | 15 | #ifndef __ESP_H__ |
| 16 | #define __ESP_H__ |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 17 | |
Sergio Gonzalez Monroy | db93cd9 | 2017-08-26 15:22:05 +0100 | [diff] [blame] | 18 | #include <vnet/ip/ip.h> |
| 19 | #include <vnet/ipsec/ipsec.h> |
| 20 | |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 21 | #include <openssl/hmac.h> |
| 22 | #include <openssl/rand.h> |
| 23 | #include <openssl/evp.h> |
| 24 | |
/* ESP packet header: SPI + low 32 bits of the sequence number,
 * immediately followed by variable-length payload (IV/ciphertext). */
typedef struct
{
  u32 spi;			/* identifies the SA this packet belongs to */
  u32 seq;			/* low 32 bits of the sequence number */
  u8 data[0];			/* start of payload (zero-length trailing array) */
} esp_header_t;
| 31 | |
/* ESP trailer preceding the ICV: amount of padding that was added,
 * and the protocol number of the encapsulated payload. */
typedef struct
{
  u8 pad_length;		/* number of pad bytes preceding this footer */
  u8 next_header;		/* IP protocol number of the inner payload */
} esp_footer_t;
| 37 | |
/* *INDENT-OFF* */
/* Packed overlay: IPv4 header immediately followed by an ESP header,
 * used to read/write both in one step on ESP-over-IPv4 packets. */
typedef CLIB_PACKED (struct {
  ip4_header_t ip4;
  esp_header_t esp;
}) ip4_and_esp_header_t;
/* *INDENT-ON* */
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 44 | |
/* *INDENT-OFF* */
/* Packed overlay: IPv6 header immediately followed by an ESP header,
 * used to read/write both in one step on ESP-over-IPv6 packets. */
typedef CLIB_PACKED (struct {
  ip6_header_t ip6;
  esp_header_t esp;
}) ip6_and_esp_header_t;
/* *INDENT-ON* */
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 51 | |
/* Maps an ipsec_crypto_alg_t to the OpenSSL cipher implementing it. */
typedef struct
{
  const EVP_CIPHER *type;	/* OpenSSL cipher, e.g. EVP_aes_128_cbc() */
} esp_crypto_alg_t;
| 56 | |
/* Maps an ipsec_integ_alg_t to the OpenSSL digest implementing it. */
typedef struct
{
  const EVP_MD *md;		/* OpenSSL digest, e.g. EVP_sha256() */
  u8 trunc_size;		/* bytes of the HMAC actually used as the ICV */
} esp_integ_alg_t;
| 62 | |
/* Per-worker-thread OpenSSL state.  Each context sits on its own cache
 * line (see the CLIB_CACHE_LINE_ALIGN_MARK markers) so threads do not
 * false-share.  OpenSSL >= 1.1.0 made the context structs opaque, hence
 * the pointer vs. by-value split on OPENSSL_VERSION_NUMBER. */
typedef struct
{
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  EVP_CIPHER_CTX *encrypt_ctx;
#else
  EVP_CIPHER_CTX encrypt_ctx;
#endif
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline1);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  EVP_CIPHER_CTX *decrypt_ctx;
#else
  EVP_CIPHER_CTX decrypt_ctx;
#endif
  CLIB_CACHE_LINE_ALIGN_MARK (cacheline2);
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  HMAC_CTX *hmac_ctx;
#else
  HMAC_CTX hmac_ctx;
#endif
  /* Last algorithm initialized into each context; lets the datapath
   * skip re-keying the OpenSSL context when the algorithm is unchanged
   * (see hmac_calc below). */
  ipsec_crypto_alg_t last_encrypt_alg;
  ipsec_crypto_alg_t last_decrypt_alg;
  ipsec_integ_alg_t last_integ_alg;
} esp_main_per_thread_data_t;
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 87 | |
/* Top-level ESP state: algorithm lookup tables (vectors indexed by the
 * ipsec_*_alg_t enums) plus one per-thread OpenSSL context block. */
typedef struct
{
  esp_crypto_alg_t *esp_crypto_algs;	/* indexed by ipsec_crypto_alg_t */
  esp_integ_alg_t *esp_integ_algs;	/* indexed by ipsec_integ_alg_t */
  esp_main_per_thread_data_t *per_thread_data;	/* indexed by thread index */
} esp_main_t;
| 94 | |
Dave Wallace | 71612d6 | 2017-10-24 01:32:41 -0400 | [diff] [blame] | 95 | extern esp_main_t esp_main; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 96 | |
Sergio Gonzalez Monroy | a10f62b | 2016-11-25 13:36:12 +0000 | [diff] [blame] | 97 | #define ESP_WINDOW_SIZE (64) |
| 98 | #define ESP_SEQ_MAX (4294967295UL) |
| 99 | |
Sergio Gonzalez Monroy | db93cd9 | 2017-08-26 15:22:05 +0100 | [diff] [blame] | 100 | u8 *format_esp_header (u8 * s, va_list * args); |
Sergio Gonzalez Monroy | a10f62b | 2016-11-25 13:36:12 +0000 | [diff] [blame] | 101 | |
| 102 | always_inline int |
| 103 | esp_replay_check (ipsec_sa_t * sa, u32 seq) |
| 104 | { |
| 105 | u32 diff; |
| 106 | |
| 107 | if (PREDICT_TRUE (seq > sa->last_seq)) |
| 108 | return 0; |
| 109 | |
| 110 | diff = sa->last_seq - seq; |
| 111 | |
| 112 | if (ESP_WINDOW_SIZE > diff) |
| 113 | return (sa->replay_window & (1ULL << diff)) ? 1 : 0; |
| 114 | else |
| 115 | return 1; |
| 116 | |
| 117 | return 0; |
| 118 | } |
| 119 | |
| 120 | always_inline int |
| 121 | esp_replay_check_esn (ipsec_sa_t * sa, u32 seq) |
| 122 | { |
| 123 | u32 tl = sa->last_seq; |
| 124 | u32 th = sa->last_seq_hi; |
| 125 | u32 diff = tl - seq; |
| 126 | |
| 127 | if (PREDICT_TRUE (tl >= (ESP_WINDOW_SIZE - 1))) |
| 128 | { |
| 129 | if (seq >= (tl - ESP_WINDOW_SIZE + 1)) |
| 130 | { |
| 131 | sa->seq_hi = th; |
| 132 | if (seq <= tl) |
| 133 | return (sa->replay_window & (1ULL << diff)) ? 1 : 0; |
| 134 | else |
| 135 | return 0; |
| 136 | } |
| 137 | else |
| 138 | { |
| 139 | sa->seq_hi = th + 1; |
| 140 | return 0; |
| 141 | } |
| 142 | } |
| 143 | else |
| 144 | { |
| 145 | if (seq >= (tl - ESP_WINDOW_SIZE + 1)) |
| 146 | { |
| 147 | sa->seq_hi = th - 1; |
| 148 | return (sa->replay_window & (1ULL << diff)) ? 1 : 0; |
| 149 | } |
| 150 | else |
| 151 | { |
| 152 | sa->seq_hi = th; |
| 153 | if (seq <= tl) |
| 154 | return (sa->replay_window & (1ULL << diff)) ? 1 : 0; |
| 155 | else |
| 156 | return 0; |
| 157 | } |
| 158 | } |
| 159 | |
| 160 | return 0; |
| 161 | } |
| 162 | |
| 163 | /* TODO seq increment should be atomic to be accessed by multiple workers */ |
| 164 | always_inline void |
| 165 | esp_replay_advance (ipsec_sa_t * sa, u32 seq) |
| 166 | { |
| 167 | u32 pos; |
| 168 | |
| 169 | if (seq > sa->last_seq) |
| 170 | { |
| 171 | pos = seq - sa->last_seq; |
| 172 | if (pos < ESP_WINDOW_SIZE) |
| 173 | sa->replay_window = ((sa->replay_window) << pos) | 1; |
| 174 | else |
| 175 | sa->replay_window = 1; |
| 176 | sa->last_seq = seq; |
| 177 | } |
| 178 | else |
| 179 | { |
| 180 | pos = sa->last_seq - seq; |
| 181 | sa->replay_window |= (1ULL << pos); |
| 182 | } |
| 183 | } |
| 184 | |
/* ESN variant of esp_replay_advance: slides the 64-bit window using the
 * epoch guess (sa->seq_hi) that esp_replay_check_esn stored earlier.
 * "wrap" compares the guessed epoch against the recorded one to decide
 * whether the low 32-bit counter wrapped between last_seq and seq. */
always_inline void
esp_replay_advance_esn (ipsec_sa_t * sa, u32 seq)
{
  int wrap = sa->seq_hi - sa->last_seq_hi;
  u32 pos;

  if (wrap == 0 && seq > sa->last_seq)
    {
      /* Same epoch, newer packet: slide the window forward. */
      pos = seq - sa->last_seq;
      if (pos < ESP_WINDOW_SIZE)
	sa->replay_window = ((sa->replay_window) << pos) | 1;
      else
	sa->replay_window = 1;
      sa->last_seq = seq;
    }
  else if (wrap > 0)
    {
      /* Low word wrapped forward: distance across the 2^32 boundary.
       * (~seq + last_seq + 1 == last_seq - seq in two's complement,
       * yielding the forward distance modulo 2^32.) */
      pos = ~seq + sa->last_seq + 1;
      if (pos < ESP_WINDOW_SIZE)
	sa->replay_window = ((sa->replay_window) << pos) | 1;
      else
	sa->replay_window = 1;
      sa->last_seq = seq;
      sa->last_seq_hi = sa->seq_hi;
    }
  else if (wrap < 0)
    {
      /* Packet from the previous epoch: mark without sliding. */
      pos = ~seq + sa->last_seq + 1;
      sa->replay_window |= (1ULL << pos);
    }
  else
    {
      /* Same epoch, older packet: mark its slot. */
      pos = sa->last_seq - seq;
      sa->replay_window |= (1ULL << pos);
    }
}
| 221 | |
| 222 | always_inline int |
| 223 | esp_seq_advance (ipsec_sa_t * sa) |
| 224 | { |
| 225 | if (PREDICT_TRUE (sa->use_esn)) |
| 226 | { |
| 227 | if (PREDICT_FALSE (sa->seq == ESP_SEQ_MAX)) |
| 228 | { |
| 229 | if (PREDICT_FALSE |
| 230 | (sa->use_anti_replay && sa->seq_hi == ESP_SEQ_MAX)) |
| 231 | return 1; |
| 232 | sa->seq_hi++; |
| 233 | } |
| 234 | sa->seq++; |
| 235 | } |
| 236 | else |
| 237 | { |
| 238 | if (PREDICT_FALSE (sa->use_anti_replay && sa->seq == ESP_SEQ_MAX)) |
| 239 | return 1; |
| 240 | sa->seq++; |
| 241 | } |
| 242 | |
| 243 | return 0; |
| 244 | } |
| 245 | |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 246 | always_inline void |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 247 | esp_init () |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 248 | { |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 249 | esp_main_t *em = &esp_main; |
| 250 | vlib_thread_main_t *tm = vlib_get_thread_main (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 251 | |
| 252 | memset (em, 0, sizeof (em[0])); |
| 253 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 254 | vec_validate (em->esp_crypto_algs, IPSEC_CRYPTO_N_ALG - 1); |
| 255 | em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_128].type = EVP_aes_128_cbc (); |
| 256 | em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_192].type = EVP_aes_192_cbc (); |
| 257 | em->esp_crypto_algs[IPSEC_CRYPTO_ALG_AES_CBC_256].type = EVP_aes_256_cbc (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 258 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 259 | vec_validate (em->esp_integ_algs, IPSEC_INTEG_N_ALG - 1); |
| 260 | esp_integ_alg_t *i; |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 261 | |
| 262 | i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA1_96]; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 263 | i->md = EVP_sha1 (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 264 | i->trunc_size = 12; |
| 265 | |
| 266 | i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_96]; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 267 | i->md = EVP_sha256 (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 268 | i->trunc_size = 12; |
| 269 | |
| 270 | i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_256_128]; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 271 | i->md = EVP_sha256 (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 272 | i->trunc_size = 16; |
| 273 | |
| 274 | i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_384_192]; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 275 | i->md = EVP_sha384 (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 276 | i->trunc_size = 24; |
| 277 | |
| 278 | i = &em->esp_integ_algs[IPSEC_INTEG_ALG_SHA_512_256]; |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 279 | i->md = EVP_sha512 (); |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 280 | i->trunc_size = 32; |
| 281 | |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 282 | vec_validate_aligned (em->per_thread_data, tm->n_vlib_mains - 1, |
| 283 | CLIB_CACHE_LINE_BYTES); |
Matthew Smith | 29d8510 | 2016-05-01 14:52:08 -0500 | [diff] [blame] | 284 | int thread_id; |
| 285 | |
| 286 | for (thread_id = 0; thread_id < tm->n_vlib_mains - 1; thread_id++) |
| 287 | { |
Marco Varlese | f616d10 | 2017-11-09 15:16:20 +0100 | [diff] [blame] | 288 | #if OPENSSL_VERSION_NUMBER >= 0x10100000L |
| 289 | em->per_thread_data[thread_id].encrypt_ctx = EVP_CIPHER_CTX_new (); |
| 290 | em->per_thread_data[thread_id].decrypt_ctx = EVP_CIPHER_CTX_new (); |
| 291 | em->per_thread_data[thread_id].hmac_ctx = HMAC_CTX_new (); |
| 292 | #else |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 293 | EVP_CIPHER_CTX_init (&(em->per_thread_data[thread_id].encrypt_ctx)); |
| 294 | EVP_CIPHER_CTX_init (&(em->per_thread_data[thread_id].decrypt_ctx)); |
| 295 | HMAC_CTX_init (&(em->per_thread_data[thread_id].hmac_ctx)); |
Marco Varlese | f616d10 | 2017-11-09 15:16:20 +0100 | [diff] [blame] | 296 | #endif |
Matthew Smith | 29d8510 | 2016-05-01 14:52:08 -0500 | [diff] [blame] | 297 | } |
Ed Warnicke | cb9cada | 2015-12-08 15:45:58 -0700 | [diff] [blame] | 298 | } |
| 299 | |
/* Compute an HMAC over data (plus, for ESN SAs, the high 32 bits of the
 * sequence number) into signature, using this thread's cached HMAC_CTX.
 *
 * Returns the ICV length (trunc_size) for the algorithm, or 0 if the
 * algorithm has no digest configured.  NOTE(review): HMAC_Final writes
 * the FULL digest; signature must be sized for the whole digest, not
 * just trunc_size — confirm at the call sites. */
always_inline unsigned int
hmac_calc (ipsec_integ_alg_t alg,
	   u8 * key,
	   int key_len,
	   u8 * data, int data_len, u8 * signature, u8 use_esn, u32 seq_hi)
{
  esp_main_t *em = &esp_main;
  u32 thread_index = vlib_get_thread_index ();
#if OPENSSL_VERSION_NUMBER >= 0x10100000L
  HMAC_CTX *ctx = em->per_thread_data[thread_index].hmac_ctx;
#else
  HMAC_CTX *ctx = &(em->per_thread_data[thread_index].hmac_ctx);
#endif
  const EVP_MD *md = NULL;
  unsigned int len;

  ASSERT (alg < IPSEC_INTEG_N_ALG);

  /* Unsupported/unconfigured algorithm: no ICV. */
  if (PREDICT_FALSE (em->esp_integ_algs[alg].md == 0))
    return 0;

  /* md stays NULL when the algorithm is unchanged since the last call
   * on this thread; HMAC_Init_ex then reuses the digest already set on
   * the ctx and only re-keys. */
  if (PREDICT_FALSE (alg != em->per_thread_data[thread_index].last_integ_alg))
    {
      md = em->esp_integ_algs[alg].md;
      em->per_thread_data[thread_index].last_integ_alg = alg;
    }

  HMAC_Init_ex (ctx, key, key_len, md, NULL);

  HMAC_Update (ctx, data, data_len);

  /* ESN: the high 32 bits are authenticated but not transmitted, so
   * append them to the MACed data (RFC 4303 style). */
  if (PREDICT_TRUE (use_esn))
    HMAC_Update (ctx, (u8 *) & seq_hi, sizeof (seq_hi));
  HMAC_Final (ctx, signature, &len);

  return em->esp_integ_algs[alg].trunc_size;
}
| 337 | |
Sergio Gonzalez Monroy | a10f62b | 2016-11-25 13:36:12 +0000 | [diff] [blame] | 338 | #endif /* __ESP_H__ */ |
Keith Burns (alagalah) | 166a9d4 | 2016-08-06 11:00:56 -0700 | [diff] [blame] | 339 | |
| 340 | /* |
| 341 | * fd.io coding-style-patch-verification: ON |
| 342 | * |
| 343 | * Local Variables: |
| 344 | * eval: (c-set-style "gnu") |
| 345 | * End: |
| 346 | */ |