/* SPDX-License-Identifier: Apache-2.0
 * Copyright(c) 2024 Cisco Systems, Inc.
 */

#include <vlib/vlib.h>
#include <vnet/plugin/plugin.h>
#include <vnet/crypto/crypto.h>
#include <crypto_native/crypto_native.h>
#include <vppinfra/crypto/aes_ctr.h>

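/* With GCC (not clang) in release builds (CLIB_DEBUG == 0), compile this
 * translation unit at -O3. */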
#if __GNUC__ > 4 && !__clang__ && CLIB_DEBUG == 0
#pragma GCC optimize("O3")
#endif

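/* Process a batch of AES-CTR ops. Each op selects its expanded key by
 * key_index and carries its own IV; ops flagged with
 * VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS are transformed chunk by chunk.
 * All ops are marked COMPLETED and n_ops is returned. */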
static_always_inline u32
aes_ops_aes_ctr (vlib_main_t *vm, vnet_crypto_op_t *ops[], u32 n_ops,
		 vnet_crypto_op_chunk_t *chunks, aes_key_size_t ks,
		 int maybe_chained)
{
  crypto_native_main_t *cm = &crypto_native_main;
  vnet_crypto_op_t *op = ops[0];
  aes_ctr_key_data_t *kd;
  aes_ctr_ctx_t ctx;
  u32 n_left = n_ops;

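  /* One op per iteration; the key schedule was pre-expanded by
   * aes_ctr_key_exp () and is looked up via the op's key index. */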
next:
  kd = (aes_ctr_key_data_t *) cm->key_data[op->key_index];

  clib_aes_ctr_init (&ctx, kd, op->iv, ks);
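  /* For chained ops the CTR context is set up once and reused for every
   * chunk, so the counter stream runs continuously across the buffer
   * chain. */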
  if (op->flags & VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS)
    {
      vnet_crypto_op_chunk_t *chp = chunks + op->chunk_index;
      for (int j = 0; j < op->n_chunks; j++, chp++)
	clib_aes_ctr_transform (&ctx, chp->src, chp->dst, chp->len, ks);
    }
  else
    clib_aes_ctr_transform (&ctx, op->src, op->dst, op->len, ks);

  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;

  if (--n_left)
    {
      op += 1;
      goto next;
    }

  return n_ops;
}

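/* Expand the AES key schedule once, when a key is added; the returned
 * pointer ends up in cm->key_data[] and is later fetched by
 * aes_ops_aes_ctr (). */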
static_always_inline void *
aes_ctr_key_exp (vnet_crypto_key_t *key, aes_key_size_t ks)
{
  aes_ctr_key_data_t *kd;

  kd = clib_mem_alloc_aligned (sizeof (*kd), CLIB_CACHE_LINE_BYTES);

  clib_aes_ctr_key_expand (kd, key->data, ks);

  return kd;
}

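/* Instantiate handlers for each supported key size. For every size x in
 * {128, 192, 256} the _() macro below generates aes_ops_aes_ctr_<x>,
 * aes_ops_aes_ctr_<x>_chained and aes_ctr_key_exp_<x>. */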
#define foreach_aes_ctr_handler_type _ (128) _ (192) _ (256)

#define _(x)                                                                 \
  static u32 aes_ops_aes_ctr_##x (vlib_main_t *vm, vnet_crypto_op_t *ops[], \
				  u32 n_ops)                                 \
  {                                                                          \
    return aes_ops_aes_ctr (vm, ops, n_ops, 0, AES_KEY_##x, 0);             \
  }                                                                          \
  static u32 aes_ops_aes_ctr_##x##_chained (                                \
    vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, \
    u32 n_ops)                                                               \
  {                                                                          \
    return aes_ops_aes_ctr (vm, ops, n_ops, chunks, AES_KEY_##x, 1);        \
  }                                                                          \
  static void *aes_ctr_key_exp_##x (vnet_crypto_key_t *key)                 \
  {                                                                          \
    return aes_ctr_key_exp (key, AES_KEY_##x);                               \
  }

foreach_aes_ctr_handler_type;
#undef _

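/* The same init body is compiled under different names depending on the
 * instruction-set macros present at build time: VAES + AVX512F -> icl,
 * VAES -> adl, AVX512F -> skx, AVX2 -> hsw, aarch64 -> neon, otherwise
 * slm (the baseline variant). */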
clib_error_t *
#if defined(__VAES__) && defined(__AVX512F__)
crypto_native_aes_ctr_init_icl (vlib_main_t *vm)
#elif defined(__VAES__)
crypto_native_aes_ctr_init_adl (vlib_main_t *vm)
#elif __AVX512F__
crypto_native_aes_ctr_init_skx (vlib_main_t *vm)
#elif __AVX2__
crypto_native_aes_ctr_init_hsw (vlib_main_t *vm)
#elif __aarch64__
crypto_native_aes_ctr_init_neon (vlib_main_t *vm)
#else
crypto_native_aes_ctr_init_slm (vlib_main_t *vm)
#endif
{
  crypto_native_main_t *cm = &crypto_native_main;

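/* CTR encryption and decryption are the same keystream XOR, so identical
 * handlers are registered for both the _ENC and _DEC opcodes, along with
 * the per-key-size key expansion function. */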
#define _(x)                                                                 \
  vnet_crypto_register_ops_handlers (                                       \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_AES_##x##_CTR_ENC,          \
    aes_ops_aes_ctr_##x, aes_ops_aes_ctr_##x##_chained);                    \
  vnet_crypto_register_ops_handlers (                                       \
    vm, cm->crypto_engine_index, VNET_CRYPTO_OP_AES_##x##_CTR_DEC,          \
    aes_ops_aes_ctr_##x, aes_ops_aes_ctr_##x##_chained);                    \
  cm->key_fn[VNET_CRYPTO_ALG_AES_##x##_CTR] = aes_ctr_key_exp_##x;
  foreach_aes_ctr_handler_type;
#undef _
  return 0;
}