/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*-
 * BSD LICENSE
 *
 * Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   * Neither the name of Intel Corporation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef included_clib_memcpy_avx512_h
#define included_clib_memcpy_avx512_h

#include <stdint.h>
#include <x86intrin.h>
#include <vppinfra/warnings.h>

/* *INDENT-OFF* */
WARN_OFF (stringop-overflow)
/* *INDENT-ON* */
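
/* Overview (added note): the clib_movNN helpers below each copy a fixed
 * number of bytes with unaligned SSE/AVX/AVX-512 loads and stores, and
 * clib_memcpy_fast_avx512 () composes them into a general-purpose copy.
 * All of this assumes the CPU executes the AVX-512F instructions emitted
 * by the _mm512_* intrinsics. */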

static inline void
clib_mov16 (u8 * dst, const u8 * src)
{
  __m128i xmm0;

  xmm0 = _mm_loadu_si128 ((const __m128i *) src);
  _mm_storeu_si128 ((__m128i *) dst, xmm0);
}

static inline void
clib_mov32 (u8 * dst, const u8 * src)
{
  __m256i ymm0;

  ymm0 = _mm256_loadu_si256 ((const __m256i *) src);
  _mm256_storeu_si256 ((__m256i *) dst, ymm0);
}

static inline void
clib_mov64 (u8 * dst, const u8 * src)
{
  __m512i zmm0;

  zmm0 = _mm512_loadu_si512 ((const void *) src);
  _mm512_storeu_si512 ((void *) dst, zmm0);
}

static inline void
clib_mov128 (u8 * dst, const u8 * src)
{
  clib_mov64 (dst + 0 * 64, src + 0 * 64);
  clib_mov64 (dst + 1 * 64, src + 1 * 64);
}

static inline void
clib_mov256 (u8 * dst, const u8 * src)
{
  clib_mov128 (dst + 0 * 128, src + 0 * 128);
  clib_mov128 (dst + 1 * 128, src + 1 * 128);
}

static inline void
clib_mov128blocks (u8 * dst, const u8 * src, size_t n)
{
  __m512i zmm0, zmm1;

  while (n >= 128)
    {
      zmm0 = _mm512_loadu_si512 ((const void *) (src + 0 * 64));
      n -= 128;
      zmm1 = _mm512_loadu_si512 ((const void *) (src + 1 * 64));
      src = src + 128;
      _mm512_storeu_si512 ((void *) (dst + 0 * 64), zmm0);
      _mm512_storeu_si512 ((void *) (dst + 1 * 64), zmm1);
      dst = dst + 128;
    }
}
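
/* Note: clib_mov128blocks () above and clib_mov512blocks () below only copy
 * n rounded down to a multiple of their block size; the caller handles the
 * remaining tail itself, as clib_memcpy_fast_avx512 () does. */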

static inline void
clib_mov512blocks (u8 * dst, const u8 * src, size_t n)
{
  __m512i zmm0, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7;

  while (n >= 512)
    {
      zmm0 = _mm512_loadu_si512 ((const void *) (src + 0 * 64));
      n -= 512;
      zmm1 = _mm512_loadu_si512 ((const void *) (src + 1 * 64));
      zmm2 = _mm512_loadu_si512 ((const void *) (src + 2 * 64));
      zmm3 = _mm512_loadu_si512 ((const void *) (src + 3 * 64));
      zmm4 = _mm512_loadu_si512 ((const void *) (src + 4 * 64));
      zmm5 = _mm512_loadu_si512 ((const void *) (src + 5 * 64));
      zmm6 = _mm512_loadu_si512 ((const void *) (src + 6 * 64));
      zmm7 = _mm512_loadu_si512 ((const void *) (src + 7 * 64));
      src = src + 512;
      _mm512_storeu_si512 ((void *) (dst + 0 * 64), zmm0);
      _mm512_storeu_si512 ((void *) (dst + 1 * 64), zmm1);
      _mm512_storeu_si512 ((void *) (dst + 2 * 64), zmm2);
      _mm512_storeu_si512 ((void *) (dst + 3 * 64), zmm3);
      _mm512_storeu_si512 ((void *) (dst + 4 * 64), zmm4);
      _mm512_storeu_si512 ((void *) (dst + 5 * 64), zmm5);
      _mm512_storeu_si512 ((void *) (dst + 6 * 64), zmm6);
      _mm512_storeu_si512 ((void *) (dst + 7 * 64), zmm7);
      dst = dst + 512;
    }
}

static inline void *
clib_memcpy_fast_avx512 (void *dst, const void *src, size_t n)
{
  uword dstu = (uword) dst;
  uword srcu = (uword) src;
  void *ret = dst;
  size_t dstofss;
  size_t bits;

  /**
   * Copy less than 16 bytes
   */
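  /* The size is decomposed by its low bits: e.g. for n = 11 (0b1011) the
   * branches below copy 1 + 2 + 8 bytes. */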
  if (n < 16)
    {
      if (n & 0x01)
	{
	  *(u8 *) dstu = *(const u8 *) srcu;
	  srcu = (uword) ((const u8 *) srcu + 1);
	  dstu = (uword) ((u8 *) dstu + 1);
	}
      if (n & 0x02)
	{
	  *(u16 *) dstu = *(const u16 *) srcu;
	  srcu = (uword) ((const u16 *) srcu + 1);
	  dstu = (uword) ((u16 *) dstu + 1);
	}
      if (n & 0x04)
	{
	  *(u32 *) dstu = *(const u32 *) srcu;
	  srcu = (uword) ((const u32 *) srcu + 1);
	  dstu = (uword) ((u32 *) dstu + 1);
	}
      if (n & 0x08)
	*(u64 *) dstu = *(const u64 *) srcu;
      return ret;
    }

  /**
   * Fast way when copy size doesn't exceed 512 bytes
   */
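  /* For the small sizes below, two overlapping moves cover the whole range:
   * e.g. for n = 20, clib_mov16 () copies bytes [0, 16) and then [4, 20).
   * The 12-byte overlap is rewritten with identical data, which is harmless
   * for non-overlapping src/dst buffers. */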
  if (n <= 32)
    {
      clib_mov16 ((u8 *) dst, (const u8 *) src);
      clib_mov16 ((u8 *) dst - 16 + n, (const u8 *) src - 16 + n);
      return ret;
    }
  if (n <= 64)
    {
      clib_mov32 ((u8 *) dst, (const u8 *) src);
      clib_mov32 ((u8 *) dst - 32 + n, (const u8 *) src - 32 + n);
      return ret;
    }
  if (n <= 512)
    {
      if (n >= 256)
	{
	  n -= 256;
	  clib_mov256 ((u8 *) dst, (const u8 *) src);
	  src = (const u8 *) src + 256;
	  dst = (u8 *) dst + 256;
	}
      if (n >= 128)
	{
	  n -= 128;
	  clib_mov128 ((u8 *) dst, (const u8 *) src);
	  src = (const u8 *) src + 128;
	  dst = (u8 *) dst + 128;
	}
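      /* At this label 0 <= n <= 128 bytes remain.  For n > 64 the tail is
       * covered by one 64-byte move from the front plus one ending exactly
       * at byte n; for smaller n the single end-anchored move may reach back
       * into bytes already copied, rewriting them with the same data. */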
    COPY_BLOCK_128_BACK63:
      if (n > 64)
	{
	  clib_mov64 ((u8 *) dst, (const u8 *) src);
	  clib_mov64 ((u8 *) dst - 64 + n, (const u8 *) src - 64 + n);
	  return ret;
	}
      if (n > 0)
	clib_mov64 ((u8 *) dst - 64 + n, (const u8 *) src - 64 + n);
      return ret;
    }

  /**
   * Make store aligned when copy size exceeds 512 bytes
   */
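  /* Example: if dst ends in 0x30, dstofss becomes 64 - 48 = 16.  A full
   * 64-byte move is still issued, but src/dst advance by only 16 bytes, so
   * the following aligned stores simply rewrite the 48-byte overlap; with
   * n > 512 the extra bytes stay inside the destination buffer. */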
  dstofss = (uword) dst & 0x3F;
  if (dstofss > 0)
    {
      dstofss = 64 - dstofss;
      n -= dstofss;
      clib_mov64 ((u8 *) dst, (const u8 *) src);
      src = (const u8 *) src + dstofss;
      dst = (u8 *) dst + dstofss;
    }

  /**
   * Copy 512-byte blocks.
   * Use copy block function for better instruction order control,
   * which is important when load is unaligned.
   */
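  /* clib_mov512blocks () consumes whole 512-byte blocks; "bits" works out
   * to the number of bytes it copied.  E.g. for n = 1300 it copies 1024
   * bytes and the tail n becomes 1300 & 511 = 276. */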
  clib_mov512blocks ((u8 *) dst, (const u8 *) src, n);
  bits = n;
  n = n & 511;
  bits -= n;
  src = (const u8 *) src + bits;
  dst = (u8 *) dst + bits;

  /**
   * Copy 128-byte blocks.
   * Use copy block function for better instruction order control,
   * which is important when load is unaligned.
   */
  if (n >= 128)
    {
      clib_mov128blocks ((u8 *) dst, (const u8 *) src, n);
      bits = n;
      n = n & 127;
      bits -= n;
      src = (const u8 *) src + bits;
      dst = (u8 *) dst + bits;
    }

  /**
   * Copy whatever is left
   */
  goto COPY_BLOCK_128_BACK63;
}
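
/* Usage sketch (illustrative only; the buffer names are hypothetical and
 * this is not how VPP itself dispatches to this function):
 *
 *   u8 pkt_copy[2048];
 *   clib_memcpy_fast_avx512 (pkt_copy, pkt_data, pkt_len);
 *
 * Callers are expected to have established AVX-512 support before reaching
 * this code path. */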

/* *INDENT-OFF* */
WARN_ON (stringop-overflow)
/* *INDENT-ON* */

#endif /* included_clib_memcpy_avx512_h */


/*
 * fd.io coding-style-patch-verification: ON
 *
 * Local Variables:
 * eval: (c-set-style "gnu")
 * End:
 */