blob: 40628015501a1366b740230384abe92877fd0657 [file] [log] [blame]
Ed Warnickecb9cada2015-12-08 15:45:58 -07001/*
2 * Copyright (c) 2015 Cisco and/or its affiliates.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at:
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15#ifndef __included_vnet_classify_h__
16#define __included_vnet_classify_h__
17
18#include <stdarg.h>
19
20#include <vlib/vlib.h>
21#include <vnet/vnet.h>
22#include <vnet/pg/pg.h>
23#include <vnet/ethernet/ethernet.h>
24#include <vnet/ethernet/packet.h>
25#include <vnet/ip/ip_packet.h>
26#include <vnet/ip/ip4_packet.h>
27#include <vnet/ip/ip6_packet.h>
28#include <vlib/cli.h>
29#include <vnet/l2/l2_input.h>
30#include <vnet/l2/feat_bitmap.h>
khemendra kumard7bfa0e2017-11-27 15:15:53 +053031#include <vnet/api_errno.h> /* for API error numbers */
Ed Warnickecb9cada2015-12-08 15:45:58 -070032
33#include <vppinfra/error.h>
34#include <vppinfra/hash.h>
35#include <vppinfra/cache.h>
36#include <vppinfra/xxhash.h>
37
extern vlib_node_registration_t ip4_classify_node;
extern vlib_node_registration_t ip6_classify_node;

#define CLASSIFY_TRACE 0

/* Use SSE vector operations on x86; ARM targets fall back to the
 * scalar u64 code paths below. */
#if !defined( __aarch64__) && !defined(__arm__)
#define CLASSIFY_USE_SSE //Allow usage of SSE operations
#endif

/* True when p is 16-byte aligned, i.e. safe for aligned u32x4 loads. */
#define U32X4_ALIGNED(p) PREDICT_TRUE((((intptr_t)p) & 0xf) == 0)
Pierre Pfistercb656302016-03-16 09:14:28 +000048
/*
 * Classify table option to process packets
 *  CLASSIFY_FLAG_USE_CURR_DATA:
 *   - classify packets starting from VPP node's current data pointer
 */
#define CLASSIFY_FLAG_USE_CURR_DATA 1

/*
 * Classify session action
 *  CLASSIFY_ACTION_SET_IP4_FIB_INDEX:
 *   - Classified IP packets will be looked up
 *     from the specified ipv4 fib table
 *  CLASSIFY_ACTION_SET_IP6_FIB_INDEX:
 *   - Classified IP packets will be looked up
 *     from the specified ipv6 fib table
 *  CLASSIFY_ACTION_SET_METADATA:
 *   - NOTE(review): presumably attaches the session's 'metadata' value
 *     to classified packets -- confirm in the classifier nodes
 */
typedef enum vnet_classify_action_t_
{
  CLASSIFY_ACTION_SET_IP4_FIB_INDEX = 1,
  CLASSIFY_ACTION_SET_IP6_FIB_INDEX = 2,
  CLASSIFY_ACTION_SET_METADATA = 3,
} __attribute__ ((packed)) vnet_classify_action_t;
Steve Shin25e26dc2016-11-08 10:47:10 -080071
struct _vnet_classify_main;
typedef struct _vnet_classify_main vnet_classify_main_t;

/* Supported key sizes, in units of 16-byte u32x4 vectors; used below to
 * generate one fixed-size entry typedef per size for the vector allocator. */
#define foreach_size_in_u32x4 \
_(1) \
_(2) \
_(3) \
_(4) \
_(5)
81
/* *INDENT-OFF* */
/**
 * A single classify session. The fixed header below is followed by a
 * variable-length key of t->match_n_vectors u32x4 vectors (see the
 * flexible 'key' member); entries are therefore addressed with
 * vnet_classify_entry_at_index(), not plain pointer arithmetic.
 */
typedef CLIB_PACKED(struct _vnet_classify_entry {
  /* Graph node next index */
  u32 next_index;

  /* put into vnet_buffer(b)->l2_classfy.opaque_index */
  union {
    struct {
      u32 opaque_index;
      /* advance on hit, note it's a signed quantity... */
      i32 advance;
    };
    u64 opaque_count;
  };

  /* Really only need 1 bit */
  u8 flags;
#define VNET_CLASSIFY_ENTRY_FREE (1<<0)

  /* Action to apply on hit, and its argument (e.g. a fib index);
   * see vnet_classify_action_t */
  vnet_classify_action_t action;
  u16 metadata;

  /* Hit counter, last heard time */
  union {
    u64 hits;
    struct _vnet_classify_entry * next_free;
  };

  f64 last_heard;

  /* Must be aligned to a 16-octet boundary */
  u32x4 key[0];
}) vnet_classify_entry_t;
/* *INDENT-ON* */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700116
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530117static inline int
118vnet_classify_entry_is_free (vnet_classify_entry_t * e)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700119{
120 return e->flags & VNET_CLASSIFY_ENTRY_FREE;
121}
122
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530123static inline int
124vnet_classify_entry_is_busy (vnet_classify_entry_t * e)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700125{
126 return ((e->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
127}
128
/* Need these to con the vector allocator */
/* Each typedef mirrors vnet_classify_entry_t's 32-byte fixed header
 * (pad0 + pad1) followed by an in-line key of the given size, so the
 * allocator sees a concrete, correctly-sized type per key size. */
/* *INDENT-OFF* */
#define _(size) \
typedef CLIB_PACKED(struct { \
  u32 pad0[4]; \
  u64 pad1[2]; \
  u32x4 key[size]; \
}) vnet_classify_entry_##size##_t;
foreach_size_in_u32x4;
/* *INDENT-ON* */
#undef _
140
/** One hash bucket: a compact 64-bit descriptor of the bucket's entries. */
typedef struct
{
  union
  {
    struct
    {
      /* Heap offset of the first entry; 0 means the bucket is empty */
      u32 offset;
      /* If set, readers scan all entries linearly instead of hashing
       * into a page slot (see vnet_classify_find_entry_inline) */
      u8 linear_search;
      u8 pad[2];
      /* log2 of the number of entry pages in this bucket */
      u8 log2_pages;
    };
    /* Whole-bucket single-word view */
    u64 as_u64;
  };
} vnet_classify_bucket_t;
155
/**
 * A single classify table: a skip/mask/match exact-match hash table
 * with a bucket/page structure and a private allocation arena.
 */
typedef struct
{
  /* Mask to apply after skipping N vectors */
  u32x4 *mask;
  /* Buckets and entries */
  vnet_classify_bucket_t *buckets;
  vnet_classify_entry_t *entries;

  /* Config parameters */
  u32 match_n_vectors;		/* # of 16-byte vectors compared per key */
  u32 skip_n_vectors;		/* # of 16-byte vectors skipped before matching */
  u32 nbuckets;			/* bucket count; must be a power of two */
  u32 log2_nbuckets;
  u32 linear_buckets;		/* NOTE(review): presumably # of buckets in
				   linear-search mode -- confirm in .c file */
  int entries_per_page;
  u32 active_elements;
  u32 current_data_flag;	/* CLASSIFY_FLAG_USE_CURR_DATA or 0 */
  int current_data_offset;
  u32 data_offset;
  /* Index of next table to try */
  u32 next_table_index;

  /* Miss next index, return if next_table_index = 0 */
  u32 miss_next_index;

  /* Per-bucket working copies, one per thread */
  vnet_classify_entry_t **working_copies;
  int *working_copy_lengths;
  vnet_classify_bucket_t saved_bucket;

  /* Free entry freelists */
  vnet_classify_entry_t **freelists;

  u8 *name;

  /* Private allocation arena, protected by the writer lock */
  void *mheap;

  /* Writer (only) lock for this table */
  volatile u32 *writer_lock;

} vnet_classify_table_t;
198
/** Per-process classifier state: all tables plus registered unformat hooks. */
struct _vnet_classify_main
{
  /* Table pool */
  vnet_classify_table_t *tables;

  /* Registered next-index, opaque unformat fcns */
  unformat_function_t **unformat_l2_next_index_fns;
  unformat_function_t **unformat_ip_next_index_fns;
  unformat_function_t **unformat_acl_next_index_fns;
  unformat_function_t **unformat_policer_next_index_fns;
  unformat_function_t **unformat_opaque_index_fns;

  /* convenience variables */
  vlib_main_t *vlib_main;
  vnet_main_t *vnet_main;
};

extern vnet_classify_main_t vnet_classify_main;

u8 *format_classify_table (u8 * s, va_list * args);

/* Out-of-line variant of vnet_classify_hash_packet_inline below */
u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h);
221
/**
 * Hash packet data @a h against table @a t's skip/mask configuration.
 *
 * Skips t->skip_n_vectors 16-byte vectors, ANDs the next
 * t->match_n_vectors vectors with t->mask, XOR-folds the result to
 * 64 bits and feeds it to clib_xxhash().  On x86 an aligned-SSE path
 * is used when @a h is 16-byte aligned; otherwise the scalar u64 path
 * computes the identical fold.
 */
static inline u64
vnet_classify_hash_packet_inline (vnet_classify_table_t * t, u8 * h)
{
  u32x4 *mask;

  union
  {
    u32x4 as_u32x4;
    u64 as_u64[2];
  } xor_sum __attribute__ ((aligned (sizeof (u32x4))));

  ASSERT (t);
  mask = t->mask;
#ifdef CLASSIFY_USE_SSE
  if (U32X4_ALIGNED (h))
    {				//SSE can't handle unaligned data
      u32x4 *data = (u32x4 *) h;
      /* Cases deliberately fall through: size N folds vectors N-1..1 */
      xor_sum.as_u32x4 = data[0 + t->skip_n_vectors] & mask[0];
      switch (t->match_n_vectors)
	{
	case 5:
	  xor_sum.as_u32x4 ^= data[4 + t->skip_n_vectors] & mask[4];
	  /* FALLTHROUGH */
	case 4:
	  xor_sum.as_u32x4 ^= data[3 + t->skip_n_vectors] & mask[3];
	  /* FALLTHROUGH */
	case 3:
	  xor_sum.as_u32x4 ^= data[2 + t->skip_n_vectors] & mask[2];
	  /* FALLTHROUGH */
	case 2:
	  xor_sum.as_u32x4 ^= data[1 + t->skip_n_vectors] & mask[1];
	  /* FALLTHROUGH */
	case 1:
	  break;
	default:
	  abort ();
	}
    }
  else
#endif /* CLASSIFY_USE_SSE */
    {
      /* Scalar path: one u32x4 vector == two u64 words */
      u32 skip_u64 = t->skip_n_vectors * 2;
      u64 *data64 = (u64 *) h;
      xor_sum.as_u64[0] = data64[0 + skip_u64] & ((u64 *) mask)[0];
      xor_sum.as_u64[1] = data64[1 + skip_u64] & ((u64 *) mask)[1];
      switch (t->match_n_vectors)
	{
	case 5:
	  xor_sum.as_u64[0] ^= data64[8 + skip_u64] & ((u64 *) mask)[8];
	  xor_sum.as_u64[1] ^= data64[9 + skip_u64] & ((u64 *) mask)[9];
	  /* FALLTHROUGH */
	case 4:
	  xor_sum.as_u64[0] ^= data64[6 + skip_u64] & ((u64 *) mask)[6];
	  xor_sum.as_u64[1] ^= data64[7 + skip_u64] & ((u64 *) mask)[7];
	  /* FALLTHROUGH */
	case 3:
	  xor_sum.as_u64[0] ^= data64[4 + skip_u64] & ((u64 *) mask)[4];
	  xor_sum.as_u64[1] ^= data64[5 + skip_u64] & ((u64 *) mask)[5];
	  /* FALLTHROUGH */
	case 2:
	  xor_sum.as_u64[0] ^= data64[2 + skip_u64] & ((u64 *) mask)[2];
	  xor_sum.as_u64[1] ^= data64[3 + skip_u64] & ((u64 *) mask)[3];
	  /* FALLTHROUGH */
	case 1:
	  break;

	default:
	  abort ();
	}
    }

  return clib_xxhash (xor_sum.as_u64[0] ^ xor_sum.as_u64[1]);
}
295
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530296static inline void
Ed Warnickecb9cada2015-12-08 15:45:58 -0700297vnet_classify_prefetch_bucket (vnet_classify_table_t * t, u64 hash)
298{
299 u32 bucket_index;
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530300
301 ASSERT (is_pow2 (t->nbuckets));
302
Ed Warnickecb9cada2015-12-08 15:45:58 -0700303 bucket_index = hash & (t->nbuckets - 1);
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530304
305 CLIB_PREFETCH (&t->buckets[bucket_index], CLIB_CACHE_LINE_BYTES, LOAD);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700306}
307
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530308static inline vnet_classify_entry_t *
Ed Warnickecb9cada2015-12-08 15:45:58 -0700309vnet_classify_get_entry (vnet_classify_table_t * t, uword offset)
310{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530311 u8 *hp = t->mheap;
312 u8 *vp = hp + offset;
313
Ed Warnickecb9cada2015-12-08 15:45:58 -0700314 return (void *) vp;
315}
316
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530317static inline uword
318vnet_classify_get_offset (vnet_classify_table_t * t,
319 vnet_classify_entry_t * v)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700320{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530321 u8 *hp, *vp;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700322
323 hp = (u8 *) t->mheap;
324 vp = (u8 *) v;
325
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530326 ASSERT ((vp - hp) < 0x100000000ULL);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700327 return vp - hp;
328}
329
330static inline vnet_classify_entry_t *
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530331vnet_classify_entry_at_index (vnet_classify_table_t * t,
332 vnet_classify_entry_t * e, u32 index)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700333{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530334 u8 *eu8;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700335
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530336 eu8 = (u8 *) e;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700337
338 eu8 += index * (sizeof (vnet_classify_entry_t) +
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530339 (t->match_n_vectors * sizeof (u32x4)));
Ed Warnickecb9cada2015-12-08 15:45:58 -0700340
341 return (vnet_classify_entry_t *) eu8;
342}
343
344static inline void
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530345vnet_classify_prefetch_entry (vnet_classify_table_t * t, u64 hash)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700346{
347 u32 bucket_index;
348 u32 value_index;
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530349 vnet_classify_bucket_t *b;
350 vnet_classify_entry_t *e;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700351
352 bucket_index = hash & (t->nbuckets - 1);
353
354 b = &t->buckets[bucket_index];
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530355
Ed Warnickecb9cada2015-12-08 15:45:58 -0700356 if (b->offset == 0)
357 return;
358
359 hash >>= t->log2_nbuckets;
360
361 e = vnet_classify_get_entry (t, b->offset);
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530362 value_index = hash & ((1 << b->log2_pages) - 1);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700363
364 e = vnet_classify_entry_at_index (t, e, value_index);
365
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530366 CLIB_PREFETCH (e, CLIB_CACHE_LINE_BYTES, LOAD);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700367}
368
/* Out-of-line variant of the inline lookup below */
vnet_classify_entry_t *vnet_classify_find_entry (vnet_classify_table_t * t,
						 u8 * h, u64 hash, f64 now);

/**
 * Look up the session matching packet data @a h in table @a t.
 *
 * @param t     classify table to search
 * @param h     packet data; masked with t->mask after skipping
 *              t->skip_n_vectors 16-byte vectors
 * @param hash  value from vnet_classify_hash_packet_inline() on @a h
 * @param now   current time; when non-zero, a hit bumps the entry's
 *              hit counter and last_heard stamp
 * @return the matching entry, or 0 if none
 */
static inline vnet_classify_entry_t *
vnet_classify_find_entry_inline (vnet_classify_table_t * t,
				 u8 * h, u64 hash, f64 now)
{
  vnet_classify_entry_t *v;
  u32x4 *mask, *key;
  union
  {
    u32x4 as_u32x4;
    u64 as_u64[2];
  } result __attribute__ ((aligned (sizeof (u32x4))));
  vnet_classify_bucket_t *b;
  u32 value_index;
  u32 bucket_index;
  u32 limit;
  int i;

  /* Low hash bits select the bucket */
  bucket_index = hash & (t->nbuckets - 1);
  b = &t->buckets[bucket_index];
  mask = t->mask;

  /* offset == 0 marks an empty bucket */
  if (b->offset == 0)
    return 0;

  hash >>= t->log2_nbuckets;

  v = vnet_classify_get_entry (t, b->offset);
  value_index = hash & ((1 << b->log2_pages) - 1);
  limit = t->entries_per_page;
  if (PREDICT_FALSE (b->linear_search))
    {
      /* Degenerate bucket: scan every entry in every page */
      value_index = 0;
      limit *= (1 << b->log2_pages);
    }

  v = vnet_classify_entry_at_index (t, v, value_index);

#ifdef CLASSIFY_USE_SSE
  if (U32X4_ALIGNED (h))
    {
      u32x4 *data = (u32x4 *) h;
      for (i = 0; i < limit; i++)
	{
	  key = v->key;
	  /* OR together (masked data XOR key) per vector; all-zero => match.
	     Cases deliberately fall through. */
	  result.as_u32x4 = (data[0 + t->skip_n_vectors] & mask[0]) ^ key[0];
	  switch (t->match_n_vectors)
	    {
	    case 5:
	      result.as_u32x4 |=
		(data[4 + t->skip_n_vectors] & mask[4]) ^ key[4];
	      /* FALLTHROUGH */
	    case 4:
	      result.as_u32x4 |=
		(data[3 + t->skip_n_vectors] & mask[3]) ^ key[3];
	      /* FALLTHROUGH */
	    case 3:
	      result.as_u32x4 |=
		(data[2 + t->skip_n_vectors] & mask[2]) ^ key[2];
	      /* FALLTHROUGH */
	    case 2:
	      result.as_u32x4 |=
		(data[1 + t->skip_n_vectors] & mask[1]) ^ key[1];
	      /* FALLTHROUGH */
	    case 1:
	      break;
	    default:
	      abort ();
	    }

	  if (u32x4_zero_byte_mask (result.as_u32x4) == 0xffff)
	    {
	      /* now == 0 skips the stats update */
	      if (PREDICT_TRUE (now))
		{
		  v->hits++;
		  v->last_heard = now;
		}
	      return (v);
	    }
	  v = vnet_classify_entry_at_index (t, v, 1);
	}
    }
  else
#endif /* CLASSIFY_USE_SSE */
    {
      /* Scalar path: one u32x4 vector == two u64 words */
      u32 skip_u64 = t->skip_n_vectors * 2;
      u64 *data64 = (u64 *) h;
      for (i = 0; i < limit; i++)
	{
	  key = v->key;

	  result.as_u64[0] =
	    (data64[0 + skip_u64] & ((u64 *) mask)[0]) ^ ((u64 *) key)[0];
	  result.as_u64[1] =
	    (data64[1 + skip_u64] & ((u64 *) mask)[1]) ^ ((u64 *) key)[1];
	  switch (t->match_n_vectors)
	    {
	    case 5:
	      result.as_u64[0] |=
		(data64[8 + skip_u64] & ((u64 *) mask)[8]) ^ ((u64 *) key)[8];
	      result.as_u64[1] |=
		(data64[9 + skip_u64] & ((u64 *) mask)[9]) ^ ((u64 *) key)[9];
	      /* FALLTHROUGH */
	    case 4:
	      result.as_u64[0] |=
		(data64[6 + skip_u64] & ((u64 *) mask)[6]) ^ ((u64 *) key)[6];
	      result.as_u64[1] |=
		(data64[7 + skip_u64] & ((u64 *) mask)[7]) ^ ((u64 *) key)[7];
	      /* FALLTHROUGH */
	    case 3:
	      result.as_u64[0] |=
		(data64[4 + skip_u64] & ((u64 *) mask)[4]) ^ ((u64 *) key)[4];
	      result.as_u64[1] |=
		(data64[5 + skip_u64] & ((u64 *) mask)[5]) ^ ((u64 *) key)[5];
	      /* FALLTHROUGH */
	    case 2:
	      result.as_u64[0] |=
		(data64[2 + skip_u64] & ((u64 *) mask)[2]) ^ ((u64 *) key)[2];
	      result.as_u64[1] |=
		(data64[3 + skip_u64] & ((u64 *) mask)[3]) ^ ((u64 *) key)[3];
	      /* FALLTHROUGH */
	    case 1:
	      break;
	    default:
	      abort ();
	    }

	  if (result.as_u64[0] == 0 && result.as_u64[1] == 0)
	    {
	      /* now == 0 skips the stats update */
	      if (PREDICT_TRUE (now))
		{
		  v->hits++;
		  v->last_heard = now;
		}
	      return (v);
	    }

	  v = vnet_classify_entry_at_index (t, v, 1);
	}
    }
  return 0;
}
Ed Warnickecb9cada2015-12-08 15:45:58 -0700513
/* Create a new classify table; mask is skip_n_vectors + match_n_vectors
 * 16-byte vectors long */
vnet_classify_table_t *vnet_classify_new_table (vnet_classify_main_t * cm,
						u8 * mask, u32 nbuckets,
						u32 memory_size,
						u32 skip_n_vectors,
						u32 match_n_vectors);

/* Add (is_add != 0) or delete a session in an existing table */
int vnet_classify_add_del_session (vnet_classify_main_t * cm,
				   u32 table_index,
				   u8 * match,
				   u32 hit_next_index,
				   u32 opaque_index,
				   i32 advance,
				   u8 action, u32 metadata, int is_add);

/* Add (is_add != 0) or delete a table; on add, *table_index returns
 * the new table's index */
int vnet_classify_add_del_table (vnet_classify_main_t * cm,
				 u8 * mask,
				 u32 nbuckets,
				 u32 memory_size,
				 u32 skip,
				 u32 match,
				 u32 next_table_index,
				 u32 miss_next_index,
				 u32 * table_index,
				 u8 current_data_flag,
				 i16 current_data_offset,
				 int is_add, int del_chain);

/* unformat helpers for building masks and match vectors from CLI/API text */
unformat_function_t unformat_ip4_mask;
unformat_function_t unformat_ip6_mask;
unformat_function_t unformat_l3_mask;
unformat_function_t unformat_l2_mask;
unformat_function_t unformat_classify_mask;
unformat_function_t unformat_l2_next_index;
unformat_function_t unformat_ip_next_index;
unformat_function_t unformat_ip4_match;
unformat_function_t unformat_ip6_match;
unformat_function_t unformat_l3_match;
unformat_function_t unformat_l4_match;
unformat_function_t unformat_vlan_tag;
unformat_function_t unformat_l2_match;
unformat_function_t unformat_classify_match;

/* Registration hooks: other features add their own unformat functions
 * to the fn vectors in vnet_classify_main */
void vnet_classify_register_unformat_ip_next_index_fn
  (unformat_function_t * fn);

void vnet_classify_register_unformat_l2_next_index_fn
  (unformat_function_t * fn);

void vnet_classify_register_unformat_acl_next_index_fn
  (unformat_function_t * fn);

void vnet_classify_register_unformat_policer_next_index_fn
  (unformat_function_t * fn);

void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t *
						      fn);
Dave Barachf39ff742016-03-20 10:14:45 -0400570
Ed Warnickecb9cada2015-12-08 15:45:58 -0700571#endif /* __included_vnet_classify_h__ */
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530572
573/*
574 * fd.io coding-style-patch-verification: ON
575 *
576 * Local Variables:
577 * eval: (c-set-style "gnu")
578 * End:
579 */