blob: 6cbbf10aa2e89c6da71ccd0ad98057b68b793f61 [file] [log] [blame]
/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15#ifndef __included_vnet_classify_h__
16#define __included_vnet_classify_h__
17
18#include <stdarg.h>
19
20#include <vlib/vlib.h>
21#include <vnet/vnet.h>
22#include <vnet/pg/pg.h>
23#include <vnet/ethernet/ethernet.h>
24#include <vnet/ethernet/packet.h>
25#include <vnet/ip/ip_packet.h>
26#include <vnet/ip/ip4_packet.h>
27#include <vnet/ip/ip6_packet.h>
28#include <vlib/cli.h>
29#include <vnet/l2/l2_input.h>
Andrew Yourtchenko815d7d52018-02-07 11:37:02 +010030#include <vnet/l2/l2_output.h>
Ed Warnickecb9cada2015-12-08 15:45:58 -070031#include <vnet/l2/feat_bitmap.h>
khemendra kumard7bfa0e2017-11-27 15:15:53 +053032#include <vnet/api_errno.h> /* for API error numbers */
Ed Warnickecb9cada2015-12-08 15:45:58 -070033
34#include <vppinfra/error.h>
35#include <vppinfra/hash.h>
36#include <vppinfra/cache.h>
37#include <vppinfra/xxhash.h>
38
Jean-Mickael Guerin8941ec22016-03-04 14:14:21 +010039extern vlib_node_registration_t ip4_classify_node;
40extern vlib_node_registration_t ip6_classify_node;
Ed Warnickecb9cada2015-12-08 15:45:58 -070041
42#define CLASSIFY_TRACE 0
43
Damjan Marion927b0712018-02-20 08:33:50 +010044#ifdef CLIB_HAVE_VEC128
khemendra kumard7bfa0e2017-11-27 15:15:53 +053045#define CLASSIFY_USE_SSE //Allow usage of SSE operations
Pierre Pfistercb656302016-03-16 09:14:28 +000046#endif
47
Christophe Fontainefef15b42016-04-09 12:38:49 +090048#define U32X4_ALIGNED(p) PREDICT_TRUE((((intptr_t)p) & 0xf) == 0)
Pierre Pfistercb656302016-03-16 09:14:28 +000049
/*
 * Classify table option to process packets
 * CLASSIFY_FLAG_USE_CURR_DATA:
 * - classify packets starting from VPP node's current data pointer
 */
55#define CLASSIFY_FLAG_USE_CURR_DATA 1
56
/*
 * Classify session action
 * CLASSIFY_ACTION_SET_IP4_FIB_INDEX:
 * - Classified IP packets will be looked up
 *   from the specified ipv4 fib table
 * CLASSIFY_ACTION_SET_IP6_FIB_INDEX:
 * - Classified IP packets will be looked up
 *   from the specified ipv6 fib table
 * CLASSIFY_ACTION_SET_METADATA:
 * - NOTE(review): presumably applies the session's 'metadata' value to
 *   the buffer; semantics inferred from the name -- confirm against the
 *   action handling in vnet_classify.c
 */
typedef enum vnet_classify_action_t_
{
  CLASSIFY_ACTION_SET_IP4_FIB_INDEX = 1,
  CLASSIFY_ACTION_SET_IP6_FIB_INDEX = 2,
  CLASSIFY_ACTION_SET_METADATA = 3,
} __attribute__ ((packed)) vnet_classify_action_t;
Steve Shin25e26dc2016-11-08 10:47:10 -080072
Ed Warnickecb9cada2015-12-08 15:45:58 -070073struct _vnet_classify_main;
74typedef struct _vnet_classify_main vnet_classify_main_t;
75
/* Supported match-key sizes, in units of 16-octet (u32x4) vectors */
#define foreach_size_in_u32x4 \
_(1) \
_(2) \
_(3) \
_(4) \
_(5)
82
/* *INDENT-OFF* */
/*
 * One classifier session (hash-table entry).  The fixed header below is
 * immediately followed in memory by the session's match key:
 * match_n_vectors u32x4 vectors, reached via the zero-length 'key'
 * member (see vnet_classify_entry_at_index for the resulting stride).
 */
typedef CLIB_PACKED(struct _vnet_classify_entry {
  /* Graph node next index */
  u32 next_index;

  /* put into vnet_buffer(b)->l2_classify.opaque_index */
  union {
    struct {
      u32 opaque_index;
      /* advance on hit, note it's a signed quantity... */
      i32 advance;
    };
    u64 opaque_count;
  };

  /* Really only need 1 bit */
  u8 flags;
#define VNET_CLASSIFY_ENTRY_FREE (1<<0)

  /* Action applied on a hit; see vnet_classify_action_t */
  vnet_classify_action_t action;
  /* Action argument -- presumably the FIB index for the SET_*_FIB_INDEX
     actions; confirm against vnet_classify.c */
  u16 metadata;

  /* Hit counter, last heard time; next_free reuses the storage while the
     entry sits on a freelist (entry is then marked VNET_CLASSIFY_ENTRY_FREE) */
  union {
    u64 hits;
    struct _vnet_classify_entry * next_free;
  };

  f64 last_heard;

  /* Must be aligned to a 16-octet boundary */
  u32x4 key[0];
}) vnet_classify_entry_t;
/* *INDENT-ON* */
Ed Warnickecb9cada2015-12-08 15:45:58 -0700117
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530118static inline int
119vnet_classify_entry_is_free (vnet_classify_entry_t * e)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700120{
121 return e->flags & VNET_CLASSIFY_ENTRY_FREE;
122}
123
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530124static inline int
125vnet_classify_entry_is_busy (vnet_classify_entry_t * e)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700126{
127 return ((e->flags & VNET_CLASSIFY_ENTRY_FREE) == 0);
128}
129
/* Need these to con the vector allocator */
/* *INDENT-OFF* */
/*
 * Dummy types whose pad0/pad1 fields (16 + 16 = 32 bytes) mirror the
 * fixed header of vnet_classify_entry_t, so sizeof() yields the true
 * per-entry footprint for keys of 1..5 u32x4 vectors.
 */
#define _(size) \
typedef CLIB_PACKED(struct { \
  u32 pad0[4]; \
  u64 pad1[2]; \
  u32x4 key[size]; \
}) vnet_classify_entry_##size##_t;
foreach_size_in_u32x4;
/* *INDENT-ON* */
#undef _
141
/*
 * Bucket header.  The whole bucket fits in one u64 (see as_u64),
 * allowing it to be read or replaced as a single quantity.
 */
typedef struct
{
  union
  {
    struct
    {
      /* Byte offset of the bucket's entries in the table mheap;
         0 means "empty bucket" (see vnet_classify_find_entry_inline) */
      u32 offset;
      /* When set, scan all (1<<log2_pages) pages linearly from slot 0 */
      u8 linear_search;
      u8 pad[2];
      /* log2 of the number of entry pages hanging off this bucket */
      u8 log2_pages;
    };
    u64 as_u64;
  };
} vnet_classify_bucket_t;
156
/*
 * A single classify table: a (skip, match, mask) tuple plus a hash
 * table of sessions and writer-side bookkeeping.  Tables chain via
 * next_table_index.
 */
typedef struct
{
  /* Mask to apply after skipping N vectors */
  u32x4 *mask;
  /* Buckets and entries */
  vnet_classify_bucket_t *buckets;
  vnet_classify_entry_t *entries;

  /* Config parameters */
  u32 match_n_vectors;		/* key size in u32x4 (16-octet) vectors, 1..5 */
  u32 skip_n_vectors;		/* u32x4 vectors skipped before matching */
  u32 nbuckets;			/* must be a power of two (asserted in
				   vnet_classify_prefetch_bucket) */
  u32 log2_nbuckets;
  u32 linear_buckets;		/* presumably # of buckets in linear-search
				   mode -- confirm in vnet_classify.c */
  int entries_per_page;
  u32 active_elements;
  u32 current_data_flag;	/* CLASSIFY_FLAG_USE_CURR_DATA or 0 */
  int current_data_offset;
  u32 data_offset;
  /* Index of next table to try */
  u32 next_table_index;

  /* Miss next index, return if next_table_index = 0 */
  u32 miss_next_index;

  /* Per-bucket working copies, one per thread */
  vnet_classify_entry_t **working_copies;
  int *working_copy_lengths;
  vnet_classify_bucket_t saved_bucket;

  /* Free entry freelists */
  vnet_classify_entry_t **freelists;

  u8 *name;

  /* Private allocation arena, protected by the writer lock */
  void *mheap;

  /* Writer (only) lock for this table */
  volatile u32 *writer_lock;

} vnet_classify_table_t;
199
/*
 * Classifier main: the pool of tables plus unformat functions
 * registered by other components for CLI/API argument parsing.
 */
struct _vnet_classify_main
{
  /* Table pool */
  vnet_classify_table_t *tables;

  /* Registered next-index, opaque unformat fcns */
  unformat_function_t **unformat_l2_next_index_fns;
  unformat_function_t **unformat_ip_next_index_fns;
  unformat_function_t **unformat_acl_next_index_fns;
  unformat_function_t **unformat_policer_next_index_fns;
  unformat_function_t **unformat_opaque_index_fns;

  /* convenience variables */
  vlib_main_t *vlib_main;
  vnet_main_t *vnet_main;
};
216
Dave Barachf39ff742016-03-20 10:14:45 -0400217extern vnet_classify_main_t vnet_classify_main;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700218
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530219u8 *format_classify_table (u8 * s, va_list * args);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700220
221u64 vnet_classify_hash_packet (vnet_classify_table_t * t, u8 * h);
222
/*
 * Hash the packet header at 'h' for table 't': skip t->skip_n_vectors
 * 16-octet vectors, AND the next t->match_n_vectors (1..5) vectors with
 * the table mask, XOR-fold them into 128 bits, then fold to 64 bits and
 * run clib_xxhash over the result.  The SSE path requires 'h' to be
 * 16-byte aligned; the scalar u64 fallback computes the same value.
 */
static inline u64
vnet_classify_hash_packet_inline (vnet_classify_table_t * t, u8 * h)
{
  u32x4 *mask;

  union
  {
    u32x4 as_u32x4;
    u64 as_u64[2];
  } xor_sum __attribute__ ((aligned (sizeof (u32x4))));

  ASSERT (t);
  mask = t->mask;
#ifdef CLASSIFY_USE_SSE
  if (U32X4_ALIGNED (h))
    {				//SSE can't handle unaligned data
      u32x4 *data = (u32x4 *) h;
      xor_sum.as_u32x4 = data[0 + t->skip_n_vectors] & mask[0];
      /* Deliberate fallthrough: vector N is folded in, then N-1, ... */
      switch (t->match_n_vectors)
	{
	case 5:
	  xor_sum.as_u32x4 ^= data[4 + t->skip_n_vectors] & mask[4];
	  /* FALLTHROUGH */
	case 4:
	  xor_sum.as_u32x4 ^= data[3 + t->skip_n_vectors] & mask[3];
	  /* FALLTHROUGH */
	case 3:
	  xor_sum.as_u32x4 ^= data[2 + t->skip_n_vectors] & mask[2];
	  /* FALLTHROUGH */
	case 2:
	  xor_sum.as_u32x4 ^= data[1 + t->skip_n_vectors] & mask[1];
	  /* FALLTHROUGH */
	case 1:
	  break;
	default:
	  abort ();
	}
    }
  else
#endif /* CLASSIFY_USE_SSE */
    {
      /* Scalar path: operate on u64 halves, 2 u64s per u32x4 vector */
      u32 skip_u64 = t->skip_n_vectors * 2;
      u64 *data64 = (u64 *) h;
      xor_sum.as_u64[0] = data64[0 + skip_u64] & ((u64 *) mask)[0];
      xor_sum.as_u64[1] = data64[1 + skip_u64] & ((u64 *) mask)[1];
      switch (t->match_n_vectors)
	{
	case 5:
	  xor_sum.as_u64[0] ^= data64[8 + skip_u64] & ((u64 *) mask)[8];
	  xor_sum.as_u64[1] ^= data64[9 + skip_u64] & ((u64 *) mask)[9];
	  /* FALLTHROUGH */
	case 4:
	  xor_sum.as_u64[0] ^= data64[6 + skip_u64] & ((u64 *) mask)[6];
	  xor_sum.as_u64[1] ^= data64[7 + skip_u64] & ((u64 *) mask)[7];
	  /* FALLTHROUGH */
	case 3:
	  xor_sum.as_u64[0] ^= data64[4 + skip_u64] & ((u64 *) mask)[4];
	  xor_sum.as_u64[1] ^= data64[5 + skip_u64] & ((u64 *) mask)[5];
	  /* FALLTHROUGH */
	case 2:
	  xor_sum.as_u64[0] ^= data64[2 + skip_u64] & ((u64 *) mask)[2];
	  xor_sum.as_u64[1] ^= data64[3 + skip_u64] & ((u64 *) mask)[3];
	  /* FALLTHROUGH */
	case 1:
	  break;

	default:
	  abort ();
	}
    }

  return clib_xxhash (xor_sum.as_u64[0] ^ xor_sum.as_u64[1]);
}
296
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530297static inline void
Ed Warnickecb9cada2015-12-08 15:45:58 -0700298vnet_classify_prefetch_bucket (vnet_classify_table_t * t, u64 hash)
299{
300 u32 bucket_index;
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530301
302 ASSERT (is_pow2 (t->nbuckets));
303
Ed Warnickecb9cada2015-12-08 15:45:58 -0700304 bucket_index = hash & (t->nbuckets - 1);
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530305
306 CLIB_PREFETCH (&t->buckets[bucket_index], CLIB_CACHE_LINE_BYTES, LOAD);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700307}
308
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530309static inline vnet_classify_entry_t *
Ed Warnickecb9cada2015-12-08 15:45:58 -0700310vnet_classify_get_entry (vnet_classify_table_t * t, uword offset)
311{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530312 u8 *hp = t->mheap;
313 u8 *vp = hp + offset;
314
Ed Warnickecb9cada2015-12-08 15:45:58 -0700315 return (void *) vp;
316}
317
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530318static inline uword
319vnet_classify_get_offset (vnet_classify_table_t * t,
320 vnet_classify_entry_t * v)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700321{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530322 u8 *hp, *vp;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700323
324 hp = (u8 *) t->mheap;
325 vp = (u8 *) v;
326
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530327 ASSERT ((vp - hp) < 0x100000000ULL);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700328 return vp - hp;
329}
330
331static inline vnet_classify_entry_t *
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530332vnet_classify_entry_at_index (vnet_classify_table_t * t,
333 vnet_classify_entry_t * e, u32 index)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700334{
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530335 u8 *eu8;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700336
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530337 eu8 = (u8 *) e;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700338
339 eu8 += index * (sizeof (vnet_classify_entry_t) +
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530340 (t->match_n_vectors * sizeof (u32x4)));
Ed Warnickecb9cada2015-12-08 15:45:58 -0700341
342 return (vnet_classify_entry_t *) eu8;
343}
344
345static inline void
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530346vnet_classify_prefetch_entry (vnet_classify_table_t * t, u64 hash)
Ed Warnickecb9cada2015-12-08 15:45:58 -0700347{
348 u32 bucket_index;
349 u32 value_index;
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530350 vnet_classify_bucket_t *b;
351 vnet_classify_entry_t *e;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700352
353 bucket_index = hash & (t->nbuckets - 1);
354
355 b = &t->buckets[bucket_index];
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530356
Ed Warnickecb9cada2015-12-08 15:45:58 -0700357 if (b->offset == 0)
358 return;
359
360 hash >>= t->log2_nbuckets;
361
362 e = vnet_classify_get_entry (t, b->offset);
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530363 value_index = hash & ((1 << b->log2_pages) - 1);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700364
365 e = vnet_classify_entry_at_index (t, e, value_index);
366
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530367 CLIB_PREFETCH (e, CLIB_CACHE_LINE_BYTES, LOAD);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700368}
369
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530370vnet_classify_entry_t *vnet_classify_find_entry (vnet_classify_table_t * t,
371 u8 * h, u64 hash, f64 now);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700372
/*
 * Look up the packet header at 'h' in table 't' using the precomputed
 * 'hash' (from vnet_classify_hash_packet_inline).  The low bits of the
 * hash select a bucket; after shifting those out, the next bits select
 * a slot among the bucket's (1<<log2_pages) pages and up to
 * entries_per_page entries are probed sequentially -- unless the bucket
 * is in linear-search mode, in which case every entry is scanned from
 * slot 0.  On a hit with non-zero 'now', the entry's hit counter and
 * last_heard stamp are updated.  Returns the matching entry, or NULL.
 */
static inline vnet_classify_entry_t *
vnet_classify_find_entry_inline (vnet_classify_table_t * t,
				 u8 * h, u64 hash, f64 now)
{
  vnet_classify_entry_t *v;
  u32x4 *mask, *key;
  union
  {
    u32x4 as_u32x4;
    u64 as_u64[2];
  } result __attribute__ ((aligned (sizeof (u32x4))));
  vnet_classify_bucket_t *b;
  u32 value_index;
  u32 bucket_index;
  u32 limit;
  int i;

  bucket_index = hash & (t->nbuckets - 1);
  b = &t->buckets[bucket_index];
  mask = t->mask;

  if (b->offset == 0)
    return 0;			/* empty bucket */

  hash >>= t->log2_nbuckets;

  v = vnet_classify_get_entry (t, b->offset);
  value_index = hash & ((1 << b->log2_pages) - 1);
  limit = t->entries_per_page;
  if (PREDICT_FALSE (b->linear_search))
    {
      /* degenerate bucket: scan everything from the start */
      value_index = 0;
      limit *= (1 << b->log2_pages);
    }

  v = vnet_classify_entry_at_index (t, v, value_index);

#ifdef CLASSIFY_USE_SSE
  if (U32X4_ALIGNED (h))
    {
      u32x4 *data = (u32x4 *) h;
      for (i = 0; i < limit; i++)
	{
	  key = v->key;
	  /* OR together (masked data XOR key) per vector: all-zero => match */
	  result.as_u32x4 = (data[0 + t->skip_n_vectors] & mask[0]) ^ key[0];
	  switch (t->match_n_vectors)
	    {
	    case 5:
	      result.as_u32x4 |=
		(data[4 + t->skip_n_vectors] & mask[4]) ^ key[4];
	      /* FALLTHROUGH */
	    case 4:
	      result.as_u32x4 |=
		(data[3 + t->skip_n_vectors] & mask[3]) ^ key[3];
	      /* FALLTHROUGH */
	    case 3:
	      result.as_u32x4 |=
		(data[2 + t->skip_n_vectors] & mask[2]) ^ key[2];
	      /* FALLTHROUGH */
	    case 2:
	      result.as_u32x4 |=
		(data[1 + t->skip_n_vectors] & mask[1]) ^ key[1];
	      /* FALLTHROUGH */
	    case 1:
	      break;
	    default:
	      abort ();
	    }

	  if (u32x4_zero_byte_mask (result.as_u32x4) == 0xffff)
	    {
	      if (PREDICT_TRUE (now))
		{
		  v->hits++;
		  v->last_heard = now;
		}
	      return (v);
	    }
	  v = vnet_classify_entry_at_index (t, v, 1);
	}
    }
  else
#endif /* CLASSIFY_USE_SSE */
    {
      /* Scalar path: same comparison on u64 halves (2 u64s per vector) */
      u32 skip_u64 = t->skip_n_vectors * 2;
      u64 *data64 = (u64 *) h;
      for (i = 0; i < limit; i++)
	{
	  key = v->key;

	  result.as_u64[0] =
	    (data64[0 + skip_u64] & ((u64 *) mask)[0]) ^ ((u64 *) key)[0];
	  result.as_u64[1] =
	    (data64[1 + skip_u64] & ((u64 *) mask)[1]) ^ ((u64 *) key)[1];
	  switch (t->match_n_vectors)
	    {
	    case 5:
	      result.as_u64[0] |=
		(data64[8 + skip_u64] & ((u64 *) mask)[8]) ^ ((u64 *) key)[8];
	      result.as_u64[1] |=
		(data64[9 + skip_u64] & ((u64 *) mask)[9]) ^ ((u64 *) key)[9];
	      /* FALLTHROUGH */
	    case 4:
	      result.as_u64[0] |=
		(data64[6 + skip_u64] & ((u64 *) mask)[6]) ^ ((u64 *) key)[6];
	      result.as_u64[1] |=
		(data64[7 + skip_u64] & ((u64 *) mask)[7]) ^ ((u64 *) key)[7];
	      /* FALLTHROUGH */
	    case 3:
	      result.as_u64[0] |=
		(data64[4 + skip_u64] & ((u64 *) mask)[4]) ^ ((u64 *) key)[4];
	      result.as_u64[1] |=
		(data64[5 + skip_u64] & ((u64 *) mask)[5]) ^ ((u64 *) key)[5];
	      /* FALLTHROUGH */
	    case 2:
	      result.as_u64[0] |=
		(data64[2 + skip_u64] & ((u64 *) mask)[2]) ^ ((u64 *) key)[2];
	      result.as_u64[1] |=
		(data64[3 + skip_u64] & ((u64 *) mask)[3]) ^ ((u64 *) key)[3];
	      /* FALLTHROUGH */
	    case 1:
	      break;
	    default:
	      abort ();
	    }

	  if (result.as_u64[0] == 0 && result.as_u64[1] == 0)
	    {
	      if (PREDICT_TRUE (now))
		{
		  v->hits++;
		  v->last_heard = now;
		}
	      return (v);
	    }

	  v = vnet_classify_entry_at_index (t, v, 1);
	}
    }
  return 0;			/* no match in this bucket */
}
Ed Warnickecb9cada2015-12-08 15:45:58 -0700514
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530515vnet_classify_table_t *vnet_classify_new_table (vnet_classify_main_t * cm,
516 u8 * mask, u32 nbuckets,
517 u32 memory_size,
518 u32 skip_n_vectors,
519 u32 match_n_vectors);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700520
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530521int vnet_classify_add_del_session (vnet_classify_main_t * cm,
522 u32 table_index,
523 u8 * match,
524 u32 hit_next_index,
525 u32 opaque_index,
526 i32 advance,
527 u8 action, u32 metadata, int is_add);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700528
529int vnet_classify_add_del_table (vnet_classify_main_t * cm,
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530530 u8 * mask,
531 u32 nbuckets,
532 u32 memory_size,
533 u32 skip,
534 u32 match,
535 u32 next_table_index,
536 u32 miss_next_index,
537 u32 * table_index,
538 u8 current_data_flag,
539 i16 current_data_offset,
540 int is_add, int del_chain);
Ed Warnickecb9cada2015-12-08 15:45:58 -0700541
542unformat_function_t unformat_ip4_mask;
543unformat_function_t unformat_ip6_mask;
544unformat_function_t unformat_l3_mask;
545unformat_function_t unformat_l2_mask;
546unformat_function_t unformat_classify_mask;
547unformat_function_t unformat_l2_next_index;
548unformat_function_t unformat_ip_next_index;
549unformat_function_t unformat_ip4_match;
550unformat_function_t unformat_ip6_match;
551unformat_function_t unformat_l3_match;
Dave Barach4a3f69c2017-02-22 12:44:56 -0500552unformat_function_t unformat_l4_match;
Ed Warnickecb9cada2015-12-08 15:45:58 -0700553unformat_function_t unformat_vlan_tag;
554unformat_function_t unformat_l2_match;
555unformat_function_t unformat_classify_match;
556
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530557void vnet_classify_register_unformat_ip_next_index_fn
558 (unformat_function_t * fn);
Dave Barachf39ff742016-03-20 10:14:45 -0400559
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530560void vnet_classify_register_unformat_l2_next_index_fn
561 (unformat_function_t * fn);
Dave Barachf39ff742016-03-20 10:14:45 -0400562
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530563void vnet_classify_register_unformat_acl_next_index_fn
564 (unformat_function_t * fn);
Dave Barachf39ff742016-03-20 10:14:45 -0400565
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530566void vnet_classify_register_unformat_policer_next_index_fn
567 (unformat_function_t * fn);
Matus Fabian70e6a8d2016-06-20 08:10:42 -0700568
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530569void vnet_classify_register_unformat_opaque_index_fn (unformat_function_t *
570 fn);
Dave Barachf39ff742016-03-20 10:14:45 -0400571
Ed Warnickecb9cada2015-12-08 15:45:58 -0700572#endif /* __included_vnet_classify_h__ */
khemendra kumard7bfa0e2017-11-27 15:15:53 +0530573
574/*
575 * fd.io coding-style-patch-verification: ON
576 *
577 * Local Variables:
578 * eval: (c-set-style "gnu")
579 * End:
580 */