/*
 * Copyright (c) 2015 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <vnet/ip/ip.h>
#include <vnet/ethernet/ethernet.h> /* for ethernet_header_t */
#include <vnet/classify/vnet_classify.h>
#include <vnet/dpo/classify_dpo.h>

typedef struct {
  u32 next_index;
  u32 table_index;
  u32 entry_index;
} ip_classify_trace_t;

/* packet trace format function */
static u8 * format_ip_classify_trace (u8 * s, va_list * args)
{
  CLIB_UNUSED (vlib_main_t * vm) = va_arg (*args, vlib_main_t *);
  CLIB_UNUSED (vlib_node_t * node) = va_arg (*args, vlib_node_t *);
  ip_classify_trace_t * t = va_arg (*args, ip_classify_trace_t *);

  s = format (s, "IP_CLASSIFY: next_index %d, table %d, entry %d",
              t->next_index, t->table_index, t->entry_index);
  return s;
}

vlib_node_registration_t ip4_classify_node;
vlib_node_registration_t ip6_classify_node;

#define foreach_ip_classify_error               \
_(MISS, "Classify misses")                      \
_(HIT, "Classify hits")                         \
_(CHAIN_HIT, "Classify hits after chain walk")

typedef enum {
#define _(sym,str) IP_CLASSIFY_ERROR_##sym,
  foreach_ip_classify_error
#undef _
  IP_CLASSIFY_N_ERROR,
} ip_classify_error_t;

static char * ip_classify_error_strings[] = {
#define _(sym,string) string,
  foreach_ip_classify_error
#undef _
};

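/*
 * ip_classify_inline makes two passes over the frame: the first pass
 * computes each packet's classifier hash and prefetches the matching
 * bucket, the second pass performs the table lookups and enqueues each
 * packet to the next node chosen by the matching session (or to the
 * table's miss-next on a final miss).
 */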
static inline uword
ip_classify_inline (vlib_main_t * vm,
                    vlib_node_runtime_t * node,
                    vlib_frame_t * frame, int is_ip4)
{
  u32 n_left_from, * from, * to_next;
  ip_lookup_next_t next_index;
  vnet_classify_main_t * vcm = &vnet_classify_main;
  f64 now = vlib_time_now (vm);
  u32 hits = 0;
  u32 misses = 0;
  u32 chain_hits = 0;
  u32 n_next;

  if (is_ip4) {
    n_next = IP4_LOOKUP_N_NEXT;
  } else {
    n_next = IP6_LOOKUP_N_NEXT;
  }

  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;

  /* First pass: compute hashes */

  while (n_left_from > 2)
    {
      vlib_buffer_t * b0, * b1;
      u32 bi0, bi1;
      u8 * h0, * h1;
      u32 cd_index0, cd_index1;
      classify_dpo_t *cd0, * cd1;
      u32 table_index0, table_index1;
      vnet_classify_table_t * t0, * t1;

      /* prefetch next iteration */
      {
        vlib_buffer_t * p1, * p2;

        p1 = vlib_get_buffer (vm, from[1]);
        p2 = vlib_get_buffer (vm, from[2]);

        vlib_prefetch_buffer_header (p1, STORE);
        CLIB_PREFETCH (p1->data, CLIB_CACHE_LINE_BYTES, STORE);
        vlib_prefetch_buffer_header (p2, STORE);
        CLIB_PREFETCH (p2->data, CLIB_CACHE_LINE_BYTES, STORE);
      }

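      /* Point h0/h1 at the start of the ethernet header (current data
         minus the L2 header size) and pick up the classify table bound
         to each packet's classify DPO. */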
      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = (void *)vlib_buffer_get_current(b0) -
        ethernet_buffer_header_size(b0);

      bi1 = from[1];
      b1 = vlib_get_buffer (vm, bi1);
      h1 = (void *)vlib_buffer_get_current(b1) -
        ethernet_buffer_header_size(b1);

      cd_index0 = vnet_buffer (b0)->ip.adj_index[VLIB_TX];
      cd0 = classify_dpo_get(cd_index0);
      table_index0 = cd0->cd_table_index;

      cd_index1 = vnet_buffer (b1)->ip.adj_index[VLIB_TX];
      cd1 = classify_dpo_get(cd_index1);
      table_index1 = cd1->cd_table_index;

      t0 = pool_elt_at_index (vcm->tables, table_index0);

      t1 = pool_elt_at_index (vcm->tables, table_index1);

      vnet_buffer(b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);

      vnet_buffer(b1)->l2_classify.hash =
        vnet_classify_hash_packet (t1, (u8 *) h1);

      vnet_classify_prefetch_bucket (t1, vnet_buffer(b1)->l2_classify.hash);

      vnet_buffer(b0)->l2_classify.table_index = table_index0;

      vnet_buffer(b1)->l2_classify.table_index = table_index1;

      from += 2;
      n_left_from -= 2;
    }

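  /* Hash the one or two packets left over from the dual loop */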
  while (n_left_from > 0)
    {
      vlib_buffer_t * b0;
      u32 bi0;
      u8 * h0;
      u32 cd_index0;
      classify_dpo_t *cd0;
      u32 table_index0;
      vnet_classify_table_t * t0;

      bi0 = from[0];
      b0 = vlib_get_buffer (vm, bi0);
      h0 = (void *)vlib_buffer_get_current(b0) -
        ethernet_buffer_header_size(b0);

      cd_index0 = vnet_buffer (b0)->ip.adj_index[VLIB_TX];
      cd0 = classify_dpo_get(cd_index0);
      table_index0 = cd0->cd_table_index;

      t0 = pool_elt_at_index (vcm->tables, table_index0);
      vnet_buffer(b0)->l2_classify.hash =
        vnet_classify_hash_packet (t0, (u8 *) h0);

      vnet_buffer(b0)->l2_classify.table_index = table_index0;
      vnet_classify_prefetch_bucket (t0, vnet_buffer(b0)->l2_classify.hash);

      from++;
      n_left_from--;
    }

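  /* Second pass: look each packet up in its classify table and enqueue
     it to the next node selected by the matching session */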
  next_index = node->cached_next_index;
  from = vlib_frame_vector_args (frame);
  n_left_from = frame->n_vectors;

  while (n_left_from > 0)
    {
      u32 n_left_to_next;

      vlib_get_next_frame (vm, node, next_index,
                           to_next, n_left_to_next);

      /* Not enough load/store slots to dual loop... */
      while (n_left_from > 0 && n_left_to_next > 0)
        {
          u32 bi0;
          vlib_buffer_t * b0;
          u32 next0 = IP_LOOKUP_NEXT_DROP;
          u32 table_index0;
          vnet_classify_table_t * t0;
          vnet_classify_entry_t * e0;
          u64 hash0;
          u8 * h0;

          /* Stride 3 seems to work best */
          if (PREDICT_TRUE (n_left_from > 3))
            {
              vlib_buffer_t * p1 = vlib_get_buffer(vm, from[3]);
              vnet_classify_table_t * tp1;
              u32 table_index1;
              u64 phash1;

              table_index1 = vnet_buffer(p1)->l2_classify.table_index;

              if (PREDICT_TRUE (table_index1 != ~0))
                {
                  tp1 = pool_elt_at_index (vcm->tables, table_index1);
                  phash1 = vnet_buffer(p1)->l2_classify.hash;
                  vnet_classify_prefetch_entry (tp1, phash1);
                }
            }

          /* speculatively enqueue b0 to the current next frame */
          bi0 = from[0];
          to_next[0] = bi0;
          from += 1;
          to_next += 1;
          n_left_from -= 1;
          n_left_to_next -= 1;

          b0 = vlib_get_buffer (vm, bi0);
          h0 = b0->data;
          table_index0 = vnet_buffer(b0)->l2_classify.table_index;
          e0 = 0;
          t0 = 0;
          vnet_buffer(b0)->l2_classify.opaque_index = ~0;

          if (PREDICT_TRUE(table_index0 != ~0))
            {
              hash0 = vnet_buffer(b0)->l2_classify.hash;
              t0 = pool_elt_at_index (vcm->tables, table_index0);

              e0 = vnet_classify_find_entry (t0, (u8 *) h0, hash0,
                                             now);
              if (e0)
                {
                  vnet_buffer(b0)->l2_classify.opaque_index
                    = e0->opaque_index;
                  vlib_buffer_advance (b0, e0->advance);
                  next0 = (e0->next_index < node->n_next_nodes)?
                    e0->next_index:next0;
                  hits++;
                }
              else
                {
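                  /* Miss in the first table: walk the chain of linked
                     tables, falling back to the last table's miss-next
                     if no session matches */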
                  while (1)
                    {
                      if (t0->next_table_index != ~0)
                        t0 = pool_elt_at_index (vcm->tables,
                                                t0->next_table_index);
                      else
                        {
                          next0 = (t0->miss_next_index < n_next) ?
                            t0->miss_next_index : next0;
                          misses++;
                          break;
                        }

                      hash0 = vnet_classify_hash_packet (t0, (u8 *) h0);
                      e0 = vnet_classify_find_entry
                        (t0, (u8 *) h0, hash0, now);
                      if (e0)
                        {
                          vnet_buffer(b0)->l2_classify.opaque_index
                            = e0->opaque_index;
                          vlib_buffer_advance (b0, e0->advance);
                          next0 = (e0->next_index < node->n_next_nodes)?
                            e0->next_index:next0;
                          hits++;
                          chain_hits++;
                          break;
                        }
                    }
                }
            }

          if (PREDICT_FALSE((node->flags & VLIB_NODE_FLAG_TRACE)
                            && (b0->flags & VLIB_BUFFER_IS_TRACED)))
            {
              ip_classify_trace_t *t =
                vlib_add_trace (vm, node, b0, sizeof (*t));
              t->next_index = next0;
              t->table_index = t0 ? t0 - vcm->tables : ~0;
              t->entry_index = e0 ? e0 - t0->entries : ~0;
            }

          /* verify speculative enqueue, maybe switch current next frame */
          vlib_validate_buffer_enqueue_x1 (vm, node, next_index,
                                           to_next, n_left_to_next,
                                           bi0, next0);
        }

      vlib_put_next_frame (vm, node, next_index, n_left_to_next);
    }

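  /* Bump the node's classify hit / miss / chain-hit counters for this frame */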
  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_MISS,
                               misses);
  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_HIT,
                               hits);
  vlib_node_increment_counter (vm, node->node_index,
                               IP_CLASSIFY_ERROR_CHAIN_HIT,
                               chain_hits);
  return frame->n_vectors;
}

static uword
ip4_classify (vlib_main_t * vm,
              vlib_node_runtime_t * node,
              vlib_frame_t * frame)
{
  return ip_classify_inline (vm, node, frame, 1 /* is_ip4 */);
}

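/*
 * Registered as a sibling of ip4-lookup, so ip4-classify shares the
 * ip4-lookup next-node arcs (hence .n_next_nodes = 0 here).
 */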
VLIB_REGISTER_NODE (ip4_classify_node) = {
  .function = ip4_classify,
  .name = "ip4-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "ip4-lookup",
  .format_trace = format_ip_classify_trace,
  .n_errors = ARRAY_LEN(ip_classify_error_strings),
  .error_strings = ip_classify_error_strings,

  .n_next_nodes = 0,
};

VLIB_NODE_FUNCTION_MULTIARCH (ip4_classify_node, ip4_classify)

static uword
ip6_classify (vlib_main_t * vm,
              vlib_node_runtime_t * node,
              vlib_frame_t * frame)
{
  return ip_classify_inline (vm, node, frame, 0 /* is_ip4 */);
}

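/* As above: ip6-classify shares the ip6-lookup next-node arcs */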
VLIB_REGISTER_NODE (ip6_classify_node) = {
  .function = ip6_classify,
  .name = "ip6-classify",
  .vector_size = sizeof (u32),
  .sibling_of = "ip6-lookup",
  .format_trace = format_ip_classify_trace,
  .n_errors = ARRAY_LEN(ip_classify_error_strings),
  .error_strings = ip_classify_error_strings,

  .n_next_nodes = 0,
};

VLIB_NODE_FUNCTION_MULTIARCH (ip6_classify_node, ip6_classify)

static clib_error_t *
ip_classify_init (vlib_main_t * vm)
{
  return 0;
}

VLIB_INIT_FUNCTION (ip_classify_init);