/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <vnet/fib/fib_entry_cover.h>
#include <vnet/fib/fib_entry_src.h>
#include <vnet/fib/fib_node_list.h>
#include <vnet/fib/fib_entry_delegate.h>
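
/**
 * @brief Record that entry 'covered' is tracking entry 'cover'.
 *
 * The covered entry is pushed onto the cover's dependent list, held in the
 * FIB_ENTRY_DELEGATE_COVERED delegate (created on first use). The returned
 * value is the index of the new list element; pass it back to
 * fib_entry_cover_untrack() to end the tracking relationship.
 */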
u32
fib_entry_cover_track (fib_entry_t *cover,
                       fib_node_index_t covered)
{
    fib_entry_delegate_t *fed;

    FIB_ENTRY_DBG(cover, "cover-track %d", covered);

    ASSERT(fib_entry_get_index(cover) != covered);

    fed = fib_entry_delegate_find(cover, FIB_ENTRY_DELEGATE_COVERED);

    if (NULL == fed)
    {
        fed = fib_entry_delegate_find_or_add(cover, FIB_ENTRY_DELEGATE_COVERED);
        fed->fd_list = fib_node_list_create();
    }

    return (fib_node_list_push_front(fed->fd_list,
                                     0, FIB_NODE_TYPE_ENTRY,
                                     covered));
}
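
/**
 * @brief Stop tracking a covered entry.
 *
 * Removes the element identified by tracked_index (as returned from
 * fib_entry_cover_track()) from the cover's dependent list. When the list
 * becomes empty it is destroyed and the COVERED delegate is removed.
 */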
void
fib_entry_cover_untrack (fib_entry_t *cover,
                         u32 tracked_index)
{
    fib_entry_delegate_t *fed;

    FIB_ENTRY_DBG(cover, "cover-untrack @ %d", tracked_index);

    fed = fib_entry_delegate_find(cover, FIB_ENTRY_DELEGATE_COVERED);

    if (NULL == fed)
        return;

    fib_node_list_remove(fed->fd_list, tracked_index);

    if (0 == fib_node_list_get_size(fed->fd_list))
    {
        fib_node_list_destroy(&fed->fd_list);
        fib_entry_delegate_remove(cover, FIB_ENTRY_DELEGATE_COVERED);
    }
}

/**
 * Internal struct to hold the user-supplied parameters for the cover walk
 */
typedef struct fib_enty_cover_walk_ctx_t_ {
    fib_entry_t *cover;
    fib_entry_covered_walk_t walk;
    void *ctx;
} fib_enty_cover_walk_ctx_t;
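
/**
 * @brief Adapter from the generic fib_node_list walk to the typed covered
 * walk; invokes the user's callback for one dependent entry.
 */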
static int
fib_entry_cover_walk_node_ptr (fib_node_ptr_t *depend,
                               void *args)
{
    fib_enty_cover_walk_ctx_t *ctx = args;

    ctx->walk(ctx->cover, depend->fnp_index, ctx->ctx);

    /* continue */
    return (1);
}
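
/**
 * @brief Walk all entries that are tracking 'cover', invoking 'walk' for
 * each one. A no-op if the cover has no COVERED delegate, i.e. no dependents.
 */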
void
fib_entry_cover_walk (fib_entry_t *cover,
                      fib_entry_covered_walk_t walk,
                      void *args)
{
    fib_entry_delegate_t *fed;

    fed = fib_entry_delegate_find(cover, FIB_ENTRY_DELEGATE_COVERED);

    if (NULL == fed)
        return;

    fib_enty_cover_walk_ctx_t ctx = {
        .cover = cover,
        .walk = walk,
        .ctx = args,
    };

    fib_node_list_walk(fed->fd_list,
                       fib_entry_cover_walk_node_ptr,
                       &ctx);
}
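
/**
 * @brief Per-dependent callback used by fib_entry_cover_change_notify();
 * decides whether the dependent's cover has changed and, if so, informs it
 * via fib_entry_cover_changed().
 */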
static int
fib_entry_cover_change_one (fib_entry_t *cover,
                            fib_node_index_t covered,
                            void *args)
{
    fib_node_index_t new_cover;

    /*
     * The 3 entries involved here are:
     *   cover - the least specific. It will cover both the others.
     *   new_cover - the entry just inserted below the cover.
     *   covered - the entry that was tracking the cover.
     *
     * The checks below determine whether new_cover is a cover for covered.
     */
    new_cover = pointer_to_uword(args);

    if (FIB_NODE_INDEX_INVALID == new_cover)
    {
        /*
         * nothing has been inserted, which implies the cover was removed.
         * 'cover' is thus the new cover.
         */
        fib_entry_cover_changed(covered);
    }
    else if (new_cover != covered)
    {
        const fib_prefix_t *pfx_covered, *pfx_new_cover;

        pfx_covered = fib_entry_get_prefix(covered);
        pfx_new_cover = fib_entry_get_prefix(new_cover);

        if (fib_prefix_is_cover(pfx_new_cover, pfx_covered))
        {
            fib_entry_cover_changed(covered);
        }
    }
    /* continue */
    return (1);
}
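
/**
 * @brief Notify the entries tracking 'cover_index' that a new entry
 * ('covered') has been inserted below the cover, or that the cover was
 * removed when 'covered' is FIB_NODE_INDEX_INVALID. Each tracker whose best
 * cover has changed as a result is informed via fib_entry_cover_changed().
 */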
void
fib_entry_cover_change_notify (fib_node_index_t cover_index,
                               fib_node_index_t covered)
{
    fib_entry_t *cover;

    cover = fib_entry_get(cover_index);

    fib_entry_cover_walk(cover,
                         fib_entry_cover_change_one,
                         uword_to_pointer(covered, void*));
}
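
/**
 * @brief Per-dependent callback used by fib_entry_cover_update_notify();
 * informs one dependent that its cover has been updated.
 */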
static int
fib_entry_cover_update_one (fib_entry_t *cover,
                            fib_node_index_t covered,
                            void *args)
{
    fib_entry_cover_updated(covered);

    /* continue */
    return (1);
}
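
/**
 * @brief Notify all entries tracking 'fib_entry' that their cover has been
 * updated.
 */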
void
fib_entry_cover_update_notify (fib_entry_t *fib_entry)
{
    fib_entry_cover_walk(fib_entry,
                         fib_entry_cover_update_one,
                         NULL);
}