Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 1 | /* |
| 2 | ************************************************************************** |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 3 | * Copyright (c) 2013, 2015-2017 The Linux Foundation. All rights reserved. |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 4 | * Permission to use, copy, modify, and/or distribute this software for |
| 5 | * any purpose with or without fee is hereby granted, provided that the |
| 6 | * above copyright notice and this permission notice appear in all copies. |
| 7 | * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES |
| 8 | * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF |
| 9 | * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR |
| 10 | * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES |
| 11 | * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN |
| 12 | * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT |
| 13 | * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. |
| 14 | ************************************************************************** |
| 15 | */ |
| 16 | |
| 17 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 18 | * nss_freq.c |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 19 | * NSS frequency change APIs |
| 20 | */ |
| 21 | |
| 22 | #include "nss_tx_rx_common.h" |
| 23 | |
| 24 | #define NSS_ACK_STARTED 0 |
| 25 | #define NSS_ACK_FINISHED 1 |
| 26 | |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 27 | extern struct nss_frequency_statistics nss_freq_stat; |
| 28 | extern struct nss_runtime_sampling nss_runtime_samples; |
| 29 | extern struct workqueue_struct *nss_wq; |
| 30 | extern nss_work_t *nss_work; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 31 | |
| 32 | /* |
Sundarajan Srinivasan | 02e6c2b | 2014-10-06 11:51:12 -0700 | [diff] [blame] | 33 | * nss_freq_msg_init() |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 34 | * Initialize the freq message |
Sundarajan Srinivasan | 02e6c2b | 2014-10-06 11:51:12 -0700 | [diff] [blame] | 35 | */ |
/*
 * nss_freq_msg_init()
 *	Initialize the common header of a core-frequency message.
 *
 * Thin wrapper over nss_cmn_msg_init(): fills in the destination interface,
 * message type, payload length and the response callback/app_data pair on
 * the embedded struct nss_cmn_msg. No other fields of *ncm are touched.
 */
static void nss_freq_msg_init(struct nss_corefreq_msg *ncm, uint16_t if_num, uint32_t type, uint32_t len,
			void *cb, void *app_data)
{
	nss_cmn_msg_init(&ncm->cm, if_num, type, len, cb, app_data);
}
| 41 | |
| 42 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 43 | * nss_freq_handle_ack() |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 44 | * Handle the nss ack of frequency change. |
| 45 | */ |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 46 | static void nss_freq_handle_ack(struct nss_ctx_instance *nss_ctx, struct nss_freq_msg *nfa) |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 47 | { |
Thomas Wu | 168ca26 | 2014-03-21 16:20:27 -0700 | [diff] [blame] | 48 | if (nfa->ack == NSS_ACK_STARTED) { |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 49 | /* |
| 50 | * NSS finished start noficiation - HW change clocks and send end notification |
| 51 | */ |
Thomas Wu | 168ca26 | 2014-03-21 16:20:27 -0700 | [diff] [blame] | 52 | nss_info("%p: NSS ACK Received: %d - Change HW CLK/Send Finish to NSS\n", nss_ctx, nfa->ack); |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 53 | |
| 54 | return; |
| 55 | } |
| 56 | |
Thomas Wu | 168ca26 | 2014-03-21 16:20:27 -0700 | [diff] [blame] | 57 | if (nfa->ack == NSS_ACK_FINISHED) { |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 58 | /* |
| 59 | * NSS finished end notification - Done |
| 60 | */ |
Thomas Wu | 168ca26 | 2014-03-21 16:20:27 -0700 | [diff] [blame] | 61 | nss_info("%p: NSS ACK Received: %d - End Notification ACK - Running: %dmhz\n", nss_ctx, nfa->ack, nfa->freq_current); |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 62 | nss_runtime_samples.freq_scale_ready = 1; |
| 63 | return; |
| 64 | } |
| 65 | |
| 66 | nss_info("%p: NSS had an error - Running: %dmhz\n", nss_ctx, nfa->freq_current); |
| 67 | } |
| 68 | |
| 69 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 70 | * nss_freq_queue_work() |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 71 | * Queue Work to the NSS Workqueue based on Current index. |
| 72 | */ |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 73 | static bool nss_freq_queue_work(void) |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 74 | { |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 75 | nss_freq_scales_t index = nss_runtime_samples.freq_scale_index; |
| 76 | |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 77 | BUG_ON(!nss_wq); |
| 78 | |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 79 | nss_info("frequency:%d index:%d sample count:%x\n", nss_runtime_samples.freq_scale[index].frequency, |
| 80 | index, nss_runtime_samples.average); |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 81 | |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 82 | /* |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 83 | * schedule freq change with autoscale ON |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 84 | */ |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 85 | return nss_freq_sched_change(index, true); |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 86 | } |
| 87 | |
| 88 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 89 | * nss_freq_handle_core_stats() |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 90 | * Handle the core stats |
| 91 | */ |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 92 | static void nss_freq_handle_core_stats(struct nss_ctx_instance *nss_ctx, struct nss_core_stats *core_stats) |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 93 | { |
| 94 | uint32_t b_index; |
| 95 | uint32_t minimum; |
| 96 | uint32_t maximum; |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 97 | uint32_t sample = core_stats->inst_cnt_total; |
| 98 | uint32_t index = nss_runtime_samples.freq_scale_index; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 99 | |
| 100 | /* |
| 101 | * We do not accept any statistics if auto scaling is off, |
| 102 | * we start with a fresh sample set when scaling is |
| 103 | * eventually turned on. |
| 104 | */ |
| 105 | if (!nss_cmd_buf.auto_scale && nss_runtime_samples.initialized) { |
| 106 | return; |
| 107 | } |
| 108 | |
| 109 | /* |
| 110 | * Delete Current Index Value, Add New Value, Recalculate new Sum, Shift Index |
| 111 | */ |
| 112 | b_index = nss_runtime_samples.buffer_index; |
| 113 | |
| 114 | nss_runtime_samples.sum = nss_runtime_samples.sum - nss_runtime_samples.buffer[b_index]; |
| 115 | nss_runtime_samples.buffer[b_index] = sample; |
| 116 | nss_runtime_samples.sum = nss_runtime_samples.sum + nss_runtime_samples.buffer[b_index]; |
| 117 | nss_runtime_samples.buffer_index = (b_index + 1) & NSS_SAMPLE_BUFFER_MASK; |
| 118 | |
| 119 | if (nss_runtime_samples.sample_count < NSS_SAMPLE_BUFFER_SIZE) { |
| 120 | nss_runtime_samples.sample_count++; |
| 121 | |
| 122 | /* |
| 123 | * Samples Are All Ready, Start Auto Scale |
| 124 | */ |
| 125 | if (nss_runtime_samples.sample_count == NSS_SAMPLE_BUFFER_SIZE ) { |
| 126 | nss_cmd_buf.auto_scale = 1; |
| 127 | nss_runtime_samples.freq_scale_ready = 1; |
| 128 | nss_runtime_samples.initialized = 1; |
| 129 | } |
| 130 | |
| 131 | return; |
| 132 | } |
| 133 | |
| 134 | nss_runtime_samples.average = nss_runtime_samples.sum / nss_runtime_samples.sample_count; |
| 135 | |
| 136 | /* |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 137 | * Print out statistics every 10 samples |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 138 | */ |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 139 | if (nss_runtime_samples.message_rate_limit++ >= NSS_MESSAGE_RATE_LIMIT) { |
Sakthi Vignesh Radhakrishnan | aa37810 | 2014-04-07 13:52:28 -0700 | [diff] [blame] | 140 | nss_trace("%p: Running AVG:%x Sample:%x Divider:%d\n", nss_ctx, nss_runtime_samples.average, core_stats->inst_cnt_total, nss_runtime_samples.sample_count); |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 141 | nss_trace("%p: Current Frequency Index:%d\n", nss_ctx, index); |
Sakthi Vignesh Radhakrishnan | aa37810 | 2014-04-07 13:52:28 -0700 | [diff] [blame] | 142 | nss_trace("%p: Auto Scale:%d Auto Scale Ready:%d\n", nss_ctx, nss_runtime_samples.freq_scale_ready, nss_cmd_buf.auto_scale); |
| 143 | nss_trace("%p: Current Rate:%x\n", nss_ctx, nss_runtime_samples.average); |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 144 | |
| 145 | nss_runtime_samples.message_rate_limit = 0; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 146 | } |
| 147 | |
| 148 | /* |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 149 | * Don't scale if we are not ready or auto scale is disabled. |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 150 | */ |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 151 | if ((nss_runtime_samples.freq_scale_ready != 1) || (nss_cmd_buf.auto_scale != 1)) { |
| 152 | return; |
| 153 | } |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 154 | |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 155 | /* |
| 156 | * Scale Algorithmn |
| 157 | * Algorithmn will limit how fast it will transition each scale, by the number of samples seen. |
| 158 | * If any sample is out of scale during the idle count, the rate_limit will reset to 0. |
| 159 | * Scales are limited to the max number of cpu scales we support. |
| 160 | */ |
| 161 | if (nss_runtime_samples.freq_scale_rate_limit_up++ >= NSS_FREQUENCY_SCALE_RATE_LIMIT_UP) { |
| 162 | maximum = nss_runtime_samples.freq_scale[index].maximum; |
Thomas Wu | 1fa2609 | 2016-03-30 14:10:03 -0700 | [diff] [blame] | 163 | if ((nss_runtime_samples.average > maximum) && (index < (NSS_FREQ_MAX_SCALE - 1))) { |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 164 | nss_runtime_samples.freq_scale_index++; |
| 165 | nss_runtime_samples.freq_scale_ready = 0; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 166 | |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 167 | /* |
| 168 | * If fail to increase frequency, decrease index |
| 169 | */ |
Thomas Wu | 4640e56 | 2015-06-10 10:23:04 -0700 | [diff] [blame] | 170 | nss_trace("frequency increase to %d inst:%x > maximum:%x\n", nss_runtime_samples.freq_scale[nss_runtime_samples.freq_scale_index].frequency, sample, maximum); |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 171 | if (!nss_freq_queue_work()) { |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 172 | nss_runtime_samples.freq_scale_index--; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 173 | } |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 174 | } |
Thomas Wu | 4640e56 | 2015-06-10 10:23:04 -0700 | [diff] [blame] | 175 | |
| 176 | /* |
| 177 | * Reset the down scale counter based on running average, so can idle properlly |
| 178 | */ |
Thomas Wu | c151f2e | 2015-09-08 10:59:44 -0700 | [diff] [blame] | 179 | if (nss_runtime_samples.average > maximum) { |
Thomas Wu | 4640e56 | 2015-06-10 10:23:04 -0700 | [diff] [blame] | 180 | nss_trace("down scale timeout reset running average:%x\n", nss_runtime_samples.average); |
| 181 | nss_runtime_samples.freq_scale_rate_limit_down = 0; |
| 182 | } |
| 183 | |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 184 | nss_runtime_samples.freq_scale_rate_limit_up = 0; |
| 185 | return; |
| 186 | } |
| 187 | |
| 188 | if (nss_runtime_samples.freq_scale_rate_limit_down++ >= NSS_FREQUENCY_SCALE_RATE_LIMIT_DOWN) { |
| 189 | minimum = nss_runtime_samples.freq_scale[index].minimum; |
| 190 | if ((nss_runtime_samples.average < minimum) && (index > 0)) { |
| 191 | nss_runtime_samples.freq_scale_index--; |
| 192 | nss_runtime_samples.freq_scale_ready = 0; |
| 193 | |
| 194 | /* |
| 195 | * If fail to decrease frequency, increase index |
| 196 | */ |
Thomas Wu | c151f2e | 2015-09-08 10:59:44 -0700 | [diff] [blame] | 197 | nss_trace("frequency decrease to %d inst:%x < minumum:%x\n", nss_runtime_samples.freq_scale[nss_runtime_samples.freq_scale_index].frequency, nss_runtime_samples.average, minimum); |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 198 | if (!nss_freq_queue_work()) { |
Thomas Wu | 0e2fc4f | 2015-03-04 15:39:14 -0800 | [diff] [blame] | 199 | nss_runtime_samples.freq_scale_index++; |
| 200 | } |
| 201 | } |
| 202 | nss_runtime_samples.freq_scale_rate_limit_down = 0; |
| 203 | return; |
Abhishek Rastogi | 9da4747 | 2014-03-18 19:46:15 +0530 | [diff] [blame] | 204 | } |
| 205 | } |
| 206 | |
| 207 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 208 | * nss_freq_interface_handler() |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 209 | * Handle NSS -> HLOS messages for Frequency Changes and Statistics |
| 210 | */ |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 211 | static void nss_freq_interface_handler(struct nss_ctx_instance *nss_ctx, struct nss_cmn_msg *ncm, __attribute__((unused))void *app_data) { |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 212 | |
| 213 | struct nss_corefreq_msg *ncfm = (struct nss_corefreq_msg *)ncm; |
| 214 | |
Thomas Wu | 168ca26 | 2014-03-21 16:20:27 -0700 | [diff] [blame] | 215 | switch (ncfm->cm.type) { |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 216 | case COREFREQ_METADATA_TYPE_TX_FREQ_ACK: |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 217 | nss_freq_handle_ack(nss_ctx, &ncfm->msg.nfc); |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 218 | break; |
| 219 | case COREFREQ_METADATA_TYPE_TX_CORE_STATS: |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 220 | nss_freq_handle_core_stats(nss_ctx, &ncfm->msg.ncs); |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 221 | break; |
| 222 | |
| 223 | default: |
| 224 | if (ncm->response != NSS_CMN_RESPONSE_ACK) { |
| 225 | /* |
| 226 | * Check response |
| 227 | */ |
Thomas Wu | 6825035 | 2014-04-02 18:59:40 -0700 | [diff] [blame] | 228 | nss_info("%p: Received response %d for type %d, interface %d", nss_ctx, ncm->response, ncm->type, ncm->interface); |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 229 | } |
| 230 | } |
| 231 | } |
| 232 | |
| 233 | /* |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 234 | * nss_freq_change() |
| 235 | * NSS frequency change API. |
| 236 | */ |
| 237 | nss_tx_status_t nss_freq_change(struct nss_ctx_instance *nss_ctx, uint32_t eng, uint32_t stats_enable, uint32_t start_or_end) |
| 238 | { |
| 239 | struct sk_buff *nbuf; |
| 240 | int32_t status; |
| 241 | struct nss_corefreq_msg *ncm; |
| 242 | struct nss_freq_msg *nfc; |
| 243 | |
| 244 | nss_info("%p: frequency changing to: %d\n", nss_ctx, eng); |
| 245 | |
| 246 | NSS_VERIFY_CTX_MAGIC(nss_ctx); |
| 247 | if (unlikely(nss_ctx->state != NSS_CORE_STATE_INITIALIZED)) { |
| 248 | return NSS_TX_FAILURE_NOT_READY; |
| 249 | } |
| 250 | |
| 251 | nbuf = dev_alloc_skb(NSS_NBUF_PAYLOAD_SIZE); |
| 252 | if (unlikely(!nbuf)) { |
| 253 | NSS_PKT_STATS_INCREMENT(nss_ctx, &nss_ctx->nss_top->stats_drv[NSS_STATS_DRV_NBUF_ALLOC_FAILS]); |
| 254 | return NSS_TX_FAILURE; |
| 255 | } |
| 256 | |
| 257 | ncm = (struct nss_corefreq_msg *)skb_put(nbuf, sizeof(struct nss_corefreq_msg)); |
| 258 | |
| 259 | nss_freq_msg_init(ncm, NSS_COREFREQ_INTERFACE, NSS_TX_METADATA_TYPE_NSS_FREQ_CHANGE, |
| 260 | sizeof(struct nss_freq_msg), NULL, NULL); |
| 261 | nfc = &ncm->msg.nfc; |
| 262 | nfc->frequency = eng; |
| 263 | nfc->start_or_end = start_or_end; |
| 264 | nfc->stats_enable = stats_enable; |
| 265 | |
| 266 | status = nss_core_send_buffer(nss_ctx, 0, nbuf, NSS_IF_CMD_QUEUE, H2N_BUFFER_CTRL, 0); |
| 267 | if (status != NSS_CORE_STATUS_SUCCESS) { |
| 268 | dev_kfree_skb_any(nbuf); |
| 269 | nss_info("%p: unable to enqueue 'nss frequency change' - marked as stopped\n", nss_ctx); |
| 270 | return NSS_TX_FAILURE; |
| 271 | } |
| 272 | |
| 273 | nss_hal_send_interrupt(nss_ctx, NSS_H2N_INTR_DATA_COMMAND_QUEUE); |
| 274 | |
| 275 | return NSS_TX_SUCCESS; |
| 276 | } |
| 277 | |
| 278 | /* |
| 279 | * nss_freq_sched_change() |
| 280 | * schedule a frequency work |
| 281 | */ |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 282 | bool nss_freq_sched_change(nss_freq_scales_t index, bool auto_scale) |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 283 | { |
| 284 | if (index >= NSS_FREQ_MAX_SCALE) { |
| 285 | nss_info("NSS freq scale beyond limit\n"); |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 286 | return false; |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 287 | } |
| 288 | |
| 289 | nss_work = (nss_work_t *)kmalloc(sizeof(nss_work_t), GFP_ATOMIC); |
| 290 | if (!nss_work) { |
| 291 | nss_info("NSS Freq WQ kmalloc fail"); |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 292 | return false; |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 293 | } |
| 294 | |
| 295 | INIT_WORK((struct work_struct *)nss_work, nss_wq_function); |
| 296 | |
| 297 | nss_work->frequency = nss_runtime_samples.freq_scale[index].frequency; |
| 298 | |
| 299 | nss_work->stats_enable = auto_scale; |
| 300 | nss_cmd_buf.current_freq = nss_work->frequency; |
| 301 | queue_work(nss_wq, (struct work_struct *)nss_work); |
Tanmay V Jagdale | f6b2bce | 2017-03-03 14:31:07 +0530 | [diff] [blame] | 302 | |
| 303 | return true; |
Samarjeet Banerjee | b126e0f | 2016-08-05 20:58:27 +0530 | [diff] [blame] | 304 | } |
| 305 | |
| 306 | /* |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 307 | * nss_freq_register_handler() |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 308 | */ |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 309 | void nss_freq_register_handler(void) |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 310 | { |
Sundarajan Srinivasan | dedd8e4 | 2014-10-06 11:59:34 -0700 | [diff] [blame] | 311 | nss_core_register_handler(NSS_COREFREQ_INTERFACE, nss_freq_interface_handler, NULL); |
Thomas Wu | c07d870 | 2014-03-19 15:46:19 -0700 | [diff] [blame] | 312 | } |