Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 1 | ############################################################################## |
| 2 | # Copyright 2018 EuropeanSoftwareMarketingLtd. |
| 3 | # =================================================================== |
| 4 | # Licensed under the ApacheLicense, Version2.0 (the"License"); |
| 5 | # you may not use this file except in compliance with the License. |
| 6 | # You may obtain a copy of the License at |
| 7 | # http://www.apache.org/licenses/LICENSE-2.0 |
| 8 | # |
| 9 | # software distributed under the License is distributed on an "AS IS" BASIS, |
| 10 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 11 | # See the License for the specific language governing permissions and limitations under |
| 12 | # the License |
| 13 | ############################################################################## |
| 14 | # vnftest comment: this is a modified copy of |
| 15 | # yardstick/benchmark/core/task.py |
| 16 | |
| 17 | """ Handler for vnftest command 'task' """ |
| 18 | |
| 19 | from __future__ import absolute_import |
| 20 | from __future__ import print_function |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 21 | |
| 22 | import atexit |
| 23 | import collections |
| 24 | import copy |
| 25 | import logging |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 26 | import sys |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 27 | import time |
| 28 | import uuid |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 29 | from collections import OrderedDict |
| 30 | |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 31 | import ipaddress |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 32 | import os |
| 33 | import yaml |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 34 | from jinja2 import Environment |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 35 | from six.moves import filter |
| 36 | from vnftest.runners import base as base_runner |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 37 | |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 38 | from vnftest.contexts.base import Context |
| 39 | from vnftest.contexts.csar import CSARContext |
| 40 | from vnftest.runners import base as base_runner |
| 41 | from vnftest.runners.duration import DurationRunner |
| 42 | from vnftest.runners.iteration import IterationRunner |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 43 | from vnftest.common.constants import CONF_FILE |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 44 | from vnftest.common.html_template import report_template |
| 45 | from vnftest.common.task_template import TaskTemplate |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 46 | from vnftest.common.yaml_loader import yaml_load |
Moshe | 30497ac | 2018-03-14 14:22:13 +0200 | [diff] [blame] | 47 | from vnftest.contexts.base import Context |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 48 | from vnftest.dispatcher.base import Base as DispatcherBase |
| 49 | from vnftest.common.task_template import TaskTemplate |
| 50 | from vnftest.common import utils |
| 51 | from vnftest.common import constants |
| 52 | from vnftest.common.html_template import report_template |
| 53 | |
# Default path for the aggregated JSON result file when the caller does not
# supply one (see Task.start / _set_output_config).
output_file_default = "/tmp/vnftest.out"
# Default suite test-case directory, joined onto VNFTEST_ROOT_PATH by
# TaskParser.parse_suite when the suite file does not override it.
test_cases_dir_default = "tests/onap/test_cases/"
LOG = logging.getLogger(__name__)
| 57 | |
| 58 | |
class Task(object): # pragma: no cover
    """Task commands.

    Set of commands to manage benchmark tasks.
    """

    def __init__(self):
        # Deployed Context instance for the currently running task,
        # or None when nothing is deployed (checked by atexit_handler).
        self.context = None
        # Accumulated step outputs; each finished step's output is merged in
        # and fed as input to subsequent steps (see run_one_step).
        self.outputs = {}

    def _set_dispatchers(self, output_config):
        """Normalize the DEFAULT.dispatcher entry into a list of names.

        The config value may be a comma-separated string (e.g. "file,http");
        it is split and stripped in place, defaulting to ['file'].
        """
        dispatchers = output_config.get('DEFAULT', {}).get('dispatcher',
                                                           'file')
        out_types = [s.strip() for s in dispatchers.split(',')]
        output_config['DEFAULT']['dispatcher'] = out_types

    def start(self, args, **kwargs):
        """Entry point: run one task file or a whole suite.

        Parses the task/suite given in ``args``, runs each test case,
        dispatches the aggregated result and writes an HTML report.
        Returns the formatted result dict (see _get_format_result).
        """
        Context.load_vnf_descriptor(args.vnfdescriptor)
        # Make sure runners are terminated and the context undeployed even
        # if the process exits unexpectedly.
        atexit.register(self.atexit_handler)

        task_id = getattr(args, 'task_id')
        self.task_id = task_id if task_id else str(uuid.uuid4())

        self._set_log()

        try:
            output_config = utils.parse_ini_file(CONF_FILE)
        except Exception:
            # all error will be ignore, the default value is {}
            output_config = {}

        self._init_output_config(output_config)
        self._set_output_config(output_config, args.output_file)
        LOG.debug('Output configuration is: %s', output_config)

        self._set_dispatchers(output_config)

        # update dispatcher list
        if 'file' in output_config['DEFAULT']['dispatcher']:
            # Seed the output file with an empty result so readers always
            # find valid JSON, even before the first step finishes.
            result = {'status': 0, 'result': {}}
            utils.write_json_to_file(args.output_file, result)

        total_start_time = time.time()
        parser = TaskParser(args.inputfile)

        if args.suite:
            # 1.parse suite, return suite_params info
            task_files, task_args, task_args_fnames = \
                parser.parse_suite()
        else:
            # Single task file: wrap the per-task arguments in 1-element
            # lists so the loop below handles both modes uniformly.
            task_files = [parser.path]
            task_args = [args.task_args]
            task_args_fnames = [args.task_args_file]

        LOG.debug("task_files:%s, task_args:%s, task_args_fnames:%s",
                  task_files, task_args, task_args_fnames)

        if args.parse_only:
            sys.exit(0)

        # Map of case name -> {'criteria', 'tc_data', ...} collected below.
        testcases = {}
        # parse task_files
        for i in range(0, len(task_files)):
            one_task_start_time = time.time()
            parser.path = task_files[i]
            steps, run_in_parallel, meet_precondition, ret_context = \
                parser.parse_task(self.task_id, task_args[i],
                                  task_args_fnames[i])

            self.context = ret_context

            if not meet_precondition:
                LOG.info("meet_precondition is %s, please check envrionment",
                         meet_precondition)
                continue

            case_name = os.path.splitext(os.path.basename(task_files[i]))[0]
            try:
                data = self._run(steps, run_in_parallel, args.output_file)
            except KeyboardInterrupt:
                raise
            except Exception:
                # A failing test case is recorded as FAIL but does not stop
                # the remaining test cases in the suite.
                LOG.error('Testcase: "%s" FAILED!!!', case_name, exc_info=True)
                testcases[case_name] = {'criteria': 'FAIL', 'tc_data': []}
            else:
                criteria = self.evaluate_task_criteria(data)
                testcases[case_name] = {'criteria': criteria, 'tc_data': data, 'output': self.outputs}

            if args.keep_deploy:
                # keep deployment, forget about stack
                # (hide it for exit handler)
                self.context = None
            else:
                self.context.undeploy()
                self.context = None
            one_task_end_time = time.time()
            LOG.info("Task %s finished in %d secs", task_files[i],
                     one_task_end_time - one_task_start_time)

        result = self._get_format_result(testcases)

        self._do_output(output_config, result)
        self._generate_reporting(result)

        total_end_time = time.time()
        LOG.info("Total finished in %d secs",
                 total_end_time - total_start_time)

        # NOTE(review): 'steps' comes from the last loop iteration; if
        # task_files is empty this raises NameError — confirm callers always
        # supply at least one task.
        step = steps[0]
        LOG.info("To generate report, execute command "
                 "'vnftest report generate %(task_id)s %(tc)s'", step)
        LOG.info("Task ALL DONE, exiting")
        return result

    def _generate_reporting(self, result):
        """Render the HTML report template with *result* and write it to
        constants.REPORTING_FILE."""
        env = Environment()
        with open(constants.REPORTING_FILE, 'w') as f:
            f.write(env.from_string(report_template).render(result))

        LOG.info("Report can be found in '%s'", constants.REPORTING_FILE)

    def _set_log(self):
        """Attach a per-task DEBUG file handler ({task_id}.log) to the root
        logger so everything logged during the run is captured."""
        log_format = '%(asctime)s %(name)s %(filename)s:%(lineno)d %(levelname)s %(message)s'
        log_formatter = logging.Formatter(log_format)

        utils.makedirs(constants.TASK_LOG_DIR)
        log_path = os.path.join(constants.TASK_LOG_DIR, '{}.log'.format(self.task_id))
        log_handler = logging.FileHandler(log_path)
        log_handler.setFormatter(log_formatter)
        log_handler.setLevel(logging.DEBUG)

        logging.root.addHandler(log_handler)

    def _init_output_config(self, output_config):
        """Ensure all dispatcher config sections exist (empty by default) so
        later code can index them without KeyError."""
        output_config.setdefault('DEFAULT', {})
        output_config.setdefault('dispatcher_http', {})
        output_config.setdefault('dispatcher_file', {})
        output_config.setdefault('dispatcher_influxdb', {})
        output_config.setdefault('nsb', {})

    def _set_output_config(self, output_config, file_path):
        """Apply environment overrides (DISPATCHER, TARGET) and the output
        file path to *output_config* in place."""
        try:
            out_type = os.environ['DISPATCHER']
        except KeyError:
            output_config['DEFAULT'].setdefault('dispatcher', 'file')
        else:
            # Environment variable wins over the value from the ini file.
            output_config['DEFAULT']['dispatcher'] = out_type

        output_config['dispatcher_file']['file_path'] = file_path

        try:
            target = os.environ['TARGET']
        except KeyError:
            pass
        else:
            k = 'dispatcher_{}'.format(output_config['DEFAULT']['dispatcher'])
            output_config[k]['target'] = target

    def _get_format_result(self, testcases):
        """Wrap per-testcase results with run metadata (from environment
        variables) into the final result payload for the dispatchers."""
        criteria = self._get_task_criteria(testcases)

        info = {
            'deploy_step': os.environ.get('DEPLOY_STEP', 'unknown'),
            'installer': os.environ.get('INSTALLER_TYPE', 'unknown'),
            'pod_name': os.environ.get('NODE_NAME', 'unknown'),
            'version': os.environ.get('VNFTEST_BRANCH', 'unknown')
        }

        result = {
            'status': 1,
            'result': {
                'criteria': criteria,
                'task_id': self.task_id,
                'info': info,
                'testcases': testcases
            }
        }

        return result

    def _get_task_criteria(self, testcases):
        """Return 'PASS' only if every test case passed, else 'FAIL'."""
        # True if at least one test case did not record criteria == 'PASS'.
        criteria = any(t.get('criteria') != 'PASS' for t in testcases.values())
        if criteria:
            return 'FAIL'
        else:
            return 'PASS'

    def evaluate_task_criteria(self, steps_result_list):
        """Return 'FAIL' if any step result carries a non-empty 'errors'
        list, otherwise 'PASS'."""
        for step_result in steps_result_list:
            errors_list = step_result['errors']
            if errors_list is not None and len(errors_list) > 0:
                return 'FAIL'
        return 'PASS'

    def _do_output(self, output_config, result):
        """Flush *result* through every configured dispatcher."""
        dispatchers = DispatcherBase.get(output_config)

        for dispatcher in dispatchers:
            dispatcher.flush_result_data(result)

    def _run(self, steps, run_in_parallel, output_file):
        """Deploys context and calls runners"""
        if self.context:
            self.context.deploy()
        background_runners = []

        # Flat list of per-step result records, extended as runners finish.
        result = []
        # Start all background steps
        for step in filter(_is_background_step, steps):
            # Background steps run "forever" (aborted at the end of _run),
            # hence the effectively infinite Duration runner.
            step["runner"] = dict(type="Duration", duration=1000000000)
            runner = self.run_one_step(step, output_file)
            background_runners.append(runner)

        runners = []
        if run_in_parallel:
            # Start every foreground step first, then join them all.
            for step in steps:
                if not _is_background_step(step):
                    runner = self.run_one_step(step, output_file)
                    runners.append(runner)

            # Wait for runners to finish
            for runner in runners:
                status = runner_join(runner, background_runners, self.outputs, result)
                if status != 0:
                    raise RuntimeError(
                        "{0} runner status {1}".format(runner.__execution_type__, status))
                LOG.info("Runner ended, output in %s", output_file)
        else:
            # run serially
            for step in steps:
                if not _is_background_step(step):
                    runner = self.run_one_step(step, output_file)
                    status = runner_join(runner, background_runners, self.outputs, result)
                    if status != 0:
                        LOG.error('Step NO.%s: "%s" ERROR!',
                                  steps.index(step) + 1,
                                  step.get('type'))
                        raise RuntimeError(
                            "{0} runner status {1}".format(runner.__execution_type__, status))
                    LOG.info("Runner ended, output in %s", output_file)

        # Abort background runners
        for runner in background_runners:
            runner.abort()

        # Wait for background runners to finish
        for runner in background_runners:
            status = runner.join(self.outputs, result)
            if status is None:
                # Nuke if it did not stop nicely
                base_runner.Runner.terminate(runner)
                runner.join(self.outputs, result)
            base_runner.Runner.release(runner)

        print("Background task ended")
        return result

    def atexit_handler(self):
        """handler for process termination"""
        base_runner.Runner.terminate_all()

        if self.context:
            LOG.info("Undeploying context")
            self.context.undeploy()

    def _parse_options(self, op):
        """Recursively resolve '$name' string values in *op* against
        self.outputs; dicts and lists are walked, everything else is
        returned unchanged."""
        if isinstance(op, dict):
            return {k: self._parse_options(v) for k, v in op.items()}
        elif isinstance(op, list):
            return [self._parse_options(v) for v in op]
        elif isinstance(op, str):
            # '$foo' -> self.outputs['foo'] (None if the key is absent).
            return self.outputs.get(op[1:]) if op.startswith('$') else op
        else:
            return op

    def run_one_step(self, step_cfg, output_file):
        """run one step using context

        Builds/starts the runner for *step_cfg* and returns it without
        waiting for completion (callers join it via runner_join).
        """
        # default runner is Iteration
        if 'runner' not in step_cfg:
            step_cfg['runner'] = dict(type="Iteration", iterations=1)
        runner_cfg = step_cfg['runner']
        runner_cfg['output_filename'] = output_file
        options = step_cfg.get('options', {})
        # Substitute '$output' references with values from earlier steps.
        step_cfg['options'] = self._parse_options(options)
        runner = base_runner.Runner.get(runner_cfg)

        LOG.info("Starting runner of type '%s'", runner_cfg["type"])
        # Previous steps output is the input of the next step.
        input_params = copy.deepcopy(self.outputs)
        runner.run(step_cfg, self.context, input_params)
        return runner
| 350 | |
| 351 | |
class TaskParser(object): # pragma: no cover
    """Parser for task config files in yaml format"""

    def __init__(self, path):
        # Path of the task or suite YAML file to parse; Task.start mutates
        # this attribute when iterating over a suite's task files.
        self.path = path

    def _meet_constraint(self, task, cur_pod, cur_installer):
        """Return True if the suite *task* entry's optional 'constraint'
        (pod / installer lists) matches the current environment."""
        if "constraint" in task:
            constraint = task.get('constraint', None)
            if constraint is not None:
                tc_fit_pod = constraint.get('pod', None)
                tc_fit_installer = constraint.get('installer', None)
                LOG.info("cur_pod:%s, cur_installer:%s,tc_constraints:%s",
                         cur_pod, cur_installer, constraint)
                # A constrained task is skipped when the current pod or
                # installer is unknown, or not in the allowed list.
                if (cur_pod is None) or (tc_fit_pod and cur_pod not in tc_fit_pod):
                    return False
                if (cur_installer is None) or (tc_fit_installer and cur_installer
                                               not in tc_fit_installer):
                    return False
        return True

    def _get_task_para(self, task, cur_pod):
        """Return (task_args, task_args_fnames) for *task*, selecting the
        per-pod entry when present (falling back to 'default' for args)."""
        task_args = task.get('task_args', None)
        if task_args is not None:
            task_args = task_args.get(cur_pod, task_args.get('default'))
        task_args_fnames = task.get('task_args_fnames', None)
        if task_args_fnames is not None:
            task_args_fnames = task_args_fnames.get(cur_pod, None)
        return task_args, task_args_fnames

    def parse_suite(self):
        """parse the suite file and return a list of task config file paths
        and lists of optional parameters if present"""
        LOG.info("\nParsing suite file:%s", self.path)

        try:
            with open(self.path) as stream:
                cfg = yaml_load(stream)
        except IOError as ioerror:
            # Unreadable suite file is fatal for the whole run.
            sys.exit(ioerror)

        self._check_schema(cfg["schema"], "suite")
        LOG.info("\nStarting step:%s", cfg["name"])

        test_cases_dir = cfg.get("test_cases_dir", test_cases_dir_default)
        test_cases_dir = os.path.join(constants.VNFTEST_ROOT_PATH,
                                      test_cases_dir)
        if test_cases_dir[-1] != os.sep:
            test_cases_dir += os.sep

        cur_pod = os.environ.get('NODE_NAME', None)
        cur_installer = os.environ.get('INSTALLER_TYPE', None)

        # Parallel lists: files, their args, and their args-file names.
        valid_task_files = []
        valid_task_args = []
        valid_task_args_fnames = []

        for task in cfg["test_cases"]:
            # 1.check file_name
            if "file_name" in task:
                task_fname = task.get('file_name', None)
                if task_fname is None:
                    continue
            else:
                continue
            # 2.check constraint
            if self._meet_constraint(task, cur_pod, cur_installer):
                valid_task_files.append(test_cases_dir + task_fname)
            else:
                continue
            # 3.fetch task parameters
            task_args, task_args_fnames = self._get_task_para(task, cur_pod)
            valid_task_args.append(task_args)
            valid_task_args_fnames.append(task_args_fnames)

        return valid_task_files, valid_task_args, valid_task_args_fnames

    def parse_task(self, task_id, task_args=None, task_args_file=None):
        """parses the task file and return an context and step instances

        Returns (steps, run_in_parallel, meet_precondition, context).
        Raises TypeError when task args cannot be parsed; exits the process
        when the task file cannot be read.
        """
        LOG.info("Parsing task config: %s", self.path)

        try:
            # Merge args from file first, then inline args override them.
            kw = {}
            if task_args_file:
                with open(task_args_file) as f:
                    kw.update(parse_task_args("task_args_file", f.read()))
            kw.update(parse_task_args("task_args", task_args))
        except TypeError:
            raise TypeError()

        try:
            with open(self.path) as f:
                try:
                    # The task file is a Jinja-style template rendered with
                    # the merged task args before YAML parsing.
                    input_task = f.read()
                    rendered_task = TaskTemplate.render(input_task, **kw)
                except Exception as e:
                    LOG.exception('Failed to render template:\n%s\n', input_task)
                    raise e
                LOG.debug("Input task is:\n%s\n", rendered_task)

                cfg = yaml_load(rendered_task)
        except IOError as ioerror:
            sys.exit(ioerror)

        self._check_schema(cfg["schema"], "task")
        meet_precondition = self._check_precondition(cfg)

        if "context" in cfg:
            context_cfg = cfg["context"]
        else:
            context_cfg = {"type": "Dummy"}

        # Suffix the context name with the first 8 chars of the task id so
        # concurrent runs do not collide.
        name_suffix = '-{}'.format(task_id[:8])
        try:
            context_cfg['name'] = '{}{}'.format(context_cfg['name'],
                                                name_suffix)
        except KeyError:
            pass
        # default to CSAR context
        context_type = context_cfg.get("type", "CSAR")
        context = Context.get(context_type)
        context.init(context_cfg)

        run_in_parallel = cfg.get("run_in_parallel", False)

        # add tc and task id for influxdb extended tags
        for step in cfg["steps"]:
            task_name = os.path.splitext(os.path.basename(self.path))[0]
            step["tc"] = task_name
            step["task_id"] = task_id
            # embed task path into step so we can load other files
            # relative to task path
            step["task_path"] = os.path.dirname(self.path)

        # TODO we need something better here, a class that represent the file
        return cfg["steps"], run_in_parallel, meet_precondition, context

    def _check_schema(self, cfg_schema, schema_type):
        """Check if config file is using the correct schema type"""

        # Expected schema tag is e.g. "vnftest:task:0.1"; anything else is
        # fatal for the run.
        if cfg_schema != "vnftest:" + schema_type + ":0.1":
            sys.exit("error: file %s has unknown schema %s" % (self.path,
                                                               cfg_schema))

    def _check_precondition(self, cfg):
        """Check if the environment meet the precondition"""

        if "precondition" in cfg:
            precondition = cfg["precondition"]
            installer_type = precondition.get("installer_type", None)
            deploy_steps = precondition.get("deploy_steps", None)
            tc_fit_pods = precondition.get("pod_name", None)
            installer_type_env = os.environ.get('INSTALL_TYPE', None)
            deploy_step_env = os.environ.get('DEPLOY_STEP', None)
            pod_name_env = os.environ.get('NODE_NAME', None)

            LOG.info("installer_type: %s, installer_type_env: %s",
                     installer_type, installer_type_env)
            LOG.info("deploy_steps: %s, deploy_step_env: %s",
                     deploy_steps, deploy_step_env)
            LOG.info("tc_fit_pods: %s, pod_name_env: %s",
                     tc_fit_pods, pod_name_env)
            if installer_type and installer_type_env:
                if installer_type_env not in installer_type:
                    return False
            if deploy_steps and deploy_step_env:
                # deploy_steps is a comma-separated list of step prefixes;
                # the environment's step must start with one of them.
                deploy_steps_list = deploy_steps.split(',')
                for deploy_step in deploy_steps_list:
                    if deploy_step_env.startswith(deploy_step):
                        return True
                return False
            if tc_fit_pods and pod_name_env:
                if pod_name_env not in tc_fit_pods:
                    return False
        return True
| 528 | |
def is_ip_addr(addr):
    """Check whether *addr* is a valid IP address literal.

    :param addr: an IP address string, or a mapping holding one under
        'public_ip_attr' or 'private_ip_attr'
    :returns: True if the value parses as an IPv4/IPv6 address, else False
    """
    try:
        addr = addr.get('public_ip_attr', addr.get('private_ip_attr'))
    except AttributeError:
        # Not a mapping — treat addr itself as the candidate address.
        pass

    try:
        # Pass the text form directly.  The previous code encoded to bytes
        # first, but on Python 3 ipaddress.ip_address() interprets bytes as a
        # *packed* binary address: every dotted-quad string (7-15 bytes) was
        # rejected, while any 4- or 16-character junk string was accepted.
        # str() also covers addr being None (mapping without either key),
        # which previously raised an uncaught AttributeError on .encode().
        ipaddress.ip_address(str(addr))
    except ValueError:
        return False
    else:
        return True
| 542 | |
| 543 | |
| 544 | def _is_background_step(step): |
| 545 | if "run_in_background" in step: |
| 546 | return step["run_in_background"] |
| 547 | else: |
| 548 | return False |
| 549 | |
| 550 | |
Moshe | 0bb532c | 2018-02-26 13:39:57 +0200 | [diff] [blame] | 551 | def runner_join(runner, background_runners, outputs, result): |
| 552 | """join (wait for) a runner, exit process at runner failure |
| 553 | :param background_runners: |
| 554 | :type background_runners: |
| 555 | :param outputs: |
| 556 | :type outputs: dict |
| 557 | :param result: |
| 558 | :type result: list |
| 559 | """ |
| 560 | while runner.poll() is None: |
| 561 | outputs.update(runner.get_output()) |
| 562 | result.extend(runner.get_result()) |
| 563 | # drain all the background runner queues |
| 564 | for background in background_runners: |
| 565 | outputs.update(background.get_output()) |
| 566 | result.extend(background.get_result()) |
| 567 | status = runner.join(outputs, result) |
| 568 | base_runner.Runner.release(runner) |
| 569 | return status |
| 570 | |
| 571 | |
def print_invalid_header(source_name, args):
    """Print a diagnostic header naming the invalid args source."""
    message = "Invalid %(source)s passed:\n\n %(args)s\n" % {
        "source": source_name,
        "args": args,
    }
    print(message)
| 575 | |
| 576 | |
def parse_task_args(src_name, args):
    """Parse a task-args payload into a dict.

    :param src_name: label used in error messages ("task_args", ...)
    :param args: a mapping (returned as-is), None/empty (returns {}),
        or a YAML string that must parse to a dict
    :returns: dict of task arguments
    :raises TypeError: when *args* is not valid YAML or not a mapping
    """
    # collections.Mapping was deprecated since Python 3.3 and removed in
    # 3.10; use collections.abc, falling back for very old interpreters.
    mapping_type = getattr(collections, 'abc', collections).Mapping
    if isinstance(args, mapping_type):
        return args

    try:
        # Falsy args (None, "") short-circuit to an empty dict below.
        kw = args and yaml_load(args)
        kw = {} if kw is None else kw
    except yaml.parser.ParserError as e:
        print_invalid_header(src_name, args)
        print("%(source)s has to be YAML. Details:\n\n%(err)s\n"
              % {"source": src_name, "err": e})
        raise TypeError()

    if not isinstance(kw, dict):
        print_invalid_header(src_name, args)
        print("%(src)s had to be dict, actually %(src_type)s\n"
              % {"src": src_name, "src_type": type(kw)})
        raise TypeError()
    return kw