Fix tox errors for ansible-lint, shellcheck, and yamllint
Change-Id: Ibf024b44b2a82c7248877fbc2f022b7fd15e882a
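Most of these lint fixes follow one pattern: single-line shell pipelines are
rewritten as block scalars that enable pipefail, run under an explicit bash
executable (pipefail is a bash option, not POSIX sh), and declare
changed_when: false to satisfy ansible-lint. A minimal sketch of that shape,
with an illustrative task name and command rather than one taken verbatim
from this change:

    - name: Example pipeline task
      shell: |
        set -o pipefail
        kubectl get pods --no-headers | wc -l
      args:
        executable: "/bin/bash"
      changed_when: false
      register: pod_count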
diff --git a/playbooks/prepare-testframework.yaml b/playbooks/prepare-testframework.yaml
index 0f9d00e..5af368b 100644
--- a/playbooks/prepare-testframework.yaml
+++ b/playbooks/prepare-testframework.yaml
@@ -22,10 +22,10 @@
tasks:
- name: Prepare Test Framework for online test
- include_role:
+ include_role:
name: prepare-testframework-online
when: execution_mode == "online-test"
-
+
- name: Prepare Test Framework for offline test
include_role:
name: prepare-testframework-offline
diff --git a/playbooks/roles/package/defaults/main.yaml b/playbooks/roles/package/defaults/main.yaml
index 4fee1ec..4855b84 100644
--- a/playbooks/roles/package/defaults/main.yaml
+++ b/playbooks/roles/package/defaults/main.yaml
@@ -36,12 +36,11 @@
repo: "https://gerrit.nordix.org/infra/test.git"
dest: "test"
version: "{{ lookup('env', 'NORDIX_TEST_VERSION') | default('master', true) }}"
- refspec: "{{ lookup('env', 'NORDIX_TEST_REFSPEC') | default('master', true) }}"
+ refspec: "{{ lookup('env', 'NORDIX_TEST_REFSPEC') | default('master', true) }}"
test-onap:
repo: "https://gerrit.nordix.org/infra/test/onap"
- dest: "test-onap"
+ dest: "test-onap"
version: "{{ lookup('env', 'TEST_STACK_VERSION') | default('master', true) }}"
- refspec: "{{ lookup('env', 'STACK_TEST_REFSPEC') | default(omit) }}"
-
-# placeholder for other images
+ refspec: "{{ lookup('env', 'STACK_TEST_REFSPEC') | default(omit) }}"
+# placeholder for other images
diff --git a/playbooks/roles/package/tasks/containers.yaml b/playbooks/roles/package/tasks/containers.yaml
index 7352f03..f19491f 100644
--- a/playbooks/roles/package/tasks/containers.yaml
+++ b/playbooks/roles/package/tasks/containers.yaml
@@ -28,14 +28,14 @@
# NOTE (eprasad): the docker_image module doesn't seem to respect become, so falling back to the command module
- name: Pull test stack container images
- command: "docker pull {{ xtesting_image_repo}}/{{ item.value.image_name }}@sha256:{{ item.value.image_version }}"
+ command: "docker pull {{ xtesting_image_repo }}/{{ item.value.image_name }}@sha256:{{ item.value.image_version }}"
with_dict: "{{ framework[testfw] }}"
become: true
changed_when: false
- name: Add tag to the stack images
command: |-
- docker tag "{{ xtesting_image_repo}}/{{ item.value.image_name }}@sha256:{{ item.value.image_version }}"
+ docker tag "{{ xtesting_image_repo }}/{{ item.value.image_name }}@sha256:{{ item.value.image_version }}"
"onap/{{ item.value.image_name }}:{{ test_stack_version }}"
with_dict: "{{ framework[testfw] }}"
become: true
diff --git a/playbooks/roles/package/tasks/main.yaml b/playbooks/roles/package/tasks/main.yaml
index 9df4c7b..d2d4d86 100644
--- a/playbooks/roles/package/tasks/main.yaml
+++ b/playbooks/roles/package/tasks/main.yaml
@@ -17,7 +17,7 @@
# SPDX-License-Identifier: Apache-2.0
# ============LICENSE_END=========================================================
-#- name: Prepare packaging
+# - name: Prepare packaging
# include_tasks: prepare-packaging.yaml
- name: Create folder to store dependencies for offline testing
diff --git a/playbooks/roles/prepare-testframework-offline/tasks/prepare-xtesting-healthcheck.yaml b/playbooks/roles/prepare-testframework-offline/tasks/prepare-xtesting-healthcheck.yaml
index 60cb734..f62662b 100644
--- a/playbooks/roles/prepare-testframework-offline/tasks/prepare-xtesting-healthcheck.yaml
+++ b/playbooks/roles/prepare-testframework-offline/tasks/prepare-xtesting-healthcheck.yaml
@@ -54,20 +54,26 @@
become: true
- name: Get the list of ONAP components installed and enabled in the setup
- shell: "helm ls --deployed --namespace onap -c --output yaml | grep Name: | sed 's/.* //g' | awk -F '-' '{print $2}' | egrep -v '{{ healthcheck_test_ignore_list }}' | awk /./ | sed -e 's/^/- health-/'" # noqa 204
+ shell: |
+ set -o pipefail && \
+ helm ls --deployed --namespace onap -c --output yaml | grep Name: | sed 's/.* //g' | awk -F '-' '{print $2}' | egrep -v '{{ healthcheck_test_ignore_list }}' | awk /./ | sed -e 's/^/- health-/' # noqa 204
+ args:
+ executable: "/bin/bash"
+ changed_when: false
register: onap_enabled_services
+
- name: Generate testcases.yaml file
template:
src: healthcheck-testcases.yaml.j2
dest: "{{ testfw_configdir }}/testcases.yaml"
-- name: Create cmd.sh file
+- name: Create cmd.sh file
copy:
dest: "{{ testfw_configdir }}/cmd.sh"
content: |
#!/bin/sh
- run_tests -t ${TAG} || true
+ run_tests -t ${TAG} || true
- name: Delete existing {{ healthcheck_cm_testcases }}
k8s:
@@ -84,19 +90,31 @@
api_version: v1
state: absent
namespace: onap
-
+
- name: Create ConfigMap - {{ healthcheck_cm_testcases }}
- shell: kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_cm_testcases }} --from-file "{{ testfw_configdir }}/testcases.yaml"
+ shell: |
+ kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_cm_testcases }} --from-file "{{ testfw_configdir }}/testcases.yaml"
+ args:
+ executable: "/bin/bash"
+ changed_when: false
- name: Create ConfigMap - {{ healthcheck_cm_run }}
- shell: kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_cm_run }} --from-file "{{ testfw_configdir }}/cmd.sh"
+ shell: |
+ kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_cm_run }} --from-file "{{ testfw_configdir }}/cmd.sh"
+ args:
+ executable: "/bin/bash"
+ changed_when: false
# eprasad: there is a bug in the ansible k8s module; removing a job does not remove the pods associated with it, hence using shell here.
- name: Delete healthcheck job
- shell: "kubectl -n {{ onap_namespace }} delete job xtesting-onap-{{ healthcheck_test_tier }}"
- ignore_errors: yes
+ shell: |
+ "kubectl -n {{ onap_namespace }} delete job xtesting-onap-{{ healthcheck_test_tier }}"
+ args:
+ executable: "/bin/bash"
+ changed_when: false
+ ignore_errors: yes # yamllint disable-line rule:truthy
- name: Construct and save healthcheck testsuite deployment to file
template:
diff --git a/playbooks/roles/prepare-testframework-offline/vars/main.yaml b/playbooks/roles/prepare-testframework-offline/vars/main.yaml
index f0d1e37..3bb5605 100644
--- a/playbooks/roles/prepare-testframework-offline/vars/main.yaml
+++ b/playbooks/roles/prepare-testframework-offline/vars/main.yaml
@@ -20,5 +20,5 @@
healthcheck_cm_testcases: xtesting-healthcheck-testsuit-config-map
healthcheck_cm_run: xtesting-healthcheck-run-config-map
-# xtesting container image repo for offline test
+# xtesting container image repo for offline test
xtesting_image_repo: engine.local/onap
diff --git a/playbooks/roles/prepare-testframework-online/tasks/prepare-xtesting-healthcheck.yaml b/playbooks/roles/prepare-testframework-online/tasks/prepare-xtesting-healthcheck.yaml
index 05e565d..46e4247 100644
--- a/playbooks/roles/prepare-testframework-online/tasks/prepare-xtesting-healthcheck.yaml
+++ b/playbooks/roles/prepare-testframework-online/tasks/prepare-xtesting-healthcheck.yaml
@@ -54,7 +54,12 @@
become: true
- name: Get the list of ONAP components installed and enabled in the setup
- shell: "helm ls --deployed --namespace onap --output yaml | grep name: | sed 's/.* //g' | awk -F '-' '{print $2}' | egrep -v '{{ healthcheck_test_ignore_list }}' | awk /./ | sed -e 's/^/- health-/'" # noqa 204
+ shell: |
+ set -o pipefail && \
+ helm ls --deployed --namespace onap --output yaml | grep name: | sed 's/.* //g' | awk -F '-' '{print $2}' | egrep -v '{{ healthcheck_test_ignore_list }}' | awk /./ | sed -e 's/^/- health-/' # noqa 204
+ args:
+ executable: "/bin/bash"
+ changed_when: false
register: onap_enabled_services
- name: Generate testcases.yaml file
@@ -71,12 +76,20 @@
namespace: onap
- name: Create xtesting healthcheck testcases ConfigMap - {{ healthcheck_config_map }}
- shell: kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_config_map }} --from-file "{{ testfw_configdir }}/testcases.yaml"
+ shell: |
+ kubectl -n {{ onap_namespace }} create configmap {{ healthcheck_config_map }} --from-file "{{ testfw_configdir }}/testcases.yaml"
+ args:
+ executable: "/bin/bash"
+ changed_when: false
# eprasad: there is a bug in the ansible k8s module; removing a job does not remove the pods associated with it, hence using shell here.
- name: Delete healthcheck job
- shell: "kubectl -n {{ onap_namespace }} delete job xtesting-onap-{{ healthcheck_test_tier }}"
- ignore_errors: yes
+ shell: |
+ "kubectl -n {{ onap_namespace }} delete job xtesting-onap-{{ healthcheck_test_tier }}"
+ args:
+ executable: "/bin/bash"
+ changed_when: false
+ ignore_errors: yes # yamllint disable-line rule:truthy
- name: Construct and save healthcheck testsuite deployment to file
template:
diff --git a/playbooks/roles/run-tests/tasks/healthcheck.yaml b/playbooks/roles/run-tests/tasks/healthcheck.yaml
index c65035c..6a0234f 100644
--- a/playbooks/roles/run-tests/tasks/healthcheck.yaml
+++ b/playbooks/roles/run-tests/tasks/healthcheck.yaml
@@ -23,18 +23,32 @@
src: "{{ config_path }}/xtesting/healthcheck-{{ healthcheck_test_tier }}.yaml"
- name: Get the name of pod created by healthcheck job
- shell: kubectl -n {{ onap_namespace }} get pods -l job-name=xtesting-onap-{{ healthcheck_test_tier }} -o name --no-headers=true | sed 's/\<pod\>//g' | sed 's/\///'
+ shell: |
+ set -o pipefail
+ kubectl -n {{ onap_namespace }} get pods -l job-name=xtesting-onap-{{ healthcheck_test_tier }} -o name \
+ --no-headers=true | sed 's/\<pod\>//g' | sed 's/\///'
+ args:
+ executable: "/bin/bash"
+ changed_when: false
register: xtesting_pod
- name: Wait for healthcheck job to complete
- shell: "kubectl -n {{ onap_namespace }} get pods {{ xtesting_pod.stdout }}"
+ shell: |
+ kubectl -n {{ onap_namespace }} get pods {{ xtesting_pod.stdout }}
+ args:
+ executable: "/bin/bash"
+ changed_when: false
register: healthcheck_pod
until: '" Completed " in healthcheck_pod.stdout'
retries: 100
delay: 4
- name: Collect healthcheck testsuite logs
- shell: "kubectl -n {{ onap_namespace }} logs {{ xtesting_pod.stdout }}"
+ shell: |
+ kubectl -n {{ onap_namespace }} logs {{ xtesting_pod.stdout }}
+ args:
+ executable: "/bin/bash"
+ changed_when: false
register: healthcheck_log
- name: Log healthcheck testsuite output to console