Removal of older test profiles
Keeping the current and two previous releases (ONAP-JAKARTA/KOHN/LONDON and ORAN-F/G/H-RELEASE).
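
With only these profiles kept, the Consul/CBS configuration path and the
A1PMS V1 (non-"V2") branches in the scripts are no longer needed and are
removed. As a rough sketch, this is the configuration pattern the updated
suites now repeat (helper names as used in the scripts; the SDNC/NOSDNC
argument varies per test):

    # Build the A1PMS configuration file (no Consul/CBS involved)
    prepare_a1pms_config NOSDNC ".a1pms_config.json"
    if [ $RUNMODE == "KUBE" ]; then
        # Kubernetes: load the config via the test lib helper
        a1pms_load_config ".a1pms_config.json"
    else
        # Docker: push the config over the A1PMS REST API
        a1pms_api_put_configuration 200 ".a1pms_config.json"
    fi
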
Issue-ID: NONRTRIC-837
Signed-off-by: BjornMagnussonXA <bjorn.magnusson@est.tech>
Change-Id: I67ed21e985235a8f3c443b7b1fafa3a852442ee7
diff --git a/test/auto-test/FTC1.sh b/test/auto-test/FTC1.sh
index e1526ff..092efe5 100755
--- a/test/auto-test/FTC1.sh
+++ b/test/auto-test/FTC1.sh
@@ -21,7 +21,7 @@
TC_ONELINE_DESCR="Sanity test, create service and then create,update and delete a policy using http/https and A1PMS REST/DMAAP with/without SDNC controller"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR DMAAPMR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR DMAAPMR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR DMAAPMR A1PMS RICSIM SDNC NGW KUBEPROXY "
@@ -31,10 +31,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW CBS CONSUL"
+CONDITIONALLY_IGNORED_IMAGES="NGW "
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -48,6 +48,11 @@
# Tested variants of REST/DMAAP/SDNC config
TESTED_VARIANTS="REST DMAAP REST+SDNC DMAAP+SDNC"
+
+if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then
+ TESTED_VARIANTS="REST REST+SDNC"
+fi
+
#Test a1pms and simulator protocol versions (others are http only)
TESTED_PROTOCOLS="HTTP HTTPS"
for __httpx in $TESTED_PROTOCOLS ; do
@@ -80,7 +85,6 @@
if [ $__httpx == "HTTPS" ]; then
use_cr_https
use_simulator_https
- use_mr_https
if [[ $interface = *"SDNC"* ]]; then
use_sdnc_https
fi
@@ -92,7 +96,6 @@
else
use_cr_http
use_simulator_http
- use_mr_http
if [[ $interface = *"SDNC"* ]]; then
use_sdnc_http
fi
@@ -102,15 +105,26 @@
use_a1pms_rest_http
fi
fi
+ if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then
+ :
+ else
+ if [ $__httpx == "HTTPS" ]; then
+ use_mr_https
+ else
+ use_mr_http
+ fi
+ fi
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- fi
+ start_ric_simulators ricsim_g3 1 STD_2.0.0
- start_mr "$MR_READ_TOPIC" "/events" "users/policy-agent" \
- "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
+ if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then
+ :
+ else
+ start_mr "$MR_READ_TOPIC" "/events" "users/policy-agent" \
+ "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
+ fi
start_cr 1
@@ -120,82 +134,60 @@
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- #Temporary switch to http/https if dmaap use. Otherwise it is not possibble to push config
- if [ $__httpx == "HTTPS" ]; then
+        #Temporarily switch to http/https when dmaap is used. Otherwise it is not possible to push the config
+ if [ $__httpx == "HTTPS" ]; then
+ use_a1pms_rest_https
+ else
+ use_a1pms_rest_http
+ fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_https
+ else
use_a1pms_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_http
else
use_a1pms_rest_http
fi
- a1pms_api_put_configuration 200 ".consul_config.json"
- if [ $__httpx == "HTTPS" ]; then
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_https
- else
- use_a1pms_rest_https
- fi
- else
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_http
- else
- use_a1pms_rest_http
- fi
- fi
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
fi
fi
- mr_equal requests_submitted 0
+ if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then
+ :
+ else
+ mr_equal requests_submitted 0
+ fi
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
+ a1pms_equal json:rics 3 300
- a1pms_equal json:rics 3 300
+ a1pms_equal json:policy-types 3 120
- a1pms_equal json:policy-types 3 120
+ a1pms_equal json:policies 0
- a1pms_equal json:policies 0
+ a1pms_equal json:policy-instances 0
- a1pms_equal json:policy-instances 0
+ cr_equal 0 received_callbacks 3 120
- cr_equal 0 received_callbacks 3 120
-
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
-
- else
- a1pms_equal json:rics 2 300
-
- a1pms_equal json:policy_schemas 2 120
-
- a1pms_equal json:policy_types 2
-
- a1pms_equal json:policies 0
-
- a1pms_equal json:policy_ids 0
- fi
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
echo "############################################"
echo "############## Health check ################"
@@ -219,87 +211,59 @@
echo "############## RIC Repository ##############"
echo "############################################"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0:AVAILABLE"
- else
- a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
- fi
+ a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0:AVAILABLE"
echo "############################################"
echo "########### A1 Policy Management ###########"
echo "############################################"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
- else
- notificationurl=""
- fi
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
a1pms_api_put_policy 201 "serv1" ricsim_g1_1 1 5000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json
a1pms_api_put_policy 200 "serv1" ricsim_g1_1 1 5000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS_0_2_0 5200 true $notificationurl testdata/STD2/pi_qos_template.json
- a1pms_api_put_policy 200 "serv1" ricsim_g3_1 STD_QOS_0_2_0 5200 true $notificationurl testdata/STD2/pi_qos_template.json
- fi
+ a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS_0_2_0 5200 true $notificationurl testdata/STD2/pi_qos_template.json
+ a1pms_api_put_policy 200 "serv1" ricsim_g3_1 STD_QOS_0_2_0 5200 true $notificationurl testdata/STD2/pi_qos_template.json
a1pms_api_put_policy 201 "serv1" ricsim_g2_1 NOTYPE 5100 NOTRANSIENT $notificationurl testdata/STD/pi1_template.json
a1pms_api_put_policy 200 "serv1" ricsim_g2_1 NOTYPE 5100 NOTRANSIENT $notificationurl testdata/STD/pi1_template.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policies 3
- else
- a1pms_equal json:policies 2
- fi
+ a1pms_equal json:policies 3
a1pms_api_delete_policy 204 5000
a1pms_api_delete_policy 204 5100
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_delete_policy 204 5200
- fi
+ a1pms_api_delete_policy 204 5200
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policies 0
+ a1pms_equal json:policies 0
- a1pms_equal json:policy-instances 0
- else
- a1pms_equal json:policies 0
+ a1pms_equal json:policy-instances 0
- a1pms_equal json:policy_ids 0
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks 3
- fi
+ cr_equal 0 received_callbacks 3
if [[ $interface = *"DMAAP"* ]]; then
- if [ "$A1PMS_VERSION" == "V2" ]; then
- VAL=14 # Number of a1pms API calls over DMAAP
- else
- VAL=11 # Number of a1pms API calls over DMAAP
- fi
+ VAL=14 # Number of a1pms API calls over DMAAP
mr_equal requests_fetched $VAL
mr_equal responses_submitted $VAL
mr_equal responses_fetched $VAL
mr_equal current_requests 0
mr_equal current_responses 0
else
- mr_equal requests_submitted 0
+ if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then
+ :
+ else
+ mr_equal requests_submitted 0
+ fi
fi
if [[ $interface = *"SDNC"* ]]; then
sim_contains_str ricsim_g1_1 remote_hosts $SDNC_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $SDNC_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
else
sim_contains_str ricsim_g1_1 remote_hosts $A1PMS_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $A1PMS_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
fi
check_a1pms_logs
diff --git a/test/auto-test/FTC10.sh b/test/auto-test/FTC10.sh
index 61bd722..32b6b1c 100755
--- a/test/auto-test/FTC10.sh
+++ b/test/auto-test/FTC10.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Basic use case, register service, create/update policy, delete policy, de-register service using both STD and OSC interface while mixing REST and Dmaap"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES=" MR CR A1PMS RICSIM CP KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -58,19 +58,12 @@
start_ric_simulators ricsim_g2 5 STD_1.1.3
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
-fi
+start_ric_simulators ricsim_g3 1 STD_2.0.0
start_mr
start_cr 1
-if [ $RUNMODE == "DOCKER" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then
- start_consul_cbs
- fi
-fi
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
@@ -84,24 +77,13 @@
use_a1pms_rest_http
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
sleep_wait 120 "Let A1PMS cofiguration take effect"
@@ -112,26 +94,16 @@
sim_print ricsim_g2_1 interface
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_print ricsim_g3_1 interface
-fi
+sim_print ricsim_g3_1 interface
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
- a1pms_equal json:policy-types 3 300
-else
- a1pms_equal json:policy_types 2 300
-fi
+sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
+a1pms_equal json:policy-types 3 300
# Create policies
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
use_a1pms_rest_http
@@ -162,20 +134,17 @@
sim_equal ricsim_g2_1 num_instances 2
-if [ "$A1PMS_VERSION" == "V2" ]; then
- use_a1pms_rest_http
+use_a1pms_rest_http
- a1pms_api_put_policy 201 "service1" ricsim_g3_1 STD_QOS_0_2_0 2200 true $notificationurl testdata/STD2/pi_qos_template.json 1
+a1pms_api_put_policy 201 "service1" ricsim_g3_1 STD_QOS_0_2_0 2200 true $notificationurl testdata/STD2/pi_qos_template.json 1
- sim_equal ricsim_g3_1 num_instances 1
+sim_equal ricsim_g3_1 num_instances 1
- use_a1pms_dmaap_http
+use_a1pms_dmaap_http
- a1pms_api_put_policy 201 "service1" ricsim_g3_1 STD_QOS_0_2_0 3200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos_template.json 1
+a1pms_api_put_policy 201 "service1" ricsim_g3_1 STD_QOS_0_2_0 3200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos_template.json 1
- sim_equal ricsim_g3_1 num_instances 2
-
-fi
+sim_equal ricsim_g3_1 num_instances 2
#Update policies
use_a1pms_rest_http
@@ -208,42 +177,31 @@
sim_equal ricsim_g2_1 num_instances 2
-if [ "$A1PMS_VERSION" == "V2" ]; then
- use_a1pms_rest_http
+use_a1pms_rest_http
- a1pms_api_put_policy 200 "service1" ricsim_g3_1 STD_QOS_0_2_0 2200 true $notificationurl testdata/STD2/pi_qos_template.json 1
+a1pms_api_put_policy 200 "service1" ricsim_g3_1 STD_QOS_0_2_0 2200 true $notificationurl testdata/STD2/pi_qos_template.json 1
- sim_equal ricsim_g3_1 num_instances 2
+sim_equal ricsim_g3_1 num_instances 2
- use_a1pms_dmaap_http
+use_a1pms_dmaap_http
- a1pms_api_put_policy 200 "service1" ricsim_g3_1 STD_QOS_0_2_0 3200 true $notificationurl testdata/STD2/pi_qos_template.json 1
+a1pms_api_put_policy 200 "service1" ricsim_g3_1 STD_QOS_0_2_0 3200 true $notificationurl testdata/STD2/pi_qos_template.json 1
- sim_equal ricsim_g3_1 num_instances 2
-fi
+sim_equal ricsim_g3_1 num_instances 2
# Check policies
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy 200 2000 testdata/OSC/pi1_template.json "service1" ricsim_g1_1 1 false $notificationurl
- a1pms_api_get_policy 200 3000 testdata/OSC/pi1_template.json "service1" ricsim_g1_1 1 false $notificationurl
- a1pms_api_get_policy 200 2100 testdata/STD/pi1_template.json "service1" ricsim_g2_1 NOTYPE false $notificationurl
- a1pms_api_get_policy 200 3100 testdata/STD/pi1_template.json "service1" ricsim_g2_1 NOTYPE false $notificationurl
- a1pms_api_get_policy 200 2200 testdata/STD2/pi_qos_template.json "service1" ricsim_g3_1 STD_QOS_0_2_0 true $notificationurl
- a1pms_api_get_policy 200 3200 testdata/STD2/pi_qos_template.json "service1" ricsim_g3_1 STD_QOS_0_2_0 true $notificationurl
-else
- a1pms_api_get_policy 200 2000 testdata/OSC/pi1_template.json
- a1pms_api_get_policy 200 3000 testdata/OSC/pi1_template.json
- a1pms_api_get_policy 200 2100 testdata/STD/pi1_template.json
- a1pms_api_get_policy 200 3100 testdata/STD/pi1_template.json
-fi
+a1pms_api_get_policy 200 2000 testdata/OSC/pi1_template.json "service1" ricsim_g1_1 1 false $notificationurl
+a1pms_api_get_policy 200 3000 testdata/OSC/pi1_template.json "service1" ricsim_g1_1 1 false $notificationurl
+a1pms_api_get_policy 200 2100 testdata/STD/pi1_template.json "service1" ricsim_g2_1 NOTYPE false $notificationurl
+a1pms_api_get_policy 200 3100 testdata/STD/pi1_template.json "service1" ricsim_g2_1 NOTYPE false $notificationurl
+a1pms_api_get_policy 200 2200 testdata/STD2/pi_qos_template.json "service1" ricsim_g3_1 STD_QOS_0_2_0 true $notificationurl
+a1pms_api_get_policy 200 3200 testdata/STD2/pi_qos_template.json "service1" ricsim_g3_1 STD_QOS_0_2_0 true $notificationurl
sim_equal ricsim_g1_1 num_instances 2
sim_equal ricsim_g2_1 num_instances 2
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal ricsim_g3_1 num_instances 2
-fi
+sim_equal ricsim_g3_1 num_instances 2
# Remove policies
@@ -255,27 +213,21 @@
a1pms_api_delete_policy 204 2100
use_a1pms_rest_http
a1pms_api_delete_policy 204 3100
-if [ "$A1PMS_VERSION" == "V2" ]; then
- use_a1pms_dmaap_http
- a1pms_api_delete_policy 204 2200
- use_a1pms_rest_http
- a1pms_api_delete_policy 204 3200
-fi
+use_a1pms_dmaap_http
+a1pms_api_delete_policy 204 2200
+use_a1pms_rest_http
+a1pms_api_delete_policy 204 3200
sim_equal ricsim_g1_1 num_instances 0
sim_equal ricsim_g2_1 num_instances 0
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal ricsim_g3_1 num_instances 0
-fi
+sim_equal ricsim_g3_1 num_instances 0
# Check remote host access to simulator
sim_contains_str ricsim_g1_1 remote_hosts $A1PMS_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $A1PMS_APP_NAME
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
-fi
+sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
# Check policy removal
use_a1pms_rest_http
@@ -284,10 +236,8 @@
a1pms_api_get_policy 404 2100
a1pms_api_get_policy 404 3100
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy 404 2200
- a1pms_api_get_policy 404 3200
-fi
+a1pms_api_get_policy 404 2200
+a1pms_api_get_policy 404 3200
# Remove the service
use_a1pms_dmaap_http
diff --git a/test/auto-test/FTC100.sh b/test/auto-test/FTC100.sh
index 7b8478d..c7c80aa 100755
--- a/test/auto-test/FTC100.sh
+++ b/test/auto-test/FTC100.sh
@@ -23,7 +23,7 @@
USE_ISTIO=0
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
if [ $USE_ISTIO -eq 0 ]; then
@@ -37,10 +37,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -205,11 +205,9 @@
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
sim_put_policy_type 201 ricsim_g1_1 2 testdata/OSC/sim_2.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
- fi
+ start_ric_simulators ricsim_g3 1 STD_2.0.0
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
start_mr
@@ -221,83 +219,57 @@
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- #Temporary switch to http/https if dmaap use. Otherwise it is not possibble to push config
- if [ $__httpx == "HTTPS" ]; then
+        #Temporarily switch to http/https when dmaap is used. Otherwise it is not possible to push the config
+ if [ $__httpx == "HTTPS" ]; then
+ use_a1pms_rest_https
+ else
+ use_a1pms_rest_http
+ fi
+
+ if [[ $interface != *"DMAAP"* ]]; then
+ echo "{}" > ".a1pms_config_incorrect.json"
+ a1pms_api_put_configuration 400 ".a1pms_config_incorrect.json"
+ fi
+
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
+ a1pms_api_get_configuration 200 ".a1pms_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_https
+ else
use_a1pms_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_http
else
use_a1pms_rest_http
fi
-
- if [[ $interface != *"DMAAP"* ]]; then
- echo "{}" > ".consul_config_incorrect.json"
- a1pms_api_put_configuration 400 ".consul_config_incorrect.json"
- fi
-
- a1pms_api_put_configuration 200 ".consul_config.json"
- a1pms_api_get_configuration 200 ".consul_config.json"
- if [ $__httpx == "HTTPS" ]; then
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_https
- else
- use_a1pms_rest_https
- fi
- else
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_http
- else
- use_a1pms_rest_http
- fi
- fi
-
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
fi
fi
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:rics 3 300
+ a1pms_equal json:rics 3 300
- a1pms_equal json:policy-types 5 120
+ a1pms_equal json:policy-types 5 120
- a1pms_equal json:policies 0
+ a1pms_equal json:policies 0
- a1pms_equal json:policy-instances 0
- else
- a1pms_equal json:rics 2 300
+ a1pms_equal json:policy-instances 0
- a1pms_equal json:policy_schemas 3 120
+ cr_equal 0 received_callbacks 3 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
- a1pms_equal json:policy_types 3
-
- a1pms_equal json:policies 0
-
- a1pms_equal json:policy_ids 0
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks 3 120
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
- fi
mr_equal requests_submitted 0
@@ -393,117 +365,71 @@
echo "############## RIC Repository ##############"
echo "############################################"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
- else
- a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
- fi
+ a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
+
a1pms_api_get_rics 200 1 "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
a1pms_api_get_rics 404 47
a1pms_api_get_rics 404 "test"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_ric 200 me1_ricsim_g1_1 NORIC "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
+ a1pms_api_get_ric 200 me1_ricsim_g1_1 NORIC "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
- a1pms_api_get_ric 200 me2_ricsim_g1_1 NORIC "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
+ a1pms_api_get_ric 200 me2_ricsim_g1_1 NORIC "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
- a1pms_api_get_ric 200 me1_ricsim_g2_1 NORIC "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
+ a1pms_api_get_ric 200 me1_ricsim_g2_1 NORIC "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
- a1pms_api_get_ric 200 me2_ricsim_g2_1 NORIC "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
+ a1pms_api_get_ric 200 me2_ricsim_g2_1 NORIC "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
- a1pms_api_get_ric 200 me1_ricsim_g3_1 NORIC "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
+ a1pms_api_get_ric 200 me1_ricsim_g3_1 NORIC "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
- a1pms_api_get_ric 200 me2_ricsim_g3_1 NORIC "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
+ a1pms_api_get_ric 200 me2_ricsim_g3_1 NORIC "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
- a1pms_api_get_ric 200 NOME ricsim_g1_1 "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
+ a1pms_api_get_ric 200 NOME ricsim_g1_1 "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1,2:AVAILABLE"
- a1pms_api_get_ric 200 NOME ricsim_g2_1 "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
+ a1pms_api_get_ric 200 NOME ricsim_g2_1 "ricsim_g2_1:me1_ricsim_g2_1,me2_ricsim_g2_1:EMPTYTYPE:AVAILABLE"
- a1pms_api_get_ric 200 NOME ricsim_g3_1 "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
+ a1pms_api_get_ric 200 NOME ricsim_g3_1 "ricsim_g3_1:me1_ricsim_g3_1,me2_ricsim_g3_1:STD_QOS_0_2_0,STD_QOS2_0.1.0:AVAILABLE"
- a1pms_api_get_ric 404 NOME test1
+ a1pms_api_get_ric 404 NOME test1
- a1pms_api_get_ric 404 test NORIC
+ a1pms_api_get_ric 404 test NORIC
- a1pms_api_get_ric 400 me1_ricsim_g1_1 ricsim_g1_1
+ a1pms_api_get_ric 400 me1_ricsim_g1_1 ricsim_g1_1
- a1pms_api_get_ric 400 me1_ricsim_g1_1 TESTRIC
+ a1pms_api_get_ric 400 me1_ricsim_g1_1 TESTRIC
- a1pms_api_get_ric 400 TESTME ricsim_g1_1
-
- else
- a1pms_api_get_ric 200 me1_ricsim_g1_1 ricsim_g1_1
-
- a1pms_api_get_ric 200 me2_ricsim_g1_1 ricsim_g1_1
-
- a1pms_api_get_ric 200 me1_ricsim_g2_1 ricsim_g2_1
-
- a1pms_api_get_ric 200 me2_ricsim_g2_1 ricsim_g2_1
-
- a1pms_api_get_ric 404 test
- fi
+ a1pms_api_get_ric 400 TESTME ricsim_g1_1
echo "############################################"
echo "########### A1 Policy Management ###########"
echo "############################################"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_type 200 1 testdata/OSC/1-a1pms-modified.json
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_type 200 2 testdata/OSC/2-a1pms-modified.json
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_type 200 STD_QOS_0_2_0 testdata/STD2/qos-a1pms-modified.json
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_type 200 STD_QOS2_0.1.0 testdata/STD2/qos2-a1pms-modified.json
+ deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
+ #Behaviour accepted for now
+ a1pms_api_get_policy_type 200 1 testdata/OSC/1-a1pms-modified.json
+ deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
+ #Behaviour accepted for now
+ a1pms_api_get_policy_type 200 2 testdata/OSC/2-a1pms-modified.json
+ deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
+ #Behaviour accepted for now
+ a1pms_api_get_policy_type 200 STD_QOS_0_2_0 testdata/STD2/qos-a1pms-modified.json
+ deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
+ #Behaviour accepted for now
+ a1pms_api_get_policy_type 200 STD_QOS2_0.1.0 testdata/STD2/qos2-a1pms-modified.json
- a1pms_api_get_policy_type 404 3
- else
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_schema 200 1 testdata/OSC/1-a1pms-modified.json
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_schema 200 2 testdata/OSC/2-a1pms-modified.json
+ a1pms_api_get_policy_type 404 3
- a1pms_api_get_policy_schema 404 3
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_schemas 404
- else
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_schemas 200 NORIC testdata/OSC/1-a1pms-modified.json testdata/OSC/2-a1pms-modified.json NOFILE
- deviation "TR9 - a1pms modify the type with type id - test combo $interface and $__httpx"
- #Behaviour accepted for now
- a1pms_api_get_policy_schemas 200 ricsim_g1_1 testdata/OSC/1-a1pms-modified.json testdata/OSC/2-a1pms-modified.json
-
- a1pms_api_get_policy_schemas 200 ricsim_g2_1 NOFILE
-
- a1pms_api_get_policy_schemas 404 test
- fi
+ a1pms_api_get_policy_schemas 404
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_types 200 NORIC 1 2 EMPTY STD_QOS_0_2_0 STD_QOS2_0.1.0
- else
- a1pms_api_get_policy_types 200 NORIC 1 2 EMPTY
- fi
+ a1pms_api_get_policy_types 200 NORIC 1 2 EMPTY STD_QOS_0_2_0 STD_QOS2_0.1.0
a1pms_api_get_policy_types 200 ricsim_g1_1 1 2
a1pms_api_get_policy_types 200 ricsim_g2_1 EMPTY
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_types 200 ricsim_g3_1 STD_QOS_0_2_0 STD_QOS2_0.1.0
- fi
+ a1pms_api_get_policy_types 200 ricsim_g3_1 STD_QOS_0_2_0 STD_QOS2_0.1.0
a1pms_api_get_policy_types 404 dummy-ric
@@ -511,11 +437,8 @@
a1pms_api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH_0/1"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
- else
- notificationurl=""
- fi
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
+
if [[ $interface != *"DMAAP"* ]]; then
# Badly formatted json is not possible to send via dmaap
a1pms_api_put_policy 400 "unregistered-service" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi_bad_template.json
@@ -538,74 +461,49 @@
a1pms_api_put_policy 200 "service10" ricsim_g2_1 NOTYPE 5100 true $notificationurl testdata/STD/pi1_template.json
a1pms_api_put_policy 200 "service10" ricsim_g2_1 NOTYPE 5100 false $notificationurl testdata/STD/pi1_template.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
- a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
+ a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
+ a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
- a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 true $notificationurl testdata/STD2/pi_qos2_template.json
- a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 false $notificationurl testdata/STD2/pi_qos2_template.json
- fi
+ a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 true $notificationurl testdata/STD2/pi_qos2_template.json
+ a1pms_api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 false $notificationurl testdata/STD2/pi_qos2_template.json
a1pms_api_get_policy_status 404 1
a1pms_api_get_policy_status 404 2
VAL='NOT IN EFFECT'
a1pms_api_get_policy_status 200 5000 OSC "$VAL" "false"
a1pms_api_get_policy_status 200 5100 STD "UNDEFINED"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_status 200 5200 STD2 EMPTY EMPTY
- fi
+ a1pms_api_get_policy_status 200 5200 STD2 EMPTY EMPTY
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
#a1pms_equal json:policies 2
#Allow 3 for now
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policies 4
- else
- a1pms_equal json:policies 3
- fi
+ a1pms_equal json:policies 4
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
#a1pms_equal json:policy_ids 2
#Allow 3 for now
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances 4
- else
- a1pms_equal json:policy_ids 3
- fi
+ a1pms_equal json:policy-instances 4
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
#a1pms_api_get_policy_ids 200 NORIC NOSERVICE NOTYPE 5000 5100
#Allow policy create with unregistered service for now
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_ids 200 NORIC NOSERVICE NOTYPE 5000 5100 2000 5200
- else
- a1pms_api_get_policy_ids 200 NORIC NOSERVICE NOTYPE 5000 5100 2000
- fi
+ a1pms_api_get_policy_ids 200 NORIC NOSERVICE NOTYPE 5000 5100 2000 5200
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
#a1pms_api_get_policy_ids 200 ricsim_g1_1 NOSERVICE NOTYPE 5000
#Allow policy create with unregistered service for now
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_ids 200 ricsim_g1_1 NOSERVICE NOTYPE 5000 2000
+ a1pms_api_get_policy_ids 200 ricsim_g1_1 NOSERVICE NOTYPE 5000 2000
- a1pms_api_get_policy_ids 200 ricsim_g2_1 NOSERVICE NOTYPE 5100
+ a1pms_api_get_policy_ids 200 ricsim_g2_1 NOSERVICE NOTYPE 5100
- a1pms_api_get_policy_ids 200 ricsim_g3_1 NOSERVICE NOTYPE 5200
+ a1pms_api_get_policy_ids 200 ricsim_g3_1 NOSERVICE NOTYPE 5200
- a1pms_api_get_policy_ids 200 NORIC "service10" NOTYPE 5000 5100 5200
- else
- a1pms_api_get_policy_ids 200 ricsim_g1_1 NOSERVICE NOTYPE 5000 2000
-
- a1pms_api_get_policy_ids 200 ricsim_g2_1 NOSERVICE NOTYPE 5100
-
-
- a1pms_api_get_policy_ids 200 NORIC "service10" NOTYPE 5000 5100
- fi
+ a1pms_api_get_policy_ids 200 NORIC "service10" NOTYPE 5000 5100 5200
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
@@ -616,27 +514,17 @@
a1pms_api_get_policy_ids 200 NORIC NOSERVICE 2 NOID
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy_ids 200 NORIC NOSERVICE STD_QOS2_0.1.0 5200
- fi
+ a1pms_api_get_policy_ids 200 NORIC NOSERVICE STD_QOS2_0.1.0 5200
a1pms_api_get_policy_ids 200 ricsim_g2_1 NOSERVICE 1 NOID
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_get_policy 200 5000 testdata/OSC/pi1_template.json "service10" ricsim_g1_1 1 false $notificationurl
+ a1pms_api_get_policy 200 5000 testdata/OSC/pi1_template.json "service10" ricsim_g1_1 1 false $notificationurl
- a1pms_api_get_policy 200 5100 testdata/STD/pi1_template.json "service10" ricsim_g2_1 NOTYPE false $notificationurl
+ a1pms_api_get_policy 200 5100 testdata/STD/pi1_template.json "service10" ricsim_g2_1 NOTYPE false $notificationurl
- a1pms_api_get_policy 200 5200 testdata/STD2/pi_qos2_template.json "service10" ricsim_g3_1 STD_QOS2_0.1.0 false $notificationurl
+ a1pms_api_get_policy 200 5200 testdata/STD2/pi_qos2_template.json "service10" ricsim_g3_1 STD_QOS2_0.1.0 false $notificationurl
- a1pms_api_get_policies 200 ricsim_g1_1 "service10" 1 5000 ricsim_g1_1 "service10" 1 false $notificationurl testdata/OSC/pi1_template.json
- else
- a1pms_api_get_policy 200 5000 testdata/OSC/pi1_template.json
-
- a1pms_api_get_policy 200 5100 testdata/STD/pi1_template.json
-
- a1pms_api_get_policies 200 ricsim_g1_1 "service10" 1 5000 ricsim_g1_1 "service10" 1 testdata/OSC/pi1_template.json
- fi
+ a1pms_api_get_policies 200 ricsim_g1_1 "service10" 1 5000 ricsim_g1_1 "service10" 1 false $notificationurl testdata/OSC/pi1_template.json
deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
#kept until decision
@@ -648,33 +536,20 @@
a1pms_api_delete_policy 204 5000
- if [ "$A1PMS_VERSION" == "V2" ]; then
-
- a1pms_api_delete_policy 204 5200
- fi
+ a1pms_api_delete_policy 204 5200
a1pms_equal json:policies 1
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances 1
- else
- a1pms_equal json:policy_ids 1
- fi
+ a1pms_equal json:policy-instances 1
a1pms_api_delete_policy 204 5100
a1pms_equal json:policies 0
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances 0
- else
- a1pms_equal json:policy_ids 0
- fi
+ a1pms_equal json:policy-instances 0
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks 3
- fi
+ cr_equal 0 received_callbacks 3
if [[ $interface = *"DMAAP"* ]]; then
mr_greater requests_submitted 0
@@ -691,15 +566,11 @@
if [[ $interface = *"SDNC"* ]]; then
sim_contains_str ricsim_g1_1 remote_hosts $SDNC_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $SDNC_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
else
sim_contains_str ricsim_g1_1 remote_hosts $A1PMS_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $A1PMS_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
fi
fi
diff --git a/test/auto-test/FTC110.sh b/test/auto-test/FTC110.sh
index b9168cd..e3f3c85 100755
--- a/test/auto-test/FTC110.sh
+++ b/test/auto-test/FTC110.sh
@@ -21,7 +21,7 @@
TC_ONELINE_DESCR="Testing of service registration timeouts and keepalive"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM KUBEPROXY NGW"
@@ -31,10 +31,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -57,9 +57,7 @@
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
-fi
+start_ric_simulators ricsim_g3 1 STD_2.0.0
start_mr
@@ -73,25 +71,13 @@
start_a1pms NORPOXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
set_a1pms_debug
@@ -106,9 +92,7 @@
#Print simulator interface version
sim_print ricsim_g1_1 interface
sim_print ricsim_g2_1 interface
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_print ricsim_g3_1 interface
-fi
+sim_print ricsim_g3_1 interface
a1pms_api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
@@ -190,42 +174,23 @@
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
-if [ "$A1PMS_VERSION" == "V2" ]; then
+sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
+a1pms_equal json:rics 3 300
- a1pms_equal json:rics 3 300
+#a1pms_equal json:policy_schemas 2 120
- #a1pms_equal json:policy_schemas 2 120
+a1pms_equal json:policy-types 3 120
- a1pms_equal json:policy-types 3 120
+a1pms_equal json:policies 0
- a1pms_equal json:policies 0
-else
- a1pms_equal json:rics 2 300
-
- a1pms_equal json:policy_schemas 2 120
-
- a1pms_equal json:policy_types 2
-
- a1pms_equal json:policies 0
-fi
-
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
a1pms_api_put_policy 201 "service10" ricsim_g1_1 1 5000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json
a1pms_api_put_policy 201 "service10" ricsim_g2_1 NOTYPE 5100 NOTRANSIENT $notificationurl testdata/STD/pi1_template.json
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
- a1pms_equal json:policies 3
-else
- a1pms_equal json:policies 2
-fi
+a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json
+a1pms_equal json:policies 3
sim_equal ricsim_g1_1 num_instances 1
sim_equal ricsim_g2_1 num_instances 1
@@ -233,38 +198,24 @@
a1pms_api_put_policy 201 "service10" ricsim_g1_1 1 5001 true $notificationurl testdata/OSC/pi1_template.json
a1pms_api_put_policy 201 "service10" ricsim_g2_1 NOTYPE 5101 true $notificationurl testdata/STD/pi1_template.json
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5201 true $notificationurl testdata/STD2/pi_qos2_template.json
- a1pms_equal json:policies 6
-else
- a1pms_equal json:policies 4
-fi
+a1pms_api_put_policy 201 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5201 true $notificationurl testdata/STD2/pi_qos2_template.json
+a1pms_equal json:policies 6
sim_equal ricsim_g1_1 num_instances 2
sim_equal ricsim_g2_1 num_instances 2
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal ricsim_g3_1 num_instances 2
-fi
+sim_equal ricsim_g3_1 num_instances 2
sim_post_delete_instances 200 ricsim_g1_1
sim_post_delete_instances 200 ricsim_g2_1
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_post_delete_instances 200 ricsim_g3_1
-fi
+sim_post_delete_instances 200 ricsim_g3_1
#Wait for recreate of non transient policy
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policies 3 180
-else
- a1pms_equal json:policies 2 180
-fi
+a1pms_equal json:policies 3 180
sim_equal ricsim_g1_1 num_instances 1
sim_equal ricsim_g2_1 num_instances 1
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal ricsim_g3_1 num_instances 1
-fi
+sim_equal ricsim_g3_1 num_instances 1
a1pms_api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH_0/service10"
@@ -273,9 +224,7 @@
sim_equal ricsim_g1_1 num_instances 0
sim_equal ricsim_g2_1 num_instances 0
-if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal ricsim_g3_1 num_instances 0
-fi
+sim_equal ricsim_g3_1 num_instances 0
a1pms_api_get_service_ids 200
diff --git a/test/auto-test/FTC1100.sh b/test/auto-test/FTC1100.sh
index 932cddb..2c3cd3b 100755
--- a/test/auto-test/FTC1100.sh
+++ b/test/auto-test/FTC1100.sh
@@ -40,7 +40,7 @@
CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -150,9 +150,7 @@
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 4 STD_2.0.0
-fi
+start_ric_simulators ricsim_g3 4 STD_2.0.0
start_cr 1
diff --git a/test/auto-test/FTC150.sh b/test/auto-test/FTC150.sh
index b88daf5..a0110d9 100755
--- a/test/auto-test/FTC150.sh
+++ b/test/auto-test/FTC150.sh
@@ -33,7 +33,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -66,9 +66,7 @@
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- fi
+ start_ric_simulators ricsim_g3 1 STD_2.0.0
start_sdnc
diff --git a/test/auto-test/FTC1800.sh b/test/auto-test/FTC1800.sh
index 472444f..8447049 100755
--- a/test/auto-test/FTC1800.sh
+++ b/test/auto-test/FTC1800.sh
@@ -34,7 +34,7 @@
CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/auto-test/FTC2001.sh b/test/auto-test/FTC2001.sh
index 63ab798..3b53e8e 100755
--- a/test/auto-test/FTC2001.sh
+++ b/test/auto-test/FTC2001.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Testing southbound proxy for A1PMS and ICS"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM ICS PRODSTUB HTTPPROXY NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM ICS PRODSTUB HTTPPROXY NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES=" MR CR A1PMS PRODSTUB RICSIM CP ICS HTTPPROXY KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -52,12 +52,7 @@
use_ics_rest_https
use_prod_stub_https
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- echo "A1PMS VERSION 2 (V2) is required"
- exit 1
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
clean_environment
@@ -79,26 +74,14 @@
start_a1pms PROXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
start_cr 1
diff --git a/test/auto-test/FTC2002.sh b/test/auto-test/FTC2002.sh
index 8143de3..3774b69 100755
--- a/test/auto-test/FTC2002.sh
+++ b/test/auto-test/FTC2002.sh
@@ -32,7 +32,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON "
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON "
#Supported run modes
SUPPORTED_RUNMODES="DOCKER"
@@ -72,9 +72,7 @@
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- fi
+ start_ric_simulators ricsim_g3 1 STD_2.0.0
start_sdnc
diff --git a/test/auto-test/FTC2003.sh b/test/auto-test/FTC2003.sh
index e04e8b5..c98eeac 100755
--- a/test/auto-test/FTC2003.sh
+++ b/test/auto-test/FTC2003.sh
@@ -33,7 +33,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/auto-test/FTC300.sh b/test/auto-test/FTC300.sh
index 1a6d686..b260c55 100755
--- a/test/auto-test/FTC300.sh
+++ b/test/auto-test/FTC300.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Resync 10000 policies using OSC and STD interface"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -94,9 +94,7 @@
start_ric_simulators ricsim_g2 4 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 4 STD_2.0.0
- fi
+ start_ric_simulators ricsim_g3 4 STD_2.0.0
start_mr
@@ -112,48 +110,36 @@
set_a1pms_debug
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- #Temporary switch to http/https if dmaap use. Otherwise it is not possibble to push config
- if [ $__httpx == "HTTPS" ]; then
+        #Temporarily switch to http/https when dmaap is used. Otherwise it is not possible to push the config
+ if [ $__httpx == "HTTPS" ]; then
+ use_a1pms_rest_https
+ else
+ use_a1pms_rest_http
+ fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_https
+ else
use_a1pms_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_http
else
use_a1pms_rest_http
fi
- a1pms_api_put_configuration 200 ".consul_config.json"
- if [ $__httpx == "HTTPS" ]; then
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_https
- else
- use_a1pms_rest_https
- fi
- else
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_http
- else
- use_a1pms_rest_http
- fi
- fi
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
fi
fi
@@ -165,28 +151,18 @@
sim_print ricsim_g2_1 interface
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_print ricsim_g3_1 interface
- fi
+ sim_print ricsim_g3_1 interface
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 2 120 #Wait for the a1pms to refresh types from the simulator
- else
- a1pms_equal json:policy_types 2 120 #Wait for the a1pms to refresh types from the simulator
- fi
+ a1pms_equal json:policy-types 2 120 #Wait for the a1pms to refresh types from the simulator
a1pms_api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
START_ID=2000
NUM_POLICIES=10000 # Must be at least 100
- NUM_POLICIES=110
- if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
- else
- notificationurl=""
- fi
+
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
if [[ $interface == *"BATCH"* ]]; then
a1pms_api_put_policy_batch 201 "serv1" ricsim_g1_1 1 $START_ID NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json $NUM_POLICIES
diff --git a/test/auto-test/FTC3000.sh b/test/auto-test/FTC3000.sh
index 9f2a609..2eb1d9f 100755
--- a/test/auto-test/FTC3000.sh
+++ b/test/auto-test/FTC3000.sh
@@ -34,7 +34,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/auto-test/FTC3001.sh b/test/auto-test/FTC3001.sh
index ca31ab7..7123665 100755
--- a/test/auto-test/FTC3001.sh
+++ b/test/auto-test/FTC3001.sh
@@ -34,7 +34,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/auto-test/FTC310.sh b/test/auto-test/FTC310.sh
index ba07999..6d024d7 100755
--- a/test/auto-test/FTC310.sh
+++ b/test/auto-test/FTC310.sh
@@ -18,18 +18,18 @@
#
-TC_ONELINE_DESCR="Resync of RIC via changes in the consul config or pushed config"
+TC_ONELINE_DESCR="Resync of RIC via changes in the pushed config"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM KUBEPROXY"
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL"
+CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER"
@@ -39,126 +39,81 @@
#### TEST BEGIN ####
+generate_policy_uuid
+
+# Clean container and start all needed containers #
+clean_environment
+
+start_kube_proxy
+
+start_a1pms NOPROXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
+
+set_a1pms_trace
+
+# Create service to be able to receive events when rics become available
+# Must use rest towards the a1pms since dmaap is not configured yet
+a1pms_api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
+
+# Start one RIC of each type
+start_ric_simulators ricsim_g1 1 OSC_2.1.0
+start_ric_simulators ricsim_g2 1 STD_1.1.3
+start_ric_simulators ricsim_g3 1 STD_2.0.0
+
+start_mr
+
+start_cr 1
+
+start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
+
+
+
+
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
+
+a1pms_api_put_configuration 200 ".a1pms_config.json"
+a1pms_api_get_configuration 200 ".a1pms_config.json"
+
+a1pms_equal json:rics 3 300
+
+cr_equal 0 received_callbacks 3 120
+
+cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+
+# Add an STD RIC and check
+start_ric_simulators ricsim_g2 2 STD_1.1.3
+
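+# Regenerate and push the configuration so the added RIC is included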
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
+a1pms_api_put_configuration 200 ".a1pms_config.json"
+a1pms_api_get_configuration 200 ".a1pms_config.json"
+
+a1pms_equal json:rics 4 120
+
+cr_equal 0 received_callbacks 4 120
+
+cr_api_check_all_sync_events 200 0 ric-registration ricsim_g2_2
+
+check_a1pms_logs
+
+
+# Remove one RIC and check
+start_ric_simulators ricsim_g2 1 STD_1.1.3
+
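+# Regenerate and push the configuration with the removed RIC excluded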
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
+a1pms_api_put_configuration 200 ".a1pms_config.json"
+a1pms_api_get_configuration 200 ".a1pms_config.json"
+
+a1pms_equal json:rics 3 120
+
+cr_equal 0 received_callbacks 4 120
+
if [ "$A1PMS_VERSION" == "V2" ]; then
- TESTED_VARIANTS="CONSUL NOCONSUL"
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- TESTED_VARIANTS="NOCONSUL"
- fi
-else
- TESTED_VARIANTS="CONSUL"
+ a1pms_api_get_configuration 200 ".a1pms_config.json"
fi
-for consul_conf in $TESTED_VARIANTS ; do
- generate_policy_uuid
+check_a1pms_logs
- # Clean container and start all needed containers #
- clean_environment
+store_logs END
- start_kube_proxy
-
- start_a1pms NOPROXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
-
- set_a1pms_trace
-
- # Create service to be able to receive events when rics becomes available
- # Must use rest towards the a1pms since dmaap is not configured yet
- a1pms_api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
-
- # Start one RIC of each type
- start_ric_simulators ricsim_g1 1 OSC_2.1.0
- start_ric_simulators ricsim_g2 1 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- fi
-
- start_mr
-
- start_cr 1
-
- start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
-
- if [ $consul_conf == "CONSUL" ]; then
- start_consul_cbs
- fi
-
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
-
- if [ "$A1PMS_VERSION" == "V2" ] && [ $consul_conf == "NOCONSUL" ]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- a1pms_api_get_configuration 200 ".consul_config.json"
- else
- consul_config_app ".consul_config.json"
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:rics 3 300
-
- cr_equal 0 received_callbacks 3 120
-
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
- else
- a1pms_equal json:rics 2 300
- fi
-
- # Add an STD RIC and check
- start_ric_simulators ricsim_g2 2 STD_1.1.3
-
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
- if [ "$A1PMS_VERSION" == "V2" ] && [ $consul_conf == "NOCONSUL" ]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- a1pms_api_get_configuration 200 ".consul_config.json"
- else
- consul_config_app ".consul_config.json"
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:rics 4 120
-
- cr_equal 0 received_callbacks 4 120
-
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g2_2
- else
- a1pms_equal json:rics 3 120
- fi
-
- check_a1pms_logs
-
-
- # Remove one RIC RIC and check
- start_ric_simulators ricsim_g2 1 STD_1.1.3
-
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
- if [ "$A1PMS_VERSION" == "V2" ] && [ $consul_conf == "NOCONSUL" ]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- a1pms_api_get_configuration 200 ".consul_config.json"
- else
- consul_config_app ".consul_config.json"
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:rics 3 120
-
- cr_equal 0 received_callbacks 4 120
- else
- a1pms_equal json:rics 2 120
- fi
-
- if [ "$A1PMS_VERSION" == "V2" ] && [ $consul_conf == "NOCONSUL" ]; then
- a1pms_api_get_configuration 200 ".consul_config.json"
- fi
-
- check_a1pms_logs
-
- store_logs END_$consul_conf
-done
#### TEST COMPLETE ####
diff --git a/test/auto-test/FTC350.sh b/test/auto-test/FTC350.sh
index 379eb04..2b677d0 100755
--- a/test/auto-test/FTC350.sh
+++ b/test/auto-test/FTC350.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Change supported policy types and reconfigure rics"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL"
+CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -75,39 +75,26 @@
start_mr
- if [ $RUNMODE == "DOCKER" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then
- start_consul_cbs
- fi
- fi
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
# Create first config
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config_initial.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config_initial.json"
else
- prepare_consul_config NOSDNC ".consul_config_initial.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config_initial.json"
fi
# Create 2nd config and save for later
start_ric_simulators ricsim_g1 $NUM_RICS OSC_2.1.0
if [[ $interface = *"SDNC"* ]]; then
- prepare_consul_config SDNC ".consul_config_all.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config_all.json"
else
- prepare_consul_config NOSDNC ".consul_config_all.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config_all.json"
fi
- if [ $RUNMODE == "KUBE" ] && [[ "$A1PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ if [ $RUNMODE == "KUBE" ]; then
start_a1pms NORPOXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/application2.yaml
else
start_a1pms NORPOXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
@@ -124,21 +111,8 @@
a1pms_api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
#Load first config
- if [ $RUNMODE == "KUBE" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_initial.json"
- a1pms_api_get_configuration 200 ".consul_config_initial.json"
- else
- a1pms_load_config ".consul_config_initial.json"
- fi
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_initial.json"
- a1pms_api_get_configuration 200 ".consul_config_initial.json"
- else
- consul_config_app ".consul_config_initial.json"
- fi
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config_initial.json"
+ a1pms_api_get_configuration 200 ".a1pms_config_initial.json"
for ((i=1; i<=${NUM_RICS}; i++))
do
@@ -148,10 +122,8 @@
# All sims running but 2 are not configured in consul
a1pms_equal json:rics 8 300
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks?id=ric-registration 8 120
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
- fi
+ cr_equal 0 received_callbacks?id=ric-registration 8 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:NOTYPE:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:NOTYPE:???? \
@@ -192,31 +164,17 @@
sim_put_policy_type 201 ricsim_g1_7 5 testdata/OSC/sim_5.json
sim_put_policy_type 201 ricsim_g1_8 5 testdata/OSC/sim_5.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 5 120
+ a1pms_equal json:policy-types 5 120
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
- else
- a1pms_equal json:policy_types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy_types?ric=ricsim_g1_1 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_2 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_3 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_4 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_5 5 120
- a1pms_equal json:policy_types?ric=ricsim_g1_6 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_7 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 2 120
- fi
+ echo "Check the number of types in the a1pms for each ric"
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
@@ -227,68 +185,33 @@
ricsim_g1_7:me1_ricsim_g1_7,me2_ricsim_g1_7:3,4,5:???? \
ricsim_g1_8:me1_ricsim_g1_8,me2_ricsim_g1_8:4,5:???? "
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks?id=ric-registration 16 120
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
- fi
+ cr_equal 0 received_callbacks?id=ric-registration 16 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
#Load config with all rics
- if [ $RUNMODE == "KUBE" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_all.json"
- a1pms_api_get_configuration 200 ".consul_config_all.json"
- else
- a1pms_load_config ".consul_config_all.json"
- fi
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_all.json"
- a1pms_api_get_configuration 200 ".consul_config_all.json"
- else
- consul_config_app ".consul_config_all.json"
- fi
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config_all.json"
+ a1pms_api_get_configuration 200 ".a1pms_config_all.json"
a1pms_equal json:rics 10 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks?id=ric-registration 18 120
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9 ricsim_g1_10
- fi
+ cr_equal 0 received_callbacks?id=ric-registration 18 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9 ricsim_g1_10
sim_put_policy_type 201 ricsim_g1_9 5 testdata/OSC/sim_5.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
+ a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
- else
-
- a1pms_equal json:policy_types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy_types?ric=ricsim_g1_1 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_2 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_3 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_4 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_5 5 120
- a1pms_equal json:policy_types?ric=ricsim_g1_6 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_7 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_9 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_10 0 120
- fi
+ echo "Check the number of types in the a1pms for each ric"
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
@@ -301,80 +224,39 @@
ricsim_g1_9:me1_ricsim_g1_9,me2_ricsim_g1_9:5:???? \
ricsim_g1_10:me1_ricsim_g1_10,me2_ricsim_g1_10:NOTYPE:???? "
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks?id=ric-registration 19 120
- cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9
- fi
+ cr_equal 0 received_callbacks?id=ric-registration 19 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9
#No policy type in sim #10
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 5
- else
- a1pms_equal json:policy_types 5
- fi
+ a1pms_equal json:policy-types 5
a1pms_api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/serv1"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
- else
- notificationurl=""
- fi
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
sleep_wait 120
# Load config with reduced number of rics
- if [ $RUNMODE == "KUBE" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_initial.json"
- a1pms_api_get_configuration 200 ".consul_config_initial.json"
- else
- a1pms_load_config ".consul_config_initial.json"
- fi
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_initial.json"
- a1pms_api_get_configuration 200 ".consul_config_initial.json"
- else
- consul_config_app ".consul_config_initial.json"
- fi
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config_initial.json"
+ a1pms_api_get_configuration 200 ".a1pms_config_initial.json"
a1pms_equal json:rics 8 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
- cr_equal 0 received_callbacks?id=ric-registration 19 120
- cr_api_check_all_sync_events 200 0 ric-registration EMPTY
- fi
+ cr_equal 0 received_callbacks?id=ric-registration 19 120
+ cr_api_check_all_sync_events 200 0 ric-registration EMPTY
- if [ "$A1PMS_VERSION" == "V2" ]; then
+ a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
- else
-
- a1pms_equal json:policy_types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy_types?ric=ricsim_g1_1 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_2 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_3 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_4 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_5 5 120
- a1pms_equal json:policy_types?ric=ricsim_g1_6 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_7 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 2 120
- fi
+ echo "Check the number of types in the a1pms for each ric"
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
@@ -387,64 +269,29 @@
sleep_wait 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances 0
- else
- a1pms_equal json:policy_ids 0
- fi
+ a1pms_equal json:policy-instances 0
a1pms_api_get_policy_types 404 ricsim_g1_9
# Load config with all rics
- if [ $RUNMODE == "KUBE" ]; then
- if [[ "$A1PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_all.json"
- a1pms_api_get_configuration 200 ".consul_config_all.json"
- else
- a1pms_load_config ".consul_config_all.json"
- fi
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config_all.json"
- a1pms_api_get_configuration 200 ".consul_config_all.json"
- else
- consul_config_app ".consul_config_all.json"
- fi
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config_all.json"
+ a1pms_api_get_configuration 200 ".a1pms_config_all.json"
a1pms_equal json:rics 10 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
+ a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
- else
-
- a1pms_equal json:policy_types 5 120
-
- echo "Check the number of types in the a1pms for each ric"
- a1pms_equal json:policy_types?ric=ricsim_g1_1 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_2 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_3 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_4 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_5 5 120
- a1pms_equal json:policy_types?ric=ricsim_g1_6 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_7 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_9 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_10 0 120
- fi
+ echo "Check the number of types in the a1pms for each ric"
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_4 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_5 5 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_6 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_7 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
@@ -459,11 +306,7 @@
sleep_wait 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances 0
- else
- a1pms_equal json:policy_ids 0
- fi
+ a1pms_equal json:policy-instances 0
sim_equal ricsim_g1_9 num_instances 0
@@ -475,35 +318,18 @@
sleep_wait 120
- if [ "$A1PMS_VERSION" == "V2" ]; then
+ a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
-
- a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_4 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_5 4 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_6 3 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_7 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
- else
-
- a1pms_equal json:policy_types 5 120
-
- a1pms_equal json:policy_types?ric=ricsim_g1_1 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_2 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_3 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_4 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_5 4 120
- a1pms_equal json:policy_types?ric=ricsim_g1_6 3 120
- a1pms_equal json:policy_types?ric=ricsim_g1_7 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 2 120
- a1pms_equal json:policy_types?ric=ricsim_g1_9 1 120
- a1pms_equal json:policy_types?ric=ricsim_g1_10 0 120
- fi
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_1 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_2 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_3 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_4 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_5 4 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_6 3 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_7 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 2 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_9 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_10 0 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
@@ -518,13 +344,8 @@
sim_delete_policy_type 204 ricsim_g1_8 4
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types?ric_id=ricsim_g1_8 1 120
- else
- a1pms_equal json:policy_types 5 120
- a1pms_equal json:policy_types?ric=ricsim_g1_8 1 120
- fi
+ a1pms_equal json:policy-types 5 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_8 1 120
a1pms_api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:1:???? \
ricsim_g1_2:me1_ricsim_g1_2,me2_ricsim_g1_2:1,2:???? \
diff --git a/test/auto-test/FTC4000.sh b/test/auto-test/FTC4000.sh
index c572ca5..d8783ba 100755
--- a/test/auto-test/FTC4000.sh
+++ b/test/auto-test/FTC4000.sh
@@ -33,7 +33,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/auto-test/FTC800.sh b/test/auto-test/FTC800.sh
index 786832a..1ecd5ff 100755
--- a/test/auto-test/FTC800.sh
+++ b/test/auto-test/FTC800.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Create 10000 policies in sequence using http/https and a1pms REST/DMAAP with/without SDNC controller"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -86,9 +86,7 @@
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
- if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 1 STD_2.0.0
- fi
+ start_ric_simulators ricsim_g3 1 STD_2.0.0
start_mr
@@ -107,34 +105,20 @@
mr_equal requests_submitted 0
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
- fi
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface == "SDNC" ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
sleep_wait 120 "Let A1PMS cofiguration take effect"
@@ -143,23 +127,13 @@
sim_print ricsim_g1_1 interface
sim_print ricsim_g2_1 interface
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_print ricsim_g3_1 interface
- fi
+ sim_print ricsim_g3_1 interface
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 3 300 #Wait for the a1pms to refresh types from the simulators
- else
- a1pms_equal json:policy_types 2 300 #Wait for the a1pms to refresh types from the simulators
- fi
+ a1pms_equal json:policy-types 3 300 #Wait for the a1pms to refresh types from the simulators
a1pms_api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
- if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
- else
- notificationurl=""
- fi
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
start_timer "Create polices in OSC via a1pms REST and $interface using "$__httpx
a1pms_api_put_policy 201 "serv1" ricsim_g1_1 1 $START_ID NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json $NUM_POLICIES
@@ -175,16 +149,16 @@
sim_equal ricsim_g2_1 num_instances $NUM_POLICIES
- if [ "$A1PMS_VERSION" == "V2" ]; then
- START_ID=$(($START_ID+$NUM_POLICIES))
- start_timer "Create polices in STD 2 via a1pms REST and $interface using "$__httpx
- a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
- print_timer "Create polices in STD via a1pms REST and $interface using "$__httpx
+ START_ID=$(($START_ID+$NUM_POLICIES))
- sim_equal ricsim_g3_1 num_instances $NUM_POLICIES
- fi
+    start_timer "Create policies in STD 2 via a1pms REST and $interface using "$__httpx
+ a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
+    print_timer "Create policies in STD via a1pms REST and $interface using "$__httpx
+
+ sim_equal ricsim_g3_1 num_instances $NUM_POLICIES
+
if [ $__httpx == "HTTPS" ]; then
echo "Using secure ports towards dmaap"
@@ -210,16 +184,15 @@
sim_equal ricsim_g2_1 num_instances $((2*$NUM_POLICIES))
- if [ "$A1PMS_VERSION" == "V2" ]; then
- START_ID=$(($START_ID+$NUM_POLICIES))
+ START_ID=$(($START_ID+$NUM_POLICIES))
- start_timer "Create polices in STD 2 via a1pms DMAAP, one by one, and $interface using "$__httpx
- a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
- print_timer "Create polices in STD via a1pms DMAAP, one by one, and $interface using "$__httpx
+    start_timer "Create policies in STD 2 via a1pms DMAAP, one by one, and $interface using "$__httpx
+ a1pms_api_put_policy 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
+    print_timer "Create policies in STD via a1pms DMAAP, one by one, and $interface using "$__httpx
- sim_equal ricsim_g3_1 num_instances $((2*$NUM_POLICIES))
- fi
+ sim_equal ricsim_g3_1 num_instances $((2*$NUM_POLICIES))
+
START_ID=$(($START_ID+$NUM_POLICIES))
@@ -237,29 +210,24 @@
sim_equal ricsim_g2_1 num_instances $((3*$NUM_POLICIES))
- if [ "$A1PMS_VERSION" == "V2" ]; then
- START_ID=$(($START_ID+$NUM_POLICIES))
+ START_ID=$(($START_ID+$NUM_POLICIES))
- start_timer "Create polices in STD via a1pms DMAAP in batch and $interface using "$__httpx
- a1pms_api_put_policy_batch 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
- print_timer "Create polices in STD via a1pms DMAAP in batch and $interface using "$__httpx
+    start_timer "Create policies in STD via a1pms DMAAP in batch and $interface using "$__httpx
+ a1pms_api_put_policy_batch 201 "serv1" ricsim_g3_1 STD_QOS2_0.1.0 $START_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_POLICIES
+    print_timer "Create policies in STD via a1pms DMAAP in batch and $interface using "$__httpx
- sim_equal ricsim_g3_1 num_instances $((3*$NUM_POLICIES))
- fi
+ sim_equal ricsim_g3_1 num_instances $((3*$NUM_POLICIES))
+
if [ $interface == "SDNC" ]; then
sim_contains_str ricsim_g1_1 remote_hosts $SDNC_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $SDNC_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $SDNC_APP_NAME
else
sim_contains_str ricsim_g1_1 remote_hosts $A1PMS_APP_NAME
sim_contains_str ricsim_g2_1 remote_hosts $A1PMS_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
- fi
+ sim_contains_str ricsim_g3_1 remote_hosts $A1PMS_APP_NAME
fi
check_a1pms_logs
diff --git a/test/auto-test/FTC805.sh b/test/auto-test/FTC805.sh
index 01a774a..e652757 100755
--- a/test/auto-test/FTC805.sh
+++ b/test/auto-test/FTC805.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="A1PMS Create 10000 policies and restart, test polices persistency"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -58,11 +58,7 @@
generate_policy_uuid
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
for __httpx in $TESTED_PROTOCOLS ; do
for interface in $TESTED_VARIANTS ; do
@@ -106,31 +102,19 @@
set_a1pms_debug
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
start_cr 1
@@ -150,22 +134,14 @@
sim_put_policy_type 201 ricsim_g1_$i STD_QOS_0_2_0 testdata/STD2/sim_qos.json
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 1 300 #Wait for the a1pms to refresh types from the simulator
- else
- a1pms_equal json:policy_types 1 300 #Wait for the a1pms to refresh types from the simulator
- fi
+ a1pms_equal json:policy-types 1 300 #Wait for the a1pms to refresh types from the simulator
a1pms_api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
echo "Check the number of types in the a1pms for each ric is 1"
for ((i=1; i<=$NUM_RICS; i++))
do
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
- else
- a1pms_equal json:policy_types?ric=ricsim_g1_$i 1 120
- fi
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
done
START_ID=2000
diff --git a/test/auto-test/FTC810.sh b/test/auto-test/FTC810.sh
index ac2f88a..5ce79c0 100755
--- a/test/auto-test/FTC810.sh
+++ b/test/auto-test/FTC810.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Repeatedly create and delete policies in each RICs for 24h (or configured number of days). Via a1pms REST/DMAAP/DMAAP_BATCH and SDNC using http or https"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -48,11 +48,8 @@
#Local vars in test script
##########################
-# Number of RICs per interface type (OSC and STD)
-NUM_RICS=30
-if [ "$A1PMS_VERSION" == "V2" ]; then
- NUM_RICS=20 # 3 A1 interfaces test, less sims per interface. total sims will be same
-fi
+# Number of RICs per interface type (OSC and STD x 2)
+NUM_RICS=20
# Number of policy instances per RIC
NUM_INSTANCES=5
@@ -82,9 +79,7 @@
start_ric_simulators ricsim_g2 $NUM_RICS STD_1.1.3
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators ricsim_g3 $NUM_RICS STD_2.0.0
-fi
+start_ric_simulators ricsim_g3 $NUM_RICS STD_2.0.0
start_mr
@@ -99,26 +94,14 @@
A1PMS_RETRY_CODES=423
start_a1pms NORPOXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+
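+# Create the A1PMS configuration including SDNC and load it (a1pms_load_config in KUBE mode, REST API otherwise)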
+prepare_a1pms_config SDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
start_sdnc
@@ -139,13 +122,11 @@
sim_print ricsim_g2_$i interface
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
- echo "Print the interface for group 2 simulators, shall be STD 2"
- for ((i=1; i<=$NUM_RICS; i++))
- do
- sim_print ricsim_g3_$i interface
- done
-fi
+echo "Print the interface for group 3 simulators, shall be STD 2"
+for ((i=1; i<=$NUM_RICS; i++))
+do
+ sim_print ricsim_g3_$i interface
+done
echo "Load policy type in group 1 simulators"
for ((i=1; i<=$NUM_RICS; i++))
@@ -153,13 +134,11 @@
sim_put_policy_type 201 ricsim_g1_$i 1 testdata/OSC/sim_1.json
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
- echo "Load policy type in group 3 simulators"
- for ((i=1; i<=$NUM_RICS; i++))
- do
- sim_put_policy_type 201 ricsim_g3_$i STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
- done
-fi
+echo "Load policy type in group 3 simulators"
+for ((i=1; i<=$NUM_RICS; i++))
+do
+ sim_put_policy_type 201 ricsim_g3_$i STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
+done
echo "Check the number of instances in group 1 simulators, shall be 0"
for ((i=1; i<=$NUM_RICS; i++))
@@ -173,30 +152,20 @@
sim_equal ricsim_g2_$i num_instances 0
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
- echo "Check the number of instances in group 3 simulators, shall be 0"
- for ((i=1; i<=$NUM_RICS; i++))
- do
- sim_equal ricsim_g3_$i num_instances 0
- done
-fi
+echo "Check the number of instances in group 3 simulators, shall be 0"
+for ((i=1; i<=$NUM_RICS; i++))
+do
+ sim_equal ricsim_g3_$i num_instances 0
+done
echo "Wait for the a1pms to refresh types from the simulator"
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 3 300
-else
- a1pms_equal json:policy_types 2 300
-fi
+a1pms_equal json:policy-types 3 300
echo "Check the number of types in the a1pms for each ric is 1"
for ((i=1; i<=$NUM_RICS; i++))
do
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
- a1pms_equal json:policy-types?ric_id=ricsim_g3_$i 1 120
- else
- a1pms_equal json:policy_types?ric=ricsim_g1_$i 1 120
- fi
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
+ a1pms_equal json:policy-types?ric_id=ricsim_g3_$i 1 120
done
echo "Register a service"
@@ -209,11 +178,7 @@
MR_MESSAGES=0
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
while [ $(($SECONDS-$TEST_START)) -lt $TEST_DURATION ]; do
@@ -266,11 +231,7 @@
INSTANCES=$(($INSTANCES+$NUM_INSTANCES))
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
- fi
+ a1pms_equal json:policy-instances $INSTANCES
echo "Create $NUM_INSTANCES instances in each STD RIC"
if [ $interface == "REST_PARALLEL" ]; then
@@ -291,39 +252,28 @@
INSTANCES=$(($INSTANCES+$NUM_INSTANCES))
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
+ a1pms_equal json:policy-instances $INSTANCES
+
+ echo "Create $NUM_INSTANCES instances in each STD 2 RIC"
+ if [ $interface == "REST_PARALLEL" ]; then
+ a1pms_api_put_policy_parallel 201 "serv1" ricsim_g3_ $NUM_RICS STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES 3
fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- echo "Create $NUM_INSTANCES instances in each STD 2 RIC"
- if [ $interface == "REST_PARALLEL" ]; then
- a1pms_api_put_policy_parallel 201 "serv1" ricsim_g3_ $NUM_RICS STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES 3
+ for ((i=1; i<=$NUM_RICS; i++))
+ do
+ if [ $interface == "DMAAP-BATCH" ]; then
+ a1pms_api_put_policy_batch 201 "serv1" ricsim_g3_$i STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES
+ elif [ $interface == "DMAAP" ] || [ $interface == "REST" ]; then
+ a1pms_api_put_policy 201 "serv1" ricsim_g3_$i STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES
fi
- for ((i=1; i<=$NUM_RICS; i++))
- do
- if [ $interface == "DMAAP-BATCH" ]; then
- a1pms_api_put_policy_batch 201 "serv1" ricsim_g3_$i STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES
- elif [ $interface == "DMAAP" ] || [ $interface == "REST" ]; then
- a1pms_api_put_policy 201 "serv1" ricsim_g3_$i STD_QOS2_0.1.0 $INSTANCE_ID NOTRANSIENT $notificationurl testdata/STD2/pi_qos2_template.json $NUM_INSTANCES
- fi
- if [ $interface == "DMAAP" ] || [ $interface == "DMAAP-BATCH" ]; then
- MR_MESSAGES=$(($MR_MESSAGES+$NUM_INSTANCES))
- fi
- sim_equal ricsim_g3_$i num_instances $NUM_INSTANCES
- INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
- INSTANCES=$(($INSTANCES+$NUM_INSTANCES))
- done
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
+ if [ $interface == "DMAAP" ] || [ $interface == "DMAAP-BATCH" ]; then
+ MR_MESSAGES=$(($MR_MESSAGES+$NUM_INSTANCES))
fi
- fi
+ sim_equal ricsim_g3_$i num_instances $NUM_INSTANCES
+ INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
+ INSTANCES=$(($INSTANCES+$NUM_INSTANCES))
+ done
+ a1pms_equal json:policy-instances $INSTANCES
echo "Delete all instances in each OSC RIC"
@@ -346,11 +296,7 @@
INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
- fi
+ a1pms_equal json:policy-instances $INSTANCES
echo "Delete all instances in each STD RIC"
@@ -372,39 +318,29 @@
INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
+ a1pms_equal json:policy-instances $INSTANCES
+
+ echo "Delete all instances in each STD 2 RIC"
+
+ if [ $interface == "REST_PARALLEL" ]; then
+ a1pms_api_delete_policy_parallel 204 $NUM_RICS $INSTANCE_ID $NUM_INSTANCES 3
fi
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- echo "Delete all instances in each STD 2 RIC"
-
- if [ $interface == "REST_PARALLEL" ]; then
- a1pms_api_delete_policy_parallel 204 $NUM_RICS $INSTANCE_ID $NUM_INSTANCES 3
+ for ((i=1; i<=$NUM_RICS; i++))
+ do
+ if [ $interface == "DMAAP-BATCH" ]; then
+ a1pms_api_delete_policy_batch 204 $INSTANCE_ID $NUM_INSTANCES
+ elif [ $interface == "DMAAP" ] || [ $interface == "REST" ]; then
+ a1pms_api_delete_policy 204 $INSTANCE_ID $NUM_INSTANCES
fi
- for ((i=1; i<=$NUM_RICS; i++))
- do
- if [ $interface == "DMAAP-BATCH" ]; then
- a1pms_api_delete_policy_batch 204 $INSTANCE_ID $NUM_INSTANCES
- elif [ $interface == "DMAAP" ] || [ $interface == "REST" ]; then
- a1pms_api_delete_policy 204 $INSTANCE_ID $NUM_INSTANCES
- fi
- if [ $interface == "DMAAP" ] || [ $interface == "DMAAP-BATCH" ]; then
- MR_MESSAGES=$(($MR_MESSAGES+$NUM_INSTANCES))
- fi
- INSTANCES=$(($INSTANCES-$NUM_INSTANCES))
- sim_equal ricsim_g3_$i num_instances 0
- INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
- done
-
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-instances $INSTANCES
- else
- a1pms_equal json:policy_ids $INSTANCES
+ if [ $interface == "DMAAP" ] || [ $interface == "DMAAP-BATCH" ]; then
+ MR_MESSAGES=$(($MR_MESSAGES+$NUM_INSTANCES))
fi
- fi
+ INSTANCES=$(($INSTANCES-$NUM_INSTANCES))
+ sim_equal ricsim_g3_$i num_instances 0
+ INSTANCE_ID=$(($INSTANCE_ID+$NUM_INSTANCES))
+ done
+
+ a1pms_equal json:policy-instances $INSTANCES
mr_equal requests_submitted $MR_MESSAGES
mr_equal requests_fetched $MR_MESSAGES
@@ -419,9 +355,7 @@
sim_contains_str ricsim_g1_$i remote_hosts $SDNC_APP_NAME
sim_contains_str ricsim_g2_$i remote_hosts $SDNC_APP_NAME
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_contains_str ricsim_g3_$i remote_hosts $SDNC_APP_NAME
- fi
+ sim_contains_str ricsim_g3_$i remote_hosts $SDNC_APP_NAME
done
done
diff --git a/test/auto-test/FTC850.sh b/test/auto-test/FTC850.sh
index 0a9f0b8..4b795c0 100755
--- a/test/auto-test/FTC850.sh
+++ b/test/auto-test/FTC850.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Create/delete policies in parallel over a number of rics using a number of child process"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -57,11 +57,7 @@
generate_policy_uuid
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
for __httpx in $TESTED_PROTOCOLS ; do
for interface in $TESTED_VARIANTS ; do
@@ -107,31 +103,19 @@
set_a1pms_debug
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
start_mr # Not used, but removes error messages from the a1pms log
@@ -153,22 +137,14 @@
sim_put_policy_type 201 ricsim_g1_$i 1 testdata/OSC/sim_1.json
done
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 1 300 #Wait for the a1pms to refresh types from the simulator
- else
- a1pms_equal json:policy_types 1 300 #Wait for the a1pms to refresh types from the simulator
- fi
+ a1pms_equal json:policy-types 1 300 #Wait for the a1pms to refresh types from the simulator
a1pms_api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH_0/1"
echo "Check the number of types in the a1pms for each ric is 1"
for ((i=1; i<=$NUM_RICS; i++))
do
- if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
- else
- a1pms_equal json:policy_types?ric=ricsim_g1_$i 1 120
- fi
+ a1pms_equal json:policy-types?ric_id=ricsim_g1_$i 1 120
done
START_ID=2000
diff --git a/test/auto-test/FTC900.sh b/test/auto-test/FTC900.sh
index 3d87934..b9a6654 100755
--- a/test/auto-test/FTC900.sh
+++ b/test/auto-test/FTC900.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Preparation for test of the Control Panel and the Health Check app - populating a number of ric simulators with types and instances"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -54,9 +54,7 @@
start_ric_simulators $RIC_SIM_PREFIX"_g2" $STD_NUM_RICS STD_1.1.3
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators $RIC_SIM_PREFIX"_g3" $STD_NUM_RICS STD_2.0.0
-fi
+start_ric_simulators $RIC_SIM_PREFIX"_g3" $STD_NUM_RICS STD_2.0.0
start_mr #Just to prevent errors in the a1pms log...
@@ -70,26 +68,14 @@
use_a1pms_rest_http
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+
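+# Create the A1PMS configuration without SDNC and load it (a1pms_load_config in KUBE mode, REST API otherwise)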
+prepare_a1pms_config NOSDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
sleep_wait 120 "Let A1PMS cofiguration take effect"
@@ -109,13 +95,11 @@
sim_print $RIC_SIM_PREFIX"_g2_"$i interface
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
- # Print the A1 version for STD 2.X
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- sim_print $RIC_SIM_PREFIX"_g3_"$i interface
- done
-fi
+# Print the A1 version for STD 2.X
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ sim_print $RIC_SIM_PREFIX"_g3_"$i interface
+done
# Load the polictypes in osc
for ((i=1; i<=$OSC_NUM_RICS; i++))
@@ -127,73 +111,49 @@
#Check the number of schemas and the individual schemas in OSC
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 4 300
+a1pms_equal json:policy-types 4 300
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g1_"$i 3 120
- done
+for ((i=1; i<=$OSC_NUM_RICS; i++))
+do
+ a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g1_"$i 3 120
+done
- # Check the schemas in OSC
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_api_get_policy_type 200 2 testdata/OSC/hw-a1pms-modified.json
- a1pms_api_get_policy_type 200 100 testdata/OSC/qos-a1pms-modified.json
- a1pms_api_get_policy_type 200 20008 testdata/OSC/tsa-a1pms-modified.json
- done
-else
- a1pms_equal json:policy_types 4 300
+# Check the schemas in OSC
+for ((i=1; i<=$OSC_NUM_RICS; i++))
+do
+ a1pms_api_get_policy_type 200 2 testdata/OSC/hw-a1pms-modified.json
+ a1pms_api_get_policy_type 200 100 testdata/OSC/qos-a1pms-modified.json
+ a1pms_api_get_policy_type 200 20008 testdata/OSC/tsa-a1pms-modified.json
+done
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_equal json:policy_types?ric=$RIC_SIM_PREFIX"_g1_"$i 3 120
- done
+# Load the policy types in std
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 demo-testdata/STD2/sim_qos.json
+ sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS2_0.1.0 demo-testdata/STD2/sim_qos2.json
+done
- # Check the schemas in OSC
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_api_get_policy_schema 200 2 testdata/OSC/hw-a1pms-modified.json
- a1pms_api_get_policy_schema 200 100 testdata/OSC/qos-a1pms-modified.json
- a1pms_api_get_policy_schema 200 20008 testdata/OSC/tsa-a1pms-modified.json
- done
-fi
+#Check the number of schemas and the individual schemas in STD
+a1pms_equal json:policy-types 6 120
-if [ "$A1PMS_VERSION" == "V2" ]; then
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g3_"$i 2 120
+done
- # Load the polictypes in std
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 demo-testdata/STD2/sim_qos.json
- sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS2_0.1.0 demo-testdata/STD2/sim_qos2.json
- done
-
- #Check the number of schemas and the individual schemas in STD
- a1pms_equal json:policy-types 6 120
-
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g3_"$i 2 120
- done
-
- # Check the schemas in STD
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- a1pms_api_get_policy_type 200 STD_QOS_0_2_0 demo-testdata/STD2/qos-a1pms-modified.json
- a1pms_api_get_policy_type 200 'STD_QOS2_0.1.0' demo-testdata/STD2/qos2-a1pms-modified.json
- done
-fi
+# Check the schemas in STD
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ a1pms_api_get_policy_type 200 STD_QOS_0_2_0 demo-testdata/STD2/qos-a1pms-modified.json
+ a1pms_api_get_policy_type 200 'STD_QOS2_0.1.0' demo-testdata/STD2/qos2-a1pms-modified.json
+done
# Create policies
use_a1pms_rest_http
a1pms_api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
# Create policies in OSC
for ((i=1; i<=$OSC_NUM_RICS; i++))
@@ -219,12 +179,10 @@
do
generate_policy_uuid
a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g2_"$i NOTYPE $((2100+$i)) NOTRANSIENT $notificationurl testdata/STD/pi1_template.json 1
- if [ "$A1PMS_VERSION" == "V2" ]; then
- generate_policy_uuid
- a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 $((2300+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
- generate_policy_uuid
- a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i 'STD_QOS2_0.1.0' $((2400+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
- fi
+ generate_policy_uuid
+ a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 $((2300+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
+ generate_policy_uuid
+ a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i 'STD_QOS2_0.1.0' $((2400+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
done
@@ -232,9 +190,7 @@
for ((i=1; i<=$STD_NUM_RICS; i++))
do
sim_equal $RIC_SIM_PREFIX"_g2_"$i num_instances 1
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal $RIC_SIM_PREFIX"_g3_"$i num_instances 2
- fi
+ sim_equal $RIC_SIM_PREFIX"_g3_"$i num_instances 2
done
check_a1pms_logs
diff --git a/test/auto-test/FTC_HELM_E_RELEASE.sh b/test/auto-test/FTC_HELM_E_RELEASE.sh
index 8b68a81..f6433ad 100755
--- a/test/auto-test/FTC_HELM_E_RELEASE.sh
+++ b/test/auto-test/FTC_HELM_E_RELEASE.sh
@@ -66,11 +66,6 @@
###############################use_control_panel_https
use_control_panel_http
-if [ "$A1PMS_VERSION" == "V1" ]; then
- echo "A1PMS VERSION 2 (V2) is required"
- exit 1
-fi
-
clean_environment
ics_kube_pvc_reset
@@ -227,23 +222,11 @@
a1_a1pms_api_get_policy_type 200 2 testdata/OSC/2-a1pms-modified.json
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
+a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
+a1pms_equal json:policies 0
- a1pms_equal json:policies 0
-
- a1pms_equal json:policy-instances 0
-else
-
- a1pms_equal json:policy_schemas 5 120
-
- a1pms_equal json:policy_types 5
-
- a1pms_equal json:policies 0
-
- a1pms_equal json:policy_ids 0
-fi
+a1pms_equal json:policy-instances 0
a1pms_api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/ER-app"
@@ -299,23 +282,11 @@
sleep_wait 200
-if [ "$A1PMS_VERSION" == "V2" ]; then
+a1pms_equal json:policy-types 5 120
- a1pms_equal json:policy-types 5 120
+a1pms_equal json:policies 12
- a1pms_equal json:policies 12
-
- a1pms_equal json:policy-instances 12
-else
-
- a1pms_equal json:policy_schemas 5 120
-
- a1pms_equal json:policy_types 5
-
- a1pms_equal json:policies 12
-
- a1pms_equal json:policy_ids 12
-fi
+a1pms_equal json:policy-instances 12
# Check the number of policies in STD and STD2
for ((i=0; i<$STD_NUM_RICS; i++))
diff --git a/test/auto-test/ONAP_UC.sh b/test/auto-test/ONAP_UC.sh
index f6f5ae0..7d7c935 100755
--- a/test/auto-test/ONAP_UC.sh
+++ b/test/auto-test/ONAP_UC.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="ONAP Use case REQ-626"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR DMAAPMR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR DMAAPMR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR DMAAPMR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON "
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON "
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -52,12 +52,7 @@
use_simulator_https
use_mr_https
__httpx="HTTPS"
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- echo "Version V2 of A1PMS is needed, exiting..."
- exit 1
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
generate_policy_uuid
@@ -99,20 +94,13 @@
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
- __CONFIG_HEADER="NOHEADER"
- if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
- else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
+
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
- prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config SDNC ".a1pms_config.json"
else
- prepare_consul_config NOSDNC ".consul_config.json" $__CONFIG_HEADER
+ prepare_a1pms_config NOSDNC ".a1pms_config.json"
fi
start_a1pms NORPOXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
@@ -120,32 +108,27 @@
set_a1pms_trace
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- #Temporary switch to http/https if dmaap use. Otherwise it is not possibble to push config
- if [ $__httpx == "HTTPS" ]; then
+        #Temporarily switch to http/https when dmaap is used. Otherwise it is not possible to push the config
+ if [ $__httpx == "HTTPS" ]; then
+ use_a1pms_rest_https
+ else
+ use_a1pms_rest_http
+ fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_https
+ else
use_a1pms_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_a1pms_dmaap_http
else
use_a1pms_rest_http
fi
- a1pms_api_put_configuration 200 ".consul_config.json"
- if [ $__httpx == "HTTPS" ]; then
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_https
- else
- use_a1pms_rest_https
- fi
- else
- if [[ $interface = *"DMAAP"* ]]; then
- use_a1pms_dmaap_http
- else
- use_a1pms_rest_http
- fi
- fi
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
fi
fi
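
For readability, a condensed sketch of the docker-mode flow that the rewritten block above implements now that the NOCONSUL branch is gone (function and variable names are taken from the hunk; illustrative only, assuming the common test scripts under test/common are sourced):

    if [ $RUNMODE == "KUBE" ]; then
        a1pms_load_config ".a1pms_config.json"
    else
        # Temporarily switch to plain REST so the configuration can be pushed
        if [ $__httpx == "HTTPS" ]; then use_a1pms_rest_https; else use_a1pms_rest_http; fi
        a1pms_api_put_configuration 200 ".a1pms_config.json"
        # Restore the interface variant under test (DMAAP or REST, http or https)
        if [ $__httpx == "HTTPS" ]; then
            if [[ $interface = *"DMAAP"* ]]; then use_a1pms_dmaap_https; else use_a1pms_rest_https; fi
        else
            if [[ $interface = *"DMAAP"* ]]; then use_a1pms_dmaap_http; else use_a1pms_rest_http; fi
        fi
    fi
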
diff --git a/test/auto-test/PM_DEMO.sh b/test/auto-test/PM_DEMO.sh
index a8bc7f4..f4adcd6 100755
--- a/test/auto-test/PM_DEMO.sh
+++ b/test/auto-test/PM_DEMO.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Preparation demo setup - populating a number of ric simulators with types and instances"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -51,11 +51,7 @@
use_sdnc_https
use_simulator_https
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- notificationurl=""
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
clean_environment
@@ -68,9 +64,7 @@
start_ric_simulators $RIC_SIM_PREFIX"_g2" $STD_NUM_RICS STD_1.1.3
-if [ "$A1PMS_VERSION" == "V2" ]; then
- start_ric_simulators $RIC_SIM_PREFIX"_g3" $STD_NUM_RICS STD_2.0.0
-fi
+start_ric_simulators $RIC_SIM_PREFIX"_g3" $STD_NUM_RICS STD_2.0.0
start_mr #Just to prevent errors in the a1pms log...
@@ -86,26 +80,14 @@
set_a1pms_trace
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
- fi
-prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER
+
+prepare_a1pms_config SDNC ".a1pms_config.json"
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
sleep_wait 120 "Let A1PMS configuration take effect"
@@ -125,13 +107,11 @@
sim_print $RIC_SIM_PREFIX"_g2_"$i interface
done
-if [ "$A1PMS_VERSION" == "V2" ]; then
- # Print the A1 version for STD 2.X
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- sim_print $RIC_SIM_PREFIX"_g3_"$i interface
- done
-fi
+# Print the A1 version for STD 2.X
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ sim_print $RIC_SIM_PREFIX"_g3_"$i interface
+done
# Load the policy types in OSC
@@ -143,71 +123,45 @@
#Check the number of schemas and the individual schemas in OSC
-if [ "$A1PMS_VERSION" == "V2" ]; then
+a1pms_equal json:policy-types 3 300
- a1pms_equal json:policy-types 3 300
+for ((i=1; i<=$OSC_NUM_RICS; i++))
+do
+ a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g1_"$i 2 120
+done
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g1_"$i 2 120
- done
-
- # Check the schemas in OSC
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_api_get_policy_type 200 100 demo-testdata/OSC/qos-a1pms-modified.json
- a1pms_api_get_policy_type 200 20008 demo-testdata/OSC/tsa-a1pms-modified.json
- done
-else
- a1pms_equal json:policy_types 3 300
-
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_equal json:policy_types?ric=$RIC_SIM_PREFIX"_g1_"$i 2 120
- done
-
- # Check the schemas in OSC
- for ((i=1; i<=$OSC_NUM_RICS; i++))
- do
- a1pms_api_get_policy_schema 200 100 demo-testdata/OSC/qos-a1pms-modified.json
- a1pms_api_get_policy_schema 200 20008 demo-testdata/OSC/tsa-a1pms-modified.json
- done
-fi
+# Check the schemas in OSC
+for ((i=1; i<=$OSC_NUM_RICS; i++))
+do
+ a1pms_api_get_policy_type 200 100 demo-testdata/OSC/qos-a1pms-modified.json
+ a1pms_api_get_policy_type 200 20008 demo-testdata/OSC/tsa-a1pms-modified.json
+done
+# Load the policy types in STD
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 demo-testdata/STD2/sim_qos.json
+ sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS2_0.1.0 demo-testdata/STD2/sim_qos2.json
+done
+#Check the number of schemas and the individual schemas in STD
+a1pms_equal json:policy-types 5 120
-if [ "$A1PMS_VERSION" == "V2" ]; then
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g3_"$i 2 120
+done
- # Load the polictypes in std
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 demo-testdata/STD2/sim_qos.json
- sim_put_policy_type 201 $RIC_SIM_PREFIX"_g3_"$i STD_QOS2_0.1.0 demo-testdata/STD2/sim_qos2.json
- done
-
- #Check the number of schemas and the individual schemas in STD
- a1pms_equal json:policy-types 5 120
-
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- a1pms_equal json:policy-types?ric_id=$RIC_SIM_PREFIX"_g3_"$i 2 120
- done
-
- # Check the schemas in STD
- for ((i=1; i<=$STD_NUM_RICS; i++))
- do
- a1pms_api_get_policy_type 200 STD_QOS_0_2_0 demo-testdata/STD2/qos-a1pms-modified.json
- a1pms_api_get_policy_type 200 'STD_QOS2_0.1.0' demo-testdata/STD2/qos2-a1pms-modified.json
- done
-fi
+# Check the schemas in STD
+for ((i=1; i<=$STD_NUM_RICS; i++))
+do
+ a1pms_api_get_policy_type 200 STD_QOS_0_2_0 demo-testdata/STD2/qos-a1pms-modified.json
+ a1pms_api_get_policy_type 200 'STD_QOS2_0.1.0' demo-testdata/STD2/qos2-a1pms-modified.json
+done
#Check the number of types
-if [ "$A1PMS_VERSION" == "V2" ]; then
- a1pms_equal json:policy-types 5 120
-else
- a1pms_equal json:policy_types 3 120
-fi
+a1pms_equal json:policy-types 5 120
# Create policies
@@ -237,12 +191,10 @@
do
generate_policy_uuid
a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g2_"$i NOTYPE $((2100+$i)) NOTRANSIENT $notificationurl demo-testdata/STD/pi1_template.json 1
- if [ "$A1PMS_VERSION" == "V2" ]; then
- generate_policy_uuid
- a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 $((2300+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
- generate_policy_uuid
- a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i 'STD_QOS2_0.1.0' $((2400+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
- fi
+ generate_policy_uuid
+ a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i STD_QOS_0_2_0 $((2300+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
+ generate_policy_uuid
+ a1pms_api_put_policy 201 "Emergency-response-app" $RIC_SIM_PREFIX"_g3_"$i 'STD_QOS2_0.1.0' $((2400+$i)) NOTRANSIENT $notificationurl demo-testdata/STD2/pi1_template.json 1
done
@@ -250,9 +202,7 @@
for ((i=1; i<=$STD_NUM_RICS; i++))
do
sim_equal $RIC_SIM_PREFIX"_g2_"$i num_instances 1
- if [ "$A1PMS_VERSION" == "V2" ]; then
- sim_equal $RIC_SIM_PREFIX"_g3_"$i num_instances 2
- fi
+ sim_equal $RIC_SIM_PREFIX"_g3_"$i num_instances 2
done
check_a1pms_logs
diff --git a/test/auto-test/PM_EI_DEMO.sh b/test/auto-test/PM_EI_DEMO.sh
index 0ac0daa..7b8a6ed 100755
--- a/test/auto-test/PM_EI_DEMO.sh
+++ b/test/auto-test/PM_EI_DEMO.sh
@@ -20,7 +20,7 @@
TC_ONELINE_DESCR="Preparation demo setup - policy management and enrichment information"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR A1PMS RICSIM SDNC ICS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
+DOCKER_INCLUDED_IMAGES="CP CR MR A1PMS RICSIM SDNC ICS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES=" MR CR A1PMS RC PRODSTUB RICSIM CP ICS SDNC HTTPPROXY KUBEPROXY NGW"
@@ -30,10 +30,10 @@
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW"
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ONAP-JAKARTA ONAP-KOHN ONAP-LONDON ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
@@ -59,12 +59,7 @@
fi
-if [ "$A1PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH_0"/test"
-else
- echo "A1PMS VERSION 2 (V2) is required"
- exit 1
-fi
+notificationurl=$CR_SERVICE_APP_PATH_0"/test"
clean_environment
@@ -88,26 +83,14 @@
start_a1pms PROXY $SIM_GROUP/$A1PMS_COMPOSE_DIR/$A1PMS_CONFIG_FILE
-__CONFIG_HEADER="NOHEADER"
-if [ $RUNMODE == "KUBE" ]; then
- __CONFIG_HEADER="HEADER"
-else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- __CONFIG_HEADER="HEADER"
- fi
-fi
-prepare_consul_config SDNC ".consul_config.json" $__CONFIG_HEADER #Change to NOSDNC if running A1PMS with proxy
+
+prepare_a1pms_config SDNC ".a1pms_config.json" #Change to NOSDNC if running A1PMS with proxy
if [ $RUNMODE == "KUBE" ]; then
- a1pms_load_config ".consul_config.json"
+ a1pms_load_config ".a1pms_config.json"
else
- if [[ "$A1PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
- a1pms_api_put_configuration 200 ".consul_config.json"
- else
- start_consul_cbs
- consul_config_app ".consul_config.json"
- fi
+ a1pms_api_put_configuration 200 ".a1pms_config.json"
fi
start_cr 1
diff --git a/test/auto-test/startMR.sh b/test/auto-test/startMR.sh
index cd4740a..823a72e 100755
--- a/test/auto-test/startMR.sh
+++ b/test/auto-test/startMR.sh
@@ -33,7 +33,7 @@
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
+SUPPORTED_PROFILES="ORAN-F-RELEASE ORAN-G-RELEASE ORAN-H-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
diff --git a/test/common/.gitignore b/test/common/.gitignore
index bdc6c7d..8f948f4 100644
--- a/test/common/.gitignore
+++ b/test/common/.gitignore
@@ -1,2 +1,4 @@
NO-CHECKIN
TEST_TMP
+REMOVE_*
+
diff --git a/test/common/README.md b/test/common/README.md
index c61debb..ef65172 100644
--- a/test/common/README.md
+++ b/test/common/README.md
@@ -329,28 +329,6 @@
| `<chart-name>` | Name of the chart to delete |
| `<version>` | Chart version, default is 0.1.0 |
-
-# Description of functions in consul_api_function.sh #
-
-## Function: consul_config_app ##
-
-Function to load a json config from a file into consul for the A1PMS
-
-| arg list |
-|--|
-| `<json-config-file>` |
-
-| parameter | description |
-| --------- | ----------- |
-| `<json-config-file>` | The path to the json file to be loaded to Consul/CBS |
-
-## Function: start_consul_cbs ##
-
-Start the Consul and CBS containers
-| arg list |
-|--|
-| None |
-
# Description of functions in cp_api_functions.sh #
## Function: use_control_panel_http ##
@@ -2080,9 +2058,9 @@
|--|
| None |
-## Function: prepare_consul_config ##
+## Function: prepare_a1pms_config ##
-Function to prepare a Consul config based on the previously configured (and started simulators). Note that all simulator must be running and the test script has to configure if http or https shall be used for the components (this is done by the functions 'use_simulator_http', 'use_simulator_https', 'use_sdnc_http', 'use_sdnc_https', 'use_mr_http', 'use_mr_https')
+Function to prepare an a1pms config based on the previously configured (and started) simulators. Note that all simulators must be running and that the test script has to configure whether http or https shall be used for the components (this is done by the functions 'use_simulator_http', 'use_simulator_https', 'use_sdnc_http', 'use_sdnc_https', 'use_mr_http', 'use_mr_https')
| arg list |
|--|
| `SDNC|NOSDNC <output-file>` |
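
For reference, a minimal usage sketch of the renamed function, mirroring how the updated test scripts above call it (illustrative only; assumes the common test scripts under test/common are sourced and that the simulators/SDNC have been started and their http/https mode selected with the use_* functions):

    # Build the A1PMS configuration from the currently started components
    prepare_a1pms_config SDNC ".a1pms_config.json"            # use NOSDNC when no SDNC controller is included

    # Apply the configuration depending on run mode
    if [ $RUNMODE == "KUBE" ]; then
        a1pms_load_config ".a1pms_config.json"
    else
        a1pms_api_put_configuration 200 ".a1pms_config.json"  # pushed via the A1PMS REST API, expecting HTTP 200
    fi
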
diff --git a/test/common/a1pms_api_functions.sh b/test/common/a1pms_api_functions.sh
index e74c310..ccc424e 100644
--- a/test/common/a1pms_api_functions.sh
+++ b/test/common/a1pms_api_functions.sh
@@ -197,9 +197,6 @@
export A1PMS_CONFIG_CONFIGMAP_NAME=$A1PMS_APP_NAME"-config"
export A1PMS_DATA_CONFIGMAP_NAME=$A1PMS_APP_NAME"-data"
export A1PMS_PKG_NAME
- export CONSUL_HOST
- export CONSUL_INTERNAL_PORT
- export CONFIG_BINDING_SERVICE
export A1PMS_CONFIG_KEY
export DOCKER_SIM_NWNAME
export A1PMS_HOST_MNT_DIR
@@ -460,16 +457,16 @@
# Function to prepare the consul configuration according to the current simulator configuration
-# args: SDNC|NOSDNC <output-file> HEADER|NOHEADER
+# args: SDNC|NOSDNC <output-file>
# (Function for test scripts)
-prepare_consul_config() {
+prepare_a1pms_config() {
echo -e $BOLD"Prepare Consul config"$EBOLD
echo " Writing consul config for "$A1PMS_APP_NAME" to file: "$2
- if [ $# != 3 ]; then
+ if [ $# != 2 ]; then
((RES_CONF_FAIL++))
- __print_err "need two args, SDNC|NOSDNC <output-file> HEADER|NOHEADER" $@
+ __print_err "need two args, SDNC|NOSDNC <output-file>" $@
exit 1
fi
@@ -571,9 +568,7 @@
config_json=$config_json"\n ]"
config_json=$config_json"\n}"
- if [ $3 == "HEADER" ]; then
- config_json="{\"config\":"$config_json"}"
- fi
+ config_json="{\"config\":"$config_json"}"
printf "$config_json">$2
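
With the HEADER/NOHEADER argument gone, the prepared configuration is now always wrapped in a top-level "config" object before being written to the output file. A purely illustrative sketch of the effect (the inner keys are placeholders, not the real A1PMS configuration schema):

    config_json='{"ric":[]}'                          # body assembled earlier by prepare_a1pms_config
    config_json="{\"config\":"$config_json"}"         # unconditional wrapping, as in the hunk above
    printf "%s" "$config_json" > ".a1pms_config.json"
    # .a1pms_config.json now contains: {"config":{"ric":[]}}
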
diff --git a/test/common/consul_api_functions.sh b/test/common/consul_api_functions.sh
deleted file mode 100644
index 221124b..0000000
--- a/test/common/consul_api_functions.sh
+++ /dev/null
@@ -1,279 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-# This is a script that contains container/service management functions and test functions for Consul/CBS
-
-################ Test engine functions ################
-
-# Create the image var used during the test
-# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
-# <image-tag-suffix> is present only for that exist with staging, snapshot,release tags
-__CONSUL_imagesetup() {
- __check_and_create_image_var CONSUL "CONSUL_IMAGE" "CONSUL_IMAGE_BASE" "CONSUL_IMAGE_TAG" REMOTE_PROXY "$CONSUL_DISPLAY_NAME"
-
-}
-
-# Create the image var used during the test
-# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
-# <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__CBS_imagesetup() {
- __check_and_create_image_var CBS "CBS_IMAGE" "CBS_IMAGE_BASE" "CBS_IMAGE_TAG" REMOTE_RELEASE_ONAP "$CBS_DISPLAY_NAME"
-
-}
-
-# Pull image from remote repo or use locally built image
-# arg: <pull-policy-override> <pull-policy-original>
-# <pull-policy-override> Shall be used for images allowing overriding. For example use a local image when test is started to use released images
-# <pull-policy-original> Shall be used for images that does not allow overriding
-# Both var may contain: 'remote', 'remote-remove' or 'local'
-__CONSUL_imagepull() {
- __check_and_pull_image $2 "$CONSUL_DISPLAY_NAME" $CONSUL_APP_NAME CONSUL_IMAGE
-}
-
-# Pull image from remote repo or use locally built image
-# arg: <pull-policy-override> <pull-policy-original>
-# <pull-policy-override> Shall be used for images allowing overriding. For example use a local image when test is started to use released images
-# <pull-policy-original> Shall be used for images that does not allow overriding
-# Both var may contain: 'remote', 'remote-remove' or 'local'
-__CBS_imagepull() {
- __check_and_pull_image $2 "$CBS_DISPLAY_NAME" $CBS_APP_NAME CBS_IMAGE
-}
-
-# Build image (only for simulator or interfaces stubs owned by the test environment)
-# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
-# <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__CONSUL_imagebuild() {
- echo -e $RED" Image for app CONSUL shall never be built"$ERED
-}
-
-# Build image (only for simulator or interfaces stubs owned by the test environment)
-# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
-# <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__CBS_imagebuild() {
- echo -e $RED" Image for app CBS shall never be built"$ERED
-}
-
-# Generate a string for each included image using the app display name and a docker images format string
-# If a custom image repo is used then also the source image from the local repo is listed
-# arg: <docker-images-format-string> <file-to-append>
-__CONSUL_image_data() {
- echo -e "$CONSUL_DISPLAY_NAME\t$(docker images --format $1 $CONSUL_IMAGE)" >> $2
- if [ ! -z "$CONSUL_IMAGE_SOURCE" ]; then
- echo -e "-- source image --\t$(docker images --format $1 $CONSUL_IMAGE_SOURCE)" >> $2
- fi
-}
-
-# Generate a string for each included image using the app display name and a docker images format string
-# If a custom image repo is used then also the source image from the local repo is listed
-# arg: <docker-images-format-string> <file-to-append>
-__CBS_image_data() {
- echo -e "$CBS_DISPLAY_NAME\t$(docker images --format $1 $CBS_IMAGE)" >> $2
- if [ ! -z "$CBS_IMAGE_SOURCE" ]; then
- echo -e "-- source image --\t$(docker images --format $1 $CBS_IMAGE_SOURCE)" >> $2
- fi
-}
-
-# Scale kubernetes resources to zero
-# All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
-# This function is called for apps fully managed by the test script
-__CONSUL_kube_scale_zero() {
- echo -e $RED" Image for app CONSUL is not used in kube"$ERED
-}
-
-# Scale kubernetes resources to zero
-# All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
-# This function is called for apps fully managed by the test script
-__CBS_kube_scale_zero() {
- echo -e $RED" Image for app CBS is not used in kube"$ERED
-}
-
-# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
-# This function is called for prestarted apps not managed by the test script.
-__CONSUL_kube_scale_zero_and_wait() {
- echo -e $RED" CONSUL app is not used in kube"$ERED
-}
-
-# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
-# This function is called for prestarted apps not managed by the test script.
-__CBS_kube_scale_zero_and_wait() {
- echo -e $RED" CBS app is not used in kube"$ERED
-}
-
-# Delete all kube resouces for the app
-# This function is called for apps managed by the test script.
-__CONSUL_kube_delete_all() {
- echo -e $RED" CONSUL app is not used in kube"$ERED
-}
-
-# Delete all kube resouces for the app
-# This function is called for apps managed by the test script.
-__CBS_kube_delete_all() {
- echo -e $RED" CBS app is not used in kube"$ERED
-}
-
-# Store docker logs
-# This function is called for apps managed by the test script.
-# args: <log-dir> <file-prexix>
-__CONSUL_store_docker_logs() {
- if [ $RUNMODE == "KUBE" ]; then
- :
- else
- docker logs $CONSUL_APP_NAME > $1/$2_consul.log 2>&1
- fi
-}
-
-# Store docker logs
-# This function is called for apps managed by the test script.
-# args: <log-dir> <file-prexix>
-__CBS_store_docker_logs() {
- if [ $RUNMODE == "KUBE" ]; then
- :
- else
- docker logs $CBS_APP_NAME > $1$2_cbs.log 2>&1
- body="$(__do_curl $LOCALHOST_HTTP:$CBS_EXTERNAL_PORT/service_component_all/$A1PMS_APP_NAME)"
- echo "$body" > $1$2_consul_config.json 2>&1
- fi
-}
-
-# Initial setup of protocol, host and ports
-# This function is called for apps managed by the test script.
-# args: -
-__CONSUL_initial_setup() {
- CONSUL_SERVICE_PATH="http://"$CONSUL_APP_NAME":"$CONSUL_INTERNAL_PORT
-}
-
-# Initial setup of protocol, host and ports
-# This function is called for apps managed by the test script.
-# args: -
-__CBS_initial_setup() {
- CBS_SERVICE_PATH="http://"$CBS_APP_NAME":"$CBS_INTERNAL_PORT
-}
-
-# Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
-# For docker, the namespace shall be excluded
-# This function is called for apps managed by the test script as well as for prestarted apps.
-# args: -
-__CONSUL_statisics_setup() {
- echo ""
-}
-
-# Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
-# For docker, the namespace shall be excluded
-# This function is called for apps managed by the test script as well as for prestarted apps.
-# args: -
-__CBS_statisics_setup() {
- echo ""
-}
-
-# Check application requirements, e.g. helm, the the test needs. Exit 1 if req not satisfied
-# args: -
-__CONSUL_test_requirements() {
- :
-}
-
-# Check application requirements, e.g. helm, the the test needs. Exit 1 if req not satisfied
-# args: -
-__CBS_test_requirements() {
- :
-}
-#######################################################
-
-
-####################
-### Consul functions
-####################
-
-# Function to load config from a file into consul for the a1pms
-# arg: <json-config-file>
-# (Function for test scripts)
-consul_config_app() {
-
- echo -e $BOLD"Configuring Consul"$EBOLD
-
- if [ $# -ne 1 ]; then
- ((RES_CONF_FAIL++))
- __print_err "need one arg, <json-config-file>" $@
- exit 1
- fi
-
- echo " Loading config for "$A1PMS_APP_NAME" from "$1
-
- curlString="$CONSUL_SERVICE_PATH/v1/kv/${A1PMS_CONFIG_KEY}?dc=dc1 -X PUT -H Accept:application/json -H Content-Type:application/json -H X-Requested-With:XMLHttpRequest --data-binary @"$1
-
- result=$(__do_curl "$curlString")
- if [ $? -ne 0 ]; then
- echo -e $RED" FAIL - json config could not be loaded to consul" $ERED
- ((RES_CONF_FAIL++))
- return 1
- fi
- body="$(__do_curl $CBS_SERVICE_PATH/service_component_all/$A1PMS_CONFIG_KEY)"
- echo $body > "./tmp/.output"$1
-
- if [ $? -ne 0 ]; then
- echo -e $RED" FAIL - json config could not be loaded from consul/cbs, contents cannot be checked." $ERED
- ((RES_CONF_FAIL++))
- return 1
- else
- targetJson=$(< $1)
- targetJson="{\"config\":"$targetJson"}"
- echo "TARGET JSON: $targetJson" >> $HTTPLOG
- res=$(python3 ../common/compare_json.py "$targetJson" "$body")
- if [ $res -ne 0 ]; then
- echo -e $RED" FAIL - policy json config read from consul/cbs is not equal to the intended json config...." $ERED
- ((RES_CONF_FAIL++))
- return 1
- else
- echo -e $GREEN" Config loaded ok to consul"$EGREEN
- fi
- fi
-
- echo ""
-
-}
-
-# Start Consul and CBS
-# args: -
-# (Function for test scripts)
-start_consul_cbs() {
-
- echo -e $BOLD"Starting $CONSUL_DISPLAY_NAME and $CBS_DISPLAY_NAME"$EBOLD
- __check_included_image 'CONSUL'
- if [ $? -eq 1 ]; then
- echo -e $RED"The Consul image has not been checked for this test run due to arg to the test script"$ERED
- echo -e $RED"Consul will not be started"$ERED
- exit
- fi
- export CONSUL_APP_NAME
- export CONSUL_INTERNAL_PORT
- export CONSUL_EXTERNAL_PORT
- export CBS_APP_NAME
- export CBS_INTERNAL_PORT
- export CBS_EXTERNAL_PORT
- export CONSUL_HOST
- export CONSUL_DISPLAY_NAME
- export CBS_DISPLAY_NAME
-
- __start_container $CONSUL_CBS_COMPOSE_DIR "" NODOCKERARGS 2 $CONSUL_APP_NAME $CBS_APP_NAME
-
- __check_service_start $CONSUL_APP_NAME $CONSUL_SERVICE_PATH$CONSUL_ALIVE_URL
- __check_service_start $CBS_APP_NAME $CBS_SERVICE_PATH$CBS_ALIVE_URL
-
- echo ""
-}
-
diff --git a/test/common/test_env-onap-guilin.sh b/test/common/test_env-onap-guilin.sh
deleted file mode 100755
index 320600f..0000000
--- a/test/common/test_env-onap-guilin.sh
+++ /dev/null
@@ -1,342 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ONAP guilin release
-TEST_ENV_PROFILE="ONAP-GUILIN"
-FLAVOUR="ONAP"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.onap.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.onap.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.onap.org:10003/"
-NEXUS_STAGING_REPO=$NEXUS_SNAPSHOT_REPO #staging repo not used in ONAP, using snapshot
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ORAN="nexus3.o-ran-sc.org:10002/" # Only for released ORAN images
-NEXUS_RELEASE_REPO_ONAP=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is create by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argurment.
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><mage-name>:<proxy-tag>
-
-
-# A1PMS image and tags
-A1PMS_IMAGE_BASE="onap/ccsdk-oran-a1policymanagementservice"
-A1PMS_IMAGE_TAG_LOCAL="1.0.2-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.2-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="1.0.2-SNAPSHOT" #Will use snapshot repo
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="1.0.2"
-
-
-# Tag for guilin branch
-# SDNC A1 Controller remote image and tag
-SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.0.5-STAGING-latest"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.0.5-STAGING-latest"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.0.4" #Will use snapshot repo
-
-
-#SDNC DB remote image and tag
-#The DB is part of SDNC so handled in the same way as SDNC
-SDNC_DB_IMAGE_BASE="mysql/mysql-server"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="5.6"
-
-
-# Control Panel image and tag - uses bronze release
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.0.0"
-
-
-# Near RT RIC Simulator image and tags - uses bronze release
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.0.0"
-
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-#Http proxy remote image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No local image for http proxy, remote image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.0.3"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.0.4"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.18"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy remote image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#Kube proxy remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS SDNC"
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP RICSIM"
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="" # Not used
-
-########################################
-# Detailed settings per app
-########################################
-
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1" # Supported northbound api versions
-A1PMS_VERSION="V1" # Tested version of northbound API
-A1PMS_API_PREFIX="" # api url prefix, only for V2
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log" # Path the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.onap.ccsdk.oran.a1policymanagementservice" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL="/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904 # MR stub container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908 # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905 # MR stub container internal port (container -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback Reciever"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/"
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-
-SDNC_APP_NAME="a1controller" # Name of the SNDC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282 # SNDC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181 # SNDC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443 # SNDC A1 Controller container external securee port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443 # SNDC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter" # SDNC truststore password
-SDNC_USER="admin" # SDNC username
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SNDC PWD
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SNDC API
-SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SNDC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-SDNC_COMPOSE_FILE="docker-compose.yml"
-SDNC_KUBE_APP_FILE="app.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Non-RT RIC Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_LOGPATH="/logs/nonrtric-controlpanel.log" # Path the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/maven # Container internal path for config
-CONTROL_PANEL_CONFIG_FILE=application.properties # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434 # Http Proxy container internal secure port (container -> container
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434 # Kube Proxy container internal secure port (container -> container
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732 # Kube Http Proxy container external port, doocker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784 # Kube Proxy container external secure port, doocker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733 # Kube Http Proxy container external port, doocker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785 # Kube Proxy container external secure port, doocker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-########################################
-# Setting for common curl-base function
-########################################
-
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
- # Testscript need to set the UUID to use other this empty prefix is used
-
diff --git a/test/common/test_env-onap-honolulu.sh b/test/common/test_env-onap-honolulu.sh
deleted file mode 100755
index a583c80..0000000
--- a/test/common/test_env-onap-honolulu.sh
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ONAP honolulu release
-TEST_ENV_PROFILE="ONAP-HONOLULU"
-FLAVOUR="ONAP"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.onap.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.onap.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.onap.org:10003/"
-NEXUS_STAGING_REPO=$NEXUS_SNAPSHOT_REPO #staging repo not used in ONAP, using snapshot
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ORAN="nexus3.o-ran-sc.org:10002/" # Only for released ORAN images
-NEXUS_RELEASE_REPO_ONAP=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is create by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argurment.
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><mage-name>:<proxy-tag>
-
-#############################################################################
-# Note:
-# The imgage tags for a1pms and sdnc are updated AFTER the release.
-# This means that the latest staging/snapshot images for these two components have
-# version one step (0.0.1 - bug-level) higher than the
-# latest release image version.
-
-# This is only applicable for ONAP images
-#############################################################################
-
-# A1PMS image and tags
-A1PMS_IMAGE_BASE="onap/ccsdk-oran-a1policymanagementservice"
-A1PMS_IMAGE_TAG_LOCAL="1.1.2-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.2-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="1.1.2-STAGING-latest" #Will use snapshot repo
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="1.1.1"
-
-# SDNC A1 Controller remote image and tag
-SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
-SDNC_A1_CONTROLLER_IMAGE_TAG_LOCAL="2.1.7-SNAPSHOT" ###CHECK THIS
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.7-STAGING-latest"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.1.7-STAGING-latest" #Will use snapshot repo
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.1.6"
-
-#SDNC DB remote image and tag
-#The DB is part of SDNC so handled in the same way as SDNC
-SDNC_DB_IMAGE_BASE="mariadb"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-
-# ICS image and tag - uses cherry release
-ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
-#Note: Update var ICS_FEATURE_LEVEL if image version is changed
-
-# Control Panel image and tag - uses cherry release
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.1.1"
-
-
-# RAPP Catalogue image and tags - uses cherry release
-RAPP_CAT_IMAGE_BASE="o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
-
-
-# Near RT RIC Simulator image and tags - uses cherry release
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.1.0"
-
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-
-#Producer stub image and tag
-PROD_STUB_IMAGE_BASE="producer-stub"
-PROD_STUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for producer stub, local image always used
-
-
-#Http proxy remote image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No local image for http proxy, remote image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.0.3"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.0.4"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.18"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy remote image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#Kube proxy remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS SDNC"
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC"
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="" # Not used
-
-
-########################################
-# Detailed settings per app
-########################################
-
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1 V2" # Supported northbound api versions
-A1PMS_VERSION="V2" # Tested version of northbound API
-A1PMS_API_PREFIX="/a1-policy" # api url prefix, only for V2. Shall contain leading "/"
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log" # Path the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.onap.ccsdk.oran.a1policymanagementservice" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL="$A1PMS_API_PREFIX/v2/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-ICS_APP_NAME="informationservice" # Name for ICS container
-ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
-ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
-ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
-ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
-ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
-
-ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
-ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
-ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
-ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
-ICS_CERT_MOUNT_DIR="./cert"
-ICS_ALIVE_URL="/status" # Base path for alive check
-ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
-ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
-ICS_CONFIG_FILE=application.yaml # Config file name
-ICS_VERSION="V1-2" # Version where the types are added in the producer registration
-ICS_FEATURE_LEVEL="" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904                             # MR DMAAP container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908                              # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905                      # MR DMAAP container external secure port (host -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
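For orientation, a hedged sketch of how the read/write paths above might be exercised manually against a locally exposed MR (host and port taken from the variables above; the actual calls made by the test scripts may differ):

    # Hypothetical manual check of the DMaaP MR paths defined above
    curl -s "http://localhost:3904/events/A1-POLICY-AGENT-WRITE" \
         -H "Content-Type: application/json" -d '{"example":"message"}'
    curl -s "http://localhost:3904/events/A1-POLICY-AGENT-READ/users/policy-agent?timeout=15000&limit=100"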
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback Receiver"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from MR delivered as string-encoded JSON objects in a JSON array)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
-PROD_STUB_DISPLAY_NAME="Producer Stub"
-PROD_STUB_EXTERNAL_PORT=8092 # Producer stub container external port (host -> container)
-PROD_STUB_INTERNAL_PORT=8092 # Producer stub container internal port (container -> container)
-PROD_STUB_EXTERNAL_SECURE_PORT=8093 # Producer stub container external secure port (host -> container)
-PROD_STUB_INTERNAL_SECURE_PORT=8093 # Producer stub container internal secure port (container -> container)
-PROD_STUB_JOB_CALLBACK="/callbacks/job" # Callback path for job create/update/delete
-PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision"  # Callback path for producer supervision
-PROD_STUB_ALIVE_URL="/" # Base path for alive check
-PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/"
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-
-SDNC_APP_NAME="a1controller"                             # Name of the SDNC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282                                  # SDNC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181                                  # SDNC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container external secure port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter" # SDNC truststore password
-SDNC_USER="admin" # SDNC username
-SDNC_PWD="admin"                                         # SDNC PWD
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"   # SDNC PWD
-#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:"        # Base url path for SDNC API (for upgraded sdnc)
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"      # Base url path for SDNC API
-SDNC_ALIVE_URL="/apidoc/explorer/"                       # Base url path for SDNC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc"
-SDNC_COMPOSE_FILE="docker-compose-2.yml"
-SDNC_KUBE_APP_FILE="app2.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-SDNC_FEATURE_LEVEL="" # Space separated list of features
-
-RAPP_CAT_APP_NAME="rappcatalogueservice" # Name for the RAPP Catalogue
-RAPP_CAT_DISPLAY_NAME="RAPP Catalogue Service"
-RAPP_CAT_EXTERNAL_PORT=8680 # RAPP Catalogue container external port (host -> container)
-RAPP_CAT_INTERNAL_PORT=8680 # RAPP Catalogue container internal port (container -> container)
-RAPP_CAT_EXTERNAL_SECURE_PORT=8633 # RAPP Catalogue container external secure port (host -> container)
-RAPP_CAT_INTERNAL_SECURE_PORT=8633 # RAPP Catalogue container internal secure port (container -> container)
-RAPP_CAT_ALIVE_URL="/services" # Base path for alive check
-RAPP_CAT_COMPOSE_DIR="rapp_catalogue" # Dir in simulator_group for docker-compose
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Non-RT RIC Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880                  # Control Panel container external secure port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082                  # Control Panel container internal secure port (container -> container)
-CONTROL_PANEL_LOGPATH="/logs/nonrtric-controlpanel.log"  # Path to the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/maven # Container internal path for config
-CONTROL_PANEL_CONFIG_FILE=application.properties # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container)
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-########################################
-# Setting for common curl-base function
-########################################
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
-                                                         # The test script needs to set the UUID; otherwise this empty prefix is used
diff --git a/test/common/test_env-onap-istanbul.sh b/test/common/test_env-onap-istanbul.sh
deleted file mode 100644
index 7e1c755..0000000
--- a/test/common/test_env-onap-istanbul.sh
+++ /dev/null
@@ -1,428 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2021 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ONAP Istanbul release
-TEST_ENV_PROFILE="ONAP-ISTANBUL"
-FLAVOUR="ONAP"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.onap.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.onap.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.onap.org:10003/"
-NEXUS_STAGING_REPO=$NEXUS_SNAPSHOT_REPO #staging repo not used in ONAP, using snapshot
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ORAN="nexus3.o-ran-sc.org:10002/" # Only for released ORAN images
-NEXUS_RELEASE_REPO_ONAP=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is created by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argument.
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><image-name>:<proxy-tag>
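To illustrate the naming convention above, here is a minimal sketch of how such a variable could be derived (the variable choices and logic are assumptions for illustration only, not the test script's actual implementation):

    # Hypothetical sketch: derive A1PMS_IMAGE from A1PMS_IMAGE_BASE plus a selected repo and tag
    __base_var="A1PMS_IMAGE_BASE"
    __image_var="${__base_var%_BASE}"         # strip the "_BASE" suffix -> A1PMS_IMAGE
    __repo="$NEXUS_RELEASE_REPO"              # repo selected to match the chosen tag type
    __tag="$A1PMS_IMAGE_TAG_REMOTE_RELEASE"   # tag selected based on the cmd line argument
    export "${__image_var}=${__repo}${A1PMS_IMAGE_BASE}:${__tag}"
    # e.g. A1PMS_IMAGE=nexus3.onap.org:10002/onap/ccsdk-oran-a1policymanagementservice:1.2.5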
-
-#############################################################################
-# Note:
-# The image tags for a1pms and sdnc are updated AFTER the release.
-# This means that the latest staging/snapshot images for these two components have
-# version one step (0.0.1 - bug-level) higher than the
-# latest release image version.
-
-# This is only applicable for ONAP images
-#############################################################################
-
-# A1PMS image and tags
-A1PMS_IMAGE_BASE="onap/ccsdk-oran-a1policymanagementservice"
-A1PMS_IMAGE_TAG_LOCAL="1.2.6-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.6-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="1.2.6-STAGING-latest" #Will use snapshot repo
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="1.2.5"
-
-# SDNC A1 Controller remote image and tag
-SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
-SDNC_A1_CONTROLLER_IMAGE_TAG_LOCAL="2.2.6-SNAPSHOT" ###CHECK THIS
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.2.6-STAGING-latest"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.2.6-STAGING-latest" #Will use snapshot repo
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.2.5"
-
-#SDNC DB remote image and tag
-#The DB is part of SDNC so handled in the same way as SDNC
-SDNC_DB_IMAGE_BASE="mariadb"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-
-# ICS image and tag - uses d release
-ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.1.0"
-#Note: Update var ICS_FEATURE_LEVEL if image version is changed
-
-# Control Panel image and tag - uses d release
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.2.0"
-
-# Gateway image and tags - uses d release
-NRT_GATEWAY_IMAGE_BASE="o-ran-sc/nonrtric-gateway"
-NRT_GATEWAY_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.0"
-
-# RAPP Catalogue image and tags - uses d release
-RAPP_CAT_IMAGE_BASE="o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
-
-
-# Near RT RIC Simulator image and tags - uses d release
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.1.0"
-
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-
-#Producer stub image and tag
-PROD_STUB_IMAGE_BASE="producer-stub"
-PROD_STUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for producer stub, local image always used
-
-
-#Http proxy image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for http proxy, local image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.1.0"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.1"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.3.0"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#PVC cleaner remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS SDNC"
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC NGW"
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="" # Not used
-
-
-########################################
-# Detailed settings per app
-########################################
-
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1 V2" # Supported northbound api versions
-A1PMS_VERSION="V2" # Tested version of northbound API
-A1PMS_API_PREFIX="/a1-policy" # api url prefix, only for V2. Shall contain leading "/"
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log"   # Path to the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.onap.ccsdk.oran.a1policymanagementservice" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL="$A1PMS_API_PREFIX/v2/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-ICS_APP_NAME="informationservice" # Name for ICS container
-ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
-ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
-ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
-ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
-ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
-
-ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
-ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
-ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
-ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
-ICS_CERT_MOUNT_DIR="./cert"
-ICS_ALIVE_URL="/status" # Base path for alive check
-ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
-ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
-ICS_CONFIG_FILE=application.yaml # Config file name
-ICS_VERSION="V1-2" # Version where the types are added in the producer registration
-ICS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904                             # MR DMAAP container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908                              # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905                      # MR DMAAP container external secure port (host -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs1" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback Receiver"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from MR delivered as string-encoded JSON objects in a JSON array)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
-PROD_STUB_DISPLAY_NAME="Producer Stub"
-PROD_STUB_EXTERNAL_PORT=8092 # Producer stub container external port (host -> container)
-PROD_STUB_INTERNAL_PORT=8092 # Producer stub container internal port (container -> container)
-PROD_STUB_EXTERNAL_SECURE_PORT=8093 # Producer stub container external secure port (host -> container)
-PROD_STUB_INTERNAL_SECURE_PORT=8093 # Producer stub container internal secure port (container -> container)
-PROD_STUB_JOB_CALLBACK="/callbacks/job" # Callback path for job create/update/delete
-PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision"  # Callback path for producer supervision
-PROD_STUB_ALIVE_URL="/" # Base path for alive check
-PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/"
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-
-SDNC_APP_NAME="a1controller"                             # Name of the SDNC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282                                  # SDNC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181                                  # SDNC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container external secure port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter" # SDNC truststore password
-SDNC_USER="admin" # SDNC username
-SDNC_PWD="admin"                                         # SDNC PWD
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"   # SDNC PWD
-#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:"        # Base url path for SDNC API (for upgraded sdnc)
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"      # Base url path for SDNC API
-SDNC_ALIVE_URL="/apidoc/explorer/"                       # Base url path for SDNC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc"
-SDNC_COMPOSE_FILE="docker-compose-2.yml"
-SDNC_KUBE_APP_FILE="app2.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-SDNC_FEATURE_LEVEL="TRANS_RESP_CODE" # Space separated list of features
-                                                         # TRANS_RESP_CODE: SDNC returns the southbound response code
-
-RAPP_CAT_APP_NAME="rappcatalogueservice" # Name for the RAPP Catalogue
-RAPP_CAT_DISPLAY_NAME="RAPP Catalogue Service"
-RAPP_CAT_EXTERNAL_PORT=8680 # RAPP Catalogue container external port (host -> container)
-RAPP_CAT_INTERNAL_PORT=8680 # RAPP Catalogue container internal port (container -> container)
-RAPP_CAT_EXTERNAL_SECURE_PORT=8633 # RAPP Catalogue container external secure port (host -> container)
-RAPP_CAT_INTERNAL_SECURE_PORT=8633 # RAPP Catalogue container internal secure port (container -> container)
-RAPP_CAT_ALIVE_URL="/services" # Base path for alive check
-RAPP_CAT_COMPOSE_DIR="rapp_catalogue" # Dir in simulator_group for docker-compose
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Non-RT RIC Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880                  # Control Panel container external secure port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082                  # Control Panel container internal secure port (container -> container)
-CONTROL_PANEL_LOGPATH="/logs/nonrtric-controlpanel.log"  # Path to the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/maven # Container internal path for config
-CONTROL_PANEL_CONFIG_FILE=application.properties # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-
-NRT_GATEWAY_APP_NAME="nonrtricgateway" # Name of the Gateway container
-NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
-NRT_GATEWAY_EXTERNAL_PORT=9090 # Gateway container external port (host -> container)
-NRT_GATEWAY_INTERNAL_PORT=9090 # Gateway container internal port (container -> container)
-NRT_GATEWAY_EXTERNAL_SECURE_PORT=9091                    # Gateway container external secure port (host -> container)
-NRT_GATEWAY_INTERNAL_SECURE_PORT=9091                    # Gateway container internal secure port (container -> container)
-NRT_GATEWAY_LOGPATH="/var/log/nonrtric-gateway/application.log" # Path to the application log in the Gateway container
-NRT_GATEWAY_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-NRT_GATEWAY_ALIVE_URL="/actuator/metrics" # Base path for alive check
-NRT_GATEWAY_COMPOSE_DIR="ngw" # Dir in simulator_group for docker-compose
-NRT_GATEWAY_CONFIG_MOUNT_PATH=/opt/app/nonrtric-gateway/config # Container internal path for config
-NRT_GATEWAY_CONFIG_FILE=application.yaml # Config file name
-NRT_GATEWAY_PKG_NAME="org.springframework.cloud.gateway" # Java base package name
-NRT_GATEWAY_ACTUATOR="/actuator/loggers/$NRT_GATEWAY_PKG_NAME" # Url for trace/debug
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container)
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-########################################
-# Setting for common curl-base function
-########################################
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
-                                                         # The test script needs to set the UUID; otherwise this empty prefix is used
diff --git a/test/common/test_env-onap-jakarta.sh b/test/common/test_env-onap-jakarta.sh
index 6eace0b..7383ddf 100644
--- a/test/common/test_env-onap-jakarta.sh
+++ b/test/common/test_env-onap-jakarta.sh
@@ -202,7 +202,7 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
diff --git a/test/common/test_env-onap-kohn.sh b/test/common/test_env-onap-kohn.sh
index 1aad454..cb6c1f7 100644
--- a/test/common/test_env-onap-kohn.sh
+++ b/test/common/test_env-onap-kohn.sh
@@ -202,7 +202,7 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
diff --git a/test/common/test_env-onap-london.sh b/test/common/test_env-onap-london.sh
index 4f0a35b..dd4c5b3 100644
--- a/test/common/test_env-onap-london.sh
+++ b/test/common/test_env-onap-london.sh
@@ -209,7 +209,9 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
+#Preparation for DMAAP removal
+#A1PMS_FEATURE_LEVEL="NO-DMAAP" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
diff --git a/test/common/test_env-oran-cherry.sh b/test/common/test_env-oran-cherry.sh
deleted file mode 100755
index 311ab14..0000000
--- a/test/common/test_env-oran-cherry.sh
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ORAN Cherry
-TEST_ENV_PROFILE="ORAN-CHERRY"
-FLAVOUR="ORAN"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.o-ran-sc.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.o-ran-sc.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.o-ran-sc.org:10003/"
-NEXUS_STAGING_REPO="nexus3.o-ran-sc.org:10004/"
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ONAP="nexus3.onap.org:10002/" # Only for released ONAP images
-NEXUS_RELEASE_REPO_ORAN=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is created by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argument.
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><image-name>:<proxy-tag>
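A small, hypothetical helper illustrating the tag categories listed above (the function name and logic are assumptions for illustration, not the framework's actual code):

    # Hypothetical: choose repo prefix per image source category (tags #1, #5, #6, #7)
    select_repo() {
      case "$1" in
        LOCAL)        echo "" ;;                          # tag #1 - no repo prefix for locally built images
        ORAN_RELEASE) echo "$NEXUS_RELEASE_REPO_ORAN" ;;  # tag #5
        ONAP_RELEASE) echo "$NEXUS_RELEASE_REPO_ONAP" ;;  # tag #6
        PROXY)        echo "$NEXUS_PROXY_REPO" ;;         # tag #7
      esac
    }
    # e.g. "$(select_repo ONAP_RELEASE)onap/dmaap/dmaap-mr:1.1.18"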
-
-
-# A1PMS base image and tags
-A1PMS_IMAGE_BASE="o-ran-sc/nonrtric-policy-agent"
-A1PMS_IMAGE_TAG_LOCAL="2.1.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="2.1.1"
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="2.1.1"
-
-# ICS image and tags
-ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ICS_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE="1.0.1"
-ICS_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
-
-
-# Control Panel image and tags
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_LOCAL="2.1.1-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.1-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE="2.1.1"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE="2.1.1"
-
-
-# SDNC A1 Controller image and tags
-SDNC_A1_CONTROLLER_IMAGE_BASE="o-ran-sc/nonrtric-a1-controller"
-SDNC_A1_CONTROLLER_IMAGE_TAG_LOCAL="2.0.1-SNAPSHOT"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.0.1-SNAPSHOT"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.0.1"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.0.1"
-
-
-#SDNC DB remote image and tag
-SDNC_DB_IMAGE_BASE="mysql/mysql-server"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="5.6"
-#No local image for SDNC DB, remote image always used
-
-
-# RAPP Catalogue image and tags
-RAPP_CAT_IMAGE_BASE="o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_CAT_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE="1.0.1"
-RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
-
-
-# Near RT RIC Simulator image and tags
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_LOCAL="latest"
-RIC_SIM_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.0-SNAPSHOT"
-RIC_SIM_IMAGE_TAG_REMOTE="2.1.0"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE="2.1.0"
-
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-
-#Producer stub image and tag
-PROD_STUB_IMAGE_BASE="producer-stub"
-PROD_STUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for producer stub, local image always used
-
-#Http proxy image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for http proxy, local image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.0.3"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.0.4"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.18"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#PVC cleaner remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS ICS CP SDNC RC RICSIM"
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="" # Not used
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="CBS DMAAPMR"
-
-
-########################################
-# Detailed settings per app
-########################################
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1 V2" # Supported northbound api versions
-A1PMS_VERSION="V2" # Tested version of northbound API
-A1PMS_API_PREFIX="/a1-policy" # api url prefix, only for V2
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log"   # Path to the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.oransc.policyagent" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL=$A1PMS_API_PREFIX"/v2/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-ICS_APP_NAME="informationservice" # Name for ICS container
-ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
-ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
-ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
-ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
-ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
-
-ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
-ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
-ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
-ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
-ICS_CERT_MOUNT_DIR="./cert"
-ICS_ALIVE_URL="/status" # Base path for alive check
-ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
-ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
-ICS_CONFIG_FILE=application.yaml # Config file name
-ICS_VERSION="V1-2" # Version where the types are added in the producer registration
-ICS_FEATURE_LEVEL="" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904                             # MR DMAAP container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908                              # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905                      # MR DMAAP container external secure port (host -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback Receiver"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from MR delivered as string-encoded JSON objects in a JSON array)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
-PROD_STUB_DISPLAY_NAME="Producer Stub"
-PROD_STUB_EXTERNAL_PORT=8092 # Producer stub container external port (host -> container)
-PROD_STUB_INTERNAL_PORT=8092 # Producer stub container internal port (container -> container)
-PROD_STUB_EXTERNAL_SECURE_PORT=8093 # Producer stub container external secure port (host -> container)
-PROD_STUB_INTERNAL_SECURE_PORT=8093 # Producer stub container internal secure port (container -> container)
-PROD_STUB_JOB_CALLBACK="/callbacks/job" # Callback path for job create/update/delete
-PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision"  # Callback path for producer supervision
-PROD_STUB_ALIVE_URL="/" # Base path for alive check
-PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/" # Base path for alive check
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-
-SDNC_APP_NAME="a1controller"                             # Name of the SDNC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282                                  # SDNC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181                                  # SDNC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container external secure port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb"                                # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD=""                           # SDNC truststore password
-SDNC_USER="admin"                                        # SDNC username
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"   # SDNC password
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"      # Base url path for SDNC API
-SDNC_ALIVE_URL="/apidoc/explorer/"                       # Base url path for SDNC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-SDNC_COMPOSE_FILE="docker-compose.yml"
-SDNC_KUBE_APP_FILE="app.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-
-RAPP_CAT_APP_NAME="rappcatalogueservice" # Name for the RAPP Catalogue
-RAPP_CAT_DISPLAY_NAME="RAPP Catalogue Service"
-RAPP_CAT_EXTERNAL_PORT=8680 # RAPP Catalogue container external port (host -> container)
-RAPP_CAT_INTERNAL_PORT=8680 # RAPP Catalogue container internal port (container -> container)
-RAPP_CAT_EXTERNAL_SECURE_PORT=8633 # RAPP Catalogue container external secure port (host -> container)
-RAPP_CAT_INTERNAL_SECURE_PORT=8633 # RAPP Catalogue container internal secure port (container -> container)
-RAPP_CAT_ALIVE_URL="/services" # Base path for alive check
-RAPP_CAT_COMPOSE_DIR="rapp_catalogue" # Dir in simulator_group for docker-compose
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Non-RT RIC Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880                  # Control Panel container external secure port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082                  # Control Panel container internal secure port (container -> container)
-CONTROL_PANEL_LOGPATH="/logs/nonrtric-controlpanel.log"  # Path to the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/maven # Container internal path for config
-CONTROL_PANEL_CONFIG_FILE=application.properties # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container)
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-########################################
-# Setting for common curl-base function
-########################################
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
-                                                         # The test script needs to set the UUID, otherwise this empty prefix is used
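-# Illustrative sketch (not part of the original profile): a test script would typically generate
-# its own UUID-like prefix before creating policies. The values below are hypothetical; any unique
-# prefix works, the only requirement is that the final policy id is unique.
-#
-#   UUID="2d89aaaa-1111-2222-3333-"        # hypothetical prefix set by the test script
-#   policy_id="${UUID}5000"                # -> "2d89aaaa-1111-2222-3333-5000"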
diff --git a/test/common/test_env-oran-d-release.sh b/test/common/test_env-oran-d-release.sh
deleted file mode 100755
index 1f73595..0000000
--- a/test/common/test_env-oran-d-release.sh
+++ /dev/null
@@ -1,472 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ORAN D-Release
-TEST_ENV_PROFILE="ORAN-D-RELEASE"
-FLAVOUR="ORAN"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.o-ran-sc.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.o-ran-sc.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.o-ran-sc.org:10003/"
-NEXUS_STAGING_REPO="nexus3.o-ran-sc.org:10004/"
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ONAP="nexus3.onap.org:10002/" # Only for released ONAP images
-NEXUS_RELEASE_REPO_ORAN=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is created by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argument (see the sketch after the tag list).
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><image-name>:<proxy-tag>
-
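-# Illustrative sketch (not part of the original profile) of the derivation described in the note
-# above. The variables referenced are the ones defined in this file; the assignments themselves are
-# hypothetical examples of what the test script ends up with, depending on the cmd line arguments:
-#
-#   A1PMS_IMAGE="${A1PMS_IMAGE_BASE}:${A1PMS_IMAGE_TAG_LOCAL}"                                  # local image
-#   A1PMS_IMAGE="${NEXUS_SNAPSHOT_REPO}${A1PMS_IMAGE_BASE}:${A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT}"  # remote snapshot image
-#   A1PMS_IMAGE="${NEXUS_RELEASE_REPO}${A1PMS_IMAGE_BASE}:${A1PMS_IMAGE_TAG_REMOTE_RELEASE}"    # release image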
-
-# A1PMS base image and tags
-A1PMS_IMAGE_BASE="o-ran-sc/nonrtric-policy-agent"
-A1PMS_IMAGE_TAG_LOCAL="2.2.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="2.2.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="2.2.1"
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="2.2.1"
-
-# ICS image and tags
-ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ICS_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE="1.1.0"
-ICS_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
-#Note: Update var ICS_FEATURE_LEVEL if image version is changed
-
-#Control Panel image and tags
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_LOCAL="2.2.0-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_SNAPSHOT="2.2.0-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE="2.2.0"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE="2.2.0"
-
-
-# Gateway image and tags
-NRT_GATEWAY_IMAGE_BASE="o-ran-sc/nonrtric-gateway"
-NRT_GATEWAY_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT"
-NRT_GATEWAY_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT"
-NRT_GATEWAY_IMAGE_TAG_REMOTE="1.0.0"
-NRT_GATEWAY_IMAGE_TAG_REMOTE_RELEASE="1.0.0"
-
-
-# SDNC A1 Controller image and tags - Note using Honolulu ONAP image
-SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.1.6"
-#No local image for ONAP SDNC, remote release image always used
-
-# ORAN SDNC adapter kept as reference
-# SDNC A1 Controller image and tags - still using cherry version, no new version for D-Release
-#SDNC_A1_CONTROLLER_IMAGE_BASE="o-ran-sc/nonrtric-a1-controller"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_LOCAL="2.0.1-SNAPSHOT"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.0.1-SNAPSHOT"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.0.1"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.0.1"
-
-#SDNC DB remote image and tag
-#The DB is part of SDNC so handled in the same way as SDNC
-SDNC_DB_IMAGE_BASE="mariadb"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-
-#Older SDNC db image kept for reference
-#SDNC DB remote image and tag
-#SDNC_DB_IMAGE_BASE="mysql/mysql-server"
-#SDNC_DB_IMAGE_TAG_REMOTE_PROXY="5.6"
-#No local image for SDNC DB, remote image always used
-
-
-# RAPP Catalogue image and tags
-RAPP_CAT_IMAGE_BASE="o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_CAT_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE="1.0.1"
-RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
-
-
-# Near RT RIC Simulator image and tags - same version as cherry
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_LOCAL="latest"
-RIC_SIM_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.0-SNAPSHOT"
-RIC_SIM_IMAGE_TAG_REMOTE="2.1.0"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE="2.1.0"
-
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-
-#Producer stub image and tag
-PROD_STUB_IMAGE_BASE="producer-stub"
-PROD_STUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for producer stub, local image always used
-
-#Http proxy remote image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for http proxy, local image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.0.3"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.0.4"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.18"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy remote image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#PVC Cleaner remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS ICS CP RC RICSIM NGW" # Add SDNC here if oran image is used
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="" # Not used
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="CBS DMAAPMR SDNC" # SDNC added as ONAP image
-
-
-########################################
-# Detailed settings per app
-########################################
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1 V2" # Supported northbound api versions
-A1PMS_VERSION="V2" # Tested version of northbound API
-A1PMS_API_PREFIX="/a1-policy" # api url prefix, only for V2
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log"   # Path to the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.onap.ccsdk.oran.a1policymanagementservice" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL="$A1PMS_API_PREFIX/v2/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-ICS_APP_NAME="informationservice" # Name for ICS container
-ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
-ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
-ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
-ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
-ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
-
-ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
-ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
-ICS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
-ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
-ICS_CERT_MOUNT_DIR="./cert"
-ICS_ALIVE_URL="/status" # Base path for alive check
-ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
-ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
-ICS_CONFIG_FILE=application.yaml # Config file name
-ICS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
-ICS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904                             # MR dmaap container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908 # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905                      # MR dmaap container external secure port (host -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback receiver"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
-PROD_STUB_DISPLAY_NAME="Producer Stub"
-PROD_STUB_EXTERNAL_PORT=8092 # Producer stub container external port (host -> container)
-PROD_STUB_INTERNAL_PORT=8092 # Producer stub container internal port (container -> container)
-PROD_STUB_EXTERNAL_SECURE_PORT=8093 # Producer stub container external secure port (host -> container)
-PROD_STUB_INTERNAL_SECURE_PORT=8093 # Producer stub container internal secure port (container -> container)
-PROD_STUB_JOB_CALLBACK="/callbacks/job" # Callback path for job create/update/delete
-PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision"  # Callback path for producer supervision
-PROD_STUB_ALIVE_URL="/" # Base path for alive check
-PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/" # Base path for alive check
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-
-# Kept as reference for oran a1 adapter
-# SDNC_APP_NAME="a1controller"                           # Name of the SDNC A1 Controller container
-# SDNC_DISPLAY_NAME="SDNC A1 Controller"
-# SDNC_EXTERNAL_PORT=8282                                # SDNC A1 Controller container external port (host -> container)
-# SDNC_INTERNAL_PORT=8181                                # SDNC A1 Controller container internal port (container -> container)
-# SDNC_EXTERNAL_SECURE_PORT=8443                         # SDNC A1 Controller container external secure port (host -> container)
-# SDNC_INTERNAL_SECURE_PORT=8443                         # SDNC A1 Controller container internal secure port (container -> container)
-# SDNC_DB_APP_NAME="sdncdb"                              # Name of the SDNC DB container
-# SDNC_A1_TRUSTSTORE_PASSWORD=""                         # SDNC truststore password
-# SDNC_USER="admin"                                      # SDNC username
-# SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SDNC password
-# SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"    # Base url path for SDNC API
-# SDNC_ALIVE_URL="/apidoc/explorer/"                     # Base url path for SDNC API docs (for alive check)
-# SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-# SDNC_COMPOSE_FILE="docker-compose.yml"
-# SDNC_KUBE_APP_FILE="app.yaml"
-# SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-# SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-
-# For ONAP SDNC
-SDNC_APP_NAME="a1controller"                             # Name of the SDNC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282                                  # SDNC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181                                  # SDNC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container external secure port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb"                                # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter"                  # SDNC truststore password
-SDNC_USER="admin"                                        # SDNC username
-#SDNC_PWD="admin"                                        # SDNC password (superseded by the value below)
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"   # SDNC password
-#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:"        # Base url path for SDNC API (for upgraded sdnc)
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"      # Base url path for SDNC API
-SDNC_ALIVE_URL="/apidoc/explorer/"                       # Base url path for SDNC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc"
-SDNC_COMPOSE_FILE="docker-compose-2.yml"
-SDNC_KUBE_APP_FILE="app2.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-SDNC_FEATURE_LEVEL="" # Space separated list of features
-
-RAPP_CAT_APP_NAME="rappcatalogueservice" # Name for the RAPP Catalogue
-RAPP_CAT_DISPLAY_NAME="RAPP Catalogue"
-RAPP_CAT_EXTERNAL_PORT=8680 # RAPP Catalogue container external port (host -> container)
-RAPP_CAT_INTERNAL_PORT=8680 # RAPP Catalogue container internal port (container -> container)
-RAPP_CAT_EXTERNAL_SECURE_PORT=8633 # RAPP Catalogue container external secure port (host -> container)
-RAPP_CAT_INTERNAL_SECURE_PORT=8633 # RAPP Catalogue container internal secure port (container -> container)
-RAPP_CAT_ALIVE_URL="/services" # Base path for alive check
-RAPP_CAT_COMPOSE_DIR="rapp_catalogue" # Dir in simulator_group for docker-compose
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880                  # Control Panel container external secure port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082                  # Control Panel container internal secure port (container -> container)
-CONTROL_PANEL_LOGPATH="/var/log/nonrtric-gateway/application.log"  # Path to the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_FILE=nginx.conf # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/etc/nginx # Container internal path for config
-CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s" #nginx resolver for kube
-CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11" # nginx resolver for docker
-CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/" # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/" # Path prefix for forwarding ics calls to NGW
-CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/" # Path prefix for forwarding ics calls to NGW
-
-NRT_GATEWAY_APP_NAME="nonrtricgateway" # Name of the Gateway container
-NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
-NRT_GATEWAY_EXTERNAL_PORT=9090 # Gateway container external port (host -> container)
-NRT_GATEWAY_INTERNAL_PORT=9090 # Gateway container internal port (container -> container)
-NRT_GATEWAY_EXTERNAL_SECURE_PORT=9091                    # Gateway container external secure port (host -> container)
-NRT_GATEWAY_INTERNAL_SECURE_PORT=9091                    # Gateway container internal secure port (container -> container)
-NRT_GATEWAY_LOGPATH="/var/log/nonrtric-gateway/application.log" # Path to the application log in the Gateway container
-NRT_GATEWAY_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-NRT_GATEWAY_ALIVE_URL="/actuator/metrics" # Base path for alive check
-NRT_GATEWAY_COMPOSE_DIR="ngw" # Dir in simulator_group for docker-compose
-NRT_GATEWAY_CONFIG_MOUNT_PATH=/opt/app/nonrtric-gateway/config # Container internal path for config
-NRT_GATEWAY_CONFIG_FILE=application.yaml # Config file name
-NRT_GATEWAY_PKG_NAME="org.springframework.cloud.gateway" # Java base package name
-NRT_GATEWAY_ACTUATOR="/actuator/loggers/$NRT_GATEWAY_PKG_NAME" # Url for trace/debug
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container)
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-########################################
-# Setting for common curl-base function
-########################################
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
-                                                         # The test script needs to set the UUID, otherwise this empty prefix is used
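-# Illustrative sketch (not part of the original profile): one hypothetical way to derive the prefix
-# from a freshly generated UUID, keeping room for a numeric suffix:
-#
-#   UUID="$(uuidgen | cut -c 1-31)"        # hypothetical; drops the last chars of a generated UUID
-#   policy_id="${UUID}12345"               # the test script appends its own policy index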
diff --git a/test/common/test_env-oran-e-release.sh b/test/common/test_env-oran-e-release.sh
deleted file mode 100755
index e534531..0000000
--- a/test/common/test_env-oran-e-release.sh
+++ /dev/null
@@ -1,585 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-#Profile for ORAN E-Release
-TEST_ENV_PROFILE="ORAN-E-RELEASE"
-FLAVOUR="ORAN"
-
-########################################
-## Nexus repo settings
-########################################
-
-# Nexus repos for developed images
-NEXUS_PROXY_REPO="nexus3.o-ran-sc.org:10001/"
-NEXUS_RELEASE_REPO="nexus3.o-ran-sc.org:10002/"
-NEXUS_SNAPSHOT_REPO="nexus3.o-ran-sc.org:10003/"
-NEXUS_STAGING_REPO="nexus3.o-ran-sc.org:10004/"
-
-# Nexus repos for images used by test (not developed by the project)
-NEXUS_RELEASE_REPO_ONAP="nexus3.onap.org:10002/" # Only for released ONAP images
-NEXUS_RELEASE_REPO_ORAN=$NEXUS_RELEASE_REPO
-
-########################################
-# Set up of image and tags for the test.
-########################################
-
-# NOTE: One environment variable containing the image name and tag is created by the test script
-# for each image from the env variables below.
-# The variable is created by removing the suffix "_BASE" from the base image variable name.
-# Example: A1PMS_IMAGE_BASE -> A1PMS_IMAGE
-# This var will point to the local or remote image depending on cmd line arguments.
-# In addition, the repo and the image tag version are selected from the list of image tags based on the cmd line argument (see the sketch after the tag list).
-# For images built by the script, only tag #1 shall be specified
-# For project images, only tag #1, #2, #3 and #4 shall be specified
-# For ORAN images (non project), only tag #5 shall be specified
-# For ONAP images (non project), only tag #6 shall be specified
-# For all other images, only tag #7 shall be specified
-# 1 XXX_LOCAL: local images: <image-name>:<local-tag>
-# 2 XXX_REMOTE_SNAPSHOT: snapshot images: <snapshot-nexus-repo><image-name>:<snapshot-tag>
-# 3 XXX_REMOTE: staging images: <staging-nexus-repo><image-name>:<staging-tag>
-# 4 XXX_REMOTE_RELEASE: release images: <release-nexus-repo><image-name>:<release-tag>
-# 5 XXX_REMOTE_RELEASE_ORAN: ORAN release images: <oran-release-nexus-repo><image-name>:<release-tag>
-# 6 XXX_REMOTE_RELEASE_ONAP: ONAP release images: <onap-release-nexus-repo><image-name>:<release-tag>
-# 7 XXX_PROXY: other images, not produced by the project: <proxy-nexus-repo><image-name>:<proxy-tag>
-
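-# Illustrative sketch (not part of the original profile) of the "_BASE" suffix stripping described
-# in the note above. The generic expansion below is a hypothetical example of how the test script
-# can derive the final variable name; only A1PMS_IMAGE_BASE, the staging repo and the staging tag
-# defined in this file are reused here:
-#
-#   base_var="A1PMS_IMAGE_BASE"
-#   image_var="${base_var%_BASE}"                                                            # -> "A1PMS_IMAGE"
-#   declare "${image_var}=${NEXUS_STAGING_REPO}${A1PMS_IMAGE_BASE}:${A1PMS_IMAGE_TAG_REMOTE}" # staging image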
-
-# A1PMS base image and tags
-A1PMS_IMAGE_BASE="o-ran-sc/nonrtric-a1-policy-management-service"
-A1PMS_IMAGE_TAG_LOCAL="2.3.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE_SNAPSHOT="2.3.1-SNAPSHOT"
-A1PMS_IMAGE_TAG_REMOTE="2.3.1"
-A1PMS_IMAGE_TAG_REMOTE_RELEASE="2.3.1"
-
-# ICS image and tags
-ICS_IMAGE_BASE="o-ran-sc/nonrtric-information-coordinator-service"
-ICS_IMAGE_TAG_LOCAL="1.2.1-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.1-SNAPSHOT"
-ICS_IMAGE_TAG_REMOTE="1.2.1"
-ICS_IMAGE_TAG_REMOTE_RELEASE="1.2.1"
-#Note: Update var ICS_FEATURE_LEVEL if image version is changed
-
-#Control Panel image and tags
-CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG_LOCAL="2.3.0-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_SNAPSHOT="2.3.0-SNAPSHOT"
-CONTROL_PANEL_IMAGE_TAG_REMOTE="2.3.0"
-CONTROL_PANEL_IMAGE_TAG_REMOTE_RELEASE="2.3.0"
-
-
-# Gateway image and tags
-NRT_GATEWAY_IMAGE_BASE="o-ran-sc/nonrtric-gateway"
-NRT_GATEWAY_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT"
-NRT_GATEWAY_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT"
-NRT_GATEWAY_IMAGE_TAG_REMOTE="1.0.0"
-NRT_GATEWAY_IMAGE_TAG_REMOTE_RELEASE="1.0.0"
-
-
-# SDNC A1 Controller image and tags - Note using released honolulu ONAP image
-SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
-SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.1.6"
-#No local image for ONAP SDNC, remote release image always used
-
-# ORAN SDNC adapter kept as reference
-# SDNC A1 Controller image and tags - still using cherry version, no new version for D-Release
-#SDNC_A1_CONTROLLER_IMAGE_BASE="o-ran-sc/nonrtric-a1-controller"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_LOCAL="2.0.1-SNAPSHOT"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_SNAPSHOT="2.0.1-SNAPSHOT"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE="2.0.1"
-#SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.0.1"
-
-#SDNC DB remote image and tag
-#The DB is part of SDNC so handled in the same way as SDNC
-SDNC_DB_IMAGE_BASE="mariadb"
-SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-
-#Older SDNC db image kept for reference
-#SDNC DB remote image and tag
-#SDNC_DB_IMAGE_BASE="mysql/mysql-server"
-#SDNC_DB_IMAGE_TAG_REMOTE_PROXY="5.6"
-#No local image for SDNC DB, remote image always used
-
-
-# RAPP Catalogue image and tags
-RAPP_CAT_IMAGE_BASE="o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_CAT_IMAGE_TAG_LOCAL="1.0.2-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.2-SNAPSHOT"
-RAPP_CAT_IMAGE_TAG_REMOTE="1.0.2"
-RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE="1.0.2"
-
-
-# Near RT RIC Simulator image and tags - same version as cherry
-RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
-RIC_SIM_IMAGE_TAG_LOCAL="latest"
-RIC_SIM_IMAGE_TAG_REMOTE_SNAPSHOT="2.2.0-SNAPSHOT"
-RIC_SIM_IMAGE_TAG_REMOTE="2.2.0"
-RIC_SIM_IMAGE_TAG_REMOTE_RELEASE="2.2.0"
-
-# DMAAP Mediator Service
-DMAAP_MED_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-mediator-producer"
-DMAAP_MED_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-DMAAP_MED_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-DMAAP_MED_IMAGE_TAG_REMOTE="1.0.1"
-DMAAP_MED_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
-
-# DMAAP Adapter Service
-DMAAP_ADP_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-adaptor"
-DMAAP_ADP_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-DMAAP_ADP_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-DMAAP_ADP_IMAGE_TAG_REMOTE="1.0.1"
-DMAAP_ADP_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
-
-# Helm Manager
-HELM_MANAGER_IMAGE_BASE="o-ran-sc/nonrtric-helm-manager"
-HELM_MANAGER_IMAGE_TAG_LOCAL="1.1.1-SNAPSHOT"
-HELM_MANAGER_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.1-SNAPSHOT"
-HELM_MANAGER_IMAGE_TAG_REMOTE="1.1.1"
-HELM_MANAGER_IMAGE_TAG_REMOTE_RELEASE="1.1.1"
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
-#MR stub image and tag
-MRSTUB_IMAGE_BASE="mrstub"
-MRSTUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for MR stub, local image always used
-
-
-#Callback receiver image and tag
-CR_IMAGE_BASE="callback-receiver"
-CR_IMAGE_TAG_LOCAL="latest"
-#No remote image for CR, local image always used
-
-
-#Producer stub image and tag
-PROD_STUB_IMAGE_BASE="producer-stub"
-PROD_STUB_IMAGE_TAG_LOCAL="latest"
-#No remote image for producer stub, local image always used
-
-#Http proxy remote image and tag
-HTTP_PROXY_IMAGE_BASE="nodejs-http-proxy"
-HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for http proxy, local image always used
-
-#ONAP Zookeeper remote image and tag
-ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.1.0"
-#No local image for ONAP Zookeeper, remote image always used
-
-#ONAP Kafka remote image and tag
-ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.1"
-#No local image for ONAP Kafka, remote image always used
-
-#ONAP DMAAP-MR remote image and tag
-ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.3.0"
-#No local image for ONAP DMAAP-MR, remote image always used
-
-#Kube proxy remote image and tag
-KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
-KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
-#No remote image for kube proxy, local image always used
-
-#PVC Cleaner remote image and tag
-PVC_CLEANER_IMAGE_BASE="ubuntu"
-PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
-#No local image for pvc cleaner, remote image always used
-
-#Kafka Procon image and tag
-KAFKAPC_IMAGE_BASE="kafka-procon"
-KAFKAPC_IMAGE_TAG_LOCAL="latest"
-#No remote image for Kafka Procon, local image always used
-
-#Chartmuseum remote image and tag
-CHART_MUS_IMAGE_BASE="ghcr.io/helm/chartmuseum"
-CHART_MUS_IMAGE_TAG_REMOTE_OTHER="v0.13.1"
-#No local image for chart museum, remote image always used
-
-# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="A1PMS ICS CP RC RICSIM NGW DMAAPADP DMAAPMED HELMMANAGER" # Add SDNC here if oran image is used
-
-# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="" # Not used
-
-# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="CBS DMAAPMR SDNC" # SDNC added as ONAP image
-
-
-########################################
-# Detailed settings per app
-########################################
-
-# Port number variables
-# =====================
-# Port number vars <name>_INTERNAL_PORT and <name>_INTERNAL_SECURE_PORT are set as pod/container port in kube and container port in docker
-#
-# Port number vars <name>_EXTERNAL_PORT and <name>_EXTERNAL_SECURE_PORT are set as svc port in kube and localhost port in docker
-#
-# Some components, e.g. MR, can be represented as the MR-STUB and/or the DMAAP MR. For these components,
-# special vars named <name>_LOCALHOST_PORT and <name>_LOCALHOST_SECURE_PORT are used as localhost ports instead of
-# <name>_EXTERNAL_PORT and <name>_EXTERNAL_SECURE_PORT ports in docker, in order to prevent overlapping ports on the local host
-#
-# For KUBE PROXY there are special external ports for docker as the proxy also exposes the kube svc port on localhost;
-# therefore a special set of external ports is needed for docker: <name>_DOCKER_EXTERNAL_PORT and <name>_DOCKER_EXTERNAL_SECURE_PORT
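-#
-# Illustrative sketch (not part of the original profile): how a test function might pick the
-# host-side port according to the rules above. RUNMODE and the selection logic are hypothetical;
-# only the port variables defined further down in this file are reused:
-#
-#   if [ "$RUNMODE" == "KUBE" ]; then
-#       mr_host_port=$MR_EXTERNAL_PORT             # svc port in kube
-#   else
-#       mr_host_port=$MR_DMAAP_LOCALHOST_PORT      # dedicated localhost port in docker, avoids clashes
-#   fi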
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net" # Name of docker private network
-
-KUBE_NONRTRIC_NAMESPACE="nonrtric" # Namespace for all nonrtric components
-KUBE_SIM_NAMESPACE="nonrtric-ft" # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim" # Namespace for a1-p simulators (RICSIM)
-KUBE_ONAP_NAMESPACE="onap" # Namespace for onap (only message router)
-KUBE_SDNC_NAMESPACE="onap" # Namespace for sdnc
-
-A1PMS_EXTERNAL_PORT=8081 # A1PMS container external port (host -> container)
-A1PMS_INTERNAL_PORT=8081 # A1PMS container internal port (container -> container)
-A1PMS_EXTERNAL_SECURE_PORT=8433 # A1PMS container external secure port (host -> container)
-A1PMS_INTERNAL_SECURE_PORT=8433 # A1PMS container internal secure port (container -> container)
-A1PMS_APIS="V1 V2" # Supported northbound api versions
-A1PMS_VERSION="V2" # Tested version of northbound API
-A1PMS_API_PREFIX="/a1-policy" # api url prefix, only for V2
-
-A1PMS_APP_NAME="policymanagementservice" # Name for A1PMS container
-A1PMS_DISPLAY_NAME="Policy Management Service"
-A1PMS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-A1PMS_LOGPATH="/var/log/policy-agent/application.log"   # Path to the application log in the A1PMS container
-A1PMS_APP_NAME_ALIAS="policy-agent-container" # Alias name, name used by the control panel
-A1PMS_CONFIG_KEY="policy-agent" # Key for consul config
-A1PMS_PKG_NAME="org.onap.ccsdk.oran.a1policymanagementservice" # Java base package name
-A1PMS_ACTUATOR="/actuator/loggers/$A1PMS_PKG_NAME" # Url for trace/debug
-A1PMS_ALIVE_URL="$A1PMS_API_PREFIX/v2/status" # Base path for alive check
-A1PMS_COMPOSE_DIR="a1pms" # Dir in simulator_group for docker-compose
-A1PMS_CONFIG_MOUNT_PATH="/opt/app/policy-agent/config" # Path in container for config file
-A1PMS_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container for data file
-A1PMS_CONFIG_FILE="application.yaml" # Container config file name
-A1PMS_DATA_FILE="application_configuration.json" # Container data file name
-A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="" # Space separated list of features
-
-ICS_APP_NAME="informationservice" # Name for ICS container
-ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
-ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
-ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
-ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
-ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
-
-ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
-ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
-ICS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
-ICS_ACTUATOR="/actuator/loggers/org.oransc.ics" # Url for trace/debug
-ICS_CERT_MOUNT_DIR="./cert"
-ICS_ALIVE_URL="/status" # Base path for alive check
-ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
-ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
-ICS_CONFIG_FILE=application.yaml # Config file name
-ICS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
-ICS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO RESP_CODE_CHANGE_1" # Space separated list of features
-
-MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
-MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
-MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
-MR_STUB_DISPLAY_NAME="Message Router stub"
-MR_STUB_CERT_MOUNT_DIR="./cert"
-MR_EXTERNAL_PORT=3904 # MR dmaap/stub container external port
-MR_INTERNAL_PORT=3904 # MR dmaap/stub container internal port
-MR_EXTERNAL_SECURE_PORT=3905 # MR dmaap/stub container external secure port
-MR_INTERNAL_SECURE_PORT=3905 # MR dmaap/stub container internal secure port
-MR_DMAAP_LOCALHOST_PORT=3904                             # MR dmaap container external port (host -> container)
-MR_STUB_LOCALHOST_PORT=3908 # MR stub container external port (host -> container)
-MR_DMAAP_LOCALHOST_SECURE_PORT=3905                      # MR dmaap container external secure port (host -> container)
-MR_STUB_LOCALHOST_SECURE_PORT=3909 # MR stub container external secure port (host -> container)
-MR_READ_TOPIC="A1-POLICY-AGENT-READ" # Read topic
-MR_WRITE_TOPIC="A1-POLICY-AGENT-WRITE" # Write topic
-MR_READ_URL="/events/$MR_READ_TOPIC/users/policy-agent?timeout=15000&limit=100" # Path to read messages from MR
-MR_WRITE_URL="/events/$MR_WRITE_TOPIC" # Path to write messages to MR
-MR_STUB_ALIVE_URL="/" # Base path for mr stub alive check
-MR_DMAAP_ALIVE_URL="/topics" # Base path for dmaap-mr alive check
-MR_DMAAP_COMPOSE_DIR="dmaapmr" # Dir in simulator_group for dmaap mr for - docker-compose
-MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
-MR_KAFKA_PORT=9092 # Kafka port number
-MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
-MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
-MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
-MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
-MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs1" # Config files dir on localhost
-
-CR_APP_NAME="callback-receiver" # Name for the Callback receiver
-CR_DISPLAY_NAME="Callback receiver"
-CR_EXTERNAL_PORT=8090 # Callback receiver container external port (host -> container)
-CR_INTERNAL_PORT=8090 # Callback receiver container internal port (container -> container)
-CR_EXTERNAL_SECURE_PORT=8091 # Callback receiver container external secure port (host -> container)
-CR_INTERNAL_SECURE_PORT=8091 # Callback receiver container internal secure port (container -> container)
-CR_APP_CALLBACK="/callbacks" # Url for callbacks
-CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
-CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/reset" # Base path for alive check
-CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
-
-PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
-PROD_STUB_DISPLAY_NAME="Producer Stub"
-PROD_STUB_EXTERNAL_PORT=8092 # Producer stub container external port (host -> container)
-PROD_STUB_INTERNAL_PORT=8092 # Producer stub container internal port (container -> container)
-PROD_STUB_EXTERNAL_SECURE_PORT=8093 # Producer stub container external secure port (host -> container)
-PROD_STUB_INTERNAL_SECURE_PORT=8093 # Producer stub container internal secure port (container -> container)
-PROD_STUB_JOB_CALLBACK="/callbacks/job" # Callback path for job create/update/delete
-PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision"  # Callback path for producer supervision
-PROD_STUB_ALIVE_URL="/" # Base path for alive check
-PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
-RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
-RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
- # Note, a prefix is added to each container name by the .env file in the 'ric' dir
-RIC_SIM_PREFIX="ricsim" # Prefix added to ric container name, added in the .env file in the 'ric' dir
- # This prefix can be changed from the command line
-RIC_SIM_INTERNAL_PORT=8085 # RIC Simulator container internal port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_INTERNAL_SECURE_PORT=8185 # RIC Simulator container internal secure port (container -> container).
- # (external ports allocated by docker)
-RIC_SIM_CERT_MOUNT_DIR="./cert"
-
-RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
-RIC_SIM_ALIVE_URL="/" # Base path for alive check
-RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-# Kept as reference for oran a1 adapter
-# SDNC_APP_NAME="a1controller"                           # Name of the SDNC A1 Controller container
-# SDNC_DISPLAY_NAME="SDNC A1 Controller"
-# SDNC_EXTERNAL_PORT=8282                                # SDNC A1 Controller container external port (host -> container)
-# SDNC_INTERNAL_PORT=8181                                # SDNC A1 Controller container internal port (container -> container)
-# SDNC_EXTERNAL_SECURE_PORT=8443                         # SDNC A1 Controller container external secure port (host -> container)
-# SDNC_INTERNAL_SECURE_PORT=8443                         # SDNC A1 Controller container internal secure port (container -> container)
-# SDNC_DB_APP_NAME="sdncdb"                              # Name of the SDNC DB container
-# SDNC_A1_TRUSTSTORE_PASSWORD=""                         # SDNC truststore password
-# SDNC_USER="admin"                                      # SDNC username
-# SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SDNC password
-# SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"    # Base url path for SDNC API
-# SDNC_ALIVE_URL="/apidoc/explorer/"                     # Base url path for SDNC API docs (for alive check)
-# SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-# SDNC_COMPOSE_FILE="docker-compose.yml"
-# SDNC_KUBE_APP_FILE="app.yaml"
-# SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-# SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-
-# For ONAP SDNC
-SDNC_APP_NAME="a1controller"                             # Name of the SDNC A1 Controller container
-SDNC_DISPLAY_NAME="SDNC A1 Controller"
-SDNC_EXTERNAL_PORT=8282                                  # SDNC A1 Controller container external port (host -> container)
-SDNC_INTERNAL_PORT=8181                                  # SDNC A1 Controller container internal port (container -> container)
-SDNC_EXTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container external secure port (host -> container)
-SDNC_INTERNAL_SECURE_PORT=8443                           # SDNC A1 Controller container internal secure port (container -> container)
-SDNC_DB_APP_NAME="sdncdb"                                # Name of the SDNC DB container
-SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter"                  # SDNC truststore password
-SDNC_USER="admin"                                        # SDNC username
-#SDNC_PWD="admin"                                        # SDNC password (superseded by the value below)
-SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"   # SDNC password
-#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:"        # Base url path for SDNC API (for upgraded sdnc)
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:"      # Base url path for SDNC API
-SDNC_ALIVE_URL="/apidoc/explorer/"                       # Base url path for SDNC API docs (for alive check)
-SDNC_COMPOSE_DIR="sdnc"
-SDNC_COMPOSE_FILE="docker-compose-2.yml"
-SDNC_KUBE_APP_FILE="app2.yaml"
-SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
-SDNC_FEATURE_LEVEL="" # Space separated list of features
-
-RAPP_CAT_APP_NAME="rappcatalogueservice" # Name for the RAPP Catalogue
-RAPP_CAT_DISPLAY_NAME="RAPP Catalogue"
-RAPP_CAT_EXTERNAL_PORT=8680 # RAPP Catalogue container external port (host -> container)
-RAPP_CAT_INTERNAL_PORT=8680 # RAPP Catalogue container internal port (container -> container)
-RAPP_CAT_EXTERNAL_SECURE_PORT=8633 # RAPP Catalogue container external secure port (host -> container)
-RAPP_CAT_INTERNAL_SECURE_PORT=8633 # RAPP Catalogue container internal secure port (container -> container)
-RAPP_CAT_ALIVE_URL="/services" # Base path for alive check
-RAPP_CAT_COMPOSE_DIR="rapp_catalogue" # Dir in simulator_group for docker-compose
-
-CONTROL_PANEL_APP_NAME="controlpanel" # Name of the Control Panel container
-CONTROL_PANEL_DISPLAY_NAME="Control Panel"
-CONTROL_PANEL_EXTERNAL_PORT=8080 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_PORT=8080 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_EXTERNAL_SECURE_PORT=8880 # Control Panel container external port (host -> container)
-CONTROL_PANEL_INTERNAL_SECURE_PORT=8082 # Control Panel container internal port (container -> container)
-CONTROL_PANEL_LOGPATH="/var/log/nonrtric-gateway/application.log" # Path to the application log in the Control Panel container
-CONTROL_PANEL_ALIVE_URL="/" # Base path for alive check
-CONTROL_PANEL_COMPOSE_DIR="control_panel" # Dir in simulator_group for docker-compose
-CONTROL_PANEL_CONFIG_FILE=nginx.conf # Config file name
-CONTROL_PANEL_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-CONTROL_PANEL_CONFIG_MOUNT_PATH=/etc/nginx # Container internal path for config
-CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s" #nginx resolver for kube
-CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11" # nginx resolver for docker
-CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/" # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/" # Path prefix for forwarding ics calls to NGW
-CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/" # Path prefix for forwarding ics calls to NGW
-
-NRT_GATEWAY_APP_NAME="nonrtricgateway" # Name of the Gateway container
-NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
-NRT_GATEWAY_EXTERNAL_PORT=9090 # Gateway container external port (host -> container)
-NRT_GATEWAY_INTERNAL_PORT=9090 # Gateway container internal port (container -> container)
-NRT_GATEWAY_EXTERNAL_SECURE_PORT=9091 # Gateway container external port (host -> container)
-NRT_GATEWAY_INTERNAL_SECURE_PORT=9091 # Gateway container internal port (container -> container)
-NRT_GATEWAY_LOGPATH="/var/log/nonrtric-gateway/application.log" # Path to the application log in the Gateway container
-NRT_GATEWAY_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-NRT_GATEWAY_ALIVE_URL="/actuator/metrics" # Base path for alive check
-NRT_GATEWAY_COMPOSE_DIR="ngw" # Dir in simulator_group for docker-compose
-NRT_GATEWAY_CONFIG_MOUNT_PATH=/opt/app/nonrtric-gateway/config # Container internal path for config
-NRT_GATEWAY_CONFIG_FILE=application.yaml # Config file name
-NRT_GATEWAY_PKG_NAME="org.springframework.cloud.gateway" # Java base package name
-NRT_GATEWAY_ACTUATOR="/actuator/loggers/$NRT_GATEWAY_PKG_NAME" # Url for trace/debug
-
-HTTP_PROXY_APP_NAME="httpproxy" # Name of the Http Proxy container
-HTTP_PROXY_DISPLAY_NAME="Http Proxy"
-HTTP_PROXY_EXTERNAL_PORT=8740 # Http Proxy container external port (host -> container)
-HTTP_PROXY_INTERNAL_PORT=8080 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_EXTERNAL_SECURE_PORT=8742 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_INTERNAL_SECURE_PORT=8433 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_PORT=8741 # Http Proxy container external port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_PORT=8081 # Http Proxy container internal port (container -> container)
-HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=8743 # Http Proxy container external secure port (host -> container)
-HTTP_PROXY_WEB_INTERNAL_SECURE_PORT=8434 # Http Proxy container internal secure port (container -> container)
-HTTP_PROXY_CONFIG_PORT=0 # Port number for proxy config, will be set if proxy is started
-HTTP_PROXY_CONFIG_HOST_NAME="" # Proxy host, will be set if proxy is started
-HTTP_PROXY_ALIVE_URL="/" # Base path for alive check
-HTTP_PROXY_COMPOSE_DIR="httpproxy" # Dir in simulator_group for docker-compose
-HTTP_PROXY_BUILD_DIR="http-https-proxy" # Dir in simulator_group for image build - note, reuses source from kubeproxy
-
-KUBE_PROXY_APP_NAME="kubeproxy" # Name of the Kube Http Proxy container
-KUBE_PROXY_DISPLAY_NAME="Kube Http Proxy"
-KUBE_PROXY_EXTERNAL_PORT=8730 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_INTERNAL_PORT=8080 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_EXTERNAL_SECURE_PORT=8782 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_INTERNAL_SECURE_PORT=8433 # Kube Proxy container internal secure port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_PORT=8731 # Kube Http Proxy container external port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_PORT=8081 # Kube Http Proxy container internal port (container -> container)
-KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783 # Kube Proxy container external secure port (host -> container)
-KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434 # Kube Proxy container internal secure port (container -> container)
-
-KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784 # Kube Proxy container external secure port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733 # Kube Http Proxy container external port, docker (host -> container)
-KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785 # Kube Proxy container external secure port, docker (host -> container)
-
-KUBE_PROXY_PATH="" # Proxy url path, will be set if proxy is started
-KUBE_PROXY_ALIVE_URL="/" # Base path for alive check
-KUBE_PROXY_COMPOSE_DIR="kubeproxy" # Dir in simulator_group for docker-compose
-
-PVC_CLEANER_APP_NAME="pvc-cleaner" # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner" # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner" # Dir in simulator_group for yamls
-
-DMAAP_ADP_APP_NAME="dmaapadapterservice" # Name for Dmaap Adapter container
-DMAAP_ADP_DISPLAY_NAME="Dmaap Adapter Service" # Display name for Dmaap Adapter container
-DMAAP_ADP_EXTERNAL_PORT=9087 # Dmaap Adapter container external port (host -> container)
-DMAAP_ADP_INTERNAL_PORT=8084 # Dmaap Adapter container internal port (container -> container)
-DMAAP_ADP_EXTERNAL_SECURE_PORT=9088 # Dmaap Adapter container external secure port (host -> container)
-DMAAP_ADP_INTERNAL_SECURE_PORT=8435 # Dmaap Adapter container internal secure port (container -> container)
-
-#DMAAP_ADP_LOGPATH="/var/log/dmaap-adaptor-service/application.log" # Path to the application log in the Dmaap Adapter container
-DMAAP_ADP_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-#DMAAP_ADP_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container
-DMAAP_ADP_ACTUATOR="/actuator/loggers/org.oran.dmaapadapter" # Url for trace/debug
-#DMAAP_ADP_CERT_MOUNT_DIR="./cert"
-DMAAP_ADP_ALIVE_URL="/actuator/info" # Base path for alive check
-DMAAP_ADP_COMPOSE_DIR="dmaapadp" # Dir in simulator_group for docker-compose
-DMAAP_ADP_CONFIG_MOUNT_PATH="/opt/app/dmaap-adaptor-service/config" # Internal container path for configuration
-DMAAP_ADP_DATA_MOUNT_PATH="/opt/app/dmaap-adaptor-service/data" # Path in container for data file
-DMAAP_ADP_DATA_FILE="application_configuration.json" # Container data file name
-DMAAP_ADP_CONFIG_FILE=application.yaml # Config file name
-DMAAP_ADP_CONFIG_FILE_TEMPLATE=application.yaml # Template config file name
-DMAAP_ADP_FEATURE_LEVEL="GENERATED_PROD_NAME" # Space separated list of features
-
-DMAAP_MED_APP_NAME="dmaapmediatorservice" # Name for Dmaap Mediator container
-DMAAP_MED_DISPLAY_NAME="Dmaap Mediator Service" # Display name for Dmaap Mediator container
-DMAAP_MED_EXTERNAL_PORT=8085 # Dmaap Mediator container external port (host -> container)
-DMAAP_MED_INTERNAL_PORT=8085 # Dmaap Mediator container internal port (container -> container)
-DMAAP_MED_EXTERNAL_SECURE_PORT=8185 # Dmaap Mediator container external secure port (host -> container)
-DMAAP_MED_INTERNAL_SECURE_PORT=8185 # Dmaap Mediator container internal secure port (container -> container)
-
-DMAAP_MED_LOGPATH="/var/log/dmaap-adaptor-service/application.log" # Path to the application log in the Dmaap Mediator container
-DMAAP_MED_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-#DMAAP_MED_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container
-#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
-#DMAAP_MED_CERT_MOUNT_DIR="./cert"
-DMAAP_MED_ALIVE_URL="/health_check" # Base path for alive check
-DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose
-#DMAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration
-DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file
-DMAAP_MED_HOST_DATA_FILE="type_config.json" # Host data file name
-DMAAP_MED_CONTR_DATA_FILE="type_config.json" # Container data file name
-DMAAP_MED_FEATURE_LEVEL="" # Space separated list of features
-
-KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon
-KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer"
-KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container)
-KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container)
-KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container)
-KAFKAPC_INTERNAL_SECURE_PORT=8091 # Kafka procon container internal secure port (container -> container)
-KAFKAPC_ALIVE_URL="/" # Base path for alive check
-KAFKAPC_COMPOSE_DIR="kafka-procon" # Dir in simulator_group for docker-compose
-KAFKAPC_BUILD_DIR="kafka-procon" # Build dir
-
-CHART_MUS_APP_NAME="chartmuseum" # Name for the chart museum app
-CHART_MUS_DISPLAY_NAME="Chart Museum"
-CHART_MUS_EXTERNAL_PORT=8201 # chart museum container external port (host -> container)
-CHART_MUS_INTERNAL_PORT=8080 # chart museum container internal port (container -> container)
-CHART_MUS_ALIVE_URL="/health" # Base path for alive check
-CHART_MUS_COMPOSE_DIR="chartmuseum" # Dir in simulator_group for docker-compose
-CHART_MUS_CHART_CONTR_CHARTS="/tmp/charts" # Local dir container for charts
-
-HELM_MANAGER_APP_NAME="helmmanagerservice" # Name for the helm manager app
-HELM_MANAGER_DISPLAY_NAME="Helm Manager"
-HELM_MANAGER_EXTERNAL_PORT=8211 # helm manager container external port (host -> container)
-HELM_MANAGER_INTERNAL_PORT=8083 # helm manager container internal port (container -> container)
-HELM_MANAGER_EXTERNAL_SECURE_PORT=8212 # helm manager container external secure port (host -> container)
-HELM_MANAGER_INTERNAL_SECURE_PORT=8443 # helm manager container internal secure port (container -> container)
-HELM_MANAGER_CLUSTER_ROLE=cluster-admin # Kubernetes cluster role for helm manager
-HELM_MANAGER_SA_NAME=helm-manager-sa # Service account name
-HELM_MANAGER_ALIVE_URL="/helm/charts" # Base path for alive check
-HELM_MANAGER_COMPOSE_DIR="helmmanager" # Dir in simulator_group for docker-compose
-HELM_MANAGER_USER="helmadmin"
-HELM_MANAGER_PWD="itisasecret"
-
-########################################
-# Setting for common curl-base function
-########################################
-
-UUID="" # UUID used as prefix to the policy id to simulate a real UUID
- # Test scripts need to set the UUID, otherwise this empty prefix is used
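Every app entry in these profile env files follows the same variable pattern: container name, display name, external/internal ports, alive-check path and compose dir. A minimal sketch of that pattern for a hypothetical component (the EXAMPLEAPP names and port numbers below are illustrative only, not part of the suite):

    EXAMPLEAPP_APP_NAME="exampleapp"       # Name of the Example App container
    EXAMPLEAPP_DISPLAY_NAME="Example App"
    EXAMPLEAPP_EXTERNAL_PORT=8900          # Example App container external port (host -> container)
    EXAMPLEAPP_INTERNAL_PORT=8080          # Example App container internal port (container -> container)
    EXAMPLEAPP_ALIVE_URL="/"               # Base path for alive check
    EXAMPLEAPP_COMPOSE_DIR="exampleapp"    # Dir in simulator_group for docker-compose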
diff --git a/test/common/test_env-oran-f-release.sh b/test/common/test_env-oran-f-release.sh
index d69f66a..42e3e54 100755
--- a/test/common/test_env-oran-f-release.sh
+++ b/test/common/test_env-oran-f-release.sh
@@ -278,7 +278,7 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
@@ -367,24 +367,6 @@
RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
RIC_SIM_ALIVE_URL="/" # Base path for alive check
RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-# Kept as reference for oran a1 adapter
-# SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
-# SDNC_DISPLAY_NAME="SDNC A1 Controller"
-# SDNC_EXTERNAL_PORT=8282 # SDNC A1 Controller container external port (host -> container)
-# SDNC_INTERNAL_PORT=8181 # SDNC A1 Controller container internal port (container -> container)
-# SDNC_EXTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container external secure port (host -> container)
-# SDNC_INTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container internal secure port (container -> container)
-# SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-# SDNC_A1_TRUSTSTORE_PASSWORD="" # SDNC truststore password
-# SDNC_USER="admin" # SDNC username
-# SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SDNC PWD
-# SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SDNC API
-# SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SDNC API docs (for alive check)
-# SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-# SDNC_COMPOSE_FILE="docker-compose.yml"
-# SDNC_KUBE_APP_FILE="app.yaml"
-# SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-# SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
# For ONAP sdnc
SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
diff --git a/test/common/test_env-oran-g-release.sh b/test/common/test_env-oran-g-release.sh
index 0f543a7..cebdeb8 100755
--- a/test/common/test_env-oran-g-release.sh
+++ b/test/common/test_env-oran-g-release.sh
@@ -278,7 +278,7 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
@@ -367,24 +367,6 @@
RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
RIC_SIM_ALIVE_URL="/" # Base path for alive check
RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-# Kept as reference for oran a1 adapter
-# SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
-# SDNC_DISPLAY_NAME="SDNC A1 Controller"
-# SDNC_EXTERNAL_PORT=8282 # SDNC A1 Controller container external port (host -> container)
-# SDNC_INTERNAL_PORT=8181 # SDNC A1 Controller container internal port (container -> container)
-# SDNC_EXTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container external secure port (host -> container)
-# SDNC_INTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container internal secure port (container -> container)
-# SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-# SDNC_A1_TRUSTSTORE_PASSWORD="" # SDNC truststore password
-# SDNC_USER="admin" # SDNC username
-# SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SDNC PWD
-# SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SDNC API
-# SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SDNC API docs (for alive check)
-# SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-# SDNC_COMPOSE_FILE="docker-compose.yml"
-# SDNC_KUBE_APP_FILE="app.yaml"
-# SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-# SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
# For ONAP sdnc
SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
diff --git a/test/common/test_env-oran-h-release.sh b/test/common/test_env-oran-h-release.sh
index f1cedcc..96c88ca 100755
--- a/test/common/test_env-oran-h-release.sh
+++ b/test/common/test_env-oran-h-release.sh
@@ -278,7 +278,7 @@
A1PMS_CONFIG_FILE="application.yaml" # Container config file name
A1PMS_DATA_FILE="application_configuration.json" # Container data file name
A1PMS_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-A1PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
+A1PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
@@ -367,24 +367,6 @@
RIC_SIM_COMPOSE_DIR="ric" # Dir in simulator group for docker compose
RIC_SIM_ALIVE_URL="/" # Base path for alive check
RIC_SIM_COMMON_SVC_NAME="" # Name of svc if one common svc is used for all ric sim groups (stateful sets)
-# Kept as reference for oran a1 adapter
-# SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
-# SDNC_DISPLAY_NAME="SDNC A1 Controller"
-# SDNC_EXTERNAL_PORT=8282 # SDNC A1 Controller container external port (host -> container)
-# SDNC_INTERNAL_PORT=8181 # SDNC A1 Controller container internal port (container -> container)
-# SDNC_EXTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container external secure port (host -> container)
-# SDNC_INTERNAL_SECURE_PORT=8443 # SDNC A1 Controller container internal secure port (container -> container)
-# SDNC_DB_APP_NAME="sdncdb" # Name of the SDNC DB container
-# SDNC_A1_TRUSTSTORE_PASSWORD="" # SDNC truststore password
-# SDNC_USER="admin" # SDNC username
-# SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SDNC PWD
-# SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SDNC API
-# SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SDNC API docs (for alive check)
-# SDNC_COMPOSE_DIR="sdnc" # Dir in simulator_group for docker-compose
-# SDNC_COMPOSE_FILE="docker-compose.yml"
-# SDNC_KUBE_APP_FILE="app.yaml"
-# SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-# SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
# For ONAP sdnc
SDNC_APP_NAME="a1controller" # Name of the SDNC A1 Controller container
diff --git a/test/common/testengine_config.sh b/test/common/testengine_config.sh
index 77b8ee0..00cfcca 100644
--- a/test/common/testengine_config.sh
+++ b/test/common/testengine_config.sh
@@ -18,7 +18,7 @@
#
# List of short names for all supported apps, including simulators etc
-APP_SHORT_NAMES="A1PMS ICS SDNC CP NGW RC RICSIM HTTPPROXY CBS CONSUL DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER KAFKAPC CHARTMUS HELMMANAGER LOCALHELM KEYCLOAK ISTIO AUTHSIDECAR"
+APP_SHORT_NAMES="A1PMS ICS SDNC CP NGW RC RICSIM HTTPPROXY DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER KAFKAPC CHARTMUS HELMMANAGER LOCALHELM KEYCLOAK ISTIO AUTHSIDECAR"
# List of available apps that are built and released by the project
PROJECT_IMAGES="A1PMS ICS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP HELMMANAGER AUTHSIDECAR"
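APP_SHORT_NAMES is the master list of supported app short names; the included-image lists in the test scripts and PROJECT_IMAGES are expected to be subsets of it. An illustrative consistency check in the same shell style (not part of the suite, shown only to make the relationship concrete):

    for app in $PROJECT_IMAGES; do
        if [[ " $APP_SHORT_NAMES " != *" $app "* ]]; then
            echo "Project image $app has no matching app short name"
        fi
    done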
diff --git a/test/jenkins/.gitignore b/test/jenkins/.gitignore
index 260a2eb..13f5823 100644
--- a/test/jenkins/.gitignore
+++ b/test/jenkins/.gitignore
@@ -1,8 +1,8 @@
-.consul_config.json
+.a1pms_config.json
.docker-images-table
.httplog*
.image-list
-.output.consul_config.json
+.output.a1pms_config.json
.result*
.timer_measurement.txt
.tmp.curl.json
diff --git a/test/simulator-group/a1pms/docker-compose.yml b/test/simulator-group/a1pms/docker-compose.yml
index 61a9d7e..42df99d 100644
--- a/test/simulator-group/a1pms/docker-compose.yml
+++ b/test/simulator-group/a1pms/docker-compose.yml
@@ -30,11 +30,6 @@
ports:
- ${A1PMS_EXTERNAL_PORT}:${A1PMS_INTERNAL_PORT}
- ${A1PMS_EXTERNAL_SECURE_PORT}:${A1PMS_INTERNAL_SECURE_PORT}
- environment:
- - CONSUL_HOST=${CONSUL_HOST}
- - CONSUL_PORT=${CONSUL_INTERNAL_PORT}
- - CONFIG_BINDING_SERVICE=${CONFIG_BINDING_SERVICE}
- - HOSTNAME=${A1PMS_CONFIG_KEY}
volumes:
- ${A1PMS_HOST_MNT_DIR}/$A1PMS_CONFIG_FILE:${A1PMS_CONFIG_MOUNT_PATH}/$A1PMS_CONFIG_FILE
- ${A1PMS_HOST_MNT_DIR}/db:${A1PMS_CONTAINER_MNT_DIR}
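With the Consul/CBS environment variables removed, the compose service only mounts the A1PMS config and data files shown above; any later configuration update is sent directly to A1PMS over REST. A hedged sketch of that call (the /a1-policy/v2/configuration path is assumed to be the A1PMS runtime configuration endpoint for the supported profiles; verify against the release in use):

    curl -sS -X PUT "http://localhost:${A1PMS_EXTERNAL_PORT}/a1-policy/v2/configuration" \
         -H "Content-Type: application/json" \
         --data-binary "@.a1pms_config.json"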
diff --git a/test/simulator-group/consul_cbs/.gitignore b/test/simulator-group/consul_cbs/.gitignore
deleted file mode 100644
index 7dc00c5..0000000
--- a/test/simulator-group/consul_cbs/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-.tmp.json
-.dockererr
-gen_docker-compose*
\ No newline at end of file
diff --git a/test/simulator-group/consul_cbs/consul/cbs_config.hcl b/test/simulator-group/consul_cbs/consul/cbs_config.hcl
deleted file mode 100644
index bd24ad4..0000000
--- a/test/simulator-group/consul_cbs/consul/cbs_config.hcl
+++ /dev/null
@@ -1,30 +0,0 @@
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-#server = true
-#bootstrap = true
-#client_addr = "0.0.0.0"
-
-service {
- # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
- # should be passed to A1PMS app with this value
- Name = "config-binding-service"
- # Host name where CBS is running
- Address = "config-binding-service"
- # Port number where CBS is running
- Port = 10000
-}
\ No newline at end of file
diff --git a/test/simulator-group/consul_cbs/docker-compose.yml b/test/simulator-group/consul_cbs/docker-compose.yml
deleted file mode 100644
index 003dbfa..0000000
--- a/test/simulator-group/consul_cbs/docker-compose.yml
+++ /dev/null
@@ -1,53 +0,0 @@
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-version: '3.0'
-
-networks:
- default:
- external: true
- name: ${DOCKER_SIM_NWNAME}
-services:
-
- consul-server:
- networks:
- - default
- container_name: ${CONSUL_APP_NAME}
- image: ${CONSUL_IMAGE}
- ports:
- - ${CONSUL_EXTERNAL_PORT}:${CONSUL_INTERNAL_PORT}
- volumes:
- - ./consul/:/consul/config
- labels:
- - "nrttest_app=CONSUL"
- - "nrttest_dp=${CONSUL_DISPLAY_NAME}"
-
- config-binding-service:
- networks:
- - default
- container_name: ${CBS_APP_NAME}
- image: ${CBS_IMAGE}
- ports:
- - ${CBS_EXTERNAL_PORT}:${CBS_INTERNAL_PORT}
- environment:
- - CONSUL_HOST=${CONSUL_HOST}
- depends_on:
- - ${CONSUL_HOST}
- labels:
- - "nrttest_app=CBS"
- - "nrttest_dp=${CBS_DISPLAY_NAME}"
-
diff --git a/test/simulator-group/consul_cbs/start.sh b/test/simulator-group/consul_cbs/start.sh
deleted file mode 100755
index b0399f2..0000000
--- a/test/simulator-group/consul_cbs/start.sh
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/bash
-
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-DOCKER_SIM_NWNAME="nonrtric-docker-net"
-echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
-docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
-
-docker-compose -f docker-compose-template.yml config > docker-compose.yml
-
-docker-compose up -d
-
-CONSUL_PORT=8500
-
-APP="policy-agent"
-JSON_FILE="config.json"
-
-curl -s -v http://127.0.0.1:${CONSUL_PORT}/v1/kv/${APP}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$JSON_FILE
\ No newline at end of file
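The removed start.sh pushed the prepared config.json into Consul's KV store under the key policy-agent. For reference only, since this Consul-based flow is what the change deletes, the stored value could be read back with the standard Consul HTTP API:

    curl -s "http://127.0.0.1:8500/v1/kv/policy-agent?raw"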
diff --git a/test/simulator-group/ric/cleanConsul.py b/test/simulator-group/ric/cleanConsul.py
deleted file mode 100644
index fcf3034..0000000
--- a/test/simulator-group/ric/cleanConsul.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-import json
-import subprocess
-import os
-
-print(" Clean old ric configurations in Consul config file")
-
-p = os.path.abspath('..')
-consul_config = p + '/consul_cbs' + '/config.json'
-
-
-def write_json(data, filename=consul_config):
- with open(filename, 'w') as f:
- json.dump(data, f, indent=4)
-
-
-with open(consul_config) as json_file:
- clean = json.load(json_file)
- clean['ric'] = []
-
-
-write_json(clean)
-print(" Clean old ric configurations from Consul config file, done")
-
-
diff --git a/test/simulator-group/ric/prepareConsul.py b/test/simulator-group/ric/prepareConsul.py
deleted file mode 100644
index fcb01e9..0000000
--- a/test/simulator-group/ric/prepareConsul.py
+++ /dev/null
@@ -1,60 +0,0 @@
-# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
-# ========================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=================================================
-#
-
-import json
-import subprocess
-import os
-
-print(" Update fresh ric configuration in Consul configuration file")
-
-p = os.path.abspath('..')
-consul_config = p + '/consul_cbs' + '/config.json'
-
-
-def write_json(data, filename=consul_config):
- with open(filename, 'w') as f:
- json.dump(data, f, indent=4)
-
-
-def bash_command(cmd):
- result = []
- sp = subprocess.Popen(['/bin/bash', '-c', cmd], stdout=subprocess.PIPE)
- for line in sp.stdout.readlines():
- result.append(line.decode().strip())
- return result
-
-
-command = "docker ps | grep simulator | awk '{print $NF}'"
-
-ric_list = bash_command(command)
-
-with open(consul_config) as json_file:
- data = json.load(json_file)
- temp = data['ric']
- for ric in ric_list:
- y = {"name": ric,
- "baseUrl": "http://" + ric + ":8085/",
- "managedElementIds": [
- "kista_" + ric,
- "stockholm_" + ric
- ]
- }
- temp.append(y)
-
-
-write_json(data)
-print(" Update Consul config file with fresh ric configuration, done")
diff --git a/test/simulator-group/sdnc/app.yaml b/test/simulator-group/sdnc/app.yaml
deleted file mode 100644
index 361d6cc..0000000
--- a/test/simulator-group/sdnc/app.yaml
+++ /dev/null
@@ -1,93 +0,0 @@
-# ============LICENSE_START=======================================================
-# Copyright (C) 2020-2022 Nordix Foundation
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=========================================================
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: $SDNC_APP_NAME
- namespace: $KUBE_SDNC_NAMESPACE
- labels:
- run: $SDNC_APP_NAME
- autotest: SDNC
-spec:
- replicas: 1
- selector:
- matchLabels:
- run: $SDNC_APP_NAME
- template:
- metadata:
- labels:
- run: $SDNC_APP_NAME
- autotest: SDNC
- spec:
- containers:
- - name: $SDNC_APP_NAME
- image: $SDNC_A1_CONTROLLER_IMAGE
- imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
- ports:
- - name: http
- containerPort: $SDNC_INTERNAL_PORT
- - name: https
- containerPort: $SDNC_INTERNAL_SECURE_PORT
- env:
- - name: MYSQL_ROOT_PASSWORD
- value: openECOMP1.0
- - name: SDNC_CONFIG_DIR
- value: /opt/onap/sdnc/data/properties
- - name: SDNC_DB_INIT
- value: 'true'
- - name: A1_TRUSTSTORE_PASSWORD
- value: $SDNC_A1_TRUSTSTORE_PASSWORD
- - name: ODL_ADMIN_USERNAME
- value: $SDNC_USER
- - name: ODL_ADMIN_PASSWORD
- value: $SDNC_PWD
- command:
- - /opt/onap/sdnc/bin/startODL.sh
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: $SDNC_DB_APP_NAME
- namespace: $KUBE_SDNC_NAMESPACE
- labels:
- run: $SDNC_DB_APP_NAME
- autotest: SDNC
-spec:
- replicas: 1
- selector:
- matchLabels:
- run: $SDNC_DB_APP_NAME
- template:
- metadata:
- labels:
- run: $SDNC_DB_APP_NAME
- autotest: SDNC
- spec:
- containers:
- - name: $SDNC_DB_APP_NAME
- image: $SDNC_DB_IMAGE
- imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
- ports:
- - name: http
- containerPort: 3306
- env:
- - name: MYSQL_ROOT_PASSWORD
- value: openECOMP1.0
- - name: MYSQL_ROOT_HOST
- value: '%'
-
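The removed app.yaml depended on the test suite substituting the $-prefixed variables before the manifest is applied to the cluster. A minimal sketch of that substitution step, using envsubst as a stand-in for the suite's own helper:

    # after exporting the SDNC_* and KUBE_* variables from the profile env file:
    envsubst < app.yaml | kubectl apply -f -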
diff --git a/test/simulator-group/sdnc/docker-compose.yml b/test/simulator-group/sdnc/docker-compose.yml
deleted file mode 100644
index 505a54c..0000000
--- a/test/simulator-group/sdnc/docker-compose.yml
+++ /dev/null
@@ -1,71 +0,0 @@
-# ==================================================================================
-# Modifications Copyright (c) 2019 Nordix Foundation.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==================================================================================
-version: '3'
-
-networks:
- default:
- external: true
- name: ${DOCKER_SIM_NWNAME}
-services:
- db:
- image: ${SDNC_DB_IMAGE}
- container_name: ${SDNC_DB_APP_NAME}
- networks:
- - default
- ports:
- - "3306"
- environment:
- - MYSQL_ROOT_PASSWORD=openECOMP1.0
- - MYSQL_ROOT_HOST=%
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
- labels:
- - "nrttest_app=SDNC"
- - "nrttest_dp=SDNC DB"
-
- a1controller:
- image: ${SDNC_A1_CONTROLLER_IMAGE}
- depends_on :
- - db
- container_name: ${SDNC_APP_NAME}
- networks:
- - default
- entrypoint: ["/opt/onap/sdnc/bin/startODL.sh"]
- ports:
- - ${SDNC_EXTERNAL_PORT}:${SDNC_INTERNAL_PORT}
- - ${SDNC_EXTERNAL_SECURE_PORT}:${SDNC_INTERNAL_SECURE_PORT}
- links:
- - db:dbhost
- - db:sdnctldb01
- - db:sdnctldb02
- environment:
- - MYSQL_ROOT_PASSWORD=openECOMP1.0
- - SDNC_DB_INIT=true
- - SDNC_CONFIG_DIR=/opt/onap/sdnc/data/properties
- - A1_TRUSTSTORE_PASSWORD=${SDNC_A1_TRUSTSTORE_PASSWORD}
- - ODL_ADMIN_USERNAME=${SDNC_USER}
- - ODL_ADMIN_PASSWORD=${SDNC_PWD}
- logging:
- driver: "json-file"
- options:
- max-size: "30m"
- max-file: "5"
- labels:
- - "nrttest_app=SDNC"
- - "nrttest_dp=${SDNC_DISPLAY_NAME}"