k8s: Resolve address conflicts in virtual environments
Running Casablanca and Dublin virtual environments at the same time led
to networking issues: the same IP address was assigned to cluster nodes.
Issue-ID: SECCOM-235
Change-Id: I2a59d023115326f5b132782a32190fd8f7dc1f48
Signed-off-by: Pawel Wieczorek <p.wieczorek2@samsung.com>
diff --git a/test/security/k8s/vagrant/dublin/cluster.yml b/test/security/k8s/vagrant/dublin/cluster.yml
index f062222..df93a88 100644
--- a/test/security/k8s/vagrant/dublin/cluster.yml
+++ b/test/security/k8s/vagrant/dublin/cluster.yml
@@ -1,7 +1,7 @@
# An example of a Kubernetes cluster for ONAP
ssh_key_path: &ssh_key_path "~/.ssh/onap-key"
nodes:
-- address: 172.17.0.100
+- address: 172.17.4.100
port: "22"
role:
- controlplane
@@ -9,7 +9,7 @@
hostname_override: "onap-control-1"
user: vagrant
ssh_key_path: *ssh_key_path
-- address: 172.17.0.101
+- address: 172.17.4.101
port: "22"
role:
- worker