k8s: Resolve address conflicts in virtual environments

Running the Casablanca and Dublin virtual environments at the same
time led to networking issues: the same IP addresses had been assigned
to cluster nodes in both environments. Move Dublin's nodes from the
172.17.0.0/24 range to 172.17.4.0/24 so the two setups no longer
collide.
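
A minimal Ruby sketch to double-check that the new Dublin addresses
stay clear of the old range (assuming Casablanca's host-only network
remains on 172.17.0.0/24; that range is inferred from the previous
Dublin values, not read from the Casablanca config):

    require 'ipaddr'

    # Assumed Casablanca host-only range (same as Dublin's old one).
    casablanca = IPAddr.new('172.17.0.0/24')

    # New Dublin node addresses from this change.
    dublin = %w[172.17.4.254 172.17.4.100 172.17.4.101]

    dublin.each do |ip|
      raise "#{ip} collides with Casablanca range" if casablanca.include?(ip)
    end
    puts 'no address conflicts'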

Issue-ID: SECCOM-235
Change-Id: I2a59d023115326f5b132782a32190fd8f7dc1f48
Signed-off-by: Pawel Wieczorek <p.wieczorek2@samsung.com>
diff --git a/test/security/k8s/vagrant/dublin/Vagrantfile b/test/security/k8s/vagrant/dublin/Vagrantfile
index b8abcd1..1ccc3ef 100644
--- a/test/security/k8s/vagrant/dublin/Vagrantfile
+++ b/test/security/k8s/vagrant/dublin/Vagrantfile
@@ -15,10 +15,10 @@
 vm_cpus = 1
 vm_box = "generic/ubuntu1804"
 
-operation = { name: 'operator', hostname: 'operator', ip: '172.17.0.254' }
+operation = { name: 'operator', hostname: 'operator', ip: '172.17.4.254' }
 cluster = [
-  { name: 'control', hostname: 'control', ip: '172.17.0.100' },
-  { name: 'worker', hostname: 'worker', ip: '172.17.0.101' }
+  { name: 'control', hostname: 'control', ip: '172.17.4.100' },
+  { name: 'worker', hostname: 'worker', ip: '172.17.4.101' }
 ]
 
 all = cluster.dup << operation
diff --git a/test/security/k8s/vagrant/dublin/cluster.yml b/test/security/k8s/vagrant/dublin/cluster.yml
index f062222..df93a88 100644
--- a/test/security/k8s/vagrant/dublin/cluster.yml
+++ b/test/security/k8s/vagrant/dublin/cluster.yml
@@ -1,7 +1,7 @@
 # An example of a Kubernetes cluster for ONAP
 ssh_key_path: &ssh_key_path "~/.ssh/onap-key"
 nodes:
-- address: 172.17.0.100
+- address: 172.17.4.100
   port: "22"
   role:
   - controlplane
@@ -9,7 +9,7 @@
   hostname_override: "onap-control-1"
   user: vagrant
   ssh_key_path: *ssh_key_path
-- address: 172.17.0.101
+- address: 172.17.4.101
   port: "22"
   role:
   - worker