Enhancements of simulators for DFC

New test cases for MR and DR simulators
Added simulator monitor server for simulator supervision
Update - Adapted simulators to changed naming of ftp files
Update2 - Added some more test behaviour to simulators.

Issue-ID: DCAEGEN2-1313

Change-Id: I90a346d44ab4e7c9f5a65f599b64f1907525dd51
Signed-off-by: BjornMagnussonXA <bjorn.magnusson@est.tech>
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
index fc903d7..3d95492 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
@@ -12,7 +12,10 @@
 RUN npm install express
 RUN npm install argparse
 
-EXPOSE 3906
-EXPOSE 3907
-EXPOSE 3908
-EXPOSE 3909
\ No newline at end of file
+#Ports for DR
+#EXPOSE 3906
+#EXPOSE 3907
+
+#Ports for DR redir
+#EXPOSE 3908
+#EXPOSE 3909
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/README.md b/test/mocks/datafilecollector-testharness/dr-sim/README.md
index f0cdf58..8761d0c 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/dr-sim/README.md
@@ -1,6 +1,9 @@
-#Alternative to running python (as described below) on your machine, use the docker files.
+### Alternative to running node (as described below) on your machine, use the docker files.
 1. Build docker container with ```docker build -t drsim_common:latest .```
 2. Run the container ```docker-compose up```
+3. For specific behavior of the simulators, add arguments to the `command` entries in the `docker-compose.yml`.
+For example `command: node dmaapDR_redir.js --tc no_publish` . (Use '--tc normal' for normal behaviour). Run `node dmaapDR.js --printtc`
+and `node dmaapDR_redir.js --printtc` for details. 
 
 
 
@@ -9,5 +12,46 @@
 Make sure that you run these commands in the application directory "dr-sim"
 3. `npm install express`
 4. `npm install argparse`
-5. `node dmaapDR.js`   #keep it in the foreground
-6. `node dmaapDR_redir.js`  #keep it in the foreground
+5. `node dmaapDR.js`   #keep it in the foreground, see item 3 in the above list for arg to the simulator
+6. `node dmaapDR_redir.js`  #keep it in the foreground, see item 3 in the above list for arg to the simulator
+
+
+The dmaapDR_redir server sends a callback to the dmaapDR server to update the list of successfully published files.
+By default, the IP for dmaapDR is set to work when running as a container (using an IP address from the dfc_net docker network). When running the servers from the command line, set the env variable DR_SIM_IP=localhost
+
+The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
+
+DR
+
+`curl localhost:3906/ctr_publish_query` - returns the number of publish queries
+
+`curl localhost:3906/ctr_publish_query_published` - returns the number of responses where the file was published
+
+`curl localhost:3906/ctr_publish_query_not_published` - returns the number of responses where the file was not published
+
+`curl localhost:3906/ctr_publish_req` - returns the number of publish requests
+
+`curl localhost:3906/ctr_publish_req_redirect` - returns the number of publish responses with redirect
+
+`curl localhost:3906/ctr_publish_req_published` - returns the number of publish responses without redirect
+
+`curl localhost:3906/ctr_published_files` - returns the number of unique published files
+
+`curl localhost:3906/tc_info` - returns the tc name (argument on the command line)
+
+`curl localhost:3906/execution_time` - returns the execution times in mm:ss
+
+
+DR REDIR
+
+`curl localhost:3908/ctr_publish_requests` - returns the number of publish queries
+
+`curl localhost:3908/ctr_publish_responses` - returns the number of publish responses
+
+`curl localhost:3908/tc_info` - returns the tc name (argument on the command line)
+
+`curl localhost:3908/execution_time` - returns the execution times in mm:ss
+
+`curl localhost:3908/time_lastpublish` - returns the time (mm:ss) for the latest publish
+
+`curl localhost:3908/dwl_volume` - returns the total received data volume of file data
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
index 4e73174..fffe57c 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
+++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
@@ -5,12 +5,33 @@
 const stream = require('stream');
 var app = express();
 var fs = require('fs');
-var path = require('path');
 var privateKey  = fs.readFileSync('cert/private.key', 'utf8');
 var certificate = fs.readFileSync('cert/certificate.crt', 'utf8');
 var credentials = {key: privateKey, cert: certificate};
-const allPublished = "allPublished";
-const nonePublished = "nonePublished";
+
+//For execution time calculation
+var startTime = Date.now();
+
+//Test case constants
+const tc_normal = "normal";
+const tc_none_published = "none_published";
+const tc_all_published = "all_published"
+const tc_10p_no_response = "10p_no_response";
+const tc_10first_no_response = "10first_no_response";
+const tc_100first_no_response = "100first_no_response";
+const tc_all_delay_10s = "all_delay_10s";
+const tc_10p_delay_10s = "10p_delay_10s";
+const tc_10p_error_response = "10p_error_response";
+const tc_10first_error_response = "10first_error_response";
+const tc_100first_error_response = "100first_error_response";
+
+//Counters
+var ctr_publish_query = 0;
+var ctr_publish_query_published = 0;
+var ctr_publish_query_not_published = 0;
+var ctr_publish_req = 0;
+var ctr_publish_req_redirect = 0;
+var ctr_publish_req_published = 0;
 
 var parser = new ArgumentParser({
 	  version: '0.0.1',
@@ -28,20 +49,59 @@
 
 var args = parser.parseArgs();
 
-if (args.tc=="nonePublished") {
-	console.log("TC: nonePublished")
-}
-if (args.tc=="allPublished") {
-	console.log("TC: allPublished")
-	//preparations
+if (args.tc==tc_normal) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_none_published) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_all_published) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10p_no_response) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10first_no_response) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_100first_no_response) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_all_delay_10s) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10p_delay_10s) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10p_error_response) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10first_error_response) {
+	console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_100first_error_response) {
+	console.log("TC: " + args.tc)
+} else {
+	console.log("No TC specified, use: --tc <tc-id>");
+	process.exit(0);
 }
 
 if (args.printtc) {
-	console.log("TC nonePublished: no file has already been published.");
-	console.log("TC allPublished: whatever is the request, this file is considered as published.");
-	console.log("No argument passed: normal behaviour, that is publish if not already published");
+	console.log("TC " + tc_normal + ": Normal case, query response based on published files. Publish responds with ok/redirect depending on if file is published or not.");
+	console.log("TC " + tc_none_published + ": Query responds with '[]'. Publish responds with redirect.");
+	console.log("TC " + tc_all_published + ": Query responds with the filename. Publish responds with 'ok'.");
+	console.log("TC " + tc_10p_no_response + ": 10% no response for query and publish. Otherwise normal case.");
+	console.log("TC " + tc_10first_no_response + ": 10 first queries and requests give no response for query and publish. Otherwise normal case.");
+	console.log("TC " + tc_100first_no_response + ": 100 first queries and requests give no response for query and publish. Otherwise normal case.");
+	console.log("TC " + tc_all_delay_10s + ": All responses delayed 10s (both query and publish).");
+	console.log("TC " + tc_10p_delay_10s + ": 10% of responses delayed 10s, (both query and publish).");
+	console.log("TC " + tc_10p_error_response + ": 10% error response for query and publish. Otherwise normal case.");
+	console.log("TC " + tc_10first_error_response + ": 10 first queries and requests give error response for query and publish. Otherwise normal case.");
+	console.log("TC " + tc_100first_error_response + ": 100 first queries and requests give error response for query and publish. Otherwise normal case.");
+
 	process.exit(0);
-}
+  }
+
 
 var bodyParser = require('body-parser')
 app.use(bodyParser.urlencoded({ extended: false }))
@@ -52,55 +112,179 @@
 // parse application/vnd.api+json as json
 app.use(bodyParser.json({ type: 'application/vnd.api+json' }))
 
-// parse some custom thing into a Buffer
-app.use(bodyParser.raw({limit:1024*1024*20, type: 'application/octet-stream' }))
+// parse some custom thing into a Buffer (to cater for 60MB files)
+app.use(bodyParser.raw({limit:1024*1024*60, type: 'application/octet-stream' }))
 // parse an HTML body into a string
 app.use(bodyParser.text({ type: 'text/html' }))
+
+
+
+//Is alive function
 app.get("/",function(req, res){
 	res.send("ok");
 })
 
-
-var published = [];
-app.get('/feedlog/1/',function(req, res){
-	var filename = req.query.filename;
-	if(args.tc == allPublished){
-		res.send("[" + filename + "]");
-	} else if(args.tc == nonePublished){
-		res.send("[]");
-	} else {
-		if (published.includes(filename)) {
-			res.send("[" + filename + "]");
-		} else {
-			res.send("[]");
-		}
-	}
+//Counter readout
+app.get("/ctr_publish_query",function(req, res){
+	res.send(""+ctr_publish_query);
+})
+app.get("/ctr_publish_query_published",function(req, res){
+	res.send(""+ctr_publish_query_published);
+})
+app.get("/ctr_publish_query_not_published",function(req, res){
+	res.send(""+ctr_publish_query_not_published);
+})
+app.get("/ctr_publish_req",function(req, res){
+	res.send(""+ctr_publish_req);
+})
+app.get("/ctr_publish_req_redirect",function(req, res){
+	res.send(""+ctr_publish_req_redirect);
+})
+app.get("/ctr_publish_req_published",function(req, res){
+	res.send(""+ctr_publish_req_published);
+})
+app.get("/ctr_published_files",function(req, res){
+	res.send(""+published.length);
+})
+app.get("/tc_info",function(req, res){
+	res.send(args.tc);
+})
+function fmtMSS(s){
+	return(s-(s%=60))/60+(9<s?':':':0')+s    //Format time diff in mm:ss
+}
+app.get("/execution_time",function(req, res){
+	diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
+	res.send(""+diff);
 })
 
+//db of published files
+var published = [];
 
-app.put('/publish/1/', function (req, res) {
+app.get('/feedlog/1/',function(req, res){
+	console.log("url:"+req.url);
+	ctr_publish_query++;
 	var filename = req.query.filename;
-	var type = req.query.type;
+	console.log(filename);
+	var qtype = req.query.type;
 	if(typeof(filename) == 'undefined'){
 		res.status(400).send({error: 'No filename provided.'});
-	} else if(typeof(type) == 'undefined'){
+		return;
+	} else if(typeof(qtype) == 'undefined'){
 		res.status(400).send({error: 'No type provided.'});
+		return;
+	}
+	
+	//Ugly fix, plus signs replaces with spaces in query params....need to put them back
+	filename = filename.replace(/ /g,"+");
+	
+	if (args.tc==tc_normal) {
+	  //continue
+	}  else if (args.tc==tc_none_published) {
+		ctr_publish_query_not_published++;
+		res.send("[]");
+		return;
+	} else if (args.tc==tc_all_published) {
+		ctr_publish_query_published++;
+		res.send("[" + filename + "]");
+		return;
+	} else if (args.tc==tc_10p_no_response && (ctr_publish_query%10) == 0) {
+		return;
+	} else if (args.tc==tc_10first_no_response && ctr_publish_query<11) {
+		return;
+	} else if (args.tc==tc_100first_no_response && ctr_publish_query<101) {
+		return;
+	} else if (args.tc==tc_all_delay_10s) {
+		console.log("sleep begin");
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	} else if (args.tc==tc_10p_delay_10s && (ctr_publish_query%10) == 0) {
+		console.log("sleep begin");
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	} else if (args.tc==tc_10p_error_response && (ctr_publish_query%10) == 0) {
+		res.send(400);
+		return;
+	} else if (args.tc==tc_10first_error_response && ctr_publish_query<11) {
+		res.send(400);
+		return;
+	} else if (args.tc==tc_100first_error_response && ctr_publish_query<101) {
+		res.send(400);
+		return;
+	}
+
+	if (published.includes(filename)) {
+		ctr_publish_query_published++;
+		res.send("[" + filename + "]");
 	} else {
-		if(args.tc == allPublished){
-			res.send("[" + filename + "]");
-		} else if(args.tc == nonePublished){
-			res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename);
-		} else {
-			if (!published.includes(filename)) {
-				published.push(filename);
-				res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename);
-			} else {
-				res.send("ok");
-			}
-		}
+		ctr_publish_query_not_published++;
+		res.send("[]");
 	}
 })
 
+app.put('/publish/1/:filename', function (req, res) {
+	console.log("url:"+req.url);
+	console.log("body (first 25 bytes):"+req.body.slice(0,25));
+	console.log("headers:"+req.headers);
+	ctr_publish_req++;
+
+	var filename = req.params.filename;
+	console.log(filename);
+
+	if (args.tc==tc_normal) {
+	    //continue
+	} else if (args.tc==tc_none_published) {
+		ctr_publish_req_redirect++;
+		res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename);
+		return;
+	} else if (args.tc==tc_all_published) {
+		ctr_publish_req_published++;
+		res.send("ok");
+		return;
+	}else if (args.tc==tc_10p_no_response && (ctr_publish_req%10) == 0) {
+		return;
+	} else if (args.tc==tc_10first_no_response && ctr_publish_req<11) {
+		return;
+	} else if (args.tc==tc_100first_no_response && ctr_publish_req<101) {
+		return;
+	} else if (args.tc==tc_all_delay_10s) {
+		console.log("sleep begin");
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	} else if (args.tc==tc_10p_delay_10s && (ctr_publish_req%10) == 0) {
+		console.log("sleep begin");
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	} else if (args.tc==tc_10p_error_response && (ctr_publish_req%10) == 0) {
+		res.send(400);
+		return;
+	} else if (args.tc==tc_10first_error_response && ctr_publish_req<11) {
+		res.send(400);
+		return;
+	} else if (args.tc==tc_100first_error_response && ctr_publish_req<101) {
+		res.send(400);
+		return;
+	}
+
+	if (!published.includes(filename)) {
+		ctr_publish_req_redirect++;
+		res.redirect(301, 'http://127.0.0.1:3908/publish/1/'+filename);
+	} else {
+		ctr_publish_req_published++;
+		res.send("ok");
+	}
+})
+
+//Callback from DR REDIR server, when file is published ok this PUT request update the list of published files.
+app.put('/dr_redir_publish/:filename', function (req, res) {
+	console.log("url:"+req.url);
+	var filename = req.params.filename;
+	console.log(filename);
+
+	if (!published.includes(filename)) {
+		console.log("File marked as published by callback from DR redir SIM. url: " + req.url);
+		published.push(filename);
+	} else {
+		console.log("File already marked as published. Callback from DR redir SIM. url: " + req.url);
+	}
+
+	res.send("ok");
+})
 
 var httpServer = http.createServer(app);
 var httpsServer = https.createServer(credentials, app);
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
index 5be1f68..4494e89 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
+++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
@@ -6,11 +6,97 @@
 var app = express();
 var fs = require("fs");
 var path = require('path');
+var ArgumentParser = require('argparse').ArgumentParser;
 var privateKey  = fs.readFileSync('cert/private.key', 'utf8');
 var certificate = fs.readFileSync('cert/certificate.crt', 'utf8');
 var credentials = {key: privateKey, cert: certificate};
 
+
 var bodyParser = require('body-parser')
+var startTime = Date.now();
+
+var dr_callback_ip = '192.168.100.2'; //IP for DR when running as container. Can be changed by env DR_SIM_IP
+
+//Counters
+var ctr_publish_requests = 0;
+var ctr_publish_responses = 0;
+var lastPublish = "";
+var dwl_volume = 0;
+
+var parser = new ArgumentParser({
+	version: '0.0.1',
+	addHelp:true,
+	description: 'Datarouter redirect simulator'
+  });
+
+parser.addArgument('--tc' , { help: 'TC $NoOfTc' } );
+parser.addArgument('--printtc' ,
+	  {
+		  help: 'Print complete usage help',
+		  action: 'storeTrue'
+	  }
+  );
+
+var args = parser.parseArgs();
+const tc_normal = "normal";
+const tc_no_publish ="no_publish"
+const tc_10p_no_response = "10p_no_response";
+const tc_10first_no_response = "10first_no_response";
+const tc_100first_no_response = "100first_no_response";
+const tc_all_delay_10s = "all_delay_10s";
+const tc_10p_delay_10s = "10p_delay_10s";
+const tc_10p_error_response = "10p_error_response";
+const tc_10first_error_response = "10first_error_response";
+const tc_100first_error_response = "100first_error_response";
+
+if (args.tc==tc_normal) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_no_publish) {
+	console.log("TC: " + args.tc)
+  
+} else if (args.tc==tc_10p_no_response) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10first_no_response) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_100first_no_response) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_all_delay_10s) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10p_delay_10s) {
+	console.log("TC: " + args.tc)
+  
+} else if (args.tc==tc_10p_error_response) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_10first_error_response) {
+  console.log("TC: " + args.tc)
+
+} else if (args.tc==tc_100first_error_response) {
+  console.log("TC: " + args.tc)
+} else {
+	console.log("No TC specified, use: --tc <tc-id>");
+	process.exit(0);
+}
+
+if (args.printtc) {
+  console.log("TC " + tc_normal + ": Normal case, all files published and DR updated");
+  console.log("TC " + tc_no_publish + ": Ok response but no files published");
+  console.log("TC " + tc_10p_no_response + ": 10% no response (file not published)");
+  console.log("TC " + tc_10first_no_response + ": 10 first requests give no response (files not published)");
+  console.log("TC " + tc_100first_no_response + ": 100 first requests give no response (files not published)");
+  console.log("TC " + tc_all_delay_10s + ": All responses delayed 10s, normal publish");
+  console.log("TC " + tc_10p_delay_10s + ": 10% of responses delayed 10s, normal publish");
+  console.log("TC " + tc_10p_error_response + ": 10% error response (file not published)");
+  console.log("TC " + tc_10first_error_response + ": 10 first requests give error response (file not published)");
+  console.log("TC " + tc_100first_error_response + ": 100 first requests give error responses (file not published)");
+
+  process.exit(0);
+}
 
 // parse application/x-www-form-urlencoded
 app.use(bodyParser.urlencoded({ extended: false }))
@@ -22,26 +108,120 @@
 app.use(bodyParser.json({ type: 'application/vnd.api+json' }))
 
 // parse some custom thing into a Buffer
-app.use(bodyParser.raw({limit:1024*1024*20, type: 'application/octet-stream' }))
+app.use(bodyParser.raw({limit:1024*1024*60, type: 'application/octet-stream' }))
 
 // parse an HTML body into a string
 app.use(bodyParser.text({ type: 'text/html' }))
+
+//Formatting
+function fmtMSS(s){
+	return(s-(s%=60))/60+(9<s?':':':0')+s  //Format time diff to mm:ss
+}
+function fmtLargeNumber(x) {
+	return x.toString().replace(/\B(?=(\d{3})+(?!\d))/g, " "); //Format large with space, eg: 1 000 000
+}
+
+//I'm alive function
 app.get("/",function(req, res){
 	res.send("ok");
 })
 
-app.put('/publish/1/:filename', function (req, res) {
-	console.log(req.files);
-	console.log(req.body)
-	console.log(req.headers)
-	var filename = path.basename(req.params.filename);
-  filename = path.resolve(__dirname, filename);
-	console.log(req.params.filename);
-  fs.writeFile(filename, req.body, function (error) {
-  	if (error) { console.error(error); }
-	});
-	 res.send("ok")
+//Counter readout
+app.get("/ctr_publish_requests",function(req, res){
+	res.send(""+ctr_publish_requests);
 })
+app.get("/ctr_publish_responses",function(req, res){
+	res.send(""+ctr_publish_responses);
+})
+app.get("/execution_time",function(req, res){
+	diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
+	res.send(""+diff);
+})
+app.get("/time_lastpublish",function(req, res){
+	res.send(""+lastPublish);
+})
+app.get("/dwl_volume",function(req, res){
+	res.send(""+fmtLargeNumber(dwl_volume));
+})
+app.get("/tc_info",function(req, res){
+	res.send(args.tc);
+})
+
+app.put('/publish/1/:filename', function (req, res) {
+	console.log(req.url);
+	console.log("First 25 bytes of body: " + req.body.slice(0,25))
+	console.log(req.headers)
+	ctr_publish_requests++;
+	if (args.tc == tc_no_publish) {
+		ctr_publish_responses++;
+		res.send("ok")
+		return;
+	} else if (args.tc==tc_10p_no_response && (ctr_publish_requests%10)==0) {
+		return;
+	} else if (args.tc==tc_10first_no_response && ctr_publish_requests<11) {
+		return;
+	} else if (args.tc==tc_100first_no_response && ctr_publish_requests<101) {
+		return;
+	} else if (args.tc==tc_10p_error_response && (ctr_publish_requests%10)==0) {
+		ctr_publish_responses++;
+		res.send(400, "");
+		return;
+	} else if (args.tc==tc_10first_error_response && ctr_publish_requests<11) {
+		ctr_publish_responses++;
+		res.send(400, "");
+		return;
+	} else if (args.tc==tc_100first_error_response && ctr_publish_requests<101) {
+		ctr_publish_responses++;
+		res.send(400, "");
+		return;
+	} else if (args.tc==tc_10p_delay_10s && (ctr_publish_requests%10)==0) {
+		console.log("sleep begin");
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	} else if (args.tc==tc_all_delay_10s) {
+		//var sleep = require('sleep');
+		console.log("sleep begin");
+		//sleep.sleep(10); 
+		new Promise(done => setTimeout(done, 10000)).then(_=>console.log("sleeping done"));
+	}
+
+	//Remaining part if normal file publish
+
+	var filename = req.params.filename;
+	console.log(filename);
+	//Create filename (appending file size to name) to store
+  	var storedFilename = path.resolve(__dirname, filename+"-"+req.body.length); 
+  	fs.writeFile(storedFilename, "", function (error) {  //Store file with zero size
+  		if (error) { console.error(error); }
+	});
+	
+	//Make callback to update the list of published files in the DR sim
+	//Note the hard-coded IP address; the DR sim gets this IP if the simulators are started from the
+	//script in the 'simulatorgroup' dir.
+	//Workaround: Could not get a normal http PUT to work from nodejs, using curl instead
+	var util = require('util');
+	var exec = require('child_process').exec;
+
+	var command = 'curl -s -X PUT http://' + dr_callback_ip + ':3906/dr_redir_publish/' +req.params.filename;
+
+	console.log("Callback to DR sim to report file published, cmd: " + command);
+	child = exec(command, function(error, stdout, stderr){
+		console.log('stdout: ' + stdout);
+		console.log('stderr: ' + stderr);
+		if(error !== null) {
+			console.log('exec error: ' + error);
+		}
+		
+	});
+
+	//Update status variables
+	ctr_publish_responses++;
+	lastPublish = fmtMSS(Math.floor((Date.now()-startTime)/1000));
+	dwl_volume = dwl_volume + req.body.length;
+
+	res.send("ok")
+});
+
+
 var httpServer = http.createServer(app);
 var httpsServer = https.createServer(credentials, app);
 
@@ -52,3 +232,7 @@
 httpsServer.listen(httpsPort);
 console.log("DR-simulator listening (https) at "+httpsPort)
 
+if (process.env.DR_SIM_IP) {
+	dr_callback_ip=process.env.DR_SIM_IP;
+} 
+console.log("Using IP " + dr_callback_ip + " for callback to DR sim");
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md
index 5746345..8fafdfe 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md
@@ -4,11 +4,21 @@
 2. Run the container ```docker-compose up```
 The behavior can be changed by argument to the python script in the docker-compose.yml
 
-The simulator can be queried for statistics
-localhost:2222/ctr_requests   - return an integer of the number of get request to the event poll path
-localhost:2222/ctr_responses  - return an integer of the number of get responses to the event poll path
-localhost:2222/ctr_unique_files - returns an integer or the number of unique files. A unique file is the combination of node+file_sequence_number 
+The simulator can be queried for statistics (use curl from cmd line or open in browser, curl used below):
 
+`curl localhost:2222/ctr_requests`   - returns an integer of the number of get requests to the event poll path
+
+`curl localhost:2222/ctr_responses`  - returns an integer of the number of get responses to the event poll path
+
+`curl localhost:2222/ctr_unique_files` - returns an integer of the number of unique files. A unique file is the combination of node+file_sequence_number 
+
+`curl localhost:2222/tc_info` - returns the tc string (as given on the cmd line)
+
+`curl localhost:2222/ctr_events` - returns the total number of events
+
+`curl localhost:2222/execution_time` - returns the execution time in mm:ss
+
+`curl localhost:2222/ctr_unique_PNFs` - returns the number of unique PNFs in all events.
 
 ##Common TC info
 File names for 1MB, 5MB and 50MB files
@@ -20,33 +30,44 @@
 When the number of events are exhausted, empty replies are returned '[]'
 
 TC100 - One ME, SFTP, 1 1MB file, 1 event
+
 TC101 - One ME, SFTP, 1 5MB file, 1 event
+
 TC102 - One ME, SFTP, 1 50MB file, 1 event
 
 TC110 - One ME, SFTP, 1MB files, 1 file per event, 100 events, 1 event per poll.
+
 TC111 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll.
+
 TC112 - One ME, SFTP, 5MB files, 100 files per event, 100 events, 1 event per poll.
+
 TC113 - One ME, SFTP, 1MB files, 100 files per event, 100 events. All events in one poll.
 
 
 TC120 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% of replies each: no response, empty message, slow response, 404-error, malformed json
+
 TC121 - One ME, SFTP, 1MB files, 100 files per event, 100 events, 1 event per poll. 10% missing files
+
 TC122 - One ME, SFTP, 1MB files, 100 files per event, 100 events. 1 event per poll. All files with identical name. 
 
-Endless event streams
-TC1000 - One ME, SFTP, 1MB files, 100 files per event, endless number of events, 1 event per poll
-TC1001 - One ME, SFTP, 5MB files, 100 files per event, endless number of events, 1 event per poll
-
-
-TC510 - 5 ME, SFTP, 1MB files, 1 file per event, 100 events, 1 event per poll.
-
+TC510 - 700 MEs, SFTP, 1MB files, 1 file per event, 3500 events, 700 events per poll.
 
 TC200-TC202 same as TC100-TC102 but with FTPS
+
 TC210-TC213 same as TC110-TC113 but with FTPS
+
 TC2000-TC2001 same as TC1000-TC1001 but with FTPS
+
 TC610 same as TC510 but with FTPS
 
 
+Endless event streams
+
+TC1000 - One ME, SFTP, 1MB files, 100 files per event, endless number of events, 1 event per poll
+
+TC1001 - One ME, SFTP, 5MB files, 100 files per event, endless number of events, 1 event per poll
+
+
 ## Developer workflow
 
 1. ```sudo apt install python3-venv```
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
index ef46535..c1bed8f 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
+++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
@@ -3,15 +3,20 @@
 from werkzeug import secure_filename
 from flask import Flask, render_template, request
 from time import sleep
+import time
 import sys
 import json
 from flask import Flask
+
 app = Flask(__name__)
 
 #Server info
 HOST_IP = "0.0.0.0"
 HOST_PORT = 2222
 
+SFTP_PORT = 1022
+FTPS_PORT = 21
+
 #Test function to check server running
 @app.route('/',
     methods=['GET'])
@@ -46,6 +51,30 @@
     global tc_num
     return tc_num
 
+#Returns number of events
+@app.route('/ctr_events',
+    methods=['GET'])
+def counter_events():
+    global ctr_events
+    return str(ctr_events)
+
+#Returns the execution time in mm:ss
+@app.route('/execution_time',
+    methods=['GET'])
+def exe_time():
+    global startTime
+    
+    stopTime = time.time()
+    minutes, seconds = divmod(stopTime-startTime, 60)
+    return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+
+#Returns number of unique PNFs
+@app.route('/ctr_unique_PNFs',
+    methods=['GET'])
+def counter_uniquePNFs():
+    global pnfMap
+    return str(len(pnfMap))
+
 #Messages polling function
 @app.route(
     "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12",
@@ -86,7 +115,12 @@
       return tc1001("sftp")
 
     elif args.tc510:
-      return tc510("sftp")      
+      return tc510("sftp")  
+    elif args.tc511:
+      return tc511("sftp")   
+  
+    elif args.tc710:
+      return tc710("sftp")     
 
 
     elif args.tc200:
@@ -118,7 +152,12 @@
       return tc2001("ftps")
 
     elif args.tc610:
-      return tc510("ftps")     
+      return tc510("ftps")  
+    elif args.tc611:
+      return tc511("ftps") 
+  
+    elif args.tc810:
+      return tc710("ftps")   
 
 
 #### Test case functions
@@ -127,6 +166,7 @@
 def tc100(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
@@ -134,28 +174,35 @@
     return buildOkResponse("[]")
 
   seqNr = (ctr_responses-1)
-  msg = getEventHead() + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022) + getEventEnd()
+  nodeName = createNodeName(0)
+  fileName = createFileName(nodeName, seqNr, "1MB")
+  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
   fileMap[seqNr] = seqNr
+  ctr_events = ctr_events+1
   return buildOkResponse("["+msg+"]")
 
 def tc101(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 1):
     return buildOkResponse("[]")  
- 
-  seqNr = (ctr_responses-1)
-  msg = getEventHead() + getEventName("5MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022) + getEventEnd()
-  fileMap[seqNr] = seqNr
 
+  seqNr = (ctr_responses-1)
+  nodeName = createNodeName(0)
+  fileName = createFileName(nodeName, seqNr, "5MB")
+  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
+  fileMap[seqNr] = seqNr
+  ctr_events = ctr_events+1
   return buildOkResponse("["+msg+"]")
 
 def tc102(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
@@ -163,14 +210,17 @@
     return buildOkResponse("[]")  
 
   seqNr = (ctr_responses-1)
-  msg = getEventHead() + getEventName("50MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022) + getEventEnd()
+  nodeName = createNodeName(0)
+  fileName = createFileName(nodeName, seqNr, "50MB")
+  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
   fileMap[seqNr] = seqNr
-
+  ctr_events = ctr_events+1
   return buildOkResponse("["+msg+"]")
 
 def tc110(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
@@ -178,75 +228,89 @@
     return buildOkResponse("[]")  
   
   seqNr = (ctr_responses-1)
-  msg = getEventHead() + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022) + getEventEnd()
+  nodeName = createNodeName(0)
+  fileName = createFileName(nodeName, seqNr, "1MB")
+  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
   fileMap[seqNr] = seqNr
-
+  ctr_events = ctr_events+1
   return buildOkResponse("["+msg+"]")
 
 def tc111(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 100):
     return buildOkResponse("[]")  
-  
-  msg = getEventHead()
+
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
 
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if i != 0: msg = msg + ","
-    msg = msg + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+    fileName = createFileName(nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
     fileMap[seqNr] = seqNr
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc112(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 100):
     return buildOkResponse("[]")  
-  
-  msg = getEventHead()
+
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
 
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if i != 0: msg = msg + ","
-    msg = msg + getEventName("5MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+    fileName = createFileName(nodeName, seqNr, "5MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
     fileMap[seqNr] = seqNr
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc113(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 1):
     return buildOkResponse("[]")  
-  
+
+  nodeName = createNodeName(0)
   msg = ""
 
   for evts in range(100):  # build 100 evts
     if (evts > 0):
       msg = msg + ","
-    msg = msg + getEventHead()
+    msg = msg + getEventHead(nodeName)
     for i in range(100):   # build 100 files
       seqNr = i+evts+100*(ctr_responses-1)
       if i != 0: msg = msg + ","
-      msg = msg + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+      fileName = createFileName(nodeName, seqNr, "1MB")
+      msg = msg + getEventName(fileName,ftptype,"onap","pano")
       fileMap[seqNr] = seqNr
 
     msg = msg + getEventEnd()
+    ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
@@ -254,20 +318,23 @@
 def tc120(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
+  nodeName = createNodeName(0)
+
   if (ctr_responses > 100):
     return buildOkResponse("[]")  
 
   if (ctr_responses % 10 == 2):
     return  # Return nothing
-  
+
   if (ctr_responses % 10 == 3):
     return buildOkResponse("") # Return empty message
 
   if (ctr_responses % 10 == 4):
-    return buildOkResponse(getEventHead()) # Return part of a json event
+    return buildOkResponse(getEventHead(nodeName)) # Return part of a json event
 
   if (ctr_responses % 10 == 5):
     return buildEmptyResponse(404) # Return empty message with status code
@@ -275,64 +342,72 @@
   if (ctr_responses % 10 == 6):
     sleep(60)
 
-  
-  msg = getEventHead()
+
+  msg = getEventHead(nodeName)
 
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if i != 0: msg = msg + ","
-    msg = msg + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+    fileName = createFileName(nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
     fileMap[seqNr] = seqNr
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc121(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 100):
-    return buildOkResponse("[]")  
-  
-  msg = getEventHead()
+    return buildOkResponse("[]")
 
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
+
+  fileName = ""
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if (seqNr%10 == 0):     # Every 10th file is "missing"
-      fn = "MissingFile_" + str(seqNr) + ".tar.gz"
+      fileName = createMissingFileName(nodeName, seqNr, "1MB")
     else:
-      fn = "1MB_" + str(seqNr) + ".tar.gz"
+      fileName = createFileName(nodeName, seqNr, "1MB")
       fileMap[seqNr] = seqNr
 
     if i != 0: msg = msg + ","
-    msg = msg + getEventName(fn,ftptype,"onap","pano","localhost",1022)
-    
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc122(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
   if (ctr_responses > 100):
     return buildOkResponse("[]")  
-  
-  msg = getEventHead()
+
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
 
   for i in range(100):
-    fn = "1MB_0.tar.gz"  # All files identical names
+    fileName = createFileName(nodeName, 0, "1MB")  # All files identical names
     if i != 0: msg = msg + ","
-    msg = msg + getEventName(fn,ftptype,"onap","pano","localhost",1022)
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
 
   fileMap[0] = 0
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
@@ -340,42 +415,51 @@
 def tc1000(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
-  msg = getEventHead()
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
 
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if i != 0: msg = msg + ","
-    msg = msg + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+    fileName = createFileName(nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
     fileMap[seqNr] = seqNr
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc1001(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
-  msg = getEventHead()
+  nodeName = createNodeName(0)
+  msg = getEventHead(nodeName)
 
   for i in range(100):
     seqNr = i+(ctr_responses-1)
     if i != 0: msg = msg + ","
-    msg = msg + getEventName("5MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
+    fileName = createFileName(nodeName, seqNr, "5MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
     fileMap[seqNr] = seqNr
 
   msg = msg + getEventEnd()
+  ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
 def tc510(ftptype):
   global ctr_responses
   global ctr_unique_files
+  global ctr_events
 
   ctr_responses = ctr_responses + 1
 
@@ -384,18 +468,82 @@
 
   msg = ""
 
-  for evts in range(700):  # build events for 5 MEs
-    if (evts > 0):
+  for pnfs in range(700):  # build events for 700 MEs
+    if (pnfs > 0):
       msg = msg + ","
-    msg = msg + getEventHeadNodeName("PNF"+str(evts))
+    nodeName = createNodeName(pnfs)
+    msg = msg + getEventHead(nodeName)
     seqNr = (ctr_responses-1)
-    msg = msg + getEventName("1MB_" + str(seqNr) + ".tar.gz",ftptype,"onap","pano","localhost",1022)
-    seqNr = seqNr + evts*1000000 #Create unique id for this node and file
+    fileName = createFileName(nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
     fileMap[seqNr] = seqNr
     msg = msg + getEventEnd()
+    ctr_events = ctr_events+1
 
   return buildOkResponse("["+msg+"]")
 
+def tc511(ftptype):
+  global ctr_responses
+  global ctr_unique_files
+  global ctr_events
+
+  ctr_responses = ctr_responses + 1
+
+  if (ctr_responses > 5):
+    return buildOkResponse("[]")  
+
+  msg = ""
+
+  for pnfs in range(700):  # build events for 700 MEs
+    if (pnfs > 0):
+      msg = msg + ","
+    nodeName = createNodeName(pnfs)
+    msg = msg + getEventHead(nodeName)
+    seqNr = (ctr_responses-1)
+    fileName = createFileName(nodeName, seqNr, "1KB")
+    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
+    fileMap[seqNr] = seqNr
+    msg = msg + getEventEnd()
+    ctr_events = ctr_events+1
+
+  return buildOkResponse("["+msg+"]")
+
+def tc710(ftptype):
+  global ctr_responses
+  global ctr_unique_files
+  global ctr_events
+
+  ctr_responses = ctr_responses + 1
+  
+  if (ctr_responses > 100):
+    return buildOkResponse("[]")
+
+  msg = ""
+  
+  batch = (ctr_responses-1)%20;  
+
+  for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
+    if (pnfs > 0):
+      msg = msg + ","
+    nodeName = createNodeName(pnfs + batch*35)
+    msg = msg + getEventHead(nodeName)
+
+    for i in range(100):  # 100 files per event
+      seqNr = i + int((ctr_responses-1)/20);
+      if i != 0: msg = msg + ","
+      fileName = createFileName(nodeName, seqNr, "1MB")
+      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
+      fileMap[seqNr] = seqNr
+
+    msg = msg + getEventEnd()
+    ctr_events = ctr_events+1
+
+  return buildOkResponse("["+msg+"]")
+
+
 #Mapping FTPS TCs
 def tc200(ftptype):
   return tc100(ftptype)
@@ -419,6 +567,14 @@
   return tc121(ftptype)
 def tc222(ftptype):
   return tc122(ftptype)
+  
+def tc610(ftptype):
+  return tc510(ftptype)
+def tc611(ftptype):
+  return tc511(ftptype)
+  
+def tc810(ftptype):
+  return tc710(ftptype)
 
 def tc2000(ftptype):
   return tc1000(ftptype)
@@ -427,11 +583,21 @@
 
 #### Functions to build json messages and respones ####
 
-# Function to build fixed beginning of an event
-def getEventHead():
-  return getEventHeadNodeName("oteNB5309")
+def createNodeName(index):
+    return "PNF"+str(index);
 
-def getEventHeadNodeName(nodename):
+def createFileName(nodeName, index, size):
+    return "A20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+
+def createMissingFileName(nodeName, index, size):
+    return "AMissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+
+
+# Function to build fixed beginning of an event
+
+def getEventHead(nodename):
+  global pnfMap
+  pnfMap.add(nodename) 
   headStr = """
         {
           "event": {
@@ -461,7 +627,13 @@
   return headStr
 
 # Function to build the variable part of an event
-def getEventName(fn,type,user,passwd,ip,port):
+def getEventName(fn,type,user,passwd):
+    port = SFTP_PORT
+    ip = sftp_ip
+    if (type == "ftps"):
+        port = FTPS_PORT
+        ip = ftps_ip
+        
     nameStr =        """{
                   "name": \"""" + fn + """",
                   "hashMap": {
@@ -501,15 +673,26 @@
 
 
 if __name__ == "__main__":
-  
+
+    # IP addresses to use for ftp servers, using localhost if no env var is set
+    sftp_ip = os.environ.get('SFTP_SIM_IP', 'localhost')
+    ftps_ip = os.environ.get('FTPS_SIM_IP', 'localhost')
+    
+    
+
     #Counters
     ctr_responses = 0
     ctr_requests = 0
     ctr_unique_files = 0
+    ctr_events = 0
+    startTime = time.time()
 
-    #Keeps all reponded file names
+    #Keeps all responded file names
     fileMap = {}
 
+    #Keeps all responded PNF names
+    pnfMap = set()
+
     tc_num = "Not set"
     tc_help = "Not set"
 
@@ -572,9 +755,17 @@
     parser.add_argument(
         '--tc510',
         action='store_true',
-        help='TC510 - 5 MEs, SFTP, 1MB files, 1 file per event, 100 events, 1 event per poll.')
+        help='TC510 - 700 MEs, SFTP, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
+    
+    parser.add_argument(
+        '--tc511',
+        action='store_true',
+        help='TC511 - 700 MEs, SFTP, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
 
-
+    parser.add_argument(
+        '--tc710',
+        action='store_true',
+        help='TC710 - 700 MEs, SFTP, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
 
 # FTPS TCs with single ME
     parser.add_argument(
@@ -632,11 +823,21 @@
     parser.add_argument(
         '--tc610',
         action='store_true',
-        help='TC510 - 5 MEs, FTPS, 1MB files, 1 file per event, 100 events, 1 event per poll.')
+        help='TC610 - 700 MEs, FTPS, 1MB files, 1 file per event, 3500 events, 700 event per poll.')
+
+    parser.add_argument(
+        '--tc611',
+        action='store_true',
+        help='TC611 - 700 MEs, FTPS, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
+
+    parser.add_argument(
+        '--tc810',
+        action='store_true',
+        help='TC810 - 700 MEs, FTPS, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
 
     args = parser.parse_args()
 
-    
+
 
     if args.tc100:
         tc_num = "TC# 100"
@@ -668,6 +869,11 @@
 
     elif args.tc510:
         tc_num = "TC# 510"
+    elif args.tc511:
+        tc_num = "TC# 511"
+        
+    elif args.tc710:
+        tc_num = "TC# 710"
 
     elif args.tc200:
         tc_num = "TC# 200"
@@ -697,14 +903,25 @@
     elif args.tc2001:
         tc_num = "TC# 2001"
 
+
     elif args.tc610:
         tc_num = "TC# 610"
+    elif args.tc611:
+        tc_num = "TC# 611"
+           
+    elif args.tc810:
+        tc_num = "TC# 810"
 
     else:
         print("No TC was defined")
         print("use --help for usage info")
         sys.exit()
 
-    print(tc_num)
- 
+    print("TC num: " + tc_num)
+    
+        
+    print("Using " + sftp_ip + " for sftp server address in file urls.")
+    print("Using " + ftps_ip + " for ftps server address in file urls.")
+
     app.run(port=HOST_PORT, host=HOST_IP)
+
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md
index 4d448f8..5981c79 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/README.md
+++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md
@@ -4,15 +4,17 @@
 
 
 ###Preparation 
+Do the manual steps to prepare the simulator images
+
 Build the mr-sim image.
 
 cd ../mr-sim
 
-Run the docker build command to build the image for the MR simulator: 'docker build -t mrsim:latest .
+Run the docker build command to build the image for the MR simulator: `docker build -t mrsim:latest .`
 
 cd ../dr-sim
 
-Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest . 
+Run the docker build command to build the image for the DR simulators: `docker build -t drsim_common:latest .`
 
 
 cd ../simulator-group
@@ -21,21 +23,53 @@
 
 Check the README.md in ftps-sftp-server dir in case the cert need to be updated.
 
-cp -r ./ftps-sftp-server/configuration .
+cp -r ../ftps-sftp-server/configuration .
 
 cp -r ../ftps-sftp-server/tls .
 
 
 ###Execution
 
-Edit the `docker-compose-setup.sh` to setup the env variables to the desired test behavior for each simulators.
+Edit the `docker-compose-setup.sh` (or create a copy) to set up the env variables to the desired test behavior for each simulator.
 See each simulator to find a description of the available settings.
+The following env variables shall be set (example values).
+Note that NUM_FTPFILES and NUM_PNFS control the number of ftp files created in the ftp servers. 
+A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example). 
+Large settings will be time consuming at start of the servers.
 
-Run the script `docker-compose-setup.sh`to create a docker-compose with the desired settings. All simulators
-will be started with the generated docker-compose.yml file
+DR_TC="--tc normal"           #Normal behavior of the DR sim
+
+DR_REDIR_TC="--tc normal"     #Normal behavior of the DR redirect sim
+
+MR_TC="--tc100"               #One 1 MB file in one event, once. 
+
+BC_TC=""                      #Not in use yet
+
+NUM_FTPFILES="2"              #Two files for each PNF
+
+NUM_PNFS="2"                  #Two PNFs
+
+Run the script `docker-compose-setup.sh` to create a docker-compose with the desired settings. The desired settings
+in the script need to be manually adapted for each specific simulator behavior according to the above. Check each simulator for available
+parameters.
+All simulators will be started with the generated docker-compose.yml file
+
+To generate ftp urls with an IP different from localhost, set the SFTP_SIM_IP and/or FTPS_SIM_IP env variables to the addresses of the ftp servers before starting. 
+So far, this only works when the simulator python script is started from the command line.
 
 Kill all the containers with `simulators-kill.se`
 
 `simulators_start.sh` is for CSIT test and requires the env variables for test setting to be present in the shell.
 `setup-ftp-files.for-image.sh` is for CSIT and executed when the ftp servers are started from the docker-compose-setup.sh`.
 
+To make DFC able to connect to the simulator containers, DFC needs to run in host mode.
+Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <dfc-image> `
+
+`<dfc-image>` could be either the locally built image `onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
+or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`. 
+
+
+
+###Simulator monitor
+Start the simulator monitor server with `sim-monitor-start.sh` and then open a browser with the url `localhost:9999/mon`
+to see the statistics page with data from MR sim, DR sim and DR redir sim.
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
index d0171a5..af36d05 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
@@ -2,10 +2,13 @@
 
 #Script for manually starting all simulators with test setting below
 
-export DR_TC=""
-export DR_REDIR_TC=""
+export DR_TC="--tc normal"
+export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
-export NUM_FTPFILES="250"
+export BC_TC=""  #Not in use yet
+export NUM_FTPFILES="10"
+export NUM_PNFS="700"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
 
 source ./simulators-start.sh
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
index e7c7007..89a45a4 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
@@ -1,8 +1,17 @@
 version: '2'
+
+networks:
+  dfcnet:
+    ipam:
+      config:
+        - subnet: 192.168.100.0/16
+          
 services:
       
   drsim:
-    network_mode: "host"
+    networks:
+      dfcnet:
+        ipv4_address: 192.168.100.2
     image: drsim_common:latest
     ports:
      - "3906:3906"
@@ -11,6 +20,9 @@
     command: node dmaapDR.js ${DR_TC}
 
   drsim_redir:
+    networks:
+      dfcnet:
+        ipv4_address: 192.168.100.3
     image: drsim_common:latest
     ports:
      - "3908:3908"
@@ -19,6 +31,9 @@
     command: node dmaapDR_redir.js ${DR_REDIR_TC}
 
   mrsim:
+    networks:
+      dfcnet:
+        ipv4_address: 192.168.100.1
     image: mrsim:latest
     ports:
      - "2222:2222"
@@ -26,17 +41,17 @@
     command: python mr-sim.py ${MR_TC}
 
   sftp-server:
+    network_mode: bridge
     container_name: dfc_sftp-server
     image: atmoz/sftp:alpine
     ports:
       - "1022:22"
-#    volumes:
-#      - ./files/onap/sftp/:/home/onap/
     restart: on-failure
     command: onap:pano:1001
 
 
   ftpes-server-vsftpd:
+    network_mode: bridge
     container_name: dfc_ftpes-server-vsftpd
     image: docker.io/panubo/vsftpd
     ports:
@@ -54,6 +69,5 @@
       - ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
       - ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
 
-#      - ./files/onap/ftps/:/srv/
     restart: on-failure
   
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
index d8e57a3..6c2b4f2 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
@@ -1,28 +1,47 @@
-#!/bin/bash
+#!/usr/bin/env bash
 
 # Script to create files for the FTP server to return upon request.
 # The file names matches the files names in the events polled from the MR simulator.
 # Intended for execution in the running ftp containers in the ftp-root dir.
 
-echo "Running ftp file creations"
-
 NUM=200 #Default number of files 
+PNFS=1 #Default number of PNFs
+FSIZE="ALL"
 
 if [ $# -eq 1 ]; then 
     NUM=$1
+elif [ $# -eq 2 ]; then
+    NUM=$1
+    PNFS=$2
+elif [ $# -eq 3 ]; then
+	NUM=$1
+    PNFS=$2
+    FSIZE=$3
+    if [ $3 != "1KB" ] && [ $3 != "1MB" ] && [ $3 != "5MB" ]  && [ $3 != "50MB" ]  && [ $3 != "ALL" ]; then
+    	echo "File size shall be 1KB|1MB|5MB|50MB|ALL"
+    	exit
+    fi
+else
+    echo "Wrong args, usage: setup-ftp-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB ] ] ]"
+    exit
 fi
 
+echo "Running ftp file creations. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) "$FSIZE
+
+truncate -s 1KB 1KB.tar.gz
 truncate -s 1MB 1MB.tar.gz
 truncate -s 5MB 5MB.tar.gz
 truncate -s 50MB 50MB.tar.gz
 
-
-i=0
-while [ $i -lt $NUM ]; do  #Problem with for loop and var substituion in curly bracket....so used good old style loop
-   ln -s 1MB.tar.gz 1MB_$i.tar.gz
-   ln -s 5MB.tar.gz 5MB_$i.tar.gz
-   let i=i+1
+p=0
+while [ $p -lt $PNFS ]; do 
+    i=0
+    while [ $i -lt $NUM ]; do  #Problem with for loop and var substitution in curly bracket....so used good old style loop
+    	if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz'; fi
+        if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz'; fi
+        if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz'; fi
+        if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz'; fi
+    let i=i+1
+    done
+    let p=p+1
 done
-
-
-ln -s 50MB.tar.gz 50MB_0.tar.gz   #Large file, only for single file test
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh
new file mode 100755
index 0000000..52c8c1c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+#Script to start the sim-monitor
+
+#Re-using modules for dr-sim
+cp -r ../dr-sim/node_modules .
+node sim-monitor.js
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js
new file mode 100644
index 0000000..e4a19c2
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js
@@ -0,0 +1,165 @@
+var http = require('http');
+
+var express = require('express');
+var app = express();
+
+//I am alive
+app.get("/",function(req, res){
+	res.send("ok");
+})
+
+//Get parameter value from another server
+function getSimCtr(url, cb) {
+    var data = '';
+	http.get(url, (resp) => {
+  		// A chunk of data has been received.
+  		resp.on('data', (chunk) => {
+    		data += chunk;
+  		});
+
+  		// The whole response has been received.
+  		resp.on('end', () => {
+  			//Pad data to fixed length
+  			var i = 20-data.length;
+  			while(i>0) {
+  				data = data+"&nbsp;";
+  				i--;
+  			}
+    		cb(data);
+  		});
+
+	}).on("error", (err) => {
+  		console.log("Error: " + err.message);
+  		cb("no response from simulator");
+	});
+};
+
+//Status variables, for parameter values fetched from other simulators
+var mr1, mr2, mr3, mr4, mr5, mr6, mr7;
+
+var dr1, dr2, dr3, dr4, dr5, dr6, dr7, dr8, dr9;
+
+var drr1, drr2, drr3, drr4, drr5, drr6;
+
+app.get("/mon",function(req, res){
+
+	//MR
+    getSimCtr("http://127.0.0.1:2222/ctr_requests", function(data) {
+    	mr1 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/ctr_responses", function(data) {
+    	mr2 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/ctr_unique_files", function(data) {
+    	mr3 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/tc_info", function(data) {
+    	mr4 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/ctr_events", function(data) {
+    	mr5 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/execution_time", function(data) {
+    	mr6 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/ctr_unique_PNFs", function(data) {
+    	mr7 = data;
+    });
+
+    //DR
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_query", function(data) {
+    	dr1 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_query_published", function(data) {
+    	dr2 = data;
+    });    
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_query_not_published", function(data) {
+    	dr3 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_req", function(data) {
+    	dr4 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_req_redirect", function(data) {
+    	dr5 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/ctr_publish_req_published", function(data) {
+    	dr6 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/ctr_published_files", function(data) {
+    	dr7 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/tc_info", function(data) {
+    	dr8 = data;
+    });
+    getSimCtr("http://127.0.0.1:3906/execution_time", function(data) {
+    	dr9 = data;
+    });
+ 
+    //DR REDIR
+    getSimCtr("http://127.0.0.1:3908/ctr_publish_requests", function(data) {
+    	drr1 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/ctr_publish_responses", function(data) {
+    	drr2 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/tc_info", function(data) {
+    	drr3 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/execution_time", function(data) {
+    	drr4 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/time_lastpublish", function(data) {
+    	drr5 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/dwl_volume", function(data) {
+    	drr6 = data;
+    });
+
+  //Build web page
+	var str = "<!DOCTYPE html>" +
+          "<html>" +
+          "<head>" +
+            "<meta http-equiv=\"refresh\" content=\"5\">"+  //5 sec auto refresh
+            "<title>Simulator monitor</title>"+
+          "</head>" +
+          "<body>" +
+            "<h3>MR Simulator</h3>" +
+            "<font face=\"courier\">"+
+            "MR TC:........................................." + mr4 + "<br>" +
+            "Execution time (mm.ss):........................" + mr6 + "<br>" +
+            "Number of requests (polls):...................." + mr1 + "<br>" +
+            "Number of responses (polls):..................." + mr2 + "<br>" +
+            "Number of unique files in all responses:......." + mr3 + "<br>" +
+            "Number of events..............................." + mr5 + "<br>" +
+            "Number of unique PNFs.........................." + mr7 + "<br>" +
+            "</font>"+
+            "<h3>DR Simulator</h3>" +
+            "<font face=\"courier\">"+
+            "DR TC:........................................." + dr8 + "<br>" +
+            "Execution time (mm.ss):........................" + dr9 + "<br>" +
+            "Number of queries:............................." + dr1 + "<br>" +
+            "Number of query responses, file published:....." + dr2 + "<br>" +
+            "Number of query responses, file not published:." + dr3 + "<br>" +
+            "Number of requests:............................" + dr4 + "<br>" +
+            "Number of responses with redirect:............." + dr5 + "<br>" +
+            "Number of responses without redirect:.........." + dr6 + "<br>" +
+            "Number of published files:....................." + dr7 + "<br>" +
+            "</font>"+
+            "<h3>DR Redirect Simulator</h3>" +
+            "<font face=\"courier\">"+
+            "DR REDIR TC:..................................." + drr3 + "<br>" +
+            "Execution time (mm.ss):........................" + drr4 + "<br>" +
+            "Number of requests:............................" + drr1 + "<br>" +
+            "Number of responses:..........................." + drr2 + "<br>" +
+            "Downloaded volume (bytes):....................." + drr6 + "<br>" +
+            "Last publish (mm:ss):.........................." + drr5 + "<br>" +
+            "</font>"+
+           "</body>" +
+          "</html>";
+	res.send(str);
+})
+
+var httpServer = http.createServer(app);
+var httpPort=9999;
+httpServer.listen(httpPort);
+console.log("Simulator monitor listening (http) at "+httpPort);
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
index ef1b90a..cc5ce32 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
@@ -35,11 +35,27 @@
  then
  NUM_FTPFILES=200
 fi
+if [ -z "$NUM_PNFS" ]
+ then
+ NUM_PNFS=1
+fi
+if [ -z "$FILE_SIZE" ]
+ then
+ FILE_SIZE="ALL"
+fi
+if [ -z "$FTP_TYPE" ]
+ then
+ FTP_TYPE="ALL"
+fi
 
-
-docker cp setup-ftp-files-for-image.sh $SFTP_SIM:/tmp/
-docker exec -w /home/onap/ $SFTP_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES >/dev/null 2>&1
-
-docker cp setup-ftp-files-for-image.sh $FTPS_SIM:/tmp/setup-ftp-files-for-image.sh
-docker exec -w /srv $FTPS_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES >/dev/null 2>&1
-
+if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
+	echo "Creating files for SFTP server, may take time...."
+	docker cp setup-ftp-files-for-image.sh $SFTP_SIM:/tmp/
+	docker exec -w /home/onap/ $SFTP_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1
+fi
+if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then
+	echo "Creating files for FTPS server, may take time...."
+	docker cp setup-ftp-files-for-image.sh $FTPS_SIM:/tmp/setup-ftp-files-for-image.sh
+	docker exec -w /srv $FTPS_SIM /tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1
+fi
+echo "Done: All simulators started and configured"