Added support for Consul/CBS and multiple DFCs

Issue-ID: INT-1155
Change-Id: I3c1ed2f6072655c4396e406ddfd490d3786fe4d6
Signed-off-by: BjornMagnussonXA <bjorn.magnusson@est.tech>
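
The updated and added test scripts below share a common per-DFC-instance pattern: per-instance configuration is pushed to Consul/CBS, and each DFC container is started by index. A minimal sketch of that pattern, assembled only from the calls that appear in the scripts in this change (the JSON config paths are the ones already present under simulator-group/consul):

#!/bin/bash
# Source the common test case functions (as every FTCx.sh does)
. ../common/testcase_common.sh $1 $2

# Push the app and DMaaP configs for DFC instance 0 to Consul/CBS
consul_config_app   0 "../simulator-group/consul/c12_feed2_PM.json"
consul_config_dmaap 0 "../simulator-group/consul/dmaap_feed2.json"

# Start DFC instance 0; further instances are addressed as 1, 2, ...
start_dfc 0
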
diff --git a/test/mocks/datafilecollector-testharness/auto-test/.gitignore b/test/mocks/datafilecollector-testharness/auto-test/.gitignore
index 2a21cc0..83c64de 100644
--- a/test/mocks/datafilecollector-testharness/auto-test/.gitignore
+++ b/test/mocks/datafilecollector-testharness/auto-test/.gitignore
@@ -1,2 +1,2 @@
 logs
-.tmp_tcsuite_*
+.tmp_*
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
new file mode 100755
index 0000000..76afc8c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_FTPS.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using FTPS."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc1500"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="300"
+export NUM_PNFS="700"
+export FILE_SIZE="1MB"
+export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
+
+log_sim_settings
+
+start_simulators
+
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                    0 60
+dr_equal            ctr_published_files             0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc 0
+
+mr_equal            ctr_unique_files                70000 18000
+
+mr_print            stop
+
+dr_equal            ctr_published_files             70000 900
+
+sleep_wait          30
+
+dr_equal            ctr_published_files             70000
+
+mr_equal            ctr_events                      70700
+mr_equal            ctr_unique_files                70000
+mr_equal            ctr_unique_PNFs                 700
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
new file mode 100755
index 0000000..2722182
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/25h_backlog_1MB_SFTP.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="Simulating a 25h backlog of events for 700 PNF with decreasing number of missing files, then continues with 15 min events from all PNFs using SFTP"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc1500"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="300"
+export NUM_PNFS="700"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
+
+log_sim_settings
+
+start_simulators
+
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                    0 60
+dr_equal            ctr_published_files             0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc 0
+
+mr_equal            ctr_unique_files                70000 18000
+
+mr_print            stop
+
+dr_equal            ctr_published_files             70000 900
+
+sleep_wait          30
+
+dr_equal            ctr_published_files             70000
+
+mr_equal            ctr_events                      70700
+mr_equal            ctr_unique_files                70000
+mr_equal            ctr_unique_PNFs                 700
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/AllTestCasesSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/AllTestCasesSuite.sh
new file mode 100755
index 0000000..512e5e4
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/AllTestCasesSuite.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+TS_ONELINE_DESCR="All test cases suite (excluding 24 h max test and 72 stab test)"
+
+. ../common/testsuite_common.sh
+
+suite_setup
+
+############# TEST CASES #################
+
+run_tc FTC1.sh $1 $2
+run_tc FTC2.sh $1 $2
+run_tc FTC3.sh $1 $2
+run_tc FTC4.sh $1 $2
+run_tc FTC5.sh $1 $2
+run_tc FTC6.sh $1 $2
+
+run_tc FTC10.sh $1 $2
+run_tc FTC11.sh $1 $2
+run_tc FTC12.sh $1 $2
+run_tc FTC13.sh $1 $2
+run_tc FTC14.sh $1 $2
+run_tc FTC15.sh $1 $2
+
+run_tc FTC20.sh $1 $2
+run_tc FTC21.sh $1 $2
+
+run_tc FTC30.sh $1 $2
+run_tc FTC31.sh $1 $2
+run_tc FTC32.sh $1 $2
+run_tc FTC33.sh $1 $2
+
+run_tc FTC40.sh $1 $2
+
+run_tc FTC50.sh $1 $2
+
+run_tc FTC60.sh $1 $2
+run_tc FTC61.sh $1 $2
+
+run_tc FTC70.sh $1 $2
+run_tc FTC71.sh $1 $2
+
+run_tc FTC80.sh $1 $2
+run_tc FTC81.sh $1 $2
+
+run_tc FTC90.sh $1 $2
+
+run_tc FTC100.sh $1 $2
+
+run_tc FTC200.sh $1 $2
+run_tc FTC210.sh $1 $2
+run_tc FTC220.sh $1 $2
+
+
+
+
+##########################################
+
+suite_complete
diff --git a/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
new file mode 100755
index 0000000..462a7dd
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/AutoTestTest.sh
@@ -0,0 +1,467 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="Test script for auto test and simulator control"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc1000"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES:CTR_MEAS_FILES,OpenDcae-c13:CTR_MEAS_FILES,OpenDcae-c14:LOG_FILES,OpenDcae-c15:PM_MEAS_FILES:TEST_FILES,OpenDcae-c16:TEST_FILES:TEMP_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A,CTR_MEAS_FILES:B,LOG_FILES:C,TEST_FILES:D,TEMP_FILES:E"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A,2:B,3:C,4:D,5:E"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A,2:B,3:C,4:D,5:E"
+
+export NUM_FTPFILES="300"
+export NUM_PNFS="5"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A,B,C,D,E"
+export NUM_FTP_SERVERS=5
+
+
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0 "../simulator-group/consul/c12_feed1_PM_feed2_CTR.json"
+consul_config_dmaap 0 "../simulator-group/consul/dmaap_feed1_2_3_4_5.json"
+
+consul_config_app   1 "../simulator-group/consul/c13_feed2_CTR.json"
+consul_config_dmaap 1 "../simulator-group/consul/dmaap_feed1_2_3_4_5.json"
+
+consul_config_app   2 "../simulator-group/consul/c14_feed3_LOG.json"
+consul_config_dmaap 2 "../simulator-group/consul/dmaap_feed1_2_3_4_5.json"
+
+consul_config_app   3 "../simulator-group/consul/c15_feed1_PM_feed4_TEST.json"
+consul_config_dmaap 3 "../simulator-group/consul/dmaap_feed1_2_3_4_5.json"
+
+consul_config_app   2 "../simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json"
+consul_config_dmaap 4 "../simulator-group/consul/dmaap_feed1_2_3_4_5.json"
+
+
+mr_print			""
+mr_print			groups
+mr_print			changeids
+mr_print			fileprefixes
+mr_print			tc_info
+mr_print			status
+mr_print			stop
+mr_print			start
+
+mr_print			ctr_requests
+mr_print			groups/ctr_requests
+mr_print			ctr_requests/OpenDcae-c12
+mr_print			ctr_requests/OpenDcae-c13
+mr_print			ctr_requests/OpenDcae-c14
+mr_print			ctr_requests/OpenDcae-c15
+mr_print			ctr_requests/OpenDcae-c16
+
+mr_print			ctr_responses
+mr_print			groups/ctr_responses
+mr_print			ctr_responses/OpenDcae-c12
+mr_print			ctr_responses/OpenDcae-c13
+mr_print			ctr_responses/OpenDcae-c14
+mr_print			ctr_responses/OpenDcae-c15
+mr_print			ctr_responses/OpenDcae-c16
+
+mr_print			ctr_files
+mr_print			groups/ctr_files
+mr_print			ctr_files/OpenDcae-c12
+mr_print			ctr_files/OpenDcae-c13
+mr_print			ctr_files/OpenDcae-c14
+mr_print			ctr_files/OpenDcae-c15
+mr_print			ctr_files/OpenDcae-c16
+
+mr_print			ctr_unique_files
+mr_print			groups/ctr_unique_files
+mr_print			ctr_unique_files/OpenDcae-c12
+mr_print			ctr_unique_files/OpenDcae-c13
+mr_print			ctr_unique_files/OpenDcae-c14
+mr_print			ctr_unique_files/OpenDcae-c15
+mr_print			ctr_unique_files/OpenDcae-c16
+
+mr_print			ctr_events
+mr_print			groups/ctr_events
+mr_print			ctr_events/OpenDcae-c12
+mr_print			ctr_events/OpenDcae-c13
+mr_print			ctr_events/OpenDcae-c14
+mr_print			ctr_events/OpenDcae-c15
+mr_print			ctr_events/OpenDcae-c16
+
+mr_contain_str		groups						"OpenDcae-c12,OpenDcae-c13,OpenDcae-c14,OpenDcae-c15,OpenDcae-c16"
+mr_contain_str		changeids					"PM_MEAS_FILES:CTR_MEAS_FILES,CTR_MEAS_FILES,LOG_FILES,PM_MEAS_FILES:TEST_FILES,TEST_FILES:TEMP_FILES"
+mr_contain_str		fileprefixes				$MR_FILE_PREFIX_MAPPING
+mr_contain_str		tc_info						"TC#1000"
+mr_contain_str		status						"Started"
+mr_contain_str		stop						"Stopped"
+mr_contain_str		start						"Started"
+
+mr_equal			ctr_requests				0
+mr_contain_str		groups/ctr_requests			"0,0,0,0,0"
+mr_equal			ctr_requests/OpenDcae-c12	0
+mr_equal			ctr_requests/OpenDcae-c13	0
+mr_equal			ctr_requests/OpenDcae-c14	0
+mr_equal			ctr_requests/OpenDcae-c15	0
+mr_equal			ctr_requests/OpenDcae-c16	0
+
+mr_equal			ctr_responses				0
+mr_contain_str		groups/ctr_responses		"0,0,0,0,0"
+mr_equal			ctr_responses/OpenDcae-c12	0
+mr_equal			ctr_responses/OpenDcae-c13	0
+mr_equal			ctr_responses/OpenDcae-c14	0
+mr_equal			ctr_responses/OpenDcae-c15	0
+mr_equal			ctr_responses/OpenDcae-c16	0
+
+mr_equal			ctr_files					0
+mr_contain_str		groups/ctr_files			"0,0,0,0,0"
+mr_equal			ctr_files/OpenDcae-c12		0
+mr_equal			ctr_files/OpenDcae-c13		0
+mr_equal			ctr_files/OpenDcae-c14		0
+mr_equal			ctr_files/OpenDcae-c15		0
+mr_equal			ctr_files/OpenDcae-c16		0
+
+mr_equal			ctr_unique_files				0
+mr_contain_str		groups/ctr_unique_files			"0,0,0,0,0"
+mr_equal			ctr_unique_files/OpenDcae-c12	0
+mr_equal			ctr_unique_files/OpenDcae-c13	0
+mr_equal			ctr_unique_files/OpenDcae-c14	0
+mr_equal			ctr_unique_files/OpenDcae-c15	0
+mr_equal			ctr_unique_files/OpenDcae-c16	0
+
+mr_equal			ctr_events						0
+mr_contain_str		groups/ctr_events				"0,0,0,0,0"
+mr_equal			ctr_events/OpenDcae-c12			0
+mr_equal			ctr_events/OpenDcae-c13			0
+mr_equal			ctr_events/OpenDcae-c14			0
+mr_equal			ctr_events/OpenDcae-c15			0
+mr_equal			ctr_events/OpenDcae-c16			0
+
+
+dr_print			""
+dr_print 			tc_info
+dr_print 			execution_time
+dr_print 			feeds
+
+dr_print			ctr_publish_query
+dr_print			feeds/ctr_publish_query
+dr_print 			ctr_publish_query/1
+dr_print 			ctr_publish_query/2
+dr_print 			ctr_publish_query/3
+dr_print 			ctr_publish_query/4
+dr_print			ctr_publish_query/5
+
+dr_print			ctr_publish_query_published
+dr_print			feeds/ctr_publish_query_published
+dr_print			ctr_publish_query_published/1
+dr_print			ctr_publish_query_published/2
+dr_print			ctr_publish_query_published/3
+dr_print			ctr_publish_query_published/4
+dr_print			ctr_publish_query_published/5
+
+dr_print			ctr_publish_query_not_published
+dr_print			feeds/ctr_publish_query_not_published
+dr_print			ctr_publish_query_not_published/1
+dr_print			ctr_publish_query_not_published/2
+dr_print			ctr_publish_query_not_published/3
+dr_print			ctr_publish_query_not_published/4
+dr_print			ctr_publish_query_not_published/5
+
+dr_print			ctr_publish_req
+dr_print			feeds/ctr_publish_req
+dr_print			ctr_publish_req/1
+dr_print			ctr_publish_req/2
+dr_print			ctr_publish_req/3
+dr_print			ctr_publish_req/4
+dr_print			ctr_publish_req/5
+
+dr_print			ctr_publish_req_redirect
+dr_print			feeds/ctr_publish_req_redirect
+dr_print			ctr_publish_req_redirect/1
+dr_print			ctr_publish_req_redirect/2
+dr_print			ctr_publish_req_redirect/3
+dr_print			ctr_publish_req_redirect/4
+dr_print			ctr_publish_req_redirect/5
+
+dr_print			ctr_publish_req_published
+dr_print			feeds/ctr_publish_req_published
+dr_print			ctr_publish_req_published/1
+dr_print			ctr_publish_req_published/2
+dr_print			ctr_publish_req_published/3
+dr_print			ctr_publish_req_published/4
+dr_print			ctr_publish_req_published/5
+
+dr_print			ctr_published_files
+dr_print			feeds/ctr_published_files
+dr_print			ctr_published_files/1
+dr_print			ctr_published_files/2
+dr_print			ctr_published_files/3
+dr_print			ctr_published_files/4
+dr_print			ctr_published_files/5
+
+dr_print			ctr_double_publish
+dr_print			feeds/ctr_double_publish
+dr_print			ctr_double_publish/1
+dr_print			ctr_double_publish/2
+dr_print			ctr_double_publish/3
+dr_print			ctr_double_publish/4
+dr_print			ctr_double_publish/5
+
+dr_print			ctr_publish_query_bad_file_prefix
+dr_print			feeds/ctr_publish_query_bad_file_prefix
+dr_print			ctr_publish_query_bad_file_prefix/1
+dr_print			ctr_publish_query_bad_file_prefix/2
+dr_print			ctr_publish_query_bad_file_prefix/3
+dr_print			ctr_publish_query_bad_file_prefix/4
+dr_print			ctr_publish_query_bad_file_prefix/5
+
+dr_print			ctr_publish_req_bad_file_prefix
+dr_print			feeds/ctr_publish_req_bad_file_prefix
+dr_print			ctr_publish_req_bad_file_prefix/1
+dr_print			ctr_publish_req_bad_file_prefix/2
+dr_print			ctr_publish_req_bad_file_prefix/3
+dr_print			ctr_publish_req_bad_file_prefix/4
+dr_print			ctr_publish_req_bad_file_prefix/5
+
+
+
+
+
+
+dr_contain_str 		tc_info										"normal"
+dr_contain_str 		execution_time								"0:"
+dr_contain_str 		feeds										"1:A,2:B,3:C,4:D,5:E"
+
+dr_equal			ctr_publish_query							0
+dr_contain_str		feeds/ctr_publish_query						"0,0,0,0,0"
+dr_equal 			ctr_publish_query/1							0
+dr_equal 			ctr_publish_query/2							0
+dr_equal 			ctr_publish_query/3							0
+dr_equal 			ctr_publish_query/4							0
+dr_equal			ctr_publish_query/5							0
+
+dr_equal			ctr_publish_query_published					0
+dr_contain_str		feeds/ctr_publish_query_published			"0,0,0,0,0"
+dr_equal			ctr_publish_query_published/1				0
+dr_equal			ctr_publish_query_published/2				0
+dr_equal			ctr_publish_query_published/3				0
+dr_equal			ctr_publish_query_published/4				0
+dr_equal			ctr_publish_query_published/5				0
+
+dr_equal			ctr_publish_query_not_published				0
+dr_contain_str		feeds/ctr_publish_query_not_published		"0,0,0,0,0"
+dr_equal			ctr_publish_query_not_published/1			0
+dr_equal			ctr_publish_query_not_published/2			0
+dr_equal			ctr_publish_query_not_published/3			0
+dr_equal			ctr_publish_query_not_published/4			0
+dr_equal			ctr_publish_query_not_published/5			0
+
+dr_equal			ctr_publish_req								0
+dr_contain_str		feeds/ctr_publish_req						"0,0,0,0,0"
+dr_equal			ctr_publish_req/1							0
+dr_equal			ctr_publish_req/2							0
+dr_equal			ctr_publish_req/3							0
+dr_equal			ctr_publish_req/4							0
+dr_equal			ctr_publish_req/5							0
+
+dr_equal			ctr_publish_req_redirect					0
+dr_contain_str		feeds/ctr_publish_req_redirect				"0,0,0,0,0"
+dr_equal			ctr_publish_req_redirect/1					0
+dr_equal			ctr_publish_req_redirect/2					0
+dr_equal			ctr_publish_req_redirect/3					0
+dr_equal			ctr_publish_req_redirect/4					0
+dr_equal			ctr_publish_req_redirect/5					0
+
+dr_equal			ctr_publish_req_published					0
+dr_contain_str		feeds/ctr_publish_req_published				"0,0,0,0,0"
+dr_equal			ctr_publish_req_published/1					0
+dr_equal			ctr_publish_req_published/2					0
+dr_equal			ctr_publish_req_published/3					0
+dr_equal			ctr_publish_req_published/4					0
+dr_equal			ctr_publish_req_published/5					0
+
+dr_equal			ctr_published_files							0
+dr_contain_str		feeds/ctr_published_files					"0,0,0,0,0"
+dr_equal			ctr_published_files/1						0
+dr_equal			ctr_published_files/2						0
+dr_equal			ctr_published_files/3						0
+dr_equal			ctr_published_files/4						0
+dr_equal			ctr_published_files/5						0
+
+dr_equal			ctr_double_publish							0
+dr_contain_str		feeds/ctr_double_publish					"0,0,0,0,0"
+dr_equal			ctr_double_publish/1						0
+dr_equal			ctr_double_publish/2						0
+dr_equal			ctr_double_publish/3						0
+dr_equal			ctr_double_publish/4						0
+dr_equal			ctr_double_publish/5						0
+
+dr_equal			ctr_publish_query_bad_file_prefix			0
+dr_contain_str		feeds/ctr_publish_query_bad_file_prefix		"0,0,0,0,0"
+dr_equal			ctr_publish_query_bad_file_prefix/1			0
+dr_equal			ctr_publish_query_bad_file_prefix/2			0
+dr_equal			ctr_publish_query_bad_file_prefix/3			0
+dr_equal			ctr_publish_query_bad_file_prefix/4			0
+dr_equal			ctr_publish_query_bad_file_prefix/5			0
+
+dr_equal			ctr_publish_req_bad_file_prefix				0
+dr_contain_str		feeds/ctr_publish_req_bad_file_prefix		"0,0,0,0,0"
+dr_equal			ctr_publish_req_bad_file_prefix/1			0
+dr_equal			ctr_publish_req_bad_file_prefix/2			0
+dr_equal			ctr_publish_req_bad_file_prefix/3			0
+dr_equal			ctr_publish_req_bad_file_prefix/4			0
+dr_equal			ctr_publish_req_bad_file_prefix/5			0
+
+drr_print			""
+drr_print			tc_info
+drr_print			execution_time
+drr_print			feeds
+drr_print			speed
+
+drr_print			ctr_publish_requests
+drr_print			feeds/ctr_publish_requests
+drr_print			ctr_publish_requests/1
+drr_print			ctr_publish_requests/2
+drr_print			ctr_publish_requests/3
+drr_print			ctr_publish_requests/4
+drr_print			ctr_publish_requests/5
+
+drr_print			ctr_publish_requests_bad_file_prefix
+drr_print			feeds/ctr_publish_requests_bad_file_prefix
+drr_print			ctr_publish_requests_bad_file_prefix/1
+drr_print			ctr_publish_requests_bad_file_prefix/2
+drr_print			ctr_publish_requests_bad_file_prefix/3
+drr_print			ctr_publish_requests_bad_file_prefix/4
+drr_print			ctr_publish_requests_bad_file_prefix/5
+
+drr_print			ctr_publish_responses
+drr_print			feeds/ctr_publish_responses
+drr_print			ctr_publish_responses/1
+drr_print			ctr_publish_responses/2
+drr_print			ctr_publish_responses/3
+drr_print			ctr_publish_responses/4
+drr_print			ctr_publish_responses/5
+
+drr_print			time_lastpublish
+drr_print			feeds/time_lastpublish
+drr_print			time_lastpublish/1
+drr_print			time_lastpublish/2
+drr_print			time_lastpublish/3
+drr_print			time_lastpublish/4
+drr_print			time_lastpublish/5
+
+drr_print			dwl_volume
+drr_print			feeds/dwl_volume
+drr_print			dwl_volume/1
+drr_print			dwl_volume/2
+drr_print			dwl_volume/3
+drr_print			dwl_volume/4
+drr_print			dwl_volume/5
+
+
+drr_contain_str		tc_info										"normal"
+drr_contain_str		execution_time								"0:"
+drr_contain_str		feeds										"1:A,2:B,3:C,4:D,5:E"
+drr_equal			speed										0
+
+drr_equal			ctr_publish_requests						0
+drr_contain_str		feeds/ctr_publish_requests					"0,0,0,0,0"
+drr_equal			ctr_publish_requests/1						0
+drr_equal			ctr_publish_requests/2						0
+drr_equal			ctr_publish_requests/3						0
+drr_equal			ctr_publish_requests/4						0
+drr_equal			ctr_publish_requests/5						0
+
+drr_equal			ctr_publish_requests_bad_file_prefix		0
+drr_contain_str		feeds/ctr_publish_requests_bad_file_prefix	"0,0,0,0,0"
+drr_equal			ctr_publish_requests_bad_file_prefix/1		0
+drr_equal			ctr_publish_requests_bad_file_prefix/2		0
+drr_equal			ctr_publish_requests_bad_file_prefix/3		0
+drr_equal			ctr_publish_requests_bad_file_prefix/4		0
+drr_equal			ctr_publish_requests_bad_file_prefix/5		0
+
+drr_equal			ctr_publish_responses						0
+drr_contain_str		feeds/ctr_publish_responses					"0,0,0,0,0"
+drr_equal			ctr_publish_responses/1						0
+drr_equal			ctr_publish_responses/2						0
+drr_equal			ctr_publish_responses/3						0
+drr_equal			ctr_publish_responses/4						0
+drr_equal			ctr_publish_responses/5						0
+
+drr_contain_str		time_lastpublish							"--:--"
+drr_contain_str		feeds/time_lastpublish						"--:--,--:--,--:--,--:--,--:--"
+drr_contain_str		time_lastpublish/1							"--:--"
+drr_contain_str		time_lastpublish/2							"--:--"
+drr_contain_str		time_lastpublish/3							"--:--"
+drr_contain_str		time_lastpublish/4							"--:--"
+drr_contain_str		time_lastpublish/5							"--:--"
+
+drr_equal			dwl_volume									0
+drr_contain_str		feeds/dwl_volume							"0,0,0,0,0"
+drr_equal			dwl_volume/1								0
+drr_equal			dwl_volume/2								0
+drr_equal			dwl_volume/3								0
+drr_equal			dwl_volume/4								0
+drr_equal			dwl_volume/5								0
+
+
+
+
+
+
+
+
+
+
+dr_equal            ctr_published_files             0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc 0
+start_dfc 1
+start_dfc 2
+start_dfc 3
+start_dfc 4
+
+dr_equal            ctr_published_files             1 60
+
+sleep_wait          30
+
+dr_equal            ctr_published_files             1
+
+mr_greater          ctr_requests                    1
+
+mr_equal            ctr_events                      1
+mr_equal            ctr_unique_files                1
+mr_equal            ctr_unique_PNFs                 1
+
+dr_equal            ctr_publish_query               1
+dr_equal            ctr_publish_query_published     0
+dr_equal            ctr_publish_query_not_published 1
+dr_equal            ctr_publish_req                 1
+dr_equal            ctr_publish_req_redirect        1
+dr_equal            ctr_publish_req_published       0
+dr_equal            ctr_published_files             1
+dr_equal            ctr_double_publish              0
+
+drr_equal           ctr_publish_requests            1
+drr_equal           ctr_publish_responses           1
+
+drr_equal           dwl_volume                      1000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh b/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh
index 219e9c5..ccdcdf1 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/DFC_FileRetentionSuite.sh
@@ -16,4 +16,3 @@
 ##########################################
 
 suite_complete
-
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
index b6685a9..fe6d453 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC1.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
index 65c585a..918906f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC10.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc510"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="5"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             3500 900
+dr_equal            ctr_published_files                  3500 900
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             3500
+dr_equal            ctr_published_files                  3500
 
-mr_greater          ctr_requests                    5
+mr_greater          ctr_requests                         5
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                3500
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               3500
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3500
-dr_equal            ctr_publish_req                 3500
-dr_equal            ctr_publish_req_redirect        3500
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3500
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3500
-drr_equal           ctr_publish_responses           3500
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
 
-drr_equal           dwl_volume                      3500000000
+drr_equal           dwl_volume                           3500000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
new file mode 100755
index 0000000..77477a8
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC100.sh
@@ -0,0 +1,81 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="100 event with 1 1MB file in each evewnt from one PNF in one event using SFTP with feed reconfigure"
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc110"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="200"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+dr_contain_str      feeds "2:A"
+drr_contain_str     feeds "2:A"
+
+start_dfc           0
+
+dr_equal            ctr_published_files                  50 900
+
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A"
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed1_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1.json"
+
+kill_dr
+kill_drr
+
+log_sim_settings
+
+start_simulators
+
+mr_equal            ctr_events                           100 900
+mr_equal            ctr_unique_files                     100
+mr_equal            ctr_unique_PNFs                      1
+
+dr_greater          ctr_published_files                  1
+
+dr_contain_str      feeds "1:A"
+drr_contain_str     feeds "1:A"
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
index 0d91ec7..d6560a3 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC11.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc610"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="5"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             3500 900
+dr_equal            ctr_published_files                  3500 900
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             3500
+dr_equal            ctr_published_files                  3500
 
-mr_greater          ctr_requests                    5
+mr_greater          ctr_requests                         5
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                3500
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               3500
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3500
-dr_equal            ctr_publish_req                 3500
-dr_equal            ctr_publish_req_redirect        3500
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3500
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3500
-drr_equal           ctr_publish_responses           3500
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
 
-drr_equal           dwl_volume                      3500000000
+drr_equal           dwl_volume                           3500000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
index d721fe1..adafb2f 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC12.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc511"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="5"
 export NUM_PNFS="700"
 export FILE_SIZE="1KB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             3500 900
+dr_equal            ctr_published_files                  3500 900
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             3500
+dr_equal            ctr_published_files                  3500
 
-mr_greater          ctr_requests                    5
+mr_greater          ctr_requests                         5
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                3500
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               3500
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3500
-dr_equal            ctr_publish_req                 3500
-dr_equal            ctr_publish_req_redirect        3500
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3500
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3500
-drr_equal           ctr_publish_responses           3500
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
 
-drr_equal           dwl_volume                      3500000
+drr_equal           dwl_volume                           3500000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
index 5cf2112..12e9ddc 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC13.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc611"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="5"
 export NUM_PNFS="700"
 export FILE_SIZE="1KB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             3500 900
+dr_equal            ctr_published_files                  3500 900
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             3500
+dr_equal            ctr_published_files                  3500
 
-mr_greater          ctr_requests                    5
+mr_greater          ctr_requests                         5
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                3500
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               3500
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3500
-dr_equal            ctr_publish_req                 3500
-dr_equal            ctr_publish_req_redirect        3500
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3500
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3500
-drr_equal           ctr_publish_responses           3500
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
 
-drr_equal           dwl_volume                      3500000
+drr_equal           dwl_volume                           3500000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
new file mode 100755
index 0000000..27d51f6
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC14.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using SFTP, from poll to publish."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc550"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="5"
+export NUM_PNFS="700"
+export FILE_SIZE="50MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc           0
+
+dr_equal            ctr_published_files                  3500 3600
+
+sleep_wait          30
+
+dr_equal            ctr_published_files                  3500
+
+mr_greater          ctr_requests                         5
+
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
+
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
+
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
+
+drr_equal           dwl_volume                           175000000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
new file mode 100755
index 0000000..060f3c3
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC15.sh
@@ -0,0 +1,79 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="3500 50MB files from 700 PNFs in 3500 events in 5 polls using FTPS, from poll to publish."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc650"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="5"
+export NUM_PNFS="700"
+export FILE_SIZE="50MB"
+export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc           0
+
+dr_equal            ctr_published_files                  3500 3600
+
+sleep_wait          30
+
+dr_equal            ctr_published_files                  3500
+
+mr_greater          ctr_requests                         5
+
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
+
+dr_equal            ctr_publish_query                    3500
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3500
+dr_equal            ctr_publish_req                      3500
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3500
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3500
+dr_equal            ctr_double_publish                   0
+
+drr_equal           ctr_publish_requests                 3500
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3500
+
+drr_equal           dwl_volume                           175000000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
index 45ecb6f..e46c0f2 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC2.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc101"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="5MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      5000000
+drr_equal           dwl_volume                           5000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
index 88d9e57..a61b1a4 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC20.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc710"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="105"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             72800 18000
+dr_equal            ctr_published_files                  72800 18000
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             72800
+dr_equal            ctr_published_files                  72800
 
-mr_greater          ctr_requests                    100
+mr_greater          ctr_requests                         100
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                72800
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     72800
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               72800
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 72800
-dr_equal            ctr_publish_req                 72800
-dr_equal            ctr_publish_req_redirect        72800
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             72800
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    72800
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      72800
+dr_equal            ctr_publish_req                      72800
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             72800
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  72800
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            72800
-drr_equal           ctr_publish_responses           72800
+drr_equal           ctr_publish_requests                 72800
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                72800
 
-drr_equal           dwl_volume                      72800000000
+drr_equal           dwl_volume                           72800000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
index 7cb07de..8060ddf 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC200.sh
@@ -8,86 +8,101 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export BC_TC=""
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
-dfc_contain_str     heartbeat                       "I'm living!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     heartbeat                       "I'm living"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     start                           "Datafile Service is still running!"
-dfc_contain_str     heartbeat                       "I'm living"
+dfc_contain_str     0                                    heartbeat    "I'm living!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    heartbeat    "I'm living"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    start        "Datafile Service is still running!"
+dfc_contain_str     0                                    heartbeat    "I'm living"
 
 
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
-dfc_contain_str     start                           "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
 
 
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
index a3c0514..fb18d76 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC21.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc810"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="105"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             72800 18000
+dr_equal            ctr_published_files                  72800 18000
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             72800
+dr_equal            ctr_published_files                  72800
 
-mr_greater          ctr_requests                    100
+mr_greater          ctr_requests                         100
 
-mr_equal            ctr_events                      3500
-mr_equal            ctr_unique_files                72800
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     72800
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               72800
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 72800
-dr_equal            ctr_publish_req                 72800
-dr_equal            ctr_publish_req_redirect        72800
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             72800
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    72800
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      72800
+dr_equal            ctr_publish_req                      72800
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             72800
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  72800
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            72800
-drr_equal           ctr_publish_responses           72800
+drr_equal           ctr_publish_requests                 72800
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                72800
 
-drr_equal           dwl_volume                      72800000000
+drr_equal           dwl_volume                           72800000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
index f7e9bf5..46fff9d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC210.sh
@@ -8,71 +8,85 @@
 
 clean_containers
 
+export MR_TC="--tc1300"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
 export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
 export DR_REDIR_TC="--tc normal"
-export MR_TC="--tc113"
-export BC_TC=""
-export NUM_FTPFILES="199"
-export NUM_PNFS="1"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="200"
+export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-mr_greater          ctr_events                      0 120
-dr_print            ctr_published_files
+mr_equal            ctr_events                           35 120
 
+dfc_contain_str     0                                    heartbeat    "I'm living!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
 
-dfc_contain_str     heartbeat                       "I'm living!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
+dr_equal            ctr_published_files                  3500 900
 
 sleep_wait          120
 
-dfc_contain_str     start                           "Datafile Service has been started!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
 
-dr_greater          ctr_published_files             100 60
-dr_less             ctr_published_files             199
-dr_print            ctr_published_files
+mr_equal            ctr_events                           70 120
 
-dfc_contain_str     heartbeat                       "I'm living!"
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    heartbeat    "I'm living!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
+
+dr_equal            ctr_published_files                  7000 900
 
 sleep_wait          120
 
-dfc_contain_str     start                           "Datafile Service has been started!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
 
-dr_equal            ctr_published_files             199 60
+dr_equal            ctr_published_files                  7000
 
 
-mr_equal            ctr_events                      100
-mr_equal            ctr_unique_files                199
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           70
+mr_equal            ctr_unique_files                     7000
+mr_equal            ctr_unique_PNFs                      70
 
-dr_equal            ctr_publish_query               199
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 199
-dr_equal            ctr_publish_req                 199
-dr_equal            ctr_publish_req_redirect        199
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             199
+dr_equal            ctr_publish_query                    7000
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      7000
+dr_equal            ctr_publish_req                      7000
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             7000
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  7000
 
-drr_equal           ctr_publish_requests            199
-drr_equal           ctr_publish_responses           199
+drr_equal           ctr_publish_requests                 7000
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                7000
 
-drr_equal           dwl_volume                      199000000
+drr_equal           dwl_volume                           7000000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
index a652f85..08222d2 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC220.sh
@@ -8,60 +8,74 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 30
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 30
 
 kill_mr
 
-start_dfc
+start_dfc           0
 
 sleep_wait          30
 
-dfc_contain_str     stopDatafile                    "Datafile Service has already been stopped!"
+dfc_contain_str     0                                    stopDatafile "Datafile Service has already been stopped!"
 
 start_simulators
 
 sleep_wait          120
 
-mr_less             ctr_requests                    2
+mr_equal            ctr_requests                         0
 
-dfc_contain_str     start                           "Datafile Service has been started!"
+dfc_contain_str     0                                    start        "Datafile Service has been started!"
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
-mr_greater          ctr_requests                    0
+mr_greater          ctr_requests                         0
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
index afa1f2a..06baaef 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC3.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc102"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="50MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      50000000
+drr_equal           dwl_volume                           50000000
 
-check_dfc_log
+check_dfc_logs
 
 
 #### TEST COMPLETE ####
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
index 0fb3b05..e29d948 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC30.sh
@@ -8,84 +8,97 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 store_logs          PART1
 
+mr_greater          ctr_requests                         4 120
 
 kill_mr
 start_simulators
 
-mr_equal            ctr_events                      0 60
-mr_equal            ctr_unique_files                0
-mr_equal            ctr_unique_PNFs                 0
+mr_equal            ctr_requests                         4 120
 
-mr_equal            ctr_events                      1 60
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
index 34cf8ae..ad71d30 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC31.sh
@@ -8,66 +8,80 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 store_logs          PART1
 
 kill_mr
-kill_dfc
+kill_dfc            0
 start_simulators
 
 mr_equal            ctr_events                      0 60
 mr_equal            ctr_unique_files                0
 mr_equal            ctr_unique_PNFs                 0
 
-start_dfc
+start_dfc           0
 
 sleep_wait          30
 
@@ -89,7 +103,7 @@
 
 drr_equal           dwl_volume                      1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
index 48257a8..3d60835 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC32.sh
@@ -8,54 +8,68 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc122"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="200"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-mr_equal            ctr_events                      100 1800
+mr_equal            ctr_events                           100 1800
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
 
-mr_equal            ctr_events                      100
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           100
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
index a45ce10..1b1ae45 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC33.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files overSFTP). 1MB, 5MB and 50MB using first SFTP and thenSFTP with restart of MR between each file."
+TC_ONELINE_DESCR="DFC file retention (files with SFTP and then same files over FTPS). 1MB, 5MB and 50MB using first SFTP and then FTPS with restart of MR between each file."
 
 . ../common/testcase_common.sh $1 $2
 
@@ -8,28 +8,39 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="ALL"
 export FTP_TYPE="ALL"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 
-start_dfc
+start_dfc           0
 
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#100"
-dr_equal            ctr_published_files             1 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#100"
+dr_equal            ctr_published_files                  1 30
 
 
 kill_mr
@@ -38,9 +49,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#101"
-dr_equal            ctr_published_files             2 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#101"
+dr_equal            ctr_published_files                  2 30
 
 kill_mr
 export MR_TC="--tc102"
@@ -48,9 +59,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#102"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#102"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc200"
@@ -58,9 +69,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#200"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#200"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc201"
@@ -68,9 +79,9 @@
 
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#201"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#201"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc202"
@@ -78,26 +89,29 @@
 
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#202"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#202"
+dr_equal            ctr_published_files                  3 30
 
 
-dr_equal            ctr_publish_query               3
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3
-dr_equal            ctr_publish_req                 3
-dr_equal            ctr_publish_req_redirect        3
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3
+dr_equal            ctr_publish_req                      3
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3
-drr_equal           ctr_publish_responses           3
+drr_equal           ctr_publish_requests                 3
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3
 
-drr_equal           dwl_volume                      56000000
+drr_equal           dwl_volume                           56000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
index a29b6b1..bb3d2ba 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC4.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc200"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed1_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      1000000
+drr_equal           dwl_volume                           1000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
index 074f290..25e68e7 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC40.sh
@@ -8,28 +8,39 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc100"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1"
 export NUM_PNFS="1"
 export FILE_SIZE="ALL"
 export FTP_TYPE="ALL"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 
-start_dfc
+start_dfc           0
 
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#100"
-dr_equal            ctr_published_files             1 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#100"
+dr_equal            ctr_published_files                  1 30
 
 
 kill_mr
@@ -38,9 +49,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#101"
-dr_equal            ctr_published_files             2 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#101"
+dr_equal            ctr_published_files                  2 30
 
 kill_mr
 export MR_TC="--tc102"
@@ -48,9 +59,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#102"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#102"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc200"
@@ -58,9 +69,9 @@
 start_simulators
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#200"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#200"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc201"
@@ -68,9 +79,9 @@
 
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#201"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#201"
+dr_equal            ctr_published_files                  3 30
 
 kill_mr
 export MR_TC="--tc202"
@@ -78,26 +89,29 @@
 
 
 mr_print            tc_info
-mr_equal            ctr_events                      1 60
-mr_contain_str      tc_info                         "TC#202"
-dr_equal            ctr_published_files             3 30
+mr_equal            ctr_events                           1 60
+mr_contain_str      tc_info                              "TC#202"
+dr_equal            ctr_published_files                  3 30
 
 
-dr_equal            ctr_publish_query               3
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 3
-dr_equal            ctr_publish_req                 3
-dr_equal            ctr_publish_req_redirect        3
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             3
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    3
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      3
+dr_equal            ctr_publish_req                      3
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             3
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  3
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            3
-drr_equal           ctr_publish_responses           3
+drr_equal           ctr_publish_requests                 3
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                3
 
-drr_equal           dwl_volume                      56000000
+drr_equal           dwl_volume                           56000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
index f62b85a..f599621 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC5.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc201"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="5MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed1_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      5000000
+drr_equal           dwl_volume                           5000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
index 0eb9722..7e6288b 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC50.sh
@@ -8,53 +8,67 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc121"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="199"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             179 5000
+dr_equal            ctr_published_files                  179 5000
 
 sleep_wait          600
 
 
-dr_equal            ctr_published_files             179
+dr_equal            ctr_published_files                  179
 
-mr_equal            ctr_events                      100
-mr_equal            ctr_unique_files                179
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           100
+mr_equal            ctr_unique_files                     179
+mr_equal            ctr_unique_PNFs                      1
 
-dr_greater          ctr_publish_query               179
-dr_equal            ctr_publish_query_published     0
-dr_greater          ctr_publish_query_not_published 179
-dr_equal            ctr_publish_req                 179
-dr_equal            ctr_publish_req_redirect        179
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             179
+dr_greater          ctr_publish_query                    179
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_greater          ctr_publish_query_not_published      179
+dr_equal            ctr_publish_req                      179
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             179
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  179
 
-drr_equal           ctr_publish_requests            179
-drr_equal           ctr_publish_responses           179
+drr_equal           ctr_publish_requests                 179
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                179
 
-drr_equal           dwl_volume                      179000000
+drr_equal           dwl_volume                           179000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
index 036225c..10de5c7 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC6.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc202"
-export BC_TC=""
-export NUM_FTPFILES="1"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A"
+
+export NUM_FTPFILES="10"
 export NUM_PNFS="1"
 export FILE_SIZE="50MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed1_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             1 60
+dr_equal            ctr_published_files                  1 60
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1
+dr_equal            ctr_published_files                  1
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      1
-mr_equal            ctr_unique_files                1
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           1
+mr_equal            ctr_unique_files                     1
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               1
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1
-dr_equal            ctr_publish_req                 1
-dr_equal            ctr_publish_req_redirect        1
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1
+dr_equal            ctr_publish_req                      1
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1
-drr_equal           ctr_publish_responses           1
+drr_equal           ctr_publish_requests                 1
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1
 
-drr_equal           dwl_volume                      50000000
+drr_equal           dwl_volume                           50000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
index f68b9b0..8be18cb 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC60.sh
@@ -8,61 +8,75 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc500"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="2"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_greater          ctr_published_files             100 200
+dr_greater          ctr_published_files                  100 200
 
-kill_sftp
-sleep_wait          10       #Server will be gone longer due to long startup time of ftp (ftp file creatation)
-start_simulators
+stop_sftp           0
+sleep_wait          30
+start_sftp          0
 
-dr_equal            ctr_published_files             1400 400
+dr_equal            ctr_published_files                  1400 400
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1400
+dr_equal            ctr_published_files                  1400
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      700
-mr_equal            ctr_unique_files                1400
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           700
+mr_equal            ctr_unique_files                     1400
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               1400
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1400
-dr_equal            ctr_publish_req                 1400
-dr_equal            ctr_publish_req_redirect        1400
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1400
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1400
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1400
+dr_equal            ctr_publish_req                      1400
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1400
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1400
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1400
-drr_equal           ctr_publish_responses           1400
+drr_equal           ctr_publish_requests                 1400
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1400
 
-drr_equal           dwl_volume                      1400000000
+drr_equal           dwl_volume                          1400000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
index ef48047..861e035 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC61.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-TC_ONELINE_DESCR="Kill FTPs sever for 10+ sec during download"
+TC_ONELINE_DESCR="Kill FTPS sever for 10+ sec during download"
 
 . ../common/testcase_common.sh $1 $2
 
@@ -8,61 +8,75 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc600"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="2"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_greater          ctr_published_files             100 200
+dr_greater          ctr_published_files                  100 200
 
-kill_ftps
-sleep_wait          10       #Server will be gone longer due to long startup time of ftp (ftp file creatation)
-start_simulators
+stop_ftps           0
+sleep_wait          30
+start_ftps          0
 
-dr_equal            ctr_published_files             1400 400
+dr_equal            ctr_published_files                  1400 400
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             1400
+dr_equal            ctr_published_files                  1400
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      700
-mr_equal            ctr_unique_files                1400
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_events                           700
+mr_equal            ctr_unique_files                     1400
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               1400
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 1400
-dr_equal            ctr_publish_req                 1400
-dr_equal            ctr_publish_req_redirect        1400
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             1400
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    1400
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      1400
+dr_equal            ctr_publish_req                      1400
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             1400
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  1400
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            1400
-drr_equal           ctr_publish_responses           1400
+drr_equal           ctr_publish_requests                 1400
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                1400
 
-drr_equal           dwl_volume                      1400000000
+drr_equal           dwl_volume                           1400000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
index e464d88..0c21b3e 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC70.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc 10p_error_response"
-export DR_REDIR_TC="--tc 10p_error_response"
 export MR_TC="--tc113"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc 10p_error_response"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc 10p_error_response"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="199"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             199 300
+dr_equal            ctr_published_files                  199 300
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             199
+dr_equal            ctr_published_files                  199
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      100
-mr_equal            ctr_unique_files                199
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           100
+mr_equal            ctr_unique_files                     199
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               199
-dr_equal            ctr_publish_query_published     0
-dr_less             ctr_publish_query_not_published 199
-dr_greater          ctr_publish_req                 199
-dr_greater          ctr_publish_req_redirect        199
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             199
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    199
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_less             ctr_publish_query_not_published      199
+dr_greater          ctr_publish_req                      199
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_greater          ctr_publish_req_redirect             199
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  199
+dr_equal            ctr_double_publish                   0
 
-drr_greater         ctr_publish_requests            199
-drr_equal           ctr_publish_responses           199
+drr_greater         ctr_publish_requests                 199
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_greater         ctr_publish_responses                199
 
-drr_equal           dwl_volume                      199000000
+drr_equal           dwl_volume                           199000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
index 5f4238e..c69958d 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC71.sh
@@ -8,55 +8,69 @@
 
 clean_containers
 
-export DR_TC="--tc all_delay_10s"
-export DR_REDIR_TC="--tc all_delay_10s"
 export MR_TC="--tc113"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc all_delay_10s"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc all_delay_10s"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="199"
 export NUM_PNFS="1"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=1
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-dr_equal            ctr_published_files             199 300
+dr_equal            ctr_published_files                  199 300
 
 sleep_wait          30
 
-dr_equal            ctr_published_files             199
+dr_equal            ctr_published_files                  199
 
-mr_greater          ctr_requests                    1
+mr_greater          ctr_requests                         1
 
-mr_equal            ctr_events                      100
-mr_equal            ctr_unique_files                199
-mr_equal            ctr_unique_PNFs                 1
+mr_equal            ctr_events                           100
+mr_equal            ctr_unique_files                     199
+mr_equal            ctr_unique_PNFs                      1
 
-dr_equal            ctr_publish_query               199
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published 199
-dr_equal            ctr_publish_req                 199
-dr_equal            ctr_publish_req_redirect        199
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             199
-dr_equal            ctr_double_publish              0
+dr_equal            ctr_publish_query                    199
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      199
+dr_equal            ctr_publish_req                      199
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             199
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  199
+dr_equal            ctr_double_publish                   0
 
-drr_equal         ctr_publish_requests            199
-drr_equal           ctr_publish_responses           199
+drr_equal           ctr_publish_requests                 199
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                199
 
-drr_equal           dwl_volume                      199000000
+drr_equal           dwl_volume                           199000000
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
new file mode 100755
index 0000000..134c87c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC80.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="25 events for each 4 feeds with 100 1MB files per event from one PNF using SFTP, 1 change id with no feed, 1 change with one feed and two change id to one feed."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc111"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES:CTR_MEAS_FILES:LOG_FILES:TEMP_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A,CTR_MEAS_FILES:B,LOG_FILES:C,TEMP_FILES:D"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:B,3:C:D"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:B,3:C:D"
+
+export NUM_FTPFILES="200"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A,B,C,D"
+export NUM_FTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1_2_3_4.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc           0
+
+dr_equal            ctr_published_files                  588 2000
+dr_equal            ctr_published_files/2                196
+dr_equal            ctr_published_files/3                392
+sleep_wait          30
+
+dr_equal            ctr_published_files                  588
+dr_equal            ctr_published_files/2                196
+dr_equal            ctr_published_files/3                392
+
+mr_greater          ctr_requests                         100
+
+mr_equal            ctr_events                           100
+mr_equal            ctr_unique_files                     784
+mr_equal            ctr_unique_PNFs                      1
+
+dr_equal            ctr_publish_query                    588
+dr_equal            ctr_publish_query/2                  196
+dr_equal            ctr_publish_query/3                  392
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      588
+dr_equal            ctr_publish_query_not_published/2    196
+dr_equal            ctr_publish_query_not_published/3    392
+dr_equal            ctr_publish_req                      588
+dr_equal            ctr_publish_req/2                    196
+dr_equal            ctr_publish_req/3                    392
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             588
+dr_equal            ctr_publish_req_redirect/2           196
+dr_equal            ctr_publish_req_redirect/3           392
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  588
+dr_equal            ctr_published_files/2                196
+dr_equal            ctr_published_files/3                392
+dr_equal            ctr_double_publish                   0
+
+drr_equal           ctr_publish_requests                 588
+drr_equal           ctr_publish_requests/2               196
+drr_equal           ctr_publish_requests/3               392
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                588
+drr_equal           ctr_publish_responses/2              196
+drr_equal           ctr_publish_responses/3              392
+
+drr_equal           dwl_volume                           588000000
+drr_equal           dwl_volume/2                         196000000
+drr_equal           dwl_volume/3                         392000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+print_all
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
new file mode 100755
index 0000000..442bdce
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC81.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="3500 1MB files from 700 PNFs in 3500 events in 5 polls using SFTP, 3 polls with change ids mapped to feeds and 2 polls not."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc510"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES:CTR_MEAS_FILES:LOG_FILES:TEMP_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A,CTR_MEAS_FILES:B,LOG_FILES:C,TEMP_FILES:D"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="3:A:B"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="3:A:B"
+
+export NUM_FTPFILES="30"
+export NUM_PNFS="700"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A,B,C,D"
+export NUM_FTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed3_PM_CTR.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed3.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc           0
+
+dr_equal            ctr_published_files                  2100 900
+dr_equal            ctr_published_files/3                2100
+sleep_wait          30
+
+dr_equal            ctr_published_files                  2100
+dr_equal            ctr_published_files/3                2100
+
+mr_greater          ctr_requests                         5
+
+mr_equal            ctr_events                           3500
+mr_equal            ctr_unique_files                     3500
+mr_equal            ctr_unique_PNFs                      700
+
+dr_equal            ctr_publish_query                    2100
+dr_equal            ctr_publish_query/3                  2100
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      2100
+dr_equal            ctr_publish_query_not_published/3    2100
+dr_equal            ctr_publish_req                      2100
+dr_equal            ctr_publish_req/3                    2100
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             2100
+dr_equal            ctr_publish_req_redirect/3           2100
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  2100
+dr_equal            ctr_published_files/3                2100
+dr_equal            ctr_double_publish                   0
+
+drr_equal           ctr_publish_requests                 2100
+drr_equal           ctr_publish_requests/3               2100
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                2100
+drr_equal           ctr_publish_responses/3              2100
+
+drr_equal           dwl_volume                           2100000000
+drr_equal           dwl_volume/3                         2100000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
new file mode 100755
index 0000000..780fbf7
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/auto-test/FTC90.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+
+TC_ONELINE_DESCR="100 events (1 evt per poll) per DFC with 100 1MB files from one PNF using two DFC (different consumer groups) each publishing using unique change ids/feeds over SFTP."
+
+. ../common/testcase_common.sh $1 $2
+
+#### TEST BEGIN ####
+
+clean_containers
+
+export MR_TC="--tc111"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES:CTR_MEAS_FILES,OpenDcae-c13:PM_MEAS_FILES:CTR_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A,CTR_MEAS_FILES:B"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="1:A,2:B"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="1:A,2:B"
+
+export NUM_FTPFILES="1000"
+export NUM_PNFS="1"
+export FILE_SIZE="1MB"
+export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A,B"
+export NUM_FTP_SERVERS=1
+
+log_sim_settings
+
+start_simulators
+
+consul_config_app   0                                    "../simulator-group/consul/c12_feed1_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed1.json"
+consul_config_app   1                                    "../simulator-group/consul/c13_feed2_CTR.json"
+consul_config_dmaap 1                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
+
+mr_print            tc_info
+dr_print            tc_info
+drr_print           tc_info
+
+start_dfc           0
+start_dfc           1
+
+dr_equal            ctr_published_files                  396 2000
+
+sleep_wait          30
+
+dr_equal            ctr_published_files                  396
+
+mr_greater          ctr_requests                         200
+
+mr_equal            ctr_events                           200
+mr_equal            ctr_unique_files                     792
+mr_equal            ctr_unique_PNFs                      2
+mr_equal            ctr_unique_PNFs/OpenDcae-c12         1
+mr_equal            ctr_unique_PNFs/OpenDcae-c13         1
+
+dr_equal            ctr_publish_query                    396
+dr_equal            ctr_publish_query/1                  198
+dr_equal            ctr_publish_query/2                  198
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      396
+dr_equal            ctr_publish_query_not_published/1    198
+dr_equal            ctr_publish_query_not_published/2    198
+dr_equal            ctr_publish_req                      396
+dr_equal            ctr_publish_req/1                    198
+dr_equal            ctr_publish_req/2                    198
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             396
+dr_equal            ctr_publish_req_redirect/1           198
+dr_equal            ctr_publish_req_redirect/2           198
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  396
+dr_equal            ctr_published_files/1                198
+dr_equal            ctr_published_files/2                198
+dr_equal            ctr_double_publish                   0
+
+drr_equal           ctr_publish_requests                 396
+drr_equal           ctr_publish_requests/1               198
+drr_equal           ctr_publish_requests/2               198
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                396
+drr_equal           ctr_publish_responses/1              198
+drr_equal           ctr_publish_responses/2              198
+
+drr_equal           dwl_volume                           396000000
+drr_equal           dwl_volume/1                         198000000
+drr_equal           dwl_volume/2                         198000000
+
+check_dfc_logs
+
+#### TEST COMPLETE ####
+
+store_logs          END
+
+print_result
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
index cf4dbdc..4c3fca0 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_FTPS_24h.sh
@@ -8,30 +8,41 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc2200"
-export BC_TC=""
-export NUM_FTPFILES="3500"
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
+export NUM_FTPFILES="4000"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
 # 24h MR sim execution time since first poll, should be reached withing 24h +1h margin
-mr_contain_str      exe_time_first_poll             1440: $((60*60*24+3600))
+mr_contain_str      exe_time_first_poll                  1440: $((60*60*24+3600))
 # stop event delivery
 mr_print            stop
 # wait for MR sim values to stabilize
@@ -46,43 +57,47 @@
 TARGET_EVENTS=$((TARGET_FILES/100))
 TARGET_VOLUME=$((TARGET_FILES*1000000))
 
-#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased.
-MAX_FILES=$((NUM_FTPFILE*NUM_PNFS))
+#Maximum number of configured FTP files, if DFC reaches this then NUM_FTPFILES needs to be increased.
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
 
 #Wait remaining time upto 15 min for DFC to download all consumed events
 sleep_wait          870
 
 #At least the requiment number of file shall be published
-dr_greater          ctr_published_files             $TARGET_REQUIRMENT_FILES
+dr_greater          ctr_published_files                  $TARGET_REQUIRMENT_FILES
 
 #If greater then MAX_FILES then more FTP files need to be configured
-mr_less             ctr_ctr_unique_files            MAX_FILES
+mr_less             ctr_unique_files                     $MAX_FILES
 
 
 #Test that all files from polled events has been downloaded etc
 
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_published_files                  $TARGET_FILES
 
-mr_equal            ctr_events                      $TARGET_EVENTS
+mr_equal            ctr_events                           $TARGET_EVENTS
 
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               $TARGET_FILES
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published $TARGET_FILES
-dr_equal            ctr_publish_req                 $TARGET_FILES
-dr_equal            ctr_publish_req_redirect        $TARGET_FILES
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_publish_query                    $TARGET_FILES
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      $TARGET_FILES
+dr_equal            ctr_publish_req                      $TARGET_FILES
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             $TARGET_FILES
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  $TARGET_FILES
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            $TARGET_FILES
-drr_equal           ctr_publish_responses           $TARGET_FILES
+drr_equal           ctr_publish_requests                 $TARGET_FILES
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                $TARGET_FILES
 
-drr_equal           dwl_volume                      $TARGET_VOLUME
+drr_equal           dwl_volume                           $TARGET_VOLUME
 
 print_all
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
index a50fc16..feae9d5 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/MaxFiles1MB_SFTP_24h.sh
@@ -8,30 +8,42 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc1200"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1500"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
 # 24h MR sim execution time since first poll, should be reached withing 24h +1h margion
-mr_contain_str      exe_time_first_poll             1440: $((60*60*24+3600))
+mr_contain_str      exe_time_first_poll                  1440: $((60*60*24+3600))
+
 # stop event delivery
 mr_print            stop
 # wait for MR sim values to stabilize
@@ -46,43 +58,47 @@
 TARGET_EVENTS=$((TARGET_FILES/100))
 TARGET_VOLUME=$((TARGET_FILES*1000000))
 
-#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased.
-MAX_FILES=$((NUM_FTPFILE*NUM_PNFS))
+#Maximum number of configured FTP files, if DFC reaches this then NUM_FTPFILES needs to be increased.
+MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
 
 #Wait remaining time upto 15 min for DFC to download all consumed events
 sleep_wait          870
 
 #At least the requiment number of file shall be published
-dr_greater          ctr_published_files             $TARGET_REQUIRMENT_FILES
+dr_greater          ctr_published_files                  $TARGET_REQUIRMENT_FILES
 
 #If greater then MAX_FILES then more FTP files need to be configured
-mr_less             ctr_ctr_unique_files            MAX_FILES
+mr_less             ctr_unique_files                     $MAX_FILES
 
 
 #Test that all files from polled events has been downloaded etc
 
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_published_files                  $TARGET_FILES
 
-mr_equal            ctr_events                      $TARGET_EVENTS
+mr_equal            ctr_events                           $TARGET_EVENTS
 
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               $TARGET_FILES
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published $TARGET_FILES
-dr_equal            ctr_publish_req                 $TARGET_FILES
-dr_equal            ctr_publish_req_redirect        $TARGET_FILES
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_publish_query                    $TARGET_FILES
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      $TARGET_FILES
+dr_equal            ctr_publish_req                      $TARGET_FILES
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             $TARGET_FILES
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  $TARGET_FILES
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            $TARGET_FILES
-drr_equal           ctr_publish_responses           $TARGET_FILES
+drr_equal           ctr_publish_requests                 $TARGET_FILES
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                $TARGET_FILES
 
-drr_equal           dwl_volume                      $TARGET_VOLUME
+drr_equal           dwl_volume                           $TARGET_VOLUME
 
 print_all
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/README.md b/test/mocks/datafilecollector-testharness/auto-test/README.md
index 09001d0..a94076f 100644
--- a/test/mocks/datafilecollector-testharness/auto-test/README.md
+++ b/test/mocks/datafilecollector-testharness/auto-test/README.md
@@ -8,12 +8,14 @@
 The integration repo is needed as well as docker.
 If needed setup the ``DFC_LOCAL_IMAGE`` and ``DFC_REMOTE_IMAGE`` env var in test_env.sh to point to the dfc images (local registry image or next registry image) without the image tag.
 The predefined images should be ok for current usage:
+
 ``DFC_REMOTE_IMAGE=nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
+
 ``DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server``
 
 If the test cases/suites in this dir are not executed in the auto-test dir in the integration repo, then the ``SIM_GROUP`` env var need to point to the ``simulator-group`` dir. 
-See instructions in the test_env.sh. The ../common dir is needed as well in the case. That is, it is possible to have auto-test dir (and the common dir) somewhere 
-than in the integration repo but the simulator group dir need to be available.
+See the instructions in test_env.sh. The ../common dir is needed as well in that case. That is, it is possible to have the auto-test dir (and the common dir) somewhere else
+than in the integration repo, but the simulator-group and common dirs need to be available.
 
 ##Test cases and test suites naming.
 Each file filename should have the format ``<tc-id>.sh`` for test cases and ``<ts-id>.sh`` for test suite. The tc-id and ts-id are the
@@ -24,15 +26,18 @@
 ##Logs from containers and test cases
 All logs from each test cases are stored under ``logs/<tc-id>/``.
 The logs include the application.log and the container log from dfc, the container logs from each simulator and the test case log (same as the screen output).
-
+In the test cases, the logs are stored with a prefix so that logs can be saved at different steps during the test. All test cases contain an entry that saves all logs with the prefix 'END' at the end of the test case.
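+
+For example, a test case can save an extra snapshot of all logs part-way through the test by calling ``store_logs`` with a prefix of its own choosing ('MIDDLE' below is just an illustrative prefix); all test cases end with ``store_logs END``:
+
+```
+store_logs          MIDDLE
+```
+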
 ##Execution##
-Test cases and test suites are executed by: ``./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app``</br>
+Test cases and test suites are executed by: ``[sudo] ./<tc-id or ts-id>.sh local | remote | remote-remove | manual-container | manual-app`` (see the example below).</br>
 **local** - uses the dfc image pointed out by ``DFC_LOCAL_IMAGE`` in the test_env, should be the dfc image built locally in your docker registry.</br>
 **remote** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry.</br>
 **remote-remove** - uses the dfc image pointed out by ``DFC_REMOTE_IMAGE`` in the test_env, should be the dfc nexus image in your docker registry. Removes the nexus image and pull from remote registry.</br>
 **manual-container** - uses dfc in a manually started container. The script will prompt you for manual starting and stopping of the container.</br>
 **manual-app** - uses dfc app started as an external process (from eclipse etc). The script will prompt you for manual start and stop of the process.</br>
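+
+For example, to run test case FTC5 against the remote (nexus) dfc image:
+
+```
+./FTC5.sh remote
+```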
 
+When running dfc manually, either as a container or as an app, the ports need to be mapped according to the instance id of the dfc. Most test cases start dfc with index 0, and then the test case expects the dfc ports to be mapped to the standard port numbers.
+However, if an instance id higher than 0 is used, then the mapped ports need to add that index to the port number (e.g. if index 2 is used, the dfc needs to map ports 8102 and 8435 instead of the standard 8100 and 8433).
+
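+The exact command depends on your environment, but as a rough sketch (assuming the standard ports 8100 and 8433 are the container-internal ports and using the local image name from test_env.sh), a manually started dfc container for instance id 2 could be started roughly like this. The docker network and the dfc configuration (e.g. how it reaches the consul/CBS simulator) are left out and need to be added for a working setup:
+
+```
+# illustrative sketch only - adapt image tag, docker network and dfc config to your environment
+docker run -d --name dfc_app2 \
+  -p 8102:8100 -p 8435:8433 \
+  onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server:latest
+```
+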
 ##Test case file##
 A test case file contains a number of steps to verify a certain functionality.
 A description of the test case should be given to the ``TC_ONELINE_DESCR`` var. The description will be printed in the test result.
@@ -72,16 +77,22 @@
 Print the env variables needed for the simulators and their setup
 
 **clean_containers**</br>
-Stop and remove all containers including dfc app and simulators
+Stop and remove all containers including dfc apps and simulators
 
-**start_simulators**
+**start_simulators**</br>
 Start all simulators in the simulator group
 
-**start_dfc**</br>
-Start the dfc application
+**start_dfc <dfc-instance-id>**</br>
+Start the dfc application. The arg shall be an integer from 0 to 5 representing the dfc instance to start. The DFC app will get a name like 'dfc_app0' to 'dfc_app4'.
 
-**kill_dfc**</br>
-Stop and remove the dfc app container
+**kill_dfc <dfc-instance-id>**</br>
+Stop and remove the dfc app container with the supplied instance id.
+
+**consul_config_app <dfc-instance-id> <json-file-path>**</br>
+Configure consul with a json file containing the app config for a dfc instance, using the supplied dfc instance id and json file path.
+
+**consul_config_dmaap <dfc-instance-id> <json-file-path>**</br>
+Configure consul with a json file containing the dmaap config for a dfc instance, using the supplied dfc instance id and json file path.
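+
+For example, most single-dfc test cases configure dfc instance 0 like this:
+
+```
+consul_config_app   0  "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0  "../simulator-group/consul/dmaap_feed2.json"
+```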
 
 **kill_dr**</br>
 Stop and remove the DR simulator container
@@ -92,11 +103,23 @@
 **kill_mr**</br>
 Stop and remove the MR simulator container
 
-**kill_sftp**</br>
-Stop and remove the SFTP container
+**kill_sftp <sftp-instance-id>**</br>
+Stop and remove an SFTP container with the supplied instance id (0-5).
 
-**kill_ftps**</br>
-Stop and remove the FTPS container
+**stop_sftp <sftp-instance-id>**</br>
+Stop an SFTP container with the supplied instance id (0-5).
+
+**start_sftp <sftp-instance-id>**</br>
+Start a previously stopped SFTP container with the supplied instance id (0-5).
+
+**kill_ftps <ftps-instance-id>**</br>
+Stop and remove an FTPS container with the supplied instance id (0-5).
+
+**stop_ftps <ftps-instance-id>**</br>
+Stop an FTPS container with the supplied instance id (0-5).
+
+**start_ftps <ftps-instance-id>**</br>
+Start a previously stopped FTPS container with the supplied instance id (0-5).
 
 **mr_print <vaiable-name>**</br>
 Print a variable value from the MR simulator.
@@ -107,6 +130,9 @@
 **drr_print <vaiable-name>**</br>
 Print a variable value from the DR redir simulator.
 
+**dfc_print <dfc-instance-id> <variable-name>**</br>
+Print a variable value from a dfc instance with the supplied instance id (0-5).
+
 **mr_read <vaiable-name>**</br>
 Read a variable value from MR sim and send to stdout
 
@@ -178,6 +204,14 @@
 before setting pass or fail depending on if the variable value is less than the target
 value or not.
 
+**dr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+Tests if a variable value in the DR simulator contains a target substring, with an optional timeout.
+</br>Arg: ``<variable-name> <target-value>`` - This test sets pass or fail depending on if the variable contains
+the target substring or not.
+</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>``  - This test waits up to the timeout seconds
+before setting pass or fail depending on if the variable value contains the target
+substring or not.
+
 **drr_equal <variable-name> <target-value> [<timeout-in-sec>]**</br>
 Tests if a variable value in the DR Redir simulator is equal to a target value and and optional timeout.
 </br>Arg: ``<variable-name> <target-value>`` - This test set pass or fail depending on if the variable is
@@ -202,6 +236,14 @@
 before setting pass or fail depending on if the variable value is less than the target
 value or not.
 
+**drr_contain_str <variable-name> <target-value> [<timeout-in-sec>]**</br>
+Tests if a variable value in the DR Redir simulator contains a target substring, with an optional timeout.
+</br>Arg: ``<variable-name> <target-value>`` - This test sets pass or fail depending on if the variable contains
+the target substring or not.
+</br>Arg: ``<variable-name> <target-value> <timeout-in-sec>``  - This test waits up to the timeout seconds
+before setting pass or fail depending on if the variable value contains the target
+substring or not.
+
 **dfc_contain_str <variable-name> <substring-in-quotes>**</br>
 Test is a variable in the DFC contains a substring.
 
@@ -225,7 +267,7 @@
 
 ##Test suite files##
 A test suite file contains one or more test cases to run in sequence.
-A description of the test case should be given to the TS_ONELINE_DESCR var. The description will be printed in the test result.
+A description of the test suite should be given to the ``TS_ONELINE_DESCR`` var. The description will be printed in the test result.
 
 The empty template for a test suite files looks like this:
 
@@ -235,7 +277,7 @@
 ```
 #!/bin/bash
 
-TS_ONELINE_DESCR="<test-suite-description"
+TS_ONELINE_DESCR="<test-suite-description>"
 
 . ../common/testsuite_common.sh
 
@@ -257,7 +299,7 @@
 
 The ../common/testsuite_common.sh contains all functions needed for a test suite file.
 
-The following is a list of the available functions in a test case file. Please see a defined test suite for examples.
+The following is a list of the available functions in a test suite file. Please see an existing test suite for examples.
 
 **suite_setup**</br>
 Sets up the test suite and print out a heading.
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
index 2b76f16..f221ac5 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_FTPS_72h.sh
@@ -8,81 +8,100 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc2300"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1000"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="FTPS"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-# 24h MR sim execution time since first poll, should be reached withing 72h +1h margion
-mr_contain_str      exe_time_first_poll             4320: $((60*60*24*3+3600))
+# 72h MR sim execution time since first poll, should be reached within 72h + 1h margin
+mr_contain_str      exe_time_first_poll                  4320: $((60*60*24*3+3600))
+
+# Required number of files: 100 new files in the first event for each PNF, then 1 new file per PNF in the
+# remaining 15 min polls up to 72h. This is the minimum number of published files for the test.
+TARGET_REQUIRMENT_FILES=$((70000+700*95+700*96+700*96))
+
+#Make sure target is reached within 72h + a reasonable tolerance
+mr_greater         ctr_unique_files                      $((TARGET_REQUIRMENT_FILES-1)) 1800
+
 # stop event delivery
 mr_print            stop
 # wait for MR sim values to stabilize
 sleep_wait          30
 
-# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the
-# remaining polls up to 24h. This is the minimum number of published files for the test
-TARGET_REQUIRMENT_FILE=$((70000+700*95+700*96+700*96))
-
 #Calculate targets based on the number of unique files delivered from MR sim
 TARGET_FILES=$(mr_read ctr_unique_files)
-TARGET_EVENTS=$((TARGET_FILES/100))
+TARGET_EVENTS=$((TARGET_FILES-70000+700))  #First event from a PNF is 100 new files, remaining events contain 1 new file each
 TARGET_VOLUME=$((TARGET_FILES*1000000))
 
-#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased.
+#Maximum number of configured FTP files; if DFC reaches this then NUM_FTPFILES needs to be increased.
 MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
 
 #Wait remaining time up to 15 min for DFC to download all consumed events
 sleep_wait          870
 
 #At least the required number of files shall be published
-dr_greater          ctr_published_files             $TARGET_REQUIRMENT_FILE
+dr_greater          ctr_published_files                  $((TARGET_REQUIRMENT_FILES-1))
 
-#If greate then MAX_FILES then more FTP files need to be configured
-mr_less             ctr_ctr_unique_files            MAX_FILES
+#If greater than MAX_FILES then more FTP files need to be configured
+mr_less             ctr_unique_files                     $MAX_FILES
 
 
 #Test that all files from polled events have been downloaded etc
 
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_published_files                  $TARGET_FILES
 
-mr_equal            ctr_events                      $TARGET_EVENTS
+mr_equal            ctr_events                           $TARGET_EVENTS
 
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               $TARGET_FILES
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published $TARGET_FILES
-dr_equal            ctr_publish_req                 $TARGET_FILES
-dr_equal            ctr_publish_req_redirect        $TARGET_FILES
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_publish_query                    $TARGET_FILES
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      $TARGET_FILES
+dr_equal            ctr_publish_req                      $TARGET_FILES
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             $TARGET_FILES
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  $TARGET_FILES
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            $TARGET_FILES
-drr_equal           ctr_publish_responses           $TARGET_FILES
+drr_equal           ctr_publish_requests                 $TARGET_FILES
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                $TARGET_FILES
 
-drr_equal           dwl_volume                      $TARGET_VOLUME
+drr_equal           dwl_volume                           $TARGET_VOLUME
 
 print_all
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
 
diff --git a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
index 072135c..bc0b2ef 100755
--- a/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
+++ b/test/mocks/datafilecollector-testharness/auto-test/Stability1MB_SFTP_72h.sh
@@ -8,81 +8,100 @@
 
 clean_containers
 
-export DR_TC="--tc normal"
-export DR_REDIR_TC="--tc normal"
 export MR_TC="--tc1300"
-export BC_TC=""
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"
+
+export DR_TC="--tc normal"
+export DR_FEEDS="2:A"
+
+export DR_REDIR_TC="--tc normal"
+export DR_REDIR_FEEDS="2:A"
+
 export NUM_FTPFILES="1000"
 export NUM_PNFS="700"
 export FILE_SIZE="1MB"
 export FTP_TYPE="SFTP"
+export FTP_FILE_PREFIXES="A"
+export NUM_FTP_SERVERS=5
 
 log_sim_settings
 
 start_simulators
 
-mr_equal            ctr_requests                    0 60
-dr_equal            ctr_published_files             0 60
+consul_config_app   0                                    "../simulator-group/consul/c12_feed2_PM.json"
+consul_config_dmaap 0                                    "../simulator-group/consul/dmaap_feed2.json"
+
+mr_equal            ctr_requests                         0 60
+dr_equal            ctr_published_files                  0 60
 
 mr_print            tc_info
 dr_print            tc_info
 drr_print           tc_info
 
-start_dfc
+start_dfc           0
 
-# 24h MR sim execution time since first poll, should be reached withing 72h +1h margion
-mr_contain_str      exe_time_first_poll             4320: $((60*60*24*3+3600))
+# 72h MR sim execution time since first poll, should be reached within 72h + 1h margin
+mr_contain_str      exe_time_first_poll                  4320: $((60*60*24*3+3600))
+
+# Required number of files: 100 new files in the first event for each PNF, then 1 new file per PNF in the
+# remaining 15 min polls up to 72h. This is the minimum number of published files for the test
+TARGET_REQUIRMENT_FILES=$((70000+700*95+700*96+700*96))
+
+#Make sure target is reached within 72h + a reasonable tolerance
+mr_greater         ctr_unique_files                      $((TARGET_REQUIRMENT_FILES-1)) 1800
+
 # stop event delivery
 mr_print            stop
 # wait for MR sim values to stabilize
 sleep_wait          30
 
-# Requirement number of files, 100 new files in first event for each PNF, then 1 new file per PNF in the
-# remaining polls up to 24h. This is the minimum number of published files for the test
-TARGET_REQUIRMENT_FILE=$((70000+700*95+700*96+700*96))
-
 #Calculate targets based on the number of unique files delivered from MR sim
 TARGET_FILES=$(mr_read ctr_unique_files)
-TARGET_EVENTS=$((TARGET_FILES/100))
+TARGET_EVENTS=$((TARGET_FILES-70000+700))    #First event from a PNF is 100 new files, remaining events contain 1 new file each
 TARGET_VOLUME=$((TARGET_FILES*1000000))
 
-#Maximum number of configured FTP files, if DFC download more than this then the NUM_FTPSFILES need to be increased.
+#Maximum number of configured FTP files; if DFC reaches this then NUM_FTPFILES needs to be increased.
 MAX_FILES=$((NUM_FTPFILES*NUM_PNFS))
 
 #Wait remaining time up to 15 min for DFC to download all consumed events
 sleep_wait          870
 
 #At least the required number of files shall be published
-dr_greater          ctr_published_files             $TARGET_REQUIRMENT_FILE
+dr_greater          ctr_published_files                  $((TARGET_REQUIRMENT_FILES-1))
 
-#If greate then MAX_FILES then more FTP files need to be configured
-mr_less             ctr_ctr_unique_files            MAX_FILES
+#If greater than MAX_FILES then more FTP files need to be configured
+mr_less             ctr_unique_files                     $MAX_FILES
 
 
 #Test that all files from polled events have been downloaded etc
 
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_published_files                  $TARGET_FILES
 
-mr_equal            ctr_events                      $TARGET_EVENTS
+mr_equal            ctr_events                           $TARGET_EVENTS
 
-mr_equal            ctr_unique_PNFs                 700
+mr_equal            ctr_unique_PNFs                      700
 
-dr_equal            ctr_publish_query               $TARGET_FILES
-dr_equal            ctr_publish_query_published     0
-dr_equal            ctr_publish_query_not_published $TARGET_FILES
-dr_equal            ctr_publish_req                 $TARGET_FILES
-dr_equal            ctr_publish_req_redirect        $TARGET_FILES
-dr_equal            ctr_publish_req_published       0
-dr_equal            ctr_published_files             $TARGET_FILES
+dr_equal            ctr_publish_query                    $TARGET_FILES
+dr_equal            ctr_publish_query_bad_file_prefix    0
+dr_equal            ctr_publish_query_published          0
+dr_equal            ctr_publish_query_not_published      $TARGET_FILES
+dr_equal            ctr_publish_req                      $TARGET_FILES
+dr_equal            ctr_publish_req_bad_file_prefix      0
+dr_equal            ctr_publish_req_redirect             $TARGET_FILES
+dr_equal            ctr_publish_req_published            0
+dr_equal            ctr_published_files                  $TARGET_FILES
+dr_equal            ctr_double_publish                   0
 
-drr_equal           ctr_publish_requests            $TARGET_FILES
-drr_equal           ctr_publish_responses           $TARGET_FILES
+drr_equal           ctr_publish_requests                 $TARGET_FILES
+drr_equal           ctr_publish_requests_bad_file_prefix 0
+drr_equal           ctr_publish_responses                $TARGET_FILES
 
-drr_equal           dwl_volume                      $TARGET_VOLUME
+drr_equal           dwl_volume                           $TARGET_VOLUME
 
 print_all
 
-check_dfc_log
+check_dfc_logs
 
 #### TEST COMPLETE ####
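For reference, the minimum-files figure used in both 72h stability scripts above works out as below, assuming 96 fifteen-minute polls per day where the first poll of day one carries the 100-file backlog event per PNF (leaving 95 single-file polls on day one, then 96 on each of the two following days):

```
# 700 PNFs * 100 files in the first event, then 1 new file per PNF for the
# remaining 95 + 96 + 96 fifteen-minute polls over ~72h:
echo $((70000 + 700*95 + 700*96 + 700*96))   # 270900
```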
 
diff --git a/test/mocks/datafilecollector-testharness/common/test_env.sh b/test/mocks/datafilecollector-testharness/common/test_env.sh
index 794b354..f4d443b 100644
--- a/test/mocks/datafilecollector-testharness/common/test_env.sh
+++ b/test/mocks/datafilecollector-testharness/common/test_env.sh
@@ -16,3 +16,34 @@
 export DFC_LOCAL_IMAGE=onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server
 
 
+# Common env vars for auto-test.
+
+DFC_PORT=8100
+DFC_PORT_SECURE=8433
+DFC_LOGPATH="/var/log/ONAP/application.log"
+DOCKER_SIM_NWNAME="dfcnet"
+CONSUL_HOST="consul-server"
+CONSUL_PORT=8500
+CONFIG_BINDING_SERVICE="config-binding-service"
+MR_PORT=2222
+DR_PORT=3906
+DR_PORT_SECURE=3907
+DRR_PORT=3908
+DRR_PORT_SECURE=3909
+DFC_APP_BASE="dfc_app"
+DFC_MAX_NUM=5
+DFC_MAX_IDX=$(($DFC_MAX_NUM - 1))
+SFTP_BASE="dfc_sftp-server"
+FTPS_BASE="dfc_ftpes-server-vsftpd"
+FTP_MAX_NUM=5
+FTP_MAX_IDX=$(($FTP_MAX_NUM - 1))
+SFTP_SIMS_CONTAINER="sftp-server0:22,sftp-server1:22,sftp-server2:22,sftp-server3:22,sftp-server4:22"
+FTPS_SIMS_CONTAINER="ftpes-server-vsftpd0:21,ftpes-server-vsftpd1:21,ftpes-server-vsftpd2:21,ftpes-server-vsftpd3:21,ftpes-server-vsftpd4:21"
+SFTP_SIMS_LOCALHOST="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026"
+FTPS_SIMS_LOCALHOST="localhost:1032,localhost:1033,localhost:1034,localhost:1035,localhost:1036"
+
+export SFTP_SIMS=$SFTP_SIMS_CONTAINER   #This env will be set to SFTP_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+export FTPS_SIMS=$FTPS_SIMS_CONTAINER   #This env will be set to FTPS_SIMS_LOCALHOST if auto test is executed with 'manual-app'
+
+export DR_REDIR_SIM="drsim_redir"       #This env will be set to 'localhost' if auto test is executed with 'manual-app'
+
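As a quick reference for the values above, each DFC instance maps its internal ports to host ports offset by the instance index; a small sketch of the arithmetic the common scripts use (run from the auto-test directory so the relative path resolves):

```
# Per-instance port mapping used by the common scripts: base port + instance index.
. ../common/test_env.sh

idx=2    # example instance id in the range 0..DFC_MAX_IDX
echo "${DFC_APP_BASE}${idx} http  port on host: $((DFC_PORT + idx))"         # 8102
echo "${DFC_APP_BASE}${idx} https port on host: $((DFC_PORT_SECURE + idx))"  # 8435
```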
diff --git a/test/mocks/datafilecollector-testharness/common/testcase_common.sh b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
index 1e0118e..b0a14aa 100755
--- a/test/mocks/datafilecollector-testharness/common/testcase_common.sh
+++ b/test/mocks/datafilecollector-testharness/common/testcase_common.sh
@@ -2,15 +2,22 @@
 
 . ../common/test_env.sh
 
+
+
 echo "Test case started as: ${BASH_SOURCE[$i+1]} "$1 $2
 
 # Script containing all functions needed for auto testing of test cases
 # Arg: local [<image-tag>] | remote [<image-tag>] | remote-remove [<image-tag>] | manual-container | manual-app
 
+STARTED_DFCS="" #DFC app names added to this var to keep track of started containers in the script
 START_ARG=$1
 IMAGE_TAG="latest"
 
 if [ $# -gt 1 ]; then
+	if [[ "$2" =~ ^1.1.* ]]; then
+		echo "This version of auto-test does not support DFC image version of 1.1.X"
+		exit 1
+	fi
 	IMAGE_TAG=$2
 fi
 
@@ -30,9 +37,9 @@
 	fi
 	DFC_IMAGE=$DFC_REMOTE_IMAGE":"$IMAGE_TAG
 elif [ $1 == "manual-container" ] && [ $# -eq 1 ]; then
-	echo "DFC is expected to be started manually as a container with name 'dfc_app'"
+	echo "DFC is expected to be started manually, when prompted,  as a container with name 'dfc_app'<index> with and index in the range from 0 to '${DFC_MAX_IDX}'"
 elif [ $1 == "manual-app" ] && [ $# -eq 1 ]; then
-	echo "DFC is expected to be started manually as a java application"
+	echo "DFC is expected to be started manually, when prompted, as a java application"
 else
 	echo "Expected arg: local [<image-tag>] ]| remote [<image-tag>] ]| remote-remove [<image-tag>]] | manual-container | manual-app"
 	exit 1
@@ -104,7 +111,7 @@
 
 if [ $1 !=  "manual-container" ] && [ $1 !=  "manual-app" ]; then
 	echo -e "DFC image tag set to: \033[1m" $IMAGE_TAG"\033[0m"
-	echo "Configured image for DFC app (${1}): "$DFC_IMAGE 
+	echo "Configured image for DFC app(s) (${1}): "$DFC_IMAGE
 	tmp_im=$(docker images ${DFC_IMAGE} | grep -v REPOSITORY)
 
 	if [ $1 == "local" ]; then
@@ -118,9 +125,9 @@
 	elif [ $1 == "remote" ] || [ $1 == "remote-remove" ]; then
 
 		if [ $1 == "remote-remove" ]; then
-			echo "Attempt to stop dfc_app container if running"
-			docker stop $(docker ps -q --filter name=dfc_app) &> /dev/null
-			docker rm $(docker ps -q --filter name=dfc_app) &> /dev/null
+			echo "Attempt to stop dfc_app container(s) if running"
+			docker stop $(docker ps -q --filter name=${DFC_APP_BASE}) &> /dev/null
+			docker rm $(docker ps -q --filter name=${DFC_APP_BASE}) &> /dev/null
 			docker rmi $DFC_IMAGE &> /dev/null
 			tmp_im=""
 		fi
@@ -133,25 +140,26 @@
 				exit 1
 			fi
 			echo -e "DFC image: \033[1m"$tmp_im"\033[0m"
-		else 
+		else
 			echo -e "DFC image: \033[1m"$tmp_im"\033[0m"
-			echo "!! If the dfc image seem outdated, consider removing it from your docker registry and run the test again."
+			echo "!! If the dfc image seem outdated, consider removing it from your docker registry and run the test again. Or run the script with 'remote-remove'"
 		fi
 	fi
 fi
 
+
+
 echo ""
 
-echo "Building images for the simulators if needed, MR, DR and DR Redir simulators"
+echo "Building images for the simulators if needed, MR, DR, DR Redir and FTPS simulators"
 curdir=$PWD
 cd $SIM_GROUP
 cd ../dr-sim
 docker build -t drsim_common:latest . &> /dev/null
 cd ../mr-sim
 docker build -t mrsim:latest . &> /dev/null
-cd ../simulator-group
-cp -r ../ftps-sftp-server/configuration .
-cp -r ../ftps-sftp-server/tls .
+cd ../ftps-sftp-server
+docker build -t ftps_vsftpd:latest -f Dockerfile-ftps . &> /dev/null
 cd $curdir
 
 echo ""
@@ -161,19 +169,35 @@
 echo "DR simulator:       " $(docker images | grep drsim_common)
 echo "DR redir simulator: " $(docker images | grep drsim_common)
 echo "SFTP:               " $(docker images | grep atmoz/sftp)
-echo "FTPS:               " $(docker images | grep panubo/vsftpd)
+echo "FTPS:               " $(docker images | grep ftps_vsftpd)
+echo "Consul:             " $(docker images | grep consul)
+echo "CBS:                " $(docker images | grep platform.configbinding.app)
 echo ""
 
+#Configure MR sim to use correct host:port for running dfc as an app or as a container
+#Configure DR sim with correct address for DR redirect simulator
+if [ $START_ARG == "manual-app" ]; then
+	export SFTP_SIMS=$SFTP_SIMS_LOCALHOST
+	export FTPS_SIMS=$FTPS_SIMS_LOCALHOST
+	export DR_REDIR_SIM="localhost"
+fi
+#else
+#	export SFTP_SIMS=$SFTP_SIMS_CONTAINER
+#	export FTPS_SIMS=$FTPS_SIMS_CONTAINER
+#	export DR_REDIR_SIM="drsim_redir"
+#fi
+
 echo "-----------------------------------      Test case steps      -----------------------------------"
 
 # Print error info for the call in the parent script (test case). Arg: <error-message-to-print>
 # Not to be called from test script.
-print_err() {
+__print_err() {
     echo ${FUNCNAME[1]} " "$1" " ${BASH_SOURCE[$i+2]} " line" ${BASH_LINENO[$i+1]}
 }
-# Execute curl using the host and variable. Arg: <host> <variable-name>
+# Execute curl using the host and variable. Arg: <host-and-variable-name> [ <flag-to-strip-new-line> ]
+# <flag-to-strip-new-line> may contain any string; it is just a flag
 # Returns the variable value (if success) and return code 0 or an error message and return code 1
-do_curl() {
+__do_curl() {
 	res=$(curl -sw "%{http_code}" $1)
 	http_code="${res:${#res}-3}"
 	if [ ${#res} -eq 3 ]; then
@@ -184,19 +208,24 @@
 			echo "<not found, resp:${http_code}>"
 			return 1
 		fi
-  		echo "${res:0:${#res}-3}"
+		if [ $# -eq 2 ]; then
+  			echo "${res:0:${#res}-3}" | xargs
+		else
+  			echo "${res:0:${#res}-3}"
+		fi
+
 		return 0
 	fi
 }
 
 # Test a simulator variable value towards a target value using a condition operator with an optional timeout.
-# Arg: <simulator-name> <host> <variable-name> <condition-operator> <target-value>  - This test is done 
+# Arg: <simulator-name> <host> <variable-name> <condition-operator> <target-value>  - This test is done
 # immediately and sets pass or fail depending on the result of comparing variable and target using the operator.
 # Arg: <simulator-name> <host> <variable-name> <condition-operator> <target-value> <timeout>  - This test waits up to the timeout
 # before setting pass or fail depending on the result of comparing variable and target using the operator.
 # Not to be called from test script.
 
-var_test() {
+__var_test() {
 	if [ $# -eq 6 ]; then
 		echo -e "---- ${1} sim test criteria: \033[1m ${3} \033[0m ${4} ${5} within ${6} seconds ----"
 		((RES_TEST++))
@@ -204,13 +233,17 @@
 		ctr=0
 		for (( ; ; ))
 		do
-			result="$(do_curl $2$3)"
+			result="$(__do_curl $2$3)"
 			retcode=$?
 			result=${result//[[:blank:]]/} #Strip blanks
 			duration=$((SECONDS-start))
 			if [ $((ctr%30)) -eq 0 ]; then
-				echo -ne "  Result=${result} after ${duration} seconds, DFC heartbeat="$(do_curl http://127.0.0.1:8100/heartbeat)
-				echo ""
+				echo "  Result=${result} after ${duration} seconds"
+				for (( i=0; i<=$DFC_MAX_IDX; i++ )); do
+					if [[ $STARTED_DFCS =~ "_"$DFC_APP_BASE$i"_" ]]; then
+						echo "    HB ${DFC_APP_BASE}${i}: $(__do_curl http://127.0.0.1:$(($DFC_PORT+$i))/status strip)"
+					fi
+				done
 			else
 				echo -ne "  Result=${result} after ${duration} seconds\033[0K\r"
 			fi
@@ -253,7 +286,7 @@
 	elif [ $# -eq 5 ]; then
 		echo -e "---- ${1} sim test criteria: \033[1m ${3} \033[0m ${4} ${5} ----"
 		((RES_TEST++))
-		result="$(do_curl $2$3)"
+		result="$(__do_curl $2$3)"
 		retcode=$?
 		result=${result//[[:blank:]]/}  #Strip blanks
 		if [ $retcode -ne 0 ]; then
@@ -275,15 +308,15 @@
 			((RES_FAIL++))
 			echo -e "----  \033[31m\033[1mFAIL\033[0m - Target ${3} ${4} ${5} not reached, result = ${result} ----"
 		fi
-	else 
-		echo "Wrong args to var_test, needs five or six args: <simulator-name> <host> <variable-name> <condition-operator> <target-value> [ <timeout> ]"
+	else
+		echo "Wrong args to __var_test, needs five or six args: <simulator-name> <host> <variable-name> <condition-operator> <target-value> [ <timeout> ]"
 		exit 1
 	fi
 }
 # Stops a named container
-docker_stop() {
+__docker_stop() {
 	if [ $# -ne 1 ]; then
-		echo "docker_stop need 1 arg <container-name>"
+		echo "__docker_stop need 1 arg <container-name>"
 		exit 1
 	fi
 	tmp=$(docker stop $1  2>/dev/null)
@@ -294,10 +327,24 @@
 	fi
 }
 
-# Removes a named container
-docker_rm() {
+# Starts a named container (that has previously been stopped)
+__docker_start() {
 	if [ $# -ne 1 ]; then
-		echo "docker_rm need 1 arg <container-name>"
+		echo "__docker_start need 1 arg <container-name>"
+		exit 1
+	fi
+	tmp=$(docker start $1  2>/dev/null)
+	if [ -z $tmp ] || [ $tmp != $1 ]; then
+		echo " ${1} container not started or not existing"
+	else
+		echo " ${1} container started"
+	fi
+}
+
+# Removes a named container
+__docker_rm() {
+	if [ $# -ne 1 ]; then
+		echo "__docker_rm need 1 arg <container-name>"
 		exit 1
 	fi
 	tmp=$(docker rm $1  2>/dev/null)
@@ -308,37 +355,78 @@
 	fi
 }
 
-start_dfc_image() {
-	echo "Starting DFC"
-	# Port mappning not needed since dfc is running in host mode
-	docker run -d --network="host" --name dfc_app $DFC_IMAGE > /dev/null
+__start_dfc_image() {
+
+	if [ $# != 2 ]; then
+    	__print_err "need tow args, <dfc-instance-name> 0.."$$DFC_MAX_IDX
+		exit 1
+	fi
+
+	if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
+		__print_err "need two args, <dfc-instance-name> 0.."$DFC_MAX_IDX
+		exit 1
+	fi
+	appname=$1
+	localport=$(($DFC_PORT + $2))
+	localport_secure=$(($DFC_PORT_SECURE + $2))
+
+	echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
+
+	docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
+
+	echo "Starting DFC: " $appname " with ports mapped to " $localport " and " $localport_secure " in docker network "$DOCKER_SIM_NWNAME
+
+	docker run -d -p $localport":8100" -p $localport_secure":8433" --network=$DOCKER_SIM_NWNAME -e CONSUL_HOST=$CONSUL_HOST -e CONSUL_PORT=$CONSUL_PORT -e CONFIG_BINDING_SERVICE=$CONFIG_BINDING_SERVICE -e HOSTNAME=$appname --name $appname $DFC_IMAGE > /dev/null
+
 	dfc_started=false
 	for i in {1..10}; do
-	if [ $(docker inspect --format '{{ .State.Running }}' dfc_app) ]
+	if [ $(docker inspect --format '{{ .State.Running }}' $appname) ]
  	then
-	 	echo " Image: $(docker inspect --format '{{ .Config.Image }}' dfc_app)"
-   		echo "DFC app Running"
+	 	echo " Image: $(docker inspect --format '{{ .Config.Image }}' ${appname})"
+   		echo "DFC container ${appname} running"
 		dfc_started=true
    		break
  	else
-   		echo sleep $i
+   		sleep $i
  	fi
 	done
 	if ! [ $dfc_started  ]; then
-		echo "DFC app could not be started"
+		echo "DFC container ${appname} could not be started"
 		exit 1
 	fi
+
+	dfc_hb=false
+	echo "Waiting for DFC ${appname} heartbeat..."
+	for i in {1..10}; do
+		result="$(__do_curl http://127.0.0.1:${localport}/heartbeat)"
+		if [ $? -eq 0 ]; then
+	   		echo "DFC ${appname} responds to heartbeat: " $result
+	   		dfc_hb=true
+	   		result="$(__do_curl http://127.0.0.1:${localport}/actuator/info)"
+	   		echo "DFC ${appname} image build info: " $result
+	   		break
+	 	else
+	   		sleep $i
+	 	fi
+	done
+
+	if ! [ $dfc_hb  ]; then
+		echo "DFC ${appname} did not respond to heartbeat"
+	fi
 }
 
-#WFunction for waiting for named container to be started manually.
-wait_for_container() {
+# Function for waiting for named container to be started manually.
+__wait_for_container() {
 	start=$SECONDS
-	if [ $# != 1 ]; then
-		echo "Need one arg: <container-name>"
+	if [ $# != 2 ]; then
+		echo "Need one arg: <container-name> <instance-id>"
 		exit 1
 	fi
+	http=$(($DFC_PORT+$2))
+	https=$((DFC_PORT_SECURE+$2))
+	echo "The container is expected to map its ports (8100/8433) to the following port visibile on the host: http port ${http} and https port ${https}"
 	echo "Waiting for container with name '${1}' to be started manually...."
-		
+
 	for (( ; ; ))
 	do
 		if [ $(docker inspect --format '{{ .State.Running }}' $1 2> /dev/null) ]; then
@@ -348,17 +436,22 @@
 	 		duration=$((SECONDS-start))
 			echo -ne "  Waited ${duration} seconds\033[0K\r"
    			sleep 1
- 		fi 
+ 		fi
 	done
+
+	echo "Connecting container "$1" to simulator network "$DOCKER_SIM_NWNAME
+	docker network connect $DOCKER_SIM_NWNAME $1
 }
 
-#WFunction for waiting for named container to be stopped manually. 
-wait_for_container_gone() {
+# Function for waiting for named container to be stopped manually.
+__wait_for_container_gone() {
 	start=$SECONDS
 	if [ $# != 1 ]; then
 		echo "Need one arg: <container-name>"
 		exit 1
 	fi
+	echo "Disconnecting container "$1" from simulator network "$DOCKER_SIM_NWNAME
+	docker network disconnect $DOCKER_SIM_NWNAME $1
 	echo "Waiting for container with name '${1}' to be stopped manually...."
 
 	for (( ; ; ))
@@ -370,18 +463,24 @@
 		else
    			echo "Container stopped: "$1
    			break
- 		fi 
+ 		fi
 	done
 }
 
 #Function for waiting for dfc to be started manually
-wait_for_dfc() {
-	read -p "Press enter to continue when dfc has been manually started"
+__wait_for_dfc() {
+	http=$(($DFC_PORT+$2))
+	https=$((DFC_PORT_SECURE+$2))
+	echo "The app is expected to listen to http port ${http} and https port ${https}"
+	echo "The app shall use 'localhost' and '8500' for CONSUL_HOST and CONSUL_PORT."
+	echo "The app shale use 'config-binding-service-localhost' for CONFIG_BINDING_SERVICE"
+	echo "The app shall use ${1} for HOSTNAME."
+	read -p "Press enter to continue when app mapping to ${1} has been manually started"
 }
 
 #Function for waiting for dfc to be stopped manually
-wait_for_dfc_gone() {
-	read -p "Press enter to continue when dfc has been manually stopped"
+__wait_for_dfc_gone() {
+	read -p "Press enter to continue when when app mapping to ${1} has been manually stopped"
 }
 
 #############################################################
@@ -391,28 +490,40 @@
 # Print the env variables needed for the simulators and their setup
 log_sim_settings() {
 	echo "Simulator settings"
-	echo "DR_TC=        "$DR_TC
-	echo "DR_REDIR_TC=  "$DR_REDIR_TC
-	echo "MR_TC=        "$MR_TC
-	echo "BC_TC=        "$BC_TC
-	echo "NUM_FTPFILES= "$NUM_FTPFILES
-	echo "NUM_PNFS=     "$NUM_PNFS
-	echo "FILE_SIZE=    "$FILE_SIZE
-	echo "FTP_TYPE=     "$FTP_TYPE
+	echo "MR_TC=                 "$MR_TC
+	echo "MR_GROUPS=             "$MR_GROUPS
+    echo "MR_FILE_PREFIX_MAPPING="$MR_FILE_PREFIX_MAPPING
+	echo "DR_TC=                 "$DR_TC
+	echo "DR_FEEDS=              "$DR_FEEDS
+	echo "DR_REDIR_SIM=          "$DR_REDIR_SIM
+	echo "DR_REDIR_TC=           "$DR_REDIR_TC
+	echo "DR_REDIR_FEEDS=        "$DR_REDIR_FEEDS
+
+	echo "NUM_FTPFILES=          "$NUM_FTPFILES
+	echo "NUM_PNFS=              "$NUM_PNFS
+	echo "FILE_SIZE=             "$FILE_SIZE
+	echo "FTP_TYPE=              "$FTP_TYPE
+	echo "FTP_FILE_PREFIXES=     "$FTP_FILE_PREFIXES
+	echo "NUM_FTP_SERVERS=       "$NUM_FTP_SERVERS
+	echo "SFTP_SIMS=             "$SFTP_SIMS
+	echo "FTPS_SIMS=             "$FTPS_SIMS
 	echo ""
 }
 
 # Stop and remove all containers including dfc app and simulators
 clean_containers() {
-	echo "Stopping all containers, dfc app and simulators with name prefix 'dfc_'"
+	echo "Stopping all containers, dfc app(s) and simulators with name prefix 'dfc_'"
 	docker stop $(docker ps -q --filter name=dfc_) &> /dev/null
 	echo "Removing all containers, dfc app and simulators with name prefix 'dfc_'"
 	docker rm $(docker ps -a -q --filter name=dfc_) &> /dev/null
+	echo "Removing unused docker networks with substring 'dfc' in network name"
+	docker network rm $(docker network ls -q --filter name=dfc)
 	echo ""
 }
 
 # Start all simulators in the simulator group
 start_simulators() {
+
 	echo "Starting all simulators"
 	curdir=$PWD
 	cd $SIM_GROUP
@@ -424,119 +535,304 @@
 # Start the dfc application
 start_dfc() {
 
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <dfc-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $DFC_MAX_IDX ]; then
+		__print_err "arg should be 0.."$DFC_MAX_IDX
+		exit 1
+	fi 
+	appname=$DFC_APP_BASE$1
+	STARTED_DFCS=$STARTED_DFCS"_"$appname"_"
+
 	if [ $START_ARG == "local" ] || [ $START_ARG == "remote" ] ||  [ $START_ARG == "remote-remove" ]; then
-		start_dfc_image
+		__start_dfc_image $appname $1
 	elif [ $START_ARG == "manual-container" ]; then
-		wait_for_container dfc_app
+		__wait_for_container $appname $1
 	elif [ $START_ARG == "manual-app" ]; then
-		wait_for_dfc
+		__wait_for_dfc $appname $1
+	fi
+}
+
+# Configure consul with dfc config, args <app|dmaap> <dfc-instance-id> <json-file-path>
+# Not intended to be called directly by test scripts.
+__consul_config() {
+
+	if [ $# != 3 ]; then
+    	__print_err "need three args, <app|dmaap> <dfc-instance-id> <json-file-path>"
+		exit 1
+	fi
+
+	if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
+		__print_err "dfc-instance-id should be 0.."$DFC_MAX_IDX
+		exit 1
+	fi
+	if ! [ -f $3 ]; then
+		__print_err "json file does not extis: "$3
+		exit 1
+	fi
+
+	if [ $1 == "app" ]; then
+		appname=$DFC_APP_BASE$2
+	elif [ $1 == "dmaap" ]; then
+		appname=$DFC_APP_BASE$2":dmaap"
+	else
+		__print_err "config type should be 'app' or 'dmaap'"
+		exit 1
+	fi
+
+	echo "Configuring consul for " $appname " from " $3
+	curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@"$3 >/dev/null
+}
+
+# Configure consul with dfc app config, args <dfc-instance-id> <json-file-path>
+consul_config_app() {
+	if [ $START_ARG == "manual-app" ]; then
+		echo "Replacing 'mrsim' with 'localhost' in json app config for consul"
+		sed 's/mrsim/localhost/g' $2 > .tmp_app.json
+		__consul_config app $1 .tmp_app.json
+	else
+		__consul_config app $1 $2
+	fi
+}
+
+# Configure consul with dfc dmaap config, args <dfc-instance-id> <json-file-path>
+consul_config_dmaap() {
+	if [ $START_ARG == "manual-app" ]; then
+		echo "Replacing 'drsim' with 'localhost' in json dmaap config for consul"
+		sed 's/drsim/localhost/g' $2 > .tmp_dmaap.json
+		__consul_config dmaap $1 .tmp_dmaap.json
+	else
+		__consul_config dmaap $1 $2
 	fi
 }
 
 # Stop and remove the dfc app container
 kill_dfc() {
-	echo "Killing DFC"
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <dfc-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $DFC_MAX_IDX ]; then
+		__print_err "arg should be 0.."$DFC_MAX_IDX
+		exit 1
+	fi
+	appname=$DFC_APP_BASE$1
+
+	echo "Killing DFC, instance id: "$1
 
 	if [ $START_ARG == "local" ] || [ $START_ARG == "remote" ] ||  [ $START_ARG == "remote-remove" ]; then
-		docker_stop dfc_app
-		docker_rm dfc_app
+		__docker_stop $appname
+		__docker_rm $appname
 	elif [ $START_ARG == "manual-container" ]; then
-		wait_for_container_gone dfc_app
+		__wait_for_container_gone $appname
 	elif [ $START_ARG == "manual-app" ]; then
-		wait_for_dfc_gone
+		__wait_for_dfc_gone $appname
 	fi
 }
 
 # Stop and remove the DR simulator container
 kill_dr() {
 	echo "Killing DR sim"
-	docker_stop dfc_dr-sim
-	docker_rm dfc_dr-sim
+	__docker_stop dfc_dr-sim
+	__docker_rm dfc_dr-sim
 }
 
 # Stop and remove the DR redir simulator container
 kill_drr() {
 	echo "Killing DR redir sim"
-	docker_stop dfc_dr-redir-sim
-	docker_rm dfc_dr-redir-sim
+	__docker_stop dfc_dr-redir-sim
+	__docker_rm dfc_dr-redir-sim
 }
 
 # Stop and remove the MR simulator container
 kill_mr() {
 	echo "Killing MR sim"
-	docker_stop dfc_mr-sim
-	docker_rm dfc_mr-sim
+	__docker_stop dfc_mr-sim
+	__docker_rm dfc_mr-sim
 }
 
-# Stop and remove the SFTP container
+# Stop and remove the SFTP container, arg: <sftp-instance-id>
 kill_sftp() {
-	echo "Killing SFTP"
-	docker_stop dfc_sftp-server
-	docker_rm dfc_sftp-server
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <sftp-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$SFTP_BASE$1
+
+	echo "Killing SFTP, instance id: "$1
+
+	__docker_stop $appname
+	__docker_rm $appname
 }
 
-# Stop and remove the FTPS container
+# Stop SFTP container, arg: <sftp-instance-id>
+stop_sftp() {
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <sftp-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$SFTP_BASE$1
+
+	echo "Stopping SFTP, instance id: "$1
+
+	__docker_stop $appname
+}
+
+# Starts a stopped SFTP container, arg: <sftp-instance-id>
+start_sftp() {
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <sftp-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$SFTP_BASE$1
+
+	echo "Starting SFTP, instance id: "$1
+
+	__docker_start $appname
+}
+
+# Stop and remove the FTPS container, arg: <ftps-instance-id>
 kill_ftps() {
-	echo "Killing FTPS"
-	docker_stop dfc_ftpes-server-vsftpd
-	docker_rm dfc_ftpes-server-vsftpd
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <ftpS-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$FTPS_BASE$1
+
+	echo "Killing FTPS, instance id: "$1
+
+	__docker_stop $appname
+	__docker_rm $appname
+}
+
+# Stop FTPS container, arg: <ftps-instance-id>
+stop_ftps() {
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <ftps-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$FTPS_BASE$1
+
+	echo "Stopping FTPS, instance id: "$1
+
+	__docker_stop $appname
+}
+
+# Starts a stopped FTPS container, arg: <ftps-instance-id>
+start_ftps() {
+
+	if [ $# != 1 ]; then
+    	__print_err "need one arg, <ftps-instance-id>"
+		exit 1
+	fi
+
+	if [ $1 -lt 0 ] || [ $1 -gt $FTP_MAX_IDX ]; then
+		__print_err "arg should be 0.."$FTP_MAX_IDX
+		exit 1
+	fi
+	appname=$FTPS_BASE$1
+
+	echo "Starting FTPS, instance id: "$1
+
+	__docker_start $appname
 }
 
 # Print a variable value from the MR simulator. Arg: <variable-name>
 mr_print() {
 	if [ $# != 1 ]; then
-    	print_err "need one arg, <sim-param>"
+    	__print_err "need one arg, <sim-param>"
 		exit 1
 	fi
-	echo -e "---- MR sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:2222/$1)"
+	echo -e "---- MR sim, \033[1m $1 \033[0m: $(__do_curl http://127.0.0.1:$MR_PORT/$1)"
 }
 
 # Print a variable value from the DR simulator. Arg: <variable-name>
 dr_print() {
 	if [ $# != 1 ]; then
-    	print_err "need one arg, <sim-param>"
+    	__print_err "need one arg, <sim-param>"
 		exit 1
 	fi
-	echo -e "---- DR sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:3906/$1)"
+	echo -e "---- DR sim, \033[1m $1 \033[0m: $(__do_curl http://127.0.0.1:$DR_PORT/$1)"
 }
 
 # Print a variable value from the DR redir simulator. Arg: <variable-name>
 drr_print() {
 	if [ $# != 1 ]; then
-    	print_err "need one arg, <sim-param>"
+    	__print_err "need one arg, <sim-param>"
 		exit 1
 	fi
-	echo -e "---- DR redir sim, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:3908/$1)"
+	echo -e "---- DR redir sim, \033[1m $1 \033[0m: $(__do_curl http://127.0.0.1:$DRR_PORT/$1)"
 }
-# Print a variable value from dfc. Arg: <variable-name>
+# Print a variable value from dfc. Arg: <dfc-instance-id> <variable-name>
 dfc_print() {
-	if [ $# != 1 ]; then
-    	print_err "need one arg, <dfc-param>"
+	if [ $# != 2 ]; then
+    	__print_err "need two args, <dfc-instance-id> <dfc-param>"
 		exit 1
 	fi
-	echo -e "---- DFC, \033[1m $1 \033[0m: $(do_curl http://127.0.0.1:8100/$1)"
+	if [ $1 -lt 0 ] || [ $1 -gt $DFC_MAX_IDX ]; then
+		__print_err "dfc instance id should be in range 0.."DFC_MAX_IDX
+		exit 1
+	fi
+	localport=$(($DFC_PORT + $1))
+	appname=$DFC_APP_BASE$1
+	echo -e "---- DFC $appname, \033[1m $2 \033[0m: $(__do_curl http://127.0.0.1:$localport/$2)"
 }
 
-# Read a variable value from MR sim and send to stdout.
+# Read a variable value from MR sim and send to stdout. Arg: <variable-name>
 mr_read() {
-	echo "$(do_curl http://127.0.0.1:2222/$1)"
+	echo "$(__do_curl http://127.0.0.1:$MR_PORT/$1)"
 }
 
-# Read a variable value from DR sim and send to stdout.
+# Read a variable value from DR sim and send to stdout. Arg: <variable-name>
 dr_read() {
-	echo "$(do_curl http://127.0.0.1:3906/$1)"
+	echo "$(__do_curl http://127.0.0.1:$DR_PORT/$1)"
 }
 
-# Read a variable value from DR redir sim and send to stdout.
+# Read a variable value from DR redir sim and send to stdout. Arg: <variable-name>
 drr_read() {
-	echo "$(do_curl http://127.0.0.1:3908/$1)"
+	echo "$(__do_curl http://127.0.0.1:$DRR_PORT/$1)"
 }
 
 
 # Sleep. Arg: <sleep-time-in-sec>
 sleep_wait() {
 	if [ $# != 1 ]; then
-		print_err "need one arg, <sleep-time-in-sec>"
+		__print_err "need one arg, <sleep-time-in-sec>"
 		exit 1
 	fi
 	echo "---- Sleep for " $1 " seconds ----"
@@ -547,25 +843,36 @@
 		sleep 1
 		duration=$((SECONDS-start))
 	done
+	echo ""
 }
 
 # Sleep and print dfc heartbeat. Arg: <sleep-time-in-sec>
 sleep_heartbeat() {
 	if [ $# != 1 ]; then
-		print_err "need one arg, <sleep-time-in-sec>"
+		__print_err "need one arg, <sleep-time-in-sec>"
 		exit 1
 	fi
 	echo "---- Sleep for " $1 " seconds ----"
+	echo ""
 	start=$SECONDS
 	duration=$((SECONDS-start))
 	ctr=0
+	rows=0
 	while [ $duration -lt $1 ]; do
-		if [ $((ctr%30)) -eq 0 ]; then
-			echo -ne "  Slept for ${duration} seconds, \033[1m heartbeat \033[0m "$(do_curl http://127.0.0.1:8100/heartbeat)
-			echo ""
-		else
-			echo -ne "  Slept for ${duration} seconds, \033[1m heartbeat \033[0m "$(do_curl http://127.0.0.1:8100/heartbeat)" \033[0K\r"
+		if [ $rows -eq 0 ]; then
+			tput cuu1
 		fi
+		rows=0
+		echo "  Slept for ${duration} seconds"
+		if [ $((ctr%30)) -eq 0 ]; then
+			for (( i=0; i<=$DFC_MAX_IDX; i++ )); do
+				if [[ $STARTED_DFCS =~ "_"$DFC_APP_BASE$i"_" ]]; then
+					let rows=rows+1
+					echo "    HB ${DFC_APP_BASE}${i}: $(__do_curl http://127.0.0.1:$(($DFC_PORT+$i))/heartbeat)"
+				fi
+			done
+		fi
+
 		let ctr=ctr+1
 		sleep 1
 		duration=$((SECONDS-start))
@@ -581,9 +888,9 @@
 # value or not.
 mr_equal() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "MR" "http://127.0.0.1:2222/" $1 "=" $2 $3
+		__var_test "MR" "http://127.0.0.1:$MR_PORT/" $1 "=" $2 $3
 	else
-		print_err "Wrong args to mr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to mr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -595,9 +902,9 @@
 # value or not.
 mr_greater() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "MR" "http://127.0.0.1:2222/" $1 ">" $2 $3
+		__var_test "MR" "http://127.0.0.1:$MR_PORT/" $1 ">" $2 $3
 	else
-		print_err "Wrong args to mr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to mr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -609,9 +916,9 @@
 # value or not.
 mr_less() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "MR" "http://127.0.0.1:2222/" $1 "<" $2 $3
+		__var_test "MR" "http://127.0.0.1:$MR_PORT/" $1 "<" $2 $3
 	else
-		print_err "Wrong args to mr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to mr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -623,9 +930,9 @@
 # value or not.
 mr_contain_str() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "MR" "http://127.0.0.1:2222/" $1 "contain_str" $2 $3
+		__var_test "MR" "http://127.0.0.1:$MR_PORT/" $1 "contain_str" $2 $3
 	else
-		print_err "Wrong args to mr_contain_str, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to mr_contain_str, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -637,9 +944,9 @@
 # value or not.
 dr_equal() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR" "http://127.0.0.1:3906/" $1 "=" $2 $3
+		__var_test "DR" "http://127.0.0.1:$DR_PORT/" $1 "=" $2 $3
 	else
-		print_err "Wrong args to dr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to dr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -651,9 +958,9 @@
 # value or not.
 dr_greater() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR" "http://127.0.0.1:3906/" $1 ">" $2 $3
+		__var_test "DR" "http://127.0.0.1:$DR_PORT/" $1 ">" $2 $3
 	else
-		print_err "Wrong args to dr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to dr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -665,9 +972,23 @@
 # value or not.
 dr_less() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR" "http://127.0.0.1:3906/" $1 "<" $2 $3
+		__var_test "DR" "http://127.0.0.1:$DR_PORT/" $1 "<" $2 $3
 	else
-		print_err "Wrong args to dr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to dr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
+	fi
+}
+
+# Tests if a variable value in the DR simulator contains the target string, with an optional timeout.
+# Arg: <variable-name> <target-value> - This test sets pass or fail depending on if the variable contains
+# the target or not.
+# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# before setting pass or fail depending on if the variable value contains the target
+# value or not.
+dr_contain_str() {
+	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
+		__var_test "DR" "http://127.0.0.1:$DR_PORT/" $1 "contain_str" $2 $3
+	else
+		__print_err "Wrong args to dr_contain_str, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -679,9 +1000,9 @@
 # value or not.
 drr_equal() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR REDIR" "http://127.0.0.1:3908/" $1 "=" $2 $3
+		__var_test "DR REDIR" "http://127.0.0.1:$DRR_PORT/" $1 "=" $2 $3
 	else
-		print_err "Wrong args to drr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to drr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -694,9 +1015,9 @@
 # value or not.
 drr_greater() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR REDIR" "http://127.0.0.1:3908/" $1 ">" $2 $3
+		__var_test "DR REDIR" "http://127.0.0.1:$DRR_PORT/" $1 ">" $2 $3
 	else
-		print_err "Wrong args to drr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to drr_greater, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
@@ -708,27 +1029,47 @@
 # value or not.
 drr_less() {
 	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-		var_test "DR REDIR" "http://127.0.0.1:3908/" $1 "<" $2 $3
+		__var_test "DR REDIR" "http://127.0.0.1:$DRR_PORT/" $1 "<" $2 $3
 	else
-		print_err "Wrong args to drr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "Wrong args to drr_less, needs two or three args: <sim-param> <target-value> [ timeout ]"
 	fi
 }
 
-#Test is a variable in the DFC contains a substring. Arg: <variable-name> <substring-in-quotes>
+# Tests if a variable value in the DR redir simulator contains the target string, with an optional timeout.
+# Arg: <variable-name> <target-value> - This test sets pass or fail depending on if the variable contains
+# the target or not.
+# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# before setting pass or fail depending on if the variable value contains the target
+# value or not.
+drr_contain_str() {
+	if [ $# -eq 2 ] || [ $# -eq 3 ]; then
+		__var_test "DR REDIR" "http://127.0.0.1:$DRR_PORT/" $1 "contain_str" $2 $3
+	else
+		__print_err "Wrong args to drr_contain_str, needs two or three args: <sim-param> <target-value> [ timeout ]"
+	fi
+}
+
+#Tests if a variable in the DFC contains a substring. Arg: <dfc-index> <variable-name> <substring-in-quotes>
 dfc_contain_str() {
-	if [ $# -eq 2 ]; then
-		echo -e "---- DFC test criteria: \033[1m ${1} \033[0m contains: ${2} ----"
+	if [ $# -eq 3 ]; then
+		if [ $1 -lt 0 ] || [ $1 -gt $DFC_MAX_IDX ]; then
+			__print_err "arg should be 0.."$DFC_MAX_IDX
+			exit 1
+		fi
+		appname=$DFC_APP_BASE$1
+		localport=$(($DFC_PORT + $1))
+		echo -e "---- DFC test criteria: $appname \033[1m ${2} \033[0m contains: ${3} ----"
 		((RES_TEST++))
-		result="$(do_curl http://127.0.0.1:8100/${1})"
-		if [[ $result =~ $2 ]]; then
+		result="$(__do_curl http://127.0.0.1:$localport/${2})"
+		if [[ $result =~ $3 ]]; then
 			((RES_PASS++))
-			echo -e "----  \033[32m\033[1mPASS\033[0m - Test criteria met"		
+			echo -e "----  \033[32m\033[1mPASS\033[0m - Test criteria met"
 		else
 			((RES_FAIL++))
-			echo -e "----  \033[31m\033[1mFAIL\033[0m - Target ${1} not reached, result = ${result} ----"
+			echo -e "----  \033[31m\033[1mFAIL\033[0m - Target '${3}' not reached, result = ${result} ----"
 		fi
-	else 
-		echo "Wrong args to dfc_contain_str, needs two arg: <dfc-variable> <str>"
+	else
+		echo "Wrong args to dfc_contain_str, needs three arg: <dfc-index> <dfc-variable> <str>"
 		exit 1
 	fi
 }
@@ -737,41 +1078,66 @@
 # separate logs stored at different steps in the test script. Arg: <tc-id> <log-prefix>
 store_logs() {
 	if [ $# != 1 ]; then
-    	print_err "need one arg, <file-prefix>"
+    	__print_err "need one arg, <file-prefix>"
 		exit 1
 	fi
 	echo "Storing all container logs and dfc app log using prefix: "$1
 	if ! [ $START_ARG == "manual-app" ]; then
-		docker cp dfc_app:/var/log/ONAP/application.log $TESTLOGS/$ATC/$1_application.log
-		docker logs dfc_app > $TESTLOGS/$ATC/$1_dfc_app-docker.log 2>&1
+		for (( i=0; i<=$DFC_MAX_IDX; i++ )); do
+			appname=$DFC_APP_BASE$i
+			tmp=$(docker ps | grep $appname)
+			if ! [ -z "$tmp" ]; then   #Only store logs from running DFC apps
+				docker cp $appname:/var/log/ONAP/application.log $TESTLOGS/$ATC/${1}_${appname}_application.log
+				docker logs $appname > $TESTLOGS/$ATC/$1_$appname-docker.log 2>&1
+			fi
+		done
 	fi
 	docker logs dfc_mr-sim > $TESTLOGS/$ATC/$1_dfc_mr-sim-docker.log 2>&1
 	docker logs dfc_dr-sim > $TESTLOGS/$ATC/$1_dfc_dr-sim-docker.log 2>&1
 	docker logs dfc_dr-redir-sim > $TESTLOGS/$ATC/$1_dfc_dr-redir-sim-docker.log 2>&1
-	docker logs dfc_ftpes-server-vsftpd > $TESTLOGS/$ATC/$1_dfc_ftpes-server-vsftpd.log 2>&1
-	docker logs dfc_sftp-server > $TESTLOGS/$ATC/$1_dfc_sftp-server.log 2>&1
+
+	for (( i=0; i<=$FTP_MAX_IDX; i++ )); do
+		appname=$SFTP_BASE$i
+		docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
+		appname=$FTPS_BASE$i
+		docker logs $appname > $TESTLOGS/$ATC/${1}_${appname}.log 2>&1
+	done
+
+	docker logs dfc_consul > $TESTLOGS/$ATC/$1_consul.log 2>&1
+	docker logs dfc_cbs > $TESTLOGS/$ATC/$1_cbs.log 2>&1
 }
-# Check the dfc application log for WARN and ERR messages and print the count.
-check_dfc_log() {
-	echo "Checking dfc log /var/log/ONAP/application.log for WARNINGs and ERRORs, excluding messages from CONSUL"
-	foundentries=$(docker exec -it dfc_app grep WARN /var/log/ONAP/application.log | grep -iv CONSUL | wc -l)
+# Check the dfc application log, for all dfc instances, for WARN and ERR messages and print the count.
+check_dfc_logs() {
+	for (( i=0; i<=$DFC_MAX_IDX; i++ )); do
+		appname=$DFC_APP_BASE$i
+		tmp=$(docker ps | grep $appname)
+		if ! [ -z "$tmp" ]; then  #Only check logs for running dfc_apps
+			_check_dfc_log $appname
+		fi
+	done
+}
+
+# Check dfc app log for one dfc instance, arg <dfc-app-name>
+_check_dfc_log() {
+	echo "Checking $1 log $DFC_LOGPATH for WARNINGs and ERRORs"
+	foundentries=$(docker exec -it $1 grep WARN $DFC_LOGPATH | wc -l)
 	if [ $? -ne  0 ];then
-		echo "  Problem to search dfc log /var/log/ONAP/application.log"
+		echo "  Problem to search $1 log $DFC_LOGPATH"
 	else
 		if [ $foundentries -eq 0 ]; then
-			echo "  No WARN entries found in dfc log /var/log/ONAP/application.log"
-		else 
-			echo -e "  Found \033[1m"$foundentries"\033[0m WARN entries in dfc log /var/log/ONAP/application.log"
+			echo "  No WARN entries found in $1 log $DFC_LOGPATH"
+		else
+			echo -e "  Found \033[1m"$foundentries"\033[0m WARN entries in $1 log $DFC_LOGPATH"
 		fi
 	fi
-		foundentries=$(docker exec -it dfc_app grep ERR /var/log/ONAP/application.log | grep -iv CONSUL | wc -l)
+	foundentries=$(docker exec -it $1 grep ERR $DFC_LOGPATH | wc -l)
 	if [ $? -ne  0 ];then
-		echo "  Problem to search dfc log /var/log/ONAP/application.log"
+		echo "  Problem to search $1 log $DFC_LOGPATH"
 	else
 		if [ $foundentries -eq 0 ]; then
-			echo "  No ERR entries found in dfc log /var/log/ONAP/application.log"
-		else 
-			echo -e "  Found \033[1m"$foundentries"\033[0m ERR entries in dfc log /var/log/ONAP/application.log"
+			echo "  No ERR entries found in $1 log $DFC_LOGPATH"
+		else
+			echo -e "  Found \033[1m"$foundentries"\033[0m ERR entries in $1 log $DFC_LOGPATH"
 		fi
 	fi
 }
@@ -780,37 +1146,76 @@
 
 	echo "---- DFC and all sim variables"
 
-	dfc_print heartbeat
-	
+	for (( i=0; i<=$DFC_MAX_IDX; i++ )); do
+		appname=$DFC_APP_BASE$i
+		tmp=$(docker ps | grep $appname)
+		if ! [ -z "$tmp" ]; then  #Only check running dfc_apps
+			dfc_print $i status
+		fi
+	done
+
+
 	mr_print tc_info
+	mr_print status
 	mr_print execution_time
+	mr_print groups
+	mr_print changeids
+	mr_print fileprefixes
 	mr_print exe_time_first_poll
+	mr_print groups/exe_time_first_poll
 	mr_print ctr_requests
+	mr_print groups/ctr_requests
 	mr_print ctr_responses
+	mr_print groups/ctr_responses
 	mr_print ctr_files
+	mr_print groups/ctr_files
 	mr_print ctr_unique_files
+	mr_print groups/ctr_unique_files
+	mr_print groups/ctr_events
 	mr_print ctr_events
 	mr_print ctr_unique_PNFs
+	mr_print groups/ctr_unique_PNFs
 
 	dr_print tc_info
 	dr_print execution_time
+	dr_print feeds
 	dr_print ctr_publish_query
+	dr_print feeds/ctr_publish_query
+	dr_print ctr_publish_query_bad_file_prefix
+	dr_print feeds/ctr_publish_query_bad_file_prefix
 	dr_print ctr_publish_query_published
+	dr_print feeds/ctr_publish_query_published
 	dr_print ctr_publish_query_not_published
+	dr_print feeds/ctr_publish_query_not_published
 	dr_print ctr_publish_req
+	dr_print feeds/ctr_publish_req
+	dr_print ctr_publish_req_bad_file_prefix
+	dr_print feeds/ctr_publish_req_bad_file_prefix
 	dr_print ctr_publish_req_redirect
+	dr_print feeds/ctr_publish_req_redirect
 	dr_print ctr_publish_req_published
+	dr_print feeds/ctr_publish_req_published
 	dr_print ctr_published_files
+	dr_print feeds/ctr_published_files
+	dr_print ctr_double_publish
+	dr_print feeds/ctr_double_publish
 
 	drr_print tc_info
 	drr_print execution_time
+	drr_print feeds
 	drr_print ctr_publish_requests
+	drr_print feeds/ctr_publish_requests
+	drr_print ctr_publish_requests_bad_file_prefix
+	drr_print feeds/ctr_publish_requests_bad_file_prefix
 	drr_print ctr_publish_responses
+	drr_print feeds/ctr_publish_responses
 	drr_print dwl_volume
+	drr_print feeds/dwl_volume
 	drr_print time_lastpublish
+	drr_print feeds/time_lastpublish
 }
 
-# Print the test result 
+# Print the test result
 print_result() {
 
 	TCTEST_END=$SECONDS
@@ -830,10 +1235,24 @@
 	total=$((RES_PASS+RES_FAIL))
 	if [ $RES_TEST -eq 0 ]; then
 		echo -e "\033[1mNo tests seem to have executed. Check the script....\033[0m"
+ 		echo -e "\033[31m\033[1m ___  ___ ___ ___ ___ _____   ___ _   ___ _   _   _ ___ ___ \033[0m"
+ 		echo -e "\033[31m\033[1m/ __|/ __| _ \_ _| _ \_   _| | __/_\ |_ _| | | | | | _ \ __|\033[0m"
+		echo -e "\033[31m\033[1m\__ \ (__|   /| ||  _/ | |   | _/ _ \ | || |_| |_| |   / _| \033[0m"
+ 		echo -e "\033[31m\033[1m|___/\___|_|_\___|_|   |_|   |_/_/ \_\___|____\___/|_|_\___|\033[0m"
 	elif [ $total != $RES_TEST ]; then
 		echo -e "\033[1mTotal number of tests does not match the sum of passed and failed tests. Check the script....\033[0m"
+		echo -e "\033[31m\033[1m ___  ___ ___ ___ ___ _____   ___ _   ___ _   _   _ ___ ___ \033[0m"
+		echo -e "\033[31m\033[1m/ __|/ __| _ \_ _| _ \_   _| | __/_\ |_ _| | | | | | _ \ __|\033[0m"
+		echo -e "\033[31m\033[1m\__ \ (__|   /| ||  _/ | |   | _/ _ \ | || |_| |_| |   / _| \033[0m"
+ 		echo -e "\033[31m\033[1m|___/\___|_|_\___|_|   |_|   |_/_/ \_\___|____\___/|_|_\___|\033[0m"
 	elif [ $RES_PASS = $RES_TEST ]; then
 		echo -e "All tests \033[32m\033[1mPASS\033[0m"
+		echo -e "\033[32m\033[1m  ___  _   ___ ___ \033[0m"
+		echo -e "\033[32m\033[1m | _ \/_\ / __/ __| \033[0m"
+		echo -e "\033[32m\033[1m |  _/ _ \\__ \__ \\ \033[0m"
+		echo -e "\033[32m\033[1m |_|/_/ \_\___/___/ \033[0m"
+		echo ""
+
 		# Update test suite counter
 		if [ -f .tmp_tcsuite_pass_ctr ]; then
 			tmpval=$(< .tmp_tcsuite_pass_ctr)
@@ -845,6 +1264,11 @@
 		fi
 	else
 		echo -e "One or more tests with status  \033[31m\033[1mFAIL\033[0m "
+		echo -e "\033[31m\033[1m  ___ _   ___ _    \033[0m"
+		echo -e "\033[31m\033[1m | __/_\ |_ _| |   \033[0m"
+		echo -e "\033[31m\033[1m | _/ _ \ | || |__ \033[0m"
+		echo -e "\033[31m\033[1m |_/_/ \_\___|____|\033[0m"
+		echo ""
 		# Update test suite counter
 		if [ -f .tmp_tcsuite_fail_ctr ]; then
 			tmpval=$(< .tmp_tcsuite_fail_ctr)
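Taken together, the reworked helpers in testcase_common.sh are all indexed by instance id. A hypothetical fragment of a test case exercising them could look like the sketch below, assuming the script has already done the usual setup (clean_containers, simulator start and consul configuration); the instance ids and the expected substring are placeholders, not taken from any real test case:

```
start_dfc           0
start_dfc           1

sleep_heartbeat     30                        # prints one heartbeat line per started DFC

stop_sftp           2                         # temporarily take one of the SFTP servers down
start_sftp          2

dfc_print           1 status                  # print the 'status' variable of dfc_app1
dfc_contain_str     1 status "<expected-substring>"

kill_dfc            1
kill_dfc            0
```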
diff --git a/test/mocks/datafilecollector-testharness/common/testsuite_common.sh b/test/mocks/datafilecollector-testharness/common/testsuite_common.sh
index 2476f6c..e35d670 100755
--- a/test/mocks/datafilecollector-testharness/common/testsuite_common.sh
+++ b/test/mocks/datafilecollector-testharness/common/testsuite_common.sh
@@ -50,7 +50,7 @@
     touch .tmp_tcsuite_fail
 }
 
-print_err() {
+__print_err() {
     echo ${FUNCNAME[1]} " "$1" " ${BASH_SOURCE[$i+2]} " line" ${BASH_LINENO[$i+1]}
 }
 
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
index 3d95492..da05eed 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
+++ b/test/mocks/datafilecollector-testharness/dr-sim/Dockerfile
@@ -1,6 +1,6 @@
 #Common image for both dmmapDR and dmaapDR_redir
 
-FROM node:8
+FROM node:12
 
 WORKDIR /app
 
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
index 5367c9e..f94600a 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
+++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR.js
@@ -12,6 +12,12 @@
 var certificate = fs.readFileSync('cert/certificate.crt', 'utf8');
 var credentials = {key: privateKey, cert: certificate};
 
+var feeds="1:A";  //Comma separated list of feedId:filePrefix. Default is feedId=1 and file prefix 'A'
+var feedNames=[];
+var filePrefixes=[];
+var feedIndexes=[];
+
+
 //For execution time calculation
 var startTime = Date.now();
 
@@ -32,13 +38,18 @@
 var drr_sim_ip = '127.0.0.1'; //IP for redirect to DR redir sim. Can be changed by env DRR_SIM_IP
 
 //Counters
-var ctr_publish_query = 0;
-var ctr_publish_query_published = 0;
-var ctr_publish_query_not_published = 0;
-var ctr_publish_req = 0;
-var ctr_publish_req_redirect = 0;
-var ctr_publish_req_published = 0;
-var ctr_double_publish = 0
+var ctr_publish_query = [];
+var ctr_publish_query_bad_file_prefix = [];
+var ctr_publish_query_published = [];
+var ctr_publish_query_not_published = [];
+var ctr_publish_req = [];
+var ctr_publish_req_bad_file_prefix = [];
+var ctr_publish_req_redirect = [];
+var ctr_publish_req_published = [];
+var ctr_double_publish = [];
+
+//db of published files
+var published=[];
 
 var parser = new ArgumentParser({
 	  version: '0.0.1',
@@ -124,7 +135,9 @@
 app.use(bodyParser.json({ type: 'application/vnd.api+json' }))
 
 // parse some custom thing into a Buffer (to cater for 60MB files)
-app.use(bodyParser.raw({limit:1024*1024*60, type: 'application/octet-stream' }))
+//Removed - file data is not used in this simulator
+//app.use(bodyParser.raw({limit:1024*1024*60, type: 'application/octet-stream' }))
+
 // parse an HTML body into a string
 app.use(bodyParser.text({ type: 'text/html' }))
 
@@ -135,49 +148,190 @@
 	res.send("ok");
 })
 
+function toCommaList(ctrArray) {
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		str=str+ctrArray[i];
+	}
+	return str;
+}
+
+function sumList(ctrArray) {
+	var tmp=0;
+	for(i=0;i<feedNames.length;i++) {
+		tmp=tmp+ctrArray[i];
+	}
+	return ""+tmp;
+}
+
+function sumListLength(ctrArray) {
+	var tmp=0;
+	for(i=0;i<feedNames.length;i++) {
+		tmp=tmp+ctrArray[i].length;
+	}
+	return ""+tmp;
+}
+
 //Counter readout
 app.get("/ctr_publish_query",function(req, res){
-	res.send(""+ctr_publish_query);
+	res.send(""+sumList(ctr_publish_query));
 })
+app.get("/feeds/ctr_publish_query",function(req, res){
+	res.send(toCommaList(ctr_publish_query));
+})
+app.get("/ctr_publish_query/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_query[feedIndexes[feedId]]);
+})
+
+app.get("/ctr_publish_query_bad_file_prefix",function(req, res){
+	res.send(""+sumList(ctr_publish_query_bad_file_prefix));
+})
+app.get("/feeds/ctr_publish_query_bad_file_prefix",function(req, res){
+	res.send(toCommaList(ctr_publish_query_bad_file_prefix));
+})
+app.get("/ctr_publish_query_bad_file_prefix/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_query_bad_file_prefix[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_query_published",function(req, res){
-	res.send(""+ctr_publish_query_published);
+	res.send(""+sumList(ctr_publish_query_published));
 })
+app.get("/feeds/ctr_publish_query_published",function(req, res){
+	res.send(toCommaList(ctr_publish_query_published));
+})
+app.get("/ctr_publish_query_published/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_query_published[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_query_not_published",function(req, res){
-	res.send(""+ctr_publish_query_not_published);
+	res.send(""+sumList(ctr_publish_query_not_published));
 })
+app.get("/feeds/ctr_publish_query_not_published",function(req, res){
+	res.send(toCommaList(ctr_publish_query_not_published));
+})
+app.get("/ctr_publish_query_not_published/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_query_not_published[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_req",function(req, res){
-	res.send(""+ctr_publish_req);
+	res.send(""+sumList(ctr_publish_req));
 })
+app.get("/feeds/ctr_publish_req",function(req, res){
+	res.send(toCommaList(ctr_publish_req));
+})
+app.get("/ctr_publish_req/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_req[feedIndexes[feedId]]);
+})
+
+app.get("/ctr_publish_req_bad_file_prefix",function(req, res){
+	res.send(""+sumList(ctr_publish_req_bad_file_prefix));
+})
+app.get("/feeds/ctr_publish_req_bad_file_prefix",function(req, res){
+	res.send(toCommaList(ctr_publish_req_bad_file_prefix));
+})
+app.get("/ctr_publish_req_bad_file_prefix/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_req_bad_file_prefix[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_req_redirect",function(req, res){
-	res.send(""+ctr_publish_req_redirect);
+	res.send(""+sumList(ctr_publish_req_redirect));
 })
+app.get("/feeds/ctr_publish_req_redirect",function(req, res){
+	res.send(toCommaList(ctr_publish_req_redirect));
+})
+app.get("/ctr_publish_req_redirect/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_req_redirect[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_req_published",function(req, res){
-	res.send(""+ctr_publish_req_published);
+	res.send(""+sumList(ctr_publish_req_published));
 })
+app.get("/feeds/ctr_publish_req_published",function(req, res){
+	res.send(toCommaList(ctr_publish_req_published));
+})
+app.get("/ctr_publish_req_published/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_req_published[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_published_files",function(req, res){
-	res.send(""+published.length);
+	res.send(""+sumListLength(published));
 })
+app.get("/feeds/ctr_published_files",function(req, res){
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		str=str+published[i].length;
+	}
+	res.send(str);
+})
+app.get("/ctr_published_files/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+published[feedIndexes[feedId]].length);
+})
+
 app.get("/tc_info",function(req, res){
 	res.send(args.tc);
 })
 app.get("/ctr_double_publish",function(req, res){
-	res.send(""+ctr_double_publish);
+	res.send(""+sumList(ctr_double_publish));
 })
+app.get("/feeds/ctr_double_publish",function(req, res){
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		str=str+ctr_double_publish[i];
+	}
+	res.send(str);
+})
+app.get("/ctr_double_publish/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_double_publish[feedIndexes[feedId]]);
+})
+
 function fmtMSS(s){
 	return(s-(s%=60))/60+(9<s?':':':0')+s    //Format time diff in mm:ss
 }
 app.get("/execution_time",function(req, res){
-	diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
+	var diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
 	res.send(""+diff);
 })
+app.get("/feeds",function(req, res){
+	res.send(feeds);
+})
 
-//db of published files
-var published = [];
+function filenameStartsWith(fileName, feedIndex) {
+	for(i=0;i<filePrefixes[feedIndex].length;i++) {
+		var prefix=filePrefixes[feedIndex][i];
+		if (fileName.startsWith(prefix)) {
+			return true;
+		}
+	}
+	return false;
+}
 
-app.get('/feedlog/1/',function(req, res){
+app.get('/feedlog/:feedId',function(req, res){
 	console.log("url:"+req.url);
-	ctr_publish_query++;
+	var feedId = req.params.feedId;
+	ctr_publish_query[feedIndexes[feedId]]++;
 	var filename = req.query.filename;
+	if (!filenameStartsWith(filename, feedIndexes[feedId])) {
+		ctr_publish_query_bad_file_prefix[feedIndexes[feedId]]++;
+	}
 	console.log(filename);
 	var qtype = req.query.type;
 	if(typeof(filename) == 'undefined'){
@@ -187,41 +341,41 @@
 		res.status(400).send({error: 'No type provided.'});
 		return;
 	}
-	
+	var ctr = ctr_publish_query[feedIndexes[feedId]];
 	//Ugly fix, plus signs replaces with spaces in query params....need to put them back
 	filename = filename.replace(/ /g,"+");
-	
+
 	var sleeptime=0;
 	if (args.tc==tc_normal) {
 		sleeptime=0;
-	} else if (args.tc==tc_10p_no_response && (ctr_publish_query%10) == 0) {
+	} else if (args.tc==tc_10p_no_response && (ctr%10) == 0) {
 		return;
-	} else if (args.tc==tc_10first_no_response && ctr_publish_query<11) {
+	} else if (args.tc==tc_10first_no_response && ctr<11) {
 		return;
-	} else if (args.tc==tc_100first_no_response && ctr_publish_query<101) {
+	} else if (args.tc==tc_100first_no_response && ctr<101) {
 		return;
 	} else if (args.tc==tc_all_delay_1s) {
 		sleeptime=1000;
 	} else if (args.tc==tc_all_delay_10s) {
 		sleeptime=10000;
-	} else if (args.tc==tc_10p_delay_10s && (ctr_publish_query%10) == 0) {
+	} else if (args.tc==tc_10p_delay_10s && (ctr%10) == 0) {
 		sleeptime=10000;
-	} else if (args.tc==tc_10p_error_response && (ctr_publish_query%10) == 0) {
+	} else if (args.tc==tc_10p_error_response && (ctr%10) == 0) {
 		res.send(400);
 		return;
-	} else if (args.tc==tc_10first_error_response && ctr_publish_query<11) {
+	} else if (args.tc==tc_10first_error_response && ctr<11) {
 		res.send(400);
 		return;
-	} else if (args.tc==tc_100first_error_response & ctr_publish_query<101) {
+	} else if (args.tc==tc_100first_error_response & ctr<101) {
 		res.send(400);
 		return;
 	}
 
-	if (published.includes(filename)) {
-		ctr_publish_query_published++;
+	if (published[feedIndexes[feedId]].includes(filename)) {
+		ctr_publish_query_published[feedIndexes[feedId]]++;
 		strToSend="[" + filename + "]";
 	} else {
-		ctr_publish_query_not_published++;
+		ctr_publish_query_not_published[feedIndexes[feedId]]++;
 		strToSend="[]";
 	}
 	if (sleeptime > 0) {
@@ -234,68 +388,73 @@
 });
 
 
-app.put('/publish/1/:filename', function (req, res) {
+app.put('/publish/:feedId/:filename', function (req, res) {
 	console.log("url:"+req.url);
-	console.log("body (first 25 bytes):"+req.body.slice(0,25));
+//	console.log("body (first 25 bytes):"+req.body.slice(0,25));
 	console.log("headers:"+req.headers);
-	ctr_publish_req++;
+	console.log(JSON.stringify(req.headers));
+	var feedId = req.params.feedId;
+	ctr_publish_req[feedIndexes[feedId]]++;
 
 	var filename = req.params.filename;
 	console.log(filename);
-
+	if (!filenameStartsWith(filename, feedIndexes[feedId])) {
+		ctr_publish_req_bad_file_prefix[feedIndexes[feedId]]++;
+	}
+    var ctr = ctr_publish_req[feedIndexes[feedId]];
 	if (args.tc==tc_normal) {
 	// Continue
 	} else if (args.tc==tc_none_published) {
-		ctr_publish_req_redirect++;
-		res.redirect(301, 'http://' + drr_sim_ip + ':3908/publish/1/'+filename);
+		ctr_publish_req_redirect[feedIndexes[feedId]]++;
+		res.redirect(301, 'http://' + drr_sim_ip + ':3908/publish/'+feedId+'/'+filename);
 		return;
 	} else if (args.tc==tc_all_published) {
-		ctr_publish_req_published++;
+		ctr_publish_req_published[feedIndexes[feedId]]++;
 		res.send("ok");
 		return;
-	}else if (args.tc==tc_10p_no_response && (ctr_publish_req%10) == 0) {
+	}else if (args.tc==tc_10p_no_response && (ctr%10) == 0) {
 		return;
-	} else if (args.tc==tc_10first_no_response && ctr_publish_req<11) {
+	} else if (args.tc==tc_10first_no_response && ctr<11) {
 		return;
-	} else if (args.tc==tc_100first_no_response && ctr_publish_req<101) {
+	} else if (args.tc==tc_100first_no_response && ctr<101) {
 		return;
 	} else if (args.tc==tc_all_delay_1s) {
-		do_publish_delay(res, filename, 1000);
+		do_publish_delay(res, filename, 1000, feedId);
 		return;
 	} else if (args.tc==tc_all_delay_10s) {
-		do_publish_delay(res, filename, 10000);
+		do_publish_delay(res, filename, 10000, feedId);
 		return;
-	} else if (args.tc==tc_10p_delay_10s && (ctr_publish_req%10) == 0) {
-		do_publish_delay(res, filename, 10000);
+	} else if (args.tc==tc_10p_delay_10s && (ctr%10) == 0) {
+		do_publish_delay(res, filename, 10000, feedId);
 		return;
-	} else if (args.tc==tc_10p_error_response && (ctr_publish_req%10) == 0) {
+	} else if (args.tc==tc_10p_error_response && (ctr%10) == 0) {
 		res.send(400);
 		return;
-	} else if (args.tc==tc_10first_error_response && ctr_publish_req<11) {
+	} else if (args.tc==tc_10first_error_response && ctr<11) {
 		res.send(400);
 		return;
-	} else if (args.tc==tc_100first_error_response & ctr_publish_req<101) {
+	} else if (args.tc==tc_100first_error_response & ctr<101) {
 		res.send(400);
 		return;
 	}
-	if (!published.includes(filename)) {
+	if (!published[feedIndexes[feedId]].includes(filename)) {
-		ctr_publish_req_redirect++;
-		res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/1/'+filename);
+		ctr_publish_req_redirect[feedIndexes[feedId]]++;
+		res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/'+feedId+'/'+filename);
 	} else {
-		ctr_publish_req_published++;
+		ctr_publish_req_published[feedIndexes[feedId]]++;
 		res.send("ok");
 	}
 	return;
 })
 
-function do_publish_delay(res, filename, sleeptime) {
+function do_publish_delay(res, filename, sleeptime, feedId) {
-	if (!published.includes(filename)) {
+	if (!published[feedIndexes[feedId]].includes(filename)) {
-		ctr_publish_req_redirect++;
+		ctr_publish_req_redirect[feedIndexes[feedId]]++;
 		sleep(1000).then(() => {
-			res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/1/'+filename);
+			res.redirect(301, 'http://'+drr_sim_ip+':3908/publish/'+feedId+'/'+filename);
 		});
 	} else {
-		ctr_publish_req_published++;
+		ctr_publish_req_published[feedIndexes[feedId]]++;
 		sleep(1000).then(() => {
 			res.send("ok");
 		});
@@ -303,17 +462,18 @@
 }
 
 //Callback from DR REDIR server, when file is published ok this PUT request update the list of published files.
-app.put('/dr_redir_publish/:filename', function (req, res) {
+app.put('/dr_redir_publish/:feedId/:filename', function (req, res) {
 	console.log("url:"+req.url);
+	var feedId = req.params.feedId;
 	var filename = req.params.filename;
 	console.log(filename);
 
-	if (!published.includes(filename)) {
+	if (!published[feedIndexes[feedId]].includes(filename)) {
 		console.log("File marked as published by callback from DR redir SIM. url: " + req.url);
-		published.push(filename);
+		published[feedIndexes[feedId]].push(filename);
 	} else {
 		console.log("File already marked as published. Callback from DR redir SIM. url: " + req.url);
-		ctr_double_publish = ctr_double_publish+1;
+		ctr_double_publish[feedIndexes[feedId]]++;
 	}
 
 	res.send("ok");
@@ -331,5 +491,42 @@
 
 if (process.env.DRR_SIM_IP) {
 	drr_sim_ip=process.env.DRR_SIM_IP;
-} 
-console.log("Using IP " + drr_sim_ip + " for redirect to DR redir sim");
\ No newline at end of file
+}
+console.log("Using IP " + drr_sim_ip + " for redirect to DR redir sim");
+
+if (process.env.DR_FEEDS) {
+	feeds=process.env.DR_FEEDS;
+}
+
+console.log("Configured list of feeds mapped to file name prefixes: " + feeds);
+
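+//Parse the feed configuration (comma separated feedId:prefix[:prefix]* entries) into feed names, the feedId-to-index lookup and the per-feed file prefix lists, and reset all per-feed counters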
+feedNames=feeds.split(',');
+for(i=0;i<feedNames.length;i++) {
+	var tmp=feedNames[i].split(':');
+	feedNames[i]=tmp[0].trim();
+	feedIndexes[feedNames[i]]=i;
+	filePrefixes[i]=[];
+	for(j=1;j<tmp.length;j++) {
+		filePrefixes[i][j-1]=tmp[j];
+	}
+
+	ctr_publish_query[i] = 0;
+	ctr_publish_query_published[i] = 0;
+	ctr_publish_query_not_published[i] = 0;
+	ctr_publish_req[i] = 0;
+	ctr_publish_req_redirect[i] = 0;
+	ctr_publish_req_published[i] = 0;
+	ctr_double_publish[i] = 0;
+	ctr_publish_query_bad_file_prefix[i] = 0;
+	ctr_publish_req_bad_file_prefix[i] = 0;
+	published[i] = [];
+}
+
+console.log("Parsed mapping between feed id and file name prefix");
+for(i=0;i<feedNames.length;i++) {
+	var fn = feedNames[i];
+	for (j=0;j<filePrefixes[i].length;j++) {
+		console.log("Feed id: " + fn + ", file name prefix: " + filePrefixes[i][j]);
+	}
+}
+
diff --git a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
index 970c183..a5f1beb 100644
--- a/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
+++ b/test/mocks/datafilecollector-testharness/dr-sim/dmaapDR_redir.js
@@ -14,6 +14,15 @@
 var certificate = fs.readFileSync('cert/certificate.crt', 'utf8');
 var credentials = {key: privateKey, cert: certificate};
 
+var total_first_publish=0;
+var total_last_publish=0
+var total_files=0;
+var speed=0;
+
+var feeds="1:A";  //Comma separated list of feedId:filePrefix. Default is feedId=1 and file prefix 'A'
+var feedNames=[];
+var filePrefixes=[];
+var feedIndexes=[];
 
 var bodyParser = require('body-parser')
 var startTime = Date.now();
@@ -21,10 +30,11 @@
 var dr_callback_ip = '192.168.100.2'; //IP for DR when running as container. Can be changed by env DR_SIM_IP
 
 //Counters
-var ctr_publish_requests = 0;
-var ctr_publish_responses = 0;
-var lastPublish = "";
-var dwl_volume = 0;
+var ctr_publish_requests = [];
+var ctr_publish_requests_bad_file_prefix = [];
+var ctr_publish_responses = [];
+var lastPublish = [];
+var dwl_volume = [];
 
 var parser = new ArgumentParser({
 	version: '0.0.1',
@@ -134,66 +144,194 @@
 	res.send("ok");
 })
 
+function toCommaList(ctrArray) {
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		str=str+ctrArray[i];
+	}
+	return str;
+}
+
+function toCommaListTime(ctrArray) {
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		if (ctrArray[i] < 0) {
+			str=str+"--:--";
+		} else {
+			str=str+fmtMSS(ctrArray[i]);
+		}
+	}
+	return str;
+}
+
+function sumList(ctrArray) {
+	var tmp=0;
+	for(i=0;i<feedNames.length;i++) {
+		tmp=tmp+ctrArray[i];
+	}
+	return ""+tmp;
+}
+
+function largestInListTime(ctrArray) {
+	var tmp=-1;
+	var str=""
+	for(i=0;i<feedNames.length;i++) {
+		if (ctrArray[i] > tmp) {
+			tmp = ctrArray[i];
+		}
+	}
+	if (tmp < 0) {
+		str="--:--";
+	} else {
+		str=fmtMSS(tmp);
+	}
+	return str;
+}
+
 //Counter readout
 app.get("/ctr_publish_requests",function(req, res){
-	res.send(""+ctr_publish_requests);
+	res.send(""+sumList(ctr_publish_requests));
 })
+app.get("/feeds/ctr_publish_requests/",function(req, res){
+	res.send(toCommaList(ctr_publish_requests));
+})
+app.get("/ctr_publish_requests/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_requests[feedIndexes[feedId]]);
+})
+
+app.get("/ctr_publish_requests_bad_file_prefix",function(req, res){
+	res.send(""+sumList(ctr_publish_requests_bad_file_prefix));
+})
+app.get("/feeds/ctr_publish_requests_bad_file_prefix/",function(req, res){
+	res.send(toCommaList(ctr_publish_requests_bad_file_prefix));
+})
+app.get("/ctr_publish_requests_bad_file_prefix/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_requests_bad_file_prefix[feedIndexes[feedId]]);
+})
+
 app.get("/ctr_publish_responses",function(req, res){
-	res.send(""+ctr_publish_responses);
+	res.send(""+sumList(ctr_publish_responses));
 })
+app.get("/feeds/ctr_publish_responses/",function(req, res){
+	res.send(toCommaList(ctr_publish_responses));
+})
+app.get("/ctr_publish_responses/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+ctr_publish_responses[feedIndexes[feedId]]);
+})
+
 app.get("/execution_time",function(req, res){
-	diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
+	var diff = fmtMSS(Math.floor((Date.now()-startTime)/1000));
 	res.send(""+diff);
 })
 app.get("/time_lastpublish",function(req, res){
-	res.send(""+lastPublish);
+	res.send(""+largestInListTime(lastPublish));
 })
+app.get("/feeds/time_lastpublish/",function(req, res){
+	res.send(toCommaListTime(lastPublish));
+})
+app.get("/time_lastpublish/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	if (lastPublish[feedIndexes[feedId]] < 0) {
+		res.send("--:--");
+		return;
+	}
+	res.send(""+fmtMSS(lastPublish[feedIndexes[feedId]]));
+})
+
 app.get("/dwl_volume",function(req, res){
-	res.send(""+fmtLargeNumber(dwl_volume));
+	res.send(""+fmtLargeNumber(sumList(dwl_volume)));
 })
+app.get("/feeds/dwl_volume/",function(req, res){
+	var str="";
+	for(i=0;i<feedNames.length;i++) {
+		if (i!=0) {
+			str=str+",";
+		}
+		str=str+fmtLargeNumber(dwl_volume[i]);
+	}
+	res.send(str);
+})
+app.get("/dwl_volume/:feedId",function(req, res){
+	var feedId = req.params.feedId;
+	res.send(""+fmtLargeNumber(dwl_volume[feedIndexes[feedId]]));
+})
+
 app.get("/tc_info",function(req, res){
 	res.send(args.tc);
 })
 
-app.put('/publish/1/:filename', function (req, res) {
+app.get("/feeds",function(req, res){
+	res.send(feeds);
+})
+
+app.get("/speed",function(req, res){
+	res.send(""+speed);
+})
+
+function filenameStartsWith(fileName, feedIndex) {
+	var i=0;
+	for(i=0;i<filePrefixes[feedIndex].length;i++) {
+		var prefix=filePrefixes[feedIndex][i];
+		if (fileName.startsWith(prefix)) {
+			return true;
+		}
+	}
+	return false;
+}
+
+app.put('/publish/:feedId/:filename', function (req, res) {
+
 	console.log(req.url);
-	console.log("First 25 bytes of body: " + req.body.slice(0,25))
+	var feedId=req.params.feedId;
+//	console.log("First 25 bytes of body: " + req.body.slice(0,25))
 	console.log(req.headers)
-	ctr_publish_requests++;
+	ctr_publish_requests[feedIndexes[feedId]]++;
+	var filename = req.params.filename;
+	if (!filenameStartsWith(filename, feedIndexes[feedId])) {
+		ctr_publish_requests_bad_file_prefix[feedIndexes[feedId]]++;
+	}
+	var ctr = ctr_publish_requests[feedIndexes[feedId]];
 	if (args.tc == tc_no_publish) {
-		tr_publish_responses++;
+		ctr_publish_responses[feedIndexes[feedId]]++;
 		res.send("ok")
 		return;
-	} else if (args.tc==tc_10p_no_response && (ctr_publish_requests%10)==0) {
+	} else if (args.tc==tc_10p_no_response && (ctr%10)==0) {
 		return;
-	} else if (args.tc==tc_10first_no_response && ctr_publish_requests<11) {
+	} else if (args.tc==tc_10first_no_response && ctr<11) {
 		return;
-	} else if (args.tc==tc_100first_no_response && ctr_publish_requests<101) {
+	} else if (args.tc==tc_100first_no_response && ctr<101) {
 		return;
-	} else if (args.tc==tc_10p_error_response && (ctr_publish_requests%10)==0) {
-		tr_publish_responses++;
+	} else if (args.tc==tc_10p_error_response && (ctr%10)==0) {
+		ctr_publish_responses[feedIndexes[feedId]]++;
 		res.send(400, "");
 		return;
-	} else if (args.tc==tc_10first_error_response && ctr_publish_requests<11) {
-		tr_publish_responses++;
+	} else if (args.tc==tc_10first_error_response && ctr<11) {
+		ctr_publish_responses[feedIndexes[feedId]]++;
 		res.send(400, "");
 		return;
-	} else if (args.tc==tc_100first_error_response && ctr_publish_requests<101) {
-		tr_publish_responses++;
+	} else if (args.tc==tc_100first_error_response && ctr<101) {
+		ctr_publish_responses[feedIndexes[feedId]]++;
 		res.send(400, "");
 		return;
 	}
 
 	//Remaining part if normal file publish
 
-	var filename = req.params.filename;
 	console.log(filename);
-	//Create filename (appending file size to name) to store
-  	var storedFilename = path.resolve(__dirname, filename+"-"+req.body.length); 
+	//Create filename (appending file size and feedid to name) to store
+  	var storedFilename = path.resolve(__dirname, filename+"-"+feedId+"-"+req.body.length); 
   	fs.writeFile(storedFilename, "", function (error) {  //Store file with zero size
   		if (error) { console.error(error); }
 	});
-	
+
 	//Make callback to update list of publish files in DR sim
 	//Note the hard code ip-adress, DR sim get this ip if simulators started from the
 	//script in the 'simulatorgroup' dir.
@@ -201,24 +339,23 @@
 	var util = require('util');
 	var exec = require('child_process').exec;
 
-	var command = 'curl -s -X PUT http://' + dr_callback_ip + ':3906/dr_redir_publish/' +req.params.filename;
+	var command = 'curl -s -X PUT http://' + dr_callback_ip + ':3906/dr_redir_publish/'+feedId+'/'+filename;
 
 	console.log("Callback to DR sim to report file published, cmd: " + command);
-	child = exec(command, function(error, stdout, stderr){
+	var child = exec(command, function(error, stdout, stderr){
 		console.log('stdout: ' + stdout);
 		console.log('stderr: ' + stderr);
 		if(error !== null) {
 			console.log('exec error: ' + error);
 		}
-		
 	});
 
 	//Update status variables
-	ctr_publish_responses++;
-	lastPublish = fmtMSS(Math.floor((Date.now()-startTime)/1000));
-	dwl_volume = dwl_volume + req.body.length;
+	ctr_publish_responses[feedIndexes[feedId]]++;
+	lastPublish[feedIndexes[feedId]] = Math.floor((Date.now()-startTime)/1000);
+	dwl_volume[feedIndexes[feedId]] = dwl_volume[feedIndexes[feedId]] + req.body.length;
 
-	if (args.tc==tc_10p_delay_10s && (ctr_publish_requests%10)==0) {
+	if (args.tc==tc_10p_delay_10s && (ctr%10)==0) {
         sleep(10000).then(() => {
 			res.send("ok");
 		});
@@ -234,6 +371,15 @@
 		});
 		return;
 	}
+	if (total_first_publish == 0) {
+		total_first_publish=Date.now()/1000;
+	}
+	total_last_publish=Date.now()/1000;
+	total_files++;
+	if (total_last_publish > total_first_publish) {
+		speed = Math.round((total_files/(total_last_publish-total_first_publish))*10)/10;
+	}
+
 	res.send("ok")
 });
 
@@ -250,5 +396,36 @@
 
 if (process.env.DR_SIM_IP) {
 	dr_callback_ip=process.env.DR_SIM_IP;
-} 
+}
 console.log("Using IP " + dr_callback_ip + " for callback to DR sim");
+
+if (process.env.DR_REDIR_FEEDS) {
+	feeds=process.env.DR_REDIR_FEEDS;
+}
+console.log("Configured list of feeds: " + feeds);
+
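+//Parse the feed configuration into feed names, the feedId-to-index lookup and the per-feed file prefix lists, and initialize the per-feed counters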
+var i=0;
+feedNames=feeds.split(',');
+for(i=0;i<feedNames.length;i++) {
+	var tmp=feedNames[i].split(':');
+	feedNames[i]=tmp[0].trim();
+	feedIndexes[feedNames[i]]=i;
+	filePrefixes[i]=[]
+	var j=0;
+	for(j=1;j<tmp.length;j++) {
+		filePrefixes[i][j-1]=tmp[j];
+	}
+
+	ctr_publish_requests[i] = 0;
+	ctr_publish_requests_bad_file_prefix[i] = 0;
+	ctr_publish_responses[i] = 0;
+	lastPublish[i] = -1;
+	dwl_volume[i] = 0;
+}
+console.log("Parsed mapping between feed id and file name prefix");
+for(i=0;i<feedNames.length;i++) {
+	var fn = feedNames[i];
+	for (j=0;j<filePrefixes[i].length;j++) {
+		console.log("Feed id: " + fn + ", file name prefix: " + filePrefixes[i][j]);
+	}
+}
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps b/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps
new file mode 100644
index 0000000..736314c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/ftps-sftp-server/Dockerfile-ftps
@@ -0,0 +1,18 @@
+FROM docker.io/panubo/vsftpd
+
+
+COPY tls/ftp.crt /etc/ssl/private/ftp.crt
+RUN chmod 644 /etc/ssl/private/ftp.crt
+RUN chown root:root /etc/ssl/private/ftp.crt
+
+COPY tls/ftp.key /etc/ssl/private/ftp.key
+RUN chmod 644 /etc/ssl/private/ftp.key
+RUN chown root:root /etc/ssl/private/ftp.key
+
+COPY tls/dfc.crt /etc/ssl/private/dfc.crt
+RUN chmod 644 /etc/ssl/private/dfc.crt
+RUN chown root:root /etc/ssl/private/dfc.crt
+
+COPY configuration/vsftpd_ssl.conf /etc/vsftpd_ssl.conf
+RUN chmod 644 /etc/vsftpd_ssl.conf
+RUN chown root:root /etc/vsftpd_ssl.conf
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf b/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
index 99d64dc..6946bff 100644
--- a/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
+++ b/test/mocks/datafilecollector-testharness/ftps-sftp-server/configuration/vsftpd_ssl.conf
@@ -34,8 +34,8 @@
 connect_from_port_20=NO
 listen=YES
 tcp_wrappers=YES
-pasv_min_port=8001
-pasv_max_port=8010
+#pasv_min_port=8001
+#pasv_max_port=8010
 
 # SSL
 ssl_enable=Yes
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/README.md b/test/mocks/datafilecollector-testharness/mr-sim/README.md
index 7ec6e14..5e6ac6d 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/README.md
+++ b/test/mocks/datafilecollector-testharness/mr-sim/README.md
@@ -1,3 +1,84 @@
+#MR-simulator 
+This readme contains:
+
+**Introduction**
+
+**Building and running**
+
+**Configuration**
+
+**Statistics read-out and commands**
+
+###Introduction###
+The MR-sim is a python script delivering batches of events, each including one or more fileReady events for one or more PNFs.
+It is possible to configure the number of events, PNFs, consumer groups, existing or missing files, file prefixes and change identifiers.
+In addition, the MR-sim can be configured to deliver file urls for up to 5 FTP servers (simulating the PNFs).
+
+###Building and running###
+It is possible to build and run the MR-sim manually as a container if needed. In addition, the MR-sim can be executed as a python script, see instructions further down.
+Otherwise it is recommended to use the test scripts in the auto-test dir or to run all simulators in one go using the scripts in the simulator-group dir.
+
+To build and run manually as a docker container:
+1. Build docker container with ```docker build -t mrsim:latest .```
+2. Run the container ```docker-compose up```
+
+###Configuration###
+The event pattern, called TC, of the MR-sim is controlled with an arg to the python script. See section TC info for available patterns.
+All other configuration is done via environment variables.
+The simulator listens to port 2222.
+
+The following environment variables are used (see the example after the list):
+**FTPS_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate ftps file urls for. If not set, the MR-sim will assume 'localhost:21'. Minimum 1 and maximum 5 host-port pairs can be given.
+**SFTP_SIMS** - A comma-separated list of hostname:port for the FTP servers to generate sftp file urls for. If not set, the MR-sim will assume 'localhost:1022'. Minimum 1 and maximum 5 host-port pairs can be given.
+**NUM_FTP_SERVERS** - Number of FTP servers to use out of those specified in the environment variables above. The number shall be in the range 1-5.
+**MR_GROUPS** - A comma-separated list of consumer-group:changeId[:changeId]*. Defines which change identifiers should be used for each consumer group. If not set, the MR-sim will assume 'OpenDcae-c12:PM_MEAS_FILES'.
+**MR_FILE_PREFIX_MAPPING** - A comma-separated list of changeId:filePrefix. Defines which file prefix to use for each change identifier, needed to distinguish files for each change identifier. If not set, the MR-sim will assume 'PM_MEAS_FILES:A'.
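+
+A minimal configuration example is shown below (the second consumer group, its change identifier, the file prefix 'B', the extra sftp port and the server count are only illustrative values; use the groups, change identifiers and ports that match your simulator setup):
+
+```
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES,OpenDcae-c13:PM_MEAS_FILES_B"
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A,PM_MEAS_FILES_B:B"
+export SFTP_SIMS="localhost:1022,localhost:1023"
+export NUM_FTP_SERVERS=2
+```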
+
+
+
+###Statistics read-out and commands###
+The simulator can be queried for statistics and started/stopped (use curl from cmd line or open in browser, curl is used below; see the per-group example after the list):
+
+`curl localhost:2222` - Just returns 'Hello World'.
+
+`curl localhost:2222/groups` - Returns a comma-separated list of the configured consumer groups.
+
+`curl localhost:2222/changeids` - Returns a comma-separated list of the configured change id sets, where each set is a colon-separated list of change identifiers for the corresponding consumer group.
+
+`curl localhost:2222/fileprefixes` - Returns the setting of env var MR_FILE_PREFIX_MAPPING.
+
+`curl localhost:2222/ctr_requests`   - Returns the total number of get requests to the event poll path.
+
+`curl localhost:2222/groups/ctr_requests`   - Returns a comma-separated list of the number of get requests to the event poll path, one value per consumer group.
+
+`curl localhost:2222/ctr_requests/<consumer-group>`   - Returns the number of get requests to the event poll path for the specified consumer group.
+
+`curl localhost:2222/ctr_responses`  - Returns the total number of get responses to the event poll path.
+
+`curl localhost:2222/ctr_files` - Returns the total number of files.
+
+`curl localhost:2222/ctr_unique_files` - Returns the number of unique files. A unique file is the combination of node+file_sequence_number.
+
+`curl localhost:2222/tc_info` - Returns the tc string (as given on the cmd line).
+
+`curl localhost:2222/ctr_events` - Returns the total number of events.
+
+`curl localhost:2222/execution_time` - Returns the execution time in mm:ss.
+
+`curl localhost:2222/exe_time_first_poll` - Returns the execution time in mm:ss counted from the first poll.
+
+`curl localhost:2222/ctr_unique_PNFs` - Returns the number of unique PNFs in all events.
+
+`curl localhost:2222/start` - Starts event delivery (default state).
+
+`curl localhost:2222/stop` - Stops event delivery.
+
+`curl localhost:2222/status` - Returns the started or stopped status.
+
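+The per-group counters take the configured consumer group name as the last path element. A minimal sketch, assuming the default consumer group 'OpenDcae-c12' (substitute any configured group name):
+
+```
+curl localhost:2222/ctr_requests/OpenDcae-c12
+curl localhost:2222/ctr_files/OpenDcae-c12
+curl localhost:2222/ctr_events/OpenDcae-c12
+```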
 
 #Alternative to running python (as described below) on your machine, use the docker files.
 1. Build docker container with ```docker build -t mrsim:latest .```
diff --git a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
index 219415a..eefc61e 100644
--- a/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
+++ b/test/mocks/datafilecollector-testharness/mr-sim/mr-sim.py
@@ -14,8 +14,25 @@
 HOST_IP = "0.0.0.0"
 HOST_PORT = 2222
 
-SFTP_PORT = 1022
-FTPS_PORT = 21
+sftp_hosts=[]
+sftp_ports=[]
+ftps_hosts=[]
+ftps_ports=[]
+num_ftp_servers=0
+
+def sumList(ctrArray):
+    tmp=0
+    for i in range(len(ctrArray)):
+        tmp=tmp+ctrArray[i]
+
+    return str(tmp)
+
+def sumListLength(ctrArray):
+    tmp=0
+    for i in range(len(ctrArray)):
+        tmp=tmp+len(ctrArray[i])
+
+    return str(tmp)
 
 #Test function to check server running
 @app.route('/',
@@ -23,33 +40,140 @@
 def index():
     return 'Hello world'
 
+#Returns the list of configured groups
+@app.route('/groups',
+    methods=['GET'])
+def group_ids():
+    global configuredGroups
+    return configuredGroups
+
+#Returns the list of configured changeids
+@app.route('/changeids',
+    methods=['GET'])
+def change_ids():
+    global configuredChangeIds
+    return configuredChangeIds
+
+#Returns the list of configured fileprefixes
+@app.route('/fileprefixes',
+    methods=['GET'])
+def fileprefixes():
+    global configuredPrefixes
+    return configuredPrefixes
+
+
 #Returns number of polls
 @app.route('/ctr_requests',
     methods=['GET'])
 def counter_requests():
     global ctr_requests
-    return str(ctr_requests)
+    return sumList(ctr_requests)
 
-#Returns number of replies
+#Returns number of polls for all groups
+@app.route('/groups/ctr_requests',
+    methods=['GET'])
+def group_counter_requests():
+    global ctr_requests
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(ctr_requests[i])
+    return tmp
+
+#Returns the total number of polls for a group
+@app.route('/ctr_requests/<groupId>',
+    methods=['GET'])
+def counter_requests_group(groupId):
+    global ctr_requests
+    global groupNameIndexes
+    return str(ctr_requests[groupNameIndexes[groupId]])
+
+#Returns number of poll replies
 @app.route('/ctr_responses',
     methods=['GET'])
 def counter_responses():
     global ctr_responses
-    return str(ctr_responses)
+    return sumList(ctr_responses)
 
-#Returns the total number of file
+#Returns number of poll replies for all groups
+@app.route('/groups/ctr_responses',
+    methods=['GET'])
+def group_counter_responses():
+    global ctr_responses
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(ctr_responses[i])
+    return tmp
+
+#Returns the total number of poll replies for a group
+@app.route('/ctr_responses/<groupId>',
+    methods=['GET'])
+def counter_responses_group(groupId):
+    global ctr_responses
+    global groupNameIndexes
+    return str(ctr_responses[groupNameIndexes[groupId]])
+
+#Returns the total number of files
 @app.route('/ctr_files',
     methods=['GET'])
 def counter_files():
     global ctr_files
-    return str(ctr_files)
+    return sumList(ctr_files)
+
+#Returns the total number of file for all groups
+@app.route('/groups/ctr_files',
+    methods=['GET'])
+def group_counter_files():
+    global ctr_files
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(ctr_files[i])
+    return tmp
+
+#Returns the total number of files for a group
+@app.route('/ctr_files/<groupId>',
+    methods=['GET'])
+def counter_files_group(groupId):
+    global ctr_files
+    global groupNameIndexes
+    return str(ctr_files[groupNameIndexes[groupId]])
+
 
 #Returns number of unique files
 @app.route('/ctr_unique_files',
     methods=['GET'])
 def counter_uniquefiles():
     global fileMap
-    return str(len(fileMap))
+    return sumListLength(fileMap)
+
+#Returns number of unique files for all groups
+@app.route('/groups/ctr_unique_files',
+    methods=['GET'])
+def group_counter_uniquefiles():
+    global fileMap
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(len(fileMap[i]))
+    return tmp
+
+#Returns the total number of unique files for a group
+@app.route('/ctr_unique_files/<groupId>',
+    methods=['GET'])
+def counter_uniquefiles_group(groupId):
+    global fileMap
+    global groupNameIndexes
+    return str(len(fileMap[groupNameIndexes[groupId]]))
 
 #Returns tc info
 @app.route('/tc_info',
@@ -63,7 +187,28 @@
     methods=['GET'])
 def counter_events():
     global ctr_events
-    return str(ctr_events)
+    return sumList(ctr_events)
+
+#Returns number of events for all groups
+@app.route('/groups/ctr_events',
+    methods=['GET'])
+def group_counter_events():
+    global ctr_events
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(ctr_events[i])
+    return tmp
+
+#Returns the total number of events for a group
+@app.route('/ctr_events/<groupId>',
+    methods=['GET'])
+def counter_events_group(groupId):
+    global ctr_events
+    global groupNameIndexes
+    return str(ctr_events[groupNameIndexes[groupId]])
 
 #Returns execution time in mm:ss
 @app.route('/execution_time',
@@ -81,9 +226,44 @@
 def exe_time_first_poll():
     global firstPollTime
 
-    if (firstPollTime == 0):
+    tmp = 0
+    for i in range(len(groupNames)):
+        if (firstPollTime[i] > tmp):
+            tmp = firstPollTime[i]
+
+    if (tmp == 0):
         return "--:--"
-    minutes, seconds = divmod(time.time()-firstPollTime, 60)
+    minutes, seconds = divmod(time.time()-tmp, 60)
+    return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+
+#Returns the timestamp for first poll for all groups
+@app.route('/groups/exe_time_first_poll',
+    methods=['GET'])
+def group_exe_time_first_poll():
+    global firstPollTime
+    global groupNames
+
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        if (firstPollTime[i] == 0):
+            tmp=tmp+ "--:--"
+        else:
+            minutes, seconds = divmod(time.time()-firstPollTime[i], 60)
+            tmp=tmp+"{:0>2}:{:0>2}".format(int(minutes),int(seconds))
+    return tmp
+
+#Returns the timestamp for first poll for a group
+@app.route('/exe_time_first_poll/<groupId>',
+    methods=['GET'])
+def exe_time_first_poll_group(groupId):
+    global ctr_requests
+    global groupNameIndexes
+
+    if (firstPollTime[groupNameIndexes[groupId]] == 0):
+        return "--:--"
+    minutes, seconds = divmod(time.time()-firstPollTime[groupNameIndexes[groupId]], 60)
     return "{:0>2}:{:0>2}".format(int(minutes),int(seconds))
 
 #Starts event delivery
@@ -114,525 +294,660 @@
     methods=['GET'])
 def counter_uniquePNFs():
     global pnfMap
-    return str(len(pnfMap))
+    return sumListLength(pnfMap)
+
+#Returns number of unique PNFs for all groups
+@app.route('/groups/ctr_unique_PNFs',
+    methods=['GET'])
+def group_counter_uniquePNFs():
+    global pnfMap
+    global groupNames
+    tmp=''
+    for i in range(len(groupNames)):
+        if (i > 0):
+            tmp=tmp+','
+        tmp=tmp+str(len(pnfMap[i]))
+    return tmp
+
+#Returns the unique PNFs for a group
+@app.route('/ctr_unique_PNFs/<groupId>',
+    methods=['GET'])
+def counter_uniquePNFs_group(groupId):
+    global pnfMap
+    global groupNameIndexes
+    return str(len(pnfMap[groupNameIndexes[groupId]]))
+
 
 #Messages polling function
 @app.route(
-    "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12",
+    "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/<consumerGroup>/<consumerId>",
     methods=['GET'])
-def MR_reply():
+def MR_reply(consumerGroup, consumerId):
     global ctr_requests
     global ctr_responses
     global args
     global runningState
     global firstPollTime
+    global groupNameIndexes
+    global changeIds
+    global filePrefixes
 
-    if (firstPollTime == 0):
-        firstPollTime = time.time()
+    groupIndex = groupNameIndexes[consumerGroup]
+    print("Setting groupIndex: " + str(groupIndex))
 
-    ctr_requests = ctr_requests + 1
-    print("MR: poll request#: " + str(ctr_requests))
+    reqCtr = ctr_requests[groupIndex]
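+    # Rotate through the change identifiers configured for this consumer group (one changeId per poll)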
+    changeId = changeIds[groupIndex][reqCtr%len(changeIds[groupIndex])]
+    print("Setting changeid: " + changeId)
+    filePrefix = filePrefixes[changeId]
+    print("Setting file name prefix: " + filePrefix)
+
+    if (firstPollTime[groupIndex] == 0):
+        firstPollTime[groupIndex] = time.time()
+
+    ctr_requests[groupIndex] = ctr_requests[groupIndex] + 1
+    print("MR: poll request#: " + str(ctr_requests[groupIndex]))
 
     if (runningState == "Stopped"):
-        ctr_responses = ctr_responses + 1
+        ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
         return buildOkResponse("[]")
 
+
+
     if args.tc100:
-      return tc100("sftp")
+      return tc100(groupIndex, changeId, filePrefix, "sftp", "1MB")
     elif args.tc101:
-      return tc101("sftp")
+      return tc100(groupIndex, changeId, filePrefix, "sftp", "5MB")
     elif args.tc102:
-      return tc102("sftp")
+      return tc100(groupIndex, changeId, filePrefix, "sftp", "50MB")
 
     elif args.tc110:
-      return tc110("sftp")
+      return tc110(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc111:
-      return tc111("sftp")
+      return tc111(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc112:
-      return tc112("sftp")
+      return tc112(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc113:
-      return tc113("sftp")
+      return tc113(groupIndex, changeId, filePrefix, "sftp")
 
     elif args.tc120:
-      return tc120("sftp")
+      return tc120(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc121:
-      return tc121("sftp")
+      return tc121(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc122:
-      return tc122("sftp")
+      return tc122(groupIndex, changeId, filePrefix, "sftp")
 
     elif args.tc1000:
-      return tc1000("sftp")
+      return tc1000(groupIndex, changeId, filePrefix, "sftp")
     elif args.tc1001:
-      return tc1001("sftp")
+      return tc1001(groupIndex, changeId, filePrefix, "sftp")
 
     elif args.tc1100:
-      return tc1100("sftp","1MB")
+      return tc1100(groupIndex, changeId, filePrefix, "sftp","1MB")
     elif args.tc1101:
-      return tc1100("sftp","50MB")
+      return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
     elif args.tc1102:
-      return tc1100("sftp","50MB")
+      return tc1100(groupIndex, changeId, filePrefix, "sftp","50MB")
     elif args.tc1200:
-      return tc1200("sftp","1MB")
+      return tc1200(groupIndex, changeId, filePrefix, "sftp","1MB")
     elif args.tc1201:
-      return tc1200("sftp","5MB")
+      return tc1200(groupIndex, changeId, filePrefix, "sftp","5MB")
     elif args.tc1202:
-      return tc1200("sftp","50MB")
+      return tc1200(groupIndex, changeId, filePrefix, "sftp","50MB")
     elif args.tc1300:
-      return tc1300("sftp","1MB")
+      return tc1300(groupIndex, changeId, filePrefix, "sftp","1MB")
     elif args.tc1301:
-      return tc1300("sftp","5MB")
+      return tc1300(groupIndex, changeId, filePrefix, "sftp","5MB")
     elif args.tc1302:
-      return tc1300("sftp","50MB")
+      return tc1300(groupIndex, changeId, filePrefix, "sftp","50MB")
+
+    elif args.tc1500:
+      return tc1500(groupIndex, changeId, filePrefix, "sftp","1MB")
 
     elif args.tc500:
-      return tc500("sftp","1MB")
+      return tc500(groupIndex, changeId, filePrefix, "sftp","1MB")
     elif args.tc501:
-      return tc500("sftp","5MB")
+      return tc500(groupIndex, changeId, filePrefix, "sftp","5MB")
     elif args.tc502:
-      return tc500("sftp","50MB")
+      return tc500(groupIndex, changeId, filePrefix, "sftp","50MB")
     elif args.tc510:
-      return tc510("sftp")
+      return tc510(groupIndex, changeId, filePrefix, "sftp", "1MB")
     elif args.tc511:
-      return tc511("sftp")
+      return tc511(groupIndex, changeId, filePrefix, "sftp", "1KB")
+
+    elif args.tc550:
+      return tc510(groupIndex, changeId, filePrefix, "sftp", "50MB")
 
     elif args.tc710:
-      return tc710("sftp")
+      return tc710(groupIndex, changeId, filePrefix, "sftp")
 
 
     elif args.tc200:
-      return tc100("ftps")
+      return tc100(groupIndex, changeId, filePrefix, "ftps", "1MB")
     elif args.tc201:
-      return tc101("ftps")
+      return tc100(groupIndex, changeId, filePrefix, "ftps", "5MB")
     elif args.tc202:
-      return tc102("ftps")
+      return tc100(groupIndex, changeId, filePrefix, "ftps", "50MB")
 
     elif args.tc210:
-      return tc110("ftps")
+      return tc110(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc211:
-      return tc111("ftps")
+      return tc111(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc212:
-      return tc112("ftps")
+      return tc112(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc213:
-      return tc113("ftps")
+      return tc113(groupIndex, changeId, filePrefix, "ftps")
 
     elif args.tc220:
-      return tc120("ftps")
+      return tc120(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc221:
-      return tc121("ftps")
+      return tc121(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc222:
-      return tc122("ftps")
+      return tc122(groupIndex, changeId, filePrefix, "ftps")
 
     elif args.tc2000:
-      return tc1000("ftps")
+      return tc1000(groupIndex, changeId, filePrefix, "ftps")
     elif args.tc2001:
-      return tc1001("ftps")
+      return tc1001(groupIndex, changeId, filePrefix, "ftps")
 
     elif args.tc2100:
-      return tc1100("ftps","1MB")
+      return tc1100(groupIndex, changeId, filePrefix, "ftps","1MB")
     elif args.tc2101:
-      return tc1100("ftps","50MB")
+      return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
     elif args.tc2102:
-      return tc1100("ftps","50MB")
+      return tc1100(groupIndex, changeId, filePrefix, "ftps","50MB")
     elif args.tc2200:
-      return tc1200("ftps","1MB")
+      return tc1200(groupIndex, changeId, filePrefix, "ftps","1MB")
     elif args.tc2201:
-      return tc1200("ftps","5MB")
+      return tc1200(groupIndex, changeId, filePrefix, "ftps","5MB")
     elif args.tc2202:
-      return tc1200("ftps","50MB")
+      return tc1200(groupIndex, changeId, filePrefix, "ftps","50MB")
     elif args.tc2300:
-      return tc1300("ftps","1MB")
+      return tc1300(groupIndex, changeId, filePrefix, "ftps","1MB")
     elif args.tc2301:
-      return tc1300("ftps","5MB")
+      return tc1300(groupIndex, changeId, filePrefix, "ftps","5MB")
     elif args.tc2302:
-      return tc1300("ftps","50MB")
+      return tc1300(groupIndex, changeId, filePrefix, "ftps","50MB")
+
+    elif args.tc2500:
+      return tc1500(groupIndex, changeId, filePrefix, "ftps","1MB")
 
     elif args.tc600:
-      return tc500("ftps","1MB")
+      return tc500(groupIndex, changeId, filePrefix, "ftps","1MB")
     elif args.tc601:
-      return tc500("ftps","5MB")
+      return tc500(groupIndex, changeId, filePrefix, "ftps","5MB")
     elif args.tc602:
-      return tc500("ftps","50MB")
+      return tc500(groupIndex, changeId, filePrefix, "ftps","50MB")
     elif args.tc610:
-      return tc510("ftps")
+      return tc510(groupIndex, changeId, filePrefix, "ftps", "1MB")
     elif args.tc611:
-      return tc511("ftps")
-
+      return tc511(groupIndex, changeId, filePrefix, "ftps", "1KB")
+    elif args.tc650:
+      return tc510(groupIndex, changeId, filePrefix, "ftps", "50MB")
     elif args.tc810:
-      return tc710("ftps")
+      return tc710(groupIndex, changeId, filePrefix, "ftps")
 
 
 #### Test case functions
 
 
-def tc100(ftptype):
+def tc100(groupIndex, changeId, filePrefix, ftpType, fileSize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 1):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 1):
     return buildOkResponse("[]")
 
-  seqNr = (ctr_responses-1)
-  nodeName = createNodeName(0)
-  fileName = createFileName(nodeName, seqNr, "1MB")
-  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
-  fileMap[seqNr] = seqNr
-  ctr_events = ctr_events+1
+  seqNr = (ctr_responses[groupIndex]-1)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+  msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
+  fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
   return buildOkResponse("["+msg+"]")
 
-def tc101(ftptype):
+#def tc101(groupIndex, ftpType):
+#  global ctr_responses
+#  global ctr_events
+#
+#  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+#
+#  if (ctr_responses[groupIndex] > 1):
+#    return buildOkResponse("[]")
+#
+#  seqNr = (ctr_responses[groupIndex]-1)
+#  nodeName = createNodeName(0)
+#  fileName = createFileName(groupIndex, nodeName, seqNr, "5MB")
+#  msg = getEventHead(groupIndex, nodeName) + getEventName(fileName,ftpType,"onap","pano") + getEventEnd()
+#  fileMap[groupIndex][seqNr] = seqNr
+#  ctr_events[groupIndex] = ctr_events[groupIndex]+1
+#  return buildOkResponse("["+msg+"]")
+#
+#def tc102(groupIndex, ftpType):
+#  global ctr_responses
+#  global ctr_events
+#
+#  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+#
+#  if (ctr_responses[groupIndex] > 1):
+#    return buildOkResponse("[]")
+#
+#  seqNr = (ctr_responses[groupIndex]-1)
+#  nodeName = createNodeName(0)
+#  fileName = createFileName(groupIndex, nodeName, seqNr, "50MB")
+#  msg = getEventHead(groupIndex, nodeName) + getEventName(fileName,ftpType,"onap","pano") + getEventEnd()
+#  fileMap[groupIndex][seqNr] = seqNr
+#  ctr_events[groupIndex] = ctr_events[groupIndex]+1
+#  return buildOkResponse("["+msg+"]")
+
+def tc110(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 1):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  seqNr = (ctr_responses-1)
-  nodeName = createNodeName(0)
-  fileName = createFileName(nodeName, seqNr, "5MB")
-  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
-  fileMap[seqNr] = seqNr
-  ctr_events = ctr_events+1
+  seqNr = (ctr_responses[groupIndex]-1)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+  msg = getEventHead(groupIndex, changeId, nodeName) + getEventName(fileName,ftpType,"onap","pano",nodeIndex) + getEventEnd()
+  fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
   return buildOkResponse("["+msg+"]")
 
-def tc102(ftptype):
+def tc111(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 1):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  seqNr = (ctr_responses-1)
-  nodeName = createNodeName(0)
-  fileName = createFileName(nodeName, seqNr, "50MB")
-  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
-  fileMap[seqNr] = seqNr
-  ctr_events = ctr_events+1
-  return buildOkResponse("["+msg+"]")
-
-def tc110(ftptype):
-  global ctr_responses
-  global ctr_unique_files
-  global ctr_events
-
-  ctr_responses = ctr_responses + 1
-
-  if (ctr_responses > 100):
-    return buildOkResponse("[]")
-
-  seqNr = (ctr_responses-1)
-  nodeName = createNodeName(0)
-  fileName = createFileName(nodeName, seqNr, "1MB")
-  msg = getEventHead(nodeName) + getEventName(fileName,ftptype,"onap","pano") + getEventEnd()
-  fileMap[seqNr] = seqNr
-  ctr_events = ctr_events+1
-  return buildOkResponse("["+msg+"]")
-
-def tc111(ftptype):
-  global ctr_responses
-  global ctr_unique_files
-  global ctr_events
-
-  ctr_responses = ctr_responses + 1
-
-  if (ctr_responses > 100):
-    return buildOkResponse("[]")
-
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if i != 0: msg = msg + ","
-    fileName = createFileName(nodeName, seqNr, "1MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
-    fileMap[seqNr] = seqNr
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc112(ftptype):
+def tc112(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 100):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if i != 0: msg = msg + ","
-    fileName = createFileName(nodeName, seqNr, "5MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
-    fileMap[seqNr] = seqNr
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc113(ftptype):
+def tc113(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 1):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 1):
     return buildOkResponse("[]")
 
-  nodeName = createNodeName(0)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
   msg = ""
 
   for evts in range(100):  # build 100 evts
     if (evts > 0):
       msg = msg + ","
-    msg = msg + getEventHead(nodeName)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
     for i in range(100):   # build 100 files
-      seqNr = i+evts+100*(ctr_responses-1)
+      seqNr = i+evts+100*(ctr_responses[groupIndex]-1)
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, "1MB")
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
-      fileMap[seqNr] = seqNr
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
 
-def tc120(ftptype):
+def tc120(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  nodeName = createNodeName(0)
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
 
-  if (ctr_responses > 100):
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  if (ctr_responses % 10 == 2):
+  if (ctr_responses[groupIndex] % 10 == 2):
     return  # Return nothing
 
-  if (ctr_responses % 10 == 3):
+  if (ctr_responses[groupIndex] % 10 == 3):
     return buildOkResponse("") # Return empty message
 
-  if (ctr_responses % 10 == 4):
-    return buildOkResponse(getEventHead(nodeName)) # Return part of a json event
+  if (ctr_responses[groupIndex] % 10 == 4):
+    return buildOkResponse(getEventHead(groupIndex, changeId, nodeName)) # Return part of a json event
 
-  if (ctr_responses % 10 == 5):
+  if (ctr_responses[groupIndex] % 10 == 5):
     return buildEmptyResponse(404) # Return empty message with status code
 
-  if (ctr_responses % 10 == 6):
+  if (ctr_responses[groupIndex] % 10 == 6):
     sleep(60)
 
 
-  msg = getEventHead(nodeName)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if i != 0: msg = msg + ","
-    fileName = createFileName(nodeName, seqNr, "1MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
-    fileMap[seqNr] = seqNr
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc121(ftptype):
+def tc121(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 100):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   fileName = ""
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if (seqNr%10 == 0):     # Every 10th file is "missing"
-      fileName = createMissingFileName(nodeName, seqNr, "1MB")
+      fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
     else:
-      fileName = createFileName(nodeName, seqNr, "1MB")
-      fileMap[seqNr] = seqNr
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     if i != 0: msg = msg + ","
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc122(ftptype):
+def tc122(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 100):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    fileName = createFileName(nodeName, 0, "1MB")  # All files identical names
+    fileName = createFileName(groupIndex, filePrefix, nodeName, 0, "1MB")  # All files identical names
     if i != 0: msg = msg + ","
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
 
-  fileMap[0] = 0
+  fileMap[groupIndex][0] = 0
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
 
-def tc1000(ftptype):
+def tc1000(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if i != 0: msg = msg + ","
-    fileName = createFileName(nodeName, seqNr, "1MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
-    fileMap[seqNr] = seqNr
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc1001(ftptype):
+def tc1001(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  nodeName = createNodeName(0)
-  msg = getEventHead(nodeName)
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  nodeIndex=0
+  nodeName = createNodeName(nodeIndex)
+  msg = getEventHead(groupIndex, changeId, nodeName)
 
   for i in range(100):
-    seqNr = i+(ctr_responses-1)
+    seqNr = i+(ctr_responses[groupIndex]-1)
     if i != 0: msg = msg + ","
-    fileName = createFileName(nodeName, seqNr, "5MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
-    fileMap[seqNr] = seqNr
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "5MB")
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
   msg = msg + getEventEnd()
-  ctr_events = ctr_events+1
+  ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
 
-def tc1100(ftptype, filesize):
+def tc1100(groupIndex, changeId, filePrefix, ftpType, filesize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
+
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
 
   msg = ""
 
-  batch = (ctr_responses-1)%20;
+  batch = (ctr_responses[groupIndex]-1)%20;
 
   for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
     if (pnfs > 0):
       msg = msg + ","
-    nodeName = createNodeName(pnfs + batch*35)
-    msg = msg + getEventHead(nodeName)
+    nodeIndex=pnfs + batch*35
+    nodeName = createNodeName(nodeIndex)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
 
     for i in range(100):  # 100 files per event
-      seqNr = i + int((ctr_responses-1)/20);
+      seqNr = i + int((ctr_responses[groupIndex]-1)/20);
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, filesize)
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
       seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
-      fileMap[seqNr] = seqNr
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc1200(ftptype, filesize):
+def tc1200(groupIndex, changeId, filePrefix, ftpType, filesize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
+
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
 
   msg = ""
 
-  batch = (ctr_responses-1)%20;
+  batch = (ctr_responses[groupIndex]-1)%20;
 
   for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
     if (pnfs > 0):
       msg = msg + ","
-    nodeName = createNodeName(pnfs + batch*35)
-    msg = msg + getEventHead(nodeName)
+    nodeIndex=pnfs + batch*35
+    nodeName = createNodeName(nodeIndex)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
 
     for i in range(100):  # 100 files per event, all new files
-      seqNr = i+100 * int((ctr_responses-1)/20);
+      seqNr = i+100 * int((ctr_responses[groupIndex]-1)/20);
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, filesize)
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
       seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
-      fileMap[seqNr] = seqNr
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
 
-def tc1300(ftptype, filesize):
+def tc1300(groupIndex, changeId, filePrefix, ftpType, filesize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
   global rop_counter
   global rop_timestamp
 
-  ctr_responses = ctr_responses + 1
+  if (rop_counter == 0):
+      rop_timestamp = time.time()
+
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
 
   #Start a  event deliver for all 700 nodes every 15min
   rop = time.time()-rop_timestamp
   if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
       return buildOkResponse("[]")
   else:
+    if (rop_counter%20 == 0) & (rop_counter > 0):
+        rop_timestamp = rop_timestamp+900
+
+    rop_counter = rop_counter+1
+
+  msg = ""
+
+  batch = (rop_counter-1)%20;
+
+  for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
+    if (pnfs > 0):
+      msg = msg + ","
+    nodeIndex=pnfs + batch*35
+    nodeName = createNodeName(nodeIndex)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
+
+    for i in range(100):  # 100 files per event
+      seqNr = i + int((rop_counter-1)/20);
+      if i != 0: msg = msg + ","
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+      seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+
+    msg = msg + getEventEnd()
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
+
+  return buildOkResponse("["+msg+"]")
+
+def tc1500(groupIndex, changeId, filePrefix, ftpType, filesize):
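+  # TC1500: the first 2000 polls simulate a 25h backlog of events, delivered without waiting for the
+  # 15 min rop timer and with a decreasing share of 'missing' files; after that, one rop (all 700 PNFs)
+  # is delivered every 15 min.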
+  global ctr_responses
+  global ctr_events
+  global rop_counter
+  global rop_timestamp
+
+  if (rop_counter == 0):
+      rop_timestamp = time.time()
+
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] <= 2000):   #first 25h of events does not care about the 15 min rop timer
+
+    msg = ""
+
+    batch = (ctr_responses[groupIndex]-1)%20;
+
+    for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
+        if (pnfs > 0):
+            msg = msg + ","
+
+        nodeIndex=pnfs + batch*35
+        nodeName = createNodeName(nodeIndex)
+        msg = msg + getEventHead(groupIndex, changeId, nodeName)
+
+        for i in range(100):  # 100 files per event
+            seqNr = i + int((ctr_responses[groupIndex]-1)/20);
+            if i != 0: msg = msg + ","
+            if (seqNr < 100):
+                fileName = createMissingFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+            else:
+                fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+                seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
+                fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
+            msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
+
+
+        msg = msg + getEventEnd()
+        ctr_events[groupIndex] = ctr_events[groupIndex]+1
+
+        rop_counter = rop_counter+1
+    return buildOkResponse("["+msg+"]")
+
+  #Start an event delivery for all 700 nodes every 15min
+  rop = time.time()-rop_timestamp
+  if ((rop < 900) & (rop_counter%20 == 0) & (rop_counter != 0)):
+      return buildOkResponse("[]")
+  else:
     if (rop_counter%20 == 0):
         rop_timestamp = time.time()
 
@@ -645,30 +960,31 @@
   for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
     if (pnfs > 0):
       msg = msg + ","
-    nodeName = createNodeName(pnfs + batch*35)
-    msg = msg + getEventHead(nodeName)
+    nodeIndex=pnfs + batch*35
+    nodeName = createNodeName(nodeIndex)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
 
     for i in range(100):  # 100 files per event
       seqNr = i + int((rop_counter-1)/20);
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, filesize)
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+      msg = msg + getEventName(fileName,ftpType,"onap","pano", nodeIndex)
       seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
-      fileMap[seqNr] = seqNr
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc500(ftptype, filesize):
+def tc500(groupIndex, changeId, filePrefix, ftpType, filesize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 1):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 1):
     return buildOkResponse("[]")
 
   msg = ""
@@ -678,29 +994,29 @@
     if (pnfs > 0):
       msg = msg + ","
     nodeName = createNodeName(pnfs)
-    msg = msg + getEventHead(nodeName)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
 
     for i in range(2):
       seqNr = i;
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, filesize)
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, filesize)
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
       seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
-      fileMap[seqNr] = seqNr
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc510(ftptype):
+def tc510(groupIndex, changeId, filePrefix, ftpType, fileSize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 5):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 5):
     return buildOkResponse("[]")
 
   msg = ""
@@ -709,25 +1025,25 @@
     if (pnfs > 0):
       msg = msg + ","
     nodeName = createNodeName(pnfs)
-    msg = msg + getEventHead(nodeName)
-    seqNr = (ctr_responses-1)
-    fileName = createFileName(nodeName, seqNr, "1MB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
+    seqNr = (ctr_responses[groupIndex]-1)
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
     seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
-    fileMap[seqNr] = seqNr
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc511(ftptype):
+def tc511(groupIndex, changeId, filePrefix, ftpType, fileSize):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 5):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 5):
     return buildOkResponse("[]")
 
   msg = ""
@@ -736,47 +1052,48 @@
     if (pnfs > 0):
       msg = msg + ","
     nodeName = createNodeName(pnfs)
-    msg = msg + getEventHead(nodeName)
-    seqNr = (ctr_responses-1)
-    fileName = createFileName(nodeName, seqNr, "1KB")
-    msg = msg + getEventName(fileName,ftptype,"onap","pano")
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
+    seqNr = (ctr_responses[groupIndex]-1)
+    fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, fileSize)
+    msg = msg + getEventName(fileName,ftpType,"onap","pano",pnfs)
     seqNr = seqNr + pnfs*1000000 #Create unique id for this node and file
-    fileMap[seqNr] = seqNr
+    fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
-def tc710(ftptype):
+def tc710(groupIndex, changeId, filePrefix, ftpType):
   global ctr_responses
-  global ctr_unique_files
   global ctr_events
 
-  ctr_responses = ctr_responses + 1
 
-  if (ctr_responses > 100):
+  ctr_responses[groupIndex] = ctr_responses[groupIndex] + 1
+
+  if (ctr_responses[groupIndex] > 100):
     return buildOkResponse("[]")
 
   msg = ""
 
-  batch = (ctr_responses-1)%20;
+  batch = (ctr_responses[groupIndex]-1)%20;
 
   for pnfs in range(35):  # build events for 35 PNFs at a time. 20 batches -> 700
     if (pnfs > 0):
       msg = msg + ","
-    nodeName = createNodeName(pnfs + batch*35)
-    msg = msg + getEventHead(nodeName)
+    nodeIndex=pnfs + batch*35
+    nodeName = createNodeName(nodeIndex)
+    msg = msg + getEventHead(groupIndex, changeId, nodeName)
 
     for i in range(100):  # 100 files per event
-      seqNr = i + int((ctr_responses-1)/20);
+      seqNr = i + int((ctr_responses[groupIndex]-1)/20);
       if i != 0: msg = msg + ","
-      fileName = createFileName(nodeName, seqNr, "1MB")
-      msg = msg + getEventName(fileName,ftptype,"onap","pano")
+      fileName = createFileName(groupIndex, filePrefix, nodeName, seqNr, "1MB")
+      msg = msg + getEventName(fileName,ftpType,"onap","pano",nodeIndex)
       seqNr = seqNr + (pnfs+batch*35)*1000000 #Create unique id for this node and file
-      fileMap[seqNr] = seqNr
+      fileMap[groupIndex][seqNr*hash(filePrefix)] = seqNr
 
     msg = msg + getEventEnd()
-    ctr_events = ctr_events+1
+    ctr_events[groupIndex] = ctr_events[groupIndex]+1
 
   return buildOkResponse("["+msg+"]")
 
@@ -786,22 +1103,22 @@
 def createNodeName(index):
     return "PNF"+str(index);
 
-def createFileName(nodeName, index, size):
+def createFileName(groupIndex, filePrefix, nodeName, index, size):
     global ctr_files
-    ctr_files = ctr_files + 1
-    return "A20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+    ctr_files[groupIndex] = ctr_files[groupIndex] + 1
+    return filePrefix+"20000626.2315+0200-2330+0200_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
 
-def createMissingFileName(nodeName, index, size):
+def createMissingFileName(groupIndex, filePrefix, nodeName, index, size):
     global ctr_files
-    ctr_files = ctr_files + 1
-    return "AMissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
+    ctr_files[groupIndex] = ctr_files[groupIndex] + 1
+    return filePrefix+"MissingFile_" + nodeName + "-" + str(index) + "-" +size + ".tar.gz";
 
 
 # Function to build fixed beginning of an event
 
-def getEventHead(nodename):
+def getEventHead(groupIndex, changeId, nodename):
   global pnfMap
-  pnfMap.add(nodename) 
+  pnfMap[groupIndex].add(nodename)
   headStr = """
         {
           "event": {
@@ -825,18 +1142,19 @@
             "notificationFields": {
               "notificationFieldsVersion": "2.0",
               "changeType": "FileReady",
-              "changeIdentifier": "PM_MEAS_FILES",
+              "changeIdentifier": \"""" + changeId + """",
               "arrayOfNamedHashMap": [
           """ 
   return headStr
 
 # Function to build the variable part of an event
-def getEventName(fn,type,user,passwd):
-    port = SFTP_PORT
-    ip = sftp_ip
+def getEventName(fn,type,user,passwd, nodeIndex):
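+    # Pick the sftp/ftps server for this node: nodes are spread over the configured servers via modulo on the node index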
+    nodeIndex=nodeIndex%num_ftp_servers
+    port = sftp_ports[nodeIndex]
+    ip = sftp_hosts[nodeIndex]
     if (type == "ftps"):
-        port = FTPS_PORT
-        ip = ftps_ip
+        port = ftps_ports[nodeIndex]
+        ip = ftps_hosts[nodeIndex]
 
     nameStr =        """{
                   "name": \"""" + fn + """",
@@ -879,26 +1197,102 @@
 if __name__ == "__main__":
 
     # IP addresses to use for ftp servers, using localhost if not env var is set
-    sftp_ip = os.environ.get('SFTP_SIM_IP', 'localhost')
-    ftps_ip = os.environ.get('FTPS_SIM_IP', 'localhost')
+    sftp_sims = os.environ.get('SFTP_SIMS', 'localhost:1022')
+    ftps_sims = os.environ.get('FTPS_SIMS', 'localhost:21')
+    num_ftp_servers = int(os.environ.get('NUM_FTP_SERVERS', 1))
+
+    print("Configured sftp sims: " + sftp_sims)
+    print("Configured ftps sims: " + ftps_sims)
+    print("Configured number of ftp servers: " + str(num_ftp_servers))
+
+    tmp=sftp_sims.split(',')
+    for i in range(len(tmp)):
+        hp=tmp[i].split(':')
+        sftp_hosts.append(hp[0])
+        sftp_ports.append(hp[1])
+
+    tmp=ftps_sims.split(',')
+    for i in range(len(tmp)):
+        hp=tmp[i].split(':')
+        ftps_hosts.append(hp[0])
+        ftps_ports.append(hp[1])
+
+    groups = os.environ.get('MR_GROUPS', 'OpenDcae-c12:PM_MEAS_FILES')
+    configuredPrefixes = os.environ.get('MR_FILE_PREFIX_MAPPING', 'PM_MEAS_FILES:A')
+
+    if (len(groups) == 0 ):
+        groups='OpenDcae-c12:PM_MEAS_FILES'
+        print("Using default group: " + groups)
+    else:
+        print("Configured groups: " + groups)
+
+    if (len(configuredPrefixes) == 0 ):
+        configuredPrefixes='PM_MEAS_FILES:A'
+        print("Using default changeid to file prefix mapping: " + configuredPrefixes)
+    else:
+        print("Configured changeid to file prefix mapping: " + configuredPrefixes)
 
     #Counters
-    ctr_responses = 0
-    ctr_requests = 0
-    ctr_files=0
-    ctr_unique_files = 0
-    ctr_events = 0
+    ctr_responses = []
+    ctr_requests = []
+    ctr_files=[]
+    ctr_events = []
     startTime = time.time()
-    firstPollTime = 0
+    firstPollTime = []
     runningState = "Started"
+    #Keeps all responded file names
+    fileMap = []
+    #Keeps all responded PNF names
+    pnfMap = []
+    #Handles rop periods for tests that deliver events every 15 min
     rop_counter = 0
     rop_timestamp = time.time()
 
-    #Keeps all responded file names
-    fileMap = {}
+    #List of configured group names
+    groupNames = []
+    #Mapping between group name and index in groupNames
+    groupNameIndexes = {}
+    #String of configured groups
+    configuredGroups = ""
+    #String of configured change identifiers
+    configuredChangeIds = ""
+    #List of change identifiers
+    changeIds = []
+    #List of filePrefixes
+    filePrefixes = {}
 
-    #Keeps all responded PNF names
-    pnfMap = set()
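+    # Parse MR_GROUPS, e.g. the default "OpenDcae-c12:PM_MEAS_FILES": each comma-separated entry is
+    # <consumer-group>:<change-identifier>[:<change-identifier>...], and one set of counters,
+    # change ids, pnf set and file map is created per group.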
+    tmp=groups.split(',')
+    for i in range(len(tmp)):
+        g=tmp[i].split(':')
+        for j in range(len(g)):
+            g[j] = g[j].strip()
+            if (j == 0):
+                if (len(configuredGroups) > 0):
+                    configuredGroups=configuredGroups+","
+                configuredGroups=configuredGroups+g[0]
+                groupNames.append(g[0])
+                groupNameIndexes[g[0]] = i
+                changeIds.append({})
+                ctr_responses.append(0)
+                ctr_requests.append(0)
+                ctr_files.append(0)
+                ctr_events.append(0)
+                firstPollTime.append(0)
+                pnfMap.append(set())
+                fileMap.append({})
+                if (len(configuredChangeIds) > 0):
+                    configuredChangeIds=configuredChangeIds+","
+            else:
+                changeIds[i][j-1]=g[j]
+                if (j > 1):
+                    configuredChangeIds=configuredChangeIds+":"
+                configuredChangeIds=configuredChangeIds+g[j]
+
+    # Create a map between changeid and file name prefix
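+    # e.g. the default "PM_MEAS_FILES:A" makes files for change id PM_MEAS_FILES use the name prefix "A"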
+    tmp=configuredPrefixes.split(',')
+    for i in range(len(tmp)):
+        p=tmp[i].split(':')
+        filePrefixes[p[0]] = p[1]
 
     tc_num = "Not set"
     tc_help = "Not set"
@@ -985,6 +1379,11 @@
         help='TC511 - 700 MEs, SFTP, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
 
     parser.add_argument(
+        '--tc550',
+        action='store_true',
+        help='TC550 - 700 MEs, SFTP, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
+
+    parser.add_argument(
         '--tc710',
         action='store_true',
         help='TC710 - 700 MEs, SFTP, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
@@ -1028,6 +1427,10 @@
         action='store_true',
         help='TC1302 - 700 ME, SFTP, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
 
+    parser.add_argument(
+        '--tc1500',
+        action='store_true',
+        help='TC1500 - 700 ME, SFTP, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
 
 # FTPS TCs with single ME
     parser.add_argument(
@@ -1123,6 +1526,11 @@
         help='TC2302 - 700 ME, FTPS, 50MB files, 100 files per event, endless number of events, 35 event per poll, 20 event polls every 15min')
 
     parser.add_argument(
+        '--tc2500',
+        action='store_true',
+        help='TC2500 - 700 ME, FTPS, 1MB files, 100 files per event, 35 events per poll, simulating 25h backlog of decreasing number of outdated files and then 20 event polls every 15min for 1h')
+
+    parser.add_argument(
         '--tc600',
         action='store_true',
         help='TC600 - 700 MEs, FTPS, 1MB files, 2 new files per event, 700 events, all event in one poll.')
@@ -1148,6 +1556,11 @@
         help='TC611 - 700 MEs, FTPS, 1KB files, 1 file per event, 3500 events, 700 event per poll.')
 
     parser.add_argument(
+        '--tc650',
+        action='store_true',
+        help='TC650 - 700 MEs, FTPS, 50MB files, 1 file per event, 3500 events, 700 event per poll.')
+
+    parser.add_argument(
         '--tc810',
         action='store_true',
         help='TC810 - 700 MEs, FTPS, 1MB files, 100 files per event, 3500 events, 35 event per poll.')
@@ -1203,6 +1616,9 @@
     elif args.tc1302:
         tc_num = "TC# 1302"
 
+    elif args.tc1500:
+        tc_num = "TC# 1500"
+
     elif args.tc500:
         tc_num = "TC# 500"
     elif args.tc501:
@@ -1214,6 +1630,9 @@
     elif args.tc511:
         tc_num = "TC# 511"
 
+    elif args.tc550:
+        tc_num = "TC# 550"
+
     elif args.tc710:
         tc_num = "TC# 710"
 
@@ -1264,6 +1683,9 @@
     elif args.tc2302:
         tc_num = "TC# 2302"
 
+    elif args.tc2500:
+        tc_num = "TC# 2500"
+
     elif args.tc600:
         tc_num = "TC# 600"
     elif args.tc601:
@@ -1274,7 +1696,8 @@
         tc_num = "TC# 610"
     elif args.tc611:
         tc_num = "TC# 611"
-
+    elif args.tc650:
+        tc_num = "TC# 650"
     elif args.tc810:
         tc_num = "TC# 810"
 
@@ -1285,8 +1708,13 @@
 
     print("TC num: " + tc_num)
 
-    print("Using " + sftp_ip + " for sftp server address in file urls.")
-    print("Using " + ftps_ip + " for ftps server address in file urls.")
+    for i in range(len(sftp_hosts)):
+        print("Using " + str(sftp_hosts[i]) + ":" + str(sftp_ports[i]) + " as sftp server address and port in file urls, for sftp server with index " + str(i) + ".")
+
+    for i in range(len(ftps_hosts)):
+        print("Using " + str(ftps_hosts[i]) + ":" + str(ftps_ports[i]) + " as ftps server address and port in file urls, for ftps server with index " + str(i) + ".")
+
+    print("Using up to " + str(num_ftp_servers) + " ftp servers, for each protocol for PNFs.")
 
     app.run(port=HOST_PORT, host=HOST_IP)
 
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
index 48e8e80..fada972 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
+++ b/test/mocks/datafilecollector-testharness/simulator-group/.gitignore
@@ -1,6 +1,5 @@
-configuration
-tls
 docker-compose.yml
 node_modules
 package.json
-prepare-images.sh
\ No newline at end of file
+package-lock.json
+.tmp*
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor b/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor
deleted file mode 100644
index 145d2d9..0000000
--- a/test/mocks/datafilecollector-testharness/simulator-group/Dockerfile-sim-monitor
+++ /dev/null
@@ -1,15 +0,0 @@
-#Image for monitor simulator
-
-FROM node:8
-
-WORKDIR /app
-
-COPY sim-monitor.js ./
-COPY package*.json ./
-
-RUN npm install express
-RUN npm install argparse
-
-EXPOSE 9999
-
-CMD node /app/sim-monitor.js
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/README.md b/test/mocks/datafilecollector-testharness/simulator-group/README.md
index dc8f286..5a51d8a 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/README.md
+++ b/test/mocks/datafilecollector-testharness/simulator-group/README.md
@@ -1,9 +1,69 @@
-#Introduction
+### Introduction
 The purpose of the "simulator-group" is to run all containers in one go with specified behavior.
-Mainly this is needed for CSIT tests but can be used also for local testing.
+Mainly this is needed for CSIT tests and for auto tests, but it can also be used for manual testing of dfc, either as a java-app
+or as a manually started container. Instead of running the simulators manually as described below, the auto-test cases
+can be executed together with a java-app or a manually started container.
+
+In general, these steps are needed to run the simulator group and dfc (see the command sketch after this list):
+
+1. Build the simulator images
+2. Edit simulator env variables (to adapt the behavior of simulators)
+3. Configure consul
+4. Start the simulator monitor (to view the simulator stats)
+5. Start the simulators
+6. Start dfc
+
+### Overview of the simulators
+There are 5 different types of simulators. For further details, see the README.md in each simulator dir.
+
+1. The MR simulator emits fileready events, upon poll requests, with new and historical file references.
+It is possible to configure the change identifiers, the file prefixes for these identifiers and the consumer groups
+for which these change identifiers shall be generated (see the example after this list). It is also possible to configure the number of events and files to
+generate and which ftp servers the files shall be fetched from.
+2. The DR simulator handles the publish queries (to check if a file has previously been published) and the
+actual publish requests (which result in a redirect to the DR REDIR simulator). It keeps a 'db' of published files, updated by the DR REDIR simulator.
+It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+to configure the responses for the publish queries and publish requests.
+3. The DR REDIR simulator handles the redirect requests for publish from the DR simulator. All accepted files will be stored as an empty
+file with a file name concatenated from the published file name + file size + feed id.
+It is possible to configure 1 or more feeds along with the accepted filename prefixes for each feed. It is also possible
+to configure the responses for the publish requests.
+4. The SFTP simulator(s) handles the ftp download requests. 5 of these simulators are always started, and in the MR sim it is
+possible to configure the distribution of files over these 5 servers (from 1 up to 5 servers). At startup, each server is
+populated with files to download.
+5. The FTPS simulator(s) is the same as the SFTP simulator except that it uses the FTPS protocol.
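+
+For example, the following MR sim variables (the values used in `docker-compose-setup.sh`) make the MR sim generate PM_MEAS_FILES events, with file names prefixed by 'A', for the consumer group OpenDcae-c12:
+
+`MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"`
+
+`MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"`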
 
 
-###Preparation 
+### Build the simulator images
+Run the script `prepare-images.sh` to build the docker images for MR, DR and FTPS servers.
+
+### Edit simulator env variables
+
+See `docker-compose-setup.sh` and the Preparation section below for the available env variables and example values.
+
+### Summary of scripts and files
+`consul_config.sh` - Convert a json config file to work with dfc when manually started as a java-app or container, and then add that json to Consul.
+
+`docker-compose-setup.sh` - Sets environment variables for the simulators and starts the simulators with those settings.
+
+`docker-compose-template.yml` - A docker compose template with environment variable settings. Used for producing a docker-compose file that defines the simulator containers.
+
+`prepare-images.sh` - Script to build all needed simulator images.
+
+`setup-ftp-files-for-image.sh` - Script executed in the ftp server to create files for download.
+
+`sim-monitor-start.sh` - Script to install needed packages and start the simulator monitor.
+
+`sim-monitor.js` - The source file of the simulator monitor.
+
+`simulators-kill.sh` - Script to kill all the simulators.
+
+`simulators-start.sh` - Script to start all the simulators. All env variables need to be set prior to executing the script.
+
+
+
+### Preparation
 Do the manual steps to prepare the simulator images
 
 Build the mr-sim image.
@@ -33,8 +93,8 @@
 Edit the `docker-compose-setup.sh` (or create a copy) to setup the env variables to the desired test behavior for each simulators.
 See each simulator to find a description of the available settings (DR_TC, DR_REDIR_TC and MR_TC).
 The following env variables shall be set (example values).
-Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers. 
-A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example). 
+Note that NUM_FTPFILES and NUM_PNFS controls the number of ftp files created in the ftp servers.
+A total of NUM_FTPFILES * NUM_PNFS ftp files will be created in each ftp server (4 files in the below example).
 Large settings will be time consuming at start of the servers.
 Note that the number of files must match the number of file references emitted from the MR sim.
 
@@ -42,7 +102,7 @@
 
 DR_REDIR_TC="--tc normal"     #Normal behavior of the DR redirect sim
 
-MR_TC="--tc100"               #One 1 MB file in one event, once. 
+MR_TC="--tc100"               #One 1 MB file in one event, once.
 
 BC_TC=""                      #Not in use yet
 
@@ -52,7 +112,7 @@
 
 To minimize the number of ftp file creation, the following two variables can be configured in the same file.
 FILE_SIZE="1MB"               #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-FTP_TYPE="SFTP"               #Type of FTP files to generate (SFTP, FTPS or ALL) 
+FTP_TYPE="SFTP"               #Type of FTP files to generate (SFTP, FTPS or ALL)
 
 If `FTP_TYPE` is set to `ALL`, both ftp servers will be populated with the same files. If set to `SFTP` or `FTPS` then only the server serving that protocol will be populated with files.
 
@@ -73,19 +133,11 @@
 Start DFC by the following cmd: `docker run -d --network="host" --name dfc_app <dfc-image> `
 
 `<dfc-image>` could be either the locally built image `onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`
-or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`. 
+or the one in nexus `nexus3.onap.org:10001/onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server`.
 
 
 
-###Simulator monitor
-Start the simulator monitor server with `sim-monitor-start.sh` and the open a browser with the url `localhost:9999/mon`
-to see the statisics page with data from MR sim, DR sim and DR redir sim.
-Or run as a container, build image first. Note, does not work on Mac.
-
-`cp ../dr-sim/package.json .`
-
-`docker build  -t sim-mon:latest -f Dockerfile-sim-monitor  .`
-
-Then run it, `docker run --network="host" --name sim-mon -it -d sim-mon:latest`
-
-Stop it with `docker stop sim-mon` and if desired, remove the container by `docker rm sim-mon`
+### Start the simulator monitor
+Start the simulator monitor server with `node sim-monitor.js` on the cmd line and then open a browser with the url `localhost:9999/mon`
+to see the statistics page with data from DFC(s), MR sim, DR sim and DR redir sim.
+If needed, run `npm install express` first.
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
new file mode 100644
index 0000000..282085c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM.json
@@ -0,0 +1,23 @@
+{
+   "dmaap.ftpesConfig.keyCert":"config/dfc.jks",
+   "dmaap.ftpesConfig.keyPassword":"secret",
+   "dmaap.ftpesConfig.trustedCa":"config/ftp.jks",
+   "dmaap.ftpesConfig.trustedCaPassword":"secret",
+   "dmaap.security.trustStorePath":"change it",
+   "dmaap.security.trustStorePasswordPath":"trustStorePasswordPath",
+   "dmaap.security.keyStorePath":"keyStorePath",
+   "dmaap.security.keyStorePasswordPath":"change it",
+   "dmaap.security.enableDmaapCertAuth":"false",
+   "dmaap.dmaapProducerConfiguration" : {
+         "changeIdentifier":"PM_MEAS_FILES",
+         "feedName":"feed01"
+    },
+    "streams_subscribes":{
+      "dmaap_subscriber":{
+         "dmmap_info":{
+            "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+         },
+         "type":"message_router"
+      }
+   }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
new file mode 100644
index 0000000..2e4f62c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed1_PM_feed2_CTR.json
@@ -0,0 +1,29 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": [
+    {
+      "changeIdentifier": "PM_MEAS_FILES",
+      "feedName": "feed01"
+    },
+    {
+      "changeIdentifier": "CTR_FILES",
+      "feedName": "feed02"
+    }
+  ],
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
new file mode 100644
index 0000000..7eeed24
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR.json
@@ -0,0 +1,23 @@
+{
+   "dmaap.ftpesConfig.keyCert":"config/dfc.jks",
+   "dmaap.ftpesConfig.keyPassword":"secret",
+   "dmaap.ftpesConfig.trustedCa":"config/ftp.jks",
+   "dmaap.ftpesConfig.trustedCaPassword":"secret",
+   "dmaap.security.trustStorePath":"change it",
+   "dmaap.security.trustStorePasswordPath":"trustStorePasswordPath",
+   "dmaap.security.keyStorePath":"keyStorePath",
+   "dmaap.security.keyStorePasswordPath":"change it",
+   "dmaap.security.enableDmaapCertAuth":"false",
+   "dmaap.dmaapProducerConfiguration" : {
+         "changeIdentifier":"CTR_MEAS_FILES",
+         "feedName":"feed02"
+    },
+    "streams_subscribes":{
+      "dmaap_subscriber":{
+         "dmmap_info":{
+            "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+         },
+         "type":"message_router"
+      }
+   }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
new file mode 100644
index 0000000..83be949
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_CTR_feed3_LOG_TEMP.json
@@ -0,0 +1,33 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": [
+    {
+      "changeIdentifier": "CTR_MEAS_FILES",
+      "feedName": "feed02"
+    },
+    {
+      "changeIdentifier": "LOG_FILES",
+      "feedName": "feed03"
+    },
+    {
+      "changeIdentifier": "TEMP_FILES",
+      "feedName": "feed03"
+    }
+  ],
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
new file mode 100644
index 0000000..a366b4c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed2_PM.json
@@ -0,0 +1,23 @@
+{
+   "dmaap.ftpesConfig.keyCert":"config/dfc.jks",
+   "dmaap.ftpesConfig.keyPassword":"secret",
+   "dmaap.ftpesConfig.trustedCa":"config/ftp.jks",
+   "dmaap.ftpesConfig.trustedCaPassword":"secret",
+   "dmaap.security.trustStorePath":"change it",
+   "dmaap.security.trustStorePasswordPath":"trustStorePasswordPath",
+   "dmaap.security.keyStorePath":"keyStorePath",
+   "dmaap.security.keyStorePasswordPath":"change it",
+   "dmaap.security.enableDmaapCertAuth":"false",
+   "dmaap.dmaapProducerConfiguration" : {
+         "changeIdentifier":"PM_MEAS_FILES",
+         "feedName":"feed02"
+    },
+    "streams_subscribes":{
+      "dmaap_subscriber":{
+         "dmmap_info":{
+            "topic_url":"http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+         },
+         "type":"message_router"
+      }
+   }
+}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
new file mode 100644
index 0000000..eca72be
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c12_feed3_PM_CTR.json
@@ -0,0 +1,29 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": [
+    {
+      "changeIdentifier": "PM_MEAS_FILES",
+      "feedName": "feed03"
+    },
+    {
+      "changeIdentifier": "CTR_MEAS_FILES",
+      "feedName": "feed03"
+    }
+  ],
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
new file mode 100644
index 0000000..c8e199d
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c13_feed2_CTR.json
@@ -0,0 +1,23 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": {
+    "changeIdentifier": "CTR_MEAS_FILES",
+    "feedName": "feed02"
+  },
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c13/C13"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
new file mode 100644
index 0000000..1f91a38
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c14_feed3_LOG.json
@@ -0,0 +1,23 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": {
+    "changeIdentifier": "LOG_FILES",
+    "feedName": "feed03"
+  },
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c14/C14"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
new file mode 100644
index 0000000..acef9b9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c15_feed1_PM_feed4_TEST.json
@@ -0,0 +1,29 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": [
+    {
+      "changeIdentifier": "PM_MEAS_FILES",
+      "feedName": "feed01"
+    },
+    {
+      "changeIdentifier": "TEST_FILES",
+      "feedName": "feed04"
+    }
+  ],
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
new file mode 100644
index 0000000..e10fe07
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/c16_feed4_TEST_feed5_TEMP.json
@@ -0,0 +1,29 @@
+{
+  "dmaap.ftpesConfig.keyCert": "config/dfc.jks",
+  "dmaap.ftpesConfig.keyPassword": "secret",
+  "dmaap.ftpesConfig.trustedCa": "config/ftp.jks",
+  "dmaap.ftpesConfig.trustedCaPassword": "secret",
+  "dmaap.security.trustStorePath": "change it",
+  "dmaap.security.trustStorePasswordPath": "trustStorePasswordPath",
+  "dmaap.security.keyStorePath": "keyStorePath",
+  "dmaap.security.keyStorePasswordPath": "change it",
+  "dmaap.security.enableDmaapCertAuth": "false",
+  "dmaap.dmaapProducerConfiguration": [
+    {
+      "changeIdentifier": "TEST_FILES",
+      "feedName": "feed04"
+    },
+    {
+      "changeIdentifier": "TEMP_FILES",
+      "feedName": "feed05"
+    }
+  ],
+  "streams_subscribes": {
+    "dmaap_subscriber": {
+      "dmmap_info": {
+        "topic_url": "http://dradmin:dradmin@mrsim:2222/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c15/C15"
+      },
+      "type": "message_router"
+    }
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
new file mode 100644
index 0000000..f540975
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_config.hcl
@@ -0,0 +1,13 @@
+#server = true
+#bootstrap = true
+#client_addr = "0.0.0.0"
+
+service  {
+  # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
+  # should be passed to dfc app with this value
+  Name = "config-binding-service"
+  # Host name where CBS is running
+  Address = "config-binding-service"
+  # Port number where CBS is running
+  Port = 10000
+}
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
new file mode 100644
index 0000000..c2d9839
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/consul/cbs_localhost_config.hcl
@@ -0,0 +1,11 @@
+service {
+  # Name for CBS in consul, env var CONFIG_BINDING_SERVICE
+  # should be passed to dfc app with this value
+  # This is only to be used when contacting cbs via local host
+  # (typicall when dfc is executed as an application without a container)
+  Name = "config-binding-service-localhost"
+  # Host name where CBS is running
+  Address = "localhost"
+  # Port number where CBS is running
+  Port = 10000
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1.json
new file mode 100644
index 0000000..e6769d8
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1.json
@@ -0,0 +1,10 @@
+{
+  "feed01": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/1",
+    "publish_url": "https://drsim:3907/publish/1",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4.json
new file mode 100644
index 0000000..aa2e6a0
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4.json
@@ -0,0 +1,34 @@
+{
+  "feed01": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/1",
+    "publish_url": "https://drsim:3907/publish/1",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed02": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/2",
+    "publish_url": "https://drsim:3907/publish/2",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed03": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/3",
+    "publish_url": "https://drsim:3907/publish/3",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed04": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/4",
+    "publish_url": "https://drsim:3907/publish/4",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4_5.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4_5.json
new file mode 100644
index 0000000..6f28f39
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed1_2_3_4_5.json
@@ -0,0 +1,42 @@
+{
+  "feed01": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/1",
+    "publish_url": "https://drsim:3907/publish/1",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed02": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/2",
+    "publish_url": "https://drsim:3907/publish/2",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed03": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/3",
+    "publish_url": "https://drsim:3907/publish/3",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed04": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/4",
+    "publish_url": "https://drsim:3907/publish/4",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+    "feed05": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/5",
+    "publish_url": "https://drsim:3907/publish/5",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2.json
new file mode 100644
index 0000000..bea360c
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2.json
@@ -0,0 +1,10 @@
+{
+  "feed02": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/2",
+    "publish_url": "https://drsim:3907/publish/2",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2_3.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2_3.json
new file mode 100644
index 0000000..a84bf33
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed2_3.json
@@ -0,0 +1,18 @@
+{
+  "feed02": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/2",
+    "publish_url": "https://drsim:3907/publish/2",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  },
+  "feed03": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/3",
+    "publish_url": "https://drsim:3907/publish/3",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed3.json b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed3.json
new file mode 100644
index 0000000..11138e3
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul/dmaap_feed3.json
@@ -0,0 +1,10 @@
+{
+  "feed03": {
+    "username": "user",
+    "log_url": "https://drsim:3907/feedlog/3",
+    "publish_url": "https://drsim:3907/publish/3",
+    "location": "loc00",
+    "password": "password",
+    "publisher_id": "972.360gm"
+  }
+}
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh b/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
new file mode 100755
index 0000000..a3492b9
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/consul_config.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+# Script to configure consul with json configuration files with 'localhost' urls. This
+# is needed when dfc is run as a stand-alone app or as a container in 'host' network mode.
+# The hostnames for the MR and DR simulators in the input json files are assumed to be 'mrsim'/'drsim'.
+# See available consul files in the consul dir.
+# The script stores the json config under the key 'dfc_app<dfc-instance-id>' if arg 'app' is given,
+# and under 'dfc_app<dfc-instance-id>:dmaap' if arg 'dmaap' is given.
+# The instance id shall be an integer in the range 0..5.
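+#
+# Example (file names from the consul dir, instance id 0):
+#   ./consul_config.sh app 0 consul/c12_feed2_PM.json
+#   ./consul_config.sh dmaap 0 consul/dmaap_feed2.json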
+
+. ../common/test_env.sh
+
+if [ $# != 3 ]; then
+	echo "Script needs three args, app|dmaap <dfc-instance-id> <json-file-path>"
+	exit 1
+fi
+
+if [ $2 -lt 0 ] || [ $2 -gt $DFC_MAX_IDX ]; then
+	__print_err "dfc-instance-id should be 0.."$DFC_MAX_IDX
+	exit 1
+fi
+if ! [ -f $3 ]; then
+	__print_err "json file does not exist: "$3
+	exit 1
+fi
+
+if [ $1 == "app" ]; then
+	appname=$DFC_APP_BASE$2
+	echo "Replacing 'mrsim' with 'localhost' in json app config for consul"
+	sed 's/mrsim/localhost/g' $3 > .tmp_file.json
+elif [ $1 == "dmaap" ]; then
+	appname=$DFC_APP_BASE$2":dmaap"
+	echo "Replacing 'drsim' with 'localhost' in json dmaap config for consul"
+	sed 's/drsim/localhost/g' $3 > .tmp_file.json
+else
+	__print_err "config type should be 'app' or 'dmaap'"
+	exit 1
+fi
+
+echo "Configuring consul for " $appname " from " $3
+curl -s http://127.0.0.1:${CONSUL_PORT}/v1/kv/${appname}?dc=dc1 -X PUT -H 'Accept: application/json' -H 'Content-Type: application/json' -H 'X-Requested-With: XMLHttpRequest' --data-binary "@.tmp_file.json" >/dev/null
+
+echo "done"
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
new file mode 100755
index 0000000..9fbe961
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/dfc-internal-stats.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+# Script to print internal dfc stats every 5 sec to screen and file
+# Default port is 8100 for DFC
+# Usage: ./dfc-internal-stats.sh all|internal|jvm [<dfc-port-number>]
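+#
+# Examples (port number is only an example):
+#   ./dfc-internal-stats.sh                 # all stats, default port 8100
+#   ./dfc-internal-stats.sh internal 8100   # dfc internal stats only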
+
+print_usage() {
+	echo "Usage: ./dfc-internal-stats.sh all|internal|jvm [<dfc-port-number>]"
+}
+stat=""
+if [ $# -eq 0 ]; then
+	dfcport=8100
+	stat="all"
+elif [ $# -eq 1 ]; then
+	dfcport=8100
+	stat=$1
+elif [ $# -eq 2 ]; then
+	dfcport=$2
+	stat=$1
+else
+	print_usage
+	exit 1
+fi
+
+heading=1
+
+if [ $stat == "all" ]; then
+	echo "Printing stats for both JVM and DFC using port "$dfcport
+elif [ $stat == "internal" ]; then
+	echo "Printing stats for DFC using port "$dfcport
+elif [ $stat == "jvm" ]; then
+	echo "Printing stats for JVM using port "$dfcport
+else
+	print_usage
+	exit 1
+fi
+fileoutput="./.tmp_stats.txt"
+
+echo "Stats piped to file: "$fileoutput
+
+rm -f $fileoutput
+
+
+
+floatToInt() {
+    printf "%.0f\n" "$@"
+}
+
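+# Fetch a single metric from the dfc actuator endpoint (/actuator/metrics/<metric-name>), print name and value
+# to screen and append to the stats file (metric names on the first pass, values on later passes)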
+do_curl_actuator() {
+    val=$(curl -s localhost:${dfcport}/actuator/metrics/${1} |  grep -o -E "\"value\":[0-9.E]+" | awk -F\: '{print $2}')
+    val=$(floatToInt $val)
+    printf "%-20s %+15s\n" $1 $val
+    if [ $heading -eq 1 ]; then
+    	echo -n "," $1 >> $fileoutput
+    else
+    	echo -n "," $val >> $fileoutput
+    fi
+}
+
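+# Fetch the dfc internal status (/status), print it to screen and append each '<id>:<value>' line to the stats file
+# (ids on the first pass, values on later passes)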
+do_curl_status() {
+	    curl -s localhost:${dfcport}/status > ./.tmp_curl_res
+	    cat ./.tmp_curl_res
+	    while read line; do
+	    	len=${#line}
+	    	if [ $len -gt 0 ]; then
+	    	    val=${line#*:}
+    			id=${line%"$val"}
+	    		if [ $heading -eq 1 ]; then
+    				echo -n "," $id >> $fileoutput
+    			else
+    				echo -n "," $val >> $fileoutput
+    			fi
+    		fi
+		done < ./.tmp_curl_res
+
+}
+
+
+while true; do
+	if [ $heading -eq 1 ]; then
+    	echo  -n "date" >> $fileoutput
+    else
+    	ds=$(date)
+    	echo -n $ds >> $fileoutput
+    fi
+    if [ $stat == "all" ] || [ $stat == "jvm" ]; then
+    	echo "=========    DFC JVM Stats   ========="
+    	do_curl_actuator jvm.threads.live
+    	do_curl_actuator jvm.threads.peak
+    	do_curl_actuator process.files.open
+    	do_curl_actuator process.files.max
+    	do_curl_actuator jvm.memory.used
+    	do_curl_actuator jvm.memory.max
+    fi
+
+	if [ $stat == "all" ] || [ $stat == "internal" ]; then
+    	echo "========= DFC internal Stats ========="
+    	do_curl_status
+    fi
+	echo ""  >> $fileoutput
+	heading=0
+    sleep 5
+done
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
index b9b38f8..b212fc2 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-setup.sh
@@ -1,14 +1,28 @@
 #/bin/bash
 
-#Script for manually starting all simulators with test setting below
+# Script for manually starting all simulators with test setting below
+# Matching json config is needed in CBS/Consul as well. Use consul_config.sh to add config to consul
 
-export DR_TC="--tc normal"           #Test behaviour for DR sim
-export DR_REDIR_TC="--tc normal"     #Test behaviour for DR redir sim
-export MR_TC="--tc710"               #Test behaviour for MR sim
-export BC_TC=""  #Not in use yet
-export NUM_FTPFILES="105"            #Number of FTP files to generate per PNF
-export NUM_PNFS="700"                #Number of unuqie PNFs to generate FTP file for
-export FILE_SIZE="1MB"               #File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
-export FTP_TYPE="SFTP"               #Type of FTP files to generate (SFTP, FTPS or ALL)
+export MR_TC="--tc710"                                 # Test behaviour for MR sim
+export MR_GROUPS="OpenDcae-c12:PM_MEAS_FILES"          # Comma-separated list of <consumer-group>:<change-identifier>
+export MR_FILE_PREFIX_MAPPING="PM_MEAS_FILES:A"        # Comma-separated list of <change-identifier>:<file-name-prefix>
 
-source ./simulators-start.sh
\ No newline at end of file
+export DR_TC="--tc normal"                             # Test behaviour for DR sim
+export DR_FEEDS="1:A,2:B,3:C,4:D"                      # Comma-separated list of <feed-id>:<file-name-prefixes> for DR sim
+
+export DR_REDIR_TC="--tc normal"                       # Test behaviour for DR redir sim
+export DR_REDIR_FEEDS="1:A,2:B,3:C,4:D"                # Comma-separated list of <feed-id>:<file-name-prefixes> for DR redir sim
+
+export NUM_FTPFILES="105"                              # Number of FTP files to generate per PNF
+export NUM_PNFS="700"                                  # Number of unique PNFs to generate FTP files for
+export FILE_SIZE="1MB"                                 # File size for FTP file (1KB, 1MB, 5MB, 50MB or ALL)
+export FTP_TYPE="SFTP"                                 # Type of FTP files to generate (SFTP, FTPS or ALL)
+export FTP_FILE_PREFIXES="A,B,C,D"                     # Comma-separated list of file name prefixes for ftp files
+export NUM_FTP_SERVERS=1                               # Number of FTP servers to distribute the PNFs over (max 5)
+
+export SFTP_SIMS="localhost:21,localhost:22,localhost:23,localhost:24,localhost:25"  # Comma-separated list of SFTP servers host:port
+export FTPS_SIMS="localhost:1022,localhost:1023,localhost:1024,localhost:1025,localhost:1026" # Comma-separated list of FTPS servers host:port
+
+export DR_REDIR_SIM="localhost"                               # Hostname of DR redirect server
+
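+# Example (illustrative): for a smaller manual run, reduce the load before sourcing the start script, e.g.:
+#   export NUM_PNFS="10"; export NUM_FTPFILES="10"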
+source ./simulators-start.sh
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
index 8505631..f078d36 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
+++ b/test/mocks/datafilecollector-testharness/simulator-group/docker-compose-template.yml
@@ -1,17 +1,40 @@
 version: '2'
 
 networks:
-  dfcnet:
-    ipam:
-      config:
-        - subnet: 192.168.100.0/16
-          
+   dfcnet:
+      external:
+         name: dfcnet
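+   # Note: the external 'dfcnet' network is expected to exist; simulators-start.sh creates it if missing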
+
 services:
-      
+
+  consul-server:
+    networks:
+      - dfcnet
+    container_name: dfc_consul
+    image: docker.io/consul:1.4.4
+    ports:
+      - "8500:8500"
+    volumes:
+      - ./consul/consul/:/consul/config
+
+  config-binding-service:
+    networks:
+      - dfcnet
+    container_name: dfc_cbs
+    image: nexus3.onap.org:10001/onap/org.onap.dcaegen2.platform.configbinding.app-app:2.3.0
+    ports:
+      - "10000:10000"
+    environment:
+      - CONSUL_HOST=consul-server
+    depends_on:
+      - consul-server
+
   drsim:
     networks:
-      dfcnet:
-        ipv4_address: 192.168.100.2
+      - dfcnet
+    environment:
+        DRR_SIM_IP: ${DR_REDIR_SIM}
+        DR_FEEDS: ${DR_FEEDS}
     image: drsim_common:latest
     ports:
      - "3906:3906"
@@ -21,8 +44,10 @@
 
   drsim_redir:
     networks:
-      dfcnet:
-        ipv4_address: 192.168.100.3
+      - dfcnet
+    environment:
+        DR_SIM_IP: drsim
+        DR_REDIR_FEEDS: ${DR_REDIR_FEEDS}
     image: drsim_common:latest
     ports:
      - "3908:3908"
@@ -32,42 +57,131 @@
 
   mrsim:
     networks:
-      dfcnet:
-        ipv4_address: 192.168.100.1
+      - dfcnet
+    environment:
+        SFTP_SIMS: ${SFTP_SIMS}
+        FTPS_SIMS: ${FTPS_SIMS}
+        NUM_FTP_SERVERS: ${NUM_FTP_SERVERS}
+        MR_GROUPS: ${MR_GROUPS}
+        MR_FILE_PREFIX_MAPPING: ${MR_FILE_PREFIX_MAPPING}
     image: mrsim:latest
     ports:
      - "2222:2222"
     container_name: dfc_mr-sim
     command: python mr-sim.py ${MR_TC}
 
-  sftp-server:
-    network_mode: bridge
-    container_name: dfc_sftp-server
+  sftp-server0:
+    networks:
+      - dfcnet
+    container_name: dfc_sftp-server0
     image: atmoz/sftp:alpine
     ports:
       - "1022:22"
     restart: on-failure
     command: onap:pano:1001
 
-
-  ftpes-server-vsftpd:
-    network_mode: bridge
-    container_name: dfc_ftpes-server-vsftpd
-    image: docker.io/panubo/vsftpd
+  sftp-server1:
+    networks:
+      - dfcnet
+    container_name: dfc_sftp-server1
+    image: atmoz/sftp:alpine
     ports:
-      - "21:21"
-      - "8001-8010:8001-8010"
+      - "1023:22"
+    restart: on-failure
+    command: onap:pano:1001
+
+  sftp-server2:
+    networks:
+      - dfcnet
+    container_name: dfc_sftp-server2
+    image: atmoz/sftp:alpine
+    ports:
+      - "1024:22"
+    restart: on-failure
+    command: onap:pano:1001
+
+  sftp-server3:
+    networks:
+      - dfcnet
+    container_name: dfc_sftp-server3
+    image: atmoz/sftp:alpine
+    ports:
+      - "1025:22"
+    restart: on-failure
+    command: onap:pano:1001
+
+  sftp-server4:
+    networks:
+      - dfcnet
+    container_name: dfc_sftp-server4
+    image: atmoz/sftp:alpine
+    ports:
+      - "1026:22"
+    restart: on-failure
+    command: onap:pano:1001
+
+  ftpes-server-vsftpd0:
+    networks:
+      - dfcnet
+    container_name: dfc_ftpes-server-vsftpd0
+    image: ftps_vsftpd:latest
+    ports:
+      - "1032:21"
     environment:
       FTP_USER: onap
       FTP_PASSWORD: pano
-      PASV_ADDRESS: localhost
-      PASV_MIN_PORT: 8001
-      PASV_MAX_PORT: 8010
-    volumes:
-      - ./tls/ftp.crt:/etc/ssl/private/ftp.crt:ro
-      - ./tls/ftp.key:/etc/ssl/private/ftp.key:ro
-      - ./tls/dfc.crt:/etc/ssl/private/dfc.crt:ro
-      - ./configuration/vsftpd_ssl.conf:/etc/vsftpd_ssl.conf:ro
-
     restart: on-failure
     command: vsftpd /etc/vsftpd_ssl.conf
+
+  ftpes-server-vsftpd1:
+    networks:
+      - dfcnet
+    container_name: dfc_ftpes-server-vsftpd1
+    image: ftps_vsftpd:latest
+    ports:
+      - "1033:21"
+    environment:
+      FTP_USER: onap
+      FTP_PASSWORD: pano
+    restart: on-failure
+    command: vsftpd /etc/vsftpd_ssl.conf
+
+  ftpes-server-vsftpd2:
+    networks:
+      - dfcnet
+    container_name: dfc_ftpes-server-vsftpd2
+    image: ftps_vsftpd:latest
+    ports:
+      - "1034:21"
+    environment:
+      FTP_USER: onap
+      FTP_PASSWORD: pano
+    restart: on-failure
+    command: vsftpd /etc/vsftpd_ssl.conf
+
+  ftpes-server-vsftpd3:
+    networks:
+      - dfcnet
+    container_name: dfc_ftpes-server-vsftpd3
+    image: ftps_vsftpd:latest
+    ports:
+      - "1035:21"
+    environment:
+      FTP_USER: onap
+      FTP_PASSWORD: pano
+    restart: on-failure
+    command: vsftpd /etc/vsftpd_ssl.conf
+
+  ftpes-server-vsftpd4:
+    networks:
+      - dfcnet
+    container_name: dfc_ftpes-server-vsftpd4
+    image: ftps_vsftpd:latest
+    ports:
+      - "1036:21"
+    environment:
+      FTP_USER: onap
+      FTP_PASSWORD: pano
+    restart: on-failure
+    command: vsftpd /etc/vsftpd_ssl.conf
+
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
new file mode 100755
index 0000000..666e14a
--- /dev/null
+++ b/test/mocks/datafilecollector-testharness/simulator-group/prepare-images.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+#Script for manually preparing images for the mr-sim, dr-sim/dr-redir-sim and ftps servers.
+
+#Build MR sim
+cd ../mr-sim
+
+docker build -t mrsim:latest .
+
+#Build DR sim common image
+cd ../dr-sim
+
+docker build -t drsim_common:latest .
+
+#Build image for ftps server
+cd ../ftps-sftp-server
+
+docker build -t ftps_vsftpd:latest -f Dockerfile-ftps .
+
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
index 7685c81..b1fa01e 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/setup-ftp-files-for-image.sh
@@ -4,44 +4,61 @@
 # The file names matches the files names in the events polled from the MR simulator.
 # Intended for execution in the running ftp containers in the ftp-root dir.
 
-NUM=200 #Default number of files 
+NUM=200 #Default number of files
 PNFS=1 #Default number of PNFs
 FSIZE="ALL"
+PREFIXES="A"
+FTP_SERV_INDEX=0
+NUM_FTP_SERVERS=1
 
-if [ $# -eq 1 ]; then 
+if [ $# -ge 1 ]; then
     NUM=$1
-elif [ $# -eq 2 ]; then
-    NUM=$1
+fi
+if [ $# -ge 2 ]; then
     PNFS=$2
-elif [ $# -eq 3 ]; then
-	NUM=$1
-    PNFS=$2
+fi
+if [ $# -ge 3 ]; then
     FSIZE=$3
     if [ $3 != "1KB" ] && [ $3 != "1MB" ] && [ $3 != "5MB" ]  && [ $3 != "50MB" ]  && [ $3 != "ALL" ]; then
     	echo "File size shall be 1KB|1MB|5MB|50MB|ALL"
     	exit
     fi
-else
-    echo "Wrong args, usage: setup-ftp-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB ] ] ]"
+fi
+if [ $# -ge 4 ]; then
+	PREFIXES=$4
+fi
+if [ $# -ge 5 ]; then
+	NUM_FTP_SERVERS=$5
+fi
+if [ $# -ge 6 ]; then
+	FTP_SERV_INDEX=$6
+fi
+if [ $# -lt 1 ] || [ $# -gt 6 ]; then
+    echo "Wrong args, usage: setup-ftp-files-for-image.sh [ <num-files> [ <num-PNFs> [ 1KB|1MB|5MB|50MB|ALL [ <comma-separated-file-name-prefixes> [ <number-of-ftp-servers> [ <ftp-server-index> ] ] ] ] ] ]"
     exit
 fi
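+# Example (illustrative, matching the defaults in docker-compose-setup.sh): ./setup-ftp-files-for-image.sh 105 700 1MB A,B,C,D 1 0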
 
-echo "Running ftp file creations. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) "$FSIZE
+echo "Running ftp file creations. " $PNFS " PNFs and " $NUM " files for each PNF with file size(s) " $FSIZE " and file prefix(es) " $PREFIXES " in ftp server with index " $FTP_SERV_INDEX
 
 truncate -s 1KB 1KB.tar.gz
 truncate -s 1MB 1MB.tar.gz
 truncate -s 5MB 5MB.tar.gz
 truncate -s 50MB 50MB.tar.gz
 
-p=0
-while [ $p -lt $PNFS ]; do 
-    i=0
-    while [ $i -lt $NUM ]; do  #Problem with for loop and var substituion in curly bracket....so used good old style loop
-    	if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi
-        if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi
-        if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi
-        if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz 'A20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi
-    let i=i+1
-    done
-    let p=p+1
+for fnp in ${PREFIXES//,/ }
+do
+	p=0
+	while [ $p -lt $PNFS ]; do
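+		# Only create files for the PNFs assigned to this ftp server instance: <pnf-index> % NUM_FTP_SERVERS selects the server index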
+		if [[ $(($p%$NUM_FTP_SERVERS)) == $FTP_SERV_INDEX ]]; then
+    		i=0
+    		while [ $i -lt $NUM ]; do  #Problem with for loop and var substitution in curly brackets, so a plain old while loop is used
+    			if [ $FSIZE = "ALL" ] || [ $FSIZE = "1KB" ]; then ln -s 1KB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1KB.tar.gz' >& /dev/null; fi
+        		if [ $FSIZE = "ALL" ] || [ $FSIZE = "1MB" ]; then ln -s 1MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-1MB.tar.gz' >& /dev/null; fi
+        		if [ $FSIZE = "ALL" ] || [ $FSIZE = "5MB" ]; then ln -s 5MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-5MB.tar.gz' >& /dev/null; fi
+        		if [ $FSIZE = "ALL" ] || [ $FSIZE = "50MB" ]; then ln -s 50MB.tar.gz $fnp'20000626.2315+0200-2330+0200_PNF'$p'-'$i'-50MB.tar.gz' >& /dev/null; fi
+    			let i=i+1
+    		done
+    	fi
+    	let p=p+1
+	done
 done
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh
index 52c8c1c..79aab90 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor-start.sh
@@ -1,7 +1,7 @@
 #/bin/bash
 
-#Script to start the sim-monitor
+#Script to install dependencies and start the sim-monitor
 
-#Re-using modules for dr-sim
-cp -r ../dr-sim/node_modules .
-node sim-monitor.js
\ No newline at end of file
+npm install express
+node sim-monitor
+
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js
index 634d144..e15b637 100644
--- a/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js
+++ b/test/mocks/datafilecollector-testharness/simulator-group/sim-monitor.js
@@ -1,7 +1,14 @@
+// Sim mon server - query the simulators for counters and other data
+// Presents a web page on localhost:9999/mon
+
 var http = require('http');
 
 var express = require('express');
 var app = express();
+var fieldSize=32;
+
+var dfcHeadings=[];
+var dfcVal=[];
 
 //I am alive
 app.get("/",function(req, res){
@@ -19,12 +26,6 @@
 
   		// The whole response has been received.
   		resp.on('end', () => {
-  			//Pad data to fixed length
-  			var i = 20-data.length;
-  			while(i>0) {
-  				data = data+"&nbsp;";
-  				i--;
-  			}
     		cb(data);
   		});
 
@@ -34,75 +35,203 @@
 	});
 };
 
+
+//Format a comma separated list of data to a html-safe string with fixed fieldsizes
+function formatDataRow(commaList) {
+	var str = "";
+	var tmp=commaList.split(',');
+    for(i=0;i<tmp.length;i++) {
+        data=tmp[i];
+        var len = fieldSize-data.length;
+        while(len>0) {
+            data = data+"&nbsp;";
+            len--;
+        }
+        str=str+data+"&nbsp;&nbsp;&nbsp;";
+     }
+	return str;
+}
+
+//Format a comma separated list of ids to a html-safe string with fixed fieldsizes
+function formatIdRow(commaList) {
+	var str = "";
+	var tmp=commaList.split(',');
+    for(i=0;i<tmp.length;i++) {
+    	tmp[i] = tmp[i].trim();
+        data="&lt;"+tmp[i]+"&gt;";
+        var len = fieldSize+4-data.length;
+        while(len>0) {
+            data = data+"&nbsp;";
+            len--;
+        }
+        str=str+data+"&nbsp;&nbsp;&nbsp;";
+    }
+	return str;
+}
+
+//Format a list of ids to a html-safe string in compact format
+function formatIdRowCompact(commaList) {
+	var str = "";
+	var tmp=commaList.split(',');
+    for(i=0;i<tmp.length;i++) {
+    	tmp[i] = tmp[i].trim();
+        data="&lt;"+tmp[i]+"&gt;";
+        str=str+data+"&nbsp;";
+    }
+	return str;
+}
+
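+//Parse the dfc /status response for dfc instance 'idx' into dfcHeadings/dfcVal.
+//Assumes the response is a whitespace separated sequence of '<heading> <value>' pairs; a 'no response' reply clears the values.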
+function buildDfcData(dfc, idx) {
+
+	if (dfcHeadings.length == 0) {
+		dfcVal[0]=[];
+		dfcVal[1]=[];
+		dfcVal[2]=[];
+		dfcVal[3]=[];
+		dfcVal[4]=[];
+		if (dfc.indexOf("no response") > -1) {
+			return;
+		} else {
+			dfc=dfc.replace(/\n/g, " ");
+			dfc=dfc.replace(/\r/g, " ");
+			var tmp=dfc.split(' ');
+			var ctr=0
+			for(i=0;i<tmp.length;i++) {
+				tmp[i]=tmp[i].trim();
+				if (tmp[i].length>0) {
+					if (ctr%2==0) {
+						dfcHeadings[ctr/2]=tmp[i];
+					}
+					ctr=ctr+1;
+				}
+			}
+		}
+	}
+	if (dfcHeadings.length > 0) {
+		if (dfc.indexOf("no response") > -1) {
+			dfcVal[idx]=[];
+			return;
+		} else {
+			dfc=dfc.replace(/\n/g, " ");
+			dfc=dfc.replace(/\r/g, " ");
+			var tmp=dfc.split(' ');
+			var ctr=0
+			for(i=0;i<tmp.length;i++) {
+				tmp[i]=tmp[i].trim();
+				if (tmp[i].length>0) {
+					if (ctr%2==1) {
+						dfcVal[idx][Math.trunc(ctr/2)]=""+tmp[i];
+					}
+					ctr=ctr+1;
+				}
+			}
+		}
+	}
+}
+
+function padding(val, fieldSize, pad) {
+	s=""+val;
+	for(i=s.length;i<fieldSize;i++) {
+		s=s+pad
+	}
+	return s;
+}
+
 //Status variables, for parameters values fetched from other simulators
-var mr1, mr2, mr3, mr4, mr5, mr6, mr7, mr8, mr9, mr10;
+var mr1="", mr2="", mr3="", mr4="", mr5="", mr6="", mr7="", mr8="", mr9="", mr10="", mr11="", mr12="", mr13="";
 
-var dr1, dr2, dr3, dr4, dr5, dr6, dr7, dr8, dr9, dr10;
+var dr1="", dr2="", dr3="", dr4="", dr5="", dr6="", dr7="", dr8="", dr9="", dr10="", dr11="", dr12="", dr13="";
 
-var drr1, drr2, drr3, drr4, drr5, drr6;
+var drr1="", drr2="", drr3="", drr4="", drr5="", drr6="", drr7="", drr8="", drr9="";
 
 //Heartbeat var
-var dfc1;
+var dfc0,dfc1,dfc2,dfc3,dfc4;
 
 app.get("/mon",function(req, res){
 
 	//DFC
-	getSimCtr("http://127.0.0.1:8100/heartbeat", function(data) {
+	getSimCtr("http://127.0.0.1:8100/status", function(data) {
+		dfc0 = data;
+		buildDfcData(dfc0, 0);
+    });
+	getSimCtr("http://127.0.0.1:8101/status", function(data) {
 		dfc1 = data;
+		buildDfcData(dfc1, 1);
+    });
+	getSimCtr("http://127.0.0.1:8102/status", function(data) {
+		dfc2 = data;
+		buildDfcData(dfc2, 2);
+    });
+	getSimCtr("http://127.0.0.1:8103/status", function(data) {
+		dfc3 = data;
+		buildDfcData(dfc3, 3);
+    });
+	getSimCtr("http://127.0.0.1:8104/status", function(data) {
+		dfc4 = data;
+		buildDfcData(dfc4, 4);
     });
 
 	//MR
-    getSimCtr("http://127.0.0.1:2222/ctr_requests", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_requests", function(data) {
     	mr1 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/ctr_responses", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_responses", function(data) {
     	mr2 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/ctr_unique_files", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_unique_files", function(data) {
     	mr3 = data;
     });
     getSimCtr("http://127.0.0.1:2222/tc_info", function(data) {
     	mr4 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/ctr_events", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_events", function(data) {
     	mr5 = data;
     });
     getSimCtr("http://127.0.0.1:2222/execution_time", function(data) {
     	mr6 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/ctr_unique_PNFs", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_unique_PNFs", function(data) {
     	mr7 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/exe_time_first_poll", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/exe_time_first_poll", function(data) {
     	mr8 = data;
     });
-    getSimCtr("http://127.0.0.1:2222/ctr_files", function(data) {
+    getSimCtr("http://127.0.0.1:2222/groups/ctr_files", function(data) {
     	mr9 = data;
     });
     getSimCtr("http://127.0.0.1:2222/status", function(data) {
     	mr10 = data;
     });
+    getSimCtr("http://127.0.0.1:2222/groups", function(data) {
+    	mr11 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/changeids", function(data) {
+    	mr12 = data;
+    });
+    getSimCtr("http://127.0.0.1:2222/fileprefixes", function(data) {
+    	mr13 = data;
+    });
 
     //DR
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_query", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_query", function(data) {
     	dr1 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_query_published", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_query_published", function(data) {
     	dr2 = data;
-    });    
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_query_not_published", function(data) {
+    });
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_query_not_published", function(data) {
     	dr3 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_req", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_req", function(data) {
     	dr4 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_req_redirect", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_req_redirect", function(data) {
     	dr5 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_publish_req_published", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_req_published", function(data) {
     	dr6 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_published_files", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_published_files", function(data) {
     	dr7 = data;
     });
     getSimCtr("http://127.0.0.1:3906/tc_info", function(data) {
@@ -111,15 +240,24 @@
     getSimCtr("http://127.0.0.1:3906/execution_time", function(data) {
     	dr9 = data;
     });
-    getSimCtr("http://127.0.0.1:3906/ctr_double_publish", function(data) {
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_double_publish", function(data) {
     	dr10 = data;
     });
+    getSimCtr("http://127.0.0.1:3906/feeds", function(data) {
+    	dr11=data;
+    });
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_query_bad_file_prefix", function(data) {
+    	dr12=data;
+    });
+    getSimCtr("http://127.0.0.1:3906/feeds/ctr_publish_req_bad_file_prefix",function(data) {
+    	dr13=data;
+    });
 
     //DR REDIR
-    getSimCtr("http://127.0.0.1:3908/ctr_publish_requests", function(data) {
+    getSimCtr("http://127.0.0.1:3908/feeds/ctr_publish_requests", function(data) {
     	drr1 = data;
     });
-    getSimCtr("http://127.0.0.1:3908/ctr_publish_responses", function(data) {
+    getSimCtr("http://127.0.0.1:3908/feeds/ctr_publish_responses", function(data) {
     	drr2 = data;
     });
     getSimCtr("http://127.0.0.1:3908/tc_info", function(data) {
@@ -128,66 +266,117 @@
     getSimCtr("http://127.0.0.1:3908/execution_time", function(data) {
     	drr4 = data;
     });
-    getSimCtr("http://127.0.0.1:3908/time_lastpublish", function(data) {
+    getSimCtr("http://127.0.0.1:3908/feeds/time_lastpublish", function(data) {
     	drr5 = data;
     });
-    getSimCtr("http://127.0.0.1:3908/dwl_volume", function(data) {
+    getSimCtr("http://127.0.0.1:3908/feeds/dwl_volume", function(data) {
     	drr6 = data;
     });
+    getSimCtr("http://127.0.0.1:3908/feeds", function(data) {
+    	drr7=data;
+    });
+    getSimCtr("http://127.0.0.1:3908/feeds/ctr_publish_requests_bad_file_prefix", function(data) {
+    	drr8 = data;
+    });
+    getSimCtr("http://127.0.0.1:3908/speed", function(data) {
+    	drr9 = data;
+    });
 
   //Build web page
 	var str = "<!DOCTYPE html>" +
           "<html>" +
           "<head>" +
-            "<meta http-equiv=\"refresh\" content=\"5\">"+  //5 sec auto reefresh
+            "<meta http-equiv=\"refresh\" content=\"5\">"+  //5 sec auto refresh
             "<title>DFC and simulator monitor</title>"+
             "</head>" +
             "<body>" +
-            "<h3>DFC</h3>" +
-            "<font face=\"Courier New\">"+
-            "Heartbeat:....................................." + dfc1 + "<br>" +
-            "</font>"+
+            "<h3>DFC apps</h3>" +
+            "<font face=\"monospace\">";
+//            "dfc_app0: " + dfc0 + "<br>" +
+//            "dfc_app1: " + dfc1 + "<br>" +
+//            "dfc_app2: " + dfc2 + "<br>" +
+//            "dfc_app3: " + dfc3 + "<br>" +
+//            "dfc_app4: " + dfc4 + "<br>";
+
+	for(id=0;id<5;id++) {
+		if (id==0) {
+			str=str+padding("Instance",22,".");
+			str=str+"&nbsp;"+"&nbsp;";
+		}
+		str=str+padding("dfc_app"+id,26, "&nbsp;");
+		str=str+"&nbsp;"+"&nbsp;";
+	}
+	str=str+"<br>";
+
+	if (dfcHeadings.length > 0) {
+		var hl=0;
+		for(hl=0;hl<dfcHeadings.length;hl++) {
+			str=str+padding(dfcHeadings[hl], 22, ".");
+			for(id=0;id<5;id++) {
+				if (dfcVal[id].length > 0) {
+					val=""+padding(dfcVal[id][hl], 26, "&nbsp;");
+				} else {
+					val=""+padding("-", 26, "&nbsp;");
+				}
+				str=str+"&nbsp;"+"&nbsp;"+val;
+			}
+			str=str+"<br>";
+		}
+	}
+
+            str=str+"</font>"+
             "<h3>MR Simulator</h3>" +
-            "<font face=\"Courier New\">"+
+            "<font face=\"monospace\">"+
             "MR TC:........................................." + mr4 + "<br>" +
+            "Configured filename prefixes:.................." + formatIdRowCompact(mr13) + "<br>" +
             "Status:........................................" + mr10 + "<br>" +
             "Execution time (mm.ss):........................" + mr6 + "<br>" +
-            "Execution time from first poll (mm.ss):....... " + mr8 + "<br>" +
-            "Number of requests (polls):...................." + mr1 + "<br>" +
-            "Number of responses (polls):..................." + mr2 + "<br>" +
-            "Number of files in all responses:.............." + mr9 + "<br>" +
-            "Number of unique files in all responses:......." + mr3 + "<br>" +
-            "Number of events..............................." + mr5 + "<br>" +
-            "Number of unique PNFs.........................." + mr7 + "<br>" +
+            "Configured groups:............................." + formatIdRow(mr11) + "<br>" +
+            "Configured change identifiers:................." + formatIdRow(mr12) + "<br>" +
+            "Execution time from first poll (mm.ss):....... " + formatDataRow(mr8) + "<br>" +
+            "Number of requests (polls):...................." + formatDataRow(mr1) + "<br>" +
+            "Number of responses (polls):..................." + formatDataRow(mr2) + "<br>" +
+            "Number of files in all responses:.............." + formatDataRow(mr9) + "<br>" +
+            "Number of unique files in all responses:......." + formatDataRow(mr3) + "<br>" +
+            "Number of events..............................." + formatDataRow(mr5) + "<br>" +
+            "Number of unique PNFs.........................." + formatDataRow(mr7) + "<br>" +
             "</font>"+
             "<h3>DR Simulator</h3>" +
-            "<font face=\"Courier New\">"+
+            "<font face=\"monospace\">"+
             "DR TC:........................................." + dr8 + "<br>" +
             "Execution time (mm.ss):........................" + dr9 + "<br>" +
-            "Number of queries:............................." + dr1 + "<br>" +
-            "Number of query responses, file published:....." + dr2 + "<br>" +
-            "Number of query responses, file not published:." + dr3 + "<br>" +
-            "Number of requests:............................" + dr4 + "<br>" +
-            "Number of responses with redirect:............." + dr5 + "<br>" +
-            "Number of responses without redirect:.........." + dr6 + "<br>" +
-            "Number of published files:....................." + dr7 + "<br>" +
-            "Number of double published files:.............." + dr10 + "<br>" +
+            "Configured feeds (feedId:filePrefix)..........." + formatIdRow(dr11) +"<br>" +
+            "Number of queries:............................." + formatDataRow(dr1) + "<br>" +
+            "Number of queries with bad file name prefix:..." + formatDataRow(dr12) + "<br>" +
+            "Number of query responses, file published:....." + formatDataRow(dr2) + "<br>" +
+            "Number of query responses, file not published:." + formatDataRow(dr3) + "<br>" +
+            "Number of requests:............................" + formatDataRow(dr4) + "<br>" +
+            "Number of requests with bad file name prefix:.." + formatDataRow(dr13) + "<br>" +
+            "Number of responses with redirect:............." + formatDataRow(dr5) + "<br>" +
+            "Number of responses without redirect:.........." + formatDataRow(dr6) + "<br>" +
+            "Number of published files:....................." + formatDataRow(dr7) + "<br>" +
+            "Number of double published files:.............." + formatDataRow(dr10) + "<br>" +
             "</font>"+
             "<h3>DR Redirect Simulator</h3>" +
-            "<font face=\"Courier New\">"+
+            "<font face=\"monospace\">" +
             "DR REDIR TC:..................................." + drr3 + "<br>" +
             "Execution time (mm.ss):........................" + drr4 + "<br>" +
-            "Number of requests:............................" + drr1 + "<br>" +
-            "Number of responses:..........................." + drr2 + "<br>" +
-            "Downloaded volume (bytes):....................." + drr6 + "<br>" +
-            "Last publish (mm:ss):.........................." + drr5 + "<br>" +
+            "Publish speed (files/sec):....................." + drr9 + "<br>" +
+            "Configured feeds (feedId:filePrefix)..........." + formatIdRow(drr7) +"<br>" +
+            "Number of requests:............................" + formatDataRow(drr1) + "<br>" +
+            "Number of requests with bad file name prefix:.." + formatDataRow(drr8) + "<br>" +
+            "Number of responses:..........................." + formatDataRow(drr2) + "<br>" +
+            "Downloaded volume (bytes):....................." + formatDataRow(drr6) + "<br>" +
+            "Last publish (mm:ss):.........................." + formatDataRow(drr5) + "<br>" +
             "</font>"+
            "</body>" +
           "</html>";
 	res.send(str);
+
 })
 
 var httpServer = http.createServer(app);
 var httpPort=9999;
 httpServer.listen(httpPort);
-console.log("Simulator monitor listening (http) at "+httpPort);
\ No newline at end of file
+console.log("Simulator monitor listening (http) at "+httpPort);
+console.log("Open the web page on localhost:9999/mon to view the statistics page.")
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
index 3f0ba35..c0526e9 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-kill.sh
@@ -1,10 +1,39 @@
 #!/bin/bash
 
-#Stop all simulators
+#Script to kill and remove all simulators
 
+echo "Killing simulator containers"
 docker kill dfc_dr-sim
 docker kill dfc_dr-redir-sim
 docker kill dfc_mr-sim
-docker kill dfc_sftp-server
-docker kill dfc_ftpes-server-vsftpd
+docker kill dfc_sftp-server0
+docker kill dfc_sftp-server1
+docker kill dfc_sftp-server2
+docker kill dfc_sftp-server3
+docker kill dfc_sftp-server4
+docker kill dfc_ftpes-server-vsftpd0
+docker kill dfc_ftpes-server-vsftpd1
+docker kill dfc_ftpes-server-vsftpd2
+docker kill dfc_ftpes-server-vsftpd3
+docker kill dfc_ftpes-server-vsftpd4
+docker kill dfc_cbs
+docker kill dfc_consul
 
+echo "Removing simulator containers"
+docker rm dfc_dr-sim
+docker rm dfc_dr-redir-sim
+docker rm dfc_mr-sim
+docker rm dfc_sftp-server0
+docker rm dfc_sftp-server1
+docker rm dfc_sftp-server2
+docker rm dfc_sftp-server3
+docker rm dfc_sftp-server4
+docker rm dfc_ftpes-server-vsftpd0
+docker rm dfc_ftpes-server-vsftpd1
+docker rm dfc_ftpes-server-vsftpd2
+docker rm dfc_ftpes-server-vsftpd3
+docker rm dfc_ftpes-server-vsftpd4
+docker rm dfc_cbs
+docker rm dfc_consul
+
+echo "done"
\ No newline at end of file
diff --git a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
index 774b753..82ad6aa 100755
--- a/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
+++ b/test/mocks/datafilecollector-testharness/simulator-group/simulators-start.sh
@@ -1,35 +1,115 @@
 #!/bin/bash
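+# server_check <name> <port> <url-path>: check that a http based simulator responds with 2xx, retrying up to 10 times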
 
+server_check() {
+	for i in {1..10}; do
+		res=$(curl  -s -o /dev/null -w "%{http_code}" localhost:$2$3)
+		if [ $res -gt 199 ] && [ $res -lt 300 ]; then
+			echo "Simulator " $1 " on localhost:$2$3 responded ok"
+			return
+		fi
+		sleep 1
+	done
+	echo "Simulator " $1 " on localhost:$2$3 - no response"
+}
+
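+# ftps_server_check <name> <port>: check that a ftps simulator responds, retrying up to 10 times (banner shall contain 'vsFTPd')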
+ftps_server_check() {
+	for i in {1..10}; do
+		res=$(curl --silent --max-time 3 localhost:$2 2>&1 | grep vsFTPd)
+		if ! [ -z "$res" ]; then
+			echo "Simulator " $1 " on localhost:$2 responded ok"
+			return
+		fi
+		sleep 1
+	done
+	echo "Simulator " $1 " on localhost:$2 - no response"
+}
+
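+# sftp_server_check <name> <port>: check that a sftp simulator responds, retrying up to 10 times (banner shall contain 'OpenSSH')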
+sftp_server_check() {
+	for i in {1..10}; do
+		res=$(curl --silent --max-time 3 localhost:$2 2>&1 | grep OpenSSH)
+		if ! [ -z "$res" ]; then
+			echo "Simulator " $1 " on localhost:"$2" responded ok"
+			return
+		fi
+		sleep 1
+	done
+	echo "Simulator " $1 " on localhost:"$2" - no response"
+}
 
 # Starts all simulators with the test settings
-# Intended for CSIT test. For manual start, use the docker-compose-setup.sh
+# Intended for CSIT test and auto test. For manual start, use the docker-compose-setup.sh
+
+DOCKER_SIM_NWNAME="dfcnet"
+echo "Creating docker network $DOCKER_SIM_NWNAME, if needed"
+docker network ls| grep $DOCKER_SIM_NWNAME > /dev/null || docker network create $DOCKER_SIM_NWNAME
 
 docker-compose -f docker-compose-template.yml config > docker-compose.yml
 
 docker-compose up -d
 
+declare -a SFTP_SIM
+declare -a FTPS_SIM
+
 DR_SIM="$(docker ps -q --filter='name=dfc_dr-sim')"
 DR_RD_SIM="$(docker ps -q --filter='name=dfc_dr-redir-sim')"
 MR_SIM="$(docker ps -q --filter='name=dfc_mr-sim')"
-SFTP_SIM="$(docker ps -q --filter='name=dfc_sftp-server')"
-FTPS_SIM="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd')"
+SFTP_SIM[0]="$(docker ps -q --filter='name=dfc_sftp-server0')"
+SFTP_SIM[1]="$(docker ps -q --filter='name=dfc_sftp-server1')"
+SFTP_SIM[2]="$(docker ps -q --filter='name=dfc_sftp-server2')"
+SFTP_SIM[3]="$(docker ps -q --filter='name=dfc_sftp-server3')"
+SFTP_SIM[4]="$(docker ps -q --filter='name=dfc_sftp-server4')"
+FTPS_SIM[0]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd0')"
+FTPS_SIM[1]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd1')"
+FTPS_SIM[2]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd2')"
+FTPS_SIM[3]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd3')"
+FTPS_SIM[4]="$(docker ps -q --filter='name=dfc_ftpes-server-vsftpd4')"
+CBS_SIM="$(docker ps -q --filter='name=dfc_cbs')"
+CONSUL_SIM="$(docker ps -q --filter='name=dfc_consul')"
 
 #Wait for initialization of docker containers for all simulators
 for i in {1..10}; do
 if [ $(docker inspect --format '{{ .State.Running }}' $DR_SIM) ] && \
 [ $(docker inspect --format '{{ .State.Running }}' $DR_RD_SIM) ] && \
 [ $(docker inspect --format '{{ .State.Running }}' $MR_SIM) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $SFTP_SIM) ] && \
-[ $(docker inspect --format '{{ .State.Running }}' $FTPS_SIM) ]
+[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${SFTP_SIM[4]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[0]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[1]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[2]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[3]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' ${FTPS_SIM[4]}) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' $CBS_SIM) ] && \
+[ $(docker inspect --format '{{ .State.Running }}' $CONSUL_SIM) ]
  then
-   echo "All simulators Running"
+   echo "All simulators Started"
    break
  else
    echo sleep $i
    sleep $i
- fi 
+ fi
 done
 
+server_check      "cbs          " 10000 "/healthcheck"
+server_check      "consul       " 8500 "/v1/catalog/service/agent"
+server_check      "DR sim       " 3906 "/"
+server_check      "DR redir sim " 3908 "/"
+server_check      "MR sim       " 2222 "/"
+ftps_server_check "FTPS server 0" 1032
+ftps_server_check "FTPS server 1" 1033
+ftps_server_check "FTPS server 2" 1034
+ftps_server_check "FTPS server 3" 1035
+ftps_server_check "FTPS server 4" 1036
+sftp_server_check "SFTP server 0" 1022
+sftp_server_check "SFTP server 1" 1023
+sftp_server_check "SFTP server 2" 1024
+sftp_server_check "SFTP server 3" 1025
+sftp_server_check "SFTP server 4" 1026
+
+echo ""
+
 #Populate the ftp server with files
 if [ -z "$NUM_FTPFILES" ]
  then
@@ -47,17 +127,35 @@
  then
  FTP_TYPE="ALL"
 fi
+if [ -z "$FTP_FILE_PREFIXES" ]
+ then
+ FTP_FILE_PREFIXES="A"
+fi
+
+if [ -z "$NUM_FTP_SERVERS" ]
+ then
+ NUM_FTP_SERVERS=1
+fi
+
 
 if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "SFTP" ]; then
 	echo "Creating files for SFTP server, may take time...."
-	docker cp setup-ftp-files-for-image.sh $SFTP_SIM:/tmp/
-	#Double slash needed for docker on win...
-	docker exec -w //home/onap/ $SFTP_SIM //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1
+	p=0
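+	# Populate each of the NUM_FTP_SERVERS sftp containers; server p gets the files for the PNFs where <pnf-index> % NUM_FTP_SERVERS == p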
+	while [ $p -lt $NUM_FTP_SERVERS ]; do
+		docker cp setup-ftp-files-for-image.sh ${SFTP_SIM[$p]}:/tmp/
+		#Double slash needed for docker on win...
+		docker exec -w //home/onap/ ${SFTP_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+		let p=p+1
+	done
 fi
 if [ $FTP_TYPE = "ALL" ] || [ $FTP_TYPE = "FTPS" ]; then
 	echo "Creating files for FTPS server, may take time...."
-	docker cp setup-ftp-files-for-image.sh $FTPS_SIM:/tmp/setup-ftp-files-for-image.sh
-	#Double slash needed for docker on win...
-	docker exec -w //srv $FTPS_SIM //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE #>/dev/null 2>&1
+	p=0
+	while [ $p -lt $NUM_FTP_SERVERS ]; do
+		docker cp setup-ftp-files-for-image.sh ${FTPS_SIM[$p]}:/tmp/setup-ftp-files-for-image.sh
+		#Double slash needed for docker on win...
+		docker exec -w //srv ${FTPS_SIM[$p]} //tmp/setup-ftp-files-for-image.sh $NUM_FTPFILES $NUM_PNFS $FILE_SIZE $FTP_FILE_PREFIXES $NUM_FTP_SERVERS $p #>/dev/null 2>&1
+		let p=p+1
+	done
 fi
 echo "Done: All simulators started and configured"