DB utils update for db initialization

Issue-ID: DMAAP-566
Change-Id: I3dad4059a3b192f2c54598985893b1334809d1f0
Signed-off-by: Fiachra Corcoran <fiachra.corcoran@ericsson.com>
diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
index 580fe99..0357d17 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/authz/impl/ProvDataProvider.java
@@ -52,14 +52,14 @@
 

 	/** Get the identity of the owner of a feed by group id -  Rally : US708115

 	 * 

-	 * @param feedid, user the ID of the feed whose owner is being looked up.

+	 * @param feedId the ID of the feed whose owner is being looked up.

 	 * @return the feed owner's identity by group.

 	 */

 	public String getGroupByFeedGroupId(String owner, String feedId);

 	

 	/** Get the identity of the owner of a sub by group id Rally : US708115

 	 * 

-	 * @param subid, user the ID of the feed whose owner is being looked up.

+	 * @param subId the ID of the subscription whose owner is being looked up.

 	 * @return the feed owner's identity by group.

 	 */

 	public String getGroupBySubGroupId(String owner, String subId);

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
index ff0218f..b703050 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/StatisticsServlet.java
@@ -356,8 +356,8 @@
 	

 	/**

 	 * queryGeneretor - Generating sql query

-	 * @exception SQL Query parse exception.

-	 * @param Map as key value pare of all user input fields

+	 * @throws ParseException if the supplied date values cannot be parsed.

+	 * @param map key-value pairs of all user input fields

 	 */

 	public String queryGeneretor(Map<String, String> map) throws ParseException{

 		 

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
index 1e5751a..2e0fc21 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/SynchronizerTask.java
@@ -83,7 +83,7 @@
  * <ol>

  * <li>Checking DNS once per minute to see which POD the DNS CNAME points to. The CNAME will point to

  * the active (master) POD.</li>

- * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MySQL in sync.</li>

+ * <li>On non-master (standby) PODs, fetches provisioning data and logs in order to keep MariaDB in sync.</li>

  * <li>Providing information to other parts of the system as to the current role (ACTIVE, STANDBY, UNKNOWN)

  * of this POD.</li>

  * </ol>

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LOGJSONable.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LOGJSONable.java
index 20dde64..53897a7 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LOGJSONable.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/beans/LOGJSONable.java
@@ -27,7 +27,7 @@
 import org.onap.dmaap.datarouter.provisioning.utils.LOGJSONObject;

 

 /**

- * An object that can be represented as a {@link JSONObject}.

+ * An object that can be represented as a {@link LOGJSONObject}.

  * @author Robert Eby

  * @version $Id: JSONable.java,v 1.1 2013/04/26 21:00:26 eby Exp $

  */

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
index 0160571..98cc824 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/DB.java
@@ -24,32 +24,11 @@
 

 package org.onap.dmaap.datarouter.provisioning.utils;

 

-import java.io.File;

-import java.io.FileReader;

-import java.io.IOException;

-import java.io.InputStream;

-import java.io.LineNumberReader;

-import java.lang.reflect.Constructor;

-import java.lang.reflect.InvocationTargetException;

-import java.sql.Connection;

-import java.sql.DatabaseMetaData;

-import java.sql.DriverManager;

-import java.sql.PreparedStatement;

-import java.sql.ResultSet;

-import java.sql.SQLException;

-import java.sql.Statement;

-import java.util.HashSet;

-import java.util.LinkedList;

-import java.util.NoSuchElementException;

-import java.util.Properties;

-import java.util.Queue;

-import java.util.Set;

-

 import org.apache.log4j.Logger;

-import org.onap.dmaap.datarouter.provisioning.beans.DeliveryRecord;

-import org.onap.dmaap.datarouter.provisioning.beans.ExpiryRecord;

-import org.onap.dmaap.datarouter.provisioning.beans.Loadable;

-import org.onap.dmaap.datarouter.provisioning.beans.PublishRecord;

+

+import java.io.*;

+import java.sql.*;

+import java.util.*;

 

 /**

  * Load the DB JDBC driver, and manage a simple pool of connections to the DB.

@@ -59,15 +38,14 @@
  */

 public class DB {

 	/** The name of the properties file (in CLASSPATH) */

-	public static final String CONFIG_FILE = "provserver.properties";

+	private static final String CONFIG_FILE = "provserver.properties";

 

-	private static String DB_DRIVER   = "com.mysql.jdbc.Driver";

-	private static String DB_URL      = "jdbc:mysql://127.0.0.1:3306/datarouter";

-	private static String DB_LOGIN    = "datarouter";

-	private static String DB_PASSWORD = "datarouter";

+	private static String DB_URL;

+	private static String DB_LOGIN;

+	private static String DB_PASSWORD;

 	private static Properties props;

 	private static Logger intlogger = Logger.getLogger("org.onap.dmaap.datarouter.provisioning.internal");

-	private static Queue<Connection> queue = new LinkedList<Connection>();

+	private static final Queue<Connection> queue = new LinkedList<>();

 

 	public static String HTTPS_PORT;

 	public static String HTTP_PORT;

@@ -80,29 +58,23 @@
 	public DB() {

 		if (props == null) {

 			props = new Properties();

-			InputStream inStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE);

-			try {

+			try (InputStream inStream = getClass().getClassLoader().getResourceAsStream(CONFIG_FILE)) {

 				props.load(inStream);

-				DB_DRIVER   = (String) props.get("org.onap.dmaap.datarouter.db.driver");

-				DB_URL      = (String) props.get("org.onap.dmaap.datarouter.db.url");

-				DB_LOGIN    = (String) props.get("org.onap.dmaap.datarouter.db.login");

+				String DB_DRIVER = (String) props.get("org.onap.dmaap.datarouter.db.driver");

+				DB_URL = (String) props.get("org.onap.dmaap.datarouter.db.url");

+				DB_LOGIN = (String) props.get("org.onap.dmaap.datarouter.db.login");

 				DB_PASSWORD = (String) props.get("org.onap.dmaap.datarouter.db.password");

 				HTTPS_PORT = (String) props.get("org.onap.dmaap.datarouter.provserver.https.port");

 				HTTP_PORT = (String) props.get("org.onap.dmaap.datarouter.provserver.http.port");

 				Class.forName(DB_DRIVER);

 			} catch (IOException e) {

-				intlogger.fatal("PROV9003 Opening properties: "+e.getMessage());

+				intlogger.fatal("PROV9003 Opening properties: " + e.getMessage());

 				e.printStackTrace();

 				System.exit(1);

 			} catch (ClassNotFoundException e) {

-				intlogger.fatal("PROV9004 cannot find the DB driver: "+e);

+				intlogger.fatal("PROV9004 cannot find the DB driver: " + e);

 				e.printStackTrace();

 				System.exit(1);

-			} finally {

-				try {

-					inStream.close();

-				} catch (IOException e) {

-				}

 			}

 		}

 	}

@@ -120,41 +92,40 @@
 	 */

 	@SuppressWarnings("resource")

 	public Connection getConnection() throws SQLException {

-		Connection c = null;

-		while (c == null) {

+		Connection connection = null;

+		while (connection == null) {

 			synchronized (queue) {

 				try {

-					c = queue.remove();

-				} catch (NoSuchElementException e) {

+					connection = queue.remove();

+				} catch (NoSuchElementException nseEx) {

 					int n = 0;

 					do {

 						// Try up to 3 times to get a connection

 						try {

-							c = DriverManager.getConnection(DB_URL, DB_LOGIN, DB_PASSWORD);

-						} catch (SQLException e1) {

+							connection = DriverManager.getConnection(DB_URL, DB_LOGIN, DB_PASSWORD);

+						} catch (SQLException sqlEx) {

 							if (++n >= 3)

-								throw e1;

+								throw sqlEx;

 						}

-					} while (c == null);

+					} while (connection == null);

 				}

 			}

-			if (c != null && !c.isValid(1)) {

-				c.close();

-				c = null;

+			if (connection != null && !connection.isValid(1)) {

+				connection.close();

+				connection = null;

 			}

 		}

-		return c;

+		return connection;

 	}

 	/**

 	 * Returns a JDBC connection to the pool.

-	 * @param c the Connection to return

-	 * @throws SQLException

+	 * @param connection the Connection to return

 	 */

-	public void release(Connection c) {

-		if (c != null) {

+	public void release(Connection connection) {

+		if (connection != null) {

 			synchronized (queue) {

-				if (!queue.contains(c))

-					queue.add(c);

+				if (!queue.contains(connection))

+					queue.add(connection);

 			}

 		}

 	}

@@ -165,499 +136,51 @@
 	 * @return true if all retrofits worked, false otherwise

 	 */

 	public boolean runRetroFits() {

-		return retroFit1()

-			&& retroFit2()

-			&& retroFit3()

-			&& retroFit4()

-			&& retroFit5()

-			&& retroFit6()

-			&& retroFit7()

-			&& retroFit8()

-			&& retroFit9()  //New retroFit call to add CREATED_DATE column Rally:US674199 - 1610

-			&& retroFit10() //New retroFit call to add BUSINESS_DESCRIPTION column Rally:US708102 - 1610

-			&& retroFit11() //New retroFit call for groups feature Rally:US708115 - 1610	

-			;

+		return retroFit1();

 	}

+

 	/**

-	 * Retrofit 1 - Make sure the expected tables are in MySQL and are initialized.

-	 * Uses mysql_init_0000 and mysql_init_0001 to setup the DB.

+	 * Retrofit 1 - Make sure the expected tables are in DB and are initialized.

+	 * Uses sql_init_01.sql to setup the DB.

 	 * @return true if the retrofit worked, false otherwise

 	 */

 	private boolean retroFit1() {

-		final String[] expected_tables = {

-			"FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS", "SUBSCRIPTIONS"

+		final String[] expectedTables = {

+				"FEEDS", "FEED_ENDPOINT_ADDRS", "FEED_ENDPOINT_IDS", "PARAMETERS",

+				"SUBSCRIPTIONS", "LOG_RECORDS", "INGRESS_ROUTES", "EGRESS_ROUTES",

+				"NETWORK_ROUTES", "NODESETS", "NODES", "GROUPS"

 		};

-		Connection c = null;

+		Connection connection = null;

 		try {

-			c = getConnection();

-			Set<String> tables = getTableSet(c);

+			connection = getConnection();

+			Set<String> actualTables = getTableSet(connection);

 			boolean initialize = false;

-			for (String s : expected_tables) {

-				initialize |= !tables.contains(s);

+			for (String table : expectedTables) {

+				initialize |= !actualTables.contains(table);

 			}

 			if (initialize) {

 				intlogger.info("PROV9001: First time startup; The database is being initialized.");

-				runInitScript(c, 0);		// script 0 creates the provisioning tables

-				runInitScript(c, 1);		// script 1 initializes PARAMETERS

+				runInitScript(connection, 1);

 			}

 		} catch (SQLException e) {

 			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

 			return false;

 		} finally {

-			if (c != null)

-				release(c);

+			if (connection != null)

+				release(connection);

 		}

 		return true;

 	}

-	/**

-	 * Retrofit 2 - if the LOG_RECORDS table is missing, add it.

-	 * Uses mysql_init_0002 to create this table.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	private boolean retroFit2() {

-		Connection c = null;

-		try {

-			// If LOG_RECORDS table is missing, add it

-			c = getConnection();

-			Set<String> tables = getTableSet(c);

-			if (!tables.contains("LOG_RECORDS")) {

-				intlogger.info("PROV9002: Creating LOG_RECORDS table.");

-				runInitScript(c, 2);		// script 2 creates the LOG_RECORDS table

-			}

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	/**

-	 * Retrofit 3 - if the FEEDS_UNIQUEID table (from release 1.0.*) exists, drop it.

-	 * If SUBSCRIPTIONS.SUBID still has the auto_increment attribute, remove it.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit3() {

-		Connection c = null;

-		try {

-			// if SUBSCRIPTIONS.SUBID still has auto_increment, remove it

-			boolean doremove = false;

-			c = getConnection();

-			DatabaseMetaData md = c.getMetaData();

-			ResultSet rs = md.getColumns("datarouter", "", "SUBSCRIPTIONS", "SUBID");

-			if (rs != null) {

-				while (rs.next()) {

-					doremove = rs.getString("IS_AUTOINCREMENT").equals("YES");

-				}

-				rs.close();

-				rs = null;

-			}

-			if (doremove) {

-				intlogger.info("PROV9002: Modifying SUBSCRIPTIONS SUBID column to remove auto increment.");

-				Statement s = c.createStatement();

-				s.execute("ALTER TABLE SUBSCRIPTIONS MODIFY COLUMN SUBID INT UNSIGNED NOT NULL");

-				s.close();

-			}

-

-			// Remove the FEEDS_UNIQUEID table, if it exists

-			Set<String> tables = getTableSet(c);

-			if (tables.contains("FEEDS_UNIQUEID")) {

-				intlogger.info("PROV9002: Dropping FEEDS_UNIQUEID table.");

-				Statement s = c.createStatement();

-				s.execute("DROP TABLE FEEDS_UNIQUEID");

-				s.close();

-			}

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	private long nextid = 0;	// used for initial creation of LOG_RECORDS table.

-	/**

-	 * Retrofit 4 - if old log tables exist (from release 1.0.*), copy them to LOG_RECORDS, then drop them.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit4() {

-		Connection c = null;

-		try {

-			c = getConnection();

-			Set<String> tables = getTableSet(c);

-			if (tables.contains("PUBLISH_RECORDS")) {

-				intlogger.info("PROV9002: Copying PUBLISH_RECORDS to LOG_RECORDS table.");

-				copyLogTable("PUBLISH_RECORDS", PublishRecord.class);

-				intlogger.info("PROV9002: Dropping PUBLISH_RECORDS table.");

-				Statement s = c.createStatement();

-				s.execute("DROP TABLE PUBLISH_RECORDS");

-				s.close();

-			}

-			if (tables.contains("DELIVERY_RECORDS")) {

-				intlogger.info("PROV9002: Copying DELIVERY_RECORDS to LOG_RECORDS table.");

-				copyLogTable("DELIVERY_RECORDS", DeliveryRecord.class);

-				intlogger.info("PROV9002: Dropping DELIVERY_RECORDS table.");

-				Statement s = c.createStatement();

-				s.execute("DROP TABLE DELIVERY_RECORDS");

-				s.close();

-			}

-			if (tables.contains("EXPIRY_RECORDS")) {

-				intlogger.info("PROV9002: Copying EXPIRY_RECORDS to LOG_RECORDS table.");

-				copyLogTable("EXPIRY_RECORDS", ExpiryRecord.class);

-				intlogger.info("PROV9002: Dropping EXPIRY_RECORDS table.");

-				Statement s = c.createStatement();

-				s.execute("DROP TABLE EXPIRY_RECORDS");

-				s.close();

-			}

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	/**

-	 * Retrofit 5 - Create the new routing tables required for Release 2.

-	 * Adds a new "SUSPENDED" column to FEEDS and SUBSCRIPTIONS.

-	 * Modifies the LOG_RECORDS table to handle new R2 records.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit5() {

-		final String[] expected_tables = {

-			"INGRESS_ROUTES", "EGRESS_ROUTES", "NETWORK_ROUTES", "NODESETS", "NODES"

-		};

-		Connection c = null;

-		try {

-			// If expected tables are not present, then add new routing tables

-			c = getConnection();

-			Set<String> tables = getTableSet(c);

-			boolean initialize = false;

-			for (String s : expected_tables) {

-				initialize |= !tables.contains(s);

-			}

-			if (initialize) {

-				intlogger.info("PROV9002: Adding routing tables for Release 2.0.");

-				runInitScript(c, 3);		// script 3 creates the routing tables

-			}

-

-			// Add SUSPENDED column to FEEDS/SUBSCRIPTIONS

-			DatabaseMetaData md = c.getMetaData();

-			for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {

-				boolean add_col = true;

-				ResultSet rs = md.getColumns("datarouter", "", tbl, "SUSPENDED");

-				if (rs != null) {

-					add_col = !rs.next();

-					rs.close();

-					rs = null;

-				}

-				if (add_col) {

-					intlogger.info("PROV9002: Adding SUSPENDED column to "+tbl+" table.");

-					Statement s = c.createStatement();

-					s.execute("ALTER TABLE "+tbl+" ADD COLUMN SUSPENDED BOOLEAN DEFAULT FALSE");

-					s.close();

-				}

-			}

-

-			// Modify LOG_RECORDS for R2

-			intlogger.info("PROV9002: Modifying LOG_RECORDS table.");

-			Statement s = c.createStatement();

-			s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN TYPE ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL");

-			s.close();

-			s = c.createStatement();

-			s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN REASON ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other')");

-			s.close();

-			boolean add_col = true;

-			ResultSet rs = md.getColumns("datarouter", "", "LOG_RECORDS", "CONTENT_LENGTH_2");

-			if (rs != null) {

-				add_col = !rs.next();

-				rs.close();

-				rs = null;

-			}

-			if (add_col) {

-				intlogger.info("PROV9002: Fixing two columns in LOG_RECORDS table (this may take some time).");

-				s = c.createStatement();

-				s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN CONTENT_LENGTH BIGINT NOT NULL, ADD COLUMN CONTENT_LENGTH_2 BIGINT AFTER RECORD_ID");

-				s.close();

-			}

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	/**

-	 * Retrofit 6 - Adjust LOG_RECORDS.USER to be 50 chars (MR #74).

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit6() {

-		Connection c = null;

-		try {

-			c = getConnection();

-			// Modify LOG_RECORDS for R2

-			intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");

-			Statement s = c.createStatement();

-			s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN USER VARCHAR(50)");

-			s.close();

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	/**

-	 * Retrofit 7 - Adjust LOG_RECORDS.FEED_FILEID and LOG_RECORDS.DELIVERY_FILEID to be 256 chars.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit7() {

-		Connection c = null;

-		try {

-			c = getConnection();

-			// Modify LOG_RECORDS for long (>128) FILEIDs

-			intlogger.info("PROV9002: Modifying LOG_RECORDS.USER length.");

-			Statement s = c.createStatement();

-			s.execute("ALTER TABLE LOG_RECORDS MODIFY COLUMN FEED_FILEID VARCHAR(256), MODIFY COLUMN DELIVERY_FILEID VARCHAR(256)");

-			s.close();

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	/**

-	 * Retrofit 8 - Adjust FEEDS.NAME to be 255 chars (MR #74).

-	 * @return true if the retrofit worked, false otherwise

-	 */

-	@SuppressWarnings("resource")

-	private boolean retroFit8() {

-		Connection c = null;

-		try {

-			c = getConnection();

-			intlogger.info("PROV9002: Modifying FEEDS.NAME length.");

-			Statement s = c.createStatement();

-			s.execute("ALTER TABLE FEEDS MODIFY COLUMN NAME VARCHAR(255)");

-			s.close();

-		} catch (SQLException e) {

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-	

-	/**

-	 * Retrofit 9 - Add column FEEDS.CREATED_DATE and SUBSCRIPTIONS.CREATED_DATE, 1610 release user story US674199.

-	 * @return true if the retrofit worked, false otherwise

-	 */

-

-	@SuppressWarnings("resource")		

-	private boolean retroFit9() {		

-		Connection c = null;		

-		try {		

-			c = getConnection();		

-			// Add CREATED_DATE column to FEEDS/SUBSCRIPTIONS tables

-			DatabaseMetaData md = c.getMetaData();		

-			for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {		

-				boolean add_col = true;		

-				ResultSet rs = md.getColumns("datarouter", "", tbl, "CREATED_DATE");		

-				if (rs != null) {		

-					add_col = !rs.next();		

-					rs.close();		

-					rs = null;		

-				}		

-				if (add_col) {		

-					intlogger.info("PROV9002: Adding CREATED_DATE column to "+tbl+" table.");		

-					Statement s = c.createStatement();

-					s.execute("ALTER TABLE "+tbl+" ADD COLUMN CREATED_DATE timestamp DEFAULT CURRENT_TIMESTAMP");		

-					s.close();		

-				}		

-			}						

-		} catch (SQLException e) {		

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());		

-			return false;		

-		} finally {		

-			if (c != null)		

-				release(c);		

-		}		

-		return true;		

-	}

-

-	/**

-	 * Retrofit 10 -Adding business BUSINESS_DESCRIPTION to FEEDS table (Rally

-	 * US708102).

-	 * 

-	 * @return true if the retrofit worked, false otherwise

-	 */

-

-	@SuppressWarnings("resource")

-	private boolean retroFit10() {

-		Connection c = null;

-		boolean addColumn = true;

-		

-		try {

-

-			c = getConnection();		

-			// Add BUSINESS_DESCRIPTION column to FEEDS table

-			DatabaseMetaData md = c.getMetaData();		

-				boolean add_col = true;		

-				ResultSet rs = md.getColumns("datarouter", "", "FEEDS", "BUSINESS_DESCRIPTION");		

-				if (rs != null) {		

-					add_col = !rs.next();		

-					rs.close();		

-					rs = null;		

-				}	

-		if(add_col) {

-			intlogger

-					.info("PROV9002: Adding BUSINESS_DESCRIPTION column to FEEDS table.");

-			Statement s = c.createStatement();

-			s.execute("ALTER TABLE FEEDS ADD COLUMN BUSINESS_DESCRIPTION varchar(1000) DEFAULT NULL AFTER DESCRIPTION, MODIFY COLUMN DESCRIPTION VARCHAR(1000)");

-			s.close();

-			}

-		}

-		catch (SQLException e) {

-			intlogger

-					.fatal("PROV9000: The database credentials are not working: "

-							+ e.getMessage());

-			return false;

-		} finally {

-			if (c != null)

-				release(c);

-		}

-		return true;

-	}

-

-

-	/*New retroFit method is added for groups feature Rally:US708115 - 1610	

-	* @retroFit11()

-	* @parmas: none

-	* @return - boolean if table and fields are created (Group table, group id in FEEDS, SUBSCRIPTION TABLES)

-	*/

-	@SuppressWarnings("resource")	

-	private boolean retroFit11() {		

-		final String[] expected_tables = {		

-			"GROUPS"		

-		};		

-		Connection c = null;		

-			

-		try {		

-			// If expected tables are not present, then add new routing tables		

-			c = getConnection();		

-			Set<String> tables = getTableSet(c);		

-			boolean initialize = false;		

-			for (String s : expected_tables) {		

-				initialize |= !tables.contains(s);		

-			}		

-			if (initialize) {		

-				intlogger.info("PROV9002: Adding GROUPS table for Release 1610.");		

-				runInitScript(c, 4);		// script 4 creates the routing tables		

-			}		

-					

-			// Add GROUPID column to FEEDS/SUBSCRIPTIONS		

-			DatabaseMetaData md = c.getMetaData();		

-			for (String tbl : new String[] {"FEEDS", "SUBSCRIPTIONS" }) {		

-				boolean add_col = true;		

-				ResultSet rs = md.getColumns("datarouter", "", tbl, "GROUPID");		

-				if (rs != null) {		

-					add_col = !rs.next();		

-					rs.close();		

-					rs = null;		

-				}		

-				if (add_col) {		

-					intlogger.info("PROV9002: Adding GROUPID column to "+tbl+" table.");		

-					Statement s = c.createStatement();		

-					s.execute("ALTER TABLE "+tbl+" ADD COLUMN GROUPID INT(10) UNSIGNED NOT NULL DEFAULT 0 AFTER FEEDID");		

-					s.close();		

-				}		

-			}						

-		} catch (SQLException e) {		

-			intlogger.fatal("PROV9000: The database credentials are not working: "+e.getMessage());		

-			return false;		

-		} finally {		

-			if (c != null)		

-				release(c);		

-		}		

-		return true;		

-	}

-

-

-	/**

-	 * Copy the log table <i>table_name</i> to LOG_RECORDS;

-	 * @param table_name the name of the old (1.0.*) table to copy

-	 * @param table_class the class used to instantiate a record from the table

-	 * @throws SQLException if there is a problem getting a MySQL connection

-	 */

-	@SuppressWarnings("resource")

-	private void copyLogTable(String table_name, Class<? extends Loadable> table_class) throws SQLException {

-		long start = System.currentTimeMillis();

-		int n = 0;

-		Connection c1 = getConnection();

-		Connection c2 = getConnection();

-

-		try {

-			Constructor<? extends Loadable> cnst = table_class.getConstructor(ResultSet.class);

-			PreparedStatement ps = c2.prepareStatement(LogfileLoader.INSERT_SQL);

-			Statement stmt = c1.createStatement();

-			ResultSet rs = stmt.executeQuery("select * from "+table_name);

-			while (rs.next()) {

-				Loadable rec = cnst.newInstance(rs);

-				rec.load(ps);

-				ps.setLong(18, ++nextid);

-				ps.executeUpdate();

-				if ((++n % 10000) == 0)

-					intlogger.debug("  "+n+" records done.");

-			}

-			stmt.close();

-			ps.close();

-		} catch (SQLException e) {

-			e.printStackTrace();

-		} catch (NoSuchMethodException e) {

-			e.printStackTrace();

-		} catch (SecurityException e) {

-			e.printStackTrace();

-		} catch (InstantiationException e) {

-			e.printStackTrace();

-		} catch (IllegalAccessException e) {

-			e.printStackTrace();

-		} catch (IllegalArgumentException e) {

-			e.printStackTrace();

-		} catch (InvocationTargetException e) {

-			e.printStackTrace();

-		}

-

-		release(c1);

-		release(c2);

-		long x = (System.currentTimeMillis() - start);

-		intlogger.debug("  "+n+" records done in "+x+" ms.");

-	}

 

 	/**

 	 * Get a set of all table names in the DB.

-	 * @param c a DB connection

+	 * @param connection a DB connection

 	 * @return the set of table names

 	 */

-	private Set<String> getTableSet(Connection c) {

+	private Set<String> getTableSet(Connection connection) {

 		Set<String> tables = new HashSet<String>();

 		try {

-			DatabaseMetaData md = c.getMetaData();

+			DatabaseMetaData md = connection.getMetaData();

 			ResultSet rs = md.getTables("datarouter", "", "", null);

 			if (rs != null) {

 				while (rs.next()) {

@@ -672,19 +195,19 @@
 	/**

 	 * Initialize the tables by running the initialization scripts located in the directory specified

 	 * by the property <i>org.onap.dmaap.datarouter.provserver.dbscripts</i>.  Scripts have names of

-	 * the form mysql_init_NNNN.

-	 * @param c a DB connection

-	 * @param n the number of the mysql_init_NNNN script to run

+	 * the form sql_init_NN.sql

+	 * @param connection a DB connection

+	 * @param scriptId the number of the sql_init_NN.sql script to run

 	 */

-	private void runInitScript(Connection c, int n) {

-		String scriptdir = (String) props.get("org.onap.dmaap.datarouter.provserver.dbscripts");

+	private void runInitScript(Connection connection, int scriptId) {

+		String scriptDir = (String) props.get("org.onap.dmaap.datarouter.provserver.dbscripts");

 		StringBuilder sb = new StringBuilder();

 		try {

-			String scriptfile = String.format("%s/mysql_init_%04d", scriptdir, n);

-			if (!(new File(scriptfile)).exists())

+			String scriptFile = String.format("%s/sql_init_%02d.sql", scriptDir, scriptId);

+			if (!(new File(scriptFile)).exists())

 				return;

 

-			LineNumberReader in = new LineNumberReader(new FileReader(scriptfile));

+			LineNumberReader in = new LineNumberReader(new FileReader(scriptFile));

 			String line;

 			while ((line = in.readLine()) != null) {

 				if (!line.startsWith("--")) {

@@ -694,7 +217,7 @@
 						// Execute one DDL statement

 						String sql = sb.toString();

 						sb.setLength(0);

-						Statement s = c.createStatement();

+						Statement s = connection.createStatement();

 						s.execute(sql);

 						s.close();

 					}

diff --git a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
index 830e21c..45b8788 100644
--- a/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
+++ b/datarouter-prov/src/main/java/org/onap/dmaap/datarouter/provisioning/utils/LogfileLoader.java
@@ -59,7 +59,7 @@
 

 /**

  * This class provides methods that run in a separate thread, in order to process logfiles uploaded into the spooldir.

- * These logfiles are loaded into the MySQL LOG_RECORDS table. In a running provisioning server, there should only be

+ * These logfiles are loaded into the MariaDB LOG_RECORDS table. In a running provisioning server, there should only be

  * two places where records can be loaded into this table; here, and in the method DB.retroFit4() which may be run at

  * startup to load the old (1.0) style log tables into LOG_RECORDS;

  * <p>This method maintains an {@link RLEBitSet} which can be used to easily see what records are presently in the

@@ -148,7 +148,7 @@
 	}

 	/**

 	 * Run continuously to look for new logfiles in the spool directory and import them into the DB.

-	 * The spool is checked once per second.  If free space on the MySQL filesystem falls below

+	 * The spool is checked once per second.  If free space on the MariaDB filesystem falls below

 	 * REQUIRED_FREE_PCT (normally 20%) then the oldest logfile entries are removed and the LOG_RECORDS

 	 * table is compacted until free space rises above the threshold.

 	 */

@@ -361,72 +361,7 @@
 			db.release(conn);

 		}

 	}

-// OLD CODE - commented here for historical purposes

-//

-//	private boolean pruneRecordsOldAlgorithm() {

-//		// Determine space available -- available space must be at least 20% under /opt/app/mysql

-//		int pct = getFreePercentage();

-//		boolean did1 = false;

-//		while (pct < REQUIRED_FREE_PCT) {

-//			logger.info("PROV8008: Free space is " + pct + "% - removing old log entries");

-//			boolean didit = removeOldestEntries();

-//			pct = didit ? getFreePercentage() : 100; // don't loop endlessly

-//			did1 |= didit;

-//		}

-//		return did1;

-//	}

-//	private int getFreePercentage() {

-//		FileSystem fs = (Paths.get("/opt/app/mysql")).getFileSystem();

-//		long total = 0;

-//		long avail = 0;

-//		try {

-//			for (FileStore store : fs.getFileStores()) {

-//				total += store.getTotalSpace();

-//				avail += store.getUsableSpace();

-//			}

-//		} catch (IOException e) {

-//		}

-//		try { fs.close(); } catch (Exception e) { }

-//		return (int)((avail * 100) / total);

-//	}

-//	private boolean removeOldestEntries() {

-//		// Remove the last days worth of entries

-//		Connection conn = null;

-//		try {

-//			conn = db.getConnection();

-//			Statement stmt = conn.createStatement();

-//			ResultSet rs = stmt.executeQuery("select min(event_time) as MIN from LOG_RECORDS");

-//			if (rs != null) {

-//				if (rs.next()) {

-//					// Compute the end of the first day of logs

-//					long first = rs.getLong("MIN");

-//					Calendar cal = new GregorianCalendar();

-//					cal.setTime(new Date(first));

-//					cal.add(Calendar.DAY_OF_YEAR, 1);

-//					cal.set(Calendar.HOUR_OF_DAY, 0);

-//					cal.set(Calendar.MINUTE, 0);

-//					cal.set(Calendar.SECOND, 0);

-//					cal.set(Calendar.MILLISECOND, 0);

-//					if (!stmt.execute("delete from LOG_RECORDS where event_time < " + cal.getTimeInMillis())) {

-//						int count = stmt.getUpdateCount();

-//						logger.info("PROV0009: Removed "+count+" old log entries.");

-//						stmt.execute("OPTIMIZE TABLE LOG_RECORDS");

-//					}

-//					rs.close();

-//					stmt.close();

-//					return true;

-//				}

-//				rs.close();

-//			}

-//			stmt.close();

-//		} catch (SQLException e) {

-//			System.err.println(e);

-//			e.printStackTrace();

-//		} finally {

-//			db.release(conn);

-//		}

-//		return false;

-//	}

+

 	@SuppressWarnings("resource")

 	private int[] process(File f) {

 		int ok = 0, total = 0;

diff --git a/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql b/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql
deleted file mode 100644
index 64a0762..0000000
--- a/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql
+++ /dev/null
@@ -1,143 +0,0 @@
-CREATE DATABASE IF NOT EXISTS datarouter;
-
-CREATE USER 'datarouter'@'%' IDENTIFIED BY 'datarouter';
-
-GRANT ALL PRIVILEGES ON * . * TO 'datarouter'@'%';
-
-use datarouter;
-
-CREATE TABLE FEEDS (
-    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
-    NAME           VARCHAR(20) NOT NULL,
-    VERSION        VARCHAR(20) NOT NULL,
-    DESCRIPTION    VARCHAR(256),
-    AUTH_CLASS     VARCHAR(32) NOT NULL,
-    PUBLISHER      VARCHAR(8) NOT NULL,
-    SELF_LINK      VARCHAR(256),
-    PUBLISH_LINK   VARCHAR(256),
-    SUBSCRIBE_LINK VARCHAR(256),
-    LOG_LINK       VARCHAR(256),
-    DELETED        BOOLEAN DEFAULT FALSE,
-    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE FEED_ENDPOINT_IDS (
-    FEEDID        INT UNSIGNED NOT NULL,
-    USERID        VARCHAR(20) NOT NULL,
-    PASSWORD      VARCHAR(32) NOT NULL
-);
-
-CREATE TABLE FEED_ENDPOINT_ADDRS (
-    FEEDID        INT UNSIGNED NOT NULL,
-    ADDR          VARCHAR(44) NOT NULL
-);
-
-CREATE TABLE SUBSCRIPTIONS (
-    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
-    FEEDID             INT UNSIGNED NOT NULL,
-    DELIVERY_URL       VARCHAR(256),
-    DELIVERY_USER      VARCHAR(20),
-    DELIVERY_PASSWORD  VARCHAR(32),
-    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
-    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
-    SUBSCRIBER         VARCHAR(8) NOT NULL,
-    SELF_LINK          VARCHAR(256),
-    LOG_LINK           VARCHAR(256),
-    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE PARAMETERS (
-    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
-    VALUE          VARCHAR(4096) NOT NULL
-);
-
-CREATE TABLE LOG_RECORDS (
-    TYPE	   ENUM('pub', 'del', 'exp') NOT NULL,
-    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
-    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
-    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
-    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
-    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
-    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
-    CONTENT_LENGTH BIGINT UNSIGNED NOT NULL,  /* content length of published file */
-
-    FEED_FILEID    VARCHAR(128),		/* file ID of published file */
-    REMOTE_ADDR    VARCHAR(40),			/* IP address of publishing endpoint */
-    USER           VARCHAR(20),			/* user name of publishing endpoint */
-    STATUS         SMALLINT,			/* status code returned to delivering agent */
-
-    DELIVERY_SUBID INT UNSIGNED,		/* pointer to subscription in SUBSCRIPTIONS */
-    DELIVERY_FILEID  VARCHAR(128),		/* file ID of file being delivered */
-    RESULT         SMALLINT,			/* result received from subscribing agent */
-
-    ATTEMPTS       INT,				/* deliveries attempted */
-    REASON         ENUM('notRetryable', 'retriesExhausted'),
-
-    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
-
-    INDEX (FEEDID) USING BTREE,
-    INDEX (DELIVERY_SUBID) USING BTREE,
-    INDEX (RECORD_ID) USING BTREE
-) ENGINE = MyISAM;
-
-CREATE TABLE INGRESS_ROUTES (
-    SEQUENCE  INT UNSIGNED NOT NULL,
-    FEEDID    INT UNSIGNED NOT NULL,
-    USERID    VARCHAR(20),
-    SUBNET    VARCHAR(44),
-    NODESET   INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE EGRESS_ROUTES (
-    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
-    NODEID   INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NETWORK_ROUTES (
-    FROMNODE INT UNSIGNED NOT NULL,
-    TONODE   INT UNSIGNED NOT NULL,
-    VIANODE  INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODESETS (
-    SETID   INT UNSIGNED NOT NULL,
-    NODEID  INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODES (
-    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
-    NAME    VARCHAR(255) NOT NULL,
-    ACTIVE  BOOLEAN DEFAULT TRUE
-);
-
-CREATE TABLE GROUPS (
-    GROUPID  INT UNSIGNED NOT NULL PRIMARY KEY,
-    AUTHID    VARCHAR(100) NOT NULL,
-    NAME    VARCHAR(50) NOT NULL,
-    DESCRIPTION    VARCHAR(255),
-    CLASSIFICATION    VARCHAR(20) NOT NULL,
-    MEMBERS    TINYTEXT,
-    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
--- 'PROV_AUTH_ADDRESSES', '192.168.56.1' ipv4 address of provision server
-INSERT INTO PARAMETERS VALUES
-	('ACTIVE_POD',  'prov.datarouternew.com'),
-	('PROV_ACTIVE_NAME',  'prov.datarouternew.com'),
-	('STANDBY_POD', ''),
-	('PROV_NAME',   'prov.datarouternew.com'),
-	('NODES',       'node.datarouternew.com'),
-	('PROV_DOMAIN', 'datarouternew.com'),
-	('DELIVERY_INIT_RETRY_INTERVAL', '10'),
-	('DELIVERY_MAX_AGE', '86400'),
-	('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
-	('DELIVERY_RETRY_RATIO', '2'),
-	('LOGROLL_INTERVAL', '300'),
-	('PROV_AUTH_ADDRESSES', 'prov.datarouternew.com'), 
-	('PROV_AUTH_SUBJECTS', ''),
-	('PROV_MAXFEED_COUNT',	'10000'),
-	('PROV_MAXSUB_COUNT',	'100000'),
-	('PROV_REQUIRE_CERT', 'false'),
-	('PROV_REQUIRE_SECURE', 'false'),
-	('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
-	;
\ No newline at end of file
diff --git a/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml b/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml
index 1c33e3b..eece155 100644
--- a/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml
+++ b/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml
@@ -21,49 +21,77 @@
 # *

 #-------------------------------------------------------------------------------

 version: '2.1'

-services: 

+services:

   datarouter-prov:

-    image: onap/dmaap/datarouter-prov

+    image: nexus3.onap.org:10003/onap/dmaap/datarouter-prov

     container_name: datarouter-prov

     hostname: prov.datarouternew.com

     ports:

      - "8443:8443"

      - "8080:8080"  

-#    volumes:

-#     - ./prov_data/proserver.properties:/opt/app/datartr/etc/proserver.properties

+    volumes:

+     - ./prov_data/proserver.properties:/opt/app/datartr/etc/proserver.properties

 #     - ./prov_data/datarouter-prov-jar-with-dependencies.jar:/opt/app/datartr/lib/datarouter-prov-jar-with-dependencies.jar

-#      - ./prov_data/addSubscriber.txt:/opt/app/datartr/addSubscriber.txt

-#      - ./prov_data/addFeed3.txt:/opt/app/datartr/addFeed3.txt

-    entrypoint: ["bash", "-c", "sleep 10; /bin/sh -c ./startup.sh"]

+     - ./prov_data/addSubscriber.txt:/opt/app/datartr/addSubscriber.txt

+     - ./prov_data/addFeed3.txt:/opt/app/datartr/addFeed3.txt

+    entrypoint: ["bash", "-c", "./startup.sh"]

     depends_on:

-      mysql_container:

+      mariadb_container:

         condition: service_healthy

+    healthcheck:

+      test: ["CMD", "curl", "-f", "http://prov.datarouternew.com:8080/internal/prov"]

+      interval: 10s

+      timeout: 10s

+      retries: 5

     extra_hosts:

-      - "node.datarouternew.com:172.18.0.4"

-    

+      - "node.datarouternew.com:172.100.0.4"

+    networks:

+      testing_net:

+        ipv4_address: 172.100.0.3

+

   datarouter-node:

-    image: onap/dmaap/datarouter-node

+    image: nexus3.onap.org:10003/onap/dmaap/datarouter-node

     container_name: datarouter-node

     hostname: node.datarouternew.com

     ports:

      - "9443:8443"

      - "9090:8080"

-#    volumes:

-#     - ./node_data/node.properties:/opt/app/datartr/etc/node.properties

-    entrypoint: ["bash", "-c", "sleep 15; /bin/sh -c ./startup.sh"]    

+    volumes:

+     - ./node_data/node.properties:/opt/app/datartr/etc/node.properties

+    entrypoint: ["bash", "-c", "./startup.sh"]

     depends_on:

-      - datarouter-prov

+      datarouter-prov:

+        condition: service_healthy

     extra_hosts:

-      - "prov.datarouternew.com:172.18.0.3"

+      - "prov.datarouternew.com:172.100.0.3"

+    networks:

+      testing_net:

+        ipv4_address: 172.100.0.4

       

-  mysql_container:

-    image: mysql/mysql-server:5.6

-    container_name: mysql

+  mariadb_container:

+    image: mariadb:10.2.14

+    container_name: mariadb

     ports:

       - "3306:3306"

     environment:

-      MYSQL_ROOT_PASSWORD: att2017

-    volumes:

-      - ./database:/tmp/database

-      - ./database:/docker-entrypoint-initdb.d

-    

+      MYSQL_ROOT_PASSWORD: datarouter

+      MYSQL_DATABASE: datarouter

+      MYSQL_USER: datarouter

+      MYSQL_PASSWORD: datarouter

+    healthcheck:

+      test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]

+      interval: 10s

+      timeout: 10s

+      retries: 5

+

+    networks:

+      testing_net:

+        ipv4_address: 172.100.0.2

+

+networks:

+  testing_net:

+    driver: bridge

+    ipam:

+      driver: default

+      config:

+        - subnet: 172.100.0.0/16

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
index a21c7ae..03eabb7 100644
--- a/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
@@ -31,7 +31,7 @@
      "authorization": {

           "classification": "unclassified",

           "endpoint_addrs": [

-               "172.18.0.3",

+               "172.100.0.3"

 			],

           "endpoint_ids": [

                {

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
index e974631..15ca309 100644
--- a/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
@@ -24,9 +24,9 @@
                 "delivery" :	

                				

                 { 

-                                "url" : "http://172.18.0.3:7070/", 

-                                "user" : "LOGIN", 

-                                "password" : "PASSWORD", 

+                                "url" : "http://172.100.0.3:7070/",

+                                "user" : "datarouter",

+                                "password" : "datarouter",

                                 "use100" : true 

                 },

                 "metadataOnly" : false, 

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties b/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties
index b722a64..f37330a 100644
--- a/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties
@@ -38,11 +38,11 @@
 org.onap.dmaap.datarouter.provserver.truststore.password = changeit

 org.onap.dmaap.datarouter.provserver.accesslog.dir       = /opt/app/datartr/logs

 org.onap.dmaap.datarouter.provserver.spooldir            = /opt/app/datartr/spool

-#org.onap.dmaap.datarouter.provserver.dbscripts          = /home/eby/dr2/cvs/datarouter/prov/misc/

+org.onap.dmaap.datarouter.provserver.dbscripts           = /opt/app/datartr/etc/misc

 org.onap.dmaap.datarouter.provserver.logretention        = 30

 

 # Database access

-org.onap.dmaap.datarouter.db.driver   = com.mysql.jdbc.Driver

-org.onap.dmaap.datarouter.db.url      = jdbc:mysql://172.18.0.2:3306/datarouter

+org.onap.dmaap.datarouter.db.driver   = org.mariadb.jdbc.Driver

+org.onap.dmaap.datarouter.db.url      = jdbc:mariadb://172.100.0.2:3306/datarouter

 org.onap.dmaap.datarouter.db.login    = datarouter

 org.onap.dmaap.datarouter.db.password = datarouter

diff --git a/datarouter-prov/src/main/resources/misc/drtrprov b/datarouter-prov/src/main/resources/misc/drtrprov
index 2afb6ec..9f86c23 100644
--- a/datarouter-prov/src/main/resources/misc/drtrprov
+++ b/datarouter-prov/src/main/resources/misc/drtrprov
@@ -56,7 +56,7 @@
 # 	fi
 	if [ "`pgrep -u mysql mysqld`" = "" ]
 	then
-		echo MySQL is not running.  It must be started before drtrprov
+		echo MariaDB is not running.  It must be started before drtrprov
 		exit 0
 	fi
 	PIDS=`pids`
diff --git a/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql b/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql
deleted file mode 100644
index 837030c..0000000
--- a/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql
+++ /dev/null
@@ -1,139 +0,0 @@
-create database datarouter;
-
-use datarouter;
-
-CREATE TABLE FEEDS (
-    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
-    NAME           VARCHAR(20) NOT NULL,
-    VERSION        VARCHAR(20) NOT NULL,
-    DESCRIPTION    VARCHAR(256),
-    AUTH_CLASS     VARCHAR(32) NOT NULL,
-    PUBLISHER      VARCHAR(8) NOT NULL,
-    SELF_LINK      VARCHAR(256),
-    PUBLISH_LINK   VARCHAR(256),
-    SUBSCRIBE_LINK VARCHAR(256),
-    LOG_LINK       VARCHAR(256),
-    DELETED        BOOLEAN DEFAULT FALSE,
-    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE FEED_ENDPOINT_IDS (
-    FEEDID        INT UNSIGNED NOT NULL,
-    USERID        VARCHAR(20) NOT NULL,
-    PASSWORD      VARCHAR(32) NOT NULL
-);
-
-CREATE TABLE FEED_ENDPOINT_ADDRS (
-    FEEDID        INT UNSIGNED NOT NULL,
-    ADDR          VARCHAR(44) NOT NULL
-);
-
-CREATE TABLE SUBSCRIPTIONS (
-    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
-    FEEDID             INT UNSIGNED NOT NULL,
-    DELIVERY_URL       VARCHAR(256),
-    DELIVERY_USER      VARCHAR(20),
-    DELIVERY_PASSWORD  VARCHAR(32),
-    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
-    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
-    SUBSCRIBER         VARCHAR(8) NOT NULL,
-    SELF_LINK          VARCHAR(256),
-    LOG_LINK           VARCHAR(256),
-    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
-CREATE TABLE PARAMETERS (
-    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
-    VALUE          VARCHAR(4096) NOT NULL
-);
-
-CREATE TABLE LOG_RECORDS (
-    TYPE	   ENUM('pub', 'del', 'exp') NOT NULL,
-    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
-    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
-    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
-    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
-    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
-    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
-    CONTENT_LENGTH BIGINT UNSIGNED NOT NULL,  /* content length of published file */
-
-    FEED_FILEID    VARCHAR(128),		/* file ID of published file */
-    REMOTE_ADDR    VARCHAR(40),			/* IP address of publishing endpoint */
-    USER           VARCHAR(20),			/* user name of publishing endpoint */
-    STATUS         SMALLINT,			/* status code returned to delivering agent */
-
-    DELIVERY_SUBID INT UNSIGNED,		/* pointer to subscription in SUBSCRIPTIONS */
-    DELIVERY_FILEID  VARCHAR(128),		/* file ID of file being delivered */
-    RESULT         SMALLINT,			/* result received from subscribing agent */
-
-    ATTEMPTS       INT,				/* deliveries attempted */
-    REASON         ENUM('notRetryable', 'retriesExhausted'),
-
-    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
-
-    INDEX (FEEDID) USING BTREE,
-    INDEX (DELIVERY_SUBID) USING BTREE,
-    INDEX (RECORD_ID) USING BTREE
-) ENGINE = MyISAM;
-
-CREATE TABLE INGRESS_ROUTES (
-    SEQUENCE  INT UNSIGNED NOT NULL,
-    FEEDID    INT UNSIGNED NOT NULL,
-    USERID    VARCHAR(20),
-    SUBNET    VARCHAR(44),
-    NODESET   INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE EGRESS_ROUTES (
-    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
-    NODEID   INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NETWORK_ROUTES (
-    FROMNODE INT UNSIGNED NOT NULL,
-    TONODE   INT UNSIGNED NOT NULL,
-    VIANODE  INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODESETS (
-    SETID   INT UNSIGNED NOT NULL,
-    NODEID  INT UNSIGNED NOT NULL
-);
-
-CREATE TABLE NODES (
-    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
-    NAME    VARCHAR(255) NOT NULL,
-    ACTIVE  BOOLEAN DEFAULT TRUE
-);
-
-CREATE TABLE GROUPS (
-    GROUPID  INT UNSIGNED NOT NULL PRIMARY KEY,
-    AUTHID    VARCHAR(100) NOT NULL,
-    NAME    VARCHAR(50) NOT NULL,
-    DESCRIPTION    VARCHAR(255),
-    CLASSIFICATION    VARCHAR(20) NOT NULL,
-    MEMBERS    TINYTEXT,
-    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-);
-
--- 'PROV_AUTH_ADDRESSES', '192.168.56.1' ipv4 address of provision server
-INSERT INTO PARAMETERS VALUES
-	('ACTIVE_POD',  '127.0.0.1'),
-	('PROV_ACTIVE_NAME',  '${PROV_ACTIVE_NAME}'),
-	('STANDBY_POD', '${DRTR_PROV_STANDBYPOD}'),
-	('PROV_NAME',   'ALCDTL47TJ6015:6080'),
-	('NODES',       '127.0.0.1:8080'),
-	('PROV_DOMAIN', '127.0.0.1'),
-	('DELIVERY_INIT_RETRY_INTERVAL', '10'),
-	('DELIVERY_MAX_AGE', '86400'),
-	('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
-	('DELIVERY_RETRY_RATIO', '2'),
-	('LOGROLL_INTERVAL', '300'),
-	('PROV_AUTH_ADDRESSES', '192.168.56.1'), 
-	('PROV_AUTH_SUBJECTS', ''),
-	('PROV_MAXFEED_COUNT',	'10000'),
-	('PROV_MAXSUB_COUNT',	'100000'),
-	('PROV_REQUIRE_CERT', 'false'),
-	('PROV_REQUIRE_SECURE', 'false'),
-	('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
-	;
\ No newline at end of file
diff --git a/datarouter-prov/src/main/resources/misc/notes b/datarouter-prov/src/main/resources/misc/notes
index e3f872e..4888dc2 100644
--- a/datarouter-prov/src/main/resources/misc/notes
+++ b/datarouter-prov/src/main/resources/misc/notes
@@ -71,8 +71,8 @@
 	file is specified.
 
 DRTR_PROV_DBLOGIN (default datarouter)
-	The login used to access MySQL
+	The login used to access MariaDB
 DRTR_PROV_DBPASS (default datarouter)
-	The password used to access MySQL
+	The password used to access MariaDB
 DRTR_PROV_DBSCRIPTS (default /opt/app/datartr/etc)
 	The directory containing DB initialization scripts
diff --git a/datarouter-prov/src/main/resources/misc/runreports b/datarouter-prov/src/main/resources/misc/runreports
index 170d6ef..f6037f4 100644
--- a/datarouter-prov/src/main/resources/misc/runreports
+++ b/datarouter-prov/src/main/resources/misc/runreports
@@ -47,7 +47,7 @@
 fi
 if [ "`pgrep -u mysql mysqld`" = "" ]
 then
-	echo MySQL is not running.  It must be started before runreports
+	echo MariaDB is not running.  It must be started before runreports
 	exit 1
 fi
 
diff --git a/datarouter-prov/src/main/resources/misc/sql_init_01.sql b/datarouter-prov/src/main/resources/misc/sql_init_01.sql
new file mode 100644
index 0000000..e1dfd0c
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/sql_init_01.sql
@@ -0,0 +1,148 @@
+use datarouter;
+
+CREATE TABLE FEEDS (
+    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
+    GROUPID        INT(10) UNSIGNED NOT NULL DEFAULT 0,
+    NAME           VARCHAR(255) NOT NULL,
+    VERSION        VARCHAR(20) NOT NULL,
+    DESCRIPTION    VARCHAR(1000),
+    BUSINESS_DESCRIPTION VARCHAR(1000) DEFAULT NULL,
+    AUTH_CLASS     VARCHAR(32) NOT NULL,
+    PUBLISHER      VARCHAR(8) NOT NULL,
+    SELF_LINK      VARCHAR(256),
+    PUBLISH_LINK   VARCHAR(256),
+    SUBSCRIBE_LINK VARCHAR(256),
+    LOG_LINK       VARCHAR(256),
+    DELETED        BOOLEAN DEFAULT FALSE,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    SUSPENDED      BOOLEAN DEFAULT FALSE,
+    CREATED_DATE   TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE FEED_ENDPOINT_IDS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    USERID        VARCHAR(20) NOT NULL,
+    PASSWORD      VARCHAR(32) NOT NULL
+);
+
+CREATE TABLE FEED_ENDPOINT_ADDRS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    ADDR          VARCHAR(44) NOT NULL
+);
+
+CREATE TABLE SUBSCRIPTIONS (
+    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
+    FEEDID             INT UNSIGNED NOT NULL,
+    GROUPID            INT(10) UNSIGNED NOT NULL DEFAULT 0,
+    DELIVERY_URL       VARCHAR(256),
+    DELIVERY_USER      VARCHAR(20),
+    DELIVERY_PASSWORD  VARCHAR(32),
+    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
+    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
+    SUBSCRIBER         VARCHAR(8) NOT NULL,
+    SELF_LINK          VARCHAR(256),
+    LOG_LINK           VARCHAR(256),
+    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    SUSPENDED          BOOLEAN DEFAULT FALSE,
+    CREATED_DATE       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+
+);
+
+CREATE TABLE PARAMETERS (
+    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
+    VALUE          VARCHAR(4096) NOT NULL
+);
+
+CREATE TABLE LOG_RECORDS (
+    TYPE           ENUM('pub', 'del', 'exp', 'pbf', 'dlx') NOT NULL,
+    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
+    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
+    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
+    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
+    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
+    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
+    CONTENT_LENGTH BIGINT NOT NULL,  /* content length of published file */
+
+    FEED_FILEID    VARCHAR(256),        /* file ID of published file */
+    REMOTE_ADDR    VARCHAR(40),         /* IP address of publishing endpoint */
+    USER           VARCHAR(50),         /* user name of publishing endpoint */
+    STATUS         SMALLINT,            /* status code returned to delivering agent */
+
+    DELIVERY_SUBID INT UNSIGNED,        /* pointer to subscription in SUBSCRIPTIONS */
+    DELIVERY_FILEID  VARCHAR(256),      /* file ID of file being delivered */
+    RESULT         SMALLINT,            /* result received from subscribing agent */
+
+    ATTEMPTS       INT,             /* deliveries attempted */
+    REASON         ENUM('notRetryable', 'retriesExhausted', 'diskFull', 'other'),
+
+    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+    CONTENT_LENGTH_2 BIGINT,
+
+    INDEX (FEEDID) USING BTREE,
+    INDEX (DELIVERY_SUBID) USING BTREE,
+    INDEX (RECORD_ID) USING BTREE
+) ENGINE = MyISAM;
+
+CREATE TABLE INGRESS_ROUTES (
+    SEQUENCE  INT UNSIGNED NOT NULL,
+    FEEDID    INT UNSIGNED NOT NULL,
+    USERID    VARCHAR(20),
+    SUBNET    VARCHAR(44),
+    NODESET   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE EGRESS_ROUTES (
+    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
+    NODEID   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NETWORK_ROUTES (
+    FROMNODE INT UNSIGNED NOT NULL,
+    TONODE   INT UNSIGNED NOT NULL,
+    VIANODE  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODESETS (
+    SETID   INT UNSIGNED NOT NULL,
+    NODEID  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODES (
+    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME    VARCHAR(255) NOT NULL,
+    ACTIVE  BOOLEAN DEFAULT TRUE
+);
+
+CREATE TABLE GROUPS (
+    GROUPID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    AUTHID    VARCHAR(100) NOT NULL,
+    NAME    VARCHAR(50) NOT NULL,
+    DESCRIPTION    VARCHAR(255),
+    CLASSIFICATION    VARCHAR(20) NOT NULL,
+    MEMBERS    TINYTEXT,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+INSERT INTO PARAMETERS VALUES
+    ('ACTIVE_POD',  'prov.datarouternew.com'),
+    ('PROV_ACTIVE_NAME',  'prov.datarouternew.com'),
+    ('STANDBY_POD', ''),
+    ('PROV_NAME',   'prov.datarouternew.com'),
+    ('NODES',       '172.100.0.1|node.datarouternew.com'),
+    ('PROV_DOMAIN', 'datarouternew.com'),
+    ('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+    ('DELIVERY_MAX_AGE', '86400'),
+    ('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+    ('DELIVERY_RETRY_RATIO', '2'),
+    ('LOGROLL_INTERVAL', '300'),
+    ('PROV_AUTH_ADDRESSES', '172.100.0.1|prov.datarouternew.com|node.datarouternew.com'),
+    ('PROV_AUTH_SUBJECTS', ''),
+    ('PROV_MAXFEED_COUNT',  '10000'),
+    ('PROV_MAXSUB_COUNT',   '100000'),
+    ('PROV_REQUIRE_CERT', 'false'),
+    ('PROV_REQUIRE_SECURE', 'false'),
+    ('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+    ;
+
+INSERT INTO FEED_ENDPOINT_ADDRS VALUES
+    (1,  '172.100.0.1');
\ No newline at end of file
diff --git a/datarouter-prov/src/main/resources/provserver.properties b/datarouter-prov/src/main/resources/provserver.properties
index b722a64..f37330a 100644
--- a/datarouter-prov/src/main/resources/provserver.properties
+++ b/datarouter-prov/src/main/resources/provserver.properties
@@ -38,11 +38,11 @@
 org.onap.dmaap.datarouter.provserver.truststore.password = changeit

 org.onap.dmaap.datarouter.provserver.accesslog.dir       = /opt/app/datartr/logs

 org.onap.dmaap.datarouter.provserver.spooldir            = /opt/app/datartr/spool

-#org.onap.dmaap.datarouter.provserver.dbscripts          = /home/eby/dr2/cvs/datarouter/prov/misc/

+org.onap.dmaap.datarouter.provserver.dbscripts           = /opt/app/datartr/etc/misc

 org.onap.dmaap.datarouter.provserver.logretention        = 30

 

 # Database access

-org.onap.dmaap.datarouter.db.driver   = com.mysql.jdbc.Driver

-org.onap.dmaap.datarouter.db.url      = jdbc:mysql://172.18.0.2:3306/datarouter

+org.onap.dmaap.datarouter.db.driver   = org.mariadb.jdbc.Driver

+org.onap.dmaap.datarouter.db.url      = jdbc:mariadb://172.100.0.2:3306/datarouter

 org.onap.dmaap.datarouter.db.login    = datarouter

 org.onap.dmaap.datarouter.db.password = datarouter

diff --git a/datarouter-prov/src/main/resources/startup.sh b/datarouter-prov/src/main/resources/startup.sh
deleted file mode 100644
index e69de29..0000000
--- a/datarouter-prov/src/main/resources/startup.sh
+++ /dev/null
diff --git a/datarouter-prov/src/test/java/datarouter/provisioning/testBase.java b/datarouter-prov/src/test/java/datarouter/provisioning/testBase.java
index d124af4..dd8e86c 100644
--- a/datarouter-prov/src/test/java/datarouter/provisioning/testBase.java
+++ b/datarouter-prov/src/test/java/datarouter/provisioning/testBase.java
@@ -32,6 +32,7 @@
 import java.security.KeyStore;
 import java.util.Properties;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.StatusLine;
@@ -49,7 +50,7 @@
 
 public class testBase {
 	/** The properties file to read the DB properties from */
-	public static final String CONFIG_FILE = "tests.properties";
+	public static final String CONFIG_FILE = "integration_test.properties";
 
 	public Properties props;
 	protected AbstractHttpClient httpclient;
@@ -145,6 +146,7 @@
 		// shut down the connection manager to ensure
 		// immediate deallocation of all system resources
 		httpclient.getConnectionManager().shutdown();
+		FileUtils.deleteDirectory(new File("./unit-test-logs"));
 	}
 
 	protected void ckResponse(HttpResponse response, int expect) {
diff --git a/datarouter-prov/src/test/resources/integration_test.properties b/datarouter-prov/src/test/resources/integration_test.properties
new file mode 100644
index 0000000..36b2ac3
--- /dev/null
+++ b/datarouter-prov/src/test/resources/integration_test.properties
@@ -0,0 +1,5 @@
+test.keystore=self_signed/keystore.jks
+test.kspassword=changeit
+test.truststore=self_signed/cacerts.jks
+test.tspassword=changeit
+test.host=https://prov.datarouternew.com:8443
\ No newline at end of file
diff --git a/datarouter-prov/src/test/resources/log4j.properties b/datarouter-prov/src/test/resources/log4j.properties
new file mode 100644
index 0000000..b3be5b0
--- /dev/null
+++ b/datarouter-prov/src/test/resources/log4j.properties
@@ -0,0 +1,68 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# * 
+#  *      http://www.apache.org/licenses/LICENSE-2.0
+# * 
+#  * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+
+log4j.rootLogger=debug, eventlog, intlog, pelog
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d %5p [%t] - %m%n
+
+#
+# Logger used for provisioning events
+#
+log4j.logger.org.onap.dmaap.datarouter.provisioning.events=debug, eventlog
+log4j.additivity.org.onap.dmaap.datarouter.provisioning.events=false
+
+log4j.appender.eventlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.eventlog.file=./unit-test-logs/provevent.log
+log4j.appender.eventlog.datePattern='.'yyyyMMdd
+log4j.appender.eventlog.append=true
+log4j.appender.eventlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.eventlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for internal provisioning server events
+#
+log4j.logger.org.onap.dmaap.datarouter.provisioning.internal=debug, intlog
+log4j.additivity.org.onap.dmaap.datarouter.provisioning.internal=false
+
+log4j.appender.intlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.intlog.file=./unit-test-logs/provint.log
+log4j.appender.intlog.datePattern='.'yyyyMMdd
+log4j.appender.intlog.append=true
+log4j.appender.intlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.intlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for policy engine
+#
+log4j.logger.org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer=debug, pelog
+log4j.additivity.org.onap.dmaap.datarouter.authz.impl.ProvAuthorizer=false
+
+log4j.appender.pelog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.pelog.file=./unit-test-logs/policyengine.log
+log4j.appender.pelog.datePattern='.'yyyyMMdd
+log4j.appender.pelog.append=true
+log4j.appender.pelog.layout=org.apache.log4j.PatternLayout
+log4j.appender.pelog.layout.ConversionPattern=%d %-5p [%t] - %m%n