[DMAAP-48] Initial code import

Change-Id: I3e65371093487d7de167ec6c29f327f366f1e299
Signed-off-by: sg481n <sg481n@att.com>
diff --git a/datarouter-prov/src/main/resources/EelfMessages.properties b/datarouter-prov/src/main/resources/EelfMessages.properties
new file mode 100644
index 0000000..5e8b179
--- /dev/null
+++ b/datarouter-prov/src/main/resources/EelfMessages.properties
@@ -0,0 +1,58 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+########################################################################

+#Resource key=Error Code|Message text|Resolution text |Description text

+#######

+#Newlines can be utilized to add some clarity ensuring continuing line

+#has atleast one leading space

+#ResourceKey=\

+#             ERR0000E\

+#             Sample error msg txt\

+#             Sample resolution msg\

+#             Sample description txt

+#

+######

+#Error code classification category

+#100	Permission errors

+#200	Availability errors/Timeouts

+#300	Data errors

+#400	Schema Interface type/validation errors

+#500	Business process errors

+#900	Unknown errors

+#

+########################################################################

+

+# Messages for Data Router EELF framework

+

+#Prints FeedID in the EELF apilog

+MESSAGE_WITH__FEEDID=EELF0001I| FeedID  = {0}

+

+#Prints User in the EELF apilog

+MESSAGE_WITH_BEHALF=EELF0002I| User = {0}

+

+#Prints User and FeedID in the EELF apilog

+MESSAGE_WITH_BEHALF_AND_FEEDID=EELF0003I| User = {0} FeedID  = {1}

+

+#Prints User and SubID in the EELF apilog

+MESSAGE_WITH_BEHALF_AND_SUBID=EELF0004I| User = {0} SubscriberID  = {1}

+

diff --git a/datarouter-prov/src/main/resources/authz.jar b/datarouter-prov/src/main/resources/authz.jar
new file mode 100644
index 0000000..6d0dd8a
--- /dev/null
+++ b/datarouter-prov/src/main/resources/authz.jar
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql b/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql
new file mode 100644
index 0000000..64a0762
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/database/install_db.sql
@@ -0,0 +1,143 @@
+CREATE DATABASE IF NOT EXISTS datarouter;
+
+CREATE USER 'datarouter'@'%' IDENTIFIED BY 'datarouter';
+
+GRANT ALL PRIVILEGES ON * . * TO 'datarouter'@'%';
+
+use datarouter;
+
+CREATE TABLE FEEDS (
+    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME           VARCHAR(20) NOT NULL,
+    VERSION        VARCHAR(20) NOT NULL,
+    DESCRIPTION    VARCHAR(256),
+    AUTH_CLASS     VARCHAR(32) NOT NULL,
+    PUBLISHER      VARCHAR(8) NOT NULL,
+    SELF_LINK      VARCHAR(256),
+    PUBLISH_LINK   VARCHAR(256),
+    SUBSCRIBE_LINK VARCHAR(256),
+    LOG_LINK       VARCHAR(256),
+    DELETED        BOOLEAN DEFAULT FALSE,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE FEED_ENDPOINT_IDS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    USERID        VARCHAR(20) NOT NULL,
+    PASSWORD      VARCHAR(32) NOT NULL
+);
+
+CREATE TABLE FEED_ENDPOINT_ADDRS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    ADDR          VARCHAR(44) NOT NULL
+);
+
+CREATE TABLE SUBSCRIPTIONS (
+    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
+    FEEDID             INT UNSIGNED NOT NULL,
+    DELIVERY_URL       VARCHAR(256),
+    DELIVERY_USER      VARCHAR(20),
+    DELIVERY_PASSWORD  VARCHAR(32),
+    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
+    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
+    SUBSCRIBER         VARCHAR(8) NOT NULL,
+    SELF_LINK          VARCHAR(256),
+    LOG_LINK           VARCHAR(256),
+    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE PARAMETERS (
+    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
+    VALUE          VARCHAR(4096) NOT NULL
+);
+
+CREATE TABLE LOG_RECORDS (
+    TYPE	   ENUM('pub', 'del', 'exp') NOT NULL,
+    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
+    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
+    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
+    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
+    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
+    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
+    CONTENT_LENGTH BIGINT UNSIGNED NOT NULL,  /* content length of published file */
+
+    FEED_FILEID    VARCHAR(128),		/* file ID of published file */
+    REMOTE_ADDR    VARCHAR(40),			/* IP address of publishing endpoint */
+    USER           VARCHAR(20),			/* user name of publishing endpoint */
+    STATUS         SMALLINT,			/* status code returned to delivering agent */
+
+    DELIVERY_SUBID INT UNSIGNED,		/* pointer to subscription in SUBSCRIPTIONS */
+    DELIVERY_FILEID  VARCHAR(128),		/* file ID of file being delivered */
+    RESULT         SMALLINT,			/* result received from subscribing agent */
+
+    ATTEMPTS       INT,				/* deliveries attempted */
+    REASON         ENUM('notRetryable', 'retriesExhausted'),
+
+    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+
+    INDEX (FEEDID) USING BTREE,
+    INDEX (DELIVERY_SUBID) USING BTREE,
+    INDEX (RECORD_ID) USING BTREE
+) ENGINE = MyISAM;
+
+CREATE TABLE INGRESS_ROUTES (
+    SEQUENCE  INT UNSIGNED NOT NULL,
+    FEEDID    INT UNSIGNED NOT NULL,
+    USERID    VARCHAR(20),
+    SUBNET    VARCHAR(44),
+    NODESET   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE EGRESS_ROUTES (
+    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
+    NODEID   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NETWORK_ROUTES (
+    FROMNODE INT UNSIGNED NOT NULL,
+    TONODE   INT UNSIGNED NOT NULL,
+    VIANODE  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODESETS (
+    SETID   INT UNSIGNED NOT NULL,
+    NODEID  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODES (
+    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME    VARCHAR(255) NOT NULL,
+    ACTIVE  BOOLEAN DEFAULT TRUE
+);
+
+CREATE TABLE GROUPS (
+    GROUPID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    AUTHID    VARCHAR(100) NOT NULL,
+    NAME    VARCHAR(50) NOT NULL,
+    DESCRIPTION    VARCHAR(255),
+    CLASSIFICATION    VARCHAR(20) NOT NULL,
+    MEMBERS    TINYTEXT,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- 'PROV_AUTH_ADDRESSES', '192.168.56.1' ipv4 address of provision server
+INSERT INTO PARAMETERS VALUES
+	('ACTIVE_POD',  'prov.datarouternew.com'),
+	('PROV_ACTIVE_NAME',  'prov.datarouternew.com'),
+	('STANDBY_POD', ''),
+	('PROV_NAME',   'prov.datarouternew.com'),
+	('NODES',       'node.datarouternew.com'),
+	('PROV_DOMAIN', 'datarouternew.com'),
+	('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+	('DELIVERY_MAX_AGE', '86400'),
+	('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+	('DELIVERY_RETRY_RATIO', '2'),
+	('LOGROLL_INTERVAL', '300'),
+	('PROV_AUTH_ADDRESSES', 'prov.datarouternew.com'), 
+	('PROV_AUTH_SUBJECTS', ''),
+	('PROV_MAXFEED_COUNT',	'10000'),
+	('PROV_MAXSUB_COUNT',	'100000'),
+	('PROV_REQUIRE_CERT', 'false'),
+	('PROV_REQUIRE_SECURE', 'false'),
+	('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+	;
\ No newline at end of file
diff --git a/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml b/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml
new file mode 100644
index 0000000..4e2a81a
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/docker-compose.yml
@@ -0,0 +1,69 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+version: '2'

+services: 

+  datarouter-prov:

+    image: attos/datarouter-prov

+    container_name: datarouter-prov

+    hostname: prov.datarouternew.com

+    ports:

+     - "8443:8443"

+     - "8080:8080"  

+#    volumes:

+#     - ./prov_data/proserver.properties:/opt/app/datartr/etc/proserver.properties

+#     - ./prov_data/datarouter-prov-jar-with-dependencies.jar:/opt/app/datartr/lib/datarouter-prov-jar-with-dependencies.jar

+#      - ./prov_data/addSubscriber.txt:/opt/app/datartr/addSubscriber.txt

+#      - ./prov_data/addFeed3.txt:/opt/app/datartr/addFeed3.txt

+    entrypoint: ["bash", "-c", "sleep 10; /bin/sh -c ./startup.sh"]

+    depends_on:

+      - mysql_container

+    extra_hosts:

+      - "node.datarouternew.com:172.18.0.4"

+

+    

+  datarouter-node:

+    image: attos/datarouter-node

+    container_name: datarouter-node

+    hostname: node.datarouternew.com

+    ports:

+     - "9443:8443"

+     - "9090:8080"

+#    volumes:

+#     - ./node_data/node.properties:/opt/app/datartr/etc/node.properties

+    entrypoint: ["bash", "-c", "sleep 15; /bin/sh -c ./startup.sh"]    

+    depends_on:

+      - datarouter-prov

+    extra_hosts:

+      - "prov.datarouternew.com:172.18.0.3"

+      

+  mysql_container:

+    image: mysql/mysql-server:5.6

+    container_name: mysql

+    ports:

+     - "3306:3306"

+    environment:

+      MYSQL_ROOT_PASSWORD: att2017

+    volumes:

+    - ./database:/tmp/database

+    - ./database:/docker-entrypoint-initdb.d

+    

diff --git a/datarouter-prov/src/main/resources/docker-compose/node_data/node.properties b/datarouter-prov/src/main/resources/docker-compose/node_data/node.properties
new file mode 100644
index 0000000..f57833c
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/node_data/node.properties
@@ -0,0 +1,112 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+#

+#	Configuration parameters fixed at startup for the DataRouter node

+#

+#	URL to retrieve dynamic configuration

+#

+#ProvisioningURL:	${DRTR_PROV_INTURL:-https://feeds-drtr.web.att.com/internal/prov}

+ProvisioningURL=https://prov.datarouternew.com:8443/internal/prov

+

+#

+#	URL to upload PUB/DEL/EXP logs

+#

+#LogUploadURL:	${DRTR_LOG_URL:-https://feeds-drtr.web.att.com/internal/logs}

+LogUploadURL=https://prov.datarouternew.com:8443/internal/logs

+

+#

+#	The port number for http as seen within the server

+#

+#IntHttpPort:	${DRTR_NODE_INTHTTPPORT:-8080}

+IntHttpPort=8080

+#

+#	The port number for https as seen within the server

+#

+IntHttpsPort=8443

+#

+#	The external port number for https taking port mapping into account

+#

+ExtHttpsPort=443

+#

+#	The minimum interval between fetches of the dynamic configuration

+#	from the provisioning server

+#

+MinProvFetchInterval=10000

+#

+#	The minimum interval between saves of the redirection data file

+#

+MinRedirSaveInterval=10000

+#

+#	The path to the directory where log files are stored

+#

+LogDir=/opt/app/datartr/logs

+#

+#	The retention interval (in days) for log files

+#

+LogRetention=30

+#

+#	The path to the directories where data and meta data files are stored

+#

+SpoolDir=/opt/app/datartr/spool

+#

+#	The path to the redirection data file

+#

+#RedirectionFile:	etc/redirections.dat

+#

+#	The type of keystore for https

+#

+KeyStoreType:	jks

+#

+#	The path to the keystore for https

+#

+KeyStoreFile:/opt/app/datartr/self_signed/keystore.jks

+#

+#	The password for the https keystore

+#

+KeyStorePassword=changeit

+#

+#	The password for the private key in the https keystore

+#

+KeyPassword=changeit

+#

+#	The type of truststore for https

+#

+TrustStoreType=jks

+#

+#	The path to the truststore for https

+#

+#TrustStoreFile=/usr/lib/jvm/java-8-oracle/jre/lib/security/cacerts

+TrustStoreFile=/opt/app/datartr/self_signed/cacerts.jks

+#

+#	The password for the https truststore

+#

+TrustStorePassword=changeit

+#

+#	The path to the file used to trigger an orderly shutdown

+#

+QuiesceFile=etc/SHUTDOWN

+#

+#	The key used to generate passwords for node to node transfers

+#

+NodeAuthKey=Node123!

+

diff --git a/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/cacerts.jks b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/cacerts.jks
new file mode 100644
index 0000000..dfd8143
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/cacerts.jks
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/keystore.jks b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/keystore.jks
new file mode 100644
index 0000000..e5a4e78
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/keystore.jks
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/mykey.cer b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/mykey.cer
new file mode 100644
index 0000000..2a5c9d7
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/mykey.cer
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/nodekey.cer b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/nodekey.cer
new file mode 100644
index 0000000..4cdfdfe
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/node_data/self_signed/nodekey.cer
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
new file mode 100644
index 0000000..a21c7ae
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addFeed3.txt
@@ -0,0 +1,44 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+{

+     "name": "Jettydemo",

+     "version": "m1.0",

+     "description": "Jettydemo",

+     "business_description": "Jettydemo",

+     "suspend": false,

+     "deleted": false,

+     "changeowner": true,

+     "authorization": {

+          "classification": "unclassified",

+          "endpoint_addrs": [

+               "172.18.0.3",

+			],

+          "endpoint_ids": [

+               {

+                    "password": "rs873m",

+                    "id": "rs873m"

+               }

+          ]

+     },

+}

+

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
new file mode 100644
index 0000000..e974631
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/addSubscriber.txt
@@ -0,0 +1,36 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+{ 

+                "delivery" :	

+               				

+                { 

+                                "url" : "http://172.18.0.3:7070/", 

+                                "user" : "LOGIN", 

+                                "password" : "PASSWORD", 

+                                "use100" : true 

+                },

+                "metadataOnly" : false, 

+                "suspend" : false, 

+				"groupid" : 29,

+                "subscriber" : "sg481n"

+}

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties b/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties
new file mode 100644
index 0000000..6a03cbd
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/provserver.properties
@@ -0,0 +1,59 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+#

+#                        AT&T - PROPRIETARY

+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF

+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN

+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.

+#

+#          Copyright (c) 2013 AT&T Knowledge Ventures

+#              Unpublished and Not for Publication

+#                     All Rights Reserved

+#

+# CVS: $Id: provserver.properties,v 1.7 2013/05/29 14:44:36 eby Exp $

+#

+

+#Jetty Server properties

+com.att.research.datarouter.provserver.http.port           = 8080

+com.att.research.datarouter.provserver.https.port          = 8443

+com.att.research.datarouter.provserver.https.relaxation	   = true

+com.att.research.datarouter.provserver.keymanager.password = changeit

+com.att.research.datarouter.provserver.keystore.type       = jks

+com.att.research.datarouter.provserver.keystore.path       = /opt/app/datartr/self_signed/keystore.jks

+

+com.att.research.datarouter.provserver.keystore.password   = changeit

+#com.att.research.datarouter.provserver.truststore.path     = /home/eby/dr2/misc/cacerts+1

+#com.att.research.datarouter.provserver.truststore.path     = /usr/lib/jvm/java-8-oracle/jre/lib/security/cacerts

+com.att.research.datarouter.provserver.truststore.path     = /opt/app/datartr/self_signed/cacerts.jks

+

+com.att.research.datarouter.provserver.truststore.password = changeit

+com.att.research.datarouter.provserver.accesslog.dir       = /opt/app/datartr/logs

+com.att.research.datarouter.provserver.spooldir            = /opt/app/datartr/spool

+#com.att.research.datarouter.provserver.dbscripts          = /home/eby/dr2/cvs/datarouter/prov/misc/

+com.att.research.datarouter.provserver.logretention        = 30

+

+# Database access

+com.att.research.datarouter.db.driver   = com.mysql.jdbc.Driver

+com.att.research.datarouter.db.url      = jdbc:mysql://172.18.0.2:3306/datarouter

+com.att.research.datarouter.db.login    = datarouter

+com.att.research.datarouter.db.password = datarouter

diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/cacerts.jks b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/cacerts.jks
new file mode 100644
index 0000000..76a480a
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/cacerts.jks
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/keystore.jks b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/keystore.jks
new file mode 100644
index 0000000..2c22b4a
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/keystore.jks
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/mykey.cer b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/mykey.cer
new file mode 100644
index 0000000..2a5c9d7
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker-compose/prov_data/self_signed/mykey.cer
Binary files differ
diff --git a/datarouter-prov/src/main/resources/docker/Dockerfile b/datarouter-prov/src/main/resources/docker/Dockerfile
new file mode 100644
index 0000000..215c433
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker/Dockerfile
@@ -0,0 +1,9 @@
+FROM java:8

+ADD opt /opt/

+ADD startup.sh /startup.sh

+RUN chmod 700 /startup.sh

+ENTRYPOINT ./startup.sh start

+EXPOSE 8443

+EXPOSE 8080

+

+

diff --git a/datarouter-prov/src/main/resources/docker/startup.sh b/datarouter-prov/src/main/resources/docker/startup.sh
new file mode 100644
index 0000000..191a804
--- /dev/null
+++ b/datarouter-prov/src/main/resources/docker/startup.sh
@@ -0,0 +1,16 @@
+LIB=/opt/app/datartr/lib
+ETC=/opt/app/datartr/etc
+echo "this is LIB" $LIB
+echo "this is ETC" $ETC
+mkdir -p /opt/app/datartr/logs
+mkdir -p /opt/app/datartr/spool
+CLASSPATH=$ETC
+for FILE in `find $LIB -name *.jar`; do
+  CLASSPATH=$CLASSPATH:$FILE
+done
+java -classpath $CLASSPATH  com.att.research.datarouter.provisioning.Main
+
+runner_file="$LIB/datarouter-prov-jar-with-dependencies.jar"
+echo "Starting using" $runner_file
+java -Dcom.att.eelf.logging.file==/opt/app/datartr/etc/logback.xml -Dcom.att.eelf.logging.path=/root -jar $runner_file
+
diff --git a/datarouter-prov/src/main/resources/log4j.properties b/datarouter-prov/src/main/resources/log4j.properties
new file mode 100644
index 0000000..bb4eaa0
--- /dev/null
+++ b/datarouter-prov/src/main/resources/log4j.properties
@@ -0,0 +1,68 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+

+

+log4j.rootLogger=info

+

+log4j.appender.stdout=org.apache.log4j.ConsoleAppender

+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout

+log4j.appender.stdout.layout.ConversionPattern=%d %5p [%t] - %m%n

+

+#

+# Logger used for provisioning events

+#

+log4j.logger.com.att.research.datarouter.provisioning.events=info, eventlog

+log4j.additivity.com.att.research.datarouter.provisioning.events=false

+

+log4j.appender.eventlog=org.apache.log4j.DailyRollingFileAppender

+log4j.appender.eventlog.file=/root/dr2/logs/provevent.log

+log4j.appender.eventlog.datePattern='.'yyyyMMdd

+log4j.appender.eventlog.append=true

+log4j.appender.eventlog.layout=org.apache.log4j.PatternLayout

+log4j.appender.eventlog.layout.ConversionPattern=%d %-5p [%t] - %m%n

+

+#

+# Logger used for internal provisioning server events

+#

+log4j.logger.com.att.research.datarouter.provisioning.internal=debug, intlog

+log4j.additivity.com.att.research.datarouter.provisioning.internal=false

+

+log4j.appender.intlog=org.apache.log4j.DailyRollingFileAppender

+log4j.appender.intlog.file=/root/dr2/logs/provint.log

+log4j.appender.intlog.datePattern='.'yyyyMMdd

+log4j.appender.intlog.append=true

+log4j.appender.intlog.layout=org.apache.log4j.PatternLayout

+log4j.appender.intlog.layout.ConversionPattern=%d %-5p [%t] - %m%n

+

+#

+# Logger used for policy engine

+#

+log4j.logger.com.att.research.datarouter.authz.impl.ProvAuthorizer=debug, pelog

+log4j.additivity.com.att.research.datarouter.authz.impl.ProvAuthorizer=false

+

+log4j.appender.pelog=org.apache.log4j.DailyRollingFileAppender

+log4j.appender.pelog.file=/root/dr2/logs/policyengine.log

+log4j.appender.pelog.datePattern='.'yyyyMMdd

+log4j.appender.pelog.append=true

+log4j.appender.pelog.layout=org.apache.log4j.PatternLayout

+log4j.appender.pelog.layout.ConversionPattern=%d %-5p [%t] - %m%n

diff --git a/datarouter-prov/src/main/resources/logback.xml b/datarouter-prov/src/main/resources/logback.xml
new file mode 100644
index 0000000..7d73e0d
--- /dev/null
+++ b/datarouter-prov/src/main/resources/logback.xml
@@ -0,0 +1,405 @@
+<!--

+  ============LICENSE_START==================================================

+  * org.onap.dmaap

+  * ===========================================================================

+  * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+  * ===========================================================================

+  * Licensed under the Apache License, Version 2.0 (the "License");

+  * you may not use this file except in compliance with the License.

+  * You may obtain a copy of the License at

+  * 

+   *      http://www.apache.org/licenses/LICENSE-2.0

+  * 

+   * Unless required by applicable law or agreed to in writing, software

+  * distributed under the License is distributed on an "AS IS" BASIS,

+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+  * See the License for the specific language governing permissions and

+  * limitations under the License.

+  * ============LICENSE_END====================================================

+  *

+  * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+  *

+-->

+<configuration scan="true" scanPeriod="3 seconds" debug="true">

+  <!--<jmxConfigurator /> -->

+  <!-- directory path for all other type logs -->

+  <!-- property name="logDir" value="/home/eby/dr2/logs" / -->

+  <property name="logDir" value="/opt/app/datartr/logs" />

+ 

+  <!-- directory path for debugging type logs -->

+  <!-- property name="debugDir" value="/home/eby/dr2/debug-logs" /-->

+  

+  <!--  specify the component name 

+    <ECOMP-component-name>::= "MSO" | "DCAE" | "ASDC " | "AAI" |"Policy" | "SDNC" | "AC"  -->

+  <!-- This creates the MSO directory in in the LogDir which is not needed, mentioned last directory of the path-->

+  <!-- property name="componentName" value="logs"></property -->

+  

+  <!--  log file names -->

+  <property name="generalLogName" value="apicalls" />

+  <!-- name="securityLogName" value="security" -->

+  <!-- name="performanceLogName" value="performance" -->

+  <!-- name="serverLogName" value="server" -->

+  <!-- name="policyLogName" value="policy"-->

+  <property name="errorLogName" value="errors" />

+  <!-- name="metricsLogName" value="metrics" -->

+  <!-- name="auditLogName" value="audit" -->

+  <!-- name="debugLogName" value="debug" -->

+  <property name="jettyLogName" value="jetty"></property> 

+  <property name="defaultPattern"    value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|%msg%n" />

+  <property name="jettyLoggerPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%logger|%thread|%.-5level|%msg%n" />

+  

+  <property name="debugLoggerPattern" value="%d{MM/dd-HH:mm:ss.SSS}|%X{RequestId}|%X{ServiceInstanceId}|%thread|%X{ServiceName}|%X{InstanceUUID}|%.-5level|%X{AlertSeverity}|%X{ServerIPAddress}|%X{ServerFQDN}|%X{RemoteHost}|%X{Timer}|[%caller{3}]|%msg%n" />

+     

+  <property name="logDirectory" value="${logDir}" />

+  <!-- property name="debugLogDirectory" value="${debugDir}/${componentName}" /-->

+  

+  

+  <!-- Example evaluator filter applied against console appender -->

+  <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">

+    <encoder>

+      <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+

+  <!-- ============================================================================ -->

+  <!-- EELF Appenders -->

+  <!-- ============================================================================ -->

+

+  <!-- The EELFAppender is used to record events to the general application 

+    log -->

+    

+    

+  <appender name="EELF"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${generalLogName}.log</file>

+     <filter class="ch.qos.logback.classic.filter.LevelFilter">

+		<level>INFO</level>

+		<onMatch>ACCEPT</onMatch>

+		<onMismatch>DENY</onMismatch>

+	</filter>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${generalLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  <appender name="asyncEELF" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELF" />

+  </appender>

+

+  <!-- EELF Security Appender. This appender is used to record security events 

+    to the security log file. Security events are separate from other loggers 

+    in EELF so that security log records can be captured and managed in a secure 

+    way separate from the other logs. This appender is set to never discard any 

+    events. -->

+  <!--appender name="EELFSecurity"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${securityLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${securityLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  <appender name="asyncEELFSecurity" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <discardingThreshold>0</discardingThreshold>

+    <appender-ref ref="EELFSecurity" />

+  </appender-->

+

+  <!-- EELF Performance Appender. This appender is used to record performance 

+    records. -->

+  <!--appender name="EELFPerformance"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${performanceLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${performanceLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <outputPatternAsHeader>true</outputPatternAsHeader>

+      <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  <appender name="asyncEELFPerformance" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFPerformance" />

+  </appender-->

+

+  <!-- EELF Server Appender. This appender is used to record Server related 

+    logging events. The Server logger and appender are specializations of the 

+    EELF application root logger and appender. This can be used to segregate Server 

+    events from other components, or it can be eliminated to record these events 

+    as part of the application root log. -->

+  <!--appender name="EELFServer"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${serverLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${serverLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+        <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  <appender name="asyncEELFServer" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFServer" />

+  </appender-->

+

+  

+  <!-- EELF Policy Appender. This appender is used to record Policy engine 

+    related logging events. The Policy logger and appender are specializations 

+    of the EELF application root logger and appender. This can be used to segregate 

+    Policy engine events from other components, or it can be eliminated to record 

+    these events as part of the application root log. -->

+  <!--appender name="EELFPolicy"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${policyLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${policyLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+        <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  <appender name="asyncEELFPolicy" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFPolicy" >

+  </appender-->

+  

+  

+  <!-- EELF Audit Appender. This appender is used to record audit engine 

+    related logging events. The audit logger and appender are specializations 

+    of the EELF application root logger and appender. This can be used to segregate 

+    Policy engine events from other components, or it can be eliminated to record 

+    these events as part of the application root log. -->

+    

+  <!--appender name="EELFAudit"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${auditLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${auditLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+         <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  <appender name="asyncEELFAudit" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFAudit" />

+  </appender-->

+

+<!--appender name="EELFMetrics"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${metricsLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${metricsLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder-->

+      <!-- <pattern>"%d{HH:mm:ss.SSS} [%thread] %-5level %logger{1024} - 

+        %msg%n"</pattern> -->

+      <!--pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  

+  <appender name="asyncEELFMetrics" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFMetrics"/>

+  </appender-->

+   

+  <appender name="EELFError"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${errorLogName}.log</file>

+    <filter class="ch.qos.logback.classic.filter.LevelFilter">

+		<level>ERROR</level>

+		<onMatch>ACCEPT</onMatch>

+		<onMismatch>DENY</onMismatch>

+	</filter>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${errorLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <pattern>${defaultPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  <appender name="asyncEELFError" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFError"/>

+  </appender>

+  

+  <!-- ============================================================================ -->

+   <appender name="jettylog"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${logDirectory}/${jettyLogName}.log</file>

+	 <filter class="com.att.research.datarouter.provisioning.eelf.JettyFilter" />

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${logDirectory}/${jettyLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <pattern>${jettyLoggerPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  <appender name="asyncEELFjettylog" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="jettylog" />

+    <includeCallerData>true</includeCallerData>

+  </appender>

+  

+   <!-- ============================================================================ -->

+

+

+   <!--appender name="EELFDebug"

+    class="ch.qos.logback.core.rolling.RollingFileAppender">

+    <file>${debugLogDirectory}/${debugLogName}.log</file>

+    <rollingPolicy

+      class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">

+      <fileNamePattern>${debugLogDirectory}/${debugLogName}.%i.log.zip

+      </fileNamePattern>

+      <minIndex>1</minIndex>

+      <maxIndex>9</maxIndex>

+    </rollingPolicy>

+    <triggeringPolicy

+      class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">

+      <maxFileSize>5MB</maxFileSize>

+    </triggeringPolicy>

+    <encoder>

+      <pattern>${debugLoggerPattern}</pattern>

+    </encoder>

+  </appender>

+  

+  <appender name="asyncEELFDebug" class="ch.qos.logback.classic.AsyncAppender">

+    <queueSize>256</queueSize>

+    <appender-ref ref="EELFDebug" />

+    <includeCallerData>true</includeCallerData>

+  </appender-->

+ 

+  

+  <!-- ============================================================================ -->

+  <!--  EELF loggers -->

+  <!-- ============================================================================ -->

+  <logger name="com.att.eelf" level="info" additivity="false">

+    <appender-ref ref="asyncEELF" />

+  </logger>

+  

+     <logger name="com.att.eelf.error" level="error" additivity="false">

+ 		 <appender-ref ref="asyncEELFError" />

+ 	 </logger>

+  

+     <logger name="log4j.logger.org.eclipse.jetty" additivity="false" level="info">

+		<appender-ref ref="asyncEELFjettylog"/>

+	</logger> 

+	

+  <!-- logger name="com.att.eelf.security" level="info" additivity="false">

+    <appender-ref ref="asyncEELFSecurity" /> 

+  </logger>

+  <logger name="com.att.eelf.perf" level="info" additivity="false">

+    <appender-ref ref="asyncEELFPerformance" />

+  </logger>

+  <logger name="com.att.eelf.server" level="info" additivity="false">

+    <appender-ref ref="asyncEELFServer" />

+  </logger>

+  <logger name="com.att.eelf.policy" level="info" additivity="false">

+    <appender-ref ref="asyncEELFPolicy" />

+  </logger>

+

+  <logger name="com.att.eelf.audit" level="info" additivity="false">

+    <appender-ref ref="asyncEELFAudit" />

+  </logger>

+  

+  <logger name="com.att.eelf.metrics" level="info" additivity="false">

+        <appender-ref ref="asyncEELFMetrics" />

+  </logger>

+   

+   <logger name="com.att.eelf.debug" level="debug" additivity="false">

+        <appender-ref ref="asyncEELFDebug" />

+  </logger-->

+

+  

+

+  

+  <root level="INFO">

+    <appender-ref ref="asyncEELF" />

+    <appender-ref ref="asyncEELFError" />

+     <appender-ref ref="asyncEELFjettylog" />

+  </root>

+

+</configuration>

diff --git a/datarouter-prov/src/main/resources/misc/doaction b/datarouter-prov/src/main/resources/misc/doaction
new file mode 100644
index 0000000..4319332
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/doaction
@@ -0,0 +1,53 @@
+#!/bin/bash
+
+cd /opt/app/datartr/etc
+for action in "$@"
+do
+case "$action" in
+'stop')
+	/opt/app/platform/init.d/drtrprov stop
+	;;
+'start')
+	/opt/app/platform/init.d/drtrprov start || exit 1
+	;;
+'backup')
+	cp log4j.properties log4j.properties.save 2>/dev/null
+	cp provserver.properties provserver.properties.save 2>/dev/null
+	cp mail.properties mail.properties.save 2>/dev/null
+	cp havecert havecert.save 2>/dev/null
+	cp mysql_init_0001 mysql_init_0001.save 2>/dev/null
+	;;
+'restore')
+	cp log4j.properties.save log4j.properties 2>/dev/null
+	cp provserver.properties.save provserver.properties 2>/dev/null
+	cp mail.properties.save mail.properties 2>/dev/null
+	cp havecert.save havecert 2>/dev/null
+	cp mysql_init_0001.save mysql_init_0001 2>/dev/null
+	;;
+'config')
+	/bin/bash log4j.properties.tmpl >log4j.properties
+	/bin/bash provserver.properties.tmpl >provserver.properties
+	/bin/bash mail.properties.tmpl >mail.properties
+	/bin/bash havecert.tmpl >havecert
+	/bin/bash mysql_init_0001.tmpl >mysql_init_0001
+	echo "$AFTSWM_ACTION_NEW_VERSION" >VERSION.prov
+	chmod +x havecert
+	rm -f /opt/app/platform/rc.d/K90zdrtrprov /opt/app/platform/rc.d/S99zdrtrprov
+	ln -s ../init.d/drtrprov /opt/app/platform/rc.d/K90zdrtrprov
+	ln -s ../init.d/drtrprov /opt/app/platform/rc.d/S99zdrtrprov
+	;;
+'clean')
+	rm -f log4j.properties log4j.properties.save
+	rm -f provserver.properties provserver.properties.save
+	rm -f mail.properties mail.properties.save
+	rm -f havecert havecert.properties.save
+	rm -f mysql_init_0001 mysql_init_0001.save
+	rm -f VERSION.prov
+	rm -f /opt/app/platform/rc.d/K90zdrtrprov /opt/app/platform/rc.d/S99zdrtrprov
+	;;
+*)
+	exit 1
+	;;
+esac
+done
+exit 0
diff --git a/datarouter-prov/src/main/resources/misc/dr-route b/datarouter-prov/src/main/resources/misc/dr-route
new file mode 100644
index 0000000..77c6c18
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/dr-route
@@ -0,0 +1,26 @@
+#!/bin/bash
+#
+#                        AT&T - PROPRIETARY
+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.
+#
+#          Copyright (c) 2013 AT&T Knowledge Ventures
+#              Unpublished and Not for Publication
+#                     All Rights Reserved
+#
+#  dr-route -- A script to interact with a provisioning server to manage the DR routing tables.
+#
+#  $Id: dr-route,v 1.2 2013/11/06 16:23:54 eby Exp $
+#
+
+JAVA_HOME=/opt/java/jdk/jdk180
+JAVA_OPTS="-Xms1G -Xmx1G"
+TZ=GMT0
+PATH=$JAVA_HOME/bin:/bin:/usr/bin
+CLASSPATH=`echo /opt/app/datartr/etc /opt/app/datartr/lib/*.jar | tr ' ' ':'`
+export CLASSPATH JAVA_HOME JAVA_OPTS TZ PATH
+
+$JAVA_HOME/bin/java \
+	-Dlog4j.configuration=file:///opt/app/datartr/etc/log4j.drroute.properties \
+	com.att.research.datarouter.provisioning.utils.DRRouteCLI $*
diff --git a/datarouter-prov/src/main/resources/misc/drtrprov b/datarouter-prov/src/main/resources/misc/drtrprov
new file mode 100644
index 0000000..c801ce0
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/drtrprov
@@ -0,0 +1,131 @@
+#!/bin/bash
+#
+#                        AT&T - PROPRIETARY
+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.
+#
+#          Copyright (c) 2013 AT&T Knowledge Ventures
+#              Unpublished and Not for Publication
+#                     All Rights Reserved
+#
+#  This is the startup/shutdown script for the AT&T Data Router Provisioning Server.
+#
+#  $Id: drtrprov,v 1.3 2013/10/29 16:57:57 eby Exp $
+#
+
+umask 0022
+
+JAVA_HOME=/opt/java/jdk/jdk180
+JAVA_OPTS="-Xms2G -Xmx8G"
+TZ=GMT0
+PATH=$JAVA_HOME/bin:/bin:/usr/bin
+CLASSPATH=`echo /opt/app/datartr/etc /opt/app/datartr/lib/*.jar | tr ' ' ':'`
+export CLASSPATH JAVA_HOME JAVA_OPTS TZ PATH
+
+pids() {
+	pgrep -u datartr -f provisioning.Main
+}
+
+start() {
+	ID=`id -n -u`
+	GRP=`id -n -g`
+	if [ "$ID" != "root" ]
+	then
+		echo drtrprov must be started as user datartr not $ID
+		exit 1
+	fi
+#  if [ "$GRP" != "datartr" ]
+# 	then
+# 		echo drtrprov must be started as group datartr not $GRP
+# 		exit 1
+# 	fi  
+# 	cd /opt/app/datartr
+# 	if etc/havecert
+# 	then
+# 		echo >/dev/null
+# 	else
+# 		echo No certificate file available.  Cannot start
+# 		exit 0
+# 	fi
+	if [ "`pgrep -u mysql mysqld`" = "" ]
+	then
+		echo MySQL is not running.  It must be started before drtrprov
+		exit 0
+	fi
+	PIDS=`pids`
+	if [ "$PIDS" != "" ]
+	then
+		echo drtrprov already running
+		exit 0
+	fi
+	echo '0 1 * * * /opt/app/datartr/bin/runreports' | crontab
+	nohup java $JAVA_OPTS com.att.research.datarouter.provisioning.Main </dev/null &
+	sleep 5
+	PIDS=`pids`
+	if [ "$PIDS" = "" ]
+	then
+		echo drtrprov startup failed
+	else
+		echo drtrprov started
+	fi
+}
+
+stop() {
+	ID=`id -n -u`
+	GRP=`id -n -g`
+	if [ "$ID" != "datartr" ]
+	then
+		echo drtrprov must be stopped as user datartr not $ID
+		exit 1
+	fi
+	if [ "$GRP" != "datartr" ]
+	then
+		echo drtrprov must be stopped as group datartr not $GRP
+		exit 1
+	fi
+	/usr/bin/curl http://127.0.0.1:8080/internal/halt
+	sleep 5
+	PIDS=`pids`
+	if [ "$PIDS" != "" ]
+	then
+		sleep 5
+		kill -9 $PIDS
+		sleep 5
+		echo drtrprov stopped
+	else
+		echo drtrprov not running
+	fi
+}
+
+status() {
+	PIDS=`pids`
+	if [ "$PIDS" != "" ]
+	then
+		echo drtrprov running
+	else
+		echo drtrprov not running
+	fi
+}
+
+case "$1" in
+'start')
+	start
+	;;
+'stop')
+	stop
+	;;
+'restart')
+	stop
+	sleep 20
+	start
+	;;
+'status')
+	status
+	;;
+*)
+	echo "Usage: $0 { start | stop | restart | status }"
+	exit 1
+	;;
+esac
+exit 0
diff --git a/datarouter-prov/src/main/resources/misc/havecert.tmpl b/datarouter-prov/src/main/resources/misc/havecert.tmpl
new file mode 100644
index 0000000..e238986
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/havecert.tmpl
@@ -0,0 +1,11 @@
+#!/bin/bash
+cat <<!EOF
+TZ=GMT0
+cd /opt/app/datartr;
+if [ -f ${DRTR_PROV_KSTOREFILE:-etc/keystore} ]
+then
+	exit 0
+fi
+echo `date '+%F %T,000'` WARN Certificate file "${DRTR_PROV_KSTOREFILE:-etc/keystore}" is missing >>${DRTR_PROV_LOGS:-logs}/provint.log
+exit 1
+!EOF
diff --git a/datarouter-prov/src/main/resources/misc/log4j.drroute.properties b/datarouter-prov/src/main/resources/misc/log4j.drroute.properties
new file mode 100644
index 0000000..4ff4278
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/log4j.drroute.properties
@@ -0,0 +1,41 @@
+#-------------------------------------------------------------------------------

+# ============LICENSE_START==================================================

+# * org.onap.dmaap

+# * ===========================================================================

+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.

+# * ===========================================================================

+# * Licensed under the Apache License, Version 2.0 (the "License");

+# * you may not use this file except in compliance with the License.

+# * You may obtain a copy of the License at

+# * 

+#  *      http://www.apache.org/licenses/LICENSE-2.0

+# * 

+#  * Unless required by applicable law or agreed to in writing, software

+# * distributed under the License is distributed on an "AS IS" BASIS,

+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+# * See the License for the specific language governing permissions and

+# * limitations under the License.

+# * ============LICENSE_END====================================================

+# *

+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.

+# *

+#-------------------------------------------------------------------------------

+#

+#                        AT&T - PROPRIETARY

+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF

+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN

+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.

+#

+#          Copyright (c) 2013 AT&T Knowledge Ventures

+#              Unpublished and Not for Publication

+#                     All Rights Reserved

+#

+# CVS: $Id: log4j.drroute.properties,v 1.1 2013/11/06 16:23:54 eby Exp $

+#	This log4j properties file is used only by dr-route

+#

+

+log4j.rootLogger=INFO, stdout

+log4j.appender.stdout=org.apache.log4j.ConsoleAppender

+log4j.appender.stdout.Target=System.out

+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout

+log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n

diff --git a/datarouter-prov/src/main/resources/misc/log4j.properties.tmpl b/datarouter-prov/src/main/resources/misc/log4j.properties.tmpl
new file mode 100644
index 0000000..ed1d7fa
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/log4j.properties.tmpl
@@ -0,0 +1,68 @@
+cat <<!EOF
+#
+#                        AT&T - PROPRIETARY
+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.
+#
+#          Copyright (c) 2013 AT&T Knowledge Ventures
+#              Unpublished and Not for Publication
+#                     All Rights Reserved
+#
+# CVS: $Id: log4j.properties.tmpl,v 1.4 2014/01/13 19:44:57 eby Exp $
+#
+
+log4j.rootLogger=info
+
+#
+# Logger used for provisioning events
+#
+log4j.logger.com.att.research.datarouter.provisioning.events=info, eventlog
+log4j.additivity.com.att.research.datarouter.provisioning.events=false
+
+log4j.appender.eventlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.eventlog.file=${DRTR_PROV_LOGS:-/opt/app/datartr/logs}/provevent.log
+log4j.appender.eventlog.datePattern='.'yyyyMMdd
+log4j.appender.eventlog.append=true
+log4j.appender.eventlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.eventlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for internal provisioning server events
+#
+log4j.logger.com.att.research.datarouter.provisioning.internal=debug, intlog
+log4j.additivity.com.att.research.datarouter.provisioning.internal=false
+
+log4j.appender.intlog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.intlog.file=${DRTR_PROV_LOGS:-/opt/app/datartr/logs}/provint.log
+log4j.appender.intlog.datePattern='.'yyyyMMdd
+log4j.appender.intlog.append=true
+log4j.appender.intlog.layout=org.apache.log4j.PatternLayout
+log4j.appender.intlog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for policy engine
+#
+log4j.logger.com.att.research.datarouter.authz.impl.ProvAuthorizer=debug, pelog
+log4j.additivity.com.att.research.datarouter.authz.impl.ProvAuthorizer=false
+
+log4j.appender.pelog=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.pelog.file=${DRTR_PROV_LOGS:-/opt/app/datartr/logs}/policyengine.log
+log4j.appender.pelog.datePattern='.'yyyyMMdd
+log4j.appender.pelog.append=true
+log4j.appender.pelog.layout=org.apache.log4j.PatternLayout
+log4j.appender.pelog.layout.ConversionPattern=%d %-5p [%t] - %m%n
+
+#
+# Logger used for Jetty server
+#
+log4j.logger.org.eclipse.jetty=info, jetty
+log4j.additivity.org.eclipse.jetty.server.Server=false
+
+log4j.appender.jetty=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.jetty.file=${DRTR_PROV_LOGS:-/opt/app/datartr/logs}/jetty.log
+log4j.appender.jetty.datePattern='.'yyyyMMdd
+log4j.appender.jetty.append=true
+log4j.appender.jetty.layout=org.apache.log4j.PatternLayout
+log4j.appender.jetty.layout.ConversionPattern=%d %-5p [%t] - %m%n
+!EOF
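As with havecert.tmpl, this template is expanded by the shell, so each ${DRTR_PROV_LOGS:-/opt/app/datartr/logs} reference falls back to its default when the corresponding SWM variable is unset. A rendering sketch (the target path is an assumption):

    # sketch: render the log4j template into the runtime config (target path is an assumption)
    DRTR_PROV_LOGS=/opt/app/datartr/logs bash log4j.properties.tmpl > /opt/app/datartr/etc/log4j.properties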
diff --git a/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql b/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql
new file mode 100644
index 0000000..837030c
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/mysql_dr_schema.sql
@@ -0,0 +1,139 @@
+create database datarouter;
+
+use datarouter;
+
+CREATE TABLE FEEDS (
+    FEEDID         INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME           VARCHAR(20) NOT NULL,
+    VERSION        VARCHAR(20) NOT NULL,
+    DESCRIPTION    VARCHAR(256),
+    AUTH_CLASS     VARCHAR(32) NOT NULL,
+    PUBLISHER      VARCHAR(8) NOT NULL,
+    SELF_LINK      VARCHAR(256),
+    PUBLISH_LINK   VARCHAR(256),
+    SUBSCRIBE_LINK VARCHAR(256),
+    LOG_LINK       VARCHAR(256),
+    DELETED        BOOLEAN DEFAULT FALSE,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE FEED_ENDPOINT_IDS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    USERID        VARCHAR(20) NOT NULL,
+    PASSWORD      VARCHAR(32) NOT NULL
+);
+
+CREATE TABLE FEED_ENDPOINT_ADDRS (
+    FEEDID        INT UNSIGNED NOT NULL,
+    ADDR          VARCHAR(44) NOT NULL
+);
+
+CREATE TABLE SUBSCRIPTIONS (
+    SUBID              INT UNSIGNED NOT NULL PRIMARY KEY,
+    FEEDID             INT UNSIGNED NOT NULL,
+    DELIVERY_URL       VARCHAR(256),
+    DELIVERY_USER      VARCHAR(20),
+    DELIVERY_PASSWORD  VARCHAR(32),
+    DELIVERY_USE100    BOOLEAN DEFAULT FALSE,
+    METADATA_ONLY      BOOLEAN DEFAULT FALSE,
+    SUBSCRIBER         VARCHAR(8) NOT NULL,
+    SELF_LINK          VARCHAR(256),
+    LOG_LINK           VARCHAR(256),
+    LAST_MOD           TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+CREATE TABLE PARAMETERS (
+    KEYNAME        VARCHAR(32) NOT NULL PRIMARY KEY,
+    VALUE          VARCHAR(4096) NOT NULL
+);
+
+CREATE TABLE LOG_RECORDS (
+    TYPE	   ENUM('pub', 'del', 'exp') NOT NULL,
+    EVENT_TIME     BIGINT NOT NULL,           /* time of the publish request */
+    PUBLISH_ID     VARCHAR(64) NOT NULL,      /* unique ID assigned to this publish attempt */
+    FEEDID         INT UNSIGNED NOT NULL,     /* pointer to feed in FEEDS */
+    REQURI         VARCHAR(256) NOT NULL,     /* request URI */
+    METHOD         ENUM('DELETE', 'GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'TRACE') NOT NULL, /* HTTP method */
+    CONTENT_TYPE   VARCHAR(256) NOT NULL,     /* content type of published file */
+    CONTENT_LENGTH BIGINT UNSIGNED NOT NULL,  /* content length of published file */
+
+    FEED_FILEID    VARCHAR(128),		/* file ID of published file */
+    REMOTE_ADDR    VARCHAR(40),			/* IP address of publishing endpoint */
+    USER           VARCHAR(20),			/* user name of publishing endpoint */
+    STATUS         SMALLINT,			/* status code returned to delivering agent */
+
+    DELIVERY_SUBID INT UNSIGNED,		/* pointer to subscription in SUBSCRIPTIONS */
+    DELIVERY_FILEID  VARCHAR(128),		/* file ID of file being delivered */
+    RESULT         SMALLINT,			/* result received from subscribing agent */
+
+    ATTEMPTS       INT,				/* deliveries attempted */
+    REASON         ENUM('notRetryable', 'retriesExhausted'),
+
+    RECORD_ID      BIGINT UNSIGNED NOT NULL PRIMARY KEY, /* unique ID for this record */
+
+    INDEX (FEEDID) USING BTREE,
+    INDEX (DELIVERY_SUBID) USING BTREE,
+    INDEX (RECORD_ID) USING BTREE
+) ENGINE = MyISAM;
+
+CREATE TABLE INGRESS_ROUTES (
+    SEQUENCE  INT UNSIGNED NOT NULL,
+    FEEDID    INT UNSIGNED NOT NULL,
+    USERID    VARCHAR(20),
+    SUBNET    VARCHAR(44),
+    NODESET   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE EGRESS_ROUTES (
+    SUBID    INT UNSIGNED NOT NULL PRIMARY KEY,
+    NODEID   INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NETWORK_ROUTES (
+    FROMNODE INT UNSIGNED NOT NULL,
+    TONODE   INT UNSIGNED NOT NULL,
+    VIANODE  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODESETS (
+    SETID   INT UNSIGNED NOT NULL,
+    NODEID  INT UNSIGNED NOT NULL
+);
+
+CREATE TABLE NODES (
+    NODEID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    NAME    VARCHAR(255) NOT NULL,
+    ACTIVE  BOOLEAN DEFAULT TRUE
+);
+
+CREATE TABLE GROUPS (
+    GROUPID  INT UNSIGNED NOT NULL PRIMARY KEY,
+    AUTHID    VARCHAR(100) NOT NULL,
+    NAME    VARCHAR(50) NOT NULL,
+    DESCRIPTION    VARCHAR(255),
+    CLASSIFICATION    VARCHAR(20) NOT NULL,
+    MEMBERS    TINYTEXT,
+    LAST_MOD       TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+);
+
+-- PROV_AUTH_ADDRESSES ('192.168.56.1') is the IPv4 address of the provisioning server
+INSERT INTO PARAMETERS VALUES
+	('ACTIVE_POD',  '127.0.0.1'),
+	('PROV_ACTIVE_NAME',  '${PROV_ACTIVE_NAME}'),
+	('STANDBY_POD', '${DRTR_PROV_STANDBYPOD}'),
+	('PROV_NAME',   'ALCDTL47TJ6015:6080'),
+	('NODES',       '127.0.0.1:8080'),
+	('PROV_DOMAIN', '127.0.0.1'),
+	('DELIVERY_INIT_RETRY_INTERVAL', '10'),
+	('DELIVERY_MAX_AGE', '86400'),
+	('DELIVERY_MAX_RETRY_INTERVAL', '3600'),
+	('DELIVERY_RETRY_RATIO', '2'),
+	('LOGROLL_INTERVAL', '300'),
+	('PROV_AUTH_ADDRESSES', '192.168.56.1'), 
+	('PROV_AUTH_SUBJECTS', ''),
+	('PROV_MAXFEED_COUNT',	'10000'),
+	('PROV_MAXSUB_COUNT',	'100000'),
+	('PROV_REQUIRE_CERT', 'false'),
+	('PROV_REQUIRE_SECURE', 'false'),
+	('_INT_VALUES', 'LOGROLL_INTERVAL|PROV_MAXFEED_COUNT|PROV_MAXSUB_COUNT|DELIVERY_INIT_RETRY_INTERVAL|DELIVERY_MAX_RETRY_INTERVAL|DELIVERY_RETRY_RATIO|DELIVERY_MAX_AGE')
+	;
\ No newline at end of file
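The schema and seed parameters load with the stock mysql client; the ${PROV_ACTIVE_NAME} and ${DRTR_PROV_STANDBYPOD} placeholders in the INSERT suggest the file is normally preprocessed before loading. A sketch using the default datarouter credentials listed in the notes file below, assuming that account may create the database:

    # sketch: load the schema and inspect the seeded parameters
    mysql -u datarouter -pdatarouter < mysql_dr_schema.sql
    mysql -u datarouter -pdatarouter datarouter \
        -e 'SELECT KEYNAME, VALUE FROM PARAMETERS ORDER BY KEYNAME'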
diff --git a/datarouter-prov/src/main/resources/misc/notes b/datarouter-prov/src/main/resources/misc/notes
new file mode 100644
index 0000000..e3f872e
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/notes
@@ -0,0 +1,78 @@
+Package notes for com.att.dmaap.datarouter:prov
+
+This component is for the Data Router Provisioning Server software.
+
+The following pre-requisite components should already be present:
+	com.att.aft.swm:swm-cli
+	com.att.aft.swm:swm-node
+	- SWM Variables: AFTSWM_AUTOLINK_PARENTS=/opt/app:/opt/app/workload,/opt/app/aft:/opt/app/workload/aft
+	com.att.platform:uam-auto
+	com.att.java:jdk8lin
+	com.att.platform:initd
+	com.att.platform:port-fwd
+	- SWM Variables: PLATFORM_PORT_FWD=80,8080|443,8443
+	mysql:mysql
+	mysql:mysql-config
+	- SWM Variables: MYSQL_CONFIG_SIZE=small
+		MYSQL_DB_DATABASES=datarouter
+		MYSQL_DB_datarouter_USERS=datarouter,tier2
+		MYSQL_DB_datarouter_USERS_datarouter_LEVEL=RW
+		MYSQL_DB_datarouter_USERS_datarouter_PASSWORD=datarouter
+		MYSQL_DB_datarouter_USERS_tier2_LEVEL=RO
+		MYSQL_DB_datarouter_USERS_tier2_PASSWORD=<password>
+		MYSQL_MAX_ALLOWED_PACKET=32M
+		MYSQL_MAX_CONNECTIONS=300
+		MYSQL_PASSWORD=datarouter
+		MYSQL_PORT=3306
+
+
+In a production environment, the SWM variables that MUST be overridden are:
+	DRTR_PROV_ACTIVEPOD, DRTR_PROV_STANDBYPOD, DRTR_PROV_NODES
+In addition, in a non-production environment, the DRTR_PROV_CNAME SWM variable
+must also be overridden.
+
+The SWM variables that can be set to control the provisioning server are:
+
+DRTR_PROV_ACTIVEPOD
+	The FQDN of the active POD
+DRTR_PROV_STANDBYPOD
+	The FQDN of the standby POD
+DRTR_PROV_CNAME (default feeds-drtr.web.att.com)
+	The DNS CNAME used for the prov server in this environment.
+DRTR_PROV_NODES
+	Pipe-delimited list of DR nodes to init the DB with.
+DRTR_PROV_DOMAIN (default web.att.com)
+	Domain to use for non-FQDN node names
+
+DRTR_PROV_INTHTTPPORT (default 8080)
+	The TCP/IP port number the component should listen on for "go fetch"
+	requests from the provisioning server
+DRTR_PROV_INTHTTPSPORT (default 8443)
+	The TCP/IP port number the component should listen on for publish
+	requests from feed publishers and other nodes
+DRTR_PROV_LOGS (default /opt/app/datartr/logs)
+	The directory where log files should be kept
+DRTR_PROV_SPOOL (default /opt/app/datartr/spool)
+	The directory where logfiles from the DR nodes are spooled before being
+	imported into the DB.
+
+DRTR_PROV_KEYMGRPASS (default changeit)
+	The password for the key manager
+DRTR_PROV_KSTOREFILE (default /opt/app/datartr/etc/keystore)
+	The java keystore file containing the server certificate and private key
+	for this server
+DRTR_PROV_KSTOREPASS (default changeit)
+	The password for the keystore file
+DRTR_PROV_TSTOREFILE (by default, use the truststore from the Java JDK)
+	The java keystore file containing the trusted certificate authority
+	certificates
+DRTR_PROV_TSTOREPASS (default changeit)
+	The password for the trust store file.  Only applies if a trust store
+	file is specified.
+
+DRTR_PROV_DBLOGIN (default datarouter)
+	The login used to access MySQL
+DRTR_PROV_DBPASS (default datarouter)
+	The password used to access MySQL
+DRTR_PROV_DBSCRIPTS (default /opt/app/datartr/etc)
+	The directory containing DB initialization scripts
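The templates above read the DRTR_PROV_* settings as ordinary shell variables, so at template-render time the required overrides can be supplied as environment exports. A sketch with hypothetical host names (values are illustrative only):

    # sketch: required overrides; host names are hypothetical
    export DRTR_PROV_ACTIVEPOD=drprov1.example.att.com
    export DRTR_PROV_STANDBYPOD=drprov2.example.att.com
    export DRTR_PROV_NODES='drnode1.example.att.com|drnode2.example.att.com'
    # non-production environments must also override the CNAME
    export DRTR_PROV_CNAME=feeds-drtr-test.example.att.com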
diff --git a/datarouter-prov/src/main/resources/misc/provcmd b/datarouter-prov/src/main/resources/misc/provcmd
new file mode 100644
index 0000000..63efa54
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/provcmd
@@ -0,0 +1,163 @@
+#!/bin/bash
+#
+#                        AT&T - PROPRIETARY
+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.
+#
+#          Copyright (c) 2013 AT&T Knowledge Ventures
+#              Unpublished and Not for Publication
+#                     All Rights Reserved
+#
+#  provcmd -- A script to interact with a provisioning server to manage the provisioning parameters.
+#     Set $VERBOSE to a non-empty string to see the curl commands as they are executed.
+#
+#  $Id: provcmd,v 1.6 2014/03/31 13:23:33 eby Exp $
+#
+
+PATH=/opt/app/datartr/bin:/bin:/usr/bin:$PATH
+PROVCMD="$0"
+export PATH PROVSRVR PROVCMD NOPROXY
+
+if [ ! -x /usr/bin/curl ]
+then
+	echo provcmd: curl is required for this tool.
+	exit 1
+fi
+optloop=
+while [ -z "$optloop" ]
+do
+	if [ "$1" == '-s' ]
+	then
+		shift
+		PROVSRVR="$1"
+		shift
+	elif [ "$1" == '-v' ]
+	then
+		shift
+		VERBOSE=x
+	elif [ "$1" == '-N' ]
+	then
+		shift
+		NOPROXY='?noproxy=1'
+	else
+		optloop=1
+	fi
+done
+if [ -z "$PROVSRVR" ]
+then
+	echo "provcmd: you need to specify the server, either via the -s option"
+	echo "         or by setting and exporting PROVSRVR"
+	exit 1
+fi
+
+CMD="$1"
+shift
+if [ "$CMD" == 'delete' ]
+then
+	if [ $# -gt 0 ]
+	then
+		for i
+		do
+			[ -n "$VERBOSE" ] && echo curl -4 -k -X DELETE "https://$PROVSRVR/internal/api/$i$NOPROXY"
+			curl -4 -k -X DELETE "https://$PROVSRVR/internal/api/$i$NOPROXY"
+		done
+		exit 0
+	fi
+elif [ "$CMD" == 'create' ]
+then
+	if [ $# -eq 2 ]
+	then
+		# create (with POST), then set the value
+		[ -n "$VERBOSE" ] && echo curl -4 -k -X POST --data '' "https://$PROVSRVR/internal/api/$1$NOPROXY"
+		curl -4 -k -X POST --data '' "https://$PROVSRVR/internal/api/$1$NOPROXY"
+		$PROVCMD set "$1" "$2"
+		exit 0
+	fi
+elif [ "$CMD" == 'get' ]
+then
+	if [ $# -eq 1 ]
+	then
+		# get
+		[ -n "$VERBOSE" ] && echo curl -4 -k "https://$PROVSRVR/internal/api/$1$NOPROXY"
+		curl -4 -k "https://$PROVSRVR/internal/api/$1$NOPROXY" 2>/dev/null | tr '|' '\012' | sort
+		exit 0
+	fi
+elif [ "$CMD" == 'set' ]
+then
+	if [ $# -ge 2 ]
+	then
+		p="$1"
+		shift
+		v=""
+		for i; do [ -n "$v" ] && v="$v|"; v="$v$i"; done
+		# set (with PUT)
+		ue=`urlencode "$v"`
+		NOPROXY=`echo $NOPROXY | tr '?' '&'`
+		[ -n "$VERBOSE" ] && echo curl -4 -k -X PUT "https://$PROVSRVR/internal/api/$p?val=$ue$NOPROXY"
+		curl -4 -k -X PUT "https://$PROVSRVR/internal/api/$p?val=$ue$NOPROXY"
+		exit 0
+	fi
+elif [ "$CMD" == 'append' ]
+then
+	if [ $# -ge 2 ]
+	then
+		p="$1"
+		shift
+		tmp=`curl -4 -k "https://$PROVSRVR/internal/api/$p$NOPROXY" 2>/dev/null`
+		$PROVCMD set "$p" "$tmp" "$@"
+		exit 0
+	fi
+elif [ "$CMD" == 'remove' ]
+then
+	if [ $# -eq 2 ]
+	then
+		p="$1"
+		rm="$2"
+		$PROVCMD get "$p" | grep -v "^$rm\$" > /tmp/pc$$
+		IFS=$'\r\n'
+		$PROVCMD set "$p" `cat /tmp/pc$$`
+		rm /tmp/pc$$
+		exit 0
+	fi
+fi
+
+# Some error somewhere - display usage
+cat <<'EOF'
+usage: provcmd [ -s server ] delete name1 [ name2 ... ]
+       provcmd [ -s server ] get name
+       provcmd [ -s server ] create name value
+       provcmd [ -s server ] set name value1 [ value2 ... ]
+       provcmd [ -s server ] append name value1 [ value2 ... ]
+       provcmd [ -s server ] remove name value
+
+delete - remove the parameters named name1, name2 ...
+get    - displays the parameter's value
+create - creates a new parameter
+set    - sets the value of an existing parameter
+append - appends the value to a list-based parameter
+remove - removes a value from a list-based parameter
+
+server - the provisioning server FQDN (feeds-drtr.web.att.com for production)
+
+Standard Parameters Names:
+------------------------------
+ACTIVE_POD
+DELIVERY_INIT_RETRY_INTERVAL
+DELIVERY_MAX_AGE
+DELIVERY_MAX_RETRY_INTERVAL
+DELIVERY_RETRY_RATIO
+LOGROLL_INTERVAL
+NODES
+PROV_ACTIVE_NAME
+PROV_AUTH_ADDRESSES
+PROV_AUTH_SUBJECTS
+PROV_DOMAIN
+PROV_MAXFEED_COUNT
+PROV_MAXSUB_COUNT
+PROV_NAME
+PROV_REQUIRE_CERT
+PROV_REQUIRE_SECURE
+STANDBY_POD
+EOF
+exit 1
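Typical invocations, matching the usage text above (the server and node names are illustrative):

    # sketch: common provcmd calls against a provisioning server
    export PROVSRVR=drprov1.example.att.com
    provcmd get NODES
    provcmd set LOGROLL_INTERVAL 300
    provcmd append NODES drnode3.example.att.com
    provcmd -s drprov1.example.att.com -v get PROV_MAXFEED_COUNT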
diff --git a/datarouter-prov/src/main/resources/misc/runreports b/datarouter-prov/src/main/resources/misc/runreports
new file mode 100644
index 0000000..009b749
--- /dev/null
+++ b/datarouter-prov/src/main/resources/misc/runreports
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+#                        AT&T - PROPRIETARY
+#          THIS FILE CONTAINS PROPRIETARY INFORMATION OF
+#        AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
+#             ACCORDANCE WITH APPLICABLE AGREEMENTS.
+#
+#          Copyright (c) 2013 AT&T Knowledge Ventures
+#              Unpublished and Not for Publication
+#                     All Rights Reserved
+#
+#  This script runs daily to generate reports files in the logs directory.
+#
+#  $Id: runreports,v 1.2 2013/11/06 16:23:54 eby Exp $
+#
+
+umask 0022
+
+JAVA_HOME=/opt/java/jdk/jdk180
+JAVA_OPTS="-Xms1G -Xmx4G"
+JAVA_CLASS=com.att.research.datarouter.reports.Report
+TZ=GMT0
+PATH=$JAVA_HOME/bin:/bin:/usr/bin
+CLASSPATH=`echo /opt/app/datartr/etc /opt/app/datartr/lib/*.jar | tr ' ' ':'`
+LOGDIR=/opt/app/datartr/logs
+YESTERDAY=`/bin/date --date=yesterday '+%Y%m%d'`
+
+export CLASSPATH JAVA_HOME JAVA_OPTS TZ PATH
+
+ID=`id -n -u`
+GRP=`id -n -g`
+if [ "$ID" != "datartr" ]
+then
+	echo runreports must be started as user datartr not $ID
+	exit 1
+fi
+if [ "$GRP" != "datartr" ]
+then
+	echo runreports must be started as group datartr not $GRP
+	exit 1
+fi
+if [ "`pgrep -u mysql mysqld`" = "" ]
+then
+	echo MySQL is not running.  It must be started before runreports
+	exit 1
+fi
+
+# Volume report
+java $JAVA_OPTS $JAVA_CLASS -t volume -o $LOGDIR/volume.csv.$YESTERDAY yesterday </dev/null >/dev/null
+
+# Subscriber report
+java $JAVA_OPTS $JAVA_CLASS -t subscriber -o $LOGDIR/subscriber.csv.$YESTERDAY yesterday </dev/null >/dev/null
+
+exit 0
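The script is intended to run daily as the datartr user; scheduling is outside this package, but a typical cron sketch would be (the time and install path are assumptions):

    # sketch: nightly report run as datartr (crontab entry is an assumption)
    5 0 * * * /opt/app/datartr/bin/runreports >/dev/null 2>&1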
diff --git a/datarouter-prov/src/main/resources/provserver.properties b/datarouter-prov/src/main/resources/provserver.properties
new file mode 100644
index 0000000..af5073e
--- /dev/null
+++ b/datarouter-prov/src/main/resources/provserver.properties
@@ -0,0 +1,48 @@
+#-------------------------------------------------------------------------------
+# ============LICENSE_START==================================================
+# * org.onap.dmaap
+# * ===========================================================================
+# * Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# * ===========================================================================
+# * Licensed under the Apache License, Version 2.0 (the "License");
+# * you may not use this file except in compliance with the License.
+# * You may obtain a copy of the License at
+# * 
+#  *      http://www.apache.org/licenses/LICENSE-2.0
+# * 
+#  * Unless required by applicable law or agreed to in writing, software
+# * distributed under the License is distributed on an "AS IS" BASIS,
+# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# * See the License for the specific language governing permissions and
+# * limitations under the License.
+# * ============LICENSE_END====================================================
+# *
+# * ECOMP is a trademark and service mark of AT&T Intellectual Property.
+# *
+#-------------------------------------------------------------------------------
+
+
+#Jetty Server properties
+com.att.research.datarouter.provserver.http.port           = 8080
+com.att.research.datarouter.provserver.https.port          = 8443
+com.att.research.datarouter.provserver.https.relaxation    = true
+com.att.research.datarouter.provserver.keymanager.password = changeit
+com.att.research.datarouter.provserver.keystore.type       = jks
+com.att.research.datarouter.provserver.keystore.path       = /opt/app/datartr/self_signed/keystore.jks
+
+com.att.research.datarouter.provserver.keystore.password   = changeit
+#com.att.research.datarouter.provserver.truststore.path     = /home/eby/dr2/misc/cacerts+1
+#com.att.research.datarouter.provserver.truststore.path     = /usr/lib/jvm/java-8-oracle/jre/lib/security/cacerts
+com.att.research.datarouter.provserver.truststore.path     = /opt/app/datartr/self_signed/cacerts.jks
+
+com.att.research.datarouter.provserver.truststore.password = changeit
+com.att.research.datarouter.provserver.accesslog.dir       = /opt/app/datartr/logs
+com.att.research.datarouter.provserver.spooldir            = /opt/app/datartr/spool
+#com.att.research.datarouter.provserver.dbscripts          = /home/eby/dr2/cvs/datarouter/prov/misc/
+com.att.research.datarouter.provserver.logretention        = 30
+
+# Database access
+com.att.research.datarouter.db.driver   = com.mysql.jdbc.Driver
+com.att.research.datarouter.db.url      = jdbc:mysql://172.18.0.2:3306/datarouter
+com.att.research.datarouter.db.login    = datarouter
+com.att.research.datarouter.db.password = datarouter
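A quick liveness check against the ports configured here is to query the same /internal/api path that provcmd wraps; -k matches the self-signed keystore defaults above, and whether the plain-HTTP port also answers depends on the PROV_REQUIRE_CERT/PROV_REQUIRE_SECURE parameters:

    # sketch: confirm the prov server answers on the configured HTTPS port
    curl -4 -k "https://localhost:8443/internal/api/NODES"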

diff --git a/datarouter-prov/src/main/resources/startup.sh b/datarouter-prov/src/main/resources/startup.sh
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/datarouter-prov/src/main/resources/startup.sh
diff --git a/datarouter-prov/src/main/resources/subscriber.jar b/datarouter-prov/src/main/resources/subscriber.jar
new file mode 100644
index 0000000..c8e4775
--- /dev/null
+++ b/datarouter-prov/src/main/resources/subscriber.jar
Binary files differ