Update copyright years and controller ENV-based config handling

Change-Id: Ic12aa439a03f19c7a06a536368a8d2a1f26855aa
Issue-ID: DCAEGEN2-271
Signed-off-by: Ladue, David (dl3158) <dl3158@att.com>
diff --git a/README.md b/README.md
index bb79a0f..39cdda6 100644
--- a/README.md
+++ b/README.md
@@ -1,8 +1,6 @@
 DCAE SNMPTrapReceiver
 ======================================
 
-FIXME: placeholder for dcae_snmptrap application
-
 This is the repository for SNMP Trap Receiver for Open DCAE. 
 
 ### Build Instructions
diff --git a/src/mod/trapd_exit.py b/bin/mod/trapd_exit.py
similarity index 96%
rename from src/mod/trapd_exit.py
rename to bin/mod/trapd_exit.py
index e3e9364..a7ffc8a 100644
--- a/src/mod/trapd_exit.py
+++ b/bin/mod/trapd_exit.py
@@ -1,7 +1,7 @@
 # ============LICENSE_START=======================================================
 # org.onap.dcae
 # ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/bin/mod/trapd_file_utils.py b/bin/mod/trapd_file_utils.py
new file mode 100644
index 0000000..2da099b
--- /dev/null
+++ b/bin/mod/trapd_file_utils.py
@@ -0,0 +1,220 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+"""
+"""
+
+__docformat__ = 'restructuredtext'
+
+# basics
+import datetime
+import errno
+import inspect
+import json
+import logging
+import logging.handlers
+import os
+import sys
+import string
+import time
+import traceback
+import unicodedata
+
+# dcae_snmptrap
+import trapd_settings as tds
+from trapd_logging import ecomp_logger, stdout_logger
+from trapd_exit import cleanup_and_exit
+
+prog_name = os.path.basename(__file__)
+
+
+# # # # # # # # # # # # # # # # # # #
+# fx: roll_all_logs -> roll all logs to timestamped backup
+# # # # # # # # # # ## # # # # # # #
+
+
+def roll_all_logs():
+    """
+    roll all active logs to timestamped version, open new one
+    based on frequency defined in files.roll_frequency
+    """
+
+    # first roll all the eelf files
+    # NOTE:  this will go away when onap logging is standardized/available
+    try:
+        # open various ecomp logs - if any fails, exit
+        for fd in [tds.eelf_error_fd, tds.eelf_debug_fd, tds.eelf_audit_fd, \
+                tds.eelf_metrics_fd, tds.arriving_traps_fd, tds.json_traps_fd]:
+            fd.close()
+
+        roll_file(tds.eelf_error_file_name)
+        roll_file(tds.eelf_debug_file_name)
+        roll_file(tds.eelf_audit_file_name)
+        roll_file(tds.eelf_metrics_file_name)
+
+    except Exception as e:
+        msg = "Error closing logs: " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    reopened_successfully = open_eelf_logs()
+    if not reopened_successfully:
+        msg = "Error re-opening EELF logs during roll-over to timestamped versions - EXITING"
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    # json log
+    roll_file(tds.json_traps_filename)
+
+    try:
+        tds.json_traps_fd = open_file(tds.json_traps_filename)
+    except Exception as e:
+        msg = ("Error opening json_log %s : %s" % (json_traps_filename, str(e)))
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    # arriving trap log
+    roll_file(tds.arriving_traps_filename)
+
+    try:
+        tds.arriving_traps_fd = open_file(tds.arriving_traps_filename)
+    except Exception as e:
+        msg = ("Error opening arriving traps %s : %s" % (arriving_traps_filename, str(e)))
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    
+
+# # # # # # # # # # # # # # # # # # #
+# fx: open_eelf_logs -> open EELF-format logs (until standard
+#     is released for python via LOG-161)
+# # # # # # # # # # ## # # # # # # #
+
+
+def open_eelf_logs():
+    """
+    open the EELF logs (error, debug, audit, metrics)
+    """
+
+    try:
+        # open various ecomp logs - if any fails, exit
+
+        tds.eelf_error_file_name = (tds.c_config['files.eelf_base_dir'] + "/" + tds.c_config['files.eelf_error'])
+        tds.eelf_error_fd = open_file(tds.eelf_error_file_name)
+
+    except Exception as e:
+        msg = "Error opening eelf error log : " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+
+    try:
+        tds.eelf_debug_file_name = (tds.c_config['files.eelf_base_dir'] + "/" + tds.c_config['files.eelf_debug'])
+        tds.eelf_debug_fd = open_file(tds.eelf_debug_file_name)
+
+    except Exception as e:
+        msg = "Error opening eelf debug log : " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    try:
+        tds.eelf_audit_file_name = (tds.c_config['files.eelf_base_dir'] + "/" + tds.c_config['files.eelf_audit'])
+        tds.eelf_audit_fd = open_file(tds.eelf_audit_file_name)
+    except Exception as e:
+        msg = "Error opening eelf audit log : " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    try:
+        tds.eelf_metrics_file_name = (tds.c_config['files.eelf_base_dir'] + "/" + tds.c_config['files.eelf_metrics'])
+        tds.eelf_metrics_fd = open_file(tds.eelf_metrics_file_name)
+    except Exception as e:
+        msg = "Error opening eelf metric log : " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    return True
+
+# # # # # # # # # # # # # # # # # # #
+# fx: roll_file -> move provided filename to timestamped version
+# # # # # # # # # # ## # # # # # # #
+
+
+def roll_file(_loc_file_name):
+    """
+    move active file to timestamped archive
+    """
+
+    _file_name_suffix = "%s" % (datetime.datetime.fromtimestamp(time.time()).
+                                strftime('%Y-%m-%dT%H:%M:%S'))
+
+    _loc_file_name_bak = _loc_file_name + '.' + _file_name_suffix
+
+    # roll existing file if present
+    if os.path.isfile(_loc_file_name):
+        try:
+            os.rename(_loc_file_name, _loc_file_name_bak)
+        except:
+            _msg = ("ERROR: Unable to rename %s to %s"
+                                % (_loc_file_name,
+                                   _loc_file_name_bak))
+            ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_CRIT, tds.CODE_GENERAL, _msg)
+
+
+# # # # # # # # # # # # #
+# fx: open_file
+# # # # # # # # # # # # #
+
+
+def open_file(_loc_file_name):
+    """
+    open _loc_file_name, return file handle
+    """
+
+
+    try:
+        # open append mode just in case so nothing is lost, but should be
+        # non-existent file
+        _loc_fd = open(_loc_file_name, 'a')
+        return _loc_fd
+    except Exception as e:
+        msg = "Error opening " + _loc_file_name + " append mode - " + str(e)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+
+# # # # # # # # # # # # #
+# fx: close_file
+# # # # # # # # # # # # #
+    """
+    close _loc_file_name, return True with success, False otherwise
+    """
+
+
+def close_file(_loc_fd, _loc_filename):
+
+    try:
+        _loc_fd.close()
+        return True
+    except Exception as e:
+        msg = "Error closing %s : %s - results indeterminate" % (_loc_filename, str(e))
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_FATAL, tds.CODE_GENERAL, msg)
+        return False
diff --git a/bin/mod/trapd_get_cbs_config.py b/bin/mod/trapd_get_cbs_config.py
new file mode 100644
index 0000000..775e0b2
--- /dev/null
+++ b/bin/mod/trapd_get_cbs_config.py
@@ -0,0 +1,116 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+"""
+Look for the CBS (config binding service) broker and return the application
+config; if it is not present, fall back to the environment variable
+CBS_SIM_JSON, which names a JSON file with the equivalent config (typically
+used for testing).
+"""
+
+__docformat__ = 'restructuredtext'
+
+import json
+import os
+import sys
+import string
+import time
+import traceback
+import collections
+
+import trapd_settings as tds
+from onap_dcae_cbs_docker_client.client import get_config
+from trapd_exit import cleanup_and_exit
+from trapd_logging import stdout_logger
+
+prog_name = os.path.basename(__file__)
+
+
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+# function: get_cbs_config
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+
+
+def get_cbs_config():
+    """
+    Get config values from CBS or JSON file (fallback)
+    :Parameters:
+      none
+    :Exceptions:
+    """
+
+    tds.c_config = {}
+
+    # See if we are in a config binding service (CBS) /controller environment
+    try:
+        tds.c_config = get_config()
+        if tds.c_config == {}:
+            msg = "Unable to fetch CBS config or it is erroneously empty - trying override/simulator config"
+            stdout_logger(msg)
+    
+    # if no CBS present, default to JSON config specified via CBS_SIM_JSON env var
+    except Exception as e:
+        msg = "ONAP controller not present, trying json config override via CBS_SIM_JSON env variable"
+        stdout_logger(msg)
+    
+        try:
+            _cbs_sim_json_file = os.getenv("CBS_SIM_JSON", "None")
+        except Exception as e:
+            msg = "CBS_SIM_JSON not defined - FATAL ERROR, exiting"
+            stdout_logger(msg)
+            cleanup_and_exit(1, tds.pid_file_name)
+    
+        if _cbs_sim_json_file == "None":
+            msg = "CBS_SIM_JSON not defined - FATAL ERROR, exiting"
+            stdout_logger(msg)
+            cleanup_and_exit(1, tds.pid_file_name)
+        else:
+            msg = ("ONAP controller override specified via CBS_SIM_JSON: %s" % _cbs_sim_json_file )
+            stdout_logger(msg)
+            try:
+                tds.c_config = json.load(open(_cbs_sim_json_file))
+            except Exception as e:
+                msg = "Unable to load CBS_SIM_JSON " + _cbs_sim_json_file + " (invalid json?) - FATAL ERROR, exiting"
+                stdout_logger(msg)
+                cleanup_and_exit(1, tds.pid_file_name)
+
+    # recalc timeout, set default if not present
+    try:
+        tds.timeout_seconds = tds.c_config['publisher.http_timeout_milliseconds'] / 1000.0
+    except:
+        tds.timeout_seconds = 1.5
+
+    # recalc seconds_between_retries, set default if not present
+    try:
+        tds.seconds_between_retries = tds.c_config['publisher.http_milliseconds_between_retries'] / 1000.0
+    except:
+        tds.seconds_between_retries = .750
+
+    # recalc min_severity_to_log, set default if not present
+    try:
+        tds.minimum_severity_to_log = tds.c_config['files.minimum_severity_to_log']
+    except:
+        tds.minimum_severity_to_log = 3
+
+    try:
+        tds.publisher_retries = tds.c_config['publisher.http_retries']
+    except:
+        tds.publisher_retries = 3
+
+    return True
diff --git a/src/mod/trapd_http_session.py b/bin/mod/trapd_http_session.py
similarity index 80%
rename from src/mod/trapd_http_session.py
rename to bin/mod/trapd_http_session.py
index f087423..2e0b77e 100644
--- a/src/mod/trapd_http_session.py
+++ b/bin/mod/trapd_http_session.py
@@ -1,7 +1,7 @@
 # ============LICENSE_START=======================================================
 # org.onap.dcae
 # ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,24 +25,24 @@
 
 __docformat__ = 'restructuredtext'
 
-import logging
 import os
 import requests
-import string
-import time
 import traceback
 
+# snmptrapd
+import trapd_settings
+
 prog_name = os.path.basename(__file__)
 
 
 # # # # # # # # # # # # #
 # fx: init_session_obj
 # # # # # # # # # # # # #
-def init_session_obj(_dcae_logger):
+def init_session_obj():
     """
     Initializes and returns a http request session object for later use
     :Parameters:
-      dcae logger for diagnostics
+      none
     :Exceptions:
       session object creation
         this function will throw an exception if unable to create
@@ -54,9 +54,8 @@
     """
 
     try:
-        s = requests.Session()
-        _dcae_logger.debug("New requests session has been initialized: %s" % s)
-    except:
-        _dcae_logger.error("Failed to create new requests session")
+        _loc_session = requests.Session()
+    except Exception as e:
+        return None
 
-    return s
+    return _loc_session
diff --git a/bin/mod/trapd_logging.py b/bin/mod/trapd_logging.py
new file mode 100644
index 0000000..435f03e
--- /dev/null
+++ b/bin/mod/trapd_logging.py
@@ -0,0 +1,199 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+"""
+"""
+
+__docformat__ = 'restructuredtext'
+
+# basics
+import datetime
+import errno
+import inspect
+import json
+import logging
+import logging.handlers
+import os
+import sys
+import string
+import time
+import traceback
+import unicodedata
+
+import trapd_settings as tds
+
+prog_name = os.path.basename(__file__)
+
+
+# # # # # # # # # # # # # # # # # # #
+# fx: ecomp_logger -> log in eelf format until standard 
+#     is released for python via LOG-161
+# # # # # # # # # # ## # # # # # # #
+
+def ecomp_logger(_log_type, _sev, _error_code, _msg):
+    """
+    Log to ecomp-style logfiles.  Logs include:
+
+    Note:  this will be updated when https://jira.onap.org/browse/LOG-161 
+    is closed/available; until then, we resort to a generic format with
+    valuable info in "extra=" field (?)
+
+    :Parameters:
+       _msg - the message to be logged
+    :Exceptions:
+       none
+    :Keywords:
+       eelf logging 
+    :Log Styles:
+
+       :error.log:
+
+       if CommonLogger.verbose: print("using CommonLogger.ErrorFile")
+          self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+          % (requestID, threadID, serviceName, partnerName, targetEntity, targetServiceName,
+             errorCategory, errorCode, errorDescription, detailMessage))
+
+       error.log example:
+
+       2018-02-20T07:21:34,007+00:00||MainThread|snmp_log_monitor||||FATAL|900||Tue Feb 20 07:21:11 UTC 2018 CRITICAL: [a0cae74e-160e-11e8-8f9f-0242ac110002] ALL publish attempts failed to DMAPP server: dcae-mrtr-zltcrdm5bdce1.1dff83.rdm5b.tci.att.com, topic: DCAE-COLLECTOR-UCSNMP, 339 trap(s) not published in epoch_serno range: 15191112530000 - 15191112620010
+
+       :debug.log:
+
+       if CommonLogger.verbose: print("using CommonLogger.DebugFile")
+          self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+          % (requestID, threadID, serverName, serviceName, instanceUUID, upperLogLevel,
+          severity, serverIPAddress, server, IPAddress, className, timer, detailMessage))
+
+       debug.log example:
+
+         none available
+
+       :audit.log:
+
+       if CommonLogger.verbose: print("using CommonLogger.AuditFile")
+       endAuditTime, endAuditMsec = self._getTime()
+       if self._begTime is not None:
+          d = {'begtime': self._begTime, 'begmsecs': self._begMsec, 'endtime': endAuditTime,
+               'endmsecs': endAuditMsec}
+       else:
+          d = {'begtime': endAuditTime, 'begmsecs': endAuditMsec, 'endtime': endAuditTime,
+               'endmsecs': endAuditMsec}
+    
+       self._logger.log(50, '%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+       % (requestID, serviceInstanceID, threadID, serverName, serviceName, partnerName,
+       statusCode, responseCode, responseDescription, instanceUUID, upperLogLevel,
+       severity, serverIPAddress, timer, server, IPAddress, className, unused,
+       processKey, customField1, customField2, customField3, customField4,
+       detailMessage), extra=d)
+
+
+       :metrics.log:
+
+          self._logger.log(50,'%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+          % (requestID, serviceInstanceID, threadID, serverName, serviceName, partnerName,
+          targetEntity, targetServiceName, statusCode, responseCode, responseDescription,
+          instanceUUID, upperLogLevel, severity, serverIPAddress, timer, server,
+          IPAddress,
+          className, unused, processKey, targetVirtualEntity, customField1, customField2,
+          customField3, customField4, detailMessage), extra=d)
+
+       metrics.log example:
+
+          none available
+
+
+    """
+
+    unused = ""
+
+    # ct = time.time()
+    # lt = time.localtime(ct)
+    # t_hman = time.strftime(DateFmt, lt)
+    # t_ms = (ct - int(ct)) * 1000
+    # above were various attempts at setting time string found in other
+    # libs; instead, let's keep it real:
+    t_out = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S,%f")[:-3]
+    calling_fx = inspect.stack()[1][3]
+
+    # FIXME: this entire module is a hack to override concept of prog logging 
+    #        written across multiple files (???), making diagnostics IMPOSSIBLE!  
+    #        Hoping to leverage ONAP logging libraries & standards when available
+
+    # catch invalid log type
+    if _log_type < 1 or _log_type > 4:
+        msg = ("INVALID log type: %s " % _log_type )
+        _out_rec = ("%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" \
+        % ((t_out, calling_fx, "snmptrapd", unused, unused, unused, tds.SEV_TYPES[_sev], _error_code, unused, (msg + _msg))))
+        tds.eelf_error_fd.write('%s\n' % str(_out_rec))
+        return False
+
+    if _sev >= tds.minimum_severity_to_log:
+        # log to appropriate eelf log (different files ??)
+        if _log_type == tds.LOG_TYPE_ERROR:
+            _out_rec = ('%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+            % ((t_out, calling_fx, "snmptrapd", unused, unused, unused, tds.SEV_TYPES[_sev], _error_code, unused, _msg)))
+            tds.eelf_error_fd.write('%s\n' % str(_out_rec))
+        elif _log_type == tds.LOG_TYPE_AUDIT:
+            # log message in AUDIT format
+            _out_rec = ('%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+            % ((t_out, calling_fx, "snmptrapd", unused, unused, unused, tds.SEV_TYPES[_sev], _error_code, unused, _msg)))
+            tds.eelf_audit_fd.write('%s\n' % str(_out_rec))
+        elif _log_type == tds.LOG_TYPE_METRICS:
+            # log message in METRICS format
+            _out_rec = ('%s|%s|%s|%s|%s|%s|%s|%s|%s|%s' \
+            % ((t_out, calling_fx, "snmptrapd", unused, unused, unused, tds.SEV_TYPES[_sev], _error_code, unused, _msg)))
+            tds.eelf_metrics_fd.write('%s\n' % str(_out_rec))
+    
+        # DEBUG *AND* others - there *MUST BE* a single time-sequenced log for diagnostics!
+        # FIXME: too much I/O !!!
+        # always write to debug; we need ONE logfile that has time-sequence full view !!!
+        # if (_log_type == tds.LOG_TYPE_DEBUG and _sev >= tds.current_min_sev_log_level) or (_log_type != tds.LOG_TYPE_DEBUG):
+        
+        # log message in DEBUG format
+        _out_rec = ("%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" \
+        % ((t_out, calling_fx, "snmptrapd", unused, unused, unused, tds.SEV_TYPES[_sev], _error_code, unused, _msg)))
+        tds.eelf_debug_fd.write('%s\n' % str(_out_rec))
+
+    return True
+
+# # # # # # # # # # # # #
+# fx: stdout_logger
+# # # # # # # # # # # # #
+
+
+def stdout_logger(_msg):
+    """
+    Log info/errors to stdout.  This is done:
+      - for critical runtime issues
+
+    :Parameters:
+      _msg
+         message to print
+    :Exceptions:
+      none
+    :Keywords:
+      log stdout
+    :Variables:
+    """
+
+    t_out = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S,%f")[:-3]
+    # calling_fx = inspect.stack()[1][3]
+
+    print('%s %s' % ( t_out, _msg))
diff --git a/src/mod/trapd_runtime_pid.py b/bin/mod/trapd_runtime_pid.py
similarity index 92%
rename from src/mod/trapd_runtime_pid.py
rename to bin/mod/trapd_runtime_pid.py
index 96594e6..c6ef76e 100644
--- a/src/mod/trapd_runtime_pid.py
+++ b/bin/mod/trapd_runtime_pid.py
@@ -1,7 +1,7 @@
 # ============LICENSE_START=======================================================
 # org.onap.dcae
 # ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -62,7 +62,7 @@
     #     print("Error saving PID file %s :" % _pid_file_name)
     #     return False
     else:
-        print("Runtime PID file:    %s" % _pid_file_name)
+        # print("Runtime PID file:    %s" % _pid_file_name)
         return True
 
 
@@ -85,7 +85,10 @@
     try:
         if os.path.isfile(_pid_file_name):
             os.remove(_pid_file_name)
-        return True
+            return True
+        else:
+            return False
+
     except IOError:
         print("Error removing Runtime PID file:    %s" % _pid_file_name)
         return False
diff --git a/bin/mod/trapd_settings.py b/bin/mod/trapd_settings.py
new file mode 100644
index 0000000..735b68c
--- /dev/null
+++ b/bin/mod/trapd_settings.py
@@ -0,0 +1,168 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+"""
+"""
+
+__docformat__ = 'restructuredtext'
+
+def init():
+
+    # <CONSUL config cache>
+    # consul config or simulated via json file
+    global c_config
+    c_config = None
+    # </CONSUL config cache>
+
+    # <DNS cache>
+    #
+    #     dns_cache_ip_to_name
+    #        key [ip address] -> fqdn
+    #     dns_cache_ip_expires
+    #        key [ip address] -> epoch time this entry expires and must be reloaded
+    global dns_cache_ip_to_name
+    dns_cache_ip_to_name = {}
+    global dns_cache_ip_expires
+    dns_cache_ip_expires = {}
+    # </DNS cache>
+
+    # <EELF logs>
+    global eelf_error_file_name
+    eelf_error_file_name = ""
+    global eelf_error_fd
+    eelf_error_fd = None
+
+    global eelf_debug_file_name
+    eelf_debug_file_name = ""
+    global eelf_debug_fd
+    eelf_debug_fd = None
+
+    global eelf_audit_file_name
+    eelf_audit_file_name = ""
+    global eelf_audit_fd
+    eelf_audit_fd = None
+
+    global eelf_metrics_file_name
+    eelf_metrics_file_name = ""
+    global eelf_metrics_fd
+    eelf_metrics_fd = None
+
+    global last_minute
+    last_minute = 0
+    global last_hour
+    last_hour = 0
+    global last_day
+    last_day = 0
+    # </EELF logs>
+
+    # <trap dictionary and corresponding strings for publish>
+    global first_trap
+    first_trap = True
+    global first_varbind
+    first_varbind = True
+    global trap_dict
+    trap_dict = {}
+    global all_traps_str
+    all_traps_str = ""
+    global all_vb_json_str
+    all_vb_json_str = ""
+    global trap_uuids_in_buffer
+    trap_uuids_in_buffer = ""
+    # </trap and varbind dictionaries> 
+
+    # <publish timers and counters> 
+    global traps_in_epoch
+    traps_in_epoch = 0
+    global last_epoch_second
+    last_epoch_second = 0
+    global traps_since_last_publish
+    traps_since_last_publish = 0
+    global last_pub_time
+    last_pub_time = 0
+    global milliseconds_since_last_publish
+    milliseconds_since_last_publish = 0
+    global timeout_seconds
+    timeout_seconds = 1.5
+    global seconds_between_retries
+    seconds_between_retries = 2
+    global publisher_retries
+    publisher_retries = 2
+    # </publish timers and counters> 
+
+    # <publish http request session (persistent as much as possible)> 
+    global http_requ_session
+    http_requ_session = None
+    # </publish http request session> 
+
+    # <json log of traps published>
+    global json_traps_filename
+    json_traps_filename = ""
+    global json_traps_fd
+    json_traps_fd = None
+    # </json log of traps published>
+
+    # <log of arriving traps >
+    global arriving_traps_filename
+    arriving_traps_filename = ""
+    global arriving_traps_fd
+    arriving_traps_fd = None
+    # </log of arriving traps>
+
+    # <runtime PID>
+    global pid_file_name
+    pid_file_name = ""
+
+    # <logging types and severities>
+    global LOG_TYPES
+    global LOG_TYPE_NONE
+    global LOG_TYPE_ERROR
+    global LOG_TYPE_DEBUG
+    global LOG_TYPE_AUDIT
+    global LOG_TYPE_METRICS
+    LOG_TYPES = ["none", "ERROR", "DEBUG", "AUDIT", "METRICS"]
+    LOG_TYPE_NONE = 0
+    LOG_TYPE_ERROR = 1
+    LOG_TYPE_DEBUG = 2
+    LOG_TYPE_AUDIT = 3
+    LOG_TYPE_METRICS = 4
+
+    global SEV_TYPES
+    global SEV_NONE
+    global SEV_DETAILED
+    global SEV_INFO
+    global SEV_WARN
+    global SEV_CRIT
+    global SEV_FATAL
+    SEV_TYPES = ["none", "DETAILED", "INFO", "WARN", "CRITICAL", "FATAL"]
+    SEV_NONE = 0
+    SEV_DETAILED = 1
+    SEV_INFO = 2
+    SEV_WARN = 3
+    SEV_CRIT = 4
+    SEV_FATAL = 5
+
+    global CODE_GENERAL
+    CODE_GENERAL="100"
+
+    global minimum_severity_to_log
+    minimum_severity_to_log=3
+
+
+    # </logging types and severities>
diff --git a/bin/snmptrapd.py b/bin/snmptrapd.py
new file mode 100644
index 0000000..dde4e39
--- /dev/null
+++ b/bin/snmptrapd.py
@@ -0,0 +1,766 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+#
+"""
+dcae_snmptrapd is responsible for SNMP trap receipt and publishing activities.
+Its behavior is controlled by CBS (config binding service) using a
+JSON construct obtained via a "get_config" call or (for testing/standalone
+purposes) a file specified using the env variable "CBS_SIM_JSON".
+
+As traps arrive, they are decomposed and transformed into a JSON message, which
+is published to a dmaap instance defined by the controller.
+
+:Parameters:
+    usage:  snmptrapd.py [-v]
+:Keywords:
+    onap dcae snmp trap publish dmaap
+"""
+
+__docformat__ = 'restructuredtext'
+
+# basics
+import argparse
+import array
+import asyncio
+from collections import Counter
+import datetime
+import errno
+import inspect
+import json
+import logging
+import logging.handlers
+import os
+import pprint
+import requests
+import re
+import sys
+import signal
+import string
+import socket
+import time
+import traceback
+import unicodedata
+import uuid as uuid_mod
+
+# pysnmp
+from pysnmp.entity import engine, config
+from pysnmp.carrier.asyncio.dgram import udp, udp6
+# from pysnmp.carrier.asyncore.dgram import udp
+from pysnmp.entity.rfc3413 import ntfrcv
+from pysnmp.proto.api import v2c
+
+# dcae_snmptrap
+import trapd_settings as tds
+from trapd_runtime_pid import save_pid, rm_pid
+from trapd_get_cbs_config import get_cbs_config
+from trapd_exit import cleanup_and_exit
+from trapd_http_session import init_session_obj
+
+from trapd_file_utils import roll_all_logs, open_eelf_logs, roll_file, open_file, close_file
+from trapd_logging import ecomp_logger, stdout_logger
+
+prog_name = os.path.basename(__file__)
+verbose = False
+
+# # # # # # # # # # #
+# fx: usage_err
+# # # # # # # # # # #
+
+
+def usage_err():
+    """
+    Notify of incorrect (argument) usage
+    :Parameters:
+       none
+    :Exceptions:
+       none
+    :Keywords:
+       usage args
+    """
+
+    print('Incorrect usage invoked.  Correct usage:')
+    print('  %s [-v]' % prog_name)
+    cleanup_and_exit(1, "undefined")
+
+
+# # # # # # # # # # # # # # # # # # #
+# fx: load_all_configs
+# # # # # # # # # # ## # # # # # # #
+
+
+def load_all_configs(_signum, _frame):
+    """
+    Calls individual functions to read various config files required.  This
+    function is called directly (e.g. at startup) and is also registered
+    with signal handling (e.g. kill -sigusr1 <pid>)
+
+    :Parameters:
+      signum and frame (only present when called via signal to running process)
+    :Exceptions:
+      none
+    :Keywords:
+      config files
+    :Variables:
+      yaml_conf_file
+      rs
+    """
+
+    if int(_signum) != 0:
+        msg = ("received signal %s at frame %s; re-reading configs"
+                         % (_signum, _frame))
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    # Initialize dmaap requests session object. Close existing session
+    # if applicable.
+    if tds.http_requ_session is not None:
+        tds.http_requ_session.close()
+
+    tds.http_requ_session = init_session_obj()
+    if tds.http_requ_session is None:
+        msg = "Unable to create new http session - FATAL ERROR, exiting"
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_FATAL, tds.CODE_GENERAL, msg)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+    # re-request config from config binding service 
+    # (either broker, or json file override)
+    if not get_cbs_config():
+        msg = "error (re)loading CBS config - FATAL ERROR, exiting"
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+
+# # # # # # # # # # # # #
+# fx: log_all_arriving_traps
+# # # # # # # # # # # # #
+
+
+def log_all_arriving_traps():
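+    """
+    Roll logs when the minute/hour/day boundary defined by files.roll_frequency
+    is crossed, then append the latest arriving trap to the arriving traps log.
+    """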
+
+
+    # roll logs as needed/defined in files.roll_frequency
+    if tds.c_config['files.roll_frequency'] == "minute":
+        curr_minute = datetime.datetime.now().minute
+        if curr_minute != tds.last_minute:
+            roll_all_logs()
+            tds.last_minute = curr_minute
+    elif tds.c_config['files.roll_frequency'] == "hour":
+        curr_hour = datetime.datetime.now().hour
+        if curr_hour != tds.last_hour:
+            roll_all_logs()
+            tds.last_hour = curr_hour
+    else:
+        # otherwise, assume daily roll
+        curr_day = datetime.datetime.now().day
+        if curr_day != tds.last_day:
+            roll_all_logs()
+            tds.last_day = curr_day
+
+    # now log latest arriving trap
+    try:
+        # going for:
+        #    1520971776 Tue Mar 13 16:09:36 2018; 1520971776 2018-03-13 16:09:36 DCAE-COLLECTOR-UCSNMP 15209717760049 .1.3.6.1.4.1.2636.4.1.6 gfpmt5pcs10.oss.att.com 135.91.10.139 12.123.1.240 12.123.1.240 2 varbinds: [0] .1.3.6.1.2.1.1.3.0 {10} 1212058366 140 days, 6:49:43.66 [1] .1.3.6.1.6.3.1.1.4.1.0 {6} .1.3.6.1.4.1.2636.4.1.6 [2] .1.3.6.1.4.1.2636.3.1.15.1.1.2.4.0.0 {2} 2 [3] .1.3.6.1.4.1.2636.3.1.15.1.2.2.4.0.0 {2} 4 [4] .1.3.6.1.4.1.2636.3.1.15.1.3.2.4.0.0 {2} 0 [5] .1.3.6.1.4.1.2636.3.1.15.1.4.2.4.0.0 {2} 0 [6] .1.3.6.1.4.1.2636.3.1.15.1.5.2.4.0.0 {4} PEM 3 [7] .1.3.6.1.4.1.2636.3.1.15.1.6.2.4.0.0 {2} 7 [8] .1.3.6.1.4.1.2636.3.1.15.1.7.2.4.0.0 {2} 4 [9] .1.3.6.1.6.3.18.1.3.0 {7} 12.123.1.240
+
+        tds.arriving_traps_fd.write('%s %s; %s %s %s %s %s %s %s %s %s %s %s\n' % 
+        (tds.trap_dict["time received"],
+        time.strftime("%a %b %d %H:%M:%S %Y", time.localtime(time.time())),
+        time.strftime("%a %b %d %H:%M:%S %Y", time.localtime(tds.trap_dict["time received"])),
+        tds.trap_dict["trap category"],
+        tds.trap_dict["epoch_serno"],
+        tds.trap_dict["notify OID"],
+        tds.trap_dict["agent name"],
+        tds.trap_dict["agent address"],
+        tds.trap_dict["cambria.partition"],
+        tds.trap_dict["protocol version"],
+        tds.trap_dict["sysUptime"],
+        tds.trap_dict["uuid"],
+        tds.all_vb_json_str))
+
+    except Exception as e:
+        msg = "Error writing to %s : %s - arriving trap %s NOT LOGGED" %(tds.arriving_traps_filename, str(e), tds.trap_dict["uuid"])
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_CRIT, tds.CODE_GENERAL, msg)
+
+
+# # # # # # # # # # # # #
+# fx: log_published_messages
+# # # # # # # # # # # # #
+
+
+def log_published_messages(_post_data_enclosed):
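+    """
+    Append the json payload that was just published to dmaap to the json traps
+    log (tds.json_traps_fd).
+    """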
+
+    # FIXME: should keep data dictionary of Fd's open, and reference those vs.
+    #        repeatedly opening append-mode
+
+    msg = "adding trap UUID %s to json log" % tds.trap_dict["uuid"]
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+
+    try:
+        tds.json_traps_fd.write('%s\n' % _post_data_enclosed)
+        msg = "successfully logged json for %s to %s" % (tds.trap_dict["uuid"], tds.json_traps_filename)
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+    except Exception as e:
+        msg = "Error writing to %s : %s - trap %s NOT LOGGED" %(tds.json_traps_filename, str(e), tds.trap_dict["uuid"])
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_CRIT, tds.CODE_GENERAL, msg)
+
+
+# # # # # # # # # # # # #
+# fx: post_dmaap
+# # # # # # # # # # # # #
+
+
+def post_dmaap():
+    """
+    Publish trap data in json format to dmaap
+    :Parameters:
+    :Exceptions:
+      none
+    :Keywords:
+      http post dmaap json message
+    :Variables:
+    """
+
+    http_headers = {"Content-type": "application/json"}
+
+    if tds.http_requ_session is None:
+        msg = "tds.http_requ_session is None - getting new (%s)" % tds.http_requ_session
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+        tds.http_requ_session = init_session_obj()
+
+    # if only 1 trap, ship as-is
+    if tds.traps_since_last_publish == 1:
+        post_data_enclosed = tds.all_traps_str
+    else:
+        # otherwise, add brackets around package
+        post_data_enclosed = '[' + tds.all_traps_str + ']'
+
+    msg = "post_data_enclosed: %s" % (post_data_enclosed)
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    k = 0
+    dmaap_pub_success = False
+
+    while not dmaap_pub_success and k < (int(tds.c_config['publisher.http_retries'])):
+        try:
+            if tds.c_config['streams_publishes']['sec_fault_unsecure']['aaf_username'] in ("", None):
+                msg = "%d trap(s) : %s - attempt %d (unsecure)" % (tds.traps_since_last_publish, tds.trap_uuids_in_buffer, k)
+                ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+                http_resp = tds.http_requ_session.post(tds.c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url'], post_data_enclosed,
+                                                        headers=http_headers,
+                                                        timeout=tds.timeout_seconds)
+            else:
+                msg = "%d trap(s) : %s - attempt %d (secure)" % (tds.traps_since_last_publish, tds.trap_uuids_in_buffer, k)
+                ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+                http_resp = tds.http_requ_session.post(tds.c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url'], post_data_enclosed,
+                                                        auth=(tds.c_config['streams_publishes']['sec_fault_unsecure']['aaf_username'],
+                                                              tds.c_config['streams_publishes']['sec_fault_unsecure']['aaf_password']),
+                                                        headers=http_headers,
+                                                        timeout=tds.timeout_seconds)
+
+            if http_resp.status_code == requests.codes.ok:
+                # msg = "%d trap(s) : %s successfully published - response from %s: %d %s" % (traps_since_last_publish, trap_uuids_in_buffer, ((c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url']).split('/')[2][:-5]) ,http_resp.status_code, http_resp.text)
+                msg = "%d trap(s) successfully published: %s" % (tds.traps_since_last_publish, tds.trap_uuids_in_buffer)
+                ecomp_logger(tds.LOG_TYPE_METRICS, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+                log_published_messages(post_data_enclosed)
+                tds.last_pub_time = time.time()
+                dmaap_pub_success = True
+                break
+            else:
+                msg = "Trap(s) %s publish attempt %d returned non-normal: %d %s" % (tds.trap_uuids_in_buffer, k, http_resp.status_code, http_resp.text)
+                ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_WARN, tds.CODE_GENERAL, msg)
+
+        except OSError as e:
+            msg = "OS exception while attempting to post %s attempt %s: (%s) %s %s" % (tds.trap_uuids_in_buffer, k,  e.errno, e.strerror, str(e))
+            ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_WARN, tds.CODE_GENERAL, msg)
+
+        except requests.exceptions.RequestException as e:
+            msg = "Requests exception while attempting to post %s attempt %d: (%d) %s" % (tds.trap_uuids_in_buffer, int(k),  int(e.errno), str(e.strerror))
+            ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_WARN, tds.CODE_GENERAL, msg)
+
+        k += 1
+
+        if k < tds.c_config['publisher.http_retries']:
+            msg = "sleeping %.4f seconds and retrying" % (tds.seconds_between_retries)
+            ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+            time.sleep(tds.seconds_between_retries)
+        else:
+            break
+
+    if not dmaap_pub_success:
+        msg = "ALL publish attempts failed for traps %s to URL %s "\
+                   % (tds.trap_uuids_in_buffer, tds.c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url'])
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_CRIT, tds.CODE_GENERAL, msg)
+
+    # FIXME: This currently tries, then logs error and trashes buffer if all dmaap attempts fail. Better way? 
+    tds.traps_since_last_publish = 0
+    tds.trap_uuids_in_buffer=""
+    tds.all_traps_str = ""
+    tds.first_trap = True
+
+# # # # # # # # # # # # # # # # # # #
+# fx: request_observer for community string rewrite
+# # # # # # # # # # # # # # # # # # #
+def comm_string_rewrite_observer(snmpEngine, execpoint, variables, cbCtx):
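+    """
+    Observer that rewrites every arriving communityName to 'public' so traps
+    are accepted regardless of the community string they were sent with.
+    """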
+
+    # match ALL community strings
+    if re.match('.*', str(variables['communityName'])):
+        # msg = "Rewriting communityName '%s' from %s into 'public'"  % (variables['communityName'], ':'.join([str(x) for x in
+        #                                                                variables['transportInformation'][1]]))
+        # ecomp_logger(eelf_debug_fd, eelf_debug_fd, tds.LOG_TYPE_DEBUG, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+        variables['communityName'] = variables['communityName'].clone('public')
+
+# # # # # # # # # # # # # # # # # # #
+# fx: snmp_engine_observer_cb
+#     callback for when trap is received
+# # # # # # # # # # # # # # # # # # #
+
+
+def snmp_engine_observer_cb(snmp_engine, execpoint, variables, cbCtx):
+    """
+    Decompose trap attributes and load in dictionary which is later used to
+    create json string for publishing to dmaap.
+    :Parameters:
+      snmp_engine
+         snmp engine created to listen for arriving traps
+      execpoint
+         point in code that snmp_engine_observer_cb was invoked
+      variables
+         trap attributes
+      cbCtx
+         callback context
+    :Exceptions:
+      none
+    :Keywords:
+      UEB non-AAF legacy http post
+    :Variables:
+    """
+
+    # init dictionary on new trap
+    tds.trap_dict = {}
+
+    # assign uuid to trap
+    tds.trap_dict["uuid"] = str(uuid_mod.uuid1())
+
+    # ip and hostname
+    ip_addr_str = str(variables['transportAddress'][0])
+    tds.trap_dict["agent address"] = ip_addr_str
+
+    msg = 'snmp trap arrived from %s, assigned uuid: %s' % \
+              (ip_addr_str, tds.trap_dict["uuid"])
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    try:
+        if int(tds.dns_cache_ip_expires[ip_addr_str]) < int(time.time()):
+            raise Exception('cache expired for %s at %d - updating value' %
+                            (ip_addr_str, (tds.dns_cache_ip_expires[ip_addr_str])))
+        else:
+            tds.trap_dict["agent name"] = tds.dns_cache_ip_to_name[ip_addr_str]
+    except:
+        msg = "dns cache expired or missing for %s - refreshing" % ip_addr_str
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+        try:
+            agent_fqdn,alias,addresslist = socket.gethostbyaddr(ip_addr_str)
+        except:
+            agent_fqdn = ip_addr_str
+
+        tds.trap_dict["agent name"] = agent_fqdn
+
+        tds.dns_cache_ip_to_name[ip_addr_str] = agent_fqdn
+        tds.dns_cache_ip_expires[ip_addr_str] = (
+            time.time() + tds.c_config['cache.dns_cache_ttl_seconds'])
+        msg = "cache for %s (%s) updated - set to expire at %d" % \
+                          (agent_fqdn, ip_addr_str, tds.dns_cache_ip_expires[ip_addr_str])
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    tds.trap_dict["cambria.partition"] = str(tds.trap_dict["agent name"])
+    tds.trap_dict["community"] = ""    # do not include cleartext community in pub
+    tds.trap_dict["community len"] = 0
+
+    # FIXME.CHECK_WITH_DOWNSTREAM_CONSUMERS: get rid of round for millisecond val
+    # epoch_second = int(round(time.time()))
+    epoch_msecond = time.time()
+    epoch_second = int(round(epoch_msecond))
+    if epoch_second == tds.last_epoch_second:
+        tds.traps_in_epoch += 1
+    else:
+        tds.traps_in_epoch = 0
+    tds.last_epoch_second = epoch_second
+    traps_in_epoch_04d = format(tds.traps_in_epoch, '04d')
+    tds.trap_dict['epoch_serno'] = int(
+        (str(epoch_second) + str(traps_in_epoch_04d)))
+
+    snmp_version = variables['securityModel']
+    if snmp_version == 1:
+        tds.trap_dict["protocol version"] = "v1"
+    elif snmp_version == 2:
+        tds.trap_dict["protocol version"] = "v2c"
+    elif snmp_version == 3:
+        tds.trap_dict["protocol version"] = "v3"
+    else:
+        tds.trap_dict["protocol version"] = "unknown"
+
+    if snmp_version == 3:
+        tds.trap_dict["protocol version"] = "v3"
+        tds.trap_dict["security level"] = str(variables['securityLevel'])
+        tds.trap_dict["context name"] = str(variables['contextName'].prettyPrint())
+        tds.trap_dict["security name"] = str(variables['securityName'])
+        tds.trap_dict["security engine"] = str(
+            variables['contextEngineId'].prettyPrint())
+    tds.trap_dict['time received'] = epoch_msecond
+    tds.trap_dict['trap category'] = (tds.c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url']).split('/')[-1]
+
+
+# # # # # # # # # # # # # # # # # # #
+# fx: request_observer for community string rewrite
+# # # # # # # # # # # # # # # # # # #
+def add_varbind_to_json(vb_idx, vb_oid, vb_type, vb_val):
+    """
+    Called for each varbind, adds individual attributes of varbind instance to
+    vb_json_str.  vb_json_str will be added to curr_trap_json_str prior to publish.
+    :Parameters:
+      vb_idx
+        index to specific varbind being processed
+      vb_oid
+        the varbind oid
+      vb_val
+        the value of the varbind
+    :Exceptions:
+      none
+    :Keywords:
+      varbind extract json
+    :Variables:
+    """
+
+    _individual_vb_dict = {}
+
+    if tds.trap_dict["protocol version"] == "v2c":
+        # if v2c and first 2 varbinds, special handling required - e.g. put
+        # in trap_dict, not vb_json_str
+        if vb_idx == 0:
+            tds.trap_dict["sysUptime"] = str(vb_val.prettyPrint())
+            return True
+        else:
+            if vb_idx == 1:
+                tds.trap_dict["notify OID"] = str(vb_val.prettyPrint())
+                tds.trap_dict["notify OID len"] = (
+                    tds.trap_dict["notify OID"].count('.') + 1)
+                return True
+    if tds.first_varbind:
+        tds.all_vb_json_str = ', \"varbinds\": ['
+        tds.first_varbind = False
+    else:
+        # all_vb_json_str = ''.join([all_vb_json_str, ' ,'])
+        # all_vb_json_str = "%s ," % all_vb_json_str
+        tds.all_vb_json_str = tds.all_vb_json_str + " ," 
+
+    _individual_vb_dict.clear()
+    _individual_vb_dict['varbind_oid'] = vb_oid.prettyPrint()
+    _individual_vb_dict['varbind_type'] = vb_type
+    _individual_vb_dict['varbind_value'] = vb_val.prettyPrint()
+
+    _individual_vb_json_str = json.dumps(_individual_vb_dict)
+
+    # all_vb_json_str = "%s%s" % (all_vb_json_str, individual_vb_json_str)
+    # all_vb_json_str = ''.join([all_vb_json_str, individual_vb_json_str])
+    tds.all_vb_json_str = tds.all_vb_json_str + _individual_vb_json_str
+    return True
+
+
+# Callback function for receiving notifications
+# noinspection PyUnusedLocal,PyUnusedLocal,PyUnusedLocal
+def notif_receiver_cb(snmp_engine, stateReference, contextEngineId, contextName,
+                      varBinds, cbCtx):
+    """
+    Callback executed when trap arrives
+    :Parameters:
+      snmp_engine
+        snmp engine created to listen for arriving traps
+      stateReference
+      contextEngineId
+      contextName
+      varBinds
+        trap varbinds - why we are here
+      cbCtx
+        callback context
+    :Exceptions:
+      none
+    :Keywords:
+      callback trap arrival
+    :Variables:
+    """
+
+    msg = "processing varbinds for %s" % (tds.trap_dict["uuid"])
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    # FIXME update reset location when batching publishes
+    vb_idx = 0
+
+    # For reference: 
+    #
+    # print('\nvarBinds ==> %s' % (varBinds))
+    #
+    # varBinds ==> [(ObjectName('1.3.6.1.2.1.1.3.0'), TimeTicks(1243175676)),
+    #               (ObjectName('1.3.6.1.6.3.1.1.4.1.0'), ObjectIdentifier('1.3.6.1.4.1.74.2.46.12.1.1')),
+    #               (ObjectName('1.3.6.1.4.1.74.2.46.12.1.1.1'), OctetString(b'ucsnmp heartbeat - ignore')),
+    #               (ObjectName('1.3.6.1.4.1.74.2.46.12.1.1.2'), OctetString(b'Fri Aug 11 17:46:01 EDT 2017'))]
+    #
+
+    tds.all_vb_json_str = ""
+    vb_idx = 0
+    tds.first_varbind = True
+
+    # iterate over varbinds, add to json struct
+    for vb_oid, vb_val in varBinds:
+        add_varbind_to_json(vb_idx, vb_oid, vb_val.__class__.__name__, vb_val)
+        vb_idx += 1
+
+    # FIXME: DL back out first 2 varbinds for v2c notifs prior to publishing varbind count
+    # trap_dict["varbind count"] = vb_idx
+    curr_trap_json_str = json.dumps(tds.trap_dict)
+    # now have everything except varbinds in "curr_trap_json_str"
+
+    # if varbinds present - which will almost always be the case - add all_vb_json_str to trap_json_message
+    if vb_idx != 0:
+        # close out vb array
+        # all_vb_json_str += "]"
+        # all_vb_json_str = ''.join([all_vb_json_str, ']'])
+        tds.all_vb_json_str = tds.all_vb_json_str + ']'
+
+        # remove last close bracket from curr_trap_json_str
+        curr_trap_json_str = curr_trap_json_str[:-1]
+
+        # add vb_json_str to payload
+        # curr_trap_json_str += all_vb_json_str
+        # curr_trap_json_str = ''.join([curr_trap_json_str, all_vb_json_str])
+        curr_trap_json_str = curr_trap_json_str + tds.all_vb_json_str
+
+        # add last close brace back in
+        # curr_trap_json_str += "}"
+        # curr_trap_json_str = ''.join([curr_trap_json_str, '}'])
+        curr_trap_json_str = curr_trap_json_str + '}'
+
+    msg = "trap %s : %s" % (tds.trap_dict["uuid"], curr_trap_json_str)
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+
+    # now have a complete json message for this trap in "curr_trap_json_str"
+    tds.traps_since_last_publish += 1
+    milliseconds_since_last_publish = (time.time() - tds.last_pub_time) * 1000
+
+    msg = "adding %s to buffer" % (tds.trap_dict["uuid"])
+    ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+    if tds.first_trap:
+       tds.all_traps_str = curr_trap_json_str
+       tds.trap_uuids_in_buffer = tds.trap_dict["uuid"]
+       tds.first_trap = False
+    else:
+       tds.trap_uuids_in_buffer = tds.trap_uuids_in_buffer + ', ' + tds.trap_dict["uuid"]
+       tds.all_traps_str = tds.all_traps_str + ', ' + curr_trap_json_str
+
+    # always log arriving traps
+    log_all_arriving_traps()
+
+    # publish to dmaap after last varbind is processed
+    if tds.traps_since_last_publish >= tds.c_config['publisher.max_traps_between_publishes']:
+        msg = "num traps since last publish (%d) exceeds threshold (%d) - publish traps" % (tds.traps_since_last_publish, tds.c_config['publisher.max_traps_between_publishes'])
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+        post_dmaap()
+    elif milliseconds_since_last_publish >= tds.c_config['publisher.max_milliseconds_between_publishes']:
+        msg = "num milliseconds since last publish (%.0f) exceeds threshold - publish traps"% milliseconds_since_last_publish
+        ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_DETAILED, tds.CODE_GENERAL, msg)
+        post_dmaap()
+
+
+# # # # # # # # # # # # #
+# Main  MAIN  Main  MAIN
+# # # # # # # # # # # # #
+# parse command line args
+parser = argparse.ArgumentParser(description='Post SNMP traps '
+                                             'to message bus')
+parser.add_argument('-v', action="store_true", dest="verbose",
+                    help="verbose logging")
+parser.add_argument('-?', action="store_true", dest="usage_requested",
+                    help="show command line use")
+
+# parse args
+args = parser.parse_args()
+
+# set vars from args
+verbose = args.verbose
+usage_requested = args.usage_requested
+
+# if usage, just display and exit
+if usage_requested:
+    usage_err()
+
+# init vars
+tds.init()
+
+# Set initial startup hour for rolling logfile
+tds.last_hour = datetime.datetime.now().hour
+
+# get config binding service (CBS) values (either broker, or json file override)
+load_all_configs(0,0)
+msg = "%s : %s version %s starting" % (prog_name, tds.c_config['snmptrap.title'], tds.c_config['snmptrap.version'])
+stdout_logger(msg)
+
+# Avoid this unless needed for testing; it prints sensitive data to log
+#
+# msg = "Running config: "
+# stdout_logger(msg)
+# msg = json.dumps(c_config, sort_keys=False, indent=4)
+# stdout_logger(msg)
+
+# open various ecomp logs
+open_eelf_logs()
+
+# bump up logging level if overridden at command line
+if verbose:
+    msg = "WARNING:  '-v' argument present.  All messages will be logged.  This can slow things down, use only when needed."
+    tds.minimum_severity_to_log=0
+    stdout_logger(msg)
+
+# name and open arriving trap log
+tds.arriving_traps_filename = tds.c_config['files.runtime_base_dir'] + "/" + tds.c_config['files.log_dir'] + "/" + (tds.c_config['files.arriving_traps_log'])
+tds.arriving_traps_fd = open_file(tds.arriving_traps_filename)
+msg = ("arriving traps logged to: %s" % tds.arriving_traps_filename)
+stdout_logger(msg)
+ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+
+# name and open json trap log
+tds.json_traps_filename = tds.c_config['files.runtime_base_dir'] + "/" + tds.c_config['files.log_dir'] + "/" + "DMAAP_" + (tds.c_config['streams_publishes']['sec_fault_unsecure']['dmaap_info']['topic_url'].split('/')[-1]) + ".json"
+tds.json_traps_fd = open_file(tds.json_traps_filename)
+msg = ("published traps logged to: %s" % tds.json_traps_filename)
+stdout_logger(msg)
+ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+
+# setup signal handling for config reload
+signal.signal(signal.SIGUSR1, load_all_configs)
+
+# save current PID for future/external reference
+tds.pid_file_name = tds.c_config['files.runtime_base_dir'] + \
+    '/' + tds.c_config['files.pid_dir'] + '/' + prog_name + ".pid"
+msg = "Runtime PID file: %s" % tds.pid_file_name
+ecomp_logger(tds.LOG_TYPE_DEBUG, tds.SEV_INFO, tds.CODE_GENERAL, msg)
+rc = save_pid(tds.pid_file_name)
+
+# Get the event loop for this thread
+loop = asyncio.get_event_loop()
+
+# Create SNMP engine with autogenerated engineID pre-bound
+# to socket transport dispatcher
+snmp_engine = engine.SnmpEngine()
+
+# # # # # # # # # # # #
+# Transport setup
+# # # # # # # # # # # #
+
+# UDP over IPv4
+# FIXME:  add check for presence of ipv4_interface prior to attempting add OR just put entire thing in try/except clause
+try:
+    ipv4_interface = tds.c_config['protocols.ipv4_interface']
+    ipv4_port = tds.c_config['protocols.ipv4_port']
+
+    try:
+        config.addTransport(
+            snmp_engine,
+            udp.domainName + (1,),
+            udp.UdpTransport().openServerMode(
+                (ipv4_interface, ipv4_port))
+        )
+    except Exception as e:
+        msg = "Unable to bind to %s:%s - %s" % (ipv4_interface, ipv4_port, str(e))
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_FATAL, tds.CODE_GENERAL, msg)
+        stdout_logger(msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+except Exception as e:
+    msg = "IPv4 interface and/or port not specified in config - not listening for IPv4 traps"
+    stdout_logger(msg)
+    ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_WARN, tds.CODE_GENERAL, msg)
+
+
+# UDP over IPv4: example of a second listening interface/port, if you don't want to listen on all interfaces
+# config.addTransport(
+#     snmp_engine,
+#     udp.domainName + (2,),
+#     udp.UdpTransport().openServerMode(('127.0.0.1', 2162))
+# )
+
+
+# UDP over IPv6
+# FIXME:  add check for presence of ipv6_interface prior to attempting add OR just put entire thing in try/except clause
+try:
+    ipv6_interface = tds.c_config['protocols.ipv6_interface']
+    ipv6_port = tds.c_config['protocols.ipv6_port']
+
+    try:
+        config.addTransport(
+            snmp_engine,
+            udp6.domainName,
+            udp6.Udp6Transport().openServerMode(
+                (ipv6_interface, ipv6_port))
+        )
+    except Exception as e:
+        msg = "Unable to bind to %s:%s - %s" % (ipv6_interface,ipv6_port, str(e))
+        stdout_logger(msg)
+        ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_FATAL, tds.CODE_GENERAL, msg)
+        cleanup_and_exit(1, tds.pid_file_name)
+
+except Exception as e:
+    msg = "IPv6 interface and/or port not specified in config - not listening for IPv6 traps"
+    stdout_logger(msg)
+    ecomp_logger(tds.LOG_TYPE_ERROR, tds.SEV_WARN, tds.CODE_GENERAL, msg)
+
+
+# # # # # # # # # # # #
+# SNMPv1/2c setup
+# # # # # # # # # # # #
+
+# SecurityName <-> CommunityName mapping
+#     to restrict trap reception to only those with specific community
+#     strings
+config.addV1System(snmp_engine, 'my-area', 'public')
+
+# register comm_string_rewrite_observer for message arrival
+snmp_engine.observer.registerObserver(
+    comm_string_rewrite_observer,
+    'rfc2576.processIncomingMsg:writable'
+)
+
+# register snmp_engine_observer_cb for message arrival
+snmp_engine.observer.registerObserver(
+    snmp_engine_observer_cb,
+    'rfc3412.receiveMessage:request',
+    'rfc3412.returnResponsePdu',
+)
+
+# Register SNMP Application at the SNMP engine
+ntfrcv.NotificationReceiver(snmp_engine, notif_receiver_cb)
+
+snmp_engine.transportDispatcher.jobStarted(1)  # loop forever
+
+# Run I/O dispatcher which will receive traps
+try:
+    snmp_engine.transportDispatcher.runDispatcher()
+except:
+    snmp_engine.observer.unregisterObserver()
+    snmp_engine.transportDispatcher.closeDispatcher()
+    cleanup_and_exit(1, tds.pid_file_name)
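Reviewer aside (not part of the patch): to exercise the transport and community setup added above, a minimal sketch of sending a v2c test trap with pysnmp's hlapi. The target 127.0.0.1:6164 and community 'public' are assumptions taken from etc/snmptrapd.json and the addV1System call in this file; the coldStart OID is just an arbitrary standard notification, and nothing here is shipped with the collector.

    # minimal sketch, assuming the receiver listens on 127.0.0.1:6164 with community 'public'
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, NotificationType, ObjectIdentity,
                              sendNotification)

    error_indication, error_status, error_index, var_binds = next(
        sendNotification(
            SnmpEngine(),
            CommunityData('public', mpModel=1),        # SNMPv2c
            UdpTransportTarget(('127.0.0.1', 6164)),   # protocols.ipv4_port default
            ContextData(),
            'trap',
            NotificationType(ObjectIdentity('1.3.6.1.6.3.1.1.5.1'))  # coldStart
        )
    )
    if error_indication:
        print(error_indication)   # e.g. no route / port closed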
diff --git a/src/dcae_snmptrapd.sh b/bin/snmptrapd.sh
similarity index 76%
rename from src/dcae_snmptrapd.sh
rename to bin/snmptrapd.sh
index 37ca96f..52f3913 100755
--- a/src/dcae_snmptrapd.sh
+++ b/bin/snmptrapd.sh
@@ -23,7 +23,7 @@
 
 
 # get to where we are supposed to be for startup
-cd /opt/app/snmptrap/src
+cd /opt/app/snmptrap/bin
 
 # include path to 3.6+ version of python that has required dependencies included
 export PATH=/opt/app/python-3.6.1/bin:$PATH
@@ -35,13 +35,18 @@
 export REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-bundle.crt
 
 # PYTHONUNBUFFERED:
-#    set PYTHONUNBUFFERED to True to avoid output buffering; comment out for 
-#    better performance!
-# export PYTHONUNBUFFERED='True'
+#    set PYTHONUNBUFFERED to a non-empty string to avoid output buffering;
+#    comment it out in runtime environments for better performance!
+export PYTHONUNBUFFERED="True"
 
-# less verbose at startup?  Use this:
-# python dcae_snmptrapd.py -c ../etc/trapd.yaml
+# set location of config binding service (CBS) JSON override IF NEEDED
+#
+export CBS_SIM_JSON=../etc/snmptrapd.json
+
 # want tracing?  Use this:
-# python -m trace --trackcalls dcae_snmptrapd.py -c ../etc/trapd.yaml
+# python -m trace --trackcalls snmptrapd.py -v
+# want verbose logging?  Use this:
+# python snmptrapd.py -v
 # standard startup?  Use this:
-python dcae_snmptrapd.py -v -c ../etc/trapd.yaml
+# python snmptrapd.py
+python snmptrapd.py -v
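Reviewer aside: the CBS_SIM_JSON export above implies a local-file override of the config binding service. A hedged sketch of that fallback pattern follows; whether the packaged onap_dcae_cbs_docker_client honors CBS_SIM_JSON internally is not shown in this change, so the helper name load_configuration and its behavior are illustrative assumptions only.

    # sketch only: prefer a local JSON override when CBS_SIM_JSON is set, else ask CBS
    import json
    import os

    from onap_dcae_cbs_docker_client.client import get_config  # assumed callable with no args

    def load_configuration():
        """Return the collector config dict from the override file or from CBS."""
        override = os.environ.get('CBS_SIM_JSON')
        if override:
            with open(override) as fd:
                return json.load(fd)
        return get_config()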
diff --git a/data-formats/snmptrap_output_format.json b/data-formats/snmptrap_output_format.json
new file mode 100644
index 0000000..fbbe4ae
--- /dev/null
+++ b/data-formats/snmptrap_output_format.json
@@ -0,0 +1,167 @@
+{
+    "self": {
+        "name": "snmptrap_output",
+        "version": "1.3.0",
+        "description": "The output format for snmptrap, which is a SNMP trap that is loaded into a JSON structure"
+    },
+    "dataformatversion": "1.3.0",
+    "jsonschema": {
+      "anyOf": [{
+        "name": "SNMP Alert v1_0",
+        "version": "1.3.0",
+        "additionalProperties": false,
+        "properties": {
+          "cambria.partition": {
+            "type": "string"
+          },
+          "uuid": {
+            "type": "string"
+          },
+          "agent address": {
+            "type": "string"
+          },
+          "agent name": {
+            "type": "string"
+          },
+          "time received": {
+            "type": "string"
+          },
+          "epoch_serno": {
+            "type": "string"
+          },
+          "protocol version": {
+            "type": "string",
+            "enum": [ "v1", "v2c", "unsupported(v2u)" ]
+          },
+          "community len": {
+            "type": "string"
+          },
+          "community": {
+            "type": "string"
+          },
+          "trap category": {
+            "type": "string"
+          },
+          "sysUptime": {
+            "type": "string"
+          },
+          "notify OID len": {
+            "type": "string"
+          },
+          "notify OID": {
+            "type": "string"
+          },
+          "varbinds": {
+            "type": "array",
+            "items": { 
+              "$ref": "#varbind"
+            }
+          }
+        },
+        "required": ["cambria.partition", "uuid", "agent address", "agent name", "time received", "epoch_serno", "protocol version", "community len", "community", "trap category", "notify OID len", "notify OID", "varbinds"]
+      },
+      {"name": "SNMP Alert v3",
+        "version": "1.3.0",
+        "additionalProperties": false,
+        "properties": {
+          "cambria.partition": {
+            "type": "string"
+          },
+          "uuid": {
+            "type": "string"
+          },
+          "agent address": {
+            "type": "string"
+          },
+          "agent name": {
+            "type": "string"
+          },
+          "time received": {
+            "type": "string"
+          },
+          "epoch_serno": {
+            "type": "string"
+          },
+          "protocol version": {
+            "type": "string",
+            "enum": [ "v3" ]
+          },
+          "community len": {
+            "type": "string"
+          },
+          "community": {
+            "type": "string"
+          },
+          "trap category": {
+            "type": "string"
+          },
+          "sysUptime": {
+            "type": "string"
+          },
+          "notify OID len": {
+            "type": "string"
+          },
+          "notify OID": {
+            "type": "string"
+          },
+          "msg id": {
+            "type": "string"
+          },
+          "security level": {
+            "type": "string",
+            "enum": [ "noAuthNoPriv", "authNoPriv", "authPriv" ]
+          },
+          "context name": {
+            "type": "string"
+          },
+          "security name": {
+            "type": "string"
+          },
+          "security engine": {
+            "type": "string"
+          },
+          "varbinds": {
+            "type": "array",
+            "items": { 
+              "$ref": "#varbind" 
+            }
+          }
+        },
+        "required": ["cambria.partition", "uuid", "agent address", "agent name", "time received", "epoch_serno", "protocol version", "community len", "community", "trap category", "notify OID len", "notify OID", "msg id", "security level", "context name", "security name", "security engine", "varbinds"]
+      }
+      ],
+      "definitions" : {
+        "varbind" : {
+          "id" : "#varbind",
+          "additionalProperties": false,
+          "properties" : {
+            "varbind_oid": {
+              "type": "string"
+            },
+            "varbind_type": {
+              "type": "string",
+              "enum": [ "boolean",
+                        "integer",
+                        "bit",
+                        "octet",
+                        "null",
+                        "oid",
+                        "ipaddress",
+                        "counter",
+                        "unsigned",
+                        "timeticks",
+                        "opaque",
+                        "unused1",
+                        "counter64",
+                        "unused2"
+              ]
+            },
+            "varbind_value": {
+              "type": "string"
+            }
+          },
+          "required": ["varbind_oid", "varbind_type", "varbind_value"]
+        }
+      }
+    }
+}
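Reviewer aside: to make the schema above easier to check, here is one published trap that satisfies the "SNMP Alert v1_0" (v1/v2c) branch, written as a Python dict. All values are illustrative; only the shape and required keys come from the schema.

    # illustrative values only -- shaped to match the v1/v2c branch of the data format
    sample_v2c_trap = {
        "cambria.partition": "example.agent.onap.org",
        "uuid": "1f9a1c4e-0000-0000-0000-000000000000",
        "agent address": "192.0.2.10",
        "agent name": "example.agent.onap.org",
        "time received": "1520972154.2916362",
        "epoch_serno": "15209721540000",
        "protocol version": "v2c",
        "community len": "6",
        "community": "public",
        "trap category": "ONAP-COLLECTOR-SNMPTRAP",
        "sysUptime": "1245",
        "notify OID len": "10",
        "notify OID": "1.3.6.1.6.3.1.1.5.1",
        "varbinds": [
            {
                "varbind_oid": "1.3.6.1.4.1.74.2.46.12.1.1.1",
                "varbind_type": "octet",
                "varbind_value": "test trap"
            }
        ]
    }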
diff --git a/data/.blank b/data/.blank
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/data/.blank
diff --git a/etc/dcae_snmptrapd.yaml b/etc/dcae_snmptrapd.yaml
deleted file mode 100644
index 0b4feb7..0000000
--- a/etc/dcae_snmptrapd.yaml
+++ /dev/null
@@ -1,41 +0,0 @@
-# ================================================================================
-# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-protocol:
-    transport: udp
-    interface: localhost
-    port: 6163
-    dns_cache_ttl_seconds: 60
-
-files:
-    runtime_base_dir: /opt/app/dcae_snmptrapd
-    log_dir: /opt/app/dcae_snmptrapd/logs
-    data_dir: /opt/app/dcae_snmptrapd/data
-    pid_dir: /var/tmp
-    dcae_snmptrapd_diag: /opt/app/dcae_snmptrapd/logs/dcae_snmptrapd.log
-    raw_traps_log: /opt/app/dcae_snmptrapd/logs/trapd.log
-    published_traps_dir: /opt/app/dcae_snmptrapd/logs
-    trap_stats_log: /opt/app/dcae_snmptrapd/logs/dcae_snmptrapd_stats.csv
-    perm_status_file: /opt/app/dcae_snmptrapd/logs/dcae_snmptrapd.permStatus.log
-
-ueb:
-    dmaap_conf: /etc/dcae/dmaap.conf
-    http_timeout: 5
-    primary_publisher: true
-    peer_publisher: null
-    max_traps_between_publish: 50
-    max_milliseconds_between_publish: 3500
diff --git a/etc/dcae_snmptrapd_logging.yaml b/etc/dcae_snmptrapd_logging.yaml
deleted file mode 100644
index 43af342..0000000
--- a/etc/dcae_snmptrapd_logging.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
-# ================================================================================
-# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-version: 1
-disable_existing_loggers: False
-formatters:
-    simple:
-        format: "%(levelname)s|%(asctime)s|%(name)s|%(process)d|%(funcName)s|'%(message)s"
-
-handlers:
-    console:
-        class: logging.StreamHandler
-        level: DEBUG
-        formatter: simple
-        stream: ext://sys.stdout
-
-    info_file_handler:
-        class: logging.handlers.RotatingFileHandler
-        level: INFO
-        formatter: simple
-        filename: info.log
-        maxBytes: 10480000 # 10MB
-        backupCount: 10
-        encoding: utf8
-
-    error_file_handler:
-        class: logging.handlers.RotatingFileHandler
-        level: ERROR
-        formatter: simple
-        filename: errors.log
-        maxBytes: 6144000 # 6MB
-        backupCount: 10 
-        encoding: utf8
-
-loggers:
-    my_module:
-        level: ERROR
-        handlers: [console]
-        propagate: no
-
-root:
-    level: INFO
-    handlers: [console, info_file_handler, error_file_handler]
diff --git a/etc/snmptrapd.json b/etc/snmptrapd.json
new file mode 100644
index 0000000..c9caa76
--- /dev/null
+++ b/etc/snmptrapd.json
@@ -0,0 +1,69 @@
+{
+"snmptrap.version": "1.3.0", 
+"snmptrap.title": "ONAP SNMP Trap Receiver" ,
+"protocols.transport": "udp",
+"protocols.ipv4_interface": "0.0.0.0",
+"protocols.ipv4_port": 6164,
+"protocols.ipv6_interface": "::1",
+"protocols.ipv6_port": 6164,
+"cache.dns_cache_ttl_seconds": 60,
+"publisher.http_timeout_milliseconds": 1500,
+"publisher.http_retries": 3,
+"publisher.http_milliseconds_between_retries": 750,
+"publisher.http_primary_publisher": "true",
+"publisher.http_peer_publisher": "unavailable",
+"publisher.max_traps_between_publishes": 10,
+"publisher.max_milliseconds_between_publishes": 10000,
+    "streams_publishes": {
+            "sec_measurement": {
+                "type": "message_router",
+                "aaf_password": "aaf_password",
+                "dmaap_info": {
+                    "location": "mtl5",
+                    "client_id": "111111",
+                    "client_role": "com.att.dcae.member",
+                    "topic_url": null
+                },
+                "aaf_username": "aaf_username"
+            },
+            "sec_fault_unsecure": {
+                "type": "message_router",
+                "aaf_password": null,
+                "dmaap_info": {
+                    "location": "mtl5",
+                    "client_id": null,
+                    "client_role": null,
+                    "topic_url": "http://ueb_server:3904/events/ONAP-COLLECTOR-SNMPTRAP"
+                },
+                "aaf_username": null
+            }
+    },
+"files.runtime_base_dir": "/opt/app/snmptrap",
+"files.log_dir": "logs",
+"files.data_dir": "data",
+"files.pid_dir": "tmp",
+"files.arriving_traps_log": "snmptrapd_arriving_traps.log",
+"files.snmptrapd_diag": "snmptrapd_prog_diag.log",
+"files.traps_stats_log": "snmptrapd_stats.csv",
+"files.perm_status_file": "snmptrapd_status.log",
+"files.eelf_base_dir": "/opt/app/snmptrap/logs",
+"files.eelf_error": "error.log",
+"files.eelf_debug": "debug.log",
+"files.eelf_audit": "audit.log",
+"files.eelf_metrics": "metrics.log",
+"files.roll_frequency": "hour",
+"files.minimum_severity_to_log": 2,
+"trap_def.1.trap_oid" : ".1.3.6.1.4.1.74.2.46.12.1.1",
+"trap_def.1.trap_category": "DCAE-SNMP-TRAPS",
+"trap_def.2.trap_oid" : "*",
+"trap_def.2.trap_category": "DCAE-SNMP-TRAPS",
+"stormwatch.1.stormwatch_oid" : ".1.3.6.1.4.1.74.2.46.12.1.1",
+"stormwatch.1.low_water_rearm_per_minute" : "5",
+"stormwatch.1.high_water_arm_per_minute" : "100",
+"stormwatch.2.stormwatch_oid" : ".1.3.6.1.4.1.74.2.46.12.1.2",
+"stormwatch.2.low_water_rearm_per_minute" : "2",
+"stormwatch.2.high_water_arm_per_minute" : "200",
+"stormwatch.3.stormwatch_oid" : ".1.3.6.1.4.1.74.2.46.12.1.2",
+"stormwatch.3.low_water_rearm_per_minute" : "2",
+"stormwatch.3.high_water_arm_per_minute" : "200"
+}
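Reviewer aside: a small sketch of how the runtime code assembles file paths from the flat keys above (the path in the final comment is what these defaults produce). The relative path 'etc/snmptrapd.json' and the use of os.path.join are assumptions for readability; the collector itself concatenates with "/".

    # sketch: combine files.* keys the same way snmptrapd.py does at startup
    import json
    import os

    with open('etc/snmptrapd.json') as fd:
        c_config = json.load(fd)

    arriving_traps_log = os.path.join(c_config['files.runtime_base_dir'],
                                      c_config['files.log_dir'],
                                      c_config['files.arriving_traps_log'])
    # -> /opt/app/snmptrap/logs/snmptrapd_arriving_traps.log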
diff --git a/etc/trap.conf b/etc/trap.conf
deleted file mode 100644
index 56c6c3a..0000000
--- a/etc/trap.conf
+++ /dev/null
@@ -1,3 +0,0 @@
-# 
-.1.3.6.1.4.1.74.2.46.12.1.1 DCAE-SNMP-TRAPS
-* DCAE-SNMP-TRAPS
diff --git a/etc/trapd.yaml b/etc/trapd.yaml
deleted file mode 100644
index 7432e56..0000000
--- a/etc/trapd.yaml
+++ /dev/null
@@ -1,51 +0,0 @@
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-snmptrap: '2.0'
-info:
-    version: 2.1
-    title: ONAP SNMP Trap Receiver
-
-protocol:
-    transport: udp
-    ipv4_interface: 0.0.0.0
-    ipv4_port: 6164
-    ipv6_interface: ::1
-    ipv6_port: 6164
-    dns_cache_ttl_seconds: 60
-
-files:
-    runtime_base_dir: /opt/app/snmptrap
-    log_dir: /opt/app/snmptrap/logs
-    data_dir: /opt/app/snmptrap/data
-    pid_dir: /var/tmp
-    snmptrapd_diag: /opt/app/snmptrap/logs/dcae_snmptrap.log
-    trap_conf: /opt/app/snmptrap/etc/trap.conf
-    raw_traps_log: /opt/app/snmptrap/logs/trapd.log
-    published_traps_dir: /opt/app/snmptrap/logs
-    trap_stats_log: /opt/app/snmptrap/logs/trapd_stats.csv
-    perm_status_file: /opt/app/snmptrap/logs/trapd.perm_status.log
-
-dmaap:
-    dmaap_conf: /etc/dcae/dmaap.conf
-    http_timeout: 5
-    http_retries: 3 
-    http_secs_between_retries: .75
-    primary_publisher: true
-    peer_publisher: null
-    max_traps_between_publish: 50
-    max_milliseconds_between_publish: 3500
diff --git a/etc/trapd_logging.yaml b/etc/trapd_logging.yaml
deleted file mode 100644
index 6719e5e..0000000
--- a/etc/trapd_logging.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-version: 1
-disable_existing_loggers: False
-formatters:
-    simple:
-        format: "%(levelname)s|%(asctime)s|%(name)s|%(process)d|%(funcName)s|'%(message)s"
-
-handlers:
-    console:
-        class: logging.StreamHandler
-        level: DEBUG
-        formatter: simple
-        stream: ext://sys.stdout
-
-    info_file_handler:
-        class: logging.handlers.RotatingFileHandler
-        level: INFO
-        formatter: simple
-        filename: info.log
-        maxBytes: 10480000 # 10MB
-        backupCount: 10
-        encoding: utf8
-
-    error_file_handler:
-        class: logging.handlers.RotatingFileHandler
-        level: ERROR
-        formatter: simple
-        filename: errors.log
-        maxBytes: 6144000 # 6MB
-        backupCount: 10 
-        encoding: utf8
-
-loggers:
-    my_module:
-        level: ERROR
-        handlers: [console]
-        propagate: no
-
-root:
-    level: INFO
-    handlers: [console, info_file_handler, error_file_handler]
diff --git a/pom.xml b/pom.xml
index 1a61b2a..8765657 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,162 +1,51 @@
 <?xml version="1.0"?>
-<!--
-================================================================================
-Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
-================================================================================
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-============LICENSE_END=========================================================
-
-ECOMP is a trademark and service mark of AT&T Intellectual Property.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0   http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <parent>
     <groupId>org.onap.oparent</groupId>
     <artifactId>oparent</artifactId>
-    <version>0.1.1</version>
+    <version>1.1.0</version>
   </parent>
+  <!-- parent>
+           <groupId>org.onap.dcae.platform</groupId>
+    <artifactId>plugins</artifactId>
+    <version>1.0.0</version>
+  </parent -->
 
   <!--- CHANGE THE FOLLOWING 3 OBJECTS for your own repo -->
   <groupId>org.onap.dcaegen2.collectors</groupId>
   <artifactId>snmptrap</artifactId>
-  <name>dcaegen2-collectors-snmptrap</name>
-  <version>1.2.0-SNAPSHOT</version>
+  <name>dcae_snmptrap</name>
+  
+  <version>1.3.0-SNAPSHOT</version>
   <url>http://maven.apache.org</url>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <sonar.skip>true</sonar.skip>
     <sonar.sources>.</sonar.sources>
     <!-- customize the SONARQUBE URL -->
-    <!-- sonar.host.url>http://localhost:9000</sonar.host.url -->
+    <sonar.host.url>http://localhost:9000</sonar.host.url>
     <!-- below are language dependent -->
     <!-- for Python -->
     <sonar.language>py</sonar.language>
     <sonar.pluginName>Python</sonar.pluginName>
     <sonar.inclusions>**/*.py</sonar.inclusions>
-    <!-- for JavaScaript -->
-    <!--
-    <sonar.language>js</sonar.language>
-    <sonar.pluginName>JS</sonar.pluginName>
-    <sonar.inclusions>**/*.js</sonar.inclusions>
-    -->
   </properties>
+
   <build>
     <finalName>${project.artifactId}-${project.version}</finalName>
     <pluginManagement>
       <plugins>
-        <!-- the following plugins are invoked from oparent, we do not need them -->
-        <plugin>
-          <groupId>org.sonatype.plugins</groupId>
-          <artifactId>nexus-staging-maven-plugin</artifactId>
-          <version>1.6.7</version>
-          <configuration>
-            <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-deploy-plugin</artifactId>
-          <!-- This version supports the "deployAtEnd" parameter -->
-          <version>2.8</version>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-enforcer-plugin</artifactId>
-          <version>3.0.0-M1</version>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
-        <!-- first disable the default Java plugins at various stages -->
-        <!-- maven-resources-plugin is called during "*resource" phases by default behavior.  it prepares the resources
-       dir.  we do not need it -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-resources-plugin</artifactId>
-          <version>2.6</version>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
-        <!-- maven-compiler-plugin is called during "compile" phases by default behavior.  we do not need it -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <version>3.1</version>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
-        <!-- maven-jar-plugin is called during "compile" phase by default behavior.  we do not need it -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-jar-plugin</artifactId>
-          <version>2.4</version>
-          <executions>
-            <execution>
-              <id>default-jar</id>
-              <phase/>
-            </execution>
-          </executions>
-        </plugin>
-        <!-- maven-install-plugin is called during "install" phase by default behavior.  it tries to copy stuff under 
-       target dir to ~/.m2.  we do not need it -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-install-plugin</artifactId>
-          <version>2.4</version>
-          <configuration>
-            <skip>true</skip>
-          </configuration>
-        </plugin>
-        <!-- maven-surefire-plugin is called during "test" phase by default behavior.  it triggers junit test.
-       we do not need it -->
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.12.4</version>
-          <configuration>
-            <skipTests>true</skipTests>
-          </configuration>
-        </plugin>
         <plugin>
           <groupId>org.codehaus.mojo</groupId>
-          <artifactId>exec-maven-plugin</artifactId>
-          <version>1.2.1</version>
-          <configuration>
-            <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable>
-            <environmentVariables>
-              <!-- make mvn properties as env for our script -->
-              <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID>
-              <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID>
-              <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION>
-              <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY>
-              <MVN_RAWREPO_BASEURL_UPLOAD>${onap.nexus.rawrepo.baseurl.upload}</MVN_RAWREPO_BASEURL_UPLOAD>
-              <MVN_RAWREPO_BASEURL_DOWNLOAD>${onap.nexus.rawrepo.baseurl.download}</MVN_RAWREPO_BASEURL_DOWNLOAD>
-              <MVN_RAWREPO_SERVERID>${onap.nexus.rawrepo.serverid}</MVN_RAWREPO_SERVERID>
-              <MVN_DOCKERREGISTRY_DAILY>${onap.nexus.dockerregistry.daily}</MVN_DOCKERREGISTRY_DAILY>
-              <MVN_DOCKERREGISTRY_RELEASE>${onap.nexus.dockerregistry.release}</MVN_DOCKERREGISTRY_RELEASE>
-            </environmentVariables>
-          </configuration>
+          <artifactId>sonar-maven-plugin</artifactId>
+          <version>2.7.1</version>
         </plugin>
       </plugins>
     </pluginManagement>
+
     <plugins>
       <!-- plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
+                   <artifactId>maven-assembly-plugin</artifactId>
         <version>2.4.1</version>
         <configuration>
           <descriptors>
@@ -173,6 +62,65 @@
           </execution>
         </executions>
       </plugin -->
+
+      <!-- first disable the default Java plugins at various stages -->
+      <!-- maven-resources-plugin is called during "*resource" phases by default behavior.  it prepares the resources
+                  dir.  we do not need it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-resources-plugin</artifactId>
+        <version>2.6</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+
+      <!-- maven-compiler-plugin is called during "compile" phases by default behavior.  we do not need it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.1</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+
+      <!-- maven-jar-plugin is called during "compile" phase by default behavior.  we do not need it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-jar-plugin</artifactId>
+        <version>2.4</version>
+        <executions>
+          <execution>
+            <id>default-jar</id>
+            <phase/>
+          </execution>
+        </executions>
+      </plugin>
+
+      <!-- maven-install-plugin is called during "install" phase by default behavior.  it tries to copy stuff under 
+                  target dir to ~/.m2.  we do not need it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-install-plugin</artifactId>
+        <version>2.4</version>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+
+      <!-- maven-surefire-plugin is called during "test" phase by default behavior.  it triggers junit test.
+                  we do not need it -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.12.4</version>
+        <configuration>
+          <skipTests>true</skipTests>
+        </configuration>
+      </plugin>
+
+
       <!-- now we configure custom action (calling a script) at various lifecycle phases -->
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
@@ -182,92 +130,147 @@
           <execution>
             <id>clean phase script</id>
             <phase>clean</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>${session.executionRootDirectory}/mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>clean</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>generate-sources script</id>
             <phase>generate-sources</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>generate-sources</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>compile script</id>
             <phase>compile</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>compile</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>package script</id>
             <phase>package</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>package</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>test script</id>
             <phase>test</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>test</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>install script</id>
             <phase>install</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>install</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
+
           <execution>
             <id>deploy script</id>
             <phase>deploy</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
+            <goals><goal>exec</goal></goals>
             <configuration>
+              <executable>mvn-phase-script.sh</executable>
               <arguments>
-                <argument>__</argument>
+                <argument>${project.artifactId}</argument>
                 <argument>deploy</argument>
               </arguments>
+              <environmentVariables>
+                <!-- make mvn properties as env for our script -->
+                <MVN_PROJECT_GROUPID>${project.groupId}</MVN_PROJECT_GROUPID> 
+                <MVN_PROJECT_ARTIFACTID>${project.artifactId}</MVN_PROJECT_ARTIFACTID> 
+                <MVN_PROJECT_VERSION>${project.version}</MVN_PROJECT_VERSION> 
+                <MVN_NEXUSPROXY>${onap.nexus.url}</MVN_NEXUSPROXY> 
+                <!--MVN_DOCKERREG_URL>${docker.push.registry}</MVN_DOCKERREG_URL--> 
+              </environmentVariables> 
             </configuration>
           </execution>
         </executions>
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..a0b06ee
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+snmptrap
+pysnmp==4.4.2
+onap_dcae_cbs_docker_client==0.0.3
diff --git a/setup.py b/setup.py
index 660629b..5351ca0 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
 # org.onap.dcae
 # ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
+# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -55,11 +55,11 @@
 reqs = [str(ir.req) for ir in install_reqs]
 
 setup(
-    name = "dcaegen2_collectors_snmptrap",
+    name = "onap_dcae_cbs_docker_client",
     description = "snmp trap receiver for a DCAE docker image",
     version = "1.0",
     packages=find_packages(),
-    author = "Dave LaDue",
+    author = "Dave L",
     author_email = "dl3158@att.com",
     license='Apache 2',
     keywords = "",
diff --git a/spec/snmptrap-collector-component-spec.json b/spec/snmptrap-collector-component-spec.json
new file mode 100644
index 0000000..85cadde
--- /dev/null
+++ b/spec/snmptrap-collector-component-spec.json
@@ -0,0 +1,251 @@
+{
+        "self": {
+        	"version": "1.3.0",
+		"name": "snmptrap-collector",
+                "description": "Collector for receiving SNMP traps and publishing to DMAAP/MR",
+                "component_type": "docker"
+        },
+        "streams": {
+                "subscribes": [
+
+                ],
+                "publishes": [
+                        {
+                                "format": "snmptrap_output",
+                                "version": "1.3.0",
+                                "type": "message router",
+                                "config_key": "snmp_trap"
+                        }
+                ]
+        },
+        "services": {
+                "calls": [],
+                "provides": [],
+        },
+        "parameters": [
+                    {
+                        "name": "snmptrap.version",
+                        "value":"1.3.0",
+                        "description": "version number"
+                    },
+                    {
+                        "name": "snmptrap.title",
+                        "value":"ONAP SNMP Trap Receiver",
+                        "description": "title for logging"
+                    },
+                    {
+                        "name": "protocols.transport",
+                        "value":"udp",
+                        "description": "protocol used to transport trap (udp|tcp)"
+                    },
+                    {
+                        "name": "protocols.ipv4_interface",
+                        "value":"0.0.0.0",
+                        "description": "ipv4 address trap receiver should listen to (0.0.0.0 -> all)"
+                    },
+                    {
+                        "name": "protocols.ipv4_port",
+                        "value":162,
+                        "description": "port trap receiver should bind to for ipv4 traps"
+                    },
+                    {
+                        "name": "protocols.ipv6_interface",
+                        "value":"::1",
+                        "description": "ipv6 address trap receiver should listen to (::1 -> all)"
+                    },
+                    {
+                        "name": "protocols.ipv6_port",
+                        "value":162,
+                        "description": "port trap receiver should bind to for ipv6 traps"
+                    },
+                    {
+                        "name": "cache.dns_cache_ttl_seconds",
+                        "value":60,
+                        "description": "number of seconds snmptrapd will cache dns entry before re-loading it"
+                    },
+                    {
+                        "name": "publisher.http_timeout_milliseconds",
+                        "value":1500,
+                        "description":"milliseconds snmptrapd will wait for MR to respond to publish attempt",
+                    },
+                    {
+                        "name":"publisher.http_retries",
+                        "value":3,
+                        "description":"number of times snmptrapd will re-attempt MR publish before moving on",
+                    },
+                    {
+                        "name": "publisher.http_milliseconds_between_retries",
+                        "value":750,
+                        "description":"milliseconds snmptrapd will wait between MR publish re-attempts",
+                    },
+                    {
+                        "name": "publisher.http_primary_publisher",
+                        "value":"true",
+                        "description": "future use (resiliency)"
+                    },
+                    {
+                        "name": "publisher.http_peer_publisher",
+                        "value":"unavailable",
+                        "description": "future use (resiliency)"
+                    },
+                    {
+                        "name": "publisher.max_traps_between_publishes",
+                        "value":10,
+                        "description": "max number of traps snmptrapd will queue before publishing"
+                    },
+                    {
+                        "name": "publisher.max_milliseconds_between_publishes",
+                        "value":1000,
+                        "description": "max number of milliseconds snmptrapd will accumulate traps before publishing"
+                    },
+                    {
+                        "name": "files.runtime_base_dir",
+                        "value":"/opt/app/snmptrap",
+                        "description": "base dir of snmptrapd install"
+                    },
+                    {
+                        "name": "files.log_dir",
+                        "value":"logs",
+                        "description": "location from runtime_base_dir for logs"
+                    },
+                    {
+                        "name": "files.data_dir",
+                        "value":"data",
+                        "description": "location from runtime_base_dir for data"
+                    },
+                    {
+                        "name": "files.pid_dir",
+                        "value":"tmp",
+                        "description": "location from runtime_base_dir for pid_dir"
+                    },
+                    {
+                        "name": "files.arriving_traps_log",
+                        "value":"snmptrapd_arriving_traps.log",
+                        "description": "log of all arriving traps (published or not)"
+                    },
+                    {
+                        "name": "files.snmptrapd_diag",
+                        "value":"snmptrapd_prog_diag.log",
+                        "description": "future use"
+                    },
+                    {
+                        "name": "files.traps_stats_log",
+                        "value":"snmptrapd_stats.csv",
+                        "description": "future use"
+                    },
+                    {
+                        "name": "files.perm_status_file",
+                        "value":"snmptrapd_status.log",
+                        "description": "future use"
+                    },
+                    {
+                        "name": "files.eelf_base_dir",
+                        "value":"/opt/app/snmptrap/logs",
+                        "description": "directory that all EELF format logs will be written to"
+                    },
+                    {
+                        "name": "files.eelf_error",
+                        "value":"error.log",
+                        "description": "EELF error log"
+                    },
+                    {
+                        "name": "files.eelf_debug",
+                        "value":"debug.log",
+                        "description": "EELF debug log"
+                    },
+                    {
+                        "name": "files.eelf_audit",
+                        "value":"audit.log",
+                        "description": "EELF audit log"
+                    },
+                    {
+                        "name": "files.eelf_metrics",
+                        "value":"metrics.log",
+                        "description": "EELF metrics log"
+                    },
+                    {
+                        "name": "files.roll_frequency",
+                        "value":"hour",
+                        "description": "how often snmptrapd will roll logs to <logfilename>.timestamp, and start a new one"
+                    },
+                    {
+                        "name": "files.minimum_severity_to_log",
+                        "value":2,
+                        "description": "minimium severity to log in above EELF log files: SEV_DETAILED=1, SEV_INFO=2, SEV_WARN=3, SEV_CRIT=4, SEV_FATAL=5"
+
+                    },
+                    {
+                        "name": "trap_def.1.trap_oid",
+                        "value":".1.3.6.1.4.1.74.2.46.12.1.1",
+                        "description": "oid 1 of interest"
+                    },
+                    {
+                        "name": "trap_def.1.trap_category",
+                        "value":"DCAE-SNMP-TRAPS",
+                        "description": "topic to publish oid 1 to (future use)"
+                    },
+                    {
+                        "name": "trap_def.2.trap_oid",
+                        "value":"*",
+                        "description": "oid 1 of interest"
+                    },
+                    {
+                        "name": "trap_def.2.trap_category",
+                        "value":"DCAE-SNMP-TRAPS",
+                        "description": "topic to publish oid 1 to (future use)"
+                    },
+                    {
+                        "name": "stormwatch.1.stormwatch_oid",
+                        "value":".1.3.6.1.4.1.74.2.46.12.1.1",
+                        "description": "stormWatch candidate oid 1"
+                    },
+                    {
+                        "name": "stormwatch.1.low_water_rearm_per_minute",
+                        "value":"5",
+                        "description": "stormWatch candidate oid 1 low-water rearm value (future use)"
+                    },
+                    {
+                        "name": "stormwatch.1.high_water_arm_per_minute",
+                        "value":"100",
+                        "description": "stormWatch candidate oid 1 high-water storm activation value (future use)"
+                    },
+                    {
+                        "name": "stormwatch.2.stormwatch_oid",
+                        "value":".1.3.6.1.4.1.74.2.46.12.1.2",
+                        "description": "stormWatch candidate oid 2"
+                    },
+                    {
+                        "name": "stormwatch.2.low_water_rearm_per_minute",
+                        "value":"2",
+                        "description": "stormWatch candidate oid 2 low-water rearm value (future use)"
+                    },
+                    {
+                        "name": "stormwatch.2.high_water_arm_per_minute",
+                        "value":"200",
+                        "description": "stormWatch candidate oid 2 high-water storm activation value (future use)"
+                    },
+                    {
+                        "name": "stormwatch.3.stormwatch_oid",
+                        "value":".1.3.6.1.4.1.74.2.46.12.1.2",
+                        "description": "stormWatch candidate oid 3"
+                    },
+                    {
+                        "name": "stormwatch.3.low_water_rearm_per_minute",
+                        "value":"2",
+                        "description": "stormWatch candidate oid 3 low-water rearm value (future use)"
+                    },
+                    {
+                        "name": "stormwatch.3.high_water_arm_per_minute",
+                        "value":"200",
+                        "description": "stormWatch candidate oid 3 high-water storm activation value (future use)"
+                    }
+        ],
+        "auxilary": {
+        },
+        "artifacts": [
+                {
+                        "type": "docker image",
+                        "uri": "dockercentral.it.att.com:5100/com.att.dcae.controller/dcae-controller-snmptrap-collector:18.x.x"
+                }
+        ]
+}
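Reviewer aside: the parameters block above lines up name-for-name with the flat keys in etc/snmptrapd.json (default values differ in places, e.g. the ports). A hedged sketch of that correspondence follows; the real resolution is performed by the DCAE platform / config binding service, not by this snippet, and the spec file path is only an assumption for illustration.

    # illustration: spec parameter names map onto the flat runtime config keys
    import json

    with open('spec/snmptrap-collector-component-spec.json') as fd:
        spec = json.load(fd)

    flat_defaults = {p['name']: p['value'] for p in spec['parameters']}
    assert flat_defaults['files.runtime_base_dir'] == '/opt/app/snmptrap'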
diff --git a/src/dcae_snmptrapd.py b/src/dcae_snmptrapd.py
deleted file mode 100644
index 77043d8..0000000
--- a/src/dcae_snmptrapd.py
+++ /dev/null
@@ -1,832 +0,0 @@
-# ============LICENSE_START=======================================================)
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2017-2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-dcae_snmptrapd is responsible for SNMP trap receipt and publishing activities.
-It's behavior is controlled by several configs, the primary being:
-
-    ../etc/trapd.yaml
-
-As traps arrive they are decomposed and transformed into a JSON message which
-is published to a dmaap instance that has been defined by controller.
-
-:Parameters:
-    usage:  dcae_snmptrapd.py -c <yaml_conf_file_name> [-v]
-:Keywords:
-    onap dcae snmp trap publish dmaap
-"""
-
-__docformat__ = 'restructuredtext'
-
-# basics
-import argparse
-import array
-import asyncio
-from collections import Counter
-import datetime
-import errno
-import json
-import logging
-import logging.handlers
-import os
-import pprint
-import requests
-import re
-import sys
-import signal
-import string
-import socket
-import time
-import traceback
-import unicodedata
-import uuid as uuid_mod
-import yaml
-
-# pysnmp
-from pysnmp.entity import engine, config
-from pysnmp.carrier.asyncio.dgram import udp, udp6
-# from pysnmp.carrier.asyncore.dgram import udp
-from pysnmp.entity.rfc3413 import ntfrcv
-from pysnmp.proto.api import v2c
-
-# gen2 controller
-from onap_dcae_cbs_docker_client.client import get_config
-
-# dcae_snmptrap
-from trapd_runtime_pid import save_pid, rm_pid
-from trapd_yaml_config import read_yaml_config
-from trapd_trap_config import read_trap_config
-from trapd_dmaap_config import read_dmaap_config
-from trapd_exit import cleanup_and_exit
-from trapd_http_session import init_session_obj
-from trapd_perm_status import log_to_perm_status
-
-prog_name = os.path.basename(__file__)
-
-traps_in_second = 0
-last_epoch_second = 0
-
-# <dmaap.conf>
-dmaap_url = ""
-dmaap_user_name = ""
-dmaap_p_var = ""
-dmaap_stream_id = ""
-dmaap_host = ""
-# </dmaap.conf>
-
-# Requests session object
-dmaap_requests_session = None
-http_headers = {"Content-type": "application/json"}
-
-# <DNS cache>
-#
-#     dns_cache_ip_to_name
-#        key [ip address] -> fqdn
-#     dns_cache_ip_expires
-#        key [ip address] -> epoch time this entry expires and must be reloaded
-dns_cache_ip_to_name = {}
-dns_cache_ip_expires = {}
-# </DNS cache>
-
-# <trap config>
-num_trap_conf_entries = 0
-trap_conf_dict = {}
-# </trap config>
-
-pid_file_name = ""
-
-# logging
-dcae_logger = logging.getLogger('dcae_logger')
-handler = ""
-dcae_logger_max_bytes = 60000000
-dcae_logger_num_archives = 10
-
-undefined = "undefined"
-rc = 0
-usage_requested = False
-
-json_fd = None
-last_hour = -1
-
-verbose = False
-
-trap_dict = {}
-
-# # # # # # # # # # #
-# fx: usage_err
-# # # # # # # # # # #
-def usage_err():
-    """
-    Notify of incorrect (argument) usage
-    :Parameters:
-       none
-    :Exceptions:
-       none
-    :Keywords:
-       usage args
-    """
-
-    print('Correct usage:')
-    print('  %s -c <yaml_conf_file_name> [-v]' % prog_name)
-    cleanup_and_exit(1, "undefined")
-
-
-# # # # # # # # # # # # # # # # # # #
-# fx: setup dcae_logger custom logger
-# # # # # # # # # # ## # # # # # # #
-def setup_dcae_logger(_yc_trapd_diag):
-    """
-    Setup custom logger for dcae_snmptrapd that incorporates
-    a rotating file handler with 10 backups of diagnostic messages
-    :Parameters:
-       _yc_trapd_diag - the full path output filename
-    :Exceptions:
-       none
-    :Keywords:
-       logging rotation
-    """
-
-    global dcae_logger
-    global handler
-
-    date_fmt = '%m/%d/%Y %H:%M:%S'
-
-    _yc_trapd_diag_bak = "%s.bak" % (_yc_trapd_diag)
-    if os.path.isfile(_yc_trapd_diag):
-        os.rename(_yc_trapd_diag, _yc_trapd_diag_bak)
-
-    # handler = logging.handlers.RotatingFileHandler(yc_trapd_diag, maxBytes=60000000, backupCount=10)
-    handler = logging.handlers.RotatingFileHandler(_yc_trapd_diag,
-                                                   maxBytes=dcae_logger_max_bytes,
-                                                   backupCount=dcae_logger_num_archives)
-
-    # set logLevel - valid values NOTSET, DEBUG, INFO, WARNING, ERROR, CRITICAL
-    handler.setLevel(logging.DEBUG)
-    dcae_logger.setLevel(logging.DEBUG)
-
-    log_fmt = '%(levelname)s|%(asctime)s|%(name)s|%(process)d|%(funcName)s|'\
-              '%(message)s'
-    formatter = logging.Formatter(log_fmt)
-    handler.setFormatter(formatter)
-    dcae_logger.addHandler(handler)
-
-    if os.path.isfile(_yc_trapd_diag):
-        os.chmod(_yc_trapd_diag, 0o640)
-
-    if os.path.isfile(_yc_trapd_diag_bak):
-        os.chmod(_yc_trapd_diag_bak, 0o640)
-
-
-# # # # # # # # # # # # # # # # # # #
-# fx: load_all_configs
-# FIXME:  currently on hold for load and signal handling convergence
-# # # # # # # # # # ## # # # # # # #
-def load_all_configs(_signum, _frame):
-    """
-    Calls individual functions to read various config files required.  This
-    function is called directly (e.g. at startup) and is also registered
-    with signal handling (e.g. kill -sigusr1 <pid>)
-
-    :Parameters:
-      signum and frame (only present when called via signal to running process)
-    :Exceptions:
-      none
-    :Keywords:
-      config files
-    :Variables:
-      yaml_conf_file
-      dmaap_requests_session
-    """
-
-    global dmaap_requests_session
-
-    if int(_signum) != 0:
-        dcae_logger.info("%s Received signal %s at frame %s; re-reading config file"
-                         % (prog_name, _signum, _frame))
-    else:
-        dcae_logger.info("Reading config files")
-
-    # FIXME: should be re-reading all configs here
-
-    # Initialize dmaap requests session object. Close existing session
-    # if applicable.
-    if dmaap_requests_session is not None:
-        dmaap_requests_session.close()
-    dmaap_requests_session = init_session_obj(dcae_logger)
-
-    # FIXME: return the re-read config values once converged with signal handling
-    return
-
-
-# # # # # # # # # # # # #
-# fx: rename_json_log
-# # # # # # # # # # # # #
-def rename_json_log(_outputFname):
-    """
-    Renames JSON output file to include ISO-formatted date suffix
-    :Parameters:
-      _outputFname - full path of the json log file to rename
-    :Exceptions:
-      none
-    :Keywords:
-      json log
-    :Variables:
-      json log filename
-    """
-
-    global json_fd
-
-    # check if outputfile exists; if it does, move to timestamped version
-    _outputFnameBak = "%s.%s" % (_outputFname,
-                                 datetime.datetime.fromtimestamp(time.time()).
-                                 strftime('%Y-%m-%dT%H:%M:%S'))
-
-    # close existing file
-    close_json_log()
-
-    if os.path.isfile(_outputFname):
-        dcae_logger.debug('Renaming %s to %s' %
-                          (_outputFname, _outputFnameBak))
-        os.rename(_outputFname, _outputFnameBak)
-    else:
-        dcae_logger.error("Unable to move %s to %s - source file does not exist" %
-                          (_outputFname, _outputFnameBak))
-    # open new (empty) log file
-    try:
-        json_fd = open_json_log()
-    except:
-        dcae_logger.exception(
-            "Error opening new json log file %s - exiting  " % _outputFname)
-        sys.exit(1)
-
-# # # # # # # # # # # # #
-# fx: open_json_log
-# # # # # # # # # # # # #
-def open_json_log():
-
-    try:
-        # open append mode just in case so nothing is lost, but should be
-        # non-existent file
-        _json_fd = open(json_log_filename, 'a')
-        dcae_logger.exception("Opened %s append mode: " % json_log_filename)
-        return _json_fd
-    except:
-        dcae_logger.exception(
-            "Error opening %s append mode: " % json_log_filename)
-        sys.exit(1)
-
-
-# # # # # # # # # # # # #
-# fx: close_json_log
-# # # # # # # # # # # # #
-def close_json_log():
-
-    global json_fd
-
-    try:
-        json_fd.close()
-    except:
-        dcae_logger.error("ERROR closing json audit file %s - results "
-                          "indeterminate" % (json_log_filename))
-
-# # # # # # # # # # # # #
-# fx: log_published_messages
-# # # # # # # # # # # # #
-
-
-def log_published_messages(loc_post_data_enclosed):
-
-    # FIXME: should keep data dictionary of Fd's open, and reference those vs.
-    #        repeatedly opening append-mode
-    # open json audit log file
-
-    global json_fd, last_hour
-
-    # close output file, backup current and move new one into place on day change
-    dcae_logger.info('%.4f adding %s to json log' %
-                     (time.time(), trap_dict["uuid"]))
-    curr_hour = datetime.datetime.now().hour
-    if curr_hour < last_hour:
-        # rename_json_log() also opens a fresh file and resets the global json_fd
-        rename_json_log(json_log_filename)
-
-    try:
-        m = loc_post_data_enclosed + '\n'
-        json_fd.write('%s' % str(m))
-    except Exception as e:
-        dcae_logger.error("ERROR writing json audit file %s - message NOT LOGGED: %s"
-                          % (json_log_filename, str(e)))
-
-    last_hour = curr_hour
-    dcae_logger.info('%.4f logged %s' % (time.time(), trap_dict["uuid"]))
-
-# # # # # # # # # # # # #
-# fx: post_dmaap
-# # # # # # # # # # # # #
-
-
-def post_dmaap(dmaap_url, dmaap_user_name, dmaap_p_var, dmaap_stream_id, dmaap_host, uuid, traps_json_string):
-    """
-    Publish trap data in json format to dmaap
-    :Parameters:
-      dmaap_url
-         base url for http post
-      dmaap_user_name
-         username for http post
-      dmaap_p_var
-         access credential for http post
-      dmaap_stream_id
-         appended to dmaap_url, equiv to "topic"
-      dmaap_host
-         target dmaap server to submit http post
-      uuid
-         unique ID associated with this trap
-      traps_json_string
-         json format string to include in http post
-    :Exceptions:
-      none
-    :Keywords:
-      http post dmaap json message
-    :Variables:
-    """
-
-    global http_resp, dmaap_requests_session, last_pub_time
-
-    if dmaap_requests_session is None:
-        dmaap_requests_session = init_session_obj(dcae_logger)
-
-    post_data_enclosed = '[' + traps_json_string + ']'
-
-    k = 0
-    dmaap_pub_success = False
-
-    if verbose:
-        print('%.4f starting publish of %s' % (time.time(), trap_dict["uuid"]))
-    dcae_logger.info('%.4f starting publish of %s' %
-                     (time.time(), trap_dict["uuid"]))
-    while not dmaap_pub_success and k < yaml_config_values.yc_http_retries:
-        try:
-            dcae_logger.debug("Attempt %d to %s dmaap_url: "
-                              "%s dmaap_user_name: %s post_data: %s"
-                              % (k, dmaap_host,
-                                 dmaap_url,
-                                 dmaap_user_name,
-                                 post_data_enclosed))
-
-            # below disable_warnings required until python updated:
-            #       https://github.com/shazow/urllib3/issues/497
-            # requests.packages.urllib3.disable_warnings()
-            http_resp = dmaap_requests_session.post(dmaap_url, post_data_enclosed,
-                                                    auth=(dmaap_user_name,
-                                                          dmaap_p_var),
-                                                    headers=http_headers,
-                                                    timeout=yaml_config_values.yc_http_timeout)
-            dcae_logger.debug("Response from %s on stream %s: %s dmaap_requests_session: %s"
-                              % (dmaap_host, dmaap_stream_id, http_resp.status_code, dmaap_requests_session))
-            if verbose:
-                print('%.4f published %s successfully' %
-                      (time.time(), trap_dict["uuid"]))
-            dcae_logger.info('%.4f published %s successfully' %
-                             (time.time(), trap_dict["uuid"]))
-            if http_resp.status_code == requests.codes.ok:
-                dcae_logger.debug("Response from %s: %s dmaap_request_sesson: %s" % (
-                    dmaap_url, http_resp.status_code, dmaap_requests_session))
-                log_published_messages(post_data_enclosed)
-                last_pub_time = time.time()
-                dmaap_pub_success = True
-                break
-            else:
-                dcae_logger.debug("Response (non-200) detail from %s on stream "
-                                  "%s: %s" % (dmaap_host, dmaap_stream_id, http_resp.text))
-
-        except OSError as e:
-            dcae_logger.debug("Exception while posting message to host: %s, stream: %s, dmaap_requests_session: %s, exception: %s %s"
-                              % (dmaap_host, dmaap_stream_id, dmaap_requests_session, e.errno, e.strerror))
-        except requests.exceptions.RequestException as e:
-            dcae_logger.error("Exception while posting to %s topic %s: -->%s<--"
-                              % (dmaap_host, dmaap_stream_id, e))
-
-        k += 1
-
-        if k < yaml_config_values.yc_http_retries:
-            dcae_logger.error("sleeping %s and retrying" %
-                              yaml_config_values.yc_http_secs_between_retries)
-            time.sleep(yaml_config_values.yc_http_secs_between_retries)
-        else:
-            dcae_logger.error("exhausted all attempts - giving up")
-            break
-
-    if verbose:
-        print('%.4f exiting post_dmaap for %s' %
-              (time.time(), trap_dict["uuid"]))
-    dcae_logger.info('%.4f exiting post_dmaap for %s' %
-                     (time.time(), trap_dict["uuid"]))
-    if not dmaap_pub_success:
-        # uuid = uuid_mod.uuid1()
-        perm_msg = "CRITICAL: publish failure to DMAAP server: "\
-                   "%s, stream: %s trap: %s" % (
-                       dmaap_host, dmaap_stream_id, uuid)
-        dcae_logger.error(perm_msg)
-        dcae_logger.error("SEND-TO-PERM-STATUS: %s" % perm_msg)
-        log_to_perm_status(
-            yaml_config_values.yc_perm_status_file, perm_msg, dcae_logger)
-        dcae_logger.info("%.4f %s" % (time.time(), perm_msg))
-        if verbose:
-            print("%.4f %s" % (time.time(), perm_msg))
-
-
-# # # # # # # # # # # # # # # # # # #
-# fx: request_observer for community string rewrite
-# # # # # # # # # # # # # # # # # # #
-def comm_string_rewrite_observer(snmpEngine, execpoint, variables, cbCtx):
-
-    # match ALL community strings
-    if re.match('.*', str(variables['communityName'])):
-        dcae_logger.debug('Rewriting communityName \'%s\' from %s into \'public\''
-                          % (variables['communityName'],
-                             ':'.join([str(x) for x in variables['transportInformation'][1]])))
-        variables['communityName'] = variables['communityName'].clone('public')
-
-# # # # # # # # # # # # # # # # # # #
-# fx: snmp_engine_observer_cb
-#     callback for when trap is received
-# # # # # # # # # # # # # # # # # # #
-
-
-def snmp_engine_observer_cb(snmp_engine, execpoint, variables, cbCtx):
-    """
-    Decompose trap attributes and load in dictionary which is later used to
-    create json string for publishing to dmaap.
-    :Parameters:
-      snmp_engine
-         snmp engine created to listen for arriving traps
-      execpoint
-         point in code that snmp_engine_observer_cb was invoked
-      variables
-         trap attributes
-      cbCtx
-         callback context
-    :Exceptions:
-      none
-    :Keywords:
-      UEB non-AAF legacy http post
-    :Variables:
-    """
-
-    global trap_dict, last_epoch_second, traps_in_epoch
-
-    # init dictionary on new trap
-    trap_dict = {}
-
-    # assign uuid to trap
-    trap_dict["uuid"] = str(uuid_mod.uuid1())
-
-    if verbose:
-        print('%.4f snmp trap arrived from %s, assigned uuid: %s' %
-              (time.time(), variables['transportAddress'][0], trap_dict["uuid"]))
-    dcae_logger.info('%.4f snmp trap arrived from %s, assigned uuid: %s' % (
-        time.time(), variables['transportAddress'][0], trap_dict["uuid"]))
-
-    # if re.match('.*', str(variables['communityName'])):
-    #     print('Rewriting communityName \'%s\' from %s into \'public\'' % (variables['communityName'], ':'.join([str(x) for x in variables['transportInformation'][1]])))
-    #    variables['communityName'] = variables['communityName'].clone('public')
-
-    # ip and hostname
-    ip_addr_str = str(variables['transportAddress'][0])
-    trap_dict["agent address"] = ip_addr_str
-    try:
-        if int(dns_cache_ip_expires[ip_addr_str]) < int(time.time()):
-            dcae_logger.debug('dns cache expired for %s' % ip_addr_str)
-            raise Exception('cache expired for %s at %d - updating value' %
-                            (ip_addr_str, (dns_cache_ip_expires[ip_addr_str])))
-        else:
-            trap_dict["agent name"] = dns_cache_ip_to_name[ip_addr_str]
-    except:
-        if verbose:
-            print('%.4f dns cache expired for %s' % (time.time(), ip_addr_str))
-        dcae_logger.debug(
-            'dns cache expired or missing for %s - reloading' % ip_addr_str)
-        host_addr_info = socket.gethostbyaddr(ip_addr_str)
-        agent_fqdn = str(host_addr_info[0])
-        trap_dict["agent name"] = agent_fqdn
-
-        dns_cache_ip_to_name[ip_addr_str] = agent_fqdn
-        dns_cache_ip_expires[ip_addr_str] = (
-            time.time() + yaml_config_values.yc_dns_cache_ttl_seconds)
-        dcae_logger.debug('cache for %s (%s) updated - set to expire at %d' %
-                          (agent_fqdn, ip_addr_str, dns_cache_ip_expires[ip_addr_str]))
-
-    trap_dict["cambria.partition"] = str(trap_dict["agent name"])
-    trap_dict["community"] = ""    # do not include cleartext community in pub
-    # do not include cleartext community in pub
-    trap_dict["community len"] = 0
-
-    # FIXME.CHECK_WITH_DOWNSTREAM_CONSUMERS: get rid of round for millisecond val
-    # epoch_second = int(round(time.time()))
-    epoch_msecond = time.time()
-    epoch_second = int(round(epoch_msecond))
-    if epoch_second == last_epoch_second:
-        traps_in_epoch += 1
-    else:
-        traps_in_epoch = 0
-    last_epoch_second = epoch_second
-    traps_in_epoch_04d = format(traps_in_epoch, '04d')
-    trap_dict['epoch_serno'] = int(
-        (str(epoch_second) + str(traps_in_epoch_04d)))
-
-    snmp_version = variables['securityModel']
-    if snmp_version == 1:
-        trap_dict["protocol version"] = "v1"
-    elif snmp_version == 2:
-        trap_dict["protocol version"] = "v2c"
-    elif snmp_version == 3:
-        trap_dict["protocol version"] = "v3"
-    else:
-        trap_dict["protocol version"] = "unknown"
-
-    if snmp_version == 3:
-        trap_dict["security level"] = str(variables['securityLevel'])
-        trap_dict["context name"] = str(variables['contextName'].prettyPrint())
-        trap_dict["security name"] = str(variables['securityName'])
-        trap_dict["security engine"] = str(
-            variables['contextEngineId'].prettyPrint())
-    trap_dict['time received'] = epoch_msecond
-    # get this from dmaap_url when ready
-    trap_dict['trap category'] = "DCAE-COLLECTOR-UCSNMP"
-
-
-# Callback function for receiving notifications
-# noinspection PyUnusedLocal,PyUnusedLocal,PyUnusedLocal
-def notif_receiver_cb(snmp_engine, stateReference, contextEngineId, contextName,
-                      varBinds, cbCtx):
-    """
-    Callback executed when trap arrives
-    :Parameters:
-      snmp_engine
-        snmp engine created to listen for arriving traps
-      stateReference
-      contextEngineId
-      contextName
-      varBinds
-        trap varbinds
-      cbCtx
-        callback context
-    :Exceptions:
-      none
-    :Keywords:
-      callback trap arrival
-    :Variables:
-    """
-
-    global trap_dict
-
-    if verbose:
-        print('%.4f processing varbinds for %s' %
-              (time.time(), trap_dict["uuid"]))
-    dcae_logger.info('%.4f processing varbinds for %s' %
-                     (time.time(), trap_dict["uuid"]))
-
-    # FIXME:  add conversion from v1 to v2 prior to below? or special handling for v1?
-
-    # FIXME update reset location when batching publishes
-    vb_dict = {}
-
-    vb_idx = 0
-    k1 = ""
-    k2 = ""
-
-    # FIXME: Note that the vb type is present, just need to extract it efficiently somehow
-    # print('\nvarBinds ==> %s' % (varBinds))
-    #
-    # varBinds ==> [(ObjectName('1.3.6.1.2.1.1.3.0'), TimeTicks(1243175676)),
-    #               (ObjectName('1.3.6.1.6.3.1.1.4.1.0'), ObjectIdentifier('1.3.6.1.4.1.74.2.46.12.1.1')),
-    #               (ObjectName('1.3.6.1.4.1.74.2.46.12.1.1.1'), OctetString(b'ucsnmp heartbeat - ignore')),
-    #               (ObjectName('1.3.6.1.4.1.74.2.46.12.1.1.2'), OctetString(b'Fri Aug 11 17:46:01 EDT 2017'))]
-    #
-    # This does NOT work:
-    # for name, typ, val in varBinds:
-    #     print('name = %s' % (name))
-    #     print('typ = %s' % (typ))
-    #     print('val = %s\n' % (val))
-
-    vb_all_string = ""
-    for name, val in varBinds:
-        vb_dict = {}
-        if vb_idx == 0:
-            vb_sys_uptime_oid = name
-            vb_sys_uptime = val
-            trap_dict["sysUptime"] = str(val)
-            # print('vb_sys_uptime = %s' % (vb_sys_uptime))
-        else:
-            if vb_idx == 1:
-                trap_dict["notify OID"] = str(val)
-                trap_dict["notify OID len"] = (
-                    trap_dict["notify OID"].count('.') + 1)
-                # print('vb_notify_oid = %s' % (vb_notify_oid))
-            # else:
-                # vb_idx_02d = format((vb_idx - 2), '02d')
-        vb_idx_02d = format((vb_idx), '02d')
-
-        k1 = "varbind_oid_" + str(vb_idx_02d)
-        k2 = "varbind_value_" + str(vb_idx_02d)
-        # vb_dict[k1] = name.prettyPrint()
-        # vb_dict[k2] = val.prettyPrint()
-        vb_dict["varbind_type"] = "tbd"
-        vb_dict["varbind_oid"] = name.prettyPrint()
-        vb_dict["varbind_value"] = val.prettyPrint()
-        vb_json = json.dumps(vb_dict)
-        vb_all_string += vb_json
-
-        vb_idx += 1
-
-    trap_dict["num varbinds"] = vb_idx
-
-    # add varbind dict to trap dict
-    # trap_dict["varbinds"] = vb_dict
-    trap_dict["varbinds"] = vb_all_string
-
-    dcae_logger.debug("vb_dict json-ized: %s" % (json.dumps(vb_dict)))
-    trap_json_msg = json.dumps(trap_dict)
-
-    # publish to dmaap after last varbind is processed
-    post_dmaap(dmaap_config_values.dmaap_url, dmaap_config_values.dmaap_user_name, dmaap_config_values.dmaap_p_var,
-               dmaap_config_values.dmaap_stream_id, dmaap_config_values.dmaap_host, trap_dict["uuid"], trap_json_msg)
-
-
-# # # # # # # # # # # # #
-# Main  MAIN  Main  MAIN
-# # # # # # # # # # # # #
-# parse command line args
-parser = argparse.ArgumentParser(description='Post SNMP traps '
-                                             'to DCAE DMaaP MR')
-parser.add_argument('-c', action="store", dest="yaml_conf_file", type=str,
-                    help="yaml config file name")
-parser.add_argument('-v', action="store_true", dest="verbose",
-                    help="verbose logging")
-parser.add_argument('-?', action="store_true", dest="usage_requested",
-                    help="show command line use")
-
-# set vars from args
-parser.set_defaults(yaml_conf_file="")
-
-# parse args
-args = parser.parse_args()
-
-# set vars from args
-yaml_conf_file = args.yaml_conf_file
-verbose = args.verbose
-usage_requested = args.usage_requested
-
-# if usage, just display and exit
-if usage_requested:
-    usage_err()
-
-# Get non-ENV settings from config file; spoof 2 params
-# so same fx can be used for signal handling
-if yaml_conf_file == "":
-    usage_err()
-
-# always get yaml config values
-yaml_config_values = read_yaml_config(yaml_conf_file)
-
-# setup custom logger
-setup_dcae_logger(yaml_config_values.yc_trapd_diag)
-
-# bump up logging level if overridden at command line
-if verbose:
-    dcae_logger.setLevel(logging.DEBUG)
-    handler.setLevel(logging.DEBUG)
-    dcae_logger.debug("log level increased to DEBUG")
-
-dcae_logger.info("log will include info level messages")
-dcae_logger.error("log will include error level messages")
-dcae_logger.debug("log will include debug level messages")
-dcae_logger.info("Runtime PID file: %s" % pid_file_name)
-
-# setup signal handling for config file reload
-# FIXME: need to have signal handler return all tuples for configs
-# signal.signal(signal.SIGUSR1, load_all_configs)
-
-# save current PID for future/external reference
-pid_file_name = '%s/%s.pid' % (yaml_config_values.yc_pid_dir, prog_name)
-rc = save_pid(pid_file_name)
-dcae_logger.info("Runtime PID file: %s" % pid_file_name)
-
-# always get trap configs
-trap_config_values = read_trap_config(
-    yaml_config_values.yc_trap_conf, dcae_logger)
-
-# Set initial startup hour for rolling logfile
-last_hour = datetime.datetime.now().hour
-
-# make sure my env is set properly
-try:
-    c = get_config()
-    if c == {}:
-        msg = "Unable to fetch configuration or it is erroneously empty - fatal ONAP controller error, trying OpenDCAE config"
-        dcae_logger.error(msg)
-        print('%s' % msg)
-
-# if new controller not present, try dmaap.conf
-except:
-    msg = "ONAP controller not present, attempting OpenDCAE dmaap.conf config"
-    dcae_logger.error(msg)
-    dmaap_config_values = read_dmaap_config(
-        yaml_config_values.yc_dmaap_conf, dcae_logger)
-
-    # get the topic from the url
-    dmaap_topic = dmaap_config_values.dmaap_url.split('.')[-1]
-    dcae_logger.info("Topic: %s" % dmaap_topic)
-    json_log_filename = yaml_config_values.yc_published_traps_dir + '/' + 'DMAAP' + '_' \
-        + dmaap_topic + '.json'
-    json_fd = open_json_log()
-    msg = "Using OpenDCAE dmaap.conf config"
-
-# Get the event loop for this thread
-loop = asyncio.get_event_loop()
-
-# Create SNMP engine with autogenerated engineID pre-bound
-# to socket transport dispatcher
-snmp_engine = engine.SnmpEngine()
-
-# # # # # # # # # # # #
-# Transport setup
-# # # # # # # # # # # #
-
-# UDP over IPv4
-# FIXME:  add check for presence of ipv4_interface prior to attempting add OR just put entire thing in try/except clause
-config.addTransport(
-    snmp_engine,
-    udp.domainName + (1,),
-    udp.UdpTransport().openServerMode(
-        (yaml_config_values.yc_ipv4_interface, yaml_config_values.yc_ipv4_port))
-)
-
-# UDP over IPv6
-# FIXME:  add check for presence of ipv6_interface prior to attempting add OR just put entire thing in try/except clause
-config.addTransport(
-    snmp_engine,
-    udp6.domainName,
-    udp6.Udp6Transport().openServerMode(
-        (yaml_config_values.yc_ipv6_interface, yaml_config_values.yc_ipv6_port))
-)
-
-# UDP over IPv4, second listening interface/port
-# config.addTransport(
-#     snmp_engine,
-#     udp.domainName + (2,),
-#     udp.UdpTransport().openServerMode(('127.0.0.1', 2162))
-# )
-
-# # # # # # # # # # # #
-# SNMPv1/2c setup
-# # # # # # # # # # # #
-
-# SecurityName <-> CommunityName mapping
-#     to restrict trap reception to only those with specific community
-#     strings
-config.addV1System(snmp_engine, 'my-area', 'public')
-
-# register comm_string_rewrite_observer for message arrival
-snmp_engine.observer.registerObserver(
-    comm_string_rewrite_observer,
-    'rfc2576.processIncomingMsg:writable'
-)
-
-# register snmp_engine_observer_cb for message arrival
-snmp_engine.observer.registerObserver(
-    snmp_engine_observer_cb,
-    'rfc3412.receiveMessage:request',
-    'rfc3412.returnResponsePdu',
-)
-
-# Register SNMP Application at the SNMP engine
-ntfrcv.NotificationReceiver(snmp_engine, notif_receiver_cb)
-
-snmp_engine.transportDispatcher.jobStarted(1)  # loop forever
-
-# Run I/O dispatcher which would receive queries and send confirmations
-try:
-    snmp_engine.transportDispatcher.runDispatcher()
-except:
-    snmp_engine.observer.unregisterObserver()
-    snmp_engine.transportDispatcher.closeDispatcher()
-    cleanup_and_exit(1, pid_file_name)
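
For reference, each message handed to post_dmaap() above is the JSON form of
trap_dict wrapped in '[' and ']'. A sketch of one payload, using purely
illustrative values (the field names are the ones assembled in
snmp_engine_observer_cb and notif_receiver_cb; "varbinds" is the concatenation
of the per-varbind JSON objects built in the loop):

    [{"uuid": "8c9e2c46-1bd8-11e8-b467-0ed5f89f718b",
      "agent address": "192.0.2.1", "agent name": "agent01.example.com",
      "cambria.partition": "agent01.example.com",
      "community": "", "community len": 0,
      "epoch_serno": 15181632000000, "protocol version": "v2c",
      "sysUptime": "1243175676",
      "notify OID": "1.3.6.1.4.1.74.2.46.12.1.1", "notify OID len": 12,
      "num varbinds": 2, "varbinds": "...",
      "time received": 1518163200.1234,
      "trap category": "DCAE-COLLECTOR-UCSNMP"}]
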
diff --git a/src/mod/trapd_dcae_logger.py b/src/mod/trapd_dcae_logger.py
deleted file mode 100644
index c47d8cf..0000000
--- a/src/mod/trapd_dcae_logger.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-NOTE:  This is a placeholder for now - logger has not been externalized
-from the main source.
-
-Setup custom logger for dcae_snmptrapd that incorporates
-a rotating file handler with 10 backups of diagnostic messages
-:Parameters:
-
-:Exceptions:
-
-:Keywords:
-
-"""
-
-__docformat__ = 'restructuredtext'
-
-import logging
-import logging.handlers
-import os
-
-
-# # # # # # # # # # # # # # # # # # #
-# fx: setup _dcae_logger custom logger
-# # # # # # # # # # ## # # # # # # #
-def setup_dcae_logger(_yc_snmptrapd_diag, _dcae_logger_max_bytes, _dcae_logger_num_archives):
-    """
-    """
-
-    _dcae_logger = logging.getLogger('dcae_logger')
-
-    _date_fmt = '%m/%d/%Y %H:%M:%S'
-
-    _yc_snmptrapd_diag_bak = "%s.bak" % (_yc_snmptrapd_diag)
-    if os.path.isfile(_yc_snmptrapd_diag):
-        os.rename(_yc_snmptrapd_diag, _yc_snmptrapd_diag_bak)
-
-    _handler = logging.handlers.RotatingFileHandler(_yc_snmptrapd_diag,
-                                                    maxBytes=_dcae_logger_max_bytes,
-                                                    backupCount=_dcae_logger_num_archives)
-
-    # set logLevel - valid values NOTSET, DEBUG, INFO, WARNING, ERROR, CRITICAL
-    _handler.setLevel(logging.DEBUG)
-    _dcae_logger.setLevel(logging.DEBUG)
-
-    log_fmt = '%(levelname)s|%(asctime)s|%(name)s|%(process)d|%(funcName)s|'\
-              '%(message)s'
-    _formatter = logging.Formatter(log_fmt)
-    _handler.setFormatter(_formatter)
-    _dcae_logger.addHandler(_handler)
-
-    if os.path.isfile(_yc_snmptrapd_diag):
-        os.chmod(_yc_snmptrapd_diag, 0o640)
-
-    if os.path.isfile(_yc_snmptrapd_diag_bak):
-        os.chmod(_yc_snmptrapd_diag_bak, 0o640)
-
-    return _dcae_logger
diff --git a/src/mod/trapd_dmaap_config.py b/src/mod/trapd_dmaap_config.py
deleted file mode 100644
index 6e83821..0000000
--- a/src/mod/trapd_dmaap_config.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-NOTE:  This module is for temporary use.  It will be removed when dcae_snmptrapd
-is migrated to the new controller infrastructure.
-
-trapd_dmaap_config is responsible for reading/parsing the previous generation
-'dmaap.conf' file, which includes stream, server and authentication details for
-publishing activities.
-"""
-
-__docformat__ = 'restructuredtext'
-
-import os
-import sys
-import string
-import time
-import traceback
-import collections
-import json
-
-from trapd_exit import cleanup_and_exit
-
-prog_name = os.path.basename(__file__)
-
-
-# # # # # # # # # # #
-# fx: read_dmaap_config
-# # # # # # # # # # #
-def read_dmaap_config(_yc_dmaap_conf, _dcae_logger):
-    # FIXME NOTE: This is for testing purposes only, and utilizes the
-    # previous generation of the controller; dispose of when ready
-    """
-    Load dmaap config /etc/dcae/dmaap.conf file (legacy controller)
-    :Parameters:
-      name of dmaap config file
-    :Exceptions:
-      file open
-        this function will throw an exception if unable to open
-        yc_dmaap_conf(fatal error)
-    :Keywords:
-      legacy controller dmaap.conf
-    :Variables:
-      yc_dmaap_conf
-        full path filename of dmaap_conf file provided by previous
-        generation controller
-    :Returns:
-      named tuple of config values
-    """
-
-    _dmaap_cfg_values_nt = collections.namedtuple('dmaap_config_values', [
-                                                  'dmaap_url', 'dmaap_user_name', 'dmaap_p_var', 'dmaap_stream_id', 'dmaap_host'])
-    if os.path.isfile(_yc_dmaap_conf):
-        _dcae_logger.debug('Reading DMaaP config file %s ' %
-                           _yc_dmaap_conf)
-    else:
-        _dcae_logger.error('DMaaP config file %s does NOT exist - exiting'
-                           % (_yc_dmaap_conf))
-        cleanup_and_exit(1, "undefined")
-
-    with open(_yc_dmaap_conf) as _dmaap_config_fd:
-        _dmaapCfgData = json.load(_dmaap_config_fd)
-
-    try:
-        dmaap_url = _dmaapCfgData[0]["dmaapUrl"]
-        _dcae_logger.debug('dmaap_url: %s' % (dmaap_url))
-        dmaap_user_name = _dmaapCfgData[0]["dmaapUserName"]
-        _dcae_logger.debug('dmaap_user_name: %s' % (dmaap_user_name))
-        dmaap_p_var = _dmaapCfgData[0]["dmaapPassword"]
-        _dcae_logger.debug('dmaap_p_var: -')
-        dmaap_stream_id = _dmaapCfgData[0]["dmaapStreamId"]
-        _dcae_logger.debug('dmaap_stream_id: %s' % (dmaap_stream_id))
-    except:
-        _dcae_logger.error('DMaaP config file %s has missing data - exiting'
-                           % (_yc_dmaap_conf))
-        cleanup_and_exit(1, "undefined")
-
-    # This is for logging purposes only.
-    dmaap_host = dmaap_url.split('/')[2][:-5]
-    _dcae_logger.debug('dmaap_host: %s' % (dmaap_host))
-
-    _dmaap_config_fd.close()
-
-    _dmaap_cfg_values = _dmaap_cfg_values_nt(dmaap_url=dmaap_url, dmaap_user_name=dmaap_user_name,
-                                             dmaap_p_var=dmaap_p_var, dmaap_stream_id=dmaap_stream_id, dmaap_host=dmaap_host)
-    return _dmaap_cfg_values
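
For context, read_dmaap_config() above expects dmaap.conf to be a JSON list
whose first element carries the keys it reads. An illustrative entry (all
values are hypothetical; note that dmaap_host is derived by dropping the
5-character ":port" suffix from the URL's host component, and the main script
takes the topic from the text after the last "." in dmaapUrl):

    [
        {
            "dmaapUrl": "https://dmaap.example.com:3905/events/unauthenticated.ONAP-COLLECTOR-SNMPTRAP",
            "dmaapUserName": "m00000@snmptrap.example.com",
            "dmaapPassword": "********",
            "dmaapStreamId": "snmp-trap-stream"
        }
    ]
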
diff --git a/src/mod/trapd_perm_status.py b/src/mod/trapd_perm_status.py
deleted file mode 100644
index 43ad1df..0000000
--- a/src/mod/trapd_perm_status.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-trapd_perm_status maintains a 'permanent' status file
-important messages for audit/diagnostics/etc
-"""
-
-__docformat__ = 'restructuredtext'
-
-import logging
-import os
-import string
-import time
-import traceback
-
-prog_name = os.path.basename(__file__)
-
-
-# # # # # # # # # # # # #
-# fx: log_to_perm_status
-# # # # # # # # # # # # #
-def log_to_perm_status(_loc_perm_file, _loc_perm_msg, _dcae_logger):
-    """
-    Log select errors to a permanent status file for later
-    audit/diagnostic access.
-    :Parameters:
-      log message, logger
-    :Exceptions:
-      file open
-        this function will catch exception of unable to
-        open the log file
-    :Keywords:
-      permstatus
-    """
-
-    perm_fmt_date = time.strftime("%a %b %d %H:%M:%S %Z %Y")
-
-    try:
-        f = open(_loc_perm_file, 'a')
-        f.write("%s %s\n" % (perm_fmt_date, _loc_perm_msg))
-        f.close()
-    except IOError:
-        _dcae_logger.exception("File I/O Exception on %s" % _loc_perm_file)
diff --git a/src/mod/trapd_trap_config.py b/src/mod/trapd_trap_config.py
deleted file mode 100644
index 4b1e2e4..0000000
--- a/src/mod/trapd_trap_config.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-trapd_trap_conf reads a config file of traps and stores/returns them
-in a data dictionary that is used to compare arriving SNMP OIDs
-to the list contained in this file for a keep(/publish) or ignore
-decision.
-"""
-
-__docformat__ = 'restructuredtext'
-
-import os
-import sys
-import string
-import time
-import traceback
-from trapd_exit import cleanup_and_exit
-
-
-prog_name = os.path.basename(__file__)
-
-
-# # # # # # # # # # #
-# fx: read_trap_config
-# # # # # # # # # # #
-
-def read_trap_config(_yc_trap_conf, _dcae_logger):
-    """
-    Load trap config file specified in yaml conf.  This config (1) specifies
-    which traps should be published (inclusion) and which traps should be discarded
-    (not present in config) and (2) maps SNMP Notify OIDs to DMAAP/MR topics
-    :Parameters:
-      _yc_trap_conf - full path of the trap config file
-      _dcae_logger - logger instance for diagnostics
-    :Exceptions:
-      file open
-        this function will throw an exception if unable to open
-        _yc_trap_conf
-    :Keywords:
-      NotifyOID trap config topic
-    :Variables:
-    """
-
-    _trap_conf_dict = {}
-
-    if os.path.isfile(_yc_trap_conf):
-        _dcae_logger.debug('Reading trap config file %s ' % _yc_trap_conf)
-    else:
-        _dcae_logger.error('ERROR:  trap config file %s does NOT exist - exiting'
-                           % (_yc_trap_conf))
-        cleanup_and_exit(1, "undefined")
-
-    # reset dictionaries in case we've been here before
-    _num_trap_conf_entries = 0
-
-    field_separator = " "
-
-    _dcae_logger.debug('processing trap config settings from %s'
-                       % (_yc_trap_conf))
-    for line in open(_yc_trap_conf):
-        # format:
-        #
-        # oid_including_regex <topic>
-        #
-        if line[0] != '#':
-            columns = line.rstrip().split(field_separator)
-            # process trap config entries
-            if len(columns) == 2:
-                _trap_conf_oid = columns[0]
-                _trap_conf_dict[_trap_conf_oid] = columns[1]
-                _dcae_logger.debug('%d oid: %s topic: %s' %
-                                   (_num_trap_conf_entries, _trap_conf_oid, _trap_conf_dict[_trap_conf_oid]))
-                _num_trap_conf_entries += 1
-            else:
-                _dcae_logger.debug('ERROR: Invalid trap config entry - '
-                                   'skipping: %s' % (line.rstrip()))
-
-    _dcae_logger.debug('%d trap config entries found in %s' % (_num_trap_conf_entries,
-                                                               _yc_trap_conf))
-
-    return _trap_conf_dict
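
The parser above treats a "#" in column one as a comment and requires exactly
one space between the two columns (the field separator is a single space);
lines that do not split into exactly two fields are skipped. A hypothetical
trap config illustrating the format:

    # oid_including_regex <topic>
    .1.3.6.1.4.1.74.2.46.12.* DCAE-COLLECTOR-UCSNMP
    .1.3.6.1.6.3.1.1.5.* DCAE-COLLECTOR-UCSNMP
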
diff --git a/src/mod/trapd_yaml_config.py b/src/mod/trapd_yaml_config.py
deleted file mode 100644
index f0d12e3..0000000
--- a/src/mod/trapd_yaml_config.py
+++ /dev/null
@@ -1,193 +0,0 @@
-# ============LICENSE_START=======================================================
-# org.onap.dcae
-# ================================================================================
-# Copyright (c) 2018 AT&T Intellectual Property. All rights reserved.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ============LICENSE_END=========================================================
-#
-# ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-"""
-Read the SNMP trap receiver YAML config file, which contains the vast
-majority of configurable parameters for the process, including
-location of other config files, http timeouts, dns cache times,
-etc.
-"""
-
-__docformat__ = 'restructuredtext'
-
-import os
-import sys
-import string
-import time
-import traceback
-import collections
-import yaml
-from trapd_exit import cleanup_and_exit
-
-
-prog_name = os.path.basename(__file__)
-
-
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-# function: get_yaml_cfg
-# # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
-
-
-def read_yaml_config(loc_yaml_conf_file):
-    """
-    Load all sorts of goodies from yaml config file.
-    :Parameters:
-      loc_yaml_conf_file
-        filename including full path to yaml config file
-    :Exceptions:
-      file open
-        this function will throw an exception if unable to open
-        loc_yaml_conf_file (fatal error) or any of the required
-        values are not found in the loc_yaml_conf_file (fatal error)
-    :Keywords:
-      yaml config runtime protocol files dmaap
-    :Variables:
-      yc_transport
-        protocol transport for snmp traps (udp|tcp)
-      yc_ipv4_interface
-        what ipv4 interface to listen for traps on
-      yc_ipv4_port
-        what port to listen for traps on
-      yc_dns_cache_ttl_seconds
-        how many seconds an entry remains in DNS cache prior to refresh
-      yc_runtime_base_dir
-        base directory of dcae_snmptrapd application
-      yc_log_dir
-        log directory of dcae_snmptrapd application
-      yc_data_dir
-        data directory of dcae_snmptrapd application
-      yc_pid_dir
-        directory where running PID file will be written (filename <yc_pid_dir>/<prog_name>.pid)
-      yc_trapd_diag
-        program diagnostic log, auto rotated and archived via python handler
-      yc_raw_traps_log
-        file to write raw trap data to
-      yc_published_traps_dir
-        file to write json formatted trap data for successful publishes (only!)
-      yc_trap_stats_log
-        file to write trap stats (traps per second, by OID, by agent)
-      yc_perm_status_file
-        file to write important permanent status messages (e.g. publish failures) for audit/diagnostics
-      yc_dmaap_conf
-        file (full path) of the legacy dmaap.conf providing stream, server and
-        authentication details for publishing (see trapd_dmaap_config)
-      yc_http_timeout
-        http timeout in seconds for dmaap publish attempt
-      yc_http_retries
-        num of http retries to attempt in response to failed post
-      yc_primary_publisher
-        boolean defining whether local instance is primary (future use)
-      yc_peer_publisher
-        identity of peer publisher in case this one fails (future use)
-      yc_max_traps_between_publish
-        if batching publishes, max number of traps to queue before http post
-      yc_max_milliseconds_between_publish
-        if batching publishes, max number of milliseconds between http post
-        Note:  using the batch feature creates an opportunity for trap loss if
-        traps stop arriving and the process exits (traps in queue will remain
-        there until another trap arrives and kicks off the evaluation of max_traps
-        or max_milliseconds above).
-    """
-
-    # named tuple for values in yaml config file
-    _yaml_config_values_nt = collections.namedtuple('yaml_config_values', ['yc_transport', 'yc_ipv4_port', 'yc_ipv4_interface', 'yc_ipv6_port', 'yc_ipv6_interface', 'yc_dns_cache_ttl_seconds', 'yc_runtime_base_dir', 'yc_log_dir', 'yc_data_dir', 'yc_pid_dir', 'yc_trap_conf', 'yc_trapd_diag',
-                                                                           'yc_raw_traps_log', 'yc_published_traps_dir', 'yc_trap_stats_log', 'yc_perm_status_file', 'yc_dmaap_conf', 'yc_http_timeout', 'yc_http_retries', 'yc_http_secs_between_retries', 'yc_primary_publisher', 'yc_peer_publisher', 'yc_max_traps_between_publish', 'yc_max_milliseconds_between_publish'])
-
-    with open(loc_yaml_conf_file, 'r') as yaml_fd:
-        cfg_data = yaml.safe_load(yaml_fd)
-
-    # ONAP FIXME: split try into per-section except loops below
-    try:
-        # protocol
-        yc_transport = (cfg_data['protocol']['transport'])
-        yc_ipv4_interface = (cfg_data['protocol']['ipv4_interface'])
-        yc_ipv4_port = int(cfg_data['protocol']['ipv4_port'])
-        yc_ipv6_interface = (cfg_data['protocol']['ipv6_interface'])
-        yc_ipv6_port = int(cfg_data['protocol']['ipv6_port'])
-        yc_dns_cache_ttl_seconds = int(
-            cfg_data['protocol']['dns_cache_ttl_seconds'])
-
-        # files and directories
-        yc_runtime_base_dir = (cfg_data['files']['runtime_base_dir'])
-        yc_log_dir = (cfg_data['files']['log_dir'])
-        yc_data_dir = (cfg_data['files']['data_dir'])
-        yc_pid_dir = (cfg_data['files']['pid_dir'])
-        yc_trap_conf = (cfg_data['files']['trap_conf'])
-        yc_trapd_diag = (cfg_data['files']['snmptrapd_diag'])
-        yc_raw_traps_log = (cfg_data['files']['raw_traps_log'])
-        yc_published_traps_dir = (cfg_data['files']['published_traps_dir'])
-        yc_trap_stats_log = (cfg_data['files']['trap_stats_log'])
-        yc_perm_status_file = (cfg_data['files']['perm_status_file'])
-
-        # dmaap
-        yc_dmaap_conf = (cfg_data['dmaap']['dmaap_conf'])
-        yc_http_timeout = (cfg_data['dmaap']['http_timeout'])
-        yc_http_retries = (cfg_data['dmaap']['http_retries'])
-        yc_http_secs_between_retries = (
-            cfg_data['dmaap']['http_secs_between_retries'])
-        yc_primary_publisher = (cfg_data['dmaap']['primary_publisher'])
-        yc_peer_publisher = (cfg_data['dmaap']['peer_publisher'])
-        yc_max_traps_between_publish = (
-            cfg_data['dmaap']['max_traps_between_publish'])
-        yc_max_milliseconds_between_publish = (
-            cfg_data['dmaap']['max_milliseconds_between_publish'])
-
-    except:
-        print("ERROR reading config:    %s" % loc_yaml_conf_file)
-        traceback.print_exc()
-        cleanup_and_exit(1, "undefined")
-
-    # print back for confirmation
-    print("Configs read from: %s" % loc_yaml_conf_file)
-    print("    protocol section:")
-    print("        transport: %s" % yc_transport)
-    print("        ipv4_port: %s" % yc_ipv4_port)
-    print("        ipv4_interface: %s" % yc_ipv4_interface)
-    print("        ipv6_port: %s" % yc_ipv6_port)
-    print("        ipv6_interface: %s" % yc_ipv6_interface)
-    print("        dns_cache_ttl_seconds: %s" % yc_dns_cache_ttl_seconds)
-    print("    files section:")
-    print("        runtime_base_dir: %s" % yc_runtime_base_dir)
-    print("        log_dir: %s" % yc_log_dir)
-    print("        data_dir: %s" % yc_data_dir)
-    print("        pid_dir: %s" % yc_pid_dir)
-    print("        trap_conf: %s" % yc_trap_conf)
-    print("        snmptrapd_diag: %s" % yc_trapd_diag)
-    print("        raw_traps_log: %s" % yc_raw_traps_log)
-    print("        published_traps_dir: %s" % yc_published_traps_dir)
-    print("        trap_stats_log: %s" % yc_trap_stats_log)
-    print("        perm_status_file: %s" % yc_perm_status_file)
-    print("    dmaap section:")
-    print("        dmaap_config_file: %s" % yc_dmaap_conf)
-    print("        http_timeout: %s" % yc_http_timeout)
-    print("        http_retries: %s" % yc_http_retries)
-    print("        http_secs_between_retries: %s" %
-          yc_http_secs_between_retries)
-    print("        primary_publisher: %s" % yc_primary_publisher)
-    print("        peer_publisher: %s" % yc_peer_publisher)
-    print("        max_traps_between_publish: %s" %
-          yc_max_traps_between_publish)
-    print("        max_milliseconds_between_publish: %s" %
-          yc_max_milliseconds_between_publish)
-
-    _yaml_config_values = _yaml_config_values_nt(yc_transport=yc_transport, yc_ipv4_port=yc_ipv4_port, yc_ipv4_interface=yc_ipv4_interface, yc_ipv6_port=yc_ipv6_port, yc_ipv6_interface=yc_ipv6_interface, yc_dns_cache_ttl_seconds=yc_dns_cache_ttl_seconds, yc_runtime_base_dir=yc_runtime_base_dir, yc_log_dir=yc_log_dir, yc_data_dir=yc_data_dir, yc_pid_dir=yc_pid_dir, yc_trap_conf=yc_trap_conf, yc_trapd_diag=yc_trapd_diag, yc_raw_traps_log=yc_raw_traps_log, yc_published_traps_dir=yc_published_traps_dir,
-                                                 yc_trap_stats_log=yc_trap_stats_log, yc_perm_status_file=yc_perm_status_file, yc_dmaap_conf=yc_dmaap_conf, yc_http_timeout=yc_http_timeout, yc_http_retries=yc_http_retries, yc_http_secs_between_retries=yc_http_secs_between_retries, yc_primary_publisher=yc_primary_publisher, yc_peer_publisher=yc_peer_publisher, yc_max_traps_between_publish=yc_max_traps_between_publish, yc_max_milliseconds_between_publish=yc_max_milliseconds_between_publish)
-
-    return _yaml_config_values
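
A minimal illustrative trapd.yaml covering the keys read above (all paths,
ports and thresholds are hypothetical placeholders, not recommended values):

    protocol:
        transport: udp
        ipv4_interface: 0.0.0.0
        ipv4_port: 6162
        ipv6_interface: "::1"
        ipv6_port: 6162
        dns_cache_ttl_seconds: 60
    files:
        runtime_base_dir: /opt/app/snmptrap
        log_dir: /opt/app/snmptrap/logs
        data_dir: /opt/app/snmptrap/data
        pid_dir: /opt/app/snmptrap/tmp
        trap_conf: /opt/app/snmptrap/etc/trap.conf
        snmptrapd_diag: /opt/app/snmptrap/logs/snmptrapd_diag.log
        raw_traps_log: /opt/app/snmptrap/logs/arriving_traps.log
        published_traps_dir: /opt/app/snmptrap/logs
        trap_stats_log: /opt/app/snmptrap/logs/trap_stats.log
        perm_status_file: /opt/app/snmptrap/logs/perm_status.log
    dmaap:
        dmaap_conf: /etc/dcae/dmaap.conf
        http_timeout: 1.5
        http_retries: 3
        http_secs_between_retries: 2
        primary_publisher: true
        peer_publisher: unknown
        max_traps_between_publish: 100
        max_milliseconds_between_publish: 10000
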
diff --git a/tests/_test_trapd_get_cbs_config.py b/tests/_test_trapd_get_cbs_config.py
new file mode 100644
index 0000000..5fcfc2a
--- /dev/null
+++ b/tests/_test_trapd_get_cbs_config.py
@@ -0,0 +1,44 @@
+import pytest
+import unittest
+import os
+from onap_dcae_cbs_docker_client.client import get_config
+from trapd_exit import cleanup_and_exit
+from trapd_logging import stdout_logger
+import trapd_get_cbs_config
+ 
+class test_get_cbs_config(unittest.TestCase):
+    """
+    Test the trapd_get_cbs_config mod
+    """
+ 
+    def test_cbs_env_present(self):
+        """
+        Test that CBS env variable exists and we can get config even
+        if CONSUL_HOST doesn't provide
+        """
+        os.environ.update(CONSUL_HOST='nosuchhost')
+        result = trapd_get_cbs_config.trapd_get_cbs_config()
+        compare = str(result).startswith("{'snmptrap': ")
+        self.assertEqual(compare, False)
+ 
+    def test_cbs_fallback_env_present(self):
+        """
+        Test that CBS fallback env variable exists and we can get config
+        from fallback env var
+        """
+        os.environ.update(CBS_SIM_JSON='../etc/snmptrapd.json')
+        result = trapd_get_cbs_config.trapd_get_cbs_config()
+        compare = str(result).startswith("{'snmptrap': ")
+        self.assertEqual(compare, False)
+ 
+    def test_cbs_fallback_env_not_present(self):
+        """
+        Test that config retrieval fails when the CBS fallback env variable points to a non-existent file
+        """
+        os.environ.update(CBS_SIM_JSON='../etc/no_such_file.json')
+        result = trapd_get_cbs_config.trapd_get_cbs_config()
+        compare = str(result).startswith("{'snmptrap': ")
+        self.assertEqual(compare, False)
+ 
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/setup.py b/tests/setup.py
new file mode 100644
index 0000000..7ff184c
--- /dev/null
+++ b/tests/setup.py
@@ -0,0 +1,68 @@
+# ============LICENSE_START=======================================================
+# org.onap.dcae
+# ================================================================================
+# Copyright (c) 2017 AT&T Intellectual Property. All rights reserved.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=========================================================
+#
+# ECOMP is a trademark and service mark of AT&T Intellectual Property.
+
+import argparse
+import array
+import asyncio
+import collections
+import datetime
+import errno
+from pysnmp.carrier.asyncio.dgram import udp, udp6
+from pysnmp.entity import engine, config
+from pysnmp.entity.rfc3413 import ntfrcv
+from pysnmp.proto.api import v2c
+import json
+import logging
+import logging.handlers
+import os
+import pprint
+import re
+import requests
+import signal
+import socket
+import string
+import sys
+import time
+import traceback
+from trapd_dmaap_config import read_dmaap_config
+from trapd_exit import cleanup_and_exit
+from trapd_http_session import init_session_obj
+from trapd_perm_status import log_to_perm_status
+from trapd_runtime_pid import save_pid, rm_pid
+from trapd_trap_config import read_trap_config
+from trapd_yaml_config import read_yaml_config
+import unicodedata
+import uuid as uuid_mod
+import yaml
+
+# imports needed by the setup() call below; parse_requirements/PipSession
+# come from pip releases prior to 10
+from setuptools import setup, find_packages
+from pip.req import parse_requirements
+from pip.download import PipSession
+
+install_reqs = parse_requirements("requirements.txt", session=PipSession())
+reqs = [str(ir.req) for ir in install_reqs]
+
+setup(
+    name = "onap_dcae_cbs_docker_client",
+    description = "snmp trap receiver for a DCAE docker image",
+    version = "1.2",
+    packages=find_packages(),
+    author = "Dave L",
+    author_email = "dl3158@att.com",
+    license='Apache 2',
+    keywords = "",
+    url = "",
+    install_requires=reqs
+)
diff --git a/tests/test_snmptrapd.py b/tests/test_snmptrapd.py
new file mode 100644
index 0000000..2f1783c
--- /dev/null
+++ b/tests/test_snmptrapd.py
@@ -0,0 +1,46 @@
+import pytest
+import unittest
+import trapd_runtime_pid
+ 
+class test_save_pid(unittest.TestCase):
+    """
+    Test the save_pid mod
+    """
+ 
+    def test_correct_usage(self):
+        """
+        Test that attempt to create pid file in standard location works
+        """
+        result = trapd_runtime_pid.save_pid('/tmp/snmptrap_test_pid_file')
+        self.assertEqual(result, True)
+ 
+    def test_missing_directory(self):
+        """
+        Test that attempt to create pid file in missing dir fails
+        """
+        result = trapd_runtime_pid.save_pid('/bogus/directory/for/snmptrap_test_pid_file')
+        self.assertEqual(result, False)
+ 
+class test_rm_pid(unittest.TestCase):
+    """
+    Test the rm_pid mod
+    """
+ 
+    def test_correct_usage(self):
+        """
+        Test that attempt to remove pid file in standard location works
+        """
+        # must create it before removing it
+        result = trapd_runtime_pid.save_pid('/tmp/snmptrap_test_pid_file')
+        result = trapd_runtime_pid.rm_pid('/tmp/snmptrap_test_pid_file')
+        self.assertEqual(result, True)
+ 
+    def test_missing_file(self):
+        """
+        Test that attempt to rm non-existent pid file fails
+        """
+        result = trapd_runtime_pid.rm_pid('/tmp/snmptrap_test_pid_file_9999')
+        self.assertEqual(result, False)
+ 
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/test_snmptrapd_send_test_trap.py b/tests/test_snmptrapd_send_test_trap.py
new file mode 100755
index 0000000..54d522e
--- /dev/null
+++ b/tests/test_snmptrapd_send_test_trap.py
@@ -0,0 +1,40 @@
+from pysnmp.hlapi import *
+from pysnmp import debug
+
+# debug.setLogger(debug.Debug('msgproc'))
+
+iters = range(0, 10, 1)
+for i in iters:
+    errorIndication, errorStatus, errorIndex, varbinds = next(sendNotification(SnmpEngine(),
+         CommunityData('not_public'),
+         UdpTransportTarget(('localhost', 6164)),
+         ContextData(),
+         'trap',
+         [ObjectType(ObjectIdentity('.1.3.6.1.4.1.999.1'), OctetString('test trap - ignore')),
+          ObjectType(ObjectIdentity('.1.3.6.1.4.1.999.2'), OctetString('ONAP pytest trap'))])
+    )
+    
+    if errorIndication:
+        print(errorIndication)
+    else:
+        print("successfully sent first trap example, number %d" % i)
+
+for i in iters:
+    errorIndication, errorStatus, errorIndex, varbinds = next(sendNotification(SnmpEngine(),
+         CommunityData('public'),
+         UdpTransportTarget(('localhost', 6164)),
+         ContextData(),
+         'trap',
+            NotificationType(
+                ObjectIdentity('.1.3.6.1.4.1.74.2.46.12.1.1')
+            ).addVarBinds(
+                ('.1.3.6.1.4.1.999.1', OctetString('ONAP pytest trap - ignore (varbind 1)')),
+                ('.1.3.6.1.4.1.999.2', OctetString('ONAP pytest trap - ignore (varbind 2)'))
+            )
+        )
+    )
+
+    if errorIndication:
+        print(errorIndication)
+    else:
+        print("successfully sent second trap example, number %d" % i)
diff --git a/tests/test_trapd_exit.py b/tests/test_trapd_exit.py
new file mode 100644
index 0000000..594624f
--- /dev/null
+++ b/tests/test_trapd_exit.py
@@ -0,0 +1,37 @@
+import pytest
+import unittest
+import trapd_exit
+
+pid_file="/tmp/test_pid_file"
+pid_file_dne="/tmp/test_pid_file_NOT"
+ 
+class test_cleanup_and_exit(unittest.TestCase):
+    """
+    Test the cleanup_and_exit mod
+    """
+ 
+    def test_normal_exit(self):
+        """
+        Test normal exit works as expected
+        """
+        open(pid_file, 'w')
+    
+        with pytest.raises(SystemExit) as pytest_wrapped_sys_exit:
+            trapd_exit.cleanup_and_exit(0,pid_file)
+
+        # assertions must follow the with block; code placed after the call
+        # inside the block is never reached once SystemExit is raised
+        assert pytest_wrapped_sys_exit.type == SystemExit
+        assert pytest_wrapped_sys_exit.value.code == 0
+
+        # compare = str(result).startswith("SystemExit: 0")
+        # self.assertEqual(compare, True)
+ 
+    def test_abnormal_exit(self):
+        """
+        Test exit with missing PID file exits non-zero
+        """
+        with pytest.raises(SystemExit) as pytest_wrapped_sys_exit:
+            result = trapd_exit.cleanup_and_exit(0,pid_file_dne)
+            assert pytest_wrapped_sys_exit.type == SystemExit
+            assert pytest_wrapped_sys_exit.value.code == 1
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/test_trapd_http_session.py b/tests/test_trapd_http_session.py
new file mode 100644
index 0000000..8f61d08
--- /dev/null
+++ b/tests/test_trapd_http_session.py
@@ -0,0 +1,20 @@
+import pytest
+import unittest
+import trapd_http_session
+ 
+class test_init_session_obj(unittest.TestCase):
+    """
+    Test the init_session_obj mod
+    """
+ 
+    def test_correct_usage(self):
+        """
+        Test that attempt to create http session object works
+        """
+        result = trapd_http_session.init_session_obj()
+        compare = str(result).startswith("<requests.sessions.Session object at")
+        self.assertEqual(compare, True)
+ 
+ 
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/test_trapd_runtime_pid.py b/tests/test_trapd_runtime_pid.py
new file mode 100644
index 0000000..c48ef34
--- /dev/null
+++ b/tests/test_trapd_runtime_pid.py
@@ -0,0 +1,47 @@
+import pytest
+import unittest
+import trapd_runtime_pid
+ 
+class test_save_pid(unittest.TestCase):
+    """
+    Test the save_pid mod
+    """
+ 
+    def test_correct_usage(self):
+        """
+        Test that attempt to create pid file in standard location works
+        """
+        result = trapd_runtime_pid.save_pid('/tmp/snmptrap_test_pid_file')
+        self.assertEqual(result, True)
+ 
+    def test_missing_directory(self):
+        """
+        Test that attempt to create pid file in missing dir fails
+        """
+        result = trapd_runtime_pid.save_pid('/bogus/directory/for/snmptrap_test_pid_file')
+        self.assertEqual(result, False)
+ 
+class test_rm_pid(unittest.TestCase):
+    """
+    Test the rm_pid mod
+    """
+ 
+    def test_correct_usage(self):
+        """
+        Test that attempt to remove pid file in standard location works
+        """
+        # must create it before removing it
+        result = trapd_runtime_pid.save_pid('/tmp/snmptrap_test_pid_file')
+        self.assertEqual(result, True)
+        result = trapd_runtime_pid.rm_pid('/tmp/snmptrap_test_pid_file')
+        self.assertEqual(result, True)
+ 
+    def test_missing_file(self):
+        """
+        Test that attempt to rm non-existent pid file fails
+        """
+        result = trapd_runtime_pid.rm_pid('/tmp/snmptrap_test_pid_file_9999')
+        self.assertEqual(result, False)
+ 
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/tox.ini b/tests/tox.ini
new file mode 100644
index 0000000..3d8e842
--- /dev/null
+++ b/tests/tox.ini
@@ -0,0 +1,15 @@
+[tox]
+envlist = py36
+ 
+[testenv]
+deps = coverage
+commands = coverage erase
+ 
+[testenv:py36]
+deps = coverage pytest
+commands = coverage run ../bin/snmptrapd.py &
+           pytest test_trapd_exit.py  
+           pytest test_trapd_http_session.py  
+           pytest test_trapd_runtime_pid.py
+           ./test_snmptrapd_send_test_trap.py &
+           coverage report -m
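
Assuming the modules under bin/mod are importable (for example via PYTHONPATH),
the suite can be driven through tox from the tests/ directory or by invoking
pytest directly on the individual test_*.py files listed above.
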
diff --git a/version.properties b/version.properties
index 07578e5..2ddebb3 100644
--- a/version.properties
+++ b/version.properties
@@ -1,6 +1,6 @@
-major=1

-minor=2

-patch=0

-base_version=${major}.${minor}.${patch}

-release_version=${base_version}

-snapshot_version=${base_version}-SNAPSHOT

+major=1
+minor=0
+patch=0
+base_version=${major}.${minor}.${patch}
+release_version=${base_version}
+snapshot_version=${base_version}-SNAPSHOT