Skip http processing for logfile events

Skip the dmaap-specific http processing in the logstash pipeline when
events come from a log file rather than an http request. Also bump
PolicyEngineAPI to 1.3.1 and write raw events to a dedicated
events-raw daily index.

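A rough sketch of the resulting filter structure in logstash.conf
(all names as in the patch below, shown only to illustrate the split
between http-only and common processing):

    filter {
        if [type] != "dmaap_log" {
            # http-only steps: drop empty "[]" messages, tag non-200
            # responses, parse and split the dmaap_source json list
        }
        # timestamp handling and the remaining steps run for both
        # http request and log file events
    }
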
Issue-ID: CLAMP-240
Change-Id: I26d38ea99d46b186fa81fda01b63ab207c881057
Signed-off-by: osgn422w <gervais-martial.ngueko@intl.att.com>
diff --git a/pom.xml b/pom.xml
index 9918a3f..0f1eab2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -397,7 +397,7 @@
 				<dependency>
 						<groupId>org.onap.policy.engine</groupId>
 						<artifactId>PolicyEngineAPI</artifactId>
-						<version>1.3.0</version>
+						<version>1.3.1</version>
 						<exclusions>
 								<exclusion>
 										<groupId>com.google.guava</groupId>
diff --git a/src/main/docker/logstash/pipeline/logstash.conf b/src/main/docker/logstash/pipeline/logstash.conf
index e6cee9c..6fe9d96 100644
--- a/src/main/docker/logstash/pipeline/logstash.conf
+++ b/src/main/docker/logstash/pipeline/logstash.conf
@@ -61,48 +61,54 @@
 }
 
 filter {
-    # avoid noise if no entry in the list
-    if [message] == "[]" {
-        drop { }
-    }
+    if [type] != "dmaap_log" {
+    # only execute this section for dmaap events coming from an http request;
+    # it does not apply to dmaap events read from the log file
 
-    if [http_request_failure] or [@metadata][code] != "200" {
-       mutate {
-       	   add_tag => [ "error" ]
-       }
-    }
+	    # avoid noise if no entry in the list
+	    if [message] == "[]" {
+	        drop { }
+	    }
 
-    if "dmaap_source" in [tags] {
-        #
-        # Dmaap provides a json list, whose items are Strings containing the event
-        # provided to Dmaap, which itself is an escaped json.
-        #
-        # We first need to parse the json as we have to use the plaintext as it cannot
-        # work with list of events, then split that list into multiple string events,
-        # that we then transform into json.
-        #
-        json {
-            source => "[message]"
-            target => "message"
-        }
-        ruby {
-            code => "
-            for ev in event.get('message', [])
-                ev.set('@metadata', event.get('@metadata'))
-            end
-            "
-        }
-    	
-        split {
-            field => "message"
-        }
-        json {
-            source => "message"
-        }
-        mutate {
-            remove_field => [ "message" ]
-        }
-    }
+	    if [http_request_failure] or [@metadata][code] != "200" {
+	       mutate {
+	       	   add_tag => [ "error" ]
+	       }
+	    }
+
+	    if "dmaap_source" in [tags] {
+	        #
+	        # Dmaap provides a json list whose items are strings, each containing an
+	        # event submitted to Dmaap as escaped json.
+	        #
+	        # Since the plain codec has to be used (json cannot handle a list of
+	        # events), we first parse the message as json, then split that list into
+	        # multiple string events, which we finally transform into json.
+	        #
+	        json {
+	            source => "[message]"
+	            target => "message"
+	        }
+	        ruby {
+	            code => "
+	            for ev in event.get('message', [])
+	                ev.set('@metadata', event.get('@metadata'))
+	            end
+	            "
+	        }
+	
+	        split {
+	            field => "message"
+	        }
+	        json {
+	            source => "message"
+	        }
+	        mutate {
+	            remove_field => [ "message" ]
+	        }
+	    }
+	}
+	# now start the processing common to both http request and log file events
 
     #
     # Some timestamps are expressed as milliseconds, some are in microseconds
@@ -250,7 +256,7 @@
         elasticsearch {
             codec => "json"
             hosts => ["${elasticsearch_base_url}"]
-            index => "events-%{+YYYY.MM.DD}" # creates daily indexes
+            index => "events-raw-%{+YYYY.MM.dd}" # creates daily indexes
             doc_as_upsert => true
         }
     }