Added k6 script to check the throughput of the CPS kafka consumer

- Added a new k6 script to produce CM AVC events (it is not part of
  the regular automated tests; see the run sketch below)
- Added a JSON resource (sample AVC event)
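- To run the script manually (a sketch, assuming a k6 binary built
  with the xk6-kafka extension that provides the k6/x/kafka imports,
  and a kafka broker listening on localhost:9092):
      xk6 build --with github.com/mostafa/xk6-kafka@latest
      ./k6 run k6-tests/once-off-test/kafka/produce-avc-event.js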

Issue-ID: CPS-2329
Change-Id: I6446bc120382257c5039df17fd34b84b7c6f6550
Signed-off-by: sourabh_sourabh <sourabh.sourabh@est.tech>
diff --git a/k6-tests/once-off-test/kafka/produce-avc-event.js b/k6-tests/once-off-test/kafka/produce-avc-event.js
new file mode 100644
index 0000000..981a21a
--- /dev/null
+++ b/k6-tests/once-off-test/kafka/produce-avc-event.js
@@ -0,0 +1,103 @@
+/*
+ *  ============LICENSE_START=======================================================
+ *  Copyright (C) 2024 Nordix Foundation
+ *  ================================================================================
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ *
+ *  SPDX-License-Identifier: Apache-2.0
+ *  ============LICENSE_END=========================================================
+ */
+
+import { crypto } from 'k6/experimental/webcrypto';
+import { check } from 'k6';
+import { Writer, SchemaRegistry, SCHEMA_TYPE_STRING } from 'k6/x/kafka';
+
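+// sample AVC event payload, loaded in the init context; parse + stringify validates the JSON and strips formatting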
+const testEventPayload = JSON.stringify(JSON.parse(open('../../resources/sampleAvcInputEvent.json')));
+const schemaRegistry = new SchemaRegistry();
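+// kafka producer for the dmi-cm-events topic, tuned for throughput (gzip compression, batches of up to 5000 messages)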
+const kafkaProducer = new Writer({
+    brokers: ['localhost:9092'],
+    topic: 'dmi-cm-events',
+    autoCreateTopic: true,
+    batchSize: 5000,
+    compression: 'gzip',
+    requestTimeout: 30000
+});
+
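+// shared-iterations scenario: 1000 VUs share 100,000 iterations, one message produced per iteration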
+const TOTAL_MESSAGES = 100000;
+const VIRTUAL_USERS = 1000;
+
+export const options = {
+    setupTimeout: '1m',
+    teardownTimeout: '1m',
+    scenarios: {
+        produceKafkaMessages: {
+            executor: 'shared-iterations',
+            exec: 'sendKafkaMessages',
+            vus: VIRTUAL_USERS,
+            iterations: TOTAL_MESSAGES,
+            maxDuration: '10m',
+        }
+    }
+};
+
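+// CloudEvents attributes carried as kafka headers with the 'ce_' prefix (CloudEvents kafka protocol binding)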
+function getCloudEventHeaders() {
+    return {
+        ce_type: 'org.onap.cps.ncmp.events.avc1_0_0.AvcEvent',
+        ce_source: 'DMI',
+        ce_destination: 'dmi-cm-events',
+        ce_specversion: '1.0',
+        ce_time: new Date().toISOString(),
+        ce_id: crypto.randomUUID(),
+        ce_dataschema: 'urn:cps:org.onap.cps.ncmp.events.avc1_0_0.AvcEvent:1.0.0',
+        ce_correlationid: crypto.randomUUID()
+    };
+}
+
+export function sendKafkaMessages() {
+    const cloudEventHeaders = getCloudEventHeaders();
+
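+    // kafka message: correlation id as the key, the sample AVC event as the value, CloudEvents attributes as headers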
+    const avcCloudEvent = {
+        key: schemaRegistry.serialize({
+            data: cloudEventHeaders.ce_correlationid,
+            schemaType: SCHEMA_TYPE_STRING,
+        }),
+        value: schemaRegistry.serialize({
+            data: testEventPayload,
+            schemaType: SCHEMA_TYPE_STRING
+        }),
+        headers: cloudEventHeaders
+    };
+
+    try {
+        kafkaProducer.produce({ messages: [avcCloudEvent] });
+        // produce() throws on failure, so reaching this point means the writer accepted the message
+        check(true, {
+            'Message sent successfully': (messageSent) => messageSent,
+        });
+    } catch (error) {
+        check(false, {
+            'Message sent successfully': (messageSent) => messageSent,
+        });
+        console.error('Error during message production:', error, avcCloudEvent);
+    }
+}
+
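+// teardown() runs once after the test completes and closes the producer connection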
+export function teardown() {
+    kafkaProducer.close();
+}
diff --git a/k6-tests/resources/sampleAvcInputEvent.json b/k6-tests/resources/sampleAvcInputEvent.json
new file mode 100644
index 0000000..4c9cd72
--- /dev/null
+++ b/k6-tests/resources/sampleAvcInputEvent.json
@@ -0,0 +1,38 @@
+{
+  "data": {
+    "push-change-update": {
+      "datastore-changes": {
+        "ietf-yang-patch:yang-patch": {
+          "patch-id": "34534ffd98",
+          "edit": [
+            {
+              "edit-id": "ded43434-1",
+              "operation": "replace",
+              "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']",
+              "value": {
+                "attributes": []
+              }
+            },
+            {
+              "edit-id": "ded43434-2",
+              "operation": "create",
+              "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']",
+              "value": {
+                "attributes": [
+                  {
+                    "isHoAllowed": false
+                  }
+                ]
+              }
+            },
+            {
+              "edit-id": "ded43434-3",
+              "operation": "delete",
+              "target": "ancestor:ancestor/parent[@id='parent1']/child[@id='child1']/grandchild[@id='grandchild1']/relation[@id='relation1']"
+            }
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file