Add k6 tests for legacy async batch passthrough read operation
- add Kafka support to the k6 codebase
- add two new tests: data operation batch passthrough read
  and data operation async batch read (from Kafka)
- calculate events/second via a k6 Trend metric
- add Kafka UI support to docker-compose
Note: before this commit is merged, the k6 executable
must be compiled with the xk6-kafka extension
Issue-ID: CPS-2268
Change-Id: Ib7777b7bc9f15b210ea36d3541cba0e0c943f883
Signed-off-by: halil.cakal <halil.cakal@est.tech>
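
The diff below only shows the HTTP side of the change; the Kafka consumer half of the async batch read test is not included in this excerpt. A minimal sketch of what that consumer could look like with xk6-kafka, assuming the constants added to utils.js below (the Reader options, the message limit, and the one-event-per-message weighting are illustrative, not taken from this change):

```javascript
// Sketch only: consume async batch-read responses from Kafka and feed a
// k6 Trend. The metric name matches the one referenced in the CSV summary.
import { Reader } from 'k6/x/kafka'; // requires a k6 binary built with xk6-kafka
import { Trend } from 'k6/metrics';
import { KAFKA_BOOTSTRAP_SERVERS, TOPIC_DATA_OPERATIONS_BATCH_READ } from './utils.js';

const eventsPerSecondTrend = new Trend('data_operations_batch_read_cmhandles_per_second');

const reader = new Reader({
    brokers: KAFKA_BOOTSTRAP_SERVERS,
    topic: TOPIC_DATA_OPERATIONS_BATCH_READ,
});

export function consumeBatchReadResponses() {
    const startTimeInMillis = Date.now();
    // consume() blocks until `limit` messages arrive or the reader times out
    const messages = reader.consume({ limit: 10 }); // illustrative limit
    const elapsedSeconds = (Date.now() - startTimeInMillis) / 1000;
    // here each Kafka message counts as one event; the real test may instead
    // weight by the number of cm handles carried in each message
    eventsPerSecondTrend.add(messages.length / elapsedSeconds);
}
```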
diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js
index 43a215f..76bda4e 100644
--- a/k6-tests/ncmp/common/passthrough-crud.js
+++ b/k6-tests/ncmp/common/passthrough-crud.js
@@ -19,7 +19,12 @@
*/
import http from 'k6/http';
-import { NCMP_BASE_URL, CONTENT_TYPE_JSON_PARAM, getRandomCmHandleId } from './utils.js';
+import {
+    CONTENT_TYPE_JSON_PARAM,
+    getRandomCmHandleId,
+    NCMP_BASE_URL,
+    TOPIC_DATA_OPERATIONS_BATCH_READ
+} from './utils.js';
export function passthroughRead() {
const cmHandleId = getRandomCmHandleId();
@@ -40,3 +45,21 @@
const response = http.post(url, JSON.stringify(body), CONTENT_TYPE_JSON_PARAM);
return response;
}
+
+export function batchRead(cmHandleIds) {
+    const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`;
+    const payload = {
+        "operations": [
+            {
+                "resourceIdentifier": "parent/child",
+                "targetIds": cmHandleIds,
+                "datastore": "ncmp-datastore:passthrough-operational",
+                "options": "(fields=schemas/schema)",
+                "operationId": "12",
+                "operation": "read"
+            }
+        ]
+    };
+    const response = http.post(url, JSON.stringify(payload), CONTENT_TYPE_JSON_PARAM);
+    return response;
+}
\ No newline at end of file
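
For context, a hypothetical k6 scenario function exercising batchRead, assuming it lives next to the ncmp test scripts so the relative imports hold (the scenario function name and the check are illustrative, not part of this change):

```javascript
// Sketch only: one batch-read request per iteration, sized by the
// DATA_OPERATION_READ_BATCH_SIZE constant added to utils.js.
import { check } from 'k6';
import { batchRead } from './common/passthrough-crud.js';
import { DATA_OPERATION_READ_BATCH_SIZE, getRandomCmHandleId } from './common/utils.js';

export function dataOperationBatchPassthroughRead() {
    const cmHandleIds = [];
    for (let i = 0; i < DATA_OPERATION_READ_BATCH_SIZE; i++) {
        cmHandleIds.push(getRandomCmHandleId());
    }
    const response = batchRead(cmHandleIds);
    // NCMP accepts the request synchronously; payloads arrive later on Kafka
    check(response, { 'batch read request accepted': (r) => r.status === 200 });
}
```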
diff --git a/k6-tests/ncmp/common/utils.js b/k6-tests/ncmp/common/utils.js
index 0f3b8d9..f24edc5 100644
--- a/k6-tests/ncmp/common/utils.js
+++ b/k6-tests/ncmp/common/utils.js
@@ -25,6 +25,9 @@
export const READ_DATA_FOR_CM_HANDLE_DELAY_MS = 300; // must have same value as in docker-compose.yml
export const WRITE_DATA_FOR_CM_HANDLE_DELAY_MS = 670; // must have same value as in docker-compose.yml
export const CONTENT_TYPE_JSON_PARAM = { headers: {'Content-Type': 'application/json'} };
+export const DATA_OPERATION_READ_BATCH_SIZE = 200;
+export const TOPIC_DATA_OPERATIONS_BATCH_READ = 'topic-data-operations-batch-read';
+export const KAFKA_BOOTSTRAP_SERVERS = ['localhost:9092'];
export function recordTimeInSeconds(functionToExecute) {
const startTimeInMillis = Date.now();
@@ -65,6 +68,7 @@
makeSummaryCsvLine('5b', 'NCMP overhead for Synchronous single CM-handle pass-through read', 'milliseconds', 'ncmp_overhead_passthrough_read', data, options),
makeSummaryCsvLine('6a', 'Synchronous single CM-handle pass-through write', 'requests/second', 'http_reqs{scenario:passthrough_write}', data, options),
makeSummaryCsvLine('6b', 'NCMP overhead for Synchronous single CM-handle pass-through write', 'milliseconds', 'ncmp_overhead_passthrough_write', data, options),
+ makeSummaryCsvLine('7', 'Data operations batch read', 'events/second', 'data_operations_batch_read_cmhandles_per_second', data, options),
];
return summaryCsvLines.join('\n') + '\n';
}
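
The new row 7 reads its value from k6's end-of-test summary data. Independent of the project's makeSummaryCsvLine helper (not shown in this diff), a Trend surfaces there roughly as follows; picking the avg aggregate is an assumption:

```javascript
// Sketch only: how the new Trend appears in handleSummary's data object.
export function handleSummary(data) {
    const trend = data.metrics['data_operations_batch_read_cmhandles_per_second'];
    // Trend metrics expose avg/min/med/max and percentile aggregates
    const eventsPerSecond = trend ? trend.values.avg : 0;
    return { stdout: `events/second (avg): ${eventsPerSecond}\n` };
}
```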