Calculate throughput for legacy batch read data operations
- stress the cps-and-ncmp module with legacy batch read data operations
- to do so, use the shared-iterations executor for the async http requests
- in parallel, consume the expected number of messages as fast as possible
  (see the sketch below)
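
A minimal sketch of how the two k6 scenarios could be wired together;
the scenario names, VU counts and iteration counts are illustrative
assumptions, not the values used by this change:

    // minimal sketch, assuming the test script lives under k6-tests/ncmp/
    import { legacyBatchRead } from './common/passthrough-crud.js';

    export const options = {
        scenarios: {
            // fire the async legacy batch read requests via the shared-iterations executor
            legacy_batch_produce: {
                executor: 'shared-iterations',
                exec: 'produceLegacyBatchRequests',
                vus: 2,            // illustrative value
                iterations: 100,   // illustrative value
                maxDuration: '15m',
            },
            // in parallel, drain the messages published to the legacy batch topic
            legacy_batch_consume: {
                executor: 'per-vu-iterations',
                exec: 'consumeLegacyBatchResponses',
                vus: 1,
                iterations: 1,
                maxDuration: '15m',
            },
        },
    };

    export function produceLegacyBatchRequests() {
        // cm handle ids would come from the handles registered during test setup
        legacyBatchRead(['ch-1', 'ch-2', 'ch-3']);
    }

    export function consumeLegacyBatchResponses() {
        // consume the expected number of messages as fast as possible,
        // e.g. with the xk6-kafka extension's reader (not shown here)
    }
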
Issue-ID: CPS-2268
Change-Id: I1b6724479dac7391cbb6407fda52d15176aa8985
Signed-off-by: halil.cakal <halil.cakal@est.tech>
diff --git a/k6-tests/ncmp/common/passthrough-crud.js b/k6-tests/ncmp/common/passthrough-crud.js
index 0cd96ad..86fcef6 100644
--- a/k6-tests/ncmp/common/passthrough-crud.js
+++ b/k6-tests/ncmp/common/passthrough-crud.js
@@ -23,8 +23,8 @@
performPostRequest,
performGetRequest,
NCMP_BASE_URL,
- TOPIC_DATA_OPERATIONS_BATCH_READ,
- TOTAL_CM_HANDLES
+ LEGACY_BATCH_TOPIC_NAME,
+ TOTAL_CM_HANDLES,
} from './utils.js';
export function passthroughRead(useAlternateId) {
@@ -46,8 +46,8 @@
return performPostRequest(url, payload, 'passthroughWrite');
}
-export function batchRead(cmHandleIds) {
- const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${TOPIC_DATA_OPERATIONS_BATCH_READ}`;
+export function legacyBatchRead(cmHandleIds) {
+ const url = `${NCMP_BASE_URL}/ncmp/v1/data?topic=${LEGACY_BATCH_TOPIC_NAME}`;
const payload = JSON.stringify({
"operations": [
{