Enable Hibernate write batching

- Implement fragment ID sequence generator with allocation size of 100
- Enable write batching for inserts and updates, with batch size of 100 (see sketch below)
- Write performance is over 2x faster
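
Illustrative sketch only (not part of this patch); the repository bean and
entity setter names are assumed. With GenerationType.IDENTITY each INSERT
must round-trip to the database to obtain the generated key, which disables
JDBC batching. With GenerationType.SEQUENCE and allocationSize = 100,
Hibernate reserves 100 ids per sequence call and assigns them in memory,
so hibernate.jdbc.batch_size = 100 can group the inserts into batches:

    // Minimal sketch, assuming a Spring Data JPA repository for FragmentEntity.
    void storeFragmentsInBatches(final FragmentRepository fragmentRepository) {
        final List<FragmentEntity> fragments = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            final FragmentEntity fragment = new FragmentEntity();
            fragment.setXpath("/parent/child[@id='" + i + "']");  // assumed setter
            fragments.add(fragment);
        }
        fragmentRepository.saveAll(fragments);  // one flush -> batched INSERTs
    }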

Issue-ID: CPS-1795
Signed-off-by: danielhanrahan <daniel.hanrahan@est.tech>
Change-Id: I7ebd0f834abb57c593603e2cd3fafc7e3425ab1e
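
Note on the forward migration (22-fragment-id-sequence-forward.sql): the new
fragment_id_seq is initialised to the next multiple of 100 above the current
max(id), so ids allocated from the sequence stay above every row created under
the old IDENTITY column. For example, with max(id) = 250 the expression
((max(id) / 100) + 1) * 100 evaluates to 300 (integer division), and all
subsequently allocated ids therefore start beyond the existing range.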
diff --git a/cps-application/src/main/resources/application.yml b/cps-application/src/main/resources/application.yml
index f973b6f..4e20a4b 100644
--- a/cps-application/src/main/resources/application.yml
+++ b/cps-application/src/main/resources/application.yml
@@ -1,7 +1,7 @@
 #  ============LICENSE_START=======================================================
 #  Copyright (C) 2021 Pantheon.tech
 #  Modifications Copyright (C) 2021-2022 Bell Canada
-#  Modifications Copyright (C) 2021-2023 Nordix Foundation
+#  Modifications Copyright (C) 2021-2024 Nordix Foundation
 #  ================================================================================
 #  Licensed under the Apache License, Version 2.0 (the "License");
 #  you may not use this file except in compliance with the License.
@@ -39,6 +39,9 @@
         properties:
             hibernate.enable_lazy_load_no_trans: true
             hibernate.dialect: org.hibernate.dialect.PostgreSQLDialect
+            # Please ensure these values match those used in integration-test/src/test/resources/application.yml
+            hibernate.id.new_generator_mappings: true
+            hibernate.jdbc.batch_size: 100
 
     datasource:
         url: jdbc:postgresql://${DB_HOST}:${DB_PORT:5432}/cpsdb
diff --git a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java
old mode 100755
new mode 100644
index 12f42f2..c763f61
--- a/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java
+++ b/cps-ri/src/main/java/org/onap/cps/spi/entities/FragmentEntity.java
@@ -1,6 +1,6 @@
 /*
  * ============LICENSE_START=======================================================
- * Copyright (C) 2020-2023 Nordix Foundation.
+ * Copyright (C) 2020-2024 Nordix Foundation.
  * Modifications Copyright (C) 2021 Pantheon.tech
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -31,6 +31,7 @@
 import jakarta.persistence.JoinColumn;
 import jakarta.persistence.ManyToOne;
 import jakarta.persistence.OneToMany;
+import jakarta.persistence.SequenceGenerator;
 import jakarta.persistence.Table;
 import jakarta.validation.constraints.NotNull;
 import java.io.Serializable;
@@ -63,7 +64,8 @@
     private static final long serialVersionUID = 7737669789097119667L;
 
     @Id
-    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "fragment_id_seq_generator")
+    @SequenceGenerator(name = "fragment_id_seq_generator", sequenceName = "fragment_id_seq", allocationSize = 100)
     private Long id;
 
     @NotNull
diff --git a/cps-ri/src/main/resources/changelog/changelog-master.yaml b/cps-ri/src/main/resources/changelog/changelog-master.yaml
index f76c5ba..e10ce63 100644
--- a/cps-ri/src/main/resources/changelog/changelog-master.yaml
+++ b/cps-ri/src/main/resources/changelog/changelog-master.yaml
@@ -1,6 +1,6 @@
 # ============LICENSE_START=======================================================
 # Copyright (c) 2021 Bell Canada.
-# Modifications Copyright (C) 2022-2023 Nordix Foundation.
+# Modifications Copyright (C) 2022-2024 Nordix Foundation.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -58,3 +58,5 @@
       file: changelog/db/changes/20-change-foreign-key-id-types-to-integer.yaml
   - include:
       file: changelog/db/changes/21-escape-quotes-in-xpath.yaml
+  - include:
+      file: changelog/db/changes/22-fragment-id-sequence.yaml
diff --git a/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-forward.sql b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-forward.sql
new file mode 100644
index 0000000..7f8d686
--- /dev/null
+++ b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-forward.sql
@@ -0,0 +1,20 @@
+/*
+   ============LICENSE_START=======================================================
+    Copyright (C) 2024 Nordix Foundation.
+   ================================================================================
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+   SPDX-License-Identifier: Apache-2.0
+   ============LICENSE_END=========================================================
+*/
+
+ALTER TABLE fragment ALTER COLUMN id DROP IDENTITY;
+CREATE SEQUENCE fragment_id_seq INCREMENT BY 100 START WITH 100;
+SELECT setval('fragment_id_seq', (SELECT ((max(id) / 100) + 1) * 100 FROM fragment), TRUE);
diff --git a/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-rollback.sql b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-rollback.sql
new file mode 100644
index 0000000..88bb8c7
--- /dev/null
+++ b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence-rollback.sql
@@ -0,0 +1,20 @@
+/*
+   ============LICENSE_START=======================================================
+    Copyright (C) 2024 Nordix Foundation.
+   ================================================================================
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+        http://www.apache.org/licenses/LICENSE-2.0
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+   SPDX-License-Identifier: Apache-2.0
+   ============LICENSE_END=========================================================
+*/
+
+DROP SEQUENCE fragment_id_seq;
+ALTER TABLE fragment ALTER id ADD GENERATED BY DEFAULT AS IDENTITY;
+SELECT setval('fragment_id_seq', (SELECT max(id) FROM fragment), TRUE);
diff --git a/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence.yaml b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence.yaml
new file mode 100644
index 0000000..8bdabb5
--- /dev/null
+++ b/cps-ri/src/main/resources/changelog/db/changes/22-fragment-id-sequence.yaml
@@ -0,0 +1,28 @@
+# ============LICENSE_START=======================================================
+# Copyright (C) 2024 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+
+databaseChangeLog:
+  - changeSet:
+      author: cps
+      id: 22
+      changes:
+        - sqlFile:
+            path: changelog/db/changes/22-fragment-id-sequence-forward.sql
+      rollback:
+        - sqlFile:
+            path: changelog/db/changes/22-fragment-id-sequence-rollback.sql
diff --git a/docs/release-notes.rst b/docs/release-notes.rst
index b7f4c4f..1eac184 100644
--- a/docs/release-notes.rst
+++ b/docs/release-notes.rst
@@ -43,8 +43,20 @@
 
 Features
 --------
+    - `CPS-1795 <https://jira.onap.org/browse/CPS-1795>`_ Double performance of CPS write operations (via write batching)
     - `CPS-2018 <https://jira.onap.org/browse/CPS-2018>`_ Improve performance of CPS update operations.
 
+Known Limitations, Issues and Workarounds
+-----------------------------------------
+
+*System Limitations*
+
+CPS uses Liquibase to manage database upgrades. Enabling Hibernate write batching
+(`CPS-1795 <https://jira.onap.org/browse/CPS-1795>`_) required a change to how database entity IDs are generated.
+As a result, *this release does not fully support In-Service Software Upgrade*: while old and new versions of CPS
+run concurrently during an upgrade, CPS will not store new DataNodes and NCMP will not register new CM-handles.
+Other operations (read, update, delete) are not affected.
+
 
 Version: 3.4.1
 ==============
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
index 816aeff..920f407 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/base/CpsPerfTestBase.groovy
@@ -1,6 +1,6 @@
 /*
  *  ============LICENSE_START=======================================================
- *  Copyright (C) 2023 Nordix Foundation
+ *  Copyright (C) 2023-2024 Nordix Foundation
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the 'License');
  *  you may not use this file except in compliance with the License.
@@ -67,7 +67,7 @@
         addAnchorsWithData(OPENROADM_ANCHORS, CPS_PERFORMANCE_TEST_DATASPACE, LARGE_SCHEMA_SET, 'openroadm', data)
         resourceMeter.stop()
         def durationInSeconds = resourceMeter.getTotalTimeInSeconds()
-        recordAndAssertResourceUsage('Creating openroadm anchors with large data tree', 200, durationInSeconds, 600, resourceMeter.getTotalMemoryUsageInMB())
+        recordAndAssertResourceUsage('Creating openroadm anchors with large data tree', 100, durationInSeconds, 600, resourceMeter.getTotalMemoryUsageInMB())
     }
 
     def generateOpenRoadData(numberOfNodes) {
@@ -87,7 +87,7 @@
         then: 'memory used is within #peakMemoryUsage'
             assert resourceMeter.getTotalMemoryUsageInMB() <= 30
         and: 'all data is read within expected time'
-            recordAndAssertResourceUsage("Warming database", 200, durationInSeconds, 600, resourceMeter.getTotalMemoryUsageInMB())
+            recordAndAssertResourceUsage("Warming database", 100, durationInSeconds, 600, resourceMeter.getTotalMemoryUsageInMB())
     }
 
 }
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimitsPerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimitsPerfTest.groovy
index 4d4d612..0c1e1f5 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimitsPerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/CpsDataServiceLimitsPerfTest.groovy
@@ -1,6 +1,6 @@
 /*
  *  ============LICENSE_START=======================================================
- *  Copyright (C) 2023 Nordix Foundation
+ *  Copyright (C) 2023-2024 Nordix Foundation
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the 'License');
  *  you may not use this file except in compliance with the License.
@@ -46,8 +46,8 @@
             }
             resourceMeter.stop()
             def durationInSeconds = resourceMeter.getTotalTimeInSeconds()
-        then: 'the operation completes within 25 seconds'
-            recordAndAssertResourceUsage("Creating 33,000 books", 25, durationInSeconds, 150, resourceMeter.getTotalMemoryUsageInMB())
+        then: 'the operation completes within 12 seconds'
+            recordAndAssertResourceUsage("Creating 33,000 books", 12, durationInSeconds, 150, resourceMeter.getTotalMemoryUsageInMB())
     }
 
     def 'Get data nodes from multiple xpaths 32K (2^15) limit exceeded.'() {
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy
index e7bfabe..2efbf7d 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/DeletePerfTest.groovy
@@ -1,6 +1,6 @@
 /*
  *  ============LICENSE_START=======================================================
- *  Copyright (C) 2023 Nordix Foundation
+ *  Copyright (C) 2023-2024 Nordix Foundation
  *  ================================================================================
  *  Licensed under the Apache License, Version 2.0 (the 'License');
  *  you may not use this file except in compliance with the License.
@@ -40,7 +40,7 @@
             resourceMeter.stop()
             def setupDurationInSeconds = resourceMeter.getTotalTimeInSeconds()
         then: 'setup duration is within expected time and memory used is within limit'
-            recordAndAssertResourceUsage('Delete test setup', 200, setupDurationInSeconds, 800, resourceMeter.getTotalMemoryUsageInMB())
+            recordAndAssertResourceUsage('Delete test setup', 100, setupDurationInSeconds, 800, resourceMeter.getTotalMemoryUsageInMB())
     }
 
     def 'Delete 100 container nodes'() {
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy
index b3030b1..69f6477 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/UpdatePerfTest.groovy
@@ -79,12 +79,12 @@
                     memoryLimit, resourceMeter.getTotalMemoryUsageInMB())
         where:
             scenario                           | totalNodes | startId | changeLeaves || timeLimit | memoryLimit
-            'Replace 0 nodes with 100'         | 100        | 1       | false        ||         7 | 250
-            'Replace 100 using same data'      | 100        | 1       | false        ||         3 | 250
-            'Replace 100 with new leaf values' | 100        | 1       | true         ||         3 | 250
-            'Replace 100 with 100 new nodes'   | 100        | 101     | false        ||        12 | 300
-            'Replace 50 existing and 50 new'   | 100        | 151     | true         ||         8 | 250
-            'Replace 100 nodes with 0'         | 0          | 1       | false        ||         5 | 250
+            'Replace 0 nodes with 100'         | 100        | 1       | false        ||       2.5 | 200
+            'Replace 100 using same data'      | 100        | 1       | false        ||       3.0 | 200
+            'Replace 100 with new leaf values' | 100        | 1       | true         ||       3.0 | 200
+            'Replace 100 with 100 new nodes'   | 100        | 101     | false        ||       6.0 | 200
+            'Replace 50 existing and 50 new'   | 100        | 151     | true         ||       4.5 | 200
+            'Replace 100 nodes with 0'         | 0          | 1       | false        ||       3.0 | 200
     }
 
     def 'Replace list content: #scenario.'() {
@@ -105,12 +105,12 @@
                     memoryLimit, resourceMeter.getTotalMemoryUsageInMB())
         where:
             scenario                                   | totalNodes | startId | changeLeaves || timeLimit | memoryLimit
-            'Replace list of 0 with 100'               | 100        | 1       | false        ||         7 | 250
-            'Replace list of 100 using same data'      | 100        | 1       | false        ||         3 | 250
-            'Replace list of 100 with new leaf values' | 100        | 1       | true         ||         3 | 250
-            'Replace list with 100 new nodes'          | 100        | 101     | false        ||        12 | 300
-            'Replace list with 50 existing and 50 new' | 100        | 151     | true         ||         8 | 250
-            'Replace list of 100 nodes with 1'         | 1          | 1       | false        ||         5 | 250
+            'Replace list of 0 with 100'               | 100        | 1       | false        ||       2.5 | 200
+            'Replace list of 100 using same data'      | 100        | 1       | false        ||       3.0 | 200
+            'Replace list of 100 with new leaf values' | 100        | 1       | true         ||       3.0 | 200
+            'Replace list with 100 new nodes'          | 100        | 101     | false        ||       6.0 | 200
+            'Replace list with 50 existing and 50 new' | 100        | 151     | true         ||       4.5 | 200
+            'Replace list of 100 nodes with 1'         | 1          | 1       | false        ||       3.0 | 200
     }
 
     def 'Update leaves for 100 data nodes.'() {
@@ -127,7 +127,7 @@
             assert 100 == countDataNodes('/openroadm-devices/openroadm-device[@status="fail"]')
         and: 'update completes within expected time and memory used is within limit'
             recordAndAssertResourceUsage('Update leaves for 100 data nodes',
-                    0.5, resourceMeter.getTotalTimeInSeconds(),
+                    0.4, resourceMeter.getTotalTimeInSeconds(),
                     120, resourceMeter.getTotalMemoryUsageInMB())
     }
 
diff --git a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/WritePerfTest.groovy b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/WritePerfTest.groovy
index a5669ba..10329a5 100644
--- a/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/WritePerfTest.groovy
+++ b/integration-test/src/test/groovy/org/onap/cps/integration/performance/cps/WritePerfTest.groovy
@@ -45,10 +45,10 @@
             cpsAnchorService.deleteAnchor(CPS_PERFORMANCE_TEST_DATASPACE, WRITE_TEST_ANCHOR)
         where:
             totalNodes || expectedDuration | memoryLimit
-            50         || 4                | 100
-            100        || 7                | 200
-            200        || 14               | 400
-            400        || 28               | 500
+            50         || 2                | 100
+            100        || 4                | 200
+            200        || 7                | 400
+            400        || 14               | 500
     }
 
     def 'Writing bookstore data has exponential time.'() {
@@ -71,8 +71,8 @@
             cpsAnchorService.deleteAnchor(CPS_PERFORMANCE_TEST_DATASPACE, WRITE_TEST_ANCHOR)
         where:
             totalBooks || expectedDuration | memoryLimit
-            800        || 1                | 50
-            1600       || 2                | 100
+            800        || 0.5              | 50
+            1600       || 1.5              | 100
             3200       || 6                | 150
             6400       || 18               | 200
     }
diff --git a/integration-test/src/test/resources/application.yml b/integration-test/src/test/resources/application.yml
index 0aefac8..55560aa 100644
--- a/integration-test/src/test/resources/application.yml
+++ b/integration-test/src/test/resources/application.yml
@@ -1,5 +1,5 @@
 # ============LICENSE_START=======================================================
-# Copyright (C) 2023 Nordix Foundation.
+# Copyright (C) 2023-2024 Nordix Foundation.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,11 +20,13 @@
     ddl-auto: create
     show-sql: false
     properties:
-      hibernate:
-        enable_lazy_load_no_trans: true
-        dialect: org.hibernate.dialect.PostgreSQLDialect
-        format_sql: true
-        show_sql: false
+      hibernate.enable_lazy_load_no_trans: true
+      hibernate.dialect: org.hibernate.dialect.PostgreSQLDialect
+      hibernate.format_sql: true
+      hibernate.show_sql: false
+      # Please ensure these values match those used in cps-application/src/main/resources/application.yml
+      hibernate.id.new_generator_mappings: true
+      hibernate.jdbc.batch_size: 100
 
   datasource:
     url: ${DB_URL}
diff --git a/integration-test/src/test/resources/liquibase/test-changelog.yaml b/integration-test/src/test/resources/liquibase/test-changelog.yaml
index 3f12331..ba639ed 100644
--- a/integration-test/src/test/resources/liquibase/test-changelog.yaml
+++ b/integration-test/src/test/resources/liquibase/test-changelog.yaml
@@ -1,5 +1,5 @@
 # ============LICENSE_START=======================================================
-# Copyright (c) 2023 Nordix Foundation.
+# Copyright (c) 2023-2024 Nordix Foundation.
 # ================================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -100,7 +100,7 @@
         - createTable:
             columns:
               - column:
-                  autoIncrement: true
+                  autoIncrement: false
                   constraints:
                     nullable: false
                     primaryKey: true
@@ -127,6 +127,11 @@
                   name: schema_node_id
                   type: INTEGER
             tableName: fragment
+        - createSequence:
+            dataType: BIGINT
+            incrementBy: 100
+            sequenceName: fragment_id_seq
+            startValue: 100
   - changeSet:
       id: 1-5
       author: cps