Catalog alignment

Issue-ID: SDC-2724
Signed-off-by: ys9693 <ys9693@att.com>
Change-Id: I52b4aacb58cbd432ca0e1ff7ff1f7dd52099c6fe
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java b/asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java
similarity index 66%
rename from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java
rename to asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java
index a22e862..3f66031 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java
+++ b/asdctool/src/main/java/com/att/nsa/cambria/client/CambriaConsumer.java
@@ -2,14 +2,14 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,14 +18,8 @@
  * ============LICENSE_END=========================================================
  */
 
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package com.att.nsa.cambria.client;
 
-import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
-import org.openecomp.sdc.common.api.HealthCheckInfo;
-
-public class EsHealthCheckDaoMock implements IEsHealthCheckDao {
-    @Override
-    public HealthCheckInfo.HealthCheckStatus getClusterHealthStatus() {
-        return HealthCheckInfo.HealthCheckStatus.UP;
-    }
+public class CambriaConsumer {
+    //mock for bean init
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
index f7aaa1e..b433357 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
@@ -23,15 +23,12 @@
 import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Hello world!
  *
  */
 public class App {
-	private static Logger log = LoggerFactory.getLogger(App.class);
 	public static void main(String[] args) {
 
 		String asdcToolPort = "8087";
@@ -55,12 +52,12 @@
 		try {
 			jettyServer.start();
 
-			log.info("Server was started on port {}", asdcToolPort);
+			System.out.println("Server was started on port " + asdcToolPort);
 
 			jettyServer.join();
 
 		} catch (Exception e) {
-			log.info("Server failed to start - {}", e);
+			e.printStackTrace();
 			System.exit(1);
 		} finally {
 			jettyServer.destroy();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
index 44bfc53..378b81a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
@@ -20,12 +20,12 @@
 
 package org.openecomp.sdc.asdctool;
 
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
 import org.apache.commons.configuration.Configuration;
 import org.apache.tinkerpop.gremlin.structure.Element;
 import org.apache.tinkerpop.gremlin.structure.Property;
 import org.apache.tinkerpop.gremlin.structure.util.ElementHelper;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
 import javax.ws.rs.core.Response;
@@ -38,7 +38,7 @@
 
 	private static Logger log = Logger.getLogger(Utils.class.getName());
 
-	public final static String NEW_LINE = System.getProperty("line.separator");
+	public static String NEW_LINE = System.getProperty("line.separator");
 
 	public static Response buildOkResponse(
 			/*
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
index 6cb6a5b..d4ebff6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
@@ -20,7 +20,12 @@
 
 package org.openecomp.sdc.asdctool.cli;
 
-import org.apache.commons.cli.*;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
index abfd105..9923436 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
@@ -30,12 +30,10 @@
 import org.openecomp.sdc.be.tosca.CsarUtils;
 import org.openecomp.sdc.be.tosca.ToscaExportHandler;
 import org.openecomp.sdc.config.CatalogBESpringConfig;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
 
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@@ -51,13 +49,4 @@
         return new ArtifactUuidFix(janusGraphDao, toscaOperationFacade, toscaExportHandler, artifactCassandraDao, csarUtils);
     }
     
-    @Bean(name = "elasticsearchConfig")
-    public PropertiesFactoryBean mapper() {
-        String configHome = System.getProperty("config.home");
-        PropertiesFactoryBean bean = new PropertiesFactoryBean();
-        bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
-        return bean;
-    }
-
-
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
index d115f9c..975066f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
@@ -1,6 +1,9 @@
-/*
- * Copyright © 2016-2018 AT&T
- *
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2016-2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
@@ -12,6 +15,9 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ * ============LICENSE_END=========================================================
+ * Modifications copyright (c) 2019 Nokia
+ * ================================================================================
  */
 
 package org.openecomp.sdc.asdctool.configuration;
@@ -21,6 +27,8 @@
 import org.openecomp.sdc.common.impl.ExternalConfiguration;
 import org.openecomp.sdc.common.impl.FSConfigurationSource;
 
+import java.io.File;
+
 public class ConfigurationUploader {
 
     public static void uploadConfigurationFiles(String appConfigDir) {
@@ -28,5 +36,15 @@
         new ConfigurationManager(configurationSource);
         ExternalConfiguration.setAppVersion(ConfigurationManager.getConfigurationManager().getConfiguration().getAppVersion());
         System.setProperty("config.home", appConfigDir);
+        System.setProperty("artifactgenerator.config", buildArtifactGeneratorPath(appConfigDir));
+    }
+
+    private static String buildArtifactGeneratorPath(String appConfigDir) {
+        StringBuilder artifactGeneratorPath = new StringBuilder(appConfigDir);
+        if(!appConfigDir.endsWith(File.separator)){
+            artifactGeneratorPath.append(File.separator);
+        }
+        artifactGeneratorPath.append(ConfigurationManager.getConfigurationManager().getConfiguration().getArtifactGeneratorConfig());
+        return artifactGeneratorPath.toString();
     }
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
index 32c37a3..ee4d2c1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/CsarGeneratorConfiguration.java
@@ -21,7 +21,6 @@
 package org.openecomp.sdc.asdctool.configuration;
 
 import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
-import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
 import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
 import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
@@ -30,12 +29,10 @@
 import org.openecomp.sdc.be.tosca.CsarUtils;
 import org.openecomp.sdc.be.tosca.ToscaExportHandler;
 import org.openecomp.sdc.config.CatalogBESpringConfig;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
 
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@@ -51,12 +48,5 @@
             artifactCassandraDao, toscaExportHandler);
     }
 
-    @Bean(name = "elasticsearchConfig")
-    public PropertiesFactoryBean mapper() {
-        String configHome = System.getProperty("config.home");
-        PropertiesFactoryBean bean = new PropertiesFactoryBean();
-        bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
-        return bean;
-    }
 
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
index 5195673..1b09b2a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/InternalToolConfiguration.java
@@ -26,22 +26,13 @@
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTypeOperation;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
 
 @Configuration
 @Import({DAOSpringConfig.class, CatalogModelSpringConfig.class})
 public class InternalToolConfiguration {
-    @Bean(name = "elasticsearchConfig")
-    public PropertiesFactoryBean mapper() {
-        String configHome = System.getProperty("config.home");
-        PropertiesFactoryBean bean = new PropertiesFactoryBean();
-        bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
-        return bean;
-    }
     
     @Bean
     public DeleteComponentHandler deleteComponentHandler(
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
similarity index 65%
rename from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java
rename to asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
index 8c70ad3..0cfd894 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/SdcSchemaFileImportConfiguration.java
@@ -2,7 +2,7 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -20,21 +20,22 @@
 
 package org.openecomp.sdc.asdctool.configuration;
 
-import org.openecomp.sdc.be.dao.config.JanusGraphSpringConfig;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
-import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
+import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
 
 @Configuration
-@Import({JanusGraphSpringConfig.class})
-public class GetConsumersConfiguration {
+public class SdcSchemaFileImportConfiguration {
 
 
-    @Bean("consumer-operation")
-    public ConsumerOperation consumerOperation(JanusGraphGenericDao janusGraphGenericDao) {
-        return new ConsumerOperation(janusGraphGenericDao);
-    }
+	@Bean(name = "cassandra-client")
+	public CassandraClient cassandraClient() {
+		return new CassandraClient();
+	}
+	@Bean(name = "sdc-schema-files-cassandra-dao")
+	public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao() {
+		return new SdcSchemaFilesCassandraDao(cassandraClient());
+	}
 
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
deleted file mode 100644
index 75283f1..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import fj.data.Either;
-import org.openecomp.sdc.be.dao.api.ICatalogDAO;
-import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-
-import java.util.List;
-
-public class ESCatalogDAOMock implements ICatalogDAO {
-
-    @Override
-    public void addToIndicesMap(String typeName, String indexName) {
-
-    }
-
-    @Override
-    public void writeArtifact(ESArtifactData artifactData) {
-
-    }
-
-    @Override
-    public Either<ESArtifactData, ResourceUploadStatus> getArtifact(String id) {
-        return null;
-    }
-
-    @Override
-    public Either<List<ESArtifactData>, ResourceUploadStatus> getArtifacts(String[] ids) {
-        return null;
-    }
-
-    @Override
-    public void deleteArtifact(String id) {
-
-    }
-
-    @Override
-    public void deleteAllArtifacts() {
-
-    }
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java
deleted file mode 100644
index fd68de2..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
-
-import org.openecomp.sdc.be.dao.api.ICatalogDAO;
-import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class ElasticSearchMocksConfiguration {
-
-    @Bean("elasticsearch-client")
-    public ElasticSearchClient elasticSearchClientMock() {
-        return new ElasticSearchClientMock();
-    }
-
-    @Bean("resource-dao")
-    public ICatalogDAO esCatalogDAOMock() {
-        return new ESCatalogDAOMock();
-    }
-
-    @Bean("esHealthCheckDao")
-    public IEsHealthCheckDao esHealthCheckDaoMock() {
-        return new EsHealthCheckDaoMock();
-    }
-
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java
new file mode 100644
index 0000000..92c4a7c
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/DistributionStatusEnum.java
@@ -0,0 +1,46 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.enums;
+
+public enum DistributionStatusEnum {
+    DISTRIBUTION_NOT_APPROVED("Distribution not approved"), DISTRIBUTION_APPROVED("Distribution approved"), DISTRIBUTED("Distributed"), DISTRIBUTION_REJECTED("Distribution rejected");
+
+    private String value;
+
+    private DistributionStatusEnum(String value) {
+        this.value = value;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public static DistributionStatusEnum findState(String state) {
+
+        for (DistributionStatusEnum distributionStatus : DistributionStatusEnum.values()) {
+            if (distributionStatus.name().equalsIgnoreCase(state) || distributionStatus.getValue().equalsIgnoreCase(state)) {
+                return distributionStatus;
+            }
+        }
+        return null;
+    }
+
+}
\ No newline at end of file
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java
new file mode 100644
index 0000000..b29f5fb
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifeCycleTransitionEnum.java
@@ -0,0 +1,83 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.enums;
+
+public enum LifeCycleTransitionEnum {
+
+    CHECKOUT("checkout"),
+    CHECKIN("checkin"),
+    CERTIFICATION_REQUEST("certificationRequest"),
+    UNDO_CHECKOUT("undoCheckout"),
+    CANCEL_CERTIFICATION("cancelCertification"),
+    START_CERTIFICATION("startCertification"),
+    FAIL_CERTIFICATION("failCertification"),
+    CERTIFY("certify"),
+    DISTRIBUTE("distribute");
+
+    String displayName;
+
+    LifeCycleTransitionEnum(String displayName) {
+        this.displayName = displayName;
+    }
+
+    public String getDisplayName() {
+        return displayName;
+    }
+
+    public static LifeCycleTransitionEnum getFromDisplayName(String name) {
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CHECKOUT.getDisplayName())) {
+            return LifeCycleTransitionEnum.CHECKOUT;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CHECKIN.getDisplayName())) {
+            return LifeCycleTransitionEnum.CHECKIN;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CERTIFICATION_REQUEST.getDisplayName())) {
+            return LifeCycleTransitionEnum.CERTIFICATION_REQUEST;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.UNDO_CHECKOUT.getDisplayName())) {
+            return LifeCycleTransitionEnum.UNDO_CHECKOUT;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CANCEL_CERTIFICATION.getDisplayName())) {
+            return LifeCycleTransitionEnum.CANCEL_CERTIFICATION;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.START_CERTIFICATION.getDisplayName())) {
+            return LifeCycleTransitionEnum.START_CERTIFICATION;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.FAIL_CERTIFICATION.getDisplayName())) {
+            return LifeCycleTransitionEnum.FAIL_CERTIFICATION;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.CERTIFY.getDisplayName())) {
+            return LifeCycleTransitionEnum.CERTIFY;
+        }
+        if (name.equalsIgnoreCase(LifeCycleTransitionEnum.DISTRIBUTE.getDisplayName())) {
+            return LifeCycleTransitionEnum.DISTRIBUTE;
+        } else
+            throw new IllegalArgumentException(name + " value does not match any of LifeCycleTransitionEnum values");
+    }
+
+    public static String valuesAsString() {
+        StringBuilder sb = new StringBuilder();
+        for (LifeCycleTransitionEnum op : LifeCycleTransitionEnum.values()) {
+            sb.append(op.getDisplayName()).append(" ");
+        }
+        return sb.toString();
+    }
+}
\ No newline at end of file
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
similarity index 61%
copy from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
copy to asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
index 04b398b..98fb95f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/LifecycleStateEnum.java
@@ -2,14 +2,14 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,27 +18,27 @@
  * ============LICENSE_END=========================================================
  */
 
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package org.openecomp.sdc.asdctool.enums;
 
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+public enum LifecycleStateEnum {
 
-public class ElasticSearchClientMock extends ElasticSearchClient {
+    READY_FOR_CERTIFICATION,
 
-    @Override
-    public void initialize() {
+    CERTIFICATION_IN_PROGRESS,
 
+    CERTIFIED,
+
+    NOT_CERTIFIED_CHECKIN,
+
+    NOT_CERTIFIED_CHECKOUT;
+
+    public static LifecycleStateEnum findState(String state) {
+
+        for (LifecycleStateEnum lifecycleStateEnum : LifecycleStateEnum.values()) {
+            if (lifecycleStateEnum.name().equals(state)) {
+                return lifecycleStateEnum;
+            }
+        }
+        return null;
     }
-
-    @Override
-    public void setClusterName(final String clusterName) {
-
-    }
-
-    @Override
-    public void setLocal(final String strIsLocal) {
-    }
-
-    @Override
-    public void setTransportClient(final String strIsTransportclient) {
-    }
-}
+}
\ No newline at end of file
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
index 471b54d..1ab2b80 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/enums/SchemaZipFileEnum.java
@@ -20,7 +20,9 @@
 
 package org.openecomp.sdc.asdctool.enums;
 
-import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.*;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.DATA_IMPORT_LIST;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.EMPTY_IMPORT_LIST;
+import static org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum.SchemaZipConstants.RELATIONSHIPS_TYPES_IMPORT_LIST;
 
 public enum SchemaZipFileEnum {
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index 4dab15f..4ad90b8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -44,7 +44,16 @@
 import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
 import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.DistributionStatusEnum;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupInstance;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
 import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.TopologyTemplate;
 import org.openecomp.sdc.be.model.jsonjanusgraph.datamodel.ToscaElement;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
@@ -52,7 +61,7 @@
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
 import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
 import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.DAOArtifactData;
 import org.openecomp.sdc.be.tosca.CsarUtils;
 import org.openecomp.sdc.be.tosca.ToscaError;
 import org.openecomp.sdc.be.tosca.ToscaExportHandler;
@@ -72,23 +81,23 @@
 import java.io.Writer;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
-import java.util.EnumMap;
 import java.util.stream.Collectors;
 
 @org.springframework.stereotype.Component("artifactUuidFix")
 public class ArtifactUuidFix {
-	
+
 	private static final String MIGRATION1707_ARTIFACT_UUID_FIX = "Migration1707ArtifactUuidFix  fix group:  group name {} correct artifactId {} artifactUUID {} ";
 
-    private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
+	private static final String FAILED_TO_FETCH_VF_RESOURCES = "Failed to fetch vf resources ";
 
-    private static final String UTF8 = "utf-8";
+	private static final String UTF8 = "utf-8";
 
 	private JanusGraphDao janusGraphDao;
 	private ToscaOperationFacade toscaOperationFacade;
@@ -249,7 +258,7 @@
 							vfLst.add(resource);
 							writeModuleResultToFile(writer, resource, service);
 							writer.flush();
-							
+
 						}
 						janusGraphDao.commit();
 					}
@@ -274,7 +283,7 @@
 			Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
 			hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
 			if ("distributed_only".equals(fixServices)) {
-				hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());			
+				hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
 				hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name());
 			}
 
@@ -302,17 +311,17 @@
 					continue;
 				}
 				Service service = toscaElement.left().value();
-				
+
 				String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME);
-				
+
 				boolean isProblematic = isProblematicService(service, serviceName);
 				if (isProblematic) {
 					serviceList.add(service);
 					writeModuleResultToFile(writer, service, null);
 					writer.flush();
-				
+
 				}
-				
+
 				janusGraphDao.commit();
 			}
 			log.info("output file with list of services : {}", fileName);
@@ -326,9 +335,9 @@
 	}
 
 	private boolean isProblematicService( Service service, String serviceName) {
-		
+
 		List<ComponentInstance> componentInstances = service.getComponentInstances();
-		
+
 		if (componentInstances == null) {
 			log.info("No instances for service {} ", service.getUniqueId());
 			return false;
@@ -350,21 +359,21 @@
 			if(isCheckVFModules){
 				Optional<ArtifactDefinition> optionalVfModuleArtifact = deploymentArtifacts.values().stream()
 						.filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny();
-				
-				 if(!optionalVfModuleArtifact.isPresent())
-					 return true;
-			
-				 ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
-				 Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
-				 if(vfModulesEither.isRight()){
-					 log.error("Failed to parse vfModule for service {} status is {}", service.getUniqueId(), vfModulesEither.right().value());
-					 return true;
-				 }
-				 vfModules = vfModulesEither.left().value();
-				 if(vfModules == null || vfModules.isEmpty() ){
-					 log.info("vfModules empty for service {}", service.getUniqueId());
-					 return true;
-				 }	
+
+				if(!optionalVfModuleArtifact.isPresent())
+					return true;
+
+				ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
+				Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
+				if(vfModulesEither.isRight()){
+					log.error("Failed to parse vfModule for service {} status is {}", service.getUniqueId(), vfModulesEither.right().value());
+					return true;
+				}
+				vfModules = vfModulesEither.left().value();
+				if(vfModules == null || vfModules.isEmpty() ){
+					log.info("vfModules empty for service {}", service.getUniqueId());
+					return true;
+				}
 			}
 
 			for (GroupInstance gi : groupInstances) {
@@ -373,7 +382,7 @@
 					if(isCheckVFModules && vfModules != null){
 						Optional<VfModuleArtifactPayloadEx> op = vfModules.stream().filter(vf -> vf.getVfModuleModelName().equals(gi.getGroupName())).findAny();
 						if(!op.isPresent()){
-							 log.error("Failed to find vfModule for group {}", gi.getGroupName());
+							log.error("Failed to find vfModule for group {}", gi.getGroupName());
 							return true;
 						}
 						vfModule = op.get();
@@ -383,13 +392,13 @@
 					}
 				}
 			}
-			
+
 		}
 		return false;
 	}
 
 	private boolean isProblematicGroup(GroupDefinition gr, String resourceName,
-			Map<String, ArtifactDefinition> deploymentArtifacts) {
+									   Map<String, ArtifactDefinition> deploymentArtifacts) {
 		List<String> artifacts = gr.getArtifacts();
 		List<String> artifactsUuid = gr.getArtifactsUuid();
 		Set<String> artifactsSet = new HashSet<>();
@@ -454,14 +463,14 @@
 	}
 
 	private boolean isProblematicGroupInstance(GroupInstance gi, String instName, String servicename,
-			Map<String, ArtifactDefinition> deploymentArtifacts, VfModuleArtifactPayloadEx vfModule) {
+											   Map<String, ArtifactDefinition> deploymentArtifacts, VfModuleArtifactPayloadEx vfModule) {
 		List<String> artifacts = gi.getArtifacts();
 		List<String> artifactsUuid = gi.getArtifactsUuid();
 		List<String> instArtifactsUuid = gi.getGroupInstanceArtifactsUuid();
 		List<String> instArtifactsId = gi.getGroupInstanceArtifacts();
 		Set<String> instArtifatIdSet = new HashSet<>();
-		Set<String> artifactsSet = new HashSet<>();	
-	
+		Set<String> artifactsSet = new HashSet<>();
+
 		log.info("check group {} for instance {} ", gi.getGroupName(), instName);
 		if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
 			log.info("No instance groups for instance {} in service {} ", instName, servicename);
@@ -548,12 +557,12 @@
 		if(vfModule != null && artifactsUuid != null){
 			return isProblematicVFModule(vfModule, artifactsUuid, instArtifactsUuid);
 		}
-		
+
 		return false;
 	}
 
 	private boolean isProblematicVFModule(VfModuleArtifactPayloadEx vfModule, List<String> artifactsUuid,
-			List<String> instArtifactsUuid) {
+										  List<String> instArtifactsUuid) {
 		log.info(" isProblematicVFModule  {}  ", vfModule.getVfModuleModelName());
 		List<String> vfModuleArtifacts = vfModule.getArtifacts();
 		List<String> allArtifacts = new ArrayList<>();
@@ -580,10 +589,10 @@
 		return false;
 	}
 
-	
+
 
 	private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
-			Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
+						Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
 		boolean res = true;
 		log.info(" Fix started ***** ");
 		if (vfLst != null && !vfLst.isEmpty()) {
@@ -600,6 +609,7 @@
 
 		long time = System.currentTimeMillis();
 		String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
+
 		try(Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), UTF8))) {
 			writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
 			List<Component> failedList = new ArrayList<>();
@@ -614,7 +624,7 @@
 				generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst, failedList);
 
 			}
-			
+
 			for (Component component : vfLst) {
 				res = generateToscaPerComponent(fixedIds, component);
 				if (res) {
@@ -642,7 +652,7 @@
 
 			}
 
-			
+
 			for (Component component : serviceList) {
 				res = generateToscaPerComponent(fixedIds, component);
 				if (res) {
@@ -690,7 +700,7 @@
 	}
 
 	private boolean generateAndSaveToscaArtifacts(Map<String, List<Component>> nodesToFixTosca, Set<String> fixedIds,
-			List<? extends Component> componentsWithFailedGroups, List<Component> failedList) {
+												  List<? extends Component> componentsWithFailedGroups, List<Component> failedList) {
 		boolean res = true;
 		log.debug("Migration1707ArtifactUuidFix  generateAndSaveToscaArtifacts started ");
 		for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
@@ -751,9 +761,9 @@
 				log.error("Couldn't generate and save tosca template component  unique id {}, name {} error: {}",
 						toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
 				res = false;
-				
+
 			}
-			
+
 			if (res) {
 				c.setToscaArtifacts(either.left().value().getToscaArtifacts());
 				fixedIds.add(toscaElementFull.getUniqueId());
@@ -770,7 +780,7 @@
 	}
 
 	private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum,
-			EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
+																   EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
 		log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(),
 				edgeLabelEnum, groups.size());
 		boolean res = true;
@@ -860,7 +870,7 @@
 	}
 
 	private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap,
-			List<GroupInstance> groupsToDelete, GroupInstance group) {
+								   List<GroupInstance> groupsToDelete, GroupInstance group) {
 		if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
 			log.debug("Migration1707ArtifactUuidFix  fix group:  resource id {}, group name {} ", service.getUniqueId(),
 					group.getName());
@@ -871,9 +881,9 @@
 					groupArtifactsSet.addAll(groupInsArtifacts);
 				}
 				List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
-	
+
 				clearGroupInstanceArtifacts(group);
-	
+
 				for (String artifactId : groupArtifacts) {
 					fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId);
 				}
@@ -902,14 +912,14 @@
 		else{
 			group.setGroupInstanceArtifacts(new ArrayList<>());
 		}
-		if(group.getGroupInstanceArtifactsUuid() != null )				
+		if(group.getGroupInstanceArtifactsUuid() != null )
 			group.getGroupInstanceArtifactsUuid().clear();
 		else
 			group.setGroupInstanceArtifactsUuid(new ArrayList<>());
 	}
 
 	private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group,
-			List<String> groupArtifacts, String artifactId) {
+												List<String> groupArtifacts, String artifactId) {
 		String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
 		log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ",
 				group.getName(), artifactId, artifactlabel);
@@ -938,7 +948,7 @@
 			} else {
 				log.debug(
 						MIGRATION1707_ARTIFACT_UUID_FIX,
-						group.getName(), correctArtifactId, correctArtifactUUID);				
+						group.getName(), correctArtifactId, correctArtifactUUID);
 				Set<String> tmpSet = new HashSet<>(group.getGroupInstanceArtifacts());
 				tmpSet.add(correctArtifactId);
 				group.setGroupInstanceArtifacts(new ArrayList<>(tmpSet));
@@ -997,7 +1007,7 @@
 	}
 
 	private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group,
-			List<String> groupArtifacts, String artifactId) {
+									   List<String> groupArtifacts, String artifactId) {
 
 		String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
 		log.debug("Migration1707ArtifactUuidFix  fix group:  group name {} artifactId for fix {} artifactlabel {} ",
@@ -1037,7 +1047,7 @@
 	}
 
 	private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component,
-			Service service) {
+										 Service service) {
 		try {
 			// "service name, service id, state, version
 			StringBuilder sb = new StringBuilder(component.getName());
@@ -1071,7 +1081,7 @@
 	}
 
 	public boolean doFixTosca(Map<String, List<Component>> nodeToFix, Map<String, List<Component>> vfToFix,
-			Map<String, List<Component>> serviceToFix) {
+							  Map<String, List<Component>> serviceToFix) {
 
 		Map<GraphPropertyEnum, Object> hasProps = new EnumMap<>(GraphPropertyEnum.class);
 		hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
@@ -1099,7 +1109,7 @@
 	}
 
 	public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
-			Map<GraphPropertyEnum, Object> hasProps) {
+															  Map<GraphPropertyEnum, Object> hasProps) {
 
 		Map<String, List<Component>> result = new HashMap<>();
 		try {
@@ -1146,7 +1156,7 @@
 	}
 
 	public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix,
-			String name) {
+								 String name) {
 		boolean result = true;
 		long time = System.currentTimeMillis();
 		String fileName = name + "_" + time + ".csv";
@@ -1238,7 +1248,7 @@
 				toscaArtifact.setEsId(toscaArtifact.getUniqueId());
 
 				toscaArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
-				ESArtifactData artifactData = new ESArtifactData(toscaArtifact.getEsId(), decodedPayload);
+				DAOArtifactData artifactData = new DAOArtifactData(toscaArtifact.getEsId(), decodedPayload);
 				artifactCassandraDao.saveArtifact(artifactData);
 
 				log.debug("Tosca yaml artifact esId  {} ", toscaArtifact.getEsId());
@@ -1270,7 +1280,7 @@
 				csarArtifact.setEsId(csarArtifact.getUniqueId());
 
 				csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
-				ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+				DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
 				artifactCassandraDao.saveArtifact(artifactData);
 				log.debug("Tosca csar artifact esId  {} ", csarArtifact.getEsId());
 
@@ -1285,29 +1295,29 @@
 
 		return Either.left(parent);
 	}
-	
-	   private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF, Service service) {
 
-	        ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
+	private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF, Service service) {
 
-	        vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
-	        vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
-	        vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
-	        vfModuleArtifactDefinition.setArtifactGroupType(ArtifactGroupTypeEnum.DEPLOYMENT);
-	        vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
-	        vfModuleArtifactDefinition.setTimeout(0);
-	        vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
-	       
-	       return vfModuleArtifactDefinition;
-	    }
+		ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
+
+		vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
+		vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
+		vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
+		vfModuleArtifactDefinition.setArtifactGroupType(ArtifactGroupTypeEnum.DEPLOYMENT);
+		vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
+		vfModuleArtifactDefinition.setTimeout(0);
+		vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
+
+		return vfModuleArtifactDefinition;
+	}
 
 
 	private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
-			ArtifactDefinition vfModuleArtifact) {
+												ArtifactDefinition vfModuleArtifact) {
 		log.debug("generate new vf module for component. name  {}, id {}, Version {}", instance.getName(), instance.getUniqueId());
-		
+
 		String uniqueId = UniqueIdBuilder.buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
-				
+
 		vfModuleArtifact.setUniqueId(uniqueId);
 		vfModuleArtifact.setEsId(vfModuleArtifact.getUniqueId());
 
@@ -1329,7 +1339,7 @@
 						.calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
 				vfModuleArtifact.setArtifactChecksum(newCheckSum);
 
-				ESArtifactData artifactData = new ESArtifactData(vfModuleArtifact.getEsId(),
+				DAOArtifactData artifactData = new DAOArtifactData(vfModuleArtifact.getEsId(),
 						vfModulePayloadString.getBytes());
 				artifactCassandraDao.saveArtifact(artifactData);
 
@@ -1338,21 +1348,21 @@
 		}
 
 	}
-	
+
 	private Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> parseVFModuleJson(ArtifactDefinition vfModuleArtifact) {
 		log.info("Try to get vfModule json from cassandra {}", vfModuleArtifact.getEsId());
-		Either<ESArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
-		
+		Either<DAOArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
+
 		if (vfModuleData.isRight()) {
 			CassandraOperationStatus resourceUploadStatus = vfModuleData.right().value();
 			StorageOperationStatus storageResponse = DaoStatusConverter.convertCassandraStatusToStorageStatus(resourceUploadStatus);
 			log.error("failed to fetch vfModule json {} from cassandra. Status is {}", vfModuleArtifact.getEsId(), storageResponse);
 			return Either.right(storageResponse);
-			
+
 		}
 
-		ESArtifactData esArtifactData = vfModuleData.left().value();
-		String gsonData = new String( esArtifactData.getDataAsArray());
+		DAOArtifactData DAOArtifactData = vfModuleData.left().value();
+		String gsonData = new String( DAOArtifactData.getDataAsArray());
 		final Gson gson = new GsonBuilder().setPrettyPrinting().create();
 		JsonArray jsonElement = new JsonArray();
 		jsonElement = gson.fromJson(gsonData, jsonElement.getClass());
@@ -1361,10 +1371,10 @@
 			VfModuleArtifactPayloadEx vfModule = ComponentsUtils.parseJsonToObject(je.toString(), VfModuleArtifactPayloadEx.class);
 			vfModules.add(vfModule);
 		});
-		
+
 		log.debug  ("parse vf module finish {}", gsonData);
 		return Either.left(vfModules);
-		
+
 	}
 }
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
deleted file mode 100644
index 2e14b90..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
+++ /dev/null
@@ -1,813 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.impl;
-
-
-import com.carrotsearch.hppc.cursors.ObjectCursor;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import fj.data.Either;
-import org.apache.commons.lang.SystemUtils;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.collect.ImmutableOpenMap;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.search.SearchHit;
-import org.openecomp.sdc.be.auditing.api.AuditEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditAuthRequestEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditConsumerEventFactory;
-import org.openecomp.sdc.be.auditing.impl.AuditGetUebClusterEventFactory;
-import org.openecomp.sdc.be.auditing.impl.category.AuditCategoryEventFactory;
-import org.openecomp.sdc.be.auditing.impl.category.AuditGetCategoryHierarchyEventFactory;
-import org.openecomp.sdc.be.auditing.impl.distribution.*;
-import org.openecomp.sdc.be.auditing.impl.resourceadmin.AuditResourceAdminEventMigrationFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditGetUsersListEventFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAccessEventFactory;
-import org.openecomp.sdc.be.auditing.impl.usersadmin.AuditUserAdminEventFactory;
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-import org.openecomp.sdc.be.dao.cassandra.schema.Table;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingActionEnum;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingGenericEvent;
-import org.openecomp.sdc.be.resources.data.auditing.AuditingTypesConstants;
-import org.openecomp.sdc.be.resources.data.auditing.model.*;
-import org.openecomp.sdc.common.datastructure.AuditingFieldsKey;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-import org.springframework.beans.factory.annotation.Autowired;
-
-import java.io.*;
-import java.net.MalformedURLException;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.EnumMap;
-import java.util.Map;
-
-/**
- * Created by mlando on 5/16/2016.
- */
-public class DataMigration {
-
-	private ObjectMapper jsonMapper = new ObjectMapper();
-
-	private static Logger log = Logger.getLogger(DataMigration.class.getName());
-
-	private ElasticSearchClient elasticSearchClient;
-	private AuditCassandraDao auditCassandraDao;
-	private ArtifactCassandraDao artifactCassandraDao;
-
-	@Autowired
-	public DataMigration(AuditCassandraDao auditCassandraDao,
-		ArtifactCassandraDao artifactCassandraDao) {
-		this.auditCassandraDao = auditCassandraDao;
-		this.artifactCassandraDao = artifactCassandraDao;
-	}
-
-    /**
-	 * the method exports and imports the records from ES to cassandra the flow
-	 * will check to see if the files are not empty if the files are not empty
-	 * the export will be skiped and the flow will use the existing files. the
-	 * flow will check if the tables in cassandra are empty, if the tables are
-	 * not empty the proces will stop and exit. if the tables are empty the
-	 * method will import the records from the files. in case of a fail the flow
-	 * will exit and clear all the Cassandra tables.
-	 *
-	 * @param appConfigDir
-	 *            the location of the dir in wich the output files will be
-	 *            stored
-	 * @param exportFromEs
-	 *            should the es be exported again and overwrite the old export
-	 * @param importToCassandra
-	 *            should we import the data into cassandra
-	 * @return true in case the operation was successful.
-	 */
-	public boolean migrateDataESToCassndra(String appConfigDir, boolean exportFromEs, boolean importToCassandra) {
-		if (!initEsClient()) {
-			return false;
-		}
-		Map<Table, File> files = createOutPutFiles(appConfigDir, exportFromEs);
-		if (files == null) {
-			return false;
-		}
-		if (exportFromEs && filesEmpty(files)) {
-			Map<Table, PrintWriter> printerWritersMap = createWriters(files);
-			if (printerWritersMap == null) {
-				return false;
-			}
-			try {
-				ImmutableOpenMap<String, IndexMetaData> indexData = getIndexData();
-				for (ObjectCursor<String> key : indexData.keys()) {
-					if (("resources".equalsIgnoreCase(key.value) || key.value.startsWith("auditingevents"))
-                        && !exportArtifacts(key.value, printerWritersMap)) {
-                        return false;
-                    }
-				}
-			} finally {
-				if (elasticSearchClient != null) {
-					elasticSearchClient.close();
-				}
-				for (PrintWriter writer : printerWritersMap.values()) {
-					writer.close();
-				}
-			}
-		}
-
-		return !importToCassandra || importToCassndra(files);
-	}
-
-	private boolean initEsClient() {
-		String configHome = System.getProperty("config.home");
-		URL url = null;
-		Settings settings = null;
-		try {
-			if (SystemUtils.IS_OS_WINDOWS) {
-				url = new URL("file:///" + configHome + "/elasticsearch.yml");
-			} else {
-				url = new URL("file:" + configHome + "/elasticsearch.yml");
-			}
-			log.debug("URL {}", url);
-			settings = Settings.settingsBuilder().loadFromPath(Paths.get(url.toURI())).build();
-		} catch (MalformedURLException | URISyntaxException e1) {
-			log.error("Failed to create URL in order to load elasticsearch yml", e1);
-			return true;
-		}
-
-		this.elasticSearchClient = new ElasticSearchClient();
-		this.elasticSearchClient.setClusterName(settings.get("cluster.name"));
-		this.elasticSearchClient.setLocal(settings.get("elasticSearch.local"));
-		this.elasticSearchClient.setTransportClient(settings.get("elasticSearch.transportclient"));
-		try {
-			elasticSearchClient.initialize();
-		} catch (URISyntaxException e) {
-		    log.error(e.getMessage());
-			return false;
-		}
-		return true;
-	}
-
-	/**
-	 * the method clears all the cassandra tables
-	 */
-	private void truncateCassandraTable() {
-		log.info("import failed. truncating Cassandra tables.");
-		artifactCassandraDao.deleteAllArtifacts();
-		auditCassandraDao.deleteAllAudit();
-	}
-
-	/**
-	 * the method imports the records from the files into cassandra
-	 * 
-	 * @param files
-	 *            a map of files holding
-	 * @return true if the operation was successful
-	 */
-	private boolean importToCassndra(Map<Table, File> files) {
-		log.info("starting to import date into Cassandra.");
-		if (!validtaTablsNotEmpty(files))
-			return true;
-		for (Table table : files.keySet()) {
-			log.info("importing recordes into {}", table.getTableDescription().getTableName());
-			if (!handleImport(files, table)) {
-				truncateCassandraTable();
-				return false;
-			}
-		}
-		log.info("finished to import date into Cassandra.");
-		return true;
-	}
-
-	private boolean validtaTablsNotEmpty(Map<Table, File> files) {
-		for (Table table : files.keySet()) {
-			Either<Boolean, CassandraOperationStatus> isTableEmptyRes = checkIfTableIsEmpty(table);
-			if (isTableEmptyRes.isRight() || !isTableEmptyRes.left().value()) {
-				log.error("Cassandra table {} is not empty operation aborted.",
-						table.getTableDescription().getTableName());
-				return false;
-			}
-		}
-		return true;
-	}
-
-	/**
-	 * the method retrieves the fields from the given map and generates
-     * corresponding audit event according to the table name
-	 * 
-	 * @param map
-	 *            the map from which we will retrieve the fields enum values
-	 * @param table
-	 *            the table we are going to store the record in.
-	 * @return an AuditingGenericEvent event representing the audit record that is going to be
-	 *         created.
-	 */
-	AuditingGenericEvent createAuditEvent(Map<AuditingFieldsKey, String> map, Table table) {
-		AuditEventFactory factory = null;
-		switch (table) {
-			case USER_ADMIN_EVENT:
-				factory = getAuditUserAdminEventFactory(map);
-				break;
-			case USER_ACCESS_EVENT:
-				factory = getAuditUserAccessEventFactory(map);
-				break;
-			case RESOURCE_ADMIN_EVENT:
-				factory = getAuditResourceAdminEventMigrationFactory(map);
-				break;
-			case DISTRIBUTION_DOWNLOAD_EVENT:
-				factory = getAuditDistributionDownloadEventFactory(map);
-				break;
-			case DISTRIBUTION_ENGINE_EVENT:
-				factory = getAuditDistributionEngineEventMigrationFactory(map);
-				break;
-			case DISTRIBUTION_NOTIFICATION_EVENT:
-				factory = getAuditDistributionNotificationEventFactory(map);
-				break;
-			case DISTRIBUTION_STATUS_EVENT:
-				factory = getAuditDistributionStatusEventFactory(map);
-				break;
-			case DISTRIBUTION_DEPLOY_EVENT:
-				factory = getAuditDistributionDeployEventFactory(map);
-				break;
-			case DISTRIBUTION_GET_UEB_CLUSTER_EVENT:
-				factory = getAuditGetUebClusterEventFactory(map);
-				break;
-			case AUTH_EVENT:
-				factory = getAuditAuthRequestEventFactory(map);
-				break;
-			case CONSUMER_EVENT:
-				factory = getAuditConsumerEventFactory(map);
-				break;
-			case CATEGORY_EVENT:
-				factory = getAuditCategoryEventFactory(map);
-				break;
-			case GET_USERS_LIST_EVENT:
-				factory = getAuditGetUsersListEventFactory(map);
-				break;
-			case GET_CATEGORY_HIERARCHY_EVENT:
-				factory = getAuditGetCategoryHierarchyEventFactory(map);
-				break;
-			default:
-				break;
-		}
-		return factory != null ? factory.getDbEvent() : null;
-	}
-
-	private AuditEventFactory getAuditGetCategoryHierarchyEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditGetCategoryHierarchyEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_DETAILS),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditGetUsersListEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditGetUsersListEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_USER_DETAILS),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditCategoryEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditCategoryEventFactory(
-			AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_CATEGORY_NAME),
-			map.get(AuditingFieldsKey.AUDIT_SUB_CATEGORY_NAME),
-			map.get(AuditingFieldsKey.AUDIT_GROUPING_NAME),
-			map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditUserAccessEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditUserAccessEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_USER_UID),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditUserAdminEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditUserAdminEventFactory(
-            AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_USER_BEFORE),
-			map.get(AuditingFieldsKey.AUDIT_USER_AFTER),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditConsumerEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditConsumerEventFactory(
-		    AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_ECOMP_USER),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditAuthRequestEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditAuthRequestEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_USER_UID),
-			map.get(AuditingFieldsKey.AUDIT_AUTH_URL),
-			map.get(AuditingFieldsKey.AUDIT_AUTH_REALM),
-			map.get(AuditingFieldsKey.AUDIT_AUTH_STATUS),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditGetUebClusterEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditGetUebClusterEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditResourceAdminEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditResourceAdminEventMigrationFactory(
-                AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
-					map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
-			ResourceVersionInfo.newBuilder()
-					.artifactUuid(map.get(AuditingFieldsKey.AUDIT_PREV_ARTIFACT_UUID))
-					.state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_STATE))
-					.version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_PREV_VERSION))
-					.distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DPREV_STATUS))
-					.build(),
-			ResourceVersionInfo.newBuilder()
-					.artifactUuid(map.get(AuditingFieldsKey.AUDIT_CURR_ARTIFACT_UUID))
-					.state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
-					.version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
-					.distributionStatus(map.get(AuditingFieldsKey.AUDIT_RESOURCE_DCURR_STATUS))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_INVARIANT_UUID),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_ARTIFACT_DATA),
-			map.get(AuditingFieldsKey.AUDIT_RESOURCE_COMMENT),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
-			map.get(AuditingFieldsKey.AUDIT_RESOURCE_TOSCA_NODE_TYPE),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditDistributionDownloadEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditDistributionDownloadEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
-					map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
-			        map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditDistributionEngineEventMigrationFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditDistributionEngineEventMigrationFactory(
-		    AuditingActionEnum.fromName(map.get(AuditingFieldsKey.AUDIT_ACTION)),
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			DistributionTopicData.newBuilder()
-					.notificationTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_NOTIFICATION_TOPIC_NAME))
-					.statusTopic(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TOPIC_NAME))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_API_KEY),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVRIONMENT_NAME),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ROLE),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditDistributionDeployEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditDistributionDeployEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
-					map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
-			map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditDistributionStatusEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditDistributionStatusEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			new DistributionData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_CONSUMER_ID),
-					map.get(AuditingFieldsKey.AUDIT_RESOURCE_URL)),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_STATUS_TIME),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-	private AuditEventFactory getAuditDistributionNotificationEventFactory(Map<AuditingFieldsKey, String> map) {
-		return new AuditDistributionNotificationEventFactory(
-			CommonAuditData.newBuilder()
-					.description(map.get(AuditingFieldsKey.AUDIT_DESC))
-					.status(map.get(AuditingFieldsKey.AUDIT_STATUS))
-					.requestId(map.get(AuditingFieldsKey.AUDIT_REQUEST_ID))
-					.serviceInstanceId(map.get(AuditingFieldsKey.AUDIT_SERVICE_INSTANCE_ID))
-					.build(),
-			new ResourceCommonInfo(map.get(AuditingFieldsKey.AUDIT_RESOURCE_NAME),
-					map.get(AuditingFieldsKey.AUDIT_RESOURCE_TYPE)),
-			ResourceVersionInfo.newBuilder()
-					.state(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_STATE))
-					.version(map.get(AuditingFieldsKey.AUDIT_RESOURCE_CURR_VERSION))
-					.build(),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ID),
-			map.get(AuditingFieldsKey.AUDIT_MODIFIER_UID),
-			map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TOPIC_NAME),
-			new OperationalEnvAuditData(map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_ENVIRONMENT_ID),
-					map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_VNF_WORKLOAD_CONTEXT),
-					map.get(AuditingFieldsKey.AUDIT_DISTRIBUTION_TENANT)),
-			map.get(AuditingFieldsKey.AUDIT_TIMESTAMP));
-	}
-
-
-
-	/**
-	 * the method reads the content of the file intended for a given table, and
-	 * sores them in cassandra
-	 * 
-	 * @param files
-	 *            a map of files from which the recordes will be retrieved.
-	 * @param table
-	 *            the name of the table we want to look up in the files and sore
-	 *            in Cassandra // * @param store the function to call when
-	 *            storing recordes in cassndra
-	 * @return true if the operation was successful
-	 */
-	private boolean handleImport(Map<Table, File> files, Table table) {
-		BufferedReader br = null;
-		try {
-			br = new BufferedReader(new FileReader(files.get(table)));
-			String line = null;
-			while ((line = br.readLine()) != null) {
-				CassandraOperationStatus res = CassandraOperationStatus.GENERAL_ERROR;
-				if (Table.ARTIFACT.equals(table)) {
-					res = artifactCassandraDao.saveArtifact(jsonMapper.readValue(line, ESArtifactData.class));
-				}
-				else {
-                    AuditingGenericEvent recordForCassandra = createAuditRecordForCassandra(line, table);
-					if (recordForCassandra != null) {
-                        res = auditCassandraDao.saveRecord(recordForCassandra);
-                    }
-				}
-				if (!res.equals(CassandraOperationStatus.OK)) {
-					log.error("save recored to cassndra {} failed with status {} aborting.",
-							table.getTableDescription().getTableName(), res);
-					return false;
-				}
-			}
-			return true;
-		} catch (IOException e) {
-			log.error("failed to read file", e);
-			return false;
-		} finally {
-			if (br != null) {
-				try {
-					br.close();
-				} catch (IOException e) {
-					log.error("failed to close file reader", e);
-				}
-			}
-		}
-	}
-
-    AuditingGenericEvent createAuditRecordForCassandra(String json, Table table) throws IOException{
-        return createAuditEvent(parseToMap(json), table);
-    }
-
-	private Map<AuditingFieldsKey, String> parseToMap(String json) throws IOException {
-		return jsonMapper.readValue(json, new TypeReference<Map<AuditingFieldsKey, String>>(){});
-	}
-
-	/**
-	 * the method checks if the given table is empty
-	 * 
-	 * @param table
-	 *            the name of the table we want to check
-	 * @return true if the table is empty
-	 */
-	private Either<Boolean, CassandraOperationStatus> checkIfTableIsEmpty(Table table) {
-		if (Table.ARTIFACT.equals(table)) {
-			return artifactCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
-		} else {
-			return auditCassandraDao.isTableEmpty(table.getTableDescription().getTableName());
-		}
-	}
-
-	private boolean filesEmpty(Map<Table, File> files) {
-		for (Table table : files.keySet()) {
-			File file = files.get(table);
-			if (file.length() != 0) {
-				log.info("file:{} is not empty skipping export", table.getTableDescription().getTableName());
-				return false;
-			}
-		}
-		return true;
-	}
-
-	/**
-	 * the method reads the records from es index of audit's into a file as
-	 * json's.
-	 * 
-	 * @param value
-	 *            the name of the index we want
-	 * @param printerWritersMap
-	 *            a map of the writers we use to write to a file.
-	 * @return true in case the export was successful.
-	 */
-	private boolean exportAudit(String value, Map<Table, PrintWriter> printerWritersMap) {
-		log.info("stratng to export audit data from es index{} to file.", value);
-		QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
-		SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(value).setScroll(new TimeValue(60000))
-				.setQuery(queryBuilder).setSize(100).execute().actionGet();
-		while (true) {
-			for (SearchHit hit : scrollResp.getHits().getHits()) {
-				PrintWriter out = printerWritersMap.get(TypeToTableMapping.getTableByType(hit.getType()));
-				out.println(hit.getSourceAsString());
-			}
-			scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
-					.setScroll(new TimeValue(60000)).execute().actionGet();
-			if (scrollResp.getHits().getHits().length == 0) {
-				break;
-
-			}
-		}
-
-		log.info("export audit data from es to file. finished succsesfully");
-		return true;
-	}
-
-	/**
-	 * the method reads the records from es index of resources into a file as
-	 * json's.
-	 *
-	 * @param index
-	 *            the name of the index we want to read
-	 * @param printerWritersMap
-	 *            a map of the writers we use to write to a file.
-	 * @return true in case the export was successful.
-	 */
-	private boolean exportArtifacts(String index, Map<Table, PrintWriter> printerWritersMap) {
-		log.info("stratng to export artifact data from es to file.");
-		PrintWriter out = printerWritersMap.get(Table.ARTIFACT);
-		QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
-		SearchResponse scrollResp = elasticSearchClient.getClient().prepareSearch(index).setScroll(new TimeValue(60000))
-				.setQuery(queryBuilder).setSize(100).execute().actionGet();
-		while (true) {
-			for (SearchHit hit : scrollResp.getHits().getHits()) {
-				;
-				out.println(hit.getSourceAsString());
-			}
-			scrollResp = elasticSearchClient.getClient().prepareSearchScroll(scrollResp.getScrollId())
-					.setScroll(new TimeValue(60000)).execute().actionGet();
-			if (scrollResp.getHits().getHits().length == 0) {
-				break;
-
-			}
-		}
-
-		log.info("export artifact data from es to file. finished succsesfully");
-		return true;
-	}
-
-	/**
-	 * the method retrieves all the indexes from elasticsearch
-	 * 
-	 * @return a map of indexes and there metadata
-	 */
-	private ImmutableOpenMap<String, IndexMetaData> getIndexData() {
-		return elasticSearchClient.getClient().admin().cluster().prepareState().get().getState().getMetaData()
-				.getIndices();
-	}
-
-	/**
-	 * the method creates all the files and dir which holds them. in case the
-	 * files exist they will not be created again.
-	 * 
-	 * @param appConfigDir
-	 *            the base path under which the output dir will be created and
-	 *            the export result files the created filesa are named according
-	 *            to the name of the table into which it will be imported.
-	 * @param exportToEs
-	 *            if true all the export files will be recreated
-	 * @returnthe returns a map of tables and the files representing them them
-	 */
-	private Map<Table, File> createOutPutFiles(String appConfigDir, boolean exportToEs) {
-		Map<Table, File> result = new EnumMap<Table, File>(Table.class);
-		File outputDir = new File(appConfigDir + "/output/");
-		if (!createOutPutFolder(outputDir)) {
-			return null;
-		}
-		for (Table table : Table.values()) {
-			File file = new File(outputDir + "/" + table.getTableDescription().getTableName());
-			if (exportToEs) {
-				try {
-					if (file.exists()) {
-						Files.delete(file.toPath());
-					}
-				} catch (IOException e) {
-					log.error("failed to delete output file {}", file.getAbsolutePath(), e);
-					return null;
-				}
-				file = new File(outputDir + "/" + table.getTableDescription().getTableName());
-			}
-			if (!file.exists()) {
-				try {
-					file.createNewFile();
-				} catch (IOException e) {
-					log.error("failed to create output file {}", file.getAbsolutePath(), e);
-					return null;
-				}
-			}
-			result.put(table, file);
-
-		}
-		return result;
-	}
-
-	/**
-	 * the method create the writers to each file
-	 * 
-	 * @param files
-	 *            a map of the files according to table
-	 * @return returns a map of writers according to table.
-	 */
-	private Map<Table, PrintWriter> createWriters(Map<Table, File> files) {
-		Map<Table, PrintWriter> printerWritersMap = new EnumMap<>(Table.class);
-      
-			for (Table table : files.keySet()) {
-				log.info("creating writer for {}", table);
-				File file = files.get(table);
-                try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)) )){
-				printerWritersMap.put(table, out);
-				log.info("creating writer for {} was successful", table);
-            } catch (IOException e) {
-            	log.error("create writer to file failed",e);
-            	return null;
-			} 
-        } 
-		return printerWritersMap;
-	}
-
-	/**
-	 * the method creates the output dir in case it does not exist
-	 * 
-	 * @param outputDir
-	 *            the path under wich the directory will be created.
-	 * @return true in case the create was succsesful or the dir already exists
-	 */
-	private boolean createOutPutFolder(File outputDir) {
-		if (!outputDir.exists()) {
-			log.info("creating output dir {}", outputDir.getAbsolutePath());
-			try {
-				Files.createDirectories(outputDir.toPath());
-			} catch (IOException e) {
-				log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
-				return false;
-			}
-		}
-		return true;
-	}
-
-	public enum TypeToTableMapping {
-		USER_ADMIN_EVENT_TYPE(AuditingTypesConstants.USER_ADMIN_EVENT_TYPE,
-				Table.USER_ADMIN_EVENT), USER_ACCESS_EVENT_TYPE(AuditingTypesConstants.USER_ACCESS_EVENT_TYPE,
-						Table.USER_ACCESS_EVENT), RESOURCE_ADMIN_EVENT_TYPE(
-								AuditingTypesConstants.RESOURCE_ADMIN_EVENT_TYPE,
-								Table.RESOURCE_ADMIN_EVENT), DISTRIBUTION_DOWNLOAD_EVENT_TYPE(
-										AuditingTypesConstants.DISTRIBUTION_DOWNLOAD_EVENT_TYPE,
-										Table.DISTRIBUTION_DOWNLOAD_EVENT), DISTRIBUTION_ENGINE_EVENT_TYPE(
-												AuditingTypesConstants.DISTRIBUTION_ENGINE_EVENT_TYPE,
-												Table.DISTRIBUTION_ENGINE_EVENT), DISTRIBUTION_NOTIFICATION_EVENT_TYPE(
-														AuditingTypesConstants.DISTRIBUTION_NOTIFICATION_EVENT_TYPE,
-														Table.DISTRIBUTION_NOTIFICATION_EVENT), DISTRIBUTION_STATUS_EVENT_TYPE(
-																AuditingTypesConstants.DISTRIBUTION_STATUS_EVENT_TYPE,
-																Table.DISTRIBUTION_STATUS_EVENT), DISTRIBUTION_DEPLOY_EVENT_TYPE(
-																		AuditingTypesConstants.DISTRIBUTION_DEPLOY_EVENT_TYPE,
-																		Table.DISTRIBUTION_DEPLOY_EVENT), DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE(
-																				AuditingTypesConstants.DISTRIBUTION_GET_UEB_CLUSTER_EVENT_TYPE,
-																				Table.DISTRIBUTION_GET_UEB_CLUSTER_EVENT), AUTH_EVENT_TYPE(
-																						AuditingTypesConstants.AUTH_EVENT_TYPE,
-																						Table.AUTH_EVENT), CONSUMER_EVENT_TYPE(
-																								AuditingTypesConstants.CONSUMER_EVENT_TYPE,
-																								Table.CONSUMER_EVENT), CATEGORY_EVENT_TYPE(
-																										AuditingTypesConstants.CATEGORY_EVENT_TYPE,
-																										Table.CATEGORY_EVENT), GET_USERS_LIST_EVENT_TYPE(
-																												AuditingTypesConstants.GET_USERS_LIST_EVENT_TYPE,
-																												Table.GET_USERS_LIST_EVENT), GET_CATEGORY_HIERARCHY_EVENT_TYPE(
-																														AuditingTypesConstants.GET_CATEGORY_HIERARCHY_EVENT_TYPE,
-																														Table.GET_CATEGORY_HIERARCHY_EVENT);
-
-		String typeName;
-		Table table;
-
-		TypeToTableMapping(String typeName, Table table) {
-			this.typeName = typeName;
-			this.table = table;
-		}
-
-		public String getTypeName() {
-			return typeName;
-		}
-
-		public Table getTable() {
-			return table;
-		}
-
-		public static Table getTableByType(String type) {
-			for (TypeToTableMapping mapping : TypeToTableMapping.values()) {
-				if (mapping.getTypeName().equalsIgnoreCase(type)) {
-					return mapping.getTable();
-				}
-			}
-			return null;
-		}
-	}
-
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
deleted file mode 100644
index 6b15cb0..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/EsToCassandraDataMigrationConfig.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.impl;
-
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
-import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-@Configuration
-public class EsToCassandraDataMigrationConfig {
-	@Bean(name = "DataMigrationBean")
-	public DataMigration dataMigration(AuditCassandraDao auditCassandraDao, ArtifactCassandraDao artifactCassandraDao) {
-		return new DataMigration(auditCassandraDao, artifactCassandraDao);
-	}
-
-	@Bean(name = "artifact-cassandra-dao")
-	public ArtifactCassandraDao artifactCassandraDao(CassandraClient cassandraClient) {
-		return new ArtifactCassandraDao(cassandraClient);
-	}
-
-	@Bean(name = "audit-cassandra-dao")
-	public AuditCassandraDao auditCassandraDao(CassandraClient cassandraClient) {
-		return new AuditCassandraDao(cassandraClient);
-	}
-
-	@Bean(name = "cassandra-client")
-	public CassandraClient cassandraClient() {
-		return new CassandraClient();
-	}
-	
-	@Bean(name = "sdc-schema-files-cassandra-dao")
-	public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao(CassandraClient cassandraClient) {
-		return new SdcSchemaFilesCassandraDao(cassandraClient);
-	}
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
index 57a7c25..2b96ba3 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
@@ -22,7 +22,6 @@
 
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import java.util.stream.Stream;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
 import java.io.IOException;
@@ -31,6 +30,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
+import java.util.stream.Stream;
 
 /**
  * simple util class to verify that the janusgraph export json graph is not corrupted
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
index deb766f..ae4a559 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
@@ -21,20 +21,6 @@
 package org.openecomp.sdc.asdctool.impl;
 
 import com.google.gson.Gson;
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
 import org.apache.commons.configuration.BaseConfiguration;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.tinkerpop.gremlin.structure.Element;
@@ -54,6 +40,21 @@
 import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
 public class GraphMLConverter {
 
     private static final String STORAGE_BACKEND = "storage.backend";
@@ -95,6 +96,7 @@
             return importJsonGraph(graph, inputFile, propertiesCriteriaToDelete);
 
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("import graph failed ", e);
             return false;
         } finally {
@@ -121,6 +123,7 @@
 
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("export graph failed ", e);
             return false;
         } finally {
@@ -145,6 +148,7 @@
 
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("export exportGraphMl failed ", e);
             return null;
         } finally {
@@ -172,6 +176,7 @@
 
             log.info(LOG_FORMATTER, EXPORTED_FILE, result);
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("find Error In Json Graph failed ", e);
             return false;
         } finally {
@@ -208,8 +213,10 @@
             result = outputFile;
 
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("export Json Graph failed ", e);
             graph.tx().rollback();
+				e.printStackTrace();
         }
         return result;
 
@@ -227,6 +234,7 @@
             graph.tx().commit();
         } catch (Exception e) {
             graph.tx().rollback();
+			e.printStackTrace();
             log.info("export Graph Ml failed ", e);
         }
         return result;
@@ -278,7 +286,9 @@
 
         } catch (Exception e) {
             log.info("Failed to import graph ", e);
+			e.printStackTrace();
             graph.tx().rollback();
+				e.printStackTrace();
         }
         return result;
 
@@ -306,8 +316,10 @@
             graph.tx().rollback();
 
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("find Error In Json Graph failed ", e);
             graph.tx().rollback();
+				e.printStackTrace();
         }
         return result;
 
@@ -355,8 +367,10 @@
             result = outputFile;
 
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("export Users failed ", e);
             graph.tx().rollback();
+				e.printStackTrace();
         }
         return result;
 
@@ -397,6 +411,7 @@
 
             log.info(EXPORTED_FILE, result);
         } catch (Exception e) {
+			e.printStackTrace();
             log.info("export Users failed ", e);
             return false;
         } finally {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
index 8537092..21e22be 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLDataAnalyzer.java
@@ -20,12 +20,6 @@
 
 package org.openecomp.sdc.asdctool.impl;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
 import org.apache.poi.ss.usermodel.Row;
 import org.apache.poi.ss.usermodel.Sheet;
@@ -38,6 +32,13 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.File;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
 public class GraphMLDataAnalyzer {
 
     private static Logger log = LoggerFactory.getLogger(GraphMLDataAnalyzer.class);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
index fbebe2c..33fbf29 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/JanusGraphInitializer.java
@@ -20,12 +20,16 @@
 
 package org.openecomp.sdc.asdctool.impl;
 
-import org.janusgraph.core.*;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphException;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphQuery;
+import org.janusgraph.core.PropertyKey;
 import org.janusgraph.core.schema.ConsistencyModifier;
 import org.janusgraph.core.schema.JanusGraphIndex;
 import org.janusgraph.core.schema.JanusGraphManagement;
-import org.apache.tinkerpop.gremlin.structure.Edge;
-import org.apache.tinkerpop.gremlin.structure.Vertex;
 import org.openecomp.sdc.be.dao.graph.datatype.ActionEnum;
 import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
index 883f5e5..e60640f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
@@ -20,10 +20,10 @@
 
 package org.openecomp.sdc.asdctool.impl;
 
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
-import org.janusgraph.core.JanusGraphVertex;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphVertex;
 import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
 import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
@@ -75,7 +75,8 @@
 			graph.tx().commit();
 			return productsToDelete;
 		} catch (Exception e) {
-			log.info("get All Products failed - {}" , e);
+            e.printStackTrace();
+            log.info("get All Products failed - {}" , e);
 			if(graph != null) {
 			    graph.tx().rollback();
 			}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
index 9911fb7..7f9064f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
@@ -20,10 +20,10 @@
 
 package org.openecomp.sdc.asdctool.impl;
 
-import org.janusgraph.core.JanusGraphFactory;
-import org.janusgraph.core.JanusGraph;
-import org.janusgraph.core.JanusGraphQuery;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraph;
+import org.janusgraph.core.JanusGraphFactory;
+import org.janusgraph.core.JanusGraphQuery;
 import org.openecomp.sdc.asdctool.Utils;
 import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
@@ -99,7 +99,7 @@
 			return numberOfUpdatedVertexes;
 
 		} catch (Exception e) {
-			log.info("update Property On Service At Least Certified failed -{}" , e);
+			e.printStackTrace();
 			graph.tx().rollback();
 
 			return null;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
index cee0ded..3633be7 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/VrfObjectFixHandler.java
@@ -42,7 +42,11 @@
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
 
 import static java.util.Collections.emptyList;
 import static java.util.stream.Collectors.toList;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
index 1808175..0adaf51 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CommonInternalTool.java
@@ -19,13 +19,13 @@
  */
 package org.openecomp.sdc.asdctool.impl.internal.tool;
 
-import java.io.IOException;
-import java.util.Map;
-
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.openecomp.sdc.asdctool.utils.ReportWriter;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 
+import java.io.IOException;
+import java.util.Map;
+
 public abstract class CommonInternalTool {
     protected ReportWriter reportWriter;
     private String reportType;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
index 78f0ecb..8d74ea5 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/CsarGenerator.java
@@ -19,6 +19,30 @@
  */
 package org.openecomp.sdc.asdctool.impl.internal.tool;
 
+import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.DAOArtifactData;
+import org.openecomp.sdc.be.tosca.CsarUtils;
+import org.openecomp.sdc.be.tosca.ToscaExportHandler;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.openecomp.sdc.common.util.GeneralUtility;
+import org.springframework.beans.factory.annotation.Autowired;
+
 import java.io.IOException;
 import java.util.EnumMap;
 import java.util.HashMap;
@@ -29,30 +53,6 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
-import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
-import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
-import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
-import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
-import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
-import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
-import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
-import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
-import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
-import org.openecomp.sdc.be.model.ArtifactDefinition;
-import org.openecomp.sdc.be.model.Component;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
-import org.openecomp.sdc.be.resources.data.ESArtifactData;
-import org.openecomp.sdc.be.tosca.CsarUtils;
-import org.openecomp.sdc.be.tosca.ToscaExportHandler;
-import org.openecomp.sdc.common.api.ArtifactTypeEnum;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-import org.openecomp.sdc.common.util.GeneralUtility;
-import org.springframework.beans.factory.annotation.Autowired;
-
 @org.springframework.stereotype.Component("csarGenerator")
 public class CsarGenerator extends CommonInternalTool {
 
@@ -174,7 +174,7 @@
         
         
         csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
-        ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+        DAOArtifactData artifactData = new DAOArtifactData(csarArtifact.getEsId(), decodedPayload);
         artifactCassandraDao.saveArtifact(artifactData);
         ConsoleWriter.dataLine("Artifact generated and saved into Cassandra ", csarArtifact.getArtifactLabel());
         report(component, csarArtifact);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
index 3bd13d0..62dd489 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/internal/tool/DeleteComponentHandler.java
@@ -19,11 +19,11 @@
  */
 package org.openecomp.sdc.asdctool.impl.internal.tool;
 
-import org.janusgraph.core.JanusGraphVertex;
 import fj.data.Either;
 import org.apache.tinkerpop.gremlin.structure.Direction;
 import org.apache.tinkerpop.gremlin.structure.Edge;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraphVertex;
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
index cc75dfc..cdb3b2c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationConfigManager.java
@@ -24,14 +24,14 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.config;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Properties;
 
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 /**
  * Created by chaya on 7/4/2017.
  */
@@ -76,6 +76,7 @@
             input = new FileInputStream(path);
             prop.load(input);
         } catch (IOException ex) {
+            ex.printStackTrace();
             log.info("FileInputStream failed - {}", ex);
         }
         return prop;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
index d59a9aa..b83417c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
@@ -20,11 +20,16 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.config;
 
-import java.util.List;
 import org.openecomp.sdc.asdctool.impl.VrfObjectFixHandler;
 import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
 import org.openecomp.sdc.asdctool.impl.validator.ValidationToolBL;
-import org.openecomp.sdc.asdctool.impl.validator.executers.*;
+import org.openecomp.sdc.asdctool.impl.validator.executers.IArtifactValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VfValidatorExecuter;
 import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
 import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
 import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
@@ -46,7 +51,13 @@
 import org.openecomp.sdc.be.dao.jsongraph.HealingJanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.*;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ArchiveOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ByToscaNameDerivedNodeTypeResolver;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.GroupsOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTemplateOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTypeOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 import org.openecomp.sdc.be.model.operations.api.IGraphLockOperation;
 import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
 import org.openecomp.sdc.config.CatalogBESpringConfig;
@@ -58,6 +69,8 @@
 import org.springframework.context.annotation.Primary;
 import org.springframework.core.io.FileSystemResource;
 
+import java.util.List;
+
 /**
  * Created by chaya on 7/3/2017.
  */
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
index 33cf9d4..a5e7d78 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
@@ -34,8 +34,19 @@
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
-import java.io.*;
-import java.util.*;
+import java.io.BufferedWriter;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 public class ArtifactValidatorExecuter{
@@ -134,7 +145,7 @@
 			try {
 				// "service name, service id, state, version
 				for(Component component: components ){
-					StringBuilder sb = new StringBuilder(component.getName());
+					StringBuffer sb = new StringBuffer(component.getName());
 					sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID()).append(",").append(component.getLifecycleState()).append(",").append(component.getVersion());
 					
 					sb.append("\n");
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
index 99e6637..9fa9220 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
@@ -26,11 +26,11 @@
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 import org.openecomp.sdc.be.model.Component;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
 public class NodeToscaArtifactsValidatorExecuter extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
 	 protected String name;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
index ed4b6ea..aad803e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
@@ -26,12 +26,12 @@
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 import org.openecomp.sdc.be.model.Component;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
+import org.springframework.beans.factory.annotation.Autowired;
 
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
-import org.springframework.beans.factory.annotation.Autowired;
 
 @org.springframework.stereotype.Component
 public class ServiceToscaArtifactsValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
index 8f36dd3..a52fb37 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceValidatorExecuter.java
@@ -25,10 +25,10 @@
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
 
 import java.util.ArrayList;
 import java.util.List;
-import org.springframework.stereotype.Component;
 
 /**
  * Created by chaya on 7/4/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
index 5287ea1..ca027cb 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/TopologyTemplateValidatorExecuter.java
@@ -24,17 +24,22 @@
 import org.openecomp.sdc.asdctool.impl.validator.tasks.TopologyTemplateValidationTask;
 import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
 import org.openecomp.sdc.asdctool.impl.validator.utils.VertexResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 /**
  * Created by chaya on 7/3/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
index 29d49ae..f1c9af6 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
@@ -27,11 +27,11 @@
 import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.be.model.Component;
 import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 
 public class VFToscaArtifactValidatorExecutor extends ArtifactValidatorExecuter implements IArtifactValidatorExecuter{
 	
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
index eefd195..181495a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VfValidatorExecuter.java
@@ -20,13 +20,14 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.executers;
 
-import java.util.List;
 import org.openecomp.sdc.asdctool.impl.validator.tasks.VfValidationTask;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import java.util.List;
+
 /**
  * Created by chaya on 7/3/2017.
  */
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
index 2e804cc..11c80ea 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/artifacts/ArtifactValidationUtils.java
@@ -21,12 +21,7 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
+import fj.data.Either;
 import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -41,7 +36,11 @@
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import fj.data.Either;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 
 /**
  * Created by chaya on 7/6/2017.
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
index d6fafcb..d45c896 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/tasks/moduleJson/ModuleJsonTask.java
@@ -37,7 +37,12 @@
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
 import org.springframework.beans.factory.annotation.Autowired;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 /**
@@ -108,8 +113,7 @@
     }
 
     private boolean isAfterSubmitForTesting(GraphVertex vertex){
-        List allowedStates = new ArrayList<>(Arrays.asList(LifecycleStateEnum.READY_FOR_CERTIFICATION.name(),
-                LifecycleStateEnum.CERTIFICATION_IN_PROGRESS.name(), LifecycleStateEnum.CERTIFIED.name()));
+        List allowedStates = new ArrayList<>(Arrays.asList(LifecycleStateEnum.CERTIFIED.name()));
         return allowedStates.contains(vertex.getMetadataProperty(GraphPropertyEnum.STATE));
     }
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
index 2be8f92..e575ffc 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/ReportManager.java
@@ -21,18 +21,21 @@
 
 package org.openecomp.sdc.asdctool.impl.validator.utils;
 
+import org.apache.commons.lang.text.StrBuilder;
+import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Paths;
 import java.nio.file.StandardOpenOption;
-import java.util.*;
-
-import org.apache.commons.lang.text.StrBuilder;
-import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
-import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 /**
  * Created by chaya on 7/5/2017.
@@ -50,6 +53,7 @@
             initCsvFile();
             initReportFile();
         } catch (IOException e) {
+            e.printStackTrace();
             log.info("Init file failed - {}", e.getClass().getSimpleName(), e);
         }
     }
@@ -98,6 +102,7 @@
             Files.write(Paths.get(reportOutputFilePath), new StrBuilder().appendNewLine().toString().getBytes(), StandardOpenOption.APPEND);
             Files.write(Paths.get(reportOutputFilePath), message.getBytes(), StandardOpenOption.APPEND);
         } catch (IOException e) {
+            e.printStackTrace();
             log.info("write to file failed - {}", e.getClass().getSimpleName(), e);
         }
     }
@@ -145,6 +150,7 @@
                     new StrBuilder().appendNewLine().toString().getBytes(),
                     StandardOpenOption.APPEND);
             } catch (IOException e) {
+                    e.printStackTrace();
                 log.info("write to file failed - {}", e.getClass().getSimpleName(), e);
             }
         }));
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
index 511e9ba..882a4e1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/CsarGeneratorTool.java
@@ -20,14 +20,14 @@
 
 package org.openecomp.sdc.asdctool.main;
 
-import java.util.Scanner;
-
 import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
 import org.openecomp.sdc.asdctool.configuration.CsarGeneratorConfiguration;
 import org.openecomp.sdc.asdctool.impl.internal.tool.CsarGenerator;
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
+import java.util.Scanner;
+
 public class CsarGeneratorTool extends SdcInternalTool {
 
     public static void main(String[] args) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
index d2d5e77..4b3496f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
@@ -35,67 +35,70 @@
 
 	private static Logger log = Logger.getLogger(DataSchemaMenu.class.getName());
 
-    public static void main(String[] args) {
+	public static void main(String[] args) {
 
 		String operation = args[0];
 
-        String appConfigDir = args[1];
+		String appConfigDir = args[1];
 
-        if (args == null || args.length < 2) {
-            usageAndExit();
-        }
+		if (args == null || args.length < 2) {
+			usageAndExit();
+		}
 
-        ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
-        ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+		ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
+
+		try {
 
         SdcSchemaBuilder sdcSchemaBuilder = new SdcSchemaBuilder(new SdcSchemaUtils(),
             ConfigurationManager.getConfigurationManager().getConfiguration()::getCassandraConfig);
 
-        switch (operation.toLowerCase()) {
-            case "create-cassandra-structures":
-                log.debug("Start create cassandra keyspace, tables and indexes");
+			switch (operation.toLowerCase()) {
+			case "create-cassandra-structures":
+				log.debug("Start create cassandra keyspace, tables and indexes");
                 if (sdcSchemaBuilder.createSchema()) {
-                    log.debug("create cassandra keyspace, tables and indexes successfull");
-                    System.exit(0);
-                } else {
-                    log.debug("create cassandra keyspace, tables and indexes failed");
-                    System.exit(2);
-                }
-                break;
+					log.debug("create cassandra keyspace, tables and indexes successful");
+					System.exit(0);
+				} else {
+					log.debug("create cassandra keyspace, tables and indexes failed");
+					System.exit(2);
+				}
             case "create-janusgraph-structures":
                 log.debug("Start create janusgraph keyspace");
                 String janusGraphCfg = 2 == args.length ? configurationManager.getConfiguration().getJanusGraphCfgFile() : args[2];
                 if (JanusGraphInitializer.createGraph(janusGraphCfg)) {
                     log.debug("create janusgraph keyspace successfull");
-                    System.exit(0);
-                } else {
+					System.exit(0);
+				} else {
                     log.debug("create janusgraph keyspace failed");
-                    System.exit(2);
-                }
-                break;
-            case "clean-cassndra":
-                log.debug("Start clean keyspace, tables");
+					System.exit(2);
+				}
+			case "clean-cassndra":
+				log.debug("Start clean keyspace, tables");
                 if (sdcSchemaBuilder.deleteSchema()) {
-                    log.debug(" successfull");
-                    System.exit(0);
-                } else {
-                    log.debug(" failed");
-                    System.exit(2);
-                }
-                break;
-            default:
-                usageAndExit();
-                break;
-        }
-    }
+					log.debug(" successful");
+					System.exit(0);
+				} else {
+					log.debug(" failed");
+					System.exit(2);
+				}
+			default:
+				usageAndExit();
+			}
+		} catch (Throwable t) {
+			t.printStackTrace();
+			log.debug("create cassandra keyspace, tables and indexes failed");
+			System.exit(3);
+		}
+	}
 
-    private static void usageAndExit() {
-        DataSchemeUsage();
-        System.exit(1);
-    }
+	private static void usageAndExit() {
+		DataSchemeUsage();
+		System.exit(1);
+	}
 
-    private static void DataSchemeUsage() {
-        System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+	private static void DataSchemeUsage() {
+		System.out.println("Usage: create-cassandra-structures <configuration dir> ");
         System.out.println("Usage: create-janusgraph-structures <configuration dir> ");
-    }
+	}
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
index d30249e..309d23c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DeleteComponentTool.java
@@ -20,14 +20,14 @@
 
 package org.openecomp.sdc.asdctool.main;
 
-import java.util.Scanner;
-
 import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
 import org.openecomp.sdc.asdctool.configuration.InternalToolConfiguration;
 import org.openecomp.sdc.asdctool.impl.internal.tool.DeleteComponentHandler;
 import org.openecomp.sdc.asdctool.utils.ConsoleWriter;
 import org.springframework.context.annotation.AnnotationConfigApplicationContext;
 
+import java.util.Scanner;
+
 public class DeleteComponentTool extends SdcInternalTool{
     private static final String PSW = "ItIsTimeToDelete";
 
@@ -39,11 +39,6 @@
         String appConfigDir = args[0];
         String password = args[1];
         
-        if ( !PSW.equals(password) ){
-            ConsoleWriter.dataLine("Wrong password");
-            System.exit(1);
-        }
-        
         disableConsole();
         ConsoleWriter.dataLine("STARTED... ");
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
deleted file mode 100644
index c119d7e..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.main;
-
-import org.openecomp.sdc.asdctool.impl.DataMigration;
-import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
-import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.common.api.ConfigurationSource;
-import org.openecomp.sdc.common.impl.ExternalConfiguration;
-import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.openecomp.sdc.common.log.wrappers.Logger;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-
-public class EsToCassandraDataMigrationMenu {
-
-	private static Logger log = Logger.getLogger(EsToCassandraDataMigrationMenu.class.getName());
-
-	public static void main(String[] args) {
-
-		if (args == null || args.length < 2) {
-			usageAndExit();
-		}
-		String operation = args[0];
-
-		String appConfigDir = args[1];
-		System.setProperty("config.home", appConfigDir);
-		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
-				appConfigDir);
-		ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-
-		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(
-				EsToCassandraDataMigrationConfig.class);
-		DataMigration dataMigration = null;
-		try {
-			switch (operation.toLowerCase()) {
-			case "es-to-cassndra-migration":
-				dataMigration = (DataMigration) context.getBean("DataMigrationBean");
-				log.debug("Start migration from ES to C* ");
-				if (dataMigration.migrateDataESToCassndra(appConfigDir, true, true)) {
-					log.debug("migration from ES to C* was finished successfull");
-					System.exit(0);
-				} else {
-					log.debug("migration from ES to C* failed");
-					System.exit(2);
-				}
-				break;
-			case "es-to-cassndra-migration-export-only":
-				dataMigration = (DataMigration) context.getBean("DataMigrationBean");
-				log.debug("Start migration export only from ES to C* ");
-				if (dataMigration.migrateDataESToCassndra(appConfigDir, true, false)) {
-					log.debug("migration export only from ES to C* was finished successfull");
-					System.exit(0);
-				} else {
-					log.debug("migration export only from ES to C* failed");
-					System.exit(2);
-				}
-				break;
-			case "es-to-cassndra-migration-import-only":
-				dataMigration = (DataMigration) context.getBean("DataMigrationBean");
-				log.debug("Start migration import only from ES to C* ");
-				if (dataMigration.migrateDataESToCassndra(appConfigDir, false, true)) {
-					log.debug("migration import only from ES to C* was finished successfull");
-					System.exit(0);
-				} else {
-					log.debug("migration import only from ES to C* failed");
-					System.exit(2);
-				}
-				break;
-			default:
-				usageAndExit();
-			}
-		} catch (Throwable t) {
-			log.info("data migration failed - {}", t);
-			System.exit(3);
-		}
-	}
-
-	private static void usageAndExit() {
-		MigrationUsage();
-		System.exit(1);
-	}
-
-	private static void MigrationUsage() {
-		System.out.println(
-				"Usage: es-to-cassndra-migration/es-to-cassndra-migration-import-only/es-to-cassndra-migration-export-only <configuration dir>");
-	}
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
index 9c8ca99..98aea26 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
@@ -22,13 +22,14 @@
 
 package org.openecomp.sdc.asdctool.main;
 
+import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
+import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
+import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
+
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
-import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
-import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
-import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
 
 public class ExportImportMenu {
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
deleted file mode 100644
index 3c4f745..0000000
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * SDC
- * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- * 
- *      http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * ============LICENSE_END=========================================================
- */
-
-package org.openecomp.sdc.asdctool.main;
-
-import fj.data.Either;
-import org.openecomp.sdc.asdctool.cli.CLIToolData;
-import org.openecomp.sdc.asdctool.cli.SpringCLITool;
-import org.openecomp.sdc.asdctool.configuration.GetConsumersConfiguration;
-import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
-import org.openecomp.sdc.be.resources.data.ConsumerData;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.List;
-
-public class GetConsumersMenu extends SpringCLITool {
-
-    private static final Logger LOGGER = LoggerFactory.getLogger(GetConsumersMenu.class);
-
-    public static void main(String[] args) {
-        GetConsumersMenu getConsumersMenu = new GetConsumersMenu();
-        CLIToolData cliToolData = getConsumersMenu.init(args);
-        ConsumerOperation consumersService = cliToolData.getSpringApplicationContext().getBean(ConsumerOperation.class);
-        printConsumers(getConsumersMenu, consumersService);
-    }
-
-    private static void printConsumers(GetConsumersMenu getConsumersMenu, ConsumerOperation consumersService) {
-        Either<List<ConsumerData>, StorageOperationStatus> allConsumers = consumersService.getAll();
-        allConsumers.left().foreachDoEffect(getConsumersMenu::printConsumers);
-        allConsumers.right().foreachDoEffect(getConsumersMenu::printErr);
-    }
-
-    private void printConsumers(List<ConsumerData> consumers) {
-        System.out.println("SDC consumers: ");
-        consumers.forEach(consumer -> {
-            System.out.println("#########################");
-            System.out.println(consumer);
-        });
-        System.exit(0);
-    }
-
-    private void printErr(StorageOperationStatus err) {
-        String errMsg = String.format("failed to fetch consumers. reason: %s", err);
-        LOGGER.error(errMsg);
-        System.err.print(errMsg);
-        System.exit(1);
-    }
-
-    @Override
-    protected String commandName() {
-        return "get-consumers";
-    }
-
-    @Override
-    protected Class<?> getSpringConfigurationClass() {
-        return GetConsumersConfiguration.class;
-    }
-}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
index aeb7abe..2fb99e2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcInternalTool.java
@@ -20,11 +20,10 @@
 
 package org.openecomp.sdc.asdctool.main;
 
+import ch.qos.logback.core.Appender;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.slf4j.LoggerFactory;
 
-import ch.qos.logback.core.Appender;
-
 public abstract class SdcInternalTool {
     protected static void disableConsole() {
         org.slf4j.Logger rootLogger = LoggerFactory.getILoggerFactory().getLogger(Logger.ROOT_LOGGER_NAME);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
index eb1d487..47a08ea 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/SdcSchemaFileImport.java
@@ -20,6 +20,21 @@
 
 package org.openecomp.sdc.asdctool.main;
 
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.commons.lang3.ArrayUtils;
+import org.openecomp.sdc.asdctool.configuration.SdcSchemaFileImportConfiguration;
+import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
+import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.yaml.snakeyaml.DumperOptions;
+import org.yaml.snakeyaml.Yaml;
+
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -36,20 +51,6 @@
 import java.util.stream.Stream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
-import org.apache.commons.codec.digest.DigestUtils;
-import org.apache.commons.lang3.ArrayUtils;
-import org.openecomp.sdc.asdctool.enums.SchemaZipFileEnum;
-import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
-import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
-import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
-import org.openecomp.sdc.be.resources.data.SdcSchemaFilesData;
-import org.openecomp.sdc.common.api.ConfigurationSource;
-import org.openecomp.sdc.common.impl.ExternalConfiguration;
-import org.openecomp.sdc.common.impl.FSConfigurationSource;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
-import org.yaml.snakeyaml.DumperOptions;
-import org.yaml.snakeyaml.Yaml;
 
 
 public class SdcSchemaFileImport {
@@ -106,10 +107,11 @@
 		//Loop over schema file list and create each yaml file from /import/tosca folder 
 		SchemaZipFileEnum[] schemaFileList = SchemaZipFileEnum.values();
 		for (SchemaZipFileEnum schemaZipFileEnum : schemaFileList) {
-			String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR +  schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
-			try(InputStream input = new FileInputStream(new File(pathname));) {
+			try {
 				//get the source yaml file
+				String pathname = importToscaPath + SEPARATOR + schemaZipFileEnum.getSourceFolderName() + SEPARATOR +  schemaZipFileEnum.getSourceFileName() + YAML_EXTENSION;
 				System.out.println("Processing file "+pathname+"....");
+				InputStream input = new FileInputStream(new File(pathname));
 				//Convert the content of file to yaml 
 				Yaml yamlFileSource = new Yaml();
 			    Object content = yamlFileSource.load(input);
@@ -275,6 +277,6 @@
 	private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
 		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
 		new ConfigurationManager(configurationSource);
-		return  new AnnotationConfigApplicationContext(EsToCassandraDataMigrationConfig.class);
+		return  new AnnotationConfigApplicationContext(SdcSchemaFileImportConfiguration.class);
 	}
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
index e5e0740..65f8c7b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
@@ -27,6 +27,8 @@
 import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
 import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
 import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
+import org.openecomp.sdc.be.components.distribution.engine.DmaapClientFactory;
+import org.openecomp.sdc.be.components.health.HealthCheckBusinessLogic;
 import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
 import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
 import org.openecomp.sdc.be.components.scheduledtasks.ComponentsCleanBusinessLogic;
@@ -42,12 +44,10 @@
 import org.openecomp.sdc.be.model.operations.impl.InterfaceLifecycleOperation;
 import org.openecomp.sdc.config.CatalogBESpringConfig;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.config.PropertiesFactoryBean;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.Import;
-import org.springframework.core.io.FileSystemResource;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -55,8 +55,8 @@
 @Configuration
 @Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
 @ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
-        "org.openecomp.sdc.asdctool.migration.config.mocks"
-                })
+        "org.openecomp.sdc.asdctool.migration.config.mocks",
+        "org.openecomp.sdc.be.filters" })
 public class MigrationSpringConfig {
 
     @Autowired(required=false)
@@ -85,14 +85,6 @@
         return new MigrationTasksDao(cassandraClient);
     }
 
-    @Bean(name = "elasticsearchConfig")
-    public PropertiesFactoryBean mapper() {
-        String configHome = System.getProperty("config.home");
-        PropertiesFactoryBean bean = new PropertiesFactoryBean();
-        bean.setLocation(new FileSystemResource(configHome + "/elasticsearch.yml"));
-        return bean;
-    }
-
     @Bean(name = "componentsCleanBusinessLogic")
     public ComponentsCleanBusinessLogic componentsCleanBusinessLogic(
         IElementOperation elementDao,
@@ -108,5 +100,12 @@
         groupInstanceOperation, groupTypeOperation, interfaceOperation, interfaceLifecycleTypeOperation, resourceBusinessLogic,
         serviceBusinessLogic, artifactToscaOperation);
     }
+    
+    @Bean(name = "dmaapClientFactory")
+    public DmaapClientFactory getDmaapClientFactory() {return new DmaapClientFactory();}
 
+    @Bean(name = "healthCheckBusinessLogic")
+    public HealthCheckBusinessLogic getHealthCheckBusinessLogic() {
+        return new HealthCheckBusinessLogic();
+    }
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java
new file mode 100644
index 0000000..e8c6a95
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/CambriaHandlerMock.java
@@ -0,0 +1,87 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import com.att.nsa.apiClient.credentials.ApiCredential;
+import fj.data.Either;
+import org.openecomp.sdc.be.components.distribution.engine.CambriaErrorResponse;
+import org.openecomp.sdc.be.components.distribution.engine.ICambriaHandler;
+import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
+import org.openecomp.sdc.be.components.distribution.engine.SubscriberTypeEnum;
+import org.springframework.stereotype.Component;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
+@Component("cambriaHandler")
+public class CambriaHandlerMock implements ICambriaHandler {
+
+    @Override
+    public Either<Set<String>, CambriaErrorResponse> getTopics(List<String> hostSet) {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse createTopic(Collection<String> hostSet, String apiKey, String secretKey, String topicName, int partitionCount, int replicationCount) {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse unRegisterFromTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse registerToTopic(Collection<String> hostSet, String managerApiKey, String managerSecretKey, String subscriberApiKey, SubscriberTypeEnum subscriberTypeEnum, String topicName) {
+        return null;
+    }
+
+    @Override
+    public com.att.nsa.cambria.client.CambriaConsumer createConsumer(Collection<String> hostSet, String topicName, String apiKey, String secretKey, String consumerId, String consumerGroup, int timeoutMS) throws Exception {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse sendNotification(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data) {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse sendNotificationAndClose(String topicName, String uebPublicKey, String uebSecretKey, List<String> uebServers, INotificationData data, long waitBeforeCloseTimeout) {
+        return null;
+    }
+
+    @Override
+    public CambriaErrorResponse getApiKey(String server, String apiKey) {
+        return null;
+    }
+
+    @Override
+    public Either<ApiCredential, CambriaErrorResponse> createUebKeys(List<String> hostSet) {
+        return null;
+    }
+
+    @Override
+    public Either<Iterable<String>, CambriaErrorResponse> fetchFromTopic(com.att.nsa.cambria.client.CambriaConsumer topicConsumer) {
+        return null;
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
index c4150e41..dde7f6a 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
@@ -29,6 +29,8 @@
 import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
 import org.springframework.stereotype.Component;
 
+import java.util.List;
+
 @Component("distributionEngine")
 public class DistributionEngineMock implements IDistributionEngine {
     @Override
@@ -76,4 +78,9 @@
         return null;
     }
 
+    @Override
+    public OperationalEnvironmentEntry getEnvironmentByDmaapUebAddress(List<String> dmaapUebAddress) {
+        return null;
+    }
+
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
similarity index 62%
rename from asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
rename to asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
index 04b398b..46470cc 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/PortalHealthCheckBuilderMock.java
@@ -2,14 +2,14 @@
  * ============LICENSE_START=======================================================
  * SDC
  * ================================================================================
- * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
  * ================================================================================
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *      http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,27 +18,27 @@
  * ============LICENSE_END=========================================================
  */
 
-package org.openecomp.sdc.asdctool.configuration.mocks.es;
+package org.openecomp.sdc.asdctool.migration.config.mocks;
 
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+import org.openecomp.sdc.be.components.health.PortalHealthCheckBuilder;
+import org.springframework.stereotype.Component;
 
-public class ElasticSearchClientMock extends ElasticSearchClient {
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+
+@Component("portalHealthCheckBusinessLogic")
+public class PortalHealthCheckBuilderMock extends PortalHealthCheckBuilder {
+
 
     @Override
-    public void initialize() {
-
+    @PostConstruct
+    public PortalHealthCheckBuilder init() {
+        return null;
     }
 
     @Override
-    public void setClusterName(final String clusterName) {
+    @PreDestroy
+    protected void destroy() {
 
     }
-
-    @Override
-    public void setLocal(final String strIsLocal) {
-    }
-
-    @Override
-    public void setTransportClient(final String strIsTransportclient) {
-    }
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
index a713f92..17c3aea 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
@@ -33,7 +33,7 @@
     /**
      * The current db version. should be tested against real db to verify it is compatible to the db version
      */
-    public static final DBVersion CURRENT_VERSION = new DBVersion(1710, 0);
+    public static final DBVersion DEFAULT_VERSION = new DBVersion(1710, 0);
 
     private DBVersion(BigInteger major, BigInteger minor) {
         this.major = major;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
index f341ab2..5ed2e56 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/PostMigration.java
@@ -27,7 +27,7 @@
 	@Override
 	default
 	public DBVersion getVersion() {
-		return DBVersion.CURRENT_VERSION;
+		return DBVersion.DEFAULT_VERSION;
 	}
 	
 	@Override
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
index 2dd51fc..74c7405 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksAccessor.java
@@ -31,6 +31,9 @@
     @Query("SELECT minor_version FROM sdcrepository.migrationTasks WHERE major_version = :majorVersion order by minor_version desc limit 1")
     ResultSet getLatestMinorVersion(@Param("majorVersion") Long majorVersion);
 
+    @Query("SELECT major_version FROM sdcrepository.migrationTasks")
+    ResultSet getLatestMajorVersion();
+
     @Query("DELETE FROM sdcrepository.migrationTasks WHERE major_version = :majorVersion")
     void deleteTasksForMajorVersion(@Param("majorVersion") Long majorVersion);
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
index 1d7e662..aabd4d8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/dao/MigrationTasksDao.java
@@ -27,6 +27,7 @@
 import com.datastax.driver.mapping.MappingManager;
 import fj.data.Either;
 import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
 import org.openecomp.sdc.be.dao.cassandra.CassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
@@ -38,6 +39,9 @@
 
 import javax.annotation.PostConstruct;
 import java.math.BigInteger;
+import java.util.Collections;
+import java.util.List;
+import java.util.stream.Collectors;
 
 @Service
 public class MigrationTasksDao extends CassandraDao {
@@ -77,13 +81,29 @@
         try {
             ResultSet latestMinorVersion = migrationTasksAccessor.getLatestMinorVersion(majorVersion.longValue());
             Row minorVersionRow = latestMinorVersion.one();
-            return minorVersionRow == null ? BigInteger.valueOf(Long.MIN_VALUE) : BigInteger.valueOf(minorVersionRow.getLong(0));
+            return minorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMinor() : BigInteger.valueOf(minorVersionRow.getLong(0));
         } catch (RuntimeException e) {
             logger.error("failed to get latest minor version for major version {}", majorVersion,  e);
             throw e;
         }
     }
 
+    public BigInteger getLatestMajorVersion() {
+        try {
+            ResultSet latestMajorVersion = migrationTasksAccessor.getLatestMajorVersion();
+            List<Row> all = latestMajorVersion.all();
+            Long majorVersionRow = null;
+            if (all.size() != 0){
+                List<Long> majorVersions = all.stream().map(p -> p.getLong(0)).collect(Collectors.toList());
+                majorVersionRow = Collections.max(majorVersions);
+            }
+            return majorVersionRow == null ? DBVersion.DEFAULT_VERSION.getMajor() : BigInteger.valueOf(majorVersionRow);
+        } catch (RuntimeException e) {
+            logger.error("failed to get latest major version ",  e);
+            throw e;
+        }
+    }
+
     public void deleteAllTasksForVersion(BigInteger majorVersion) {
         try {
             migrationTasksAccessor.deleteTasksForMajorVersion(majorVersion.longValue());
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
index 9e62530..9141295 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/service/SdcRepoService.java
@@ -35,13 +35,13 @@
     }
 
     public DBVersion getLatestDBVersion() {
-        BigInteger currentMajorVersion = DBVersion.CURRENT_VERSION.getMajor();
+        BigInteger currentMajorVersion = migrationTasksDao.getLatestMajorVersion();
         BigInteger latestMinorVersion = migrationTasksDao.getLatestMinorVersion(currentMajorVersion);
-        return latestMinorVersion == null ? DBVersion.from(currentMajorVersion, BigInteger.valueOf(Integer.MIN_VALUE)) : DBVersion.from(currentMajorVersion, latestMinorVersion);
+        return DBVersion.from(currentMajorVersion, latestMinorVersion);
     }
 
     public void clearTasksForCurrentMajor() {
-        BigInteger currentMajorVersion = DBVersion.CURRENT_VERSION.getMajor();
+        BigInteger currentMajorVersion = DBVersion.DEFAULT_VERSION.getMajor();
         migrationTasksDao.deleteAllTasksForVersion(currentMajorVersion);
     }
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java
new file mode 100644
index 0000000..b0a1d50
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/InstanceMigrationBase.java
@@ -0,0 +1,178 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.janusgraph.core.JanusGraphVertex;
+import org.openecomp.sdc.asdctool.migration.tasks.mig2002.SdcCollapsingRolesRFCstateMigration;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base class for migration tasks that iterate over all non-deleted, non-CVFC
+ * topology-template vertices and apply a per-container upgrade step.
+ * Subclasses implement {@link #handleOneContainer(GraphVertex)}.
+ */
+public abstract class InstanceMigrationBase {
+
+    private static final Logger log = LoggerFactory.getLogger(InstanceMigrationBase.class);
+    protected JanusGraphDao janusGraphDao;
+
+    public InstanceMigrationBase(JanusGraphDao janusGraphDao) {
+        this.janusGraphDao = janusGraphDao;
+    }
+
+    /**
+     * Fetches every topology-template vertex that is not deleted and not of
+     * resource type CVFC, and feeds each one to {@link #handleOneContainer(GraphVertex)}.
+     *
+     * @return OK when the query succeeded (individual container failures are
+     *         counted and logged by {@link #proceed(List)}, not propagated)
+     */
+    protected StorageOperationStatus upgradeTopologyTemplates() {
+        Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
+        hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+        hasNotProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC);
+
+        return janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, hasNotProps, JsonParseFlagEnum.ParseAll)
+                .either(this::proceed, this::handleError);
+    }
+
+    /**
+     * Performs the task-specific upgrade on a single container vertex.
+     *
+     * @param containerV the topology-template vertex to migrate
+     * @return OK on success, any other status on failure
+     */
+    protected abstract StorageOperationStatus handleOneContainer(GraphVertex containerV);
+
+    /**
+     * Applies {@link #handleOneContainer(GraphVertex)} to every vertex,
+     * tallying (but not aborting on) failures.
+     *
+     * @param containersV vertices to migrate
+     * @return always OK; per-vertex failures are only logged
+     */
+    protected StorageOperationStatus proceed(List<GraphVertex> containersV) {
+        int failureCounter = 0;
+        log.info("found {} vertices to migrate ", containersV.size());
+        for (GraphVertex container : containersV) {
+            StorageOperationStatus storageOperationStatus = handleOneContainer(container);
+            if (storageOperationStatus != StorageOperationStatus.OK) {
+                failureCounter++;
+            }
+        }
+
+        if (failureCounter > 0) {
+            log.info("Failed to update {} vertices", failureCounter);
+        } else {
+            log.info("All vertices were successfully updated");
+        }
+
+        return StorageOperationStatus.OK;
+    }
+
+    /**
+     * Looks up a vertex by its unique id.
+     *
+     * @param vertexId id of the vertex to fetch
+     * @return the vertex, or null when the lookup failed (failure is logged)
+     */
+    protected GraphVertex getVertexById(String vertexId) {
+        Either<GraphVertex, JanusGraphOperationStatus> vertexById = janusGraphDao.getVertexById(vertexId);
+        if (vertexById.isRight()) {
+            // Parameterized logging: the arguments must be passed separately so the
+            // {} placeholders are substituted (concatenating them into the format
+            // string leaves the placeholders unexpanded).
+            log.info("Exception occurred while query vertexId: {} exception: {} ", vertexId, vertexById.right().value());
+            return null;
+        }
+        return vertexById.left().value();
+    }
+
+    /**
+     * Persists the given vertex and commits the transaction.
+     *
+     * @param graphVertex vertex to update
+     * @return OK when both the update and the commit succeeded
+     */
+    protected StorageOperationStatus updateVertexAndCommit(GraphVertex graphVertex) {
+        StorageOperationStatus status;
+        if ((status = janusGraphDao.updateVertex(graphVertex)
+                .either(v -> StorageOperationStatus.OK, this::handleError)) != StorageOperationStatus.OK) {
+            return status;
+        }
+        return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(janusGraphDao.commit());
+    }
+
+    /**
+     * Converts a JanusGraph status to a storage status, treating NOT_FOUND as OK
+     * (an empty result set is not an error for these migrations).
+     */
+    protected StorageOperationStatus handleError(JanusGraphOperationStatus err) {
+        return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(JanusGraphOperationStatus.NOT_FOUND == err ? JanusGraphOperationStatus.OK : err);
+    }
+
+    /** Removes every edge produced by the iterator. */
+    protected void removeEdges(Iterator<Edge> edges) {
+        while (edges.hasNext()) {
+            Edge edge = edges.next();
+            edge.remove();
+        }
+    }
+
+    /**
+     * Removes only the edges whose STATE property equals the given state.
+     *
+     * @param edges edges to inspect
+     * @param state lifecycle state to match (must not be null)
+     */
+    protected void removeEdgesInState(Iterator<Edge> edges, String state) {
+        while (edges.hasNext()) {
+            Edge edge = edges.next();
+            String edgeState = (String) janusGraphDao.getProperty(edge, EdgePropertyEnum.STATE);
+            // null-safe comparison: an edge with no STATE property is simply skipped
+            // instead of triggering a NullPointerException
+            if (state.equals(edgeState)) {
+                edge.remove();
+            }
+        }
+    }
+
+    /**
+     * Sets the given property to the given value on every edge produced by the iterator.
+     *
+     * @throws IOException when the underlying DAO fails to serialize the property
+     */
+    protected void updateEdgeProperty(EdgePropertyEnum property, String value, Iterator<Edge> edges) throws IOException {
+        while (edges.hasNext()) {
+            Edge edge = edges.next();
+            Map<EdgePropertyEnum, Object> prop = new HashMap<>();
+            prop.put(property, value);
+            janusGraphDao.setEdgeProperties(edge, prop);
+        }
+    }
+
+    /**
+     * Checks whether the user owning the STATE edge also appears among the
+     * LAST_STATE edge owners (i.e. the user has both edges).
+     *
+     * @param stateList     vertices on the STATE side; only the first entry is compared
+     * @param lastStateList vertices on the LAST_STATE side
+     * @return true when the STATE user id matches any LAST_STATE user id
+     */
+    protected boolean sameUser(List<JanusGraphVertex> stateList, List<JanusGraphVertex> lastStateList) {
+        // hoisted out of the loop: the STATE-side user id is loop-invariant
+        String idSt = (String) janusGraphDao.getProperty(stateList.get(0), GraphPropertyEnum.USERID.getProperty());
+        for (JanusGraphVertex lsVertex : lastStateList) {
+            String idLs = (String) janusGraphDao.getProperty(lsVertex, GraphPropertyEnum.USERID.getProperty());
+            if (idLs.equals(idSt)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Collects the vertex on the requested side (OUT or IN) of each edge.
+     *
+     * @param edges edges to walk
+     * @param side  which endpoint of each edge to collect
+     * @return the endpoint vertices, in iteration order
+     */
+    protected List<JanusGraphVertex> getVertexByEdgeSide(Iterator<Edge> edges, SdcCollapsingRolesRFCstateMigration.EdgeSide side) {
+        List<JanusGraphVertex> vertexList = new ArrayList<>();
+        while (edges.hasNext()) {
+            Edge edge = edges.next();
+
+            if (side == SdcCollapsingRolesRFCstateMigration.EdgeSide.OUT) {
+                vertexList.add((JanusGraphVertex) edge.outVertex());
+            } else {
+                vertexList.add((JanusGraphVertex) edge.inVertex());
+            }
+        }
+
+        return vertexList;
+    }
+
+    /** Returns the edges with the given label attached to the container vertex in the given direction. */
+    protected Iterator<Edge> getVertexEdge(GraphVertex containerV, Direction direction, EdgeLabelEnum edgeLabel) {
+        return containerV.getVertex().edges(direction, edgeLabel.name());
+    }
+
+    /** Which endpoint of an edge to read: the incoming (IN) or outgoing (OUT) vertex. */
+    public enum EdgeSide {
+        IN, OUT;
+    }
+}
+
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
index d51271b..758589c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/handlers/XlsOutputHandler.java
@@ -82,7 +82,7 @@
 			file.close();
 			return true;
 		} catch (Exception e) {
-			log.debug("#writeOutputAndCloseFile - Failed to write an output file. ", e);
+			log.debug("#writeOutputAndCloseFile - Failed to write an output file. The {} exception occurred. ", e.getMessage());
 			return false;
 		}
 	}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
index 49cd1fe..8eda864 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
@@ -26,12 +26,14 @@
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
 import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
 import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
 import org.openecomp.sdc.be.components.impl.ComponentInstanceBusinessLogic;
 import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
 import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.exceptions.ByActionStatusComponentException;
 import org.openecomp.sdc.be.components.impl.exceptions.ByResponseFormatComponentException;
 import org.openecomp.sdc.be.components.impl.exceptions.ComponentException;
 import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
@@ -47,22 +49,37 @@
 import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
-import org.openecomp.sdc.be.datatypes.enums.*;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.*;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentInstanceProperty;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.User;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.ToscaOperationFacade;
 import org.openecomp.sdc.be.model.jsonjanusgraph.utils.ModelConverter;
-import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
 import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
 import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.openecomp.sdc.exception.ResponseFormat;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
 import javax.annotation.PostConstruct;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
@@ -103,7 +120,7 @@
     private LifecycleBusinessLogic lifecycleBusinessLogic;
 
     @Autowired
-    private IUserAdminOperation userAdminOperation;
+    private UserAdminOperation userAdminOperation;
 
     @Autowired
     private ResourceBusinessLogic resourceBusinessLogic;
@@ -481,20 +498,25 @@
 
     private Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateComposition(org.openecomp.sdc.be.model.Component component) {
         if (component != null && component.getComponentInstances() != null) {
-            Either<ComponentInstance, ResponseFormat> upgradeInstanceRes;
             for (ComponentInstance instance : component.getComponentInstances()) {
-                upgradeInstanceRes = upgradeInstance(component, instance);
-                if (upgradeInstanceRes.isRight()) {
-                    log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage());
-                    outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage());
-                    return Either.right(upgradeInstanceRes.right().value());
+                try {
+                    upgradeInstance(component, instance);
+                }catch (ComponentException e){
+                    ResponseFormat responseFormat = e.getResponseFormat();
+                    log.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(),
+                            component.getInvariantUUID(), component.getVersion(), "upgradeInstance",
+                            responseFormat.getFormattedMessage());
+                    outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(),
+                            component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(),
+                            responseFormat.getFormattedMessage());
+                    return Either.right(responseFormat);
                 }
             }
         }
         return Either.left(component);
     }
 
-    private Either<ComponentInstance, ResponseFormat> upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
+    private ComponentInstance upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
         log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
         ComponentInstance newComponentInstance = new ComponentInstance(instance);
         if (instance.getOriginType() == OriginTypeEnum.ServiceProxy) {
@@ -503,35 +525,33 @@
         return upgradeResourceInstance(component, instance, newComponentInstance);
     }
 
-    private Either<ComponentInstance, ResponseFormat> upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
+    private ComponentInstance upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
 
         log.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
-        Either<ComponentInstance, ResponseFormat> upgradeInstanceRes = null;
+        ComponentInstance upgradeInstanceRes = null;
         VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
         Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
         if(getOriginRes.isRight()){
             log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
                     component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
-            upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+            throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType()));
         }
-        if(upgradeInstanceRes == null) {
-            copyComponentNameAndVersionToNewInstance(newComponentInstance, getOriginRes.left().value());
+        copyComponentNameAndVersionToNewInstance(newComponentInstance, getOriginRes.left().value());
 
-            if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
-                upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
-            }
-            if((upgradeInstanceRes == null || upgradeInstanceRes.isLeft()) && isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
-                ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes.left().value();
-                upgradeInstanceRes = Either.left(updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId())));
-            }
+        if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
+            upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
+        }
+        if(isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
+            ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes;
+            upgradeInstanceRes = updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId()));
         }
         //upgrade nodes contained by CVFC
         if(upgradeInstanceRes == null && isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC &&
                 !upgradeVf(getOriginRes.left().value().getUniqueId(), false, true)) {
-            upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
+            throw new ByActionStatusComponentException(ActionStatus.GENERAL_ERROR);
         }
         if(upgradeInstanceRes == null){
-            upgradeInstanceRes = Either.left(instance);
+            upgradeInstanceRes = instance;
         }
         log.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ",
                 component.getComponentType().getValue(), instance.getName());
@@ -584,17 +604,17 @@
         return isAllottedResource(component.getUniqueId());
     }
 
-    private Either<ComponentInstance, ResponseFormat> upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
+    private ComponentInstance upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
         Either<List<GraphVertex>, JanusGraphOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(instance.getSourceModelInvariant());
         if (getLatestOriginServiceRes.isRight()) {
-            return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertJanusGraphStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType())));
+            throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertJanusGraphStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType()));
         }
         ModelConverter.getVertexType(instance.getOriginType().name());
         Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestByName(instance.getComponentName());
         if(getOriginRes.isRight()){
             log.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
                     component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
-            return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+            throw new ByActionStatusComponentException(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType()));
         }
         newComponentInstance.setComponentUid((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
         return changeAssetVersion(component, instance, newComponentInstance);
@@ -613,7 +633,7 @@
             .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata);
     }
 
-    private Either<ComponentInstance, ResponseFormat> changeAssetVersion(org.openecomp.sdc.be.model.Component containerComponent, ComponentInstance instance, ComponentInstance newComponentInstance) {
+    private ComponentInstance changeAssetVersion(org.openecomp.sdc.be.model.Component containerComponent, ComponentInstance instance, ComponentInstance newComponentInstance) {
         return componentInstanceBusinessLogic.changeComponentInstanceVersion(ComponentTypeEnum.SERVICE_PARAM_NAME, containerComponent.getUniqueId(), instance.getUniqueId(), user.getUserId(), newComponentInstance);
     }
 
@@ -857,7 +877,7 @@
         log.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
         log.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
         Either<List<GraphVertex>, JanusGraphOperationStatus> parentResourceRes = janusGraphDao
-            .getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
+            .getParentVertices(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
         if (parentResourceRes.isRight() && parentResourceRes.right().value() != JanusGraphOperationStatus.NOT_FOUND) {
             return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(parentResourceRes.right().value());
 
@@ -916,18 +936,9 @@
     private Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> performFullCertification(org.openecomp.sdc.be.model.Component component) {
         log.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. ",
                 component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
-
-        Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false);
-        if (changeStateEither.isRight()) {
-            log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST);
-            return changeStateEither;
-        }
-        changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.START_CERTIFICATION, changeInfo, true, false);
-        if (changeStateEither.isRight()) {
-            log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION);
-            return changeStateEither;
-        }
-        changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
+        org.openecomp.sdc.be.model.Component updatedComponent = component;
+        Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither;
+        changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), updatedComponent.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
         if (changeStateEither.isRight()) {
             log.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
         } else {
@@ -997,6 +1008,7 @@
         propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, componentType.name());
         propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
 
+
         Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
         propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
         if (vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
index 812c07e..f0f59c4 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ForwardPathMigration.java
@@ -17,33 +17,23 @@
 package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
 
 import com.google.common.collect.ImmutableSet;
-import org.janusgraph.core.JanusGraphVertex;
 import fj.data.Either;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
 import org.apache.tinkerpop.gremlin.structure.Direction;
 import org.apache.tinkerpop.gremlin.structure.Edge;
 import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.janusgraph.core.JanusGraphVertex;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.core.task.Migration;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
 import org.openecomp.sdc.be.config.ConfigurationManager;
 import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
-import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.datatypes.elements.ForwardingPathDataDefinition;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
@@ -56,7 +46,18 @@
 import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
 import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
 
-@org.springframework.stereotype.Component
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+//@org.springframework.stereotype.Component
 public class ForwardPathMigration implements Migration {
 
     private JanusGraphDao janusGraphDao;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
index ea1e4a5..8f7fc3e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/ResourceLifecycleMigration.java
@@ -21,6 +21,7 @@
 package org.openecomp.sdc.asdctool.migration.tasks.mig1806;
 
 import fj.data.Either;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.core.task.Migration;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
@@ -36,7 +37,6 @@
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
 import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
 import org.openecomp.sdc.be.model.User;
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
 import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
index bcb2363..a28c27f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1806/SDCInstancesMigration.java
@@ -24,34 +24,35 @@
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
 import org.openecomp.sdc.asdctool.migration.core.task.Migration;
 import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
 import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
 import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
 import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
 import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
 import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
-import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
 import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
 import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
 import org.openecomp.sdc.be.datatypes.elements.MapPropertiesDataDefinition;
 import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
 import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
 import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
-import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
 import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
 import org.openecomp.sdc.be.model.jsonjanusgraph.operations.NodeTemplateOperation;
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.stereotype.Component;
 
 import java.math.BigInteger;
-import java.util.*;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Optional;
 
 @Component
-public class SDCInstancesMigration implements Migration {
+public class SDCInstancesMigration extends InstanceMigrationBase implements Migration {
 
-    private JanusGraphDao janusGraphDao;
     private NodeTemplateOperation nodeTemplateOperation;
 
     private static final Logger log = Logger.getLogger(SDCInstancesMigration.class);
@@ -62,7 +63,7 @@
  
  
     public SDCInstancesMigration(JanusGraphDao janusGraphDao, NodeTemplateOperation nodeTemplateOperation) {
-        this.janusGraphDao = janusGraphDao;
+        super(janusGraphDao);
         this.nodeTemplateOperation = nodeTemplateOperation;
     }
 
@@ -78,41 +79,14 @@
 
     @Override
     public MigrationResult migrate() {
-        StorageOperationStatus status = connectAllContainers();
-
+        StorageOperationStatus status = upgradeTopologyTemplates();
         return status == StorageOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to create connection between instances and origins. Error : " + status);
     }
 
-    private StorageOperationStatus connectAllContainers() {
-        StorageOperationStatus status;
-        Map<GraphPropertyEnum, Object> hasNotProps = new EnumMap<>(GraphPropertyEnum.class);
-        hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
-        hasNotProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC);
-
-        status = janusGraphDao
-            .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, hasNotProps, JsonParseFlagEnum.ParseAll)
-                .either(this::connectAll, this::handleError);
-        return status;
-    }
-
-    private StorageOperationStatus handleError(JanusGraphOperationStatus err) {
-        return DaoStatusConverter.convertJanusGraphStatusToStorageStatus(
-            JanusGraphOperationStatus.NOT_FOUND == err ? JanusGraphOperationStatus.OK : err);
-    }
-
-    private StorageOperationStatus connectAll(List<GraphVertex> containersV) {
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
         StorageOperationStatus status = StorageOperationStatus.OK;
-        for (GraphVertex container : containersV) {
-            status = handleOneContainer(container);
-            if (status != StorageOperationStatus.OK) {
-                break;
-            }
-        }
-        return status;
-    }
 
-    private StorageOperationStatus handleOneContainer(GraphVertex containerV) {
-        StorageOperationStatus status = StorageOperationStatus.OK;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
 
         boolean needConnectAllotted = false;
         ComponentTypeEnum componentType = containerV.getType();
@@ -154,6 +128,7 @@
         return status;
     }
 
+
     private Either<Map<String, MapPropertiesDataDefinition>, StorageOperationStatus> getInstProperties(GraphVertex containerV) {
         Map<String, MapPropertiesDataDefinition> instanceProperties;
        Either<GraphVertex, JanusGraphOperationStatus> instProps = janusGraphDao
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
index 35e7950..fd71336 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/InterfaceOperationMigration.java
@@ -17,10 +17,6 @@
 package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
 
 import fj.data.Either;
-import java.math.BigInteger;
-import java.util.EnumMap;
-import java.util.List;
-import java.util.Map;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.openecomp.sdc.asdctool.migration.core.DBVersion;
@@ -45,6 +41,11 @@
 import org.openecomp.sdc.common.log.wrappers.Logger;
 import org.springframework.beans.factory.annotation.Autowired;
 
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
 @org.springframework.stereotype.Component
 public class InterfaceOperationMigration implements Migration {
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java
new file mode 100644
index 0000000..16f0c48
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcConsumerMigration.java
@@ -0,0 +1,108 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphGenericDao;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.resources.data.ConsumerData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.List;
+
+@Component
+public class SdcConsumerMigration implements Migration {
+
+    private static final Logger logger = LoggerFactory.getLogger(SdcConsumerMigration.class);
+
+    private JanusGraphGenericDao janusGraphGenericDao;
+
+    public SdcConsumerMigration(JanusGraphGenericDao janusGraphGenericDao) {
+        this.janusGraphGenericDao = janusGraphGenericDao;
+    }
+
+    @Override
+    public String description() {
+        return "remove all consumer nodes";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        JanusGraphOperationStatus status = null;
+        try {
+            status = handleConsumerNodes();
+            if (status == JanusGraphOperationStatus.OK){
+                logger.info("removed all consumer nodes.");
+                return MigrationResult.success();
+            } else {
+                return MigrationResult.error("failed to remove consumer nodes. error: " + status);
+            }
+        } finally {
+            commitOrRollBack(status);
+        }
+    }
+
+    private void commitOrRollBack(JanusGraphOperationStatus status) {
+        if (status == JanusGraphOperationStatus.OK) {
+            janusGraphGenericDao.commit();
+        } else {
+            janusGraphGenericDao.rollback();
+        }
+    }
+
+    private JanusGraphOperationStatus handleConsumerNodes() {
+        logger.info("getting all consumer nodes.");
+        return janusGraphGenericDao.getAll(NodeTypeEnum.ConsumerCredentials, ConsumerData.class)
+                .either(this::removeConsumerNodes, this::handleError);
+    }
+
+    private JanusGraphOperationStatus removeConsumerNodes(List<ConsumerData> consumerNodes){
+        logger.info("found {} consumer nodes.", consumerNodes.size());
+        return consumerNodes.stream()
+                .map(consumerNode -> janusGraphGenericDao.deleteNode(consumerNode, ConsumerData.class))
+                .filter(Either::isRight)
+                .map(either -> either.right().value())
+                .findAny()
+                .orElse(JanusGraphOperationStatus.OK);
+    }
+
+    private JanusGraphOperationStatus handleError(JanusGraphOperationStatus status){
+        if (status == JanusGraphOperationStatus.NOT_FOUND) {
+            logger.info("found 0 consumer nodes.");
+            return JanusGraphOperationStatus.OK;
+        } else{
+            return status;
+        }
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java
new file mode 100644
index 0000000..1045634
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcGroupsMigration.java
@@ -0,0 +1,186 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.datatypes.elements.GroupDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcGroupsMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = LoggerFactory.getLogger(SdcGroupsMigration.class);
+
+    private final GroupTypeOperation groupTypeOperation;
+
+    private Map<String, GroupTypeDefinition> latestGroupTypeMap = new HashMap<>();
+
+    public enum GroupsForUpgrade {
+        NW_COLLECTION_GROUP_NAME("org.openecomp.groups.NetworkCollection"),
+        VFC_INSTANCE_GROUP_NAME("org.openecomp.groups.VfcInstanceGroup");
+
+        private String toscaType;
+
+        GroupsForUpgrade(String toscaType) {
+            this.toscaType = toscaType;
+        }
+
+        public static boolean containsToscaType(String type) {
+            try {
+                return Arrays.stream(values()).anyMatch(g->g.getToscaType().equals(type));
+            }
+            catch (IllegalArgumentException ex) {
+                return false;
+            }
+        }
+
+        public String getToscaType() {
+            return toscaType;
+        }
+
+    }
+    public SdcGroupsMigration(JanusGraphDao janusGraphDao, GroupTypeOperation groupTypeOperation) {
+        super(janusGraphDao);
+        this.groupTypeOperation = groupTypeOperation;
+    }
+
+    @Override
+    public String description() {
+        return "update derived from field value for NetworkCollection and VfcInstanceGroup group instances ";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        loadLatestGroupTypeDefinitions();
+        StorageOperationStatus status = upgradeTopologyTemplates();
+        return status == StorageOperationStatus.OK ?
+                MigrationResult.success() : MigrationResult.error("failed to update derived from value for NetworkCollection and VfcInstanceGroup group instances. Error : " + status);
+    }
+
+    void loadLatestGroupTypeDefinitions() {
+        Arrays.stream(GroupsForUpgrade.values()).forEach(this::getLatestGroupTypeDefinition);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+
+        try {
+            status = janusGraphDao.getChildVertex(containerV, EdgeLabelEnum.GROUPS, JsonParseFlagEnum.ParseAll)
+                    .either(this::updateGroupPropertiesIfRequired, this::handleError);
+        }
+        catch (Exception e) {
+            log.error("Exception occurred:", e);
+            status = StorageOperationStatus.GENERAL_ERROR;
+        }
+        finally {
+            if (status != StorageOperationStatus.OK) {
+                janusGraphDao.rollback();
+                if (status == StorageOperationStatus.NOT_FOUND) {
+                    //it is happy flow as well
+                    status = StorageOperationStatus.OK;
+                }
+            }
+            if (log.isInfoEnabled()) {
+                log.info("Upgrade status is <{}> for topology template <{}> uniqueId <{}>",
+                        status.name(), containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+                        containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+            }
+        }
+        return status;
+    }
+
+    private StorageOperationStatus updateGroupPropertiesIfRequired(GraphVertex vertex) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        boolean isUpdated = false;
+        Map<String, GroupDataDefinition> groupDefinitionMap = (Map<String, GroupDataDefinition>) vertex.getJson();
+        for (GroupDataDefinition groupDef : groupDefinitionMap.values()) {
+           if (GroupsForUpgrade.containsToscaType(groupDef.getType())) {
+                if (log.isDebugEnabled()) {
+                    log.debug("Group instance named <{}> of type <{}> is supposed to be updated on vertex <{}>",
+                            groupDef.getName(), groupDef.getType(), vertex.getUniqueId());
+                }
+                isUpdated = isGroupPropertiesUpdateDone(groupDef.getProperties(), latestGroupTypeMap.get(groupDef.getType()).getProperties());
+                if (log.isDebugEnabled()) {
+                    String result = isUpdated ? "has been updated" : "is up to date ";
+                    log.debug("Group instance named <{}> of type <{}> uniqueID <{}> {} on vertex <{}>",
+                                            groupDef.getName(), groupDef.getType(), groupDef.getUniqueId(), result, vertex.getUniqueId());
+                }
+            }
+        }
+        if (isUpdated) {
+            vertex.setJson(groupDefinitionMap);
+            status = updateVertexAndCommit(vertex);
+            if (status == StorageOperationStatus.OK && log.isDebugEnabled()) {
+                log.debug("Group properties change is committed on vertex <{}>", vertex.getUniqueId());
+            }
+        }
+        return status;
+    }
+
+    private boolean isGroupPropertiesUpdateDone(List<PropertyDataDefinition> curPropDefList, List<PropertyDefinition> latestGroupDefList) {
+        boolean isUpdated = false;
+        for (PropertyDefinition prop: latestGroupDefList) {
+            if (curPropDefList.stream().noneMatch(l->l.getName().equals(prop.getName()))) {
+                curPropDefList.add(prop);
+                isUpdated = true;
+            }
+        }
+        return isUpdated;
+    }
+
+    StorageOperationStatus getLatestGroupTypeDefinition(GroupsForUpgrade groupsForUpgrade) {
+        return groupTypeOperation.getLatestGroupTypeByType(groupsForUpgrade.getToscaType(), false)
+                .either(g-> {
+                    latestGroupTypeMap.put(groupsForUpgrade.getToscaType(), g);
+                    return StorageOperationStatus.OK;
+                }, err->err);
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java
new file mode 100644
index 0000000..837abf1
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1902/SdcResourceIconMigration.java
@@ -0,0 +1,187 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1902;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.CompositionDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.jsonjanusgraph.enums.JsonConstantKeysEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.common.log.wrappers.Logger;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcResourceIconMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = Logger.getLogger(SdcResourceIconMigration.class);
+
+    private Map <String, String> resourceTypeToIconMap = new HashMap<>();
+
+    @VisibleForTesting
+    SdcResourceIconMigration(JanusGraphDao janusGraphDao) {
+        super(janusGraphDao);
+    }
+
+
+    @Override
+    public String description() {
+        return "update iconPath for VL and CP nodes";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(1902), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        StorageOperationStatus status;
+        try {
+            updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.VL);
+            updateNodeTypeIconAndStoreInMap(ResourceTypeEnum.CP);
+
+            if (!resourceTypeToIconMap.isEmpty()) {
+                status = upgradeTopologyTemplates();
+            } else {
+                log.error("No VL and CP node definitions found");
+                status = StorageOperationStatus.NOT_FOUND;
+            }
+        }
+        catch(Exception e) {
+            log.error("Exception thrown: {}", e);
+            status = StorageOperationStatus.GENERAL_ERROR;
+        }
+        return status == StorageOperationStatus.OK ?
+                    MigrationResult.success() : MigrationResult.error("failed to update iconPath for VL and CP nodes. Error : " + status);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.OK;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+
+        Map<String, CompositionDataDefinition> jsonComposition = (Map<String, CompositionDataDefinition>)containerV.getJson();
+        if (jsonComposition != null && !jsonComposition.isEmpty()) {
+            CompositionDataDefinition compositionDataDefinition = jsonComposition.get(JsonConstantKeysEnum.COMPOSITION.getValue());
+            Map<String, ComponentInstanceDataDefinition> componentInstances = compositionDataDefinition.getComponentInstances();
+
+            long updateCount = componentInstances.values()
+                    .stream()
+                    .filter(this::updateIconInsideInstance).count();
+            if (updateCount > 0) {
+                status = updateVertexAndCommit(containerV);
+            }
+        }
+        else {
+            log.warn("No json found for template <{}> uniqueId <{}>",
+                    containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+                    containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+        }
+        if (log.isInfoEnabled()) {
+            log.info("Upgrade status is <{}> for topology template <{}> uniqueId <{}>",
+                    status.name(), containerV.getMetadataProperties().get(GraphPropertyEnum.NAME),
+                    containerV.getMetadataProperties().get(GraphPropertyEnum.UNIQUE_ID));
+        }
+        return status;
+    }
+
+
+    @VisibleForTesting
+    boolean updateIconInsideInstance(ComponentInstanceDataDefinition componentInstanceDataDefinition) {
+        String iconPath = resourceTypeToIconMap.get(componentInstanceDataDefinition.getComponentName());
+        if (iconPath != null) {
+            componentInstanceDataDefinition.setIcon(iconPath);
+            if (log.isDebugEnabled()) {
+                log.debug("Icon of component {} is set to {}", componentInstanceDataDefinition.getComponentName(), iconPath);
+            }
+            return true;
+        }
+        return false;
+    }
+
+    @VisibleForTesting
+    void updateNodeTypeIconAndStoreInMap(ResourceTypeEnum resourceType) {
+        Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+
+        propertiesToMatch.put(GraphPropertyEnum.RESOURCE_TYPE, resourceType.name());
+        propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+
+        propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+
+        String iconPath = String.valueOf(resourceType.getValue()).toLowerCase();
+
+        Map<String, String> resourceNameToIconMap = janusGraphDao.getByCriteria(VertexTypeEnum.NODE_TYPE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll)
+                .either(vl-> updateIconResource(vl, iconPath), status->null);
+
+        if (resourceNameToIconMap != null) {
+            resourceTypeToIconMap.putAll(resourceNameToIconMap);
+        }
+        else {
+            log.warn("Failed to get resources of type <{}>", resourceType.name());
+        }
+    }
+
+    private Map <String, String> updateIconResource(List<GraphVertex> vertexList, String iconPath) {
+        if (vertexList.isEmpty()) {
+            return null;
+        }
+        Map <String, String> nameToIconMap = new HashMap<>();
+        vertexList.forEach(v->{
+            StorageOperationStatus status = updateIconOnVertex(v, iconPath);
+            if (status == StorageOperationStatus.OK) {
+                if (log.isDebugEnabled()) {
+                    log.debug("Node type's {} icon is updated to {}", v.getMetadataProperty(GraphPropertyEnum.NAME), iconPath);
+                }
+                nameToIconMap.put(String.valueOf(v.getMetadataProperty(GraphPropertyEnum.NAME)), iconPath);
+            }
+            else {
+                log.error("Failed to update node type {} icon due to a reason: {}",
+                                v.getMetadataProperty(GraphPropertyEnum.NAME), status);
+                throw new RuntimeException("Node update failure");
+            }
+        });
+        return nameToIconMap;
+    }
+
+    private StorageOperationStatus updateIconOnVertex(GraphVertex vertex, String iconPath) {
+        vertex.setJsonMetadataField(JsonPresentationFields.ICON, iconPath);
+        return updateVertexAndCommit(vertex);
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java
new file mode 100644
index 0000000..cba5627
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1911/SdcDeploymentArtTimeOutMigration.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig1911;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.Collection;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+@Component
+public class SdcDeploymentArtTimeOutMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = LoggerFactory.getLogger(SdcDeploymentArtTimeOutMigration.class);
+    private static Integer defaultTimeOut = 120;
+
+    public SdcDeploymentArtTimeOutMigration(JanusGraphDao janusGraphDao) {
+        super(janusGraphDao);
+    }
+
+    @Override
+    public String description() {
+        return "update instance deployment artifact timeOut to default value 120 minutes";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(1911), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        StorageOperationStatus status = updateDeploymentArtifactTimeOut();
+        return status == StorageOperationStatus.OK ?
+                MigrationResult.success() : MigrationResult.error("failed to update instance deployment artifact timeOut. Error : " + status);
+    }
+
+    protected StorageOperationStatus updateDeploymentArtifactTimeOut() {
+        Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+        Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+        Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+        return byCriteria.either(this::proceed, this::handleError);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+        try {
+            Either<GraphVertex, JanusGraphOperationStatus> childVertex = janusGraphDao.getChildVertex(containerV, EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, JsonParseFlagEnum.ParseAll);
+            GraphVertex instDeployArt = childVertex.left().value();
+            Collection<MapArtifactDataDefinition> values = (Collection<MapArtifactDataDefinition>) instDeployArt.getJson().values();
+                List<ArtifactDataDefinition> artifactDataDefinitionsList = values.stream().map(f -> f.getMapToscaDataDefinition().values()).flatMap(f -> f.stream().filter(isRelevantArtifact())).collect(Collectors.toList());
+                artifactDataDefinitionsList.forEach(t -> t.setTimeout(defaultTimeOut));
+                status =  updateVertexAndCommit(instDeployArt);
+
+        } catch (NullPointerException e) {
+            log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+            status = StorageOperationStatus.OK;
+        }
+        catch (Exception e) {
+            //it is happy flow as well
+            log.error("Exception occurred:", e);
+            log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+            if (containerV != null){
+                log.error("containerV.getUniqueId() {} ---> ", containerV.getUniqueId());
+            }
+
+            status = StorageOperationStatus.OK;
+        } finally {
+            if (status != StorageOperationStatus.OK) {
+                janusGraphDao.rollback();
+                log.info("failed to update vertex ID {} ",  containerV.getUniqueId());
+                if (status == StorageOperationStatus.NOT_FOUND) {
+                    //it is happy flow as well
+                    status = StorageOperationStatus.OK;
+                }
+            }
+            else{
+                log.info("vertex ID {} successfully updated",  containerV.getUniqueId());
+            }
+
+        }
+        return status;
+    }
+
+    private static Predicate<ArtifactDataDefinition> isRelevantArtifact() {
+
+        return p -> ((p.getArtifactType().equals(ArtifactTypeEnum.HEAT.getType()) || p.getArtifactType().equals(ArtifactTypeEnum.HEAT_VOL.getType()) || p.getArtifactType().equals(ArtifactTypeEnum.HEAT_NET.getType()))
+                && p.getTimeout() != defaultTimeOut);
+
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java
new file mode 100644
index 0000000..1d7d3d1
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCERTIFIEDstateMigration.java
@@ -0,0 +1,139 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.openecomp.sdc.asdctool.enums.DistributionStatusEnum;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesCERTIFIEDstateMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesCERTIFIEDstateMigration.class);
+
+    public SdcCollapsingRolesCERTIFIEDstateMigration(JanusGraphDao janusGraphDao) {
+        super(janusGraphDao);
+    }
+
+    @Override
+    public String description() {
+        return "remove LS=READY_FOR_CERTIFICATION edge from service node + migrate DISTRIBUTION approved/rejected states to <waiting for distribution> state";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        StorageOperationStatus status = updateServiceLifeCycleState();
+        return status == StorageOperationStatus.OK ?
+                MigrationResult.success() : MigrationResult.error("failed to update service state. Error : " + status);
+    }
+
+    protected StorageOperationStatus updateServiceLifeCycleState() {
+        Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+        propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+        Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+        Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+        return byCriteria.either(this::proceed, this::handleError);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+        try {
+
+            //update edges to meet above change
+            // update LS edges from RFC to  NOT_CERTIFIED_CHECKIN
+
+            updateEdgeProperty(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name(), getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+
+            if (containerV.getMetadataProperty(GraphPropertyEnum.DISTRIBUTION_STATUS).equals(DistributionStatusEnum.DISTRIBUTION_APPROVED.name()) || containerV.getMetadataProperty(GraphPropertyEnum.DISTRIBUTION_STATUS).equals(DistributionStatusEnum.DISTRIBUTION_REJECTED.name())) {
+
+                // update vertex state property from DISTRIBUTION_APPROVED/REJECTED to DISTRIBUTION_NOT_APPROVED state
+
+                Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+                metadataProperties.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTION_NOT_APPROVED.name());
+                containerV.setMetadataProperties(metadataProperties);
+
+                //update edges to meet above change
+                //delete LAST_DISTRIBUTION_STATE_MODIFIER edge
+
+                removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_DISTRIBUTION_STATE_MODIFIER));
+
+            }
+
+            status = updateVertexAndCommit(containerV);
+
+        } catch (NullPointerException e) {
+            log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+            status = StorageOperationStatus.EXEUCTION_FAILED;
+        } catch (Exception e) {
+            //it is happy flow as well
+            log.error("Exception occurred:", e);
+            log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+            if (containerV != null) {
+                log.error("containerV.getUniqueId() ---> {}  ", containerV.getUniqueId());
+            }
+
+        } finally {
+            if (status != StorageOperationStatus.OK) {
+                janusGraphDao.rollback();
+                log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+                log.info("Storage Operation Status {}", status.toString());
+            } else {
+                log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+            }
+
+        }
+        return status;
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java
new file mode 100644
index 0000000..463ccd8
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesCIPstateMigration.java
@@ -0,0 +1,153 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesCIPstateMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesCIPstateMigration.class);
+
+    public SdcCollapsingRolesCIPstateMigration(JanusGraphDao janusGraphDao) {
+        super(janusGraphDao);
+    }
+
+    @Override
+    public String description() {
+        return "update Service state from CERTIFICATION_IN_PROGRESS to NOT_CERTIFIED_CHECKOUT state ";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        StorageOperationStatus status = updateServiceLifeCycleState();
+        return status == StorageOperationStatus.OK ?
+                MigrationResult.success() : MigrationResult.error("failed to update service state. Error : " + status);
+    }
+
+    protected StorageOperationStatus updateServiceLifeCycleState() {
+        Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+        propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFICATION_IN_PROGRESS.name());
+        propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+        Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+        Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+        return byCriteria.either(this::proceed, this::handleError);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+        try {
+
+            // update vertex state property from CERTIFICATION_IN_PROGRESS to NOT_CERTIFIED_CHECKIN state
+
+            Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+            metadataProperties.put(GraphPropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+            containerV.setMetadataProperties(metadataProperties);
+
+             //update edges to meet above change
+            // remove STATE and LAST_MODIFIER edges
+            removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+            removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_MODIFIER));
+
+            //find designer with LS = NOT_CERTIFIED_CHECKIN
+            Vertex relevantDesigner = findRelevantDesigner(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+            removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+            Map<EdgePropertyEnum, Object> edgeProperties = new HashMap<>();
+            edgeProperties.put(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+            JanusGraphOperationStatus createSTedgeStatus = janusGraphDao.createEdge(relevantDesigner, containerV.getVertex(), EdgeLabelEnum.STATE, edgeProperties);
+            JanusGraphOperationStatus createLMedgeStatus = janusGraphDao.createEdge(relevantDesigner, containerV.getVertex(), EdgeLabelEnum.LAST_MODIFIER, new HashMap<>());
+
+            status = updateVertexAndCommit(containerV);
+
+        } catch (NullPointerException e) {
+            log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+            status = StorageOperationStatus.EXEUCTION_FAILED;
+        } catch (Exception e) {
+            //it is happy flow as well
+            log.error("Exception occurred:", e);
+            log.error("Migration task will continue anyway, please find below vertex details related to this exception", e);
+            if (containerV != null) {
+                log.error("containerV.getUniqueId() ---> {}  ", containerV.getUniqueId());
+            }
+
+        } finally {
+            if (status != StorageOperationStatus.OK) {
+                janusGraphDao.rollback();
+                log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+                log.info("Storage Operation Status {}", status.toString());
+            } else {
+                log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+            }
+
+        }
+        return status;
+    }
+
+    private Vertex findRelevantDesigner(Iterator<Edge> edges) {
+        Vertex vertex = null;
+        while (edges.hasNext()) {
+            Edge edge = edges.next();
+            String state = (String) janusGraphDao.getProperty(edge, EdgePropertyEnum.STATE);
+            if (state.equals(LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name())) {
+                vertex = edge.outVertex();
+            }
+        }
+        return vertex;
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java
new file mode 100644
index 0000000..559715e
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig2002/SdcCollapsingRolesRFCstateMigration.java
@@ -0,0 +1,147 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2020 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.migration.tasks.mig2002;
+
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.janusgraph.core.JanusGraphVertex;
+import org.openecomp.sdc.asdctool.enums.LifecycleStateEnum;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.InstanceMigrationBase;
+import org.openecomp.sdc.be.dao.janusgraph.JanusGraphOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.JanusGraphDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgePropertyEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+@Component
+public class SdcCollapsingRolesRFCstateMigration extends InstanceMigrationBase implements Migration {
+
+    private static final Logger log = LoggerFactory.getLogger(SdcCollapsingRolesRFCstateMigration.class);
+
+    public SdcCollapsingRolesRFCstateMigration(JanusGraphDao janusGraphDao) {
+        super(janusGraphDao);
+    }
+
+    @Override
+    public String description() {
+        return "update Service state from READY_FOR_CERTIFICATION to NOT_CERTIFIED_CHECKOUT state ";
+    }
+
+    @Override
+    public DBVersion getVersion() {
+        return DBVersion.from(BigInteger.valueOf(2002), BigInteger.valueOf(0));
+    }
+
+    @Override
+    public MigrationResult migrate() {
+        StorageOperationStatus status = updateServiceLifeCycleState();
+        return status == StorageOperationStatus.OK ?
+                MigrationResult.success() : MigrationResult.error("failed to service state. Error : " + status);
+    }
+
+    protected StorageOperationStatus updateServiceLifeCycleState() {
+        Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+        propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.READY_FOR_CERTIFICATION.name());
+        propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
+        Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
+        propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
+        Either<List<GraphVertex>, JanusGraphOperationStatus> byCriteria = janusGraphDao.getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseAll);
+        return byCriteria.either(this::proceed, this::handleError);
+    }
+
+    @Override
+    protected StorageOperationStatus handleOneContainer(GraphVertex containerVorig) {
+        StorageOperationStatus status = StorageOperationStatus.NOT_FOUND;
+        GraphVertex containerV = getVertexById(containerVorig.getUniqueId());
+        try {
+
+            // update vertex state property from READY_FOR_CERTIFICATION to NOT_CERTIFIED_CHECKIN state
+
+            Map<GraphPropertyEnum, Object> metadataProperties = containerV.getMetadataProperties();
+            metadataProperties.put(GraphPropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+            containerV.setMetadataProperties(metadataProperties);
+
+            //update edges to meet above change
+
+            List<JanusGraphVertex> stateEdgesOutVertexList = getVertexByEdgeSide(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE), EdgeSide.OUT);
+            List<JanusGraphVertex> lastStateEdgesOutVertexList = getVertexByEdgeSide(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE), EdgeSide.OUT);
+
+            if (sameUser(stateEdgesOutVertexList, lastStateEdgesOutVertexList)) {
+                updateEdgeProperty(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name(), getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+                removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+            } else {
+                removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.STATE));
+                removeEdges(getVertexEdge(containerV, Direction.IN, EdgeLabelEnum.LAST_STATE));
+                Map<EdgePropertyEnum, Object> edgeProperties = new HashMap<>();
+                edgeProperties.put(EdgePropertyEnum.STATE, LifecycleStateEnum.NOT_CERTIFIED_CHECKIN.name());
+                janusGraphDao.createEdge(lastStateEdgesOutVertexList.get(0), containerV.getVertex(), EdgeLabelEnum.STATE, edgeProperties);
+
+            }
+
+            status = updateVertexAndCommit(containerV);
+
+        } catch (NullPointerException e) {
+            log.error("Null Pointer Exception occurred - this mean we have zombie vertex, migration task will continue anyway", e);
+            status = StorageOperationStatus.EXEUCTION_FAILED;
+        } catch (Exception e) {
+            //it is happy flow as well
+            log.error("Exception occurred:", e);
+            log.error("Migration task will continue anyway" +
+                    ", please find below vertex details" +
+                    " related to this exception",
+                    e);
+            if (containerV != null) {
+                log.error("containerV.getUniqueId() ---> {}  ", containerV.getUniqueId());
+            }
+
+        } finally {
+            if (status != StorageOperationStatus.OK) {
+                janusGraphDao.rollback();
+                log.info("failed to update vertex ID {} ", containerV.getUniqueId());
+                log.info("Storage Operation Status {}", status.toString());
+            } else {
+                log.info("vertex ID {} successfully updated", containerV.getUniqueId());
+            }
+
+        }
+        return status;
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
index e7d39ff..412926f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportJanusGraphServlet.java
@@ -20,11 +20,11 @@
 
 package org.openecomp.sdc.asdctool.servlets;
 
-import org.janusgraph.core.JanusGraph;
 import org.apache.commons.configuration.BaseConfiguration;
 import org.apache.commons.configuration.Configuration;
 import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
 import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.janusgraph.core.JanusGraph;
 import org.openecomp.sdc.asdctool.Utils;
 import org.openecomp.sdc.common.log.wrappers.Logger;
 
@@ -34,7 +34,13 @@
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.io.*;
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
 import java.util.Map.Entry;
 import java.util.Properties;
 //import com.tinkerpop.blueprints.util.io.graphml.GraphMLWriter;
@@ -146,7 +152,7 @@
 			result = outputFile;
 
 		} catch (Exception e) {
-			log.info("export Graph failed - {}" , e);
+			log.error("export Graph failed - ", e);
 			// graph.rollback();
 			graph.tx().rollback();
 		} finally {
@@ -155,7 +161,7 @@
 					out.close();
 				}
 			} catch (IOException e) {
-				log.info("close FileOutputStream failed - {}" , e);
+				log.error("close FileOutputStream failed - ", e);
 			}
 		}
 		return result;
diff --git a/asdctool/src/main/resources/application-context.xml b/asdctool/src/main/resources/application-context.xml
index c9a13df..252b951 100644
--- a/asdctool/src/main/resources/application-context.xml
+++ b/asdctool/src/main/resources/application-context.xml
@@ -1,11 +1,5 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-       xmlns:util="http://www.springframework.org/schema/util"
        xsi:schemaLocation="
-        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd
-        http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-3.0.xsd">
-
-  
-  <util:properties id="elasticsearchConfig" location="file:${config.home}/elasticsearch.yml" />
-  
+        http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd">
 </beans>
diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml
index cc7a3cf..93bb2de 100644
--- a/asdctool/src/main/resources/config/configuration.yaml
+++ b/asdctool/src/main/resources/config/configuration.yaml
@@ -23,12 +23,13 @@
 
 version: 1.0
 released: 2012-11-30
-toscaConformanceLevel: 8.0
+toscaConformanceLevel: 12.0
 minToscaConformanceLevel: 3.0
 
 # These values are necessary for running upgrade migration 1710.0 process
 enableAutoHealing: false
 appVersion: 1.1.0
+artifactGeneratorConfig: Artifact-Generator.properties
 resourcesForUpgrade:
  8.0:
   - org.openecomp.resource.cp.extCP
@@ -53,8 +54,6 @@
 # The read timeout towards JanusGraph DB when health check is invoked:
 janusGraphHealthCheckReadTimeout: 1
 
-# The interval to try and reconnect to Elasticsearch when it is down during ASDC startup:
-esReconnectIntervalInSeconds: 3
 uebHealthCheckReconnectIntervalInSeconds: 15
 uebHealthCheckReadTimeout: 4
 
@@ -107,30 +106,6 @@
         - { name: sdccomponent, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}       
         - { name: sdcrepository, replicationStrategy: NetworkTopologyStrategy, replicationInfo: ['DC-AIO-Ubuntu1', '1']}
 
-
-#Application-specific settings of ES
-elasticSearch:
-    # Mapping of index prefix to time-based frame. For example, if below is configured:
-    #
-    # - indexPrefix: auditingevents
-    #    creationPeriod: minute
-    #
-    # then ES object of type which is mapped to "auditingevents-*" template, and created on 2015-12-23 13:24:54, will enter "auditingevents-2015-12-23-13-24" index.
-    # Another object created on 2015-12-23 13:25:54, will enter "auditingevents-2015-12-23-13-25" index.
-    # If creationPeriod: month, both of the above will enter "auditingevents-2015-12" index.
-    #
-    # PLEASE NOTE: the timestamps are created in UTC/GMT timezone! This is needed so that timestamps will be correctly presented in Kibana.
-    #
-    # Legal values for creationPeriod - year, month, day, hour, minute, none (meaning no time-based behaviour).
-    #
-    # If no creationPeriod is configured for indexPrefix, default behavour is creationPeriod: month.
-    
-    indicesTimeFrequency:
-      - indexPrefix: auditingevents
-        creationPeriod: month
-      - indexPrefix: monitoring_events
-        creationPeriod: month
-
 artifactTypes:
    - CHEF
    - PUPPET
@@ -176,11 +151,6 @@
 #        - VF
 #        - VL
 deploymentResourceArtifacts:
-  cdsBlueprint:
-      displayName: "CDS Blueprint"
-      type: CONTROLLER_BLUEPRINT_ARCHIVE
-      description: "CDS deployment artifact"
-      fileExtension: "zip"
 #  heat:
 #      displayName: "Base HEAT Template"
 #      type: HEAT
@@ -318,7 +288,10 @@
     isProxy: false
     probeIntervalInSeconds: 15
 
-defaultHeatArtifactTimeoutMinutes: 60
+heatArtifactDeploymentTimeout:
+  defaultMinutes: 30
+  minMinutes: 1
+  maxMinutes: 120
 
 serviceDeploymentArtifacts:
     CONTROLLER_BLUEPRINT_ARCHIVE:
@@ -506,7 +479,6 @@
     ONBOARDED_PACKAGE:
         acceptedTypes:
             - csar
-            - zip
         validForResourceTypes:
             - VF
             - PNF
@@ -614,10 +586,6 @@
 
 resourceInformationalDeployedArtifacts:
 
-requirementsToFulfillBeforeCert:
-
-capabilitiesToConsumeBeforeCert:
-
 unLoggedUrls:
    - /sdc2/rest/healthCheck
 
@@ -701,3 +669,59 @@
   VF : org.openecomp.resource.abstract.nodes.VF
   PNF: org.openecomp.resource.abstract.nodes.PNF
   Service: org.openecomp.resource.abstract.nodes.service
+
+dmaapConsumerConfiguration:
+  hosts: olsd004.wnsnet.attws.com:3905
+  consumerGroup: asdc
+  consumerId: mama #mama - in Order To Consume Remove This String And Replace It With -> mama
+  timeoutMs: 15000
+  limit: 1
+  pollingInterval: 2
+  topic: com.att.sdc.23911-SDCforTestDev-v001
+  latitude: 32.109333
+  longitude: 34.855499
+  version: 1.0
+  serviceName: dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/events
+  environment: TEST
+  partner: BOT_R
+  routeOffer: MR1
+  protocol: https
+  contenttype: application/json
+  dme2TraceOn: true
+  aftEnvironment: AFTUAT
+  aftDme2ConnectionTimeoutMs: 15000
+  aftDme2RoundtripTimeoutMs: 240000
+  aftDme2ReadTimeoutMs: 50000
+  dme2preferredRouterFilePath: DME2preferredRouter.txt
+  timeLimitForNotificationHandleMs: 120000
+  credential:
+      username: m09875@sdc.att.com
+      password: hmXYcznAljMSisdy8zgcag==
+
+dmaapProducerConfiguration:
+    hosts: olsd004.wnsnet.attws.com:3905
+    consumerGroup: asdc
+    consumerId: mama #mama - in Order To Consume Remove This String And Replace It With -> mama
+    timeoutMs: 15000
+    limit: 1
+    pollingInterval: 2
+    topic: com.att.sdc.23911-SDCforTestDev-v001
+    latitude: 32.109333
+    longitude: 34.855499
+    version: 1.0
+    serviceName: dmaap-v1.dev.dmaap.dt.saat.acsi.att.com/events
+    environment: TEST
+    partner: BOT_R
+    routeOffer: MR1
+    protocol: https
+    contenttype: application/json
+    dme2TraceOn: true
+    aftEnvironment: AFTUAT
+    aftDme2ConnectionTimeoutMs: 15000
+    aftDme2RoundtripTimeoutMs: 240000
+    aftDme2ReadTimeoutMs: 50000
+    dme2preferredRouterFilePath: DME2preferredRouter.txt
+    timeLimitForNotificationHandleMs: 120000
+    credential:
+        username: m09875@sdc.att.com
+        password: hmXYcznAljMSisdy8zgcag==
\ No newline at end of file
diff --git a/asdctool/src/main/resources/config/dataTypes.yml b/asdctool/src/main/resources/config/dataTypes.yml
index d768bff..43c7f0c 100644
--- a/asdctool/src/main/resources/config/dataTypes.yml
+++ b/asdctool/src/main/resources/config/dataTypes.yml
@@ -114,12 +114,12 @@
       type: string
       description: MAC address
       required: false
-      status: supported
+      status: SUPPORTED
     ip_address:
       type: string
       description: IP address
       required: false
-      status: supported
+      status: SUPPORTED
 org.openecomp.datatypes.heat.network.subnet.HostRoute:
   derived_from: tosca.datatypes.Root
   description: Host route info for the subnet
@@ -128,12 +128,12 @@
       type: string
       description: The destination for static route
       required: false
-      status: supported
+      status: SUPPORTED
     nexthop:
       type: string
       description: The next hop for the destination
       required: false
-      status: supported
+      status: SUPPORTED
       
 org.openecomp.datatypes.heat.network.AllocationPool:
   derived_from: tosca.datatypes.Root
@@ -143,12 +143,12 @@
       type: string
       description: Start address for the allocation pool
       required: false
-      status: supported
+      status: SUPPORTED
     end:
       type: string
       description: End address for the allocation pool
       required: false
-      status: supported
+      status: SUPPORTED
 
 org.openecomp.datatypes.heat.network.neutron.Subnet:
   derived_from: tosca.datatypes.Root
@@ -158,18 +158,18 @@
       type: string
       description: The ID of the tenant who owns the network
       required: false
-      status: supported
+      status: SUPPORTED
     enable_dhcp:
       type: boolean
       description: Set to true if DHCP is enabled and false if DHCP is disabled
       required: false
       default: true
-      status: supported
+      status: SUPPORTED
     ipv6_address_mode:
       type: string
       description: IPv6 address mode
       required: false
-      status: supported
+      status: SUPPORTED
       constraints:
       - valid_values:
         - dhcpv6-stateful
@@ -179,7 +179,7 @@
       type: string
       description: IPv6 RA (Router Advertisement) mode
       required: false
-      status: supported
+      status: SUPPORTED
       constraints:
       - valid_values:
         - dhcpv6-stateful
@@ -191,35 +191,35 @@
       required: false
       default: {
         }
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: string
     allocation_pools:
        type: list
        description: The start and end addresses for the allocation pools
        required: false
-       status: supported
+       status: SUPPORTED
        entry_schema:
          type: org.openecomp.datatypes.heat.network.AllocationPool
     subnetpool:
       type: string
       description: The name or ID of the subnet pool
       required: false
-      status: supported
+      status: SUPPORTED
     dns_nameservers:
       type: list
       description: A specified set of DNS name servers to be used
       required: false
       default: [
         ]
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: string     
     host_routes:
       type: list
       description: The gateway IP address
       required: false
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: org.openecomp.datatypes.heat.network.subnet.HostRoute
     ip_version:
@@ -227,7 +227,7 @@
       description: The gateway IP address
       required: false
       default: 4
-      status: supported
+      status: SUPPORTED
       constraints:
       - valid_values:
         - '4'
@@ -236,24 +236,24 @@
       type: string
       description: The name of the subnet
       required: false
-      status: supported
+      status: SUPPORTED
     prefixlen:
       type: integer
       description: Prefix length for subnet allocation from subnet pool
       required: false
-      status: supported
+      status: SUPPORTED
       constraints:
       - greater_or_equal: 0
     cidr:
       type: string
       description: The CIDR
       required: false
-      status: supported
+      status: SUPPORTED
     gateway_ip:
       type: string
       description: The gateway IP address
       required: false
-      status: supported
+      status: SUPPORTED
 
 org.openecomp.datatypes.heat.novaServer.network.PortExtraProperties:
   derived_from: tosca.datatypes.Root
@@ -263,35 +263,35 @@
       type: boolean
       description: Flag to enable/disable port security on the port
       required: false
-      status: supported
+      status: SUPPORTED
     mac_address:
       type: string
       description: MAC address to give to this port
       required: false
-      status: supported
+      status: SUPPORTED
     admin_state_up:
       type: boolean
       description: The administrative state of this port
       required: false
       default: true
-      status: supported
+      status: SUPPORTED
     qos_policy:
       type: string
       description: The name or ID of QoS policy to attach to this port
       required: false
-      status: supported
+      status: SUPPORTED
     allowed_address_pairs:
       type: list
       description: Additional MAC/IP address pairs allowed to pass through the port
       required: false
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: org.openecomp.datatypes.heat.network.AddressPair
     binding:vnic_type:
       type: string
       description: The vnic type to be bound on the neutron port
       required: false
-      status: supported
+      status: SUPPORTED
       constraints:
       - valid_values:
         - macvtap
@@ -303,7 +303,7 @@
       required: false
       default: {
         }
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: string
 org.openecomp.datatypes.heat.novaServer.network.AddressInfo:
@@ -314,7 +314,7 @@
       type: string
       description: Port id
       required: false
-      status: supported
+      status: SUPPORTED
 org.openecomp.datatypes.heat.neutron.port.FixedIps:
   derived_from: tosca.datatypes.Root
   description: subnet/ip_address
@@ -323,12 +323,12 @@
       type: string
       description: Subnet in which to allocate the IP address for this port
       required: false
-      status: supported
+      status: SUPPORTED
     ip_address:
       type: string
       description: IP address desired in the subnet for this port
       required: false
-      status: supported
+      status: SUPPORTED
 org.openecomp.datatypes.heat.FileInfo:
   derived_from: tosca.datatypes.Root
   description: Heat File Info
@@ -337,12 +337,12 @@
       type: string
       description: The required URI string (relative or absolute) which can be used to locate the file
       required: true
-      status: supported
+      status: SUPPORTED
     file_type:
       type: string
       description: The type of the file
       required: true
-      status: supported
+      status: SUPPORTED
       constraints:
       - valid_values:
         - base
@@ -357,12 +357,12 @@
         type: string
         description: Start port
         required: false
-        status: supported
+        status: SUPPORTED
       end_port:
         type: string
         description: End port
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrail.network.rule.Rule:
     derived_from: tosca.datatypes.Root
     description: policy rule
@@ -371,45 +371,45 @@
         type: list
         description: Source ports
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
       protocol:
         type: string
         description: Protocol
         required: false
-        status: supported
+        status: SUPPORTED
       dst_addresses:
         type: list
         description: Destination addresses
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
       apply_service:
         type: string
         description: Service to apply
         required: false
-        status: supported
+        status: SUPPORTED
       dst_ports:
         type: list
         description: Destination ports
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
       src_addresses:
         type: list
         description: Source addresses
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
       direction:
         type: string
         description: Direction
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrail.network.rule.RuleList:
     derived_from: tosca.datatypes.Root
     description: list of policy rules
@@ -418,7 +418,7 @@
         type: list
         description: Contrail network rule
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrail.network.rule.Rule
 org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork:
@@ -429,7 +429,7 @@
         type: string
         description: Virtual network
         required: false
-        status: supported
+        status: SUPPORTED
 
 org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
     derived_from: tosca.datatypes.Root
@@ -439,12 +439,12 @@
         type: string
         description: The remote group ID to be associated with this security group rule
         required: false
-        status: supported
+        status: SUPPORTED
       protocol:
         type: string
         description: The protocol that is matched by the security group rule
         required: false
-        status: supported
+        status: SUPPORTED
         constraints:
         - valid_values:
           - tcp
@@ -455,7 +455,7 @@
         description: Ethertype of the traffic
         required: false
         default: IPv4
-        status: supported
+        status: SUPPORTED
         constraints:
         - valid_values:
           - IPv4
@@ -465,7 +465,7 @@
         description: 'The maximum port number in the range that is matched by the
           security group rule. '
         required: false
-        status: supported
+        status: SUPPORTED
         constraints:
         - in_range:
           - 0
@@ -474,13 +474,13 @@
         type: string
         description: The remote IP prefix (CIDR) to be associated with this security group rule
         required: false
-        status: supported
+        status: SUPPORTED
       remote_mode:
         type: string
         description: Whether to specify a remote group or a remote IP prefix
         required: false
         default: remote_ip_prefix
-        status: supported
+        status: SUPPORTED
         constraints:
         - valid_values:
           - remote_ip_prefix
@@ -490,7 +490,7 @@
         description: The direction in which the security group rule is applied
         required: false
         default: ingress
-        status: supported
+        status: SUPPORTED
         constraints:
         - valid_values:
           - egress
@@ -499,7 +499,7 @@
         type: integer
         description: The minimum port number in the range that is matched by the security group rule.
         required: false
-        status: supported
+        status: SUPPORTED
         constraints:
         - in_range:
           - 0
@@ -512,13 +512,13 @@
         type: string
         description: Substitute Service Template
         required: true
-        status: supported
+        status: SUPPORTED
       index_value:
         type: integer
         description: Index value of the substitution service template runtime instance
         required: false
         default: 0
-        status: supported
+        status: SUPPORTED
         constraints:
         - greater_or_equal: 0
       count:
@@ -526,19 +526,19 @@
         description: Count
         required: false
         default: 1
-        status: supported
+        status: SUPPORTED
       scaling_enabled:
         type: boolean
         description: Indicates whether service scaling is enabled
         required: false
         default: true
-        status: supported
+        status: SUPPORTED
       mandatory:
         type: boolean
         description: Mandatory
         required: false
         default: true
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence:
     derived_from: tosca.datatypes.Root
     description: network policy refs data sequence
@@ -547,12 +547,12 @@
         type: integer
         description: Network Policy ref data sequence Major
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_refs_data_sequence_minor:
         type: integer
         description: Network Policy ref data sequence Minor
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefData:
     derived_from: tosca.datatypes.Root
     description: network policy refs data
@@ -561,7 +561,7 @@
         type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence
         description: Network Policy ref data sequence
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet:
     derived_from: tosca.datatypes.Root
     description: Network Ipam Ref Data Subnet
@@ -570,12 +570,12 @@
         type: string
         description: Network ipam refs data ipam subnets ip prefix len
         required: false
-        status: supported
+        status: SUPPORTED
       network_ipam_refs_data_ipam_subnets_subnet_ip_prefix:
         type: string
         description: Network ipam refs data ipam subnets ip prefix
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList:
     derived_from: tosca.datatypes.Root
     description: Network Ipam Ref Data Subnet List
@@ -584,12 +584,12 @@
         type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet
         description: Network ipam refs data ipam subnets
         required: false
-        status: supported
+        status: SUPPORTED
       network_ipam_refs_data_ipam_subnets_addr_from_start:
         type: string
         description: Network ipam refs data ipam subnets addr from start
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.IpamRefData:
     derived_from: tosca.datatypes.Root
     description: Network Ipam Ref Data
@@ -598,7 +598,7 @@
         type: list
         description: Network ipam refs data ipam subnets
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList
 org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork:
@@ -609,7 +609,7 @@
         type: string
         description: Source addresses Virtual network
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork:
     derived_from: tosca.datatypes.Root
     description: destination addresses
@@ -618,7 +618,7 @@
         type: string
         description: Destination addresses Virtual network
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs:
     derived_from: tosca.datatypes.Root
     description: destination port pairs
@@ -627,12 +627,12 @@
         type: string
         description: Start port
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_dst_ports_end_port:
         type: string
         description: End port
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs:
     derived_from: tosca.datatypes.Root
     description: source port pairs
@@ -641,12 +641,12 @@
         type: string
         description: Start port
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_src_ports_end_port:
         type: string
         description: End port
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
     derived_from: tosca.datatypes.Root
     description: Action List
@@ -655,12 +655,12 @@
         type: string
         description: Simple Action
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_action_list_apply_service:
         type: list
         description: Apply Service
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: string
 org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
@@ -671,12 +671,12 @@
         type: string
         description: Simple Action
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_action_list_apply_service:
         type: list
         description: Apply Service
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: string
 org.openecomp.datatypes.heat.contrailV2.network.rule.Rule:
@@ -687,45 +687,45 @@
         type: list
         description: Destination addresses
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork
       network_policy_entries_policy_rule_dst_ports:
         type: list
         description: Destination ports
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs
       network_policy_entries_policy_rule_protocol:
         type: string
         description: Protocol
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_src_addresses:
         type: list
         description: Source addresses
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork
       network_policy_entries_policy_rule_direction:
         type: string
         description: Direction
         required: false
-        status: supported
+        status: SUPPORTED
       network_policy_entries_policy_rule_src_ports:
         type: list
         description: Source ports
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs
       network_policy_entries_policy_rule_action_list:
         type: org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList
         description: Action list
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.network.rule.RuleList:
     derived_from: tosca.datatypes.Root
     description: list of policy rules
@@ -734,7 +734,7 @@
         type: list
         description: Contrail network rule
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.contrailV2.network.rule.Rule
 org.openecomp.datatypes.heat.network.contrail.port.StaticRoute:
@@ -745,17 +745,17 @@
         type: string
         description: Route prefix
         required: false
-        status: supported
+        status: SUPPORTED
       next_hop:
         type: string
         description: Next hop
         required: false
-        status: supported
+        status: SUPPORTED
       next_hop_type:
         type: string
         description: Next hop type
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.network.contrail.AddressPair:
     derived_from: tosca.datatypes.Root
     description: Address Pair
@@ -764,7 +764,7 @@
         type: string
         description: Address mode active-active or active-standy
         required: false
-        status: supported
+        status: SUPPORTED
         constraints:
         - valid_values:
           - active-active
@@ -773,12 +773,12 @@
         type: string
         description: IP address prefix
         required: false
-        status: supported
+        status: SUPPORTED
       mac_address:
         type: string
         description: Mac address
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.network.contrail.InterfaceData:
     derived_from: tosca.datatypes.Root
     description: Interface Data
@@ -787,26 +787,26 @@
         type: list
         description: An ordered list of static routes to be added to this interface
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.network.contrail.port.StaticRoute
       virtual_network:
         type: string
         description: Virtual Network for this interface
         required: true
-        status: supported
+        status: SUPPORTED
       allowed_address_pairs:
         type: list
         description: List of allowed address pair for this interface
         required: false
-        status: supported
+        status: SUPPORTED
         entry_schema:
           type: org.openecomp.datatypes.heat.network.contrail.AddressPair
       ip_address:
         type: string
         description: IP for this interface
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.heat.contrailV2.virtual.machine.interface.Properties:
     derived_from: tosca.datatypes.Root
     description: Virtual Machine Interface Properties.
@@ -815,7 +815,7 @@
         type: string
         description: Service Interface Type.
         required: false
-        status: supported
+        status: SUPPORTED
 org.openecomp.datatypes.Root:
   derived_from: tosca.datatypes.Root
   description: >
@@ -1061,12 +1061,12 @@
       type: string
       description: IP Prefix.
       required: false
-      status: supported
+      status: SUPPORTED
     ip_prefix_len:
       type: integer
       description: IP Prefix Len.
       required: false
-      status: supported
+      status: SUPPORTED
 
 org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.MacAddress:
   derived_from: tosca.datatypes.Root
@@ -1076,7 +1076,7 @@
       type: list
       description: Mac Addresses List.
       required: false
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: string
 
@@ -1088,7 +1088,7 @@
       type: string
       description: Sub Interface VLAN Tag.
       required: false
-      status: supported
+      status: SUPPORTED
 
 org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair:
   derived_from: tosca.datatypes.Root
@@ -1098,17 +1098,17 @@
       type: string
       description: Address Mode.
       required: false
-      status: supported
+      status: SUPPORTED
     ip:
       type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairIp
       description: IP.
       required: false
-      status: supported
+      status: SUPPORTED
     mac:
       type: string
       description: Mac.
       required: false
-      status: supported
+      status: SUPPORTED
 
 org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairs:
   derived_from: tosca.datatypes.Root
@@ -1118,6 +1118,6 @@
       type: list
       description: Addresses pair List.
       required: false
-      status: supported
+      status: SUPPORTED
       entry_schema:
         type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair
\ No newline at end of file
diff --git a/asdctool/src/main/resources/config/elasticsearch.yml b/asdctool/src/main/resources/config/elasticsearch.yml
deleted file mode 100644
index 38482e2..0000000
--- a/asdctool/src/main/resources/config/elasticsearch.yml
+++ /dev/null
@@ -1,393 +0,0 @@
-
-elasticSearch.local: true
-elasticSearch.transportclient: false
-cluster.name: elasticsearch
-  
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.ping.unicast.enabled: true
-discovery.zen.ping.unicast.hosts: elasticsearch_host
-transport.client.initial_nodes:
-   - elasticsearch_host:9300
-
-http.cors.enabled: true
-
-#plugin.types: "DeleteByQueryPlugin"
-
-##################### Elasticsearch Configuration Example #####################
-
-# This file contains an overview of various configuration settings,
-# targeted at operations staff. Application developers should
-# consult the guide at <http://elasticsearch.org/guide>.
-#
-# The installation procedure is covered at
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html>.
-#
-# Elasticsearch comes with reasonable defaults for most settings,
-# so you can try it out without bothering with configuration.
-#
-# Most of the time, these defaults are just fine for running a production
-# cluster. If you're fine-tuning your cluster, or wondering about the
-# effect of certain configuration option, please _do ask_ on the
-# mailing list or IRC channel [http://elasticsearch.org/community].
-
-# Any element in the configuration can be replaced with environment variables
-# by placing them in ${...} notation. For example:
-#
-# node.rack: ${RACK_ENV_VAR}
-
-# For information on supported formats and syntax for the config file, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup-configuration.html>
-
-
-################################### Cluster ###################################
-
-# Cluster name identifies your cluster for auto-discovery. If you're running
-# multiple clusters on the same network, make sure you're using unique names.
-#
-# cluster.name: elasticsearch
-
-
-#################################### Node #####################################
-
-# Node names are generated dynamically on startup, so you're relieved
-# from configuring them manually. You can tie this node to a specific name:
-#
-# node.name: "Franz Kafka"
-
-# Every node can be configured to allow or deny being eligible as the master,
-# and to allow or deny to store the data.
-#
-# Allow this node to be eligible as a master node (enabled by default):
-#
-# node.master: true
-#
-# Allow this node to store data (enabled by default):
-#
-# node.data: true
-
-# You can exploit these settings to design advanced cluster topologies.
-#
-# 1. You want this node to never become a master node, only to hold data.
-#    This will be the "workhorse" of your cluster.
-#
-# node.master: false
-# node.data: true
-#
-# 2. You want this node to only serve as a master: to not store any data and
-#    to have free resources. This will be the "coordinator" of your cluster.
-#
-# node.master: true
-# node.data: false
-#
-# 3. You want this node to be neither master nor data node, but
-#    to act as a "search load balancer" (fetching data from nodes,
-#    aggregating results, etc.)
-#
-# node.master: false
-# node.data: false
-
-# Use the Cluster Health API [http://localhost:9200/_cluster/health], the
-# Node Info API [http://localhost:9200/_nodes] or GUI tools
-# such as <http://www.elasticsearch.org/overview/marvel/>,
-# <http://github.com/karmi/elasticsearch-paramedic>,
-# <http://github.com/lukas-vlcek/bigdesk> and
-# <http://mobz.github.com/elasticsearch-head> to inspect the cluster state.
-
-# A node can have generic attributes associated with it, which can later be used
-# for customized shard allocation filtering, or allocation awareness. An attribute
-# is a simple key value pair, similar to node.key: value, here is an example:
-#
-# node.rack: rack314
-
-# By default, multiple nodes are allowed to start from the same installation location
-# to disable it, set the following:
-# node.max_local_storage_nodes: 1
-
-
-#################################### Index ####################################
-
-# You can set a number of options (such as shard/replica options, mapping
-# or analyzer definitions, translog settings, ...) for indices globally,
-# in this file.
-#
-# Note, that it makes more sense to configure index settings specifically for
-# a certain index, either when creating it or by using the index templates API.
-#
-# See <http://elasticsearch.org/guide/en/elasticsearch/reference/current/index-modules.html> and
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-# for more information.
-
-# Set the number of shards (splits) of an index (5 by default):
-#
-# index.number_of_shards: 5
-
-# Set the number of replicas (additional copies) of an index (1 by default):
-#
-# index.number_of_replicas: 1
-
-# Note, that for development on a local machine, with small indices, it usually
-# makes sense to "disable" the distributed features:
-#
-index.number_of_shards: 1
-index.number_of_replicas: 0
-
-# These settings directly affect the performance of index and search operations
-# in your cluster. Assuming you have enough machines to hold shards and
-# replicas, the rule of thumb is:
-#
-# 1. Having more *shards* enhances the _indexing_ performance and allows to
-#    _distribute_ a big index across machines.
-# 2. Having more *replicas* enhances the _search_ performance and improves the
-#    cluster _availability_.
-#
-# The "number_of_shards" is a one-time setting for an index.
-#
-# The "number_of_replicas" can be increased or decreased anytime,
-# by using the Index Update Settings API.
-#
-# Elasticsearch takes care about load balancing, relocating, gathering the
-# results from nodes, etc. Experiment with different settings to fine-tune
-# your setup.
-
-# Use the Index Status API (<http://localhost:9200/A/_status>) to inspect
-# the index status.
-
-
-#################################### Paths ####################################
-path.home: /src/test/resources
-# Path to directory containing configuration (this file and logging.yml):
-#
-path.conf: /src/test/resources
-
-# Path to directory where to store index data allocated for this node.
-#
-path.data: target/esdata
-#
-# Can optionally include more than one location, causing data to be striped across
-# the locations (a la RAID 0) on a file level, favouring locations with most free
-# space on creation. For example:
-#
-# path.data: /path/to/data1,/path/to/data2
-
-# Path to temporary files:
-#
-path.work: /target/eswork
-
-# Path to log files:
-#
-path.logs: /target/eslogs
-
-# Path to where plugins are installed:
-#
-# path.plugins: /path/to/plugins
-
-
-#################################### Plugin ###################################
-
-# If a plugin listed here is not installed for current node, the node will not start.
-#
-# plugin.mandatory: mapper-attachments,lang-groovy
-
-
-################################### Memory ####################################
-
-# Elasticsearch performs poorly when JVM starts swapping: you should ensure that
-# it _never_ swaps.
-#
-# Set this property to true to lock the memory:
-#
-# bootstrap.mlockall: true
-
-# Make sure that the ES_MIN_MEM and ES_MAX_MEM environment variables are set
-# to the same value, and that the machine has enough memory to allocate
-# for Elasticsearch, leaving enough memory for the operating system itself.
-#
-# You should also make sure that the Elasticsearch process is allowed to lock
-# the memory, eg. by using `ulimit -l unlimited`.
-
-
-############################## Network And HTTP ###############################
-
-# Elasticsearch, by default, binds itself to the 0.0.0.0 address, and listens
-# on port [9200-9300] for HTTP traffic and on port [9300-9400] for node-to-node
-# communication. (the range means that if the port is busy, it will automatically
-# try the next port).
-
-# Set the bind address specifically (IPv4 or IPv6):
-#
-# network.bind_host: 192.168.0.1
-
-# Set the address other nodes will use to communicate with this node. If not
-# set, it is automatically derived. It must point to an actual IP address.
-#
-# network.publish_host: 192.168.0.1
-
-# Set both 'bind_host' and 'publish_host':
-#
-# network.host: 192.168.0.1
-
-# Set a custom port for the node to node communication (9300 by default):
-#
-# transport.tcp.port: 9300
-
-# Enable compression for all communication between nodes (disabled by default):
-#
-# transport.tcp.compress: true
-
-# Set a custom port to listen for HTTP traffic:
-#
-# http.port: 9200
-
-# Set a custom allowed content length:
-#
-# http.max_content_length: 100mb
-
-# Disable HTTP completely:
-#
-# http.enabled: false
-
-
-################################### Gateway ###################################
-
-# The gateway allows for persisting the cluster state between full cluster
-# restarts. Every change to the state (such as adding an index) will be stored
-# in the gateway, and when the cluster starts up for the first time,
-# it will read its state from the gateway.
-
-# There are several types of gateway implementations. For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-gateway.html>.
-
-# The default gateway type is the "local" gateway (recommended):
-#
-# gateway.type: local
-
-# Settings below control how and when to start the initial recovery process on
-# a full cluster restart (to reuse as much local data as possible when using shared
-# gateway).
-
-# Allow recovery process after N nodes in a cluster are up:
-#
-gateway.recover_after_nodes: 1
-
-# Set the timeout to initiate the recovery process, once the N nodes
-# from previous setting are up (accepts time value):
-#
-# gateway.recover_after_time: 5m
-
-# Set how many nodes are expected in this cluster. Once these N nodes
-# are up (and recover_after_nodes is met), begin recovery process immediately
-# (without waiting for recover_after_time to expire):
-#
-gateway.expected_nodes: 1
-
-
-############################# Recovery Throttling #############################
-
-# These settings allow to control the process of shards allocation between
-# nodes during initial recovery, replica allocation, rebalancing,
-# or when adding and removing nodes.
-
-# Set the number of concurrent recoveries happening on a node:
-#
-# 1. During the initial recovery
-#
-# cluster.routing.allocation.node_initial_primaries_recoveries: 4
-#
-# 2. During adding/removing nodes, rebalancing, etc
-#
-# cluster.routing.allocation.node_concurrent_recoveries: 2
-
-# Set to throttle throughput when recovering (eg. 100mb, by default 20mb):
-#
-# indices.recovery.max_bytes_per_sec: 20mb
-
-# Set to limit the number of open concurrent streams when
-# recovering a shard from a peer:
-#
-# indices.recovery.concurrent_streams: 5
-
-
-################################## Discovery ##################################
-
-# Discovery infrastructure ensures nodes can be found within a cluster
-# and master node is elected. Multicast discovery is the default.
-
-# Set to ensure a node sees N other master eligible nodes to be considered
-# operational within the cluster. Its recommended to set it to a higher value
-# than 1 when running more than 2 nodes in the cluster.
-#
-# discovery.zen.minimum_master_nodes: 1
-
-# Set the time to wait for ping responses from other nodes when discovering.
-# Set this option to a higher value on a slow or congested network
-# to minimize discovery failures:
-#
-# discovery.zen.ping.timeout: 3s
-
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-zen.html>
-
-# Unicast discovery allows to explicitly control which nodes will be used
-# to discover the cluster. It can be used when multicast is not present,
-# or to restrict the cluster communication-wise.
-#
-# 1. Disable multicast discovery (enabled by default):
-#
-# discovery.zen.ping.multicast.enabled: false
-#
-# 2. Configure an initial list of master nodes in the cluster
-#    to perform discovery when new nodes (master or data) are started:
-#
-# discovery.zen.ping.unicast.hosts: ["host1", "host2:port"]
-
-# EC2 discovery allows to use AWS EC2 API in order to perform discovery.
-#
-# You have to install the cloud-aws plugin for enabling the EC2 discovery.
-#
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-ec2.html>
-#
-# See <http://elasticsearch.org/tutorials/elasticsearch-on-ec2/>
-# for a step-by-step tutorial.
-
-# GCE discovery allows to use Google Compute Engine API in order to perform discovery.
-#
-# You have to install the cloud-gce plugin for enabling the GCE discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-gce>.
-
-# Azure discovery allows to use Azure API in order to perform discovery.
-#
-# You have to install the cloud-azure plugin for enabling the Azure discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-azure>.
-
-################################## Slow Log ##################################
-
-# Shard level query and fetch threshold logging.
-
-#index.search.slowlog.threshold.query.warn: 10s
-#index.search.slowlog.threshold.query.info: 5s
-#index.search.slowlog.threshold.query.debug: 2s
-#index.search.slowlog.threshold.query.trace: 500ms
-
-#index.search.slowlog.threshold.fetch.warn: 1s
-#index.search.slowlog.threshold.fetch.info: 800ms
-#index.search.slowlog.threshold.fetch.debug: 500ms
-#index.search.slowlog.threshold.fetch.trace: 200ms
-
-#index.indexing.slowlog.threshold.index.warn: 10s
-#index.indexing.slowlog.threshold.index.info: 5s
-#index.indexing.slowlog.threshold.index.debug: 2s
-#index.indexing.slowlog.threshold.index.trace: 500ms
-
-################################## GC Logging ################################
-
-#monitor.jvm.gc.young.warn: 1000ms
-#monitor.jvm.gc.young.info: 700ms
-#monitor.jvm.gc.young.debug: 400ms
-
-#monitor.jvm.gc.old.warn: 10s
-#monitor.jvm.gc.old.info: 5s
-#monitor.jvm.gc.old.debug: 2s
-
diff --git a/asdctool/src/main/resources/config/groupTypes.yml b/asdctool/src/main/resources/config/groupTypes.yml
index 0c0abc9..ce457e4 100644
--- a/asdctool/src/main/resources/config/groupTypes.yml
+++ b/asdctool/src/main/resources/config/groupTypes.yml
@@ -6,12 +6,12 @@
       type: string
       description: Heat file which associate to this group/heat stack
       required: true
-      status: supported
+      status: SUPPORTED
     description:
       type: string
       description: group description
       required: true
-      status: supported
+      status: SUPPORTED      
 org.openecomp.groups.VfModule:
   derived_from: tosca.groups.Root
   description: Grouped all heat resources which are in the same VF Module
@@ -21,7 +21,7 @@
       description: Whether this module should be deployed before other modules
       required: true
       default: false
-      status: supported
+      status: SUPPORTED
     vf_module_label: 
       type: string
       required: true
diff --git a/asdctool/src/main/resources/config/janusgraph.properties b/asdctool/src/main/resources/config/janusgraph.properties
index 5f22a08..3e88b0d 100644
--- a/asdctool/src/main/resources/config/janusgraph.properties
+++ b/asdctool/src/main/resources/config/janusgraph.properties
@@ -7,14 +7,14 @@
 storage.cassandra.keyspace=sdctitan
 
 storage.cassandra.ssl.enabled=true
-storage.cassandra.ssl.truststore.location=C:\\gitWork\\vagrant-sdc-all-in-one\\mytmp.trust
+storage.cassandra.ssl.truststore.location=/var/lib/jetty/etc/truststore
 storage.cassandra.ssl.truststore.password=Aa123456
 
 storage.cassandra.read-consistency-level=LOCAL_QUORUM
 storage.cassandra.write-consistency-level=LOCAL_QUORUM
 storage.cassandra.replication-strategy-class=org.apache.cassandra.locator.NetworkTopologyStrategy
-storage.cassandra.replication-strategy-options=DC-sdc-iltlv633,1
-storage.cassandra.astyanax.local-datacenter=DC-sdc-iltlv633
+storage.cassandra.replication-strategy-options=DC-ILTLV2083,1
+storage.cassandra.astyanax.local-datacenter=DC-ILTLV2083
 
 
 cache.db-cache = false
diff --git a/asdctool/src/main/resources/config/tmp.trust b/asdctool/src/main/resources/config/tmp.trust
new file mode 100644
index 0000000..f74b8f5
--- /dev/null
+++ b/asdctool/src/main/resources/config/tmp.trust
Binary files differ
diff --git a/asdctool/src/main/resources/elasticsearch.yml b/asdctool/src/main/resources/elasticsearch.yml
deleted file mode 100644
index 71ccdbb..0000000
--- a/asdctool/src/main/resources/elasticsearch.yml
+++ /dev/null
@@ -1,399 +0,0 @@
-
-cluster.name: elasticsearch
-
-discovery.zen.ping.multicast.enabled: false
-discovery.zen.ping.unicast.enabled: true
-discovery.zen.ping.unicast.hosts: elasticsearch_host
-
-http.cors.enabled: true
-
-path.home: "/home/vagrant/catalog-be/config"
-
-elasticSearch.transportclient: true
-
-transport.client.initial_nodes:
-   - elasticsearch_host:9300
-
-#shield.user: asdc:Aa12345
-#shield.ssl.keystore.path: "/vagrant/install/resources/catalog-be/keystore/es-client.jks"
-#shield.ssl.keystore.password: Aa123456
-#shield.transport.ssl: true
-
-##################### Elasticsearch Configuration Example #####################
-
-# This file contains an overview of various configuration settings,
-# targeted at operations staff. Application developers should
-# consult the guide at <http://elasticsearch.org/guide>.
-#
-# The installation procedure is covered at
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html>.
-#
-# Elasticsearch comes with reasonable defaults for most settings,
-# so you can try it out without bothering with configuration.
-#
-# Most of the time, these defaults are just fine for running a production
-# cluster. If you're fine-tuning your cluster, or wondering about the
-# effect of certain configuration option, please _do ask_ on the
-# mailing list or IRC channel [http://elasticsearch.org/community].
-
-# Any element in the configuration can be replaced with environment variables
-# by placing them in ${...} notation. For example:
-#
-# node.rack: ${RACK_ENV_VAR}
-
-# For information on supported formats and syntax for the config file, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup-configuration.html>
-
-
-################################### Cluster ###################################
-
-# Cluster name identifies your cluster for auto-discovery. If you're running
-# multiple clusters on the same network, make sure you're using unique names.
-#
-# cluster.name: elasticsearch
-
-
-#################################### Node #####################################
-
-# Node names are generated dynamically on startup, so you're relieved
-# from configuring them manually. You can tie this node to a specific name:
-#
-# node.name: "Franz Kafka"
-
-# Every node can be configured to allow or deny being eligible as the master,
-# and to allow or deny to store the data.
-#
-# Allow this node to be eligible as a master node (enabled by default):
-#
-# node.master: true
-#
-# Allow this node to store data (enabled by default):
-#
-# node.data: true
-
-# You can exploit these settings to design advanced cluster topologies.
-#
-# 1. You want this node to never become a master node, only to hold data.
-#    This will be the "workhorse" of your cluster.
-#
-# node.master: false
-# node.data: true
-#
-# 2. You want this node to only serve as a master: to not store any data and
-#    to have free resources. This will be the "coordinator" of your cluster.
-#
-# node.master: true
-# node.data: false
-#
-# 3. You want this node to be neither master nor data node, but
-#    to act as a "search load balancer" (fetching data from nodes,
-#    aggregating results, etc.)
-#
-# node.master: false
-# node.data: false
-
-# Use the Cluster Health API [http://localhost:9200/_cluster/health], the
-# Node Info API [http://localhost:9200/_nodes] or GUI tools
-# such as <http://www.elasticsearch.org/overview/marvel/>,
-# <http://github.com/karmi/elasticsearch-paramedic>,
-# <http://github.com/lukas-vlcek/bigdesk> and
-# <http://mobz.github.com/elasticsearch-head> to inspect the cluster state.
-
-# A node can have generic attributes associated with it, which can later be used
-# for customized shard allocation filtering, or allocation awareness. An attribute
-# is a simple key value pair, similar to node.key: value, here is an example:
-#
-# node.rack: rack314
-
-# By default, multiple nodes are allowed to start from the same installation location
-# to disable it, set the following:
-# node.max_local_storage_nodes: 1
-
-
-#################################### Index ####################################
-
-# You can set a number of options (such as shard/replica options, mapping
-# or analyzer definitions, translog settings, ...) for indices globally,
-# in this file.
-#
-# Note, that it makes more sense to configure index settings specifically for
-# a certain index, either when creating it or by using the index templates API.
-#
-# See <http://elasticsearch.org/guide/en/elasticsearch/reference/current/index-modules.html> and
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/indices-create-index.html>
-# for more information.
-
-# Set the number of shards (splits) of an index (5 by default):
-#
-# index.number_of_shards: 5
-
-# Set the number of replicas (additional copies) of an index (1 by default):
-#
-# index.number_of_replicas: 1
-
-# Note, that for development on a local machine, with small indices, it usually
-# makes sense to "disable" the distributed features:
-#
-index.number_of_shards: 1
-index.number_of_replicas: 0
-
-# These settings directly affect the performance of index and search operations
-# in your cluster. Assuming you have enough machines to hold shards and
-# replicas, the rule of thumb is:
-#
-# 1. Having more *shards* enhances the _indexing_ performance and allows to
-#    _distribute_ a big index across machines.
-# 2. Having more *replicas* enhances the _search_ performance and improves the
-#    cluster _availability_.
-#
-# The "number_of_shards" is a one-time setting for an index.
-#
-# The "number_of_replicas" can be increased or decreased anytime,
-# by using the Index Update Settings API.
-#
-# Elasticsearch takes care about load balancing, relocating, gathering the
-# results from nodes, etc. Experiment with different settings to fine-tune
-# your setup.
-
-# Use the Index Status API (<http://localhost:9200/A/_status>) to inspect
-# the index status.
-
-
-#################################### Paths ####################################
-
-# Path to directory containing configuration (this file and logging.yml):
-#
-path.conf: /src/test/resources
-
-# Path to directory where to store index data allocated for this node.
-#
-path.data: target/esdata
-#
-# Can optionally include more than one location, causing data to be striped across
-# the locations (a la RAID 0) on a file level, favouring locations with most free
-# space on creation. For example:
-#
-# path.data: /path/to/data1,/path/to/data2
-
-# Path to temporary files:
-#
-path.work: /target/eswork
-
-# Path to log files:
-#
-path.logs: /target/eslogs
-
-# Path to where plugins are installed:
-#
-# path.plugins: /path/to/plugins
-
-
-#################################### Plugin ###################################
-
-# If a plugin listed here is not installed for current node, the node will not start.
-#
-# plugin.mandatory: mapper-attachments,lang-groovy
-
-
-################################### Memory ####################################
-
-# Elasticsearch performs poorly when JVM starts swapping: you should ensure that
-# it _never_ swaps.
-#
-# Set this property to true to lock the memory:
-#
-# bootstrap.mlockall: true
-
-# Make sure that the ES_MIN_MEM and ES_MAX_MEM environment variables are set
-# to the same value, and that the machine has enough memory to allocate
-# for Elasticsearch, leaving enough memory for the operating system itself.
-#
-# You should also make sure that the Elasticsearch process is allowed to lock
-# the memory, eg. by using `ulimit -l unlimited`.
-
-
-############################## Network And HTTP ###############################
-
-# Elasticsearch, by default, binds itself to the 0.0.0.0 address, and listens
-# on port [9200-9300] for HTTP traffic and on port [9300-9400] for node-to-node
-# communication. (the range means that if the port is busy, it will automatically
-# try the next port).
-
-# Set the bind address specifically (IPv4 or IPv6):
-#
-# network.bind_host: 192.168.0.1
-
-# Set the address other nodes will use to communicate with this node. If not
-# set, it is automatically derived. It must point to an actual IP address.
-#
-# network.publish_host: 192.168.0.1
-
-# Set both 'bind_host' and 'publish_host':
-#
-# network.host: 192.168.0.1
-
-# Set a custom port for the node to node communication (9300 by default):
-#
-# transport.tcp.port: 9300
-
-# Enable compression for all communication between nodes (disabled by default):
-#
-# transport.tcp.compress: true
-
-# Set a custom port to listen for HTTP traffic:
-#
-# http.port: 9200
-
-# Set a custom allowed content length:
-#
-# http.max_content_length: 100mb
-
-# Disable HTTP completely:
-#
-# http.enabled: false
-
-
-################################### Gateway ###################################
-
-# The gateway allows for persisting the cluster state between full cluster
-# restarts. Every change to the state (such as adding an index) will be stored
-# in the gateway, and when the cluster starts up for the first time,
-# it will read its state from the gateway.
-
-# There are several types of gateway implementations. For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-gateway.html>.
-
-# The default gateway type is the "local" gateway (recommended):
-#
-# gateway.type: local
-
-# Settings below control how and when to start the initial recovery process on
-# a full cluster restart (to reuse as much local data as possible when using shared
-# gateway).
-
-# Allow recovery process after N nodes in a cluster are up:
-#
-gateway.recover_after_nodes: 1
-
-# Set the timeout to initiate the recovery process, once the N nodes
-# from previous setting are up (accepts time value):
-#
-# gateway.recover_after_time: 5m
-
-# Set how many nodes are expected in this cluster. Once these N nodes
-# are up (and recover_after_nodes is met), begin recovery process immediately
-# (without waiting for recover_after_time to expire):
-#
-gateway.expected_nodes: 1
-
-
-############################# Recovery Throttling #############################
-
-# These settings allow to control the process of shards allocation between
-# nodes during initial recovery, replica allocation, rebalancing,
-# or when adding and removing nodes.
-
-# Set the number of concurrent recoveries happening on a node:
-#
-# 1. During the initial recovery
-#
-# cluster.routing.allocation.node_initial_primaries_recoveries: 4
-#
-# 2. During adding/removing nodes, rebalancing, etc
-#
-# cluster.routing.allocation.node_concurrent_recoveries: 2
-
-# Set to throttle throughput when recovering (eg. 100mb, by default 20mb):
-#
-# indices.recovery.max_bytes_per_sec: 20mb
-
-# Set to limit the number of open concurrent streams when
-# recovering a shard from a peer:
-#
-# indices.recovery.concurrent_streams: 5
-
-
-################################## Discovery ##################################
-
-# Discovery infrastructure ensures nodes can be found within a cluster
-# and master node is elected. Multicast discovery is the default.
-
-# Set to ensure a node sees N other master eligible nodes to be considered
-# operational within the cluster. Its recommended to set it to a higher value
-# than 1 when running more than 2 nodes in the cluster.
-#
-# discovery.zen.minimum_master_nodes: 1
-
-# Set the time to wait for ping responses from other nodes when discovering.
-# Set this option to a higher value on a slow or congested network
-# to minimize discovery failures:
-#
-# discovery.zen.ping.timeout: 3s
-
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-zen.html>
-
-# Unicast discovery allows to explicitly control which nodes will be used
-# to discover the cluster. It can be used when multicast is not present,
-# or to restrict the cluster communication-wise.
-#
-# 1. Disable multicast discovery (enabled by default):
-#
-# discovery.zen.ping.multicast.enabled: false
-#
-# 2. Configure an initial list of master nodes in the cluster
-#    to perform discovery when new nodes (master or data) are started:
-#
-# discovery.zen.ping.unicast.hosts: ["host1", "host2:port"]
-
-# EC2 discovery allows to use AWS EC2 API in order to perform discovery.
-#
-# You have to install the cloud-aws plugin for enabling the EC2 discovery.
-#
-# For more information, see
-# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/modules-discovery-ec2.html>
-#
-# See <http://elasticsearch.org/tutorials/elasticsearch-on-ec2/>
-# for a step-by-step tutorial.
-
-# GCE discovery allows to use Google Compute Engine API in order to perform discovery.
-#
-# You have to install the cloud-gce plugin for enabling the GCE discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-gce>.
-
-# Azure discovery allows to use Azure API in order to perform discovery.
-#
-# You have to install the cloud-azure plugin for enabling the Azure discovery.
-#
-# For more information, see <https://github.com/elasticsearch/elasticsearch-cloud-azure>.
-
-################################## Slow Log ##################################
-
-# Shard level query and fetch threshold logging.
-
-#index.search.slowlog.threshold.query.warn: 10s
-#index.search.slowlog.threshold.query.info: 5s
-#index.search.slowlog.threshold.query.debug: 2s
-#index.search.slowlog.threshold.query.trace: 500ms
-
-#index.search.slowlog.threshold.fetch.warn: 1s
-#index.search.slowlog.threshold.fetch.info: 800ms
-#index.search.slowlog.threshold.fetch.debug: 500ms
-#index.search.slowlog.threshold.fetch.trace: 200ms
-
-#index.indexing.slowlog.threshold.index.warn: 10s
-#index.indexing.slowlog.threshold.index.info: 5s
-#index.indexing.slowlog.threshold.index.debug: 2s
-#index.indexing.slowlog.threshold.index.trace: 500ms
-
-################################## GC Logging ################################
-
-#monitor.jvm.gc.young.warn: 1000ms
-#monitor.jvm.gc.young.info: 700ms
-#monitor.jvm.gc.young.debug: 400ms
-
-#monitor.jvm.gc.old.warn: 10s
-#monitor.jvm.gc.old.info: 5s
-#monitor.jvm.gc.old.debug: 2s
-
diff --git a/asdctool/src/main/resources/es-resources/README.txt b/asdctool/src/main/resources/es-resources/README.txt
deleted file mode 100644
index a7006ef..0000000
--- a/asdctool/src/main/resources/es-resources/README.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-ASDC elasticsearch tool
-========================
-
-This tool purpose is to ease and allow updating elasticsearch indices.
-
-In order to use the scripts, you need to verify Python is installed and to install the elasticsearc-py library:
-	Verify pip is installed:		$command -v pip
-	if not installed:	
-		Download https://bootstrap.pypa.io/get-pip.py
-		$python get-pip.py  (see instruction: https://pip.pypa.io/en/latest/installing/#installing-with-get-pip-py)
-	$pip install elasticsearch
-
-
-Tool contains:
-	- index_ops.py
-	  This script includes operations on elasticsearch index:
-	  
-	  create index:
-		$python index_ops.py -o create -a <elasticsearch hostname> -n <indexName> -f <index mapping file>
-		
-	  delete index:
-		$python index_ops.py -o delete -a <elasticsearch hostname> -n <indexName>
-	  
-	  copy index (assumes destination index already exists):
-	    $python index_ops.py -o move -a <elasticsearch hostname> -n <indexName> -t <toIndex>
-		
-		
-	- file_utils.py
-	  This script includes operations on files 
-	  
-	- audit_migration_1602.py
-	  This script run full flow to migrate audit information from previous versions to ASDC 1602
-	  It has 2 inputs:
-	   1. config_properties.py - this file holds configuration (hostname, index name, index mapping file etc.)
-	   2. folder of fields mapping per elasticsearch type (map old field to new field)
-	  The flow of this script is as follow:
-	   * create temp index with correct index mapping
-	   * scan the audit index to get all records
-	   * manipulate fields data and insert it to temp index
-	   * delete audit index 
-	   * create audit index with correct mapping
-	   * copy from temp index to newly created audit index
-	   * delete temp index
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/auditMappings.txt b/asdctool/src/main/resources/es-resources/auditMappings.txt
deleted file mode 100644
index 7de77cc..0000000
--- a/asdctool/src/main/resources/es-resources/auditMappings.txt
+++ /dev/null
@@ -1,169 +0,0 @@
-{ "settings": {}, "mappings":  
-{ 
-"distributiondownloadevent": 
-{ "properties": { 
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "RESOURCE_URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }}, 
-  "_all": { "enabled": true } },
-  "auditinggetuebclusterevent": 
-{ "properties": { 
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }}, 
-  "_all": { "enabled": true } },
-  "distributionstatusevent": 
-{ "properties": { 
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "RESOURCE_URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }}, 
-  "_all": { "enabled": true } },
-"distributionengineevent": 
-{ "properties": { 
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "ROLE": { "include_in_all": true, "type": "string" }, 
-  "API_KEY": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-    "D_ENV": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "CONSUMER_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }}, 
-  "_all": { "enabled": true } },
-  "useraccessevent": { 
-    "properties": { 
-      "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-      "USER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-      "USER_NAME": { "include_in_all": true, "type": "string" }} , 
-      "_all": { "enabled": true }}, 
-      "resourceadminevent": 
-      { "properties": { 
-        "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "CURR_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "MODIFIER_NAME": { "include_in_all": true, "type": "string" }, 
-        "PREV_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "PREV_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "RESOURCE_NAME": { "include_in_all": true, "type": "string" }, 
-        "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-        "DPREV_STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-        "DCURR_STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-        "COMMENT": { "include_in_all": true, "type": "string" }, 
-        "ARTIFACT_NAME": { "include_in_all": true, "index": "not_analyzed", "type": "string" } }, 
-        "_all": { "enabled": true }} , 
-        "useradminevent": 
-        { "properties": { 
-           "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "MODIFIER_NAME": { "include_in_all": true, "type": "string" }, 
-          "USER_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_AFTER_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_BEFORE_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_AFTER_ROLE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_NAME": { "include_in_all": true, "type": "string" }, 
-          "USER_BEFORE_NAME": { "include_in_all": true, "type": "string" }, 
-          "USER_BEFORE_EMAIL": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-          "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-          "USER_AFTER_NAME": { "include_in_all": true, "type": "string" } }, 
-          "_all": { "enabled": true } }, 
-"distributionnotificationevent": 
- {"properties":{ 
-   "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "CURR_STATE": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "MODIFIER_NAME": { "include_in_all": true, "type": "string" }, 
-  "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "RESOURCE_NAME": { "include_in_all": true, "type": "string" }, 
-  "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "TOPIC_NAME":{ "include_in_all": true, "index": "not_analyzed", "type": "string" }}},
-"categoryevent":
-{"properties":{
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "MODIFIER": { "include_in_all": true, "type": "string" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "CATEGORY_NAME": { "include_in_all": true, "type": "string" }, 
-  "SUB_CATEGORY_NAME": { "include_in_all": true, "type": "string" }, 
-  "GROUPING_NAME": { "include_in_all": true, "type": "string" }, 
-  "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }}, 
-     "_all": { "enabled": true } },
- "authevent": { 
-   "properties": { 
-      "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }, 
-     "DESC": { "include_in_all": true, "type": "string" }, 
-      "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-     "URL": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-      "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-     "USER": { "include_in_all": true, "type": "string" } ,
-      "AUTH_STATUS": { "include_in_all": true, "index": "not_analyzed","type": "string" } , 
-      "REALM": { "include_in_all": true, "index": "not_analyzed","type": "string" }} , 
-      "_all": { "enabled": true }}, 
- "consumerevent":  
-  {"properties":{
-  "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "MODIFIER": { "include_in_all": true, "type": "string" }, 
-  "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "DESC": { "include_in_all": true, "type": "string" }, 
-  "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-  "ECOMP_USER": { "include_in_all": true, "index": "not_analyzed", "type": "string" },
-  "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }}, 
-  "_all": { "enabled": true } },  
- "distributiondeployevent": 
-          {        "properties": {          
-            "ACTION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-            "CURR_VERSION": { "include_in_all": true, "index": "not_analyzed", "type": "string" },    
-            "DESC": { "include_in_all": true, "type": "string" },         
-            "DID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-            "MODIFIER_NAME": { "include_in_all": true, "type": "string" },         
-            "MODIFIER_UID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-            "REQUEST_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" }, 
-            "RESOURCE_NAME": { "include_in_all": true, "type": "string" },         
-            "RESOURCE_TYPE": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-            "SERVICE_INSTANCE_ID": { "include_in_all": true, "index": "not_analyzed", "type": "string" },  
-            "STATUS": { "include_in_all": true, "index": "not_analyzed", "type": "string" },    
-            "TIMESTAMP": { "include_in_all": true, "ignore_malformed": false, "format": "yyyy-MM-dd HH:mm:ss.SSS", "precision_step": 4, "type": "date" }}}}}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/audit_migration_1602.py b/asdctool/src/main/resources/es-resources/audit_migration_1602.py
deleted file mode 100644
index 8b61ebf..0000000
--- a/asdctool/src/main/resources/es-resources/audit_migration_1602.py
+++ /dev/null
@@ -1,132 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient
-import sys, os
-from index_ops import createIndex, deleteIndex, copyIndex
-from config_properties import getGlobalVar 
-from file_utils import readFileToJson
-
-def updateFieldNames(client, queryFrom, fromIndex, destIndex, addUTC):
-    typesDir="types"
-    typeFields = {}
-    for filename in os.listdir(typesDir):
-       print filename
-       fieldNames=readFileToJson(typesDir+os.sep+filename)
-       
-       type=filename.split(".")[0]
-       typeFields[type] = fieldNames
-   
-    client.indices.refresh(index=fromIndex)
-    res = elasticsearch.helpers.scan(client, query=queryFrom, index=fromIndex)
-       
-    actions = []
-    for i in res:
-       res_type = i['_type']
-       fieldNames = typeFields.get(res_type)
-       if (fieldNames != None):
-         action={}
-         for field in i['_source']:
-             updatedName=fieldNames.get(field)
-             if (updatedName != None):        
-                 if (field == 'timestamp' and addUTC == True):
-                     value+=" UTC"
-                 value=i['_source'].get(field)   
-                 action[updatedName]=value
-             else:
-                 action[field]=i['_source'].get(field)
-         i['_source']=action
-       
-       i['_index']=destIndex
-       i.pop('_id', None)
-       actions.append(i)
-
-    bulk_res = elasticsearch.helpers.bulk(client, actions)
-    print "bulk response: ", bulk_res
-
-
-
-def updateAllrecordsWithUTC(client, queryFrom, fromIndex, destIndex):
-
-    #scan indices
-    client.indices.refresh(index=fromIndex)
-    res = elasticsearch.helpers.scan(client, query=queryFrom, index=fromIndex)
-
-    actions = []
-    for i in res:
-        print i
-        i['_index']=destIndex
-        i['_source']['TIMESTAMP']+=" UTC"
-        actions.append(i)
-
-    bulk_res = elasticsearch.helpers.bulk(client, actions)
-    print "bulk response: ", bulk_res
-
-
-def printQueryResults(client, myQuery, indexName):
-    client.indices.refresh(index=indexName)
-    res = elasticsearch.helpers.scan(client, query=myQuery, index=indexName)
-    for i in res:
-       print i
-
-def main():
-   print "start script for changing fields"
-   print "================================="
-   
-   # initialize es
-   es = Elasticsearch([getGlobalVar('host')])
-
-   try:
-    mapping=readFileToJson(getGlobalVar('mappingFileName'))
-    res = createIndex(es, getGlobalVar('tempIndexName'), mapping)
-    if (res != 0):
-      print "script results in error"
-      sys.exit(1)
-
-    print "scan audit index and manipulate data"
-    print "===================================="
-
-    print "start time: ", datetime.now().time()
-    updateFieldNames(es, getGlobalVar('matchAllQuery'), getGlobalVar('origIndexName'), getGlobalVar('tempIndexName'), getGlobalVar('addUTC'))
-   
-    print "re-create original index"
-    print "========================="
-    res = createIndex(es, getGlobalVar('origIndexName'), mapping)
-    if (res != 0):
-      print "script results in error"
-      sys.exit(1)
-   
-    print "copy data from temp index to original"
-    print "======================================="
-    res = copyIndex(es, getGlobalVar('tempIndexName'), getGlobalVar('origIndexName'))
-    if (res != 0):
-      print "script results in error"
-      sys.exit(1)
-   
-    print "delete temp index"
-    print "=================="
-    res = deleteIndex(es, getGlobalVar('tempIndexName'))
-    if (res != 0):
-      print "script results in error"
-      sys.exit(1)
-   
-   
-    print "end time: ", datetime.now().time()
-
-   except Exception, error:
-      print "An exception was thrown!"
-      print str(error)
-      return 2
-  
-
-if __name__ == "__main__":
-        main()
-
-
-
-
-
diff --git a/asdctool/src/main/resources/es-resources/config_properties.py b/asdctool/src/main/resources/es-resources/config_properties.py
deleted file mode 100644
index d097300..0000000
--- a/asdctool/src/main/resources/es-resources/config_properties.py
+++ /dev/null
@@ -1,11 +0,0 @@
-globalVars={
-  "host": "127.0.0.1",
-  "origIndexName": "temp_audit",
-  "tempIndexName": "temp_audit2",
-  "addUTC": False,
-  "mappingFileName": "auditMappings.txt",
-  "matchAllQuery":{"query": {"match_all": {}}}
-}
-
-def getGlobalVar(propertyName):
-  return globalVars.get(propertyName)
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/file_utils.py b/asdctool/src/main/resources/es-resources/file_utils.py
deleted file mode 100644
index 7439020..0000000
--- a/asdctool/src/main/resources/es-resources/file_utils.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient
-import sys, os
-
-def readFileToJson(fileName):
-   print "read file ", fileName
-   fo=open(fileName)
-   try:
-     json_mapping=json.load(fo)
-     fo.close()
-   except ValueError:
-     print "error in reading file " , fileName
-     fo.close()
-     raise
-   return json_mapping
diff --git a/asdctool/src/main/resources/es-resources/index_ops.py b/asdctool/src/main/resources/es-resources/index_ops.py
deleted file mode 100644
index d1f3bb0..0000000
--- a/asdctool/src/main/resources/es-resources/index_ops.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import itertools
-import string
-import json
-from datetime import datetime
-from elasticsearch import Elasticsearch
-import elasticsearch
-import elasticsearch.helpers
-from elasticsearch.client import IndicesClient, CatClient
-import sys, os, getopt
-from file_utils import readFileToJson
-from config_properties import getGlobalVar 
-
-
-
-def createIndex(client, indexName, createBody):
-    try:
-      print "start createIndex"
-      if (client == None):
-         client = Elasticsearch(['localhost'])
-      esIndexClient = IndicesClient(client)
-      res = deleteIndex(client, indexName)
-      if (res != 0):
-         print "operation failed"
-         return 2
-      create_res=elasticsearch.client.IndicesClient.create(esIndexClient, index=indexName, body=createBody)
-      print "create index response: ", create_res
-      if (create_res['acknowledged'] != True):
-         print "failed to create index"
-         return 1
-      else:
-         print "index ",indexName, " created successfully"
-         return 0
-    except Exception, error:
-      print "An exception was thrown!"
-      print str(error)
-      return 2
-  
-
-def deleteIndex(client, indexName):
-   try:
-     print "start deleteIndex"
-     if (client == None):
-         client = Elasticsearch(['localhost'])
-     esIndexClient = IndicesClient(client)
-     isExists=elasticsearch.client.IndicesClient.exists(esIndexClient, indexName)
-     if ( isExists == True ):
-        delete_res=elasticsearch.client.IndicesClient.delete(esIndexClient, index=indexName)
-        if (delete_res['acknowledged'] != True):
-           print "failed to delete index"
-           return 1
-        else:
-           print "index ",indexName, " deleted"
-           return 0
-     else:
-        print "index not found - assume already deleted"
-        return 0
-   except Exception, error:
-      print "An exception was thrown!"
-      print str(error)
-      return 2
-
-def copyIndex(client, fromIndex, toIndex):
-    try: 
-      print "start copyIndex"
-      if (client == None):
-         client = Elasticsearch(['localhost'])
-      client.indices.refresh(index=fromIndex)
-      count=client.search(fromIndex, search_type='count')
-      print "original index count: ",count
-      docNum, docErrors = elasticsearch.helpers.reindex(client, fromIndex, toIndex)
-      print "copy result: ", docNum, docErrors 
-      if (docNum != count['hits']['total']):
-         print "Failed to copy all documents. expected: ", count['hits']['total'], " actual: ", docNum
-         return 1
-      # if (len(docErrors) != 0):
-         # print "copy returned with errors"
-         # print docErrors
-         # return 1
-      return 0
-    except Exception, error:
-      print "An exception was thrown!"
-      print str(error)
-      return 2
-  
-
-def usage():
-     print 'USAGE: ', sys.argv[0], '-o <operation : create | delete | move> -n <indexName> -a <address> -f <mappingFile (for create)> -t <toIndex (for move operation)>' 
-     
-
-
-def main(argv):
-   print "start script with ", len(sys.argv), 'arguments.'
-   print "=============================================="
-
-   try:
-         opts, args = getopt.getopt(argv, "h:o:a:n:f:t:", ["operation","address","indexName","file","toIndex"])
-   except getopt.GetoptError:
-         usage()
-         sys.exit(2)
- 
-   host = None
-   for opt, arg in opts:
-         print opt, arg
-         if opt == '-h':
-             usage()
-             sys.exit(2)
-         elif opt in ('-f', '--file'):
-            mapping=readFileToJson(arg)
-         elif opt in ('-a', '--address'):
-            host=arg
-         elif opt in ('-o', '--operation'):
-            operation=arg
-         elif opt in ('-n', '--indexName'):
-            indexName=arg
-         elif opt in ('-t', '--toIndex'):
-            destIndexName=arg
-
-   if (operation == None):
-       usage()
-       sys.exit(2)
-   elif (host == None):
-       print "address is mandatory argument"
-       usage()
-       sys.exit(2)
-   elif operation == 'create':
-       print "create new index ", indexName
-       client = Elasticsearch([{'host': host, 'timeout':5}] )
-       res = createIndex(client, indexName, mapping)
-   
-   elif operation == 'delete':
-       print "delete index ", indexName
-       client = Elasticsearch([{'host': host, 'timeout':5}] )
-       res = deleteIndex(client, indexName)
-
-   elif operation == 'move':
-       print "move index ", indexName, " to ", destIndexName
-       client = Elasticsearch([{'host': host, 'timeout':5}] )
-       res = copyIndex(client, indexName, destIndexName)
-   else:
-       usage()
-       exit(2)
-   if res != 0:
-      print "ERROR: operation Failed"
-      exit(1)
-    
-
-  
-if __name__ == "__main__":
-        main(sys.argv[1:])
-
-
diff --git a/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt b/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt
deleted file mode 100644
index b7e9435..0000000
--- a/asdctool/src/main/resources/es-resources/types/auditinggetuebclusterevent.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-{            "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "consumerId": "CONSUMER_ID"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt b/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt
deleted file mode 100644
index a74f037..0000000
--- a/asdctool/src/main/resources/es-resources/types/distributiondeployevent.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "currVersion": "CURR_VERSION",
-             "distributionId": "DID",
-             "modifierName": "MODIFIER_NAME",
-             "modifierUid": "MODIFIER_UID",
-             "resourceName": "RESOURCE_NAME",
-             "resourceType": "RESOURCE_TYPE"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt b/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt
deleted file mode 100644
index 879c4c4..0000000
--- a/asdctool/src/main/resources/es-resources/types/distributiondownloadevent.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-{            "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "resourceUrl": "RESOURCE_URL",
-             "consumerId": "CONSUMER_ID"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt b/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt
deleted file mode 100644
index a261042..0000000
--- a/asdctool/src/main/resources/es-resources/types/distributionengineevent.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "consumerId": "CONSUMER_ID",
-             "role": "ROLE",
-             "topicName": "TOPIC_NAME",
-             "apiKey": "API_KEY",
-             "environmentName": "D_ENV"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt b/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt
deleted file mode 100644
index 6375ead..0000000
--- a/asdctool/src/main/resources/es-resources/types/distributionnotificationevent.txt
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "currVersion": "CURR_VERSION",
-             "currState": "CURR_STATE",
-             "distributionId": "DID",
-             "modifierName": "MODIFIER_NAME",
-             "modifierUid": "MODIFIER_UID",
-             "resourceName": "RESOURCE_NAME",
-             "resourceType": "RESOURCE_TYPE",
-             "topicName": "TOPIC_NAME"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt b/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt
deleted file mode 100644
index 8fed9dd..0000000
--- a/asdctool/src/main/resources/es-resources/types/distributionstatusevent.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "resourceUrl": "RESOURCE_URL",
-             "consumerId": "CONSUMER_ID",
-             "distributionId": "DID",
-             "topicName": "TOPIC_NAME"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt b/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt
deleted file mode 100644
index 4631aa3..0000000
--- a/asdctool/src/main/resources/es-resources/types/resourceadminevent.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "currVersion": "CURR_VERSION",
-             "currState": "CURR_STATE",
-             "distributionId": "DID",
-             "modifierName": "MODIFIER_NAME",
-             "modifierUid": "MODIFIER_UID",
-             "prevVersion": "PREV_VERSION",
-             "prevState": "PREV_STATE",
-             "resourceName": "RESOURCE_NAME",
-             "resourceType": "RESOURCE_TYPE",
-             "dPrevStatus": "DPREV_STATUS",
-             "dCurrStatus": "DCURR_STATUS",
-             "comment": "COMMENT",
-             "artifactName": "ARTIFACT_NAME"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/useraccessevent.txt b/asdctool/src/main/resources/es-resources/types/useraccessevent.txt
deleted file mode 100644
index ebd27b5..0000000
--- a/asdctool/src/main/resources/es-resources/types/useraccessevent.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "userUid": "USER_UID",
-             "userName": "USER_NAME"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/es-resources/types/useradminevent.txt b/asdctool/src/main/resources/es-resources/types/useradminevent.txt
deleted file mode 100644
index 15e0d9b..0000000
--- a/asdctool/src/main/resources/es-resources/types/useradminevent.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-{
-             "action": "ACTION",
-             "timestamp": "TIMESTAMP",
-             "requestId": "REQUEST_ID",
-             "serviceInstanceId": "SERVICE_INSTANCE_ID",
-             "desc": "DESC",
-             "status": "STATUS",
-             "modifierName": "MODIFIER_NAME",
-             "modifierUid": "MODIFIER_UID",
-             "userUid": "USER_UID",
-             "userName": "USER_NAME",
-             "userEmail": "USER_EMAIL",
-             "userRole": "USER_ROLE",
-             "userBeforeName": "USER_BEFORE_NAME",
-             "userBeforeEmail": "USER_BEFORE_EMAIL",
-             "userBeforeRole": "USER_BEFORE_ROLE",
-             "userAfterName": "USER_AFTER_NAME",
-             "userAfterEmail": "USER_AFTER_EMAIL",
-             "userAfterRole": "USER_AFTER_ROLE"
-}
\ No newline at end of file
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigration.sh b/asdctool/src/main/resources/scripts/esToCassandraMigration.sh
deleted file mode 100644
index 383904c..0000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigration.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
-                FULL_PATH=$BASEDIR
-else
-                FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh b/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh
deleted file mode 100644
index 2c8e346..0000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigrationExportOnly.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
-                FULL_PATH=$BASEDIR
-else
-                FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration-export-only $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh b/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh
deleted file mode 100644
index 9ce3ca8..0000000
--- a/asdctool/src/main/resources/scripts/esToCassandraMigrationImportOnly.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
-                FULL_PATH=$BASEDIR
-else
-                FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.EsToCassandraDataMigrationMenu"
-
-command="java $JVM_LOG_FILE -cp $JARS $mainClass es-to-cassndra-migration-import-only $@"
-echo $command
-
-$command
-result=$?
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
diff --git a/asdctool/src/main/resources/scripts/getConsumers.sh b/asdctool/src/main/resources/scripts/getConsumers.sh
deleted file mode 100644
index d02aac6..0000000
--- a/asdctool/src/main/resources/scripts/getConsumers.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-##############################
-# Get list of SDC consumers
-##############################
-
-
-CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
-
-if [ ${BASEDIR:0:1} = "/" ]
-then
-                FULL_PATH=$BASEDIR
-else
-                FULL_PATH=$CURRENT_DIR/$BASEDIR
-fi
-
-source ${FULL_PATH}/baseOperation.sh
-
-mainClass="org.openecomp.sdc.asdctool.main.GetConsumersMenu"
-
-command="java $JVM_LOG_FILE -Xmx1024M -cp $JARS $mainClass $@"
-echo $command
-
-$command
-result=$?
-
-
-
-echo "***********************************"
-echo "***** $result *********************"
-echo "***********************************"
-
-exit $result
-
-
-
diff --git a/asdctool/src/main/resources/scripts/python/user/exportUsers.py b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
index 9e695ad..ed7515c 100644
--- a/asdctool/src/main/resources/scripts/python/user/exportUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
@@ -40,7 +40,8 @@
 		c.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json', 'Accept: application/json', adminHeader])
 
 		if scheme == 'https':
-			c.setopt(c.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYHOST, 0)
 
 		res = c.perform()
 		#print(res)
diff --git a/asdctool/src/main/resources/scripts/python/user/importUsers.py b/asdctool/src/main/resources/scripts/python/user/importUsers.py
index 984b75b..82ddec5 100644
--- a/asdctool/src/main/resources/scripts/python/user/importUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/importUsers.py
@@ -70,7 +70,8 @@
 		c.setopt(c.WRITEFUNCTION, lambda x: None)
 
 		if scheme == 'https':
-			c.setopt(c.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYHOST, 0)
 
 		res = c.perform()
 					
@@ -111,7 +112,8 @@
 		c.setopt(c.WRITEFUNCTION, lambda x: None)
 
 		if scheme == 'https':
-			c.setopt(c.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYPEER, 0)
+			c.setopt(pycurl.SSL_VERIFYHOST, 0)
 
 		#print("before perform")	
 		res = c.perform()