[SDC-29] catalog 1707 rebase commit.

Change-Id: I43c3dc5cf44abf5da817649bc738938a3e8388c1
Signed-off-by: Michael Lando <ml636r@att.com>
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
new file mode 100644
index 0000000..b433357
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/App.java
@@ -0,0 +1,66 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool;
+
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+
+/**
+ * Hello world!
+ *
+ */
+public class App {
+	public static void main(String[] args) {
+
+		String asdcToolPort = "8087";
+
+		ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
+		context.setContextPath("/asdctool");
+
+		Server jettyServer = new Server(Integer.valueOf(asdcToolPort));
+		jettyServer.setHandler(context);
+
+		ServletHolder jerseyServlet = context.addServlet(org.glassfish.jersey.servlet.ServletContainer.class, "/*");
+		jerseyServlet.setInitOrder(0);
+
+		// Tells the Jersey Servlet which REST service/class to load.
+		// jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
+		// EntryPoint.class.getCanonicalName());
+		jerseyServlet.setInitParameter("jersey.config.server.provider.packages", "org.openecomp.sdc.asdctool.servlets");
+		jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
+				"org.glassfish.jersey.media.multipart.MultiPartFeature");
+
+		try {
+			jettyServer.start();
+
+			System.out.println("Server was started on port " + asdcToolPort);
+
+			jettyServer.join();
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			System.exit(1);
+		} finally {
+			jettyServer.destroy();
+		}
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
index 3b7d3ec..23612a8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/Utils.java
@@ -95,7 +95,7 @@
 					if (rightValue == null) {
 						continue;
 					} else {
-						log.debug("The key {} cannot be found in the properties {}", key, leftProps);
+						log.debug("The key {} cannot be found in the properties {}",key,leftProps);
 						return false;
 					}
 				}
@@ -103,7 +103,7 @@
 				// if (false == leftValue instanceof Map && false == leftValue
 				// instanceof List) {
 				if (false == leftValue.equals(rightValue)) {
-					log.trace("The value of key {} is different between properties {} vs {}", key, leftValue, rightValue);
+						log.trace("The value of key {} is different between properties. {} vs {}",key,leftValue,rightValue);
 					return false;
 				}
 				// }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
index a78ea9b..eafec8b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/DataMigration.java
@@ -608,7 +608,7 @@
 						Files.delete(file.toPath());
 					}
 				} catch (IOException e) {
-					log.error("failed to delete output file " + file.getAbsolutePath(), e);
+					log.error("failed to delete output file {}", file.getAbsolutePath(), e);
 					return null;
 				}
 				file = new File(outputDir + "/" + table.getTableDescription().getTableName());
@@ -617,7 +617,7 @@
 				try {
 					file.createNewFile();
 				} catch (IOException e) {
-					log.error("failed to create output file " + file.getAbsolutePath(), e);
+					log.error("failed to create output file {}", file.getAbsolutePath(), e);
 					return null;
 				}
 			}
@@ -662,11 +662,11 @@
 	 */
 	private boolean createOutPutFolder(File outputDir) {
 		if (!outputDir.exists()) {
-			log.info("creating output dir" + outputDir.getAbsolutePath());
+			log.info("creating output dir {}", outputDir.getAbsolutePath());
 			try {
 				Files.createDirectories(outputDir.toPath());
 			} catch (IOException e) {
-				log.error("failed to create output dir" + outputDir.getAbsolutePath(), e);
+				log.error("failed to create output dir {}", outputDir.getAbsolutePath(), e);
 				return false;
 			}
 		}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
new file mode 100644
index 0000000..2ad4fac
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphJsonValidator.java
@@ -0,0 +1,57 @@
+package org.openecomp.sdc.asdctool.impl;
+
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * simple util class to verify that the titan export json graph is not corrupted
+ */
+public class GraphJsonValidator {
+
+    private static Logger log = LoggerFactory.getLogger(GraphJsonValidator.class.getName());
+
+    public boolean verifyTitanJson(String filePath) throws IOException {
+        ObjectMapper objectMapper = new ObjectMapper();
+        List<Integer> invalidRows = new ArrayList<>();
+        AtomicInteger atomicInteger = new AtomicInteger(1);
+        Files.lines(Paths.get(filePath)).forEach(line -> {
+            try {
+                verifyJsonLine(objectMapper, atomicInteger, line);
+            } catch (RuntimeException  | IOException e) {
+                logInvalidJsonRow(atomicInteger, line, e);
+                invalidRows.add(atomicInteger.getAndIncrement());
+            }
+        });
+        return verificationResult(invalidRows);
+    }
+
+    private void verifyJsonLine(ObjectMapper objectMapper, AtomicInteger atomicInteger, String line) throws IOException {
+        log.info("verifying line: {}", atomicInteger.get());
+        objectMapper.readTree(line);
+        atomicInteger.incrementAndGet();
+    }
+
+    private void logInvalidJsonRow(AtomicInteger atomicInteger, String line, Exception e) {
+        log.error("Invalid Json!!!!!!!!!!!!!!!!!!!!", e);
+        log.info("line number: {}", atomicInteger.get());
+        log.info("line value: {}", line);
+    }
+
+    private boolean verificationResult(List<Integer> invalidRows) {
+        if (!invalidRows.isEmpty()) {
+            log.info("the following lines are not valid: {}", invalidRows);
+            return false;
+        }
+        return true;
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
index bf62072..7bea2ea 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/GraphMLConverter.java
@@ -390,15 +390,9 @@
 						// ElementHelper.getProperties(edge));
 						Utils.setProperties(addEdge, Utils.getProperties(edge));
 
-						// log.info("fromVertex=" +
-						// ElementHelper.getProperties(vertexFrom));
-						log.info("fromVertex=" + Utils.getProperties(vertexFrom));
-						// log.info("toVertex=" +
-						// ElementHelper.getProperties(vertexTo));
-						log.info("toVertex=" + Utils.getProperties(vertexTo));
-						// log.info("edge=" + edge.getLabel() + " " +
-						// ElementHelper.getProperties(edge));
-						log.info("edge=" + edge.label() + " " + Utils.getProperties(edge));
+						log.info("fromVertex={}", Utils.getProperties(vertexFrom));
+						log.info("toVertex={}", Utils.getProperties(vertexTo));
+						log.info("edge={} {} ",edge.label(),Utils.getProperties(edge));
 
 						// GraphSONWriter.outputGraph(openGraph, outputFile);
 						GraphSONWriter create = GraphSONWriter.build().create();
@@ -410,15 +404,9 @@
 					} catch (Exception e) {
 						e.printStackTrace();
 
-						// log.error("fromVertex=" +
-						// ElementHelper.getProperties(vertexFrom));
-						log.error("fromVertex=" + Utils.getProperties(vertexFrom));
-						// log.error("toVertex=" +
-						// ElementHelper.getProperties(vertexTo));
-						log.error("toVertex=" + Utils.getProperties(vertexTo));
-						// log.error("edge=" + edge.getLabel() + " " +
-						// ElementHelper.getProperties(edge));
-						log.error("edge=" + edge.label() + " " + Utils.getProperties(edge));
+						log.error("fromVertex={}", Utils.getProperties(vertexFrom));
+						log.error("toVertex={}", Utils.getProperties(vertexTo));
+						log.error("edge={} {} ",edge.label(),Utils.getProperties(edge));
 
 						break;
 
@@ -455,20 +443,13 @@
 							TitanGraph openGraph = Utils.openGraph(conf);
 
 							TitanVertex addVertexFrom = openGraph.addVertex();
-							// ElementHelper.setProperties(addVertexFrom,
-							// ElementHelper.getProperties(vertex));
 							Utils.setProperties(addVertexFrom, Utils.getProperties(vertex));
 
-							// log.info("fromVertex=" +
-							// ElementHelper.getProperties(addVertexFrom));
-							log.info("fromVertex=" + Utils.getProperties(addVertexFrom));
+							log.info("fromVertex={}", Utils.getProperties(addVertexFrom));
 
-							// GraphSONWriter.outputGraph(openGraph,
-							// outputFile);
 							GraphSONWriter create = GraphSONWriter.build().create();
 							create.writeGraph(out, openGraph);
 
-							// openGraph.rollback();
 							openGraph.tx().rollback();
 
 						}
@@ -476,25 +457,14 @@
 					} catch (Exception e) {
 						e.printStackTrace();
 
-						// log.error("vertex=" +
-						// ElementHelper.getProperties(vertex));
-
 						GraphPropertiesDictionary[] values = GraphPropertiesDictionary.values();
 
-						// Object property1 =
-						// vertex.getProperty(GraphPropertiesDictionary.HEALTH_CHECK.getProperty());
 						Object property1 = vertex.value(GraphPropertiesDictionary.HEALTH_CHECK.getProperty());
 						System.out.println(property1);
 
-						// Object property2 = vertex.getProperty("healthcheck");
 						Object property2 = vertex.value("healthcheck");
 						System.out.println(property2);
 
-						// for (GraphPropertiesDictionary value : values) {
-						//
-						// System.out.println(property);
-						// }
-
 						break;
 
 					}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java
index 812d534..eac9726 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/PopulateComponentCache.java
@@ -40,10 +40,8 @@
 import org.openecomp.sdc.be.model.Resource;
 import org.openecomp.sdc.be.model.Service;
 import org.openecomp.sdc.be.model.cache.ComponentCache;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
 import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
-import org.openecomp.sdc.be.model.operations.impl.ProductOperation;
-import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
-import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
 import org.openecomp.sdc.be.resources.data.ComponentCacheData;
 import org.openecomp.sdc.be.resources.data.ESArtifactData;
 import org.openecomp.sdc.common.util.SerializationUtils;
@@ -66,21 +64,15 @@
 
 	@Autowired
 	protected ComponentCassandraDao componentCassandraDao;
-
+	
 	@Autowired
-	protected ResourceOperation resourceOperation;
-
-	@Autowired
-	protected ServiceOperation serviceOperation;
-
-	@Autowired
-	protected ProductOperation productOperation;
+	ToscaOperationFacade toscaOperationFacade;
 
 	@Autowired
 	protected ComponentCache componentCache;
 
 	private void exit(String stage, int i) {
-		log.error("Failed on " + stage);
+		log.error("Failed on {}", stage);
 		System.exit(i);
 
 	}
@@ -91,15 +83,16 @@
 		populateCache(ComponentTypeEnum.PRODUCT);
 	}
 
+	@SuppressWarnings("unchecked")
 	private void populateCache(ComponentTypeEnum componentTypeEnum) {
 
 		List<String> list = new ArrayList<>();
-		Either<TitanGraph, TitanOperationStatus> graph = resourceOperation.getTitanGenericDao().getGraph();
+		Either<TitanGraph, TitanOperationStatus> graph = toscaOperationFacade.getTitanDao().getGraph();
 		TitanGraph titanGraph = graph.left().value();
-		Iterable vertices = titanGraph.query()
+		Iterable<TitanVertex> vertices = titanGraph.query()
 				.has(GraphPropertiesDictionary.LABEL.getProperty(), componentTypeEnum.name().toLowerCase()).vertices();
 
-		Iterator iterator = vertices.iterator();
+		Iterator<TitanVertex> iterator = vertices.iterator();
 		while (iterator.hasNext()) {
 			TitanVertex vertex = (TitanVertex) iterator.next();
 
@@ -129,36 +122,11 @@
 			///////////////////////////////////////////////////////////////////////////////////// there.
 			/////////////////////////////////////////////////////////////////////////////////////
 			Component component = null;
-			switch (componentTypeEnum) {
-			case RESOURCE:
-				Either<Resource, StorageOperationStatus> resourceRes = resourceOperation.getComponent(componentUid,
-						false);
-				if (resourceRes.isRight()) {
-					exit("get resource", 1);
-				}
-				component = resourceRes.left().value();
-				break;
-			case SERVICE:
-				Either<Service, StorageOperationStatus> serviceRes = serviceOperation.getComponent(componentUid, false);
-				if (serviceRes.isRight()) {
-					exit("get service", 1);
-				}
-				component = serviceRes.left().value();
-				break;
-			case PRODUCT:
-				Either<Product, StorageOperationStatus> productRes = productOperation.getComponent(componentUid, false);
-				if (productRes.isRight()) {
-					exit("get product", 1);
-				}
-				component = productRes.left().value();
-				break;
-			default:
-				break;
-			}
-
-			if (component == null) {
+			Either<Resource, StorageOperationStatus> getComponentRes = toscaOperationFacade.getToscaElement(componentUid);
+			if (getComponentRes.isRight()) {
 				exit("get component", 1);
 			}
+			component = getComponentRes.left().value();
 
 			long time2 = System.currentTimeMillis();
 			// System.out.println("fetch resource " + resource.getName());
@@ -198,7 +166,7 @@
 			 * info("After adding component {} to cassandra. Insert time is {} ms."
 			 * , componentUid, averageInsertTimeInMilli);
 			 * 
-			 * } catch (IOException e) { // TODO Auto-generated catch block
+			 * } catch (IOException e) {
 			 * e.printStackTrace(); }
 			 */
 
@@ -260,11 +228,11 @@
 			}
 		}
 		long fullSearchEnd2 = System.currentTimeMillis();
-		log.info("esofer time wait to threads finish " + ((fullSearchEnd2 - fullSearchStart2)) + " ms");
+		log.info("esofer time wait to threads finish {} ms",((fullSearchEnd2 - fullSearchStart2)));
 		// }
 		long fullSearchEnd = System.currentTimeMillis();
 
-		log.info("esofer full desrialize time " + ((fullSearchEnd - fullSearchStart)) + " ms");
+		log.info("esofer full deserialize time {} ms",((fullSearchEnd - fullSearchStart)));
 		System.out.println("esofer full desrialize time " + ((fullSearchEnd - fullSearchStart)) + " ms");
 	}
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
index c256ca0..c2ebc24 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
@@ -75,7 +75,7 @@
 			}
 			return status;
 		} catch (IOException e) {
-			log.error("Product uid:" + productUid + " delete failed with exception", e);
+			log.error("Product uid:{} delete failed with exception",productUid, e);
 		}
 		return null;
 	}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
new file mode 100644
index 0000000..3f63570
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
@@ -0,0 +1,404 @@
+package org.openecomp.sdc.asdctool.impl;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.graph.datatype.ActionEnum;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgePropertiesDictionary;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.dao.utils.UserStatusEnum;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.resources.data.UserData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thinkaurelius.titan.core.PropertyKey;
+import com.thinkaurelius.titan.core.TitanException;
+import com.thinkaurelius.titan.core.TitanFactory;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanGraphQuery;
+import com.thinkaurelius.titan.core.schema.ConsistencyModifier;
+import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
+import com.thinkaurelius.titan.core.schema.TitanManagement;
+
+public class TitanGraphInitializer {
+
+	private static Logger logger = LoggerFactory.getLogger(TitanGraphInitializer.class.getName());
+	private static TitanGraph graph;
+
+
+	public static boolean createGraph(String titanCfgFile) {
+		logger.info("** createGraph with {}", titanCfgFile);
+		try {
+			logger.info("createGraph : try to load file {}", titanCfgFile);
+			graph = TitanFactory.open(titanCfgFile);
+			if (graph.isClosed()) {
+				return false;
+			}
+
+		} catch (TitanException e) {
+			logger.info("createGraph : failed to open Titan graph with configuration file: {}", titanCfgFile, e);
+			return false;
+		}
+		
+		createIndexesAndDefaults();
+		
+		logger.info("** Titan graph created ");
+
+		return true;
+	}
+
+	private static boolean isVertexExist(Map<String, Object> properties) {
+		TitanGraphQuery query = graph.query();
+
+		if (properties != null && !properties.isEmpty()) {
+			for (Map.Entry<String, Object> entry : properties.entrySet()) {
+				query = query.has(entry.getKey(), entry.getValue());
+			}
+		}
+		Iterable<Vertex> vertecies = query.vertices();
+		java.util.Iterator<Vertex> iterator = vertecies.iterator();
+		if (iterator.hasNext()) {
+			return true;
+		}
+		return false;
+	}
+
+	private static void createDefaultUsers() {
+		List<UserData> users = createUserList();
+		for (UserData user : users) {
+			Vertex vertex = null;
+			Map<String, Object> checkedProperties = new HashMap<String, Object>();
+			checkedProperties.put(GraphPropertiesDictionary.USERID.getProperty(), user.getUserId());
+			checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+			Map<String, Object> properties = null;
+			if (!isVertexExist(checkedProperties)) {
+				vertex = graph.addVertex();
+				vertex.property(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+				properties = user.toGraphMap();
+				for (Map.Entry<String, Object> entry : properties.entrySet()) {
+					vertex.property(entry.getKey(), entry.getValue());
+				}
+			}
+		}
+		graph.tx().commit();
+
+	}
+
+	private static List<UserData> createUserList() {
+		LinkedList<UserData> users = new LinkedList<UserData>();
+		users.add(getDefaultUserAdmin1());
+		users.add(getDefaultUserAdmin2());
+		users.add(getDefaultUserDesigner1());
+		users.add(getDefaultUserDesigner2());
+		users.add(getDefaultUserTester1());
+		users.add(getDefaultUserTester2());
+		users.add(getDefaultUserTester3());
+		users.add(getDefaultUserGovernor1());
+		users.add(getDefaultUserGovernor2());
+		users.add(getDefaultUserOps1());
+		users.add(getDefaultUserOps2());
+		users.add(getDefaultUserProductManager1());
+		users.add(getDefaultUserProductManager2());
+		users.add(getDefaultUserProductStrategist1());
+		users.add(getDefaultUserProductStrategist2());
+		users.add(getDefaultUserProductStrategist3());
+		return users;
+	}
+
+	private static UserData getDefaultUserAdmin1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("jh0003");
+		userData.setEmail("admin@sdc.com");
+		userData.setFirstName("Jimmy");
+		userData.setLastName("Hendrix");
+		userData.setRole("ADMIN");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserAdmin2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("tr0001");
+		userData.setEmail("admin@sdc.com");
+		userData.setFirstName("Todd");
+		userData.setLastName("Rundgren");
+		userData.setRole("ADMIN");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserDesigner1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("cs0008");
+		userData.setEmail("designer@sdc.com");
+		userData.setFirstName("Carlos");
+		userData.setLastName("Santana");
+		userData.setRole("DESIGNER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserDesigner2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("me0009");
+		userData.setEmail("designer@sdc.com");
+		userData.setFirstName("Melissa");
+		userData.setLastName("Etheridge");
+		userData.setRole("DESIGNER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserTester1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("jm0007");
+		userData.setEmail("tester@sdc.com");
+		userData.setFirstName("Joni");
+		userData.setLastName("Mitchell");
+		userData.setRole("TESTER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserTester2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("kb0004");
+		userData.setEmail("tester@sdc.com");
+		userData.setFirstName("Kate");
+		userData.setLastName("Bush");
+		userData.setRole("TESTER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserTester3() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("jt0005");
+		userData.setEmail("tester@sdc.com");
+		userData.setFirstName("James");
+		userData.setLastName("Taylor");
+		userData.setRole("TESTER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserOps1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("op0001");
+		userData.setEmail("ops@sdc.com");
+		userData.setFirstName("Steve");
+		userData.setLastName("Regev");
+		userData.setRole("OPS");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserOps2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("af0006");
+		userData.setEmail("designer@sdc.com");
+		userData.setFirstName("Aretha");
+		userData.setLastName("Franklin");
+		userData.setRole("OPS");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserGovernor1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("gv0001");
+		userData.setEmail("governor@sdc.com");
+		userData.setFirstName("David");
+		userData.setLastName("Shadmi");
+		userData.setRole("GOVERNOR");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserGovernor2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("ah0002");
+		userData.setEmail("admin@sdc.com");
+		userData.setFirstName("Alex");
+		userData.setLastName("Harvey");
+		userData.setRole("GOVERNOR");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserProductManager1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("pm0001");
+		userData.setEmail("pm1@sdc.com");
+		userData.setFirstName("Teddy");
+		userData.setLastName("Isashar");
+		userData.setRole("PRODUCT_MANAGER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserProductManager2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("pm0002");
+		userData.setEmail("pm2@sdc.com");
+		userData.setFirstName("Sarah");
+		userData.setLastName("Bettens");
+		userData.setRole("PRODUCT_MANAGER");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserProductStrategist1() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("ps0001");
+		userData.setEmail("ps1@sdc.com");
+		userData.setFirstName("Eden");
+		userData.setLastName("Rozin");
+		userData.setRole("PRODUCT_STRATEGIST");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserProductStrategist2() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("ps0002");
+		userData.setEmail("ps2@sdc.com");
+		userData.setFirstName("Ella");
+		userData.setLastName("Kvetny");
+		userData.setRole("PRODUCT_STRATEGIST");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static UserData getDefaultUserProductStrategist3() {
+		UserData userData = new UserData();
+		userData.setAction(ActionEnum.Create);
+		userData.setElementType(GraphElementTypeEnum.Node);
+		userData.setUserId("ps0003");
+		userData.setEmail("ps3@sdc.com");
+		userData.setFirstName("Geva");
+		userData.setLastName("Alon");
+		userData.setRole("PRODUCT_STRATEGIST");
+		userData.setStatus(UserStatusEnum.ACTIVE.name());
+		userData.setLastLoginTime(0L);
+		return userData;
+	}
+
+	private static void createVertexIndixes() {
+		logger.info("** createVertexIndixes started");
+
+		TitanManagement graphMgt = graph.openManagement();
+		TitanGraphIndex index = null;
+		for (GraphPropertiesDictionary prop : GraphPropertiesDictionary.values()) {
+			PropertyKey propKey = null;
+			if (!graphMgt.containsPropertyKey(prop.getProperty())) {
+				Class<?> clazz = prop.getClazz();
+				if (!ArrayList.class.getName().equals(clazz.getName()) && !HashMap.class.getName().equals(clazz.getName())) {
+					propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
+				}
+			} else {
+				propKey = graphMgt.getPropertyKey(prop.getProperty());
+			}
+			if (prop.isIndexed()) {
+				if (!graphMgt.containsGraphIndex(prop.getProperty())) {
+					if (prop.isUnique()) {
+						index = graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).unique().buildCompositeIndex();
+
+						graphMgt.setConsistency(propKey, ConsistencyModifier.LOCK); // Ensures
+																					// only
+																					// one
+																					// name
+																					// per
+																					// vertex
+						graphMgt.setConsistency(index, ConsistencyModifier.LOCK); // Ensures
+																					// name
+																					// uniqueness
+																					// in
+																					// the
+																					// graph
+
+					} else {
+						graphMgt.buildIndex(prop.getProperty(), Vertex.class).addKey(propKey).buildCompositeIndex();
+					}
+				}
+			}
+		}
+		graphMgt.commit();
+		logger.info("** createVertexIndixes ended");
+
+	}
+
+	private static void createEdgeIndixes() {
+		logger.info("** createEdgeIndixes started");
+		TitanManagement graphMgt = graph.openManagement();
+		for (GraphEdgePropertiesDictionary prop : GraphEdgePropertiesDictionary.values()) {
+			if (!graphMgt.containsGraphIndex(prop.getProperty())) {
+				PropertyKey propKey = graphMgt.makePropertyKey(prop.getProperty()).dataType(prop.getClazz()).make();
+				graphMgt.buildIndex(prop.getProperty(), Edge.class).addKey(propKey).buildCompositeIndex();
+
+			}
+		}
+		graphMgt.commit();
+		logger.info("** createEdgeIndixes ended");
+	}
+
+	private static void createIndexesAndDefaults() {
+		createVertexIndixes();
+		createEdgeIndixes();
+		createDefaultUsers();
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
index b480091..0336701 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/UpdatePropertyOnVertex.java
@@ -73,7 +73,7 @@
 						Map<String, Object> leftProps = Utils.getProperties(vertex);
 						boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
 						if (false == vertexLeftContainsRightProps) {
-							log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are: {}", criteria, leftProps);
+							log.debug("Ignoring vertex since it does not contain properties {}. Vertex properties are {}",criteria,leftProps);
 							continue;
 						}
 
@@ -90,7 +90,7 @@
 						}
 
 					} else {
-						log.debug("No certified service was found for criteria {}", criteria);
+						log.debug("No certified service was found for criteria {}",criteria);
 					}
 				}
 
@@ -137,7 +137,7 @@
 
 			boolean vertexLeftContainsRightProps = Utils.vertexLeftContainsRightProps(leftProps, criteria);
 			if (false == vertexLeftContainsRightProps) {
-				log.debug("Ignore vertex since properties it does not contains properties {}. Vertex properties are {}", criteria, leftProps);
+				log.debug("Ignoring vertex since it does not contain properties {}. Vertex properties are {}",criteria,leftProps);
 				continue;
 			}
 
@@ -148,16 +148,12 @@
 
 					// vertex.setProperty(key, value);
 					vertex.property(key, value);
-					//if(log.isDebugEnabled()){
-					// log.debug("After setting vertex: {} {} with key value: {}, {}", 
-					// vertex.getProperty(GraphPropertiesDictionary.NAME.getProperty()), 
-					// vertex.getProperty(GraphPropertiesDictionary.VERSION.getProperty()),
-					// key, value);
-					//}
-					log.debug("After setting vertex: {} {} with key value: {}, {}", 
+					
+					if (log.isDebugEnabled()){
+						log.debug("After setting vertex {} {} with key value {},{}",  
 							vertex.property(GraphPropertiesDictionary.NAME.getProperty()),
-							vertex.property(GraphPropertiesDictionary.VERSION.getProperty()),
-							key, value);
+							vertex.property(GraphPropertiesDictionary.VERSION.getProperty()),key,value);
+					}
 					numberOfUpdatedVertexesPerService++;
 				}
 			}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/Migration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/Migration.java
new file mode 100644
index 0000000..d74bae6
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/Migration.java
@@ -0,0 +1,17 @@
+package org.openecomp.sdc.asdctool.impl.migration;
+
+public interface Migration {
+
+    /**
+     * performs a migration operation
+     * @return true if migration completed successfully or false otherwise
+     */
+    boolean migrate();
+
+    /**
+     *
+     * @return a description of what this migration does
+     */
+    String description();
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationException.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationException.java
new file mode 100644
index 0000000..f54fccc
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationException.java
@@ -0,0 +1,9 @@
+package org.openecomp.sdc.asdctool.impl.migration;
+
+public class MigrationException extends RuntimeException {
+
+    public MigrationException(String message) {
+        super(message);
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationMsg.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationMsg.java
new file mode 100644
index 0000000..3f8b55b
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationMsg.java
@@ -0,0 +1,36 @@
+package org.openecomp.sdc.asdctool.impl.migration;
+
+public enum MigrationMsg {
+    RENMAE_KEY_PROPERTIES_1707("renaming key properties"),
+    KEY_PROPERTY_NOT_EXIST("key property %s does not exist"),
+    RENAME_KEY_PROPERTY_FAILED("failed to rename key property %s"),
+    FAILED_TO_RETRIEVE_GRAPH("failed to get graph %s"),
+    PROPERTY_KEY_NOT_EXIST("property key %s not found."),
+    FAILED_TO_RETRIEVE_NODES("failed to retrieve nodes from graph. error status: %s"),
+    FAILED_TO_GET_NODE_FROM_GRAPH("failed to retrieve node from graph. error status : %s"),
+    FAILED_TO_CREATE_NODE("failed to create node of type %s. reason: %s"),
+    FAILED_TO_RETRIEVE_CATEGORIES("failed to retrieve categories. error status: %s"),
+    FAILED_TO_RETRIEVE_CATEGORY("failed to retrieve category %s. error status: %s"),
+    FAILED_TO_CREATE_SUB_CATEGORY("failed to create sub category %s of category %s. error status: %s"),
+    FAILED_TO_CREATE_CATEGORY("failed to create category %s. error status: %s"),
+    FAILED_TO_RETRIEVE_USER_STATES("failed to retrieve user %s states. error status: %s"),
+    FAILED_TO_RETRIEVE_MIGRATION_USER_STATES("failed to retrieve migrating user %s states for deletion. error status: %s"),
+    FAILED_TO_RETRIEVE_MIGRATION_USER("failed to retrieve migration user %s. error status: %s"),
+    FAILED_TO_RETRIEVE_VERSION_RELATION("failed to retrieve version relation from component with id %s to component with id %s. error status: %s"),
+    FAILED_TO_RETRIEVE_REQ_CAP("failed to retrieve fulfilled requirements or capabilities for instance %s. error status: %s"),
+    FAILED_TO_RETRIEVE_VERTEX("failed to retrieve vertex with id: %s. error status: %s"),
+    FAILED_TO_RETRIEVE_CAP_REQ_VERTEX("failed to retrieve capabilities or requirements vertex for component %s. error status: %s"),
+    FAILED_TO_ASSOCIATE_CAP_REQ("failed to associate fulfilled capabilities or requirements for components %s. error status: %s"),
+    FAILED_TO_RETRIEVE_TOSCA_DEF("failed to retrieve tosca definition for requirement or capability %s. error status %s"),
+    ;
+
+    private String message;
+
+    MigrationMsg(String migrationDescription) {
+        this.message = migrationDescription;
+    }
+
+    public String getMessage(String ... msgProperties) {
+        return String.format(this.message, msgProperties);
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationOperationUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationOperationUtils.java
new file mode 100644
index 0000000..a4595b2
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/MigrationOperationUtils.java
@@ -0,0 +1,119 @@
+package org.openecomp.sdc.asdctool.impl.migration;
+
+import com.thinkaurelius.titan.core.PropertyKey;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanVertex;
+import com.thinkaurelius.titan.core.schema.TitanManagement;
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.VertexProperty;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import java.util.Map;
+import java.util.Optional;
+
+@Component("migrationUtils")
+public class MigrationOperationUtils {
+
+    private static Logger log = LoggerFactory.getLogger(MigrationOperationUtils.class);
+
+    @Autowired
+    private TitanGenericDao titanGenericDao;
+
+    /**
+     * rename a set of property keys
+     *
+     * @param propertyKeys a mapping between the old property key name and the property key name to replace it with
+     *
+     * @return true if rename ended successfully or false otherwise
+     */
+    public boolean renamePropertyKeys(Map<String, String> propertyKeys) {
+        Either<TitanGraph, TitanOperationStatus> graph = titanGenericDao.getGraph();
+        return graph.either((titanGraph) ->  renamePropertyKeys(titanGraph, propertyKeys),
+                            (titanOperationStatus) -> operationFailed(MigrationMsg.FAILED_TO_RETRIEVE_GRAPH.getMessage(titanOperationStatus.name())));
+    }
+
+    private boolean renamePropertyKeys(TitanGraph titanGraph, Map<String, String> propertyKeys) {
+        try {
+            for (Map.Entry<String, String> propertyKeyEntry : propertyKeys.entrySet()) {
+                boolean renameSucceeded = renamePropertyKey(titanGraph, propertyKeyEntry);
+                if (!renameSucceeded) {
+                    return false;
+                }
+            }
+            return true;
+        } catch (RuntimeException e) {
+            log.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    private Boolean renamePropertyKey(TitanGraph titanGraph, Map.Entry<String, String> propertyKeyEntry) {
+        String renameFromKey = propertyKeyEntry.getKey();
+        String renameToKey = propertyKeyEntry.getValue();
+        log.info(String.format("renaming property key %s to %s", renameFromKey, renameToKey));
+        return renameProperty(titanGraph, renameFromKey, renameToKey);
+    }
+
+    private Boolean renameProperty(TitanGraph titanGraph, String renameFromKey, String renameToKey) {
+        if (titanGraph.containsPropertyKey(renameFromKey) && titanGraph.containsPropertyKey(renameToKey)) {//the new property key already exists, so we cannot rename to it; instead, add the new key and remove the old one on every vertex that has it.
+            return renamePropertyOnEachVertex(titanGraph, renameFromKey, renameToKey);
+        }
+        return renamePropertyOnGraphLevel(titanGraph, renameFromKey, renameToKey);
+    }
+
+    private Boolean renamePropertyOnGraphLevel(TitanGraph titanGraph, String renameFromKey, String renameToKey) {
+        TitanManagement titanManagement = titanGraph.openManagement();
+        return Optional.ofNullable(titanManagement.getPropertyKey(renameFromKey))
+                .map(propertyKey -> renamePropertyOnGraph(titanManagement, propertyKey, renameToKey))
+                .orElseGet(() -> {log.info(MigrationMsg.PROPERTY_KEY_NOT_EXIST.getMessage(renameFromKey)); return true;}) ;//if the property key does not exist, the rename is considered successful
+    }
+
+    private boolean renamePropertyOnEachVertex(TitanGraph graph, String oldKey, String newKey) {
+        addNewPropertyKeyOnVertices(graph, oldKey, newKey);
+        removeOldPropertyKeyFromGraph(graph, oldKey);
+        graph.tx().commit();
+        return true;
+    }
+
+    private void removeOldPropertyKeyFromGraph(TitanGraph graph, String oldKey) {
+        graph.getPropertyKey(oldKey).remove();
+    }
+
+	private void addNewPropertyKeyOnVertices(TitanGraph graph, String oldKey, String newKey) {
+        graph.query().has(oldKey).vertices().forEach(titanVertex -> {
+            copyOldKeyValueAndDropKey(oldKey, newKey, (TitanVertex) titanVertex);
+        });
+    }
+
+    private void copyOldKeyValueAndDropKey(String oldKey, String newKey, TitanVertex titanVertex) {
+        VertexProperty<Object> oldProperty = titanVertex.property(oldKey);
+        Object oldKeyValue = oldProperty.value();
+
+        titanVertex.property(newKey, oldKeyValue);
+        oldProperty.remove();
+    }
+
+    private boolean renamePropertyOnGraph(TitanManagement titanManagement, PropertyKey fromPropertyKey, String toKey) {
+        try {
+            titanManagement.changeName(fromPropertyKey, toKey);
+            titanManagement.commit();
+            return true;
+        } catch (RuntimeException e) {
+            log.error(MigrationMsg.RENAME_KEY_PROPERTY_FAILED.getMessage(fromPropertyKey.name()), e.getMessage());
+            titanManagement.rollback();
+            return false;
+        }
+    }
+
+    private boolean operationFailed(String errorMessage) {
+        log.error(errorMessage);
+        return false;
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java
index db8fee0..03583fd 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AddGroupUuid.java
@@ -90,7 +90,7 @@
 
 						groupUUID = UniqueIdBuilder.generateUUID();
 
-						log.debug("Before updating groups {} with groupUUID {}", builder.toString(), groupUUID);
+						log.debug("Before updating groups {} with groupUUID {}",builder.toString(),groupUUID);
 
 						for (GroupData groupData : groupsData) {
 
@@ -99,21 +99,20 @@
 							Either<GroupData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(groupData,
 									GroupData.class);
 							if (updateNode.isRight()) {
-								log.error("Failed to update group " + groupData + ". Error is {}",
-										updateNode.right().value().toString());
+								log.error("Failed to update group {}. Error is {}",groupData,updateNode.right().value().toString());
 								result = false;
 								return result;
 							}
 
 						}
 
-						log.debug("After updating groups {} with groupUUID {}", builder.toString(), groupUUID);
+						log.debug("After updating groups {} with groupUUID {}",builder.toString(),groupUUID);
 					}
 
 				}
 			}
 
-			log.info("The number of groups updated with groupUUID is " + numberOfUpdates);
+			log.info("The number of groups updated with groupUUID is {}", numberOfUpdates);
 
 			return result;
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java
index 561cfb5..3c9d6fa 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AllowMultipleHeats.java
@@ -68,7 +68,7 @@
 				}
 
 				List<ArtifactData> list = allHeatArtifacts.left().value();
-				log.debug("Found {} artifacts with label {}", (list == null ? 0 : list.size()), artifactLabel);
+				log.debug("Found {} artifacts with label {}",(list == null ? 0 : list.size()),artifactLabel);
 
 				if (list != null && false == list.isEmpty()) {
 
@@ -87,7 +87,7 @@
 				for (ArtifactData artifactData : artifactsToDelete) {
 					// System.out.println("Going to delete artifact " +
 					// artifactData);
-					log.debug("Going to delete artifact {}", artifactData);
+					log.debug("Going to delete artifact {}",artifactData);
 					Either<ArtifactData, TitanOperationStatus> deleteNode = titanGenericDao.deleteNode(artifactData,
 							ArtifactData.class);
 					if (deleteNode.isRight()) {
@@ -95,12 +95,12 @@
 						result = false;
 						return result;
 					} else {
-						log.debug("Delete artifact node {}", deleteNode.left().value());
+						log.debug("Delete artifact node {}",deleteNode.left().value());
 					}
 				}
 			}
 
-			log.debug("Number of deleted artifacts is {}", artifactsToDelete.size());
+			log.debug("Number of deleted artifacts is {}",artifactsToDelete.size());
 
 			int counter = 0;
 			if (false == artifactsToUpdate.isEmpty()) {
@@ -110,7 +110,7 @@
 
 					if (artifactData.getArtifactDataDefinition().getMandatory() != null
 							&& true == artifactData.getArtifactDataDefinition().getMandatory()) {
-						log.debug("Going to update artifact {}", artifactData);
+						log.debug("Going to update artifact {}",artifactData);
 						counter++;
 						artifactData.getArtifactDataDefinition().setMandatory(false);
 						Either<ArtifactData, TitanOperationStatus> updatedNode = titanGenericDao
@@ -120,13 +120,13 @@
 							result = false;
 							return result;
 						} else {
-							log.debug("Update artifact node {}", updatedNode.left().value());
+							log.debug("Update artifact node {}",updatedNode.left().value());
 						}
 					}
 				}
 			}
 
-			log.debug("Number of updated artifacts is {}", counter);
+			log.debug("Number of updated artifacts is {}",counter);
 
 			return result;
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java
index b529935..514c28b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/AppConfig.java
@@ -21,73 +21,63 @@
 package org.openecomp.sdc.asdctool.impl.migration.v1604;
 
 import org.openecomp.sdc.asdctool.impl.PopulateComponentCache;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationOperationUtils;
 import org.openecomp.sdc.asdctool.impl.migration.v1607.CsarMigration;
 import org.openecomp.sdc.asdctool.impl.migration.v1610.TitanFixUtils;
 import org.openecomp.sdc.asdctool.impl.migration.v1610.ToscaArtifactsAlignment;
+import org.openecomp.sdc.asdctool.impl.migration.v1702.DataTypesUpdate;
+import org.openecomp.sdc.asdctool.impl.migration.v1702.Migration1702;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.VfModulesPropertiesAdding;
 import org.openecomp.sdc.be.auditing.api.IAuditingManager;
 import org.openecomp.sdc.be.auditing.impl.AuditingManager;
 import org.openecomp.sdc.be.components.distribution.engine.IDistributionEngine;
 import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
-import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic;
-import org.openecomp.sdc.be.components.impl.GroupBusinessLogic;
-import org.openecomp.sdc.be.components.impl.InputsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ProductBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ProductComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceImportManager;
-import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic;
+import org.openecomp.sdc.be.components.impl.*;
 import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
 import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
 import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
 import org.openecomp.sdc.be.dao.cassandra.ComponentCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.SdcSchemaFilesCassandraDao;
+import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
 import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
 import org.openecomp.sdc.be.dao.impl.AuditingDao;
 import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
-import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
 import org.openecomp.sdc.be.impl.ComponentsUtils;
 import org.openecomp.sdc.be.model.cache.ApplicationDataTypeCache;
 import org.openecomp.sdc.be.model.cache.ComponentCache;
+import org.openecomp.sdc.be.model.jsontitan.operations.GroupsOperation;
 import org.openecomp.sdc.be.model.operations.api.IAdditionalInformationOperation;
 import org.openecomp.sdc.be.model.operations.api.IElementOperation;
 import org.openecomp.sdc.be.model.operations.api.IGraphLockOperation;
 import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
-import org.openecomp.sdc.be.model.operations.impl.AdditionalInformationOperation;
-import org.openecomp.sdc.be.model.operations.impl.ArtifactOperation;
-import org.openecomp.sdc.be.model.operations.impl.AttributeOperation;
-import org.openecomp.sdc.be.model.operations.impl.CacheMangerOperation;
-import org.openecomp.sdc.be.model.operations.impl.CapabilityInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.CapabilityOperation;
-import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
-import org.openecomp.sdc.be.model.operations.impl.ElementOperation;
-import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.HeatParametersOperation;
-import org.openecomp.sdc.be.model.operations.impl.InputsOperation;
-import org.openecomp.sdc.be.model.operations.impl.InterfaceLifecycleOperation;
-import org.openecomp.sdc.be.model.operations.impl.LifecycleOperation;
-import org.openecomp.sdc.be.model.operations.impl.OnboardingClient;
-import org.openecomp.sdc.be.model.operations.impl.ProductOperation;
-import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
-import org.openecomp.sdc.be.model.operations.impl.RequirementOperation;
-import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
-import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
-import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import org.openecomp.sdc.be.model.operations.impl.*;
 import org.openecomp.sdc.be.tosca.CsarUtils;
 import org.openecomp.sdc.be.tosca.ToscaExportHandler;
 import org.openecomp.sdc.be.user.IUserBusinessLogic;
 import org.openecomp.sdc.be.user.UserBusinessLogic;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+import org.springframework.context.annotation.Primary;
 
 @Configuration
+@Import(DAOSpringConfig.class)
 public class AppConfig {
+
+	@Bean(name = "sdc-schema-files-cassandra-dao")
+	public SdcSchemaFilesCassandraDao sdcSchemaFilesCassandraDao() {
+		return new SdcSchemaFilesCassandraDao();
+	}
+	@Bean(name = "componentsUtils")
+	public ComponentsUtils componentsUtils() {
+		return new ComponentsUtils();
+	}
+	@Bean(name = "updateDataTypes")
+	public DataTypesUpdate dataTypesUpdate() {
+		return new DataTypesUpdate();
+	}
 	@Bean(name = "serviceMigrationBean")
 	public ServiceMigration serviceMigration() {
 		return new ServiceMigration();
@@ -113,16 +103,6 @@
 		return new CsarMigration();
 	}
 
-	@Bean(name = "titan-generic-dao")
-	public TitanGenericDao titanGenericDao() {
-		return new TitanGenericDao();
-	}
-
-	@Bean(name = "titan-client", initMethod = "createGraph")
-	public TitanGraphClient titanClient() {
-		return new TitanGraphClient();
-	}
-
 	@Bean(name = "resource-operation")
 	public ResourceOperation resourceOperation() {
 		return new ResourceOperation();
@@ -144,8 +124,9 @@
 	}
 
 	@Bean(name = "property-operation")
-	public PropertyOperation propertyOperation() {
-		return new PropertyOperation();
+	@Primary
+	public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
+		return new PropertyOperation(titanGenericDao);
 	}
 
 	@Bean(name = "attribute-operation")
@@ -174,8 +155,9 @@
 	}
 
 	@Bean(name = "element-operation")
-	public IElementOperation elementOperation() {
-		return new ElementOperation();
+	@Primary
+	public IElementOperation elementOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
+		return new ElementOperation(titanGenericDao);
 	}
 
 	@Bean(name = "additional-information-operation")
@@ -213,9 +195,20 @@
 		return new GroupOperation();
 	}
 
+	@Bean(name = "groups-operation")
+	public GroupsOperation jsonGroupsOperation() {
+		return new GroupsOperation();
+	}
+	
+	@Bean(name = "group-instance-operation")
+	public GroupInstanceOperation groupInstanceOperation() {
+		return new GroupInstanceOperation();
+	}
+	
 	@Bean(name = "group-type-operation")
-	public GroupTypeOperation groupTypeOperation() {
-		return new GroupTypeOperation();
+	@Primary
+	public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenricDao, @Qualifier("property-operation")PropertyOperation propertyOperation) {
+		return new GroupTypeOperation(titanGenricDao, propertyOperation);
 	}
 
 	@Bean(name = "attribute-operation")
@@ -289,8 +282,9 @@
 	 * @return
 	 */
 	@Bean(name = "user-operation")
-	public IUserAdminOperation userOperation() {
-		return new UserAdminOperation();
+	@Primary
+	public IUserAdminOperation userOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
+		return new UserAdminOperation(titanGenericDao);
 	}
 
 	/**
@@ -534,5 +528,24 @@
 	public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() {
 		return new ServiceComponentInstanceBusinessLogic();
 	}
+	/** 
+	 * 
+	 * @return new instance of migration1702
+	 */
+	@Bean(name = "migration1702")
+	public Migration1702 migration1702() {
+		return new Migration1702();
+	}
+
+
+	@Bean(name = "migrationUtils")
+	public MigrationOperationUtils migrationUtils() {
+		return new MigrationOperationUtils();
+	}
+
+    @Bean(name = "vfModulesPropertiesAdding")
+    public VfModulesPropertiesAdding vfModulesPropertiesAdding() {
+        return new VfModulesPropertiesAdding();
+    }
 
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java
index cb7f05d..ee5171d 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/ServiceMigration.java
@@ -100,12 +100,8 @@
 
 public class ServiceMigration {
 
-	private static final String[] NORMATIVE_OLD_NAMES = { 
-			"tosca.nodes.network.Network", "tosca.nodes.network.Port",
-			"tosca.nodes.BlockStorage", "tosca.nodes.Compute", "tosca.nodes.Container.Application",
-			"tosca.nodes.Container.Runtime", "tosca.nodes.Database", "tosca.nodes.DBMS", "tosca.nodes.LoadBalancer",
-			"tosca.nodes.ObjectStorage", "tosca.nodes.Root", "tosca.nodes.SoftwareComponent",
-			"tosca.nodes.WebApplication", "tosca.nodes.WebServer", };
+	private static final String[] NORMATIVE_OLD_NAMES = { "tosca.nodes.network.Network", "tosca.nodes.network.Port", "tosca.nodes.BlockStorage", "tosca.nodes.Compute", "tosca.nodes.Container.Application", "tosca.nodes.Container.Runtime",
+			"tosca.nodes.Database", "tosca.nodes.DBMS", "tosca.nodes.LoadBalancer", "tosca.nodes.ObjectStorage", "tosca.nodes.Root", "tosca.nodes.SoftwareComponent", "tosca.nodes.WebApplication", "tosca.nodes.WebServer", };
 
 	private static Logger log = LoggerFactory.getLogger(ServiceMigration.class.getName());
 
@@ -204,7 +200,7 @@
 				return false;
 			}
 		} catch (Exception e) {
-			log.debug("Failed to load category migration file :{} error: {}",categoryMigrationFile, e);
+			log.debug("Failed to load category migration file : {}", categoryMigrationFile, e);
 			return false;
 		}
 		for (Map.Entry<String, List<MigrationCategory>> entry : categoriesFromYml.entrySet()) {
@@ -227,7 +223,7 @@
 					log.debug("updateCategories no changes for categories from type {}", componentType);
 				}
 			} else {
-				log.debug("updateCategories failed not supported component file in migration categories file {}", entry.getKey());
+				log.debug("updateCategories failed not supported component file in migration categories file" + entry.getKey());
 				return false;
 			}
 		}
@@ -236,10 +232,9 @@
 
 	private boolean updateServiceCategories(List<MigrationCategory> categories) {
 		log.debug("updateServiceCategories STARTED");
-		Either<List<CategoryDefinition>, ActionStatus> serviceCategories = elementOperation
-				.getAllCategories(NodeTypeEnum.ServiceNewCategory, true);
+		Either<List<CategoryDefinition>, ActionStatus> serviceCategories = elementOperation.getAllCategories(NodeTypeEnum.ServiceNewCategory, true);
 		if (serviceCategories.isRight()) {
-			log.debug("updateServiceCategories failed fetch all service categories ,error: {}", serviceCategories.right().value());
+			log.debug("updateServiceCategories failed fetch all service categories ,error " + serviceCategories.right().value());
 			return false;
 		}
 		for (MigrationCategory newCat : categories) {
@@ -255,10 +250,9 @@
 				}
 				if (!exist) {
 					CategoryDefinition categoryDefinition = new CategoryDefinition(newCat);
-					Either<CategoryDefinition, ActionStatus> result = elementOperation
-							.createCategory(categoryDefinition, NodeTypeEnum.ServiceNewCategory, true);
+					Either<CategoryDefinition, ActionStatus> result = elementOperation.createCategory(categoryDefinition, NodeTypeEnum.ServiceNewCategory, true);
 					if (result.isRight()) {
-						log.debug("Failed to create service category {}, error: {}", categoryDefinition, result.right().value());
+						log.debug("Failed to create service category {} error {}", categoryDefinition, result.right().value());
 						return false;
 					}
 					log.debug("service category {} created", categoryDefinition);
@@ -267,8 +261,7 @@
 				// update exist
 				for (CategoryDefinition catInDB : serviceCategories.left().value()) {
 					if (newCat.getOldName().equals(catInDB.getName())) {
-						Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(
-								newCat, NodeTypeEnum.ServiceNewCategory);
+						Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(newCat, NodeTypeEnum.ServiceNewCategory);
 						if (updateSingleResult.isRight()) {
 							return false;
 						}
@@ -281,15 +274,12 @@
 		return true;
 	}
 
-	private Either<CategoryData, TitanOperationStatus> updateSingleResourceCategory(MigrationCategory newCat,
-			NodeTypeEnum nodetype) {
+	private Either<CategoryData, TitanOperationStatus> updateSingleResourceCategory(MigrationCategory newCat, NodeTypeEnum nodetype) {
 		Map<String, Object> properties = new HashMap<>();
 		properties.put(GraphPropertiesDictionary.NAME.getProperty(), newCat.getOldName());
-		Either<List<CategoryData>, TitanOperationStatus> categoryEither = titanGenericDao.getByCriteria(nodetype,
-				properties, CategoryData.class);
+		Either<List<CategoryData>, TitanOperationStatus> categoryEither = titanGenericDao.getByCriteria(nodetype, properties, CategoryData.class);
 		if (categoryEither.isRight() && categoryEither.right().value() != TitanOperationStatus.NOT_FOUND) {
-			log.debug("Failed to get {} categories, for name {} error {}", nodetype, newCat.getOldName(),
-					categoryEither.right().value());
+			log.debug("Failed to get {} categories, for name {} error {}", nodetype, newCat.getOldName(), categoryEither.right().value());
 			return Either.right(categoryEither.right().value());
 		}
 		List<CategoryData> categoryList = (categoryEither.isLeft() ? categoryEither.left().value() : null);
@@ -300,10 +290,8 @@
 		CategoryData categoryData = categoryList.get(0);
 		categoryData.getCategoryDataDefinition().setName(newCat.getName());
 		categoryData.getCategoryDataDefinition().setIcons(newCat.getIcons());
-		categoryData.getCategoryDataDefinition()
-				.setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(newCat.getName()));
-		Either<CategoryData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(categoryData,
-				CategoryData.class);
+		categoryData.getCategoryDataDefinition().setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(newCat.getName()));
+		Either<CategoryData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(categoryData, CategoryData.class);
 		if (updateNode.isRight()) {
 			log.debug("Failed to update {} category {} error {}", nodetype, categoryData, updateNode.right().value());
 			return Either.right(updateNode.right().value());
@@ -314,11 +302,9 @@
 
 	private boolean updateResourceCategories(List<MigrationCategory> categories) {
 		log.debug("updateResourceCategories STARTED");
-		Either<List<CategoryDefinition>, ActionStatus> resourceCategories = elementOperation
-				.getAllCategories(NodeTypeEnum.ResourceNewCategory, true);
+		Either<List<CategoryDefinition>, ActionStatus> resourceCategories = elementOperation.getAllCategories(NodeTypeEnum.ResourceNewCategory, true);
 		if (resourceCategories.isRight()) {
-			log.debug("updateResourceCategories failed fetch all resource categories ,error "
-					+ resourceCategories.right().value());
+			log.debug("updateResourceCategories failed fetch all resource categories ,error {}", resourceCategories.right().value());
 			return false;
 		}
 		for (MigrationCategory newCat : categories) {
@@ -333,10 +319,9 @@
 				}
 				if (!exist) {
 					CategoryDefinition categoryDefinition = new CategoryDefinition(newCat);
-					Either<CategoryDefinition, ActionStatus> resultCat = elementOperation
-							.createCategory(categoryDefinition, NodeTypeEnum.ResourceNewCategory, true);
+					Either<CategoryDefinition, ActionStatus> resultCat = elementOperation.createCategory(categoryDefinition, NodeTypeEnum.ResourceNewCategory, true);
 					if (resultCat.isRight()) {
-						log.debug("Failed to create resource category {}, error: {}", categoryDefinition, resultCat.right().value());
+						log.debug("Failed to create resource category {} error {}", categoryDefinition, resultCat.right().value());
 						return false;
 					}
 					log.debug("resource category {} created", categoryDefinition);
@@ -346,10 +331,9 @@
 					List<MigrationSubCategory> subcategories = newSubcat;
 					for (MigrationSubCategory msubcat : subcategories) {
 						SubCategoryDefinition subcat = new SubCategoryDefinition(msubcat);
-						Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation.createSubCategory(
-								resultCat.left().value().getUniqueId(), subcat, NodeTypeEnum.ResourceSubcategory, true);
+						Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation.createSubCategory(resultCat.left().value().getUniqueId(), subcat, NodeTypeEnum.ResourceSubcategory, true);
 						if (resultSubcat.isRight()) {
-							log.debug("Failed to create resource sub category {} error: {}", subcat, resultSubcat.right().value());
+							log.debug("Failed to create resource sub category {} error {}", subcat, resultSubcat.right().value());
 							return false;
 						}
 						log.debug("resource sub category {} created for category {}", categoryDefinition, resultCat.left().value().getName());
@@ -359,8 +343,7 @@
 				// update exist
 				for (CategoryDefinition catInDB : resourceCategories.left().value()) {
 					if (newCat.getOldName().equals(catInDB.getName())) {
-						Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(
-								newCat, NodeTypeEnum.ResourceNewCategory);
+						Either<CategoryData, TitanOperationStatus> updateSingleResult = updateSingleResourceCategory(newCat, NodeTypeEnum.ResourceNewCategory);
 						if (updateSingleResult.isRight()) {
 							return false;
 						}
@@ -378,18 +361,15 @@
 								if (!existSub) {
 									SubCategoryDefinition subcat = new SubCategoryDefinition(migSubCat);
 
-									Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation
-											.createSubCategory((String) categoryData.getUniqueId(), subcat,
-													NodeTypeEnum.ResourceSubcategory, true);
+									Either<SubCategoryDefinition, ActionStatus> resultSubcat = elementOperation.createSubCategory((String) categoryData.getUniqueId(), subcat, NodeTypeEnum.ResourceSubcategory, true);
 									if (resultSubcat.isRight()) {
-										log.debug("Failed to create resource sub category {} error: {}", subcat, resultSubcat.right().value());
+										log.debug("Failed to create resource sub category {} error {}", subcat, resultSubcat.right().value());
 										return false;
 									}
-									log.debug("resource sub category {}", categoryData, resultSubcat.left().value().getName());
+									log.debug("resource sub category {} created for category {}", categoryData, resultSubcat.left().value().getName());
 								}
 							} else {
-								if (updateSingleSubCategory(newCat, migSubCat,
-										updateSingleResult.left().value()) == false) {
+								if (!updateSingleSubCategory(newCat, migSubCat, updateSingleResult.left().value())) {
 									return false;
 								}
 							}
@@ -402,17 +382,13 @@
 		return true;
 	}
 
-	private boolean updateSingleSubCategory(MigrationCategory newCat, MigrationSubCategory migSubCat,
-			CategoryData categoryData) {
+	private boolean updateSingleSubCategory(MigrationCategory newCat, MigrationSubCategory migSubCat, CategoryData categoryData) {
 
-		Either<List<ImmutablePair<SubCategoryData, GraphEdge>>, TitanOperationStatus> subcategories = titanGenericDao
-				.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceNewCategory),
-						(String) categoryData.getUniqueId(), GraphEdgeLabels.SUB_CATEGORY,
-						NodeTypeEnum.ResourceSubcategory, SubCategoryData.class);
+		Either<List<ImmutablePair<SubCategoryData, GraphEdge>>, TitanOperationStatus> subcategories = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceNewCategory), (String) categoryData.getUniqueId(),
+				GraphEdgeLabels.SUB_CATEGORY, NodeTypeEnum.ResourceSubcategory, SubCategoryData.class);
 
 		if (subcategories.isRight()) {
-			log.debug("Failed to get resource sub categories, for name {} error {}", newCat.getOldName(),
-					subcategories.right().value());
+			log.debug("Failed to get resource sub categories, for name {} error {}", newCat.getOldName(), subcategories.right().value());
 			return false;
 		}
 
@@ -421,13 +397,10 @@
 				SubCategoryData subCategoryData = pair.getKey();
 				subCategoryData.getSubCategoryDataDefinition().setName(migSubCat.getName());
 				subCategoryData.getSubCategoryDataDefinition().setIcons(migSubCat.getIcons());
-				subCategoryData.getSubCategoryDataDefinition()
-						.setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(migSubCat.getName()));
-				Either<SubCategoryData, TitanOperationStatus> updateSubNode = titanGenericDao
-						.updateNode(subCategoryData, SubCategoryData.class);
+				subCategoryData.getSubCategoryDataDefinition().setNormalizedName(ValidationUtils.normalizeCategoryName4Uniqueness(migSubCat.getName()));
+				Either<SubCategoryData, TitanOperationStatus> updateSubNode = titanGenericDao.updateNode(subCategoryData, SubCategoryData.class);
 				if (updateSubNode.isRight()) {
-					log.debug("Failed to update resource sub category {} error {}", subCategoryData,
-							updateSubNode.right().value());
+					log.debug("Failed to update resource sub category {} error {}", subCategoryData, updateSubNode.right().value());
 					return false;
 				}
 				log.debug("Update resource subcategory category {} ", subCategoryData);
@@ -540,11 +513,9 @@
 	private boolean updateCalculatedEdges() {
 		log.debug("update calculated edges STARTED");
 
-		Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao
-				.getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
+		Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao.getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
 		if (allInstances.isRight() && !allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
-			log.debug(
-					"updateCalculatedEdges failed fetch all resource instances ,error " + allInstances.right().value());
+			log.debug("updateCalculatedEdges failed fetch all resource instances ,error {}", allInstances.right().value());
 			return false;
 		}
 		if (allInstances.isRight() && allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
@@ -556,15 +527,11 @@
 			// check if already have calculated edges
 			log.debug("start handle instance {}", instance.getUniqueId());
 			boolean needProcess = true;
-			Either<List<ImmutablePair<CapabilityData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao
-					.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
-							instance.getUniqueId(), GraphEdgeLabels.CALCULATED_CAPABILITY, NodeTypeEnum.Capability,
-							CapabilityData.class);
+			Either<List<ImmutablePair<CapabilityData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), instance.getUniqueId(),
+					GraphEdgeLabels.CALCULATED_CAPABILITY, NodeTypeEnum.Capability, CapabilityData.class);
 			if (vfci.isRight()) {
 				if (!vfci.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
-					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}", 
-							instance.getComponentInstDataDefinition().getComponentUid(),
-							vfci.right().value());
+					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error {}", instance.getComponentInstDataDefinition().getComponentUid(), vfci.right().value());
 					return false;
 				}
 			} else {
@@ -572,15 +539,11 @@
 					needProcess = false;
 				}
 			}
-			Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReq = titanGenericDao
-					.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
-							instance.getUniqueId(), GraphEdgeLabels.CALCULATED_REQUIREMENT, NodeTypeEnum.Requirement,
-							RequirementData.class);
+			Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReq = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), instance.getUniqueId(),
+					GraphEdgeLabels.CALCULATED_REQUIREMENT, NodeTypeEnum.Requirement, RequirementData.class);
 			if (vfciReq.isRight()) {
 				if (!vfciReq.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
-					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}",
-							instance.getComponentInstDataDefinition().getComponentUid(),
-							vfciReq.right().value());
+					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error {}", instance.getComponentInstDataDefinition().getComponentUid(), vfciReq.right().value());
 					return false;
 				}
 			} else {
@@ -588,16 +551,12 @@
 					needProcess = false;
 				}
 			}
-			Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReqFF = titanGenericDao
-					.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance),
-							instance.getUniqueId(), GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED,
-							NodeTypeEnum.Requirement, RequirementData.class);
+			Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> vfciReqFF = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), instance.getUniqueId(),
+					GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED, NodeTypeEnum.Requirement, RequirementData.class);
 			if (vfciReqFF.isRight()) {
 
 				if (!vfciReqFF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
-					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource "
-							+ instance.getComponentInstDataDefinition().getComponentUid() + " error "
-							+ vfciReqFF.right().value());
+					log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error {}", instance.getComponentInstDataDefinition().getComponentUid(), vfciReqFF.right().value());
 					return false;
 				}
 			} else {
@@ -613,7 +572,7 @@
 			String originId = instance.getComponentInstDataDefinition().getComponentUid();
 			Either<Resource, StorageOperationStatus> resourceE = resourceOperation.getResource(originId, true);
 			if (resourceE.isRight()) {
-				log.debug("updateCalculatedEdges failed to fetch origin resource with id {} error: {}", originId, resourceE.right().value());
+				log.debug("updateCalculatedEdges failed to fetch origin resource with id {} error {}", originId, resourceE.right().value());
 				return false;
 			}
 			Resource resource = resourceE.left().value();
@@ -631,16 +590,12 @@
 		return true;
 	}
 
-	private boolean createCalculatedCapabilitiesForInstance(ComponentInstanceData instance,
-			Map<String, List<CapabilityDefinition>> capabilities) {
+	private boolean createCalculatedCapabilitiesForInstance(ComponentInstanceData instance, Map<String, List<CapabilityDefinition>> capabilities) {
 		for (Map.Entry<String, List<CapabilityDefinition>> entry : capabilities.entrySet()) {
 			for (CapabilityDefinition capability : entry.getValue()) {
-				Either<CapabilityData, TitanOperationStatus> capNode = titanGenericDao.getNode(
-						UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Capability), capability.getUniqueId(),
-						CapabilityData.class);
+				Either<CapabilityData, TitanOperationStatus> capNode = titanGenericDao.getNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Capability), capability.getUniqueId(), CapabilityData.class);
 				if (capNode.isRight()) {
-					log.debug("createCalculatedCapabilitiesForInstance failed to fetch capability node  with id "
-							+ capability.getUniqueId() + " error " + capNode.right().value());
+					log.debug("createCalculatedCapabilitiesForInstance failed to fetch capability node  with id {} error {}", capability.getUniqueId(), capNode.right().value());
 					return false;
 				}
 				Map<String, Object> props = new HashMap<>();
@@ -649,16 +604,13 @@
 					return false;
 				}
 
-				Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance,
-						capNode.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
+				Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance, capNode.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
 				if (createRelation.isRight()) {
 					TitanOperationStatus titanOperationStatus = createRelation.right().value();
-					log.debug(
-							"Failed to create calculated requirement from component instance {} to requirement {}, error: {}",
-							instance.getUniqueId(), capNode.left().value().getUniqueId(), titanOperationStatus);
+					log.debug("Failed to create calculated requirement from component instance {} to requirement {}, error: {}", instance.getUniqueId(), capNode.left().value().getUniqueId(), titanOperationStatus);
 					return false;
 				}
-				log.debug("CALCULATED_CAPABILITY was created from {} to {} with props: {}", capNode.left().value().getUniqueId(), instance.getUniqueId(), props);
+				log.debug("CALCULATED_CAPABILITY was created from {} to {} with props : {}", capNode.left().value().getUniqueId(), instance.getUniqueId(), props);
 			}
 		}
 		return true;
@@ -666,39 +618,30 @@
 
 	private boolean fillEdgeProperties(ComponentInstanceData instance, Map<String, Object> props) {
 		if (instance.getComponentInstDataDefinition().getOriginType().equals(OriginTypeEnum.VF)) {
-			Either<List<ImmutablePair<ComponentInstanceData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao
-					.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
-							instance.getComponentInstDataDefinition().getComponentUid(), GraphEdgeLabels.RESOURCE_INST,
-							NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
+			Either<List<ImmutablePair<ComponentInstanceData, GraphEdge>>, TitanOperationStatus> vfci = titanGenericDao.getChildrenNodes(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
+					instance.getComponentInstDataDefinition().getComponentUid(), GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
 			if (vfci.isRight()) {
-				log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error: {}",
-						instance.getComponentInstDataDefinition().getComponentUid(),
-						vfci.right().value());
+				log.debug("createCalculatedCapabilitiesForInstance failed to fetch instance for resource {} error {}", instance.getComponentInstDataDefinition().getComponentUid(), vfci.right().value());
 				return false;
 			}
 			ImmutablePair<ComponentInstanceData, GraphEdge> immutablePair = vfci.left().value().get(0);
 			String vfciId = immutablePair.getLeft().getUniqueId();
 			props.put(GraphEdgePropertiesDictionary.OWNER_ID.getProperty(), vfciId);
-			props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(),
-					immutablePair.getLeft().getComponentInstDataDefinition().getComponentUid());
+			props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(), immutablePair.getLeft().getComponentInstDataDefinition().getComponentUid());
 
 		} else {
 			props.put(GraphEdgePropertiesDictionary.OWNER_ID.getProperty(), instance.getUniqueId());
-			props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(),
-					instance.getComponentInstDataDefinition().getComponentUid());
+			props.put(GraphEdgePropertiesDictionary.SOURCE.getProperty(), instance.getComponentInstDataDefinition().getComponentUid());
 		}
 		return true;
 	}
 
-	private boolean createCalculatedRequirementsForInstance(ComponentInstanceData instance,
-			Map<String, List<RequirementDefinition>> requirements) {
+	private boolean createCalculatedRequirementsForInstance(ComponentInstanceData instance, Map<String, List<RequirementDefinition>> requirements) {
 		for (Map.Entry<String, List<RequirementDefinition>> entry : requirements.entrySet()) {
 			for (RequirementDefinition requirement : entry.getValue()) {
-				Either<RequirementData, TitanOperationStatus> reqNode = titanGenericDao.getNode(
-						UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Requirement), requirement.getUniqueId(),
-						RequirementData.class);
+				Either<RequirementData, TitanOperationStatus> reqNode = titanGenericDao.getNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Requirement), requirement.getUniqueId(), RequirementData.class);
 				if (reqNode.isRight()) {
-					log.debug("updateCalculatedEdges failed to fetch requirement node  with id {} error: {}", requirement.getUniqueId(), reqNode.right().value());
+					log.debug("updateCalculatedEdges failed to fetch requirement node  with id {} error {}", requirement.getUniqueId(), reqNode.right().value());
 					return false;
 				}
 				Map<String, Object> props = new HashMap<>();
@@ -708,16 +651,13 @@
 					return false;
 				}
 
-				Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance,
-						reqNode.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
+				Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(instance, reqNode.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
 				if (createRelation.isRight()) {
 					TitanOperationStatus titanOperationStatus = createRelation.right().value();
-					log.debug(
-							"Failed to create calculated requirement from component instance {} to requirement {}, error: {}",
-							instance.getUniqueId(), reqNode.left().value().getUniqueId(), titanOperationStatus);
+					log.debug("Failed to create calculated requirement from component instance {} to requirement {}, error: {}", instance.getUniqueId(), reqNode.left().value().getUniqueId(), titanOperationStatus);
 					return false;
 				}
-				log.debug("CALCULATED_REQUIREMENT was created from {} to {} with props: {}", reqNode.left().value().getUniqueId(), instance.getUniqueId(), props);
+				log.debug("CALCULATED_REQUIREMENT was created from {} to {} with props : {}", reqNode.left().value().getUniqueId(), instance.getUniqueId(), props);
 			}
 		}
 		return true;
@@ -725,8 +665,7 @@
 
 	private boolean updateRelations() {
 		log.debug("update relations and edges STARTED");
-		Either<List<RelationshipInstData>, TitanOperationStatus> allRelations = titanGenericDao
-				.getByCriteria(NodeTypeEnum.RelationshipInst, null, RelationshipInstData.class);
+		Either<List<RelationshipInstData>, TitanOperationStatus> allRelations = titanGenericDao.getByCriteria(NodeTypeEnum.RelationshipInst, null, RelationshipInstData.class);
 		if (allRelations.isRight()) {
 			if (allRelations.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
 				log.debug("updateRelations : No relations to update. updateRelations ENDED");
@@ -752,10 +691,9 @@
 				return false;
 			}
 
-			Either<RelationshipInstData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(rel,
-					RelationshipInstData.class);
+			Either<RelationshipInstData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(rel, RelationshipInstData.class);
 			if (updateNode.isRight()) {
-				log.debug("updateRelations : failed to update relation node with id {}, error: {}", rel.getUniqueId(), updateNode.right().value());
+				log.debug("updateRelations : failed to update relation node with id {} , error {}", rel.getUniqueId(), updateNode.right().value());
 				return false;
 			}
 			log.debug("Relations was updated with values {}", rel);
@@ -765,22 +703,19 @@
 	}
 
 	private boolean updateRequirementFieldsInRelation(RelationshipInstData rel) {
-		Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> reqInst = titanGenericDao
-				.getParentNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst), rel.getUniqueId(),
-						GraphEdgeLabels.RELATIONSHIP_INST, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
+		Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> reqInst = titanGenericDao.getParentNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst), rel.getUniqueId(), GraphEdgeLabels.RELATIONSHIP_INST,
+				NodeTypeEnum.ResourceInstance, ComponentInstanceData.class);
 		if (reqInst.isRight()) {
-			log.debug("updateRelations : failed to fetch capabilty component instance for relation {}, error: {}", rel.getUniqueId(), reqInst.right().value());
+			log.debug("updateRelations : failed to fetch capability component instance for relation {}, error {}", rel.getUniqueId(), reqInst.right().value());
 			return false;
 		}
 		ComponentInstanceData requirementInstanceData = reqInst.left().value().getLeft();
 		ComponentInstanceDataDefinition reqRI = requirementInstanceData.getComponentInstDataDefinition();
 		if (reqRI.getOriginType().equals(OriginTypeEnum.VF)) {
-			Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao
-					.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
-							reqRI.getComponentUid(), GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance,
-							ComponentInstanceData.class, null);
+			Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), reqRI.getComponentUid(),
+					GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class, null);
 			if (vfcInstInOrigVf.isRight()) {
-				log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error: {}", reqRI.getComponentUid(), vfcInstInOrigVf.right().value());
+				log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error {}", reqRI.getComponentUid(), vfcInstInOrigVf.right().value());
 				return false;
 			}
 			rel.setRequirementOwnerId(vfcInstInOrigVf.left().value().getLeft().getUniqueId());
@@ -788,30 +723,26 @@
 			rel.setRequirementOwnerId(reqRI.getUniqueId());
 		}
 		// get vertex
-		Either<TitanVertex, TitanOperationStatus> vertexReqRI = titanGenericDao.getVertexByProperty(
-				UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), requirementInstanceData.getUniqueId());
+		Either<TitanVertex, TitanOperationStatus> vertexReqRI = titanGenericDao.getVertexByProperty(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), requirementInstanceData.getUniqueId());
 		if (vertexReqRI.isRight()) {
-			log.debug("updateRelations : failed to fetch veterx for instance {}, error: {}", requirementInstanceData.getUniqueId(), vertexReqRI.right().value());
+			log.debug("updateRelations : failed to fetch vertex for instance {}, error {}", requirementInstanceData.getUniqueId(), vertexReqRI.right().value());
 			return false;
 		}
 		String[] splitIds = rel.getUniqueId().split("\\.");
 		String reqName = splitIds[splitIds.length - 1];
 		Map<String, Object> props = new HashMap<>();
 		props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), reqName);
-		Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao
-				.getOutgoingEdgesByCriteria(vertexReqRI.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
+		Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao.getOutgoingEdgesByCriteria(vertexReqRI.left().value(), GraphEdgeLabels.CALCULATED_REQUIREMENT, props);
 		if (edgesForNode.isRight()) {
-			log.debug("updateRelations : failed to fetch edges for instance {}, error: {}", requirementInstanceData.getUniqueId(), edgesForNode.right().value());
+			log.debug("updateRelations : failed to fetch edges for instance {}  error {}", requirementInstanceData.getUniqueId(), edgesForNode.right().value());
 			return false;
 		}
 		Edge edge = edgesForNode.left().value().get(0);
-		String reqId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(),
-				GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+		String reqId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(), GraphPropertiesDictionary.UNIQUE_ID.getProperty());
 		rel.setRequirementId(reqId);
 
 		// change edge label
-		TitanEdge newEdge = (TitanEdge) vertexReqRI.left().value()
-				.addEdge(GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED.getProperty(), edge.inVertex());
+		TitanEdge newEdge = (TitanEdge) vertexReqRI.left().value().addEdge(GraphEdgeLabels.CALCULATED_REQUIREMENT_FULLFILLED.getProperty(), edge.inVertex());
 		titanGenericDao.setProperties(newEdge, titanGenericDao.getProperties(edge));
 		edge.remove();
 
@@ -822,23 +753,19 @@
 
 	public boolean updateCapabiltyFieldsInRelation(RelationshipInstData rel) {
 		// update capability parameters
-		Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> capInst = titanGenericDao
-				.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst),
-						rel.getUniqueId(), GraphEdgeLabels.CAPABILITY_NODE, NodeTypeEnum.ResourceInstance,
-						ComponentInstanceData.class, null);
+		Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> capInst = titanGenericDao.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.RelationshipInst), rel.getUniqueId(),
+				GraphEdgeLabels.CAPABILITY_NODE, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class, null);
 		if (capInst.isRight()) {
-			log.debug("updateRelations : failed to fetch capabilty component instance for relation {}, error: {}", rel.getUniqueId(), capInst.right().value());
+			log.debug("updateRelations : failed to fetch capability component instance for relation {}, error {}", rel.getUniqueId(), capInst.right().value());
 			return false;
 		}
 		ComponentInstanceData capabiltyInstanceData = capInst.left().value().getLeft();
 		ComponentInstanceDataDefinition capRI = capabiltyInstanceData.getComponentInstDataDefinition();
 		if (capRI.getOriginType().equals(OriginTypeEnum.VF)) {
-			Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao
-					.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource),
-							capRI.getComponentUid(), GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance,
-							ComponentInstanceData.class, null);
+			Either<ImmutablePair<ComponentInstanceData, GraphEdge>, TitanOperationStatus> vfcInstInOrigVf = titanGenericDao.getChildByEdgeCriteria(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), capRI.getComponentUid(),
+					GraphEdgeLabels.RESOURCE_INST, NodeTypeEnum.ResourceInstance, ComponentInstanceData.class, null);
 			if (vfcInstInOrigVf.isRight()) {
-				log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error: {}", capRI.getComponentUid(), vfcInstInOrigVf.right().value());
+				log.debug("updateRelations : failed to fetch VFC instance in origin VF with id {}, error {}", capRI.getComponentUid(), vfcInstInOrigVf.right().value());
 				return false;
 			}
 			rel.setCapabilityOwnerId(vfcInstInOrigVf.left().value().getLeft().getUniqueId());
@@ -847,67 +774,28 @@
 		}
 
 		// get vertex
-		Either<TitanVertex, TitanOperationStatus> vertexCapRI = titanGenericDao.getVertexByProperty(
-				UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), capabiltyInstanceData.getUniqueId());
+		Either<TitanVertex, TitanOperationStatus> vertexCapRI = titanGenericDao.getVertexByProperty(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.ResourceInstance), capabiltyInstanceData.getUniqueId());
 		if (vertexCapRI.isRight()) {
-			log.debug("updateRelations : failed to fetch veterx for instance {}, error: {}", capabiltyInstanceData.getUniqueId(), vertexCapRI.right().value());
+			log.debug("updateRelations : failed to fetch vertex for instance {}, error {}", capabiltyInstanceData.getUniqueId(), vertexCapRI.right().value());
 			return false;
 		}
 		// String[] splitIds = rel.getUniqueId().split("\\.");
-		String capName = (String) capInst.left().value().getRight().getProperties()
-				.get(GraphEdgePropertiesDictionary.NAME.getProperty());// splitIds[splitIds.length
-																		// - 1];
+		String capName = (String) capInst.left().value().getRight().getProperties().get(GraphEdgePropertiesDictionary.NAME.getProperty());// splitIds[splitIds.length
+																																			// - 1];
 		Map<String, Object> props = new HashMap<>();
 		props.put(GraphEdgePropertiesDictionary.NAME.getProperty(), capName);
-		Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao
-				.getOutgoingEdgesByCriteria(vertexCapRI.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
+		Either<List<Edge>, TitanOperationStatus> edgesForNode = titanGenericDao.getOutgoingEdgesByCriteria(vertexCapRI.left().value(), GraphEdgeLabels.CALCULATED_CAPABILITY, props);
 		if (edgesForNode.isRight()) {
-			log.debug("updateRelations : failed to fetch edges for instance {}, error: {}", capabiltyInstanceData.getUniqueId(), edgesForNode.right().value());
+			log.debug("updateRelations : failed to fetch edges for instance {} , error {}", capabiltyInstanceData.getUniqueId(), edgesForNode.right().value());
 			return false;
 		}
 		Edge edge = edgesForNode.left().value().get(0);
-		String capId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(),
-				GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+		String capId = (String) titanGenericDao.getProperty((TitanVertex) edge.inVertex(), GraphPropertiesDictionary.UNIQUE_ID.getProperty());
 		rel.setCapabiltyId(capId);
 
 		return true;
 	}
 
-	// private boolean fixDerivedFv() {
-	// Map<String, Object> props = new HashMap<String, Object>();
-	// props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(),
-	// ResourceTypeEnum.VF.name());
-	// Either<List<ResourceMetadataData>, TitanOperationStatus> allVF =
-	// titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props,
-	// ResourceMetadataData.class);
-	// if (allVF.isRight() &&
-	// !allVF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
-	// log.debug("fixDerivedFv failed fetch all VF resources,error {}", allVF.right().value());
-	// return false;
-	// }
-	// if ( allVF.right().value().equals(TitanOperationStatus.NOT_FOUND) ){
-	// log.debug("fixDerivedFv - no VF");
-	// return true;
-	// }
-	// Set<String> finishedResources = new HashSet<>();
-	//
-	// for (ResourceMetadataData metadata : allVF.left().value()) {
-	// ComponentMetadataDataDefinition metadataDD =
-	// metadata.getMetadataDataDefinition();
-	//
-	// if (!finishedResources.contains(metadataDD.getUniqueId())) {
-	// Either<List<String>, StorageOperationStatus> processedIds =
-	// handleVfGroup(metadata);
-	// if (processedIds.isRight()) {
-	// log.debug("fixDerivedFv failed to process FV group {}", processedIds.right().value());
-	// return false;
-	// }
-	// finishedResources.addAll(processedIds.left().value());
-	// }
-	// }
-	// return true;
-	// }
-
 	private Either<List<String>, StorageOperationStatus> handleVfGroup(ResourceMetadataData metadata) {
 		Map<String, Object> props = new HashMap<String, Object>();
 		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
@@ -915,8 +803,7 @@
 
 		List<String> finished = new ArrayList<>();
 
-		Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByName = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByName = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
 		if (allVFByName.isRight()) {
 			log.debug("fixDerivedFv failed fetch all VF resources,error {}", allVFByName.right().value());
 			return Either.right(StorageOperationStatus.GENERAL_ERROR);
@@ -937,10 +824,9 @@
 		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
 		props.put(GraphPropertiesDictionary.UUID.getProperty(), uuid10);
 
-		Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByUUID = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allVFByUUID = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
 		if (allVFByUUID.isRight()) {
-			log.debug("fixDerivedFv failed fetch all VF resources by UUID  {}, error: {}", uuid10, allVFByUUID.right().value());
+			log.debug("fixDerivedFv failed fetch all VF resources by UUID {} ,error {}", uuid10, allVFByUUID.right().value());
 			return Either.right(StorageOperationStatus.GENERAL_ERROR);
 		}
 		for (ResourceMetadataData mdata : allVFByUUID.left().value()) {
@@ -957,7 +843,7 @@
 			// handleSingleVf(finished, derivedMapping, resourceId);
 			StorageOperationStatus handleSingleVfResult = handleSingleVf(finished, resourceId);
 			if (!handleSingleVfResult.equals(StorageOperationStatus.OK)) {
-				log.debug("fixDerivedFv failed - handleSingleVfResult failed for resource {}, error: {}", resourceId, handleSingleVfResult);
+				log.debug("fixDerivedFv failed - handleSingleVfResult failed for resource {} ,error {}", resourceId, handleSingleVfResult);
 				return Either.right(StorageOperationStatus.GENERAL_ERROR);
 			}
 		}
@@ -967,10 +853,9 @@
 	// private StorageOperationStatus handleSingleVf(List<String> finished,
 	// Map<String, String> derivedMapping, String resourceId) {
 	private StorageOperationStatus handleSingleVf(List<String> finished, String resourceId) {
-		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao
-				.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), resourceId);
+		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), resourceId);
 		if (vertexByProperty.isRight()) {
-			log.debug("fixDerivedFv failed to fetch resource by id {}, error: {}", resourceId, vertexByProperty.right().value());
+			log.debug("fixDerivedFv failed to fetch resource by id {} ,error {}", resourceId, vertexByProperty.right().value());
 			return StorageOperationStatus.GENERAL_ERROR;
 		}
 		Vertex vertexR = vertexByProperty.left().value();
@@ -979,8 +864,7 @@
 			// move edges
 			// must be only one
 			TitanVertex vertexD = (TitanVertex) vertexDIter.next();
-			String idDerived = (String) titanGenericDao.getProperty(vertexD,
-					GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+			String idDerived = (String) titanGenericDao.getProperty(vertexD, GraphPropertiesDictionary.UNIQUE_ID.getProperty());
 
 			// TODO clone resource
 
@@ -996,30 +880,27 @@
 
 	private boolean updateComponentInstanceType() {
 		log.debug("update component instances type STARTED");
-		Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao
-				.getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
+		Either<List<ComponentInstanceData>, TitanOperationStatus> allInstances = titanGenericDao.getByCriteria(NodeTypeEnum.ResourceInstance, null, ComponentInstanceData.class);
 		if (allInstances.isRight()) {
 			if (allInstances.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
 				log.debug("updateComponentInstanceType:  no instances ti update ");
 				return true;
 			}
-			log.debug("updateComponentInstanceType failed fetch all resource instances ,error {}", allInstances.right().value());
+			log.debug("updateComponentInstanceType failed fetch all resource instances ,error {}", allInstances.right().value());
 			return false;
 		}
 
 		List<ComponentInstanceData> listOfInstances = allInstances.left().value();
 		for (ComponentInstanceData instance : listOfInstances) {
 			String originId = instance.getComponentInstDataDefinition().getComponentUid();
-			Either<ComponentMetadataData, TitanOperationStatus> nodeResource = titanGenericDao.getNode(
-					UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), originId, ComponentMetadataData.class);
+			Either<ComponentMetadataData, TitanOperationStatus> nodeResource = titanGenericDao.getNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), originId, ComponentMetadataData.class);
 			if (nodeResource.isRight()) {
-				log.debug("updateComponentInstanceType failed to fetch origin resource with id {}, error: {}", originId, nodeResource.right().value());
+				log.debug("updateComponentInstanceType failed to fetch origin resource with id {} error {}", originId, nodeResource.right().value());
 				return false;
 			}
-			ResourceTypeEnum resourceType = ((ResourceMetadataDataDefinition) nodeResource.left().value()
-					.getMetadataDataDefinition()).getResourceType();
+			ResourceTypeEnum resourceType = ((ResourceMetadataDataDefinition) nodeResource.left().value().getMetadataDataDefinition()).getResourceType();
 			if (resourceType == null) {
-				log.debug("updateComponentInstanceType failed, no resource type for origin resource with id {}", originId);
+				log.debug("updateComponentInstanceType failed, no resource type for origin resource with id {}", originId);
 				return false;
 			}
 			OriginTypeEnum originType;
@@ -1042,10 +923,9 @@
 			}
 			instance.getComponentInstDataDefinition().setOriginType(originType);
 
-			Either<ComponentInstanceData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(instance,
-					ComponentInstanceData.class);
+			Either<ComponentInstanceData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(instance, ComponentInstanceData.class);
 			if (updateNode.isRight()) {
-				log.debug("updateComponentInstanceType failed, failed to update component instance node with id {}, error: {}", instance.getUniqueId(), updateNode.right().value());
+				log.debug("updateComponentInstanceType failed, failed to update component instance node with id {} error {}", instance.getUniqueId(), updateNode.right().value());
 				return false;
 			}
 			log.debug("For instance with id {} the origin type was detected as {}", instance.getUniqueId(), originType);
@@ -1056,8 +936,7 @@
 
 	private boolean addResourceCounterToResources() {
 
-		Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
 		if (allResources.isRight()) {
 			if (allResources.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
 				log.debug("addResourceCounterToResources - no resources");
@@ -1067,11 +946,9 @@
 			return false;
 		}
 		for (ResourceMetadataData resource : allResources.left().value()) {
-			Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(
-					UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), resource.getUniqueId());
+			Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), resource.getUniqueId());
 			if (vertexByProperty.isRight()) {
-				log.error("failed to add instanceCounter to VF {} . error is: {}", resource.getUniqueId(),
-						vertexByProperty.right().value().name());
+				log.error("failed to add instanceCounter to VF {} . error is: {}", resource.getUniqueId(), vertexByProperty.right().value().name());
 				return false;
 			}
 			Vertex vfVertex = vertexByProperty.left().value();
@@ -1086,8 +963,7 @@
 
 		Map<String, Object> props = new HashMap<String, Object>();
 		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
-		Either<List<ResourceMetadataData>, TitanOperationStatus> allVF = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allVF = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
 		if (allVF.isRight()) {
 			if (allVF.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
 				log.debug("fixDerivedVf - no VFs");
@@ -1099,11 +975,9 @@
 
 		Map<String, String> vfUuidToVfcUuid = new HashMap<String, String>();
 		for (ResourceMetadataData metadata : allVF.left().value()) {
-			Either<Resource, StorageOperationStatus> eitherResource = resourceOperation
-					.getResource(metadata.getMetadataDataDefinition().getUniqueId(), true);
+			Either<Resource, StorageOperationStatus> eitherResource = resourceOperation.getResource(metadata.getMetadataDataDefinition().getUniqueId(), true);
 			if (eitherResource.isRight()) {
-				log.error("failed to migrate VF {} from version 1602 to version 1604. error is: {}",
-						metadata.getMetadataDataDefinition().getUniqueId(), eitherResource.right().value().name());
+				log.error("failed to migrate VF {} from version 1602 to version 1604. error is: {}", metadata.getMetadataDataDefinition().getUniqueId(), eitherResource.right().value().name());
 				return false;
 			}
 			Resource vfResource = eitherResource.left().value();
@@ -1121,12 +995,10 @@
 			// handle lifecycle
 			String vfUniqueId = vfResource.getUniqueId();
 			LifecycleStateEnum vfcTargetState = vfResource.getLifecycleState();
-			if (vfcTargetState.equals(LifecycleStateEnum.READY_FOR_CERTIFICATION)
-					|| vfcTargetState.equals(LifecycleStateEnum.CERTIFICATION_IN_PROGRESS)) {
+			if (vfcTargetState.equals(LifecycleStateEnum.READY_FOR_CERTIFICATION) || vfcTargetState.equals(LifecycleStateEnum.CERTIFICATION_IN_PROGRESS)) {
 				User user = new User();
 				user.setUserId(vfResource.getLastUpdaterUserId());
-				Either<? extends Component, StorageOperationStatus> checkinComponent = lifecycleOperaion
-						.checkinComponent(NodeTypeEnum.Resource, vfResource, user, user, true);
+				Either<? extends Component, StorageOperationStatus> checkinComponent = lifecycleOperaion.checkinComponent(NodeTypeEnum.Resource, vfResource, user, user, true);
 				if (checkinComponent.isRight()) {
 					log.error("failed to checkin VF {}. error={}", vfUniqueId, checkinComponent.right().value().name());
 					return false;
@@ -1138,51 +1010,35 @@
 			// delete VF Properties
 			List<PropertyDefinition> properties = vfResource.getProperties();
 			if (properties != null && !properties.isEmpty()) {
-				Either<Map<String, PropertyDefinition>, StorageOperationStatus> deleteAllProperties = propertyOperation
-						.deleteAllPropertiesAssociatedToNode(NodeTypeEnum.Resource, vfUniqueId);
-				if (deleteAllProperties.isRight()
-						&& !deleteAllProperties.right().value().equals(StorageOperationStatus.NOT_FOUND)
-						&& !deleteAllProperties.right().value().equals(StorageOperationStatus.OK)) {
-					log.error("failed to delete properties of VF {} . error is: {}",
-							metadata.getMetadataDataDefinition().getUniqueId(),
-							deleteAllProperties.right().value().name());
+				Either<Map<String, PropertyDefinition>, StorageOperationStatus> deleteAllProperties = propertyOperation.deleteAllPropertiesAssociatedToNode(NodeTypeEnum.Resource, vfUniqueId);
+				if (deleteAllProperties.isRight() && !deleteAllProperties.right().value().equals(StorageOperationStatus.NOT_FOUND) && !deleteAllProperties.right().value().equals(StorageOperationStatus.OK)) {
+					log.error("failed to delete properties of VF {} . error is: {}", metadata.getMetadataDataDefinition().getUniqueId(), deleteAllProperties.right().value().name());
 					return false;
 				}
 			}
 			// delete VF Additional Info
 			List<AdditionalInformationDefinition> additionalInformation = vfResource.getAdditionalInformation();
 			if (additionalInformation != null && !additionalInformation.isEmpty()) {
-				Either<AdditionalInformationDefinition, StorageOperationStatus> deleteAllAdditionalInformationParameters = additionalInformationOperation
-						.deleteAllAdditionalInformationParameters(NodeTypeEnum.Resource, vfUniqueId, true);
-				if (deleteAllAdditionalInformationParameters.isRight()
-						&& !deleteAllAdditionalInformationParameters.right().value().equals(StorageOperationStatus.OK)
-						&& !deleteAllAdditionalInformationParameters.right().value()
-								.equals(StorageOperationStatus.NOT_FOUND)) {
-					log.error("failed to delete properties of VF {} . error is: {}",
-							metadata.getMetadataDataDefinition().getUniqueId(),
-							deleteAllAdditionalInformationParameters.right().value().name());
+				Either<AdditionalInformationDefinition, StorageOperationStatus> deleteAllAdditionalInformationParameters = additionalInformationOperation.deleteAllAdditionalInformationParameters(NodeTypeEnum.Resource, vfUniqueId, true);
+				if (deleteAllAdditionalInformationParameters.isRight() && !deleteAllAdditionalInformationParameters.right().value().equals(StorageOperationStatus.OK)
+						&& !deleteAllAdditionalInformationParameters.right().value().equals(StorageOperationStatus.NOT_FOUND)) {
+					log.error("failed to delete properties of VF {} . error is: {}", metadata.getMetadataDataDefinition().getUniqueId(), deleteAllAdditionalInformationParameters.right().value().name());
 					return false;
 				}
 			}
 			// delete VF derivedFrom
 			GraphRelation derivedFromRelation = new GraphRelation(GraphEdgeLabels.DERIVED_FROM.getProperty());
-			derivedFromRelation.setFrom(new RelationEndPoint(NodeTypeEnum.Resource,
-					UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), vfUniqueId));
-			Either<GraphRelation, TitanOperationStatus> deleteDerivedFromRelation = titanGenericDao
-					.deleteOutgoingRelation(derivedFromRelation);
+			derivedFromRelation.setFrom(new RelationEndPoint(NodeTypeEnum.Resource, UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), vfUniqueId));
+			Either<GraphRelation, TitanOperationStatus> deleteDerivedFromRelation = titanGenericDao.deleteOutgoingRelation(derivedFromRelation);
 			if (deleteDerivedFromRelation.isRight()) {
-				log.error("failed to delete derivedFrom relation of VF {} . error is: {}",
-						metadata.getMetadataDataDefinition().getUniqueId(),
-						deleteDerivedFromRelation.right().value().name());
+				log.error("failed to delete derivedFrom relation of VF {} . error is: {}", metadata.getMetadataDataDefinition().getUniqueId(), deleteDerivedFromRelation.right().value().name());
 				return false;
 			}
 
 			// create VFC
-			Either<Resource, StorageOperationStatus> createVFC = createVFC(metadata, vfResource, vfcUUID,
-					vfcTargetState);
+			Either<Resource, StorageOperationStatus> createVFC = createVFC(metadata, vfResource, vfcUUID, vfcTargetState);
 			if (createVFC.isRight()) {
-				log.error("failed to split VF {} to VFC. error is: {}",
-						metadata.getMetadataDataDefinition().getUniqueId(), createVFC.right().value().name());
+				log.error("failed to split VF {} to VFC. error is: {}", metadata.getMetadataDataDefinition().getUniqueId(), createVFC.right().value().name());
 				return false;
 			}
 			Resource vfcResource = createVFC.left().value();
@@ -1191,11 +1047,9 @@
 			}
 			// update VFC to deleted if required
 			if (isVfDeleted != null && isVfDeleted) {
-				Either<Component, StorageOperationStatus> markResourceToDelete = resourceOperation
-						.markComponentToDelete(vfcResource, true);
+				Either<Component, StorageOperationStatus> markResourceToDelete = resourceOperation.markComponentToDelete(vfcResource, true);
 				if (markResourceToDelete.isRight()) {
-					log.error("failed to mark isDeleted on VFC {} . error is: {}", vfcResource.getUniqueId(),
-							markResourceToDelete.right().value().name());
+					log.error("failed to mark isDeleted on VFC {} . error is: {}", vfcResource.getUniqueId(), markResourceToDelete.right().value().name());
 					return false;
 				}
 			}
@@ -1204,8 +1058,7 @@
 		return true;
 	}
 
-	private Either<Resource, StorageOperationStatus> createVFC(ResourceMetadataData metadata, Resource vfcResource,
-			String uuid, LifecycleStateEnum vfcTargetState) {
+	private Either<Resource, StorageOperationStatus> createVFC(ResourceMetadataData metadata, Resource vfcResource, String uuid, LifecycleStateEnum vfcTargetState) {
 
 		Boolean highestVersion = vfcResource.isHighestVersion();
 		// Resource vfcResource = new Resource((ResourceMetadataDefinition)
@@ -1237,8 +1090,7 @@
 			return createResource;
 		}
 		Resource afterCreateResource = createResource.left().value();
-		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(
-				UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), afterCreateResource.getUniqueId());
+		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.Resource), afterCreateResource.getUniqueId());
 		if (vertexByProperty.isRight()) {
 			return createResource;
 		}
@@ -1264,23 +1116,18 @@
 			log.error("failed to create logical name for vfc instance");
 			return false;
 		}
-		Either<ComponentInstance, StorageOperationStatus> createComponentInstance = componentInstanceOperaion
-				.createComponentInstance(vfUniqueId, NodeTypeEnum.Resource, handleNameLogic.left().value(),
-						componentInstance, NodeTypeEnum.Resource, true);
+		Either<ComponentInstance, StorageOperationStatus> createComponentInstance = componentInstanceOperaion.createComponentInstance(vfUniqueId, NodeTypeEnum.Resource, handleNameLogic.left().value(), componentInstance, NodeTypeEnum.Resource, true);
 
 		if (createComponentInstance.isRight()) {
-			log.error("failed to create vfc instance on vf {}. error: {}", vfUniqueId,
-					createComponentInstance.right().value().name());
+			log.error("failed to create vfc instance on vf {}. error: {}", vfUniqueId, createComponentInstance.right().value().name());
 			return false;
 		}
 		return true;
 	}
 
-	private Either<String, Boolean> handleNameLogic(ComponentInstance componentInstance, String containerComponentId,
-			String resourceName) {
+	private Either<String, Boolean> handleNameLogic(ComponentInstance componentInstance, String containerComponentId, String resourceName) {
 
-		Either<Integer, StorageOperationStatus> componentInNumberStatus = resourceOperation
-				.increaseAndGetComponentInstanceCounter(containerComponentId, true);
+		Either<Integer, StorageOperationStatus> componentInNumberStatus = resourceOperation.increaseAndGetComponentInstanceCounter(containerComponentId, true);
 
 		if (componentInNumberStatus.isRight()) {
 			log.debug("Failed to get component instance number for container component {} ", containerComponentId);
@@ -1299,13 +1146,12 @@
 		return Either.left(resourceInNumber);
 	}
 
-	private Boolean validateComponentInstanceName(String resourceInstanceName, ComponentInstance resourceInstance,
-			boolean isCreate) {
+	private Boolean validateComponentInstanceName(String resourceInstanceName, ComponentInstance resourceInstance, boolean isCreate) {
 
 		if (!ValidationUtils.validateStringNotEmpty(resourceInstanceName)) {
 			return false;
 		}
-		resourceInstance.setNormalizedName(ValidationUtils.normaliseComponentInstanceName(resourceInstanceName));
+		resourceInstance.setNormalizedName(ValidationUtils.normalizeComponentInstanceName(resourceInstanceName));
 		if (!isCreate) {
 			if (!ValidationUtils.validateResourceInstanceNameLength(resourceInstanceName)) {
 				return false;
@@ -1325,8 +1171,7 @@
 		boolean result = false;
 		Either<Boolean, StorageOperationStatus> resourceEither = null;
 		try {
-			Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao
-					.getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
+			Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, null, ResourceMetadataData.class);
 			if (allResources.isRight()) {
 				log.error("Couldn't get resources from DB, error: {}", allResources.right().value());
 				result = false;
@@ -1350,8 +1195,7 @@
 						return result;
 					}
 				}
-				if (((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name()
-						.equals("VF")) {
+				if (((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name().equals("VF")) {
 					resourceEither = setVfToscaResourceName(resource);
 					if (resourceEither.isRight()) {
 						log.error("DB error during tosca resource name setting");
@@ -1388,11 +1232,9 @@
 		List<ComponentMetadataData> fullComponentList = new ArrayList<ComponentMetadataData>();
 
 		// getting resources
-		Either<List<ResourceMetadataData>, TitanOperationStatus> allHighestVersionResources = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allHighestVersionResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
 		if (allHighestVersionResources.isRight()) {
-			log.error("Couldn't get resources with highest version from DB, error: {}",
-					allHighestVersionResources.right().value());
+			log.error("Couldn't get resources with highest version from DB, error: {}", allHighestVersionResources.right().value());
 			return false;
 		}
 		List<ResourceMetadataData> allHighestVersionResourcesAL = allHighestVersionResources.left().value();
@@ -1404,11 +1246,9 @@
 		fullComponentList.addAll(allHighestVersionResourcesAL);
 
 		// getting services
-		Either<List<ServiceMetadataData>, TitanOperationStatus> allHighestVersionServices = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Service, props, ServiceMetadataData.class);
+		Either<List<ServiceMetadataData>, TitanOperationStatus> allHighestVersionServices = titanGenericDao.getByCriteria(NodeTypeEnum.Service, props, ServiceMetadataData.class);
 		if (allHighestVersionServices.isRight()) {
-			log.error("Couldn't get services with highest version from DB, error: {}",
-					allHighestVersionServices.right().value());
+			log.error("Couldn't get services with highest version from DB, error: {}", allHighestVersionServices.right().value());
 			return false;
 		}
 		List<ServiceMetadataData> allHighestVersionServicesAL = allHighestVersionServices.left().value();
@@ -1422,11 +1262,9 @@
 		List<ComponentMetadataData> reducedComponentsAL = reduceHighestVersionResourcesList(fullComponentList);
 
 		// getting products
-		Either<List<ProductMetadataData>, TitanOperationStatus> allHighestVersionProducts = titanGenericDao
-				.getByCriteria(NodeTypeEnum.Product, props, ProductMetadataData.class);
+		Either<List<ProductMetadataData>, TitanOperationStatus> allHighestVersionProducts = titanGenericDao.getByCriteria(NodeTypeEnum.Product, props, ProductMetadataData.class);
 		if (allHighestVersionProducts.isRight()) {
-			log.error("Couldn't get products with highest version from DB, error: {}",
-					allHighestVersionProducts.right().value());
+			log.error("Couldn't get products with highest version from DB, error: {}", allHighestVersionProducts.right().value());
 			return false;
 		}
 		List<ProductMetadataData> allHighestVersionProductsAL = allHighestVersionProducts.left().value();
@@ -1457,15 +1295,12 @@
 				componentMetaData.getMetadataDataDefinition().setInvariantUUID(invariantUUID);
 			}
 			log.debug("new invariantUUID {}", componentMetaData.getMetadataDataDefinition().getInvariantUUID());
-			Either<ComponentMetadataData, TitanOperationStatus> updateNode = titanGenericDao
-					.updateNode(componentMetaData, ComponentMetadataData.class);
+			Either<ComponentMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(componentMetaData, ComponentMetadataData.class);
 			if (updateNode.isRight()) {
-				log.error("DB error during while updating component {}, error: {}",
-						componentMetaData.getMetadataDataDefinition().getName(), updateNode.right().value());
+				log.error("DB error during while updating component {}, error: {}", componentMetaData.getMetadataDataDefinition().getName(), updateNode.right().value());
 				return false;
 			}
-			log.debug("updated invariantUUID {}",
-					updateNode.left().value().getMetadataDataDefinition().getInvariantUUID());
+			log.debug("updated invariantUUID {}", updateNode.left().value().getMetadataDataDefinition().getInvariantUUID());
 			if (!isOnlyVersion(componentMetaData)) {
 				ComponentOperation componentOperation = null;
 				switch (NodeTypeEnum.getByName(componentMetaData.getLabel())) {
@@ -1481,11 +1316,9 @@
 				default:
 					break;
 				}
-				Either<Component, StorageOperationStatus> getComponentResult = componentOperation
-						.getComponent((String) componentMetaData.getUniqueId(), true);
+				Either<Component, StorageOperationStatus> getComponentResult = componentOperation.getComponent((String) componentMetaData.getUniqueId(), true);
 				if (getComponentResult.isRight()) {
-					log.error("DB error during while getting component with uniqueID {}, error: {}",
-							componentMetaData.getUniqueId(), getComponentResult.right().value());
+					log.error("DB error during while getting component with uniqueID {}, error: {}", componentMetaData.getUniqueId(), getComponentResult.right().value());
 					return false;
 				}
 				Component component = getComponentResult.left().value();
@@ -1514,8 +1347,7 @@
 	}
 
 	private boolean setProductInvariantUUIDIfExists(ProductMetadataData product) {
-		Either<TitanVertex, TitanOperationStatus> getVertexRes = titanGenericDao
-				.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), product.getUniqueId());
+		Either<TitanVertex, TitanOperationStatus> getVertexRes = titanGenericDao.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), product.getUniqueId());
 		if (getVertexRes.isRight()) {
 			log.error("DB error during retrieving product vertex {}", product.getMetadataDataDefinition().getName());
 			return false;
@@ -1528,27 +1360,21 @@
 		return true;
 	}
 
-	private Either<Boolean, StorageOperationStatus> updateAllVersions(Map<String, String> allVersions,
-			String invariantUUID) {
+	private Either<Boolean, StorageOperationStatus> updateAllVersions(Map<String, String> allVersions, String invariantUUID) {
 
 		if (allVersions != null) {
 			for (String uniqueID : allVersions.values()) {
-				Either<ComponentMetadataData, TitanOperationStatus> getNodeResult = titanGenericDao.getNode(
-						GraphPropertiesDictionary.UNIQUE_ID.getProperty(), uniqueID, ComponentMetadataData.class);
+				Either<ComponentMetadataData, TitanOperationStatus> getNodeResult = titanGenericDao.getNode(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), uniqueID, ComponentMetadataData.class);
 				if (getNodeResult.isRight()) {
-					log.error("DB error during while getting component with uniqueID {}, error: {}", uniqueID,
-							getNodeResult.right().value());
+					log.error("DB error during while getting component with uniqueID {}, error: {}", uniqueID, getNodeResult.right().value());
 					return Either.right(StorageOperationStatus.GENERAL_ERROR);
 				}
 				ComponentMetadataData component = getNodeResult.left().value();
 				component.getMetadataDataDefinition().setInvariantUUID(invariantUUID);
-				Either<ComponentMetadataData, TitanOperationStatus> updateNodeResult = titanGenericDao
-						.updateNode(component, ComponentMetadataData.class);
-				log.debug("updated child invariantUUID {}",
-						updateNodeResult.left().value().getMetadataDataDefinition().getInvariantUUID());
+				Either<ComponentMetadataData, TitanOperationStatus> updateNodeResult = titanGenericDao.updateNode(component, ComponentMetadataData.class);
+				log.debug("updated child invariantUUID {}", updateNodeResult.left().value().getMetadataDataDefinition().getInvariantUUID());
 				if (updateNodeResult.isRight()) {
-					log.error("DB error during while updating component {}, error: {}",
-							component.getMetadataDataDefinition().getName(), updateNodeResult.right().value());
+					log.error("DB error during while updating component {}, error: {}", component.getMetadataDataDefinition().getName(), updateNodeResult.right().value());
 					return Either.right(StorageOperationStatus.GENERAL_ERROR);
 				}
 			}
@@ -1556,24 +1382,19 @@
 		return Either.left(true);
 	}
 
-	private List<ComponentMetadataData> reduceHighestVersionResourcesList(
-			List<ComponentMetadataData> allHighestVersionResources) {
+	private List<ComponentMetadataData> reduceHighestVersionResourcesList(List<ComponentMetadataData> allHighestVersionResources) {
 		List<ComponentMetadataData> resultList = null;
 		Map<String, ComponentMetadataData> resultHM = new HashMap<String, ComponentMetadataData>();
 		for (ComponentMetadataData resource : allHighestVersionResources) {
-			if (resource.getMetadataDataDefinition().getInvariantUUID() != null
-					&& !resource.getMetadataDataDefinition().getInvariantUUID().isEmpty()) {
+			if (resource.getMetadataDataDefinition().getInvariantUUID() != null && !resource.getMetadataDataDefinition().getInvariantUUID().isEmpty()) {
 				log.debug("invariantUUID {} ", resource.getMetadataDataDefinition().getInvariantUUID());
 				continue;
 			}
 			String curUUID = resource.getMetadataDataDefinition().getUUID();
 			if (resultHM.containsKey(curUUID)) {
-				int isHighest = resultHM.get(curUUID).getMetadataDataDefinition().getVersion()
-						.compareTo(resource.getMetadataDataDefinition().getVersion());
+				int isHighest = resultHM.get(curUUID).getMetadataDataDefinition().getVersion().compareTo(resource.getMetadataDataDefinition().getVersion());
 				if (isHighest > 0) {
-					log.debug("version {} is great than {} ",
-							resultHM.get(curUUID).getMetadataDataDefinition().getVersion(),
-							resource.getMetadataDataDefinition().getVersion());
+					log.debug("version {} is great than {} ", resultHM.get(curUUID).getMetadataDataDefinition().getVersion(), resource.getMetadataDataDefinition().getVersion());
 					continue;
 				}
 			}
@@ -1622,11 +1443,9 @@
 				return Either.right(StorageOperationStatus.GENERAL_ERROR);
 			}
 			
-			Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource,
-					ResourceMetadataData.class);
+			Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource, ResourceMetadataData.class);
 			if (updateNode.isRight()) {
-				log.error("DB error during while updating normative type {}, error: {}",
-						resource.getMetadataDataDefinition().getName(), updateNode.right().value());
+				log.error("DB error during while updating normative type {}, error: {}", resource.getMetadataDataDefinition().getName(), updateNode.right().value());
 				return Either.right(StorageOperationStatus.GENERAL_ERROR);
 			}
 			log.debug("Normative type {} was successfully updated", resource.getMetadataDataDefinition().getName());
@@ -1636,15 +1455,11 @@
 		return Either.left(false);
 	}
 
-	private Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName(ResourceMetadataData resource,
-			String toscaResourceName) {
+	private Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName(ResourceMetadataData resource, String toscaResourceName) {
 		if (toscaResourceName == null) {
-			toscaResourceName = CommonBeUtils.generateToscaResourceName(
-					((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name(),
-					resource.getMetadataDataDefinition().getSystemName());
+			toscaResourceName = CommonBeUtils.generateToscaResourceName(((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name(), resource.getMetadataDataDefinition().getSystemName());
 		}
-		Either<Boolean, StorageOperationStatus> validateToscaResourceNameExists = resourceOperation
-				.validateToscaResourceNameExists(toscaResourceName);
+		Either<Boolean, StorageOperationStatus> validateToscaResourceNameExists = resourceOperation.validateToscaResourceNameExists(toscaResourceName);
 		if (validateToscaResourceNameExists.isRight()) {
 			StorageOperationStatus storageOperationStatus = validateToscaResourceNameExists.right().value();
 			log.error("Couldn't validate toscaResourceName uniqueness - error: {}", storageOperationStatus);
@@ -1652,8 +1467,7 @@
 		}
 		if (validateToscaResourceNameExists.left().value()) {
 			log.debug("Setting tosca resource name to be {}", toscaResourceName);
-			((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition())
-					.setToscaResourceName(toscaResourceName);
+			((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).setToscaResourceName(toscaResourceName);
 			return Either.left(true);
 		} else {
 			// As agreed with Renana - cannot be fixed automatically
@@ -1673,22 +1487,17 @@
 
 	private Either<Boolean, StorageOperationStatus> setVfToscaResourceName(ResourceMetadataData resource) {
 		String resourceName = resource.getMetadataDataDefinition().getName();
-		String resourceType = ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType()
-				.name();
-		String toscaResourceName = CommonBeUtils.generateToscaResourceName(resourceType,
-				resource.getMetadataDataDefinition().getSystemName());
+		String resourceType = ((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getResourceType().name();
+		String toscaResourceName = CommonBeUtils.generateToscaResourceName(resourceType, resource.getMetadataDataDefinition().getSystemName());
 		log.debug("Setting tosca resource name {} to VF {}", toscaResourceName, resourceName);
 		((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).setToscaResourceName(toscaResourceName);
 
-		Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource,
-				ResourceMetadataData.class);
+		Either<ResourceMetadataData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(resource, ResourceMetadataData.class);
 		if (updateNode.isRight()) {
-			log.error("DB error during while updating VF tosca resource name {}, error: {}",
-					resource.getMetadataDataDefinition().getName(), updateNode.right().value());
+			log.error("DB error during while updating VF tosca resource name {}, error: {}", resource.getMetadataDataDefinition().getName(), updateNode.right().value());
 			return Either.right(StorageOperationStatus.GENERAL_ERROR);
 		}
-		log.debug("Tosca resource name of VF {} was successfully updated",
-				resource.getMetadataDataDefinition().getName());
+		log.debug("Tosca resource name of VF {} was successfully updated", resource.getMetadataDataDefinition().getName());
 		return Either.left(true);
 	}
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java
index 27b9351..e488017 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1604/VfcNamingAlignment.java
@@ -77,7 +77,7 @@
 				log.debug("Checking resource {}", vfc.getMetadataDataDefinition().getName());
 				boolean wasChanged = false;
 
-				Either<Boolean, StorageOperationStatus> vfcEither = fixToscaNameEmpty(vfc);
+				Either<Boolean, StorageOperationStatus>	vfcEither = fixToscaNameEmpty(vfc);
 				if (vfcEither.isRight()) {
 					log.error("DB error during checkIsToscaNameEmpty - exiting...");
 					result = false;
@@ -157,7 +157,6 @@
 		return Either.left(false);
 	}
 
-
 	private Either<Boolean, StorageOperationStatus> generateAndSetToscaResourceName(ResourceMetadataData vfc,
 			String toscaResourceName) {
 		if (toscaResourceName == null) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java
index 36ac98e..fedd4d2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/TitanFixUtils.java
@@ -37,9 +37,6 @@
 
 import java.util.*;
 
-/**
- * Created by mlando on 8/17/2016.
- */
 public class TitanFixUtils {
 	private static Logger log = LoggerFactory.getLogger(TitanFixUtils.class.getName());
 
@@ -291,7 +288,7 @@
 			List<TitanVertex> vertexList = new ArrayList<>();
 
 			if (iterator == null) {
-				log.error("failed to get iterator over vertices object returned for resource id " + propertyIdSecure);
+				log.error("failed to get iterator over vertices object returned for resource id {}", propertyIdSecure);
 				operationFailed = true;
 				return false;
 			}
@@ -301,7 +298,7 @@
 				vertexList.add(vertex);
 			}
 
-			if (!(vertexList.size() == 1)) {
+			if (vertexList.size() != 1) {
 				log.error("failed to get 1 vertex for resource id {} instead got {}", propertyIdSecure,
 						vertexList.size());
 				operationFailed = true;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java
index 347a570..673190e 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1610/ToscaArtifactsAlignment.java
@@ -304,7 +304,7 @@
 				if (optionalError.isPresent()) {
 					ArtifactDefinition toscaArtifact = optionalError.get().getLeft();
 					StorageOperationStatus storageError = optionalError.get().getRight();
-					log.error("{} When adding tosca artifact of type {} to component {} of type:{} " + "with uniqueId:{} a storageError occurred:{}", ERROR_PREFIX, toscaArtifact.getArtifactType(), component.getMetadataDataDefinition().getName(),
+					log.error("{} When adding tosca artifact of type {} to component {} of type:{} with uniqueId:{} a storageError occurred:{}", ERROR_PREFIX, toscaArtifact.getArtifactType(), component.getMetadataDataDefinition().getName(),
 							nodeType.getName(), component.getMetadataDataDefinition().getUniqueId(), storageError.name());
 
 					result = storageError;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/DataTypesUpdate.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/DataTypesUpdate.java
new file mode 100644
index 0000000..d7f3684
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/DataTypesUpdate.java
@@ -0,0 +1,428 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1702;
+
+import java.io.File;
+import java.io.FileReader;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.components.impl.ImportUtils;
+import org.openecomp.sdc.be.components.impl.ImportUtils.ResultStatusEnum;
+import org.openecomp.sdc.be.components.impl.ImportUtils.ToscaTagNamesEnum;
+import org.openecomp.sdc.be.config.BeEcompErrorManager;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.impl.ComponentsUtils;
+import org.openecomp.sdc.be.model.DataTypeDefinition;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.model.tosca.ToscaPropertyType;
+import org.openecomp.sdc.be.resources.data.PropertyData;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.yaml.snakeyaml.Yaml;
+
+import fj.data.Either;
+
+/**
+ * Allows to update existing or create new data types according input file (yaml)
+ * @author ns019t
+ *
+ */
+public class DataTypesUpdate {
+	
+	private static Logger log = LoggerFactory.getLogger(Migration1702.class.getName());
+	
+	@Autowired
+	private PropertyOperation propertyOperation;
+	@Autowired
+	private ComponentsUtils componentsUtils;
+
+	@SuppressWarnings("unchecked")
+	/**
+	 * Updates existing or creates new data types according input file (yaml)
+	 * @param dataTypeYmlFilePath
+	 * @return
+	 */
+	public boolean updateDataTypes(String dataTypeYmlFilePath) {
+		
+		
+		List<String> dataTypesToUpdate = new ArrayList<>();
+		dataTypesToUpdate.add("org.openecomp.datatypes.EcompHoming");
+		dataTypesToUpdate.add("org.openecomp.datatypes.EcompNaming");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.NetworkAssignments");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.ProviderNetwork");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.NetworkFlows");
+		dataTypesToUpdate.add("org.openecomp.datatypes.Artifact");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.VlanRequirements");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.IpRequirements");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.MacAssignments");
+		dataTypesToUpdate.add("org.openecomp.datatypes.network.MacRequirements");
+		dataTypesToUpdate.add("org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairIp");
+		dataTypesToUpdate.add("org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.MacAddress");
+		dataTypesToUpdate.add("org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.Properties");
+		dataTypesToUpdate.add("org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair");
+		dataTypesToUpdate.add("org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairs");
+		
+		boolean isSuccessful = true;
+		List<DataTypeDefinition> dataTypes = extractDataTypesFromYaml(dataTypeYmlFilePath);
+		
+		if(CollectionUtils.isEmpty(dataTypes)){
+			isSuccessful = false;
+		}
+		
+		List<ImmutablePair<DataTypeDefinition, Boolean>> createdElementTypes = new ArrayList<>();
+
+		Iterator<DataTypeDefinition> elementTypeItr = dataTypes.iterator();
+		if(isSuccessful ){
+			try {
+				while (elementTypeItr.hasNext()) {
+					DataTypeDefinition elementType = elementTypeItr.next();
+					String elementName = elementType.getName();
+					Either<ActionStatus, ResponseFormat> validateElementType = validateDataType(elementType);
+					if (validateElementType.isRight()) {
+						log.debug("Failed to validate data type {}. Status is {}. ", elementName, validateElementType.right().value());
+						isSuccessful =  false;
+						break;
+					}
+					log.debug("Going to get data type by name {}. ", elementName);
+					Either<DataTypeDefinition, StorageOperationStatus> findElementType = propertyOperation.getDataTypeByNameWithoutDerived(elementName);
+					if (findElementType.isRight()) {
+						StorageOperationStatus status = findElementType.right().value();
+						if (status != StorageOperationStatus.NOT_FOUND) {
+							log.debug("Failed to fetch data type {}. Status is {}. ", elementName , validateElementType.right().value());
+							isSuccessful =  false;
+							break;
+						} else {
+							log.debug("Going to add data type with name {}. ", elementName);
+							Either<DataTypeDefinition, StorageOperationStatus> dataModelResponse = propertyOperation.addDataType(elementType);
+	
+							if (dataModelResponse.isRight()) {
+									if (dataModelResponse.right().value() != StorageOperationStatus.SCHEMA_VIOLATION) {
+										log.debug("Failed to add data type {}. Status is {}. ", elementName , dataModelResponse.right().value());
+										isSuccessful =  false;
+										break;
+									} else {
+										createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, false));
+									}
+							} else {
+								createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, true));
+							}
+	
+						}
+					} else {
+						log.debug("Going to update data type with name {}. ", elementName);
+						Either<DataTypeDefinition, StorageOperationStatus> updateDataTypeRes = propertyOperation.updateDataType(elementType, findElementType.left().value());
+						if (updateDataTypeRes.isRight()) {
+							StorageOperationStatus status = updateDataTypeRes.right().value();
+							if (status == StorageOperationStatus.OK) {
+								createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, false));
+							} else {
+								log.debug("Failed to update data type {}. Status is {}. ", elementName , updateDataTypeRes.right().value());
+								isSuccessful =  false;
+								break;
+							}
+						} else {
+							createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, true));
+						}
+					}
+				}
+			} finally {
+				if(isSuccessful){
+					propertyOperation.getTitanGenericDao().commit();
+				}else{
+					propertyOperation.getTitanGenericDao().rollback();
+				}
+			}
+		}
+		return isSuccessful;
+	}
+
+	@SuppressWarnings("unchecked")
+	static public List<DataTypeDefinition> extractDataTypesFromYaml(String dataTypeYmlFilePath) {
+		String dataTypeName;
+		List<DataTypeDefinition> dataTypes = new ArrayList<>();
+		try {
+			File file = new File(dataTypeYmlFilePath);
+			FileReader fr = new FileReader(file);
+			Map<String, Object> toscaJson = (Map<String, Object>) new Yaml().load(fr);
+
+			Iterator<Entry<String, Object>> elementTypesEntryItr = toscaJson.entrySet().iterator();
+			while (elementTypesEntryItr.hasNext()) {
+				Entry<String, Object> elementTypeNameDataEntry = elementTypesEntryItr.next();
+				dataTypeName = elementTypeNameDataEntry.getKey();
+				Map<String, Object> elementTypeJsonData = (Map<String, Object>) elementTypeNameDataEntry.getValue();
+				
+				DataTypeDefinition dataType = new DataTypeDefinition();
+				dataType.setName(dataTypeName);
+
+				if (elementTypeJsonData != null) {
+					
+					if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.DESCRIPTION.getElementName())) {
+						dataType.setDescription( (String)elementTypeJsonData.get(ToscaTagNamesEnum.DESCRIPTION.getElementName()));
+					}
+					if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.DERIVED_FROM.getElementName())) {
+						dataType.setDerivedFromName( (String)elementTypeJsonData.get(ToscaTagNamesEnum.DERIVED_FROM.getElementName()));
+					}
+					List<PropertyDefinition> properties = getProperties(elementTypeJsonData);
+					if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.PROPERTIES.getElementName())) {
+						dataType.setProperties(properties);
+					}
+				}
+				dataTypes.add(dataType);
+			}
+
+		} catch (Exception e) {
+			log.debug("Failed to extract data types from Yaml file {}. ", dataTypeYmlFilePath);
+			e.printStackTrace();
+		}
+		return dataTypes;
+	}
+	
+	static public List<PropertyDefinition> getProperties(Map<String, Object> toscaJson) {
+		List<PropertyDefinition> values = null;
+		Either<Map<String, PropertyDefinition>, ResultStatusEnum> properties = ImportUtils.getProperties(toscaJson);
+
+		if (properties.isLeft()) {
+			values = new ArrayList<>();
+			Map<String, PropertyDefinition> propertiesMap = properties.left().value();
+			if (propertiesMap != null && propertiesMap.isEmpty() == false) {
+
+				for (Entry<String, PropertyDefinition> entry : propertiesMap.entrySet()) {
+					String propName = entry.getKey();
+					PropertyDefinition propertyDefinition = entry.getValue();
+					PropertyDefinition newPropertyDefinition = new PropertyDefinition(propertyDefinition);
+					newPropertyDefinition.setName(propName);
+					values.add(newPropertyDefinition);
+				}
+			}
+		}
+
+		return values;
+	}
+
+	private Either<ActionStatus, ResponseFormat> validateDataType(DataTypeDefinition dataType) {
+
+		String dataTypeName = dataType.getName();
+		List<PropertyDefinition> properties = dataType.getProperties();
+		if (properties == null) {
+			// At least one parameter should be defined either in the properties
+			// section or at one of the parents
+			String derivedDataType = dataType.getDerivedFromName();
+			// If there are no properties, then we can create a data type if it
+			// is an abstract one or it derives from non abstract data type
+			if ((derivedDataType == null || derivedDataType.isEmpty())) {
+				if (false == isAbstract(dataType.getName())) {
+					if (false == ToscaPropertyType.isScalarType(dataTypeName)) {
+						log.debug("Data type {} must have properties unless it derives from non abstract data type",dataType.getName());
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_NOR_PROPERTIES_NEITHER_DERIVED_FROM, dataType, null);
+
+						return Either.right(responseFormat);
+					}
+				}
+			} else {
+				// if it is not a scalar data type and it derives from abstract
+				// data type, we should reject the request.
+				if (false == ToscaPropertyType.isScalarType(dataTypeName) && true == isAbstract(derivedDataType)) {
+					log.debug("Data type {} which derived from abstract data type must have at least one property",dataType.getName());
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_NOR_PROPERTIES_NEITHER_DERIVED_FROM, dataType, null);
+
+					return Either.right(responseFormat);
+				}
+			}
+		} else {
+			// properties tag cannot be empty
+			if (properties.isEmpty()) {
+				ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROPERTIES_CANNOT_BE_EMPTY, dataType, null);
+
+				return Either.right(responseFormat);
+			}
+
+			// check no duplicates
+			Set<String> collect = properties.stream().map(p -> p.getName()).collect(Collectors.toSet());
+			if (collect != null) {
+				if (properties.size() != collect.size()) {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_DUPLICATE_PROPERTY, dataType, null);
+
+					return Either.right(responseFormat);
+				}
+			}
+
+			List<String> propertiesWithSameTypeAsDataType = properties.stream().filter(p -> p.getType().equals(dataType.getName())).map(p -> p.getName()).collect(Collectors.toList());
+			if (propertiesWithSameTypeAsDataType != null && propertiesWithSameTypeAsDataType.isEmpty() == false) {
+				log.debug("The data type {} contains properties with the type {}",dataType.getName(),dataType.getName());
+				ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROEPRTY_CANNOT_HAVE_SAME_TYPE_OF_DATA_TYPE, dataType, propertiesWithSameTypeAsDataType);
+
+				return Either.right(responseFormat);
+			}
+		}
+
+		String derivedDataType = dataType.getDerivedFromName();
+		if (derivedDataType != null) {
+			Either<DataTypeDefinition, StorageOperationStatus> derivedDataTypeByName = propertyOperation.getDataTypeByName(derivedDataType, true);
+			if (derivedDataTypeByName.isRight()) {
+				StorageOperationStatus status = derivedDataTypeByName.right().value();
+				if (status == StorageOperationStatus.NOT_FOUND) {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_DERIVED_IS_MISSING, dataType, null);
+
+					return Either.right(responseFormat);
+				} else {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.GENERAL_ERROR, dataType, null);
+
+					return Either.right(responseFormat);
+
+				}
+			} else {
+
+				DataTypeDefinition derivedDataTypeDef = derivedDataTypeByName.left().value();
+				if (properties != null && properties.isEmpty() == false) {
+
+					if (true == isScalarType(derivedDataTypeDef)) {
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_CANNOT_HAVE_PROPERTIES, dataType, null);
+
+						return Either.right(responseFormat);
+					}
+
+					Set<String> allParentsProps = new HashSet<>();
+					do {
+						List<PropertyDefinition> currentParentsProps = derivedDataTypeDef.getProperties();
+						if (currentParentsProps != null) {
+							for (PropertyDefinition propertyDefinition : currentParentsProps) {
+								allParentsProps.add(propertyDefinition.getName());
+							}
+						}
+						derivedDataTypeDef = derivedDataTypeDef.getDerivedFrom();
+					} while (derivedDataTypeDef != null);
+
+					// Check that no property is already defined in one of the
+					// ancestors
+					Set<String> alreadyExistPropsCollection = properties.stream().filter(p -> allParentsProps.contains(p.getName())).map(p -> p.getName()).collect(Collectors.toSet());
+					if (alreadyExistPropsCollection != null && alreadyExistPropsCollection.isEmpty() == false) {
+						List<String> duplicateProps = new ArrayList<>();
+						duplicateProps.addAll(alreadyExistPropsCollection);
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROPERTY_ALREADY_DEFINED_IN_ANCESTOR, dataType, duplicateProps);
+
+						return Either.right(responseFormat);
+					}
+
+				}
+			}
+		}
+		return Either.left(ActionStatus.OK);
+	}
+	
+	private boolean isAbstract(String dataTypeName) {
+
+		ToscaPropertyType isPrimitiveToscaType = ToscaPropertyType.isValidType(dataTypeName);
+
+		return isPrimitiveToscaType != null && isPrimitiveToscaType.isAbstract() == true;
+
+	}
+
+	private boolean isScalarType(DataTypeDefinition dataTypeDef) {
+
+		boolean isScalar = false;
+		DataTypeDefinition dataType = dataTypeDef;
+
+		while (dataType != null) {
+
+			String name = dataType.getName();
+			if (ToscaPropertyType.isScalarType(name)) {
+				isScalar = true;
+				break;
+			}
+
+			dataType = dataType.getDerivedFrom();
+		}
+
+		return isScalar;
+	}
+	
+//	public Either<DataTypeDefinition, StorageOperationStatus> updateDataType(DataTypeDefinition newDataTypeDefinition, DataTypeDefinition oldDataTypeDefinition) {
+//
+//		Either<DataTypeDefinition, StorageOperationStatus> result = null;
+//
+//		try {
+//
+//			List<PropertyDefinition> newProperties = newDataTypeDefinition.getProperties();
+//
+//			List<PropertyDefinition> oldProperties = oldDataTypeDefinition.getProperties();
+//
+//			String newDerivedFromName = getDerivedFromName(newDataTypeDefinition);
+//
+//			String oldDerivedFromName = getDerivedFromName(oldDataTypeDefinition);
+//
+//			String dataTypeName = newDataTypeDefinition.getName();
+//			
+//			List<PropertyDefinition> propertiesToAdd = new ArrayList<>();
+//			if (isPropertyOmitted(newProperties, oldProperties, dataTypeName) || isPropertyTypeChanged(dataTypeName, newProperties, oldProperties, propertiesToAdd) || isDerivedFromNameChanged(dataTypeName, newDerivedFromName, oldDerivedFromName)) {
+//
+//				log.debug("The new data type " + dataTypeName + " is invalid.");
+//
+//				result = Either.right(StorageOperationStatus.CANNOT_UPDATE_EXISTING_ENTITY);
+//				return result;
+//			}
+//
+//			if (propertiesToAdd == null || propertiesToAdd.isEmpty()) {
+//				log.debug("No new properties has been defined in the new data type " + newDataTypeDefinition);
+//				result = Either.right(StorageOperationStatus.OK);
+//				return result;
+//			}
+//
+//			Either<Map<String, PropertyData>, TitanOperationStatus> addPropertiesToDataType = addPropertiesToDataType(oldDataTypeDefinition.getUniqueId(), propertiesToAdd);
+//
+//			if (addPropertiesToDataType.isRight()) {
+//				log.debug("Failed to update data type {} to Graph. Status is {}", oldDataTypeDefinition, addPropertiesToDataType.right().value().name());
+//				BeEcompErrorManager.getInstance().logBeFailedAddingNodeTypeError("UpdateDataType", "Property");
+//				result = Either.right(DaoStatusConverter.convertTitanStatusToStorageStatus(addPropertiesToDataType.right().value()));
+//				return result;
+//			} else {
+//
+//				Either<DataTypeDefinition, TitanOperationStatus> dataTypeByUid = this.getDataTypeByUid(oldDataTypeDefinition.getUniqueId());
+//				if (dataTypeByUid.isRight()) {
+//					TitanOperationStatus status = addPropertiesToDataType.right().value();
+//					log.debug("Failed to get data type {} after update. Status is {}", oldDataTypeDefinition.getUniqueId(), status.name());
+//					BeEcompErrorManager.getInstance().logBeFailedRetrieveNodeError("UpdateDataType", "Property", status.name());
+//					result = Either.right(DaoStatusConverter.convertTitanStatusToStorageStatus(status));
+//				} else {
+//					result = Either.left(dataTypeByUid.left().value());
+//				}
+//			}
+//
+//			return result;
+//
+//		}
+//	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/Migration1702.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/Migration1702.java
new file mode 100644
index 0000000..861e913
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1702/Migration1702.java
@@ -0,0 +1,1408 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.impl.migration.v1702;
+
+import java.io.BufferedWriter;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.math3.analysis.solvers.RiddersSolver;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
+import org.openecomp.sdc.be.components.impl.ImportUtils;
+import org.openecomp.sdc.be.components.impl.ImportUtils.ResultStatusEnum;
+import org.openecomp.sdc.be.components.impl.ImportUtils.ToscaTagNamesEnum;
+import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
+import org.openecomp.sdc.be.config.BeEcompErrorManager;
+import org.openecomp.sdc.be.config.BeEcompErrorManager.ErrorSeverity;
+import org.openecomp.sdc.be.config.Configuration.VfModuleProperty;
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.graph.GraphElementFactory;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphRelation;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgeLabels;
+import org.openecomp.sdc.be.dao.neo4j.GraphEdgePropertiesDictionary;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.GroupDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.impl.ComponentsUtils;
+import org.openecomp.sdc.be.model.ArtifactDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.DataTypeDefinition;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupInstance;
+import org.openecomp.sdc.be.model.GroupProperty;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.Operation;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.RequirementCapabilityRelDef;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.ResourceMetadataDefinition;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ComponentOperation;
+import org.openecomp.sdc.be.model.operations.impl.GroupOperation;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.model.operations.impl.ResourceOperation;
+import org.openecomp.sdc.be.model.operations.impl.ServiceOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.model.tosca.ToscaPropertyType;
+import org.openecomp.sdc.be.resources.data.ArtifactData;
+import org.openecomp.sdc.be.resources.data.ComponentInstanceData;
+import org.openecomp.sdc.be.resources.data.ComponentMetadataData;
+import org.openecomp.sdc.be.resources.data.DataTypeData;
+import org.openecomp.sdc.be.resources.data.GroupData;
+import org.openecomp.sdc.be.resources.data.PropertyData;
+import org.openecomp.sdc.be.resources.data.PropertyValueData;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+import org.openecomp.sdc.be.resources.data.ServiceMetadataData;
+import org.openecomp.sdc.be.user.UserBusinessLogic;
+import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
+import org.openecomp.sdc.common.api.ArtifactTypeEnum;
+import org.openecomp.sdc.common.api.Constants;
+import org.openecomp.sdc.common.util.ValidationUtils;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.yaml.snakeyaml.Yaml;
+
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanGraphQuery;
+import com.thinkaurelius.titan.core.TitanVertex;
+
+import fj.data.Either;
+
+public class Migration1702 {
+	private static final String CONFIG_GROUP_TYPES_YML = "/config/groupTypes.yml";
+
+	private static final String CONFIG_DATA_TYPES_YML = "/config/dataTypes.yml";
+
+	private static Logger log = LoggerFactory.getLogger(Migration1702.class.getName());
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+	@Autowired
+	protected ResourceOperation resourceOperation;
+	@Autowired
+	protected ServiceOperation serviceOperation;
+	@Autowired
+	private ServiceBusinessLogic serviceBusinessLogic;
+	@Autowired
+	private GroupTypeOperation groupTypeOperation;
+	@Autowired
+	private PropertyOperation propertyOperation;
+	@Autowired
+	private ComponentsUtils componentsUtils;
+	@Autowired
+	private GroupOperation groupOperation;
+
+	@Autowired
+	private ArtifactsBusinessLogic artifactsBusinessLogic;
+
+	@Autowired
+	private UserBusinessLogic userAdminManager;
+
+	@Autowired
+	private ComponentInstanceOperation componentInstanceOperation;
+
+	public boolean migrate(String appConfigDir) {
+		boolean result = true;
+		String methodName = "alignCustomizationUUID";
+
+		try {
+			if (!alignCustomizationUUID()) {
+				log.error("Failed to align customization UUID");
+				result = false;
+				return result;
+			}
+			methodName = "alignGroupDataType";
+			if (!alignGroupDataType()) {
+				log.error("Failed to align Group data type");
+				result = false;
+				return result;
+			}
+			methodName = "alignVfModuleProperties";
+			if (!alignVfModuleProperties()) {
+				log.error("Failed to align Vf Module Properties");
+				result = false;
+				return result;
+			}
+			methodName = "alignDataType";
+			if (!alignDataType()) {
+				log.error("Failed to align data type");
+				result = false;
+				return result;
+			}
+			methodName = "alignHeatEnv";
+			if (!alignHeatEnv()) {
+				log.error("Failed to align heat env on VF level");
+				result = false;
+				return result;
+			}
+			methodName = "alignModuleInstances";
+			if (!alignModuleInstances()) {
+				log.error("Failed to align module instances");
+				result = false;
+				return result;
+			}
+
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", methodName, e);
+			result = false;
+		}
+		return result;
+	}
+
+	private boolean alignModuleInstances() {
+		log.info(" Align Module Instances");
+		boolean result = true;
+		boolean statusToReturn = true;
+
+		Writer writer = null;
+
+		try {
+			long time = System.currentTimeMillis();
+			writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("runstatusModules_" + time + ".csv"), "utf-8"));
+
+			writer.write("resource id, instance id, group id, status\n");
+
+			Either<List<ServiceMetadataData>, TitanOperationStatus> allServices = titanGenericDao.getByCriteria(NodeTypeEnum.Service, null, ServiceMetadataData.class);
+			if (allServices.isRight()) {
+				if (allServices.right().value() != TitanOperationStatus.NOT_FOUND) {
+					log.error("Align heat env on Vf  - Failed to fetch services {}", allServices.right().value());
+					result = false;
+					statusToReturn = false;
+					return statusToReturn;
+				} else {
+					log.debug("No Services. ");
+					return statusToReturn;
+				}
+			}
+			log.info("Need to handle {} services", allServices.left().value().size());
+			long handledServices = 0;
+			for (ServiceMetadataData metadata : allServices.left().value()) {
+				String serviceId = metadata.getMetadataDataDefinition().getUniqueId();
+				Either<ImmutablePair<List<ComponentInstance>, List<RequirementCapabilityRelDef>>, TitanOperationStatus> riRes = componentInstanceOperation.getComponentInstancesOfComponent(serviceId, NodeTypeEnum.Service, NodeTypeEnum.Resource);
+				if (riRes.isRight()) {
+					if (riRes.right().value() == TitanOperationStatus.NOT_FOUND) {
+						log.info("No instancces for service {}", serviceId);
+					} else {
+						log.info("Align vf modules - failed to fetch component instances for service {} error {}", riRes.right().value());
+						writeModuleResultToFile(writer, serviceId, null, null, riRes.right().value());
+						statusToReturn = false;
+					}
+					++handledServices;
+					continue;
+				}
+				List<ComponentInstance> componentInstances = riRes.left().value().left;
+				for (ComponentInstance ci : componentInstances) {
+					Either<TitanVertex, TitanOperationStatus> ciVertexRes = titanGenericDao.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), ci.getUniqueId());
+					if (ciVertexRes.isRight()) {
+						log.info("Failed to fetch vertex for component instance {}, error {}", ci.getUniqueId(), ciVertexRes.right().value());
+						writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), null, ciVertexRes.right().value());
+						statusToReturn = false;
+						continue;
+					}
+					TitanVertex ciVertex = ciVertexRes.left().value();
+					if (createGroupInstancesOnComponentInstance(writer, ci, ciVertex, serviceId) == false) {
+						statusToReturn = false;
+						continue;
+					}
+				}
+				writer.flush();
+				++handledServices;
+			}
+
+			log.info("Handled {} services", handledServices);
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", "alignModuleInstances", e);
+			result = false;
+			statusToReturn = false;
+		} finally {
+
+			log.info(" Align Module Instances finished");
+			if (!result) {
+				log.info("Doing rollback");
+				titanGenericDao.rollback();
+			} else {
+				log.info("Doing commit");
+				titanGenericDao.commit();
+			}
+			try {
+				writer.flush();
+				writer.close();
+			} catch (Exception ex) {
+				/* ignore */}
+		}
+		return statusToReturn;
+	}
+
+	private boolean createGroupInstancesOnComponentInstance(Writer writer, ComponentInstance ci, TitanVertex ciVertex, String serviceId) {
+		boolean statusToReturn = true;
+
+		Map<String, Object> properties = titanGenericDao.getProperties(ciVertex);
+		ComponentInstanceData createdComponentInstance = GraphElementFactory.createElement(NodeTypeEnum.ResourceInstance.getName(), GraphElementTypeEnum.Node, properties, ComponentInstanceData.class);
+
+		Either<List<GroupDefinition>, TitanOperationStatus> groupEither = groupOperation.getAllGroupsFromGraph(ci.getComponentUid(), NodeTypeEnum.Resource);
+		if (groupEither.isRight()) {
+			if (groupEither.right().value() != TitanOperationStatus.OK && groupEither.right().value() != TitanOperationStatus.NOT_FOUND) {
+				TitanOperationStatus status = groupEither.right().value();
+				log.error("Failed to associate group instances to component instance {}. Status is {}", ci.getUniqueId(), status);
+				writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), null, status);
+				return false;
+			} else {
+				log.debug("No groups for component instance {}. ", ci.getUniqueId());
+
+				writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), null, "No groups");
+				return true;
+			}
+		}
+		List<GroupDefinition> groupsIמResource = groupEither.left().value();
+		if (groupsIמResource != null && !groupsIמResource.isEmpty()) {
+			List<GroupDefinition> vfGroupsListInResource = groupsIמResource.stream().filter(p -> p.getType().equals("org.openecomp.groups.VfModule")).collect(Collectors.toList());
+
+			for (GroupDefinition groupInResource : vfGroupsListInResource) {
+				Iterator<Edge> edgesToInstances = ciVertex.edges(Direction.OUT, GraphEdgeLabels.GROUP_INST.getProperty());
+				boolean exist = false;
+				String normalizedName = ValidationUtils.normalizeComponentInstanceName(ci.getNormalizedName() + ".." + groupInResource.getName());
+				String grInstId = UniqueIdBuilder.buildResourceInstanceUniuqeId(ci.getUniqueId(), groupInResource.getUniqueId(), normalizedName);
+				
+
+				while (edgesToInstances.hasNext()) {
+					Edge edgeToInst = edgesToInstances.next();
+					Vertex grInstVertex = edgeToInst.inVertex();
+					String grId = (String) titanGenericDao.getProperty((TitanVertex) grInstVertex, GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+					if (grId.equals(grInstId)) {
+						exist = true;
+						break;
+					}
+				}
+				if (!exist) {
+					Either<GroupInstance, StorageOperationStatus> status = componentInstanceOperation.createGroupInstance(ciVertex, groupInResource, ci);
+					if (status.isRight()) {
+						log.error("Failed to create group instance {} in component instance {}. Status is {}", grInstId, ci.getUniqueId(), status.right().value());
+						statusToReturn = false;
+						writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), grInstId, status.right().value());
+					} else {
+						writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), grInstId, "OK");
+					}
+				} else {
+					writeModuleResultToFile(writer, serviceId, ci.getUniqueId(), grInstId, "Exist");
+				}
+
+			}
+		}
+		return statusToReturn;
+	}
+
+	@SuppressWarnings("resource")
+	private boolean alignHeatEnv() {
+		Writer writer = null;
+		log.info(" Align heat env on Vf level");
+		boolean statusToReturn = true;
+
+		boolean result = true;
+		try {
+			long time = System.currentTimeMillis();
+			writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream("runstatusEnv_" + time + ".csv"), "utf-8"));
+
+			writer.write("resource id, operation, artifact id, status\n");
+			User user = buildDummyUser();
+
+			Map<String, Object> props = new HashMap<String, Object>();
+			props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+
+			Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+			if (allResources.isRight()) {
+				if (allResources.right().value() != TitanOperationStatus.NOT_FOUND) {
+					log.error("Align heat env on Vf  - Failed to fetch resources {}", allResources.right().value());
+					statusToReturn = false;
+					result = false;
+					return statusToReturn;
+				} else {
+					log.debug("No VF resources. ");
+					return result;
+				}
+			}
+			List<ResourceMetadataData> resources = allResources.left().value();
+			log.debug("Need to handle {} resources", resources.size());
+
+			long totalHandledArtifacts = 0;
+			for (ResourceMetadataData metadata : resources) {
+				Either<List<ImmutablePair<ArtifactData, GraphEdge>>, TitanOperationStatus> artifactNodesRes = titanGenericDao.getChildrenNodes(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), (String) metadata.getUniqueId(),
+						GraphEdgeLabels.ARTIFACT_REF, NodeTypeEnum.ArtifactRef, ArtifactData.class);
+				if (artifactNodesRes.isRight()) {
+					if (artifactNodesRes.right().value() != TitanOperationStatus.NOT_FOUND) {
+						log.error("Align heat env on Vf  - Failed to fetch artifacts for resources {}", metadata.getUniqueId(), artifactNodesRes.right().value());
+						writer.write(metadata.getUniqueId() + ",get artifacts, ,Failed to fetch artifacts " + artifactNodesRes.right().value() + "\n");
+						statusToReturn = false;
+						continue;
+					} else {
+						log.debug("No artifact for resource {} . ", metadata.getUniqueId());
+						writer.write(metadata.getUniqueId() + ",get artifacts, ,No artfacts\n");
+						continue;
+					}
+				}
+				List<ImmutablePair<ArtifactData, GraphEdge>> artifacts = artifactNodesRes.left().value();
+
+				for (ImmutablePair<ArtifactData, GraphEdge> pair : artifacts) {
+					ArtifactData artifactData = pair.left;
+					if (isNeedCreatePlaceHolder(artifactData)) {
+						// check if exist heat env - if not -> create
+						String heatEnvId = (String) artifactData.getUniqueId() + "env";
+						if (validateOrCreateHeatEnv(user, metadata, artifactData, heatEnvId, writer) == false) {
+							statusToReturn = false;
+						}
+						// check if connected to group - if not -> connect
+						if (validateOrAssociateHeatAnv(metadata, artifactData, heatEnvId, writer) == false) {
+							statusToReturn = false;
+						}
+						++totalHandledArtifacts;
+						writer.flush();
+					}
+
+				}
+			}
+			log.debug("Total handled {}  artifacts", totalHandledArtifacts);
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", "alignHeatEnv", e);
+			result = false;
+		} finally {
+
+			log.info("Aling heat env on VF level finished ");
+			if (!result) {
+				log.info("Doing rollback");
+				titanGenericDao.rollback();
+			} else {
+				log.info("Doing commit");
+				titanGenericDao.commit();
+			}
+			try {
+				writer.flush();
+				writer.close();
+			} catch (Exception ex) {
+				/* ignore */}
+		}
+		return statusToReturn;
+	}
+
+	private boolean validateOrAssociateHeatAnv(ResourceMetadataData metadata, ArtifactData artifactData, String heatEnvId, Writer writer) {
+		boolean statusToReturn = true;
+
+		String resourceId = (String) metadata.getUniqueId();
+		Either<ArtifactData, TitanOperationStatus> heatEnvArtifactRes = titanGenericDao.getNode(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), heatEnvId, ArtifactData.class);
+		if (heatEnvArtifactRes.isRight()) {
+			log.error("Align heat env on Vf  - Failed to fetch heat env node for id {}  {}", heatEnvId, heatEnvArtifactRes.right().value());
+			writeResultToFile(writer, "get artifact node for relation", resourceId, heatEnvId, heatEnvArtifactRes.right().value());
+			return false;
+		}
+		ArtifactData heatEnvArtifact = heatEnvArtifactRes.left().value();
+
+		Either<List<ImmutablePair<GroupData, GraphEdge>>, TitanOperationStatus> groupsForHeatRes = titanGenericDao.getParentNodes(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), (String) artifactData.getUniqueId(),
+				GraphEdgeLabels.GROUP_ARTIFACT_REF, NodeTypeEnum.Group, GroupData.class);
+		if (groupsForHeatRes.isRight()) {
+			writeResultToFile(writer, "getChildrenNodes groups for heat", resourceId, (String) artifactData.getUniqueId(), groupsForHeatRes.right().value());
+			if (groupsForHeatRes.right().value() != TitanOperationStatus.NOT_FOUND) {
+				log.error("Align heat env on Vf  - Failed to fetch groups for heat artifact {} in resources {} : {}", artifactData.getUniqueId(), metadata.getUniqueId(), groupsForHeatRes.right().value());
+				return false;
+			} else {
+				log.debug("Align heat env on Vf  - No groups for heat artifact {} in resources {} : {}", artifactData.getUniqueId(), metadata.getUniqueId(), groupsForHeatRes.right().value());
+				return true;
+			}
+		}
+		List<ImmutablePair<GroupData, GraphEdge>> groupsForHeat = groupsForHeatRes.left().value();
+		Either<List<ImmutablePair<GroupData, GraphEdge>>, TitanOperationStatus> groupsForHeatEnvRes = titanGenericDao.getParentNodes(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), heatEnvId, GraphEdgeLabels.GROUP_ARTIFACT_REF, NodeTypeEnum.Group,
+				GroupData.class);
+		List<ImmutablePair<GroupData, GraphEdge>> groupsForHeatEnv;
+		if (groupsForHeatEnvRes.isRight()) {
+			if (groupsForHeatEnvRes.right().value() != TitanOperationStatus.NOT_FOUND) {
+				log.error("Align heat env on Vf  - Failed to fetch groups for heat env artifact {} in resources {} : ", artifactData.getUniqueId(), metadata.getUniqueId(), groupsForHeatEnvRes.right().value());
+				writeResultToFile(writer, "getChildrenNodes groups for heat env", resourceId, heatEnvId, groupsForHeatEnvRes.right().value());
+				return false;
+			} else {
+				groupsForHeatEnv = new ArrayList<>();
+			}
+		} else {
+			groupsForHeatEnv = groupsForHeatEnvRes.left().value();
+		}
+
+		for (ImmutablePair<GroupData, GraphEdge> heatGroup : groupsForHeat) {
+			// check if exist
+			boolean exist = false;
+			GroupDataDefinition groupDataDefinition = heatGroup.left.getGroupDataDefinition();
+			for (ImmutablePair<GroupData, GraphEdge> heatEnvGroup : groupsForHeatEnv) {
+				if (groupDataDefinition.getName().equals(heatEnvGroup.left.getGroupDataDefinition().getName())) {
+					exist = true;
+					break;
+				}
+			}
+			String groupId = (String) heatGroup.left.getUniqueId();
+			if (!exist) {
+				// need associate
+
+				Map<String, Object> properties = new HashMap<String, Object>();
+				properties.put(GraphPropertiesDictionary.NAME.getProperty(), heatEnvArtifact.getLabel());
+				Either<GraphRelation, TitanOperationStatus> createRelation = titanGenericDao.createRelation(heatGroup.left, heatEnvArtifact, GraphEdgeLabels.GROUP_ARTIFACT_REF, properties);
+				log.trace("After associate group {} to artifact {}", groupDataDefinition.getName(), heatEnvArtifact.getUniqueIdKey());
+				if (createRelation.isRight()) {
+					log.error("Align heat env on Vf  - Failed to associate heat env artifact {} to group {} : {}", artifactData.getUniqueId(), groupDataDefinition.getUniqueId(), createRelation.right().value());
+
+					writeResultToFile(writer, "associate to group- relation" + groupId, resourceId, heatEnvId, groupsForHeatRes.right().value());
+					statusToReturn = false;
+				} else {
+					writeResultToFile(writer, "associate to group " + groupId, resourceId, heatEnvId, "OK");
+				}
+			} else {
+				writeResultToFile(writer, "associate group " + groupId, resourceId, heatEnvId, "Exist");
+			}
+		}
+		return statusToReturn;
+	}
+
+	private boolean validateOrCreateHeatEnv(User user, ResourceMetadataData metadata, ArtifactData artifactData, String heatEnvId, Writer writer) {
+		String resourceId = metadata.getMetadataDataDefinition().getUniqueId();
+		boolean statusToReturn = true;
+		Either<ArtifactData, TitanOperationStatus> node = titanGenericDao.getNode(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), heatEnvId, ArtifactData.class);
+		boolean isContinue = true;
+		if (node.isRight()) {
+			if (TitanOperationStatus.NOT_FOUND == node.right().value()) {
+				// create
+				ArtifactDefinition heatArtifact = new ArtifactDefinition(artifactData.getArtifactDataDefinition());
+				ResourceMetadataDefinition resourceMetadataDataDefinition = new ResourceMetadataDefinition((ResourceMetadataDataDefinition) metadata.getMetadataDataDefinition());
+
+				Resource resource = new Resource(resourceMetadataDataDefinition);
+
+				String heatUpdater = heatArtifact.getUserIdLastUpdater();
+				Either<User, ActionStatus> userHeat = userAdminManager.getUser(heatUpdater, true);
+
+				Either<ArtifactDefinition, ResponseFormat> createHeatEnvPlaceHolder = artifactsBusinessLogic.createHeatEnvPlaceHolder(heatArtifact, ArtifactsBusinessLogic.HEAT_VF_ENV_NAME, (String) metadata.getUniqueId(), NodeTypeEnum.Resource,
+						metadata.getMetadataDataDefinition().getName(), userHeat.left().value(), resource, null, false);
+				if (createHeatEnvPlaceHolder.isRight()) {
+					log.error("Align heat env on Vf  - Failed to create  heat env {} for heat {} : {}", heatEnvId, heatArtifact.getUniqueId(), createHeatEnvPlaceHolder.right().value().getText());
+					writeResultToFile(writer, "create placeholder", resourceId, heatEnvId, createHeatEnvPlaceHolder.right().value().getText());
+					isContinue = false;
+					statusToReturn = false;
+				} else {
+					writeResultToFile(writer, "create placeholder", resourceId, heatEnvId, "OK");
+				}
+			} else {
+				log.error("Align heat env on Vf  - Failed to fetch heat env node for id {}  {}", heatEnvId, node.right().value());
+				writeResultToFile(writer, "create placeholder - get", resourceId, heatEnvId, node.right().value());
+				isContinue = false;
+				statusToReturn = false;
+			}
+		} else {
+			writeResultToFile(writer, "create placeholder - get", resourceId, heatEnvId, "Exist");
+		}
+		if (isContinue) {
+			log.debug("associate heat env artifact to all resources ");
+			String heatUniqueId = (String) artifactData.getUniqueId();
+			Either<TitanVertex, TitanOperationStatus> heatVertexRes = titanGenericDao.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), heatUniqueId);
+			if (heatVertexRes.isRight()) {
+				log.debug("Failed to fetch vertex for heat {} error {}", heatUniqueId, heatVertexRes.right().value());
+				writeResultToFile(writer, "create placeholder - get heat vertex", resourceId, heatEnvId, heatVertexRes.right().value());
+				statusToReturn = false;
+				return statusToReturn;
+			}
+			TitanVertex heatVertex = heatVertexRes.left().value();
+			Either<TitanVertex, TitanOperationStatus> heatEnvVertexRes = titanGenericDao.getVertexByProperty(GraphPropertiesDictionary.UNIQUE_ID.getProperty(), heatEnvId);
+			if (heatEnvVertexRes.isRight()) {
+				log.debug("Failed to fetch vertex for heat env {} error {}", heatEnvId, heatEnvVertexRes.right().value());
+				writeResultToFile(writer, "create placeholder - get heat env vertex", resourceId, heatEnvId, heatEnvVertexRes.right().value());
+				statusToReturn = false;
+				return statusToReturn;
+			}
+
+			Vertex heatEnvVertex = heatEnvVertexRes.left().value();
+			Iterator<Edge> edgesToHeat = heatVertex.edges(Direction.IN, GraphEdgeLabels.ARTIFACT_REF.name());
+			while (edgesToHeat.hasNext()) {
+				Edge edgeToHeat = edgesToHeat.next();
+				boolean exist = false;
+				Vertex outVertexHeat = edgeToHeat.outVertex();
+				Map<String, Object> outVertexProps = titanGenericDao.getProperties(outVertexHeat);
+
+				String resIdToHeat = (String) outVertexProps.get(GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+
+				Iterator<Edge> edgesToEnv = heatEnvVertex.edges(Direction.IN, GraphEdgeLabels.ARTIFACT_REF.name());
+				while (edgesToEnv.hasNext()) {
+					Edge edgeToEnv = edgesToEnv.next();
+					Vertex outVertexEnv = edgeToEnv.outVertex();
+					String resIdToEnv = (String) titanGenericDao.getProperty((TitanVertex) outVertexEnv, GraphPropertiesDictionary.UNIQUE_ID.getProperty());
+					if (resIdToHeat.equals(resIdToEnv)) {
+						exist = true;
+						break;
+					}
+				}
+				if (!exist) {
+					Map<String, Object> properties = titanGenericDao.getProperties(edgeToHeat);
+					// need to associate additional resource to heat env
+					// update artifact label on edge
+					String heatEnvLabel = (String) titanGenericDao.getProperty((TitanVertex) heatEnvVertex, GraphPropertiesDictionary.ARTIFACT_LABEL.getProperty());
+					properties.put(GraphEdgePropertiesDictionary.NAME.getProperty(), heatEnvLabel);
+
+					TitanOperationStatus createEdge = titanGenericDao.createEdge(outVertexHeat, heatEnvVertex, GraphEdgeLabels.ARTIFACT_REF, properties);
+					if (createEdge == TitanOperationStatus.OK) {
+						writeResultToFile(writer, "associate to resource " + resIdToHeat, resourceId, heatEnvId, "OK");
+					} else {
+						writeResultToFile(writer, "associate to resource " + resIdToHeat, resourceId, heatEnvId, createEdge);
+						statusToReturn = false;
+					}
+				} else {
+					writeResultToFile(writer, "associate to resource " + resIdToHeat, resourceId, heatEnvId, "Exist");
+				}
+			}
+		}
+		return statusToReturn;
+	}
+
+	private void writeResultToFile(Writer writer, String op, String resourceId, String artifactD, Object status) {
+		try {
+			StringBuffer sb = new StringBuffer(resourceId);
+			sb.append(",").append(op).append(",").append(artifactD).append(",").append(status).append("\n");
+			writer.write(sb.toString());
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+	}
+
+	private void writeModuleResultToFile(Writer writer, String resourceId, String instanceId, String groupId, Object status) {
+		try {
+			StringBuffer sb = new StringBuffer(resourceId);
+			sb.append(",").append(instanceId).append(",").append(groupId).append(",").append(status).append("\n");
+			writer.write(sb.toString());
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+	}
+
+	private boolean isNeedCreatePlaceHolder(ArtifactData artifactData) {
+		String artifactType = artifactData.getArtifactDataDefinition().getArtifactType();
+		ArtifactTypeEnum type = ArtifactTypeEnum.findType(artifactType);
+		if (ArtifactGroupTypeEnum.DEPLOYMENT == artifactData.getArtifactDataDefinition().getArtifactGroupType() && (ArtifactTypeEnum.HEAT == type || ArtifactTypeEnum.HEAT_NET == type || ArtifactTypeEnum.HEAT_VOL == type)) {
+			return true;
+		}
+		return false;
+	}
+
+	/**
+	 * Aligns the properties of every DEFAULT_GROUP_VF_MODULE group on all VF
+	 * resources: fills in configured default values, derives "volume_group"
+	 * from the presence of a HEAT_VOL deployment artifact and "vf_module_label"
+	 * from the group name, then persists every property not yet on the graph.
+	 *
+	 * Commits the Titan transaction on success and rolls back on any failure.
+	 *
+	 * @return true when all VFs were aligned successfully, false otherwise
+	 */
+	private boolean alignVfModuleProperties() {
+		boolean result = true;
+		try {
+			log.info(" Align Vf module properties");
+
+			// Extracts the module label out of group names shaped like "<x>..<label>..<y>"
+			final Pattern pattern = Pattern.compile("\\..(.*?)\\..");
+			final String LABEL_NAME = "vf_module_label";
+			final String VOLUME_GROUP_NAME = "volume_group";
+
+			Either<TitanGraph, TitanOperationStatus> graph = titanGenericDao.getGraph();
+			if (graph.isRight()) {
+				log.error("Align Vf module properties - Failed to get graph {}", graph.right().value());
+				result = false;
+				return result;
+			}
+
+			Map<String, Object> props = new HashMap<String, Object>();
+			props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+
+			Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, ResourceMetadataData.class);
+
+			if (allResources.isRight()) {
+				if (allResources.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+					// Having no VF resources at all is a legitimate, successful outcome.
+					log.debug("Align Vf module properties - no VF resources");
+					result = true;
+					return result;
+				} else {
+					log.error("Align Vf module properties - generateTosca failed fetch all resources,error {}", allResources.right().value());
+					result = false;
+					return result;
+				}
+			}
+
+			List<ResourceMetadataData> listAllVFs = allResources.left().value();
+
+			// Only groups and deployment artifacts are needed below; ignore the rest.
+			ComponentParametersView componentParametersView = new ComponentParametersView(true);
+			componentParametersView.setIgnoreGroups(false);
+			componentParametersView.setIgnoreArtifacts(false);
+
+			log.info("Align Vf module properties - Starting to update the VF's");
+			Map<String, VfModuleProperty> vfModuleProperties = ConfigurationManager.getConfigurationManager().getConfiguration().getVfModuleProperties();
+			for (ResourceMetadataData resourceMetadataData : listAllVFs) {
+				String uniqueId = (String) resourceMetadataData.getUniqueId();
+
+				Either<Resource, StorageOperationStatus> resourceResponse = resourceOperation.getResource(uniqueId, componentParametersView, true);
+
+				if (resourceResponse.isRight()) {
+					log.error("Align Vf module properties - failed resource with UniqueID: {} , error {}", uniqueId, resourceResponse.right().value());
+					result = false;
+					return result;
+				}
+
+				Resource resource = resourceResponse.left().value();
+				List<GroupDefinition> groups = resource.getGroups();
+
+				if (groups == null || groups.isEmpty()) {
+					log.debug("Align Vf module properties - resource UniqueID: {} does not contain groups", resource.getUniqueId());
+					continue;
+				}
+
+				for (GroupDefinition groupDefinition : groups) {
+
+					// Only VF-module groups are aligned; other group types are untouched.
+					if (!groupDefinition.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
+						continue;
+					}
+					log.info("update vf module proerties for group {} ", groupDefinition.getUniqueId());
+
+					List<GroupProperty> properties = groupDefinition.convertToGroupProperties();
+					if (properties == null) {
+						properties = new ArrayList<>();
+					}
+					Boolean isBase = false;
+					List<String> artifacts = groupDefinition.getArtifacts();
+					if (artifacts == null) {
+						artifacts = new ArrayList<>();
+					}
+					// volume_group is true when the module carries a HEAT_VOL deployment artifact.
+					Boolean isVolumeGroup = false;
+					for (String artifactId : artifacts) {
+						ArtifactDefinition artifactDef = findArtifactInList(resource.getDeploymentArtifacts(), artifactId);
+						if (artifactDef != null && artifactDef.getArtifactType().equalsIgnoreCase(ArtifactTypeEnum.HEAT_VOL.getType())) {
+							isVolumeGroup = true;
+							break;
+						}
+					}
+					for (GroupProperty groupProperty : properties) {
+						if (groupProperty.getName().equals(Constants.IS_BASE)) {
+							isBase = Boolean.valueOf(groupProperty.getValue());
+							break;
+						}
+					}
+
+					if (null == isBase) {
+						// Defensive only: isBase is initialized to false above, so this can
+						// never trigger unless that initialization is ever removed.
+						log.error("Align Vf module properties - isBase not found in DEFAULT_GROUP_VF_MODULE");
+						result = false;
+						return result;
+					}
+
+					String vfModuleLabel = null;
+					String moduleName = groupDefinition.getName();
+					Matcher matcher = pattern.matcher(moduleName);
+
+					// Fall back to the full group name when it does not match the pattern.
+					if (matcher.find()) {
+						vfModuleLabel = matcher.group(1);
+					} else {
+						vfModuleLabel = moduleName;
+					}
+
+					// Effectively-final copies for capture by the lambda below.
+					boolean isBasePrimitive = isBase;
+					boolean isVolumeGroupPrimitive = isVolumeGroup;
+					String vfModuleLabelFinal = vfModuleLabel;
+					List<GroupProperty> propertiesToAdd = new ArrayList<>();
+					// A null valueUniqueUid marks a property not yet stored on the graph.
+					properties.stream().forEach(p -> {
+						if (p.getValueUniqueUid() == null) {
+							if (vfModuleProperties.containsKey(p.getName())) {
+								if (isBasePrimitive) {
+									p.setValue(vfModuleProperties.get(p.getName()).getForBaseModule());
+								} else {
+									p.setValue(vfModuleProperties.get(p.getName()).getForNonBaseModule());
+								}
+							} else if (p.getName().equals(VOLUME_GROUP_NAME)) {
+								p.setValue(String.valueOf(isVolumeGroupPrimitive));
+							} else if (p.getName().equals(LABEL_NAME)) {
+								p.setValue(vfModuleLabelFinal);
+							}
+							propertiesToAdd.add(p);
+						}
+
+					});
+
+					List<GroupProperty> propertiesAlreadyExistOnGraph = properties.stream().filter(p -> !(p.getValueUniqueUid() == null || p.getValueUniqueUid().isEmpty())).collect(Collectors.toList());
+					int numOfPropertiesAlreadyExist = propertiesAlreadyExistOnGraph.size();
+
+					log.debug("Need to update default values vfModule {} properties {} ", properties.size(), properties);
+
+					Either<GroupTypeDefinition, TitanOperationStatus> groupTypeRes = groupTypeOperation.getGroupTypeByUid(groupDefinition.getTypeUid());
+					if (groupTypeRes.isRight()) {
+						// Fail for ANY fetch error: the previous code fell through to
+						// left().value() for statuses other than NOT_FOUND, which throws
+						// on a right Either at runtime.
+						log.debug("Failed to find group type {}",groupDefinition.getTypeUid());
+						result = false;
+						return result;
+					}
+
+					GroupTypeDefinition groupTypeDefinition = groupTypeRes.left().value();
+					List<PropertyDefinition> groupTypeProperties = groupTypeDefinition.getProperties();
+					Map<String, PropertyDefinition> groupTypePropertiesMap = groupTypeProperties.stream().collect(Collectors.toMap(p -> p.getName(), p -> p));
+
+					// Property indices on the graph continue after the ones already stored.
+					int i = numOfPropertiesAlreadyExist + 1;
+					for (GroupProperty prop : propertiesToAdd) {
+						if (prop.getUniqueId() == null || prop.getUniqueId().isEmpty()) {
+							continue;
+						}
+						GroupData groupData = new GroupData(groupDefinition);
+
+						Either<PropertyValueData, TitanOperationStatus> addPropertyToGroup = groupOperation.addPropertyToGroup(groupData, prop, groupTypePropertiesMap.get(prop.getName()), i);
+						if (addPropertyToGroup.isRight()) {
+							log.info("Failed to add properties {}  to group type :{} error {} ", prop.getName(), groupData.getUniqueId(), addPropertyToGroup.right().value());
+							result = false;
+							return result;
+						}
+						++i;
+					}
+				}
+			}
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", "alignVfModuleProperties", e);
+			result = false;
+		} finally {
+			log.info(" Align Vf module properties finished");
+			if (!result) {
+				log.info("Doing rollback");
+				titanGenericDao.rollback();
+			} else {
+				log.info("Doing commit");
+				titanGenericDao.commit();
+			}
+		}
+		// Previously returned the literal true, masking failures caught by the
+		// catch block above; report the tracked result instead.
+		return result;
+	}
+
+	/**
+	 * Looks up a deployment artifact by its uniqueId.
+	 *
+	 * @return the matching artifact, or null when none matches
+	 */
+	private ArtifactDefinition findArtifactInList(Map<String, ArtifactDefinition> deploymentArtifacts, String artifactId) {
+		return deploymentArtifacts.values().stream()
+				.filter(artifact -> artifact.getUniqueId().equals(artifactId))
+				.findAny()
+				.orElse(null);
+	}
+
+	/**
+	 * Regenerates the TOSCA model and CSAR artifacts for all VF resources and
+	 * for all services, excluding components in a working (checked-out) state.
+	 *
+	 * @return true when all artifacts were regenerated successfully
+	 */
+	private boolean generateTosca() {
+		log.info("Regenerate  Tosca and CSAR for VFs and Services");
+		Either<TitanGraph, TitanOperationStatus> graph = titanGenericDao.getGraph();
+		if (graph.isRight()) {
+			log.error("Failed to get graph {}", graph.right().value());
+			return false;
+		}
+		// "has" criteria: resources of type VF only.
+		Map<String, Object> props = new HashMap<String, Object>();
+		props.put(GraphPropertiesDictionary.RESOURCE_TYPE.getProperty(), ResourceTypeEnum.VF.name());
+
+		User user = buildDummyUser();
+
+		// NOTE(review): both puts below use the same map key (STATE), so the second
+		// overwrites the first and only NOT_CERTIFIED_CHECKOUT is actually excluded.
+		// Verify whether the intent was to exclude both working states; if so, the
+		// CHECKIN exclusion is silently lost here — confirm against the
+		// getByCriteria contract before changing.
+		Map<String, Object> propsHasNot = new HashMap<String, Object>();
+		propsHasNot.put(GraphPropertiesDictionary.STATE.getProperty(), LifecycleStateEnum.NOT_CERTIFIED_CHECKIN);
+		propsHasNot.put(GraphPropertiesDictionary.STATE.getProperty(), LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT);
+
+		Either<List<ResourceMetadataData>, TitanOperationStatus> allResources = titanGenericDao.getByCriteria(NodeTypeEnum.Resource, props, propsHasNot, ResourceMetadataData.class);
+		if (allResources.isRight()) {
+			if (allResources.right().value().equals(TitanOperationStatus.NOT_FOUND)) {
+				// Having no VF resources is a legitimate, successful outcome.
+				log.debug("generateTosca - no VF resources");
+			} else {
+				log.info("generateTosca failed fetch all resources,error {}", allResources.right().value());
+				return false;
+			}
+		} else {
+			if (!handleComponents(user, allResources.left().value(), resourceOperation)) {
+				log.info("generateTosca failed generate tosca artifacts for resources");
+				return false;
+
+			}
+		}
+		// Services: no "has" criteria, only the state exclusion above.
+		Either<List<ServiceMetadataData>, TitanOperationStatus> allServices = titanGenericDao.getByCriteria(NodeTypeEnum.Service, null, propsHasNot, ServiceMetadataData.class);
+		if (allServices.isRight()) {
+			if (allServices.right().value() == TitanOperationStatus.NOT_FOUND) {
+				log.debug("generateTosca - no services");
+
+			} else {
+				log.debug("generateTosca failed fetch all services,error {}",allServices.right().value());
+				return false;
+			}
+		} else {
+			if (!handleComponents(user, allServices.left().value(), serviceOperation)) {
+				log.info("generateTosca failed generate tosca artifacts for services");
+				return false;
+
+			}
+		}
+		log.info("Regenerate  Tosca and CSAR for VFs and Services finished");
+		return true;
+	}
+
+	/**
+	 * Regenerates the TOSCA artifacts of every non-deleted component in the
+	 * given list, stopping at the first component that cannot be fetched or
+	 * whose artifacts cannot be populated.
+	 *
+	 * @return true when every component was handled successfully
+	 */
+	private <T extends ComponentMetadataData> boolean handleComponents(User user, List<T> allResources, ComponentOperation operation) {
+		for (ComponentMetadataData componentMetadata : allResources) {
+			Boolean deleted = componentMetadata.getMetadataDataDefinition().isDeleted();
+			if (deleted != null && deleted) {
+				// Deleted components keep their existing artifacts untouched.
+				continue;
+			}
+			String componentId = (String) componentMetadata.getUniqueId();
+			Either<Component, StorageOperationStatus> component = operation.getComponent(componentId, true);
+			if (component.isRight()) {
+				log.info("generateTosca failed fetch component with id {} , error {}", componentId, component.right().value());
+				return false;
+			}
+			if (populateToscaArtifactsWithLog(component.left().value(), user) != ActionStatus.OK) {
+				return false;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Assigns a random customization UUID to every component-instance vertex
+	 * on the graph that does not already carry one; existing values are never
+	 * overwritten. Commits the transaction on success, rolls back on failure.
+	 *
+	 * @return true on success, false on any failure
+	 */
+	private boolean alignCustomizationUUID() {
+		boolean result = true;
+		try {
+			log.info("Update customization UUID for all component instances on graph");
+			Either<TitanGraph, TitanOperationStatus> graph = titanGenericDao.getGraph();
+			if (graph.isRight()) {
+				log.error("Failed to get graph {}", graph.right().value());
+				// Previously returned the still-true result here, reporting success
+				// (and committing in the finally block) even though the graph could
+				// not be fetched; mark the run as failed instead.
+				result = false;
+				return result;
+			}
+			TitanGraph tGraph = graph.left().value();
+			TitanGraphQuery<? extends TitanGraphQuery> query = tGraph.query();
+			query = query.has(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.ResourceInstance.getName());
+			Iterable<TitanVertex> vertices = query.vertices();
+			if (vertices == null) {
+				log.info("No component instances on graph");
+				return result;
+			}
+			Iterator<TitanVertex> iterator = vertices.iterator();
+			if (!iterator.hasNext()) {
+				log.info("No component instances on graph");
+			}
+			while (iterator.hasNext()) {
+				TitanVertex vertex = iterator.next();
+				String property = (String) titanGenericDao.getProperty(vertex, GraphPropertiesDictionary.CUSTOMIZATION_UUID.getProperty());
+				// Only fill in the UUID where it is missing or empty.
+				if (!ValidationUtils.validateStringNotEmpty(property)) {
+					UUID uuid = UUID.randomUUID();
+					vertex.property(GraphPropertiesDictionary.CUSTOMIZATION_UUID.getProperty(), uuid.toString());
+				}
+			}
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", "alignCustomizationUUID", e);
+			result = false;
+		} finally {
+			log.info("Update customization UUID finished ");
+			if (!result) {
+				log.info("Doing rollback");
+				titanGenericDao.rollback();
+			} else {
+				log.info("Doing commit");
+				titanGenericDao.commit();
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Regenerates the TOSCA artifact payloads of a single component, skipping
+	 * components whose lifecycle state does not require regeneration.
+	 *
+	 * @return ActionStatus.OK on success or when regeneration is not needed,
+	 *         ActionStatus.GENERAL_ERROR on any failure
+	 */
+	private ActionStatus populateToscaArtifactsWithLog(Component component, User user) {
+		LifecycleStateEnum lifecycleState = component.getLifecycleState();
+		if (!needRegenarateTosca(lifecycleState)) {
+			log.debug("Component {} is in state {}, don't generatate Tosca", component.getUniqueId(), lifecycleState);
+			return ActionStatus.OK;
+		}
+
+		try {
+			Either<Either<ArtifactDefinition, Operation>, ResponseFormat> populateToscaArtifacts = serviceBusinessLogic.populateToscaArtifacts(component, user, true, false, true, true);
+			if (populateToscaArtifacts.isRight()) {
+				log.error("Failed to generate TOSCA artifacts for component {} of type:{} with uniqueId:{}", component.getName(), component.getComponentType().name(), component.getUniqueId());
+				return ActionStatus.GENERAL_ERROR;
+			}
+			log.debug("Added payload to tosca artifacts of component {} of type:{} with uniqueId:{}", component.getName(), component.getComponentType().getValue(), component.getUniqueId());
+			return ActionStatus.OK;
+		} catch (Exception e) {
+			log.error("Exception Occured When filling tosca artifact payload for component {} of type:{} with uniqueId:{}", component.getName(), component.getComponentType().name(), component.getUniqueId(), e);
+			return ActionStatus.GENERAL_ERROR;
+		}
+	}
+
+	/** TOSCA regeneration is only needed for components in, or past, certification. */
+	private boolean needRegenarateTosca(LifecycleStateEnum lifecycleState) {
+		return lifecycleState == LifecycleStateEnum.READY_FOR_CERTIFICATION
+				|| lifecycleState == LifecycleStateEnum.CERTIFICATION_IN_PROGRESS
+				|| lifecycleState == LifecycleStateEnum.CERTIFIED;
+	}
+
+	/** Builds the synthetic user under which all migration operations are recorded. */
+	private User buildDummyUser() {
+		User migrationUser = new User();
+		migrationUser.setUserId("migrationTask");
+		return migrationUser;
+	}
+
+	/**
+	 * Adds to the org.openecomp.groups.VfModule group type any property that is
+	 * declared in the bundled group-types YAML but missing on the graph.
+	 * Commits the Titan transaction on success, rolls back on failure.
+	 *
+	 * @return true on success, false on any failure
+	 */
+	private boolean alignGroupDataType() {
+		boolean result = true;
+		try {
+			log.info(" Align group data type properties");
+			String categoryMigrationFile = CONFIG_GROUP_TYPES_YML;
+			String yamlAsString;
+			// try-with-resources: the previous code never closed the stream.
+			try (InputStream inputStream = getClass().getResourceAsStream(categoryMigrationFile)) {
+				if (inputStream == null) {
+					log.info("Failed to load input file : {}", categoryMigrationFile);
+					result = false;
+					return result;
+				}
+				yamlAsString = IOUtils.toString(inputStream, StandardCharsets.UTF_8.name());
+
+			} catch (Exception e) {
+				log.info("Failed to load group types file exception : ", e);
+				result = false;
+				return result;
+			}
+
+			log.debug("received yaml: {}", yamlAsString);
+
+			Map<String, Object> toscaJson = (Map<String, Object>) new Yaml().load(yamlAsString);
+
+			if (toscaJson == null || toscaJson.isEmpty()) {
+				log.info("group types file is empty");
+				result = false;
+				return result;
+			}
+
+			Map<String, Object> vfModule = (Map<String, Object>) toscaJson.get("org.openecomp.groups.VfModule");
+			if (vfModule == null || vfModule.isEmpty()) {
+				log.info("No vfModule in group types file");
+				result = false;
+				return result;
+			}
+			Map<String, Object> properties = (Map<String, Object>) vfModule.get("properties");
+			if (properties == null || properties.isEmpty()) {
+				log.info("No properties for vfModule in group types file");
+				result = false;
+				return result;
+			}
+			Either<GroupTypeDefinition, StorageOperationStatus> latestGroupTypeByType = groupTypeOperation.getLatestGroupTypeByType("org.openecomp.groups.VfModule", true);
+			if (latestGroupTypeByType.isRight()) {
+				log.info("Failed to fetch org.openecomp.groups.VfModule group type, error :{}", latestGroupTypeByType.right().value());
+				result = false;
+				return result;
+			}
+			GroupTypeDefinition groupTypeInGraph = latestGroupTypeByType.left().value();
+			List<PropertyDefinition> propertiesInGraph = groupTypeInGraph.getProperties();
+
+			List<PropertyDefinition> propertiesToAdd = new ArrayList<>();
+
+			// Build PropertyDefinitions only for YAML properties not already on the graph.
+			properties.entrySet().stream().filter(e -> !ifExistOnGraph(e.getKey(), propertiesInGraph)).forEach(fe -> {
+				PropertyDefinition property = new PropertyDefinition();
+				property.setName(fe.getKey());
+				Map<String, Object> definitionInYaml = (Map<String, Object>) fe.getValue();
+				property.setType((String) definitionInYaml.get("type"));
+				property.setRequired((Boolean) definitionInYaml.get("required"));
+				property.setDescription((String) definitionInYaml.get("description"));
+				String defaultValue = definitionInYaml.get("default") == null ? null : definitionInYaml.get("default").toString();
+				if (defaultValue != null) {
+					property.setDefaultValue(defaultValue);
+				}
+				propertiesToAdd.add(property);
+			});
+
+			if (!propertiesToAdd.isEmpty()) {
+				log.debug("Need to add to vfModule {} properties {} ", propertiesToAdd.size(), propertiesToAdd);
+
+				Either<Map<String, PropertyData>, TitanOperationStatus> addPropertiesToCapablityType = propertyOperation.addPropertiesToElementType(groupTypeInGraph.getUniqueId(), NodeTypeEnum.GroupType, propertiesToAdd);
+				if (addPropertiesToCapablityType.isRight()) {
+					log.info("Failed to add properties to group type :{}", addPropertiesToCapablityType.right().value());
+					result = false;
+					return result;
+				}
+			} else {
+				log.debug("No properties to add to vfModule");
+			}
+
+		} catch (Exception e) {
+			log.error("Failed {} with exception: ", "alignGroupDataType", e);
+			result = false;
+		} finally {
+			log.info(" Align group data type properties finished");
+			if (!result) {
+				log.info("Doing rollback");
+				titanGenericDao.rollback();
+			} else {
+				log.info("Doing commit");
+				titanGenericDao.commit();
+			}
+		}
+		return result;
+	}
+
+	/** Returns true when a property with the given name already exists on the graph. */
+	private boolean ifExistOnGraph(String name, List<PropertyDefinition> propertiesInGraph) {
+		return propertiesInGraph.stream().anyMatch(pd -> pd.getName().equals(name));
+	}
+
+	/**
+	 * Aligns the data types on the graph with the bundled data-types YAML:
+	 * validates each declared type, adds missing ones, and refreshes the
+	 * properties and modification time of existing ones. Commits on success,
+	 * rolls back on failure.
+	 *
+	 * @return true when every data type was aligned successfully
+	 */
+	public boolean alignDataType() {
+
+		log.info(" Align data type properties");
+
+		boolean isSuccessful = true;
+		List<DataTypeDefinition> dataTypes = extractDataTypesFromYaml();
+
+		// extractDataTypesFromYaml() returns null on a read/parse failure;
+		// CollectionUtils.isEmpty covers both null and empty.
+		if (CollectionUtils.isEmpty(dataTypes)) {
+			isSuccessful = false;
+		}
+
+		List<ImmutablePair<DataTypeDefinition, Boolean>> createdElementTypes = new ArrayList<>();
+
+		if (isSuccessful) {
+			// The iterator is created on the success path only: dataTypes may be
+			// null here, and the previous unconditional iterator() call threw an NPE.
+			Iterator<DataTypeDefinition> elementTypeItr = dataTypes.iterator();
+			try {
+				while (elementTypeItr.hasNext()) {
+					DataTypeDefinition elementType = elementTypeItr.next();
+					String elementName = elementType.getName();
+					Either<ActionStatus, ResponseFormat> validateElementType = validateDataType(elementType);
+					if (validateElementType.isRight()) {
+						log.debug("Failed to validate data type {}. Status is {}. ", elementName, validateElementType.right().value());
+						isSuccessful = false;
+						break;
+					}
+					log.debug("Going to get data type by name {}. ", elementName);
+					Either<DataTypeDefinition, StorageOperationStatus> findElementType = propertyOperation.getDataTypeByNameWithoutDerived(elementName);
+					if (findElementType.isRight()) {
+						StorageOperationStatus status = findElementType.right().value();
+						if (status != StorageOperationStatus.NOT_FOUND) {
+							// Log the fetch status itself: the previous code dereferenced
+							// validateElementType.right() here, which is a LEFT Either at
+							// this point.
+							log.debug("Failed to fetch data type {}. Status is {}. ", elementName, status);
+							isSuccessful = false;
+							break;
+						} else {
+							log.debug("Going to add data type with name {}. ", elementName);
+							Either<DataTypeDefinition, StorageOperationStatus> dataModelResponse = propertyOperation.addDataType(elementType);
+
+							if (dataModelResponse.isRight()) {
+								// SCHEMA_VIOLATION means the type already exists; not a failure.
+								if (dataModelResponse.right().value() != StorageOperationStatus.SCHEMA_VIOLATION) {
+									log.debug("Failed to add data type {}. Status is {}. ", elementName, dataModelResponse.right().value());
+									isSuccessful = false;
+									break;
+								} else {
+									createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, false));
+								}
+							} else {
+								createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(dataModelResponse.left().value(), true));
+							}
+
+						}
+					} else {
+						// Existing type: replace its properties wholesale, then stamp a
+						// fresh modification time on the node.
+						DataTypeDefinition dataTypeDefinition = findElementType.left().value();
+						log.debug("Going to update data type with name {}. ", elementName);
+						Either<Map<String, PropertyDefinition>, StorageOperationStatus> deleteDataTypeRes = propertyOperation.deleteAllPropertiesAssociatedToNode(NodeTypeEnum.DataType, dataTypeDefinition.getUniqueId());
+						if (deleteDataTypeRes.isRight()) {
+							StorageOperationStatus status = deleteDataTypeRes.right().value();
+							if (status != StorageOperationStatus.OK) {
+
+								log.debug("Failed to update data type {}. Status is {}. ", elementName, deleteDataTypeRes.right().value());
+								isSuccessful = false;
+								break;
+							}
+						}
+
+						Either<Map<String, PropertyData>, TitanOperationStatus> updateDataTypeRes = propertyOperation.addPropertiesToElementType(dataTypeDefinition.getUniqueId(), NodeTypeEnum.DataType, elementType.getProperties());
+
+						if (updateDataTypeRes.isRight()) {
+							log.debug("Failed to update data type {}. Status is {}. ", elementName, updateDataTypeRes.right().value());
+							isSuccessful = false;
+							break;
+
+						} else {
+							createdElementTypes.add(new ImmutablePair<DataTypeDefinition, Boolean>(elementType, true));
+						}
+
+						DataTypeData dataTypeData = new DataTypeData();
+						dataTypeData.setDataTypeDataDefinition(elementType);
+						dataTypeData.getDataTypeDataDefinition().setUniqueId(dataTypeDefinition.getUniqueId());
+						long modificationTime = System.currentTimeMillis();
+						dataTypeData.getDataTypeDataDefinition().setModificationTime(modificationTime);
+
+						Either<DataTypeData, TitanOperationStatus> updateNode = titanGenericDao.updateNode(dataTypeData, DataTypeData.class);
+						if (updateNode.isRight()) {
+							TitanOperationStatus operationStatus = updateNode.right().value();
+							log.debug("Failed to update modification time data type {} from graph. status is {}",
+									dataTypeDefinition.getUniqueId() ,operationStatus);
+							BeEcompErrorManager.getInstance().logInternalFlowError("AddPropertyToDataType", "Failed to fetch data type. Status is " + operationStatus, ErrorSeverity.ERROR);
+							isSuccessful = false;
+							break;
+						} else {
+							log.debug("Update data type uid {}. Set modification time to {}", dataTypeDefinition.getUniqueId(), modificationTime);
+							isSuccessful = true;
+						}
+					}
+				}
+			} finally {
+				log.info(" Finish to align data type properties");
+				if (isSuccessful) {
+					propertyOperation.getTitanGenericDao().commit();
+				} else {
+					propertyOperation.getTitanGenericDao().rollback();
+				}
+			}
+		}
+		return isSuccessful;
+	}
+
+	/**
+	 * Loads the data-type definitions declared in the bundled data-types YAML.
+	 *
+	 * @return the parsed data types, or null when the file cannot be read
+	 */
+	@SuppressWarnings("unchecked")
+	private List<DataTypeDefinition> extractDataTypesFromYaml() {
+		String dataTypeYmlFilePath = CONFIG_DATA_TYPES_YML;
+		String yamlAsString;
+		// try-with-resources: the previous code never closed the stream.
+		try (InputStream inputStream = getClass().getResourceAsStream(dataTypeYmlFilePath)) {
+			if (inputStream == null) {
+				log.info("Failed to load input file : {}", dataTypeYmlFilePath);
+				return null;
+			}
+			yamlAsString = IOUtils.toString(inputStream, StandardCharsets.UTF_8.name());
+
+		} catch (Exception e) {
+			log.info("Failed to load group types file exception : ", e);
+			return null;
+		}
+
+		log.debug("received yaml: {}", yamlAsString);
+
+		String dataTypeName;
+		List<DataTypeDefinition> dataTypes = new ArrayList<>();
+
+		Map<String, Object> toscaJson = (Map<String, Object>) new Yaml().load(yamlAsString);
+		Iterator<Entry<String, Object>> elementTypesEntryItr = toscaJson.entrySet().iterator();
+		while (elementTypesEntryItr.hasNext()) {
+			Entry<String, Object> elementTypeNameDataEntry = elementTypesEntryItr.next();
+			dataTypeName = elementTypeNameDataEntry.getKey();
+			Map<String, Object> elementTypeJsonData = (Map<String, Object>) elementTypeNameDataEntry.getValue();
+
+			DataTypeDefinition dataType = new DataTypeDefinition();
+			dataType.setName(dataTypeName);
+
+			if (elementTypeJsonData != null) {
+
+				if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.DESCRIPTION.getElementName())) {
+					dataType.setDescription((String) elementTypeJsonData.get(ToscaTagNamesEnum.DESCRIPTION.getElementName()));
+				}
+				if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.DERIVED_FROM.getElementName())) {
+					dataType.setDerivedFromName((String) elementTypeJsonData.get(ToscaTagNamesEnum.DERIVED_FROM.getElementName()));
+				}
+				// Parse properties regardless, but only attach them when a properties
+				// section is actually declared for this type.
+				List<PropertyDefinition> properties = getProperties(elementTypeJsonData);
+				if (elementTypeJsonData.containsKey(ToscaTagNamesEnum.PROPERTIES.getElementName())) {
+					dataType.setProperties(properties);
+				}
+			}
+			dataTypes.add(dataType);
+		}
+
+		return dataTypes;
+	}
+
+	/**
+	 * Extracts the property definitions declared in the given TOSCA json map.
+	 *
+	 * @return the parsed properties (possibly empty), or null when no
+	 *         properties section could be parsed
+	 */
+	private List<PropertyDefinition> getProperties(Map<String, Object> toscaJson) {
+		Either<Map<String, PropertyDefinition>, ResultStatusEnum> properties = ImportUtils.getProperties(toscaJson);
+		if (properties.isRight()) {
+			return null;
+		}
+		List<PropertyDefinition> values = new ArrayList<>();
+		Map<String, PropertyDefinition> propertiesMap = properties.left().value();
+		if (propertiesMap != null) {
+			for (Entry<String, PropertyDefinition> entry : propertiesMap.entrySet()) {
+				// Copy each definition and stamp it with its map key as the name.
+				PropertyDefinition newPropertyDefinition = new PropertyDefinition(entry.getValue());
+				newPropertyDefinition.setName(entry.getKey());
+				values.add(newPropertyDefinition);
+			}
+		}
+		return values;
+	}
+
+	/**
+	 * Validates a single data type declaration before it is written to the graph:
+	 * a type must either declare properties or derive from a suitable parent,
+	 * properties must be unique, must not reference the type itself, must not be
+	 * placed on a scalar-derived type, and must not redefine ancestor properties.
+	 *
+	 * @return Either.left(ActionStatus.OK) when valid, Either.right(responseFormat)
+	 *         describing the violation otherwise
+	 */
+	private Either<ActionStatus, ResponseFormat> validateDataType(DataTypeDefinition dataType) {
+
+		String dataTypeName = dataType.getName();
+		List<PropertyDefinition> properties = dataType.getProperties();
+		if (properties == null) {
+			// At least one parameter should be defined either in the properties
+			// section or at one of the parents
+			String derivedDataType = dataType.getDerivedFromName();
+			// If there are no properties, then we can create a data type if it
+			// is an abstract one or it derives from non abstract data type
+			if ((derivedDataType == null || derivedDataType.isEmpty())) {
+				if (false == isAbstract(dataType.getName())) {
+					if (false == ToscaPropertyType.isScalarType(dataTypeName)) {
+						log.debug("Data type {} must have properties unless it derives from non abstract data type",dataType.getName());
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_NOR_PROPERTIES_NEITHER_DERIVED_FROM, dataType, null);
+
+						return Either.right(responseFormat);
+					}
+				}
+			} else {
+				// if it is not a scalar data type and it derives from abstract
+				// data type, we should reject the request.
+				if (false == ToscaPropertyType.isScalarType(dataTypeName) && true == isAbstract(derivedDataType)) {
+					log.debug("Data type {} which derived from abstract data type must have at least one property",dataType.getName());
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_NOR_PROPERTIES_NEITHER_DERIVED_FROM, dataType, null);
+
+					return Either.right(responseFormat);
+				}
+			}
+		} else {
+			// properties tag cannot be empty
+			if (properties.isEmpty()) {
+				ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROPERTIES_CANNOT_BE_EMPTY, dataType, null);
+
+				return Either.right(responseFormat);
+			}
+
+			// check no duplicates: a name set smaller than the list means at
+			// least two properties share a name
+			Set<String> collect = properties.stream().map(p -> p.getName()).collect(Collectors.toSet());
+			if (collect != null) {
+				if (properties.size() != collect.size()) {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_DUPLICATE_PROPERTY, dataType, null);
+
+					return Either.right(responseFormat);
+				}
+			}
+
+			// a property may not be typed as the data type that declares it
+			List<String> propertiesWithSameTypeAsDataType = properties.stream().filter(p -> p.getType().equals(dataType.getName())).map(p -> p.getName()).collect(Collectors.toList());
+			if (propertiesWithSameTypeAsDataType != null && propertiesWithSameTypeAsDataType.isEmpty() == false) {
+				log.debug("The data type contains properties with the type {}",dataType.getName(),dataType.getName());
+				ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROEPRTY_CANNOT_HAVE_SAME_TYPE_OF_DATA_TYPE, dataType, propertiesWithSameTypeAsDataType);
+
+				return Either.right(responseFormat);
+			}
+		}
+
+		// Validate the derived-from chain against what is already on the graph.
+		String derivedDataType = dataType.getDerivedFromName();
+		if (derivedDataType != null) {
+			Either<DataTypeDefinition, StorageOperationStatus> derivedDataTypeByName = propertyOperation.getDataTypeByName(derivedDataType, true);
+			if (derivedDataTypeByName.isRight()) {
+				StorageOperationStatus status = derivedDataTypeByName.right().value();
+				if (status == StorageOperationStatus.NOT_FOUND) {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_DERIVED_IS_MISSING, dataType, null);
+
+					return Either.right(responseFormat);
+				} else {
+					ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.GENERAL_ERROR, dataType, null);
+
+					return Either.right(responseFormat);
+
+				}
+			} else {
+
+				DataTypeDefinition derivedDataTypeDef = derivedDataTypeByName.left().value();
+				if (properties != null && properties.isEmpty() == false) {
+
+					// A type deriving from a scalar may not add properties.
+					if (true == isScalarType(derivedDataTypeDef)) {
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_CANNOT_HAVE_PROPERTIES, dataType, null);
+
+						return Either.right(responseFormat);
+					}
+
+					// Collect the property names of every ancestor in the chain.
+					Set<String> allParentsProps = new HashSet<>();
+					do {
+						List<PropertyDefinition> currentParentsProps = derivedDataTypeDef.getProperties();
+						if (currentParentsProps != null) {
+							for (PropertyDefinition propertyDefinition : currentParentsProps) {
+								allParentsProps.add(propertyDefinition.getName());
+							}
+						}
+						derivedDataTypeDef = derivedDataTypeDef.getDerivedFrom();
+					} while (derivedDataTypeDef != null);
+
+					// Check that no property is already defined in one of the
+					// ancestors
+					Set<String> alreadyExistPropsCollection = properties.stream().filter(p -> allParentsProps.contains(p.getName())).map(p -> p.getName()).collect(Collectors.toSet());
+					if (alreadyExistPropsCollection != null && alreadyExistPropsCollection.isEmpty() == false) {
+						List<String> duplicateProps = new ArrayList<>();
+						duplicateProps.addAll(alreadyExistPropsCollection);
+						ResponseFormat responseFormat = componentsUtils.getResponseFormatByDataType(ActionStatus.DATA_TYPE_PROPERTY_ALREADY_DEFINED_IN_ANCESTOR, dataType, duplicateProps);
+
+						return Either.right(responseFormat);
+					}
+
+				}
+			}
+		}
+		return Either.left(ActionStatus.OK);
+	}
+
+	/** A data type is abstract when it maps to a primitive TOSCA type flagged as abstract. */
+	private boolean isAbstract(String dataTypeName) {
+		ToscaPropertyType primitiveType = ToscaPropertyType.isValidType(dataTypeName);
+		if (primitiveType == null) {
+			return false;
+		}
+		return primitiveType.isAbstract();
+	}
+
+	/**
+	 * Walks the derived-from chain and reports whether the type itself or any
+	 * of its ancestors is a scalar TOSCA type.
+	 */
+	private boolean isScalarType(DataTypeDefinition dataTypeDef) {
+		for (DataTypeDefinition current = dataTypeDef; current != null; current = current.getDerivedFrom()) {
+			if (ToscaPropertyType.isScalarType(current.getName())) {
+				return true;
+			}
+		}
+		return false;
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707.java
new file mode 100644
index 0000000..be40e4c
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707.java
@@ -0,0 +1,37 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.Resource;
+import java.util.List;
+
+@Component("migration1707")
+public class Migration1707 {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(Migration1707.class);
+
+    private List<Migration> migrations;
+
+    public Migration1707(List<Migration> migrations) {
+        this.migrations = migrations;
+    }
+
+    public boolean migrate() {
+        for (Migration migration : migrations) {
+            LOGGER.info(String.format("Starting migration. %s", migration.description()));
+            boolean migrationCompletedSuccessfully = migration.migrate();
+            if (!migrationCompletedSuccessfully) {
+                LOGGER.error(String.format("Migration of class %s has failed.", migration.getClass()));
+                return false;
+            }
+            LOGGER.info(String.format("Completed migration. %s", migration.description()));
+        }
+        return true;
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707Config.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707Config.java
new file mode 100644
index 0000000..9c39b58
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/Migration1707Config.java
@@ -0,0 +1,240 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+
+import java.util.List;
+
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.MigrationByIdDerivedNodeTypeResolver;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.NormativesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.NormativesResolver;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.ResourceVersionMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.ResourcesCategoriesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.ServiceCategoriesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.ServiceVersionMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.ServicesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.UserStatesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.UsersMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.VFResourcesMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.VersionMigration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.FulfilledCapabilitiesMigrationService;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.FulfilledRequirementsMigrationService;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.RequirementsCapabilitiesMigrationService;
+import org.openecomp.sdc.be.dao.TitanClientStrategy;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
+import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.jsontitan.operations.ArtifactsOperations;
+import org.openecomp.sdc.be.model.jsontitan.operations.CategoryOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.NodeTemplateOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.NodeTypeOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaDataOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementLifecycleOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.IElementOperation;
+import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
+import org.openecomp.sdc.be.model.operations.api.ToscaDefinitionPathCalculator;
+import org.openecomp.sdc.be.model.operations.impl.ElementOperation;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.model.operations.impl.ToscaDefinitionPathCalculatorImpl;
+import org.openecomp.sdc.be.model.operations.impl.UserAdminOperation;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.annotation.Order;
+
+/**
+ * Spring wiring for the 1707 catalog migration: declares the ordered migration
+ * steps plus the Titan/DAO infrastructure beans they depend on. Bean names and
+ * {@code @Qualifier} strings are referenced verbatim elsewhere — do not rename.
+ */
+@Configuration
+public class Migration1707Config {
+
+
+    // Aggregator that executes every Migration bean below (see Migration1707.migrate()).
+    @Bean(name = "migration1707")
+    public Migration1707 migration1707(List<Migration> migrations) {
+        return new Migration1707(migrations);
+    }
+
+    // ---- Ordered migration steps; @Order defines the execution sequence ----
+
+    @Bean(name = "renameGraphPropertyKeysMigration")
+    @Order(1)
+    public Migration renameGraphPropertyKeysMigration() {
+        return new RenameGraphPropertyKeys();
+    }
+
+    @Bean(name = "toscaNamesUpdate")
+    @Order(2)
+    public Migration toscaNamesUpdate() {
+        return new ToscaNamesUpdate();
+    }
+
+    @Bean(name = "users-migration")
+    @Order(3)
+    public Migration usersMigration() {
+        return new UsersMigration();
+    }
+
+    @Bean(name = "resource-category-migration")
+    @Order(4)
+    public Migration resourceCategoriesMigration() {
+        return new ResourcesCategoriesMigration();
+    }
+
+    @Bean(name = "service-category-migration")
+    @Order(5)
+    public Migration serviceCategoriesMigration() {
+        return new ServiceCategoriesMigration();
+    }
+
+    @Bean(name = "normatives-migration")
+    @Order(6)
+    public Migration normativesMigration() {
+        return new NormativesMigration();
+    }
+
+    @Bean(name = "vf-migration")
+    @Order(7)
+    public Migration vfMigration() {
+        return new VFResourcesMigration();
+    }
+
+    @Bean(name = "service-migration")
+    @Order(8)
+    public Migration serviceMigration() {
+        return new ServicesMigration();
+    }
+
+    @Bean(name = "user-states-migration")
+    @Order(9)
+    public Migration userStatesMigration() {
+        return new UserStatesMigration();
+    }
+    
+    // NOTE(review): step 10 is disabled; confirm whether tosca-template-regeneration
+    // is meant to run in this release before deleting the commented-out bean.
+//    @Bean(name = "tosca-template-regeneration")
+//    @Order(10)
+//    public Migration ToscaTemplateRegeneration() {
+//        return new ToscaTemplateRegeneration();
+//    }
+
+    // ---- Version-migration beans ----
+
+    @Bean("resource-version-migration")
+    public VersionMigration<Resource> resourceVersionMigration() {
+        return new ResourceVersionMigration();
+    }
+
+    @Bean("service-version-migration")
+    public VersionMigration<Service> serviceVersionMigration() {
+        return new ServiceVersionMigration();
+    }
+
+    @Bean(name = "normatives-resolver")
+    public NormativesResolver normativesResolver() {
+        return new NormativesResolver();
+    }
+    
+    // ---- Infrastructure beans: Titan client/strategy, DAOs and operations ----
+
+	@Bean(name = "property-operation-mig")
+	public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao-migration") TitanGenericDao titanGenericDao) {
+		return new PropertyOperation(titanGenericDao);
+	} 
+	
+    @Bean(name = "group-type-operation-mig")
+    public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao-migration") TitanGenericDao titanGenericDao, @Qualifier("property-operation-mig") PropertyOperation propertyOperation) {
+    	return new GroupTypeOperation(titanGenericDao, propertyOperation);
+    }
+
+    @Bean(name = "titan-generic-dao-migration")
+    public TitanGenericDao titanGenericDaoMigration(@Qualifier("migration-titan-client") TitanGraphClient titanGraphClient) {
+        return new TitanGenericDao(titanGraphClient);
+    }
+
+    // Strategy that points the Titan client at the dedicated migration keyspace config.
+    @Bean(name = "migration-titan-strategy")
+    public TitanClientStrategy migrationStrategy() {
+        return new MigrationTitanStrategy();
+    }
+
+    // initMethod opens the graph connection when the context starts.
+    @Bean(name = "migration-titan-client", initMethod = "createGraph")
+    public TitanGraphClient titanMigrationClient(@Qualifier("migration-titan-strategy") TitanClientStrategy titanClientStrategy) {
+        return new TitanGraphClient(titanClientStrategy);
+    }
+
+    @Bean(name = "user-operation-migration")
+    public IUserAdminOperation userOperationNewKeySpace(@Qualifier("titan-generic-dao-migration") TitanGenericDao titanGenericDao) {
+        return new UserAdminOperation(titanGenericDao);
+    }
+
+    @Bean(name = "element-operation-migration")
+    public IElementOperation elementOperationNewKeyspace(@Qualifier("titan-generic-dao-migration") TitanGenericDao titanGenericDao) {
+        return new ElementOperation(titanGenericDao);
+    }
+
+    @Bean(name = "tosca-operation-facade")
+    public ToscaOperationFacade toscaOperationFacade() {
+        return new ToscaOperationFacade();
+    }
+
+    @Bean(name = "node-type-operation")
+    public NodeTypeOperation nodeTypeOperation(@Qualifier("mig-derived-resolver") DerivedNodeTypeResolver migrationDerivedNodeTypeResolver) {
+        return new NodeTypeOperation(migrationDerivedNodeTypeResolver);
+    }
+
+    @Bean(name = "topology-template-operation")
+    public TopologyTemplateOperation topologyTemplateOperation() {
+        return new TopologyTemplateOperation();
+    }
+
+    @Bean(name = "node-template-operation")
+    public NodeTemplateOperation nodeTemplateOperation() {
+        return new NodeTemplateOperation();
+    }
+
+    @Bean(name = "titan-dao")
+    public TitanDao titanDao(@Qualifier("migration-titan-client") TitanGraphClient titanGraphClient) {
+        return new TitanDao(titanGraphClient);
+    }
+
+    @Bean(name = "category-operation")
+    public CategoryOperation categoryOperation() {
+        return new CategoryOperation();
+    }
+
+    @Bean(name = "artifacts-operation")
+    public ArtifactsOperations artifactsOperation() {
+        return new ArtifactsOperations();
+    }
+
+    @Bean(name = "tosca-data-operation")
+    public ToscaDataOperation toscaDataOperation() {
+        return new ToscaDataOperation();
+    }
+
+    @Bean(name = "tosca-element-lifecycle-operation")
+    public ToscaElementLifecycleOperation toscaElementLifecycleOperation() {
+        return new ToscaElementLifecycleOperation();
+    }
+
+    @Bean(name = "tosca-path-calculator")
+    public ToscaDefinitionPathCalculator pathCalculator() {
+        return new ToscaDefinitionPathCalculatorImpl();
+    }
+
+    @Bean(name = "fulfilled-capabilities-mig-service")
+    public FulfilledCapabilitiesMigrationService fulfilledCapabilitiesMigService() {
+        return new FulfilledCapabilitiesMigrationService();
+    }
+
+    @Bean(name = "fulfilled-requirements-mig-service")
+    public FulfilledRequirementsMigrationService requirementsMigService() {
+        return new FulfilledRequirementsMigrationService();
+    }
+
+    @Bean(name ="req-cap-mig-service")
+    public RequirementsCapabilitiesMigrationService reqCapMigService() {
+        return new RequirementsCapabilitiesMigrationService();
+    }
+
+    @Bean(name = "mig-derived-resolver")
+    public DerivedNodeTypeResolver migrationDerivedNodeTypeResolver() {
+        return new MigrationByIdDerivedNodeTypeResolver();
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationTitanStrategy.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationTitanStrategy.java
new file mode 100644
index 0000000..9a0cc89
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationTitanStrategy.java
@@ -0,0 +1,13 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.be.dao.TitanClientStrategy;
+
+public class MigrationTitanStrategy implements TitanClientStrategy {
+
+    /**
+     * Resolves the Titan configuration file for the dedicated migration keyspace.
+     *
+     * @return path of the migration-keyspace Titan configuration file taken from
+     *         the global SDC configuration
+     */
+    @Override
+    public String getConfigFile() {
+        final ConfigurationManager configurationManager = ConfigurationManager.getConfigurationManager();
+        return configurationManager.getConfiguration().getTitanMigrationKeySpaceCfgFile();
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationUtils.java
new file mode 100644
index 0000000..81a00b0
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/MigrationUtils.java
@@ -0,0 +1,27 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import org.apache.commons.lang.enums.Enum;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class MigrationUtils {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(MigrationUtils.class);
+
+    // Utility class: static helpers only — prevent instantiation.
+    private MigrationUtils() {
+    }
+
+    /**
+     * Logs the given error message and signals failure.
+     *
+     * @param errorMsg message to log at ERROR level
+     * @return always {@code false}, so callers can {@code return handleError(...)}
+     */
+    public static boolean handleError(String errorMsg) {
+        LOGGER.error(errorMsg);
+        return false;
+    }
+
+    /**
+     * Logs the given error message and returns the supplied error status.
+     *
+     * @param errorStatus status value to hand back to the caller
+     * @param errorMsg    message to log at ERROR level
+     * @return {@code errorStatus}, unchanged
+     */
+    public static <T> T handleError(T errorStatus, String errorMsg) {
+        LOGGER.error(errorMsg);
+        return errorStatus;
+    }
+
+    /**
+     * Always throws a {@link MigrationException} with the given message.
+     * Declared with a generic return type so it can be used in expression
+     * positions (e.g. {@code orElseGet}) of any type.
+     *
+     * @throws MigrationException always
+     */
+    public static <A> A willThrowException(String withMsg) {
+        throw new MigrationException(withMsg);
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/RenameGraphPropertyKeys.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/RenameGraphPropertyKeys.java
new file mode 100644
index 0000000..a69fb9d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/RenameGraphPropertyKeys.java
@@ -0,0 +1,38 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import org.openecomp.sdc.asdctool.impl.migration.MigrationMsg;
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationOperationUtils;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import java.util.HashMap;
+import java.util.Map;
+
+@Component("renameGraphPropertyKeysMigration")
+public class RenameGraphPropertyKeys implements Migration {
+
+    private final static Map<String, String> KEY_PROPERTIES_TO_RENAME;
+
+    @Autowired
+    private MigrationOperationUtils migrationUtils;
+
+    static {
+        KEY_PROPERTIES_TO_RENAME = new HashMap<>();
+        KEY_PROPERTIES_TO_RENAME.put("attuid", GraphPropertiesDictionary.USERID.getProperty());
+        KEY_PROPERTIES_TO_RENAME.put("pmatt", GraphPropertiesDictionary.PROJECT_CODE.getProperty());
+        KEY_PROPERTIES_TO_RENAME.put("attContact", GraphPropertiesDictionary.CONTACT_ID.getProperty());
+        KEY_PROPERTIES_TO_RENAME.put("attCreator", GraphPropertiesDictionary.CREATOR_ID.getProperty());
+    }
+
+    @Override
+    public boolean migrate() {
+        return migrationUtils.renamePropertyKeys(KEY_PROPERTIES_TO_RENAME);
+    }
+
+    @Override
+    public String description() {
+        return MigrationMsg.RENMAE_KEY_PROPERTIES_1707.getMessage();
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaNamesUpdate.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaNamesUpdate.java
new file mode 100644
index 0000000..262c300
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaNamesUpdate.java
@@ -0,0 +1,368 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.ImmutableTriple;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.be.dao.graph.GraphElementFactory;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphNode;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.components.ResourceMetadataDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.SchemaDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.resources.data.AttributeData;
+import org.openecomp.sdc.be.resources.data.AttributeValueData;
+import org.openecomp.sdc.be.resources.data.CapabilityData;
+import org.openecomp.sdc.be.resources.data.CapabilityTypeData;
+import org.openecomp.sdc.be.resources.data.DataTypeData;
+import org.openecomp.sdc.be.resources.data.GroupData;
+import org.openecomp.sdc.be.resources.data.GroupTypeData;
+import org.openecomp.sdc.be.resources.data.InputValueData;
+import org.openecomp.sdc.be.resources.data.InputsData;
+import org.openecomp.sdc.be.resources.data.PolicyTypeData;
+import org.openecomp.sdc.be.resources.data.PropertyData;
+import org.openecomp.sdc.be.resources.data.PropertyValueData;
+import org.openecomp.sdc.be.resources.data.RelationshipInstData;
+import org.openecomp.sdc.be.resources.data.RelationshipTypeData;
+import org.openecomp.sdc.be.resources.data.RequirementData;
+import org.openecomp.sdc.be.resources.data.ResourceMetadataData;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import com.thinkaurelius.titan.core.TitanVertex;
+
+import fj.data.Either;
+
+/**
+ * 1707 migration that rewrites legacy "com.att.d2." TOSCA namespaces to
+ * "org.openecomp." (and "resources." to "resource.") across every relevant
+ * graph node type: resources, group/policy/relationship/capability types and
+ * instances, properties, attributes, inputs and data types.
+ */
+@Component("toscaNamesUpdate")
+public class ToscaNamesUpdate implements Migration {
+	private static Logger log = LoggerFactory.getLogger(ToscaNamesUpdate.class.getName());
+
+	@Override
+	public String description() {
+		return "toscaNamesUpdate";
+	}
+
+	@Autowired
+	protected TitanGenericDao titanGenericDao;
+
+	/**
+	 * Collects an update descriptor per supported node type, then applies the
+	 * namespace rewrite type-by-type. Stops and reports failure on the first
+	 * node type whose update fails.
+	 */
+	@Override
+	public boolean migrate() {
+		boolean result = true;
+		List<ImmutableTriple<NodeTypeEnum, Class<GraphNode>, Function<GraphNode, ImmutablePair<String, GraphNode>>>> updateInfoList = new ArrayList<>();
+		for (NodeTypeEnum nodeType : NodeTypeEnum.values()){
+			ImmutableTriple<NodeTypeEnum, Class<GraphNode>, Function<GraphNode, ImmutablePair<String, GraphNode>>> updateInfo = getInfo(nodeType);
+			// Node types with no namespace data return null and are skipped.
+			if(null == updateInfo)
+				continue;
+			updateInfoList.add(updateInfo);
+		}
+		
+		for(ImmutableTriple<NodeTypeEnum, Class<GraphNode>, Function<GraphNode, ImmutablePair<String, GraphNode>>> nodeTypeData : updateInfoList){
+			log.debug("before updating namespace on nodeType {}", nodeTypeData.left.getName());
+			result = updateNamespaceByNodeType(nodeTypeData);
+			if(!result){
+				log.debug("alignNamespace procedure failed during execution of updating namespace on nodeType {}", nodeTypeData.left.getName());
+				return false;
+			}
+		}
+		return true;
+	}
+
+	/**
+	 * Maps a node type to (type, node class, per-node update function).
+	 * Returns null for node types this migration does not touch.
+	 * NOTE: raw ImmutableTriple construction is deliberate — each case builds a
+	 * triple over a concrete node class, and only the raw type converts
+	 * (unchecked) to the generic return type.
+	 */
+	private <T extends GraphNode> ImmutableTriple<NodeTypeEnum, Class<T>, Function<T, ImmutablePair<String, T>>> getInfo(NodeTypeEnum nodeType) {
+		switch (nodeType) {
+		case Resource:
+			Function<ResourceMetadataData, ImmutablePair<String, ResourceMetadataData>> resourceFunc = r -> updateResource(r);
+			return new ImmutableTriple(nodeType, ResourceMetadataData.class, resourceFunc);
+		case GroupType:
+			Function<GroupTypeData, ImmutablePair<String, GroupTypeData>> groupTypeFunc = g -> updateGroupType(g);
+			return new ImmutableTriple(nodeType, GroupTypeData.class, groupTypeFunc);
+		case Group:
+			Function<GroupData, ImmutablePair<String, GroupData>> groupFunc = g -> updateGroupNode(g);
+			return new ImmutableTriple(nodeType, GroupData.class, groupFunc);
+		case PolicyType:
+			Function<PolicyTypeData, ImmutablePair<String , PolicyTypeData>> policyFunc = p -> updatePolicyType(p);
+			return new ImmutableTriple(nodeType, PolicyTypeData.class, policyFunc);
+		case RelationshipType:
+			Function<RelationshipTypeData, ImmutablePair<String, RelationshipTypeData>> relTypeFunc = r -> updateRelationshipType(r);
+			return new ImmutableTriple(nodeType, RelationshipTypeData.class, relTypeFunc);
+		case RelationshipInst:
+			Function<RelationshipInstData, ImmutablePair<String, RelationshipInstData>> relFunc = r -> updateRelationshipNode(r);
+			return new ImmutableTriple(nodeType, RelationshipInstData.class, relFunc);
+		case Requirement:
+			Function<RequirementData, ImmutablePair<String, RequirementData>> reqFunc = r -> updateRequirementType(r);
+			return new ImmutableTriple(nodeType, RequirementData.class, reqFunc);
+		case CapabilityType:
+			Function<CapabilityTypeData, ImmutablePair<String, CapabilityTypeData>> capTypeFunc = c -> updateCapabilityType(c);
+			return new ImmutableTriple(nodeType, CapabilityTypeData.class, capTypeFunc);
+		case Capability:
+			Function<CapabilityData, ImmutablePair<String, CapabilityData>> capFunc = c -> updateCapabilityNode(c);
+			return new ImmutableTriple(nodeType, CapabilityData.class, capFunc);
+		case Property:
+			Function<PropertyData, ImmutablePair<String, PropertyData>> propFunc = p -> updatePropNode(p);
+			return new ImmutableTriple(nodeType, PropertyData.class, propFunc);
+		case PropertyValue:
+			Function<PropertyValueData, ImmutablePair<String, PropertyValueData>> propValueFunc = p -> updatePropValueNode(p);
+			return new ImmutableTriple(nodeType, PropertyValueData.class, propValueFunc);
+		case Attribute:	
+			Function<AttributeData, ImmutablePair<String, AttributeData>> attrFunc = a -> updateAttributeNode(a);
+			return new ImmutableTriple(nodeType, AttributeData.class, attrFunc);
+		case AttributeValue:
+			Function<AttributeValueData, ImmutablePair<String, AttributeValueData>> attrValueFunc = a -> updateAttrValueNode(a);
+			return new ImmutableTriple(nodeType, AttributeValueData.class, attrValueFunc);
+		case Input:
+			Function<InputsData, ImmutablePair<String, InputsData>> inputFunc = i -> updateInputNode(i);
+			return new ImmutableTriple(nodeType, InputsData.class, inputFunc);
+		case InputValue:
+			Function<InputValueData, ImmutablePair<String, InputValueData>> inputValueFunc = i -> updateInputValueNode(i);
+			return new ImmutableTriple(nodeType, InputValueData.class, inputValueFunc);
+		case DataType:
+			Function<DataTypeData, ImmutablePair<String, DataTypeData>> dataTypeFunc = d -> updateDataType(d);
+			return new ImmutableTriple(nodeType, DataTypeData.class, dataTypeFunc);
+		default:
+			return null;
+		}
+
+	}
+	
+	
+
+	// No nodes of a given type (NOT_FOUND) counts as success; any other status fails.
+	private boolean ifRight(TitanOperationStatus status){
+		return TitanOperationStatus.NOT_FOUND == status;
+	}
+	
+	/**
+	 * Applies the node-type's update function to every fetched node, then
+	 * commits on full success or rolls back on the first failure (the finally
+	 * block guarantees exactly one of the two runs).
+	 */
+	private <T extends GraphNode> boolean ifLeft(List<T> allNodes, ImmutableTriple<NodeTypeEnum, Class<T>, Function<T, ImmutablePair<String, T>>> nodeTypeData){
+		boolean result = true;
+		try {
+			for (T node : allNodes) {
+				// left = original unique id (lookup key), right = node with updated names
+				ImmutablePair<String, T> nodeToUpdate = nodeTypeData.right.apply(node);
+				Either<T, TitanOperationStatus> updatedNode = updateNodeIncludingUID(nodeToUpdate.left, nodeToUpdate.right, nodeTypeData.middle);
+				if (updatedNode.isRight()) {
+					result = false;
+					break;
+				}
+			}
+		} finally {
+			if (!result) {
+				titanGenericDao.rollback();
+			} else {
+				titanGenericDao.commit();
+			}
+		}
+		return result;
+	}
+	
+	// Fetches all nodes of the given type (null criteria = no filter) and updates them.
+	private <T extends GraphNode> boolean updateNamespaceByNodeType(ImmutableTriple<NodeTypeEnum, Class<T>, Function<T, ImmutablePair<String, T>>> nodeTypeData) {
+		Either<List<T>, TitanOperationStatus> getAllNodes = titanGenericDao.getByCriteria(nodeTypeData.left, null, nodeTypeData.middle);
+		return getAllNodes.either(list -> ifLeft(list, nodeTypeData), status -> ifRight(status));
+	}
+
+	// Rewrites the resource's TOSCA resource name; unique id is unchanged.
+	private ImmutablePair<String, ResourceMetadataData> updateResource(ResourceMetadataData resource) {
+		String toscaResourceName = updateNamespace(((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).getToscaResourceName());
+		((ResourceMetadataDataDefinition) resource.getMetadataDataDefinition()).setToscaResourceName(toscaResourceName);
+		return new ImmutablePair<>((String) resource.getUniqueId(), resource);
+	}
+
+	// Rewrites both unique id and type; returns the ORIGINAL id as lookup key.
+	private ImmutablePair<String, GroupTypeData> updateGroupType(GroupTypeData group) {
+		String originId = group.getUniqueId();
+		group.getGroupTypeDataDefinition().setUniqueId(updateNamespace(originId));
+		String type = updateNamespace(group.getGroupTypeDataDefinition().getType());
+		group.getGroupTypeDataDefinition().setType(type);
+		return new ImmutablePair<>(originId, group);
+	}
+
+	private ImmutablePair<String, GroupData> updateGroupNode(GroupData group) {
+		String type = updateNamespace(group.getGroupDataDefinition().getType());
+		group.getGroupDataDefinition().setType(type);
+		return new ImmutablePair<>((String) group.getUniqueId(), group);
+	}
+	
+
+	// Rewrites both unique id and type; returns the ORIGINAL id as lookup key.
+	private ImmutablePair<String, PolicyTypeData> updatePolicyType(PolicyTypeData policy) {
+		String originId = policy.getUniqueId();
+		policy.getPolicyTypeDataDefinition().setUniqueId(updateNamespace(originId));
+		String type = updateNamespace(policy.getPolicyTypeDataDefinition().getType());
+		policy.getPolicyTypeDataDefinition().setType(type);
+		return new ImmutablePair<>(originId, policy);
+	}
+
+	// Rewrites the relationship type plus each of its valid source types (if any).
+	private ImmutablePair<String, RelationshipTypeData> updateRelationshipType(RelationshipTypeData relation) {
+		String type = updateNamespace(relation.getRelationshipTypeDataDefinition().getType());
+		relation.getRelationshipTypeDataDefinition().setType(type);
+		List<String> validSources = relation.getRelationshipTypeDataDefinition().getValidSourceTypes();
+		if(null != validSources){
+			List<String> validSourceTypes = new ArrayList<>();
+			for (String validSourceType : validSources) {
+				validSourceTypes.add(updateNamespace(validSourceType));
+			}
+			relation.getRelationshipTypeDataDefinition().setValidSourceTypes(validSourceTypes);
+		}
+		return new ImmutablePair<>(relation.getUniqueId(), relation);
+	}
+
+	private ImmutablePair<String, RelationshipInstData> updateRelationshipNode(RelationshipInstData relation) {
+		String type = updateNamespace(relation.getType());
+		relation.setType(type);
+		return new ImmutablePair<>(relation.getUniqueId(), relation);
+	}
+
+	// Rewrites the requirement's node reference (optional) and relationship type.
+	private ImmutablePair<String, RequirementData> updateRequirementType(RequirementData req) {
+		String node = req.getNode();
+		if(null != node)
+			req.setNode(updateNamespace(node));
+		String type = updateNamespace(req.getRelationshipType());
+		req.setRelationshipType(type);
+		return new ImmutablePair<>(req.getUniqueId(), req);
+	}
+
+	// Rewrites unique id, type and valid source types; returns the ORIGINAL id.
+	private ImmutablePair<String, CapabilityTypeData> updateCapabilityType(CapabilityTypeData capType) {
+		String originId = capType.getUniqueId();
+		capType.getCapabilityTypeDataDefinition().setUniqueId(updateNamespace(originId));
+		String type = updateNamespace(capType.getCapabilityTypeDataDefinition().getType());
+		capType.getCapabilityTypeDataDefinition().setType(type);
+		List<String> validSources = capType.getCapabilityTypeDataDefinition().getValidSourceTypes();
+		if(null != validSources){
+			List<String> validSourceTypes = new ArrayList<>();
+			for (String validSourceType : validSources) {
+				validSourceTypes.add(updateNamespace(validSourceType));
+			}
+			capType.getCapabilityTypeDataDefinition().setValidSourceTypes(validSourceTypes);
+		}	
+		return new ImmutablePair<>(originId, capType);
+
+	}
+
+	private ImmutablePair<String, CapabilityData> updateCapabilityNode(CapabilityData capNode) {
+		List<String> validSources = capNode.getValidSourceTypes();
+		if(null != validSources){
+			List<String> validSourceTypes = new ArrayList<>();
+			for (String validSourceType : validSources) {
+				validSourceTypes.add(updateNamespace(validSourceType));
+			}
+			capNode.setValidSourceTypes(validSourceTypes);
+		}		
+		return new ImmutablePair<>(capNode.getUniqueId(), capNode);
+	}
+
+
+	// For list/map properties the element type lives in the schema and is rewritten too.
+	private ImmutablePair<String, PropertyData> updatePropNode(PropertyData propType) {
+		String originId = (String)propType.getUniqueId();
+		propType.getPropertyDataDefinition().setUniqueId(updateNamespace(originId));
+		String type = updateNamespace(propType.getPropertyDataDefinition().getType());
+		propType.getPropertyDataDefinition().setType(type);
+		if ("list".equalsIgnoreCase(type) || "map".equalsIgnoreCase(type)){
+			SchemaDefinition schema = propType.getPropertyDataDefinition().getSchema();
+			if(null != schema && null != schema.getProperty())
+				handleSchemaTypeDef(schema.getProperty());
+		}
+		return new ImmutablePair<>(originId, propType);
+	}
+
+	private ImmutablePair<String, PropertyValueData> updatePropValueNode(PropertyValueData prop) {
+		String type = updateNamespace(prop.getType());
+		prop.setType(type);
+		return new ImmutablePair<>(prop.getUniqueId(), prop);
+	}
+	
+	private ImmutablePair<String, AttributeValueData> updateAttrValueNode(AttributeValueData attr) {
+		String type = updateNamespace(attr.getType());
+		attr.setType(type);
+		return new ImmutablePair<>(attr.getUniqueId(), attr);
+	}
+	
+	private ImmutablePair<String, InputValueData> updateInputValueNode(InputValueData input) {
+		String type = updateNamespace(input.getType());
+		input.setType(type);
+		return new ImmutablePair<>(input.getUniqueId(), input);
+	}
+	
+	// For list/map inputs the element type lives in the schema and is rewritten too.
+	private ImmutablePair<String, InputsData> updateInputNode(InputsData input){
+		String type = updateNamespace(input.getPropertyDataDefinition().getType());
+		input.getPropertyDataDefinition().setType(type);
+		if ("list".equalsIgnoreCase(type) || "map".equalsIgnoreCase(type)){
+			SchemaDefinition schema = input.getPropertyDataDefinition().getSchema();
+			if(null != schema && null != schema.getProperty())
+				handleSchemaTypeDef(schema.getProperty());
+		}
+		return new ImmutablePair<>((String)input.getUniqueId(), input);
+	}
+
+
+	// Rewrites the element type of a list/map schema in place.
+	private void handleSchemaTypeDef(PropertyDataDefinition schemaProp) {
+		String schemaType = updateNamespace(schemaProp.getType());
+		schemaProp.setType(schemaType);
+	}
+
+	// Rewrites unique id, name and derived-from name; returns the ORIGINAL id.
+	private ImmutablePair<String, DataTypeData> updateDataType(DataTypeData dataType) {
+		String originId = dataType.getUniqueId();
+		dataType.getDataTypeDataDefinition().setUniqueId(updateNamespace(originId));
+		String name = updateNamespace(dataType.getDataTypeDataDefinition().getName());
+		dataType.getDataTypeDataDefinition().setName(name);
+		String derivedFromName = updateNamespace(dataType.getDataTypeDataDefinition().getDerivedFromName());
+		dataType.getDataTypeDataDefinition().setDerivedFromName(derivedFromName);
+		return new ImmutablePair<>(originId, dataType);
+
+	}
+	
+	private ImmutablePair<String, AttributeData> updateAttributeNode(AttributeData attr){
+		String type = updateNamespace(attr.getAttributeDataDefinition().getType());
+		attr.getAttributeDataDefinition().setType(type);
+		if("list".equalsIgnoreCase(type) || "map".equalsIgnoreCase(type)){
+			SchemaDefinition schema = attr.getAttributeDataDefinition().getSchema();
+			if(null != schema && null != schema.getProperty())
+				handleSchemaTypeDef(schema.getProperty());
+		}
+		return new ImmutablePair<>(attr.getUniqueId(), attr);
+	}
+	
+	
+
+	/**
+	 * The core rename: "com.att.d2." -> "org.openecomp.", then the naming
+	 * convention fix "org.openecomp.resources." -> "org.openecomp.resource.".
+	 * Null-safe: null in, null out.
+	 */
+	private String updateNamespace(String oldName) {
+		if (oldName == null) {
+			return null;
+		}
+		String name = oldName.replace("com.att.d2.", "org.openecomp.");
+		// correcting naming convention
+		return name.replace("org.openecomp.resources.", "org.openecomp.resource.");
+	}
+	
+	// Re-reads the vertex properties and materializes them as a typed graph node.
+	private <T extends GraphNode> T onSuccess(TitanVertex vertex, GraphNode node, Class<T> clazz){
+		Map<String, Object> newProp = titanGenericDao.getProperties(vertex);
+		return GraphElementFactory.createElement(node.getLabel(), GraphElementTypeEnum.Node, newProp, clazz);
+	}
+	
+	/**
+	 * Writes the node's updated properties onto the vertex, then re-fetches it
+	 * by its (possibly new) unique id to confirm the write. Titan exceptions
+	 * are converted to a TitanOperationStatus on the right.
+	 */
+	private <T extends GraphNode> Either<T, TitanOperationStatus> handleNode(Vertex vertex, GraphNode node, Class<T> clazz){
+		try {
+			
+			Map<String, Object> mapProps = node.toGraphMap();
+
+			for (Map.Entry<String, Object> entry : mapProps.entrySet()) {
+				vertex.property(entry.getKey(), entry.getValue());
+			}
+
+			Either<TitanVertex, TitanOperationStatus> vertexByPropertyAndLabel = titanGenericDao.getVertexByProperty(node.getUniqueIdKey(), node.getUniqueId());
+			return vertexByPropertyAndLabel.either(v -> Either.left(onSuccess(v, node, clazz)), status -> Either.right(status));
+			
+		} catch (Exception e) {
+			if (log.isDebugEnabled()) {
+				log.debug("Failed to update node for {}", node.getKeyValueId(), e);
+			}
+			return Either.right(TitanGraphClient.handleTitanException(e));
+		}
+	}
+	
+	// Looks the vertex up by its ORIGINAL id (the node may carry a new one) and updates it.
+	private <T extends GraphNode> Either<T, TitanOperationStatus> updateNodeIncludingUID(String originId, GraphNode node, Class<T> clazz) {
+		Either<TitanVertex, TitanOperationStatus> vertexByProperty = titanGenericDao.getVertexByProperty(node.getUniqueIdKey(), originId);
+		return vertexByProperty.either(vertex -> handleNode(vertex, node, clazz), status -> Either.right(status));	
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaTemplateRegeneration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaTemplateRegeneration.java
new file mode 100644
index 0000000..824bb83
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/ToscaTemplateRegeneration.java
@@ -0,0 +1,160 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+import org.apache.commons.collections.MapUtils;
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ArtifactDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.model.LifecycleStateEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.tosca.ToscaError;
+import org.openecomp.sdc.be.tosca.ToscaExportHandler;
+import org.openecomp.sdc.be.tosca.ToscaRepresentation;
+import org.openecomp.sdc.common.util.GeneralUtility;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import fj.data.Either;
+
+@Component("toscaTemplateRegeneration")
+public class ToscaTemplateRegeneration implements Migration {
+
+	private static final Logger LOGGER = LoggerFactory.getLogger(ToscaTemplateRegeneration.class);
+	
+	@Autowired
+	protected ArtifactCassandraDao artifactCassandraDao;
+	
+	@Autowired
+	private ToscaExportHandler toscaExportUtils;
+
+	@Autowired
+    private ToscaOperationFacade toscaOperationFacade;
+    
+	/**
+	 * Re-exports the TOSCA template artifact of every certified component and
+	 * stores the regenerated payload both on the graph and in Cassandra.
+	 * The graph transaction is committed only when the whole run succeeds,
+	 * otherwise it is rolled back.
+	 *
+	 * @return true when the migration completed successfully
+	 */
+	@Override
+	public boolean migrate() {
+		boolean result = true;
+		Either<Map<GraphVertex, org.openecomp.sdc.be.model.Component>, StorageOperationStatus> getAllCertifiedComponentsRes;
+		try{
+			getAllCertifiedComponentsRes = getAllCertifiedComponents();
+			if(getAllCertifiedComponentsRes.isRight()){
+				result = false;
+			}
+			if(result && MapUtils.isNotEmpty(getAllCertifiedComponentsRes.left().value())){
+				result = regenerateToscaTemplateArtifacts(getAllCertifiedComponentsRes.left().value());
+			}
+		} catch(Exception e){
+			// BUGFIX: the previous message used an SLF4J "{}" placeholder with the Throwable
+			// as its only argument; SLF4J treats a trailing Throwable specially, so the
+			// placeholder was never filled. Log a plain message with the exception instead.
+			LOGGER.error("An exception occurred during tosca template regeneration migration. ", e);
+			result = false;
+		} finally {
+			// commit or roll back the graph transaction according to the overall outcome
+			if(result){
+				toscaOperationFacade.commit();
+			} else {
+				toscaOperationFacade.rollback();
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Regenerates the TOSCA template artifact of each component that actually
+	 * owns an ASSET_TOSCA_TEMPLATE artifact; stops at the first failure.
+	 *
+	 * @return true when every filtered component was processed successfully
+	 */
+	private boolean regenerateToscaTemplateArtifacts(Map<GraphVertex, org.openecomp.sdc.be.model.Component> components) {
+		boolean result = true;
+		
+		// only components that already carry a tosca template artifact are relevant
+		Map<GraphVertex, org.openecomp.sdc.be.model.Component> filteredComponents = components.entrySet()
+				.stream()
+				.filter(e -> e.getValue().getToscaArtifacts()!=null && e.getValue().getToscaArtifacts().containsKey(ToscaExportHandler.ASSET_TOSCA_TEMPLATE))
+				.collect(Collectors.toMap(Entry::getKey, Entry::getValue));
+		
+		for(Entry<GraphVertex, org.openecomp.sdc.be.model.Component> currComponent : filteredComponents.entrySet()){
+			result = regenerateToscaTemplateArtifact(currComponent);
+			if(!result){
+				break;
+			}
+		}
+		return result;
+	}
+	
+	/**
+	 * Regenerates a single component's TOSCA template: exports the yaml, updates
+	 * the artifact checksum on the TOSCA_ARTIFACTS vertex and saves the new
+	 * payload to Cassandra under the existing ES id.
+	 */
+	@SuppressWarnings("unchecked")
+	private boolean regenerateToscaTemplateArtifact(Map.Entry<GraphVertex, org.openecomp.sdc.be.model.Component> parent) {
+		boolean result = true;
+		Either<GraphVertex, TitanOperationStatus> toscaDataVertexRes = null;
+		ArtifactDataDefinition data = null;
+		LOGGER.debug("tosca artifact generation");
+		Either<ToscaRepresentation, ToscaError> exportComponent = toscaExportUtils.exportComponent(parent.getValue());
+		if (exportComponent.isRight()) {
+			LOGGER.debug("Failed export tosca yaml for component {} error {}", parent.getValue().getUniqueId(), exportComponent.right().value());
+			result = false;
+		}
+		if(result){
+			LOGGER.debug("Tosca yaml exported for component {} ", parent.getValue().getUniqueId());
+			toscaDataVertexRes = toscaOperationFacade.getTitanDao().getChildVertex(parent.getKey(), EdgeLabelEnum.TOSCA_ARTIFACTS, JsonParseFlagEnum.ParseJson);
+			if(toscaDataVertexRes.isRight()){
+				// BUGFIX: previously this logged exportComponent.right().value(), but
+				// exportComponent is a Left at this point (its right() access would fail);
+				// log the status of the vertex fetch that actually failed.
+				LOGGER.debug("Failed to fetch tosca data vertex {} for component {}. Status is {}", EdgeLabelEnum.TOSCA_ARTIFACTS, parent.getValue().getUniqueId(), toscaDataVertexRes.right().value());
+				result = false;
+			}
+		}
+		if(result){
+			// refresh the artifact checksum from the newly exported yaml and push it back onto the vertex json
+			data = parent.getValue().getToscaArtifacts().get(ToscaExportHandler.ASSET_TOSCA_TEMPLATE);
+			data.setArtifactChecksum(GeneralUtility.calculateMD5ByByteArray(exportComponent.left().value().getMainYaml().getBytes()));
+			
+			((Map<String, ArtifactDataDefinition>) toscaDataVertexRes.left().value().getJson()).put(ToscaExportHandler.ASSET_TOSCA_TEMPLATE, data);
+			
+			Either<GraphVertex, TitanOperationStatus>  updateVertexRes = toscaOperationFacade.getTitanDao().updateVertex(toscaDataVertexRes.left().value());
+			if(updateVertexRes.isRight()){
+				result = false;
+			}
+		}
+		if(result){
+			// persist the regenerated yaml payload to Cassandra under the existing ES id
+			ESArtifactData artifactData = new ESArtifactData(data.getEsId(), exportComponent.left().value().getMainYaml().getBytes());
+			CassandraOperationStatus status = artifactCassandraDao.saveArtifact(artifactData);
+			if(status != CassandraOperationStatus.OK){
+				result = false;
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Fetches every certified component from the graph together with its vertex.
+	 * NOT_FOUND is not an error — it yields an empty map.
+	 *
+	 * @return vertex-to-component map, or the storage status on failure
+	 */
+	public Either<Map<GraphVertex, org.openecomp.sdc.be.model.Component>, StorageOperationStatus> getAllCertifiedComponents() {
+
+		Map<GraphVertex, org.openecomp.sdc.be.model.Component> components = new HashMap<>();
+		Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
+		propertiesToMatch.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+		Either<List<GraphVertex>, TitanOperationStatus> getVerticesRes = toscaOperationFacade.getTitanDao().getByCriteria(null, propertiesToMatch,JsonParseFlagEnum.ParseAll);
+
+		if (getVerticesRes.isRight() && getVerticesRes.right().value() != TitanOperationStatus.NOT_FOUND) {
+			LOGGER.debug("Failed to fetch all certified components. Status is {}", getVerticesRes.right().value());
+			return Either.right(DaoStatusConverter.convertTitanStatusToStorageStatus(getVerticesRes.right().value()));
+		}
+		if(getVerticesRes.isLeft()){
+			List<GraphVertex> componentVertices = getVerticesRes.left().value();
+			for (GraphVertex componentV : componentVertices) {
+				Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getComponentsRes = toscaOperationFacade.getToscaElement(componentV);
+				if (getComponentsRes.isRight()) {
+					return Either.right(getComponentsRes.right().value());
+				}
+				components.put(componentV, getComponentsRes.left().value());
+			}
+		}
+		return Either.left(components);
+	}
+	
+	@Override
+	public String description() {
+		return "toscaTemplateRegeneration";
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/VfModulesPropertiesAdding.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/VfModulesPropertiesAdding.java
new file mode 100644
index 0000000..5b14419
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/VfModulesPropertiesAdding.java
@@ -0,0 +1,227 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.stream.Collectors;
+
+import javax.annotation.Resource;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.openecomp.sdc.asdctool.impl.migration.v1702.DataTypesUpdate;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.DataTypeDefinition;
+import org.openecomp.sdc.be.model.GroupDefinition;
+import org.openecomp.sdc.be.model.GroupInstance;
+import org.openecomp.sdc.be.model.GroupInstanceProperty;
+import org.openecomp.sdc.be.model.GroupProperty;
+import org.openecomp.sdc.be.model.GroupTypeDefinition;
+import org.openecomp.sdc.be.model.PropertyDefinition;
+import org.openecomp.sdc.be.model.jsontitan.operations.BaseOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
+import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
+import org.openecomp.sdc.be.resources.data.PropertyData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+import fj.data.Either;
+
+@Component("vfModulesPropertiesAdding")
+public class VfModulesPropertiesAdding {
+
+	// BUGFIX: the logger was created for ToscaTemplateRegeneration.class (copy-paste
+	// error); it now reports this class, and is final per convention.
+	private static final Logger LOGGER = LoggerFactory.getLogger(VfModulesPropertiesAdding.class);
+	
+	@Autowired
+    private ToscaOperationFacade toscaOperationFacade;
+	
+	@Autowired
+    private TopologyTemplateOperation topologyTemplateOperation;
+	
+	@Resource(name ="group-type-operation-mig")
+    private GroupTypeOperation groupTypeOperation;
+	
+	@Resource(name = "property-operation-mig")
+    private PropertyOperation propertyOperation;
+	
+	
+	/**
+	 * Adds any VfModule group-type properties found in the given group-types yaml
+	 * but missing from the stored VfModule group type, then propagates the new
+	 * properties to all VfModule groups and group instances of every topology
+	 * template. Commits on success, rolls back on any failure.
+	 *
+	 * @param groupsTypeYmlFilePath path of the group-types yaml file
+	 * @return true when the migration completed successfully
+	 */
+	public boolean migrate(String groupsTypeYmlFilePath) {
+		boolean result = true;
+		Either<Map<org.openecomp.sdc.be.model.Component, GraphVertex>, StorageOperationStatus> getAllComponentsRes = null;
+		GroupTypeDefinition vfModule;
+		Either<List<GraphVertex>, TitanOperationStatus> getAllTopologyTemplatesRes = null;
+		List<PropertyDefinition> newProperties = null;
+
+		Either<GroupTypeDefinition, TitanOperationStatus> getGroupTypeVfModuleRes ;
+		try{
+			getGroupTypeVfModuleRes = groupTypeOperation.getGroupTypeByUid("org.openecomp.groups.VfModule.1.0.grouptype");
+			
+			if(getGroupTypeVfModuleRes.isRight()){
+				 result = false;
+			}
+			if(result){
+				vfModule = getGroupTypeVfModuleRes.left().value();
+				newProperties = getNewVfModuleTypeProperties(getAllVfModuleTypePropertiesFromYaml(groupsTypeYmlFilePath), vfModule);
+				result = addNewPropertiesToGroupType(vfModule, newProperties);
+			}
+			if(result && CollectionUtils.isNotEmpty(newProperties)){
+				// fetch all topology templates that are not marked as deleted
+				Map<GraphPropertyEnum, Object> propsHasNot = new EnumMap<>(GraphPropertyEnum.class);
+				propsHasNot.put(GraphPropertyEnum.IS_DELETED, true);
+				getAllTopologyTemplatesRes = toscaOperationFacade.getTitanDao().getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, null, propsHasNot, JsonParseFlagEnum.ParseAll);
+				if (getAllTopologyTemplatesRes.isRight() && getAllTopologyTemplatesRes.right().value() != TitanOperationStatus.NOT_FOUND) {
+					LOGGER.debug("Failed to fetch all non marked topology templates , propsHasNot {}, error {}", propsHasNot, getAllTopologyTemplatesRes.right().value());
+					result = false;
+				}
+			}
+			if(result && getAllTopologyTemplatesRes!=null && getAllTopologyTemplatesRes.isLeft()){
+				getAllComponentsRes = getAllContainerComponents(getAllTopologyTemplatesRes.left().value());
+				if(getAllComponentsRes.isRight()){
+					result = false;
+				}
+			}
+			if(result && getAllComponentsRes != null){
+				result = addNewVfModulesProperties(getAllComponentsRes.left().value(), newProperties);
+			}
+		} catch (Exception e){
+			// BUGFIX: the exception was silently swallowed; log it so failures are diagnosable
+			LOGGER.error("An exception occurred during vf modules properties adding migration. ", e);
+			result = false;
+		}
+		finally{
+			if(result){
+				toscaOperationFacade.commit();
+			} else {
+				toscaOperationFacade.rollback();
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Applies the new group-type properties to the VfModules of each component;
+	 * stops at the first failure.
+	 */
+	private boolean addNewVfModulesProperties(Map<org.openecomp.sdc.be.model.Component, GraphVertex> components, List<PropertyDefinition> newGroupTypeProperties) {
+		boolean result = true;
+		for(Map.Entry<org.openecomp.sdc.be.model.Component, GraphVertex> component : components.entrySet()){
+			result = addNewPropertiesToVfModules(component, newGroupTypeProperties);
+			if(!result){
+				break;
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Adds the new properties to all VfModule groups of the component and then,
+	 * if the component has instances, to their VfModule group instances as well.
+	 */
+	private boolean addNewPropertiesToVfModules(Entry<org.openecomp.sdc.be.model.Component, GraphVertex> component, List<PropertyDefinition> newGroupTypeProperties) {
+		boolean result = true;
+		List<GroupDefinition> vfModules = null;
+		if(CollectionUtils.isNotEmpty(component.getKey().getGroups())){
+			vfModules = component.getKey().getGroups().stream().filter(g -> g.getType().equals(BaseOperation.VF_MODULE)).collect(Collectors.toList());
+		}
+		if(vfModules != null){
+			// NOTE(review): assumes every VfModule group has a non-null property list — confirm
+			vfModules.forEach(vfModule -> vfModule.getProperties().addAll(newGroupTypeProperties));
+			StorageOperationStatus status = topologyTemplateOperation.updateToscaDataOfToscaElement(component.getValue(), EdgeLabelEnum.GROUPS, VertexTypeEnum.GROUPS, vfModules, JsonPresentationFields.NAME);
+			if(status!= StorageOperationStatus.OK){
+				result = false;
+			}
+		}
+		if(result && CollectionUtils.isNotEmpty(component.getKey().getComponentInstances())){
+			result = addPropertiesToVfModuleInstances(component, newGroupTypeProperties);
+		}
+		return result;
+	}
+
+	/**
+	 * Adds the new properties to every VfModule group instance of every component
+	 * instance; stops at the first failed graph update.
+	 */
+	private boolean addPropertiesToVfModuleInstances(Entry<org.openecomp.sdc.be.model.Component, GraphVertex> component, List<PropertyDefinition> newGroupTypeProperties) {
+		boolean result = true;
+		List<GroupInstance> vfModuleInstances;
+		List<String> pathKeys;
+		for(ComponentInstance componentInstance : component.getKey().getComponentInstances()){
+			vfModuleInstances = null;
+			if(CollectionUtils.isNotEmpty(componentInstance.getGroupInstances())){
+				vfModuleInstances = componentInstance.getGroupInstances()
+						.stream()
+						.filter(gi -> gi.getType().equals(BaseOperation.VF_MODULE))
+						.collect(Collectors.toList());
+			}
+			if(vfModuleInstances != null){
+				for(GroupInstance vfModuleInstance :vfModuleInstances){
+					vfModuleInstance.getProperties().addAll(newGroupTypeProperties);
+					// the deep-element path is keyed by the owning component instance id
+					pathKeys = new ArrayList<>();
+					pathKeys.add(componentInstance.getUniqueId());
+					StorageOperationStatus status = topologyTemplateOperation
+							.updateToscaDataDeepElementOfToscaElement(component.getValue(), EdgeLabelEnum.INST_GROUPS, VertexTypeEnum.INST_GROUPS, vfModuleInstance, pathKeys, JsonPresentationFields.NAME);
+					if(status!= StorageOperationStatus.OK){
+						result = false;
+						break;
+					}
+				}
+				if(!result){
+					break;
+				}
+			}
+		}
+		return result;
+	}
+
+	/**
+	 * Loads the full component model for each vertex; fails fast on the first
+	 * component that cannot be loaded.
+	 */
+	private Either<Map<org.openecomp.sdc.be.model.Component, GraphVertex>, StorageOperationStatus> getAllContainerComponents(List<GraphVertex> componentsV) {
+		Map<org.openecomp.sdc.be.model.Component, GraphVertex> foundComponents = new HashMap<>();
+		Either<Map<org.openecomp.sdc.be.model.Component, GraphVertex>, StorageOperationStatus> result = null;
+		for(GraphVertex componentV : componentsV){
+			Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getComponentRes = toscaOperationFacade.getToscaElement(componentV);
+			if(getComponentRes.isRight()){
+				result = Either.right(getComponentRes.right().value());
+				break;
+			}
+			foundComponents.put(getComponentRes.left().value(), componentV);
+		}
+		if(result == null){
+			result = Either.left(foundComponents);
+		}
+		return result;
+	}
+	
+	
+	/**
+	 * Persists the new properties on the VfModule group type itself.
+	 */
+	private boolean addNewPropertiesToGroupType(GroupTypeDefinition vfModule, List<PropertyDefinition> newProperties) {
+		boolean result = true;
+		Either<Map<String, PropertyData>, TitanOperationStatus> addPropertiesRes = propertyOperation
+				.addPropertiesToElementType(vfModule.getUniqueId(), NodeTypeEnum.GroupType, newProperties);
+		if(addPropertiesRes.isRight()){
+			result = false;
+		}
+		return result;
+	}
+
+	/**
+	 * Extracts the VfModule group-type properties from the group-types yaml.
+	 *
+	 * @return the VfModule properties, or an empty list when the yaml contains no VfModule entry
+	 */
+	private List<PropertyDefinition> getAllVfModuleTypePropertiesFromYaml(String groupsTypeYmlFilePath) {
+		List<DataTypeDefinition> groupTypes = DataTypesUpdate.extractDataTypesFromYaml(groupsTypeYmlFilePath);
+		DataTypeDefinition vfModule = groupTypes.stream().filter(g -> g.getName().equals(BaseOperation.VF_MODULE)).findFirst().orElse(null);
+		// BUGFIX: guard against a yaml without a VfModule definition (previously an NPE)
+		return vfModule == null ? new ArrayList<>() : vfModule.getProperties();
+	}
+	
+	/**
+	 * Returns the yaml properties that are not yet present (by name) on the
+	 * stored VfModule group type.
+	 */
+	private List<PropertyDefinition> getNewVfModuleTypeProperties(List<PropertyDefinition> allVfModuleTypeProperties, GroupTypeDefinition vfModule) {
+		Map<String, PropertyDefinition> existingVfModuleTypeProperties = vfModule.getProperties()
+				.stream()
+				.collect(Collectors.toMap(PropertyDefinition::getName, p -> p));
+		
+		List<PropertyDefinition> newGroupTypeProperties = new ArrayList<>();
+		for(PropertyDefinition property : allVfModuleTypeProperties){
+			if(!existingVfModuleTypeProperties.containsKey(property.getName())){
+				newGroupTypeProperties.add(property);
+			}
+		}
+		return newGroupTypeProperties;
+	}
+
+	public String description() {
+		return "vfModulesPropertiesAdding";
+	}
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/CategoriesUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/CategoriesUtils.java
new file mode 100644
index 0000000..2ba7e01
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/CategoriesUtils.java
@@ -0,0 +1,25 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import org.openecomp.sdc.be.model.category.CategoryDefinition;
+import org.openecomp.sdc.be.model.category.SubCategoryDefinition;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Static helpers for filtering out pre-1707 ("old") category definitions,
+ * identified by a unique id containing the legacy "layer" substring.
+ */
+public class CategoriesUtils {
+
+    public static final String OLD_CATEGORY_ID_SUB_STR = "layer";
+
+    // utility class — not meant to be instantiated
+    private CategoriesUtils() {
+    }
+
+    /** @return only the categories whose unique id does not mark them as legacy */
+    public static List<CategoryDefinition> filterOldCategories(List<CategoryDefinition> categoryDefinitions) {
+        return categoryDefinitions.stream()
+                .filter(categoryDefinition -> !categoryDefinition.getUniqueId().contains(OLD_CATEGORY_ID_SUB_STR))
+                .collect(Collectors.toList());
+    }
+
+    /** @return only the sub-categories whose unique id does not mark them as legacy */
+    public static List<SubCategoryDefinition> filterOldSubCategories(List<SubCategoryDefinition> categoryDefinitions) {
+        return categoryDefinitions.stream()
+                .filter(categoryDefinition -> !categoryDefinition.getUniqueId().contains(OLD_CATEGORY_ID_SUB_STR))
+                .collect(Collectors.toList());
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ComponentMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ComponentMigration.java
new file mode 100644
index 0000000..c9212f9
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ComponentMigration.java
@@ -0,0 +1,48 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.Function;
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.FulfilledCapabilitiesMigrationService;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.FulfilledRequirementsMigrationService;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations.RequirementsCapabilitiesMigrationService;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+
+import static org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils.handleError;
+
+/**
+ * Base migration for components (resources/services): saves each component on
+ * the new json graph and checks its existence by unique id.
+ *
+ * @param <T> concrete component type being migrated
+ */
+public abstract class ComponentMigration <T extends Component> extends JsonModelMigration<T> {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(ComponentMigration.class);
+
+    @Resource(name = "tosca-operation-facade")
+    private ToscaOperationFacade toscaOperations;
+
+    @Resource(name = "req-cap-mig-service")
+    RequirementsCapabilitiesMigrationService<T> requirementsCapabilitiesMigrationService;
+
+    /** Persists the component on the new graph, logging any creation failure. */
+    @Override
+    Either<T, StorageOperationStatus> save(T element) {
+        // parameterized logging avoids the String.format cost when debug is disabled
+        LOGGER.debug("creating component {} in new graph", element.getName());
+        return toscaOperations.createToscaComponent(element).right().map(err -> handleError(err, String.format("failed to create component %s.", element.getName())));
+
+    }
+
+    /** Looks the component up on the new graph by its unique id (metadata only). */
+    @Override
+    Either<T, StorageOperationStatus> getElementFromNewGraph(T element) {
+        LOGGER.debug("checking if component {} already exists on new graph", element.getName());
+        return toscaOperations.getToscaElement(element.getUniqueId(), JsonParseFlagEnum.ParseMetadata);
+    }
+
+    @Override
+    public StorageOperationStatus getNotFoundErrorStatus() {
+        return StorageOperationStatus.NOT_FOUND;
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/JsonModelMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/JsonModelMigration.java
new file mode 100644
index 0000000..6070104
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/JsonModelMigration.java
@@ -0,0 +1,90 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationMsg;
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+
+import javax.annotation.Resource;
+import java.util.List;
+
+/**
+ * Template for migrating a collection of elements from the old graph model to
+ * the new json-based graph. Subclasses supply element retrieval, existence
+ * check and persistence; this class drives the per-element commit/rollback loop.
+ *
+ * @param <T> type of the migrated element
+ */
+public abstract class JsonModelMigration<T> implements Migration {
+
+    // constant success marker — carries no per-instance state, hence static final
+    private static final boolean COMPLETED_OK = true;
+
+    @Resource(name = "titan-dao")
+    TitanDao titanDao;
+
+    /**
+     * Migrates every element returned by {@link #getElementsToMigrate()},
+     * committing after each successful element and rolling back (then aborting)
+     * on the first failure.
+     */
+    @Override
+    public boolean migrate() {
+        Either<List<T>, ?> elementsToMigrate = getElementsToMigrate();
+        return elementsToMigrate.either(this::migrateElementsToNewGraph,
+                                        errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_NODES.getMessage(errorStatus.toString())));
+    }
+
+    /** Hook run after each element is saved; default is a no-op success. */
+    boolean doPostSaveOperation(T element) {
+        return true;
+    }
+
+    /** Hook run once after all elements were migrated; default is a no-op success. */
+    boolean doPostMigrateOperation(List<T> elements) {
+        return true;
+    }
+
+    private boolean migrateElementsToNewGraph(List<T> elementsToMigrate) {
+        for (T node : elementsToMigrate) {
+            boolean migratedSuccessfully = migrateElement(node);
+            if (!migratedSuccessfully) {
+                titanDao.rollback();
+                return false;
+            }
+            // commit per element so earlier successes survive a later failure
+            titanDao.commit();
+        }
+        return postMigrate(elementsToMigrate);
+    }
+
+    private boolean migrateElement(T node) {
+        boolean savedSuccessfully = saveElementIfNotExists(node);
+        return savedSuccessfully && doPostSaveOperation(node);
+    }
+
+    private boolean postMigrate(List<T> elements) {
+        boolean postMigrateSuccessfully = doPostMigrateOperation(elements);
+        if (!postMigrateSuccessfully) {
+            titanDao.rollback();
+            return false;
+        }
+        titanDao.commit();
+        return true;
+    }
+
+    // idempotency: an element already present on the new graph counts as migrated
+    private boolean saveElementIfNotExists(T element) {
+        return isExists(element).either(isExist -> isExist || createElement(element),
+                                        status -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_GET_NODE_FROM_GRAPH.getMessage(status.toString())));
+    }
+
+    private boolean createElement(T element) {
+        return save(element).either(savedNode -> COMPLETED_OK,
+                                 errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_CREATE_NODE.getMessage(element.getClass().getName(), errorStatus.toString())));
+    }
+
+    private Either<Boolean, ?> isExists(T element) {
+        Either<T, ?> byId = getElementFromNewGraph(element);
+        return byId.either(existingVal -> Either.left(true),
+                           this::getEitherNotExistOrErrorStatus);
+    }
+
+    // a "not found" status means the element simply doesn't exist yet — not an error
+    private <S> Either<Boolean, S> getEitherNotExistOrErrorStatus(S status) {
+        return status == getNotFoundErrorStatus() ? Either.left(false) : Either.right(status);
+    }
+
+    /** @return all elements that should be migrated, or an error status */
+    abstract Either<List<T>, ?> getElementsToMigrate();
+
+    /** @return the element as stored on the new graph, or a status (possibly not-found) */
+    abstract Either<T, ?> getElementFromNewGraph(T element);
+
+    /** Persists the element on the new graph. */
+    abstract Either<T, ?> save(T element);
+
+    /** @return the status value that denotes "element not found" for this migration */
+    abstract <S extends Enum> S getNotFoundErrorStatus();
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/MigrationByIdDerivedNodeTypeResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/MigrationByIdDerivedNodeTypeResolver.java
new file mode 100644
index 0000000..0522ed9
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/MigrationByIdDerivedNodeTypeResolver.java
@@ -0,0 +1,23 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.model.DerivedNodeTypeResolver;
+
+import javax.annotation.Resource;
+import java.util.Collections;
+import java.util.List;
+
+public class MigrationByIdDerivedNodeTypeResolver implements DerivedNodeTypeResolver {
+
+    @Resource(name = "titan-dao")
+    private TitanDao titanDao;
+
+    @Override
+    public Either<List<GraphVertex>, TitanOperationStatus> findDerivedResources(String parentResource) {
+        return titanDao.getVertexById(parentResource, JsonParseFlagEnum.ParseMetadata).left().map(Collections::singletonList);
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesMigration.java
new file mode 100644
index 0000000..5fc0230
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesMigration.java
@@ -0,0 +1,38 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+/**
+ * Migrates all node-type normative resources to the new json graph model and
+ * then rebuilds their version chains.
+ */
+public class NormativesMigration extends ComponentMigration<Resource> {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(NormativesMigration.class);
+
+    @javax.annotation.Resource(name = "normatives-resolver")
+    private NormativesResolver normativesResolver;
+
+    @javax.annotation.Resource(name = "resource-version-migration")
+    private VersionMigration<Resource> versionMigration;
+
+    @Override
+    public String description() {
+        return "migration of node types";
+    }
+
+    @Override
+    Either<List<Resource>, ?> getElementsToMigrate() {
+        return normativesResolver.getAllNodeTypeNormatives();
+    }
+
+    /** After all node types exist on the new graph, chain their versions. */
+    @Override
+    boolean doPostMigrateOperation(List<Resource> elements) {
+        LOGGER.info("migrating node types versions");
+        return versionMigration.buildComponentsVersionChain(elements);
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesResolver.java
new file mode 100644
index 0000000..205faf5
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/NormativesResolver.java
@@ -0,0 +1,81 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.operations.api.IResourceOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Queue;
+import java.util.stream.Collectors;
+
+/**
+ * Collects all node-type normative resources, walking each root's derivation
+ * tree breadth-first so parents always precede their children in the result.
+ */
+public class NormativesResolver {
+
+    @javax.annotation.Resource(name = "resource-operation")
+    private IResourceOperation resourceOperation;
+
+    /**
+     *
+     * @return list of all normatives sorted by neighboring order
+     */
+    public Either<List<Resource>, StorageOperationStatus> getAllNodeTypeNormatives() {
+        return resourceOperation.getRootResources().either(this::getAllNormatives, Either::right);
+    }
+
+    /** Concatenates the derivation subtree of every root resource. */
+    private Either<List<Resource>, StorageOperationStatus> getAllNormatives(List<Resource> rootResources) {
+        List<Resource> collected = new ArrayList<>();
+        for (Resource root : rootResources) {
+            Either<List<Resource>, StorageOperationStatus> subtree = getAllNodeTypeNormatives(root);
+            if (subtree.isRight()) {
+                return Either.right(subtree.right().value());
+            }
+            collected.addAll(subtree.left().value());
+        }
+        return Either.left(collected);
+    }
+
+    /**
+     * Breadth-first walk over the derivation tree rooted at the given resource;
+     * each child's derivedFrom is rewritten to its parent's unique id before
+     * the child is enqueued.
+     */
+    private Either<List<Resource>, StorageOperationStatus> getAllNodeTypeNormatives(Resource root) {
+        List<Resource> visited = new ArrayList<>();
+        Queue<Resource> pending = new ArrayDeque<>();
+        pending.add(root);
+        while (!pending.isEmpty()) {
+            Resource current = pending.poll();
+            visited.add(current);
+            Either<List<Resource>, StorageOperationStatus> derivedRes = getAllNonVFDerivedResources(current);
+            if (derivedRes.isRight()) {
+                return Either.right(derivedRes.right().value());
+            }
+            List<Resource> children = derivedRes.left().value();
+            replaceDerivedNameWithDerivedUniqueId(current, children);
+            pending.addAll(children);
+        }
+        return Either.left(visited);
+    }
+
+    /** Points each child's derivedFrom at the parent's unique id instead of its name. */
+    private void replaceDerivedNameWithDerivedUniqueId(Resource currentResource, List<Resource> derivedResources) {
+        derivedResources.forEach(child -> child.setDerivedFrom(Collections.singletonList(currentResource.getUniqueId())));
+    }
+
+    /** Fetches direct derived resources, dropping VFs; NOT_FOUND maps to an empty list. */
+    private Either<List<Resource>, StorageOperationStatus> getAllNonVFDerivedResources(Resource resource) {
+        return resourceOperation.getAllDerivedResources(resource)
+                .either(children -> Either.left(filterNonVFResources(children)),
+                        this::resolveEmptyListOrErrorStatus);
+    }
+
+    private List<Resource> filterNonVFResources(List<Resource> resources) {
+        return resources.stream().filter(r -> r.getResourceType() != ResourceTypeEnum.VF).collect(Collectors.toList());
+    }
+
+    private Either<List<Resource>, StorageOperationStatus> resolveEmptyListOrErrorStatus(StorageOperationStatus storageOperationStatus) {
+        return storageOperationStatus == StorageOperationStatus.NOT_FOUND ? Either.left(Collections.emptyList()) : Either.right(storageOperationStatus);
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourceVersionMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourceVersionMigration.java
new file mode 100644
index 0000000..ee222a7
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourceVersionMigration.java
@@ -0,0 +1,22 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.operations.api.IResourceOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+
+import java.util.List;
+
/**
 * Version-chain migration for resources: supplies the Resource node type to
 * the generic {@link VersionMigration} flow.
 */
public class ResourceVersionMigration extends VersionMigration<Resource> {

    // NOTE(review): injected but not referenced in this subclass — presumably
    // kept for injection parity or future use; confirm it is actually needed.
    @javax.annotation.Resource(name = "resource-operation")
    private IResourceOperation resourceOperation;

    @Override
    NodeTypeEnum getNodeTypeEnum() {
        return NodeTypeEnum.Resource;
    }

}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourcesCategoriesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourcesCategoriesMigration.java
new file mode 100644
index 0000000..01654d2
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ResourcesCategoriesMigration.java
@@ -0,0 +1,150 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationMsg;
+import org.openecomp.sdc.asdctool.impl.migration.Migration;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.datatypes.category.CategoryDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.category.CategoryDefinition;
+import org.openecomp.sdc.be.model.category.SubCategoryDefinition;
+import org.openecomp.sdc.be.model.operations.api.IElementOperation;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.CategoriesUtils.filterOldCategories;
+import static org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.CategoriesUtils.filterOldSubCategories;
+
+
/**
 * Migrates resource categories and their sub-categories from the old graph
 * into the new one, skipping entries that already exist there. Each category
 * is committed individually; the first failure rolls back the current
 * transaction and aborts the whole migration.
 */
public class ResourcesCategoriesMigration implements Migration {

    // reads categories from the old graph
    @Resource(name = "element-operation")
    private IElementOperation elementOperation;

    // writes categories to the new (migration target) graph
    @Resource(name = "element-operation-migration")
    private IElementOperation elementOperationMigration;

    @Resource(name = "titan-dao")
    TitanDao titanDao;

    @Override
    public String description() {
        return "migrate resource categories";
    }

    /**
     * Entry point: fetches all resource categories from the old graph and
     * migrates them; logs and returns false if the fetch fails.
     */
    @Override
    public boolean migrate() {
        return getCategoriesToMigrate().either(this::migrateCategories,
                                               errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_CATEGORIES.getMessage(errorStatus.name())));
    }

    private Either<List<CategoryDefinition>, ActionStatus> getCategoriesToMigrate() {
        return elementOperation.getAllCategories(NodeTypeEnum.ResourceNewCategory, false);
    }

    /**
     * Migrates each category in turn, committing after every success; on the
     * first failure the transaction is rolled back and migration stops.
     */
    private boolean migrateCategories(List<CategoryDefinition> categoryDefinitions) {
        List<CategoryDefinition> categoriesToMigrate = filterOldCategoriesAndSubCategories(categoryDefinitions);
        for (CategoryDefinition categoryDefinition : categoriesToMigrate) {
            boolean isMigrated = migrateCategoryIfNotExists(categoryDefinition);
            if (!isMigrated) {
                titanDao.rollback();
                return false;
            }
            titanDao.commit();
        }
        return true;
    }


    // Production data was malformed, so we fix it here by dropping duplicate
    // categories and sub-categories before migrating.
    private List<CategoryDefinition> filterOldCategoriesAndSubCategories(List<CategoryDefinition> categoryDefinitions) {
        Map<String, List<CategoryDefinition>> categoriesByNormalName = categoryDefinitions.stream().collect(Collectors.groupingBy(CategoryDataDefinition::getNormalizedName));
        List<CategoryDefinition> categoriesToMigrate = filterOldCategories(categoryDefinitions);
        for (CategoryDefinition categoryDefinition : categoriesToMigrate) {
            // merge the sub-categories of every duplicate sharing this normalized name
            List<SubCategoryDefinition> newSubCategories = getAllDistinctSubCategories(categoriesByNormalName.get(categoryDefinition.getNormalizedName()));
            categoryDefinition.setSubcategories(newSubCategories);
        }
        return categoriesToMigrate;
    }

    private List<SubCategoryDefinition> getAllDistinctSubCategories (List<CategoryDefinition> categoriesDefinitions) {
        Map<String, List<SubCategoryDefinition>> subCategoriesByNormalName = categoriesDefinitions.stream().flatMap(ct -> ct.getSubcategories().stream()).collect(Collectors.groupingBy(SubCategoryDefinition::getNormalizedName));
        return getDistinctSubCategories(subCategoriesByNormalName);
    }

    // Keeps a single sub-category per normalized name; duplicates are resolved
    // by CategoriesUtils.filterOldSubCategories.
    private List<SubCategoryDefinition> getDistinctSubCategories(Map<String, List<SubCategoryDefinition>> subCategoriesByNormalName) {
        List<SubCategoryDefinition> allSubCategories = new ArrayList<>();
        for (List<SubCategoryDefinition> subCategoryDefinitions : subCategoriesByNormalName.values()) {
            if (subCategoryDefinitions.size() == 1) {
                allSubCategories.addAll(subCategoryDefinitions);
            } else {
                allSubCategories.addAll(filterOldSubCategories(subCategoryDefinitions));
            }
        }
        return allSubCategories;
    }

    // If the category already exists in the new graph only its sub-categories
    // are (re)checked; otherwise the category is created first.
    private boolean migrateCategoryIfNotExists(CategoryDefinition categoryDefinition) {
        return isExists(categoryDefinition).either(isExist -> isExist ? migrateSubCategories(categoryDefinition) : migrateCategoryAndSubCategories(categoryDefinition),
                                                   error -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_CATEGORY.getMessage(categoryDefinition.getName(), error.name())));
    }

    private boolean migrateCategoryAndSubCategories(CategoryDefinition resourceCategory) {
        return elementOperationMigration.createCategory(resourceCategory, NodeTypeEnum.ResourceNewCategory)
                .either(createdCategory -> this.migrateSubCategories(resourceCategory),
                        status -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_CREATE_CATEGORY.getMessage(resourceCategory.getName(), status.name())));
    }

    /** Migrates every sub-category of the given category; stops at the first failure. */
    private boolean migrateSubCategories(CategoryDefinition categoryDefinition) {
        for (SubCategoryDefinition subCategory : categoryDefinition.getSubcategories()) {
            boolean isMigrated = migrateSubcategoryIfNotExists(categoryDefinition, subCategory);
            if (!isMigrated) {
                return false;
            }
        }
        return true;
    }

    private boolean migrateSubcategoryIfNotExists(CategoryDefinition parentCategory, SubCategoryDefinition subCategory) {
        return isExists(subCategory).either(isExists -> isExists || migrateSubCategory(parentCategory, subCategory),
                                            status -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_CATEGORY.getMessage(subCategory.getName(), status.name())));
    }

    private boolean migrateSubCategory(CategoryDefinition categoryDefinition, SubCategoryDefinition subCategory) {
        return elementOperationMigration.createSubCategory(categoryDefinition.getUniqueId(), subCategory, NodeTypeEnum.ResourceSubcategory)
                .either(createdSubCategory -> true,
                        errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_CREATE_SUB_CATEGORY.getMessage(subCategory.getName(), categoryDefinition.getName(), errorStatus.name())));

    }

    // existence checks are by unique id against the new (migration) graph
    private Either<Boolean, ActionStatus> isExists(CategoryDefinition category) {
        Either<CategoryDefinition, ActionStatus> byId = getCategoryById(category.getUniqueId());
        return byId.either(existingVal -> Either.left(true),
                           this::getEitherNotExistOrErrorStatus);
    }

    private Either<Boolean, ActionStatus> isExists(SubCategoryDefinition subCategory) {
        return getSubCategoryById(subCategory.getUniqueId()).either(existingVal -> Either.left(true),
                                               this::getEitherNotExistOrErrorStatus);
    }

    // COMPONENT_CATEGORY_NOT_FOUND means "does not exist yet" (left false);
    // any other status is a genuine error (right).
    private Either<Boolean, ActionStatus> getEitherNotExistOrErrorStatus(ActionStatus status) {
        return status == ActionStatus.COMPONENT_CATEGORY_NOT_FOUND ? Either.left(false) : Either.right(status);
    }

    private Either<CategoryDefinition, ActionStatus> getCategoryById(String uid) {
        return elementOperationMigration.getCategory(NodeTypeEnum.ResourceNewCategory, uid);
    }

    private Either<SubCategoryDefinition, ActionStatus> getSubCategoryById(String uid) {
        return elementOperationMigration.getSubCategory(NodeTypeEnum.ResourceSubcategory, uid);
    }


}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceCategoriesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceCategoriesMigration.java
new file mode 100644
index 0000000..f745b88
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceCategoriesMigration.java
@@ -0,0 +1,46 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.category.CategoryDefinition;
+import org.openecomp.sdc.be.model.operations.api.IElementOperation;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+
+import javax.annotation.Resource;
+import java.util.List;
+
+public class ServiceCategoriesMigration extends JsonModelMigration<CategoryDefinition> {
+
+    @Resource(name = "element-operation")
+    private IElementOperation elementOperation;
+
+    @Resource(name = "element-operation-migration")
+    private IElementOperation elementOperationMigration;
+
+
+    @Override
+    public String description() {
+        return "migrate services categories";
+    }
+
+    @Override
+    Either<List<CategoryDefinition>, ?> getElementsToMigrate() {
+        return elementOperation.getAllCategories(NodeTypeEnum.ServiceNewCategory, false).left().map(CategoriesUtils::filterOldCategories);
+    }
+
+    @Override
+    Either<CategoryDefinition, ?> getElementFromNewGraph(CategoryDefinition node) {
+        return elementOperationMigration.getCategory(NodeTypeEnum.ServiceNewCategory, node.getUniqueId());
+    }
+
+    @Override
+    Either<CategoryDefinition, ActionStatus> save(CategoryDefinition graphNode) {
+        return elementOperationMigration.createCategory(graphNode, NodeTypeEnum.ServiceNewCategory);
+    }
+
+    @Override
+    ActionStatus getNotFoundErrorStatus() {
+        return ActionStatus.COMPONENT_CATEGORY_NOT_FOUND;
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceVersionMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceVersionMigration.java
new file mode 100644
index 0000000..4a96c27
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServiceVersionMigration.java
@@ -0,0 +1,23 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.operations.api.IResourceOperation;
+import org.openecomp.sdc.be.model.operations.api.IServiceOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+
+import java.util.List;
+
/**
 * Version-chain migration for services: supplies the Service node type to
 * the generic {@link VersionMigration} flow.
 */
public class ServiceVersionMigration extends VersionMigration<Service> {

    // NOTE(review): injected but not referenced in this subclass — presumably
    // kept for injection parity or future use; confirm it is actually needed.
    @javax.annotation.Resource(name = "service-operation")
    private IServiceOperation serviceOperation;

    @Override
    NodeTypeEnum getNodeTypeEnum() {
        return NodeTypeEnum.Service;
    }

}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServicesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServicesMigration.java
new file mode 100644
index 0000000..f5ff962
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/ServicesMigration.java
@@ -0,0 +1,107 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.datatypes.elements.ComponentInstanceDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
+import org.openecomp.sdc.be.model.ComponentInstanceAttribute;
+import org.openecomp.sdc.be.model.ComponentInstanceProperty;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.operations.api.IServiceOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.migration.MigrationErrorInformer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class ServicesMigration extends ComponentMigration<Service> {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(ServicesMigration.class);
+
+    @Resource(name = "service-operation")
+    private IServiceOperation serviceOperation;
+
+    @Resource(name = "service-version-migration")
+    private VersionMigration<Service> versionMigration;
+
+    @Override
+    public String description() {
+        return "migrate services";
+    }
+
+    @Override
+    Either<List<Service>, ?> getElementsToMigrate() {
+        return serviceOperation.getAll();
+    }
+
+    @Override
+    Either<Service, StorageOperationStatus> save(Service element) {
+        MigrationErrorInformer.logIfServiceUsingMalformedVfs(element);
+        filterOutVFInstancePropsAndAttrs(element);
+        element.setConformanceLevel("0.0");
+        requirementsCapabilitiesMigrationService.overrideInstanceCapabilitiesRequirements(element);
+        return super.save(element);
+    }
+
+    @Override
+    boolean doPostSaveOperation(Service element) {
+        return element.getComponentInstances() == null ||
+               (requirementsCapabilitiesMigrationService.associateFulfilledRequirements(element, NodeTypeEnum.Service) &&
+                requirementsCapabilitiesMigrationService.associateFulfilledCapabilities(element, NodeTypeEnum.Service));
+    }
+
+    @Override
+    boolean doPostMigrateOperation(List<Service> elements) {
+        LOGGER.info("migrating services versions");
+        return versionMigration.buildComponentsVersionChain(elements);
+    }
+
+    private void filterOutVFInstancePropsAndAttrs(Service element) {
+        if (element.getComponentInstances() != null) {
+            List<String> vfInstancesIds = getVFInstancesIds(element);
+            filterOutVFInstacnecProps(element, vfInstancesIds);
+            filterOutVFInstanceAttrs(element, vfInstancesIds);
+        }
+    }
+
+    private void filterOutVFInstanceAttrs(Service element, List<String> vfInstancesIds) {
+        Map<String, List<ComponentInstanceAttribute>> componentInstancesAttributes = element.getComponentInstancesAttributes();
+        if (componentInstancesAttributes != null) {
+            element.setComponentInstancesAttributes(filterOutVFInstanceAttributes(componentInstancesAttributes, vfInstancesIds));
+        }
+    }
+
+    private void filterOutVFInstacnecProps(Service element, List<String> vfInstancesIds) {
+        Map<String, List<ComponentInstanceProperty>> componentInstancesProperties = element.getComponentInstancesProperties();
+        if (componentInstancesProperties != null) {
+            element.setComponentInstancesProperties(filterOutVFInstanceProperties(componentInstancesProperties, vfInstancesIds));
+        }
+    }
+
+    private Map<String, List<ComponentInstanceProperty>> filterOutVFInstanceProperties(Map<String, List<ComponentInstanceProperty>> instances, List<String> vfInstanceIds) {
+        return instances.entrySet()
+                .stream()
+                .filter(entry -> !vfInstanceIds.contains(entry.getKey()))
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+    private Map<String, List<ComponentInstanceAttribute>> filterOutVFInstanceAttributes(Map<String, List<ComponentInstanceAttribute>> instances, List<String> vfInstanceIds) {
+        return instances.entrySet()
+                .stream()
+                .filter(entry -> !vfInstanceIds.contains(entry.getKey()))
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
+    private List<String> getVFInstancesIds(Service service) {
+        return service.getComponentInstances()
+                    .stream()
+                    .filter(componentInstance -> componentInstance.getOriginType() == OriginTypeEnum.VF)
+                    .map(ComponentInstanceDataDefinition::getUniqueId)
+                    .collect(Collectors.toList());
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UserStatesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UserStatesMigration.java
new file mode 100644
index 0000000..b248d90
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UserStatesMigration.java
@@ -0,0 +1,159 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import com.thinkaurelius.titan.core.TitanVertex;
+import fj.data.Either;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.apache.tinkerpop.gremlin.structure.Property;
+import org.apache.tinkerpop.gremlin.structure.Vertex;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationMsg;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.openecomp.sdc.be.resources.data.UserData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static fj.data.List.list;
+
+public class UserStatesMigration extends JsonModelMigration<Edge> {
+
+    private static final String MIGRATING_USER_ID = "jh0003";
+    private static final int OUT_VERTEX_INDEX = 0;
+    private static final int IN_VERTEX_INDEX = 1;
+    private static Logger LOGGER = LoggerFactory.getLogger(UserStatesMigration.class);
+
+    @Resource(name = "titan-generic-dao")
+    private TitanGenericDao genericDao;
+
+    @Resource(name = "titan-generic-dao-migration")
+    private TitanGenericDao genericDaoMigration;
+
+    @Override
+    public String description() {
+        return "migrate user states";
+    }
+
+
+    @Override
+    public boolean migrate() {
+//        return removeMigratingUserStates() && super.migrate();
+        return super.migrate();
+    }
+
+    @Override
+    Either<List<Edge>, TitanOperationStatus> getElementsToMigrate() {
+        LOGGER.debug("fetching user states edges from old graph");
+        return genericDao.getAll(NodeTypeEnum.User, UserData.class)
+                         .left().bind(this::getEdgesForUsers);
+    }
+
+    @Override
+    Either<Edge, TitanOperationStatus> getElementFromNewGraph(Edge edge) {
+        LOGGER.debug("finding user state edge in new graph");
+        Vertex outVertex = edge.outVertex();
+        String outVertexUIDKey = getVertexUniqueId(outVertex);
+        String outVertexUIDValue = outVertex.property(outVertexUIDKey).value().toString();
+
+        Vertex inVertex = edge.inVertex();
+        String inVertexUIDKey = getVertexUniqueId(inVertex);
+        String inVertexUIDValue = inVertex.property(inVertexUIDKey).value().toString();
+
+        return genericDaoMigration.getEdgeByVerticies(outVertexUIDKey, outVertexUIDValue, inVertexUIDKey, inVertexUIDValue, edge.label());
+    }
+
+    @Override
+    Either<Edge, TitanOperationStatus> save(Edge userState) {
+        Either<InOutVertices, TitanOperationStatus> titanVertices = findEdgeInOutVerticesInNewGraph(userState);
+        return titanVertices.left().bind(inOutVertices -> genericDaoMigration.copyEdge(inOutVertices.getOutVertex(), inOutVertices.getInVertex(), userState));
+    }
+
+    @Override
+    TitanOperationStatus getNotFoundErrorStatus() {
+        return TitanOperationStatus.NOT_FOUND;
+    }
+
+//    private boolean removeMigratingUserStates() {
+//        Either<UserData, TitanOperationStatus> migratingUser = genericDaoMigration.getNode(UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.User), MIGRATING_USER_ID, UserData.class);
+//        return migratingUser.either(user -> deleteAllEdges(user, Direction.OUT),
+//                                    errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_MIGRATION_USER.getMessage(MIGRATING_USER_ID, errorStatus.name())));
+//    }
+
+    private Either<List<Edge>, TitanOperationStatus> getEdgesForUsers(List<UserData> users) {
+        List<Edge> edges = new ArrayList<>();
+        for (UserData user : users) {
+            Either<List<Edge>, TitanOperationStatus> edgesForNode = genericDao.getEdgesForNode(user, Direction.OUT);
+            if (edgesForNode.isRight()) {
+                TitanOperationStatus errorStatus = edgesForNode.right().value();
+                LOGGER.error(MigrationMsg.FAILED_TO_RETRIEVE_USER_STATES.getMessage(user.getEmail(), errorStatus.name()));
+                return Either.right(errorStatus);
+            }
+            edges.addAll(edgesForNode.left().value());
+        }
+        return Either.left(ignoreProductEdges(edges));
+    }
+
+    private List<Edge> ignoreProductEdges(List<Edge> edges) {
+        return edges.stream().filter(edge -> !isInEdgeOfProductType(edge.inVertex())).collect(Collectors.toList());
+    }
+
+    private boolean isInEdgeOfProductType(Vertex inVertex) {
+        Property<Object> nodeLabelProperty = inVertex.property(GraphPropertiesDictionary.LABEL.getProperty());
+        return nodeLabelProperty != null && nodeLabelProperty.value().equals(NodeTypeEnum.Product.getName());
+    }
+
+    private String getVertexUniqueId(Vertex vertex) {
+        String nodeLabel = vertex.property(GraphPropertiesDictionary.LABEL.getProperty()).value().toString();
+        return UniqueIdBuilder.getKeyByNodeType(NodeTypeEnum.getByName(nodeLabel));
+    }
+
+    private Either<InOutVertices, TitanOperationStatus> findEdgeInOutVerticesInNewGraph(Edge userEdge) {
+        Either<TitanVertex, TitanOperationStatus> outVertex = getMigratedVertexByOldVertex(userEdge.outVertex());
+        Either<TitanVertex, TitanOperationStatus> inVertex = getMigratedVertexByOldVertex(userEdge.inVertex());
+        return Either.sequenceLeft(list(outVertex, inVertex)).left().map(InOutVertices::new);
+    }
+
+    private Either<TitanVertex, TitanOperationStatus> getMigratedVertexByOldVertex(Vertex vertex) {
+        String vertexUniqueId = getVertexUniqueId(vertex);
+        LOGGER.debug(String.format("fetching vertex %s from new graph", vertexUniqueId));
+        return genericDaoMigration.getVertexByProperty(vertexUniqueId, vertex.property(vertexUniqueId).value())
+                                   .right().map(err -> MigrationUtils.handleError(err, String.format("could not find vertex %s in new graph.", vertexUniqueId)))  ;
+    }
+
+//    private boolean deleteAllEdges(UserData userData, Direction direction) {
+//        Either<List<Edge>, TitanOperationStatus> edgesForNode = genericDaoMigration.getEdgesForNode(userData, direction);
+//        if (edgesForNode.isRight()) {
+//            LOGGER.error(MigrationMsg.FAILED_TO_RETRIEVE_MIGRATION_USER_STATES.getMessage(MIGRATING_USER_ID, edgesForNode.right().value().name()));
+//            return false;
+//        }
+//        edgesForNode.left().value().forEach(Edge::remove);
+//        return true;
+//    }
+
+    private class InOutVertices {
+        private TitanVertex outVertex;
+        private TitanVertex inVertex;
+
+        InOutVertices(fj.data.List<TitanVertex> inOutVertices) {
+            outVertex = inOutVertices.index(OUT_VERTEX_INDEX);
+            inVertex = inOutVertices.index(IN_VERTEX_INDEX);
+        }
+
+        TitanVertex getOutVertex() {
+            return outVertex;
+        }
+
+        TitanVertex getInVertex() {
+            return inVertex;
+        }
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UsersMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UsersMigration.java
new file mode 100644
index 0000000..9dd3fc4
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/UsersMigration.java
@@ -0,0 +1,58 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.Function;
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.dao.utils.UserStatusEnum;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+import java.util.List;
+
+import static org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils.handleError;
+
+public class UsersMigration extends JsonModelMigration<User> {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(UsersMigration.class);
+
+    @Resource(name = "user-operation")
+    IUserAdminOperation userAdminOperation;
+
+    @Resource(name = "user-operation-migration")
+    IUserAdminOperation userAdminOperationMigration;
+
+
+    @Override
+    Either<List<User>, ActionStatus> getElementsToMigrate() {
+        LOGGER.debug("fetching users to migrate from old graph");
+        return userAdminOperation.getAllUsers();
+    }
+
+    @Override
+    Either<User, ActionStatus> getElementFromNewGraph(User user) {
+        LOGGER.debug(String.format("trying to load user %s from new graph", user.getUserId()));
+        return userAdminOperationMigration.getUserData(user.getUserId(), false);
+    }
+
+    @Override
+    Either<User, StorageOperationStatus> save(User user) {
+        LOGGER.debug(String.format("trying to save user %s to new graph", user.getUserId()));
+        return userAdminOperationMigration.saveUserData(user);
+    }
+
+    @Override
+    public ActionStatus getNotFoundErrorStatus() {
+        return ActionStatus.USER_NOT_FOUND;
+    }
+
+    @Override
+    public String description() {
+        return "migrate users";
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VFResourcesMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VFResourcesMigration.java
new file mode 100644
index 0000000..b684883
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VFResourcesMigration.java
@@ -0,0 +1,53 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.Resource;
+import org.openecomp.sdc.be.model.operations.api.IResourceOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
+public class VFResourcesMigration extends ComponentMigration<Resource> {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(VFResourcesMigration.class);
+
+    @javax.annotation.Resource(name = "resource-operation")
+    private IResourceOperation resourceOperation;
+
+    @javax.annotation.Resource(name = "resource-version-migration")
+    private VersionMigration<Resource> versionMigration;
+
+    @Override
+    public String description() {
+        return "migrate VFs";
+    }
+
+    @Override
+    Either<List<Resource>, ?> getElementsToMigrate() {
+        return resourceOperation.getVFResources();
+    }
+
+    @Override
+    Either<Resource, StorageOperationStatus> save(Resource element) {
+        requirementsCapabilitiesMigrationService.overrideInstanceCapabilitiesRequirements(element);
+        return super.save(element);
+    }
+
+    @Override
+    boolean doPostSaveOperation(Resource element) {
+        return element.getComponentInstances() == null ||
+                (requirementsCapabilitiesMigrationService.associateFulfilledRequirements(element, NodeTypeEnum.Resource) &&
+                 requirementsCapabilitiesMigrationService.associateFulfilledCapabilities(element, NodeTypeEnum.Resource));
+    }
+
+    @Override
+    boolean doPostMigrateOperation(List<Resource> elements) {
+        LOGGER.info("migrating VFs versions");
+        return versionMigration.buildComponentsVersionChain(elements);
+    }
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VersionMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VersionMigration.java
new file mode 100644
index 0000000..7dadd79
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/VersionMigration.java
@@ -0,0 +1,126 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel;
+
+import fj.data.Either;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.tinkerpop.gremlin.structure.Edge;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationException;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationMsg;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Builds the version chain (VERSION edges) between all versions of each
+ * component sharing the same invariant UUID, as part of the 1707 json-model
+ * migration. Commits per invariant group; rolls back and aborts on failure.
+ *
+ * @param <T> the concrete component type being migrated
+ */
+public abstract class VersionMigration<T extends Component>  {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(VersionMigration.class);
+
+    @Resource(name = "titan-generic-dao-migration")
+    private TitanGenericDao titanGenericDaoMigration;
+
+    @Resource(name = "titan-dao")
+    private TitanDao titanDao;
+
+    /**
+     * Groups components by invariant UUID (filling in missing invariants first)
+     * and links each consecutive pair of versions with a VERSION edge.
+     *
+     * @return true if all version chains were built; false after rolling back
+     *         the first failed group
+     */
+    public boolean buildComponentsVersionChain(List<T> components) {
+        setMissingInvariantUid(components);
+        Map<String, List<T>> componentsByInvariant = components.stream().collect(Collectors.groupingBy(Component::getInvariantUUID));
+        for (List<T> componentsList : componentsByInvariant.values()) {
+            boolean versionChainBuilt = buildVersionChainForInvariant(componentsList);
+            if (!versionChainBuilt) {
+                titanDao.rollback();
+                return false;
+            }
+            titanDao.commit();
+        }
+        return true;
+    }
+
+    // Sorts one invariant group by version, then links each version to the next.
+    private boolean buildVersionChainForInvariant(List<T> components) {
+        sortComponentsByVersion(components);
+        for (int i = 0; i < components.size() -1; i++) {
+            String lowerVersionUid = components.get(i).getUniqueId();
+            String higherVersionUid = components.get(i + 1).getUniqueId();
+            boolean versionCreated = createVersionRelationIfNotExist(lowerVersionUid, higherVersionUid);
+            if (!versionCreated) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // NOTE(review): versions are compared lexicographically as strings; multi-digit
+    // versions (e.g. "10.0" vs "2.0") would sort incorrectly — confirm version format.
+    private void sortComponentsByVersion(List<T> components) {
+        Collections.sort(components, (o1, o2) -> o1.getVersion().compareTo(o2.getVersion()));
+    }
+
+    // Creates the VERSION edge only when one does not already exist between the vertices.
+    private boolean createVersionRelationIfNotExist(String fromUid, String toUid) {
+        Either<Boolean, TitanOperationStatus> isVersionExists = isVersionExists(fromUid, toUid);
+        return isVersionExists.either(versionExists -> versionExists || createVersionRelation(fromUid, toUid),
+                               errorStatus -> MigrationUtils.handleError(MigrationMsg.FAILED_TO_RETRIEVE_VERSION_RELATION.getMessage(fromUid, toUid, isVersionExists.right().value().name())));
+    }
+
+    // Resolves both vertices by id and creates the VERSION edge; logs and fails otherwise.
+    private boolean createVersionRelation(String fromUid, String toUid) {
+        LOGGER.debug(String.format("creating version edge between vertex %s and vertex %s", fromUid, toUid));
+        Either<GraphVertex, TitanOperationStatus> vertexById = titanDao.getVertexById(fromUid);
+        Either<GraphVertex, TitanOperationStatus> vertexById1 = titanDao.getVertexById(toUid);
+        if (vertexById1.isLeft() && vertexById.isLeft()) {
+            TitanOperationStatus versionCreated = titanDao.createEdge(vertexById.left().value(), vertexById1.left().value(), EdgeLabelEnum.VERSION, new HashMap<>());
+            return versionCreated == TitanOperationStatus.OK;
+        }
+        return MigrationUtils.handleError(String.format("could not create version edge between vertex %s and vertex %s.", fromUid, toUid));
+    }
+
+    // Left(true) if a VERSION edge already exists; NOT_FOUND maps to Left(false),
+    // any other error is propagated on the right.
+    private Either<Boolean, TitanOperationStatus> isVersionExists(String fromUid, String toUid) {
+        LOGGER.debug(String.format("checking if version edge between vertex %s and vertex %s already exist", fromUid, toUid));
+        String uidKey = UniqueIdBuilder.getKeyByNodeType(getNodeTypeEnum());
+        Either<Edge, TitanOperationStatus> edgeByVertices = titanGenericDaoMigration.getEdgeByVerticies(uidKey, fromUid, uidKey, toUid, EdgeLabelEnum.VERSION.name());
+        if (isNotFoundStatus(edgeByVertices)) {
+            return Either.left(false);
+        }
+        return edgeByVertices.bimap(foundEdge -> true,
+                                    error -> error);
+    }
+
+    private boolean isNotFoundStatus(Either<Edge, TitanOperationStatus> edgeByVertices) {
+        return edgeByVertices.isRight() && edgeByVertices.right().value() == TitanOperationStatus.NOT_FOUND;
+    }
+
+    /** @return the node type whose unique-id key is used when looking up version edges. */
+    abstract NodeTypeEnum getNodeTypeEnum();
+
+    // some invariant uids are missing in production; fill each one in from
+    // another component that shares the same UUID before grouping by invariant.
+    private void setMissingInvariantUid(List<T> components) {
+        List<T> missingInvariantCmpts = getComponentsWithNoInvariantUids(components);
+        for (T missingInvariantCmpt : missingInvariantCmpts) {
+            String uuid = missingInvariantCmpt.getUUID();
+            missingInvariantCmpt.setInvariantUUID(findInvariantUidOrElseFail(components, uuid));
+        }
+    }
+
+    // Logs and returns all components whose invariant uid is null.
+    private List<T> getComponentsWithNoInvariantUids(List<T> components) {
+        List<T> cmptsWithoutInvariant = components.stream().filter(c -> c.getInvariantUUID() == null).collect(Collectors.toList());
+        LOGGER.info(String.format("the following components are missing invariant uids: %s", StringUtils.join(cmptsWithoutInvariant.stream().map(Component::getUniqueId).collect(Collectors.toList()), ",")));
+        return cmptsWithoutInvariant;
+    }
+
+    // Finds the invariant uid of any component with the same UUID, or aborts the migration.
+    private String findInvariantUidOrElseFail(List<T> components, String uuid) {
+        return components.stream()
+                .filter(c -> c.getUUID().equals(uuid))
+                .map(Component::getInvariantUUID)
+                .filter(c -> c != null)
+                .findAny().orElseThrow(() -> new MigrationException(String.format("cannot find invariantuid for component with uuid %s", uuid)));
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilitiesMigrationService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilitiesMigrationService.java
new file mode 100644
index 0000000..4a91290
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilitiesMigrationService.java
@@ -0,0 +1,84 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations;
+
+import fj.data.Either;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.CapabilityDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.ListCapabilityDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.ListDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapListCapabiltyDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.CapabilityDefinition;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.operations.api.ICapabilityOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
+import org.openecomp.sdc.be.resources.data.CapabilityData;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Capabilities flavor of {@link FulfilledCapabilityRequirementMigrationService}:
+ * loads fulfilled capability data, converts it to the json-model list/map
+ * definitions, and associates it to the component vertex under the
+ * FULLFILLED_CAPABILITIES edge.
+ */
+public class FulfilledCapabilitiesMigrationService extends FulfilledCapabilityRequirementMigrationService<CapabilityDefinition, CapabilityData> {
+
+    @Resource(name = "capability-operation")
+    private ICapabilityOperation capabilityOperation;
+
+    @Resource(name = "component-instance-operation")
+    private ComponentInstanceOperation componentInstanceOperation;
+
+    @Override
+    Either<CapabilityDefinition, StorageOperationStatus> getToscaDefinition(CapabilityData data) {
+        return capabilityOperation.getCapability(data.getUniqueId());
+    }
+
+    @Override
+    void setPath(CapabilityDefinition def, List<String> path) {
+        def.setPath(path);
+    }
+
+    /** Capabilities are grouped by their capability type. */
+    @Override
+    String getType(CapabilityDefinition def) {
+        return def.getType();
+    }
+
+    @Override
+    Either<List<ImmutablePair<CapabilityData, GraphEdge>>, TitanOperationStatus> getFulfilledCapReqs(ComponentInstance instance, NodeTypeEnum nodeTypeEnum) {
+        return componentInstanceOperation.getFulfilledCapabilities(instance, nodeTypeEnum);
+    }
+
+    /** Widens the definition list to the data-definition element type. */
+    @Override
+    ListDataDefinition convertToDefinitionListObject(List<CapabilityDefinition> capReqDefList) {
+        List<CapabilityDataDefinition> capabilityDataDefinitions = new ArrayList<>();
+        capabilityDataDefinitions.addAll(capReqDefList);
+        return new ListCapabilityDataDefinition(capabilityDataDefinitions);
+    }
+
+    @Override
+    MapDataDefinition convertToDefinitionMapObject(Map<String, ListDataDefinition> reqCapForInstance) {
+        Map<String, ListCapabilityDataDefinition> capabilitiesList = castDataDefinitionListToCapabilityList(reqCapForInstance);
+        return new MapListCapabiltyDataDefinition(capabilitiesList);
+    }
+
+    /**
+     * Checks whether fulfilled capabilities were already associated to the component.
+     * BUGFIX: previously followed EdgeLabelEnum.FULLFILLED_REQUIREMENTS (copy-paste
+     * from the requirements service); this service writes its data under
+     * FULLFILLED_CAPABILITIES (see associateToGraph), so the existence check must
+     * follow the same edge label or already-migrated capabilities are never detected.
+     */
+    @Override
+    Either<GraphVertex, TitanOperationStatus> getAssociatedDefinitions(GraphVertex component) {
+        return titanDao.getChildVertex(component, EdgeLabelEnum.FULLFILLED_CAPABILITIES, JsonParseFlagEnum.NoParse);
+    }
+
+    @Override
+    Either<GraphVertex, StorageOperationStatus> associateToGraph(GraphVertex graphVertex, Map<String, MapDataDefinition> defsByInstance) {
+        return topologyTemplateOperation.assosiateElementToData(graphVertex, VertexTypeEnum.FULLFILLED_CAPABILITIES, EdgeLabelEnum.FULLFILLED_CAPABILITIES, defsByInstance);
+    }
+
+    // Narrows the map values back to the capability list type produced above.
+    private Map<String, ListCapabilityDataDefinition> castDataDefinitionListToCapabilityList(Map<String, ListDataDefinition> reqCapForInstance) {
+        return reqCapForInstance.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, entry -> (ListCapabilityDataDefinition) entry.getValue()));
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilityRequirementMigrationService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilityRequirementMigrationService.java
new file mode 100644
index 0000000..aad2eb4
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledCapabilityRequirementMigrationService.java
@@ -0,0 +1,164 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations;
+
+import fj.Function;
+import fj.data.Either;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.asdctool.impl.migration.MigrationException;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphNode;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ListDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.api.ToscaDefinitionPathCalculator;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.annotation.Resource;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.openecomp.sdc.asdctool.impl.migration.MigrationMsg.*;
+import static org.openecomp.sdc.asdctool.impl.migration.v1707.MigrationUtils.willThrowException;
+
+/**
+ * Template for migrating fulfilled capabilities/requirements of a component's
+ * instances into the 1707 json model: groups the fulfilled tosca definitions by
+ * component instance and associates the result to the component's graph vertex.
+ * Internal failures are funneled through {@link MigrationException} and turned
+ * into a boolean result at the public entry point.
+ *
+ * @param <T> the tosca definition type (capability or requirement definition)
+ * @param <S> the graph data node type the definitions are loaded from
+ */
+public abstract class FulfilledCapabilityRequirementMigrationService<T extends ToscaDataDefinition, S extends GraphNode> {
+
+    private static Logger LOGGER = LoggerFactory.getLogger(FulfilledCapabilityRequirementMigrationService.class);
+
+    @Resource(name = "topology-template-operation")
+    TopologyTemplateOperation topologyTemplateOperation;
+
+    @Resource(name = "tosca-path-calculator")
+    private ToscaDefinitionPathCalculator toscaDefinitionPathCalculator;
+
+    @Resource(name = "titan-dao")
+    TitanDao titanDao;
+
+    /**
+     * Public entry point: associates the component's fulfilled tosca definitions
+     * to its vertex, skipping components that were already migrated.
+     *
+     * @return true on success (or nothing to do); false on any migration failure
+     */
+    public boolean associateToscaDefinitions(Component component, NodeTypeEnum componentType) {
+        try {
+            return isDefinitionsAlreadyAssociated(component) || doAssociateToscaDefinitions(component, componentType);
+        } catch (MigrationException e) {
+            LOGGER.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    // True if the association vertex already exists; NOT_FOUND means not yet migrated.
+    private boolean isDefinitionsAlreadyAssociated(Component component) {
+        GraphVertex componentVertex =  titanDao.getVertexById(component.getUniqueId()).left().on((err) -> willThrowException(FAILED_TO_RETRIEVE_VERTEX.getMessage(component.getName(), err.name())));
+        return this.getAssociatedDefinitions(componentVertex)
+                   .either(vertex -> true,
+                           errorStatus -> notFoundStatusOrFail(component, errorStatus));
+
+    }
+
+    // NOT_FOUND is the expected "not migrated yet" case; anything else aborts.
+    private boolean notFoundStatusOrFail(Component component, TitanOperationStatus error) {
+        if (error.equals(TitanOperationStatus.NOT_FOUND)) {
+            return false;
+        }
+        throw new MigrationException(FAILED_TO_RETRIEVE_CAP_REQ_VERTEX.getMessage(component.getName(), error.name()));
+    }
+
+    // Builds the per-instance definition map and writes it to the graph (if non-empty).
+    private boolean doAssociateToscaDefinitions(Component component, NodeTypeEnum componentType) {
+        try {
+            Map<String, MapDataDefinition> toscaDefByInstance = groupToscaDefinitionByInstance(component, componentType);
+            return toscaDefByInstance.isEmpty() || updateOnGraph(component, toscaDefByInstance);
+        } catch (MigrationException e) {
+            LOGGER.error(e.getMessage(), e);
+            return false;
+        }
+    }
+
+    // instance unique id -> fulfilled definitions grouped by type for that instance.
+    private  Map<String, MapDataDefinition> groupToscaDefinitionByInstance(Component component, NodeTypeEnum componentType) {
+        Map<String, MapDataDefinition> toscaDefByInstance = new HashMap<>();
+        for (ComponentInstance componentInstance : component.getComponentInstances()) {
+                List<ImmutablePair<S, GraphEdge>> fulfilledCapReq = getFulfilledCapReqs(componentType, componentInstance);
+                if (fulfilledCapReq.isEmpty()) {
+                    continue;
+                }
+                toscaDefByInstance.put(componentInstance.getUniqueId(), getReqCapToscaDefs(fulfilledCapReq, componentInstance));
+        }
+        return toscaDefByInstance;
+    }
+
+    // Converts one instance's raw data pairs into the grouped map definition object.
+    private MapDataDefinition getReqCapToscaDefs(List<ImmutablePair<S, GraphEdge>> capReqsData, ComponentInstance componentInstance) {
+        Map<String, List<T>> capReqDefinitions = getCapReqDefinitions(componentInstance, capReqsData);
+        return convertToMapDefinition(capReqDefinitions);
+    }
+
+    // Loads fulfilled data for the instance; NOT_FOUND maps to an empty list.
+    private List<ImmutablePair<S, GraphEdge>> getFulfilledCapReqs(NodeTypeEnum componentType, ComponentInstance componentInstance) {
+        return getFulfilledCapReqs(componentInstance, componentType)
+                .either(Function.identity(),
+                        error ->  emptyListOrFail(error, componentInstance.getName()));
+    }
+
+    private List<ImmutablePair<S, GraphEdge>> emptyListOrFail(TitanOperationStatus error, String instanceName) {
+        if (error.equals(TitanOperationStatus.NOT_FOUND)) {
+            return Collections.emptyList();
+        }
+        String errorMsg = FAILED_TO_RETRIEVE_REQ_CAP.getMessage(instanceName, error.name());
+        throw new MigrationException(errorMsg);
+    }
+
+    // Converts each data pair to a tosca definition and groups by its type key.
+    private Map<String, List<T>> getCapReqDefinitions(ComponentInstance componentInstance, List<ImmutablePair<S, GraphEdge>> capReqDataList) {
+        return capReqDataList.stream()
+                .map(capReqData -> convertToToscaDef(componentInstance, capReqData))
+                .collect(Collectors.groupingBy(this::getType));
+    }
+
+    // Builds the definition and stamps its tosca path derived from the instance and edge.
+    private T convertToToscaDef(ComponentInstance componentInstance, ImmutablePair<S, GraphEdge> data) {
+        T def = getReqCapDataDefinition(data);
+        List<String> definitionPath = toscaDefinitionPathCalculator.calculateToscaDefinitionPath(componentInstance, data.getRight());
+        setPath(def, definitionPath);
+        return def;
+    }
+
+    // Resolves the tosca definition for the data node, aborting the migration on failure.
+    private T getReqCapDataDefinition(ImmutablePair<S, GraphEdge> data) {
+        S capReqData = data.getLeft();
+        return getToscaDefinition(capReqData).left().on(err -> willThrowException(FAILED_TO_RETRIEVE_TOSCA_DEF.getMessage(capReqData.getUniqueId().toString(), err.toString())));
+    }
+
+    // Associates the per-instance definitions to the component vertex.
+    private boolean updateOnGraph(Component component, Map<String, MapDataDefinition> defsByInstance) {
+        GraphVertex graphVertex = getComponentGraphVertex(component);
+        Either<GraphVertex, StorageOperationStatus> associatedVertex = associateToGraph(graphVertex, defsByInstance);
+        return associatedVertex.either(vertex -> true, err -> MigrationUtils.handleError(FAILED_TO_ASSOCIATE_CAP_REQ.getMessage(component.getName(), err.name())));
+    }
+
+    private GraphVertex getComponentGraphVertex(Component component) {
+        return titanDao.getVertexById(component.getUniqueId())
+                       .left().on(error -> willThrowException(FAILED_TO_RETRIEVE_VERTEX.getMessage(component.getUniqueId(), error.name())));
+    }
+
+    // type -> definition list map converted to the concrete json-model map object.
+    private MapDataDefinition convertToMapDefinition(Map<String, List<T>> toscaDefs) {
+        Map<String, ListDataDefinition> defsListByType = toscaDefs.entrySet().stream()
+                .collect(Collectors.toMap(Map.Entry::getKey, entry -> convertToDefinitionListObject(entry.getValue())));
+        return convertToDefinitionMapObject(defsListByType);
+    }
+
+    /** Loads the full tosca definition for one data node. */
+    abstract Either<T, ?> getToscaDefinition(S data);
+
+    /** Stores the calculated tosca definition path on the definition. */
+    abstract void setPath(T def, List<String> path);
+
+    /** @return the type key used to group definitions for one instance. */
+    abstract String getType(T def);
+
+    /** Fetches the fulfilled data pairs for one instance from the old graph model. */
+    abstract Either<List<ImmutablePair<S, GraphEdge>>, TitanOperationStatus> getFulfilledCapReqs(ComponentInstance instance, NodeTypeEnum nodeTypeEnum);
+
+    /** Wraps definitions of one type in the concrete list data-definition object. */
+    abstract ListDataDefinition convertToDefinitionListObject(List<T> capReqDefList);
+
+    /** Wraps the per-type lists in the concrete map data-definition object. */
+    abstract MapDataDefinition convertToDefinitionMapObject(Map<String, ListDataDefinition> reqCapForInstance);
+
+    /** Looks up the already-associated definitions vertex, if any. */
+    abstract Either<GraphVertex, TitanOperationStatus> getAssociatedDefinitions(GraphVertex component);
+
+    /** Writes the per-instance definitions to the component's graph vertex. */
+    abstract Either<GraphVertex, StorageOperationStatus> associateToGraph(GraphVertex graphVertex, Map<String, MapDataDefinition> defsByInstance);
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledRequirementsMigrationService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledRequirementsMigrationService.java
new file mode 100644
index 0000000..33c6b22
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/FulfilledRequirementsMigrationService.java
@@ -0,0 +1,84 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations;
+
+import fj.data.Either;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.openecomp.sdc.be.dao.graph.datatype.GraphEdge;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.elements.ListDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.ListRequirementDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.MapListRequirementDataDefinition;
+import org.openecomp.sdc.be.datatypes.elements.RequirementDataDefinition;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.RequirementDefinition;
+import org.openecomp.sdc.be.model.operations.api.IComponentInstanceOperation;
+import org.openecomp.sdc.be.model.operations.api.IRequirementOperation;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.resources.data.RequirementData;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+/**
+ * Requirements flavor of {@link FulfilledCapabilityRequirementMigrationService}:
+ * loads fulfilled requirement data, converts it to the json-model list/map
+ * definitions, and associates it to the component vertex under the
+ * FULLFILLED_REQUIREMENTS edge.
+ */
+public class FulfilledRequirementsMigrationService extends FulfilledCapabilityRequirementMigrationService<RequirementDefinition, RequirementData> {
+
+    @Resource(name = "requirement-operation")
+    IRequirementOperation requirementOperation;
+
+    @Resource(name = "component-instance-operation")
+    IComponentInstanceOperation componentInstanceOperation;
+
+    @Override
+    Either<RequirementDefinition, TitanOperationStatus> getToscaDefinition(RequirementData data) {
+        return requirementOperation.getRequirement(data.getUniqueId());
+    }
+
+    @Override
+    void setPath(RequirementDefinition def, List<String> path) {
+        def.setPath(path);
+    }
+
+    /** Requirements are grouped by the capability they fulfill. */
+    @Override
+    String getType(RequirementDefinition def) {
+        return def.getCapability();
+    }
+
+    @Override
+    Either<List<ImmutablePair<RequirementData, GraphEdge>>, TitanOperationStatus> getFulfilledCapReqs(ComponentInstance instance, NodeTypeEnum nodeTypeEnum) {
+        return componentInstanceOperation.getFulfilledRequirements(instance, nodeTypeEnum);
+    }
+
+    /** Widens the definition list to the data-definition element type via a copy. */
+    @Override
+    ListDataDefinition convertToDefinitionListObject(List<RequirementDefinition> capReqDefList) {
+        return new ListRequirementDataDefinition(new ArrayList<RequirementDataDefinition>(capReqDefList));
+    }
+
+    /** Narrows the map values back to the requirement list type produced above. */
+    @Override
+    MapDataDefinition convertToDefinitionMapObject(Map<String, ListDataDefinition> reqCapForInstance) {
+        Map<String, ListRequirementDataDefinition> requirementsByType = reqCapForInstance.entrySet().stream()
+                .collect(Collectors.toMap(Map.Entry::getKey, e -> (ListRequirementDataDefinition) e.getValue()));
+        return new MapListRequirementDataDefinition(requirementsByType);
+    }
+
+    @Override
+    Either<GraphVertex, TitanOperationStatus> getAssociatedDefinitions(GraphVertex component) {
+        return titanDao.getChildVertex(component, EdgeLabelEnum.FULLFILLED_REQUIREMENTS, JsonParseFlagEnum.NoParse);
+    }
+
+    @Override
+    Either<GraphVertex, StorageOperationStatus> associateToGraph(GraphVertex graphVertex, Map<String, MapDataDefinition> defsByInstance) {
+        return topologyTemplateOperation.assosiateElementToData(graphVertex, VertexTypeEnum.FULLFILLED_REQUIREMENTS, EdgeLabelEnum.FULLFILLED_REQUIREMENTS, defsByInstance);
+    }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/RequirementsCapabilitiesMigrationService.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/RequirementsCapabilitiesMigrationService.java
new file mode 100644
index 0000000..f74e40a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/migration/v1707/jsonmodel/relations/RequirementsCapabilitiesMigrationService.java
@@ -0,0 +1,91 @@
+package org.openecomp.sdc.asdctool.impl.migration.v1707.jsonmodel.relations;
+
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
+import org.openecomp.sdc.be.model.CapabilityDefinition;
+import org.openecomp.sdc.be.model.Component;
+import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.RequirementDefinition;
+
+import javax.annotation.Resource;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * Facade used by the component migrations: re-associates fulfilled
+ * requirements/capabilities to the migrated component, and rebuilds each
+ * instance's capabilities/requirements from the component-level definitions
+ * (grouped back onto their owning instances by owner id).
+ */
+public class RequirementsCapabilitiesMigrationService<T extends Component> {
+
+    @Resource(name = "fulfilled-capabilities-mig-service")
+    FulfilledCapabilitiesMigrationService fulfilledCapabilityUpdate;
+
+    @Resource(name = "fulfilled-requirements-mig-service")
+    FulfilledRequirementsMigrationService fulfilledRequirementUpdate;
+
+    public boolean associateFulfilledCapabilities(T component, NodeTypeEnum nodeType) {
+        return fulfilledCapabilityUpdate.associateToscaDefinitions(component, nodeType);
+    }
+
+    public boolean associateFulfilledRequirements(T component, NodeTypeEnum nodeType) {
+        return fulfilledRequirementUpdate.associateToscaDefinitions(component, nodeType);
+    }
+
+    /**
+     * Clears every instance's capabilities/requirements and repopulates them
+     * from the component-level maps. No-op when there are no instances.
+     */
+    public void overrideInstanceCapabilitiesRequirements(T element) {
+        if (element.getComponentInstances() == null) {
+            return;
+        }
+        clearInstancesCapabilitiesRequirements(element);
+        setInstancesRequirementsFromComponent(element);
+        setInstancesCapabilitiesFromComponent(element);
+    }
+
+    // Empties the (possibly null) capability/requirement maps of every instance.
+    private void clearInstancesCapabilitiesRequirements(T element) {
+        for (ComponentInstance instance : element.getComponentInstances()) {
+            if (instance.getCapabilities() != null) {
+                instance.getCapabilities().clear();
+            }
+            if (instance.getRequirements() != null) {
+                instance.getRequirements().clear();
+            }
+        }
+    }
+
+    // Distributes component-level capabilities back onto their owning instances.
+    private void setInstancesCapabilitiesFromComponent(T element) {
+        if (element.getCapabilities() == null) {
+            return;
+        }
+        Map<String, ComponentInstance> instancesById = groupInstancesById(element);
+        element.getCapabilities().forEach((type, definitions) -> setCapabilitiesOnInstance(instancesById, type, definitions));
+    }
+
+    // Distributes component-level requirements back onto their owning instances.
+    private void setInstancesRequirementsFromComponent(T element) {
+        if (element.getRequirements() == null) {
+            return;
+        }
+        Map<String, ComponentInstance> instancesById = groupInstancesById(element);
+        element.getRequirements().forEach((type, requirements) -> setRequirementsOnInstance(instancesById, type, requirements));
+    }
+
+    // Groups one type's definitions by owner instance and appends them to each owner.
+    private void setCapabilitiesOnInstance(Map<String, ComponentInstance> instances, String capabilityType, List<CapabilityDefinition> definitions) {
+        Map<String, List<CapabilityDefinition>> definitionsByOwner = definitions.stream()
+                .collect(Collectors.groupingBy(CapabilityDefinition::getOwnerId));
+        definitionsByOwner.forEach((ownerId, ownerDefs) -> setCapabilitiesOnInstanceByType(instances.get(ownerId), capabilityType, ownerDefs));
+    }
+
+    // Groups one type's requirements by owner instance and appends them to each owner.
+    private void setRequirementsOnInstance(Map<String, ComponentInstance> instances, String requirementType, List<RequirementDefinition> requirements) {
+        Map<String, List<RequirementDefinition>> requirementsByOwner = requirements.stream()
+                .collect(Collectors.groupingBy(RequirementDefinition::getOwnerId));
+        requirementsByOwner.forEach((ownerId, ownerReqs) -> setRequirementsOnInstanceByType(instances.get(ownerId), requirementType, ownerReqs));
+    }
+
+    private void setCapabilitiesOnInstanceByType(ComponentInstance instance, String capabilityType, List<CapabilityDefinition> capabilityDefinitions) {
+        instance.getCapabilities().computeIfAbsent(capabilityType, k -> new ArrayList<>()).addAll(capabilityDefinitions);
+    }
+
+    private void setRequirementsOnInstanceByType(ComponentInstance instance, String requirementType, List<RequirementDefinition> reqDefinitions) {
+        instance.getRequirements().computeIfAbsent(requirementType, k -> new ArrayList<>()).addAll(reqDefinitions);
+    }
+
+    // instance unique id -> instance, for owner-id lookups above.
+    private Map<String, ComponentInstance> groupInstancesById(T element) {
+        return element.getComponentInstances().stream()
+                .collect(Collectors.toMap(ComponentInstance::getUniqueId, Function.identity()));
+    }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
index e29cd7e..2fb3270 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/DataSchemaMenu.java
@@ -20,6 +20,7 @@
 
 package org.openecomp.sdc.asdctool.main;
 
+import org.openecomp.sdc.asdctool.impl.TitanGraphInitializer;
 import org.openecomp.sdc.be.config.ConfigurationManager;
 import org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder;
 import org.openecomp.sdc.common.api.ConfigurationSource;
@@ -42,10 +43,9 @@
 			usageAndExit();
 		}
 
-		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(),
-				appConfigDir);
+		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
 		ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-
+		
 		try {
 
 			switch (operation.toLowerCase()) {
@@ -59,12 +59,13 @@
 					System.exit(2);
 				}
 			case "create-titan-structures":
-				log.debug("Start create titan keyspace, tables and indexes");
-				if (SdcSchemaBuilder.createSchema()) {
-					log.debug("create cassandra keyspace, tables and indexes successfull");
+				log.debug("Start create titan keyspace");
+				String titanCfg = 2 == args.length? configurationManager.getConfiguration().getTitanCfgFile(): args[2];
+				if (TitanGraphInitializer.createGraph(titanCfg)) {
+					log.debug("create titan keyspace successfull");
 					System.exit(0);
 				} else {
-					log.debug("create cassandra keyspace, tables and indexes failed");
+					log.debug("create titan keyspace failed");
 					System.exit(2);
 				}
 			case "clean-cassndra":
@@ -93,5 +94,6 @@
 
 	private static void DataSchemeUsage() {
 		System.out.println("Usage: create-cassandra-structures <configuration dir> ");
+		System.out.println("Usage: create-titan-structures <configuration dir> ");
 	}
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
index 17008b3..b82b62b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/EsToCassandraDataMigrationMenu.java
@@ -22,10 +22,7 @@
 
 import org.openecomp.sdc.asdctool.impl.DataMigration;
 import org.openecomp.sdc.asdctool.impl.EsToCassandraDataMigrationConfig;
-import org.openecomp.sdc.asdctool.impl.migration.v1604.AppConfig;
-import org.openecomp.sdc.asdctool.impl.migration.v1604.ServiceMigration;
 import org.openecomp.sdc.be.config.ConfigurationManager;
-import org.openecomp.sdc.be.dao.cassandra.schema.SdcSchemaBuilder;
 import org.openecomp.sdc.common.api.ConfigurationSource;
 import org.openecomp.sdc.common.impl.ExternalConfiguration;
 import org.openecomp.sdc.common.impl.FSConfigurationSource;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
index 6b6f11c..36d2f66 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ExportImportMenu.java
@@ -20,6 +20,7 @@
 
 package org.openecomp.sdc.asdctool.main;
 
+import org.openecomp.sdc.asdctool.impl.GraphJsonValidator;
 import org.openecomp.sdc.asdctool.impl.GraphMLConverter;
 import org.openecomp.sdc.asdctool.impl.GraphMLDataAnalyzer;
 
@@ -29,6 +30,7 @@
 		exportUsage();
 		importUsage();
 		exportUsersUsage();
+		validateJsonUsage();
 
 		System.exit(1);
 	}
@@ -37,6 +39,10 @@
 		System.out.println("Usage: import <titan.properties> <graph file location>");
 	}
 
+	private static void validateJsonUsage() {
+		System.out.println("Usage: validate-json <export graph path>");
+	}
+
 	private static void exportUsage() {
 		System.out.println("Usage: export <titan.properties> <output directory>");
 	}
@@ -107,6 +113,14 @@
 				System.exit(2);
 			}
 			break;
+		case "validate-json":
+			String jsonFilePath = validateAndGetJsonFilePath(args);
+			GraphJsonValidator graphJsonValidator = new GraphJsonValidator();
+			if (graphJsonValidator.verifyTitanJson(jsonFilePath)) {
+				System.exit(2);
+			}
+			break;
+
 		case "export-as-graph-ml":
 			isValid = verifyParamsLength(args, 3);
 			if (false == isValid) {
@@ -152,6 +166,16 @@
 
 	}
 
+	private static String validateAndGetJsonFilePath(String[] args) {
+		boolean isValid;
+		isValid = verifyParamsLength(args, 2);
+		if (!isValid) {
+            validateJsonUsage();
+            System.exit(1);
+        }
+        return args[1];
+	}
+
 	private static boolean verifyParamsLength(String[] args, int i) {
 		if (args == null) {
 			if (i > 0) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java
index d7ed460..c914e3b 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/MigrationMenu.java
@@ -32,6 +32,10 @@
 import org.openecomp.sdc.asdctool.impl.migration.v1607.CsarMigration;
 import org.openecomp.sdc.asdctool.impl.migration.v1610.TitanFixUtils;
 import org.openecomp.sdc.asdctool.impl.migration.v1610.ToscaArtifactsAlignment;
+import org.openecomp.sdc.asdctool.impl.migration.v1702.Migration1702;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.Migration1707;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.Migration1707Config;
+import org.openecomp.sdc.asdctool.impl.migration.v1707.VfModulesPropertiesAdding;
 import org.openecomp.sdc.be.config.ConfigurationManager;
 import org.openecomp.sdc.common.api.ConfigurationSource;
 import org.openecomp.sdc.common.impl.ExternalConfiguration;
@@ -57,7 +61,11 @@
 		POPULATE_COMPONENT_CACHE("populate-component-cache", "populateComponentCache"), 
 		FIX_PROPERTIES("fix-properties", "titanFixUtils"), 
 		ALIGN_TOSCA_ARTIFACTS("align-tosca-artifacts", "toscaArtifactsAlignment"), 
-		FIX_ICONS("fix-icons", "titanFixUtils");
+		FIX_ICONS("fix-icons", "titanFixUtils"),
+		MIGRATION_1610_1702("migrate-1610-1702", "migration1702"),
+		MIGRATION_1702_1707("migrate-1702-1707", "migration1707"),
+		VFMODULES_PROPERTIES_ADDING("vfModules-properties-adding", "vfModulesPropertiesAdding");
+		// UPDATE_DATA_TYPES("update_data_types", "updateDataTypes");
 
 		private String value, beanName;
 
@@ -80,48 +88,52 @@
 		}
 	};
 
+	//arguments will be [operation] [version] [config path] [align derived - optional]
+	//example : migrate-1610-1702 1702 /home/config
 	public static void main(String[] args) throws Exception {
 
-		if (args == null || args.length < 2) {
+		if (args == null || args.length < 3) {
 			usageAndExit();
 		}
 		MigrationOperationEnum operationEnum = MigrationOperationEnum.findByValue(args[0]);
-		String appConfigDir = args[1];
+		String appConfigDir = args[2];
 		String dataInputFileDir = null;
-		if (operationEnum == MigrationOperationEnum.ALIGN_DERIVED_FROM_1604) {
-			dataInputFileDir = args[2];
+		if (operationEnum == MigrationOperationEnum.ALIGN_DERIVED_FROM_1604 ) {
+			dataInputFileDir = args[3];
 		}
+		log.info("Run with configuration folder {}", appConfigDir);
 		AnnotationConfigApplicationContext context = initContext(appConfigDir);
 		try {
 			ServiceMigration serviceMigration = (ServiceMigration) context.getBean(SERVICE_MIGARTION_BEAN);
 			switch (operationEnum) {
 			case MIGRATION_1602_1604:
-				log.debug("Start Titan migration from 1602 version to 1604");
+				log.info("Start Titan migration from 1602 version to 1604");
 				if (serviceMigration.migrate1602to1604(appConfigDir)) {
-					log.debug("Titan migration from 1602 version to 1604 was finished successfull");
+					log.info("Titan migration from 1602 version to 1604 was finished successfull");
 					System.exit(0);
 				} else {
-					log.debug("Titan migration from 1602 version to 1604 was failed");
+					log.info("Titan migration from 1602 version to 1604 was failed");
 					System.exit(2);
 				}
+				break;
 			case MIGRATE_1604_1607:
-				log.debug("Start Titan migration from 1604 version to 1607");
+				log.info("Start Titan migration from 1604 version to 1607");
 				if (serviceMigration.migrate1604to1607(appConfigDir)) {
-					log.debug("Titan migration from 1604 version to 1607 was finished successfull");
+					log.info("Titan migration from 1604 version to 1607 was finished successfull");
 					System.exit(0);
 				} else {
-					log.debug("Titan migration from 1604 version to 1607 was failed");
+					log.info("Titan migration from 1604 version to 1607 was failed");
 					System.exit(2);
 				}
 				break;
 			case ALIGN_VFC_NAMES_1604:
 				VfcNamingAlignment vfcNamingAlignment = (VfcNamingAlignment) context.getBean(operationEnum.getBeanName());
-				log.debug("Start VFC naming alignment on 1604");
+				log.info("Start VFC naming alignment on 1604");
 				if (vfcNamingAlignment.alignVfcNames1604(appConfigDir)) {
-					log.debug("VFC naming alignment on 1604 was finished successfull");
+					log.info("VFC naming alignment on 1604 was finished successfull");
 					System.exit(0);
 				} else {
-					log.debug("VFC naming alignment on 1604 was failed");
+					log.info("VFC naming alignment on 1604 was failed");
 					System.exit(2);
 				}
 				break;
@@ -143,12 +155,12 @@
 				break;
 			case ALIGN_DERIVED_FROM_1604:
 				DerivedFromAlignment derivedFromAlignment = (DerivedFromAlignment) context.getBean(operationEnum.getBeanName());
-				log.debug("Start derived from alignment on 1604");
+				log.info("Start derived from alignment on 1604");
 				if (derivedFromAlignment.alignDerivedFrom1604(appConfigDir, dataInputFileDir)) {
-					log.debug("Derived from alignment on 1604 was finished successfull");
+					log.info("Derived from alignment on 1604 was finished successfull");
 					System.exit(0);
 				} else {
-					log.debug("Derived from alignment on 1604 was failed");
+					log.info("Derived from alignment on 1604 was failed");
 					System.exit(2);
 				}
 				break;
@@ -164,27 +176,24 @@
 				}
 				break;
 			case CLEAN_CSAR:
-				log.debug("Start remove CSAR resources");
+				log.info("Start remove CSAR resources");
 				CsarMigration csarMigration = (CsarMigration) context.getBean(operationEnum.getBeanName());
-				// TODO Show to Michael L fixed return value
 				if (csarMigration.removeCsarResources()) {
-					log.debug("Remove CSAR resources finished successfully");
+					log.info("Remove CSAR resources finished successfully");
 					System.exit(0);
 				} else {
-					log.debug("Remove CSAR resources failed");
+					log.info("Remove CSAR resources failed");
 					System.exit(2);
 				}
 				break;
 			case POPULATE_COMPONENT_CACHE:
 				PopulateComponentCache populateComponentCache = (PopulateComponentCache) context.getBean(operationEnum.getBeanName());
-				// TODO Show to Michael L No return value always returns 0
 				populateComponentCache.populateCache();
 				System.exit(0);
 				break;
 			case FIX_PROPERTIES:
 				log.debug("Start fix capability properties types");
 				TitanFixUtils titanFixUtils = (TitanFixUtils) context.getBean(operationEnum.getBeanName());
-				// TODO Show to Michael L fixed return value
 				if (titanFixUtils.fixCapabiltyPropertyTypes()) {
 					log.debug("Fix capability properties types finished successfully");
 					System.exit(0);
@@ -194,34 +203,73 @@
 				}
 				break;
 			case FIX_ICONS:
-				log.debug("Start fix icons of vl and eline");
+				log.info("Start fix icons of vl and eline");
 				titanFixUtils = (TitanFixUtils) context.getBean(operationEnum.getBeanName());
-				// TODO Show to Michael L fixed return value
 				if (titanFixUtils.fixIconsInNormatives()) {
-					log.debug("Fix icons of vl and eline finished successfully");
+					log.info("Fix icons of vl and eline finished successfully");
 					System.exit(0);
 				} else {
-					log.debug("Fix icons of vl and eline failed");
+					log.info("Fix icons of vl and eline failed");
 					System.exit(2);
 				}
 				break;
 			case ALIGN_TOSCA_ARTIFACTS:
-				log.debug("Start align tosca artifacts");
+				log.info("Start align tosca artifacts");
 				ToscaArtifactsAlignment toscaArtifactsAlignment = (ToscaArtifactsAlignment) context.getBean(operationEnum.getBeanName());
 				boolean isSuccessful = toscaArtifactsAlignment.alignToscaArtifacts();
 				if (isSuccessful) {
-					log.debug("Tosca Artifacts alignment was finished successfull");
+					log.info("Tosca Artifacts alignment was finished successfull");
 					System.exit(0);
 				} else {
-					log.debug("Tosca Artifacts alignment has failed");
+					log.info("Tosca Artifacts alignment has failed");
+					System.exit(2);
+				}
+				break;
+			case MIGRATION_1610_1702:
+				log.info("Start ASDC migration from 1610 to 1702");
+				Migration1702 migration = (Migration1702) context.getBean(operationEnum.getBeanName());
+				isSuccessful = migration.migrate(appConfigDir);
+				if (isSuccessful) {
+					log.info("ASDC migration from 1610 to 1702 was finished successful");
+					System.exit(0);
+				} else{
+					log.info("ASDC migration from 1610 to 1702 has failed");
+					System.exit(2);
+				}
+			
+				break;
+			case MIGRATION_1702_1707:
+//				log.info("Start ASDC migration from 1702 to 1707");
+				System.exit(0);
+//				Migration1707 migration1707 = (Migration1707) context.getBean(operationEnum.getBeanName());
+//				isSuccessful = migration1707.migrate();
+//				if (isSuccessful) {
+//					log.info("SDC migration from 1702 to 1707 was finished successfully");
+//					System.exit(0);
+//				} else{
+//					log.info("SDC migration from 1702 to 1707 has failed");
+//					System.exit(2);
+//				}
+				break;
+			case VFMODULES_PROPERTIES_ADDING:
+				log.info("Start adding new properties to vfModules");
+				VfModulesPropertiesAdding migrationVfModulesProperties = (VfModulesPropertiesAdding) context.getBean(operationEnum.getBeanName());
+				isSuccessful = migrationVfModulesProperties.migrate(args[1]);
+				if (isSuccessful) {
+					log.info("Adding new properties to vfModules was finished successfully");
+					System.exit(0);
+				} else{
+					log.info("Adding new properties to vfModules has failed");
 					System.exit(2);
 				}
 				break;
 			default:
 				usageAndExit();
 			}
-		} catch (Throwable t) {
+		} catch (Exception t) {
+			log.info("Failed {} with exception: {}", operationEnum, t.toString());
 			t.printStackTrace();
+			log.debug("Error while Running MigrationMenu {}", t.getMessage(), t);
 			System.exit(3);
 		} finally {
 			context.close();
@@ -231,7 +279,7 @@
 	private static AnnotationConfigApplicationContext initContext(String appConfigDir) {
 		ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
 		ConfigurationManager configurationManager = new ConfigurationManager(configurationSource);
-		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class);
+		AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(AppConfig.class, Migration1707Config.class);
 		return context;
 	}
 
@@ -247,5 +295,9 @@
 		System.out.println("Usage: align-derived-from-1604 <configuration dir> <data_input_file dir>");
 		System.out.println("Usage: align-groups <configuration dir>");
 		System.out.println("Usage: fix-properties <configuration dir>");
+		System.out.println("Usage: migrate-1610-1702 <configuration dir>");
+		System.out.println("Usage: migrate-1702-1707 <configuration dir>");
+		System.out.println("Usage: update_data_types <configuration dir> <data_types_input_file path>");
+		System.out.println("Usage: vfModules-properties-adding <group_types_input_file path> <configuration dir>");
 	}
 }
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
index e620510..73b7306 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/UpdateIsVnfMenu.java
@@ -74,7 +74,7 @@
 			if (updatePropertyOnServiceAtLeastCertified == null) {
 				System.exit(2);
 			} else if (updatePropertyOnServiceAtLeastCertified.intValue() >= 0) {
-				log.debug("Number of updated services is {}", updatePropertyOnServiceAtLeastCertified.intValue());
+				log.debug("Number of updated services is {}",updatePropertyOnServiceAtLeastCertified.intValue());
 				System.exit(0);
 			}
 
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
new file mode 100644
index 0000000..8899aa1
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/EntryPoint.java
@@ -0,0 +1,44 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.servlets;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Path("/entrypoint")
+public class EntryPoint {
+
+	private static Logger log = LoggerFactory.getLogger(EntryPoint.class.getName());
+
+	@GET
+	@Path("test")
+	@Produces(MediaType.TEXT_PLAIN)
+	public String test() {
+
+		log.info("In test");
+		return "Test" + System.currentTimeMillis();
+	}
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
new file mode 100644
index 0000000..0ab863a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/servlets/ExportImportTitanServlet.java
@@ -0,0 +1,187 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool.servlets;
+
+import java.io.BufferedOutputStream;
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Properties;
+import java.util.Map.Entry;
+
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.commons.configuration.BaseConfiguration;
+import org.apache.commons.configuration.Configuration;
+import org.apache.tinkerpop.gremlin.structure.io.graphml.GraphMLWriter;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.openecomp.sdc.asdctool.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.thinkaurelius.titan.core.TitanGraph;
+//import com.tinkerpop.blueprints.util.io.graphml.GraphMLWriter;
+
+@Path("/titan")
+public class ExportImportTitanServlet {
+
+	private static Logger log = LoggerFactory.getLogger(ExportImportTitanServlet.class.getName());
+
+	@GET
+	@Path("export")
+	@Consumes(MediaType.MULTIPART_FORM_DATA)
+	@Produces(MediaType.APPLICATION_OCTET_STREAM)
+	public Response export(@FormDataParam("titanProperties") File titanPropertiesFile,
+			@FormDataParam("metadata") String exportGraphMetadata) {
+
+		printTitanConfigFile(titanPropertiesFile);
+		printMetadata(exportGraphMetadata);
+
+		Properties titanProperties = convertFileToProperties(titanPropertiesFile);
+
+		if (titanProperties == null) {
+			Response response = Utils.buildOkResponse(400, "cannot parse titan properties file", null);
+			return response;
+		}
+
+		Configuration conf = new BaseConfiguration();
+		for (Entry<Object, Object> entry : titanProperties.entrySet()) {
+			String key = entry.getKey().toString();
+			Object value = entry.getValue();
+			conf.setProperty(key, value);
+		}
+
+		conf.setProperty("storage.machine-id-appendix", System.currentTimeMillis() % 1000);
+
+		TitanGraph openGraph = Utils.openGraph(conf);
+		if (openGraph == null) {
+			Response buildErrorResponse = Utils.buildOkResponse(500, "failed to open graph", null);
+			return buildErrorResponse;
+		}
+
+		// Titan graph opened successfully above; build the OK response
+
+		Response buildOkResponse = Utils.buildOkResponse(200, "ok man", null);
+
+		return buildOkResponse;
+	}
+
+	private Properties convertFileToProperties(File titanPropertiesFile) {
+
+		Properties properties = new Properties();
+
+		FileReader fileReader = null;
+		try {
+			fileReader = new FileReader(titanPropertiesFile);
+			properties.load(fileReader);
+
+		} catch (Exception e) {
+			log.error("Failed to convert file to properties", e);
+			return null;
+		} finally {
+			if (fileReader != null) {
+				try {
+					fileReader.close();
+				} catch (IOException e) {
+					log.error("Failed to close file", e);
+				}
+			}
+		}
+
+		return properties;
+	}
+
+	private void printTitanConfigFile(File titanPropertiesFile) {
+
+		if (log.isDebugEnabled()) {
+			StringBuilder builder = new StringBuilder();
+			try (BufferedReader br = new BufferedReader(new FileReader(titanPropertiesFile))) {
+				String line;
+				while ((line = br.readLine()) != null) {
+					builder.append(line + Utils.NEW_LINE);
+				}
+
+				log.debug(builder.toString());
+
+			} catch (IOException e) {
+				log.error("Cannot print titan properties file", e);
+			}
+		}
+	}
+
+	private void printMetadata(String exportGraphMetadata) {
+
+		log.debug(exportGraphMetadata);
+
+	}
+
+	public String exportGraph(TitanGraph graph, String outputDirectory) {
+
+		String result = null;
+
+		// GraphMLWriter graphMLWriter = new GraphMLWriter(graph);
+		GraphMLWriter graphMLWriter = GraphMLWriter.build().create();
+
+		String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".ml";
+
+		OutputStream out = null;
+		try {
+			out = new BufferedOutputStream(new ByteArrayOutputStream());
+
+			// graphMLWriter.outputGraph(out);
+
+			graphMLWriter.writeGraph(out, graph);
+
+			// graph.commit();
+			graph.tx().commit();
+
+			String exportedGraph = out.toString();
+
+			result = outputFile;
+
+		} catch (Exception e) {
+			e.printStackTrace();
+			// graph.rollback();
+			graph.tx().rollback();
+		} finally {
+			try {
+				if (out != null) {
+					out.close();
+				}
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+		}
+		return result;
+
+	}
+
+}
diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml
index 294424f..d376ba3 100644
--- a/asdctool/src/main/resources/config/configuration.yaml
+++ b/asdctool/src/main/resources/config/configuration.yaml
@@ -8,7 +8,7 @@
 
 
 # catalog backend hostname
-beFqdn: sdccatalog
+beFqdn: sdccatalog.att.com
 
 # catalog backend http port
 beHttpPort: 8080
@@ -25,19 +25,36 @@
 version: 1.0
 released: 2012-11-30
 
-titanCfgFile: src/main/resources/config/titan.properties
+titanCfgFile: C:\Git_work\Git_UGN\d2-sdnc\asdctool\src\main\resources\config\titan.properties
+titanMigrationKeySpaceCfgFile: C:\Git_work\Git_UGN\d2-sdnc\asdctool\src\main\resources\config\titan-migration.properties
 titanInMemoryGraph: false
-titanLockTimeout: 30
+titanLockTimeout: 1800
 titanReconnectIntervalInSeconds: 3
 titanHealthCheckReadTimeout: 1
 esReconnectIntervalInSeconds: 3
 uebHealthCheckReconnectIntervalInSeconds: 15
 uebHealthCheckReadTimeout: 4
 
+
 # Protocols
 protocols:
    - http
    - https
+   
+# Default imports
+defaultImports:
+   - nodes:
+        file: nodes.yml
+   - datatypes:
+        file: data.yml
+   - capabilities:
+        file: capabilities.yml
+   - relationships:
+        file: relationships.yml
+   - groups:
+        file: groups.yml
+   - policies:
+        file: policies.yml
 
 # Users
 users:
@@ -93,6 +110,9 @@
    - AAI_VF_MODULE_MODEL
    - AAI_VF_INSTANCE_MODEL
    - OTHER
+   - SNMP_POLL
+   - SNMP_TRAP
+   - GUIDE
 
 licenseTypes:
    - User
@@ -105,24 +125,25 @@
   - CP
   - VL
   - VF
+  - VFCMT
  
 # validForResourceTypes usage
 #     validForResourceTypes:
 #        - VF
 #        - VL
 deploymentResourceArtifacts:
-  heat:
-      displayName: "Base HEAT Template"
-      type: HEAT
-      validForResourceTypes: *allResourceTypes
-  heatVol:
-      displayName: "Volume HEAT Template"
-      type: HEAT_VOL
-      validForResourceTypes: *allResourceTypes
-  heatNet:
-      displayName: "Network HEAT Template"
-      type: HEAT_NET
-      validForResourceTypes: *allResourceTypes
+#  heat:
+#      displayName: "Base HEAT Template"
+#      type: HEAT
+#      validForResourceTypes: *allResourceTypes
+#  heatVol:
+#      displayName: "Volume HEAT Template"
+#      type: HEAT_VOL
+#      validForResourceTypes: *allResourceTypes
+#  heatNet:
+#      displayName: "Network HEAT Template"
+#      type: HEAT_NET
+#      validForResourceTypes: *allResourceTypes
   
 deploymentResourceInstanceArtifacts:
   heatEnv:
@@ -130,6 +151,11 @@
       type: HEAT_ENV
       description: "Auto-generated HEAT Environment deployment artifact"
       fileExtension: "env"
+  VfHeatEnv:
+      displayName: "VF HEAT ENV"
+      type: HEAT_ENV
+      description: "VF Auto-generated HEAT Environment deployment artifact"
+      fileExtension: "env"
 
 #tosca artifacts placeholders
 toscaArtifacts:
@@ -144,6 +170,7 @@
       type: TOSCA_CSAR
       description: TOSCA definition package of the asset
 
+
 #Informational artifacts placeHolder
 excludeResourceCategory:
   - Generic
@@ -160,7 +187,7 @@
   testScripts:
       displayName: Test Scripts
       type: OTHER
-  cloudQuestionnaire:
+  CloudQuestionnaire:
       displayName: Cloud Questionnaire (completed)
       type: OTHER
   HEATTemplateFromVendor:
@@ -169,7 +196,7 @@
   resourceSecurityTemplate:
       displayName: Resource Security Template
       type: OTHER
- 
+            
 excludeServiceCategory:
 
 informationalServiceArtifacts:
@@ -247,6 +274,16 @@
     YANG_XML:
         acceptedTypes:
             - xml
+    VNF_CATALOG:
+        acceptedTypes:
+            - xml
+    MODEL_INVENTORY_PROFILE:
+        acceptedTypes:
+            - xml
+    MODEL_QUERY_SPEC:
+        acceptedTypes:
+            - xml
+#AAI Artifacts
     AAI_SERVICE_MODEL:
         acceptedTypes:
             - xml
@@ -264,17 +301,80 @@
         acceptedTypes:
             - yaml
             - yml
+        validForResourceTypes: *allResourceTypes
     HEAT_VOL:
         acceptedTypes:
             - yaml
             - yml
+        validForResourceTypes: *allResourceTypes
+    HEAT_NESTED:
+        acceptedTypes:
+            - yaml
+            - yml
+        validForResourceTypes: *allResourceTypes
+    HEAT_ARTIFACT:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
     HEAT_NET:
         acceptedTypes:
             - yaml
             - yml
+        validForResourceTypes: *allResourceTypes
     YANG_XML:
         acceptedTypes:
             - xml
+        validForResourceTypes: *allResourceTypes
+    VNF_CATALOG:
+        acceptedTypes:
+            - xml
+        validForResourceTypes: *allResourceTypes
+    VF_LICENSE:
+        acceptedTypes:
+            - xml
+        validForResourceTypes: *allResourceTypes
+    VENDOR_LICENSE:
+        acceptedTypes:
+            - xml
+        validForResourceTypes: *allResourceTypes
+    MODEL_INVENTORY_PROFILE:
+        acceptedTypes:
+            - xml
+        validForResourceTypes: *allResourceTypes
+    MODEL_QUERY_SPEC:
+        acceptedTypes:
+            - xml
+        validForResourceTypes: *allResourceTypes
+    #APPC Artifacts
+    APPC_CONFIG:
+        acceptedTypes:
+        validForResourceTypes:
+            - VF
+    #DCAE Artifacts
+    DCAE_TOSCA:
+        acceptedTypes:
+            - yml
+            - yaml
+        validForResourceTypes:
+            - VF
+    DCAE_JSON:
+        acceptedTypes:
+            - json
+        validForResourceTypes:
+            - VF
+    DCAE_POLICY:
+        acceptedTypes:
+            - emf
+        validForResourceTypes:
+            - VF
+    DCAE_DOC:
+        acceptedTypes:
+        validForResourceTypes:
+            - VF        
+    DCAE_EVENT:
+        acceptedTypes:
+        validForResourceTypes:
+            - VF
+#AAI Artifacts
     AAI_VF_MODEL:
         acceptedTypes:
             - xml
@@ -284,24 +384,102 @@
         acceptedTypes:
             - xml
         validForResourceTypes:
-            - VF 
+            - VF
     OTHER:
         acceptedTypes:
-
+        validForResourceTypes: *allResourceTypes
+#MIB artifacts
+    SNMP_POLL:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    SNMP_TRAP:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+      
 resourceInstanceDeploymentArtifacts:
     HEAT_ENV:
         acceptedTypes:
             - env
+    VF_MODULES_METADATA:
+        acceptedTypes:
+            - json
+#DCAE_VF Instance Artifacts
+    DCAE_INVENTORY_TOSCA:
+        acceptedTypes:
+            - yml
+            - yaml
+    DCAE_INVENTORY_JSON:
+        acceptedTypes:
+            - json
+    DCAE_INVENTORY_POLICY:
+      acceptedTypes:
+            - emf
+    DCAE_INVENTORY_DOC:
+      acceptedTypes:
+    DCAE_INVENTORY_BLUEPRINT:
+      acceptedTypes:
+    DCAE_INVENTORY_EVENT:
+      acceptedTypes:
+#MIB artifacts
+    SNMP_POLL:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    SNMP_TRAP:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+      
+
+resourceInformationalArtifacts:
+    CHEF:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    PUPPET:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    SHELL:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    YANG:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    YANG_XML:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    HEAT:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    BPEL:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    DG_XML:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    MURANO_PKG:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    OTHER:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    SNMP_POLL:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    SNMP_TRAP:
+        acceptedTypes:
+        validForResourceTypes: *allResourceTypes
+    GUIDE:
+        acceptedTypes:
+        validForResourceTypes:
+            - VF
+            - VFC
+
 
 resourceInformationalDeployedArtifacts:
 
 
 requirementsToFulfillBeforeCert:
-     CP:
-       - tosca.capabilities.network.Bindable
-       
-capabilitiesToConsumeBeforeCert:
 
+capabilitiesToConsumeBeforeCert:
+            
 unLoggedUrls:
    - /sdc2/rest/healthCheck
 
@@ -312,6 +490,7 @@
        - Service
 
 artifactsIndex: resources
+
 cassandraConfig:
     cassandraHosts: ['localhost']
     localDataCenter:
@@ -326,36 +505,40 @@
         - { name: sdcaudit, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
         - { name: sdcartifact, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
         - { name: sdccomponent, replicationStrategy: SimpleStrategy, replicationInfo: ['1']}
-
-
+        
+       
 switchoverDetector:
-    gBeFqdn: 
-    gFeFqdn: 
-    beVip: 1.2.3.4
-    feVip: 1.2.3.4
+    gBeFqdn: AIO-BE.ecomp.idns.cip
+    gFeFqdn: AIO-FE.ecomp.idns.cip
+    beVip: 0.0.0.0
+    feVip: 0.0.0.0
     beResolveAttempts: 3
     feResolveAttempts: 3
     enabled: false
     interval: 60
     changePriorityUser: ecompasdc
     changePriorityPassword: ecompasdc123
-    publishNetworkUrl: 
+    publishNetworkUrl: "http://xxxxxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/update_network?user=root"
     publishNetworkBody: '{"note":"publish network"}'
     groups:
-      beSet: { changePriorityUrl: "http://localhost/",
-               changePriorityBody: '{"name":"AIO-BE.ecomp.idns.cip","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.cip","no_ad_redirection":false,"v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_be","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_be"],"failover_policy":["FAILALL"]},"comment":"AIO BE G-fqdn","intended_app_proto":"DNS"}'}
-      feSet: { changePriorityUrl: "http://localhost/",
-               changePriorityBody: '{"comment":"AIO G-fqdn","name":"AIO-FE.ecomp.idns.cip","v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_fe","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_fe"],"failover_policy":["FAILALL"]},"no_ad_redirection":false,"intended_app_proto":"DNS","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.cip.att.com"}'}
+      beSet: { changePriorityUrl: "http://xxxxxxx.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.com?user=root",
+               changePriorityBody: '{"name":"AIO-BE.ecomp.idns.com","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-BE.ecomp.idns.com","no_ad_redirection":false,"v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_be","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_be"],"failover_policy":["FAILALL"]},"comment":"AIO BE G-fqdn","intended_app_proto":"DNS"}'}
+      feSet: { changePriorityUrl: "http://cora.web.att.com/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.cip.att.com?user=root",
+               changePriorityBody: '{"comment":"AIO G-fqdn","name":"AIO-FE.ecomp.idns.com","v4groups":{"failover_groups":["/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_mg_fe","/crt/CipDomain.ECOMP-ASDC-DEVST/config/groups/group_bs_fe"],"failover_policy":["FAILALL"]},"no_ad_redirection":false,"intended_app_proto":"DNS","uri":"/crt/CipDomain.ECOMP-ASDC-DEVST/config/sites/AIO-FE.ecomp.idns.com"}'}
     
+       
+heatEnvArtifactHeader: ""
+heatEnvArtifactFooter: ""
 
-heatEnvArtifactHeader:
-    ""
-heatEnvArtifactFooter:
-    ""
-
+onboarding:
+    protocol: http
+    host: localhost
+    port: 8080
+    downloadCsarUri: "/onboarding-api/v1.0/vendor-software-products/packages"
+   
 applicationL1Cache:
     datatypes:
-        enabled: false
+        enabled: true
         firstRunDelay: 10
         pollIntervalInSec: 60
 
@@ -367,10 +550,25 @@
         servicesSizeInCache: 200
         productsSizeInCache: 100
     queue:
-        syncIntervalInSecondes: 60
-        waitOnShutDownInMinutes: 30
+        syncIntervalInSecondes: 43200
+        waitOnShutDownInMinutes: 10
         numberOfCacheWorkers: 4
 
 toscaValidators:
-    stringMaxLength: 1024
-disableAudit: true
\ No newline at end of file
+    stringMaxLength: 65536
+disableAudit: false
+
+vfModuleProperties:
+    min_vf_module_instances:
+        forBaseModule: 1
+        forNonBaseModule: 0
+    max_vf_module_instances:
+        forBaseModule: 1
+        forNonBaseModule:
+    initial_count:
+        forBaseModule: 1
+        forNonBaseModule: 0
+    vf_module_type:
+        forBaseModule: Base
+        forNonBaseModule: Expansion
+        
\ No newline at end of file
diff --git a/asdctool/src/main/resources/config/dataTypes.yml b/asdctool/src/main/resources/config/dataTypes.yml
new file mode 100644
index 0000000..43c7f0c
--- /dev/null
+++ b/asdctool/src/main/resources/config/dataTypes.yml
@@ -0,0 +1,1123 @@
+tosca.datatypes.Root:
+  description: The TOSCA root Data Type all other TOSCA base Data Types derive from
+
+integer:
+  derived_from: tosca.datatypes.Root
+
+string:
+  derived_from: tosca.datatypes.Root
+
+boolean:
+  derived_from: tosca.datatypes.Root
+
+float:
+  derived_from: tosca.datatypes.Root
+
+list:
+  derived_from: tosca.datatypes.Root
+
+map:
+  derived_from: tosca.datatypes.Root
+
+tosca.datatypes.Credential: 
+  derived_from: tosca.datatypes.Root
+  properties: 
+    protocol: 
+      type: string
+      required: false
+    token_type: 
+      type: string
+      default: password
+    token: 
+      type: string
+    keys:
+      type: map
+      required: false
+      entry_schema:
+        type: string
+    user:
+      type: string
+      required: false
+
+tosca.datatypes.TimeInterval: 
+  derived_from: tosca.datatypes.Root
+  properties:
+    start_time: 
+      type: timestamp
+      required: true
+    end_time: 
+      type: timestamp
+      required: true
+
+tosca.datatypes.network.NetworkInfo:
+  derived_from: tosca.datatypes.Root
+  properties:  
+    network_name: 
+      type: string
+    network_id: 
+      type: string
+    addresses:
+      type: list
+      entry_schema:
+        type: string
+
+tosca.datatypes.network.PortInfo: 
+  derived_from: tosca.datatypes.Root
+  properties:  
+    port_name: 
+      type: string
+    port_id: 
+      type: string
+    network_id: 
+      type: string
+    mac_address: 
+      type: string
+    addresses:
+      type: list
+      entry_schema:
+        type: string
+
+tosca.datatypes.network.PortDef:
+  derived_from: integer
+  constraints: 
+    - in_range: [ 1, 65535 ]
+
+tosca.datatypes.network.PortSpec: 
+  derived_from: tosca.datatypes.Root
+  properties:  
+    protocol: 
+      type: string
+      required: true
+      default: tcp
+      constraints:
+        - valid_values: [ udp, tcp, igmp ]
+    target:  
+      type: tosca.datatypes.network.PortDef
+    target_range:
+      type: range 
+      constraints:
+        - in_range: [ 1, 65535 ]
+    source:
+      type: tosca.datatypes.network.PortDef
+    source_range:
+      type: range
+      constraints:
+        - in_range: [ 1, 65535 ]
+
+###################new Data Types Onboarding Integration##########################
+
+org.openecomp.datatypes.heat.network.AddressPair:
+  derived_from: tosca.datatypes.Root
+  description: MAC/IP address pairs
+  properties:
+    mac_address:
+      type: string
+      description: MAC address
+      required: false
+      status: SUPPORTED
+    ip_address:
+      type: string
+      description: IP address
+      required: false
+      status: SUPPORTED
+org.openecomp.datatypes.heat.network.subnet.HostRoute:
+  derived_from: tosca.datatypes.Root
+  description: Host route info for the subnet
+  properties:
+    destination:
+      type: string
+      description: The destination for static route
+      required: false
+      status: SUPPORTED
+    nexthop:
+      type: string
+      description: The next hop for the destination
+      required: false
+      status: SUPPORTED
+      
+org.openecomp.datatypes.heat.network.AllocationPool:
+  derived_from: tosca.datatypes.Root
+  description: The start and end addresses for the allocation pool
+  properties:
+    start:
+      type: string
+      description: Start address for the allocation pool
+      required: false
+      status: SUPPORTED
+    end:
+      type: string
+      description: End address for the allocation pool
+      required: false
+      status: SUPPORTED
+
+org.openecomp.datatypes.heat.network.neutron.Subnet:
+  derived_from: tosca.datatypes.Root
+  description: A subnet represents an IP address block that can be used for assigning IP addresses to virtual instances
+  properties:
+    tenant_id:
+      type: string
+      description: The ID of the tenant who owns the network
+      required: false
+      status: SUPPORTED
+    enable_dhcp:
+      type: boolean
+      description: Set to true if DHCP is enabled and false if DHCP is disabled
+      required: false
+      default: true
+      status: SUPPORTED
+    ipv6_address_mode:
+      type: string
+      description: IPv6 address mode
+      required: false
+      status: SUPPORTED
+      constraints:
+      - valid_values:
+        - dhcpv6-stateful
+        - dhcpv6-stateless
+        - slaac
+    ipv6_ra_mode:
+      type: string
+      description: IPv6 RA (Router Advertisement) mode
+      required: false
+      status: SUPPORTED
+      constraints:
+      - valid_values:
+        - dhcpv6-stateful
+        - dhcpv6-stateless
+        - slaac
+    value_specs:
+      type: map
+      description: Extra parameters to include in the request
+      required: false
+      default: {
+        }
+      status: SUPPORTED
+      entry_schema:
+        type: string
+    allocation_pools:
+       type: list
+       description: The start and end addresses for the allocation pools
+       required: false
+       status: SUPPORTED
+       entry_schema:
+         type: org.openecomp.datatypes.heat.network.AllocationPool
+    subnetpool:
+      type: string
+      description: The name or ID of the subnet pool
+      required: false
+      status: SUPPORTED
+    dns_nameservers:
+      type: list
+      description: A specified set of DNS name servers to be used
+      required: false
+      default: [
+        ]
+      status: SUPPORTED
+      entry_schema:
+        type: string     
+    host_routes:
+      type: list
+      description: The host routes to be added to the subnet
+      required: false
+      status: SUPPORTED
+      entry_schema:
+        type: org.openecomp.datatypes.heat.network.subnet.HostRoute
+    ip_version:
+      type: integer
+      description: The IP version of the subnet
+      required: false
+      default: 4
+      status: SUPPORTED
+      constraints:
+      - valid_values:
+        - '4'
+        - '6'
+    name:
+      type: string
+      description: The name of the subnet
+      required: false
+      status: SUPPORTED
+    prefixlen:
+      type: integer
+      description: Prefix length for subnet allocation from subnet pool
+      required: false
+      status: SUPPORTED
+      constraints:
+      - greater_or_equal: 0
+    cidr:
+      type: string
+      description: The CIDR
+      required: false
+      status: SUPPORTED
+    gateway_ip:
+      type: string
+      description: The gateway IP address
+      required: false
+      status: SUPPORTED
+
+org.openecomp.datatypes.heat.novaServer.network.PortExtraProperties:
+  derived_from: tosca.datatypes.Root
+  description: Nova server network expand properties for port
+  properties:
+    port_security_enabled:
+      type: boolean
+      description: Flag to enable/disable port security on the port
+      required: false
+      status: SUPPORTED
+    mac_address:
+      type: string
+      description: MAC address to give to this port
+      required: false
+      status: SUPPORTED
+    admin_state_up:
+      type: boolean
+      description: The administrative state of this port
+      required: false
+      default: true
+      status: SUPPORTED
+    qos_policy:
+      type: string
+      description: The name or ID of QoS policy to attach to this port
+      required: false
+      status: SUPPORTED
+    allowed_address_pairs:
+      type: list
+      description: Additional MAC/IP address pairs allowed to pass through the port
+      required: false
+      status: SUPPORTED
+      entry_schema:
+        type: org.openecomp.datatypes.heat.network.AddressPair
+    binding:vnic_type:
+      type: string
+      description: The vnic type to be bound on the neutron port
+      required: false
+      status: SUPPORTED
+      constraints:
+      - valid_values:
+        - macvtap
+        - direct
+        - normal
+    value_specs:
+      type: map
+      description: Extra parameters to include in the request
+      required: false
+      default: {
+        }
+      status: SUPPORTED
+      entry_schema:
+        type: string
+org.openecomp.datatypes.heat.novaServer.network.AddressInfo:
+  derived_from: tosca.datatypes.network.NetworkInfo
+  description: Network addresses with corresponding port id
+  properties:
+    port_id:
+      type: string
+      description: Port id
+      required: false
+      status: SUPPORTED
+org.openecomp.datatypes.heat.neutron.port.FixedIps:
+  derived_from: tosca.datatypes.Root
+  description: subnet/ip_address
+  properties:
+    subnet:
+      type: string
+      description: Subnet in which to allocate the IP address for this port
+      required: false
+      status: SUPPORTED
+    ip_address:
+      type: string
+      description: IP address desired in the subnet for this port
+      required: false
+      status: SUPPORTED
+org.openecomp.datatypes.heat.FileInfo:
+  derived_from: tosca.datatypes.Root
+  description: Heat File Info
+  properties:
+    file:
+      type: string
+      description: The required URI string (relative or absolute) which can be used to locate the file
+      required: true
+      status: SUPPORTED
+    file_type:
+      type: string
+      description: The type of the file
+      required: true
+      status: SUPPORTED
+      constraints:
+      - valid_values:
+        - base
+        - env
+        - volume
+        - network
+org.openecomp.datatypes.heat.contrail.network.rule.PortPairs:
+    derived_from: tosca.datatypes.Root
+    description: source and destination port pairs
+    properties:
+      start_port:
+        type: string
+        description: Start port
+        required: false
+        status: SUPPORTED
+      end_port:
+        type: string
+        description: End port
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrail.network.rule.Rule:
+    derived_from: tosca.datatypes.Root
+    description: policy rule
+    properties:
+      src_ports:
+        type: list
+        description: Source ports
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
+      protocol:
+        type: string
+        description: Protocol
+        required: false
+        status: SUPPORTED
+      dst_addresses:
+        type: list
+        description: Destination addresses
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
+      apply_service:
+        type: string
+        description: Service to apply
+        required: false
+        status: SUPPORTED
+      dst_ports:
+        type: list
+        description: Destination ports
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrail.network.rule.PortPairs
+      src_addresses:
+        type: list
+        description: Source addresses
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork
+      direction:
+        type: string
+        description: Direction
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrail.network.rule.RuleList:
+    derived_from: tosca.datatypes.Root
+    description: list of policy rules
+    properties:
+      policy_rule:
+        type: list
+        description: Contrail network rule
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrail.network.rule.Rule
+org.openecomp.datatypes.heat.contrail.network.rule.VirtualNetwork:
+    derived_from: tosca.datatypes.Root
+    description: source and destination addresses
+    properties:
+      virtual_network:
+        type: string
+        description: Virtual network
+        required: false
+        status: SUPPORTED
+
+org.openecomp.datatypes.heat.network.neutron.SecurityRules.Rule:
+    derived_from: tosca.datatypes.Root
+    description: Rules Pairs
+    properties:
+      remote_group_id:
+        type: string
+        description: The remote group ID to be associated with this security group rule
+        required: false
+        status: SUPPORTED
+      protocol:
+        type: string
+        description: The protocol that is matched by the security group rule
+        required: false
+        status: SUPPORTED
+        constraints:
+        - valid_values:
+          - tcp
+          - udp
+          - icmp
+      ethertype:
+        type: string
+        description: Ethertype of the traffic
+        required: false
+        default: IPv4
+        status: SUPPORTED
+        constraints:
+        - valid_values:
+          - IPv4
+          - IPv6
+      port_range_max:
+        type: integer
+        description: 'The maximum port number in the range that is matched by the
+          security group rule. '
+        required: false
+        status: SUPPORTED
+        constraints:
+        - in_range:
+          - 0
+          - 65535
+      remote_ip_prefix:
+        type: string
+        description: The remote IP prefix (CIDR) to be associated with this security group rule
+        required: false
+        status: SUPPORTED
+      remote_mode:
+        type: string
+        description: Whether to specify a remote group or a remote IP prefix
+        required: false
+        default: remote_ip_prefix
+        status: SUPPORTED
+        constraints:
+        - valid_values:
+          - remote_ip_prefix
+          - remote_group_id
+      direction:
+        type: string
+        description: The direction in which the security group rule is applied
+        required: false
+        default: ingress
+        status: SUPPORTED
+        constraints:
+        - valid_values:
+          - egress
+          - ingress
+      port_range_min:
+        type: integer
+        description: The minimum port number in the range that is matched by the security group rule.
+        required: false
+        status: SUPPORTED
+        constraints:
+        - in_range:
+          - 0
+          - 65535
+org.openecomp.datatypes.heat.substitution.SubstitutionFiltering:
+    derived_from: tosca.datatypes.Root
+    description: Substitution Filter
+    properties:
+      substitute_service_template:
+        type: string
+        description: Substitute Service Template
+        required: true
+        status: SUPPORTED
+      index_value:
+        type: integer
+        description: Index value of the substitution service template runtime instance
+        required: false
+        default: 0
+        status: SUPPORTED
+        constraints:
+        - greater_or_equal: 0
+      count:
+        type: string
+        description: Count
+        required: false
+        default: 1
+        status: SUPPORTED
+      scaling_enabled:
+        type: boolean
+        description: Indicates whether service scaling is enabled
+        required: false
+        default: true
+        status: SUPPORTED
+      mandatory:
+        type: boolean
+        description: Mandatory
+        required: false
+        default: true
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence:
+    derived_from: tosca.datatypes.Root
+    description: network policy refs data sequence
+    properties:
+      network_policy_refs_data_sequence_major:
+        type: integer
+        description: Network Policy ref data sequence Major
+        required: false
+        status: SUPPORTED
+      network_policy_refs_data_sequence_minor:
+        type: integer
+        description: Network Policy ref data sequence Minor
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefData:
+    derived_from: tosca.datatypes.Root
+    description: network policy refs data
+    properties:
+      network_policy_refs_data_sequence:
+        type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.RefDataSequence
+        description: Network Policy ref data sequence
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet:
+    derived_from: tosca.datatypes.Root
+    description: Network Ipam Ref Data Subnet
+    properties:
+      network_ipam_refs_data_ipam_subnets_subnet_ip_prefix_len:
+        type: string
+        description: Network ipam refs data ipam subnets ip prefix len
+        required: false
+        status: SUPPORTED
+      network_ipam_refs_data_ipam_subnets_subnet_ip_prefix:
+        type: string
+        description: Network ipam refs data ipam subnets ip prefix
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList:
+    derived_from: tosca.datatypes.Root
+    description: Network Ipam Ref Data Subnet List
+    properties:
+      network_ipam_refs_data_ipam_subnets_subnet:
+        type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnet
+        description: Network ipam refs data ipam subnets
+        required: false
+        status: SUPPORTED
+      network_ipam_refs_data_ipam_subnets_addr_from_start:
+        type: string
+        description: Network ipam refs data ipam subnets addr from start
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.IpamRefData:
+    derived_from: tosca.datatypes.Root
+    description: Network Ipam Ref Data
+    properties:
+      network_ipam_refs_data_ipam_subnets:
+        type: list
+        description: Network ipam refs data ipam subnets
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.virtual.network.rule.ref.data.IpamSubnetList
+org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork:
+    derived_from: tosca.datatypes.Root
+    description: source addresses
+    properties:
+      network_policy_entries_policy_rule_src_addresses_virtual_network:
+        type: string
+        description: Source addresses Virtual network
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork:
+    derived_from: tosca.datatypes.Root
+    description: destination addresses
+    properties:
+      network_policy_entries_policy_rule_dst_addresses_virtual_network:
+        type: string
+        description: Destination addresses Virtual network
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs:
+    derived_from: tosca.datatypes.Root
+    description: destination port pairs
+    properties:
+      network_policy_entries_policy_rule_dst_ports_start_port:
+        type: string
+        description: Start port
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_dst_ports_end_port:
+        type: string
+        description: End port
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs:
+    derived_from: tosca.datatypes.Root
+    description: source port pairs
+    properties:
+      network_policy_entries_policy_rule_src_ports_start_port:
+        type: string
+        description: Start port
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_src_ports_end_port:
+        type: string
+        description: End port
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
+    derived_from: tosca.datatypes.Root
+    description: Action List
+    properties:
+      network_policy_entries_policy_rule_action_list_simple_action:
+        type: string
+        description: Simple Action
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_action_list_apply_service:
+        type: list
+        description: Apply Service
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: string
+org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList:
+    derived_from: tosca.datatypes.Root
+    description: Action List
+    properties:
+      network_policy_entries_policy_rule_action_list_simple_action:
+        type: string
+        description: Simple Action
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_action_list_apply_service:
+        type: list
+        description: Apply Service
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: string
+org.openecomp.datatypes.heat.contrailV2.network.rule.Rule:
+    derived_from: tosca.datatypes.Root
+    description: policy rule
+    properties:
+      network_policy_entries_policy_rule_dst_addresses:
+        type: list
+        description: Destination addresses
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstVirtualNetwork
+      network_policy_entries_policy_rule_dst_ports:
+        type: list
+        description: Destination ports
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.network.rule.DstPortPairs
+      network_policy_entries_policy_rule_protocol:
+        type: string
+        description: Protocol
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_src_addresses:
+        type: list
+        description: Source addresses
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcVirtualNetwork
+      network_policy_entries_policy_rule_direction:
+        type: string
+        description: Direction
+        required: false
+        status: SUPPORTED
+      network_policy_entries_policy_rule_src_ports:
+        type: list
+        description: Source ports
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.network.rule.SrcPortPairs
+      network_policy_entries_policy_rule_action_list:
+        type: org.openecomp.datatypes.heat.contrailV2.network.rule.ActionList
+        description: Action list
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.network.rule.RuleList:
+    derived_from: tosca.datatypes.Root
+    description: list of policy rules
+    properties:
+      network_policy_entries_policy_rule:
+        type: list
+        description: Contrail network rule
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.contrailV2.network.rule.Rule
+org.openecomp.datatypes.heat.network.contrail.port.StaticRoute:
+    derived_from: tosca.datatypes.Root
+    description: static route
+    properties:
+      prefix:
+        type: string
+        description: Route prefix
+        required: false
+        status: SUPPORTED
+      next_hop:
+        type: string
+        description: Next hop
+        required: false
+        status: SUPPORTED
+      next_hop_type:
+        type: string
+        description: Next hop type
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.network.contrail.AddressPair:
+    derived_from: tosca.datatypes.Root
+    description: Address Pair
+    properties:
+      address_mode:
+        type: string
+        description: Address mode active-active or active-standby
+        required: false
+        status: SUPPORTED
+        constraints:
+        - valid_values:
+          - active-active
+          - active-standby
+      prefix:
+        type: string
+        description: IP address prefix
+        required: false
+        status: SUPPORTED
+      mac_address:
+        type: string
+        description: Mac address
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.network.contrail.InterfaceData:
+    derived_from: tosca.datatypes.Root
+    description: Interface Data
+    properties:
+      static_routes:
+        type: list
+        description: An ordered list of static routes to be added to this interface
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.network.contrail.port.StaticRoute
+      virtual_network:
+        type: string
+        description: Virtual Network for this interface
+        required: true
+        status: SUPPORTED
+      allowed_address_pairs:
+        type: list
+        description: List of allowed address pair for this interface
+        required: false
+        status: SUPPORTED
+        entry_schema:
+          type: org.openecomp.datatypes.heat.network.contrail.AddressPair
+      ip_address:
+        type: string
+        description: IP for this interface
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.interface.Properties:
+    derived_from: tosca.datatypes.Root
+    description: Virtual Machine Interface Properties.
+    properties:
+      virtual_machine_interface_properties_service_interface_type:
+        type: string
+        description: Service Interface Type.
+        required: false
+        status: SUPPORTED
+org.openecomp.datatypes.Root:
+  derived_from: tosca.datatypes.Root
+  description: >
+    The AT&T root Data Type all other Data Types derive from
+  properties:
+    supplemental_data:
+      type: map
+      entry_schema:
+        description: > 
+           A placeholder for missing properties that would be included in future ecomp model versions.
+           format <key>:<value>
+        type: string
+
+org.openecomp.datatypes.network.SubnetAssignments:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    ip_network_address_plan:
+      type: string
+      required: false
+      description: Reference to EIPAM, VLAN or other address plan ID used to assign subnets to this network 
+    dhcp_enabled:
+      type: boolean
+      required: false
+      description: \"true\" indicates the network has 1 or more policies
+    ip_version:
+      type: integer
+      constraints:
+        - valid_values: [4,6]
+      required: true
+      description: The IP version of the subnet
+    cidr_mask:
+      type: integer
+      required: true
+      description: The default subnet CIDR mask 
+    min_subnets_count:
+      type: integer
+      default: 1
+      required: true
+      description: Quantity of subnets that must be initially assigned
+org.openecomp.datatypes.network.IPv4SubnetAssignments:
+  derived_from: org.openecomp.datatypes.network.SubnetAssignments
+  properties:
+    use_ipv4:
+      type: boolean
+      required: true
+      description: Indicates IPv4 subnet assignments
+
+org.openecomp.datatypes.network.IPv6SubnetAssignments:
+  derived_from: org.openecomp.datatypes.network.SubnetAssignments
+  properties:
+    use_ipv6:
+      type: boolean
+      required: true
+      description: Indicates IPv6 subnet assignments
+
+org.openecomp.datatypes.network.NetworkAssignments:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    ecomp_generated_network_assignment:
+      type: boolean
+      required: true
+      default: false
+      description: >
+        \"true\" indicates that the network assignments will be auto-generated by ECOMP
+        \"false\" indicates operator-supplied Network assignments file upload is required (e.g. VID will present prompt to operator to upload operator-supplied Network assignments file).
+    is_shared_network:
+      type: boolean
+      required: true
+      description: \"true\" means this network is shared by multiple Openstack tenants
+    is_external_network:
+      type: boolean
+      required: true
+      default: false
+      description: >
+        \"true\" means this Contrail external network
+    ipv4_subnet_default_assignment:
+      type: org.openecomp.datatypes.network.IPv4SubnetAssignments
+      required: true
+      description: IPv4 default subnet assignments
+    ipv6_subnet_default_assignment:
+      type: org.openecomp.datatypes.network.IPv6SubnetAssignments
+      required: true
+      description: IPv6 default subnet assignments
+
+org.openecomp.datatypes.network.ProviderNetwork:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    is_provider_network:
+      type: boolean
+      required: true
+      description: \"true\" indicates that this a Neutron provider type of network 
+    physical_network_name:
+      type: string
+      required: false
+      constraints:
+        - valid_values: ["Physnet41", "Physnet42", "Physnet43", "Physnet44", "Physnet21", "Physnet22"]
+      description: >
+        Identifies the NUMA processor cluster to which this physical network interface belongs.
+        NUMA instance correlates to the first digit of the Physical Network Name suffix (e.g. \"01\" = NUMA 0, \"11\" = NUMA 1)
+    numa:
+      type: string
+      required: false
+      constraints:
+        - valid_values: ["NUMA 0", "NUMA 1"]
+      description: >
+        PNIC instance within the NUMA processor cluster
+        PNIC Instance correlates to the second digit of the Physical Network Name suffix (e.g. "01" = PNIC 1, "02" = PNIC 2)
+    pnic_instance:
+      type: integer
+      required: false
+      description: PNIC instance within the NUMA processor cluster
+
+org.openecomp.datatypes.network.NetworkFlows:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    is_network_policy:
+      type: boolean
+      required: false
+      default: false
+      description: \"true\" indicates the network has 1 or more policies
+    network_policy:
+      type: string
+      required: false
+      description: "Identifies the specific Cloud network policy that must be applied to this network (source: from Policy Manager)."
+    is_bound_to_vpn:
+      type: boolean
+      required: false
+      default: false
+      description: \"true\" indicates the network has 1 or more vpn bindings
+    vpn_binding:
+      type: string
+      required: false
+      description: "Identifies the specific VPN Binding entry in A&AI that must be applied when creating this network (source: A&AI)"
+
+org.openecomp.datatypes.network.VlanRequirements:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    vlan_range_plan:
+      type: string
+      required: true
+      description: reference to a vlan range plan
+    vlan_type:
+      type: string
+      required: true
+      constraints:
+        - valid_values: ["c-tag", "s-tag"]
+      description: identifies the vlan type (e.g., c-tag)
+    vlan_count:
+      type: integer
+      required: true
+      description: identifies the number of vlan tags to assign to the CP from the plan
+
+org.openecomp.datatypes.network.IpRequirements:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    ip_version:
+      type: integer
+      constraints:
+        - valid_values: [4,6]
+      required: true
+      description: 
+    ip_count:
+      type: integer
+      required: true
+      description: identifies the number of ip address to assign to the CP from the plan
+    floating_ip_count:
+      type: integer
+      required: false
+    subnet_role:
+      type: string
+      required: false
+    assingment_method:
+      type: string
+      constraints:
+        - valid_values: ["fixed", "dhcp"]
+      required: true
+      description:
+
+org.openecomp.datatypes.network.MacAssignments:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    mac_range_plan:
+      type: string
+      required: true
+      description: reference to a MAC address range plan
+    mac_count:
+      type: integer
+      required: true
+      description: identifies the number of MAC addresses to assign to the CP from the plan
+
+org.openecomp.datatypes.EcompHoming:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    ecomp_selected_instance_node_target:
+      type: boolean
+      required: true
+      default: false
+      description: >
+        \"true\" indicates that the target deployment node for this instance will be auto-selected by ECOMP
+        \"false\" indicates operator-supplied instance target deployment node required (e.g. VID will present a prompt to operator and collect the
+        operator-selected target node for the deployment of this Network instance).
+    homing_policy:
+      type: string
+      required: false
+      description: Reference to a service level homing policy that ECOMP will use for instance deployment target node
+    instance_node_target:
+      type: string
+      required: false
+      description: Instance target deployment node
+
+org.openecomp.datatypes.EcompNaming:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    ecomp_generated_naming:
+      type: boolean
+      required: true
+      default: true
+      description: >
+        \"true\" indicates that the name for the instance will be auto-generated by ECOMP.
+        \"false\" indicates operator-supplied name required (e.g. VID will present prompt to operator and collect the operator-supplied instance name).
+    naming_policy:
+      type: string
+      required: false
+      description: Reference to naming policy that ECOMP will use when the name is auto-generated
+
+org.openecomp.datatypes.network.MacRequirements:
+  derived_from: org.openecomp.datatypes.Root
+  properties:
+    mac_range_plan:
+      type: string
+      required: true
+      description: reference to a MAC address range plan
+    mac_count:
+      type: integer
+      required: true
+      description: identifies the number of MAC addresses to assign to the CP from the plan
+
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairIp:
+  derived_from: tosca.datatypes.Root
+  description: Virtual Machine Sub Interface Address Pair IP.
+  properties:
+    ip_prefix:
+      type: string
+      description: IP Prefix.
+      required: false
+      status: SUPPORTED
+    ip_prefix_len:
+      type: integer
+      description: IP Prefix Len.
+      required: false
+      status: SUPPORTED
+
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.MacAddress:
+  derived_from: tosca.datatypes.Root
+  description: Virtual Machine Sub Interface Mac Address.
+  properties:
+    mac_address:
+      type: list
+      description: Mac Addresses List.
+      required: false
+      status: SUPPORTED
+      entry_schema:
+        type: string
+
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.Properties:
+  derived_from: tosca.datatypes.Root
+  description: Virtual Machine Sub Interface VLAN Properties.
+  properties:
+    sub_interface_vlan_tag:
+      type: string
+      description: Sub Interface VLAN Tag.
+      required: false
+      status: SUPPORTED
+
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair:
+  derived_from: tosca.datatypes.Root
+  description: Virtual Machine Sub Interface Address Pair.
+  properties:
+    address_mode:
+      type: string
+      description: Address Mode.
+      required: false
+      status: SUPPORTED
+    ip:
+      type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairIp
+      description: IP.
+      required: false
+      status: SUPPORTED
+    mac:
+      type: string
+      description: Mac.
+      required: false
+      status: SUPPORTED
+
+org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPairs:
+  derived_from: tosca.datatypes.Root
+  description: Virtual Machine Sub Interface Address Pairs.
+  properties:
+    allowed_address_pair:
+      type: list
+      description: Addresses pair List.
+      required: false
+      status: SUPPORTED
+      entry_schema:
+        type: org.openecomp.datatypes.heat.contrailV2.virtual.machine.subInterface.AddressPair
\ No newline at end of file
diff --git a/asdctool/src/main/resources/config/error-configuration.yaml b/asdctool/src/main/resources/config/error-configuration.yaml
index 5e5009e..d33876a 100644
--- a/asdctool/src/main/resources/config/error-configuration.yaml
+++ b/asdctool/src/main/resources/config/error-configuration.yaml
@@ -94,17 +94,17 @@
         messageId: "SVC4007"
     }
 #---------SVC4008----------------------------- 
-# %1 - Users's userId     
+# %1 - User's USER_ID     
     INVALID_USER_ID: {
         code: 400,
-        message: "Error: Invalid userId '%1'.",
+        message: "Error: Invalid USER_ID '%1'.",
         messageId: "SVC4008"
     }
 #---------SVC4049------------------------------ 
 # %1 - service/resource
     COMPONENT_MISSING_CONTACT: {
         code: 400,
-        message: "Error: Invalid Content. Missing %1 contact id.",
+        message: "Error: Invalid Content. Missing %1 Contact Id.",
         messageId: "SVC4049"
     } 
 #---------SVC4050----------------------------- 
@@ -245,9 +245,9 @@
 
 #---------SVC4069------------------------------ 
 # %1 - Service/Resource/Product
-    COMPONENT_INVALID_CONTACT_ID: {
+    COMPONENT_INVALID_CONTACT: {
         code: 400,
-        message: "Error: Invalid Content. %1 contact id should be in format 'mnnnnnn' or 'aannna' or 'aannnn', where m=m ,a=a-zA-Z and n=0-9",
+        message: "Error: Invalid Content. %1 Contact Id should be in format 'mnnnnnn' or 'aannna' or 'aannnn', where m=m ,a=a-zA-Z and n=0-9",
         messageId: "SVC4069"
     }
 #---------SVC4070------------------------------ 
@@ -511,7 +511,7 @@
 #-----------SVC4130---------------------------
     INVALID_PROJECT_CODE: {
         code: 400,
-        message: "Error: Invalid Content. PROJECT_CODE number must be numeric from 5 up to 10 digits.",
+        message: "Error: Invalid Content. PROJECT_CODE must be from 3 up to 50 characters.",
         messageId: "SVC4130"
     }
 #-----------SVC4131---------------------------
@@ -1060,7 +1060,7 @@
         messageId: "SVC4567"
     }
 #---------SVC4567------------------------------ 
-# %1 - "User Name and UserId"   
+# %1 - "User Name and USER_ID"   
 # %2  -"checked-out"/"in-certification"
     CANNOT_DELETE_USER_WITH_ACTIVE_ELEMENTS: {
         code: 409,
@@ -1068,7 +1068,7 @@
         messageId: "SVC4567"
     }
 #---------SVC4568------------------------------ 
-# %1 - "User Name and UserId"   
+# %1 - "User Name and USER_ID"   
 # %2  -"checked-out"/"in-certification"
     CANNOT_UPDATE_USER_WITH_ACTIVE_ELEMENTS: {
         code: 409,
@@ -1144,7 +1144,7 @@
         messageId: "SVC4580"
     }
 #---------SVC4581------------------------------ 
-# %1 - userId
+# %1 - USER_ID
     INVALID_PRODUCT_CONTACT: {
         code: 400,
         message: "Error: Invalid content. User '%1' cannot be set as Product Contact.",
diff --git a/asdctool/src/main/resources/config/groupTypes.yml b/asdctool/src/main/resources/config/groupTypes.yml
new file mode 100644
index 0000000..c72dc88
--- /dev/null
+++ b/asdctool/src/main/resources/config/groupTypes.yml
@@ -0,0 +1,72 @@
+org.openecomp.groups.heat.HeatStack:
+  derived_from: tosca.groups.Root
+  description: Grouped all heat resources which are in the same heat stack
+  properties:
+    heat_file:
+      type: string
+      description: Heat file which associate to this group/heat stack
+      required: true
+      status: SUPPORTED
+    description:
+      type: string
+      description: group description
+      required: true
+      status: SUPPORTED      
+org.openecomp.groups.VfModule:
+  derived_from: tosca.groups.Root
+  description: Grouped all heat resources which are in the same VF Module
+  properties:
+    isBase:
+      type: boolean
+      description: Whether this module should be deployed before other modules
+      required: true
+      default: false
+      status: SUPPORTED
+    vf_module_label: 
+      type: string
+      required: true
+      description: > 
+        Alternate textual key used to reference this VF-Module model. 
+        Must be unique within the VNF model
+    vf_module_description:
+      type: string
+      required: true
+      description: >
+        Description of the VF-modules contents and purpose  
+        (e.g. "Front-End" or "Database Cluster")
+    min_vf_module_instances:
+      type: integer
+      required: true
+      description: The minimum instances of this VF-Module
+    max_vf_module_instances: 
+      type: integer
+      required: false
+      description: The maximum instances of this VF-Module
+    initial_count:
+      type: integer
+      required: false
+      description: >
+        The initial count of instances of the VF-Module. The value must be in the 
+        range between min_vf_module_instances and max_vf_module_instances.
+        If no value provided the initial count is the min_vf_module_instances.
+    vf_module_type:
+      type: string
+      required: true
+      constraints:
+        - valid_values: ["Base", "Expansion"]
+    volume_group:
+      type: boolean
+      required: true
+      default: false
+      description: >
+        "true" indicates that this VF Module model requires attachment to a Volume  
+        Group. 
+        VID operator must select the Volume Group instance to attach to a VF-Module 
+        at deployment time.
+      
+tosca.groups.Root:
+  description: The TOSCA Group Type all other TOSCA Group Types derive from
+  interfaces: 
+    Standard:
+      type: tosca.interfaces.node.lifecycle.Standard
+
diff --git a/asdctool/src/main/resources/config/logback.xml b/asdctool/src/main/resources/config/logback.xml
index 298587b..87795ae 100644
--- a/asdctool/src/main/resources/config/logback.xml
+++ b/asdctool/src/main/resources/config/logback.xml
@@ -36,9 +36,9 @@
 		</encoder>
 	</appender>
 
-	<root level="DEBUG">
+	<root level="INFO">
 		<appender-ref ref="DEBUG_ROLLING" />
 	</root>
-	<logger name="org.openecomp.sdc" level="DEBUG" />
+	<logger name="org.openecomp.sdc" level="INFO" />
 
 </configuration>
\ No newline at end of file
diff --git a/asdctool/src/main/resources/config/titan-migration.properties b/asdctool/src/main/resources/config/titan-migration.properties
new file mode 100644
index 0000000..e982c2d
--- /dev/null
+++ b/asdctool/src/main/resources/config/titan-migration.properties
@@ -0,0 +1,4 @@
+storage.backend=cassandra
+storage.hostname=localhost
+storage.port=9160
+storage.cassandra.keyspace=sdctitan
diff --git a/asdctool/src/main/resources/config/titan.properties b/asdctool/src/main/resources/config/titan.properties
index bfb0ebc..e9daff7 100644
--- a/asdctool/src/main/resources/config/titan.properties
+++ b/asdctool/src/main/resources/config/titan.properties
@@ -1,3 +1,4 @@
 storage.backend=cassandra
 storage.hostname=localhost
 storage.port=9160
+storage.cassandra.keyspace=titan
\ No newline at end of file
diff --git a/asdctool/src/main/resources/scripts/dataMigration.sh b/asdctool/src/main/resources/scripts/dataMigration.sh
index b05c6dc..e1f4f24 100644
--- a/asdctool/src/main/resources/scripts/dataMigration.sh
+++ b/asdctool/src/main/resources/scripts/dataMigration.sh
@@ -5,7 +5,7 @@
 ##############################
 
 CURRENT_DIR=`pwd`
-BASEDIR=$(dirname $0)
+BASEDIR=$(dirname $0)
 
 if [ ${BASEDIR:0:1} = "/" ]
 then
@@ -18,8 +18,31 @@
 
 mainClass="org.openecomp.sdc.asdctool.main.MigrationMenu"
 
-command="java $JVM_LOG_FILE -cp $JARS $mainClass migrate-1602-1604 $@"
-echo $command
+case  $1 in
+	1604) 
+		command="java $JVM_LOG_FILE -cp $JARS $mainClass migrate-1602-1604 $@"
+		echo $command
+		;;
+	1607)
+		command="sh ./dataMigration1607.sh $@"
+		echo $command
+		;;
+	1610)
+		command="sh ./dataMigration1610.sh $@"
+		echo $command
+		;;
+	1702)
+		command="sh ./dataMigration1702.sh $@"
+		echo $command
+		;;
+	1707)
+    	command="sh ./dataMigration1707.sh $@"
+        echo $command
+        ;;
+	*)
+		echo "No migration for this version $1"
+		;;
+esac
 
 $command
 result=$?
diff --git a/asdctool/src/main/resources/scripts/dataMigration1702.sh b/asdctool/src/main/resources/scripts/dataMigration1702.sh
new file mode 100644
index 0000000..f2bf56e
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/dataMigration1702.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+##############################
+# Data Migration 1702
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+                FULL_PATH=$BASEDIR
+else
+                FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.MigrationMenu"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass migrate-1610-1702 $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/dataMigration1707.sh b/asdctool/src/main/resources/scripts/dataMigration1707.sh
new file mode 100644
index 0000000..e67de2a
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/dataMigration1707.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+##############################
+# Data Migration 1707
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+                FULL_PATH=$BASEDIR
+else
+                FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.MigrationMenu"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass migrate-1702-1707 $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/sdcSchemaFileImport.sh b/asdctool/src/main/resources/scripts/sdcSchemaFileImport.sh
new file mode 100644
index 0000000..4002e38
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/sdcSchemaFileImport.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+##############################
+#   Sdc Schema File Import   #
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+                FULL_PATH=$BASEDIR
+else
+                FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.SdcSchemaFileImport"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass $@"
+echo $command
+
+$command
+result=$?
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/titanSchemaCreation.sh b/asdctool/src/main/resources/scripts/titanSchemaCreation.sh
new file mode 100644
index 0000000..8a2488b
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/titanSchemaCreation.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+##############################
+# Titan Schema Creation
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+                FULL_PATH=$BASEDIR
+else
+                FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.DataSchemaMenu"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass create-titan-structures $@"
+echo $command
+
+$command
+result=$?
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/main/resources/scripts/vfModulePropertiesAdding.sh b/asdctool/src/main/resources/scripts/vfModulePropertiesAdding.sh
new file mode 100644
index 0000000..1ead248
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/vfModulePropertiesAdding.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+##############################
+# Data Migration: Adding new properties to vfModules
+##############################
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+                FULL_PATH=$BASEDIR
+else
+                FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.MigrationMenu"
+
+command="java $JVM_LOG_FILE -cp $JARS $mainClass vfModules-properties-adding $@"
+echo $command
+
+$command
+result=$?
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java
new file mode 100644
index 0000000..ae23b73
--- /dev/null
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/AppTest.java
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.openecomp.sdc.asdctool;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test for simple App.
+ */
+public class AppTest extends TestCase {
+	/**
+	 * Create the test case
+	 *
+	 * @param testName
+	 *            name of the test case
+	 */
+	public AppTest(String testName) {
+		super(testName);
+	}
+
+	/**
+	 * @return the suite of tests being tested
+	 */
+	public static Test suite() {
+		return new TestSuite(AppTest.class);
+	}
+
+	/**
+	 * Rigourous Test :-)
+	 */
+	public void testApp() {
+		assertTrue(true);
+	}
+}