Import feature is ignoring multiple imports.

Merged SDC-666 and SDC-668 as the two changes are interdependent.

Change-Id: Idd4f67724d03bad79bab4a39b75a8145658ef8b9
Issue-ID: SDC-666
Signed-off-by: priyanshu <pagarwal@amdocs.com>
diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java
index 5e94378..b2a0da7 100644
--- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java
+++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java
@@ -28,6 +28,7 @@
 	private ArrayList<String> typeDefinitionList;
 	
 	private LinkedHashMap<String,Object> customDefs;
+	private LinkedHashMap<String,Object> allCustomDefs;
 	private ArrayList<LinkedHashMap<String,Object>> nestedToscaTpls;
 	private LinkedHashMap<String,Object> repositories;
 
@@ -39,6 +40,7 @@
 		
 	        this.importslist = _importslist;
 	        customDefs = new LinkedHashMap<String,Object>();
+			allCustomDefs = new LinkedHashMap<String,Object>();
 	        nestedToscaTpls = new ArrayList<LinkedHashMap<String,Object>>();
 	        if((_path == null || _path.isEmpty()) && tpl == null) {
 	            //msg = _('Input tosca template is not provided.')
@@ -65,7 +67,7 @@
 	}
 
 	public LinkedHashMap<String,Object> getCustomDefs() {
-	        return customDefs;
+	        return allCustomDefs;
 	}
 
     public ArrayList<LinkedHashMap<String,Object>> getNestedToscaTpls() {
@@ -131,33 +133,50 @@
     	}
     }
 
-    @SuppressWarnings("unchecked")
+	/**
+	 * This method consolidates custom definitions by merging the custom types from
+	 * each import. The resulting collection, which contains the definitions from all
+	 * imports, is accumulated and later returned via getCustomDefs().
+	 *
+	 * @param customType      the custom type
+	 * @param namespacePrefix the namespace prefix
+	 */
+	@SuppressWarnings("unchecked")
 	private void _updateCustomDefs(LinkedHashMap<String,Object> customType, String namespacePrefix) {
-    	LinkedHashMap<String,Object> outerCustomTypes;// = new LinkedHashMap<String,Object>();
-    	for(String typeDef: typeDefinitionList) {
-    		if(typeDef.equals("imports")) {
-    			// imports are ArrayList...
-    			customDefs.put("imports",(ArrayList<Object>)customType.get(typeDef));
-    		}
-    		else {
-	    		outerCustomTypes = (LinkedHashMap<String,Object>)customType.get(typeDef);
-	    		if(outerCustomTypes != null) {
-    				if(namespacePrefix != null && !namespacePrefix.isEmpty()) {
-    			    	LinkedHashMap<String,Object> prefixCustomTypes = new LinkedHashMap<String,Object>();
-    	    			for(Map.Entry<String,Object> me: outerCustomTypes.entrySet()) {
-    	    				String typeDefKey = me.getKey();
-    	    				String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey;
-    	    				prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey));
-    	    			}
-    	    			customDefs.putAll(prefixCustomTypes);
-    				}
-    				else {
-    	    			customDefs.putAll(outerCustomTypes);
-    				}
-	    		}
-    		}
-    	}
-    }
+		LinkedHashMap<String,Object> outerCustomTypes;
+		for(String typeDef: typeDefinitionList) {
+			if(typeDef.equals("imports")) {
+				customDefs.put("imports", customType.get(typeDef));
+				if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){
+					allCustomDefs.put("imports",customType.get(typeDef));
+				}
+				else if (customType.get(typeDef) != null){
+					Set<Object> allCustomImports = new HashSet<>((ArrayList<Object>)allCustomDefs.get("imports"));
+					allCustomImports.addAll((ArrayList<Object>) customType.get(typeDef));
+					allCustomDefs.put("imports", new ArrayList<>(allCustomImports));
+				}
+			}
+			else {
+				outerCustomTypes = (LinkedHashMap<String,Object>)customType.get(typeDef);
+				if(outerCustomTypes != null) {
+					if(namespacePrefix != null && !namespacePrefix.isEmpty()) {
+						LinkedHashMap<String,Object> prefixCustomTypes = new LinkedHashMap<String,Object>();
+						for(Map.Entry<String,Object> me: outerCustomTypes.entrySet()) {
+							String typeDefKey = me.getKey();
+							String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey;
+							prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey));
+						}
+						customDefs.putAll(prefixCustomTypes);
+						allCustomDefs.putAll(prefixCustomTypes);
+					}
+					else {
+						customDefs.putAll(outerCustomTypes);
+						allCustomDefs.putAll(outerCustomTypes);
+					}
+				}
+			}
+		}
+	}
 
     private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap<String,Object> customTpl) {
     	if(fullFileName != null && customTpl != null) {
diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java
index 4c19be6..e96ca56 100644
--- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java
+++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java
@@ -9,6 +9,9 @@
 import java.io.InputStream;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
+import java.nio.file.Files;
+import java.util.function.Predicate;
+import java.nio.file.Paths;
 
 import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector;
 import org.openecomp.sdc.toscaparser.api.common.JToscaException;
@@ -70,6 +73,7 @@
 	private boolean isFile;
 	private String path;
 	private String inputPath;
+	private String rootPath;
 	private LinkedHashMap<String,Object> parsedParams;
 	private boolean resolveGetInput;
 	private LinkedHashMap<String,Object> tpl;
@@ -91,6 +95,7 @@
     private String csarTempDir;
     private int nestingLoopCounter;
 	private LinkedHashMap<String, LinkedHashMap<String, Object>> metaProperties;
+	private Set<String> processedImports;
 
 	public ToscaTemplate(String _path,
 						LinkedHashMap<String,Object> _parsedParams,
@@ -193,6 +198,9 @@
         if(tpl != null) {
             parsedParams = _parsedParams;
             _validateField();
+            this.rootPath = path;
+            this.processedImports = new HashSet<String>();
+            this.imports = _tplImports();
             this.version = _tplVersion();
             this.metaData = _tplMetaData();
             this.relationshipTypes = _tplRelationshipTypes();
@@ -305,30 +313,200 @@
 	private ArrayList<Policy> _policies() {
 		return topologyTemplate.getPolicies();
 	}
-	
-	private LinkedHashMap<String,Object> _getAllCustomDefs(ArrayList<Object> alImports) {
-		
+
+	/**
+	 * This method gets the consolidated custom definitions from all imports.
+	 * It is logically divided into two parts to handle the two import formats: map and list.
+	 * Before processing the imports, it sorts them to make sure that imports from the current
+	 * directory are processed first, followed by the others. Once sorted, it processes each
+	 * import one by one in a recursive manner.
+	 * To avoid cyclic dependencies among imports, this method uses a set to keep track of all
+	 * imports already processed and filters out imports that occur more than once.
+	 *
+	 * @param alImports all imports which needs to be processed
+	 * @return the linked hash map containing all import definitions
+	 */
+	private LinkedHashMap<String,Object> _getAllCustomDefs(Object alImports) {
+
 		String types[] = {
-			IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, 
-			DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES
+				IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES,
+				DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES
 		};
-		LinkedHashMap<String,Object> customDefsFinal = new LinkedHashMap<String,Object>(); 
-		LinkedHashMap<String,Object> customDefs = _getCustomTypes(types,alImports);
-		if(customDefs != null) {
-			customDefsFinal.putAll(customDefs);
-			if(customDefs.get(IMPORTS) != null) {
-				@SuppressWarnings("unchecked")
-				LinkedHashMap<String,Object> importDefs = _getAllCustomDefs((ArrayList<Object>)customDefs.get(IMPORTS));
-				customDefsFinal.putAll(importDefs);
+		LinkedHashMap<String,Object> customDefsFinal = new LinkedHashMap<>();
+
+		List<Map<String, Object>> imports = (List<Map<String, Object>>) alImports;
+		if (imports != null && !imports.isEmpty()) {
+			if (imports.get(0) instanceof LinkedHashMap) {
+				imports = sortImports(imports);
+
+				for (Map<String, Object> map : imports) {
+					List<Map<String, Object>> singleImportList = new ArrayList();
+					singleImportList.add(map);
+
+					Map<String, String> importNameDetails = getValidFileNameForImportReference(singleImportList);
+					singleImportList = filterImportsForRecursion(singleImportList, importNameDetails);
+
+					if(!singleImportList.get(0).isEmpty()){
+						LinkedHashMap<String, Object> customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList));
+						processedImports.add(importNameDetails.get("importFileName"));
+
+						if (customDefs != null) {
+							customDefsFinal.putAll(customDefs);
+
+							if (customDefs.get(IMPORTS) != null) {
+								resetPathForRecursiveImports(importNameDetails.get("importRelativeName"));
+								LinkedHashMap<String, Object> importDefs = _getAllCustomDefs(customDefs.get(IMPORTS));
+								customDefsFinal.putAll(importDefs);
+							}
+						}
+					}
+				}
+			} else {
+				LinkedHashMap<String, Object> customDefs = _getCustomTypes(types, new ArrayList<>(imports));
+				if (customDefs != null) {
+					customDefsFinal.putAll(customDefs);
+
+					if (customDefs.get(IMPORTS) != null) {
+						LinkedHashMap<String, Object> importDefs = _getAllCustomDefs(customDefs.get(IMPORTS));
+						customDefsFinal.putAll(importDefs);
+					}
+				}
 			}
 		}
-		
-        // As imports are not custom_types, remove from the dict
-        customDefsFinal.remove(IMPORTS);
+
+		// As imports are not custom_types, remove from the dict
+		customDefsFinal.remove(IMPORTS);
 
 		return customDefsFinal;
 	}
 
+	/**
+	 * This method is used to sort the imports in order so that same directory
+	 * imports will be processed first
+	 *
+	 * @param customImports the custom imports
+	 * @return the sorted list of imports
+	 */
+	private List<Map<String, Object>> sortImports(List<Map<String, Object>> customImports){
+		List<Map<String, Object>> finalList1 = new ArrayList<>();
+		List<Map<String, Object>> finalList2 = new ArrayList<>();
+		Iterator<Map<String, Object>> itr = customImports.iterator();
+		while(itr.hasNext()) {
+			Map innerMap = itr.next();
+			if (innerMap.toString().contains("../")) {
+				finalList2.add(innerMap);
+				itr.remove();
+			}
+			else if (innerMap.toString().contains("/")) {
+				finalList1.add(innerMap);
+				itr.remove();
+			}
+		}
+
+		customImports.addAll(finalList1);
+		customImports.addAll(finalList2);
+		return customImports;
+	}
+
+	/**
+	 * This method resets the path variable after processing of the current import file is done.
+	 * This is required because the imports present in files are specified as relative paths.
+	 *
+	 * @param currImportRelativeName the current import relative name
+	 */
+	private void resetPathForRecursiveImports(String currImportRelativeName){
+		path = getPath(path, currImportRelativeName);
+	}
+
+	/**
+	 * This is a recursive method which starts from the current import and then recursively
+	 * finds a valid path relative to the current import file name.
+	 * By doing this it handles arbitrarily nested hierarchies of imports defined in CSARs.
+	 *
+	 * @param path           the path
+	 * @param importFileName the import file name
+	 * @return the string containing updated path value
+	 */
+	private String getPath(String path, String importFileName){
+		String tempFullPath = (Paths.get(path).toAbsolutePath().getParent()
+				.toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/');
+		String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/');
+		if(Files.exists(Paths.get(tempFullPath)))
+			return tempFullPath;
+		else
+			return getPath(tempPartialPath, importFileName);
+	}
+
+	/**
+	 * This method is used to get the full path name for the file which needs to be processed.
+	 * It helps in situations where files reside in a different directory and are referenced
+	 * via relative paths.
+	 *
+	 * @param customImports the custom imports
+	 * @return the map containing import file full and relative paths
+	 */
+	private Map<String, String> getValidFileNameForImportReference(List<Map<String, Object>>
+																																		 customImports){
+		String importFileName;
+		Map<String, String> retMap = new HashMap<>();
+		for (Map<String, Object> map1 : customImports) {
+			for (Map.Entry<String, Object> entry : map1.entrySet()) {
+				Map innerMostMap = (Map) entry.getValue();
+				Iterator<Map.Entry<String, String>> it = innerMostMap.entrySet().iterator();
+				while (it.hasNext()) {
+					Map.Entry<String, String> val = it.next();
+					if(val.getValue().contains("/")){
+						importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File
+								.separator + val.getValue().replace("../", "")).replace('\\', '/');
+					}
+					else {
+						importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File
+								.separator + val.getValue().replace("../", "")).replace('\\', '/');
+					}
+					retMap.put("importFileName", importFileName);
+					retMap.put("importRelativeName", val.getValue());
+				}
+			}
+		}
+		return retMap;
+	}
+
+	/**
+	 * This method is used to filter out the imports which were already processed in a previous
+	 * step. It handles the use case of cyclic dependencies in imports, which would otherwise
+	 * cause a StackOverflowError.
+	 *
+	 * @param customImports     the custom imports
+	 * @param importNameDetails the import name details
+	 * @return the list containing filtered imports
+	 */
+	private List<Map<String, Object>> filterImportsForRecursion(List<Map<String, Object>>
+																																	customImports, Map<String,
+			String> importNameDetails){
+		for (Map<String, Object> map1 : customImports) {
+			for (Map.Entry<String, Object> entry : map1.entrySet()) {
+				Map innerMostMap = (Map) entry.getValue();
+				Iterator<Map.Entry<String, String>> it = innerMostMap.entrySet().iterator();
+				while (it.hasNext()) {
+					it.next();
+					if (processedImports.contains(importNameDetails.get("importFileName"))) {
+						it.remove();
+					}
+				}
+			}
+		}
+
+		// Remove Empty elements
+		Iterator<Map<String, Object>> itr = customImports.iterator();
+		while(itr.hasNext()) {
+			Map innerMap = itr.next();
+			Predicate<Map> predicate = p-> p.values().isEmpty();
+			innerMap.values().removeIf(predicate);
+		}
+
+		return customImports;
+	}
+
 	@SuppressWarnings("unchecked")
 	private LinkedHashMap<String,Object> _getCustomTypes(Object typeDefinitions,ArrayList<Object> alImports) {
 		
@@ -396,6 +574,8 @@
 			log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting");
 			return;
 		}
+		// Reset Processed Imports for nested templates
+		this.processedImports = new HashSet<>();
 		for(Map.Entry<String,Object> me: nestedToscaTplsWithTopology.entrySet()) {
 			String fname = me.getKey();
 			LinkedHashMap<String,Object> toscaTpl = 
diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java
new file mode 100644
index 0000000..c8a30fa
--- /dev/null
+++ b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java
@@ -0,0 +1,64 @@
+package org.openecomp.sdc.toscaparser.api;
+
+import org.junit.Test;
+import org.openecomp.sdc.toscaparser.api.common.JToscaException;
+import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.assertEquals;
+
+public class JToscaImportTest {
+
+    @Test
+    public void testNoMissingTypeValidationError() throws JToscaException {
+        String fileStr = JToscaImportTest.class.getClassLoader().getResource
+            ("csars/sdc-onboarding_csar.csar").getFile();
+        File file = new File(fileStr);
+        new ToscaTemplate(file.getAbsolutePath(), null, true, null);
+        List<String> missingTypeErrors = ThreadLocalsHolder.getCollector()
+            .getValidationIssueReport()
+            .stream()
+            .filter(s -> s.contains("JE136"))
+            .collect(Collectors.toList());
+        assertEquals(0, missingTypeErrors.size());
+    }
+
+    @Test
+    public void testNoStackOverFlowError() {
+        Exception jte = null;
+        try {
+            String fileStr = JToscaImportTest.class.getClassLoader().getResource
+                ("csars/sdc-onboarding_csar.csar").getFile();
+            File file = new File(fileStr);
+            new ToscaTemplate(file.getAbsolutePath(), null, true, null);
+        } catch (Exception e){
+            jte = e;
+        }
+        assertEquals(null, jte);
+    }
+
+  @Test
+  public void testNoInvalidImports() throws JToscaException {
+    List<String> fileNames = new ArrayList<>();
+    fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar");
+    fileNames.add("csars/sdc-onboarding_csar.csar");
+    fileNames.add("csars/resource-Spgw-csar-ZTE.csar");
+
+    for (String fileName : fileNames) {
+      String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile();
+      File file = new File(fileStr);
+      new ToscaTemplate(file.getAbsolutePath(), null, true, null);
+      List<String> invalidImportErrors = ThreadLocalsHolder.getCollector()
+          .getValidationIssueReport()
+          .stream()
+          .filter(s -> s.contains("JE195"))
+          .collect(Collectors.toList());
+      assertEquals(0, invalidImportErrors.size());
+    }
+  }
+
+}
diff --git a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar
new file mode 100644
index 0000000..58c3ddd
--- /dev/null
+++ b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar
Binary files differ
diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar
new file mode 100644
index 0000000..e1c3267
--- /dev/null
+++ b/src/test/resources/csars/sdc-onboarding_csar.csar
Binary files differ