DCAE-D tosca-lab initial commit

DCAE-D tosca-lab initial commit

Change-Id: Ia42934ce7c75abe05fa106585c9334c8b048ee36
Issue-ID: SDC-1218
Signed-off-by: Stone, Avi (as206k) <as206k@att.com>
diff --git a/app/README.md b/app/README.md
new file mode 100644
index 0000000..e1bf2e4
--- /dev/null
+++ b/app/README.md
@@ -0,0 +1,9 @@
+TOSCA Lab
+
The goal is to make TOSCA Lab something like MATLAB: a tool set to create/modify/connect/translate TOSCA templates.
+
+It is also my first Python project for coding practice. 
+
+Version: 
+0.1 -- The original version was created to write everything from scratch. It is able to read YAML files following TOSCA standard and do TOSCA-to-HEAT translation provided with the translation library. 
+0.2 -- I will try to integrate with the existing open source project tosca-parser. In that case, I can make full use of the existing parser and focus more on more features.  The tosca-parser version I am working with is 0.4.0
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..53da910
--- /dev/null
+++ b/app/__init__.py
@@ -0,0 +1 @@
+from app.version import __version__
\ No newline at end of file
diff --git a/app/map_sup_enrich_compose.py b/app/map_sup_enrich_compose.py
new file mode 100644
index 0000000..815a258
--- /dev/null
+++ b/app/map_sup_enrich_compose.py
@@ -0,0 +1,99 @@
#Author: Shu Shi
#email: shushi@research.att.com
+
+from toscalib.tosca_workbook import ToscaWorkBook
+from toscalib.tosca_builder import ToscaBuilder
+
+import getopt, sys, json, logging
+
def usage():
    """Print the command-line help for this tool to stdout."""
    help_lines = (
        'OPTIONS:',
        '\t-h|--help: print this help message',
        '\t-i|--input: The home folder where all spec files are',
        '\t-o|--output: the output file name',
        '\t-v|--value: the json value file',
    )
    for help_line in help_lines:
        print(help_line)
+
+    
def main():
    """Build map/enrich/supplement TOSCA models from spec files and compose
    them into one event-proc blueprint.

    Command-line options (see usage()): -i spec home folder (required),
    -o output blueprint file (required), -v optional JSON file holding
    per-component property values to assign.

    Exits with status 2 on bad arguments.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:v:", ["help", "input=", "output=", "value="])
    except getopt.GetoptError as err:
        # print help information and exit:
        logging.error(str(err))  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)

    spec_prefix = None
    output_file = None
    value_file = None

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--input"):
            spec_prefix = a
        elif o in ("-o", "--output"):
            output_file = a
        elif o in ("-v", "--value"):
            value_file = a
        else:
            logging.error('Unrecognized option: ' + o)
            usage()
            sys.exit(2)

    if spec_prefix is None or output_file is None:
        logging.error('Incorrect arguments!')
        usage()
        sys.exit(2)

    model_prefix = './data/tosca_model'
    meta_model = './data/meta_model/meta_tosca_schema.yaml'

    # Generate schema/template/translate artifacts for each microservice.
    for ms in ['map', 'enrich', 'supplement']:

        builder = ToscaBuilder()

        builder.import_schema(meta_model)
        # NOTE(review): the '\\' separator is Windows-specific -- confirm
        # whether this should be '/' (or os.path.join) for other platforms.
        builder.import_spec(spec_prefix + '/dcae-event-proc/dcae-event-proc-cdap-' + ms + '\\' + ms + '_spec.json')
        builder.create_node_type()
        builder.export_schema(model_prefix + '/' + ms + '/schema.yaml')
        builder.import_schema(model_prefix + '/' + ms + '/schema.yaml')
        builder.create_model(ms)
        builder.export_model(model_prefix + '/' + ms + '/template.yaml')
        builder.create_translate(ms)
        builder.export_translation(model_prefix + '/' + ms + '/translate.yaml')

    # Compose the three generated models plus the shared models.
    workbook = ToscaWorkBook()

    workbook._import_dir(model_prefix)
    workbook._import_dir('./data/shared_model/')
    workbook._use('map', 'NO_PREFIX')
    workbook._use('supplement', 'NO_PREFIX')
    workbook._use('enrich', 'NO_PREFIX')

    if value_file is not None:
        # BUG FIX: the original 'except err :' referenced an undefined name
        # and would raise NameError instead of reporting the failure.
        try:
            with open(value_file) as data_file:
                data = json.load(data_file)
                for ms in ['map', 'enrich', 'supplement']:
                    if ms in data:
                        prop_sec = data[ms]
                        for key in prop_sec.keys():
                            workbook._assign(ms, key, prop_sec[key])
        except (IOError, ValueError) as err:
            logging.error("Unable to read " + value_file)
            logging.error(str(err))
    workbook._add_shared_node([{'dcae.capabilities.cdapHost': 'cdap_host'}, {'dcae.capabilities.dockerHost': 'docker_host'}, {'dcae.capabilities.composition.host': 'composition_virtual'}])

    # Wire the data streams: map -> supplement -> enrich.
    workbook._assign('supplement', 'stream_publish_0', 'map')
    workbook._assign('enrich', 'stream_publish_0', 'supplement')

    # NOTE(review): reuses the imported model db as the translation db --
    # confirm this is intended rather than calling _load_translation_db().
    workbook.tran_db = workbook.db

    workbook._export_yaml('event_proc.yaml', 'no_expand,main')
    workbook._export_yaml(output_file, 'cloudify,main')
+    
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/app/model_create.py b/app/model_create.py
new file mode 100644
index 0000000..97e6ff4
--- /dev/null
+++ b/app/model_create.py
@@ -0,0 +1,100 @@
#Author: Shu Shi
#email: shushi@research.att.com
+
+from toscalib.tosca_workbook import ToscaWorkBook
+from toscalib.tosca_builder import ToscaBuilder
+
+import getopt, sys, json, os, base64, logging
+
def usage():
    """Print the command-line help for this tool to stdout."""
    print('OPTIONS:')
    print('\t-h|--help: print this help message')
    print('\t-i|--input: The PATH to spec file')
    print('\t-o|--output: the folder for the output model ')
    print('\t-n|--name: the name of the service')
    print('\t-t|--import: the PATH to import file')
    # BUG FIX: closed the parenthesis the original help text left open.
    print('\t-m|--meta: the PATH to meta model file (default: ./data/meta_model/meta_tosca_schema.yaml)')
+
+    
def main():
    """Generate TOSCA schema/template/translate model files from a spec.

    Reads the component spec from a file path, or from stdin when the input
    argument is 'stdin' or '-'.  In the stdin case the three generated
    artifacts are also printed to stdout as a base64-encoded JSON object.

    Exits with status 2 on bad arguments.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:n:t:m:", ["help", "input=", "output=", "name=", "import=", "meta="])
    except getopt.GetoptError as err:
        # print help information and exit:
        logging.error(str(err))  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)

    spec_file = None
    output_file = './data/tosca_model/temp'
    name = None
    meta_model = './data/meta_model/meta_tosca_schema.yaml'
    import_file = None

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--input"):
            spec_file = a
        elif o in ("-o", "--output"):
            output_file = a
        elif o in ("-n", "--name"):
            name = a
        elif o in ("-t", "--import"):
            import_file = a
        elif o in ("-m", "--meta"):
            meta_model = a
        else:
            logging.error('Unrecognized option: ' + o)
            usage()
            sys.exit(2)

    if spec_file is None:
        logging.error('Incorrect arguments!')
        usage()
        sys.exit(2)

    if output_file is None:
        # NOTE(review): unreachable with the current non-None default for
        # output_file; kept in case the default ever becomes None.
        model_prefix = './data/tosca_model'
    else:
        filename = output_file + '/schema.yaml'
        dirname = os.path.dirname(filename)
        # BUG FIX: narrowed from a bare 'except:' -- only the OSError raised
        # when the directory is missing should trigger creation.
        try:
            os.stat(dirname)
        except OSError:
            os.mkdir(dirname)
        model_prefix = output_file

    builder = ToscaBuilder()

    builder.import_schema(meta_model)
    if spec_file in ['stdin', '-']:
        builder.import_spec_str(json.load(sys.stdin))
    else:
        builder.import_spec(spec_file)
    if import_file is not None:
        builder.import_import(import_file)

    # Default the service name to the one declared in the spec.
    if name is None:
        name = builder.spec_import.name

    builder.create_node_type(name)
    schema_str = builder.export_schema(model_prefix + '/schema.yaml')
    builder.import_schema(model_prefix + '/schema.yaml')
    builder.create_model(name)
    template_str = builder.export_model(model_prefix + '/template.yaml')
    builder.create_translate(name)
    translate_str = builder.export_translation(model_prefix + '/translate.yaml')

    if spec_file in ['stdin', '-']:
        # BUG FIX: base64.encodestring() was removed in Python 3.9 and, even
        # before that, required bytes input -- the original passed str and
        # would have produced bytes that json.dumps cannot serialize.
        ret = {}
        ret['schema'] = base64.encodebytes(schema_str.encode('utf-8')).decode('utf-8')
        ret['template'] = base64.encodebytes(template_str.encode('utf-8')).decode('utf-8')
        ret['translate'] = base64.encodebytes(translate_str.encode('utf-8')).decode('utf-8')

        print(json.dumps(ret))
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/app/policy_create.py b/app/policy_create.py
new file mode 100644
index 0000000..9e89157
--- /dev/null
+++ b/app/policy_create.py
@@ -0,0 +1,72 @@
#Author: Shu Shi
#email: shushi@research.att.com
+
+from toscalib.tosca_workbook import ToscaWorkBook
+from toscalib.tosca_builder import ToscaBuilder
+
+import getopt, sys, json, os, logging
+
def usage():
    """Print the command-line help for this tool to stdout."""
    help_lines = [
        'OPTIONS:',
        '\t-h|--help: print this help message',
        '\t-i|--input: The PATH to spec file',
        '\t-o|--output: the output file name',
        '\t-n|--name: the name of the service',
    ]
    for help_line in help_lines:
        print(help_line)
+    
def main():
    """Generate a TOSCA policy model file from a component spec.

    Command-line options (see usage()): -i spec file (required), -o output
    policy file (required), -n optional service name override.

    Exits with status 2 on bad arguments; returns without output when the
    spec declares no policy.
    """
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hi:o:n:", ["help", "input=", "output=", "name="])
    except getopt.GetoptError as err:
        logging.error(str(err))  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)

    spec_file = None
    output_file = None
    name = None

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-i", "--input"):
            spec_file = a
        elif o in ("-o", "--output"):
            output_file = a
        elif o in ("-n", "--name"):
            name = a
        else:
            logging.error('Unrecognized option: ' + o)
            usage()
            sys.exit(2)

    if spec_file is None or output_file is None:
        logging.error('Incorrect arguments!')
        usage()
        sys.exit(2)

    # Make sure the output directory exists before exporting into it.
    dirname = os.path.dirname(output_file)

    if dirname is not None and len(dirname) > 0:
        # BUG FIX: narrowed from a bare 'except:' -- only the OSError raised
        # when the directory is missing should trigger creation.
        try:
            os.stat(dirname)
        except OSError:
            os.mkdir(dirname)

    meta_model = './data/meta_model/meta_policy_schema.yaml'

    builder = ToscaBuilder()

    builder.import_schema(meta_model)
    builder.import_spec(spec_file)
    # NOTE(review): 'name' is resolved here but never passed to
    # create_policy() -- confirm whether it should be.
    if name is None:
        name = builder.spec_import.name
    if builder._using_policy() is False:
        logging.warning('NO policy is defined in the spec')
        return
    builder.create_policy()
    builder.export_policy(output_file)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/app/requirements.txt b/app/requirements.txt
new file mode 100644
index 0000000..cb7ff9f
--- /dev/null
+++ b/app/requirements.txt
@@ -0,0 +1,11 @@
+PyYAML==3.11
+argparse==1.4.0
+dateutils==0.6.6
+python-dateutil==2.5.3
+pytz==2016.6.1
+requests==2.13.0
+six==1.10.0
+virtualenv==12.1.1
+utils
+web.py==0.40.dev0
+
diff --git a/app/simple_compose.py b/app/simple_compose.py
new file mode 100644
index 0000000..8dc9561
--- /dev/null
+++ b/app/simple_compose.py
@@ -0,0 +1,24 @@
#Author: Shu Shi
#email: shushi@research.att.com

# Driver script: composes the 'foi' model with the shared models and emits
# both a raw TOSCA template and a Cloudify blueprint.  Statements are
# order-dependent side effects on the workbook, so it runs top to bottom.

from toscalib.tosca_workbook import ToscaWorkBook

workbook = ToscaWorkBook()

# Load all model definitions from disk into the workbook.
workbook._import_dir('./data/tosca_model/')
workbook._import_dir('./data/shared_model/')

# Instantiate the 'foi' model and fill in its policy id placeholder.
workbook._use('foi', 'NO_PREFIX')
workbook._assign('policy_0', 'policy_id', 'something_filled_by_CLAMP')

# Intermediate export of the un-expanded composed template (for inspection).
workbook._export_yaml('test_template.yaml', 'noexpand,main,rawfunc')

workbook._add_shared_node([{'dcae.capabilities.dockerHost': 'docker_host'}, {'dcae.capabilities.composition.host': 'composition_virtual'}])

# Load the translation rules, then emit the final Cloudify blueprint.
workbook._load_translation_db('./data/tosca_model/')
workbook._load_translation_db('./data/shared_model/')

workbook._export_yaml('./data/blueprint/foi.yaml', 'cloudify,main')
#workbook._export_yaml('test_template2.yaml', 'noexpand,main,rawfunc')
diff --git a/app/tosca_server.py b/app/tosca_server.py
new file mode 100644
index 0000000..0836d3a
--- /dev/null
+++ b/app/tosca_server.py
@@ -0,0 +1,532 @@
+'''
+Created on Apr 8, 2016
+
+@author: Shu Shi
+'''
+#!/usr/bin/env python
+import web
+import json, os, sys
+import base64
+from toscalib.tosca_workbook import ToscaWorkBook
+from toscalib.tosca_builder import ToscaBuilder
+from toscalib.templates.database import ToscaDB
+from version import __version__
+
+
+# class fe_get_itembyid:
+#     def GET(self):
+#         item_id = web.input()
+#         print( 'get_itembyid is called with input: ' + str(item_id))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})
+#     
+# class fe_get_template:
+#     def GET(self):
+#         temp_id = web.input()
+#         print( 'get_template is called with input: ' + str(temp_id))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})    
+#     
+# class fe_get_type:
+#     def GET(self):
+#         type_name = web.input()
+#         print( 'get_type is called with input: ' + str(type_name))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})    
+#     
+# class fe_get_compositioncreate:
+#     def GET(self):
+#         webinput = web.input(cid='unknown_cid')
+#         print( 'get_compositioncreate is called with input: ' + str(webinput))
+#         cid = webinput.cid
+#         
+#         workbook_db = ToscaDB()
+#         
+#         if cid not in workbook_db:
+#             workbook_db[cid] = ToscaWorkBook() 
+# 
+#         ret_json = workbook_db[cid].toJson()
+#         ret_json['cid'] = cid
+#         
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         
+#         print( 'get_compositioncreate returns:' + ret_json)
+#         return json.dumps(ret_json)    
+# 
+# class fe_get_ice:
+#     def GET(self):
+#         input_list = web.input()
+#         print( 'get_ice is called with input: ' + str(input_list))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})        
+#     
+# class fe_post_compimg:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_compimg is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_compimg input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})        
+#     
+# class fe_post_composition_commit:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_commit is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_commit input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+#     
+# class fe_post_composition_set_nodepolicies:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_set_nodepolicies is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_set_nodepolicies input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+#     
+# class fe_post_composition_add_node:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         webinput = web.input(cid='unknown_cid')
+#         print( 'post_composition_add_node is called with input: ' + str(webinput))
+#         cid = webinput.cid
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_add_node input json data: ' + str(in_data))
+#         
+#         workbook_db = ToscaDB()
+#         if cid not in workbook_db:
+#             workbook_db[cid] = ToscaWorkBook() 
+# 
+#         if 'type' in in_data:
+#             if 'name' in in_data['type']:
+#                 print( 'add node type: ' + in_data['type']['name'])
+#                 new_node = workbook_db[cid]._use(in_data['type']['name'])
+#                 new_node.fe_json = in_data
+#                 if 'nid' in in_data:
+#                     new_node.fe_nid = in_data['nid']
+#             else:
+#                 print( 'in_data has type but no name')
+#         else:
+#             print( 'in_data has no type')
+#             
+#         
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         
+#         return json.dumps(in_data)   
+# 
+# class fe_post_composition_update_nodes:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         webinput = web.input(cid='unknown_cid')
+#         print( 'post_composition_update_nodes is called with input: ' + str(webinput))
+#         cid = webinput.cid
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_update_nodes input json data: ' + str(in_data))
+#          
+#         workbook_db = ToscaDB()
+#        
+#         if cid not in workbook_db:
+#             workbook_db[cid] = ToscaWorkBook() 
+# 
+#         for in_item in in_data:
+#             if 'nid' in in_item :
+#                 for node in workbook_db[cid].template.node_dict.itervalues():
+#                     if node.fe_nid == in_data['nid']:
+#                         node.fe_json.update(in_item)
+#                         break
+#             else:
+#                 print( 'one item has no nid')
+# 
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+# 
+# class fe_post_composition_delete_node:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_delete_nodes is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_delete_nodes input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+# 
+# class fe_post_composition_add_relation:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_add_relation is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_add_relation input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+# 
+# class fe_post_composition_delete_relation:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_delete_relation is called with input: ' + str(cid))
+#         in_data = json.loads(web.data())
+#         print( 'post_composition_delete_relation input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})   
+#     
+# class fe_post_composition_add_inputs:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_add_inputs is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_add_inputs input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})  
+#     
+# class fe_post_composition_add_outputs:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_add_outputs is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_add_outputs input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})  
+#     
+# class fe_post_composition_set_node_properties:
+#     def OPTIONS(self):
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return ''
+#     def POST(self):
+#         cid = web.input()
+#         print( 'post_composition_set_node_properties is called with input: ' + str(cid))
+#         try:
+#             in_data = json.loads(web.data())
+#         except ValueError as e:
+#             in_data = web.data()
+#         print( 'post_composition_set_node_properties input json data: ' + str(in_data))
+#         web.header('Content-Type', 'application/json')
+#         web.header('Access-Control-Allow-Origin', '*')
+#         web.header('Access-Control-Allow-Headers', 'Origin, Content-Type, X-Auth-Token')
+#         return json.dumps({})  
+#     
+# class upload:
+#     def POST(self, dir):
+# #        data = json.loads(web.data())
+# #        pyDict = {'one':1,'two':2}
+# #        web.header('Content-Type', 'application/json')
+# #        return json.dumps(pyDict)
+#         return 'OK'
+#     
+# class import_file:
+#     def GET(self):
+#         user_data = web.input(dir='')
+#         file_dir = user_data.dir
+#         if 'name' not in user_data:
+#             return 'Error: input has no file name'
+#         file_name = user_data.name
+#         workbook = ToscaWorkBook()
+#         workbook._import( file_dir +'/'+ file_name)
+#         return 'OK'
+#     
+# class use:
+#     def GET(self):
+#         user_data = web.input()
+#         if 'type' not in user_data or 'name' not in user_data:
+#             return 'Error: input has no type or name'
+#         use_type = user_data['type']
+#         name = user_data['name']
+#         
+#         workbook = ToscaWorkBook()
+# 
+#         workbook._use(name)
+#         return 'OK'
+#     
+# class assign:
+#     def GET(self):
+#         user_data = web.input()
+#         if 'src_node') is False or user_data.has_key('value') is False:
+#             return 'Error: input has no src_node or value'
+#         src = user_data.src_node
+#         dst_val = user_data.value
+#         
+#         sub2 = None
+#         if user_data.has_key('property'):
+#             sub = user_data.property
+#         elif user_data.has_key('capability'):
+#             sub = user_data.capability
+#             if user_data.has_key('capability_property') is False:
+#                 return "Error: input has capability but no capability_property"
+#             else:
+#                 sub2 = user_data.capability_property
+#         elif user_data.has_key('requirement'):
+#             sub = user_data.requirement
+#         
+#         workbook = ToscaWorkBook()
+# 
+#         if sub2 is None:
+#             workbook._assign(src, sub, dst_val)
+#         else:
+#             workbook._assign(src, sub, sub2, dst_val)
+# 
+#         return 'OK'
+#     
+# class clear:
+#     def GET(self):
+#         workbook = ToscaWorkBook()
+# 
+#         workbook._reset()
+#         return 'OK'
+# 
+# class show:
+#     def GET(self):
+#         user_data = web.input(level='details')
+#         workbook = ToscaWorkBook()
+# 
+#         if user_data.level == 'details':         
+#             return workbook._show_details()
+#         else:
+#             return workbook._show_abstract()
+#     
+# class export:
+#     def GET(self):
+#         user_data = web.input(type='tosca', translation='off')
+#         workbook = ToscaWorkBook()
+# 
+#         if user_data.translation == 'on':
+#             if user_data.has_key('translation_lib'):
+#                 tran_lib = user_data.translation_lib
+#                 workbook._load_translation_db(tran_lib)
+# 
+#         if user_data.type == 'tosca':
+#             return workbook._export_yaml_web()
+#         elif user_data.type == 'heat':
+#             return 
+#         else:
+#             return 'Error in export type: only tosca or heat are supported'
+        
class translate_template:
    """web.py handler for POST /translate.

    Request body: JSON with an optional 'models' list (entries whose
    'schema'/'template'/'translate' values are base64-encoded YAML) and an
    optional 'template' field (base64-encoded TOSCA template YAML).
    Returns the composed blueprint rendered in 'cloudify,main' mode.
    """
    def POST(self):
        try:
            in_data = json.loads(web.data().decode('utf-8'))
        except ValueError as e:
            # Fall back to the raw payload when it is not valid JSON.
            in_data = web.data()
        print('translate_template input json data: ' + str(in_data))

        workbook = ToscaWorkBook()
        workbook._import_dir('./data/shared_model/')
#        workbook._load_translation_db('./data/shared_model/')

        if 'models' in in_data:
            in_model = in_data['models']
            # Idiom fix: isinstance() instead of comparing type() to list.
            if not isinstance(in_model, list):
                # NOTE(review): this only warns -- the loop below still runs
                # on a non-list value; confirm whether it should abort.
                print('models in the input should be a list type')
            for model_entry in in_model:
                for key in ['schema', 'template', 'translate']:
                    if key in model_entry:
                        workbook._import_yml_str(base64.b64decode(model_entry[key]))

        if 'template' in in_data:
            in_temp = in_data['template']
            workbook._translate_template_yaml_str(base64.b64decode(in_temp))
            workbook._add_shared_node([{'dcae.capabilities.cdapHost':'cdap_host'}, {'dcae.capabilities.dockerHost': 'docker_host'}, {'dcae.capabilities.composition.host': 'composition_virtual'}])

        ret = workbook._export_yaml_web('cloudify,main')
        print(ret)
        return ret
+    
class model_create:
    """web.py handler for POST /model_create.

    Request body: JSON with a 'spec' field holding a component spec.
    Generates schema/template/translate model files under
    ./data/tosca_model/<name>/ and returns the three artifacts
    base64-encoded in a JSON object (empty object when 'spec' is absent).
    """
    def POST(self):
        try:
            in_data = json.loads(web.data().decode('utf-8'))
        except ValueError as e:
            # Fall back to the raw payload when it is not valid JSON.
            in_data = web.data()
        print('model_create input json data: ' + str(in_data))

        ret = {}
        if 'spec' in in_data:
            spec_str = in_data['spec']
            model_prefix = './data/tosca_model'
            meta_model = './data/meta_model/meta_tosca_schema.yaml'

            builder = ToscaBuilder()

            builder.import_schema(meta_model)
            builder.import_spec_str(spec_str)
            name = builder.spec_import.name
            builder.create_node_type()

            # Make sure the per-model output directory exists.
            filename = model_prefix + '/' + name + '/schema.yaml'
            dirname = os.path.dirname(filename)
            # BUG FIX: narrowed from a bare 'except:' -- only the OSError
            # raised when the directory is missing should trigger creation.
            try:
                os.stat(dirname)
            except OSError:
                os.mkdir(dirname)

            schema_str = builder.export_schema(model_prefix + '/' + name + '/schema.yaml')
            builder.import_schema(model_prefix + '/' + name + '/schema.yaml')
            builder.create_model(name)
            template_str = builder.export_model(model_prefix + '/' + name + '/template.yaml')
            builder.create_translate(name)
            translate_str = builder.export_translation(model_prefix + '/' + name + '/translate.yaml')

            # BUG FIX: base64.encodestring() is deprecated and was removed in
            # Python 3.9; encodebytes() is the supported equivalent.
            ret['schema'] = base64.encodebytes(schema_str.encode('utf-8')).decode('utf-8')
            ret['template'] = base64.encodebytes(template_str.encode('utf-8')).decode('utf-8')
            ret['translate'] = base64.encodebytes(translate_str.encode('utf-8')).decode('utf-8')

        return json.dumps(ret)
+
+
+# Story 318043 - The TOSCA Lab server should expose API for healthcheck with response:
+# {
+#   "healthCheckComponent": "TOSCA_LAB",
+#   "healthCheckStatus": "<UP / DOWN>",
+#   "version": "<component version>",
+#   "description": "<OK or error description>"
+# }
class health_check:
    """web.py handler for GET /healthcheck (Story 318043).

    Reports component liveness as a JSON object with healthCheckComponent,
    healthCheckStatus, version and description fields.
    """
    def GET(self):
        response = {
            "healthCheckComponent": "TOSCA_LAB",
            "healthCheckStatus": "UP",
            "version": __version__,
            "description": "OK",
        }
        print('TOSCA_LAB got healthcheck request and returns' + str(response))
        return json.dumps(response)
+        
+            
class MyApplication(web.application):
    """web.application subclass whose run() binds the built-in web.py HTTP
    server to all interfaces on a configurable port (default 8080)."""
    def run(self, port=8080, *middleware):
        # wsgifunc() wraps the app (plus any middleware) as a WSGI callable.
        func = self.wsgifunc(*middleware)
        return web.httpserver.runsimple(func, ('0.0.0.0', port))
+
# URL routing table for the web.py application.  NOTE(review): most of the
# front-end endpoints below name handler classes that are commented out
# earlier in this file; only 'translate', 'model_create' and 'healthcheck'
# resolve to live classes here -- confirm the dead routes should remain.
urls = (
    '/upload/(.*)', 'upload',
    '/import', 'import_file',   #/import?dir=xxx&name=xxx
    '/use', 'use',              #/use?type=xxx&name=xxx
    '/assign', 'assign',        #/assign?src_node=xxx&[property|capability|requrement]=xxx&[capability_property=xxx]&value]xxx
    '/clear', 'clear',          #/clear
    '/show', 'show',            #/show?[level=abstract/details]
    '/export', 'export',         #/export?[type=tosca/heat]&[translation=[on|off]]&[translation_lib=xxx]
    '/itembyid', 'fe_get_itembyid',
    '/template', 'fe_get_template',
    '/type', 'fe_get_type',
    '/compositioncreate', 'fe_get_compositioncreate',
    '/ice.html', 'fe_get_ice',
    '/compimg', 'fe_post_compimg',
    '/composition.commit', 'fe_post_composition_commit',
    '/composition.setnodepolicies', 'fe_post_composition_set_nodepolicies',
    '/composition.addnode', 'fe_post_composition_add_node',
    '/composition.updatenodes', 'fe_post_composition_update_nodes',
    '/composition.deletenode', 'fe_post_composition_delete_node',
    '/composition.addrelation', 'fe_post_composition_add_relation',
    '/composition.deleterelation', 'fe_post_composition_delete_relation',
    '/composition.addinputs', 'fe_post_composition_add_inputs',
    '/composition.addoutputs', 'fe_post_composition_add_outputs',
    '/composition.setnodeproperties', 'fe_post_composition_set_node_properties',
    '/translate', 'translate_template',
    '/model_create', 'model_create',
    '/healthcheck', 'health_check')

# WSGI entry point for external servers (e.g. gunicorn/uwsgi).
application = web.application(urls, globals(), autoreload=False).wsgifunc()


# Standalone mode: run the built-in server; an optional first CLI argument
# overrides the default port (8080).
if __name__ == "__main__":
    app = MyApplication(urls, globals())
    if len(sys.argv) > 1:
        app.run(int(sys.argv[1]))
    else:
        app.run()
+
+
diff --git a/app/tosca_server.pyc b/app/tosca_server.pyc
new file mode 100644
index 0000000..89984f4
--- /dev/null
+++ b/app/tosca_server.pyc
Binary files differ
diff --git a/app/toscalib/__init__.py b/app/toscalib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/toscalib/__init__.py
diff --git a/app/toscalib/__init__.pyc b/app/toscalib/__init__.pyc
new file mode 100644
index 0000000..57447a6
--- /dev/null
+++ b/app/toscalib/__init__.pyc
Binary files differ
diff --git a/app/toscalib/templates/__init__.py b/app/toscalib/templates/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/toscalib/templates/__init__.py
diff --git a/app/toscalib/templates/__init__.pyc b/app/toscalib/templates/__init__.pyc
new file mode 100644
index 0000000..a70f506
--- /dev/null
+++ b/app/toscalib/templates/__init__.pyc
Binary files differ
diff --git a/app/toscalib/templates/capability_item.py b/app/toscalib/templates/capability_item.py
new file mode 100644
index 0000000..e96ee43
--- /dev/null
+++ b/app/toscalib/templates/capability_item.py
@@ -0,0 +1,73 @@
+from toscalib.templates.constant import *
+from toscalib.templates.property_item import PropertyItem
+import logging
+
+class CapabilityItem(object):
+    """Instance of a capability attached to a node template.
+
+    Wraps the type-level capability *definition* and holds per-instance
+    PropertyItem values, plus an `id` property used by substitution mapping
+    (sub_pointer is the mapping target when one is attached).
+    """
+    def __init__(self, definition):
+        # name/type copied from the definition for convenient access
+        self.name = definition.name
+        self.type = definition.type
+        self.definition = definition
+        self.properties = {}
+        self.id = PropertyItem(definition.id)
+        # set later when this capability is exposed via substitution_mappings
+        self.sub_pointer = None
+        self.parent_node = None
+        for prop in definition.properties.keys():
+            self.properties[prop] = PropertyItem(definition.properties[prop])
+        
+    def _parse_pre_defined_content(self, content):
+        """Assign property values from a capability assignment found in a template."""
+        if content is None:
+            return
+#        if content.has_key(CAP_PROPERTIES):
+        if CAP_PROPERTIES in content:
+            prop_sec = content[CAP_PROPERTIES]
+            for prop_name in prop_sec.keys():
+                prop_item = self._get_property_item(prop_name)
+                if prop_item is not None:
+                    prop_item._assign(prop_sec[prop_name])
+
+    def _propagate_substitution_value(self):
+        """Push values across the substitution mapping; return True when stable.
+
+        Returns False (not converged) when any property changed, or when the
+        capability id picked up a different value from its mapping target, so
+        the caller keeps iterating until a fixed point is reached.
+        """
+        converge = True
+        for prop_item in iter(self.properties.values()):
+            converge = converge and prop_item._propagate_substitution_value()
+        
+        if self.sub_pointer is None:
+            return converge
+        
+        # remember the current id value so a change can be detected below
+        if self.id.value is None:
+            old_val = None
+        else:
+            old_val = self.id.value._get_value()[0]
+        
+        # the mapping target is either a property item or a whole node
+        if isinstance(self.sub_pointer, PropertyItem):
+            if self.sub_pointer.value is None:
+                logging.warning( 'Something is wrong, the cap id mapping target'+ self.sub_pointer.name+ ' should have a value!')
+                return converge
+            self.id._direct_assign(self.sub_pointer.value)
+        # imported here to avoid a circular import with node.py
+        from toscalib.templates.node import Node
+        if isinstance(self.sub_pointer, Node):
+            if self.sub_pointer.id is None or self.sub_pointer.id.value is None:
+                logging.warning( 'Something is wrong, the cap id mapping target'+ self.sub_pointer.name+ ' should have a value!')
+                return converge
+            self.id._direct_assign(self.sub_pointer.id.value)
+        
+        if self.id.value is None:
+            new_val = None
+        else:      
+            new_val = self.id.value._get_value()[0]
+        # converged only if the id value did not move this round
+        return converge and (old_val == new_val)
+
+    def _get_property_item(self, prop_name):
+        """Return the named PropertyItem, or None (with a warning) when absent."""
+#        if self.properties.has_key(prop_name):
+        if prop_name in self.properties:
+            return self.properties[prop_name]
+        else:
+            logging.warning('Capability: '+ self.name+ ' of type: '+ self.type+ ' has no property: '+ prop_name)
+            return None
+    
+    def _validate_capability(self, cap_name):
+        """Delegate capability-type matching to the type-level definition."""
+        return self.definition._validate_capability(cap_name)
+            
+    def _update_parent_node(self, parent):
+        """Record the owning node on this capability and its properties."""
+        self.parent_node = parent
+        for prop in iter(self.properties.values()):
+            prop._update_parent_node(parent)
diff --git a/app/toscalib/templates/capability_item.pyc b/app/toscalib/templates/capability_item.pyc
new file mode 100644
index 0000000..3a7d971
--- /dev/null
+++ b/app/toscalib/templates/capability_item.pyc
Binary files differ
diff --git a/app/toscalib/templates/constant.py b/app/toscalib/templates/constant.py
new file mode 100644
index 0000000..101abaa
--- /dev/null
+++ b/app/toscalib/templates/constant.py
@@ -0,0 +1,82 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+# Regex that matches the two-character ordering prefix ('NN_YAMLORDER_')
+# prepended to key names below to force YAML section ordering; presumably
+# stripped via re.sub before final output -- confirm in the YAML writer.
+YMO_PREFIX=r'.._YAMLORDER_'
+
+# String spellings accepted as boolean true when parsing template values.
+TRUE_VALUES = ('True', 'TRUE', 'true', 'yes', 'Yes', 'YES', '1')
+
+# Each block below uses the double-assignment idiom: the outer name binds the
+# whole tuple, the nested tuple binds each constant individually.
+TEMPLATE_SECTIONS = (VERSION, METADATA, DESCRIPTION, DSL, 
+                     REPO, IMPORT, ARTIFACT_TYPE, DATA_TYPE, CAPABILITY_TYPE,
+                     INTERFACE_TYPE, RELATIONSHIP_TYPE, NODE_TYPE, GROUP_TYPE,
+                     POLICY_TYPE,  TOPOLOGY) = \
+                    ('tosca_definitions_version', 'metadata', 'description', 'dsl_definitions',
+                     'repositories', 'imports', 'artifact_types', 'data_types', 'capability_types',
+                     'interface_types', 'relationship_types', 'node_types', 'group_types',
+                     'policy_types', 'topology_template' )
+                    
+# Same section names with ordering prefixes, used when emitting YAML.
+YAML_ORDER_TEMPLATE_SECTIONS = (YMO_VERSION, YMO_METADATA, YMO_DESCRIPTION, YMO_DSL, 
+                     YMO_REPO, YMO_IMPORT, YMO_ARTIFACT_TYPE, YMO_DATA_TYPE, YMO_CAPABILITY_TYPE,
+                     YMO_INTERFACE_TYPE, YMO_RELATIONSHIP_TYPE, YMO_NODE_TYPE, YMO_GROUP_TYPE,
+                     YMO_POLICY_TYPE,  YMO_TOPOLOGY) = \
+                    ('00_YAMLORDER_tosca_definitions_version', '02_YAMLORDER_metadata', '01_YAMLORDER_description', '03_YAMLORDER_dsl_definitions',
+                     '04_YAMLORDER_repositories', '05_YAMLORDER_imports', '06_YAMLORDER_artifact_types', '07_YAMLORDER_data_types', '08_YAMLORDER_capability_types',
+                     '09_YAMLORDER_interface_types', '10_YAMLORDER_relationship_types', '11_YAMLORDER_node_types', '12_YAMLORDER_group_types',
+                     '13_YAMLORDER_policy_types', '14_YAMLORDER_topology_template' )
+
+# Topology template key names
+TOPOLOGY_SECTIONS = (TOPO_DESCRIPTION, TOPO_INPUTS, TOPO_NODE_TEMPLATES,
+            TOPO_RELATIONSHIP_TEMPLATES, TOPO_OUTPUTS, TOPO_GROUPS,
+            TOPO_SUBSTITUION_MAPPINGS) = \
+           ('description', 'inputs', 'node_templates',
+            'relationship_templates', 'outputs', 'groups',
+            'substitution_mappings')
+           
+YAML_ORDER_TOPOLOGY_SECTIONS = (YMO_TOPO_DESCRIPTION, YMO_TOPO_INPUTS, YMO_TOPO_NODE_TEMPLATES,
+            YMO_TOPO_RELATIONSHIP_TEMPLATES, YMO_TOPO_OUTPUTS, YMO_TOPO_GROUPS,
+            YMO_TOPO_SUBSTITUION_MAPPINGS) = \
+           ('10_YAMLORDER_description', '11_YAMLORDER_inputs', '13_YAMLORDER_node_templates',
+            '14_YAMLORDER_relationship_templates', '16_YAMLORDER_outputs', '15_YAMLORDER_groups',
+            '12_YAMLORDER_substitution_mappings')
+
+# Duplicate values here are deliberate: 'properties'/'id' appear once for the
+# capability side and once for the requirement side of a substitution mapping.
+SUBSTITUTION_SECTION = (SUB_NODE_TYPE, SUB_PROPERTY, SUB_ATTRIBUTE, SUB_REQUIREMENT, SUB_CAPABILITY, SUB_CAP_PROPERTY, SUB_CAP_ID, SUB_REQ_ID, SUB_INPUT, SUB_OUTPUT) = \
+                ('node_type', 'properties', 'attributes', 'requirements', 'capabilities', 'properties', 'id', 'id', 'INPUT', 'OUTPUT')
+                
+YAML_ORDER_SUBSTITUTION_SECTION = (YMO_SUB_NODE_TYPE, YMO_SUB_PROPERTY, YMO_SUB_REQUIREMENT, YMO_SUB_CAPABILITY) = \
+                ('00_YAMLORDER_node_type', '01_YAMLORDER_properties', '03_YAMLORDER_requirements', '02_YAMLORDER_capabilities')
+                
+REQUIREMENT_SECTION = (REQ_NODE, REQ_RELATIONSHIP, REQ_CAPABILITY, REQ_OCCURRENCE, REQ_FILTER) = \
+                ('node', 'relationship', 'capability', 'occurrences', 'node_filter') 
+                
+YAML_ORDER_REQUIREMENOD_ASSIGNMENOD_SECTION = (YMO_REQ_NODE, YMO_REQ_RELATIONSHIP, YMO_REQ_CAPABILITY, YMO_REQ_OCCURRENCE, YMO_REQ_FILTER) = \
+                ('01_YAMLORDER_node', '02_YAMLORDER_relationship', '00_YAMLORDER_capability', '04_YAMLORDER_occurrences', '03_YAMLORDER_node_filter') 
+
+NODE_SECTION = (NOD_DERIVED_FROM, NOD_TYPE, NOD_PROPERTIES, NOD_ATTRIBUTES, NOD_REQUIREMENTS,
+            NOD_INTERFACES, NOD_CAPABILITIES, NOD_ARTIFACTS, NOD_DESCRIPTION) = \
+               ('derived_from', 'type', 'properties', 'attributes', 'requirements',
+                'interfaces', 'capabilities', 'artifacts', 'description')
+               
+# NOTE(review): 'requirements' and 'relationships' both carry prefix '05_',
+# so their relative order after sorting is unspecified -- confirm intended.
+YAML_ORDER_NODETYPE_DEFINITION = (YMO_NOD_DERIVED_FROM, YMO_NOD_TYPE, YMO_NOD_PROPERTIES, YMO_NOD_ATTRIBUTES, YMO_NOD_REQUIREMENTS, YMO_NOD_RELATIONSHIPS,
+            YMO_NOD_INTERFACES, YMO_NOD_CAPABILITIES, YMO_NOD_ARTIFACTS, YMO_NOD_DESCRIPTION) = \
+               ('00_YAMLORDER_derived_from', '00_YAMLORDER_type', '01_YAMLORDER_properties', '03_YAMLORDER_attributes', '05_YAMLORDER_requirements', '05_YAMLORDER_relationships',
+                '06_YAMLORDER_interfaces', '04_YAMLORDER_capabilities', '07_YAMLORDER_artifacts', '02_YAMLORDER_description')  
+
+CAPABILITY_SECTION = (CAP_DERIVED_FROM, CAP_TYPE, CAP_PROPERTIES, CAP_ATTRIBUTES, 
+                      CAP_VERSION, CAP_DESCEIPTION, CAP_SOURCE ) = \
+               ('derived_from', 'type', 'properties', 'attributes', 
+                'version', 'description', 'valid_source_type')
+               
+               
+PROPERTY_SECTION = (PROP_TYPE, PROP_REQUIRED, PROP_DEFAULT, PROP_DESCRIPTION, 
+                    PROP_STATUS, PROP_ENTRY, PROP_CONSTRAINT) = \
+                ('type', 'required', 'default', 'description', 
+                 'status', 'entry_schema', 'constraints')
+                
+YAML_ORDER_PROPERTY_SECTION = (YMO_PROP_TYPE, YMO_PROP_REQUIRED, YMO_PROP_DEFAULT, YMO_PROP_DESCRIPTION, 
+                    YMO_PROP_STATUS, YMO_PROP_ENTRY, YMO_PROP_CONSTRAINT) = \
+                ('00_YAMLORDER_type', '01_YAMLORDER_required', '03_YAMLORDER_default', '02_YAMLORDER_description', 
+                 '04_YAMLORDER_status', '05_YAMLORDER_entry_schema', '06_YAMLORDER_constraints')  
+
+
+# NOTE(review): YMO_INT_INPUTS ('01_') and YMO_OP_INPUTS ('05_') both unprefix
+# to 'inputs' -- interface-level inputs vs per-operation inputs.
+YAML_ORDER_INTERFACE_SECTION = (YMO_INT_TYPE, YMO_INT_INPUTS, YMO_OP_DESCRIPTION, YMO_OP_IMPLEMENTATION, YMO_OP_EXECUTOR, YMO_OP_INPUTS) = \
+                ('00_YAMLORDER_type', '01_YAMLORDER_inputs', '02_YAMLORDER_description', 
+                 '03_YAMLORDER_implementation', '04_YAMLORDER_executor', '05_YAMLORDER_inputs')                  
diff --git a/app/toscalib/templates/constant.pyc b/app/toscalib/templates/constant.pyc
new file mode 100644
index 0000000..6335388
--- /dev/null
+++ b/app/toscalib/templates/constant.pyc
Binary files differ
diff --git a/app/toscalib/templates/database.py b/app/toscalib/templates/database.py
new file mode 100644
index 0000000..7e21148
--- /dev/null
+++ b/app/toscalib/templates/database.py
@@ -0,0 +1,113 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+from toscalib.templates.constant import *
+import logging
+
+class ToscaDB(object):
+    """ The database that stores all node types and TEMPLATES """
+    def __init__(self):
+        self.NODE_TYPES = {}
+        self.CAPABILITY_TYPES = {}
+        self.RELATIONSHIP_TYPES = {}
+        self.DATA_TYPES = {}
+        self.TEMPLATES = {}
+        
+    def _import_node_type(self, new_type):
+        if new_type is None:
+            return
+#        if self.NODE_TYPES.has_key(new_type.name) == True:
+        if new_type.name in self.NODE_TYPES:
+            logging.debug( 'Node type: '+ new_type.name+ ' already defined and will be overwritten')
+        self.NODE_TYPES[new_type.name]=new_type
+            
+    def _import_capability_type(self, new_type):
+        if new_type is None:
+            return
+#        if self.CAPABILITY_TYPES.has_key(new_type.name) == True:
+        if new_type.name in self.CAPABILITY_TYPES:
+            logging.debug( 'Capability type: '+ new_type.name+ ' already defined and will be overwritten')
+
+        self.CAPABILITY_TYPES[new_type.name]=new_type
+
+    def _import_relationship_type(self, new_type):
+        if new_type is None:
+            return
+#        if self.RELATIONSHIP_TYPES.has_key(new_type.name) == True:
+        if new_type.name in self.RELATIONSHIP_TYPES:
+            logging.debug( 'Relationship type: '+ new_type.name+ ' already defined and will be overwritten')
+
+        self.RELATIONSHIP_TYPES[new_type.name]=new_type
+
+    def _import_data_type(self, new_type):
+        if new_type is None:
+            return
+#        if self.DATA_TYPES.has_key(new_type.name) == True:
+        if new_type.name in self.DATA_TYPES:
+            logging.debug( 'Data type: '+ new_type.name+ ' already defined and will be overwritten')
+        self.DATA_TYPES[new_type.name]=new_type
+
+    def _import_template(self, new_template):
+        if new_template is None:
+            return
+#        if self.TEMPLATES.has_key(new_template.name) == False:
+        if new_template.name not in self.TEMPLATES :
+            self.TEMPLATES[new_template.name]= new_template        
+    
+    def _parse_objects(self):
+        logging.debug( 'parsing database')
+#        for objs in self.NODE_TYPES.itervalues():
+        for objs in iter(self.NODE_TYPES.values()):
+            objs._parse_content(self)
+#        for objs in self.CAPABILITY_TYPES.itervalues():
+        for objs in iter(self.CAPABILITY_TYPES.values()):
+            objs._parse_content(self)
+#        for objs in self.DATA_TYPES.itervalues():
+        for objs in iter(self.DATA_TYPES.values()):
+            objs._parse_content(self)
+#        for objs in self.RELATIONSHIP_TYPES.itervalues():
+        for objs in iter(self.RELATIONSHIP_TYPES.values()):
+            objs._parse_content(self)
+#        for objs in self.TEMPLATES.itervalues():
+        for objs in iter(self.TEMPLATES.values()):
+            objs._parse_content(self)
+            
+            
+    def _prepare_schema(self):
+        schema_output = {}
+        data_sec = {}
+        for key in self.DATA_TYPES.keys():
+            objs = self.DATA_TYPES[key]
+            data_sec[key] = objs.raw_content
+        node_sec = {}
+        for key in self.NODE_TYPES.keys():
+            objs = self.NODE_TYPES[key]
+            if objs.raw_content is None:
+                objs._create_rawcontent()
+            node_sec[key]=objs.raw_content
+        cap_sec = {}
+        for key in self.CAPABILITY_TYPES.keys():
+            objs = self.CAPABILITY_TYPES[key]
+            cap_sec[key]=objs.raw_content
+        rel_sec = {}
+        for key in self.RELATIONSHIP_TYPES.keys():
+            objs = self.RELATIONSHIP_TYPES[key]
+            rel_sec[key]=objs.raw_content
+            
+        if len(data_sec) > 0:
+            schema_output[YMO_DATA_TYPE] = data_sec
+        if len(node_sec) > 0:
+            schema_output[YMO_NODE_TYPE] = node_sec
+        if len(cap_sec) > 0:
+            schema_output[YMO_CAPABILITY_TYPE] = cap_sec
+        if len(rel_sec) > 0:
+            schema_output[YMO_RELATIONSHIP_TYPE] = rel_sec
+        
+        schema_output[YMO_VERSION]= 'tosca_simple_yaml_1_0_0'
+        
+        return schema_output
+            
+    
+            
+            
+            
\ No newline at end of file
diff --git a/app/toscalib/templates/database.pyc b/app/toscalib/templates/database.pyc
new file mode 100644
index 0000000..e7f651f
--- /dev/null
+++ b/app/toscalib/templates/database.pyc
Binary files differ
diff --git a/app/toscalib/templates/heat_constants.py b/app/toscalib/templates/heat_constants.py
new file mode 100644
index 0000000..365d38f
--- /dev/null
+++ b/app/toscalib/templates/heat_constants.py
@@ -0,0 +1,7 @@
+# HEAT output key names, each paired with a YAMLORDER-prefixed twin used to
+# force section ordering when the translated template is serialized to YAML.
+HOT_TEMPLATE = (HOT_VERSION, HOT_DESCRIPTION, HOT_PARAMETERS, HOT_RESOURCES, 
+                YMO_HOT_VERSION, YMO_HOT_DESCRIPTION, YMO_HOT_PARAMETERS, YMO_HOT_RESOURCES ) = \
+               ('heat_template_version', 'description', 'parameters', 'resources', 
+                '00_YAMLORDER_heat_template_version', '01_YAMLORDER_description', 
+                '02_YAMLORDER_parameters', '03_YAMLORDER_resources')
+               
+# HOT template version date emitted in translated templates
+HOT_VERSION_NUM = '2013-05-23'
\ No newline at end of file
diff --git a/app/toscalib/templates/heat_constants.pyc b/app/toscalib/templates/heat_constants.pyc
new file mode 100644
index 0000000..379bc4e
--- /dev/null
+++ b/app/toscalib/templates/heat_constants.pyc
Binary files differ
diff --git a/app/toscalib/templates/interface_item.py b/app/toscalib/templates/interface_item.py
new file mode 100644
index 0000000..9fd7fa9
--- /dev/null
+++ b/app/toscalib/templates/interface_item.py
@@ -0,0 +1,80 @@
+from toscalib.templates.property_item import PropertyItem
+from toscalib.templates.operation_item import OperationItem
+from toscalib.types.property import PropertyDefinition
+from toscalib.templates.constant import *
+import logging
+
+class InterfaceItem(object):
+    def __init__(self, definition, name = None, content = None):
+        if definition is not None:
+            self.name = definition.name
+            self.type = definition.type
+            self.definition = definition
+            self.inputs = {}
+            self.operations = {}
+            self.parent_node = None
+            for prop in definition.inputs.keys():
+                self.inputs[prop] = PropertyItem(definition.inputs[prop])
+            for oper in definition.operations.keys():
+                self.operations[oper] = OperationItem(definition.operations[oper])
+        else:
+            self.name = name
+            self.type = None
+            self.definition = None
+            self.inputs = {}
+            self.operations = {}
+            self.parent_node = None
+        
+        self._parse_pre_defined_content(content)
+        
+    def _parse_pre_defined_content(self, content):
+        if content is None:
+            return
+        
+        for key_name in content.keys():         
+            if key_name == 'type':
+                if self.type is not None and self.type != content[key_name]:
+                    logging.warning( 'interface: '+ self.name+ 'type is different in definition: '+ self.type+ ' overwritten here to '+ self.raw_content[key_name])
+                self.type = content[key_name]
+                continue
+            if key_name == 'inputs':
+                input_sec = content['inputs']
+                for input_item in input_sec.keys():
+                    self.inputs[input_item] = PropertyItem(PropertyDefinition(input_item))
+                    self.inputs[input_item]._assign(input_sec[input_item])
+                continue
+            
+#            if self.operations.has_key(key_name):
+            if key_name in self.operations:
+                self.operations[key_name]._parse_pre_defined_content(content[key_name])
+            else:
+                self.operations[key_name] = OperationItem(None, key_name, content[key_name]) 
+            
+    def _update_parent_node(self, parent):
+        self.parent_node = parent
+        for prop in iter(self.inputs.values()):
+            prop._update_parent_node(parent)
+        for ops in iter(self.operations.values()):
+            ops._update_parent_node(parent)
+
+    def _prepare_output(self, tags=''):
+        output = {}
+        if 'cloudify' not in tags:
+            if self.type is not None:
+                output[YMO_INT_TYPE] = self.type
+            if len(self.inputs) > 0: 
+                inputs = {}
+                for prop_name in self.inputs.keys():
+                    prop_item = self.inputs[prop_name]
+                    if prop_item.value is None:
+                        prop_value = None
+                    else:
+                        prop_value = prop_item.value._get_value(tags)[0]
+                    inputs[prop_name] = prop_value
+                output[YMO_INT_INPUTS] = inputs
+        if len(self.operations) > 0:
+            for op_name in self.operations.keys():
+                output[op_name] = self.operations[op_name]._prepare_output(tags)
+ 
+        return output
+    
\ No newline at end of file
diff --git a/app/toscalib/templates/interface_item.pyc b/app/toscalib/templates/interface_item.pyc
new file mode 100644
index 0000000..20d9f8c
--- /dev/null
+++ b/app/toscalib/templates/interface_item.pyc
Binary files differ
diff --git a/app/toscalib/templates/node.py b/app/toscalib/templates/node.py
new file mode 100644
index 0000000..8a07a49
--- /dev/null
+++ b/app/toscalib/templates/node.py
@@ -0,0 +1,389 @@
+from toscalib.templates.constant import *
+from toscalib.types.node import NodeType
+from toscalib.templates.requirement_item import RequirementItem
+from toscalib.templates.property_item import PropertyItem
+from toscalib.templates.capability_item import CapabilityItem
+from toscalib.utils import tosca_import, tosca_heat
+
+import copy, re, logging
+from toscalib.templates.interface_item import InterfaceItem
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+
+class Node(object):
+    def __init__(self, template, node_name, node_type):
+        self.template = template
+        self.name = node_name
+        self.id = PropertyItem(node_type.id)   
+        self_id_str = {} 
+        self_id_str['get_attribute']= [node_name, 'id'] 
+        self.id._assign(self_id_str)
+        
+        self.mapping_template = None
+        self.tran_template = None
+        
+        self.fe_json = None
+        self.fe_nid = None
+         
+        if node_type is None:
+            logging.warning( 'Empty node type')
+            return
+        elif isinstance(node_type, NodeType) is False:
+            logging.warning( 'Invalid NodeType passed to Node: '+ node_name+ 'construction')
+            return
+        else:
+            self._instatiateWithType(node_type)
+
+#Instantiate the node type, when substitution mapping is attached, create the new template for it
+    def _instatiateWithType(self, node_type):
+        """Populate this node's items from the NodeType (name kept for API compat).
+
+        Builds per-instance wrappers for every property, attribute,
+        requirement, capability and interface declared by the type; when the
+        type carries a substitution-mapping template, a deep copy of it is
+        attached and prefixed with this node's name.
+        """
+        self.type = node_type.name
+        self.type_obj = node_type
+        
+        self.properties = {}
+        for prop in node_type.properties.keys():
+            self.properties[prop] = PropertyItem(node_type.properties[prop])
+            
+        self.attributes = {}
+        for attr in node_type.attributes.keys():
+            self.attributes[attr] = PropertyItem(node_type.attributes[attr])
+        
+        self.requirements = []
+        for req in node_type.requirements:
+            self.requirements.append(RequirementItem(req))
+            
+        self.capabilities = {}
+        for cap in node_type.capabilities.keys():
+            self.capabilities[cap] = CapabilityItem(node_type.capabilities[cap])
+            
+        self.interfaces = {}
+        for intf in node_type.interfaces.keys():
+            self.interfaces[intf] = InterfaceItem(node_type.interfaces[intf])
+
+        if node_type.mapping_template is not None:
+            # NOTE(review): ToscaTopology appears unused here -- possibly kept
+            # for an import side effect; confirm before removing.
+            from toscalib.templates.topology import  ToscaTopology
+            self.mapping_template = copy.deepcopy(node_type.mapping_template)
+            self.mapping_template._update_prefix(self.name + '_')
+            self.mapping_template._verify_substitution(self)
+#             for sub_rule in node_type.mapping_template.sub_rules:
+#                 sub_rule._update_pointer(self, self.mapping_template)
+
+        self._update_parent_node()
+        
+#used to parse node template structure written in a template
+#Assign values if needed
+#For requirement fulfillment, add pending mode to check whether the value is a node template or type      
+    def _parse_pre_defined_content(self, content):
+        """Apply a node-template body from a parsed template to this node.
+
+        Assigns property values (the sentinel '__GET_NODE_NAME__' resolves to
+        this node's name), fulfills requirements, fills capability properties
+        and merges interface content; unknown interfaces are created ad hoc.
+        """
+#        if content.has_key(NOD_PROPERTIES):
+        if NOD_PROPERTIES in content:
+            prop_sec = content[NOD_PROPERTIES]
+            if prop_sec is not None:
+                for prop_name in prop_sec.keys():
+                    prop_item = self._get_property_item(prop_name)
+                    if prop_item is not None:
+                        prop_item._assign(prop_sec[prop_name])
+                        if prop_sec[prop_name] == '__GET_NODE_NAME__':
+                            prop_item._assign(self.name)
+                        
+#        if content.has_key(NOD_REQUIREMENTS):
+        if NOD_REQUIREMENTS in content:
+            req_sec = content[NOD_REQUIREMENTS]
+            if req_sec is not None:                
+                for req in req_sec:
+                    req_item_name, req_item_value = tosca_import._parse_requirement_name_and_value(req)
+#TODO: the same requirement name can only appear once!!
+                    req_item = self._get_requirement_item_first(req_item_name)
+                    if req_item is not None:
+                        req_item._parse_pre_defined_content(req_item_value) 
+                    else:
+                        logging.warning( 'Requirement '+ req_item_name +'not defined in Node '+ self.name + ' of type '+ self.type)
+
+#        if content.has_key(NOD_CAPABILITIES):
+        if NOD_CAPABILITIES in content:
+            cap_sec = content[NOD_CAPABILITIES]
+            if cap_sec is not None:
+                for cap_name in cap_sec.keys():
+                    cap_item = self._get_capability_item(cap_name)
+                    if cap_item is not None: 
+                        cap_item._parse_pre_defined_content(cap_sec[cap_name])
+                        
+#        if content.has_key(NOD_INTERFACES):
+        if NOD_INTERFACES in content:
+            interface_sec = content[NOD_INTERFACES]
+            if interface_sec is not None:
+                for interface_name in interface_sec.keys():
+                    interface_item = self._get_interface_item(interface_name)
+                    if interface_item is not None:
+                        interface_item._parse_pre_defined_content(interface_sec[interface_name])
+                    else:
+                        # interface not declared by the type: create it ad hoc
+                        self.interfaces[interface_name] = InterfaceItem(None, interface_name, interface_sec[interface_name])  
+         
+        self._update_parent_node()   
+    
+    def _get_property_item(self, prop_name):
+#        if self.properties.has_key(prop_name):
+        if prop_name in self.properties:
+            return self.properties[prop_name]
+        else:
+            logging.warning('Node: '+ self.name+ ' of type: '+ self.type+ ' has no property: '+ prop_name)
+            return None
+
+    def _get_attribute_item(self, attr_name):
+#        if self.attributes.has_key(attr_name):
+        if attr_name in self.attributes:
+            return self.attributes[attr_name]
+        else:
+            logging.warning('Node: '+ self.name+ ' of type: '+ self.type+ ' has no attribute: '+ attr_name)
+            return None
+        
+    def _get_interface_item(self, interface_name):
+#        if self.interfaces.has_key(interface_name):
+        if interface_name in self.interfaces:
+            return self.interfaces[interface_name]
+        else:
+            logging.warning( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no interface: '+ interface_name)
+            return None    
+        
+    def _get_capability_item(self, cap_name):
+#        if self.capabilities.has_key(cap_name):
+        if cap_name in self.capabilities:
+            return self.capabilities[cap_name]
+        else:
+            #logging.debug('Node: '+ self.name+ ' of type: '+ self.type+ ' has no capability: '+ cap_name)
+            return None
+        
+    def _get_capability_property(self, cap_name, prop_name):
+        cap_item = self._get_capability_item(cap_name)
+        if cap_item is not None:
+            return cap_item._get_property_item(prop_name)
+        else:
+            #logging.debug( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no capability: '+ cap_name)
+            return None
+        
+    def _get_requirement_item_first(self, req_name):
+        for req_item in self.requirements:
+            if req_item.name == req_name:
+                return req_item
+        logging.warning( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no requirement: '+ req_name)
+        return None
+        
+    def _verify_requirements(self, node_dict):
+        for req in self.requirements:
+            req._verify_requirement(node_dict)
+     
+    def _verify_functions(self):
+        """Re-bind function references on every assigned value held by this node.
+
+        Walks the node id, properties, capability properties, interface inputs
+        and operation inputs, passing (template, node, item) to each value's
+        _update_function_reference so intra-template references resolve.
+        """
+        if self.id.value is not None:
+            self.id.value._update_function_reference(self.template, self, self.id)
+        for prop_item in iter(self.properties.values()):
+            if prop_item.value is not None:
+                prop_item.value._update_function_reference(self.template, self, prop_item)     
+        for cap_item in iter(self.capabilities.values()):
+            for cap_item_prop in iter(cap_item.properties.values()):
+                if cap_item_prop.value is not None:
+                    cap_item_prop.value._update_function_reference(self.template, self, cap_item_prop)  
+        for interface_item in iter(self.interfaces.values()):
+            for interface_item_input in iter(interface_item.inputs.values()):
+                if interface_item_input.value is not None: 
+                    interface_item_input.value._update_function_reference(self.template, self, interface_item_input)
+            for operation_item in iter(interface_item.operations.values()):
+                for input_item in iter(operation_item.inputs.values()):
+                    if input_item.value is not None:
+                        input_item.value._update_function_reference(self.template, self, input_item)
+    
+    def _update_parent_node(self):
+        for prop in iter(self.properties.values()):
+            prop._update_parent_node(self)
+        for cap in iter(self.capabilities.values()):
+            cap._update_parent_node(self)
+        for req in self.requirements:
+            req._update_parent_node(self)
+        for interface in iter(self.interfaces.values()):
+            interface._update_parent_node(self)
+        
+                
+    def  _update_prefix(self, prefix):
+        """Prefix this node's name and recursively prefix every assigned value.
+
+        A node named 'NO_PREFIX' instead takes the prefix itself (minus its
+        trailing separator character) as its new name.
+        """
+        if self.name == 'NO_PREFIX':
+            self.name = prefix[:len(prefix)-1]
+        else:
+            self.name = prefix + self.name
+        self.id.value._update_prefix(prefix)
+        
+        for prop_item in iter(self.properties.values()):
+            if prop_item.value is not None:
+                prop_item.value._update_prefix(prefix)
+        for cap_item in iter(self.capabilities.values()):
+            for cap_item_prop in iter(cap_item.properties.values()):
+                if cap_item_prop.value is not None:
+                    cap_item_prop.value._update_prefix(prefix)
+        for interface_item in iter(self.interfaces.values()):
+            for interface_item_input in iter(interface_item.inputs.values()):
+                if interface_item_input.value is not None: 
+                    interface_item_input.value._update_prefix(prefix)
+            for operation_item in iter(interface_item.operations.values()):
+                for input_item in iter(operation_item.inputs.values()):
+                    if input_item.value is not None:
+                        input_item.value._update_prefix(prefix)
+       
+        for req in self.requirements:
+            req._update_prefix(prefix)
+    
+        # re-point children at this node after the rename
+        self._update_parent_node()
+
+    def _verify_req_node(self, req_type, req_cap, req_filter):
+        """Check whether this node can satisfy a requirement.
+
+        req_type:   required node type (None to skip the type check)
+        req_cap:    required capability (None to skip)
+        req_filter: node_filter constraints (currently always accepted,
+                    see _verify_node_filter)
+        Returns True when all supplied constraints match.
+        """
+        if req_type is not None and self.type_obj._verify_req_type(req_type) is False:
+            logging.warning( 'Type matching failed')
+            return False
+
+        if req_cap is not None:
+            cap_found = None
+            for cap_item in iter(self.capabilities.values()):
+                if cap_item._validate_capability(req_cap) is True:
+                    cap_found = cap_item
+                    break
+            if cap_found is None:
+                logging.warning( 'Capability matching failed')
+                return False
+            
+        return self._verify_node_filter(req_filter)
+    
+    def _verify_node_filter(self, req_filter):
+        """Placeholder: node_filter matching is not implemented, every filter matches."""
+        return True
+    
+    def _propagate_substitution_value(self):
+        """Push values through substitution mappings; return True when stable.
+
+        Aggregates convergence flags from properties, requirements,
+        capabilities and attributes, then recurses into the attached
+        mapping/translated templates (whose own convergence is not folded
+        into the returned flag).
+        """
+        converge = True
+        for prop_item in iter(self.properties.values()):
+            converge = converge and prop_item._propagate_substitution_value()
+        for req_item in self.requirements:
+            converge = converge and req_item._propagate_substitution_value()
+        for cap_item in iter(self.capabilities.values()):
+            converge = converge and cap_item._propagate_substitution_value()
+        for attr_item in iter(self.attributes.values()):
+            converge = converge and attr_item._propagate_attr_substitution_value()
+
+        
+        if self.mapping_template is not None:
+            self.mapping_template._propagate_substitution_value()
+        if self.tran_template is not None:
+            self.tran_template._propagate_substitution_value()
+        
+        return converge
+    
+    def _prepare_extra_imports(self, tags = ''):
+        if 'noexpand' in tags:
+            return []
+        if self.tran_template is not None:
+            return self.tran_template._prepare_extra_imports(tags)
+        if self.mapping_template is not None:
+            return self.mapping_template._prepare_extra_imports(tags)
+        return []
+    
    def _prepare_output(self, tags=''):
        """Render this node template as a {name: body} mapping for output.

        tags is a flag string controlling the format:
          'noexpand' -- emit this node itself instead of expanding its
                        nested (translated/mapped) template;
          'heat'     -- rewrite tosca.heat.* type names into Heat '::' form;
          'cloudify' -- emit relationships instead of requirements and drop
                        the capabilities section;
          'java_sim' -- suppress the requirements section.
        """
        # Prefer expanding the nested template; 'main' is downgraded to
        # 'part' so nested output knows it is not the top-level template.
        if 'noexpand' not in tags:
            newtags = tags.replace('main', 'part')
            if self.tran_template is not None:
                return self.tran_template._prepare_output(newtags)
            if self.mapping_template is not None:
                return self.mapping_template._prepare_output(newtags)
        output = {}
        if 'heat' in tags:
            # tosca.heat.Foo.Bar -> Foo::Bar
            # NOTE(review): the dots in the first pattern are unescaped
            # regex wildcards; works for well-formed tosca.heat.* names.
            heat_type = re.sub('tosca.heat.', '', self.type)
            heat_type = re.sub('\.', '::', heat_type)
            output[YMO_NOD_TYPE] = heat_type
        else:
            output[YMO_NOD_TYPE] = self.type
        # Properties: required ones are kept even when unfilled (as None
        # placeholders); optional ones are dropped when unfilled or when
        # their value is empty.
        prop_out = {}
        for prop in self.properties.keys():
            prop_item = self.properties[prop]
#             if prop_item.required is False and prop_item.used is not True and prop_item.filled is not True:
            if prop_item.required is False and prop_item.filled is not True:
                continue
            if prop_item.filled is not True or prop_item.value is None:
                prop_value = None
            else:
                prop_value = prop_item.value._get_value(tags)[0]
            if prop_item.required is False and prop_value in [None, [], {}]:
                continue
            else:
                prop_out[prop] = prop_value
        # Capabilities: only those with at least one filled property.
        cap_out={}
        for cap in iter(self.capabilities.values()):
            cap_item = {}
            for cap_prop in iter(cap.properties.values()):
                if cap_prop.filled is True:
                    cap_item[cap_prop.name] = cap_prop.value._get_value(tags)[0]
            if len(cap_item) > 0:
                cap_out[cap.name] = {'properties': cap_item}

        # Requirements: fulfilled ones reference the target node by name
        # (cloudify format uses type/target relationship entries);
        # unfulfilled ones that carry a node_filter are emitted as abstract
        # requirements in non-cloudify output.
        req_out = []
        for req in self.requirements:
            if req.filled is True:
                req_item = dict()
                if 'cloudify' in tags:
                    if req.relationship is not None :
                        req_item['type'] = req.relationship
                    else:
                        req_item['type'] = 'cloudify.relationships.connected_to'
                    req_item['target'] = req.str_value
                else:
                    req_item[req.name] = req.str_value
                req_out.append(req_item)
            elif req.filter is not None and 'cloudify' not in tags:
                req_item = {}
                if req.req_capability is not None:
                    req_item[YMO_REQ_CAPABILITY] = req.req_capability
                if req.req_type is not None:
                    req_item[YMO_REQ_NODE] = req.req_type
                if req.relationship is not None:
                    req_item[YMO_REQ_RELATIONSHIP] = req.relationship
                req_item[YMO_REQ_FILTER] = req.filter
                req_out.append({req.name:req_item})
        int_out = {}
        for interface_name in self.interfaces.keys():
            int_out[interface_name] = self.interfaces[interface_name]._prepare_output(tags)

        # Assemble the node body, omitting empty sections.
        if len(prop_out) > 0:
            output[YMO_NOD_PROPERTIES]=prop_out
        if len(req_out) > 0 and 'java_sim' not in tags:
            if 'cloudify' in tags:
                output[YMO_NOD_RELATIONSHIPS] = req_out
            else:
                output[YMO_NOD_REQUIREMENTS] = req_out
        if len(cap_out) > 0 and 'cloudify' not in tags:
            output[YMO_NOD_CAPABILITIES] = cap_out
        if len(int_out) > 0 :
            output[YMO_NOD_INTERFACES] = int_out
        final_out = {}
        final_out[self.name] = output
        return final_out
+                   
+    def _prepare_heat_output(self, parameters_type, parameters_val):
+        if self.mapping_template is not None:
+            return self.mapping_template._prepare_heat_output(parameters_type, parameters_val, True)
+        else:
+            if tosca_heat._type_validate(self.type) is not True:
+                return None
+            output = {}
+            output[YMO_NOD_TYPE] = tosca_heat._type_translate(self.type)
+            prop_out = {}
+            for prop_item in iter(self.properties.values()):
+                if prop_item.filled:
+                    prop_out[prop_item.name] = prop_item.value
+                else:
+                    input_name = self.name + '_' + prop_item.name
+                    prop_out[prop_item.name] = '{ get_param: ' + input_name + ' }'
+                    input_type = {}
+                    input_type[input_name] = prop_item.type
+                    input_val = {}
+                    input_val[input_name] = prop_item.value
+                    parameters_type.update(input_type)
+                    parameters_val.udpate(input_val)
+            if len(prop_out) > 0:
+                output[YMO_NOD_PROPERTIES] = prop_out
+            final_out = {}
+            final_out[self.name] = output
+            return final_out
+             
+             
    def toJson(self):
        """Return the cached front-end JSON representation of this node."""
        return self.fe_json
\ No newline at end of file
diff --git a/app/toscalib/templates/node.pyc b/app/toscalib/templates/node.pyc
new file mode 100644
index 0000000..3d780b7
--- /dev/null
+++ b/app/toscalib/templates/node.pyc
Binary files differ
diff --git a/app/toscalib/templates/operation_item.py b/app/toscalib/templates/operation_item.py
new file mode 100644
index 0000000..8bc2612
--- /dev/null
+++ b/app/toscalib/templates/operation_item.py
@@ -0,0 +1,65 @@
+from toscalib.templates.property_item import PropertyItem
+from toscalib.types.property import PropertyDefinition
+from toscalib.templates.constant import *
+
+
class OperationItem(object):
    """One interface operation instance on a node template.

    Wraps an operation definition (when available) together with the
    implementation reference and the concrete input assignments supplied
    by the template.
    """

    def __init__(self, definition, name=None, content=None):
        self.definition = definition
        self.inputs = {}
        self.parent_node = None
        if definition is None:
            # Free-standing operation declared only in the template.
            self.name = name
            self.implementation = None
        else:
            self.name = definition.name
            self.implementation = definition.implementation
            for in_name, in_def in definition.inputs.items():
                self.inputs[in_name] = PropertyItem(in_def)

        if content is not None:
            self._parse_pre_defined_content(content)

    def _parse_pre_defined_content(self, content):
        """Apply an operation assignment parsed from a template.

        A bare (non-dict) value is the short form and names the
        implementation directly; the dict form may carry both an
        'implementation' entry and an 'inputs' mapping.
        """
        if content is None:
            return
        if type(content) is not dict:
            self.implementation = content
            return
        if 'implementation' in content:
            self.implementation = content['implementation']
        if 'inputs' in content:
            for in_name, in_value in content['inputs'].items():
                item = PropertyItem(PropertyDefinition(in_name))
                item._assign(in_value)
                self.inputs[in_name] = item

    def _update_parent_node(self, parent):
        """Attach this operation and all of its inputs to the owning node."""
        self.parent_node = parent
        for item in self.inputs.values():
            item._update_parent_node(parent)

    def _prepare_output(self, tags=''):
        """Serialize the operation for template output.

        Only the inputs section is emitted (implementation output is
        intentionally disabled); unassigned inputs are rendered as None.
        """
        output = {}
        if len(self.inputs) > 0:
            rendered = {}
            for in_name, item in self.inputs.items():
                if item.value is None:
                    rendered[in_name] = None
                else:
                    rendered[in_name] = item.value._get_value(tags)[0]
            output[YMO_OP_INPUTS] = rendered
        return output
diff --git a/app/toscalib/templates/operation_item.pyc b/app/toscalib/templates/operation_item.pyc
new file mode 100644
index 0000000..1d35b40
--- /dev/null
+++ b/app/toscalib/templates/operation_item.pyc
Binary files differ
diff --git a/app/toscalib/templates/property_item.py b/app/toscalib/templates/property_item.py
new file mode 100644
index 0000000..dc6aea7
--- /dev/null
+++ b/app/toscalib/templates/property_item.py
@@ -0,0 +1,95 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+import ast, logging
+
class PropertyItem(object):
    """A property instance on a node, capability or operation template.

    Tracks the assigned value, whether it has been filled, and an optional
    'sub_pointer' link to a peer item used to push/pull values across
    substitution mappings.
    """

    def __init__(self, definition):
        self.name = definition.name
        self.type_obj = definition.type_obj
        self.filled = False           # True once a value has been assigned
        self.definition = definition
        self.value = None             # Value wrapper (or raw value via _direct_assign)
        self.required = definition.required
        self.sub_pointer = None       # substitution target in another template
        self.used = True
        self.parent_node = None

    def _assign(self, value):
        """Wrap 'value' in a Value object and store it.

        Returns False when value is None (nothing assigned), True otherwise.
        """
        from toscalib.templates.value import Value
        if value is None:
            return False
        self.value = Value(self.type_obj, value)
        # Defensive check kept from the original; Value() does not
        # normally evaluate to None here.
        if self.value is None:
            logging.warning('Value can not be assigned: validation failed!')
        else:
            self.filled = True
        return True

    def _direct_assign(self, value):
        """Store 'value' as-is (no Value wrapping); mark filled unless None."""
        self.value = value
        if value is not None:
            self.filled = True

    def _update_prefix(self, prefix):
        """Prepend 'prefix' to the property name (used on template merges)."""
        self.name = prefix + self.name

    def _update_parent_node(self, parent):
        self.parent_node = parent

    def _propagate_substitution_value(self):
        """Push this property's value to its substitution target.

        Always returns True: a property item is considered converged.
        """
        if self.sub_pointer is None:
            return True
        if self.value is not None:
            self.sub_pointer._assign(self.value._get_value()[0])
        return True

    def _propagate_attr_substitution_value(self):
        """Pull an attribute value back from the substitution target."""
        if self.sub_pointer is None or hasattr(self.sub_pointer, 'value') is False:
            return True
        self._direct_assign(self.sub_pointer.value)
        return True

    def _prepare_input_type_output(self, tags):
        """Render this property as a template 'inputs' entry.

        When the definition carries a default and the 'w_default' tag is
        set, the input is suppressed entirely (empty dict).
        """
        out_details = {}
        out_details[YMO_PROP_TYPE] = self.type_obj.name
        if hasattr(self.definition, 'default') is True and self.definition.default is not None:
            if 'w_default' in tags:
                return {}
            out_details[YMO_PROP_DEFAULT] = self.definition.default
        out_val = {}
        out_val[self.name] = out_details
        return out_val

    def _prepare_output_type_output(self):
        """Render this property as a template 'outputs' entry."""
        out_val = {}
        out_val[self.name] = dict(value=self.value._get_value()[0])
        return out_val

    def _prepare_heat_output(self):
        """Render (type, value) dicts for a HEAT parameter.

        Returns a (type_out, val_out) pair, each keyed by the property
        name; val_out maps to None while the property is unfilled.

        Fixed: the original read 'self.type.name', but this class only
        defines 'type_obj' (see __init__), so the call always raised
        AttributeError.
        """
        type_out = {}
        type_out[self.name] = dict(type=self.type_obj.name)
        val_out = {}
        if self.filled:
            val_out[self.name] = self.value
        else:
            val_out[self.name] = None
        return type_out, val_out
+        
diff --git a/app/toscalib/templates/property_item.pyc b/app/toscalib/templates/property_item.pyc
new file mode 100644
index 0000000..96d7b86
--- /dev/null
+++ b/app/toscalib/templates/property_item.pyc
Binary files differ
diff --git a/app/toscalib/templates/requirement_item.py b/app/toscalib/templates/requirement_item.py
new file mode 100644
index 0000000..19c9844
--- /dev/null
+++ b/app/toscalib/templates/requirement_item.py
@@ -0,0 +1,116 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+from toscalib.templates.property_item import PropertyItem
+import logging
+
class RequirementItem(object):
    """A requirement instance on a node template.

    Holds the resolution state of one requirement: the target node (once
    matched), the capability on that node which satisfied it, and the
    constraints (type / capability / relationship / node_filter) carried
    over from the definition or from a template assignment.
    """

    def __init__(self, definition):
        self.name = definition.name
        # Resolution state.
        self.value = None        # target node object once assigned
        self.str_value = None    # target node name as written in the template
        self.cap_match = None    # capability on the target that matched
        self.filled = False
        self.pending = False     # name recorded but not yet resolved
        # Constraints copied from the definition.
        self.req_capability = definition.req_capability
        self.relationship = definition.relationship
        self.req_type = definition.req_type
        self.filter = None
        self.sub_pointer = None
        self.parent_node = None

    def _assign(self, value):
        """Try to fulfill this requirement with the node 'value'.

        Succeeds only when some capability of the node validates against
        the required capability; returns True on success.
        """
        if value is None:
            logging.warning('Assign None to fulfill requirement')
            return False
        for candidate in value.capabilities.values():
            if candidate._validate_capability(self.req_capability) is True:
                self.cap_match = candidate
                break
        if self.cap_match is None:
            logging.warning('No matching capabilities in requirement assignment')
            return False
        self.value = value
        self.str_value = value.name
        self.filled = True
        return True

    def _propagate_substitution_value(self):
        """Forward the resolved target across a substitution-mapping link.

        Always returns True (the item is considered converged).
        """
        if self.sub_pointer is None or self.filled is not True:
            return True
        if isinstance(self.sub_pointer, RequirementItem):
            # Link requirement-to-requirement, preferring the deeper target
            # when the matched capability is itself substituted.
            if self.cap_match.sub_pointer is None:
                self.sub_pointer._assign(self.value)
            else:
                self.sub_pointer._assign(self.cap_match.sub_pointer.parent_node)
        elif isinstance(self.sub_pointer, PropertyItem):
            if self.cap_match.id.value is not None:
                self.sub_pointer._direct_assign(self.cap_match.id.value)
        return True

    def _verify_requirement(self, node_dict):
        """Resolve the recorded node name against the template's nodes.

        A filled requirement whose target is missing is reset to unfilled;
        a pending name that is not a node is reinterpreted as a node type.
        """
        if self.filled is True:
            if self.str_value in node_dict:
                self._assign(node_dict[self.str_value])
            else:
                logging.warning('Error! the node requires \'' + self.str_value + '\' not defined in the template!')
                self.str_value = None
                self.filled = False
        if self.pending is True:
            if self.str_value in node_dict:
                self._assign(node_dict[self.str_value])
                self.pending = None
            else:
                self.req_type = self.str_value
                self.str_value = None
                self.pending = None

    def _verify_node(self, node):
        """Check whether 'node' can satisfy this requirement's constraints."""
        if node._verify_req_node(self.req_type, self.req_capability, self.filter) is False:
            logging.warning('requirement matching failed')
            return False
        return True

    def _update_prefix(self, prefix):
        """Prefix the target node name after a template merge."""
        if self.filled is True:
            self.str_value = prefix + self.str_value

    def _update_parent_node(self, parent):
        self.parent_node = parent

    def _parse_pre_defined_content(self, content):
        """Parse a requirement assignment: short string form or dict form."""
        if type(content) is str:
            self.str_value = content
            self.filled = True
        elif type(content) is dict:
            if REQ_NODE in content:
                self.str_value = content[REQ_NODE]
                self.pending = True
            if REQ_CAPABILITY in content:
                self.req_capability = content[REQ_CAPABILITY]
            if REQ_RELATIONSHIP in content:
                self.relationship = content[REQ_RELATIONSHIP]
            if REQ_FILTER in content:
                self.filter = content[REQ_FILTER]
        else:
            logging.warning('Can not parse requirement assignment for ' + self.name)
diff --git a/app/toscalib/templates/requirement_item.pyc b/app/toscalib/templates/requirement_item.pyc
new file mode 100644
index 0000000..1cb307e
--- /dev/null
+++ b/app/toscalib/templates/requirement_item.pyc
Binary files differ
diff --git a/app/toscalib/templates/substitution_rule.py b/app/toscalib/templates/substitution_rule.py
new file mode 100644
index 0000000..fda8e50
--- /dev/null
+++ b/app/toscalib/templates/substitution_rule.py
@@ -0,0 +1,179 @@
+from toscalib.templates.constant import *
+import logging
+
+
class SubstitutionRule (object):
    """One substitution-mapping rule linking an abstract node to the inner
    template that implements it.

    Attributes:
        type:     kind of mapped item -- SUB_PROPERTY, SUB_ATTRIBUTE,
                  SUB_CAPABILITY or SUB_REQUIREMENT.
        item:     capability/requirement name on the abstract node
                  (None for plain property/attribute rules).
        property: property name within the item, or the special markers
                  SUB_CAP_ID / SUB_REQ_ID.
        value:    mapping target, e.g. [SUB_INPUT, input_name],
                  [SUB_OUTPUT, output_name] or
                  [node_name, item_name(, prop_name)].
    """
    def __init__(self, type, item_name, prop_name, value):
        self.type = type
        self.item = item_name
        self.property = prop_name
        self.value = value
        
    def _update_pointer(self, src_node, dst_template):
        """Wire 'sub_pointer' links from items of src_node (the abstract
        node) to the matching items of dst_template (the implementing
        template) according to this rule.

        Unresolvable rules log a warning and are otherwise ignored; when
        src_node is None only target-side lookups are performed.
        """
        # NOTE(review): this guard rejects only a value that is BOTH not a
        # list AND shorter than 1; a non-list value with len() >= 1 (e.g. a
        # plain string) passes through to the self.value[0] indexing below.
        # 'or' was probably intended -- confirm before changing.
        if type(self.value) is not list and len(self.value) < 1:
            logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': '+ self.value)
            return
        
        if self.type == SUB_PROPERTY:
            # Property mapped either to a template input or to a property
            # of an inner node.
            if self.value[0] == SUB_INPUT:
                if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs:
                    if src_node is not None:
                        src_node.properties[self.property].sub_pointer = dst_template.inputs[self.value[1]]
                        # Inputs feeding required/filled properties are
                        # marked as used so they survive output pruning.
                        if src_node.properties[self.property].required is True or src_node.properties[self.property].filled is True:
                            dst_template.inputs[self.value[1]].used = True
                elif src_node is not None and src_node.properties[self.property].required is True:
                    logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no input named '+ self.value[1])
            elif self.value[0] in dst_template.node_dict:
                target_node = dst_template.node_dict[self.value[0]]
                target_prop_item = target_node._get_property_item(self.value[1])
                if target_prop_item is not None: 
                    if src_node is not None:
                        src_prop_item = src_node._get_property_item(self.property)
                        # NOTE(review): src_prop_item is dereferenced here
                        # before the 'is not None' check two lines below; a
                        # missing source property would raise AttributeError
                        # first.
                        if src_prop_item.required is True or src_prop_item.filled is True:
                            target_prop_item.used = True
                        if src_prop_item is not None:
                            src_prop_item.sub_pointer = target_prop_item
                else:
                    logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no property named '+ self.value[1]+ ' in node '+ self.value[0])
            else:
                logging.warning('Incorrect mapping rule for property '+ self.property+ ': no node named '+ self.value[0])
        
        elif self.type == SUB_ATTRIBUTE:
            # Attribute mapped from a template output (only supported form).
            if self.value[0] == SUB_OUTPUT:
                if hasattr(dst_template, 'outputs') and self.value[1] in dst_template.outputs:
                    if src_node is not None:
                        src_node.attributes[self.property].sub_pointer = dst_template.outputs[self.value[1]]
                else: 
                    logging.warning( 'Incorrect mapping rule for attribute '+ self.property+ ': no output named '+ self.value[1])
        
        elif self.type == SUB_CAPABILITY:
            if self.property is None:
                # Whole-capability mapping: link the capability item and
                # each of its properties pairwise.
                if self.value[0] in dst_template.node_dict:
                    target_node = dst_template.node_dict[self.value[0]]
                    target_cap_item = target_node._get_capability_item(self.value[1])
                    if target_cap_item is not None:
                        if src_node is not None:
                            src_cap_item = src_node._get_capability_item(self.item)
                            if src_cap_item is not None:
                                src_cap_item.sub_pointer = target_cap_item
                                for prop_name in src_cap_item.properties.keys():
                                    src_cap_item.properties[prop_name].sub_pointer = target_cap_item.properties[prop_name]
                    else:
                        logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability named '+ self.value[1]+ ' in node '+ self.value[0])
                else:
                    logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no node named '+ self.value[0])
            elif self.property == SUB_CAP_ID:
                # Capability id mapped to an output, a node, a capability
                # property, or a node property.
                if self.value[0] == SUB_OUTPUT:
                    if hasattr(dst_template, 'outputs') and self.value[1] in dst_template.outputs:
                        target_node = dst_template.outputs[self.value[1]]
                        if src_node is not None:
                            src_cap_item = src_node._get_capability_item(self.item)
                            if src_cap_item is not None:
                                src_cap_item.sub_pointer = target_node
                elif self.value[0] in dst_template.node_dict:
                    target_node = dst_template.node_dict[self.value[0]]
                    if len(self.value) < 2:
                        target_item = target_node
                    # NOTE(review): the capability branch below reads
                    # self.value[2]; a 2-element value naming a capability
                    # would raise IndexError -- confirm expected rule shape.
                    elif len(self.value) > 1 and self.value[1] in target_node.capabilities :
                        target_item = target_node._get_capability_property(self.value[1], self.value[2])
                    elif self.value[1] in target_node.properties:
                        target_item = target_node._get_property_item(self.value[1])
                    else:
                        target_item = None
                        logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability/property named '+ self.value[1]+ ' in node '+ self.value[0])

                    if target_item is not None and src_node is not None:
                        src_cap_item = src_node._get_capability_item(self.item)
                        if src_cap_item is not None:
                            src_cap_item.sub_pointer = target_item
            else:
                # Named capability property mapped to an input, a capability
                # property, or a node property of the inner template.
                if self.value[0] == SUB_INPUT:
                    if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs:
                        if src_node is not None:
                            src_cap_prop_item = src_node._get_capability_property(self.item, self.property)
                            src_cap_prop_item.sub_pointer = dst_template.inputs[self.value[1]]
                            if src_cap_prop_item.required is True or src_cap_prop_item.filled is True:
                                dst_template.inputs[self.value[1]].used = True
                    else:
                        logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no input named '+ self.value[1])
                elif self.value[0] in dst_template.node_dict:
                    target_node = dst_template.node_dict[self.value[0]]

                    if self.value[1] in target_node.capabilities:
                        target_cap_property = target_node._get_capability_property(self.value[1], self.value[2])
                        if target_cap_property is not None:
                            if src_node is not None:
                                src_cap_prop_item = src_node._get_capability_property(self.item, self.property)
                                if src_cap_prop_item is not None:
                                    src_cap_prop_item.sub_pointer = target_cap_property
                        else:
                            logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no property named '+ self.value[2]+ ' in capability '+ self.value[0]+ '->'+ self.value[1])
                    elif self.value[1] in target_node.properties:
                        target_prop_item = target_node._get_property_item(self.value[1])
                        if src_node is not None:
                            src_cap_prop_item = src_node._get_capability_property(self.item, self.property)
                            if src_cap_prop_item is not None:
                                src_cap_prop_item.sub_pointer = target_prop_item
                    else:
                        logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability/property named '+ self.value[1]+ ' in node '+ self.value[0])
                else:
                    logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no node named '+ self.value[0])
        
        elif self.type == SUB_REQUIREMENT:
            if self.property is None:
                # Whole-requirement mapping onto a requirement of an inner
                # node.
                if self.value[0] in dst_template.node_dict:
                    target_node = dst_template.node_dict[self.value[0]]
                    target_req_item = target_node._get_requirement_item_first(self.value[1])
                    if target_req_item is not None:
                        if src_node is not None:
                            src_req_item = src_node._get_requirement_item_first(self.item)
                            if src_req_item is not None:
                                src_req_item.sub_pointer = target_req_item
                    else:
                        logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no requirement named '+ self.value[1]+ ' in node '+ self.value[0])
                else:
                    logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no node named '+ self.value[0])
            elif self.property == SUB_REQ_ID:
                # Requirement id mapped to an input or to a node property.
                if self.value[0] == SUB_INPUT:
                    if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs:
                        if src_node is not None:
                            src_req_item = src_node._get_requirement_item_first(self.item)
                            if src_req_item is not None:
                                src_req_item.sub_pointer = dst_template.inputs[self.value[1]]
                                dst_template.inputs[self.value[1]].used = True
                    else:
                        logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no input named '+ self.value[1])

                elif self.value[0] in dst_template.node_dict:
                    target_node = dst_template.node_dict[self.value[0]]
                    target_prop_item = target_node._get_property_item(self.value[1])
                    if target_prop_item is not None:
                        if src_node is not None:
                            src_req_item = src_node._get_requirement_item_first(self.item)
                            if src_req_item is not None:
                                src_req_item.sub_pointer = target_prop_item
                    else:
                        logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no property named '+ self.value[1]+ ' in node '+ self.value[0])
                else:
                    logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no node named '+ self.value[0])
            else:
                logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': wrong property name '+ self.property)
                
        else:
            logging.warning('Incorrect mapping rule type: '+ self.type)
                
        
diff --git a/app/toscalib/templates/substitution_rule.pyc b/app/toscalib/templates/substitution_rule.pyc
new file mode 100644
index 0000000..0d4ad19
--- /dev/null
+++ b/app/toscalib/templates/substitution_rule.pyc
Binary files differ
diff --git a/app/toscalib/templates/topology.py b/app/toscalib/templates/topology.py
new file mode 100644
index 0000000..f8c00ed
--- /dev/null
+++ b/app/toscalib/templates/topology.py
@@ -0,0 +1,419 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+from toscalib.templates.heat_constants import *
+from toscalib.templates.substitution_rule import SubstitutionRule
+from toscalib.types.property import PropertyDefinition
+from toscalib.templates.property_item import PropertyItem
+from toscalib.templates.heat_constants import HOT_VERSION_NUM
+import copy, logging
+
+class ToscaTopology(object):
+    def __init__(self, name, metadata_section=None, content_section=None):
+        self.name = name
+        self.metadata = metadata_section
+        self.raw_content = content_section
+        self.db  = None
+
+        self.node_dict = {}
+        self.inputs = {}
+        self.aux_inputs = {}
+        self.outputs = {}
+        self.sub_rules = []
+
+        self.node_index = 0
+        self.temp_index = 0
+        
+        self.extra_imports = []
+        
+    def _parse_content(self, db):
+        if self.db is not None:
+            return
+        
+        self.db = db
+        
+        if self.raw_content is None:
+            return
+        
+#        if self.raw_content.has_key(TOPO_INPUTS):
+        if TOPO_INPUTS in self.raw_content:
+            self._parse_input(db, self.raw_content[TOPO_INPUTS])
+        
+#        if self.raw_content.has_key(TOPO_NODE_TEMPLATES):
+        if TOPO_NODE_TEMPLATES in self.raw_content:
+            self._parse_node_template(db, self.raw_content[TOPO_NODE_TEMPLATES])
+        else:
+            logging.warning( 'Topology template: ' + self.name+ ' has NO node templates!')
+        
+#        if self.raw_content.has_key(TOPO_OUTPUTS):
+        if TOPO_OUTPUTS in self.raw_content:
+            self._parse_output(db, self.raw_content[TOPO_OUTPUTS])
+            
+#        if self.raw_content.has_key(TOPO_SUBSTITUION_MAPPINGS):
+        if TOPO_SUBSTITUION_MAPPINGS in self.raw_content:
+            self._parse_substitution(db, self.raw_content[TOPO_SUBSTITUION_MAPPINGS])
+        else:
+            self.sub_type = None        
+        self._verify_substitution() 
+        self._update_function_pointer()   
+        
+    def _parse_substitution(self, db, sub_sec):
+#        if sub_sec.has_key(SUB_NODE_TYPE):
+        if SUB_NODE_TYPE in sub_sec:
+            self.sub_type = sub_sec[SUB_NODE_TYPE]
+#            if db.NODE_TYPES.has_key(self.sub_type):
+            if self.sub_type in db.NODE_TYPES:
+                db.NODE_TYPES[self.sub_type].mapping_template = self
+        else:
+            logging.warning( 'substitution mapping section does not have node_type defined')
+            return  
+                 
+#        if sub_sec.has_key(SUB_PROPERTY):
+#            sub_prop = sub_sec[SUB_PROPERTY]
+#            for sub_prop_name in sub_prop.keys():
+#                self.sub_rules.append(SubstitutionRule(SUB_PROPERTY, None, sub_prop_name, sub_prop[sub_prop_name]))
+ 
+        for sub_prop in db.NODE_TYPES[self.sub_type].properties.keys():
+#            if self.inputs.has_key(sub_prop):
+            if sub_prop in self.inputs:
+                self.sub_rules.append(SubstitutionRule(SUB_PROPERTY, None, sub_prop, [SUB_INPUT, sub_prop]))
+
+        for sub_attr in db.NODE_TYPES[self.sub_type].attributes.keys():
+#            if self.outputs.has_key(sub_attr):
+            if sub_attr in self.outputs:
+                self.sub_rules.append(SubstitutionRule(SUB_ATTRIBUTE, None, sub_attr, [SUB_OUTPUT, sub_attr]))
+                   
+#        if sub_sec.has_key(SUB_CAPABILITY):
+        if SUB_CAPABILITY in sub_sec:
+            sub_cap = sub_sec[SUB_CAPABILITY]
+            for sub_cap_name in sub_cap.keys():
+                sub_cap_item = sub_cap[sub_cap_name]
+                #standard capability mapping rule
+                if type(sub_cap_item) is not dict: 
+                    self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, None, sub_cap_item))
+                #self-proposed capability mapping rules 
+                else: 
+#                    if sub_cap_item.has_key(SUB_CAP_ID):
+                    if SUB_CAP_ID in sub_cap_item:
+                        self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, SUB_CAP_ID, sub_cap_item[SUB_CAP_ID]))
+#                    if sub_cap_item.has_key(SUB_CAP_PROPERTY):
+                    if SUB_CAP_PROPERTY in sub_cap_item:
+                        sub_cap_item_prop = sub_cap_item[SUB_CAP_PROPERTY] 
+                        for sub_cap_item_prop_name in sub_cap_item_prop.keys():
+                            self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, sub_cap_item_prop_name, sub_cap_item_prop[sub_cap_item_prop_name]))
+        
+#        if sub_sec.has_key(SUB_REQUIREMENT):
+        if SUB_REQUIREMENT in sub_sec:
+            sub_req = sub_sec[SUB_REQUIREMENT]
+            for sub_req_name in sub_req.keys():
+                sub_req_item = sub_req[sub_req_name]
+            #standard requirement mapping rule
+                if type(sub_req_item) is not dict: 
+                    self.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, sub_req_name, None, sub_req_item))
+            #self-proposed requirement mapping rules 
+                else: 
+#                    if sub_req_item.has_key(SUB_REQ_ID):   
+                    if SUB_REQ_ID in sub_req_item:   
+                        self.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, sub_req_name, SUB_REQ_ID, sub_req_item[SUB_REQ_ID]))
+                    else:
+                        logging.warning( 'Incorrect substitution mapping rules')
+    
+    def _verify_substitution(self, target_node=None):
+        for rule in self.sub_rules:
+            rule._update_pointer(target_node, self)       
+    
+    def _parse_input(self, db, input_sec):
+        for input_name in input_sec.keys():
+            input_def = PropertyDefinition(input_name, input_sec[input_name])
+            input_def._parse_content(db)
+            self.inputs[input_name] = PropertyItem(input_def)
+
+    def _parse_output(self, db, output_sec):
+        for output_name in output_sec.keys():
+            output_def = PropertyDefinition(output_name)
+#            output_def._parse_content(db)
+            self.outputs[output_name] = PropertyItem(output_def)
+#            if output_sec[output_name].has_key('value'):
+            if 'value' in output_sec[output_name]:
+                self.outputs[output_name]._assign(output_sec[output_name]['value'])
+              
+    def _parse_node_template(self, db, template_sec):
+        self.node_dict = {}
+        for name in template_sec.keys():
+#            if template_sec[name].has_key(NOD_TYPE):
+            if NOD_TYPE in template_sec[name]:
+                node_type_name = template_sec[name][NOD_TYPE]
+            else:
+                logging.warning( 'Invalid template: node section has no type')
+                continue
+            
+#            if db.NODE_TYPES.has_key(node_type_name) is False:
+            if node_type_name not in db.NODE_TYPES:
+                logging.warning( 'Invalid template: node type: '+ str(node_type_name)+ ' not defined or imported')
+                continue
+                
+            from toscalib.templates.node import Node
+            new_node = Node(self, name, db.NODE_TYPES[node_type_name])
+            new_node._parse_pre_defined_content(template_sec[name])
+
+            self._add_node(new_node)
+        
+        for node in iter(self.node_dict.values()):
+            node._verify_requirements(self.node_dict)
+            node._verify_functions()
+
+            
+        self.edge_list = self._create_edges()
+        
+    def _create_edges(self):
+        edges = []
+        for node in iter(self.node_dict.values()):
+            for req in node.requirements:
+                if req.filled is True:
+                    new_edge = (node, self.node_dict[req.str_value])
+                    logging.debug( 'edge created: '+ new_edge[0].name+ ' --> '+ new_edge[1].name)
+                    edges.append(new_edge)
+        return edges
+
+    def _update_function_pointer(self):   
+        for node in iter(self.node_dict.values()):
+            #node._verify_requirements(self.node_dict)
+            node._verify_functions()
+        for output in iter(self.outputs.values()):
+            if output.value is not None:
+                output.value._update_function_reference(self)
+
+    def _update_translation_function_pointer(self):
+        for node in iter(self.node_dict.values()):
+            if node.tran_template is not None:
+                node.tran_template._update_function_pointer()
+    
+    def _update_prefix(self, prefix):
+        exist_key_list = list(self.node_dict.keys())
+        for node_key in exist_key_list:
+            if node_key == 'NO_PREFIX':
+                new_node_key = prefix[:len(prefix)-1]
+            else:
+                new_node_key = prefix + node_key
+            node = self.node_dict.pop(node_key)
+            node._update_prefix(prefix)
+            self.node_dict[new_node_key] = node
+            
+        exist_key_list = list(self.inputs.keys())
+        for item_key in exist_key_list:
+            new_item_key = prefix + item_key
+            item = self.inputs.pop(item_key)
+            item._update_prefix(prefix)
+            self.inputs[new_item_key] = item
+            
+        exist_key_list = list(self.outputs.keys())
+        for item_key in exist_key_list:
+            ###don't update output name prefix here
+            ###temporary solution for cloudify generation
+            ###but still need to update pointer for the value
+            new_item_key = prefix + item_key
+            #item = self.outputs.pop(item_key)
+            #item._update_prefix(prefix)
+            item = self.outputs[item_key]
+            item.value._update_prefix(prefix)   
+            item.value._update_function_reference(self)
+            #self.outputs[new_item_key] = item
+        
+        #self._update_function_pointer()
+
+            
+    def _update_used_tag_for_translation(self):
+        for item in iter(self.inputs.values()):
+            item.used = False
+        for node_item in iter(self.node_dict.values()):
+            for prop_item in iter(node_item.properties.values()):
+                prop_item.used = False
+            
+    def _add_node(self, new_node):
+        if new_node is None:
+            return
+        self.node_dict[new_node.name] = new_node
+        
+    def _propagate_substitution_value(self):
+        converge = False
+        while converge is not True:
+            converge = True
+            for node_item in iter(self.node_dict.values()):
+                converge = converge and node_item._propagate_substitution_value() 
+            
+     
+    def _auto_generate_aux_inputs(self):
+        for node_name in self.node_dict.keys():
+            node = self.node_dict[node_name]
+            for prop_name in  node.properties.keys():
+                prop_item = node.properties[prop_name]
+                if prop_item.value is None or prop_item.filled is False:
+                    new_input_name = node_name + '_' + prop_name
+#                    while self.inputs.has_key(new_input_name) or self.aux_inputs.has_key(new_input_name):
+                    while new_input_name in self.inputs or new_input_name in self.aux_inputs:
+                        new_input_name = new_input_name + '_'
+                    def_item = copy.deepcopy(prop_item.definition)
+                    def_item.name = new_input_name
+                    self.aux_inputs[new_input_name] = PropertyItem(def_item)
+                    fun_item = {}
+                    fun_item['get_input'] = new_input_name
+                    prop_item._assign(fun_item)
+                    prop_item.value._update_function_reference(self)
+                
+    def _prepare_node_types(self):
+        for node_type in iter(self.db.NODE_TYPES.values()):
+            node_type.used = False
+            
+        for node in iter(self.node_dict.values()):
+            node_type = node.type_obj
+            while node_type is not None:
+                self.db.NODE_TYPES[node_type.name].used = True
+                node_type = node_type.parent
+    
+    def _prepare_node_types_output(self, tags=''):
+        self._prepare_node_types()
+        node_type = {}
+        if 'noexpand' not in tags:
+            for node in iter(self.node_dict.values()):
+                if node.tran_template is not None: 
+                    node_type.update(node.tran_template._prepare_node_types_output(tags))
+        if len(node_type) < 1:
+            for ntype in iter(self.db.NODE_TYPES.values()):
+                if ntype.used is False:
+                    continue
+                type_content = copy.deepcopy(ntype.raw_content)
+                if 'cloudify' in tags:
+                    if ntype.name == 'cloudify.nodes.Root':
+                        continue
+                    
+                    type_content.pop('capabilities', None)
+                    type_content.pop('requirements', None)
+                    type_content.pop('attributes', None)
+                else: 
+                    if ntype.name == 'tosca.nodes.Root':
+                        continue
+
+                node_type[ntype.name] = type_content
+                
+        return node_type
+            
+    def _prepare_extra_imports(self, tags):
+        if 'cloudify' in tags:
+            ret_val = []
+            for item in self.extra_imports:
+                ret_val += list(item.values())
+            return ret_val
+        else:
+            return self.extra_imports
+           
+    def _prepare_output(self, tags=''):
+
+        output ={} 
+        import_sec = []
+        
+        if 'cloudify' in tags:
+            output[YMO_VERSION]= 'cloudify_dsl_1_3'
+            for item in self.extra_imports:
+                import_sec += list(item.values()) 
+            #import_sec.append('http://www.getcloudify.org/spec/cloudify/3.4/types.yaml')
+        else:
+            import_sec += self.extra_imports 
+            output[YMO_VERSION]= 'tosca_simple_yaml_1_0_0'
+        
+        if 'import_schema' in tags: 
+            output[YMO_IMPORT] = [{'schema': 'schema.yaml'}]
+            
+        if self.metadata is not None and 'java_sim' not in tags:
+            output[YMO_METADATA] = self.metadata
+        topo_sec = {}
+        node_temp = {}
+        for node in iter(self.node_dict.values()):
+            node_temp.update(node._prepare_output(tags))
+            import_sec += node._prepare_extra_imports(tags)
+            
+        if 'part' in tags: 
+            return node_temp
+        
+        if len(node_temp.keys())> 0:
+            topo_sec[YMO_TOPO_NODE_TEMPLATES] = node_temp
+       
+        input_sec = {}
+        for name in self.inputs.keys():
+            input_sec.update(self.inputs[name]._prepare_input_type_output(tags))
+        for name in self.aux_inputs.keys():
+            input_sec.update(self.aux_inputs[name]._prepare_input_type_output(tags))
+        if (len(input_sec.keys())> 0) and 'java_sim' not in tags:
+            topo_sec[YMO_TOPO_INPUTS] = input_sec
+        output_sec = {}
+        for name in self.outputs.keys():
+            output_sec.update(self.outputs[name]._prepare_output_type_output())
+        if (len(output_sec.keys())> 0) and 'java_sim' not in tags:
+            topo_sec[YMO_TOPO_OUTPUTS] = output_sec
+            
+            
+        if 'w_sub' in tags and self.sub_type is not None:
+            sub_sec = {}
+            sub_sec[YMO_SUB_NODE_TYPE] = self.sub_type
+            sub_cap = {}
+            sub_req = {}
+            for sub_rule in self.sub_rules:
+                if sub_rule.type is SUB_CAPABILITY:
+                    sub_cap[sub_rule.item] = sub_rule.value
+                if sub_rule.type is SUB_REQUIREMENT:
+                    sub_req[sub_rule.item] = sub_rule.value
+            sub_sec[YMO_SUB_CAPABILITY] = sub_cap
+            sub_sec[YMO_SUB_REQUIREMENT] = sub_req
+            
+            topo_sec[YMO_TOPO_SUBSTITUION_MAPPINGS] = sub_sec 
+    
+        if 'cloudify' in tags:
+            output.update(topo_sec)
+        else:
+            output[YMO_TOPOLOGY] = topo_sec
+        
+        if 'nodetype' in tags and 'java_sim' not in tags:
+            output[YMO_NODE_TYPE] = self._prepare_node_types_output(tags)
+        
+        if len(import_sec) > 0:
+            output[YMO_IMPORT] = import_sec
+
+        
+        return output
+        
+        
+    def _prepare_heat_output(self, parameters_type = {}, parameters_val = {}, stripped = False):
+        output = {}
+        env_output = {}
+        output[YMO_HOT_VERSION] = HOT_VERSION_NUM  
+
+        for input_item in iter(self.inputs.values()):
+            out1, out2 = input_item._prepare_heat_output()
+            parameters_type.update(out1)
+            parameters_val.update(out2)
+        resources = {}
+        for node in iter(self.node_dict.values()):
+            resources.udpate(node._prepare_heat_output(parameters_type, parameters_val))
+            
+        output[YMO_HOT_PARAMETERS] = parameters_type
+        output[YMO_HOT_RESOURCES]  = resources
+        env_output[YMO_HOT_PARAMETERS] = parameters_val
+        
+        if stripped is True: 
+            return resources
+        else:
+            return output, env_output
+        
+        
+    def toJson(self):
+        ret_json = {}
+        tmp_json = {}
+        for node in iter(self.node_dict.values()):
+            tmp_json[node.name] = node.toJson()
+        ret_json['nodes'] = tmp_json
+        ret_json['relations'] = {}
+        ret_json['inputs'] = {}
+        ret_json['outputs'] = {}
+        return ret_json
+          
\ No newline at end of file
diff --git a/app/toscalib/templates/topology.pyc b/app/toscalib/templates/topology.pyc
new file mode 100644
index 0000000..3b03399
--- /dev/null
+++ b/app/toscalib/templates/topology.pyc
Binary files differ
diff --git a/app/toscalib/templates/value.py b/app/toscalib/templates/value.py
new file mode 100644
index 0000000..fee0ceb
--- /dev/null
+++ b/app/toscalib/templates/value.py
@@ -0,0 +1,266 @@
+from toscalib.types.data import TYP_LIST, TYP_MAP, TYP_STR, DataType
+from toscalib.templates.property_item import PropertyItem
+import copy, logging
+
+# Supported TOSCA intrinsic-function keywords, also bound to individual names
+FUNCTIONS = (GET_INPUT, GET_PROPERTY, GET_ATTRIBUTE, GET_OPERATION, GET_NODES, GET_ARTIFACT, CONCAT) = \
+            ('get_input', 'get_property', 'get_attribute', 'get_operation_output', 'get_nodes_of_type', 'get_artifact', 'concat')
+            
+# Resolution states returned by _get_value: a usable concrete value, an
+# unresolved function (serialized form returned), or nothing
+VALUE_STATE = (VALID_VALUE, FUNCTION, NULL) = \
+            (1, 2, 3)
+
+def _is_function(value):
+    if type(value) is not dict:
+        return None
+    if len(value.keys()) != 1:
+        return None
+    key = list(value.keys())[0]
+    if key not in FUNCTIONS:
+        return None
+    
+    if key == GET_INPUT:
+        out_value = FunctionValue(key)
+        out_value.target_property = value[key]
+        return out_value
+    elif key == CONCAT:
+        out_value = FunctionValue(key)
+        value_list = value[key]
+        if type(value_list) is not list:
+            return None
+        out_value.extra_data = []
+        for value_item in value_list:
+            out_value.extra_data.append(Value(DataType(TYP_STR), value_item))
+        return out_value
+    else:
+        out_value = FunctionValue(key)
+        value_list = value[key]
+        if type(value_list) is not list:
+            return None        
+        out_value.extra_data = value_list
+                
+        return out_value
+    
+        
+class FunctionValue(object):
+    def __init__(self, func_type):
+        self.type = func_type
+        self.target_property = None
+        self.extra_data = []
+        self.value_from_node = None
+        self.value_from_item = None
+        self.result = None
+        
+    def _update_prefix(self, prefix):
+        if self.type == GET_INPUT:
+            self.target_property = prefix + self.target_property
+        elif (self.type == GET_PROPERTY or self.type == GET_ATTRIBUTE):
+            if self.extra_data is not None and len(self.extra_data) > 1 and self.extra_data[0] != 'SELF':
+                if self.extra_data[0] == 'NO_PREFIX':
+                    self.extra_data[0] = prefix[:len(prefix)-1]
+                else:
+                    self.extra_data[0] = prefix + self.extra_data[0]
+        elif self.type == CONCAT:
+            for item in self.extra_data:
+                if item.function is not None: 
+                    item._update_prefix(prefix)
+                
+    def _update_function_reference(self, temp, self_node = None, self_item = None):
+        if self.type == GET_INPUT:
+#            if temp.inputs.has_key(self.target_property):
+            if self.target_property in temp.inputs:
+                self.value_from_item = temp.inputs[self.target_property]
+                return
+#            elif temp.aux_inputs.has_key(self.target_property):
+            elif self.target_property in temp.aux_inputs:
+                self.value_from_item = temp.aux_inputs[self.target_property]
+                return
+            else: 
+                logging.debug( 'get_input function points to a non-existent input, autofill'+ self.target_property)
+                def_item = copy.deepcopy(self_item.definition)
+                def_item.name = self.target_property
+                temp.inputs[self.target_property] = PropertyItem(def_item)
+                self.value_from_item = temp.inputs[self.target_property]
+                return 
+        elif self.type == GET_PROPERTY:
+            if self.extra_data is None or len(self.extra_data) < 2:
+                logging.warning('Error, get_property has not enough parameters '+ self.extra_data)
+                return 
+#            if self.extra_data[0] != 'SELF' and temp.node_dict.has_key(self.extra_data[0]) is False:
+            if self.extra_data[0] != 'SELF' and self.extra_data[0] not in temp.node_dict:
+                logging.warning( 'Error, get_property from unrecognized node '+ self.extra_data[0])
+                return 
+            
+            if self.extra_data[0] == 'SELF':
+                node_item = self_node
+            else:
+                node_item = temp.node_dict[self.extra_data[0]]
+            self.value_from_node = node_item
+            
+            if len(self.extra_data) == 2:
+                self.value_from_item = node_item._get_property_item(self.extra_data[1])
+                return
+            elif len(self.extra_data) == 3: 
+                self.value_from_item = node_item._get_capability_property(self.extra_data[1], self.extra_data[2])
+                if self.value_from_item is not None:
+                    return
+                req_item = node_item._get_requirement_item_first(self.extra_data[1])
+                if req_item is None:
+                    return
+                new_node_item = req_item.value
+                if new_node_item is None:
+                    self.value_from_node = None
+                    return
+                self.value_from_node = new_node_item
+#                if req_item.cap_match.properties.has_key(self.extra_data[2]):
+                if self.extra_data[2] in req_item.cap_match.properties:
+                    self.value_from_item = req_item.cap_match.properties[self.extra_data[2]]
+                else:
+                    self.value_from_item = new_node_item._get_property_item(self.extra_data[2])
+                 
+            else:
+                logging.warning( 'Too many parameters for get_property function '+ self.extra_data)
+        elif self.type == GET_ATTRIBUTE:
+            if self.extra_data is None or len(self.extra_data) < 2:
+                logging.error( 'Error, get_attribute has not enough parameters '+ self.extra_data)
+                return 
+#            if self.extra_data[0] != 'SELF' and temp.node_dict.has_key(self.extra_data[0]) is False:
+            if self.extra_data[0] != 'SELF' and self.extra_data[0]  not in temp.node_dict:
+                logging.error( 'Error, get_attribute from unrecognized node '+ self.extra_data[0])
+                return 
+            
+            if self.extra_data[0] == 'SELF':
+                node_item = self_node
+            else:
+                node_item = temp.node_dict[self.extra_data[0]]
+            
+            self.value_from_node = node_item            
+            
+            if len(self.extra_data) > 3:
+                logging.warning( 'Too many parameters for get_attribute function '+ self.extra_data)
+                return
+            if self.extra_data[1] == 'id':
+                self.value_from_item = node_item.id
+            else:
+                self.value_from_item = node_item._get_attribute_item(self.extra_data[1])
+            
+            if self.value_from_item is not None:
+                return
+            req_item = node_item._get_requirement_item_first(self.extra_data[1])
+            if req_item is None:
+                return
+            new_node_item = req_item.value
+            if new_node_item is None:
+                self.value_from_node = None
+                return
+            self.value_from_node = new_node_item
+            self.value_from_item = new_node_item._get_attribute_item(self.extra_data[2]) 
+            return
+        
+        elif self.type == CONCAT:
+            for item in self.extra_data:
+                if item.function is not None: 
+                    item._update_function_reference(temp, self_node)
+        else:
+            logging.warning( 'Function '+ self.type+ ' is not supported')
+            return
+        
+    def _calculate_function_result(self, tags= '' ):
+        if 'func' in tags:
+            return self._get_function_representation(tags), FUNCTION
+        
+        if self.type == CONCAT:
+            function_ret = VALID_VALUE
+            function_str = ""
+            for item in self.extra_data:
+                item_str, item_value = item._get_value(tags)
+                if item_value is FUNCTION:
+                    function_ret = FUNCTION
+                    break
+                elif item_str is not None:
+                    function_str = function_str + item_str
+            if function_ret == FUNCTION:
+                return self._get_function_representation(tags), FUNCTION
+            else:
+                return function_str, function_ret
+        
+        if 'w_default' in tags and self.type == GET_INPUT and self.value_from_item is not None and hasattr(self.value_from_item.definition, 'default') is True and self.value_from_item.definition.default is not None:
+            return self.value_from_item.definition.default, VALID_VALUE
+            
+        if self.value_from_item is None or self.value_from_item.value is None or self.value_from_item.value.function == self:
+            return self._get_function_representation(tags), FUNCTION
+        else:
+            return self.value_from_item.value._get_value(tags)
+        
+    def _get_value(self, tags = ''):
+        return self._calculate_function_result(tags)
+        
+    def _get_function_representation(self, tags=''):
+        if self.type == GET_INPUT:
+            out_str = {}
+            out_str[self.type]= self.target_property
+        elif self.type == GET_PROPERTY:
+            out_str = {}
+            if self.value_from_node is  None or 'rawfunc' in tags:
+                out_val = copy.deepcopy(self.extra_data)
+            else:
+                out_val = []
+                out_val.append(self.value_from_node.name)
+                out_val.append(self.extra_data[len(self.extra_data)-1])
+               
+            out_str[self.type] = out_val
+        elif self.type == GET_ATTRIBUTE:
+            out_str = {}
+            if self.value_from_node is None or 'rawfunc' in tags:
+                out_val = copy.deepcopy(self.extra_data)              
+            else:
+                out_val = []
+                out_val.append(self.value_from_node.name)
+                out_val.append(self.extra_data[len(self.extra_data)-1])
+            if self.extra_data[1] == 'id' and 'heat' in tags:
+                out_str['get_id'] = out_val[0]
+            else:
+                out_str[self.type] = out_val
+        elif self.type == CONCAT:
+            out_str = {}
+            out_list = []
+            for item in self.extra_data:
+                item_str, item_value = item._get_value(tags)
+                out_list.append(item_str)
+            out_str[self.type] = out_list
+        else:
+            out_str = {}
+            out_str[self.type]=  copy.deepcopy(self.extra_data)
+        return out_str
+    
+    def _get_function_result(self):
+        return self.result
+        
+class Value(object):
+    def __init__(self, prop_type, value):
+        self.type = prop_type.name
+        self.type_obj = copy.deepcopy(prop_type)
+        self.raw_value = value
+        self.value = None
+        self.function = _is_function(value)
+        
+        if self.function is None:
+            self.value = self.type_obj._format_value(value)
+    
+    def _update_function_reference(self, temp, self_node = None, self_item = None):
+        if self.value is not None:
+            self.type_obj._update_function_reference(temp, self.value, self_node, self_item)
+        if self.function is not None:
+            self.function._update_function_reference(temp, self_node, self_item)
+            
+    def _update_prefix(self, prefix):
+        if self.value is not None:
+            self.type_obj._update_prefix(prefix, self.value)
+        if self.function is not None: 
+            self.function._update_prefix(prefix)
+    
+    def _get_value(self, tags = ''):
+        if self.function is not None:
+            return self.function._get_value(tags)
+        if self.value is not None:
+            return self.type_obj._get_value(self.value, tags)
+        
+        
\ No newline at end of file
diff --git a/app/toscalib/templates/value.pyc b/app/toscalib/templates/value.pyc
new file mode 100644
index 0000000..00f27d7
--- /dev/null
+++ b/app/toscalib/templates/value.pyc
Binary files differ
diff --git a/app/toscalib/tosca_builder.py b/app/toscalib/tosca_builder.py
new file mode 100644
index 0000000..e6fb28e
--- /dev/null
+++ b/app/toscalib/tosca_builder.py
@@ -0,0 +1,1071 @@
+from toscalib.templates.database import ToscaDB
+from toscalib.utils import tosca_import, tosca_export, tosca_operate
+from toscalib.types.node import NodeType
+
+import copy
+import json
+import yaml
+import uuid
+import logging
+from toscalib.types.property import PropertyDefinition
+from toscalib.types.capability import CapabilityDefinition
+from toscalib.types.requirement import RequirementDefinition
+from toscalib.types.data import DataType, TYP_INT, TYP_STR, TYP_ANY, TYP_MAP, TYP_FLT, TYP_LIST
+from toscalib.templates import topology
+from toscalib.templates.topology import ToscaTopology
+from toscalib.tosca_workbook import DEFAULT_TEMPLATE_NAME
+from distutils.ccompiler import new_compiler
+from toscalib.templates.property_item import PropertyItem
+from toscalib.templates.substitution_rule import SubstitutionRule
+from toscalib.templates.constant import *
+from array import array
+
+
+
class SpecImporter(object):
    """Parses a DCAE component specification (JSON) into flat collections
    that ToscaBuilder consumes.

    Attributes:
        name, type, image: component identity taken from the spec.
        streams_subscribes, streams_publishes: raw stream descriptors.
        service_calls, service_provides: raw service descriptors.
        parameters: flat list of parameter dicts, each stamped with a 'tag'
            recording which spec section it came from.
        aux_para: auxiliary (deployment) parameters merged from spec/aux files.
        policy_para: policy-editable parameters grouped by policy group name.
    """

    def __init__(self):
        self.name = None
        self.type = None
        self.image = None

        self.streams_subscribes = []
        self.streams_publishes = []
        self.service_calls = []
        self.service_provides = []

        self.parameters = []
        self.aux_para = {}
        self.policy_para = {}

    def _add_parameters(self, para_array, tag):
        """Append each dict entry of *para_array* to self.parameters, stamped
        with *tag*; policy-editable entries are also indexed by policy group."""
        for entry in para_array:
            if not isinstance(entry, dict):
                continue
            entry['tag'] = tag
            self.parameters.append(entry)
            if entry.get('policy_editable') is True:
                policy_group = entry.get('policy_group', 'default_group')
                self.policy_para.setdefault(policy_group, []).append(entry)

    def _add_string_para(self, para_name, para_value, tag):
        """Append a synthetic string-typed parameter entry."""
        self.parameters.append({
            'name': para_name,
            'value': para_value,
            'type': 'string',
            'tag': tag,
        })

    def _import(self, spec_name, aux_name = None):
        """Load the spec JSON file (and optional auxiliary JSON file) from disk."""
        with open(spec_name, 'r') as data_file:
            self._import_spec_str(json.load(data_file))

        if aux_name is None:
            return

        with open(aux_name, 'r') as data_file:
            self._import_aux_str(json.load(data_file))

    def _import_spec_str(self, data):
        """Walk the top-level sections of an already-parsed component spec.

        The 'self' section is handled first because its component_type drives
        how the 'parameters' section is interpreted below.
        """
        if 'self' in data:
            data_sec = data['self']
            if 'name' in data_sec:
                self.name = data_sec['name']
            if 'component_type' in data_sec:
                self.type = data_sec['component_type']

        for key in data.keys():
            if key == 'self':
                continue
            elif key == 'streams':
                data_sec = data[key]
                if 'subscribes' in data_sec:
                    self.streams_subscribes = data_sec['subscribes']
                if 'publishes' in data_sec:
                    self.streams_publishes = data_sec['publishes']
            elif key == 'services':
                data_sec = data[key]
                if 'calls' in data_sec:
                    self.service_calls = data_sec['calls']
                if 'provides' in data_sec:
                    self.service_provides = data_sec['provides']
            elif key == 'parameters':
                if self.type == 'docker':
                    self._add_parameters(data[key], 'docker')
                elif self.type == 'cdap':
                    data_sec = data[key]
                    if 'app_config' in data_sec:
                        self._add_parameters(data_sec['app_config'], 'app_config')
                    if 'program_preferences' in data_sec:
                        # BUG FIX: the per-program index previously never
                        # advanced, so every program's preferences were all
                        # tagged 'program_preferences_0'.
                        for index, prog_pref_entry in enumerate(data_sec['program_preferences']):
                            tag = 'program_preferences_' + str(index)
                            if 'program_type' in prog_pref_entry:
                                self._add_string_para('program_type', prog_pref_entry['program_type'], tag)
                            if 'program_id' in prog_pref_entry:
                                self._add_string_para('program_id', prog_pref_entry['program_id'], tag)
                            if 'program_pref' in prog_pref_entry:
                                self._add_parameters(prog_pref_entry['program_pref'], tag)
                    if 'app_preferences' in data_sec:
                        self._add_parameters(data_sec['app_preferences'], 'app_preferences')
            elif key == 'auxilary':
                self.aux_para.update(data[key])
            elif key == 'artifacts':
                for item in data[key]:
                    # Both a 'jar' (cdap) and a 'docker image' artifact supply
                    # the component image URI; the last one listed wins.
                    if item.get('type') in ('jar', 'docker image'):
                        self.image = item['uri']

    def _import_aux_str(self, data):
        """Merge auxiliary/deployment parameters into aux_para."""
        self.aux_para.update(data)
+        
+    
+
+class ToscaBuilder(object):
+    def __init__(self):
+        self.name = None
+        self.new_type_name = None
+        self.cloudify_type = None
+        self.imported_files = []
+        self.db = ToscaDB()
+        self.spec_import = SpecImporter()
+        self.image = None
+        self.service_component_type = None
+        self.imports = []
+        
    def clear_DB(self):
        # Drop all previously imported types/templates by replacing the
        # database with a brand-new, empty ToscaDB.
        self.db = ToscaDB()
+       
+    def import_spec(self, spec_name, aux_name = None):
+        self.spec_import._import(spec_name, aux_name)
+        self.set_image(self.spec_import.image)
+    
+    def import_spec_str(self, spec_str):
+        self.spec_import._import_spec_str(spec_str)
+        self.set_image(self.spec_import.image)
+        
+            
+    def import_schema(self, filename):
+        self.db = ToscaDB()
+        self.imported_files = []
+        self.db = tosca_import._file_import(self.imported_files, filename, self.db)
+        
+    def import_import(self, filename):
+        with open(filename) as data_file:
+            try: 
+                self.imports = yaml.load(data_file)
+            except yaml.YAMLError as exc:
+                logging.warning( 'input file can not be loaded as YAML, try JSON')
+                try:
+                    self.imports = json.load(data_file)
+                except:
+                    logging.error( 'input file can not be loaded as JSON either')
+                    exit(1)
+        if type(self.imports) is not list:
+            logging.error( 'import file must be a list')
+            exit(1)            
+        
    def set_image(self, img):
        # Record the artifact URI (docker image or jar) used in the generated model.
        self.image = img
+        
    def set_service_component_type (self, type):
        # NOTE(review): the parameter name shadows the builtin 'type'; kept
        # as-is for caller compatibility.
        self.service_component_type = type
+        
+    
+    def _using_dmaap(self):
+        for stream in self.spec_import.streams_subscribes:
+#            if stream.has_key('type') and stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+            if 'type' in stream  and stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+                return True
+        for stream in self.spec_import.streams_publishes:
+#            if stream.has_key('type') and stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+            if 'type' in stream and stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+                return True
+        return False       
+    
+    def _using_policy(self):
+        if len(self.spec_import.policy_para) > 0:
+            return True
+        else:
+            return False     
+    
+    def create_node_type(self, name = None):
+        if self.spec_import.type == "docker":
+            parent_type_name = 'tosca.dcae.nodes.dockerApp'
+            if self._using_dmaap():
+                self.cloudify_type = self.db.NODE_TYPES['dcae.nodes.DockerContainerForComponentsUsingDmaap']
+            else:
+                self.cloudify_type = self.db.NODE_TYPES['dcae.nodes.DockerContainerForComponents']
+
+        elif self.spec_import.type == 'cdap':
+            parent_type_name = 'tosca.dcae.nodes.cdapApp'
+            self.cloudify_type = self.db.NODE_TYPES['dcae.nodes.MicroService.cdap']
+            
+        if name is None:
+            self.new_type_name = parent_type_name + '.'+self.spec_import.name
+        else:
+            self.new_type_name = parent_type_name + '.' + name
+            
+        new_type = NodeType(self.new_type_name, '')
+        new_type.parent_type = parent_type_name;
+        new_type.parent = self.db.NODE_TYPES[parent_type_name]
+        
+#             if new_type.parent is not None:
+#                 new_type.properties = copy.deepcopy(new_type.parent.properties)
+#                 new_type.attributes = copy.deepcopy(new_type.parent.attributes)
+#                 new_type.capabilities = copy.deepcopy(new_type.parent.capabilities)
+#                 new_type.requirements = copy.deepcopy(new_type.parent.requ)
+#             else:
+#                 new_type.properties = {}
+#                 new_type.attributes = {}
+#                 new_type.capabilities = {}
+        
+        for para in self.spec_import.parameters:
+#             new_prop = PropertyDefinition(para['tag']+'_'+para['name'])
+            new_prop = PropertyDefinition(para['name'])
+#            if para.has_key('type'):
+            if 'type' in para:
+                para_key = 'type'
+                if para[para_key] == 'integer':
+                    new_prop.type = TYP_INT
+                elif para[para_key] == 'float':
+                    new_prop.type = TYP_FLT
+#                elif para[para_key] == 'string' or para.has_key('value') is False:
+#                elif para[para_key] == 'string' or 'value'not in para:
+                else:
+                    new_prop.type = TYP_STR
+#                 else:
+#                     if type(para['value']) is list:
+#                         new_prop.type = TYP_LIST
+#                     elif type(para['value']) is dict:
+#                         new_prop.type = TYP_MAP
+#                     else:
+#                         new_prop.type = TYP_STR
+                new_prop.type_obj = DataType(new_prop.type)
+
+            new_prop.parsed = True            
+            new_prop._create_rawcontent()
+            if 'constraints' in para:
+                new_prop.raw_content[YMO_PROP_CONSTRAINT] = para['constraints']
+                
+            new_type.properties[new_prop.name] = new_prop
+            
+        stream_subscribe_http =     {'type': 'dcae.capabilities.stream.subscribe'}
+        stream_dmaap_mr_publish =   {'capability': 'dcae.capabilities.dmmap.topic', 'relationship': 'dcae.relationships.publish_events' }
+        stream_dmaap_mr_subscribe = {'capability': 'dcae.capabilities.dmmap.topic', 'relationship': 'dcae.relationships.subscribe_to_events' }
+        stream_dmaap_dr_publish =   {'capability': 'dcae.capabilities.dmmap.feed', 'relationship': 'dcae.relationships.publish_files' }
+        stream_dmaap_dr_subscribe = {'capability': 'dcae.capabilities.dmmap.feed', 'relationship': 'dcae.relationships.subscribe_to_files' }
+        stream_publish_http =       {'capability': 'dcae.capabilities.stream.subscribe', 'relationship': 'dcae.relationships.rework_connected_to' }
+        service_provide_content =   {'type': 'dcae.capabilities.service.provide'}
+        service_call_content =      {'capability': 'dcae.capabilities.service.provide', 'relationship': 'dcae.relationships.rework_connected_to' }
+        policy_req =                {'capability': 'dcae.capabilities.policy', 'relationship': 'cloudify.relationships.depends_on'}
+        
+        index = 0
+        for stream in self.spec_import.streams_subscribes:
+#            if stream.has_key('format') is False:
+            if 'format' not in stream:
+                continue
+            if stream['type'] == 'http':
+#                 if stream.has_key('config_key'):
+#                     new_cap_name = stream['config_key']
+#                 else:
+#                    new_cap_name = "stream_subscribe_"+str(index)
+                new_cap_name = "stream_subscribe_"+str(index)
+                new_cap = CapabilityDefinition(new_cap_name, copy.deepcopy(stream_subscribe_http))
+                new_cap._parse_content(self.db)
+                new_type.capabilities[new_cap.name] = new_cap
+                self.cloudify_type.capabilities[new_cap.name] = copy.deepcopy(new_cap)
+            elif stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+#                 if stream.has_key('config_key'):
+#                     new_req_name = stream['config_key']
+#                 else:
+#                     new_req_name = "stream_subscribe_"+str(index)
+                new_req_name = "stream_subscribe_"+str(index)
+                tmp_content={}
+                if stream['type'] in ['message router', 'message_router'] :
+                    tmp_content[new_req_name] = copy.deepcopy(stream_dmaap_mr_subscribe)
+                else:
+                    tmp_content[new_req_name] = copy.deepcopy(stream_dmaap_dr_subscribe)
+                new_req = RequirementDefinition(tmp_content)
+                new_req._parse_content(self.db)
+                new_type.requirements.append(new_req)
+                self.cloudify_type.requirements.append(copy.deepcopy(new_req))
+            else:
+                continue
+
+
+#             new_prop = PropertyDefinition(new_cap_name + '_route')
+#             new_prop.type = TYP_STR
+#             new_prop.type_obj = DataType(new_prop.type)
+#             new_type.properties[new_prop.name] = new_prop
+            index += 1
+        
+        index = 0
+        for service in self.spec_import.service_provides:
+#            if service.has_key('config_key'):
+            if 'config_key' in service:
+                new_cap_name = service['config_key']
+            else:   
+                new_cap_name = "service_provide_"+str(index)
+            new_cap = CapabilityDefinition(new_cap_name, copy.deepcopy(service_provide_content))
+            new_cap._parse_content(self.db)
+            new_type.capabilities[new_cap.name] = new_cap
+            self.cloudify_type.capabilities[new_cap.name] = copy.deepcopy(new_cap)
+            
+#             new_prop = PropertyDefinition(new_cap_name + '_service_name')
+#             new_prop.type = TYP_STR
+#             new_prop.type_obj = DataType(new_prop.type)
+#             new_type.properties[new_prop.name] = new_prop
+#             new_prop = PropertyDefinition(new_cap_name + '_service_endpoint')
+#             new_prop.type = TYP_STR
+#             new_prop.type_obj = DataType(new_prop.type)
+#             new_type.properties[new_prop.name] = new_prop
+#             new_prop = PropertyDefinition(new_cap_name + '_verb')
+#             new_prop.type = TYP_STR
+#             new_prop.type_obj = DataType(new_prop.type)
+#             new_type.properties[new_prop.name] = new_prop
+            index += 1
+            
+        index = 0
+        for stream in self.spec_import.streams_publishes:
+#            if stream.has_key('format') is False:
+            if 'format' not in stream:
+                continue
+            if stream['type'] == 'http':
+#                 if stream.has_key('config_key'):
+#                     new_req_name = stream['config_key']
+#                 else:
+#                     new_req_name = "stream_publish_"+str(index)
+                new_req_name = "stream_publish_"+str(index)
+                tmp_content={}
+                tmp_content[new_req_name] = copy.deepcopy(stream_publish_http)
+                new_req = RequirementDefinition(tmp_content)
+                new_req._parse_content(self.db)
+                new_type.requirements.append(new_req)
+                self.cloudify_type.requirements.append(copy.deepcopy(new_req))
+            elif stream['type'] in ['message router', 'message_router', 'data router', 'data_router'] :
+#                 if stream.has_key('config_key'):
+#                     new_req_name = stream['config_key']
+#                 else:
+#                     new_req_name = "stream_publish_"+str(index)
+                new_req_name = "stream_publish_"+str(index)
+                tmp_content={}
+                if stream['type'] in ['message router', 'message_router'] :
+                    tmp_content[new_req_name] = copy.deepcopy(stream_dmaap_mr_publish)
+                else:
+                    tmp_content[new_req_name] = copy.deepcopy(stream_dmaap_dr_publish)
+                new_req = RequirementDefinition(tmp_content)
+                new_req._parse_content(self.db)
+                new_type.requirements.append(new_req)
+                self.cloudify_type.requirements.append(copy.deepcopy(new_req))
+            else:
+                continue
+            
+#             new_prop = PropertyDefinition(new_req_name + '_key')
+#             new_prop.type = TYP_STR
+#             new_prop.type_obj = DataType(new_prop.type)
+#             new_type.properties[new_prop.name] = new_prop
+            index += 1
+            
+        index = 0
+        for service in self.spec_import.service_calls:
+#            if service.has_key('config_key'):
+            if 'config_key' in service:
+                new_req_name = service['config_key']
+            else:   
+                new_req_name = "service_call_"+str(index)
+            tmp_content={}
+            tmp_content[new_req_name] = copy.deepcopy(service_call_content)
+            new_req = RequirementDefinition(tmp_content)
+            new_req._parse_content(self.db)
+            new_type.requirements.append(new_req)
+            self.cloudify_type.requirements.append(copy.deepcopy(new_req))
+            index += 1
+                         
+        if self._using_policy() is True:
+#             self.create_policy(name)
+            for policy_group in self.spec_import.policy_para.keys():
+                if policy_group is 'default_group':
+                    new_req_name = "policy"
+                else:
+                    new_req_name = 'policy_' + policy_group
+                tmp_content={}
+                tmp_content[new_req_name] = copy.deepcopy(policy_req)
+                new_req = RequirementDefinition(tmp_content)
+                new_req._parse_content(self.db)
+                new_type.requirements.append(new_req)
+                self.cloudify_type.requirements.append(copy.deepcopy(new_req))
+
+        new_type.parsed = True
+        new_type._create_rawcontent()
+        self.db.NODE_TYPES[self.new_type_name] = new_type
+        self.cloudify_type._create_rawcontent()
+
+
+    def _create_property(self, entry):
+        raw_content = {}
+#        if entry.has_key('type'):
+        if 'type' in entry:
+            raw_content[YMO_PROP_TYPE] = entry['type']
+            if entry['type'] == 'number':
+                raw_content[YMO_PROP_TYPE] = TYP_INT
+        else:
+            raw_content[YMO_PROP_TYPE] = TYP_STR
+#        if entry.has_key('description') and len(entry['description']) > 0:
+        if 'description' in entry and len(entry['description']) > 0:
+            raw_content[YMO_PROP_DESCRIPTION] = entry['description']
+#         if entry.has_key('value'):
+#             raw_content[YMO_PROP_DEFAULT] = entry['value']
+#        if entry.has_key('constraints'):
+        if 'constraints' in entry:
+            raw_content[YMO_PROP_CONSTRAINT] = entry['constraints']
+#        if entry.has_key('entry_schema'):
+        if 'entry_schema' in entry:
+            raw_content[YMO_PROP_ENTRY] = entry['entry_schema']
+#        if entry.has_key('policy_schema'):
+        if 'policy_schema' in entry:
+            raw_content[YMO_PROP_ENTRY] = entry['policy_schema']
+            if raw_content[YMO_PROP_TYPE] is TYP_STR:
+                raw_content[YMO_PROP_TYPE] = TYP_MAP
+
+        return raw_content
+
+    def _create_data_type(self, name, type, para_array):
+        if len(para_array) < 1:
+            return None
+        
+        new_data_type = DataType(name)
+        new_data_type.type = type
+        new_data_type.properties = {}
+        for entry in para_array:
+            prop_name = entry['name']
+            new_data_type.properties[prop_name] = PropertyDefinition(prop_name)            
+            new_data_type.properties[prop_name].raw_content = self._create_property(entry)
+        
+        new_data_type._create_rawcontent()
+        return new_data_type
+
+    def _analyze_data_types(self, para_array, data_types):
+        if type(para_array) is not list:
+            return
+        for entry in para_array:
+#            if entry.has_key('entry_schema') is True:
+            if 'entry_schema' in entry:
+                new_data_name = 'policy.data.' + entry['name']
+                ret_para_array = self._analyze_data_types(entry['entry_schema'], data_types)
+#                if entry.has_key('type'):
+                if 'type' in entry:
+                    data_types[new_data_name]=self._create_data_type(new_data_name, entry['type'], ret_para_array)
+                else:
+                    data_types[new_data_name]=self._create_data_type(new_data_name, TYP_MAP, ret_para_array)
+                entry['entry_schema'] = {'type': new_data_name}
+#            elif entry.has_key('policy_schema') is True:
+            elif 'policy_schema' in entry:
+                new_data_name = 'policy.data.' + entry['name']
+                ret_para_array = self._analyze_data_types(entry['policy_schema'], data_types)
+#                if entry.has_key('type') and entry['type'] is not TYP_STR:
+                if 'type' in entry and entry['type'] is not TYP_STR:
+                    data_types[new_data_name]=self._create_data_type(new_data_name, entry['type'], ret_para_array)
+                else:
+                    data_types[new_data_name]=self._create_data_type(new_data_name, TYP_MAP, ret_para_array)
+                entry['policy_schema'] = {'type': new_data_name}
+            else:
+                continue
+            
+        return para_array
+
+    def create_policy(self):
+        parent_type_name = 'policy.nodes.Root'
+        name = self.spec_import.name
+            
+        for policy_group in self.spec_import.policy_para.keys():
+            if policy_group is 'default_group':
+                new_type_name = 'policy.nodes.'+name
+            else:
+                new_type_name = 'policy.nodes.'+policy_group
+                
+            new_type = NodeType(new_type_name, '')
+            new_type.parent_type = parent_type_name;
+            new_type.parent = self.db.NODE_TYPES[parent_type_name]
+            
+            self._analyze_data_types(self.spec_import.policy_para[policy_group], self.db.DATA_TYPES)
+            
+            for entry in self.spec_import.policy_para[policy_group]:
+                new_prop = PropertyDefinition(entry['name'])
+                new_prop.raw_content = self._create_property(entry)
+                new_prop.parsed = True            
+                new_type.properties[new_prop.name] = new_prop
+    
+            new_type.parsed = True
+            new_type._create_rawcontent()
+            self.db.NODE_TYPES[new_type_name] = new_type
+
+    def create_model(self, name):
+        self.template = ToscaTopology(DEFAULT_TEMPLATE_NAME)
+        self.template.metadata = {'template_name': name}
+        self.template.db = self.db
+
+        node = tosca_operate._create_new_node(self.template, self.new_type_name, name)
+#         self._assign_property_value(node, 'image', self.image)
+#        self._assign_property_value(node, 'service_component_type', self.spec_import.service_component_type)
+        
+        topic_index = 0;
+        
+        for prop_name in ['location_id']:
+            fuc_val_list = ['SELF', 'composition', prop_name]
+            fuc_val = {}
+            fuc_val['get_property'] = fuc_val_list
+            self._assign_property_value(node, prop_name, fuc_val)
+            
+        for para in self.spec_import.parameters:
+#            prop_item = node._get_property_item(para['tag']+'_'+para['name'])
+            prop_item = node._get_property_item(para['name'])
+            def_item = copy.deepcopy(prop_item.definition)
+#             input_name = node.name + '_' + def_item.name
+#             def_item.name = input_name
+#            if para.has_key('value'):
+            if 'value' in para:
+#                 def_item.default = para['value']
+                prop_item._assign(para['value'])
+#            if para.has_key('sourced_at_deployment') and para['sourced_at_deployment'] is True:
+            if 'sourced_at_deployment' in para and para['sourced_at_deployment'] is True:
+                input_name = prop_item.name
+                def_item = copy.deepcopy(prop_item.definition)
+                def_item.name = input_name
+#                if para.has_key('value'):
+                if 'value' in para:
+                    def_item.default = para['value']
+                self.template.aux_inputs[input_name] = PropertyItem(def_item)
+                fun_item = {}
+                fun_item['get_input'] = input_name
+                prop_item._assign(fun_item)
+#            if para.has_key('dependency'):
+            if 'dependency' in para:
+                fun_item = {}
+                fun_item['get_property'] = ['SELF', para['dependency']]
+                prop_item._assign(fun_item)
+            
+#             self.template.aux_inputs[input_name] = PropertyItem(def_item)
+#             fun_item = {}
+#             fun_item['get_input'] = input_name
+#             prop_item._assign(fun_item)
+        
+        if 'connected_broker_dns_name' in node.properties:
+            prop_item = node._get_property_item('connected_broker_dns_name')
+            if prop_item is not None: 
+                input_name = prop_item.name
+                def_item = copy.deepcopy(prop_item.definition)
+                def_item.name = input_name
+                self.template.aux_inputs[input_name] = PropertyItem(def_item)
+                fun_item = {}
+                fun_item['get_input'] = input_name
+                prop_item._assign(fun_item)
+            
+        index = 0
+        for stream in self.spec_import.streams_subscribes:
+#            if stream.has_key('format') is False:
+            if 'format' in stream is False:
+                continue
+            if stream['type'] == 'http':
+                new_cap_name = "stream_subscribe_"+str(index)
+#                if stream.has_key('format'):
+                if 'format' in stream:
+                    new_cap = node._get_capability_property(new_cap_name, 'format')
+                    new_cap._assign(stream['format'])                    
+#                if stream.has_key('version'):
+                if 'version' in stream:
+                    new_cap = node._get_capability_property(new_cap_name, 'version')
+                    new_cap._assign(stream['version'])
+#                if stream.has_key('route'):
+                if 'route' in stream:
+                    new_cap = node._get_capability_property(new_cap_name, 'route')
+                    new_cap._assign(stream['route'])
+#                         new_prop = node._get_property_item(new_cap_name+'_route')
+#                         new_prop._assign(stream['route'])
+            elif stream['type'] in ['message router', 'message_router', 'data router', 'data_router']:
+                new_req_name = "stream_subscribe_"+str(index)
+                new_req = node._get_requirement_item_first(new_req_name)
+                if stream['type'] in ['message router', 'message_router'] :
+                    new_topic_name = 'topic'+ str(topic_index)
+                    topic_index += 1
+                    new_topic_node = tosca_operate._create_new_node(self.template, 'tosca.dcae.nodes.dmaap.topic', new_topic_name)
+                else:
+                    new_topic_name = 'feed'+ str(topic_index)
+                    topic_index += 1
+                    new_topic_node = tosca_operate._create_new_node(self.template, 'tosca.dcae.nodes.dmaap.feed', new_topic_name)
+                new_req._assign(new_topic_node)
+                for prop_item in iter(new_topic_node.properties.values()):
+                    if prop_item.name == 'topic_name':
+#                        if stream.has_key('config_key'):
+#                            prop_item._assign(stream['config_key']+'-'+str(uuid.uuid4()))
+#                            prop_item._assign(stream['config_key'])
+#                        else:
+                            prop_item._assign('')
+                    elif prop_item.name == 'feed_name':
+#                        if stream.has_key('config_key'):
+#                            prop_item._assign(stream['config_key']+'-'+str(uuid.uuid4()))
+#                            prop_item._assign(stream['config_key'])
+#                        else:
+                            prop_item._assign("")                        
+                    elif prop_item.name == 'node_name':
+                        prop_item._assign('__GET_NODE_NAME__')
+                    elif prop_item.name == 'location':
+                        fun_item = {}
+                        fun_item['get_property'] = ['SELF', 'composition', 'location_id']
+                        prop_item._assign(fun_item)
+                    elif prop_item.required == True:
+                        input_name = new_topic_name + '_' + prop_item.name
+                        def_item = copy.deepcopy(prop_item.definition)
+                        def_item.name = input_name
+                        self.template.aux_inputs[input_name] = PropertyItem(def_item)
+                        fun_item = {}
+                        fun_item['get_input'] = input_name
+                        prop_item._assign(fun_item)
+                if stream['type'] in ['message router', 'message_router'] :
+                    for cap_prop_item in iter(new_topic_node._get_capability_item('topic').properties.values()):
+                        cap_prop_item._assign({'get_property': ['SELF', cap_prop_item.name]})
+                else:
+                    for cap_prop_item in iter(new_topic_node._get_capability_item('feed').properties.values()):
+                        cap_prop_item._assign({'get_property': ['SELF', cap_prop_item.name]})
+                       
+            index += 1
+
+        index = 0
+        for service in self.spec_import.service_provides:
+            new_cap_name = "service_provide_"+str(index)
+#            if service.has_key('request'):
+            if 'request' in service:
+                service_item = service['request']
+#                if service_item.has_key('format'):
+                if 'format' in service_item:
+                    new_cap = node._get_capability_property(new_cap_name, 'request_format')
+                    new_cap._assign(service_item['format'])                    
+#                if service_item.has_key('version'):
+                if 'version' in service_item:
+                    new_cap = node._get_capability_property(new_cap_name, 'request_version')
+                    new_cap._assign(service_item['version'])
+#            if service.has_key('response'):
+            if 'response' in service:
+                service_item = service['response']
+#                if service_item.has_key('format'):
+                if 'format' in service_item:
+                    new_cap = node._get_capability_property(new_cap_name, 'response_format')
+                    new_cap._assign(service_item['format'])                    
+#                if service_item.has_key('version'):
+                if 'version' in service_item:
+                    new_cap = node._get_capability_property(new_cap_name, 'response_version')
+                    new_cap._assign(service_item['version'])                
+#            if service.has_key('service_name'):
+            if 'service_name' in service:
+                new_cap = node._get_capability_property(new_cap_name, 'service_name')
+                new_cap._assign(service['service_name'])                    
+#                     new_prop = node._get_property_item(new_cap_name+'_service_name')
+#                     new_prop._assign(service['service_name'])
+#            if service.has_key('service_endpoint'):
+            if 'service_endpoint' in service:
+                new_cap = node._get_capability_property(new_cap_name, 'service_endpoint')
+                new_cap._assign(service['service_endpoint'])                    
+#                     new_prop = node._get_property_item(new_cap_name+'_service_endpoint')
+#                     new_prop._assign(service['service_endpoint'])
+#            if service.has_key('verb'):
+            if 'verb' in service:
+                new_cap = node._get_capability_property(new_cap_name, 'verb')
+                new_cap._assign(service['verb'])                    
+#                     new_prop = node._get_property_item(new_cap_name+'_verb')
+#                     new_prop._assign(service['verb'])
+            index += 1
+
+        
+        index = 0
+        for stream in self.spec_import.streams_publishes:
+#            if stream.has_key('format') is False:
+            if 'format' not in stream:
+                continue
+            if stream['type'] == 'http':
+                new_req_name = "stream_publish_"+str(index)
+                new_req = node._get_requirement_item_first(new_req_name)
+                items = []
+#                if stream.has_key('format'):
+                if 'format' in stream:
+                    items.append({'format':[{'equal': stream['format']}]})
+#                if stream.has_key('version'):
+                if 'version' in stream:
+                    items.append({'version':[{'equal': stream['version']}]})
+                new_req.filter = {'capabilities': [{'dcae.capabilities.stream.subscribe': {'properties': items}}]}
+            elif stream['type'] in ['message router', 'message_router', 'data router', 'data_router']:
+                new_req_name = "stream_publish_"+str(index)
+                new_req = node._get_requirement_item_first(new_req_name)
+                if stream['type'] in ['message router', 'message_router'] :
+                    new_topic_name = 'topic'+ str(topic_index)
+                    topic_index += 1
+                    new_topic_node = tosca_operate._create_new_node(self.template, 'tosca.dcae.nodes.dmaap.topic', new_topic_name)
+                else:
+                    new_topic_name = 'feed'+ str(topic_index)
+                    topic_index += 1
+                    new_topic_node = tosca_operate._create_new_node(self.template, 'tosca.dcae.nodes.dmaap.feed', new_topic_name)
+                new_req._assign(new_topic_node)
+                for prop_item in iter(new_topic_node.properties.values()):
+                    if prop_item.name == 'topic_name':
+#                        if stream.has_key('config_key'):
+#                            prop_item._assign(stream['config_key']+'-'+str(uuid.uuid4()))
+#                            prop_item._assign(stream['config_key'])
+#                        else:
+                            prop_item._assign("")
+                    elif prop_item.name == 'feed_name':
+#                        if stream.has_key('config_key'):
+#                            prop_item._assign(stream['config_key']+'-'+str(uuid.uuid4()))
+#                            prop_item._assign(stream['config_key'])
+#                        else:
+                            prop_item._assign("")                        
+                    elif prop_item.name == 'node_name':
+                        prop_item._assign('__GET_NODE_NAME__')
+                    elif prop_item.name == 'location':
+                        fun_item = {}
+                        fun_item['get_property'] = ['SELF', 'composition', 'location_id']
+                        prop_item._assign(fun_item)
+                    else:
+                        input_name = new_topic_name + '_' + prop_item.name
+                        def_item = copy.deepcopy(prop_item.definition)
+                        def_item.name = input_name
+                        self.template.aux_inputs[input_name] = PropertyItem(def_item)
+                        fun_item = {}
+                        fun_item['get_input'] = input_name
+                        prop_item._assign(fun_item)
+                if stream['type'] in ['message router', 'message_router'] :
+                    for cap_prop_item in iter(new_topic_node._get_capability_item('topic').properties.values()):
+                        cap_prop_item._assign({'get_property': ['SELF', cap_prop_item.name]})
+                else:
+                    for cap_prop_item in iter(new_topic_node._get_capability_item('feed').properties.values()):
+                        cap_prop_item._assign({'get_property': ['SELF', cap_prop_item.name]})
+
+            index += 1            
+        
+        if self._using_policy():
+            index = 0
+            for policy_group in self.spec_import.policy_para.keys():
+                if policy_group is 'default_group':
+                    req_name = 'policy'
+                    policy_type_name = 'policy.nodes.' + self.spec_import.name
+                else:
+                    req_name = 'policy_'+policy_group
+                    policy_type_name = 'policy.nodes.' + policy_group
+    
+                new_req = node._get_requirement_item_first(req_name)
+                policy_node_name = 'policy_' + str(index)
+                index += 1
+                new_policy_node = tosca_operate._create_new_node(self.template, 'tosca.dcae.nodes.policy', policy_node_name)
+                policy_name_item = new_policy_node._get_property_item('policy_name')
+                policy_name_item._assign(policy_type_name)
+                new_req._assign(new_policy_node)  
+    
+    
    def create_translate(self, name):
        """Create the translation topology for the component `name`.

        Builds a fresh ToscaTopology that substitutes for the generated node
        type (self.new_type_name): the type's properties become template
        inputs, its capabilities/requirements become substitution rules onto a
        single node of self.cloudify_type, and every property of that node is
        populated from the component spec (self.spec_import).

        Logs a warning and returns early when the generated type is missing
        from the db or when cloudify_type is unset.
        """
        self.template = ToscaTopology(DEFAULT_TEMPLATE_NAME)
        self.template.metadata = {'template_name': name+"_translate"}
        self.template.db = self.db
        # carry over the schema imports, keyed by their position
        index = 0
        for item in self.imports:
            self.template.extra_imports.append({str(index): item})
            index += 1

        if self.new_type_name not in self.db.NODE_TYPES:
            logging.warning( 'error: new node type is not in db: ' + self.new_type_name)
            return

        # every property of the generated type becomes a template input
        for input_def in iter(self.db.NODE_TYPES[self.new_type_name].properties.values()):
            self.template.inputs[input_def.name] = PropertyItem(input_def)

        self.template.sub_type = self.new_type_name

        # substitution rules: forward each capability of the generated type
        # to the same-named capability on the inner node...
        for cap_name in self.db.NODE_TYPES[self.new_type_name].capabilities.keys():
            self.template.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, cap_name, None, [name, cap_name]))

        # ...and each requirement except 'composition', which is skipped
        for req_item in self.db.NODE_TYPES[self.new_type_name].requirements:
            if req_item.name == 'host':
                self.template.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, req_item.name, None, [name, 'host']))
            elif req_item.name == 'composition':
                continue
            else:
                self.template.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, req_item.name, None, [name, req_item.name]))

        if self.cloudify_type is None:
            logging.warning( 'cloudify_type should not be None!')
            return 

        node = tosca_operate._create_new_node(self.template, self.cloudify_type.name, name)

        # populate every property of the cloudify node from the spec
        for prop_name in node.properties.keys():
            if prop_name == 'application_config':
                # docker-tagged parameters become get_input references...
                fuc_val = {}
                for entry in self.spec_import.parameters:
                    if entry['tag'] == 'docker':
                        tmp_fun = {}
#                        tmp_fun['get_input'] = entry['tag']+'_' +entry['name']
                        tmp_fun['get_input'] = entry['name']
                        fuc_val[entry['name']] = tmp_fun

                # ...plus dmaap_info blocks for each published stream
                fuc_list = {}
                index = 0
                for stream in self.spec_import.streams_publishes:
                    fuc_unit = {}
                    req_name = "stream_publish_"+str(index)
                    index += 1
                    if stream['type'] in ['message router', 'message_router']:
                        fuc_unit['aaf_password'] = {'get_property': ['SELF', req_name, 'aaf_password' ]}
                        fuc_unit['aaf_username'] = {'get_property': ['SELF', req_name, 'aaf_username' ]}
                        # '<<...>>' markers are resolved downstream to the dmaap node name
                        fuc_unit['dmaap_info'] = {'concat': ['<<', {'get_property': ['SELF', req_name, 'node_name']}, '>>' ]}
                        fuc_unit['type'] = stream['type'].replace(' ', '_')
                    elif stream['type'] in ['data router', 'data_router']:
                        fuc_unit['dmaap_info'] = {'concat': ['<<', {'get_property': ['SELF', req_name, 'node_name']}, '>>' ]}
                        fuc_unit['type'] = stream['type'].replace(' ', '_')
                    else:
                        fuc_unit = {'concat': ['{{', {'get_property': ['SELF', req_name, 'node_name']}, '}}' ]}
                    # NOTE(review): raises KeyError if a publish stream lacks
                    # 'config_key' -- confirm specs always provide it here
                    fuc_list.update({stream['config_key']: fuc_unit})
                fuc_val['streams_publishes'] = fuc_list

                fuc_list = {}
                index = 0
                for stream in self.spec_import.streams_subscribes:
                    fuc_unit = {}
                    req_name = "stream_subscribe_"+str(index)
                    index += 1
                    if stream['type']  in ['message router', 'message_router']:
                        fuc_unit['aaf_password'] = {'get_property': ['SELF', req_name, 'aaf_password' ]}
                        fuc_unit['aaf_username'] = {'get_property': ['SELF', req_name, 'aaf_username' ]}
                        fuc_unit['dmaap_info'] = {'concat': ['<<', {'get_property': ['SELF', req_name, 'node_name']}, '>>' ]}
                        fuc_unit['type'] = stream['type'].replace(' ', '_')
                    elif stream['type'] in ['data router', 'data_router']:
                        fuc_unit['dmaap_info'] = {'concat': ['<<', {'get_property': ['SELF', req_name, 'node_name']}, '>>' ]}
                        fuc_unit['type'] = stream['type'].replace(' ', '_')
                    else:
                        # non-dmaap subscribe streams are not configured here
                        continue
                    fuc_list.update({stream['config_key']: fuc_unit})
                fuc_val['streams_subscribes'] = fuc_list

                # service calls, keyed by their config_key
                fuc_list = {}
                index = 0
                for service in self.spec_import.service_calls:
#                    if service.has_key('config_key'):
                    if 'config_key' in service:
                        req_name = service['config_key']
                    else:
                        req_name = 'service_call_' + str(index)
                    index += 1
#                    if service['type'] == 'http':
                    fuc_unit = {'concat': ['{{', {'get_property': ['SELF', req_name, 'node_name']}, '}}' ]}
#                    if service.has_key('config_key') is False:
                    if 'config_key' not in service:
                        logging.warning( 'service call section must have config_key!')
                        continue
                    fuc_list.update({service['config_key']: fuc_unit})
                fuc_val['services_calls'] = fuc_list

            elif prop_name in ['app_config', 'app_preferences']:
                # cdap-style config: only parameters tagged with this property name
                fuc_val = {}
                for entry in self.spec_import.parameters:
                    if entry['tag'] == prop_name:
                        tmp_fun = {}
#                         tmp_fun['get_input'] = entry['tag']+'_' +entry['name']
                        tmp_fun['get_input'] = entry['name']
                        fuc_val[entry['name']] = tmp_fun
            elif prop_name == 'program_preferences':
                # group consecutive parameters that share a tag into one entry
                # of the form {program_type/program_id, program_pref: {...}}
                fuc_val = []
                last_tag = None
                for entry in self.spec_import.parameters:
                    if entry['tag'].startswith(prop_name):
                        if entry['tag'] != last_tag:
                            fuc_entry = {}
                            fuc_unit = {}
                            fuc_entry['program_pref'] = fuc_unit
                            last_tag = entry['tag']
                            fuc_val.append(fuc_entry)
                        if entry['name'] in ['program_type', 'program_id']:
                            tmp_fun = {}
#                            tmp_fun['get_input'] = entry['tag']+'_' +entry['name']
                            tmp_fun['get_input'] = entry['name']
                            fuc_entry[entry['name']] = tmp_fun
                        else:
                            tmp_fun = {}
#                            tmp_fun['get_input'] = entry['tag']+'_' +entry['name']
                            tmp_fun['get_input'] = entry['name']
                            fuc_unit[entry['name']] = tmp_fun
            elif prop_name == 'service_endpoints':
                fuc_val = []
                index = 0
                for service in self.spec_import.service_provides:
                    fuc_entry={}
                    # NOTE(review): inputs here use the 'service_N_*' prefix while
                    # the capabilities created elsewhere use 'service_provide_N' --
                    # confirm the matching inputs are generated with this prefix
                    cap_prefix = 'service_' + str(index)
#@                    if service.has_key('service_name'):
                    if 'service_name' in service:
                        tmp_fun = {'get_input':cap_prefix + '_service_name'}
                    else:
                        tmp_fun = {}
                    fuc_entry['service_name'] = tmp_fun
#                    if service.has_key('service_endpoint'):
                    if 'service_endpoint' in service:
                        tmp_fun = {'get_input':cap_prefix + '_service_endpoint'}
                    else:
                        tmp_fun = {}
                    fuc_entry['service_endpoint'] = tmp_fun
#                    if service.has_key('verb'):
                    if 'verb' in service:
                        tmp_fun = {'get_input':cap_prefix + '_verb'}
                    else:
                        tmp_fun = {}
                    fuc_entry['endpoint_method'] = tmp_fun
                    fuc_val.append(fuc_entry)
                    index += 1

            elif prop_name == 'docker_config':
                # pass the auxiliary (docker) parameters through verbatim
                fuc_val = {}
                for key in self.spec_import.aux_para.keys():
                    fuc_val[key] = self.spec_import.aux_para[key]

            elif prop_name == 'connections':
                # dmaap publish/subscribe connection descriptors (with aaf creds)
                fuc_val = {}
                fuc_entry = []
                index = 0
                for stream in self.spec_import.streams_publishes:
                    if stream['type'] not in ['message router', 'message_router', 'data router', 'data_router']:
                        continue
                    fuc_unit = {}
                    req_name = "stream_publish_"+str(index)
                    fuc_unit['name'] = {'get_property': ['SELF', req_name, 'node_name' ]}
#                    if stream.has_key('config_key'):
                    if 'config_key' in stream:
                        fuc_unit['config_key'] = stream['config_key']
                    if stream['type'] in ['message router', 'message_router']:
                        fuc_unit['client_role'] = {'get_property': ['SELF', req_name, 'client_role' ]}
                        fuc_unit['aaf_username'] = {'get_property': ['SELF', req_name, 'aaf_username' ]}
                        fuc_unit['aaf_password'] = {'get_property': ['SELF', req_name, 'aaf_password' ]}
                    fuc_unit['location'] = {'get_property': ['SELF', req_name, 'location' ]}
                    fuc_unit['type'] = stream['type'].replace(' ', '_')
                    fuc_entry.append(fuc_unit)
                    index += 1
                fuc_val['streams_publishes'] = fuc_entry
                fuc_entry = []
                index = 0
                for stream in self.spec_import.streams_subscribes:
                    if stream['type'] not in ['message router', 'message_router', 'data router', 'data_router']:
                        continue
                    fuc_unit = {}
                    req_name = "stream_subscribe_"+str(index)
                    fuc_unit['name'] = {'get_property': ['SELF', req_name, 'node_name' ]}
#                    if stream.has_key('config_key'):
                    if 'config_key' in stream:
                        fuc_unit['config_key'] = stream['config_key']
                    if stream['type'] in ['message router', 'message_router']:
                        fuc_unit['client_role'] = {'get_property': ['SELF', req_name, 'client_role' ]}
                        fuc_unit['aaf_username'] = {'get_property': ['SELF', req_name, 'aaf_username' ]}
                        fuc_unit['aaf_password'] = {'get_property': ['SELF', req_name, 'aaf_password' ]}
                    fuc_unit['location'] = {'get_property': ['SELF', req_name, 'location' ]}
                    fuc_unit['type'] = stream['type'].replace(' ', '_')
                    fuc_entry.append(fuc_unit)
                    index += 1
                fuc_val['streams_subscribes'] = fuc_entry

            elif prop_name == 'streams_publishes':
                # flat list form (no aaf credentials); dmaap streams only
                fuc_val = []
                index = 0

                for stream in self.spec_import.streams_publishes:
                    if stream['type'] not in ['message router', 'message_router', 'data router', 'data_router']:
                        continue
                    fuc_unit = {}
                    req_name = "stream_publish_"+str(index)
                    fuc_unit['name'] = {'get_property': ['SELF', req_name, 'node_name' ]}
                    if stream['type'] in ['message router', 'message_router']:
                        fuc_unit['client_role'] = {'get_property': ['SELF', req_name, 'client_role' ]}
                    fuc_unit['location'] = {'get_property': ['SELF', req_name, 'location' ]}
                    fuc_unit['type'] = stream['type'].replace(' ', '_')
                    fuc_val.append(fuc_unit)
                    index += 1

            elif prop_name == 'streams_subscribes':
                fuc_val = []
                index = 0

                for stream in self.spec_import.streams_subscribes:
                    if stream['type'] not in ['message router', 'message_router', 'data router', 'data_router']:
                        continue
                    fuc_unit = {}
                    req_name = "stream_subscribe_"+str(index)
                    fuc_unit['name'] = {'get_property': ['SELF', req_name, 'node_name' ]}
                    fuc_unit['location'] = {'get_property': ['SELF', req_name, 'location' ]}
                    if stream['type'] in ['message router', 'message_router']:
                        fuc_unit['client_role'] = {'get_property': ['SELF', req_name, 'client_role' ]}
                    fuc_unit['type'] = stream['type'].replace(' ', '_')
                    fuc_val.append(fuc_unit)
                    index += 1

            elif self.spec_import.aux_para is not None and prop_name in self.spec_import.aux_para.keys():
                # any auxiliary parameter matching the property name wins
                fuc_val = self.spec_import.aux_para[prop_name]
            elif prop_name == 'service_component_type':
                if self.service_component_type is not None:
                    fuc_val = self.service_component_type
                elif self.spec_import.type == 'docker':
                    fuc_val = self.spec_import.name
                else:
                    fuc_val = 'cdap_app_' + name
            elif prop_name in ['image', 'jar_url']:
                fuc_val = self.image
            else:
                # default: wire the property straight to a same-named input
                fuc_val = {}
                fuc_val['get_input'] = prop_name

            self._assign_property_value(node, prop_name, fuc_val)

        # cdap apps additionally get the broker DNS name on the create operation
        if 'cdap' in self.cloudify_type.name:
            interface_item = node._get_interface_item('cloudify.interfaces.lifecycle')
            op_item = interface_item.operations['create']
            input_item = op_item.inputs['connected_broker_dns_name']
            input_item._assign({'get_input': 'connected_broker_dns_name'})
+    def _assign_property_value(self, node, property_name, value):
+#        if node.properties.has_key(property_name) is False:
+        if property_name not in node.properties:
+            logging.warning( 'No property with name '+ property_name+ ' in the node '+ node.name)
+            return False
+        return node.properties[property_name]._assign(value)
+
    def export_policy(self, filename):
        # NOTE(review): identical to export_schema() -- both dump
        # db._prepare_schema(). Presumably this should export a
        # policy-specific structure; confirm intent against callers.
        return tosca_export._yaml_export(filename, self.db._prepare_schema())
+        
    def export_schema(self, filename):
        # Write the type schema held in the db to `filename` as YAML;
        # returns whatever tosca_export._yaml_export returns.
        return tosca_export._yaml_export(filename, self.db._prepare_schema())
+            
    def export_model(self, filename):
        # Export the composed template (main body plus schema imports) as YAML.
        return tosca_export._yaml_export(filename, self.template._prepare_output('main,import_schema'))
+    
    def export_translation(self, filename):
        # Like export_model, but additionally includes the substitution
        # mapping section ('w_sub') needed by translation templates.
        return tosca_export._yaml_export(filename, self.template._prepare_output('main,import_schema,w_sub'))
+
+        
diff --git a/app/toscalib/tosca_builder.pyc b/app/toscalib/tosca_builder.pyc
new file mode 100644
index 0000000..082f41c
--- /dev/null
+++ b/app/toscalib/tosca_builder.pyc
Binary files differ
diff --git a/app/toscalib/tosca_workbook.py b/app/toscalib/tosca_workbook.py
new file mode 100644
index 0000000..0fce630
--- /dev/null
+++ b/app/toscalib/tosca_workbook.py
@@ -0,0 +1,202 @@
+#Author: Shu Shi
+#email: shushi@research.att.com
+
+
+from toscalib.templates.database import ToscaDB
+from toscalib.utils import tosca_operate, tosca_import, tosca_print, tosca_export
+from toscalib.utils.tosca_import import import_context
+from toscalib.templates.topology import ToscaTopology
+from toscalib.utils.tosca_operate import _assign_property_value,\
+    _assign_requirement_value, _assign_capability_property_value
+import copy, logging
+
+DEFAULT_TEMPLATE_NAME='default'
+
class ToscaWorkBook(object):
    """High-level facade for building, translating and exporting TOSCA topologies.

    Holds two databases of parsed TOSCA definitions -- `db` (working
    type/template database) and `tran_db` (translation-template library) --
    plus the topology under construction (`template`).
    """

    def __init__(self):
        self.db = ToscaDB()          # imported node types / templates
        self.tran_db = ToscaDB()     # translation-template library
        self.template = ToscaTopology(DEFAULT_TEMPLATE_NAME)
        self.template.db = self.db
        self.imported_files = []     # files already imported (avoids re-import)

    def _reset(self):
        """Start a fresh topology; the imported databases are kept."""
        self.template = ToscaTopology(DEFAULT_TEMPLATE_NAME)
        self.template.db = self.db

    def _import(self, filename):
        """Import one TOSCA YAML file into the working database."""
        self.db = tosca_import._file_import(self.imported_files, filename, self.db)

    def _import_dir(self, dirname):
        """Import every TOSCA YAML file found under `dirname`."""
        self.db = tosca_import._dir_import(self.imported_files, dirname, self.db)

    def _import_yml_str(self, content):
        """Import TOSCA definitions from an in-memory YAML string."""
        self.db = tosca_import._yaml_str_import(content, self.db)

    def _load_translation_db(self, dir_name, prefix=''):
        """Load translation templates from `dir_name` into `tran_db`.

        NOTE(review): `prefix` is accepted but currently unused; kept for
        interface compatibility.
        """
        self.tran_db = tosca_import._dir_import([], dir_name, self.tran_db)

    def _load_translation_library(self):
        """Attach to each node of the topology a deep copy of the translation
        template whose substitution type matches the node's type."""
        if self.tran_db is None or len(self.tran_db.TEMPLATES) < 1:
            return
        for node_item in self.template.node_dict.values():
            node_item.tran_template = None
            for tran_temp in self.tran_db.TEMPLATES.values():
                if hasattr(tran_temp, 'sub_type') and tran_temp.sub_type == node_item.type:
                    # deep copy so per-node prefixing cannot mutate the library copy
                    node_item.tran_template = copy.deepcopy(tran_temp)
                    node_item.tran_template._update_used_tag_for_translation()
                    node_item.tran_template._verify_substitution(node_item)
                    node_item.tran_template._update_prefix(node_item.name + '_')
                    break

    def _use(self, type_name, node_name=None):
        """Instantiate `type_name` (node type or template) into the current
        topology; returns the created item, or None if the name is unknown."""
        if type_name in self.db.NODE_TYPES:
            return tosca_operate._create_new_node(self.template, type_name, node_name)
        if type_name in self.db.TEMPLATES:
            return tosca_operate._create_new_template(self.template, type_name, node_name)
        logging.warning('Name: ' + type_name + ' is neither a type or a template. ')
        return None

    def _assign(self, node_name, sub_name, value_1, value_2=None):
        """Assign a value inside node `node_name`.

        Three dispatch forms:
          * value_2 given      -> capability property: sub_name is the
            capability, value_1 the property name, value_2 the value;
          * sub_name is a property -> assign value_1 to it;
          * otherwise sub_name is a requirement -> fulfill it with the node
            named value_1.
        """
        if node_name not in self.template.node_dict:
            logging.warning('Unrecognized node name: ' + node_name)
            return
        node = self.template.node_dict[node_name]
        if value_2 is not None:
            if sub_name in node.capabilities:
                node_cap = node.capabilities[sub_name]
                if value_1 in node_cap.properties:
                    _assign_capability_property_value(node, sub_name, value_1, value_2)
                else:
                    logging.warning('Unrecognized tag name: ' + value_1)
            else:
                # BUG FIX: original called the module itself -- `logging(...)` --
                # which raises TypeError at runtime; logging.warning was meant.
                logging.warning('Unrecognized tag name: ' + sub_name)
        elif sub_name in node.properties:
            _assign_property_value(node, sub_name, value_1)
        else:
            matched_req = None
            for req in node.requirements:
                if req.name == sub_name:
                    matched_req = req
                    break
            if matched_req is None:
                logging.warning('Unrecognized tag name: ' + sub_name)
                return
            if value_1 in self.template.node_dict:
                _assign_requirement_value(node, sub_name, self.template.node_dict[value_1])
            else:
                logging.warning('Incorrect node name: ' + value_1 + ', a node name is needed to fulfill requirement')
                return

    def _show_abstract(self):
        """Render the topology at node-name granularity."""
        return tosca_print._print_template(self.template, tosca_print.LEVEL_NODE_NAME)

    def _show_details(self):
        """Render the topology with full node details."""
        return tosca_print._print_template(self.template, tosca_print.LEVEL_NODE_DETAILS)

    def _show_types(self):
        """Render all node types known to the database."""
        return tosca_print._print_node_types(self.db)

    def _show_type(self, type_name):
        """Render one node type, or log a warning if it is unknown."""
        if type_name in self.db.NODE_TYPES:
            tosca_print._print_node_type(self.db.NODE_TYPES[type_name])
        else:
            logging.warning('Node type: ' + type_name + ' not found!')

    def _show_templates(self):
        """Render all templates known to the database."""
        tosca_print._print_templates(self.db)

    def _show_template(self, temp_name):
        """Render one template, or log a warning if it is unknown."""
        if temp_name in self.db.TEMPLATES:
            tosca_print._print_template(self.db.TEMPLATES[temp_name])
        else:
            logging.warning('Template: ' + temp_name + ' not found')

    def _translate_template_file(self, filename):
        """Import a single template file, then rebuild the workbook around it
        so the imported template becomes the topology to translate."""
        ctx = import_context()
        self.db = tosca_import._single_template_file_import(filename, self.db, ctx)
        temp_name = ctx.temp_name
        self._reset()
        # the freshly imported db doubles as the translation library here
        self.tran_db = self.db
        self._use(temp_name, 'NO_PREFIX')

    def _translate_template_yaml_str(self, content):
        """Same as _translate_template_file, but from an in-memory YAML string."""
        ctx = import_context()
        self.db = tosca_import._yaml_str_import(content, self.db, ctx)
        temp_name = ctx.temp_name
        self._reset()
        self.tran_db = self.db
        self._use(temp_name, 'NO_PREFIX')

    def _add_shared_node(self, rel):
        """For each {capability_type: type_name} entry in `rel`, create at most
        one shared node of `type_name` and wire it into every still-open
        requirement of that capability type."""
        if rel is None or not isinstance(rel, list):
            return
        for rel_entry in rel:
            rel_name = list(rel_entry.keys())[0]
            node_created = False
            while True:
                open_req = self._find_open_requirement(rel_name)
                if open_req is None:
                    break
                if not node_created:
                    # create the shared node once; stop if instantiation failed
                    if self._use(rel_entry[rel_name], 'NO_PREFIX') is None:
                        break
                    node_created = True
                self._assign(open_req[0], open_req[1], rel_entry[rel_name])

    def _find_open_requirement(self, cap_type):
        """Return [node_name, requirement_name] for the first unfilled
        requirement whose capability type equals `cap_type`, else None."""
        for node in self.template.node_dict.values():
            for req_item in node.requirements:
                if req_item.filled is True:
                    continue
                if req_item.req_capability == cap_type:
                    return [node.name, req_item.name]
        return None

    def _export_generic(self, tags=''):
        """Resolve function pointers and substitutions, then build the
        exportable structure selected by `tags`."""
        self.template._update_function_pointer()
        self._load_translation_library()
        self.template._propagate_substitution_value()
        self.template._update_translation_function_pointer()
        return self.template._prepare_output(tags)

    def _export_yaml(self, filename, tags='main,nodetype'):
        """Export the topology to a YAML file."""
        return tosca_export._yaml_export(filename, self._export_generic(tags))

    def _export_yaml_web(self, tags='main,nodetype'):
        """Export as YAML for the web UI ('WEB' filename sentinel)."""
        return tosca_export._yaml_export('WEB', self._export_generic(tags))

    def _export_heat(self, filename):
        """Export the topology as a HEAT template."""
        return tosca_export._heat_export(filename, self._export_generic('heat,main'))

    def toJson(self):
        """Return a JSON-serializable view of the current topology."""
        return self.template.toJson()
+    
\ No newline at end of file
diff --git a/app/toscalib/tosca_workbook.pyc b/app/toscalib/tosca_workbook.pyc
new file mode 100644
index 0000000..a3ada52
--- /dev/null
+++ b/app/toscalib/tosca_workbook.pyc
Binary files differ
diff --git a/app/toscalib/types/__init__.py b/app/toscalib/types/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/toscalib/types/__init__.py
diff --git a/app/toscalib/types/__init__.pyc b/app/toscalib/types/__init__.pyc
new file mode 100644
index 0000000..810f67d
--- /dev/null
+++ b/app/toscalib/types/__init__.pyc
Binary files differ
diff --git a/app/toscalib/types/capability.py b/app/toscalib/types/capability.py
new file mode 100644
index 0000000..47d9238
--- /dev/null
+++ b/app/toscalib/types/capability.py
@@ -0,0 +1,144 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+from toscalib.types.property import PropertyDefinition
+import copy,logging
+
+
class CapabilityDefinition:
    """A capability entry declared on a node type.

    Wraps the raw YAML fragment and resolves its type against the
    capability types registered in the model database ``db``.
    """
    def __init__(self, name, content):
        if name is None:
            return None
        self.name = name
        self.type = None
        self.type_obj = None        # resolved CapabilityType, when known
        self.valid_source = None
        self.parsed = False
        self.id = PropertyDefinition('id')
        self.raw_content = content

    def _parse_content(self, db):
        """Resolve type, valid_source and properties from the raw content.

        Idempotent: work is done only on the first call.
        """
        if self.parsed:
            return

        self.id._parse_content(db)

        content = self.raw_content

        if content is None:
            logging.warning( 'Capability definition '+ self.name+ ' is None!')
            self.parsed = True
            # BUGFIX: must bail out here -- the membership tests below
            # would raise TypeError on a None content.
            return

        if CAP_TYPE in content:
            self.type = content[CAP_TYPE]
            if self.type in db.CAPABILITY_TYPES:
                self.type_obj = db.CAPABILITY_TYPES[self.type]
                self.type_obj._parse_content(db)
            elif self.type != 'tosca.capabilities.Root':
                logging.warning( 'Capability type '+ self.type+ ' not imported or defined!')
        else:
            logging.warning( 'Capability definition '+ self.name+ ' has no type defined!')

        if CAP_SOURCE in content:
            self.valid_source = content[CAP_SOURCE]
        elif self.type_obj is not None:
            # Inherit the valid_source restriction from the capability type.
            self.valid_source = self.type_obj.valid_source

        if self.type_obj is not None:
            self.properties = copy.deepcopy(self.type_obj.properties)
        else:
            self.properties = {}

        if CAP_PROPERTIES in content:
            prop_sec = content[CAP_PROPERTIES]
            for prop_name in prop_sec.keys():
                if prop_name in self.properties:
                    logging.warning( 'Property name '+ prop_name+ ' has been defined in type definition, overwritten here!')
                # BUGFIX: assign unconditionally (mirrors CapabilityType).
                # Previously the assignment was nested under the overwrite
                # check, so properties absent from the type definition were
                # silently dropped.
                self.properties[prop_name] = PropertyDefinition(prop_name, prop_sec[prop_name])
                self.properties[prop_name]._parse_content(db)

        self.parsed = True

    def _validate_capability(self, cap_name):
        """True when this capability (or its type hierarchy) matches cap_name."""
        if self.type_obj is not None:
            return self.type_obj._validate_capability(cap_name)
        else:
            return self.type == cap_name
+
+
class CapabilityType:
    """A named TOSCA capability type, possibly derived from a parent type.

    Properties declared by ancestors are merged in; local declarations
    override inherited ones.
    """

    def __init__(self, name, content):
        if name is None or content is None:
            return None
        self.name = name
        self.parent_type = None
        self.parent = None
        self.valid_source = None
        self.parsed = False
        self.raw_content = content

    def _parse_content(self, db):
        """Resolve parent linkage and accumulate properties (idempotent)."""
        if self.parsed is True:
            return

        content = self.raw_content
        if content is None:
            logging.warning( 'Capability type '+ self.name+ ' has None content')
            return

        if CAP_DERIVED_FROM in content:
            self.parent_type = content[CAP_DERIVED_FROM]
            known = db.CAPABILITY_TYPES
            if self.parent_type in known:
                self.parent = known[self.parent_type]
                self.parent._parse_content(db)
            elif self.parent_type != 'tosca.capabilities.Root':
                logging.warning( 'Capability type '+ self.parent_type+ ' not imported but defined!')
        else:
            logging.warning( 'Capability type '+ self.name+ ' has no parent type to derive from')

        # Start from the parent's properties (if any), then layer locals.
        self.properties = copy.deepcopy(self.parent.properties) if self.parent is not None else {}

        if CAP_PROPERTIES in content:
            for prop_name, prop_body in content[CAP_PROPERTIES].items():
                if prop_name in self.properties:
                    logging.warning( 'Property name '+ prop_name+ ' has been defined in parent type, overwritten here!')
                prop_def = PropertyDefinition(prop_name, prop_body)
                prop_def._parse_content(db)
                self.properties[prop_name] = prop_def

        if CAP_SOURCE in content:
            self.valid_source = content[CAP_SOURCE]

        self.parsed = True

    def _validate_capability(self, cap_name):
        """True when cap_name names this type or any ancestor type."""
        if self.name == cap_name:
            return True
        parent = self.parent
        return parent._validate_capability(cap_name) if parent is not None else False
+        
+
+                    
\ No newline at end of file
diff --git a/app/toscalib/types/capability.pyc b/app/toscalib/types/capability.pyc
new file mode 100644
index 0000000..f79e142
--- /dev/null
+++ b/app/toscalib/types/capability.pyc
Binary files differ
diff --git a/app/toscalib/types/constraints.py b/app/toscalib/types/constraints.py
new file mode 100644
index 0000000..ae2401d
--- /dev/null
+++ b/app/toscalib/types/constraints.py
@@ -0,0 +1,15 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
class PropertyConstraints(object):
    """Placeholder for TOSCA property constraints.

    Stores the raw constraint section; real parsing and validation are
    not implemented yet, so every value is accepted.
    """

    def __init__(self, content):
        self.raw_content = content

    def _parse_content(self):
        # Constraint parsing is not implemented yet.
        pass

    def _validate(self, value):
        # Accept everything until real constraint checking exists.
        return True
+    
diff --git a/app/toscalib/types/constraints.pyc b/app/toscalib/types/constraints.pyc
new file mode 100644
index 0000000..514635a
--- /dev/null
+++ b/app/toscalib/types/constraints.pyc
Binary files differ
diff --git a/app/toscalib/types/data.py b/app/toscalib/types/data.py
new file mode 100644
index 0000000..73496e2
--- /dev/null
+++ b/app/toscalib/types/data.py
@@ -0,0 +1,282 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+import ast
+import copy, logging
+from toscalib.templates.constant import *
+from toscalib.types.constraints import PropertyConstraints
+from toscalib.types.entry_schema import EntrySchema
+
# TOSCA built-in scalar and collection type names.  The double assignment
# also binds each name to a TYP_* constant; TYP_ANY ('output') is the
# wildcard used when the concrete type is unknown until a value is seen.
BUILT_IN_TYPES= (TYP_BOOL, TYP_INT, TYP_FLT, TYP_STR, TYP_MAP, TYP_LIST, TYP_VER, TYP_SIZE, TYP_TIME, TYP_FREQ, TYP_ANY) = \
                ('boolean', 'integer', 'float', 'string', 'map', 'list', 'version', 'scalar-unit.size', 'scalar-unit.time', 'scalar-unit.frequency', 'output')
+
+def _is_integer(value):
+    try: 
+        int(value)
+        return True
+    except ValueError:
+        return False   
+    
+def _is_float(value):
+    try: 
+        float(value)
+        return True;
+    except ValueError:
+        return False
+
+def _is_true(value):
+    if value is True:
+        return True
+    elif value in TRUE_VALUES:
+        return True
+    else:
+        return False 
+        
+             
class DataType:
    """A TOSCA data type.

    Either one of the BUILT_IN_TYPES (constructed with content=None) or a
    custom type declared in the model, possibly derived from another type
    and carrying its own property definitions (for map-like types).
    """

    def __init__(self, name, content=None):
        self.name = name
        # Entry schema used when this type behaves as a list/map container.
        self.entry = EntrySchema(TYP_ANY)
        # BUGFIX: the attribute was misspelled 'contraints', so
        # self.constraints was left unset on the raw_content-None path of
        # _parse_content and any later read would raise AttributeError.
        self.constraints = None
        self.used = False
        self.parent_type = None
        if content is None:
            # Built-in scalar/collection type: the name *is* the type.
            self.built_in = True
            self.type = self.name
            self.raw_content = None
            self.parsed = False
        else:
            self.built_in = False
            self.type = None
            self.raw_content = content
            self.parsed = False

    @classmethod
    def _built_in_types(cls):
        """Return the tuple of names treated as TOSCA built-ins."""
        return BUILT_IN_TYPES

    def _parse_content(self, db):
        """Resolve derived_from, properties and constraints (idempotent)."""
        if self.parsed is True:
            return

        self.properties = {}

        if self.raw_content is None:
            self.parsed = True
            self.entry._parse_content(db)
            return

        if NOD_DERIVED_FROM in self.raw_content:
            self.parent_type = self.raw_content[NOD_DERIVED_FROM]
            if self.parent_type in db.DATA_TYPES:
                # Custom parent: inherit its resolved base type and properties.
                self.parent = db.DATA_TYPES[self.parent_type]
                self.parent._parse_content(db)
                self.type = self.parent.type
                self.properties = copy.deepcopy(self.parent.properties)
            elif self.parent_type in BUILT_IN_TYPES:
                self.type = self.parent_type
            else:
                self.type = None
                logging.warning( 'Unrecognized data type: '+ self.parent_type)
        else:
            # No derived_from: treat the custom type as a plain map.
            self.type = TYP_MAP

        if NOD_PROPERTIES in self.raw_content:
            prop_sec = self.raw_content[NOD_PROPERTIES]
            for prop_name in prop_sec.keys():
                # Imported lazily to avoid a circular import with property.py.
                from toscalib.types.property import PropertyDefinition
                self.properties[prop_name] = PropertyDefinition(prop_name, prop_sec[prop_name])
                self.properties[prop_name]._parse_content(db)

        if PROP_CONSTRAINT in self.raw_content:
            self.constraints = PropertyConstraints(self.raw_content[PROP_CONSTRAINT])
            self.constraints._parse_content()
        else:
            self.constraints = None

        self.entry._parse_content(db)

        self.parsed = True

    def _customozed_format_value(self, value):
        """Coerce *value* for a custom (non-built-in) type.

        Returns None when an int/float conversion fails.
        NOTE(review): the method name keeps its historical misspelling
        ('customozed') for backward compatibility with existing callers.
        """
        if self.type == TYP_INT:
            return int(value) if _is_integer(value) else None
        if self.type == TYP_FLT:
            return float(value) if _is_float(value) else None
        if self.type == TYP_BOOL:
            return _is_true(value)
        elif self.type == TYP_STR:
            return value
        elif self.type == TYP_LIST:
            return self._parse_string_to_list(value)
        elif self.type == TYP_MAP:
            return self._parse_string_to_map(value, self.properties)
        else:
            # TODO add support for version, scalar-unit
            return value

    def _format_value(self, value):
        """Coerce *value* to this type.

        Built-in types convert in place; TYP_ANY infers the concrete type
        from the Python type of the first value seen (and mutates
        self.type accordingly).  Custom types delegate to
        _customozed_format_value.
        NOTE(review): the built-in path leaves 'float' values untouched
        (they fall through to the default branch) -- confirm intended.
        """
        if self.built_in is True:
            if self.type == TYP_INT:
                return int(value) if _is_integer(value) else None
            if self.type == TYP_BOOL:
                return _is_true(value)
            elif self.type == TYP_STR:
                return str(value)
            elif self.type == TYP_LIST:
                return self._parse_string_to_list(value)
            elif self.type == TYP_MAP:
                return self._parse_string_to_map(value)
            elif self.type == TYP_ANY:
                # Infer the concrete type from the runtime value.
                if type(value) is int:
                    self.type = TYP_INT
                    return value
                if type(value) is bool:
                    self.type = TYP_BOOL
                    return value
                if type(value) is list:
                    self.type = TYP_LIST
                    self.entry = EntrySchema(TYP_ANY)
                    self.entry._parse_content(None)
                    return self._parse_string_to_list(value)
                if type(value) is dict:
                    self.type = TYP_MAP
                    self.entry = EntrySchema(TYP_ANY)
                    self.entry._parse_content(None)
                    return self._parse_string_to_map(value)
                self.type = TYP_STR
                return str(value)
            else:
                # TODO add support for version, scalar-unit
                return value
        else:
            return self._customozed_format_value(value)

    def _update_prefix(self, prefix, value):
        """Recursively push a name prefix into list/map member Values."""
        if self.type == TYP_LIST:
            for value_item in value:
                value_item._update_prefix(prefix)
        elif self.type == TYP_MAP:
            for value_item in iter(value.values()):
                value_item._update_prefix(prefix)
        else:
            return

    def _update_function_reference(self, temp, value, self_node=None, self_item=None):
        """Recursively rebind function references in list/map member Values."""
        if self.type == TYP_LIST:
            for value_item in value:
                value_item._update_function_reference(temp, self_node, self_item)
        elif self.type == TYP_MAP:
            for value_item in iter(value.values()):
                value_item._update_function_reference(temp, self_node, self_item)
        else:
            return

    def _get_value(self, value, tags=''):
        """Render *value* for output.

        Returns (rendered, status): NULL members are dropped, and any
        FUNCTION member marks the whole container as FUNCTION.
        """
        from toscalib.templates.value import VALID_VALUE, FUNCTION, NULL
        if self.type == TYP_LIST:
            out_str = []
            real_value = VALID_VALUE
            for value_item in value:
                out_item, real_item = value_item._get_value(tags)
                if real_item is NULL:
                    continue

                out_str.append(out_item)
                if real_item is FUNCTION:
                    real_value = real_item
            return out_str, real_value
        elif self.type == TYP_MAP:
            out_str = {}
            real_value = VALID_VALUE
            for value_key in value.keys():
                temp_out, real_item = value[value_key]._get_value(tags)
                if real_item is NULL:
                    continue

                out_str[value_key] = temp_out
                if real_item is FUNCTION:
                    real_value = real_item
            return out_str, real_value
        else:
            return value, VALID_VALUE

    def _parse_string_to_list(self, value):
        """Parse a (string form of a) Python list into a list of Values
        typed by the entry schema; returns None on malformed input."""
        try:
            list_value = ast.literal_eval(str(value))
        except (ValueError, SyntaxError):
            # BUGFIX: ast.literal_eval raises SyntaxError (not only
            # ValueError) on malformed input; str() keeps logging safe for
            # non-string values.
            logging.error( 'List formatted string required for list type: ')
            logging.error('Value is 2: ' + str(value))
            return None
        if type(list_value) is list:
            from toscalib.templates.value import Value
            out_list = []
            for list_item in list_value:
                out_list.append(Value(self.entry.type_obj, list_item))
            return out_list
        logging.debug( 'List formatted string required for list type: ')
        logging.debug('Value is 1: ' + str(value))
        return None

    def _parse_string_to_map(self, value, property_set=None):
        """Parse a (string form of a) Python dict into a dict of Values.

        When *property_set* is given, only keys declared there are kept
        and typed by their property definitions; otherwise all keys are
        typed by the entry schema.  Returns None on malformed input.
        """
        try:
            map_value = ast.literal_eval(str(value))
        except (ValueError, SyntaxError):
            # BUGFIX: see _parse_string_to_list -- catch SyntaxError too.
            logging.error( 'Map formatted string required for map type: ')
            logging.error('Value is 2: ' + str(value))
            return None
        if type(map_value) is dict:
            from toscalib.templates.value import Value
            out_map = {}
            if property_set is None:
                for key_item in map_value.keys():
                    out_map[key_item] = Value(self.entry.type_obj, map_value[key_item])
            else:
                for key_item in property_set.keys():
                    if key_item in map_value.keys():
                        out_map[key_item] = Value(property_set[key_item].type_obj, map_value[key_item])
            return out_map
        logging.debug( 'Map formatted string required for map type: ')
        logging.debug('Value is 1: ' + str(value))
        return None

    def _create_rawcontent(self):
        """Regenerate the YAML-shaped raw_content dict from parsed state."""
        self.raw_content = {}
        if self.parent_type is not None:
            self.raw_content[YMO_NOD_DERIVED_FROM] = self.parent_type
        prop_sec = {}
        for prop_key in self.properties.keys():
            if self.properties[prop_key].raw_content is None:
                self.properties[prop_key]._create_rawcontent()
            prop_sec[prop_key] = self.properties[prop_key].raw_content
        if len(prop_sec) > 0:
            self.raw_content[YMO_NOD_PROPERTIES] = prop_sec
diff --git a/app/toscalib/types/data.pyc b/app/toscalib/types/data.pyc
new file mode 100644
index 0000000..fd1388a
--- /dev/null
+++ b/app/toscalib/types/data.pyc
Binary files differ
diff --git a/app/toscalib/types/entry_schema.py b/app/toscalib/types/entry_schema.py
new file mode 100644
index 0000000..6553875
--- /dev/null
+++ b/app/toscalib/types/entry_schema.py
@@ -0,0 +1,68 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+from toscalib.templates.constant import *
+from toscalib.types.constraints import PropertyConstraints
+import logging
+
class EntrySchema(object):
    """The entry_schema of a TOSCA list/map property: the member type and
    optional constraints.

    The raw content is either a bare type-name string (shorthand form) or
    a mapping with 'type'/'constraints' keys.
    """

    def __init__(self, content):
        self.type = None
        self.type_obj = None
        self.constraints = None
        self.raw_content = content
        self.parsed = False

    def _resolve_type(self, db, type_name):
        # Map a type name to a DataType: built-ins are constructed on the
        # fly, custom types come from the db registry.
        self.type = type_name
        from toscalib.types.data import DataType
        if type_name in DataType._built_in_types():
            self.type_obj = DataType(type_name)
        elif db is None or type_name not in db.DATA_TYPES:
            logging.warning( 'Data type: '+ type_name+ ' not defined or imported!')
            self.type_obj = None
        else:
            self.type_obj = db.DATA_TYPES[type_name]
            self.type_obj._parse_content(db)

    def _parse_content(self, db):
        """Parse the raw entry_schema content (idempotent)."""
        if self.parsed is True:
            return

        content = self.raw_content
        if content is None:
            logging.warning( 'Construct None entry_schema section')
            self.parsed = True
            return

        if type(content) is str:
            # Shorthand form: the entry_schema is just a type name.
            self._resolve_type(db, content)
            self.parsed = True
            return

        if PROP_TYPE in content:
            self._resolve_type(db, content[PROP_TYPE])

        if PROP_CONSTRAINT in content:
            self.constraints = PropertyConstraints(content[PROP_CONSTRAINT])
            self.constraints._parse_content()

        self.parsed = True
        return

    def _format_value(self, value):
        """Coerce *value* via the resolved member type; pass through
        unchanged when the type could not be resolved."""
        if self.type_obj is None:
            logging.warning( 'Invalid entry_schema type')
            return value
        return self.type_obj._format_value(value)
+    
\ No newline at end of file
diff --git a/app/toscalib/types/entry_schema.pyc b/app/toscalib/types/entry_schema.pyc
new file mode 100644
index 0000000..b9558f4
--- /dev/null
+++ b/app/toscalib/types/entry_schema.pyc
Binary files differ
diff --git a/app/toscalib/types/interface.py b/app/toscalib/types/interface.py
new file mode 100644
index 0000000..7b1e6cf
--- /dev/null
+++ b/app/toscalib/types/interface.py
@@ -0,0 +1,37 @@
+from toscalib.types.property import PropertyDefinition
+from toscalib.types.operation import OperationDefinition
+
+
class InterfaceDefinition(object):
    """An interface section of a node type: a type name, interface-level
    inputs shared by all operations, and a set of named operations."""

    def __init__(self, name, content=None):
        self.name = name
        self.raw_content = content
        self.parsed = False
        self.operations = {}
        self.type = None
        self.inputs = {}

    def _parse_content(self, db):
        """Parse the raw interface mapping (idempotent)."""
        if self.parsed is True:
            return

        if self.raw_content is None:
            self.parsed = True
            return

        for key_name, body in self.raw_content.items():
            if key_name == 'type':
                self.type = body
            elif key_name == 'inputs':
                # Inputs shared by every operation of this interface.
                for input_name, input_body in body.items():
                    prop = PropertyDefinition(input_name, input_body)
                    prop._parse_content(db)
                    self.inputs[input_name] = prop
            else:
                # Any other key is an operation definition.
                op = OperationDefinition(self.name, key_name, body)
                op._parse_content(db)
                self.operations[key_name] = op

        self.parsed = True
        return
+    
diff --git a/app/toscalib/types/interface.pyc b/app/toscalib/types/interface.pyc
new file mode 100644
index 0000000..8766be3
--- /dev/null
+++ b/app/toscalib/types/interface.pyc
Binary files differ
diff --git a/app/toscalib/types/node.py b/app/toscalib/types/node.py
new file mode 100644
index 0000000..249f713
--- /dev/null
+++ b/app/toscalib/types/node.py
@@ -0,0 +1,174 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+from toscalib.types.property import PropertyDefinition
+from toscalib.types.requirement import RequirementDefinition
+from toscalib.types.capability import CapabilityDefinition
+import copy, logging
+from toscalib.types.interface import InterfaceDefinition
+
+            
class NodeType(object):
    """A TOSCA node type parsed from a 'node_types' section.

    Resolves the derived_from parent, then merges inherited properties,
    attributes, requirements, capabilities and interfaces with the ones
    declared locally (local declarations override inherited ones).
    """
    def __init__(self, name, content):
        # Guard: nameless/empty declarations leave the instance without
        # attributes -- callers are expected to pass both values.
        if name is None or content is None:
            return None
        self.name = name
        self.parent_type = None     # derived_from name (string), if any
        self.parent = None          # resolved parent NodeType, if imported
        self.id = PropertyDefinition('id')
        self.mapping_template = None
        self.raw_content = content
        self.used = False
        self.parsed = False
        self.properties = {}
        self.attributes = {}
        self.capabilities = {}
        self.requirements = []
        self.interfaces = {}

    def _parse_content(self, db):
        """Resolve the parent type and merge all sections from raw_content
        into this instance (idempotent)."""
        if self.parsed is True:
            return
        
        self.id._parse_content(db)
        
        if self.raw_content is None:
            self.parsed = True
            logging.warning( 'Parsing Node '+ self.name+ ', content is None')
            return
        
        # Resolve derived_from against the imported node types.
        if NOD_DERIVED_FROM in self.raw_content:
            self.parent_type = self.raw_content[NOD_DERIVED_FROM]
            if self.parent_type in db.NODE_TYPES:
                self.parent = db.NODE_TYPES[self.parent_type]
                self.parent._parse_content(db)
            elif self.parent_type != 'tosca.nodes.Root':
                logging.warning( 'Node '+ self.parent_type+ ' not imported but used!')
        else:
            logging.warning( 'Node type '+ self.name+ ' has no parent type to derive from')
            
        # Inherit before overlaying local declarations.
        if self.parent is not None:
            self.properties = copy.deepcopy(self.parent.properties)
            self.attributes = copy.deepcopy(self.parent.attributes)

        if NOD_PROPERTIES in self.raw_content:
            prop_sec = self.raw_content[NOD_PROPERTIES]
            for prop_name in prop_sec.keys():
                if prop_name in self.properties:
                    logging.debug( 'Property name '+ prop_name+ ' has been defined in parent type, overwritten here!')
                self.properties[prop_name] = PropertyDefinition(prop_name, prop_sec[prop_name])
                self.properties[prop_name]._parse_content(db)

        if NOD_ATTRIBUTES in self.raw_content:
            attr_sec = self.raw_content[NOD_ATTRIBUTES]
            for attr_name in attr_sec.keys():
                if attr_name in self.attributes:
                    logging.debug( 'Attribute name '+ attr_name+ ' has been defined in parent type, overwritten here!')
                self.attributes[attr_name] = PropertyDefinition(attr_name, attr_sec[attr_name])
                self.attributes[attr_name]._parse_content(db)
                         
        if self.parent is not None:
            self.requirements = copy.deepcopy(self.parent.requirements)
        
        # Requirements are a list (ordered), unlike the keyed sections.
        if NOD_REQUIREMENTS in self.raw_content:
            req_sec = self.raw_content[NOD_REQUIREMENTS]
            for req in req_sec:
                req_item = RequirementDefinition(req)
                req_item._parse_content(db)
                if req_item.name is not None:
                    self.requirements.append(req_item)
         
        if self.parent is not None:
            self.capabilities = copy.deepcopy(self.parent.capabilities)
        
        if NOD_CAPABILITIES in self.raw_content:
            cap_sec = self.raw_content[NOD_CAPABILITIES]
            for cap_name in cap_sec.keys():
                if cap_name in self.capabilities:
                    logging.warning( 'Capability name '+ cap_name+ ' has been defined in parent type, overwritten here!')
                self.capabilities[cap_name] = CapabilityDefinition(cap_name, cap_sec[cap_name])
                self.capabilities[cap_name]._parse_content(db)
     
        # NOTE(review): parsed is flipped *before* interfaces are merged,
        # presumably to break recursion if interface parsing re-enters this
        # node type -- confirm before moving.
        self.parsed = True
        
        if self.parent is not None:
            self.interfaces = copy.deepcopy(self.parent.interfaces)
            
        if NOD_INTERFACES in self.raw_content:
            interface_sec = self.raw_content[NOD_INTERFACES]
            for interface_name in interface_sec.keys():
                if interface_name in self.interfaces:
                    # NOTE(review): message typos ('definend', 'parenty') and
                    # missing spaces -- candidate cleanup; left unchanged here.
                    logging.warning( 'Interface name'+ interface_name+ 'has been definend in parenty type, overwritten here')
                self.interfaces[interface_name] = InterfaceDefinition(interface_name, interface_sec[interface_name])
                self.interfaces[interface_name]._parse_content(db)
        
    def _verify_req_type(self, req_type):
        """True when req_type names this type or any ancestor type."""
        if self.name == req_type:
            return True
        if self.parent is not None:
            return self.parent._verify_req_type(req_type)
        else:
            # Parent unresolved: fall back to a string comparison on the name.
            logging.warning( 'Type '+ self.parent_type+ ' is not imported or defined')
            return self.parent_type == req_type
                
    def _create_rawcontent(self):
        """Regenerate the YAML-shaped raw_content dict from parsed state.

        Empty sections are omitted; derived_from defaults to
        'tosca.nodes.Root' when no parent type is known.
        NOTE(review): '!= None' below would idiomatically be 'is not None'.
        """
        self.raw_content= {}
        if self.parent_type != None:
            self.raw_content[YMO_NOD_DERIVED_FROM] = self.parent_type
        else:
            self.raw_content[YMO_NOD_DERIVED_FROM] = 'tosca.nodes.Root'
        prop_sec = {}
        for prop_key in self.properties.keys():
            if self.properties[prop_key].raw_content is  None:
                self.properties[prop_key]._create_rawcontent()
            prop_sec[prop_key] = self.properties[prop_key].raw_content
        if len(prop_sec) > 0:
            self.raw_content[YMO_NOD_PROPERTIES] = prop_sec    
        
        attr_sec = {}
        for prop_key in self.attributes.keys():
            if self.attributes[prop_key].raw_content is  None:
                self.attributes[prop_key]._create_rawcontent()
            attr_sec[prop_key] = self.attributes[prop_key].raw_content   
        if len(attr_sec)>0 :
            self.raw_content[YMO_NOD_ATTRIBUTES] = attr_sec    
            
        cap_sec = {}
        for cap_key in self.capabilities.keys():
            cap_sec[cap_key] = self.capabilities[cap_key].raw_content   
        if len(cap_sec)>0 :
            self.raw_content[YMO_NOD_CAPABILITIES] = cap_sec
    
        req_sec = []
        for req in self.requirements:
            req_sec.append(req.raw_content)
        if len(req_sec)>0 :
            self.raw_content[YMO_NOD_REQUIREMENTS] = req_sec
            
        if len(self.interfaces) > 0:
            int_sec = {}
            for int_name in self.interfaces.keys():
                int_sec[int_name] = self.interfaces[int_name].raw_content
            self.raw_content[YMO_NOD_INTERFACES] = int_sec
+            
\ No newline at end of file
diff --git a/app/toscalib/types/node.pyc b/app/toscalib/types/node.pyc
new file mode 100644
index 0000000..a6ce858
--- /dev/null
+++ b/app/toscalib/types/node.pyc
Binary files differ
diff --git a/app/toscalib/types/operation.py b/app/toscalib/types/operation.py
new file mode 100644
index 0000000..a7f38c7
--- /dev/null
+++ b/app/toscalib/types/operation.py
@@ -0,0 +1,36 @@
+from toscalib.types.property import PropertyDefinition
+
+
class OperationDefinition(object):
    """A single operation of an interface: an optional implementation
    artifact plus named input property definitions."""

    def __init__(self, interface_name, name, content=None):
        self.name = name
        self.interface = interface_name
        self.raw_content = content
        self.parsed = False
        self.implementation = None
        self.inputs = {}

    def _parse_content(self, db):
        """Parse the raw operation body (idempotent)."""
        if self.parsed is True:
            return

        if self.raw_content is None:
            self.parsed = True
            return

        self.parsed = True

        # Non-mapping bodies carry no parseable fields.
        if type(self.raw_content) is not dict:
            return

        for key_name in self.raw_content:
            if key_name == 'implementation':
                self.implementation = self.raw_content[key_name]
            elif key_name == 'inputs':
                input_sec = self.raw_content['inputs']
                for input_name in input_sec:
                    prop = PropertyDefinition(input_name, input_sec[input_name])
                    prop._parse_content(db)
                    self.inputs[input_name] = prop

        return
+    
diff --git a/app/toscalib/types/operation.pyc b/app/toscalib/types/operation.pyc
new file mode 100644
index 0000000..ec25aae
--- /dev/null
+++ b/app/toscalib/types/operation.pyc
Binary files differ
diff --git a/app/toscalib/types/property.py b/app/toscalib/types/property.py
new file mode 100644
index 0000000..7f794dd
--- /dev/null
+++ b/app/toscalib/types/property.py
@@ -0,0 +1,92 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+from toscalib.types.constraints import PropertyConstraints
+from toscalib.types.entry_schema import EntrySchema
+from toscalib.types.data import DataType, TYP_STR, TYP_ANY, TYP_MAP
+from toscalib.templates.constant import *
+import logging
+
class PropertyDefinition(object):
    """Definition of a TOSCA property: type, required flag, default and
    constraints.

    Parsed lazily from ``raw_content`` via ``_parse_content``.  A ``None``
    content yields a permissive any-typed property whose default is its own
    name.
    """

    def __init__(self, name, content=None):
        self.name = name
        self.raw_content = content
        self.parsed = False
        self.type = None
        self.type_obj = None

        if content is None:
            # No definition body: treat as an always-present, any-typed
            # property defaulting to its own name.
            self.required = True
            self.type = TYP_ANY
            self.type_obj = DataType(self.type)
            self.default = self.name
            self.parsed = True

    def _parse_content(self, db):
        """Populate type/required/default/entry-schema/constraints.

        db -- the ToscaDB used to resolve non-built-in data types.
        """
        if self.parsed is True:
            return

        content = self.raw_content

        if content is None:
            self.required = True
            self.type = TYP_ANY
            self.type_obj = DataType(self.type)
            self.default = self.name
            self.parsed = True
            return

        if PROP_TYPE in content:
            self.type = content[PROP_TYPE]
            if self.type in DataType._built_in_types():
                self.type_obj = DataType(self.type)
                self.type_obj._parse_content(db)
            elif self.type not in db.DATA_TYPES:
                # BUG FIX: was `self.type in db.DATA_TYPES is False`, a
                # chained comparison that is always falsy, making this
                # "unknown type" branch unreachable.
                logging.warning( 'Data type: '+ self.type+ ' not defined or imported!')
                self.type_obj = None
            else:
                self.type_obj = db.DATA_TYPES[self.type]
                self.type_obj._parse_content(db)

        self.required = True
        if PROP_REQUIRED in content:
            if content[PROP_REQUIRED] not in TRUE_VALUES:
                self.required = False

        if PROP_DEFAULT in content:
            self.default = content[PROP_DEFAULT]
        else:
            self.default = None

        if PROP_ENTRY in content:
            # NOTE(review): assumes a type section was present so type_obj
            # is not None -- confirm against the type definitions parsed.
            self.type_obj.entry = EntrySchema(content[PROP_ENTRY])
            self.type_obj.entry._parse_content(db)

        if PROP_CONSTRAINT in content:
            self.constraints = PropertyConstraints(content[PROP_CONSTRAINT])
            self.constraints._parse_content()
        else:
            # BUG FIX: was misspelled `self.contraints`, leaving
            # `self.constraints` unset when no constraint section exists.
            self.constraints = None

        self.parsed = True

    def _create_rawcontent(self):
        """Regenerate raw_content (YAML-output keys) from the parsed fields."""
        self.raw_content = {}
        self.raw_content[YMO_PROP_TYPE] = self.type
        if self.type == TYP_ANY:
            # "any" is not a legal output type; emit string instead.
            self.raw_content[YMO_PROP_TYPE] = TYP_STR
        if self.default is not None and self.default is not self.name:
            # NOTE(review): identity (not equality) check against the name,
            # mirroring the default assigned in __init__ -- confirm intended.
            self.raw_content[YMO_PROP_DEFAULT] = self.default
        if self.required is False:
            self.raw_content[YMO_PROP_REQUIRED] = self.required
# TODO: serialize constraints as well
\ No newline at end of file
diff --git a/app/toscalib/types/property.pyc b/app/toscalib/types/property.pyc
new file mode 100644
index 0000000..73008a3
--- /dev/null
+++ b/app/toscalib/types/property.pyc
Binary files differ
diff --git a/app/toscalib/types/relationship.py b/app/toscalib/types/relationship.py
new file mode 100644
index 0000000..b2408b0
--- /dev/null
+++ b/app/toscalib/types/relationship.py
@@ -0,0 +1,13 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
class RelationshipType:
    """TOSCA relationship type; currently just a named wrapper around its
    raw YAML content (no parsing is performed yet)."""

    def __init__(self, name, content):
        if name is None or content is None:
            # NOTE(review): bails out without setting any attributes, which
            # leaves a half-constructed instance -- callers are expected to
            # always pass both arguments.
            return
        self.name = name
        self.raw_content = content

    def _parse_content(self, db):
        # Placeholder: relationship types are not interpreted yet.
        pass
\ No newline at end of file
diff --git a/app/toscalib/types/relationship.pyc b/app/toscalib/types/relationship.pyc
new file mode 100644
index 0000000..f106cbe
--- /dev/null
+++ b/app/toscalib/types/relationship.pyc
Binary files differ
diff --git a/app/toscalib/types/requirement.py b/app/toscalib/types/requirement.py
new file mode 100644
index 0000000..9cc9f6e
--- /dev/null
+++ b/app/toscalib/types/requirement.py
@@ -0,0 +1,63 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.constant import *
+import logging
+
class RequirementDefinition(object):
    """Definition of one requirement entry of a node type.

    raw_content is a single-entry mapping
    {req_name: {node, capability, relationship, occurrences}}; parsed
    lazily by _parse_content.
    """

    def __init__(self, content):
        self.name =  None
        self.parsed = False
        self.raw_content = content
        self.req_type = None
        self.req_capability = None
        self.relationship = None
        # Default occurrence range: exactly one.
        self.occurrence = [1,1]

    def _parse_content(self, db):
        """Fill the requirement fields from raw_content.

        Malformed content logs a warning and leaves parsed False; only a
        None content is marked parsed immediately.
        """
        if self.parsed is True:
            return

        content = self.raw_content

        if content is None:
            logging.warning( 'Construct None requirement section')
            self.parsed = True
            return
        if len(content.keys()) != 1:
            # BUG FIX: wrap the count in str() -- concatenating the int to
            # the message raised TypeError instead of logging a warning.
            logging.warning( 'Requirement section does not have exact one element: ' + str(len(content.keys())))
            return
        self.name = list(content.keys())[0]
        requirement_def = content[self.name]
        if type(requirement_def) is not dict:
            logging.warning( 'Cannot parse requirement definition: '+ self.name)
            return

        if REQ_NODE in requirement_def:
            self.req_type = requirement_def[REQ_NODE]
        else:
            self.req_type = None
        if REQ_CAPABILITY in requirement_def:
            self.req_capability = requirement_def[REQ_CAPABILITY]
        else:
            self.req_capability = None
        if REQ_RELATIONSHIP in requirement_def:
            self.relationship = requirement_def[REQ_RELATIONSHIP]
        else:
            self.relationship = None
        if REQ_OCCURRENCE in requirement_def:
            self.occurrence = requirement_def[REQ_OCCURRENCE]
            if type(self.occurrence) is not list:
                # BUG FIX: str() added for the same TypeError reason.
                logging.warning( 'Requirement occurrence expects a list of two numbers but provided with: '+ str(self.occurrence))

        self.parsed = True
+        
+        
+            
+            
+            
\ No newline at end of file
diff --git a/app/toscalib/types/requirement.pyc b/app/toscalib/types/requirement.pyc
new file mode 100644
index 0000000..6a61138
--- /dev/null
+++ b/app/toscalib/types/requirement.pyc
Binary files differ
diff --git a/app/toscalib/utils/__init__.py b/app/toscalib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/toscalib/utils/__init__.py
diff --git a/app/toscalib/utils/__init__.pyc b/app/toscalib/utils/__init__.pyc
new file mode 100644
index 0000000..b047006
--- /dev/null
+++ b/app/toscalib/utils/__init__.pyc
Binary files differ
diff --git a/app/toscalib/utils/tosca_export.py b/app/toscalib/utils/tosca_export.py
new file mode 100644
index 0000000..c4399ef
--- /dev/null
+++ b/app/toscalib/utils/tosca_export.py
@@ -0,0 +1,40 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+import yaml
+import re
+from toscalib.templates.constant import *
+
def _yaml_export(filename, content):
    """Serialize *content* to YAML, strip the internal ordering prefix, and
    write the result to *filename*; 'WEB' returns the string without
    writing a file.  Always returns the produced YAML string."""
    dumped = yaml.safe_dump(content,  default_flow_style=False, width=float("inf"))
    dumped = re.sub(YMO_PREFIX, '', dumped)
    if filename != 'WEB':
        with open(filename, 'w') as target:
            target.write(dumped)
    return dumped
+
def _heat_export(filename, content):
    """Translate the topology section of *content* into a HEAT template and
    write it to *filename*.

    TOSCA sections and intrinsic functions are renamed to their HEAT
    counterparts by plain textual substitution on the dumped YAML; the
    outputs section is dropped (not supported by this exporter).
    """
    heat_content = content[YMO_TOPOLOGY]
    if heat_content is None:
        heat_content = {}
    heat_content[YMO_PREFIX + 'heat_template_version'] = '2013-05-23'
    if YMO_TOPO_OUTPUTS in heat_content:
        heat_content.pop(YMO_TOPO_OUTPUTS)

    dumped = yaml.dump(heat_content,  default_flow_style=False)

    # Section renames first, then intrinsic-function renames.
    substitutions = [
        (YMO_TOPO_INPUTS, YMO_PREFIX + 'parameters'),
        (YMO_TOPO_NODE_TEMPLATES, YMO_PREFIX + 'resources'),
        ('get_input', 'get_param'),
        ('get_attribute', 'get_attr'),
        ('get_id', 'get_resource'),
        ('get_property', 'get_attr'),
        ('type: integer', 'type: number'),
    ]
    for pattern, replacement in substitutions:
        dumped = re.sub(pattern, replacement, dumped)

    dumped = re.sub(YMO_PREFIX, '', dumped)
    with open(filename, 'w') as target:
        target.write(dumped)
diff --git a/app/toscalib/utils/tosca_export.pyc b/app/toscalib/utils/tosca_export.pyc
new file mode 100644
index 0000000..2552481
--- /dev/null
+++ b/app/toscalib/utils/tosca_export.pyc
Binary files differ
diff --git a/app/toscalib/utils/tosca_heat.py b/app/toscalib/utils/tosca_heat.py
new file mode 100644
index 0000000..ca5672c
--- /dev/null
+++ b/app/toscalib/utils/tosca_heat.py
@@ -0,0 +1,6 @@
+
+def _type_validate(type):
+    pass
+
+def _type_translate(type):
+    pass
\ No newline at end of file
diff --git a/app/toscalib/utils/tosca_heat.pyc b/app/toscalib/utils/tosca_heat.pyc
new file mode 100644
index 0000000..a69a761
--- /dev/null
+++ b/app/toscalib/utils/tosca_heat.pyc
Binary files differ
diff --git a/app/toscalib/utils/tosca_import.py b/app/toscalib/utils/tosca_import.py
new file mode 100644
index 0000000..d0e2f03
--- /dev/null
+++ b/app/toscalib/utils/tosca_import.py
@@ -0,0 +1,296 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+import os
+from toscalib.utils.yamlparser import load_yaml, simple_parse
+from toscalib.templates.database import ToscaDB
+from toscalib.types.node import NodeType
+from toscalib.types.data import DataType
+from toscalib.types.capability import CapabilityType
+from toscalib.types.relationship import RelationshipType
+from toscalib.templates.constant import *
+from toscalib.templates.topology import ToscaTopology
+from os.path import dirname  # was `from macpath import dirname`; macpath was removed in Python 3.8
+import logging
+
+
+# _TRUE_VALUES = ('True', 'true', '1', 'yes')
+
class import_context(object):
    """Mutable state threaded through a (possibly recursive) import run:
    the current directory/file, the last seen metadata, the chosen template
    name, and any imports that could not be resolved to local files."""

    def __init__(self):
        self.curr_path = os.getcwd()  # directory relative imports resolve against
        self.curr_file_name = ''      # basename of the file being parsed
        self.metadata = None          # metadata section of the last parsed file
        self.temp_name = None         # template key chosen for the topology
        self.extra_imports = []       # imports that were not found on disk
+        
def _dir_import(files_imported, dir_name, db=None, ctx=None):
    """Recursively import every file under *dir_name* into *db*.

    Failures (ValueError from a file or subdirectory) are logged and
    skipped so one bad file does not abort the whole scan.  Returns the
    (possibly newly created) database.
    """
    if db is None:
        db = ToscaDB()

    if os.path.isdir(dir_name) is False:
        logging.warning( 'Dir: '+ dir_name+ ' not exist! Loading failed!')
        return db

    for entry in os.listdir(dir_name):
        full_path = os.path.join(dir_name, entry)
        if os.path.isfile(full_path):
            try:
                db = _file_import(files_imported, full_path, db)
            except ValueError:
                logging.error( 'Fail to import file: '+ full_path)
        elif os.path.isdir(full_path):
            try:
                db = _dir_import(files_imported, full_path, db)
            except ValueError:
                logging.error( 'Fail to import dir: '+ full_path)

    return db
+ 
+     
+    
def _parse_known_sections(parser, files_imported, db, ctx):
    """Dispatch each recognized top-level TOSCA section of *parser* to its
    section parser.

    Shared by the three import entry points below, which previously
    triplicated this sequence.  Resets ctx.extra_imports first; the section
    order (imports, metadata, data types, node types, topology, capability
    types, relationship types) matches the original per-function order.
    """
    ctx.extra_imports = []

    if IMPORT in parser:
        db = _parse_import(files_imported, parser[IMPORT], db, ctx)
    if METADATA in parser:
        ctx.metadata = parser[METADATA]
    if DATA_TYPE in parser:
        db = _parse_data_type(parser[DATA_TYPE], db, ctx)
    if NODE_TYPE in parser:
        db = _parse_node_type(parser[NODE_TYPE], db, ctx)
    if TOPOLOGY in parser:
        db = _parse_topology_template(parser[TOPOLOGY], db, ctx)
    if CAPABILITY_TYPE in parser:
        db = _parse_capability_type(parser[CAPABILITY_TYPE], db, ctx)
    if RELATIONSHIP_TYPE in parser:
        db = _parse_relationship_type(parser[RELATIONSHIP_TYPE], db, ctx)
    return db

def _resolve_path(filename, ctx):
    """Return *filename* as an absolute path; relative paths resolve
    against the context's current directory."""
    if filename == os.path.abspath(filename):
        return filename
    return os.path.abspath(os.path.join(ctx.curr_path, filename))

def _file_import(files_imported, filename, db=None, ctx=None):
    """Import a TOSCA YAML file (and, recursively, its imports) into *db*.

    files_imported -- list of absolute paths already imported (dedupe);
    the file is appended after a successful import.  A missing file is
    recorded in ctx.extra_imports rather than treated as an error.  Only
    the outermost call (ctx is None) runs db._parse_objects().
    """
    logging.debug( 'Start to import file: '+ filename)

    if db is None:
        db = ToscaDB()

    orig_import = ctx is None
    if ctx is None:
        ctx = import_context()

    full_file_path = _resolve_path(filename, ctx)

    if os.path.isfile(full_file_path) == False:
        logging.debug( 'File: ' + filename + ' not exist! Import as extra import!')
        ctx.extra_imports.append({filename:filename})
        return db

    if full_file_path in files_imported:
        logging.debug( 'File: ' + filename + ' has been imported!')
        return db

    ctx.curr_path = os.path.dirname(full_file_path)
    ctx.curr_file_name = os.path.basename(full_file_path)
    parser = load_yaml(full_file_path)

    db = _parse_known_sections(parser, files_imported, db, ctx)

    if orig_import:
        db._parse_objects()

    files_imported.append(full_file_path)
    logging.debug( 'File '+ filename+ ' imported!')
    return db

def _single_template_file_import(filename, db=None, ctx=None):
    """Import one template file without transitive-import bookkeeping.

    Unlike _file_import, a missing file is a hard failure (warning, no-op)
    and db._parse_objects() always runs.
    """
    logging.debug( 'Start to import file: '+ filename)

    if db is None:
        db = ToscaDB()

    if ctx is None:
        ctx = import_context()

    full_file_path = _resolve_path(filename, ctx)

    if os.path.isfile(full_file_path) == False:
        logging.warning( 'File: ' +filename + ' not exist! Import failed!')
        return db

    ctx.curr_path = os.path.dirname(full_file_path)
    ctx.curr_file_name = os.path.basename(full_file_path)
    parser = load_yaml(full_file_path)

    # Nested imports are tracked with a throwaway list: this entry point
    # deliberately does not deduplicate across files.
    db = _parse_known_sections(parser, [], db, ctx)

    db._parse_objects()

    logging.debug( 'File '+ filename+ ' imported!')
    return db

def _yaml_str_import(yml_str, db=None, ctx=None):
    """Import TOSCA definitions from a YAML string.

    NOTE(review): unlike the file-based entry points, *db* is not defaulted
    here; passing db=None relies on the section parsers creating one, and
    db._parse_objects() would fail on a string with no recognized sections
    -- confirm callers always pass a db.
    """
    parser = simple_parse(yml_str)

    if ctx is None:
        ctx = import_context()

    db = _parse_known_sections(parser, [], db, ctx)

    db._parse_objects()

    return db
+
def _parse_data_type(data_type_section, db, ctx):
    """Register every entry of the data_types section in *db*."""
    if data_type_section is None:
        return db
    if db is None:
        db = ToscaDB()

    for type_name, type_body in data_type_section.items():
        db._import_data_type(DataType(type_name, type_body))
    return db
+
def _parse_node_type(node_type_section, db, ctx):
    """Register every entry of the node_types section in *db*."""
    if node_type_section is None:
        return db
    if db is None:
        db = ToscaDB()

    for type_name, type_body in node_type_section.items():
        db._import_node_type(NodeType(type_name, type_body))
    return db
+
def _parse_capability_type(cap_type_section, db, ctx):
    """Register every entry of the capability_types section in *db*."""
    if cap_type_section is None:
        return db
    if db is None:
        db = ToscaDB()

    for type_name, type_body in cap_type_section.items():
        db._import_capability_type(CapabilityType(type_name, type_body))
    return db
+
def _parse_relationship_type(rel_type_section, db, ctx):
    """Register every entry of the relationship_types section in *db*."""
    if rel_type_section is None:
        return db
    if db is None:
        db = ToscaDB()

    for type_name, type_body in rel_type_section.items():
        db._import_relationship_type(RelationshipType(type_name, type_body))
    return db
+
def _parse_topology_template(topology_section, db, ctx):
    """Create a ToscaTopology from the topology_template section and
    register it in *db* under a unique template key.

    Key preference: metadata['template_name'], else the current file name,
    else a generated 'templateN' that is not yet taken.
    """
    if topology_section is None:
        return db
    if db is None:
        db = ToscaDB()

    # BUG FIX: use .get() -- a metadata dict without a 'template_name'
    # key previously raised KeyError here.
    if ctx.metadata is None or ctx.metadata.get('template_name') is None \
            or ctx.metadata.get('template_name') == '':
        template_key = ctx.curr_file_name
    else:
        template_key = ctx.metadata['template_name']

    if template_key is None or template_key == '':
        # Fall back to the first unused generated name.
        index = 0
        while 'template' + str(index) in db.TEMPLATES:
            index += 1
        template_key = 'template' + str(index)

    ctx.temp_name = template_key

    new_topology = ToscaTopology(template_key, ctx.metadata, topology_section)
    new_topology.extra_imports = ctx.extra_imports
    db._import_template(new_topology)

    return db
+
+def _parse_requirement_name_and_value(content):
+    list_size = len(content.keys())
+    if list_size != 1:
+        logging.warning( 'Requirement section does not have exact one element: '+ list_size)
+        return
+    ck = list(content.keys())[0]
+    return ck, content[ck]        
+        
def _parse_import(file_imported, import_section, db, ctx):
    """Import every file referenced in an imports section, restoring the
    context's current path/file around each nested import."""
    if db is None:
        db = ToscaDB()

    if import_section is None:
        return db

    for entry in import_section:
        for target in entry.values():
            saved_path, saved_name = ctx.curr_path, ctx.curr_file_name
            _file_import(file_imported, target, db, ctx)
            ctx.curr_path, ctx.curr_file_name = saved_path, saved_name

    return db
diff --git a/app/toscalib/utils/tosca_import.pyc b/app/toscalib/utils/tosca_import.pyc
new file mode 100644
index 0000000..b86dfa7
--- /dev/null
+++ b/app/toscalib/utils/tosca_import.pyc
Binary files differ
diff --git a/app/toscalib/utils/tosca_operate.py b/app/toscalib/utils/tosca_operate.py
new file mode 100644
index 0000000..bc67310
--- /dev/null
+++ b/app/toscalib/utils/tosca_operate.py
@@ -0,0 +1,94 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+from toscalib.templates.node import Node
+import copy, logging
+#from __builtin__ import False
+
def _create_new_node(template, type_name, node_name = None):
    """Instantiate a node of *type_name* in *template* and return it.

    An unknown type yields a type-less placeholder named 'unknown_N'.
    NOTE(review): the placeholder is returned without being added to the
    template (no _add_node call) -- confirm that is intentional.
    """
    db = template.db
    if type_name not in db.NODE_TYPES:
        placeholder_name = 'unknown_'+ str(template.node_index)
        template.node_index += 1
        placeholder = Node(template, placeholder_name, None)
        logging.debug( 'New node: '+ placeholder_name+ ' added')
        return placeholder

    if node_name is not None:
        new_name = node_name
    else:
        new_name = _get_basename(db.NODE_TYPES[type_name].name) + '_' + str(template.node_index)
        template.node_index += 1

    created = Node(template, new_name, db.NODE_TYPES[type_name])
    template._add_node(created)

    logging.debug( 'New node: '+ new_name+ ' added')
    return created
+    
+
def _create_new_template(template, type_name, prefix_name = None):
    """Deep-copy the stored template *type_name* into *template*, prefixing
    the copied names.

    prefix_name None -> generated '<name>_<index>_' prefix;
    'NO_PREFIX' -> empty prefix; anything else is used verbatim.
    """
    db = template.db
    if prefix_name is None:
        prefix = db.TEMPLATES[type_name].name + '_' + str(template.temp_index) + '_'
        template.temp_index += 1
    elif prefix_name == 'NO_PREFIX':
        prefix = ''
    else:
        prefix = prefix_name

    clone = copy.deepcopy(db.TEMPLATES[type_name])
    clone._update_prefix(prefix)
    # Merge the clone's contents into the target template in place.
    template.inputs.update(clone.inputs)
    template.outputs.update(clone.outputs)
    template.node_dict.update(clone.node_dict)
    return template
+
+def _assign_property_value(node, property_name, value):
+#    if node.properties.has_key(property_name) is False:
+    if property_name not in node.properties:
+        logging.warning( 'No property with name '+ property_name+ ' in the node '+ node.name)
+        return False
+    return node.properties[property_name]._assign(value)
+
+def _assign_capability_property_value(node, cap_name, prop_name, value):
+#    if node.capabilities.has_key(cap_name) is False:
+    if cap_name not in node.capabilities:
+        logging.warning( 'No capability with name '+ cap_name+ ' in the node '+ node.name)
+        return False
+    cap_item = node.capabilities[cap_name]
+#    if cap_item.properties.has_key(prop_name) is False:
+    if prop_name not in cap_item.properties:
+        logging.warning( 'No propoerty with name'+ prop_name+ ' in the node '+ node.name+ ' capability '+ cap_name)
+        return False
+    return cap_item.properties[prop_name]._assign(value)
+
def _assign_requirement_value(node, requirement_name, value):
    """Fulfill the named requirement of *node* with the node *value*.

    Returns False when the requirement does not exist or *value* is not a
    Node.  NOTE(review): when _verify_node rejects the candidate only a
    warning is logged and True is still returned -- confirm intended.
    """
    matched = None
    for candidate in node.requirements:
        if candidate.name == requirement_name:
            matched = candidate
            break

    if matched is None:
        logging.warning( 'No requirement with name '+ requirement_name+ ' in the node '+ node.name)
        return False

    if not isinstance(value, Node):
        logging.warning( 'Node value should be passed to requirement assignment')
        return False

    if matched._verify_node(value):
        matched._assign(value)
    else:
        logging.warning( 'Invalid requirement fulfillment for node '+ node.name+ '->'+ requirement_name)

    return True
+ 
+def _get_basename(name):
+    names = name.split(".")
+    return names[len(names)-1]       
+    
\ No newline at end of file
diff --git a/app/toscalib/utils/tosca_operate.pyc b/app/toscalib/utils/tosca_operate.pyc
new file mode 100644
index 0000000..4ebd264
--- /dev/null
+++ b/app/toscalib/utils/tosca_operate.pyc
Binary files differ
diff --git a/app/toscalib/utils/tosca_print.py b/app/toscalib/utils/tosca_print.py
new file mode 100644
index 0000000..a7205e3
--- /dev/null
+++ b/app/toscalib/utils/tosca_print.py
@@ -0,0 +1,58 @@
+#Author: Shu Shi
+#emaiL: shushi@research.att.com
+
+
+LEVEL_NODE_NAME = 1
+LEVEL_NODE_DETAILS = 2
+LEVEL_NODE_EVERYTHING = 3
+
+def _print_template(template, level=LEVEL_NODE_DETAILS):
+    print_str = ''
+    if template is None: 
+        return print_str
+    print_str += 'Nodes:'+ '\n'
+    for node in iter(template.node_dict.values()):
+        print_str += _print_node(node, level)
+    return print_str
+ 
+def _print_node(node, level):
+    print_str = ''
+    if node is None:
+        return
+    print_str +=  ' '+ node.name + '\n'
+    print_str +=   '    type: '+ node.type+ '\n'
+    if level == LEVEL_NODE_DETAILS:
+        if len(node.properties) > 0:
+            print_str +=   '    properties:' + '\n'
+        for prop in iter(node.properties.values()):
+            if prop.filled:
+                print_str +=   '     '+ prop.name+ ': '+ str(prop.value._get_value()[0])+ '\n'
+            else:
+                print_str +=   '     '+ prop.name+ ': null'+ '\n'
+        if len(node.requirements)> 0:
+            print_str +=   '    requirements:'+ '\n'
+        for req in node.requirements:
+            if req.filled:
+                print_str +=   '     '+ req.name+ ': '+ req.value.name+ '\n'
+            else:
+                print_str +=   '     '+ req.name+ ': null'+ '\n'
+        print_str +=   ''+ '\n'
+    return print_str
+    
+
+def _print_node_types(db):
+    print_str =  'Available node types: '+ '\n'
+    for name in db.NODE_TYPES.keys():
+        print_str +=   name, '\n'
+    return print_str
+
+def _print_templates(db):
+    print_str =   'Available templates: '+ '\n'
+    for name in db.TEMPLATES.keys():
+        print_str +=   name, '\n'
+    return print_str
+
+def _print_node_type(node_type):
+    print_str =   'Node Type Definition: '+ node_type.name+ '\n'
+    print_str +=   node_type.raw_content+ '\n'
+    return print_str
diff --git a/app/toscalib/utils/tosca_print.pyc b/app/toscalib/utils/tosca_print.pyc
new file mode 100644
index 0000000..7c39616
--- /dev/null
+++ b/app/toscalib/utils/tosca_print.pyc
Binary files differ
diff --git a/app/toscalib/utils/validateutils.py b/app/toscalib/utils/validateutils.py
new file mode 100644
index 0000000..dca08b1
--- /dev/null
+++ b/app/toscalib/utils/validateutils.py
@@ -0,0 +1,81 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import collections
+import dateutil.parser
+import numbers
+
+import six
+
+#from toscalib.utils.gettextutils import _
+
+
def str_to_num(value):
    """Convert a string representation of a number into int or float.

    Values that are already numeric pass through unchanged; a string that
    is neither an int nor a float literal raises ValueError (from float).
    """
    if isinstance(value, numbers.Number):
        return value
    try:
        converted = int(value)
    except ValueError:
        converted = float(value)
    return converted
+
+
def validate_number(value):
    """Validate that *value* is (convertible to) a number; returns the
    converted int/float via str_to_num, which raises ValueError otherwise."""
    return str_to_num(value)
+
+
def validate_integer(value):
    """Return *value* as an int, converting when necessary.

    Raises ValueError for non-integer input.  BUG FIX: the message was
    wrapped in the gettext alias `_`, whose import is commented out in
    this module, so failures raised NameError instead of ValueError.
    """
    if not isinstance(value, int):
        try:
            value = int(value)
        except Exception:
            raise ValueError('"%s" is not an integer' % value)
    return value
+
+
def validate_float(value):
    """Return *value* (via validate_number) if it is a float.

    Raises ValueError otherwise.  BUG FIX: dropped the undefined gettext
    `_` wrapper, which made failures raise NameError instead of ValueError.
    """
    if not isinstance(value, float):
        raise ValueError('"%s" is not a float' % value)
    return validate_number(value)
+
+
def validate_string(value):
    """Return *value* if it is a str; raise ValueError otherwise.

    BUG FIXES: dropped the undefined gettext `_` wrapper (NameError on
    failure), and replaced `six.string_types` with `str`, its Python 3
    equivalent, removing the runtime dependency on six for this check.
    """
    if not isinstance(value, str):
        raise ValueError('"%s" is not a string' % value)
    return value
+
+
+def validate_list(value):
+    if not isinstance(value, list):
+        raise ValueError(_('"%s" is not a list') % value)
+    return value
+
+
def validate_map(value):
    """Return *value* if it is a mapping; raise ValueError otherwise.

    BUG FIXES: `collections.Mapping` was removed in Python 3.10 -- use
    `collections.abc.Mapping`; also dropped the undefined gettext `_`
    wrapper (NameError on failure).
    """
    if not isinstance(value, collections.abc.Mapping):
        raise ValueError('"%s" is not a map' % value)
    return value
+
+
def validate_boolean(value):
    """Return the bool for *value*.

    Bools pass through; the strings 'true'/'false' convert
    case-insensitively; anything else raises ValueError.  BUG FIX: dropped
    the undefined gettext `_` wrapper (NameError on failure).
    """
    if isinstance(value, bool):
        return value

    if isinstance(value, str):
        normalised = value.lower()
        if normalised in ['true', 'false']:
            return normalised == 'true'
    raise ValueError('"%s" is not a boolean' % value)
+
+
def validate_timestamp(value):
    """Parse *value* into a datetime via dateutil.

    Raises (from dateutil) on unparseable input.  NOTE(review): the result
    may be naive or timezone-aware depending on the input string -- confirm
    callers handle both.
    """
    return dateutil.parser.parse(value)
diff --git a/app/toscalib/utils/yamlparser.py b/app/toscalib/utils/yamlparser.py
new file mode 100644
index 0000000..e54227d
--- /dev/null
+++ b/app/toscalib/utils/yamlparser.py
@@ -0,0 +1,63 @@
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+
+import codecs
+from collections import OrderedDict
+
+import yaml
+
+
+if hasattr(yaml, 'CSafeLoader'):
+    yaml_loader = yaml.CSafeLoader
+else:
+    yaml_loader = yaml.SafeLoader
+
+
def load_yaml(path):
    """Read the UTF-8 file at *path* and parse it with the module's safe
    YAML loader (CSafeLoader when the C extension is available)."""
    with codecs.open(path, encoding='utf-8', errors='strict') as source:
        raw = source.read()
    return yaml.load(raw, Loader=yaml_loader)
+
+
def simple_parse(tmpl_str):
    """Parse a YAML string with the safe loader.

    YAML errors surface as ValueError; an empty document becomes {}.
    """
    try:
        parsed = yaml.load(tmpl_str, Loader=yaml_loader)
    except yaml.YAMLError as err:
        raise ValueError(err)
    return {} if parsed is None else parsed
+
+
def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
    """Load YAML preserving mapping key order via *object_pairs_hook*.

    NOTE(review): defaults to yaml.Loader (not SafeLoader), which is unsafe
    on untrusted input -- confirm all callers pass trusted templates.
    """
    class _KeepOrderLoader(Loader):
        pass

    def _make_mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _KeepOrderLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        _make_mapping)
    return yaml.load(stream, _KeepOrderLoader)
+
+
def simple_ordered_parse(tmpl_str):
    """Like simple_parse but keeps mapping key order (OrderedDict values).

    YAML errors surface as ValueError; an empty document becomes {}.
    """
    try:
        parsed = ordered_load(tmpl_str)
    except yaml.YAMLError as err:
        raise ValueError(err)
    return {} if parsed is None else parsed
diff --git a/app/toscalib/utils/yamlparser.pyc b/app/toscalib/utils/yamlparser.pyc
new file mode 100644
index 0000000..8d97b2a
--- /dev/null
+++ b/app/toscalib/utils/yamlparser.pyc
Binary files differ
diff --git a/app/version.py b/app/version.py
new file mode 100644
index 0000000..1fb3765
--- /dev/null
+++ b/app/version.py
@@ -0,0 +1 @@
+__version__ =  '1806.0.022118-SNAPSHOT'
\ No newline at end of file
diff --git a/app/version.pyc b/app/version.pyc
new file mode 100644
index 0000000..7a19a68
--- /dev/null
+++ b/app/version.pyc
Binary files differ
diff --git a/app/web/uwsgi.ini b/app/web/uwsgi.ini
new file mode 100644
index 0000000..c4cca12
--- /dev/null
+++ b/app/web/uwsgi.ini
@@ -0,0 +1,16 @@
+[uwsgi]
+module = tosca_server:application
+#plugin = python
+chdir = /srv
+master = True
+processes = 2
+pidfile = /tmp/project-master.pid
+vacuum = true
+max-requests = 5000
+enable-threads = True
+logto = /var/log/uwsgi/%n.log
+
+uid = dcae
+socket = /run/uwsgi/tosca.sock
+chown-socket = dcae:nginx
+chmod-socket = 664
diff --git a/docker/Dockerfile b/docker/Dockerfile
new file mode 100644
index 0000000..fbfa7df
--- /dev/null
+++ b/docker/Dockerfile
@@ -0,0 +1,20 @@
+FROM onap/dcae-tosca-base:1.0.4
+
+RUN apk add --no-cache \
+    curl \
+    && :
+
+COPY ./app /srv/
+
+COPY ./docker/docker-entrypoint.sh /srv/
+
+COPY ./nginx/nginx.conf /etc/nginx/nginx.conf
+
+RUN chmod +x /srv/tosca_server.py && \
+    chmod +x /srv/docker-entrypoint.sh
+
+RUN pip3 install -r /srv/requirements.txt
+
+EXPOSE 8085 
+
+ENTRYPOINT ["/srv/docker-entrypoint.sh", "/usr/local/bin/uwsgi", "--ini", "/srv/web/uwsgi.ini"]
diff --git a/docker/Dockerfile.Base b/docker/Dockerfile.Base
new file mode 100644
index 0000000..0f3b85d
--- /dev/null
+++ b/docker/Dockerfile.Base
@@ -0,0 +1,39 @@
+FROM python:3.5-alpine
+
+ENV http_proxy http://one.proxy.att.com:8080
+ENV https_proxy http://one.proxy.att.com:8080
+
+ENV GROUPID=1000 \
+    USERNAME=dcae \
+    USERID=1001 \
+    USER_HOME=/home/dcae
+
+RUN apk add --no-cache \
+    autoconf \
+    gcc \
+    libc-dev \
+    linux-headers \
+    nginx \
+    && :
+
+RUN pip install --upgrade setuptools && \
+    pip install uwsgi==2.0.15
+
+RUN mkdir -p \
+    /var/log/uwsgi \
+    /run/uwsgi \
+    /srv    
+
+RUN addgroup -g ${GROUPID} -S ${USERNAME}
+
+RUN adduser -S -G ${USERNAME} -u ${USERID} -s /bin/bash -h ${USER_HOME} ${USERNAME}
+
+RUN chown ${USERNAME}:nginx /run/uwsgi
+
+RUN apk del \
+    autoconf \
+    gcc \
+    libc-dev \
+    linux-headers \
+    && :
+
diff --git a/docker/docker-entrypoint.sh b/docker/docker-entrypoint.sh
new file mode 100644
index 0000000..42fe60b
--- /dev/null
+++ b/docker/docker-entrypoint.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+set -x
+echo "Running entry point"
+echo $@
+## Run nginx Web Server
+/usr/sbin/nginx -c /etc/nginx/nginx.conf
+
+# Execute arguments
+echo >&2 $0: Continuing with "$@"...
+exec "$@"
diff --git a/nginx/nginx.conf b/nginx/nginx.conf
new file mode 100644
index 0000000..363e205
--- /dev/null
+++ b/nginx/nginx.conf
@@ -0,0 +1,30 @@
+#user nginx;
+worker_processes  1;
+
+error_log  logs/error.log;
+error_log  logs/error.log  notice;
+error_log  logs/error.log  info;
+
+pid        nginx.pid;
+
+
+events {
+    worker_connections  4096;
+}
+
+
+http {
+
+    server {
+        listen   8085;
+        server_name  localhost;
+        charset utf-8;
+
+        location / {
+            include  uwsgi_params;
+            uwsgi_pass unix:/run/uwsgi/tosca.sock;
+            
+        }
+    }
+
+}
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 0000000..f706f8b
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,159 @@
+<project 
+  xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.onap.oparent</groupId>
+    <artifactId>oparent</artifactId>
+    <version>1.0.0-SNAPSHOT</version>
+  </parent>
+
+  <groupId>org.onap.dcae.dcae-tosca-app</groupId>
+  <artifactId>dcae-tosca-app</artifactId>
+  <packaging>pom</packaging>
+  <version>1.0.0-SNAPSHOT</version>
+  <name>dcae-tosca-app</name>
+
+  <properties>
+        <!--nexus-->
+        <sitePath>/content/sites/site/org/onap/dcae/${project.artifactId}/${project.version}</sitePath>
+        <!--maven-->
+        <timestamp>${maven.build.timestamp}</timestamp>
+        <maven.build.timestamp.format>yyyy.MM.dd.HH.mm</maven.build.timestamp.format>
+        <!--docker-->
+        <docker.tag>${project.version}-${maven.build.timestamp}</docker.tag>
+        <docker.latest.tag>${project.version}-latest</docker.latest.tag>
+        <docker.staging.tag>${parsedVersion.majorVersion}.${parsedVersion.minorVersion}-STAGING-latest</docker.staging.tag>
+  </properties>
+
+
+  <profiles>
+      <profile>
+          <id>docker</id>
+          <properties>
+              <!-- Docker tags -->
+              <maven.build.timestamp.format>yyyyMMdd'T'HHmm</maven.build.timestamp.format>
+              <docker.tag>${project.version}-${maven.build.timestamp}</docker.tag>
+              <docker.latest.tag>${project.version}-latest</docker.latest.tag>
+              <docker.staging.tag>${parsedVersion.majorVersion}.${parsedVersion.minorVersion}-STAGING-latest</docker.staging.tag>
+          </properties>
+          <activation>
+              <activeByDefault>false</activeByDefault>
+          </activation>
+          <build>
+              <plugins>
+                  <plugin>
+                      <groupId>org.codehaus.mojo</groupId>
+                      <artifactId>build-helper-maven-plugin</artifactId>
+                      <version>1.8</version>
+                      <executions>
+                          <execution>
+                              <id>parse-version</id>
+                              <goals>
+                                  <goal>parse-version</goal>
+                              </goals>
+                          </execution>
+                      </executions>
+                  </plugin>
+                  <plugin>
+                      <groupId>org.sonatype.plugins</groupId>
+                      <artifactId>nexus-staging-maven-plugin</artifactId>
+                      <version>1.6.7</version>
+                      <extensions>true</extensions>
+                      <configuration>
+                          <nexusUrl>${onap.nexus.url}</nexusUrl>
+                          <stagingProfileId>176c31dfe190a</stagingProfileId>
+                          <serverId>ecomp-staging</serverId>
+                      </configuration>
+                  </plugin>
+                  <plugin>
+                      <artifactId>maven-resources-plugin</artifactId>
+                      <version>3.0.2</version>
+                      <executions>
+                          <execution>
+                              <id>copy-docker-file</id>
+                              <phase>validate</phase>
+                              <goals>
+                                  <goal>copy-resources</goal>
+                              </goals>
+                              <configuration>
+                                  <outputDirectory>${project.basedir}</outputDirectory>
+                                  <resources>
+                                      <resource>
+                                          <directory>${project.basedir}/docker</directory>
+                                          <includes>
+                                              <include>Dockerfile</include>
+                                          </includes>
+                                      </resource>
+                                  </resources>
+                              </configuration>
+                          </execution>
+                      </executions>
+                  </plugin>
+                  <plugin>
+                      <groupId>io.fabric8</groupId>
+                      <artifactId>docker-maven-plugin</artifactId>
+                      <version>0.19.1</version>
+                      <configuration>
+                          <verbose>true</verbose>
+                          <apiVersion>1.23</apiVersion>
+                          <registry>nexus3.onap.org:10003</registry>
+                          <images>
+                              <image>
+                                  <name>onap/dcae-tosca-app</name>
+                                  <alias>dcae-tosca-app</alias>
+                                  <build>
+                                      <cleanup>true</cleanup>
+                                      <tags>
+                                          <tag>${docker.tag}</tag>
+                                          <tag>${docker.latest.tag}</tag>
+                                          <tag>${docker.staging.tag}</tag>
+                                      </tags>
+                                      <dockerFileDir>${project.basedir}</dockerFileDir>
+                                  </build>
+                              </image>
+                          </images>
+                      </configuration>
+                      <executions>
+                          <execution>
+                              <id>clean-images</id>
+                              <phase>pre-clean</phase>
+                              <goals>
+                                  <goal>remove</goal>
+                              </goals>
+                              <configuration>
+                                  <removeAll>true</removeAll>
+                              </configuration>
+                          </execution>
+                          <execution>
+                              <id>generate-images</id>
+                              <phase>package</phase>
+                              <goals>
+                                  <goal>build</goal>
+                              </goals>
+                          </execution>
+                          <execution>
+                              <id>push-images</id>
+                              <phase>deploy</phase>
+                              <goals>
+                                  <goal>build</goal>
+                                  <goal>push</goal>
+                              </goals>
+                              <configuration>
+                                  <image>onap/dcae-tosca-app</image>
+                              </configuration>
+                          </execution>
+                      </executions>
+                  </plugin>
+              </plugins>
+          </build>
+          <distributionManagement>
+              <site>
+                  <id>nexus</id>
+                  <url>dav:${nexus.url}${sitePath}</url>
+              </site>
+          </distributionManagement>
+      </profile>
+  </profiles>
+</project>
+
diff --git a/scripts/RunJson2Yaml.sh b/scripts/RunJson2Yaml.sh
new file mode 100644
index 0000000..3e12425
--- /dev/null
+++ b/scripts/RunJson2Yaml.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set -x
+# The script should immediately exit if any command in the script fails.
+# set -e
+# This is the path for the json spac folder
+file_path="../app/data/json-spac"
+
+for f in $file_path/*
+do
+    filename=$(basename $f)
+    name=$(echo $filename | cut -f 1 -d '.')
+    
+    echo $filename $name
+    python ../app/model_create.py -i $f -o ../app/data/tosca_model/$name/ --name=$name -t ../app/data/import_file.yaml -m ../app/data/meta_model/meta_tosca_schema.yaml
+    echo "-------------------"
+done
diff --git a/simple_compose_stdin.py b/simple_compose_stdin.py
new file mode 100644
index 0000000..532c57a
--- /dev/null
+++ b/simple_compose_stdin.py
@@ -0,0 +1,35 @@
+#Author: Shu Shi
+#email: shushi@research.att.com
+
+import sys, json, base64, logging
+
+from toscalib.tosca_workbook import ToscaWorkBook
+    
+input_data = sys.stdin.readline()  
+try:
+    in_data = json.loads(input_data)
+except ValueError as e:
+    logging.error( 'error, cannot load input json data: ' + str(input_data))
+
+workbook = ToscaWorkBook()
+workbook._import_dir('./data/shared_model/')
+
+#if in_data.has_key('models'):
+if 'models' in in_data:
+    in_model = in_data['models']
+    if type(in_model) != list:
+        logging.warning( 'models in the input should be a list type')
+    for model_entry in in_model:
+        for key in ['schema', 'template', 'translate']:
+#            if model_entry.has_key(key):
+            if key in model_entry:
+                workbook._import_yml_str(base64.b64decode(model_entry[key]))
+
+#if in_data.has_key('template'):
+if 'template' in in_data:
+    in_temp = in_data['template']
+    workbook._translate_template_yaml_str(base64.b64decode(in_temp))
+    workbook._add_shared_node([{'dcae.capabilities.cdapHost':'cdap_host'}, {'dcae.capabilities.dockerHost': 'docker_host'}, {'dcae.capabilities.composition.host': 'composition_virtual'}])
+                
+ret = workbook._export_yaml_web('cloudify,main')
+print(ret)
diff --git a/tosca_server.pyc b/tosca_server.pyc
new file mode 100644
index 0000000..1bd8f2e
--- /dev/null
+++ b/tosca_server.pyc
Binary files differ
diff --git a/toscalib/__init__.pyc b/toscalib/__init__.pyc
new file mode 100644
index 0000000..55190c1
--- /dev/null
+++ b/toscalib/__init__.pyc
Binary files differ
diff --git a/toscalib/templates/__init__.pyc b/toscalib/templates/__init__.pyc
new file mode 100644
index 0000000..4ab1179
--- /dev/null
+++ b/toscalib/templates/__init__.pyc
Binary files differ
diff --git a/toscalib/templates/capability_item.pyc b/toscalib/templates/capability_item.pyc
new file mode 100644
index 0000000..e81ec20
--- /dev/null
+++ b/toscalib/templates/capability_item.pyc
Binary files differ
diff --git a/toscalib/templates/constant.pyc b/toscalib/templates/constant.pyc
new file mode 100644
index 0000000..ec5584f
--- /dev/null
+++ b/toscalib/templates/constant.pyc
Binary files differ
diff --git a/toscalib/templates/database.pyc b/toscalib/templates/database.pyc
new file mode 100644
index 0000000..5a8d85e
--- /dev/null
+++ b/toscalib/templates/database.pyc
Binary files differ
diff --git a/toscalib/templates/heat_constants.pyc b/toscalib/templates/heat_constants.pyc
new file mode 100644
index 0000000..22fa58a
--- /dev/null
+++ b/toscalib/templates/heat_constants.pyc
Binary files differ
diff --git a/toscalib/templates/interface_item.pyc b/toscalib/templates/interface_item.pyc
new file mode 100644
index 0000000..569c24a
--- /dev/null
+++ b/toscalib/templates/interface_item.pyc
Binary files differ
diff --git a/toscalib/templates/node.pyc b/toscalib/templates/node.pyc
new file mode 100644
index 0000000..4eeb108
--- /dev/null
+++ b/toscalib/templates/node.pyc
Binary files differ
diff --git a/toscalib/templates/operation_item.pyc b/toscalib/templates/operation_item.pyc
new file mode 100644
index 0000000..1fbb451
--- /dev/null
+++ b/toscalib/templates/operation_item.pyc
Binary files differ
diff --git a/toscalib/templates/property_item.pyc b/toscalib/templates/property_item.pyc
new file mode 100644
index 0000000..207f9b8
--- /dev/null
+++ b/toscalib/templates/property_item.pyc
Binary files differ
diff --git a/toscalib/templates/requirement_item.pyc b/toscalib/templates/requirement_item.pyc
new file mode 100644
index 0000000..a9d7579
--- /dev/null
+++ b/toscalib/templates/requirement_item.pyc
Binary files differ
diff --git a/toscalib/templates/substitution_rule.pyc b/toscalib/templates/substitution_rule.pyc
new file mode 100644
index 0000000..b901ed9
--- /dev/null
+++ b/toscalib/templates/substitution_rule.pyc
Binary files differ
diff --git a/toscalib/templates/topology.pyc b/toscalib/templates/topology.pyc
new file mode 100644
index 0000000..1e8a0ba
--- /dev/null
+++ b/toscalib/templates/topology.pyc
Binary files differ
diff --git a/toscalib/tosca_builder.pyc b/toscalib/tosca_builder.pyc
new file mode 100644
index 0000000..8a506a4
--- /dev/null
+++ b/toscalib/tosca_builder.pyc
Binary files differ
diff --git a/toscalib/tosca_workbook.pyc b/toscalib/tosca_workbook.pyc
new file mode 100644
index 0000000..4863e92
--- /dev/null
+++ b/toscalib/tosca_workbook.pyc
Binary files differ
diff --git a/toscalib/types/__init__.pyc b/toscalib/types/__init__.pyc
new file mode 100644
index 0000000..8eaffbd
--- /dev/null
+++ b/toscalib/types/__init__.pyc
Binary files differ
diff --git a/toscalib/types/capability.pyc b/toscalib/types/capability.pyc
new file mode 100644
index 0000000..64afa1a
--- /dev/null
+++ b/toscalib/types/capability.pyc
Binary files differ
diff --git a/toscalib/types/constraints.pyc b/toscalib/types/constraints.pyc
new file mode 100644
index 0000000..af2f629
--- /dev/null
+++ b/toscalib/types/constraints.pyc
Binary files differ
diff --git a/toscalib/types/data.pyc b/toscalib/types/data.pyc
new file mode 100644
index 0000000..3e05119
--- /dev/null
+++ b/toscalib/types/data.pyc
Binary files differ
diff --git a/toscalib/types/entry_schema.pyc b/toscalib/types/entry_schema.pyc
new file mode 100644
index 0000000..8b473f8
--- /dev/null
+++ b/toscalib/types/entry_schema.pyc
Binary files differ
diff --git a/toscalib/types/interface.pyc b/toscalib/types/interface.pyc
new file mode 100644
index 0000000..9fa7b8c
--- /dev/null
+++ b/toscalib/types/interface.pyc
Binary files differ
diff --git a/toscalib/types/node.pyc b/toscalib/types/node.pyc
new file mode 100644
index 0000000..2c289fc
--- /dev/null
+++ b/toscalib/types/node.pyc
Binary files differ
diff --git a/toscalib/types/operation.pyc b/toscalib/types/operation.pyc
new file mode 100644
index 0000000..db7e056
--- /dev/null
+++ b/toscalib/types/operation.pyc
Binary files differ
diff --git a/toscalib/types/property.pyc b/toscalib/types/property.pyc
new file mode 100644
index 0000000..162881f
--- /dev/null
+++ b/toscalib/types/property.pyc
Binary files differ
diff --git a/toscalib/types/relationship.pyc b/toscalib/types/relationship.pyc
new file mode 100644
index 0000000..f3450e8
--- /dev/null
+++ b/toscalib/types/relationship.pyc
Binary files differ
diff --git a/toscalib/types/requirement.pyc b/toscalib/types/requirement.pyc
new file mode 100644
index 0000000..145aeea
--- /dev/null
+++ b/toscalib/types/requirement.pyc
Binary files differ
diff --git a/toscalib/utils/__init__.pyc b/toscalib/utils/__init__.pyc
new file mode 100644
index 0000000..d354098
--- /dev/null
+++ b/toscalib/utils/__init__.pyc
Binary files differ
diff --git a/toscalib/utils/tosca_export.pyc b/toscalib/utils/tosca_export.pyc
new file mode 100644
index 0000000..e07fa63
--- /dev/null
+++ b/toscalib/utils/tosca_export.pyc
Binary files differ
diff --git a/toscalib/utils/tosca_heat.pyc b/toscalib/utils/tosca_heat.pyc
new file mode 100644
index 0000000..a958536
--- /dev/null
+++ b/toscalib/utils/tosca_heat.pyc
Binary files differ
diff --git a/toscalib/utils/tosca_import.pyc b/toscalib/utils/tosca_import.pyc
new file mode 100644
index 0000000..5dc01e8
--- /dev/null
+++ b/toscalib/utils/tosca_import.pyc
Binary files differ
diff --git a/toscalib/utils/tosca_operate.pyc b/toscalib/utils/tosca_operate.pyc
new file mode 100644
index 0000000..b411bd2
--- /dev/null
+++ b/toscalib/utils/tosca_operate.pyc
Binary files differ
diff --git a/toscalib/utils/tosca_print.pyc b/toscalib/utils/tosca_print.pyc
new file mode 100644
index 0000000..7ec2ae3
--- /dev/null
+++ b/toscalib/utils/tosca_print.pyc
Binary files differ
diff --git a/toscalib/utils/yamlparser.pyc b/toscalib/utils/yamlparser.pyc
new file mode 100644
index 0000000..1d5b4c2
--- /dev/null
+++ b/toscalib/utils/yamlparser.pyc
Binary files differ