diff --git a/synthesis/dep_solver.py b/synthesis/dep_solver.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1e5cdc4334799980de2146186b285e64900b41c
--- /dev/null
+++ b/synthesis/dep_solver.py
@@ -0,0 +1,99 @@
+import msg as p
+
+
+class IDependable:
+    def __init__(self):
+        self.dep_fixed = False;
+        self.dep_index = 0;
+        self.dep_provides = [];
+        self.dep_requires = [];
+        self.dep_depends_on = [];
+        
+class DependencySolver:
+    def __init__(self):
+        self.entities = {};
+
+    def _lookup_post_provider(self, files, start_index, requires):
+        while True:
+            start_index = start_index + 1
+            try:
+                f = files[start_index];
+            except IndexError:
+                break
+
+            if requires:
+                for req in requires:
+                    if req in f.dep_provides: 
+                        return start_index
+        return None
+
+    def _find_provider_file(self, files, req):
+        for f in files:
+            if f.dep_provides:
+                if req in f.dep_provides:
+                    return f;
+
+        return None
+        
+    def solve(self, fileset):
+        n_iter = 0
+        max_iter = 100
+        import copy
+
+        fset = fileset.files;
+
+        f_nondep = []
+
+        done = False
+        while not done and (n_iter < max_iter):
+            n_iter = n_iter+1
+            done = True
+            for f in fset:
+                if not f.dep_fixed:
+                    idx = fset.index(f)
+                    k = self._lookup_post_provider(fset, idx, f.dep_requires);
+
+                    if k:
+                        done = False
+                        fset[idx] = (fset[idx], fset[k])
+                        fset[k] = fset[idx][0]
+                        fset[idx] = fset[idx][1]
+
+        if(n_iter == max_iter):
+            p.vprint("Maximum number of iterations reached when trying to solve the dependencies. Perhaps a cyclic inter-dependency problem...");
+            return None
+
+        for f in fset:
+            if f.dep_fixed:
+                f_nondep.append(copy.copy(f))
+                del f
+        
+
+        f_nondep.sort(key=lambda f: f.dep_index)
+
+        for f in fset:
+            p.vprint(f.path)
+            if f.dep_requires:
+                for req in f.dep_requires:
+                    pf = self._find_provider_file(fset, req)
+                    if not pf:
+                        p.vprint("Missing dependency: " + req)
+                        quit()
+                    else:
+                        p.vprint("--> " + pf.path);
+                        f.dep_depends_on.append(pf)
+
+        import srcfile as sf
+
+
+        newobj = sf.SourceFileSet();
+        newobj.add(f_nondep);
+        for f in fset:
+            if not f.dep_fixed:
+                newobj.add(f)
+
+        for k in newobj.files:
+            print(str(k.dep_index) + " " + k.path + str(k.dep_fixed))
+        return newobj
+                
+            
diff --git a/synthesis/depend.py b/synthesis/depend.py
index f8b2dfe1e2b959430634da946293ec557ed48ba7..683490d161996b5bf29f2eadddb8e9b47dc34e88 100644
--- a/synthesis/depend.py
+++ b/synthesis/depend.py
@@ -5,7 +5,7 @@ import re
 import string
 import global_mod
 import msg as p
-
+from srcfile import *
 
 
 def try_utf8(data):
@@ -14,54 +14,6 @@ def try_utf8(data):
     except UnicodeDecodeError:
             return None
 
-def search_for_use(file):
-    """
-    Reads a file and looks for 'use' clause. For every 'use' with
-    non-standard library a tuple (lib, file) is returned in a list.
-    """
-    f = open(file, "r")
-    try:
-        text = f.readlines()
-    except UnicodeDecodeError:
-        return []
-
-    ret = []
-    use_pattern = re.compile("^[ \t]*use[ \t]+([^; ]+)[ \t]*;.*$")
-    lib_pattern = re.compile("([^.]+)\.([^.]+)\.all")
-
-    use_lines = []
-    for line in text:
-        m = re.match(use_pattern, line)
-        if m != None:
-            use_lines.append(m.group(1))
-    for line in use_lines:
-        m = re.match(lib_pattern, line)
-        if m != None:
-            if (m.group(1)).lower() in std_libs:
-                continue
-            ret.append((m.group(1),m.group(2)))
-    f.close()
-    return ret
-
-def search_for_package(file):
-    """
-    Reads a file and looks for package clase. Returns list of packages' names
-    from the file
-    """
-    f = open(file, "r")
-    try:
-        text = f.readlines()
-    except UnicodeDecodeError:
-        return []
-
-    ret = []
-    package_pattern = re.compile("^[ \t]*package[ \t]+([^ \t]+)[ \t]+is[ \t]*$")
-    for line in text:
-        m = re.match(package_pattern, line)
-        if m != None:
-            ret.append(m.group(1))
-    f.close()
-    return ret
 
 def generate_deps_for_sv_files(files):
     def search_for_sv_include(file):
@@ -188,7 +140,13 @@ def generate_list_makefile(file_deps_dict, filename="Makefile.list"):
         f.write("\t\t@echo \'"+file.library+';'+rp(file.path)+"\' >> ise_list\n\n")
     f.write("done:\n\t\t@echo Done.")
 
-def generate_makefile(file_deps, sv_files, filename="Makefile"):
+def emit_string(s):
+    if not s:
+        return ""
+    else:
+        return s
+
+def generate_modelsim_makefile(fileset, module, filename="Makefile"):
     from time import gmtime, strftime
     import path
     #from path import relpath as rp
@@ -197,23 +155,23 @@ def generate_makefile(file_deps, sv_files, filename="Makefile"):
 #   This makefile has been automatically generated by hdl-make 
 #   on """ + date + """
 #######################################################################
-
 """
+
     make_preambule_p1 = """## variables #############################
 PWD := $(shell pwd)
 WORK_NAME := work
 
 MODELSIM_INI_PATH := """ + modelsim_ini_path() + """
 
-VCOM_FLAGS := -nologo -quiet -93 -modelsimini ./modelsim.ini
-VSIM_FLAGS := -voptargs="+acc"
-VLOG_FLAGS := -nologo -quiet -sv -modelsimini $(PWD)/modelsim.ini
+VCOM_FLAGS := -nologo -quiet -93 -modelsimini ./modelsim.ini """ + emit_string(module.vcom_opt) + """
+VSIM_FLAGS := """ + emit_string(module.vsim_opt) + """
+VLOG_FLAGS := -nologo -quiet -sv -modelsimini $(PWD)/modelsim.ini """ + emit_string(module.vlog_opt) + """
 
 
-"""
+""" 
     make_preambule_p2 = """## rules #################################
-all: modelsim.ini $(LIB_IND) $(SV_OBJ) $(VHDL_OBJ)
-$(SV_OBJ): $(VHDL_OBJ) 
+all: modelsim.ini $(LIB_IND) $(VERILOG_OBJ) $(VHDL_OBJ)
+$(VERILOG_OBJ): $(VHDL_OBJ) 
 $(VHDL_OBJ): $(LIB_IND) modelsim.ini
 
 modelsim.ini: $(MODELSIM_INI_PATH)/modelsim.ini
@@ -229,24 +187,23 @@ clean:
     f.write(make_preambule_p1)
 
     rp = os.path.relpath
-    f.write("SV_SRC := ")
+    f.write("VERILOG_SRC := ")
 
-#    SV_SRC := $(wildcard $(PWD)/*.sv)
-#    SV_OBJ := $(foreach svfile, $(SV_SRC), work/$(patsubst %.sv,%/_primary.dat,$(notdir $(svfile))))
-
-    for file in sv_files:
+#    print(str(fileset.files))
+    for file in fileset.filter(VerilogFile):
         f.write(rp(file.path) + " \\\n")
     f.write("\n")
 
-    f.write("SV_OBJ := ")
-    for file in sv_files:
+    f.write("VERILOG_OBJ := ")
+    for file in fileset.filter(VerilogFile):
         f.write(os.path.join(file.library, file.purename, "."+file.purename) + " \\\n")
     f.write('\n')
 
-    libs = set(file.library for file in list(file_deps.keys()))
+    libs = set(file.library for file in fileset.files)
+
     #list vhdl objects (_primary.dat files)
     f.write("VHDL_OBJ := ")
-    for file in file_deps:
+    for file in fileset.filter(VHDLFile):
         f.write(os.path.join(file.library, file.purename,"."+file.purename) + " \\\n")
     f.write('\n')
 
@@ -263,22 +220,23 @@ clean:
     vmo = global_mod.top_module.vmap_opt
     for lib in libs:
         f.write(lib+"/."+lib+":\n")
-        f.write(' '.join(["\t(vlib", vlo, lib, "&&", "vmap", vmo, "-modelsimini modelsim.ini", 
+        f.write(' '.join(["\t(vlib",  lib, "&&", "vmap", "-modelsimini modelsim.ini", 
         lib, "&&", "touch", lib+"/."+lib,")"]))
 
         f.write(' '.join(["||", "rm -rf", lib, "\n"]))
         f.write('\n')
 
     #rules for all _primary.dat files for sv
-    for file in sv_files:
+    for file in fileset.filter(VerilogFile):
         f.write(os.path.join(file.library, file.purename, '.'+file.purename)+': '+rp(file.path)+"\n")
         f.write("\t\tvlog -work "+file.library+" $(VLOG_FLAGS) $<")
         f.write(" && mkdir -p "+os.path.join(file.library+'/'+file.purename) )
         f.write(" && touch "+ os.path.join(file.library, file.purename, '.'+file.purename)+'\n')
     f.write("\n")
+
     #list rules for all _primary.dat files for vhdl
     vco = global_mod.top_module.vcom_opt
-    for file in file_deps:
+    for file in fileset.filter(VHDLFile):
         lib = file.library
         basename = file.name
         purename = file.purename 
@@ -287,11 +245,11 @@ clean:
         f.write(' '.join(["\t\tvcom $(VCOM_FLAGS)", vco, "-work", lib, rp(file.path),
         "&&", "mkdir -p", os.path.join(lib, purename), "&&", "touch", os.path.join(lib, purename, '.'+ purename), '\n']))
         f.write('\n')
-        if len(file_deps[file]) != 0:
+        if len(file.dep_depends_on) != 0:
             f.write(os.path.join(lib, purename, "."+purename) +":")
-            for dep_file in file_deps[file]:
+            for dep_file in file.dep_depends_on:
                 name = dep_file.purename
                 f.write(" \\\n"+ os.path.join(dep_file.library, name, "."+name))
             f.write('\n\n')
 
-    f.close()
\ No newline at end of file
+    f.close()
diff --git a/synthesis/fetch.py b/synthesis/fetch.py
index 2e8e5f08dbd5a50989f72df167fdb7ccab8542f0..fc58564400a07acb8342d5df274d227f1eee6bf9 100644
--- a/synthesis/fetch.py
+++ b/synthesis/fetch.py
@@ -30,24 +30,58 @@ def fetch_from_git(url, revision = None, fetchto = None):
     if fetchto == None:
         fetchto = global_mod.fetchto
 
+    basename = path.url_basename(url)
+    if basename.endswith(".git"):
+        basename = basename[:-4] #remove trailing .git
+
     if not os.path.exists(fetchto):
+        if not global_mod.options.fetch:
+            return None;
         os.mkdir(fetchto)
 
-    cur_dir = os.getcwd()
-    os.chdir(fetchto)
+    if os.path.exists(fetchto+"/"+basename):
+        if global_mod.options.fetch:
+            update_only = True;
+            do_fetch = True;
+        else:
+            return True;
+    else:
+        if(global_mod.options.fetch):
+            update_only = False;
+            do_fetch = True;
+        else:
+            return None
 
-    basename = url_basename(url)
-    if basename.endswith(".git"):
-        basename = basename[:-4] #remove trailing .git
+    rval = True
+    if do_fetch:
 
-    cmd = "git clone " + url
-    p.vprint(cmd)
-    os.system(cmd)
-    if revision:
-        os.chdir(basename)
-        os.system("git checkout " + revision)
-    os.chdir(cur_dir)
+        cur_dir = os.getcwd()
+        os.chdir(fetchto)
+
+        if update_only:
+            fdir = fetchto+"/"+basename;
+            os.chdir(fdir);
+            cmd = "git pull"
+            p.vprint(cmd);
+            if os.system(cmd) != 0:
+                rval = False
+            os.chdir(fetchto)
 
+        else:  		
+            cmd = "git clone " + url
+            p.vprint(cmd);
+            if os.system(cmd) != 0:
+                rval = False
+	    
+
+        if revision and rval:
+            os.chdir(basename)
+            if os.system("git checkout " + revision) != 0:
+                rval = False
+            
+        os.chdir(cur_dir)
+
+    return rval
 
 def parse_repo_url(url) :
     """
@@ -64,3 +98,5 @@ def parse_repo_url(url) :
     else:
         ret = url_match.group(1)
     return ret
+
+
diff --git a/synthesis/fetch_new.py b/synthesis/fetch_new.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6c556def839c9d2e1c136b1753f9d40971b88d4
--- /dev/null
+++ b/synthesis/fetch_new.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import os
+import msg as p
+import global_mod
+import path
+
+
+class ModuleFetcher:
+
+    def __init__(self, fetch_dir = None):
+        pass
+
+# inputs:
+# - type of the module (local/git/svn)
+    def fetch(self, mod_type, mod_url):
+        pass
+
+    # TODO: define the remaining fetcher API methods
+
+class ModulePool: pass  # placeholder: module pool not yet implemented
+
+
+
+
+def fetch_from_svn(url, revision = None, fetchto = None):
+    if fetchto == None:
+        fetchto = global_mod.fetchto
+
+    if not os.path.exists(fetchto):
+        os.mkdir(fetchto)
+
+    cur_dir = os.getcwd()
+    os.chdir(fetchto)
+    basename = path.url_basename(url)
+
+    cmd = "svn checkout {0} " + basename
+    if revision:
+        cmd = cmd.format(url + '@' + revision)
+    else:
+        cmd = cmd.format(url)
+
+    p.vprint(cmd)
+    os.system(cmd)
+    os.chdir(cur_dir)
+
+def fetch_from_git(url, revision = None, fetchto = None):
+    if fetchto == None:
+        fetchto = global_mod.fetchto
+
+    basename = path.url_basename(url)
+    if basename.endswith(".git"):
+        basename = basename[:-4] #remove trailing .git
+
+    if not os.path.exists(fetchto):
+        os.mkdir(fetchto)
+
+    if not os.path.exists(fetchto+"/"+basename):
+        update_only = False
+    else:
+        update_only = True
+
+    cur_dir = os.getcwd()
+    os.chdir(fetchto)
+
+
+    if update_only:
+        cmd = "git --git-dir="+basename+"/.git pull"
+    else:  		
+        cmd = "git clone " + url
+	    
+    rval = True
+    if os.system(cmd) != 0:
+        rval = False
+
+    if revision and rval:
+        os.chdir(basename)
+        if os.system("git checkout " + revision) != 0:
+            rval = False
+            
+    os.chdir(cur_dir)
+    return rval
+
+
+def parse_repo_url(url) :
+    """
+    Check if link to a repo seems to be correct
+    """
+    import re
+    url_pat = re.compile("[ \t]*([^ \t]+)[ \t]*(@[ \t]*(.+))?[ \t]*")
+    url_match = re.match(url_pat, url)
+
+    if url_match == None:
+        p.echo("Not a correct repo url: {0}. Skipping".format(url)); return None
+    if url_match.group(3) != None: #there is a revision given 
+        ret = (url_match.group(1), url_match.group(3))
+    else:
+        ret = url_match.group(1)
+    return ret
+
+
diff --git a/synthesis/file.py b/synthesis/file.py
new file mode 100644
index 0000000000000000000000000000000000000000..aedbff7df59c0e92a0f12539f9c0df522a618fe4
--- /dev/null
+++ b/synthesis/file.py
@@ -0,0 +1,137 @@
+
+from dep_solver import *
+from os import path
+
+
+class SourceFile(IDependable):
+        cur_index = 0
+        def __init__(self, path):
+                IDependable.__init__(self)
+                self.path = path;
+
+        def gen_index(self):    
+                self.__class__.cur_index = self.__class__.cur_index+1
+                return self.__class__.cur_index
+
+class VHDLFile(SourceFile):
+        def __init__(self, path):
+                SourceFile.__init__(self, path);
+                self.create_deps();
+                if self.dep_fixed:
+                        print("File " + self.path + " fixed dep [idx " + str(self.dep_index) + "]")
+                else:
+                        print("File " + self.path + " uses: " + str(self.dep_requires) + " provides: " + str(self.dep_provides))
+
+        def check_encryption(self):
+                f = open(self.path, "rb");
+                s = f.read(3);
+                f.close()
+                if(s == b'Xlx'):
+                        return True
+                else:
+                        return False
+                        
+        def create_deps(self):
+                if self.check_encryption():
+                        self.dep_index = SourceFile.gen_index(self)
+                        self.dep_fixed = True
+                else:
+                        self.dep_requires = self.search_use_clauses()
+                        self.dep_provides = self.search_packages()
+
+        def search_use_clauses(self):
+                """
+                Reads a file and looks for 'use' clause. For every 'use' with
+                non-standard library a tuple (lib, file) is returned in a list.
+
+                """
+
+                import re
+                std_libs = ['ieee', 'altera_mf', 'cycloneiii', 'lpm', 'std', 'unisim', 'XilinxCoreLib', 'simprims']
+
+                f = open(self.path, "r")
+                try:
+                        text = f.readlines()
+                except UnicodeDecodeError:
+                        return []
+
+                use_pattern = re.compile("^[ \t]*use[ \t]+([^; ]+)[ \t]*;.*$")
+                lib_pattern = re.compile("([^.]+)\.([^.]+)\.all")
+
+                use_lines = []
+                for line in text:
+                        m = re.match(use_pattern, line)
+                        if m != None:
+                                use_lines.append(m.group(1))
+                                
+                ret = []
+                for line in use_lines:
+                        m = re.match(lib_pattern, line)
+                        if m != None:
+                                if (m.group(1)).lower() in std_libs:
+                                        continue
+                                ret.append((m.group(1),m.group(2)))
+
+                f.close()
+                return ret
+        
+        def search_packages(self):
+                """
+                Reads a file and looks for package clase. Returns list of packages' names
+                from the file
+                """
+
+                import re
+                f = open(self.path, "r")
+                try:
+                        text = f.readlines()
+                except UnicodeDecodeError:
+                        return []
+
+                package_pattern = re.compile("^[ \t]*package[ \t]+([^ \t]+)[ \t]+is[ \t]*$")
+
+                ret = []
+                for line in text:
+                        m = re.match(package_pattern, line)
+                        if m != None:
+                                ret.append(m.group(1))
+
+                f.close()
+                return ret
+        
+class VerilogFile(SourceFile):
+        def __init__(self, path):
+                SourceFile.__init__(self, path);
+                self.create_deps();
+
+        def create_deps(self):
+                self.dep_requires = self.search_includes()
+                self.dep_provides = path.basename(self.path);
+
+        def search_includes(self):
+                pass
+
+class UCFFile(SourceFile):
+        pass
+
+class NGCFile(SourceFile):
+        pass
+
+
+class SourceFileSet:
+        def __init__(self):
+                self.files = [];
+        
+        def add(self, files):
+                for f in files:
+                        if f.endswith('.vhd') or f.endswith('.vhdl'):
+                                nf = VHDLFile(f)
+                        elif f.endswith('.v') or f.endswith('.sv'):
+                                nf = VerilogFile(f);
+                        elif f.endswith('.ngc'):
+                                nf = NGCFile(f);
+                        elif f.endswith('.ucf'):
+                                nf = UCFFile(f);
+
+                        self.files.append(nf);
+                        
diff --git a/synthesis/flow.py b/synthesis/flow.py
new file mode 100755
index 0000000000000000000000000000000000000000..43cb4516f3b415ad40c8111a444e61a422630d3f
--- /dev/null
+++ b/synthesis/flow.py
@@ -0,0 +1,139 @@
+#!/usr/bin/python
+
+import xml.dom.minidom as xml
+import sys
+from srcfile import *
+
+xmlimpl = xml.getDOMImplementation()
+
+class ISEProjectProperty:
+        def __init__(self,  name, value, is_default = False):
+                self.name = name
+                self.value = value
+                self.is_default = is_default
+
+        def emit_xml(self, doc):
+                prop = doc.createElement("property")
+                prop.setAttribute("xil_pn:name", self.name)
+                prop.setAttribute("xil_pn:value", self.value)
+                if self.is_default:
+                        prop.setAttribute("xil_pn:valueState", "default")
+                else:
+                        prop.setAttribute("xil_pn:valueState", "non-default")
+
+                return prop
+
+
+class ISEProject:
+        def __init__(self, top_mod = None):
+                self.props = []
+                self.files = []
+                self.libs = []
+                self.xml_doc = None
+                self.top_mod = top_mod
+
+        def add_files(self, files):
+                self.files.extend(files);
+
+        def add_libs(self, libs):
+                self.libs.extend(libs);
+                self.libs.remove('work')
+
+        def add_property(self, prop):
+                self.props.append(prop)
+
+
+        def _parse_props(self):
+                for p in self.xml_project.getElementsByTagName("properties")[0].getElementsByTagName("property"):
+                        prop = ISEProjectProperty(
+                                p.getAttribute("xil_pn:name"),
+                                p.getAttribute("xil_pn:value"),
+                                p.getAttribute("xil_pn:valueState") == "default"
+                                )
+
+                        self.props.append(prop)
+
+                
+
+        def load_xml(self, filename):
+                f = open(filename)
+                self.xml_doc = xml.parse(f) 
+                self.xml_project =  self.xml_doc.getElementsByTagName("project")[0];
+                self._parse_props()
+                purge_dom_node(self.xml_project.getElementsByTagName("files")[0]);
+                purge_dom_node(self.xml_project.getElementsByTagName("properties")[0]);
+                f.close()
+
+        def _output_files(self, node):
+
+                for f in self.files:
+                        import os
+                        fp = self.xml_doc.createElement("file")
+                        fp.setAttribute("xil_pn:name", os.path.relpath(f.path))
+                        if (isinstance(f, VHDLFile)):
+                                fp.setAttribute("xil_pn:type", "FILE_VHDL")
+                        elif (isinstance(f, VerilogFile)):
+                                fp.setAttribute("xil_pn:type", "FILE_VERILOG")
+                        elif (isinstance(f, UCFFile)):
+                                fp.setAttribute("xil_pn:type", "FILE_UCF")
+
+                        assoc = self.xml_doc.createElement("association");
+                        assoc.setAttribute("xil_pn:name", "Implementation");
+                        assoc.setAttribute("xil_pn:seqID", str(self.files.index(f)+1));
+
+                        if(f.library != "work"):
+                                lib = self.xml_doc.createElement("library");
+                                lib.setAttribute("xil_pn:name", f.library);
+                                fp.appendChild(lib)
+
+                        fp.appendChild(assoc)
+                        node.appendChild(fp);
+
+        def _output_props(self, node):
+                for p in self.props:
+                        node.appendChild(p.emit_xml(self.xml_doc))
+
+        def _output_libs(self, node):
+                for l in self.libs:
+                        ll =  self.xml_doc.createElement("library")
+                        ll.setAttribute("xil_pn:name", l);
+                        node.appendChild(ll);
+
+
+        def emit_xml(self, filename = None):
+                if not self.xml_doc:
+                        self.create_empty_project()
+
+                self._output_files(self.xml_files);
+                self._output_props(self.xml_props);
+                self._output_libs(self.xml_libs);
+
+                        
+                self.xml_doc.writexml(open(filename,"w"), newl="\n")
+
+
+        def create_empty_project(self):
+                self.xml_doc = xmlimpl.createDocument("http://www.xilinx.com/XMLSchema", "project", None)
+                top_element = self.xml_doc.documentElement
+                top_element.setAttribute("xmlns", "http://www.xilinx.com/XMLSchema")
+                top_element.setAttribute("xmlns:xil_pn", "http://www.xilinx.com/XMLSchema")
+
+                version = self.xml_doc.createElement( "version")
+                version.setAttribute("xil_pn:ise_version", "13.1");
+                version.setAttribute("xil_pn:schema_version", "2");
+
+                header = self.xml_doc.createElement("header")
+                header.appendChild(self.xml_doc.createTextNode(""))
+
+                self.xml_files = self.xml_doc.createElement("files")
+                self.xml_props = self.xml_doc.createElement("properties")
+                self.xml_libs = self.xml_doc.createElement("libraries")
+
+                top_element.appendChild(header)
+                top_element.appendChild(version)
+                top_element.appendChild(self.xml_files)
+                top_element.appendChild(self.xml_props)
+                top_element.appendChild(self.xml_libs)
+                
+
+
diff --git a/synthesis/hdlmake.py b/synthesis/hdlmake.py
index 4d7d69868171336941725cf4a7492ebb8c52d775..ad1f8a0368de03fbbfc64b218d2140ce4f20732e 100755
--- a/synthesis/hdlmake.py
+++ b/synthesis/hdlmake.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/python2.7
 # -*- coding: utf-8 -*-
 
 import re
@@ -15,10 +15,15 @@ import global_mod
 import msg as p
 import optparse
 from module import Module
-from helper_classes import Manifest, SourceFile, ManifestParser
+from helper_classes import Manifest, ManifestParser
+from fetch import *
+
 
 def main():
+#    print("Start");
+
     global_mod.t0 = time.time()
+
     parser = optparse.OptionParser()
     #disabled due to introducing a new parser class. Help msg printing is not ready yet.
 
@@ -26,13 +31,10 @@ def main():
     dest="manifest_help", help="print manifest file variables description")
 
     parser.add_option("-k", "--make", dest="make", action="store_true",
-    default=None, help="prepare makefile for simulation")
+    default=None, help="Generate a Makefile (simulation/synthesis)")
 
     parser.add_option("-f", "--fetch", action="store_true", dest="fetch",
-    help="fetch files from modules listed in MANIFEST")
-
-    parser.add_option("--make-fetch", action="store_true", dest="make_fetch",
-    help="generate makefile for fetching needed modules")
+    help="fetch and/or update remote modules listed in Manifest")
 
     parser.add_option("-l", "--synthesize-locally", dest="local",
     action="store_true", help="perform a local synthesis")
@@ -46,24 +48,9 @@ def main():
     parser.add_option("--ipcore", dest="ipcore", action="store_true",
     default="false", help="generate a pseudo ip-core")
 
-    parser.add_option("--inject", dest="inject", action="store_true",
-    default=None, help="inject file list into ise project")
-
     parser.add_option("--nodel", dest="nodel", action="store_true",
     default="false", help="don't delete intermediate makefiles")
 
-    parser.add_option("--make-list", dest="make_list", action="store_true",
-    default=None, help="make list of project files in ISE format")
-
-    parser.add_option("--tcl-file", dest="tcl",
-    help="specify a .tcl file used for synthesis with ISE")
-
-    parser.add_option("--qpf-file", dest="qpf",
-    help="specify a .qpf file used for synthesis with QPF")
-
-    parser.add_option("--ise-file", dest="ise",
-    help="specify .xise file for other actions", metavar="ISE")
- 
     parser.add_option("--synth-server", dest="synth_server",
     default=None, help="use given SERVER for remote synthesis", metavar="SERVER")
 
@@ -76,6 +63,8 @@ def main():
     (options, args) = parser.parse_args()
     global_mod.options = options
 
+#    print("Parsed");
+
     if options.manifest_help == True:
         ManifestParser().help()
         quit()
@@ -89,37 +78,32 @@ def main():
         file = "Manifest.py"
 
     if file != None:
+        p.vprint("LoadTopManifest");
         top_manifest = Manifest(path=os.path.abspath(file))
         global_mod.top_module = Module(manifest=top_manifest, parent=None, source="local", fetchto=".")
+
         global_mod.top_module.parse_manifest()
         global_mod.global_target = global_mod.top_module.target
+        global_mod.top_module.fetch()
     else:
         p.echo("No manifest found. At least an empty one is needed")
         quit()
 
     global_mod.ssh = Connection(options.synth_user, options.synth_server)
 
-    if global_mod.options.fetch == True:
-        fetch()
-    elif global_mod.options.local == True:
+    if global_mod.options.local == True:
         local_synthesis()
     elif global_mod.options.remote == True:
         remote_synthesis()
-    elif global_mod.options.make_list == True:
-        generate_list_makefile()
     elif global_mod.options.make == True:
         generate_makefile()
-    elif global_mod.options.inject == True:
-        inject_into_ise()
-    elif global_mod.options.ipcore == True:
-        generate_pseudo_ipcore()
-    elif global_mod.options.make_fetch == True:
-        generate_fetch_makefile()
 
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 def generate_pseudo_ipcore():
     import depend
     tm = global_mod.top_module
+
+    
     file_deps_dict = tm.generate_deps_for_vhdl_in_modules()
     depend.generate_pseudo_ipcore(file_deps_dict)
 
@@ -160,11 +144,109 @@ def inject_into_ise():
     depend.inject_files_into_ise(global_mod.options.ise_project, files)
 
 def generate_makefile():
+    from dep_solver import DependencySolver
     import depend
+    solver = DependencySolver()
+
     tm = global_mod.top_module
-    vhdl_deps = tm.generate_deps_for_vhdl_in_modules()
-    sv_files = tm.extract_files_from_all_modules(extensions=['v','sv'])
-    depend.generate_makefile(file_deps=vhdl_deps, sv_files=sv_files)
+    flist = tm.build_global_file_list();
+    flist_sorted = solver.solve(flist);
+
+    if(tm.target == "simulation"):
+        depend.generate_modelsim_makefile(flist_sorted, tm)
+    elif (tm.target == "xilinx"):
+        generate_ise_project(flist_sorted, tm);
+        generate_ise_makefile(tm)
+
+def generate_ise_makefile(top_mod):
+    filename = "Makefile"
+    f=open(filename,"w");
+    
+    mk_text = """
+PROJECT=""" + top_mod.syn_project + """
+ISE_CRAP = \
+*.bgn \
+*.html \
+*.tcl \
+*.bld \
+*.cmd_log \
+*.drc \
+*.lso \
+*.ncd \
+*.ngc \
+*.ngd \
+*.ngr \
+*.pad \
+*.par \
+*.pcf \
+*.prj \
+*.ptwx \
+*.stx \
+*.syr \
+*.twr \
+*.twx \
+*.gise \
+*.unroutes \
+*.ut \
+*.xpi \
+*.xst \
+*_bitgen.xwbt \
+*_envsettings.html \
+*_guide.ncd \
+*_map.map \
+*_map.mrp \
+*_map.ncd \
+*_map.ngm \
+*_map.xrpt \
+*_ngdbuild.xrpt \
+*_pad.csv \
+*_pad.txt \
+*_par.xrpt \
+*_summary.html \
+*_summary.xml \
+*_usage.xml \
+*_xst.xrpt \
+usage_statistics_webtalk.html \
+webtalk.log \
+webtalk_pn.xml \
+run.tcl
+
+
+all:		syn
+
+clean:
+		rm -f $(ISE_CRAP)
+		rm -rf xst xlnx_auto_*_xdb iseconfig _xmsgs _ngo
+
+mrproper:
+\trm -f *.bit *.bin *.mcs
+
+syn:
+		echo "project open $(PROJECT)" > run.tcl
+		echo "process run {Generate Programming File} -force rerun_all" >> run.tcl
+		xtclsh run.tcl
+"""
+    f.write(mk_text);
+    f.close()
+				
+
+def generate_ise_project(fileset, top_mod):
+    from flow import ISEProject, ISEProjectProperty
+    
+    prj = ISEProject()
+    prj.add_files(fileset.files)
+    prj.add_libs(fileset.get_libs())
+
+    prj.add_property(ISEProjectProperty("Device", top_mod.syn_device))
+    prj.add_property(ISEProjectProperty("Device Family", "Spartan6"))
+    prj.add_property(ISEProjectProperty("Speed Grade", top_mod.syn_grade))
+    prj.add_property(ISEProjectProperty("Package", top_mod.syn_package))
+#    prj.add_property(ISEProjectProperty("Implementation Top", "Architecture|"+top_mod.syn_top))
+    prj.add_property(ISEProjectProperty("Implementation Top", "Architecture|"+top_mod.syn_top))
+    prj.add_property(ISEProjectProperty("Manual Implementation Compile Order", "true"))
+    prj.add_property(ISEProjectProperty("Auto Implementation Top", "false"))
+    prj.add_property(ISEProjectProperty("Implementation Top Instance Path", "/"+top_mod.syn_top))
+    prj.emit_xml(top_mod.syn_project)
 
     #NOT YET TRANSFORMED INTO CLASSES
 def remote_synthesis():
@@ -215,37 +297,23 @@ def remote_synthesis():
         p.echo("Deleting synthesis folder")
         ssh.system('rm -rf ' + dest_folder)
 
-def local_synthesis():
-    if global_mod.options.tcl == None:
-        p.echo("No .tcl file found. Exiting")
-        quit()
-    ise = global_mod.top_module.ise
-    tcl = global_mod.options.tcl
-    if not os.path.exists("/opt/Xilinx/" + ise):
-        p.echo("The script can't find demanded ISE version: " + ise)
-        quit()
def local_run_xilinx_flow(tm):
    """Run the Xilinx ISE flow locally for the given top module.

    Writes a minimal run.tcl script (open the project, rerun the
    "Generate Programming Files" process) and executes it with xtclsh.

    tm -- top module object; only tm.syn_project (path to the .xise
    project file) is read.
    """
    # 'with' guarantees the script is flushed and closed before xtclsh
    # reads it.
    with open("run.tcl", "w") as f:
        # Each tcl command must be on its own line; the original code
        # concatenated both commands into one invalid line because the
        # writes carried no newline.
        f.write("project open " + tm.syn_project + "\n")
        f.write("process run {Generate Programming Files} -force rerun_all\n")
    os.system("xtclsh run.tcl")
 
-    address_length = check_address_length(os)
-    if address_length == 32 or address_length == None:
-        path_ext = global_mod.ise_path_32[ise]
-    else:
-        p.echo("Don't know how to run settings script for ISE version: " + ise)
-    results = os.popen("export PATH=$PATH:"+path_ext+" && xtclsh " + tcl + " run_process")
-    p.echo(results.readlines())
-    quit()
 
-def generate_list_makefile():
-    import depend
def local_synthesis():
    """Dispatch a local synthesis run for the global top module.

    Only the "xilinx" target is currently supported; any other target
    is reported as not synthesizable.
    """
    top = global_mod.top_module
    if top.target != "xilinx":
        p.echo("Target " + top.target + " is not synthesizable")
        return
    local_run_xilinx_flow(top)
 
-    if global_mod.options.nodel != True:
-        os.remove("Makefile.list")
 
-if __name__ == "__main__":
-    #global options' map for use in the entire script
-    t0 = None
-    global_mod.cwd = os.getcwd()
-    main()
def generate_list_makefile():
    # Stub: the old Makefile.list generation was removed; the empty
    # function is kept so existing callers keep working.
    # TODO(review): implement or drop the callers.
    pass
+
+main()
diff --git a/synthesis/helper_classes.py b/synthesis/helper_classes.py
index d85e9c8d1cc66706542697fd14c1251e7314a5bb..fb5054676d6a548ff76cadf96bb37190041ef7b9 100644
--- a/synthesis/helper_classes.py
+++ b/synthesis/helper_classes.py
@@ -62,9 +62,14 @@ class ManifestParser(ConfigParser):
         self.add_option('fetchto', default=None, help="Destination for fetched modules", type='')
         self.add_option('root_module', default=None, help="Path to root module for currently parsed", type='')
         self.add_option('name', default=None, help="Name of the folder at remote synthesis machine", type='')
-        self.add_option('tcl', default=None, help="Path to .tcl file used in synthesis", type='')
-        self.add_option('ise', default=None, help="Version of ISE to be used in synthesis", type='')
-        self.add_type('ise', type=1)
+
+
+        self.add_option('syn_device', default=None, help = "Target FPGA device", type = '');
+        self.add_option('syn_grade', default=None, help = "Speed grade of target FPGA", type = '');
+        self.add_option('syn_package', default=None, help = "Package variant of target FPGA", type = '');
+        self.add_option('syn_top', default=None, help = "Top level module for synthesis", type = '');
+        self.add_option('syn_project', default=None, help = "Vendor flow project file", type = '');
+
 
         self.add_option('vsim_opt', default="", help="Additional options for vsim", type='')
         self.add_option('vcom_opt', default="", help="Additional options for vcom", type='')
@@ -90,7 +95,7 @@ class ManifestParser(ConfigParser):
     #def print_help():
     #    self.parser.print_help()
 
-class SourceFile:
+class PawelSourceFile:
     def __init__(self, path, type=None):
         self.path = path
         self.name = os.path.basename(self.path)
@@ -170,7 +175,7 @@ class SourceFile:
         f.close()
         self.package = ret
 
-class IseProjectFile(SourceFile):
+class IseProjectFile(PawelSourceFile):
     def __init__(self, path=None, type="ise"):
         SourceFile.__init__(self, path=path, type=type)
 
diff --git a/synthesis/module.py b/synthesis/module.py
index 872c28bec2bbd3ae37943a94b391746348f2b9f0..40cb9af3369e93235c3892f68a0b2bc2a65c5009 100644
--- a/synthesis/module.py
+++ b/synthesis/module.py
@@ -4,7 +4,11 @@ import msg as p
 import os
 import configparser
 import global_mod
-from helper_classes import Manifest, ManifestParser, SourceFile, IseProjectFile, ManifestOptions
+from helper_classes import Manifest, ManifestParser, IseProjectFile, ManifestOptions
+from srcfile import *
+
+from fetch import *
+
 
 class Module(object):
     def __init__(self, parent, url=None, files=None, manifest=None,
@@ -12,14 +16,17 @@ class Module(object):
         self.options = ManifestOptions()
         if source == "local" and path != None:
             if not os.path.exists(path):
-                raise ValueError("There is no such local module: " + path)
+                raise ValueError("Path to the local module doesn't exist: " + path)
+
         self.parent = parent
+
         if files == None:
             self.options["files"] = []
         elif not isinstance(files, list):
             self.options["files"] = [files]
         else:
             self.options["files"] = files
+
         if manifest != None and fetchto == None:
             options["fetchto"] = os.path.dirname(manifest.path)
 
@@ -75,8 +82,8 @@ class Module(object):
         #options = object.__getattribute__(self, "options")
         return self.options[attr]
 
-    def __str__(self):
-        return self.url
+  #  def __str__(self):
+  #      return self.url
 
     def search_for_manifest(self):
         """
@@ -86,6 +93,7 @@ class Module(object):
         for filename in os.listdir(self.path):
             if filename == "manifest.py" or filename == "Manifest.py":
                 if not os.path.isdir(filename):
+                    p.vprint("*** found manifest for module "+self.path);
                     manifest = Manifest(path=os.path.abspath(os.path.join(self.path, filename)))
                     return manifest
         # no manifest file found
@@ -100,17 +108,26 @@ class Module(object):
         return sth
 
     def parse_manifest(self):
+        p.vprint(self);
+        p.vprint("IsParsed "+str(self.isparsed)+" isFetched " + str(self.isfetched));
         if self.isparsed == True:
             return
         if self.isfetched == False:
             return
 
         manifest_parser = ManifestParser()
-        manifest_parser.add_arbitrary_code("target="+global_mod.global_target)
+        if(self.parent != None):
+            print("GlobMod " +str(global_mod.top_module))
+            manifest_parser.add_arbitrary_code("target=\""+global_mod.top_module.target+"\"")
+        else:
+            print("NoParent")
+            global_mod.top_module = self
+
+        manifest_parser.add_arbitrary_code("__manifest=\""+self.url+"\"")
         manifest_parser.add_arbitrary_code(global_mod.options.arbitrary_code)
 
         if self.manifest == None:
-            p.vprint(' '.join(["In module",str(self),"there is no manifest."]))
+            p.vprint("No manifest found in module "+str(self))
         else:
             manifest_parser.add_manifest(self.manifest)
             p.vprint("Parsing manifest file: " + str(self.manifest))
@@ -126,6 +143,9 @@ class Module(object):
             self.root_module = Module(path=root_path, source="local", isfetched=True, parent=self)
             self.root_module.parse_manifest()
 
+        self.target = opt_map["target"]
+
+
         #derivate fetchto from the root_module
         if opt_map["root_module"] != None:
             self.fetchto = self.root_module.fetchto
@@ -133,7 +153,12 @@ class Module(object):
             if not path_mod.is_rel_path(opt_map["fetchto"]):
                 p.echo(' '.join([os.path.basename(sys.argv[0]), "accepts relative paths only:", opt_map["fetchto"]]))
                 quit()
-            fetchto = path_mod.rel2abs(opt_map["fetchto"], self.path)
+
+            if(opt_map["fetchto"] != None):
+                fetchto = path_mod.rel2abs(opt_map["fetchto"], self.path)
+            else:
+                fetchto = None
+
         #this is the previous solution - no derivation 
         #if opt_map["fetchto"] == None:
         #    fetchto = self.path
@@ -157,14 +182,20 @@ class Module(object):
 
         self.library = opt_map["library"]
         if opt_map["files"] == []:
-            files = []
-            for filename in os.listdir(self.path):
-                path = os.path.join(self.path, filename)
-                if not os.path.isdir(path):
-                    file = SourceFile(path=path)
-                    file.library = self.library
-                    files.append(file)
-            self.files = files
+
+# don't scan if there a manifest exists but contains no files (i.e. only sub-modules)
+#            fact = SourceFileFactory ()
+
+#            files = []
+#            for filename in os.listdir(self.path):
+#                path = os.path.join(self.path, filename)
+#                if not os.path.isdir(path):
+#                    file = fact.new(path=path)
+#                    file.library = self.library
+#                    files.append(file)
+#            self.files = files
+            self.fileset = SourceFileSet()
+            pass
         else:
             opt_map["files"] = self.__make_list(opt_map["files"])
             paths = []
@@ -178,8 +209,9 @@ class Module(object):
                     p.echo("File listed in " + self.manifest.path + " doesn't exist: "
                     + path +".\nExiting.")
                     quit()
-            self.__make_list_of_files(paths=paths)
 
+            self.fileset = self.create_flat_file_list(paths=paths);
+            
         if "svn" in opt_map["modules"]:
             opt_map["modules"]["svn"] = self.__make_list(opt_map["modules"]["svn"])
             svn = []
@@ -201,22 +233,26 @@ class Module(object):
         self.vmap_opt = opt_map["vmap_opt"]
         self.vcom_opt = opt_map["vcom_opt"]
         self.vlog_opt = opt_map["vlog_opt"]
+        self.vsim_opt = opt_map["vsim_opt"]
 
         self.name = opt_map["name"]
         self.target = opt_map["target"]
 
+        self.syn_device = opt_map["syn_device"];
+        self.syn_grade = opt_map["syn_grade"];
+        self.syn_package= opt_map["syn_package"];
+        self.syn_project = opt_map["syn_project"];
+        self.syn_top = opt_map["syn_top"];
+
         self.isparsed = True
 
     def is_fetched(self):
         return self.isfetched
 
     def fetch(self):
+
         if self.source == "local":
             self.path = self.url
-        elif self.source == "svn":
-            self.__fetch_from_svn()
-        elif self.source == "git":
-            self.__fetch_from_git()
 
         involved_modules = [self]
         modules_queue = [self]
@@ -224,9 +260,11 @@ class Module(object):
         p.vprint("Fetching manifest: " + str(self.manifest))
 
         while len(modules_queue) > 0:
-            cur_mod = modules_queue.pop()
-            cur_mod.parse_manifest()
-
+            if(self.source == "local"):
+                cur_mod = modules_queue.pop()
+                p.vprint("ModPath: " + cur_mod.path);
+                cur_mod.parse_manifest()
+            
             if cur_mod.root_module != None:
                 root_module = cur_mod.root_module
                 p.vprint("Encountered root manifest: " + str(root_module))
@@ -242,13 +280,19 @@ class Module(object):
                 p.vprint("Fetching to " + module.fetchto)
                 path = module.__fetch_from_svn()
                 module.path = path
+                module.source = "local"
+                module.isparsed = False;
                 involved_modules.append(module)
                 modules_queue.append(module)
 
             for module in cur_mod.git:
-                p.vprint("Fetching to " + module.fetchto)
+                p.vprint("[git] Fetching to " + module.fetchto)
                 path = module.__fetch_from_git()
                 module.path = path
+                module.source = "local"
+                module.isparsed = False;
+                module.manifest = module.search_for_manifest();
+                p.vprint("[git] Local path " + module.path);
                 involved_modules.append(module)
                 modules_queue.append(module)
 
@@ -283,26 +327,10 @@ class Module(object):
         return os.path.join(fetchto, basename)
 
     def __fetch_from_git(self):
-        fetchto = self.fetchto
-        if not os.path.exists(fetchto):
-            os.mkdir(fetchto)
-
-        cur_dir = os.getcwd()
-        os.chdir(fetchto)
-
+#        p.vprint(self.fetchto);
         basename = path_mod.url_basename(self.url)
-        if basename.endswith(".git"):
-            basename = basename[:-4] #remove trailing .git
-
-        cmd = "git clone " + self.url
-        p.vprint(cmd)
-        os.system(cmd)
-        #if revision:
-        #    os.chdir(basename)
-        #    os.system("git checkout " + revision)
-        os.chdir(cur_dir)
-        self.isfetched = True
-        return os.path.join(fetchto, basename)
+        self.isfetched = fetch_from_git(self.url, None, self.fetchto);
+        return os.path.join(self.fetchto, basename)
 
     def make_list_of_modules(self):
         p.vprint("Making list of modules for " + str(self))
@@ -310,8 +338,9 @@ class Module(object):
         modules = [self]
         while len(new_modules) > 0:
             cur_module = new_modules.pop()
+#            p.vprint("Current: " + str(cur_module))
             if not cur_module.isfetched:
-                p.echo("Error in modules list - unfetched module: " + cur_mod)
+                p.echo("Error in modules list - unfetched module: " + str(cur_module))
                 quit()
             if cur_module.manifest == None:
                 p.vprint("No manifest in " + str(cur_module))
@@ -338,60 +367,23 @@ class Module(object):
             p.vprint("No modules were found in " + self.fetchto)
         return modules
 
-    def __make_list_of_files(self, paths, file_type = None, ret_class = SourceFile):
-        def get_files_(path, file_type = None, ret_class = SourceFile):
-            """
-            Get lists of normal files and list folders recursively
-            """
-            ret = []
-            for filename in os.listdir(path):
-                if filename[0] == ".": #a hidden file/catalogue -> skip
-                    continue
-                if os.path.isdir(os.path.join(path, filename)):
-                    ret.extend(get_files_(os.path.join(path, filename), file_type))
-                else:
-                    if file_type == None:
-                        ret.append(ret_class(path=os.path.abspath(os.path.join(path, filename))))
-                    else:
-                        tmp = filename.rsplit('.')
-                        ext = tmp[len(tmp)-1]
-                        if ext == file_type:
-                            ret.append( ret_class(path=os.path.abspath(os.path.join(path, filename))) )
-            return ret
-
-        files = []
-        for path in paths:
-            if os.path.isdir(path):
-                files.extend(get_files_(path, file_type=file_type, ret_class=ret_class))
-            else:
-                if file_type == None:
-                    files.append(ret_class(path=path))
-                else:
-                    tmp = filename.rsplit('.')
-                    ext = tmp[len(tmp)-1]
-                    if ext == file_type:
-                        files.append( ret_class(path=path) )
-        for file in files:
-            file.library = self.library
-        self.files = files
-
-    def extract_files_from_module(self, extensions):
-        p.vprint("Extracting files from the module: " + str(self))
-        from copy import copy
-        if isinstance(extensions, list):
-            files = [copy(f) for f in self.files if f.extension() in extensions]
-        elif isinstance(extensions, basestring):
-            files = [copy(f) for f in self.files if f.extension() == extensions]
-        return files
-
-    def extract_files_from_all_modules(self, extensions):
+
+    def create_flat_file_list(self, paths):
+        fact = SourceFileFactory();
+        srcs = SourceFileSet();
+        for p in paths:
+            srcs.add(fact.new(p, self.library))
+        return srcs
+
+
+    def build_global_file_list(self):
+        f_set = SourceFileSet();
+#        self.create_flat_file_list();
         modules = self.make_list_of_modules()
-        files = []
-        for module in modules:
-            tmp = module.extract_files_from_module(extensions=extensions)
-            files.extend(tmp)
+        for m in modules:
+            f_set.add(m.fileset);
 
-        return files
+        return f_set
 
     def generate_deps_for_vhdl_in_modules(self):
         all_files = self.extract_files_from_all_modules(extensions="vhd")
@@ -486,4 +478,4 @@ class Module(object):
                 p.echo("modules.svn has strange format (neither string nor list)")
         if m.local != None:
             if not isinstance(m.local, [basestring,list]):
-                p.echo("modules.svn has strange format (neither string nor list)")
\ No newline at end of file
+                p.echo("modules.svn has strange format (neither string nor list)")
diff --git a/synthesis/srcfile.py b/synthesis/srcfile.py
new file mode 100644
index 0000000000000000000000000000000000000000..dbe8322d81c5a7e969e4b059cdd8c105b2c55377
--- /dev/null
+++ b/synthesis/srcfile.py
@@ -0,0 +1,196 @@
+from dep_solver import *
+import os
+
+
class SourceFile(IDependable):
    """A single source file tracked by the dependency solver.

    Stores the file's path, basename, stem and target library
    ("work" when none is given).
    """

    # Class-wide counter used by gen_index() to hand out unique
    # dependency indices.
    cur_index = 0

    def __init__(self, path, library=None):
        IDependable.__init__(self)
        self.path = path
        self.name = os.path.basename(path)
        self.purename = os.path.splitext(self.name)[0]
        self.library = library if library else "work"

    def __str__(self):
        return self.path

    def extension(self):
        # Everything after the last dot; with no dot this yields the
        # whole path, matching the original rsplit behaviour.
        return self.path.rsplit('.')[-1]

    def isdir(self):
        return os.path.isdir(self.path)

    def gen_index(self):
        # Counter lives on the concrete subclass (self.__class__),
        # exactly as in the original implementation.
        cls = self.__class__
        cls.cur_index += 1
        return cls.cur_index

    def show(self):
        print(self.path)
+
class VHDLFile(SourceFile):
    """A VHDL source file.

    On construction the file is scanned for 'use' clauses and package
    declarations, which populate dep_requires/dep_provides for the
    dependency solver.  Xilinx-encrypted sources cannot be parsed, so
    they get a fixed dependency index instead.
    """

    def __init__(self, path, library=None):
        SourceFile.__init__(self, path, library)
        self.create_deps()

    def check_encryption(self):
        """Return True if the file starts with the Xilinx encryption
        header b'Xlx'."""
        # 'with' closes the handle even on an early return/exception
        # (the original left it to the GC).
        with open(self.path, "rb") as f:
            return f.read(3) == b'Xlx'

    def create_deps(self):
        """Fill in the IDependable fields from the file's contents."""
        if self.check_encryption():
            # Encrypted sources can't be parsed: pin them at a fixed
            # position in the compile order.
            self.dep_index = SourceFile.gen_index(self)
            self.dep_fixed = True
        else:
            self.dep_requires = self.search_use_clauses()
            self.dep_provides = self.search_packages()

    def _read_lines(self):
        """Return the file's lines, or [] when it is not valid text.

        The original opened the file separately in both search methods
        and leaked the handle on the UnicodeDecodeError return path.
        """
        try:
            with open(self.path, "r") as f:
                return f.readlines()
        except UnicodeDecodeError:
            return []

    def search_use_clauses(self):
        """Collect non-standard 'use lib.pkg.all;' clauses.

        Returns a list of 'lib::pkg' strings, skipping the standard
        simulation/vendor libraries.
        """
        import re
        # All lowercase: matched names are lowercased first.  The
        # original listed 'XilinxCoreLib' mixed-case, so it could
        # never match.
        std_libs = ['ieee', 'altera_mf', 'cycloneiii', 'lpm', 'std',
                    'unisim', 'xilinxcorelib', 'simprims']

        use_pattern = re.compile(r"^[ \t]*use[ \t]+([^; ]+)[ \t]*;.*$")
        lib_pattern = re.compile(r"([^.]+)\.([^.]+)\.all")

        ret = []
        for line in self._read_lines():
            m = re.match(use_pattern, line)
            if m is None:
                continue
            lm = re.match(lib_pattern, m.group(1))
            if lm is None:
                continue
            if lm.group(1).lower() in std_libs:
                continue
            ret.append(lm.group(1) + "::" + lm.group(2))
        return ret

    def search_packages(self):
        """Return 'library::package' for every package declared in
        the file."""
        import re
        package_pattern = re.compile(r"^[ \t]*package[ \t]+([^ \t]+)[ \t]+is[ \t]*$")

        ret = []
        for line in self._read_lines():
            m = re.match(package_pattern, line)
            if m is not None:
                ret.append(self.library + "::" + m.group(1))
        return ret
+        
class VerilogFile(SourceFile):
    """A Verilog source file."""

    def __init__(self, path, library=None):
        # SourceFile already defaults the library to "work"; the
        # original duplicated that check here.
        SourceFile.__init__(self, path, library)
        self.create_deps()

    def create_deps(self):
        """Fill in the IDependable fields."""
        self.dep_requires = self.search_includes()
        # Must be a list: the dependency solver tests membership with
        # 'req in dep_provides', and a bare string would turn that
        # into a substring match.
        self.dep_provides = [os.path.basename(self.path)]

    def search_includes(self):
        # `include scanning is not implemented yet; return an empty
        # list (not None) so dep_requires is always iterable.
        return []
+
class UCFFile(SourceFile):
    """A Xilinx UCF constraints file."""

    def __init__(self, path, library=None):
        # Forward the library for consistency with the other file
        # types; the default keeps old UCFFile(path) calls working.
        SourceFile.__init__(self, path, library)
+
class NGCFile(SourceFile):
    """A Xilinx NGC netlist file."""

    def __init__(self, path, library=None):
        # Forward the library for consistency with the other file
        # types; the default keeps old NGCFile(path) calls working.
        SourceFile.__init__(self, path, library)
+
class WBGenFile(SourceFile):
    """A wbgen slave description file (.wb)."""

    def __init__(self, path, library=None):
        # Forward the library for consistency with the other file
        # types; the default keeps old WBGenFile(path) calls working.
        SourceFile.__init__(self, path, library)
+
+
+
class SourceFileSet:
    """An ordered, flat collection of source files."""

    def __init__(self):
        self.files = []

    def add(self, files):
        """Append another SourceFileSet, a list of files, or a single
        file to this set."""
        if isinstance(files, SourceFileSet):
            self.files.extend(files.files)
        elif isinstance(files, list):
            self.files.extend(files)
        else:
            self.files.append(files)

    def filter(self, type):
        """Return the members that are instances of *type*."""
        return [item for item in self.files if isinstance(item, type)]

    def get_libs(self):
        """Return the set of libraries used by the contained files."""
        return set(item.library for item in self.files)
+
+
class SourceFileFactory:
    """Creates the appropriate SourceFile subclass for a path, based
    on its file extension."""

    def new(self, path, library=None):
        """Return a SourceFile subclass instance for *path*.

        library is forwarded to the file types that use it.  Raises
        ValueError for an extension with no handler (the original
        crashed there with UnboundLocalError on the unset 'nf'), and
        matches extensions case-insensitively.
        """
        extension = path.rsplit('.')[-1].lower()

        if extension in ('vhd', 'vhdl'):
            return VHDLFile(path, library)
        if extension in ('v', 'sv'):
            return VerilogFile(path, library)
        if extension == 'ngc':
            return NGCFile(path)
        if extension == 'ucf':
            return UCFFile(path)
        if extension == 'wb':
            return WBGenFile(path)
        raise ValueError("Unsupported file extension: " + path)
+