Commit e3cdeae4 authored by Paweł Szostek

logging: change from in-house to python standard

parent 01309173
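Editor's note: the commit replaces the in-house `msg` helper with Python's standard `logging` module. The level-selection idiom adopted in hdlmake.py boils down to the following self-contained sketch; the option wiring here is illustrative rather than copied verbatim from the parser.

import logging
import optparse

# Map a user-supplied level name ("debug", "info", ...) to the numeric
# constant defined by the logging module, then configure the root logger.
parser = optparse.OptionParser()
parser.add_option("--log", dest="log", default="info",
                  help="set logging level (one of debug, info, warning, error, critical)")
options, _ = parser.parse_args(["--log", "debug"])  # example invocation

numeric_level = getattr(logging, options.log.upper(), None)
if not isinstance(numeric_level, int):
    raise SystemExit("Invalid log level: %s" % options.log)
logging.basicConfig(level=numeric_level)

logging.debug("verbose message, visible only at debug level")
logging.info("normal progress message")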
......@@ -9,15 +9,21 @@
import os
from connection import Connection
import global_mod
import msg as p
import optparse
import logging
from fetch import ModulePool
from env import Env
try:
from build_hash import BUILD_ID
except:
BUILD_ID = "unrecognized"
def main():
usage = "usage: %prog [options]\n"
usage += "type %prog --help to get help message"
parser = optparse.OptionParser(usage=usage)
parser.add_option("--manifest-help", action="store_true",
......@@ -78,8 +84,8 @@ def main():
parser.add_option("--py", dest="arbitrary_code",
default="", help="add arbitrary code to all manifests' evaluation")
parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
default="false", help="verbose mode")
parser.add_option("--log", dest="log",
default="info", help="set logging level (one of debug, info, warning, error, critical")
parser.add_option("--version", dest="print_version", action="store_true",
default="false", help="print version id of this Hdlmake build")
......@@ -96,18 +102,21 @@ def main():
quit()
if options.print_version is True:
p.print_version()
print("Hdlmake build " + BUILD_ID)
quit()
p.vprint("LoadTopManifest")
numeric_level = getattr(logging, options.log.upper(), None)
if not isinstance(numeric_level, int):
print('Invalid log level: %s' % options.log)
logging.basicConfig(level=numeric_level)
pool = ModulePool()
pool.new_module(parent=None, url=os.getcwd(), source="local", fetchto=".")
# Setting top_module as top module of design (ModulePool class)
if pool.get_top_module().manifest is None:
p.rawprint("No manifest found. At least an empty one is needed")
p.rawprint("To see some help, type hdlmake --help")
logging.info("No manifest found. At least an empty one is needed")
logging.info("To see some help, type hdlmake --help")
quit()
# Setting global variable (global_mod.py)
......@@ -148,14 +157,14 @@ def main():
sth_chosen = True
getattr(kernel, function)()
except Exception, unknown_error:
p.echo("Oooops! We've got an error. Here is the appropriate info:\n")
p.print_version()
print(unknown_error)
logging.error("Oooops! We've got an error. Here is the appropriate info:\n")
print("Hdlmake build " + BUILD_ID)
print(str(unknown_error))
traceback.print_exc()
if not sth_chosen:
p.rawprint("No option selected. Running automatic flow")
p.rawprint("To see some help, type hdlmake --help")
logging.info("No option selected. Running automatic flow")
logging.info("To see some help, type hdlmake --help")
kernel.run()
if __name__ == "__main__":
......
......@@ -19,11 +19,13 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
import msg as p
from __future__ import print_function
import logging
import sys
import StringIO
import contextlib
@contextlib.contextmanager
def stdoutIO(stdout=None):
old = sys.stdout
......@@ -33,6 +35,7 @@ def stdoutIO(stdout=None):
yield stdout
sys.stdout = old
class ConfigParser(object):
"""Class for parsing python configuration files
......@@ -144,15 +147,14 @@ class ConfigParser(object):
elif key == "default":
self.default = others["default"]
elif key == "type":
self.add_type(type_obj=others["type"])
self.add_type(type_obj=others["type"])
else:
raise ValueError("Option not recognized: " + key)
def add_type(self, type_obj):
self.types.append(type(type_obj))
def __init__(self, description = None):
def __init__(self, description=None):
if description is not None:
if not isinstance(description, str):
raise ValueError("Description should be a string!")
......@@ -174,10 +176,10 @@ class ConfigParser(object):
raise RuntimeError("No such option as " + str(name))
def help(self):
p.rawprint("Variables available in a manifest:")
print("Variables available in a manifest:")
for opt in self.options:
if opt is None:
p.rawprint("")
print("")
continue
line = ' {0:15}; {1:29}; {2:45}{3}{4:10}'
......@@ -188,7 +190,7 @@ class ConfigParser(object):
line = line.format(opt.name, str(opt.types), opt.help, ', default=', tmp_def)
except AttributeError: # no default value
line = line.format(opt.name, str(opt.types), opt.help, "", "")
p.rawprint(line)
print(line)
def add_option(self, name, **others):
if name in self.__names():
......@@ -237,7 +239,7 @@ class ConfigParser(object):
if self.config_file is not None:
with open(self.config_file, "r") as config_file:
content = open(self.config_file, "r").readlines()
content = config_file.readlines()
content = ''.join(content)
else:
content = ''
......@@ -258,11 +260,11 @@ class ConfigParser(object):
if printed:
print(printed)
except SyntaxError as e:
p.error("Invalid syntax in the arbitraty code:\n" + str(e))
logging.error("Invalid syntax in the arbitraty code:\n" + str(e))
quit()
except:
p.error("Unexpected error while parsing arbitrary code:")
p.rawprint(str(sys.exc_info()[0])+':'+str(sys.exc_info()[1]))
logging.error("Unexpected error while parsing arbitrary code:")
print(str(sys.exc_info()[0])+':'+str(sys.exc_info()[1]))
quit()
try:
......@@ -270,19 +272,19 @@ class ConfigParser(object):
exec(content, options)
printed = s.getvalue()
if len(printed) > 0:
p.info("The manifest inside " + self.config_file + " tried to print something:")
logging.info("The manifest inside " + self.config_file + " tried to print something:")
for line in printed.split('\n'):
p.rawprint("> " + line)
print("> " + line)
#print "out:", s.getvalue()
except SyntaxError as e:
p.error("Invalid syntax in the manifest file " + self.config_file+ ":\n" + str(e))
logging.error("Invalid syntax in the manifest file " + self.config_file + ":\n" + str(e))
quit()
except:
p.error("Encountered unexpected error while parsing " + self.config_file)
p.rawprint(str(sys.exc_info()[0]) +':'+ str(sys.exc_info()[1]))
logging.error("Encountered unexpected error while parsing " + self.config_file)
print(str(sys.exc_info()[0]) + ':' + str(sys.exc_info()[1]))
quit()
for opt_name, val in list(options.items()): #check delivered options
for opt_name, val in list(options.items()): # check delivered options
if opt_name.startswith('__'):
continue
if opt_name not in self.__names():
......@@ -294,22 +296,22 @@ class ConfigParser(object):
raise NameError("Unrecognized option: " + opt_name)
opt = self[opt_name]
if type(val) not in opt.types:
raise RuntimeError("Given option: "+str(type(val))+" doesn't match specified types:"+str(opt.types))
raise RuntimeError("Given option: %s doesn't match specified types: %s" % (str(type(val)), str(opt.types)))
ret[opt_name] = val
# print("Opt_name ", opt_name)
if type(val) == type(dict()):
try:
for key in val:
if key not in self[opt_name].allowed_keys:
raise RuntimeError("Encountered unallowed key: " +key+ " for options '"+opt_name+"'")
except AttributeError: #no allowed_keys member - don't perform any check
raise RuntimeError("Encountered unallowed key: %s for option '%s'" % (key, opt_name))
except AttributeError: # no allowed_keys member - don't perform any check
pass
for opt in self.options: #set values for not listed items with defaults
for opt in self.options: # set values for not listed items with defaults
try:
if opt.name not in ret:
ret[opt.name] = opt.default
except AttributeError: #no default value in the option
except AttributeError: # no default value in the option
pass
return ret
......@@ -319,4 +321,4 @@ def _test():
doctest.testmod()
if __name__ == "__main__":
_test()
\ No newline at end of file
_test()
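Editor's note: the manifest parser above runs manifest code through exec() while temporarily swapping sys.stdout, so anything a manifest prints can be reported afterwards instead of leaking to the console. A minimal standalone sketch of that capture idiom follows; the dual StringIO import is an assumption added so the snippet runs on both Python 2 and 3.

import sys
import contextlib
try:
    from StringIO import StringIO   # Python 2, as used in this codebase
except ImportError:
    from io import StringIO         # Python 3 fallback

@contextlib.contextmanager
def stdoutIO(stdout=None):
    # Temporarily replace sys.stdout so output produced by exec'd code
    # can be inspected after the block instead of being printed directly.
    old = sys.stdout
    if stdout is None:
        stdout = StringIO()
    sys.stdout = stdout
    yield stdout
    sys.stdout = old

with stdoutIO() as s:
    exec("print('hello from a manifest')")
printed = s.getvalue()
if printed:
    for line in printed.split('\n'):
        print("> " + line)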
......@@ -22,7 +22,8 @@
import os
import random
import string
import msg as p
import logging
class Connection:
def __init__(self, ssh_user, ssh_server):
......@@ -42,7 +43,7 @@ class Connection:
def __check(self):
if not self.__data_given():
p.echo("Error: no data for connection given")
logging.info("Error: no data for connection given")
quit()
def system(self, cmd):
......@@ -59,10 +60,9 @@ class Connection:
self.__check()
#create a new catalogue on remote machine
if dest_folder is None:
dest_folder = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(8))
mkdir_cmd = 'mkdir -p ' + dest_folder
import msg as p
p.vprint("Connecting to " + str(self) + " and creating directory " + dest_folder + ": " + mkdir_cmd)
dest_folder = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(8))
mkdir_cmd = 'mkdir -p ' + dest_folder
logging.debug("Connecting to " + str(self) + " and creating directory " + dest_folder + ": " + mkdir_cmd)
self.system(mkdir_cmd)
#create a string with filenames
......@@ -71,16 +71,16 @@ class Connection:
rsync_cmd = "rsync -Rav " + local_files_str + " " + self.ssh_user + "@" + self.ssh_server + ":" + dest_folder
#rsync_cmd += " > /dev/null"
p.vprint("Coping files to remote machine: "+rsync_cmd)
logging.debug("Coping files to remote machine: " + rsync_cmd)
import subprocess
p = subprocess.Popen(rsync_cmd, shell=True)
os.waitpid(p.pid, 0)[1]
process = subprocess.Popen(rsync_cmd, shell=True)
os.waitpid(process.pid, 0)[1]
return dest_folder
def transfer_files_back(self, what, where):
self.__check()
rsync_cmd = "rsync -av " + self.ssh_user + "@" + self.ssh_server + ":" + what + ' ' + where
p.vprint(rsync_cmd)
logging.debug(rsync_cmd)
os.system(rsync_cmd)
def is_good(self):
......@@ -93,11 +93,11 @@ class Connection:
p = self.popen("uname -a")
p = p.readlines()
if not len(p):
p.echo("Checking address length failed")
logging.warning("Checking address length failed")
return None
elif "i686" in p[0]:
return 32
elif "x86_64" in p[0]:
return 64
else:
return None
\ No newline at end of file
return None
......@@ -20,10 +20,11 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
import msg as p
import logging
import global_mod
import os.path
class IDependable:
def __init__(self):
self.dep_index = 0
......@@ -116,7 +117,7 @@ class DependencySolver:
for dir in inc_dirs:
dir = os.path.join(os.getcwd(), dir)
if not os.path.exists(dir) or not os.path.isdir(dir):
p.warning("Include path "+dir+" doesn't exist")
logging.warning("Include path "+dir+" doesn't exist")
continue
h_file = os.path.join(dir, req)
if os.path.exists(h_file) and not os.path.isdir(h_file):
......@@ -167,7 +168,7 @@ class DependencySolver:
else:
break
p.vprint ("Include paths are: " + ' '.join(ret))
logging.debug("Include paths are: " + ' '.join(ret))
return ret
def solve(self, fileset):
......@@ -193,8 +194,8 @@ class DependencySolver:
fset[idx], fset[k] = fset[k], fset[idx]
if(n_iter == max_iter):
p.error("Maximum number of iterations reached when trying to solve the dependencies.\n"
"Perhaps a cyclic inter-dependency problem.")
logging.error("Maximum number of iterations reached when trying to solve the dependencies.\n"
"Perhaps a cyclic inter-dependency problem.")
return None
for f in fset:
......@@ -205,14 +206,14 @@ class DependencySolver:
f_nondep.sort(key=lambda f: f.dep_index)
from srcfile import VHDLFile, VerilogFile
for f in [file for file in fset if isinstance(file, VHDLFile)]:
p.vprint(f.path)
logging.debug(f.path)
if f.dep_requires:
for req in f.dep_requires:
pf = self.__find_provider_vhdl_file([file for file in fset if isinstance(file, VHDLFile)], req)
if not pf:
p.error("Missing dependency in file "+str(f)+": " + req[0]+'.'+req[1])
logging.error("Missing dependency in file "+str(f)+": " + req[0]+'.'+req[1])
else:
p.vprint("--> " + pf.path)
logging.debug("--> " + pf.path)
if pf.path != f.path:
f.dep_depends_on.append(pf)
#get rid of duplicates by making a set from the list and vice versa
......@@ -222,20 +223,18 @@ class DependencySolver:
acc = []
for f in [file for file in fset if isinstance(file, VerilogFile)]:
p.vprint(f.path)
logging.debug(f.path)
if f.dep_requires:
for req in f.dep_requires:
pf = self.__find_provider_verilog_file(req, f, fset+acc)
if not pf:
p.warning("Cannot find depending for file "+str(f)+": "+req)
logging.warning("Cannot find depending for file "+str(f)+": "+req)
else:
p.vprint("--> " + pf.path)
logging.debug("--> " + pf.path)
f.dep_depends_on.append(pf)
#get rid of duplicates by making a set from the list and vice versa
f.dep_depends_on = list(set(f.dep_depends_on))
newobj = sf.SourceFileSet()
newobj.add(f_nondep)
for f in fset:
......@@ -256,14 +255,14 @@ class DependencySolver:
for req in qf.dep_requires:
pf = self.__find_provider_verilog_file(req, f, [])
if not pf:
p.warning("Cannot find include for file "+str(f)+": "+req)
logging.warning("Cannot find include for file "+str(f)+": "+req)
else:
p.vprint("--> " + pf.path)
logging.debug("--> " + pf.path)
f.dep_depends_on.append(pf)
stack.append(pf)
#get rid of duplicates by making a set from the list and vice versa
f.dep_depends_on = list(set(f.dep_depends_on))
for k in newobj:
p.vprint(str(k.dep_index) + " " + k.path + str(k._dep_fixed))
logging.debug(str(k.dep_index) + " " + k.path + str(k._dep_fixed))
return newobj
......@@ -20,11 +20,12 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import os
import sys
import msg as p
from subprocess import Popen, PIPE
import re
import logging
class _IsePath(object):
......@@ -78,7 +79,7 @@ class _IsePath(object):
class Env(dict):
def __init__(self, options, top_module):
self.options = options
self.top_module = top_module
self.top_module = top_module
def check(self):
platform = sys.platform
......@@ -86,7 +87,6 @@ class Env(dict):
#1: determine path for ise
print("--- ISE synthesis ---")
xilinx = os.environ.get("XILINX")
if xilinx:
print("Environmental variable %s is set: %s." % ("XILINX", xilinx))
......@@ -128,7 +128,6 @@ class Env(dict):
#######
self.report_and_set_var("top_module")
#3: determine modelsim path
print("--- Modelsim simulation ---")
self.report_and_set_var("modelsim_path")
......@@ -207,8 +206,8 @@ class Env(dict):
if match:
ise_version = (int(match.group('major')), int(match.group('minor')))
else:
p.error("xst output is not in expected format: %s\n" % xst_output +
"Can't determine ISE version")
logging.error("xst output is not in expected format: %s\n" % xst_output +
"Can't determine ISE version")
return None
return ise_version
......@@ -271,5 +270,5 @@ class Env(dict):
return False
if __name__ == "__main__":
ec = EnvChecker({}, {})
ec = Env({}, {})
ec.check()
......@@ -20,31 +20,33 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import os
import msg as p
import logging
import path
import global_mod
class ModulePool(list):
class ModuleFetcher:
def __init__(self):
pass
def fetch_single_module(self, module):
import global_mod
new_modules = []
p.vprint("Fetching module: " + str(module))
logging.debug("Fetching module: " + str(module))
if module.source == "local":
p.vprint("ModPath: " + module.path)
logging.debug("ModPath: " + module.path)
else:
p.printhr()
p.info("Fetching module: " + str(module) +\
" [parent: " + str(module.parent) + "]")
logging.info("Fetching module: " + str(module) +
"[parent: " + str(module.parent) + "]")
if module.source == "svn":
p.info("[svn] Fetching to " + module.fetchto)
logging.info("[svn] Fetching to " + module.fetchto)
self.__fetch_from_svn(module)
if module.source == "git":
p.info("[git] Fetching to " + module.fetchto)
logging.info("[git] Fetching to " + module.fetchto)
self.__fetch_from_git(module)
module.parse_manifest()
......@@ -69,7 +71,7 @@ class ModulePool(list):
rval = True
p.vprint(cmd)
logging.debug(cmd)
if os.system(cmd) != 0:
rval = False
os.chdir(cur_dir)
......@@ -90,7 +92,7 @@ class ModulePool(list):
mod_path = os.path.join(module.fetchto, basename)
if basename.endswith(".git"):
basename = basename[:-4] #remove trailing .git
basename = basename[:-4] # remove trailing .git
if module.isfetched:
update_only = True
......@@ -106,14 +108,14 @@ class ModulePool(list):
rval = True
p.vprint(cmd)
logging.debug(cmd)
if os.system(cmd) != 0:
rval = False
if module.revision and rval:
os.chdir(mod_path)
cmd = "git checkout " + module.revision
p.vprint(cmd)
logging.debug(cmd)
if os.system(cmd) != 0:
rval = False
os.chdir(cur_dir)
......@@ -122,7 +124,6 @@ class ModulePool(list):
module.path = mod_path
return rval
def __init__(self, *args):
list.__init__(self, *args)
self.top_module = None
......@@ -146,7 +147,7 @@ class ModulePool(list):
return [m for m in self if m.url == url][0]
else:
if self.global_fetch: # if there is global fetch parameter (HDLMAKE_COREDIR env variable)
fetchto = self.global_fetch # screw module's particular fetchto
fetchto = self.global_fetch # screw module's particular fetchto
elif global_mod.top_module:
fetchto = global_mod.top_module.fetchto
......@@ -186,11 +187,11 @@ class ModulePool(list):
new_modules = fetcher.fetch_single_module(cur_mod)
for mod in new_modules:
if not mod.isfetched:
p.vprint("Appended to fetch queue: " +str(mod.url))
logging.debug("Appended to fetch queue: " + str(mod.url))
self._add(mod)
fetch_queue.append(mod)
else:
p.vprint("NOT appended to fetch queue: " +str(mod.url))
logging.debug("NOT appended to fetch queue: " + str(mod.url))
def build_global_file_list(self):
from srcfile import SourceFileSet
......@@ -230,9 +231,9 @@ class ModulePool(list):
if nvl:
extra_verilog_files.add(nvl)
p.vprint("Extra verilog files, not listed in manifests:")
logging.debug("Extra verilog files, not listed in manifests:")
for extra_vl in extra_verilog_files:
p.vprint(str(extra_vl))
logging.debug(str(extra_vl))
for extra_vl in extra_verilog_files:
files.add(extra_vl)
return files
......
......@@ -22,15 +22,16 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import xml.dom.minidom
import xml.parsers.expat
import msg as p
import logging
import re
XmlImpl = xml.dom.minidom.getDOMImplementation()
ISE_STANDARD_LIBS = ['ieee', 'ieee_proposed', 'iSE', 'simprims', 'std',
'synopsys','unimacro', 'unisim', 'XilinxCoreLib']
'synopsys', 'unimacro', 'unisim', 'XilinxCoreLib']
QUARTUS_STANDARD_LIBS = ['altera', 'altera_mf', 'lpm', 'ieee', 'std']
MODELSIM_STANDARD_LIBS = ['ieee', 'std']
ISIM_STARDAND_LIBS = ['std', 'ieee', 'ieee_proposed', 'vl', 'synopsys',
......@@ -39,6 +40,7 @@ ISIM_STARDAND_LIBS = ['std', 'ieee', 'ieee_proposed', 'vl', 'synopsys',
'simprims_ver', 'unisims_ver', 'uni9000_ver',
'unimacro_ver', 'xilinxcorelib_ver', 'secureip']
class ISEProjectProperty:
def __init__(self, name, value, is_default=False):
self.name = name
......@@ -108,14 +110,13 @@ class ISEProject:
def add_initial_properties(self, syn_device, syn_grade, syn_package, syn_top):
family_names = {
"XC6S": "Spartan6",
"XC3S": "Spartan3",
"XC6V": "Virtex6",
"XC5V": "Virtex5",
"XC4V": "Virtex4",
"XC7K": "Kintex7",
"XC7A": "Artix7"}
"XC6S": "Spartan6",
"XC3S": "Spartan3",
"XC6V": "Virtex6",
"XC5V": "Virtex5",
"XC4V": "Virtex4",
"XC7K": "Kintex7",
"XC7A": "Artix7"}
self.add_property(ISEProjectProperty("Device", syn_device))
self.add_property(ISEProjectProperty("Device Family", family_names[syn_device[0:4].upper()]))
......@@ -132,10 +133,10 @@ class ISEProject:
def __parse_props(self):
for xmlp in self.xml_project.getElementsByTagName("properties")[0].getElementsByTagName("property"):
prop = ISEProjectProperty(
xmlp.getAttribute("xil_pn:name"),
xmlp.getAttribute("xil_pn:value"),
xmlp.getAttribute("xil_pn:valueState") == "default"
)
xmlp.getAttribute("xil_pn:name"),
xmlp.getAttribute("xil_pn:value"),
xmlp.getAttribute("xil_pn:valueState") == "default"
)
self.props.append(prop)
self.xml_props = self.__purge_dom_node(name="properties", where=self.xml_doc.documentElement)
......@@ -148,20 +149,20 @@ class ISEProject:
def load_xml(self, filename):
f = open(filename)
self.xml_doc = xml.dom.minidom.parse(f)
self.xml_project = self.xml_doc.getElementsByTagName("project")[0]
self.xml_project = self.xml_doc.getElementsByTagName("project")[0]
import sys
try:
self.__parse_props()
except xml.parsers.expat.ExpatError:
p.rawprint("Error while parsing existng file's properties:")
p.rawprint(str(sys.exc_info()))
print("Error while parsing existng file's properties:")
print(str(sys.exc_info()))
quit()
try:
self.__parse_libs()
except xml.parsers.expat.ExpatError:
p.rawprint("Error while parsing existng file's libraries:")
p.rawprint(str(sys.exc_info()))
print("Error while parsing existng file's libraries:")
print(str(sys.exc_info()))
quit()
where = self.xml_doc.documentElement
......@@ -190,7 +191,7 @@ class ISEProject:
from srcfile import UCFFile, VHDLFile, VerilogFile, CDCFile, NGCFile
for f in self.files:
p.vprint("Writing .xise file for version " + str(self.ise))
logging.debug("Writing .xise file for version " + str(self.ise))
fp = self.xml_doc.createElement("file")
fp.setAttribute("xil_pn:name", os.path.relpath(f.path))
if isinstance(f, VHDLFile):
......@@ -245,7 +246,7 @@ class ISEProject:
i.setAttribute("xil_pn:schema_version", "2")
node.appendChild(i)
def emit_xml(self, filename = None):
def emit_xml(self, filename=None):
if not self.xml_doc:
self.create_empty_project()
else:
......@@ -256,7 +257,7 @@ class ISEProject:
self.__output_libs(self.xml_libs)
output_file = open(filename, "w")
string_buffer = self.StringBuffer()
self.xml_doc.writexml(string_buffer, newl = "\n", addindent="\t")
self.xml_doc.writexml(string_buffer, newl="\n", addindent="\t")
output_file.write('\n'.join(string_buffer))
output_file.close()
......@@ -291,8 +292,7 @@ class ISEProject:
class ModelsiminiReader(object):
def __init__(self, path = None):
def __init__(self, path=None):
if path is None:
path = self.modelsim_ini_dir() + "/modelsim.ini"
self.path = path
......@@ -312,7 +312,8 @@ class ModelsiminiReader(object):
for line in ini:
line = line.split(" ")[0]
line = line.strip()
if line == "": continue
if line == "":
continue
if line.lower() == "[library]":
reading_libraries = True
continue
......@@ -336,7 +337,7 @@ class ModelsiminiReader(object):
return os.path.abspath(bin_path+"/../")
class XilinxsiminiReader(object):
def __init__(self, path = None):
def __init__(self, path=None):
if path is None:
path = self.xilinxsim_ini_dir() + "/xilinxsim.ini"
self.path = path
......@@ -359,7 +360,8 @@ class XilinxsiminiReader(object):
# Read line by line, skipping comments and striping newline
line = line.split('--')[0].strip()
# Still in comments section
if line == "": continue
if line == "":
continue
# Not in comments section. Library section:
#<logical_library> = <phisical_path>
......@@ -375,7 +377,7 @@ class XilinxsiminiReader(object):
try:
xilinx_path = os.environ["XILINX"]
except KeyError:
p.error("Please set the environment variable XILINX")
logging.error("Please set the environment variable XILINX")
# Fail completely for now
quit()
......@@ -383,7 +385,7 @@ class XilinxsiminiReader(object):
try:
host_platform = os.environ["HOST_PLATFORM"]
except KeyError:
p.error("Please set the environment variable HOST_PLATFORM")
logging.error("Please set the environment variable HOST_PLATFORM")
# Fail completely for now
quit()
......
......@@ -21,7 +21,7 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
import os
import msg as p
import logging
import path
from makefile_writer import MakefileWriter
from flow import ISEProject
......@@ -42,7 +42,7 @@ class HdlmakeKernel(object):
return self.modules_pool.get_top_module()
def run(self):
p.info("Running automatic flow")
logging.info("Running automatic flow")
tm = self.top_module
......@@ -53,7 +53,7 @@ class HdlmakeKernel(object):
self.generate_simulation_makefile()
elif tm.action == "synthesis":
if tm.syn_project is None:
p.error("syn_project variable must be defined in the manifest")
logging.error("syn_project variable must be defined in the manifest")
quit()
if tm.target.lower() == "xilinx":
self.generate_ise_project()
......@@ -66,23 +66,29 @@ class HdlmakeKernel(object):
else:
raise RuntimeError("Unrecognized target: "+tm.target)
else:
p.print_action_help() and quit()
logging.error("`Action' variable was not specified\n"
"Allowed values are: \"simulation\" or \"synthesis\"\n"
"This variable in a manifest file is necessary for Hdlmake\n"
"to be able to know what to do with the given modules' structure.\n"
"For more help type `hdlmake --help'\n"
"or visit http://www.ohwr.org/projects/hdl-make")
quit()
def list_modules(self):
for m in self.modules_pool:
if not m.isfetched:
p.rawprint("#!UNFETCHED")
p.rawprint(m.url+'\n')
print("#!UNFETCHED")
print(m.url+'\n')
else:
p.rawprint(path.relpath(m.path))
print(path.relpath(m.path))
if m.source in ["svn", "git"]:
p.rawprint ("#"+m.url)
print("#"+m.url)
if not len(m.files):
p.rawprint(" # no files")
print(" # no files")
else:
for f in m.files:
p.rawprint(" " + path.relpath(f.path, m.path))
p.rawprint("")
print(" " + path.relpath(f.path, m.path))
print("")
def list_files(self):
files_str = []
......@@ -90,14 +96,15 @@ class HdlmakeKernel(object):
if not m.isfetched:
continue
files_str.append(" ".join([f.path for f in m.files]))
p.rawprint(" ".join(files_str))
print(" ".join(files_str))
def fetch(self, unfetched_only = False):
p.info("Fetching needed modules.")
def fetch(self, unfetched_only=False):
logging.info("Fetching needed modules.")
self.modules_pool.fetch_all(unfetched_only)
p.vprint(str(self.modules_pool))
logging.debug(str(self.modules_pool))
def generate_simulation_makefile(self):
tm = self.modules_pool.top_module
if tm.sim_tool == "iverilog":
self._generate_iverilog_makefile()
elif tm.sim_tool == "isim":
......@@ -105,20 +112,20 @@ class HdlmakeKernel(object):
elif tm.sim_tool == "vsim" or tm.sim_tool == "modelsim":
self._generate_vsim_makefile()
else:
raise RuntimeError("Unrecognized or not specified simulation tool: "+ str(tm.sim_tool))
raise RuntimeError("Unrecognized or not specified simulation tool: %s" % str(tm.sim_tool))
quit()
def _generate_vsim_makefile(self):
# p.info("Generating makefile for simulation.")
p.info("Generating ModelSim makefile for simulation.")
logging.info("Generating ModelSim makefile for simulation.")
solver = DependencySolver()
pool = self.modules_pool
if not pool.is_everything_fetched():
p.echo("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
p.echo(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
logging.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
print(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
top_module = pool.get_top_module()
flist = pool.build_global_file_list()
flist_sorted = solver.solve(flist)
......@@ -127,14 +134,14 @@ class HdlmakeKernel(object):
def _generate_isim_makefile(self):
# p.info("Generating makefile for simulation.")
p.info("Generating ISE Simulation (ISim) makefile for simulation.")
logging.info("Generating ISE Simulation (ISim) makefile for simulation.")
solver = DependencySolver()
pool = self.modules_pool
if not pool.is_everything_fetched():
p.echo("A module remains unfetched. "
"Fetching must be done prior to makefile generation. Try issuing \"hdlmake2 --fetch\"")
p.echo(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
logging.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation. Try running \"hdlmake --fetch\"")
print(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
top_module = pool.get_top_module()
flist = pool.build_global_file_list()
......@@ -143,13 +150,13 @@ class HdlmakeKernel(object):
def _generate_iverilog_makefile(self):
from dep_solver import DependencySolver
p.info("Generating makefile for simulation.")
logging.info("Generating makefile for simulation.")
solver = DependencySolver()
pool = self.modules_pool
if not self.modules_pool.is_everything_fetched():
p.echo("A module remains unfetched. Fetching must be done prior to makefile generation")
p.echo(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
logging.error("A module remains unfetched. Fetching must be done prior to makefile generation")
print(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
tm = pool.get_top_module()
flist = pool.build_global_file_list()
......@@ -159,7 +166,7 @@ class HdlmakeKernel(object):
def generate_ise_makefile(self):
import global_mod
global_mod.mod_pool = self.modules_pool
p.info("Generating makefile for local synthesis.")
logging.info("Generating makefile for local synthesis.")
ise_path = self.__figure_out_ise_path()
......@@ -167,14 +174,14 @@ class HdlmakeKernel(object):
def generate_remote_synthesis_makefile(self):
if self.connection.ssh_user is None or self.connection.ssh_server is None:
p.warning("Connection data is not given. "
"Accessing environmental variables in the makefile")
p.info("Generating makefile for remote synthesis.")
logging.warning("Connection data is not given. "
"Accessing environmental variables in the makefile")
logging.info("Generating makefile for remote synthesis.")
top_mod = self.modules_pool.get_top_module()
if not os.path.exists(top_mod.fetchto):
p.warning("There are no modules fetched. "
"Are you sure it's correct?")
logging.warning("There are no modules fetched. "
"Are you sure it's correct?")
ise_path = self.__figure_out_ise_path()
tcl = self.__search_tcl_file()
......@@ -192,14 +199,14 @@ class HdlmakeKernel(object):
cwd=os.getcwd(), user=self.connection.ssh_user, server=self.connection.ssh_server, ise_path=ise_path)
def generate_ise_project(self):
p.info("Generating/updating ISE project")
logging.info("Generating/updating ISE project")
if self.__is_xilinx_screwed():
p.error("Xilinx environment variable is unset or is wrong.\n"
"Cannot generate ise project")
logging.error("Xilinx environment variable is unset or is wrong.\n"
"Cannot generate ise project")
quit()
if not self.modules_pool.is_everything_fetched():
p.echo("A module remains unfetched. Fetching must be done prior to makefile generation")
p.echo(str([str(m) for m in self.modules_pool if not m.isfetched]))
logging.error("A module remains unfetched. Fetching must be done prior to makefile generation")
print(str([str(m) for m in self.modules_pool if not m.isfetched]))
quit()
ise = self.__check_ise_version()
if os.path.exists(self.top_module.syn_project):
......@@ -208,12 +215,12 @@ class HdlmakeKernel(object):
self.__create_new_ise_project(ise=ise)
def generate_quartus_project(self):
p.info("Generating/updating Quartus project.")
logging.info("Generating/updating Quartus project.")
if not self.modules_pool.is_everything_fetched():
p.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
p.rawprint(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
logging.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
print(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
if os.path.exists(self.top_module.syn_project + ".qsf"):
......@@ -249,21 +256,24 @@ class HdlmakeKernel(object):
import subprocess
import re
xst = subprocess.Popen('which xst', shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
close_fds=True)
lines = xst.stdout.readlines()
if not lines:
p.error("Xilinx binaries are not in the PATH variable\n"
"Can't determine ISE version")
logging.error("Xilinx binaries are not in the PATH variable\n"
"Can't determine ISE version")
quit()
xst = str(lines[0].strip())
version_pattern = re.compile(".*?(\d\d\.\d).*") #First check if we have version in path
version_pattern = re.compile(".*?(\d\d\.\d).*") # First check if we have version in path
match = re.match(version_pattern, xst)
if match:
ise_version = match.group(1)
else: #If it is not the case call the "xst -h" to get version
else: # If it is not the case call the "xst -h" to get version
xst_output = subprocess.Popen('xst -h', shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True)
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
close_fds=True)
xst_output = xst_output.stdout.readlines()[0]
xst_output = xst_output.strip()
version_pattern = \
......@@ -272,11 +282,11 @@ class HdlmakeKernel(object):
if match:
ise_version = ''.join((match.group('major'), '.', match.group('minor')))
else:
p.error("xst output is not in expected format: "+ xst_output +"\n"
"Can't determine ISE version")
logging.error("xst output is not in expected format: %s\n"
"Can't determine ISE version" % xst_output)
return None
p.vprint("ISE version: " + ise_version)
logging.debug("ISE version: " + ise_version)
return ise_version
def __update_existing_ise_project(self, ise):
......@@ -293,7 +303,7 @@ class HdlmakeKernel(object):
prj.add_files(all_files)
from srcfile import SourceFileFactory
sff = SourceFileFactory()
print top_mod.vlog_opt
logging.debug(top_mod.vlog_opt)
prj.add_files([sff.new(top_mod.vlog_opt)])
prj.add_libs(all_files.get_libs())
prj.load_xml(top_mod.syn_project)
......@@ -312,12 +322,12 @@ class HdlmakeKernel(object):
prj.add_libs(fileset.get_libs())
from srcfile import SourceFileFactory
sff = SourceFileFactory()
print top_mod.vlog_opt
logging.debug(top_mod.vlog_opt)
prj.add_files([sff.new(top_mod.vlog_opt)])
prj.add_initial_properties(syn_device=top_mod.syn_device,
syn_grade = top_mod.syn_grade,
syn_package = top_mod.syn_package,
syn_top = top_mod.syn_top)
syn_grade=top_mod.syn_grade,
syn_package=top_mod.syn_package,
syn_top=top_mod.syn_top)
prj.emit_xml(top_mod.syn_project)
......@@ -332,7 +342,7 @@ class HdlmakeKernel(object):
prj = QuartusProject(top_mod.syn_project)
prj.add_files(fileset)
prj.add_initial_properties( top_mod.syn_device,
prj.add_initial_properties(top_mod.syn_device,
top_mod.syn_grade,
top_mod.syn_package,
top_mod.syn_top)
......@@ -364,19 +374,19 @@ class HdlmakeKernel(object):
self.__generate_tcl()
os.system("xtclsh run.tcl")
else:
p.error("Target " + tm.target + " is not synthesizable")
logging.error("Target " + tm.target + " is not synthesizable")
def run_remote_synthesis(self):
ssh = self.connection
cwd = os.getcwd()
p.vprint("The program will be using ssh connection: "+str(ssh))
logging.debug("The program will be using ssh connection: "+str(ssh))
if not ssh.is_good():
p.error("SSH connection failure. Remote host doesn't response.")
logging.error("SSH connection failure. Remote host doesn't response.")
quit()
if not os.path.exists(self.top_module.fetchto):
p.warning("There are no modules fetched. Are you sure it's correct?")
logging.warning("There are no modules fetched. Are you sure it's correct?")
files = self.modules_pool.build_very_global_file_list()
# tcl = self.__search_tcl_file()
......@@ -392,10 +402,10 @@ class HdlmakeKernel(object):
dest_folder=self.top_module.syn_name)
syn_cmd = "cd "+dest_folder+cwd+" && xtclsh run.tcl"
p.vprint("Launching synthesis on " + str(ssh) + ": " + syn_cmd)
logging.debug("Launching synthesis on " + str(ssh) + ": " + syn_cmd)
ret = ssh.system(syn_cmd)
if ret == 1:
p.error("Synthesis failed. Nothing will be transfered back")
logging.error("Synthesis failed. Nothing will be transfered back")
quit()
cur_dir = os.path.basename(cwd)
......@@ -403,7 +413,7 @@ class HdlmakeKernel(object):
ssh.transfer_files_back(what=dest_folder+cwd, where=".")
os.chdir(cur_dir)
def __search_tcl_file(self, directory = None):
def __search_tcl_file(self, directory=None):
if directory is None:
directory = "."
filenames = os.listdir(directory)
......@@ -415,51 +425,51 @@ class HdlmakeKernel(object):
if len(tcls) == 0:
return None
if len(tcls) > 1:
p.error("Multiple tcls in the current directory!\n" + str(tcls))
logging.error("Multiple tcls in the current directory!\n" + str(tcls))
quit()
return tcls[0]
def __generate_tcl(self):
f = open("run.tcl","w")
f = open("run.tcl", "w")
f.write("project open " + self.top_module.syn_project + '\n')
f.write("process run {Generate Programming File} -force rerun_all\n")
f.close()
def clean_modules(self):
p.info("Removing fetched modules..")
logging.info("Removing fetched modules..")
remove_list = [m for m in self.modules_pool if m.source in ["svn", "git"] and m.isfetched]
remove_list.reverse() #we will remove modules in backward order
remove_list.reverse() # we will remove modules in backward order
if len(remove_list):
for m in remove_list:
p.rawprint("\t" + m.url + " [from: " + m.path + "]")
print("\t" + m.url + " [from: " + m.path + "]")
m.remove_dir_from_disk()
else:
p.info("There are no modules to be removed")
logging.info("There are no modules to be removed")
def generate_fetch_makefile(self):
pool = self.modules_pool
if pool.get_fetchable_modules() == []:
p.error("There are no fetchable modules. "
"No fetch makefile is produced")
logging.error("There are no fetchable modules. "
"No fetch makefile is produced")
quit()
if not pool.is_everything_fetched():
p.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
logging.error("A module remains unfetched. "
"Fetching must be done prior to makefile generation")
quit()
self.make_writer.generate_fetch_makefile(pool)
def merge_cores(self):
from srcfile import VerilogFile, VHDLFile, SVFile, NGCFile
from srcfile import VerilogFile, VHDLFile, NGCFile
from vlog_parser import VerilogPreprocessor
solver = DependencySolver()
pool = self.modules_pool
if not pool.is_everything_fetched():
p.echo("A module remains unfetched. Fetching must be done prior to makefile generation")
p.echo(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
logging.error("A module remains unfetched. Fetching must be done prior to makefile generation")
print(str([str(m) for m in self.modules_pool.modules if not m.isfetched]))
quit()
flist = pool.build_global_file_list()
......@@ -482,7 +492,7 @@ class HdlmakeKernel(object):
for vhdl in flist_sorted.filter(VHDLFile):
f_out.write("\n\n--- File: %s ----\n\n" % vhdl.rel_path())
f_out.write(open(vhdl.rel_path(),"r").read()+"\n\n")
f_out.write(open(vhdl.rel_path(), "r").read()+"\n\n")
#print("VHDL: %s" % vhdl.rel_path())
f_out.close()
......
......@@ -22,6 +22,8 @@
import os
import string
import logging
class MakefileWriter(object):
def __init__(self, filename):
......@@ -59,7 +61,6 @@ class MakefileWriter(object):
self._file = open(filename, "w")
def generate_remote_synthesis_makefile(self, files, name, cwd, user, server, ise_path):
import path
if name is None:
import random
name = ''.join(random.choice(string.ascii_letters + string.digits) for x in range(8))
......@@ -69,7 +70,7 @@ class MakefileWriter(object):
remote_name_tmpl = "R_NAME:={0}"
files_tmpl = "FILES := {0}"
if user is None:
if user is None:
user_tmpl = user_tmpl.format("$(HDLMAKE_USER)#take the value from the environment")
test_tmpl = """__test_for_remote_synthesis_variables:
ifeq (x$(USER),x)
......@@ -132,7 +133,6 @@ endif
pass
def generate_ise_makefile(self, top_mod, ise_path):
import path
mk_text = """PROJECT := {1}
ISE_CRAP := \
*.b \
......@@ -217,7 +217,7 @@ mrproper:
self.initialize()
self.write("#target for fetching all modules stored in repositories\n")
self.write("fetch: ")
self.write(' \\\n'.join(["__"+m.basename+"_fetch" for m in modules_pool if m.source in ["svn","git"]]))
self.write(' \\\n'.join(["__"+m.basename+"_fetch" for m in modules_pool if m.source in ["svn", "git"]]))
self.write("\n\n")
for module in modules_pool:
......@@ -229,9 +229,9 @@ mrproper:
self.write("cd " + rp(module.fetchto) + ' ')
c = "svn checkout {0}{1} {2}"
if module.revision:
c=c.format(module.url, "@"+module.revision, module.basename)
c = c.format(module.url, "@"+module.revision, module.basename)
else:
c=c.format(module.url, "", module.basename)
c = c.format(module.url, "", module.basename)
self.write(c)
self.write("cd $(PWD) \n\n")
......@@ -243,17 +243,16 @@ mrproper:
self.write("if [ -d " + basename + " ] then cd " + basename + ' ')
self.write("git pull ")
if module.revision:
self.write("git checkout " + module.revision +'')
self.write("else git clone "+ module.url + ' fi ')
self.write("git checkout " + module.revision + '')
self.write("else git clone " + module.url + ' fi ')
if module.revision:
self.write("git checkout " + module.revision + '')
self.write("cd $(PWD) \n\n")
def generate_iverilog_makefile(self, fileset, top_module, modules_pool):
from srcfile import VerilogFile, VHDLFile, SVFile
from srcfile import VerilogFile
#open the file and write the above preambule (part 1)
self.initialize()
rp = os.path.relpath
import global_mod
# for m in global_mod.mod_pool:
for f in global_mod.top_module.incl_makefiles:
......@@ -272,7 +271,7 @@ mrproper:
include_dirs = list(set([os.path.dirname(f.rel_path()) for f in vl.dep_depends_on if f.name.endswith("vh")]))
while "" in include_dirs:
include_dirs.remove("")
include_dir_string=" -I".join(include_dirs)
include_dir_string = " -I".join(include_dirs)
if include_dir_string:
include_dir_string = ' -I'+include_dir_string
self.writeln("VFLAGS_"+target_name+"="+include_dir_string)
......@@ -307,8 +306,8 @@ mrproper:
if (f.name != vl.name and f.name not in sim_only_files):
bt_syn_deps.append(f)
self.writeln(bt+'syn_deps = '+ ' '.join([f.rel_path() for f in bt_syn_deps]))
if not os.path.exists(bt+".ucf"):
print "WARNING: The file " +bt+".ucf doesn't exist!"
if not os.path.exists("%s.ucf" % bt):
logging.warning("The file %s.ucf doesn't exist!" % bt)
self.writeln(bt+".bit:\t"+bt+".v $("+bt+"syn_deps) "+bt+".ucf")
part=(global_mod.top_module.syn_device+'-'+
global_mod.top_module.syn_package+
......@@ -319,7 +318,6 @@ mrproper:
self.writeln("clean:")
self.writeln("\t\trm -f "+" ".join(target_list)+"\n\t\trm -rf _xilinx")
def generate_vsim_makefile(self, fileset, top_module):
from srcfile import VerilogFile, VHDLFile, SVFile
from flow import ModelsiminiReader
......@@ -348,7 +346,6 @@ clean:
self.initialize()
self.write(make_preambule_p1)
rp = os.path.relpath
self.write("VERILOG_SRC := ")
for vl in fileset.filter(VerilogFile):
self.write(vl.rel_path() + " \\\n")
......@@ -374,7 +371,7 @@ clean:
self.write("VHDL_OBJ := ")
for vhdl in fileset.filter(VHDLFile):
#file compilation indicator (important: add _vhd ending)
self.write(os.path.join(vhdl.library, vhdl.purename,"."+vhdl.purename+"_"+vhdl.extension()) + " \\\n")
self.write(os.path.join(vhdl.library, vhdl.purename, "."+vhdl.purename+"_"+vhdl.extension()) + " \\\n")
self.write('\n')
self.write('LIBS := ')
......@@ -389,7 +386,7 @@ clean:
for lib in libs:
self.write(lib+"/."+lib+":\n")
self.write(' '.join(["\t(vlib", lib, "&&", "vmap", "-modelsimini modelsim.ini",
lib, "&&", "touch", lib+"/."+lib,")"]))
lib, "&&", "touch", lib+"/."+lib, ")"]))
self.write(' '.join(["||", "rm -rf", lib, "\n"]))
self.write('\n')
......@@ -417,10 +414,10 @@ clean:
lib = vhdl.library
purename = vhdl.purename
#each .dat depends on corresponding .vhd file
self.write(os.path.join(lib, purename, "."+purename+"_"+ vhdl.extension()) + ": " + vhdl.rel_path())
self.write(os.path.join(lib, purename, "."+purename+"_" + vhdl.extension()) + ": " + vhdl.rel_path())
for dep_file in vhdl.dep_depends_on:
name = dep_file.purename
self.write(" \\\n"+ os.path.join(dep_file.library, name, "."+name+"_vhd"))
self.write(" \\\n" + os.path.join(dep_file.library, name, ".%s_vhd" % name))
self.writeln()
self.writeln(' '.join(["\t\tvcom $(VCOM_FLAGS)", vhdl.vcom_opt, "-work", lib, "$< "]))
self.writeln("\t\t@mkdir -p $(dir $@) && touch $@\n")
......@@ -428,7 +425,7 @@ clean:
# Modification here
def generate_isim_makefile(self, fileset, top_module):
from srcfile import VerilogFile, VHDLFile, SVFile
from srcfile import VerilogFile, VHDLFile
from flow import XilinxsiminiReader
make_preambule_p1 = """## variables #############################
PWD := $(shell pwd)
......@@ -448,7 +445,7 @@ $(VHDL_OBJ): $(LIB_IND) xilinxsim.ini
xilinxsim.ini: $(XILINX_INI_PATH)/xilinxsim.ini
\t\tcp $< .
fuse:
fuse:
ifeq ($(TOP_MODULE),)
\t\t@echo \"Environment variable TOP_MODULE not set!\"
else
......@@ -464,7 +461,6 @@ isim.wdb
self.initialize()
self.write(make_preambule_p1)
rp = os.path.relpath
self.write("VERILOG_SRC := ")
for vl in fileset.filter(VerilogFile):
self.write(vl.rel_path() + " \\\n")
......@@ -490,7 +486,7 @@ isim.wdb
self.write("VHDL_OBJ := ")
for vhdl in fileset.filter(VHDLFile):
#file compilation indicator (important: add _vhd ending)
self.write(os.path.join(vhdl.library, vhdl.purename,"."+vhdl.purename+"_"+vhdl.extension()) + " \\\n")
self.write(os.path.join(vhdl.library, vhdl.purename, "."+vhdl.purename+"_"+vhdl.extension()) + " \\\n")
self.write('\n')
self.write('LIBS := ')
......@@ -525,7 +521,7 @@ isim.wdb
#self.writeln(".PHONY: " + os.path.join(comp_obj, '.'+vl.purename+"_"+vl.extension()))
self.write(os.path.join(comp_obj, '.'+vl.purename+"_"+vl.extension())+': ')
self.write(vl.rel_path() + ' ')
self.writeln(' '.join([f.rel_path() for f in vl.dep_depends_on]))
self.writeln(' '.join([fname.rel_path() for fname in vl.dep_depends_on]))
self.write("\t\tvlogcomp -work "+vl.library+"=./"+vl.library)
self.write(" $(VLOGCOMP_FLAGS) ")
#if isinstance(vl, SVFile):
......@@ -548,7 +544,7 @@ isim.wdb
#each .dat depends on corresponding .vhd file and its dependencies
#self.write(os.path.join(lib, purename, "."+purename+"_"+ vhdl.extension()) + ": "+ vhdl.rel_path()+" " + os.path.join(lib, purename, "."+purename) + '\n')
#self.writeln(".PHONY: " + os.path.join(comp_obj, "."+purename+"_"+ vhdl.extension()))
self.write(os.path.join(comp_obj, "."+purename+"_"+ vhdl.extension()) + ": "+ vhdl.rel_path()+" " + os.path.join(lib, purename, "."+purename) + '\n')
self.write(os.path.join(comp_obj, "."+purename+"_" + vhdl.extension()) + ": " + vhdl.rel_path()+" " + os.path.join(lib, purename, "."+purename) + '\n')
self.writeln(' '.join(["\t\tvhpcomp $(VHPCOMP_FLAGS)", vhdl.vcom_opt, "-work", lib+"=./"+lib, "$< "]))
self.writeln("\t\t@mkdir -p $(dir $@) && touch $@\n")
self.writeln()
......@@ -557,10 +553,10 @@ isim.wdb
#self.writeln(".PHONY: " + os.path.join(lib, purename, "."+purename))
# Touch the dependency file as well. In this way, "make" will recompile only what is needed (out of date)
#if len(vhdl.dep_depends_on) != 0:
self.write(os.path.join(lib, purename, "."+purename) +":")
self.write(os.path.join(lib, purename, "."+purename) + ":")
for dep_file in vhdl.dep_depends_on:
name = dep_file.purename
self.write(" \\\n"+ os.path.join(dep_file.library, name, "."+name+ "_" + vhdl.extension()))
self.write(" \\\n" + os.path.join(dep_file.library, name, "."+name + "_" + vhdl.extension()))
self.write('\n')
self.writeln("\t\t@mkdir -p $(dir $@) && touch $@\n")
......
......@@ -3,12 +3,14 @@
# Copyright (c) 2013 CERN
# Author: Pawel Szostek (pawel.szostek@cern.ch)
from __future__ import print_function
import path as path_mod
import msg as p
import os
import global_mod
import logging
from manifest_parser import Manifest, ManifestParser
from srcfile import SourceFileSet, SourceFileFactory
from srcfile import SourceFileSet, SourceFileFactory
class Module(object):
@property
......@@ -17,14 +19,14 @@ class Module(object):
@source.setter
def source(self, value):
if value not in ["svn","git","local"]:
if value not in ["svn", "git", "local"]:
raise ValueError("Inproper source: " + value)
self._source = value
@source.deleter
def source(self):
del self._source
###
@property
def basename(self):
import path
......@@ -62,8 +64,8 @@ class Module(object):
self.url, self.branch, self.revision = url, None, None
if source == "local" and not os.path.exists(url):
p.error("Path to the local module doesn't exist:\n" + url
+ "\nThis module was instantiated in: " + str(parent))
logging.error("Path to the local module doesn't exist:\n" + url
+ "\nThis module was instantiated in: " + str(parent))
quit()
if source == "local":
......@@ -102,18 +104,18 @@ class Module(object):
"""
Look for manifest in the given folder
"""
p.vprint("Looking for manifest in " + self.path)
logging.debug("Looking for manifest in " + self.path)
for filename in os.listdir(self.path):
if filename == "manifest.py" or filename == "Manifest.py":
if not os.path.isdir(filename):
p.vprint("*** found manifest for module "+self.path)
logging.debug("*** found manifest for module "+self.path)
manifest = Manifest(path=os.path.abspath(os.path.join(self.path, filename)))
return manifest
return None
def __make_list(self, sth):
if sth is not None:
if not isinstance(sth, (list,tuple)):
if not isinstance(sth, (list, tuple)):
sth = [sth]
else:
sth = []
......@@ -126,7 +128,7 @@ class Module(object):
import shutil
import os
p.vprint("Removing " + self.path)
logging.debug("Removing " + self.path)
shutil.rmtree(self.path)
parts = self.path.split('/')
......@@ -134,9 +136,9 @@ class Module(object):
try:
parts = parts[:-1]
tmp = '/'.join(parts)
p.vprint("Trying to remove " + tmp)
logging.debug("Trying to remove " + tmp)
os.rmdir(tmp)
except OSError: #a catologue is not empty - we are done
except OSError: # a catologue is not empty - we are done
break
def parse_manifest(self):
......@@ -158,16 +160,16 @@ class Module(object):
manifest_parser.add_arbitrary_code(global_mod.options.arbitrary_code)
if self.manifest is None:
p.vprint("No manifest found in module "+str(self))
logging.debug("No manifest found in module "+str(self))
else:
manifest_parser.add_manifest(self.manifest)
p.vprint("Parsing manifest file: " + str(self.manifest))
logging.debug("Parsing manifest file: " + str(self.manifest))
opt_map = None
try:
opt_map = manifest_parser.parse()
except NameError as ne:
p.echo("Error while parsing {0}:\n{1}: {2}.".format(self.manifest, type(ne), ne))
logging.error("Error while parsing {0}:\n{1}: {2}.".format(self.manifest, type(ne), ne))
quit()
if(opt_map["fetchto"] is not None):
......@@ -184,8 +186,8 @@ class Module(object):
local_mods = []
for path in local_paths:
if path_mod.is_abs_path(path):
p.error("Found an absolute path (" + path + ") in a manifest")
p.rawprint("(" + self.path + ")")
logging.error("Found an absolute path (" + path + ") in a manifest"
"(" + self.path + ")")
quit()
path = path_mod.rel2abs(path, self.path)
local_mods.append(self.pool.new_module(parent=self, url=path, source="local", fetchto=fetchto))
......@@ -207,10 +209,10 @@ class Module(object):
map(lambda f: mkFileList.append(f), opt_map["incl_makefiles"])
for f in mkFileList:
if path_mod.is_abs_path(f):
print "Found and absolute path in manifest. Exiting .."
logging.error("Found and absolute path in manifest. Exiting ..")
quit()
else:
self.incl_makefiles.append(os.path.relpath(os.path.abspath(os.path.join(self.path,f))))
self.incl_makefiles.append(os.path.relpath(os.path.abspath(os.path.join(self.path, f))))
#if self.vlog_opt == "":
# self.vlog_opt = global_mod.top_module.vlog_opt
......@@ -226,35 +228,32 @@ class Module(object):
if opt_map["include_dirs"] is not None:
if isinstance(opt_map["include_dirs"], basestring):
# self.include_dirs.append(opt_map["include_dirs"])
ll = os.path.relpath(os.path.abspath(os.path.join(self.path,opt_map["include_dirs"])))
ll = os.path.relpath(os.path.abspath(os.path.join(self.path, opt_map["include_dirs"])))
self.include_dirs.append(ll)
else:
# self.include_dirs.extend(opt_map["include_dirs"])
ll = map(lambda x: os.path.relpath(os.path.abspath(os.path.join(self.path,x))),
ll = map(lambda x: os.path.relpath(os.path.abspath(os.path.join(self.path, x))),
opt_map["include_dirs"])
self.include_dirs.extend(ll)
for dir in self.include_dirs:
if path_mod.is_abs_path(dir):
p.warning(self.path + " contains absolute path to an include directory: " +
dir)
logging.warning("%s contains absolute path to an include directory: %s" % (self.path, dir))
if not os.path.exists(dir):
p.warning(self.path + " has an unexisting include directory: " + dir)
logging.warning(self.path + " has an unexisting include directory: " + dir)
if opt_map["files"] == []:
self.files = SourceFileSet()
else:
opt_map["files"] = self.__make_list(opt_map["files"])
paths = []
for path in opt_map["files"]:
if not path_mod.is_abs_path(path):
path = path_mod.rel2abs(path, self.path)
paths.append(path)
for file_path in opt_map["files"]:
if not path_mod.is_abs_path(file_path):
path = path_mod.rel2abs(file_path, self.path)
paths.append(file_path)
else:
p.warning(path + " is an absolute path. Omitting.")
if not os.path.exists(path):
p.error("File listed in " + self.manifest.path + " doesn't exist: "
+ path +".\nExiting.")
logging.warning(file_path + " is an absolute path. Omitting.")
if not os.path.exists(file_path):
quit()
from srcfile import VerilogFile, VHDLFile
......@@ -270,33 +269,30 @@ class Module(object):
else:
opt_map["sim_only_files"] = self.__make_list(opt_map["sim_only_files"])
paths = []
for path in opt_map["sim_only_files"]:
if not path_mod.is_abs_path(path):
path = path_mod.rel2abs(path, self.path)
paths.append(path)
for file_path in opt_map["sim_only_files"]:
if not path_mod.is_abs_path(file_path):
file_path = path_mod.rel2abs(file_path, self.path)
paths.append(file_path)
else:
p.warning(path + " is an absolute path. Omitting.")
if not os.path.exists(path):
p.error("File listed in " + self.manifest.path + " doesn't exist: "
+ path +".\nExiting.")
logging.warning(file_path + " is an absolute path. Omitting.")
if not os.path.exists(file_path):
logging.error("File listed in %s doesn't exist: %s" % (self.manifest.path, file_path))
quit()
from srcfile import VerilogFile, VHDLFile
self.sim_only_files = self.__create_file_list_from_paths(paths=paths)
self.bit_file_targets = SourceFileSet()
if opt_map["bit_file_targets"] != []:
paths=[]
paths = []
for path in opt_map["bit_file_targets"]:
if not path_mod.is_abs_path(path):
path = path_mod.rel2abs(path, self.path)
paths.append(path)
else:
p.warning(path + " is an absolute path. Omitting.")
logging.warning(path + " is an absolute path. Omitting.")
if not os.path.exists(path):
p.error("File listed in " + self.manifest.path +
" doesn't exist: " + path +".\nExiting.")
logging.error("File listed in " + self.manifest.path +
" doesn't exist: " + path + ".\nExiting.")
quit()
from srcfile import VerilogFile, VHDLFile
self.bit_file_targets = self.__create_file_list_from_paths(paths=paths)
if "svn" in opt_map["modules"]:
......@@ -321,12 +317,12 @@ class Module(object):
self.action = opt_map["action"]
if opt_map["syn_name"] is None and opt_map["syn_project"] is not None:
self.syn_name = opt_map["syn_project"][:-5] #cut out .xise from the end
self.syn_name = opt_map["syn_project"][:-5] # cut out .xise from the end
else:
self.syn_name = opt_map["syn_name"]
self.syn_device = opt_map["syn_device"]
self.syn_grade = opt_map["syn_grade"]
self.syn_package= opt_map["syn_package"]
self.syn_package = opt_map["syn_package"]
self.syn_project = opt_map["syn_project"]
self.syn_top = opt_map["syn_top"]
......@@ -344,17 +340,17 @@ class Module(object):
return True
def make_list_of_modules(self):
p.vprint("Making list of modules for " + str(self))
logging.debug("Making list of modules for " + str(self))
new_modules = [self]
modules = [self]
while len(new_modules) > 0:
cur_module = new_modules.pop()
if not cur_module.isfetched:
p.error("Unfetched module in modules list: " + str(cur_module))
logging.error("Unfetched module in modules list: " + str(cur_module))
quit()
if cur_module.manifest is None:
p.vprint("No manifest in " + str(cur_module))
logging.debug("No manifest in " + str(cur_module))
continue
cur_module.parse_manifest()
......@@ -371,7 +367,7 @@ class Module(object):
new_modules.append(module)
if len(modules) == 0:
p.vprint("No modules were found in " + self.fetchto)
logging.debug("No modules were found in " + self.fetchto)
return modules
def __create_file_list_from_paths(self, paths):
......
......@@ -20,24 +20,10 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import os
import msg as p
ise_path_64 = {
"10.0":"/opt/Xilinx/10.0/ISE/bin/lin",
"10.1":"/opt/Xilinx/10.1/ISE/bin/lin",
"12.2":"/opt/Xilinx/12.2/ISE_DS/ISE/bin/lin64",
"12.1":"/opt/Xilinx/12.1/ISE_DS/ISE/bin/lin",
"12.4":"/opt/Xilinx/12.4/ISE_DS/ISE/bin/lin64",
"13.1":"/opt/Xilinx/13.1/ISE_DS/ISE/bin/lin64"
}
ise_path_32 = {"10.0":"/opt/Xilinx/10.0/ISE/bin/lin",
"10.1":"/opt/Xilinx/10.1/ISE/bin/lin",
"12.2":"/opt/Xilinx/12.2/ISE_DS/ISE/bin/lin64",
"12.1":"/opt/Xilinx/12.1/ISE_DS/ISE/bin/lin",
"12.4":"/opt/Xilinx/12.4/ISE_DS/ISE/bin/lin64",
"13.1":"/opt/Xilinx/13.1/ISE_DS/ISE/bin/lin64"}
import logging
def url_parse(url):
"""
......@@ -46,7 +32,7 @@ def url_parse(url):
"""url_pat = re.compile("[ \t]*([^ \t]+?)[ \t]*(::)?([^ \t@]+)?(@[ \t]*(.+))?[ \t]*")
url_match = re.match(url_pat, url)
if url_match is None:
p.echo("Not a correct repo url: {0}. Skipping".format(url))
print("Not a correct repo url: {0}. Skipping".format(url))
url_clean = url_match.group(1)
if url_match.group(3) is not None: #there is a branch
branch = url_match.group(3)
......@@ -62,6 +48,7 @@ def url_parse(url):
return (url_clean, branch, rev)
def url_basename(url):
"""
Get basename from an url
......@@ -75,6 +62,7 @@ def url_basename(url):
ret = os.path.basename(url)
return ret
def svn_basename(url):
words = url.split('//')
try:
......@@ -83,22 +71,30 @@ def svn_basename(url):
except IndexError:
return None
def pathsplit(p, rest=None):
if rest is None:
rest = []
(h, t) = os.path.split(p)
if len(h) < 1: return [t]+rest
if len(t) < 1: return [h]+rest
if len(h) < 1:
return [t]+rest
if len(t) < 1:
return [h]+rest
return pathsplit(h, [t]+rest)
def commonpath(l1, l2, common=None):
if common is None:
common = []
if len(l1) < 1: return (common, l1, l2)
if len(l2) < 1: return (common, l1, l2)
if l1[0] != l2[0]: return (common, l1, l2)
if len(l1) < 1:
return (common, l1, l2)
if len(l2) < 1:
return (common, l1, l2)
if l1[0] != l2[0]:
return (common, l1, l2)
return commonpath(l1[1:], l2[1:], common+[l1[0]])
def is_rel_path(path):
path = str(path)
s = path[0]
......@@ -106,6 +102,7 @@ def is_rel_path(path):
return False
return True
def is_abs_path(path):
path = str(path)
s = path[0]
......@@ -113,7 +110,8 @@ def is_abs_path(path):
return True
return False
def relpath(p1, p2 = None):
def relpath(p1, p2=None):
if p2 is None:
p2 = os.getcwd()
if p1 == p2:
......@@ -127,12 +125,12 @@ def relpath(p1, p2 = None):
(_, l1, l2) = commonpath(pathsplit(p1), pathsplit(p2))
p = []
if len(l1) > 0:
p = [ '../' * len(l1) ]
p = ['../' * len(l1)]
p = p + l2
return os.path.join(*p)
def rel2abs(path, base = None):
def rel2abs(path, base=None):
"""
converts a relative path to an absolute path.
......@@ -147,11 +145,12 @@ def rel2abs(path, base = None):
retval = os.path.join(base, path)
return os.path.abspath(retval)
def search_for_manifest(search_path):
"""
Look for manifest in the given folder
"""
p.vprint("Looking for manifest in " + search_path)
logging.debug("Looking for manifest in " + search_path)
for filename in os.listdir(search_path):
if filename == "manifest.py" and not os.path.isdir(filename):
return os.path.abspath(os.path.join(search_path, filename))
......
......@@ -19,10 +19,11 @@
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
from dep_solver import IDependable
import os
import msg as p
import global_mod
import logging
import flow
import path as path_mod
......@@ -42,7 +43,7 @@ class File(object):
@property
def dirname(self):
return os.path.dirname(self.path)
def rel_path(self, dir=None):
import path
if dir is None:
......@@ -54,7 +55,7 @@ class File(object):
def __eq__(self, other):
_NOTFOUND = object()
v1, v2 = [getattr(obj, "path", _NOTFOUND) for obj in [self, other]]
v1, v2 = [getattr(obj, "path", _NOTFOUND) for obj in [self, other]]
if v1 is _NOTFOUND or v2 is _NOTFOUND:
return False
elif v1 != v2:
......@@ -79,14 +80,14 @@ class File(object):
return os.path.isdir(self.path)
def show(self):
p.rawprint(self.path)
print(self.path)
def extension(self):
tmp = self.path.rsplit('.')
ext = tmp[len(tmp)-1]
return ext
class SourceFile(IDependable, File):
cur_index = 0
......@@ -128,8 +129,8 @@ class VHDLFile(SourceFile):
else:
self.dep_requires = list(self.__search_use_clauses())
self.dep_provides = list(self.__search_packages())
p.vprint(self.path + " provides " + str(self.dep_provides))
p.vprint(self.path + " requires " + str(self.dep_requires))
logging.debug(self.path + " provides " + str(self.dep_provides))
logging.debug(self.path + " requires " + str(self.dep_requires))
def __search_use_clauses(self):
"""
......@@ -148,12 +149,12 @@ class VHDLFile(SourceFile):
elif global_mod.top_module.sim_tool == "vsim":
std_libs = flow.ModelsiminiReader().get_libraries()
else:
p.warning("Could not determine simulation tool. Defaulting to Modelsim")
logging.warning("Could not determine simulation tool. Defaulting to Modelsim")
std_libs = flow.MODELSIM_STANDARD_LIBS
except RuntimeError as e:
#std_libs = flow.MODELSIM_STANDARD_LIBS
print "I/O error: ({0})".format(e.message)
p.error("Picking standard Modelsim simulation libraries. Try to fix the error.")
logging.error("I/O error: ({0})".format(e.message))
logging.error("Picking standard Modelsim simulation libraries. Try to fix the error.")
std_libs = flow.MODELSIM_STARDAND_LIBS
elif global_mod.top_module.action == "synthesis":
print("setting std libs for synthesis...")
......@@ -180,7 +181,7 @@ class VHDLFile(SourceFile):
if m is not None:
use_lines.append(m.group(1))
ret = set()
ret = set()
for line in use_lines:
m = re.match(lib_pattern, line)
if m is not None:
......@@ -214,7 +215,7 @@ class VHDLFile(SourceFile):
package_pattern = re.compile("^[ \t]*package[ \t]+([^ \t]+)[ \t]+is[ \t]*.*$")
ret = set()
ret = set()
for line in text:
#identifiers and keywords are case-insensitive in VHDL
line = line.lower()
......@@ -274,13 +275,13 @@ class VerilogFile(SourceFile):
command += " " + vlog_opt + " " + self.rel_path()
retOsSystem = os.system(command)
if retOsSystem and retOsSystem != 256:
print "Dependencies not Met"
print command, self.include_dirs, inc_dirs, global_mod.mod_pool
logging.error("Dependencies not Met")
logging.debug(command, self.include_dirs, inc_dirs, global_mod.mod_pool)
quit()
elif retOsSystem == 256:
print command
logging.debug(command)
pass
depFile = open(depFileName,"r")
depFile = open(depFileName, "r")
depFiles = list(set([l.strip() for l in depFile.readlines()]))
depFile.close()
return depFiles
......@@ -361,7 +362,7 @@ class SourceFileSet(list):
if isinstance(files, str):
raise RuntimeError("Expected object, not a string")
elif files is None:
p.vprint("Got None as a file.\n Ommiting")
logging.debug("Got None as a file.\n Ommiting")
else:
try:
for f in files:
......@@ -403,7 +404,7 @@ class SourceFileFactory:
path = os.path.abspath(path)
tmp = path.rsplit('.')
extension = tmp[len(tmp)-1]
p.vprint("SFF> " + path)
logging.debug("SFF> " + path)
nf = None
if extension == 'vhd' or extension == 'vhdl' or extension == 'vho':
......