Commit 3fa638fd authored by Paweł Szostek

further dev. of deps solver

parent 01f1b753
@@ -131,6 +131,9 @@ def main():
     if options.command == "auto":
         logging.info("Running automatic flow.")
+        if not top_mod.action:
+            logging.error("`action' manifest variable has to be specified.\n"
+                          "Otherwise hdlmake doesn't know how to handle the project")
         if top_mod.action == "simulation":
             sim = GenerateSimulationMakefile(modules_pool=modules_pool, options=options, env=env)
             sim.run()
......
@@ -23,15 +23,14 @@ from __future__ import print_function
 from action import Action
 import logging
 import global_mod
-import dep_solver
 class GenerateISEMakefile(Action):
     def run(self):
-        import global_mod
-        global_mod.mod_pool = self.modules_pool
         logging.info("Generating makefile for local synthesis.")
         ise_path = global_mod.env["ise_path"]
         global_mod.makefile_writer.generate_ise_makefile(top_mod=self.modules_pool.get_top_module(),
                                                          ise_path=ise_path)
@@ -24,8 +24,8 @@ import logging
 from action import Action
 import sys
 import os
-from dependable_file import DependableFile
-import dep_solver
+from dep_file import DepFile
+import new_dep_solver as dep_solver
 from srcfile import SourceFileSet
 from tools.ise import ISEProject
 from srcfile import SourceFileFactory
@@ -66,23 +66,20 @@ class GenerateISEProject(Action):
     def _handle_ise_project(self, update=False):
         top_mod = self.modules_pool.get_top_module()
         fileset = self.modules_pool.build_global_file_list()
-        non_dependable = fileset.inversed_filter(DependableFile)
-        dependable = dep_solver.solve(fileset)
-        all_files = SourceFileSet()
-        all_files.add(non_dependable)
-        all_files.add(dependable)
+        dep_solver.solve(fileset)
+        flist = dep_solver.make_dependency_sorted_list(fileset)
         prj = ISEProject(ise=self.env["ise_version"],
                          top_mod=self.modules_pool.get_top_module())
         self._write_project_vhd()
-        prj.add_files(all_files)
+        prj.add_files(flist)
         sff = SourceFileFactory()
         logging.debug(top_mod.vlog_opt)
         # prj.add_files([sff.new(top_mod.vlog_opt)])
         prj.add_files([sff.new(path=path.rel2abs("project.vhd"),
                                module=self.modules_pool.get_module_by_path("."))])
-        prj.add_libs(all_files.get_libs())
+        prj.add_libs(fileset.get_libs())
         if update is True:
             prj.load_xml(top_mod.syn_project)
         else:
......
@@ -22,7 +22,7 @@
 from __future__ import print_function
 from action import Action
 import logging
-import dep_solver
+import new_dep_solver as dep_solver
 import sys
 import global_mod
@@ -50,43 +50,41 @@ class GenerateSimulationMakefile(Action):
     def _generate_vsim_makefile(self):
         # p.info("Generating makefile for simulation.")
-        if self.env["modelsim_path"] is None:
-            logging.error("Can't generate a Modelsim makefile. Modelsim not found.")
-            sys.exit("Exiting")
-        else:
-            logging.info("Generating ModelSim makefile for simulation.")
+        # if self.env["modelsim_path"] is None:
+        #     logging.error("Can't generate a Modelsim makefile. Modelsim not found.")
+        #     sys.exit("Exiting")
+        logging.info("Generating ModelSim makefile for simulation.")
         pool = self.modules_pool
         top_module = pool.get_top_module()
-        flist = pool.build_global_file_list()
-        flist_sorted = dep_solver.solve(flist)
-        global_mod.makefile_writer.generate_vsim_makefile(flist_sorted, top_module)
+        fset = pool.build_global_file_list()
+        dep_solver.solve(fset)
+        global_mod.makefile_writer.generate_vsim_makefile(fset, top_module)
     def _generate_isim_makefile(self):
         # p.info("Generating makefile for simulation.")
-        if self.env["isim_path"] is None and self.env["xilinx"] is None:
-            logging.error("Can't generate an ISim makefile. ISim not found.")
-            sys.exit("Exiting")
-        else:
-            logging.info("Generating ISE Simulation (ISim) makefile for simulation.")
+        # if self.env["isim_path"] is None and self.env["xilinx"] is None:
+        #     logging.error("Can't generate an ISim makefile. ISim not found.")
+        #     sys.exit("Exiting")
+        logging.info("Generating ISE Simulation (ISim) makefile for simulation.")
         pool = self.modules_pool
         top_module = pool.get_top_module()
-        flist = pool.build_global_file_list()
-        flist_sorted = dep_solver.solve(flist)
-        global_mod.makefile_writer.generate_isim_makefile(flist_sorted, top_module)
+        fset = pool.build_global_file_list()
+        dep_solver.solve(fset)
+        global_mod.makefile_writer.generate_isim_makefile(fset, top_module)
     def _generate_iverilog_makefile(self):
-        if self.env["iverilog_path"] is None:
-            logging.error("Can't generate an IVerilog makefile. IVerilog not found.")
-            sys.exit("Exiting")
-        else:
-            logging.info("Generating IVerilog makefile for simulation.")
+        # if self.env["iverilog_path"] is None:
+        #     logging.error("Can't generate an IVerilog makefile. IVerilog not found.")
+        #     sys.exit("Exiting")
+        logging.info("Generating IVerilog makefile for simulation.")
         pool = self.modules_pool
         tm = pool.get_top_module()
-        flist = pool.build_global_file_list()
-        flist_sorted = dep_solver.solve(flist)
-        global_mod.makefile_writer.generate_iverilog_makefile(flist_sorted, tm, pool)
+        fset = pool.build_global_file_list()
+        dep_solver.solve(fset)
+        global_mod.makefile_writer.generate_iverilog_makefile(fset, tm, pool)
#!/usr/bin/python
#
# Copyright (c) 2013 CERN
# Author: Pawel Szostek (pawel.szostek@cern.ch)
#
# This file is part of Hdlmake.
#
# Hdlmake is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Hdlmake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
#

import global_mod
import os
from util import path as path_mod


class DepRelation(object):
    PROVIDE = 1
    USE = 2

    ENTITY = 1
    PACKAGE = 2
    INCLUDE = 3

    def __init__(self, obj_name, direction, rel_type):
        self.direction = direction
        self.rel_type = rel_type
        self.obj_name = obj_name

    def satisfies(self, rel_b):
        if rel_b.direction == DepRelation.PROVIDE or self.direction == DepRelation.USE:
            return False
        if rel_b.rel_type == self.rel_type and rel_b.obj_name == self.obj_name:
            return True
        return False

    def library(self):
        if self.rel_type == DepRelation.PACKAGE:
            libdotpackage = self.obj_name
            try:
                lib, package = libdotpackage.split('.')
                return lib
            except ValueError:
                return None
        else:
            return None

    def __str__(self):
        dstr = {self.USE: "Use", self.PROVIDE: "Provide"}
        ostr = {self.ENTITY: "entity/module", self.PACKAGE: "package", self.INCLUDE: "include/header"}
        return "%s %s '%s'" % (dstr[self.direction], ostr[self.rel_type], self.obj_name)

    def __eq__(self, other):
        return (isinstance(other, self.__class__)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return not self.__eq__(other)


class File(object):
    def __init__(self, path, module=None):
        self.path = path
        if module is None:
            self.module = global_mod.top_module
        else:
            assert not isinstance(module, basestring)
            self.module = module

    @property
    def name(self):
        return os.path.basename(self.path)

    @property
    def purename(self):
        return os.path.splitext(self.name)[0]

    @property
    def dirname(self):
        return os.path.dirname(self.path)

    def rel_path(self, dir=None):
        if dir is None:
            dir = os.getcwd()
        return path_mod.relpath(self.path, dir)

    def __str__(self):
        return self.path

    def __eq__(self, other):
        _NOTFOUND = object()
        v1, v2 = [getattr(obj, "path", _NOTFOUND) for obj in [self, other]]
        if v1 is _NOTFOUND or v2 is _NOTFOUND:
            return False
        elif v1 != v2:
            return False
        return True

    def __hash__(self):
        return hash(self.path)

    def __cmp__(self, other):
        if self.path < other.path:
            return -1
        if self.path == other.path:
            return 0
        if self.path > other.path:
            return 1

    def __ne__(self, other):
        return not self.__eq__(other)

    def isdir(self):
        return os.path.isdir(self.path)

    def show(self):
        print(self.path)

    def extension(self):
        tmp = self.path.rsplit('.')
        ext = tmp[len(tmp)-1]
        return ext


class DepFile(File):
    def __init__(self, file_path, module, include_paths=None):
        from module import Module
        from new_dep_solver import ParserFactory
        assert isinstance(file_path, basestring)
        assert isinstance(module, Module)
        File.__init__(self, path=file_path, module=module)
        self.file_path = file_path
        self.rels = set()
        self.depends_on = set()  # set of files that the file depends on, items of type DepFile
        if include_paths is None:
            include_paths = []
        else:
            pass
        self.file_path = file_path
        self.include_paths = include_paths
        parser = ParserFactory().create(self)
        parser.parse(self)

    def add_relation(self, rel):
        self.rels.add(rel)

    # def satisfies_any(self, rels_b):
    #     assert isinstance(rels_b, list)
    #     for rel_a in self.rels:
    #         if not any(map(lambda x: x.satisfies(rel_a), rels_b)):
    #             return False
    #     return True

    def satisfies(self, rel_b):
        assert isinstance(rel_b, DepRelation)
        return any(map(lambda x: x.satisfies(rel_b), self.rels))

    def show_relations(self):
        for r in self.rels:
            print(str(r))

    @property
    def filename(self):
        return os.path.basename(self.file_path)
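For orientation, a minimal sketch of how the relation matching above behaves. The package name is invented, and the snippet assumes it is run inside the hdlmake source tree so that dep_file imports cleanly; it is an illustration, not part of the commit.

# Illustration only: a PROVIDE relation satisfies a matching USE relation.
from dep_file import DepRelation

uses = DepRelation("work.foo_pkg", DepRelation.USE, DepRelation.PACKAGE)       # hypothetical package
provides = DepRelation("work.foo_pkg", DepRelation.PROVIDE, DepRelation.PACKAGE)

print(provides.satisfies(uses))   # True: same object name and type, PROVIDE vs. USE
print(uses.satisfies(provides))   # False: a USE relation never satisfies anything
print(uses.library())             # 'work': the part before the dot, for PACKAGE relations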
@@ -466,7 +466,7 @@ clean:
         for vl in fileset.filter(VerilogFile):
             self.write(os.path.join(vl.library, vl.purename, '.'+vl.purename+"_"+vl.extension())+': ')
             self.write(vl.rel_path() + ' ')
-            self.writeln(' '.join([f.rel_path() for f in vl.dep_depends_on]))
+            self.writeln(' '.join([f.rel_path() for f in vl.depends_on]))
             self.write("\t\tvlog -work "+vl.library)
             self.write(" $(VLOG_FLAGS) ")
             if isinstance(vl, SVFile):
@@ -486,7 +486,7 @@ clean:
             purename = vhdl.purename
             #each .dat depends on corresponding .vhd file
             self.write(os.path.join(lib, purename, "."+purename+"_" + vhdl.extension()) + ": " + vhdl.rel_path())
-            for dep_file in vhdl.dep_depends_on:
+            for dep_file in vhdl.depends_on:
                 name = dep_file.purename
                 self.write(" \\\n" + os.path.join(dep_file.library, name, ".%s_vhd" % name))
             self.writeln()
......
@@ -21,7 +21,6 @@
 from __future__ import print_function
 from manifest_parser import Manifest, ManifestParser
-from srcfile import VerilogFile, VHDLFile, SourceFileFactory, SourceFileSet
 from util import path as path_mod
 import os
 import global_mod
@@ -95,6 +94,7 @@ class Module(object):
         self.top_module = None
         self.commit_id = None
+        self.raw_url = url
         if source != "local":
             self.url, self.branch, self.revision = path.url_parse(url)
         else:
@@ -122,7 +122,7 @@ class Module(object):
         self.manifest = None
     def __str__(self):
-        return self.url
+        return self.raw_url
     @property
     def is_fetched_to(self):
@@ -224,6 +224,7 @@ class Module(object):
         self.manifest_dict = opt_map
     def process_manifest(self):
+        from srcfile import VerilogFile, VHDLFile, SourceFileFactory, SourceFileSet
         if self.manifest_dict is None:
             logging.debug("there is no manifest to be processed")
             return
@@ -306,7 +307,7 @@ class Module(object):
         if self.manifest_dict["files"] == []:
             self.files = SourceFileSet()
-            logging.debug("No files in the manifest")
+            logging.debug("No files in the manifest %s" % self.manifest.path)
         else:
             self.manifest_dict["files"] = self._flatten_list(self.manifest_dict["files"])
             logging.debug(self.path + str(self.manifest_dict["files"]))
@@ -450,6 +451,7 @@ class Module(object):
         return modules
     def _create_file_list_from_paths(self, paths):
+        from srcfile import SourceFileFactory, SourceFileSet
         sff = SourceFileFactory()
         srcs = SourceFileSet()
         for p in paths:
......
@@ -24,10 +24,10 @@ from __future__ import print_function
 import os
 import logging
 import global_mod
-import dep_solver
-from srcfile import SourceFileSet
-import sys
+import new_dep_solver as dep_solver
+from util import path
 from fetch import BackendFactory
+import fetch
 from subprocess import PIPE, Popen
@@ -74,23 +74,25 @@ class ModulePool(list):
     def new_module(self, parent, url, source, fetchto, process_manifest=True):
         from module import Module
         if source != "local":
-            clean_url, branch, revision = path.parse_url(url)
+            clean_url, branch, revision = path.url_parse(url)
         else:
             clean_url, branch, revision = url, None, None
-        if clean_url in [m.url for m in self]:  # check if module is not already in the pool
-            same_url_mod = [m for m in self if m.url == url][0]
-            if branch != same_url_mod.branch:
-                logging.error("Requested the same module, but different branches."
-                              "URL: %s\n" % clean_url +
-                              "branches: %s and %s\n" % (branch, same_url_mod.branch))
-                sys.exit("\nExiting")
-            if revision != same_url_mod.revision:
-                logging.error("Requested the same module, but different revisions."
-                              "URL: %s\n" % clean_url +
-                              "revisions: %s and %s\n" % (revision, same_url_mod.revision))
-                sys.exit("\nExiting")
-            return [m for m in self if m.url == url][0]
+        if url in [m.raw_url for m in self]:  # check if module is not already in the pool
+            # same_url_mod = [m for m in self if m.raw_url == url][0]
+            # if branch != same_url_mod.branch:
+            #     logging.error("Requested the same module, but different branches."
+            #                   "URL: %s\n" % clean_url +
+            #                   "branches: %s and %s\n" % (branch, same_url_mod.branch))
+            #     sys.exit("\nExiting")
+            # if revision != same_url_mod.revision:
+            #     logging.error("Requested the same module, but different revisions."
+            #                   "URL: %s\n" % clean_url +
+            #                   "revisions: %s (from %s)\n and \n%s (from %s)\n" % (revision,
+            #                                                                       parent.path,
+            #                                                                       same_url_mod.revision,
+            #                                                                       same_url_mod.parent.path))
+            #     sys.exit("\nExiting")
+            return [m for m in self if m.raw_url == url][0]
         else:
             if self.global_fetch:            # if there is global fetch parameter (HDLMAKE_COREDIR env variable)
                 fetchto = self.global_fetch  # screw module's particular fetchto
@@ -128,6 +130,7 @@ class ModulePool(list):
             return url
         finally:
             os.chdir(cwd)
     def _add(self, new_module):
         from module import Module
         if not isinstance(new_module, Module):
@@ -164,12 +167,14 @@ class ModulePool(list):
             logging.debug("NOT appended to fetch queue: " + str(mod.url))
     def build_global_file_list(self):
+        from srcfile import SourceFileSet
         ret = SourceFileSet()
         for module in self:
             ret.add(module.files)
         return ret
     def build_very_global_file_list(self):
+        from srcfile import SourceFileSet
         files = self.build_global_file_list()
         assert isinstance(files, SourceFileSet)
         dep_solver.solve(files)
......
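The pool-level helpers above are what the reworked actions now call. A minimal sketch of that sequence (the ModelSim path from this commit), assuming pool is an already-populated ModulePool and makefile_writer is the writer object hdlmake keeps in global_mod:

# Sketch of the call sequence after this commit; names are taken from the diff.
import new_dep_solver as dep_solver

def write_sim_makefile(pool, makefile_writer):
    top_module = pool.get_top_module()
    fset = pool.build_global_file_list()   # SourceFileSet with every file from every module
    dep_solver.solve(fset)                 # fills each DepFile's depends_on set in place
    makefile_writer.generate_vsim_makefile(fset, top_module)

Note the changed contract: solve() no longer returns a sorted list; it annotates the file set, and ordering is left to make_dependency_sorted_list().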
@@ -19,71 +19,107 @@
 # along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
 #
-class DepRelation:
-    PROVIDE = 1
-    USE = 2
-    ENTITY = 1
-    PACKAGE = 2
-    INCLUDE = 3
-    def __init__(self, obj_name, direction, rel_type):
-        self.direction = direction
-        self.rel_type = rel_type
-        self.obj_name = obj_name
-    def satisfies(self, rel_b):
-        if(rel_b.direction == self.USE):
-            return True
-        elif(self.direction == self.PROVIDE and rel_b.rel_type == self.rel_type and rel_b.obj_name == self.obj_name):
-            return True
-        return False
-    def __str__(self):
-        dstr = { self.USE : "Use", self.PROVIDE : "Provide" }
-        ostr = { self.ENTITY : "entity/module", self.PACKAGE : "package", self.INCLUDE : "include/header" }
-        return "%s %s '%s'" % (dstr[self.direction], ostr[self.rel_type], self.obj_name)
-class DepFile:
-    def __init__(self, filename, search_path=[]):
-        self.rels = []
-        self.filename = filename
-        parser = ParserFactory().create(self.filename, search_path)
-        parser.parse(self, self.filename)
-    def add_relation(self, rel):
-        self.rels.append(rel)
-    def satisfies(self, rels_b):
-        for r_mine in self.rels:
-            if not any(map(rels_b, lambda x: x.satisfies(r_mine))):
-                return False
-    def show_relations(self):
-        for r in self.rels:
-            print(str(r))
-class DepParser:
-    def __init__(self):
-        pass
-    def parse(f, filename):
-        pass
-class ParserFactory:
-    def create(self, filename, search_path):
+from __future__ import print_function
+import logging
+import tools
+class DepParser(object):
+    def __init__(self, dep_file):
+        self.dep_file = dep_file
+    def parse():
+        raise
+class ParserFactory(object):
+    def create(self, dep_file):
         import re
         from vlog_parser import VerilogParser
         from vhdl_parser import VHDLParser
-        extension=re.match(re.compile(".+\.(\w+)$"), filename)
-        if not extension :
-            raise ValueError("Unecognized file format : %s" % filename)
+        extension = re.match(re.compile(".+\.(\w+)$"), dep_file.file_path)
+        if not extension:
+            raise ValueError("Unecognized file format : %s" % dep_file.file_path)
         extension = extension.group(1).lower()
-        if(extension in ["vhd", "vhdl"]):
-            return VHDLParser()
-        elif(extension in ["v", "sv"]):
-            vp = VerilogParser()
-            for d in search_path:
-                vp.add_search_path(d)
-            return vp
+        if extension in ["vhd", "vhdl"]:
+            return VHDLParser(dep_file)
+        elif extension in ["v", "sv"]:
+            vp = VerilogParser(dep_file)
+            for d in dep_file.include_paths:
+                vp.add_search_path(d)
+            return vp
+# class DepSolver(object):
+#     def solve(self, vhdl_files):
+#         for f in vhdl_files:
+#             logging.debug("solving deps for " + f.path)
+#             if f.dep_requires:
+#                 for req in f.dep_requires:
+#                     pf = self._find_provider_file(req=req, vhdl_file=f, fset=vhdl_files)
+#                     assert isinstance(pf, SourceFile)
+#                     if not pf:
+#                         logging.error("Missing dependency in file "+str(f)+": " + req[0]+'.'+req[1])
+#                     else:
+#                         logging.debug("%s depends on %s" % (f.path, pf.path))
+#                         if pf.path != f.path:
+#                             f.dep_depends_on.append(pf)
+#             #get rid of duplicates by making a set from the list and vice versa
+#             f.dep_depends_on = list(set(f.dep_depends_on))
+#             f.dep_resolved = True
+def solve(fileset):
+    from srcfile import SourceFileSet
+    from dep_file import DepFile, DepRelation
+    assert isinstance(fileset, SourceFileSet)
+    fset = fileset.filter(DepFile)
+    # for fle in fset:
+    #     print(fle.path)
+    #     for rel in fle.rels:
+    #         print('\t' + str(rel))
+    for investigated_file in fset:
+        for rel in investigated_file.rels:
+            if rel.direction is DepRelation.PROVIDE:  # PROVIDE relations dont have to be satisfied
+                continue
+            if rel.rel_type is DepRelation.INCLUDE:  # INCLUDE are already solved by preprocessor
+                continue
+            if rel.library() in tools.get_standard_libraries():  # dont care about standard libs
+                continue
+            satisfied_by = set()
+            for dep_file in fset:
+                if dep_file is investigated_file:
+                    continue
+                if dep_file.satisfies(rel):
+                    investigated_file.depends_on.add(dep_file)
+                    satisfied_by.add(dep_file)
+            if len(satisfied_by) > 1:
+                logging.warning("Relation %s satisfied by multpiple (%d) files: %s",
+                                str(rel),
+                                len(satisfied_by),
+                                '\n'.join([file.path for file in list(satisfied_by)]))
+            elif len(satisfied_by) == 0:
+                logging.warning("Relation %s not satisfied by any source file", str(rel))
+    logging.info("Dependencies solved")
+def make_dependency_sorted_list(fileset, purge_unused=True):
+    pass
+    # return list of files sorted in dependency order
+if __name__ == "__main__":
+    from dep_file import (DepFile)
+    logging.basicConfig(format="%(levelname)s %(funcName)s() %(filename)s:%(lineno)d: %(message)s", level=logging.DEBUG)
+    df = DepFile("/home/pawel/cern/hdl-make/tests/lr_test/wr-cores/modules/wrc_lm32/lm32_shifter.v", [])
+    df.show_relations()
+    print("-----------------------\n"
+          "---------- VHDL -------\n"
+          "-----------------------\n")
+    df1 = DepFile("/home/pawel/cern/hdl-make/examples/fine_delay/hdl/testbench/top/wr-cores/testbench/top_level/gn4124_bfm/mem_model.vhd")
+    df1.show_relations()
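make_dependency_sorted_list() is still a stub in this commit. One possible shape for it, shown only as an illustration and not as the author's implementation: a depth-first walk over the depends_on sets that solve() fills in (no cycle detection here, and the helper name is hypothetical).

# Hypothetical sketch: emit prerequisites before the files that need them.
def sorted_by_dependencies(fileset):
    ordered, visited = [], set()

    def visit(dep_file):
        if dep_file in visited:
            return
        visited.add(dep_file)
        for prerequisite in dep_file.depends_on:   # visit dependencies first
            visit(prerequisite)
        ordered.append(dep_file)

    for dep_file in fileset:
        visit(dep_file)
    return ordered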
@@ -20,90 +20,31 @@
 #
 from __future__ import print_function
-from dependable_file import DependableFile
+#from dependable_file import DependableFile
 import os
 import global_mod
 import logging
+from module import Module
 from tools import ise
 from tools import modelsim
 from tools import quartus
 from util import path as path_mod
+from dep_file import DepFile, File
-class File(object):
-    def __init__(self, path, module=None):
-        self.path = path
-        if module is None:
-            self.module = global_mod.top_module
-        else:
-            assert not isinstance(module, basestring)
-            self.module = module
-    @property
-    def name(self):
-        return os.path.basename(self.path)
-    @property
-    def purename(self):
-        return os.path.splitext(self.name)[0]
-    @property
-    def dirname(self):
-        return os.path.dirname(self.path)
-    def rel_path(self, dir=None):
-        if dir is None:
-            dir = os.getcwd()
-        return path_mod.relpath(self.path, dir)
-    def __str__(self):
-        return self.path
-    def __eq__(self, other):
-        _NOTFOUND = object()
-        v1, v2 = [getattr(obj, "path", _NOTFOUND) for obj in [self, other]]
-        if v1 is _NOTFOUND or v2 is _NOTFOUND:
-            return False
-        elif v1 != v2:
-            return False
-        return True
-    def __hash__(self):
-        return hash(self.path)
-    def __cmp__(self, other):
-        if self.path < other.path:
-            return -1
-        if self.path == other.path:
-            return 0
-        if self.path > other.path:
-            return 1
-    def __ne__(self, other):
-        return not self.__eq__(other)
-    def isdir(self):
-        return os.path.isdir(self.path)
-    def show(self):
-        print(self.path)
-    def extension(self):
-        tmp = self.path.rsplit('.')
-        ext = tmp[len(tmp)-1]
-        return ext
-class SourceFile(DependableFile, File):
+class SourceFile(DepFile):
     cur_index = 0
     def __init__(self, path, module, library=None):
-        DependableFile.__init__(self)
-        File.__init__(self, path=path, module=module)
-        if not library:
-            library = "work"
+        from dep_file import DepFile
+        assert isinstance(path, basestring)
+        assert isinstance(module, Module)
         self.library = library
+        DepFile.__init__(self,
+                         file_path=path,
+                         module=module,
+                         include_paths=module.include_dirs[:])
     def gen_index(self):
         self.__class__.cur_index = self.__class__.cur_index+1
@@ -210,31 +151,6 @@ class VHDLFile(SourceFile):
         f.close()
         return ret
-    def _search_packages(self):
-        """
-        Reads a file and looks for package clase. Returns list of packages' names
-        from the file
-        """
-        import re
-        f = open(self.path, "r")
-        try:
-            text = f.readlines()
-        except UnicodeDecodeError:
-            return []
-        package_pattern = re.compile("^[ \t]*package[ \t]+([^ \t]+)[ \t]+is[ \t]*.*$")
-        ret = set()
-        for line in text:
-            #identifiers and keywords are case-insensitive in VHDL
-            line = line.lower()
-            m = re.match(package_pattern, line)
-            if m is not None:
-                ret.add((self.library.lower(), m.group(1).lower()))
-        f.close()
-        return ret
 class VerilogFile(SourceFile):
     def __init__(self, path, module, library=None, vlog_opt=None, include_dirs=None):
@@ -408,7 +324,7 @@ class SourceFileSet(list):
     def get_libs(self):
         ret = set()
-        for file in self:
+        for file in self.modules_pool.build_global_file_list():
            try:
                ret.add(file.library)
            except:
@@ -418,7 +334,7 @@ class SourceFileSet(list):
 class SourceFileFactory:
     def new(self, path, module, library=None, vcom_opt=None, vlog_opt=None, include_dirs=None):
-        if path =="/home/pawel/cern/wr-cores/testbench/top_level/gn4124_bfm.svh":
+        if path == "/home/pawel/cern/wr-cores/testbench/top_level/gn4124_bfm.svh":
             raise Exception()
         if path is None or path == "":
             raise RuntimeError("Expected a file path, got: "+str(path))
@@ -441,7 +357,7 @@ class SourceFileFactory:
                         vlog_opt=vlog_opt,
                         include_dirs=include_dirs)
         elif extension == 'sv' or extension == 'svh':
             nf = SVFile(path=path,
                         module=module,
                         library=library,
                         vlog_opt=vlog_opt,
......
#!/usr/bin/env python

def get_standard_libraries():
    import global_mod
    import quartus
    import ise
    import modelsim
    import iverilog
    import isim
    tm = global_mod.top_module
    if tm.action == "simulation":
        if tm.sim_tool == "modelsim" or tm.sim_tool == "vsim":
            return modelsim.MODELSIM_STANDARD_LIBS
        elif tm.sim_tool == "isim":
            return isim.ISIM_STANDARD_LIBS
        elif tm.sim_tool == "iverilog":
            return iverilog.IVERILOG_STANDARD_LIBS
    else:
        if tm.syn_tool == "quartus":
            return quartus.QUARTUS_STANDARD_LIBS
        elif tm.syn_tool == "ise":
            return ise.ISE_STANDARD_LIBS
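This helper is what solve() consults to skip relations aimed at vendor libraries. A minimal usage sketch, assuming global_mod.top_module has already been set by the main flow; the relation value is made up:

# Mirrors the check done in new_dep_solver.solve() in this commit.
import tools
from dep_file import DepRelation

rel = DepRelation("ieee.std_logic_1164", DepRelation.USE, DepRelation.PACKAGE)  # example relation
if rel.library() in tools.get_standard_libraries():
    print("skipped: %s comes from a standard library" % rel.obj_name)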
@@ -2,6 +2,11 @@
 from subprocess import Popen, PIPE
+IVERILOG_STARDAND_LIBS = ['std', 'ieee', 'ieee_proposed', 'vl', 'synopsys',
+                          'simprim', 'unisim', 'unimacro', 'aim', 'cpld',
+                          'pls', 'xilinxcorelib', 'aim_ver', 'cpld_ver',
+                          'simprims_ver', 'unisims_ver', 'uni9000_ver',
+                          'unimacro_ver', 'xilinxcorelib_ver', 'secureip']
 def detect_iverilog_version(path):
     iverilog = Popen("iverilog -v 2>/dev/null| awk '{if(NR==1) print $4}'",
......
@@ -20,123 +20,139 @@
 # along with Hdlmake. If not, see <http://www.gnu.org/licenses/>.
-def remove_gaps(buf, delims, gap_chars, lower_strings=False):
-    da={}
+from new_dep_solver import DepParser
+def _remove_gaps(buf, delims, gap_chars, lower_strings=False):
+    da = {}
     for d in delims:
-        da[d]=False
+        da[d] = False
     prev_is_gap = False
-    buf2=""
-    lines=[]
+    buf2 = ""
+    lines = []
     for c in buf:
         for d in delims:
-            if(c==d):
-                da[d]=not da[d]
+            if c == d:
+                da[d] = not da[d]
         within_string = any(da.values()) and not (c in delims)
-        if(not within_string):
+        if not within_string:
             if(c in gap_chars):
                 if(not prev_is_gap):
                     prev_is_gap = True
-                    buf2+=" "
+                    buf2 += " "
             else:
                 prev_is_gap = False
-                buf2+=c
-            if(c==";" or c=="\n"):
-                lines.append(buf2);
-                buf2=""
+                buf2 += c
+            if c == ";" or c == "\n":
+                lines.append(buf2)
+                buf2 = ""
         else:
-            buf2+=c;
+            buf2 += c
             prev_is_gap = False
     return lines
-from dep_solver import DepParser, DepRelation, DepFile
 class VHDLParser(DepParser):
-    def parse(self, f_deps, filename):
-        f = open(filename,"r")
+    def parse(self, dep_file):
+        from dep_file import DepRelation
+        content = open(dep_file.file_path, "r")
         buf = ""
         # stage 1: strip comments
-        for l in f:
+        for l in content.readlines():
             ci = l.find("--")
-            if(ci==0):
+            if ci == 0:
                 continue
-            while(ci>0):
-                nquotes = l[:ci].count('"') # ignore comments in strings
-                if(nquotes % 2 == 0):
-                    l=l[:ci-1];
+            while ci > 0:
+                quotes = l[:ci].count('"')  # ignore comments in strings
+                if quotes % 2 == 0:
+                    l = l[:ci-1]
                     break
-                ci= l.find("--", ci+1)
-            buf+=l
+                ci = l.find("--", ci+1)
+            buf += l
         # stage 2: remove spaces, crs, lfs, strip strings (we don't need them)
-        buf2=""
+        buf2 = ""
         string_literal = char_literal = False
         prev_is_gap = False
         gap_chars = " \r\n\t"
-        lines=[]
+        lines = []
         for c in buf:
-            if(c == '"' and not char_literal):
+            if c == '"' and not char_literal:
                 string_literal = not string_literal
-            if(c == "'" and not string_literal):
+            if c == "'" and not string_literal:
                 char_literal = not char_literal
             within_string = (string_literal or char_literal) and (c != '"') and (c != "'")
             if(not within_string):
                 if(c in gap_chars):
                     if(not prev_is_gap):
                         prev_is_gap = True
-                        buf2+=" "
+                        buf2 += " "
                 else:
                     prev_is_gap = False
-                    buf2+=c.lower()
-                if(c==";"):
-                    lines.append(buf2);
-                    buf2=""
+                    buf2 += c.lower()
+                if c == ";":
+                    lines.append(buf2)
+                    buf2 = ""
             else:
                 prev_is_gap = False
         import re
         patterns = {
-            "use" : "^ *use *(\w+) *\. *(\w+) *. *\w+ *;",
-            "entity" : "^ *entity +(\w+) +is +(port|generic)",
-            "package" : "^ *package +(\w+) +is",
-            "arch_begin" : "^ *architecture +(\w+) +of +(\w+) +is +",
-            "arch_end" : "^ *end +(\w+) +;",
-            "instance" : "^ *(\w+) *\: *(\w+) *(port|generic) *map"
+            "use": "^ *use *(\w+) *\. *(\w+) *. *\w+ *;",
+            "entity": "^ *entity +(\w+) +is +(port|generic)",
+            "package": "^ *package +(\w+) +is",
+            "arch_begin": "^ *architecture +(\w+) +of +(\w+) +is +",
+            "arch_end": "^ *end +(\w+) +;",
+            "instance": "^ *(\w+) *\: *(\w+) *(port|generic) *map"
         }
-        compiled_patterns = map(lambda p: (p,re.compile(patterns[p])), patterns)
+        compiled_patterns = map(lambda p: (p, re.compile(patterns[p])), patterns)
         within_architecture = False
         for l in lines:
-            matches = filter(lambda(k,v): v!=None, map(lambda (k,v): (k,re.match(v,l)), compiled_patterns))
+            matches = filter(lambda (k, v): v is not None, map(lambda (k, v): (k, re.match(v, l)), compiled_patterns))
             if(not len(matches)):
                 continue
-            what,g=matches[0]
+            what, g = matches[0]
             if(what == "use"):
-                f_deps.add_relation(DepRelation(g.group(1)+"."+g.group(2), DepRelation.USE, DepRelation.PACKAGE))
+                dep_file.add_relation(DepRelation(g.group(1)+"."+g.group(2), DepRelation.USE, DepRelation.PACKAGE))
             if(what == "package"):
-                f_deps.add_relation(DepRelation(g.group(1), DepRelation.PROVIDE, DepRelation.PACKAGE))
+                dep_file.add_relation(DepRelation(g.group(1),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.PACKAGE))
+                dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, g.group(1)),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.PACKAGE))
             elif(what == "entity"):
-                f_deps.add_relation(DepRelation(g.group(1), DepRelation.PROVIDE, DepRelation.ENTITY))
+                dep_file.add_relation(DepRelation(g.group(1),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.ENTITY))
+                dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, g.group(1)),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.ENTITY))
             elif(what == "package"):
-                f_deps.add_relation(DepRelation(g.group(1), DepRelation.PROVIDE, DepRelation.PACKAGE))
+                dep_file.add_relation(DepRelation(g.group(1),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.PACKAGE))
+                dep_file.add_relation(DepRelation("%s.%s" % (dep_file.library, g.group(1)),
+                                                  DepRelation.PROVIDE,
+                                                  DepRelation.PACKAGE))
             elif(what == "arch_begin"):
                 arch_name = g.group(1)
                 within_architecture = True
             elif(what == "arch_end" and within_architecture and g.group(1) == arch_name):
                 within_architecture = False
             elif(what == "instance" and within_architecture):
-                f_deps.add_relation(DepRelation(g.group(1), DepRelation.USE, DepRelation.ENTITY))
+                dep_file.add_relation(DepRelation(g.group(2),
+                                                  DepRelation.USE,
+                                                  DepRelation.ENTITY))
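To make the pattern table concrete, here is an invented VHDL fragment and the relations the rewritten parse() would record for it. The entity and package names are hypothetical and the trace was worked out by hand against the regexes above, so treat it as an illustration rather than tool output.

# Invented VHDL input, kept as a Python string so the example stays in one language.
vhdl_text = """
library ieee;
use ieee.std_logic_1164.all;

entity blinker is
  port (clk : in std_logic);
end blinker;
"""
# After comment stripping and whitespace squeezing, parse() would add roughly:
#   DepRelation("ieee.std_logic_1164", USE, PACKAGE)   -- from the "use" pattern
#   DepRelation("blinker", PROVIDE, ENTITY)            -- from the "entity" pattern
#   DepRelation("<library>.blinker", PROVIDE, ENTITY)  -- the new library-qualified twin
# Component instantiations inside an architecture body are recorded the same way,
# as USE/ENTITY relations taken from group(2) of the "instance" pattern.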