[fbt-commit] SF.net SVN: fbt:[85] trunk/bin
Status: Beta
Brought to you by:
dave_infj
From: <dav...@us...> - 2010-03-01 17:57:50
|
Revision: 85 http://fbt.svn.sourceforge.net/fbt/?rev=85&view=rev Author: dave_infj Date: 2010-03-01 17:57:41 +0000 (Mon, 01 Mar 2010) Log Message: ----------- Add --dumpdeps debugging flag: dumps the dependency database. This somewhat changes possible programme flow. Not everything needs to be done in a dumpdeps operation (like resolving dependencies), so: * Dependency loading code moved into its own method in mkvproj.py * Disambiguation handling code moved into cfg object * Control flow forks in main() depending on whether it is a dumpdeps operation or not * When dumping deps, don't enforce specifying -r etc; don't process disambiguation rules; don't resolve dependencies. Added a bit of extra sanity checking (specified root unknown etc). Added keys() method to DepList so that it can be iterated over during deps dump. tool_common.py: When processing coregen descriptors - instead of pointing coregen straight at the source descriptor - make a copy of the descriptor, changing the part spec. This means that, where appropriate, cores targeted for one device can be retargeted for another on the fly. The modified XCO files are put in $TEMPDIR, and a 'clean' target is added into coregen.mk so that the temp directory is cleaned up. The timestamp on the copied file is preserved from the original so that 'make' won't rebuild all cores when nothing has changed in the real source XCO file. In aid of that, move partspec processing out of tool_synth_synplify.py and into tool_common.py.
Modified Paths: -------------- trunk/bin/DepList.py trunk/bin/mkvproj.py trunk/bin/tool_common.py trunk/bin/tool_synth_synplify.py Modified: trunk/bin/DepList.py =================================================================== --- trunk/bin/DepList.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/DepList.py 2010-03-01 17:57:41 UTC (rev 85) @@ -138,6 +138,11 @@ os.path.join(path, hdl_src), deps ) + def keys(self): + """ + Return a list of entities in the dependency db + """ + return self.list.keys() def iterkeys(self): return self.list.iterkeys() Modified: trunk/bin/mkvproj.py =================================================================== --- trunk/bin/mkvproj.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/mkvproj.py 2010-03-01 17:57:41 UTC (rev 85) @@ -73,6 +73,8 @@ -C, --no-cache-deps do not use cached dependency information --no-relative do not make path names relative (to output_dir) + --dumpdeps (debugging) dump dependency database + Synthesis options: -p, --part target FPGA spec (required) -k, --constraints constraints file @@ -96,6 +98,47 @@ sys.exit(0) +def load_dep_db( cfg ): + """ + Populate the dependencies database + """ + + # If cache-mode isn't specified, then assume use cached mode if the project + # directory (containing project output) contains a cache file (.depends). + # This is a somewhat arbitrary assumption, but it is probably correct most + # of the time. It can always be overridden from the commandline. 
+ if cfg.cache_deps == None: + cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir, + tool_common.DEPS_FILE) ) + # Build source list + if cfg.cache_deps: + sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) ) + cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath ) + else: + cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath ) + + + +def dep_dump( cfg ): + """ + Dump the dependency database + """ + + print " * Dependency database dump *\n" + entities = sorted( cfg.dep_list.keys() ) + for ent in entities: + print "Entity %s:" % (ent) + for hdl_src, deps, core_src in cfg.dep_list[ent]: + if cfg.relative_paths: + hdl_src = relpath(hdl_src) + print "\tin %s:" % (hdl_src) + if deps: + print "\t\t%s" % '\n\t\t'.join( deps ) + else: + print "\t\t(none)" + print + + def main(argv): # # Parse options @@ -114,18 +157,19 @@ 'no-relative', 'help', 'verbose', - 'version'] ) + 'version', + 'dumpdeps'] ) except getopt.GetoptError, e: raise Panic( e ) - # Temporary list - dlist = [] + # Temporary flag + do_dump = False cfg = tool_common.Config() for arg, val in opts: if arg in ['-v', '--verbose']: cfg.verbose = True if arg in ['-d', '--dir']: cfg.output_dir = val - if arg in ['-D', '--disambiguate']: dlist.append(val) + if arg in ['-D', '--disambiguate']: cfg.add_dar(val) if arg in ['-l', '--libpath']: cfg.libpath += val.split(':') if arg in ['-p', '--part']: cfg.part = val if arg in ['-r', '--root']: cfg.top_ent = val @@ -135,76 +179,66 @@ if arg in ['--no-relative']: cfg.relative_paths = False if arg in ['-k', '--constraints']: cfg.constraints += val.split(':') if arg in ['-h', '--help']: print_help() + if arg in ['--dumpdeps']: do_dump = True if arg in ['-V', '--version']: sys.stderr.write( '$Id$\n' ) sys.exit(0) # Sanity checks - if args == []: - print "%s: no operation specified" % (prog_name()) - print_help() + if not cfg.libpath: + raise Panic("no library paths specified (-l)") - cfg.oper = args.pop(0) - if not 
os.path.isdir(cfg.output_dir): raise Panic("output directory %s doesn't exist" % (cfg.output_dir) ) - if not cfg.top_ent: - raise Panic("no top level module specified") + # + # Do job + # - # Convert the list of disambiguation rules into a map indexed by entity - for d in dlist: - try: - ent, regex = d.split('/', 1) - except ValueError: - ent, regex = None, None + if do_dump: + # Load deps + load_dep_db( cfg ) - if not ent or regex[-1] != '/': - raise Panic( "Bad disambiguation rule %s" % (d) ) + # Dump db + dep_dump( cfg ) + else: + # More sanity checks + if args == []: + print "%s: no operation specified" % (prog_name()) + print_help() - # Trim trailing / - regex = regex[:-1] + cfg.oper = args.pop(0) - try: - cfg.disambig[ent] = re.compile( regex ) - except re.error: - raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex)) + if not cfg.top_ent: + raise Panic("no top level module specified (-r)") - # If cache-mode isn't specified, then assume use cached mode if the project - # directory (containing project output) contains a cache file (.depends). - # This is a somewhat arbitrary assumption, but it is probably correct most - # of the time. It can always be overridden from the commandline. 
- if cfg.cache_deps == None: - cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir, - tool_common.DEPS_FILE) ) - # - # Do job - # + if cfg.part == '<unspecified>': + sys.stderr.write( "%s: warning: no partspec specified (-p)\n" % prog_name() ) - # Build source list - if cfg.cache_deps: - sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) ) - cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath ) - else: - cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath ) + # Load deps + load_dep_db( cfg ) - # Try to resolve dependencies - cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list, - cfg.disambig ) + # Check to make sure top level is known about + if cfg.top_ent not in cfg.dep_list: + raise Panic("top level entity %s unknown" % cfg.top_ent) - if cfg.verbose: - print '\n'.join( [hdl_src - for ent, hdl_src, deps, core_src in cfg.resolved_list] ) - print '\n'.join( [constr - for constr in cfg.constraints] ) - - # Write out project - try: - exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) ) - except ImportError: - raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) + # Try to resolve dependencies + cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list, + cfg.disambig ) - tool.write_project( cfg ) + if cfg.verbose: + print '\n'.join( [hdl_src + for ent, hdl_src, deps, core_src in cfg.resolved_list] ) + print '\n'.join( [constr + for constr in cfg.constraints] ) + + # Write out project + try: + exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) ) + except ImportError: + raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) + + tool.write_project( cfg ) if __name__ == '__main__': Modified: trunk/bin/tool_common.py =================================================================== --- trunk/bin/tool_common.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/tool_common.py 2010-03-01 17:57:41 UTC (rev 85) @@ -32,8 +32,9 @@ import sys import os +import re +import tempfile - # # 
Constants # @@ -67,6 +68,28 @@ self.relative_paths = True self.constraints = [] + def add_dar( self, rule ): + """ + Add a disambiguation rule + """ + + try: + ent, regex = rule.split('/', 1) + except ValueError: + ent, regex = None, None + + if not ent or regex[-1] != '/': + raise Panic( "Bad disambiguation rule %s" % (rule) ) + + # Trim trailing / + regex = regex[:-1] + + try: + self.disambig[ent] = re.compile( regex ) + except re.error: + raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex)) + + def rel_src(cfg, hdl_src): """ @@ -83,11 +106,81 @@ return hdl_src +# Synthesis: FPGA Family name expansion from shorthand +def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2 + """ + Parse a Xilinx partspec into (part, family, package, speed) + """ + + match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps.upper() ) + if not match: + raise Panic( "unknown partspec %s" % (ps) ) + + part, family, pkg, speed = match.groups() + + try: + family = {'2V' : 'VIRTEX2', + '2VP' : 'VIRTEX2P', + '2S' : 'SPARTAN2', + '2SE' : 'SPARTAN2E', + '3S' : 'SPARTAN3'}[family] + except: + if family[0].isdigit(): + family = 'VIRTEX'+family[0] + else: + raise Panic( 'unknown family %s in partspec' % k ) + + return part, family, pkg, speed + + +def copy_xco( src, dst, partspec ): + """ + Copy an XCO project file, replacing the part spec info as appropriate + """ + attrs = [] + part, family, pkg, speed = parse_partspec( partspec ) + + with open(src) as s: + for line in s: + # Delete any comments. + try: + line = line[:line.index('#')] + except ValueError: + pass + + kv = line.strip().replace('=', ' = ').split() + if kv: attrs.append( kv ) + + with open(dst, 'w') as d: + d.write( """\ +# Automatically generated by %s. Do not edit! 
+# Source: %s +# $Id$ + +""" % (prog_name(), src) ) + + for attr in attrs: + if attr[0].upper() == 'SET': + if attr[1].upper() == 'DEVICE' : attr[3] = part + if attr[1].upper() == 'DEVICEFAMILY': attr[3] = family + if attr[1].upper() == 'PACKAGE' : attr[3] = pkg + if attr[1].upper() == 'SPEEDGRADE' : attr[3] = speed + + d.write( ' '.join( attr ) + '\n' ) + + # Update atime and mtime on the newly created file to reflect its source + # This keeps make happy, and prevents unnecessary builds + st = os.stat( src ) + os.utime( dst, (st.st_atime, st.st_mtime) ) + def write_coregen_mf(cfg, cores): """ Write out a coregen makefile from the list of (hdl_src, core_src). """ + # Make a temporary directory to hold the temporary coregen projects + xco_tmp_dir = tempfile.mkdtemp( prefix='%s-' % prog_name() ) + # Make the cores directory, if it doesn't already exist cores_dir = os.path.join( cfg.output_dir, CORES_DIR ) if not os.path.isdir( cores_dir ): @@ -106,17 +199,30 @@ # default target """ % (prog_name()) ) - # Write out default target - mf.write( 'all: %s\n\n' % (' '.join( [hdl_src - for hdl_src, core_src in cores] )) ) + # Write out default and clean targets + mf.write( """\ +all: %s +\t@echo --- +\t@echo --- Made cores +\t@echo --- - # Write out coregen invocation rules +clean: +\trm -rf %s + +""" % (' '.join( [hdl_src for hdl_src, core_src in cores] ), + xco_tmp_dir) ) + for hdl_src, core_src in cores: + # Copy coregen project file + out_xco = os.path.join( xco_tmp_dir, os.path.split( core_src )[1] ) + copy_xco( core_src, out_xco, cfg.part ) + + # Write out coregen invocation rules mf.write( """\ %s: %s -\tcd %s; $(COREGEN) -b ../%s +\tcd %s; $(COREGEN) -b %s """ % (hdl_src, # the build target (sim source) - rel_src(cfg, core_src), # its dependency (core description [.xco]) + out_xco, # its dependency (core description [.xco]) CORES_DIR, # the cores subdirectory - rel_src(cfg, core_src) ) ) # the core description file [.xco] + out_xco ) ) # the core description file 
[.xco] Modified: trunk/bin/tool_synth_synplify.py =================================================================== --- trunk/bin/tool_synth_synplify.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/tool_synth_synplify.py 2010-03-01 17:57:41 UTC (rev 85) @@ -31,7 +31,6 @@ from util import * import os -import re import tool_common @@ -53,29 +52,6 @@ raise Panic( 'unknown HDL source extension %s' % k ) -# Synthesis: FPGA Family name expansion from shorthand -def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2 - match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps ) - if not match: - raise Panic( "unknown partspec %s" % (cfg.part) ) - - part, family, pkg, speed = match.groups() - - try: - return {'2V' : 'VIRTEX2', - '2VP' : 'VIRTEX2P', - '2S' : 'SPARTAN2', - '2SE' : 'SPARTAN2E', - '3S' : 'SPARTAN3'}[family] - except: - if family[0].isdigit(): - family = 'VIRTEX'+family[0] - else: - raise Panic( 'unknown family %s in partspec' % k ) - - return part, family, pkg, speed - - def write_project(cfg): """ Write out a synplify synthesis project from the resolved sources @@ -98,7 +74,7 @@ cores.append( (hdl_src, core_src) ) # Unpack partspec - part, family, pkg, speed = parse_partspec( cfg.part.upper() ) + part, family, pkg, speed = tool_common.parse_partspec( cfg.part ) # Write out project file. # @@ -165,7 +141,7 @@ #implementation attributes set_option -vlog_std v2001 -set_option -synthesis_onoff_pragma 0 +set_option -synthesis_onoff_pragma 1 set_option -project_relative_includes 1 """ % (prog_name(), This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |