Thread: [fbt-commit] SF.net SVN: fbt:[67] trunk/bin
Status: Beta
Brought to you by:
dave_infj
|
From: <dav...@us...> - 2008-12-04 18:31:23
|
Revision: 67
http://fbt.svn.sourceforge.net/fbt/?rev=67&view=rev
Author: dave_infj
Date: 2008-12-04 18:31:17 +0000 (Thu, 04 Dec 2008)
Log Message:
-----------
Big reorganisation prior to release. Apologies in advance if it breaks stuff.
To summarise:
- djm_pyhelp renamed util
- Changed function def comments into docstrings
- formalised module headers
- Modularised mkvdeps and mkvproj
- Changed how mkvproj calls the tool requested, including
how the default is set
Modularisation:
* parts of mkvdeps.py split into parse_{vhdl,verilog,coregen}.py and DepList.py
Support for other languages can be fairly easily added by adding a new module
and editing src_parser_map in mkvdeps.py.
* parts of mkvproj.py split into tool_sim_modelsim.py, tool_synth_synplify.py
and tool_common.py
Support for new project writers can be added simply by writing the appropriate
module and putting it in the right place.
tool_common contains constants and utilities required by all project writers.
The default tool for a given operation can be changed by altering the
appropriate symlink.
Modified Paths:
--------------
trunk/bin/comp_filter.py
trunk/bin/mkvdeps.py
trunk/bin/mkvproj.py
trunk/bin/synplify_wrapper.py
Added Paths:
-----------
trunk/bin/DepList.py
trunk/bin/parse_coregen.py
trunk/bin/parse_verilog.py
trunk/bin/parse_vhdl.py
trunk/bin/tool_common.py
trunk/bin/tool_sim_default.py
trunk/bin/tool_sim_modelsim.py
trunk/bin/tool_synth_default.py
trunk/bin/tool_synth_synplify.py
trunk/bin/util.py
Removed Paths:
-------------
trunk/bin/djm_pyhelp.py
Copied: trunk/bin/DepList.py (from rev 66, trunk/bin/mkvdeps.py)
===================================================================
--- trunk/bin/DepList.py (rev 0)
+++ trunk/bin/DepList.py 2008-12-04 18:31:17 UTC (rev 67)
@@ -0,0 +1,143 @@
+################################################################################
+#
+# DepList.py
+#
+# PURPOSE:
+#
+# Container class which maintains candidate entities and their dependencies.
+# Functions like an ordinary dictionary, but copes with ambiguous entity
+# implementations.
+#
+# $Id$
+
+from __future__ import with_statement
+from util import *
+
+import re
+import os
+
+import tool_common
+
+
+class DepList:
+ """
+ Helper class for storing lists of entity dependencies
+
+ Usage:
+ Inputs are individual candidates added via DepList.add_dep()
+ Outputs are always sets of candidates indexed by entity
+ """
+
+ def __init__(self):
+ self.list = {}
+ # Regex for parsing dependency caches
+ self.m_deps = re.compile( '(\w+)\s*\(\s*([\w.-]+)\s*\)\s*:(.*)' )
+
+
+ def add_dep(self, ent, hdl_src, deps, core_src = '' ):
+ """
+ Add a dependency to the list
+ """
+
+ try:
+ self.list[ent].append( (hdl_src, deps, core_src) )
+ except KeyError:
+ self.list[ent] = [ (hdl_src, deps, core_src) ]
+
+
+ def merge(self, src):
+ """
+ Update a dependencies dictionary with a new entry, merging if required.
+ """
+
+ for ent, dep_rec in src.list.iteritems():
+ try:
+ self.list[ent] = self.list[ent] + dep_rec
+ except KeyError:
+ self.list[ent] = dep_rec
+
+
+ def write_deps_cache(self, df):
+ """
+        Write out the DepList in a linear .depends cache file. df is a handle
+ and must already be open.
+ """
+
+ with df:
+ df.write("""\
+# Dependencies automatically generated by %s. Do not edit!
+# $Id$
+#
+
+""" % (prog_name()) )
+
+ for ent, candidates in self.iteritems():
+ for hdl_src, deps, core_src in candidates:
+ df.write( '%s(%s): %s%s\n' % (ent,
+ os.path.basename(hdl_src),
+ os.path.basename(core_src),
+ ' '.join(deps)) )
+
+
+ def read_deps_cache(self, df, path):
+ """
+        Import dependency data from a .depends cache file. df is a handle and
+ must already be open
+ """
+
+ with df:
+ for lno, dep_line in enumerate(df):
+ # Delete any comments
+ try:
+ dep_line = dep_line[:dep_line.index('#')]
+ except ValueError:
+ pass
+ dep_line = dep_line.strip()
+ if not dep_line:
+ continue
+
+ # Parse components
+ match = self.m_deps.search(dep_line)
+ if not match:
+ raise Panic("%s:%d: invalid dependency line" %
+ ( relpath(os.path.join(path,
+ tool_common.DEPS_FILE)),
+ (lno+1) )
+ )
+ ent, hdl_src, deps = match.groups()
+ deps = deps.split()
+
+ # If deps contains a single object ending in '.xco', then it
+ # is a core reference.
+ if len(deps) == 1 and deps[0].endswith('.xco'):
+ self.add_dep( ent,
+ os.path.join(tool_common.CORES_DIR, hdl_src),
+ [], # no entity dependencies
+ os.path.join(path, deps[0] ) )
+ else:
+ self.add_dep( ent,
+ os.path.join(path, hdl_src),
+ deps )
+
+
+ def iterkeys(self):
+ return self.list.iterkeys()
+
+
+ def iteritems(self):
+ return self.list.iteritems()
+
+
+ def __iter__(self):
+ return self.iterkeys()
+
+
+ def __contains__(self, ent):
+ return ent in self.list
+
+
+ def __getitem__(self, ent):
+ return self.list[ent]
+
+
+
Property changes on: trunk/bin/DepList.py
___________________________________________________________________
Added: svn:keywords
+ Author Date Id Revision
Added: svn:eol-style
+ native
Modified: trunk/bin/comp_filter.py
===================================================================
--- trunk/bin/comp_filter.py 2008-12-01 23:31:06 UTC (rev 66)
+++ trunk/bin/comp_filter.py 2008-12-04 18:31:17 UTC (rev 67)
@@ -1,13 +1,21 @@
################################################################################
#
-# accept():
+# comp_filter.py
#
-# Entity instances for which accept() returns False will be excluded from
-# dependency lists.
+# PURPOSE:
#
+# Provides a means for the HDL parsers to reject certain entity instantiations
+# because they are provided by third party simulation libraries
+#
# $Id$
+
def accept(comp_decl):
+ """
+ Entity instances for which accept() returns False will be excluded from
+ dependency lists.
+ """
+
if not comp_decl:
# Reject None, etc
return False
Deleted: trunk/bin/djm_pyhelp.py
===================================================================
--- trunk/bin/djm_pyhelp.py 2008-12-01 23:31:06 UTC (rev 66)
+++ trunk/bin/djm_pyhelp.py 2008-12-04 18:31:17 UTC (rev 67)
@@ -1,110 +0,0 @@
-
-# $Id$
-
-import sys
-import os
-
-#
-# prog_name()
-#
-# Returns programme name
-def prog_name():
- return os.path.basename(sys.argv[0])
-
-
-#
-# Panic exception
-#
-# For equivalent of perl die "message"
-class Panic(Exception):
- def __init__(self, value):
- self.value = value
-
- def __str__(self):
- return "%s: panic: %s" % (prog_name(), self.value)
-
-
-# relpath.py
-# R.Barran 30/08/2004
-
-def relpath(target, base=os.curdir):
- """
- Return a relative path to the target from either the current dir or an optional base dir.
- Base can be a directory specified either as absolute or relative to current dir.
- """
-
-# check commented out: not clear this is required, djm 11/6/08
-# if not os.path.exists(target):
-# raise OSError, 'Target does not exist: '+target
-
- if not os.path.isdir(base):
- raise OSError, 'Base is not a directory or does not exist: '+base
-
- base_list = (os.path.abspath(base)).split(os.sep)
- target_list = (os.path.abspath(target)).split(os.sep)
-
- # On the windows platform the target may be on a completely different drive from the base.
- if os.name in ['nt','dos','os2'] and base_list[0] <> target_list[0]:
- raise OSError, 'Target is on a different drive to base. Target: '+target_list[0].upper()+', base: '+base_list[0].upper()
-
- # Starting from the filepath root, work out how much of the filepath is
- # shared by base and target.
- for i in range(min(len(base_list), len(target_list))):
- if base_list[i] <> target_list[i]: break
- else:
- # If we broke out of the loop, i is pointing to the first differing
- # path elements. If we didn't break out of the loop, i is pointing to
- # identical path elements. Increment i so that in all cases it points
- # to the first differing path elements.
- i+=1
-
- rel_list = [os.pardir] * (len(base_list)-i) + target_list[i:]
- return os.path.join(*rel_list)
-
-#
-# walk_dirlist()
-#
-# Conditionally walks the list of directories given. Only directories
-# beginning with a + will be searched recursively. Version control directories
-# will be filtered out along the way.
-def walk_dirlist(dirlist):
- excl_dirs = []
- for dir in dirlist:
- dir = dir.strip()
- if not dir:
- continue
-
- # Those directories with a leading - should be excluded from the walk.
- if dir.startswith('-'):
- excl_dirs.append( dir.lstrip('-') )
- continue
-
- # Only those directories with a leading + should be recursed into.
- recurse = dir.startswith('+')
- for path, dirs, files in os.walk( dir.lstrip('+') ):
- if not recurse:
- del dirs[:]
-
- # Skip revision control directories
- try:
- del dirs[dirs.index('.svn')]
- del dirs[dirs.index('CVS')]
- del dirs[dirs.index('RCS')]
- del dirs[dirs.index('work')]
- except ValueError:
- pass
-
- # Skip any excludes
- for i in reversed(xrange(len(dirs))):
- for excl in excl_dirs:
- try:
- if os.path.samefile( os.path.join(path, dirs[i]), excl ):
- del dirs[i]
- except OSError:
- sys.stderr.write( "%s: ignoring invalid ignore %s\n" % (
- prog_name(),
- excl ) )
-
- yield (path, dirs, files)
-
-
Modified: trunk/bin/mkvdeps.py
===================================================================
--- trunk/bin/mkvdeps.py 2008-12-01 23:31:06 UTC (rev 66)
+++ trunk/bin/mkvdeps.py 2008-12-04 18:31:17 UTC (rev 67)
@@ -1,294 +1,52 @@
#!/usr/bin/python
+################################################################################
+#
+# mkvdeps.py
+#
+# PURPOSE:
+#
+# Auto dependency resolver. When used stand-alone, it will write a dependency
+# cache in the specified directories. Can be imported as a module such as to
+# provide dependency resolution in mkvproj.py.
+#
# $Id$
from __future__ import with_statement
+from util import *
+
import sys
import getopt
import re
import os
-import glob
-import comp_filter
-from djm_pyhelp import *
+import DepList
+import parse_vhdl
+import parse_verilog
+import parse_coregen
+import tool_common
-#
-# Constants
-#
-# Output directory for coregen
-CORES_DIR = 'cores'
-
-# Dependency cache file name
-DEPS_FILE = '.depends'
-
-################################################################################
-#
-# Helper class for storing lists of entity dependencies
-#
-# Usage:
-# Inputs are individual candidates added via DepList.add_dep()
-# Outputs are always sets of candidates indexed by entity
-class DepList:
- def __init__(self):
- self.list = {}
- # Regex for parsing dependency caches
- self.m_deps = re.compile( '(\w+)\s*\(\s*([\w.-]+)\s*\)\s*:(.*)' )
-
- #
- # add_dep()
- #
- # Add a dependency to the list
- def add_dep(self, ent, hdl_src, deps, core_src = '' ):
- try:
- self.list[ent].append( (hdl_src, deps, core_src) )
- except KeyError:
- self.list[ent] = [ (hdl_src, deps, core_src) ]
-
- #
- # merge_deps()
- #
- # Update a dependencies dictionary with a new entry, merging if required.
- def merge(self, src):
- for ent, dep_rec in src.list.iteritems():
- try:
- self.list[ent] = self.list[ent] + dep_rec
- except KeyError:
- self.list[ent] = dep_rec
-
- #
- # write_deps_cache()
- #
- # Write out the DepList in a linear .depends cache file. df is an handle
- # and must already be open.
- def write_deps_cache(self, df):
- with df:
- df.write("""\
-# Dependencies automatically generated by %s. Do not edit!
-# $Id$
-#
-
-""" % (prog_name()) )
-
- for ent, candidates in self.iteritems():
- for hdl_src, deps, core_src in candidates:
- df.write( '%s(%s): %s%s\n' % (ent,
- os.path.basename(hdl_src),
- os.path.basename(core_src),
- ' '.join(deps)) )
- #
- # read_deps_cache()
- #
- # Import depencency data from a .depends cache file. df is an handle and
- # must already be open
- def read_deps_cache(self, df, path):
- with df:
- for lno, dep_line in enumerate(df):
- # Delete any comments
- try:
- dep_line = dep_line[:dep_line.index('#')]
- except ValueError:
- pass
- dep_line = dep_line.strip()
- if not dep_line:
- continue
-
- # Parse components
- match = self.m_deps.search(dep_line)
- if not match:
- raise Panic("%s:%d: invalid dependency line" %
- ( relpath(os.path.join(path, DEPS_FILE)),
- (lno+1) )
- )
- ent, hdl_src, deps = match.groups()
- deps = deps.split()
-
- # If deps contains a single object ending in '.xco', then it
- # is a core reference.
- if len(deps) == 1 and deps[0].endswith('.xco'):
- self.add_dep( ent,
- os.path.join(CORES_DIR, hdl_src),
- [], # no entity dependencies
- os.path.join(path, deps[0] ) )
- else:
- self.add_dep( ent,
- os.path.join(path, hdl_src),
- deps )
-
-
- def iterkeys(self):
- return self.list.iterkeys()
-
- def iteritems(self):
- return self.list.iteritems()
-
- def __iter__(self):
- return self.iterkeys()
-
- def __contains__(self, ent):
- return ent in self.list
-
- def __getitem__(self, ent):
- return self.list[ent]
-
-
-
-################################################################################
-#
-# parse_deps_vhdl()
-#
-# Determine dependencies for a given file (VHDL mode)
-
-m_dep_vhdl = re.compile("""
- use \s+ (?P<lib> \w+) \. (?P<pkg> \w+) \.all \s* ; | # pkg import
-
- (?: entity|package) \s+ (?P<ent> \w+) \s+ is | # entity decl
-
- component \s+ (?P<comp> \w+) | # component decl
-
- \w+ \s* : \s* entity \s+ (?: work\.)? (?P<inst> \w+) # instance decl
- """, re.I|re.X)
-
-def parse_deps_vhdl(hdl_src):
- ent = None
- deps = []
- dep_list = DepList()
-
- with open(hdl_src) as vf:
- for line in vf:
- # Delete any comments.
- try:
- line = line[:line.index('--')]
- except ValueError:
- pass
- # Since VHDL is case insensitive, convert everything to lowercase
- line = line.lower()
-
- match = m_dep_vhdl.search(line)
- if match:
- # A package import decl after an entity or package body is
- # defined means we have hit a new entity, so we must output
- # what we've got and start over
- if match.group('pkg') and ent:
- dep_list.add_dep( ent, hdl_src, deps )
- ent = None
- deps = []
-
- # Find out what we've matched and handle appropriately
- if match.group('ent'):
- if ent:
- raise Panic("""\
-%s: unexpected entity %s found when processing entity %s. missing package imports?""" % (relpath(self.hdl_src),
- e,
- self.ent) )
-
- else:
- ent = match.group('ent')
-
- if ent in dep_list:
- raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) )
-
- # If it's a package import decl, then lib and pkg will be
- # defined. Only add a package if the library is 'work'.
- if match.group('lib') == 'work':
- deps.append( match.group('pkg') )
-
- # If it's an instance decl, then inst will be defined.
- if match.group('inst'):
- deps.append( match.group('inst') )
-
- # If it's a component decl, add it so long as it's not
- # marked to be ignored
- if comp_filter.accept(match.group('comp')):
- deps.append( match.group('comp') )
-
- dep_list.add_dep( ent, hdl_src, deps )
- return dep_list
-
-
-#
-# parse_deps_verilog()
-#
-# Determine dependencies for a given file (verilog mode)
-def parse_deps_verilog(hdl_src):
- sys.stderr.write('%s:%s: warning: unsupported verilog source ignored.\n' %
- ( prog_name(), relpath(hdl_src) ))
-
- return DepList()
-
-
-m_parse_xco = re.compile( "^\s*\w+\s+(\w+)\s*=\s*(.*)$" )
-
-#
-# parse_coregen()
-#
-# Determine the entity and HDL source provided by the given core
-def parse_coregen(hdl_src):
- def bool(s):
- return s in ['yes', 'true', '1']
-
- attribs = {}
-
- # Read in project settings and attributes
- with open(hdl_src) as vf:
- for line in vf:
- # Delete any comments.
- try:
- line = line[:line.index('#')]
- except ValueError:
- pass
-
- match = m_parse_xco.search( line )
- if match:
- attribs[match.group(1).lower()] = match.group(2)
-
- # sanity check
- try:
- ent = attribs['component_name']
- use_vhdl = bool(attribs['vhdlsim'])
- use_verilog = bool(attribs['verilogsim'])
- except KeyError, e:
- raise Panic( '%s: missing parameter %s' % (hdl_src, e) )
-
- if not use_vhdl ^ use_verilog:
- raise Panic( "%s: exactly one simulation (VHDL or verilog) is required" % relpath(hdl_src) )
-
- # The simulation source file will be the entity name with the appropriate
- # extension attached
- sim_src = os.path.join( CORES_DIR, ent )
- if use_vhdl:
- sim_src += '.vhd'
- else:
- sim_src += '.v'
-
- # Generate dependency record and return
- dep_list = DepList()
- dep_list.add_dep( ent, sim_src, [], hdl_src )
-
- return dep_list
-
-#
-# source_get_all_deps()
-#
-# Generate a list of dependencies in all VHDL and verilog files in the given
-# list of directories, optionally writing out a cache of the results
-#
-# Returns a hash indexed by entity of lists in which the first element is the
-# path to where that entity is declared, and the remaining elements are the
-# entities on which that entity depends.
-#
src_parser_map = {
- '.vhd' : parse_deps_vhdl,
- '.vhdl': parse_deps_vhdl,
- '.v' : parse_deps_verilog,
+ '.vhd' : parse_vhdl,
+ '.vhdl': parse_vhdl,
+ '.v' : parse_verilog,
'.xco' : parse_coregen
}
def source_get_all_deps(src_dirs, write_cache=False, verbose=False):
- dep_list = DepList()
+ """
+ Generate a list of dependencies in all VHDL and verilog files in the given
+ list of directories, optionally writing out a cache of the results
+
+ Returns a hash indexed by entity of lists in which the first element is the
+ path to where that entity is declared, and the remaining elements are the
+ entities on which that entity depends.
+ """
+ dep_list = DepList.DepList()
for path, dirs, hdl_srcs in walk_dirlist(src_dirs):
- dir_deps = DepList()
+ dir_deps = DepList.DepList()
for src in hdl_srcs:
path_src = os.path.join(path, src)
@@ -300,60 +58,61 @@
print path_src
# Parse this file
- src_ent_list = src_parser_map[src_ext](path_src)
+ src_ent_list = src_parser_map[src_ext].parse(path_src)
dep_list.merge( src_ent_list )
dir_deps.merge( src_ent_list )
# Write out cache if that option is enabled
if write_cache and dir_deps:
- with open(os.path.join(path, DEPS_FILE), 'w') as deps_file:
- dir_deps.write_deps_cache( deps_file )
+ with open(os.path.join(path, tool_common.DEPS_FILE), 'w') as df:
+ dir_deps.write_deps_cache( df )
return dep_list
-#
-# cache_get_all_deps()
-#
-# like source_get_all_deps(), but reads from .depends cache
+
def cache_get_all_deps(src_dirs, verbose=False):
- dep_list = DepList()
+ """
+ like source_get_all_deps(), but reads from .depends cache
+ """
+ dep_list = DepList.DepList()
# Read in all .depends files
for path, dirs, files in walk_dirlist(src_dirs):
# Not all directories will have cache files
- if DEPS_FILE not in files:
+ if tool_common.DEPS_FILE not in files:
continue
# Read through .depends file and build dependencies list
- with open(os.path.join(path, DEPS_FILE)) as deps_file:
- dep_list.read_deps_cache( deps_file, path )
+ with open(os.path.join(path, tool_common.DEPS_FILE)) as df:
+ dep_list.read_deps_cache( df, path )
return dep_list
-#
-# resolve_deps()
-#
-# Resolve the dependencies arising from the top module as specified in the
-# configuration. Returns an ordered list of sources.
+
def resolve_deps(top_ent, dep_list, disambig):
- #
- # what_srcs()
- #
- # Return a list of files that depend on the specified entity
+ """
+ Resolve the dependencies arising from the top module as specified in the
+ configuration. Returns an ordered list of sources.
+ """
+
def what_srcs(what_ent):
+ """
+ Return a list of files that depend on the specified entity
+ """
ws = []
for ent, allcand in dep_list.iteritems():
for hdl_src, deps, core_src in allcand:
if what_ent in deps:
ws.append(hdl_src)
return ws
+
- #
- # disambiguate()
- #
- # Error check, select and return the dependencies for the given entity
- # according to the disambiguation rules.
def disambiguate(ent):
+ """
+ Error check, select and return the dependencies for the given entity
+ according to the disambiguation rules.
+ """
+
# Check to make sure we know about unresolved entity ent
if ent not in dep_list:
raise Panic( """\
@@ -436,12 +195,12 @@
return resolved
-################################################################################
-#
-# print_help()
-#
-# Print help message
+
def print_help():
+ """
+ Print help message
+ """
+
print """
Dependency cache builder
@@ -465,7 +224,7 @@
""" % (prog_name())
sys.exit(0)
-# main
+
def main(argv):
verbose = False
Modified: trunk/bin/mkvproj.py
===================================================================
--- trunk/bin/mkvproj.py 2008-12-01 23:31:06 UTC (rev 66)
+++ trunk/bin/mkvproj.py 2008-12-04 18:31:17 UTC (rev 67)
@@ -1,305 +1,33 @@
#!/usr/bin/python
-from __future__ import with_statement
-import sys
-import os
-import re
-import getopt
-
-from djm_pyhelp import *
-import mkvdeps
-
-#
-# Constants
-#
-
-# Simulation/synthesis: the name of the makefile containing coregen build rules
-COREGEN_MK = 'coregen.mk'
-
-# Simulation: name of the makefile containing modelsim build rules
-MDLSIM_MK = 'modelsim.mk'
-
-# Synthesis: synplify output directory
-BUILD_DIR = 'build'
-
-# Simulation: modelsim commands for compiling HDL source files
-mdlsim_compile = {
- '.vhd' : '$(VCOM) $(VCOM_OPTS)',
- '.vhdl': '$(VCOM) $(VCOM_OPTS)',
- '.v' : '$(VLOG) $(VLOG_OPTS)'
- }
-
-# Synthesis: synthesis commands for adding HDL source files
-lang_flag = {'.vhd' :'-vhdl -lib work',
- '.vhdl':'-vhdl -lib work',
- '.v' :'-verilog'}
-
-# Synthesis: FPGA Family name expansion from shorthand
-family_tab = {'2V' : 'VIRTEX2',
- '2VP' : 'VIRTEX2P',
- '2S' : 'SPARTAN2',
- '2SE' : 'SPARTAN2E',
- '3S' : 'SPARTAN3'}
-
-
-
-#
-# class Config
-#
-# Storage object for configuration
-class Config:
- def __init__(self):
- #
- # Defaults
- #
- self.verbose = False
- self.output_dir = '.'
- self.libpath = []
- self.disambig = {}
- self.part = '<unspecified>'
- self.top_ent = ''
- self.tool = 'default'
- self.cache_deps = None
- self.relative_paths = True
- self.constraints = []
-
################################################################################
#
-# rel_src()
+# mkvproj.py
#
-# Conditionally return relative path of the given source file, with (some)
-# error checking
-def rel_src(cfg, hdl_src):
- if cfg.relative_paths:
- try:
- return relpath(hdl_src, cfg.output_dir)
- except OSError:
- raise Panic( "missing source file %s. Is dependency cache out of date?" % (x[1]) )
- else:
- return hdl_src
-
-################################################################################
+# PURPOSE:
#
-# mk_coregen_mf()
+# Simulation and synthesis project writer. Uses autodeps service from
+# mkvdeps.py.
#
-# Write out a coregen makefile from the list of (hdl_src, core_src).
-def mk_coregen_mf(cfg, cores):
-
- # Make the cores directory, if it doesn't already exist
- cores_dir = os.path.join( cfg.output_dir, mkvdeps.CORES_DIR )
- if not os.path.isdir( cores_dir ):
- sys.stderr.write( 'mkdir %s\n' % (cores_dir) )
- os.mkdir( cores_dir )
-
- with open( os.path.join(cfg.output_dir, COREGEN_MK), 'w' ) as mf:
- # Write out header
- mf.write( """\
-# Coregen makefile automatically generated by %s. Do not edit!
# $Id$
-#
-COREGEN ?= coregen
+from __future__ import with_statement
+from util import *
-# default target
-""" % (prog_name()) )
-
- # Write out default target
- mf.write( 'all: %s\n\n' % (' '.join( [hdl_src
- for hdl_src, core_src in cores] )) )
+import sys
+import os
+import re
+import getopt
- # Write out coregen invocation rules
- for hdl_src, core_src in cores:
- mf.write( """\
-%s: %s
-\tcd %s; $(COREGEN) -b ../%s
-""" % (hdl_src, # the build target (sim source)
- rel_src(cfg, core_src), # its dependency (core description [.xco])
- mkvdeps.CORES_DIR, # the cores subdirectory
- rel_src(cfg, core_src) ) ) # the core description file [.xco]
-
-################################################################################
-#
-# mk_mdlsim_proj()
-#
-# Write out a modelsim simulation project using the resolved sources
-def mk_mdlsim_proj(cfg):
- #
- # msim_lib()
- #
- # Helper macro for mk_mdlsim_proj() to express the filename of a modelsim
- # library entity for the given entity
- def msim_lib(ent):
- return os.path.join( 'work', ent, '_primary.dat' )
+import mkvdeps
+import tool_common
- # make the work library, if it doesn't exist
- worklib = os.path.join(cfg.output_dir, 'work')
- if not os.path.isdir(worklib):
- sys.stderr.write( 'vlib %s\n' % (worklib) )
- os.system( 'vlib %s' % (worklib) )
- # write out the modelsim makefile
- cores = []
- with open( os.path.join(cfg.output_dir, MDLSIM_MK), 'w' ) as mf:
- mf.write( """\
-# Modelsim makefile automatically generated by %s. Do not edit!
-# $Id$
-#
-
-VCOM ?= vcom
-VLOG ?= vlog
-
-# default target
-""" % (prog_name()) )
-
- # Write out default target
- mf.write( "all: %s\n\n" % (msim_lib(cfg.top_ent)) )
-
- for ent, hdl_src, deps, core_src in cfg.resolved_list:
- rel_hdl_src = rel_src( cfg, hdl_src )
-
- # Write rules to express this source's dependencies, if any...
[truncated message content] |
|
From: <dav...@us...> - 2010-03-01 17:57:50
|
Revision: 85
http://fbt.svn.sourceforge.net/fbt/?rev=85&view=rev
Author: dave_infj
Date: 2010-03-01 17:57:41 +0000 (Mon, 01 Mar 2010)
Log Message:
-----------
Add --dumpdeps debugging flag: dumps dependencies database.
This somewhat changes possible programme flow. Not everything needs to be done
in a dumpdeps operation (like resolving dependencies), so:
* Dependency loading code moved into its own method in mkvproj.py
* Disambiguation handling code moved into cfg object
* Control flow forks in main() depending on whether a dumpdeps operation or not
* When dumping deps, don't enforce specifying -r etc; don't process
disambiguation rules; don't resolve dependencies.
Added a bit of extra sanity checking (specified root unknown etc)
Added keys() method to DepList so that it can be iterated over during deps dump
tool_common.py:
When processing coregen descriptors - instead of pointing coregen straight
at the source descriptor - make a copy of the descriptor, changing the
part spec. This means that, where appropriate, cores targeted for one device
can be retargeted for another on the fly.
The modified XCO files are put in $TEMPDIR, and a 'clean' target is added
into coregen.mk so that the temp directory is cleaned up. The timestamp on
the copied file is preserved from the original so that 'make' won't rebuild
all cores when nothing has changed in the real source XCO file.
In aid of that, move partspec processing out of tool_synth_synplify and into
tool_common.
Modified Paths:
--------------
trunk/bin/DepList.py
trunk/bin/mkvproj.py
trunk/bin/tool_common.py
trunk/bin/tool_synth_synplify.py
Modified: trunk/bin/DepList.py
===================================================================
--- trunk/bin/DepList.py 2010-03-01 17:29:53 UTC (rev 84)
+++ trunk/bin/DepList.py 2010-03-01 17:57:41 UTC (rev 85)
@@ -138,6 +138,11 @@
os.path.join(path, hdl_src),
deps )
+ def keys(self):
+ """
+ Return a list of entities in the dependency db
+ """
+ return self.list.keys()
def iterkeys(self):
return self.list.iterkeys()
Modified: trunk/bin/mkvproj.py
===================================================================
--- trunk/bin/mkvproj.py 2010-03-01 17:29:53 UTC (rev 84)
+++ trunk/bin/mkvproj.py 2010-03-01 17:57:41 UTC (rev 85)
@@ -73,6 +73,8 @@
-C, --no-cache-deps do not use cached dependency information
--no-relative do not make path names relative (to output_dir)
+ --dumpdeps (debugging) dump dependency database
+
Synthesis options:
-p, --part target FPGA spec (required)
-k, --constraints constraints file
@@ -96,6 +98,47 @@
sys.exit(0)
+def load_dep_db( cfg ):
+ """
+ Populate the dependencies database
+ """
+
+ # If cache-mode isn't specified, then assume use cached mode if the project
+ # directory (containing project output) contains a cache file (.depends).
+ # This is a somewhat arbitrary assumption, but it is probably correct most
+ # of the time. It can always be overridden from the commandline.
+ if cfg.cache_deps == None:
+ cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir,
+ tool_common.DEPS_FILE) )
+ # Build source list
+ if cfg.cache_deps:
+ sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) )
+ cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath )
+ else:
+ cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath )
+
+
+
+def dep_dump( cfg ):
+ """
+ Dump the dependency database
+ """
+
+ print " * Dependency database dump *\n"
+ entities = sorted( cfg.dep_list.keys() )
+ for ent in entities:
+ print "Entity %s:" % (ent)
+ for hdl_src, deps, core_src in cfg.dep_list[ent]:
+ if cfg.relative_paths:
+ hdl_src = relpath(hdl_src)
+ print "\tin %s:" % (hdl_src)
+ if deps:
+ print "\t\t%s" % '\n\t\t'.join( deps )
+ else:
+ print "\t\t(none)"
+ print
+
+
def main(argv):
#
# Parse options
@@ -114,18 +157,19 @@
'no-relative',
'help',
'verbose',
- 'version'] )
+ 'version',
+ 'dumpdeps'] )
except getopt.GetoptError, e:
raise Panic( e )
- # Temporary list
- dlist = []
+ # Temporary flag
+ do_dump = False
cfg = tool_common.Config()
for arg, val in opts:
if arg in ['-v', '--verbose']: cfg.verbose = True
if arg in ['-d', '--dir']: cfg.output_dir = val
- if arg in ['-D', '--disambiguate']: dlist.append(val)
+ if arg in ['-D', '--disambiguate']: cfg.add_dar(val)
if arg in ['-l', '--libpath']: cfg.libpath += val.split(':')
if arg in ['-p', '--part']: cfg.part = val
if arg in ['-r', '--root']: cfg.top_ent = val
@@ -135,76 +179,66 @@
if arg in ['--no-relative']: cfg.relative_paths = False
if arg in ['-k', '--constraints']: cfg.constraints += val.split(':')
if arg in ['-h', '--help']: print_help()
+ if arg in ['--dumpdeps']: do_dump = True
if arg in ['-V', '--version']:
sys.stderr.write( '$Id$\n' )
sys.exit(0)
# Sanity checks
- if args == []:
- print "%s: no operation specified" % (prog_name())
- print_help()
+ if not cfg.libpath:
+ raise Panic("no library paths specified (-l)")
- cfg.oper = args.pop(0)
-
if not os.path.isdir(cfg.output_dir):
raise Panic("output directory %s doesn't exist" % (cfg.output_dir) )
- if not cfg.top_ent:
- raise Panic("no top level module specified")
+ #
+ # Do job
+ #
- # Convert the list of disambiguation rules into a map indexed by entity
- for d in dlist:
- try:
- ent, regex = d.split('/', 1)
- except ValueError:
- ent, regex = None, None
+ if do_dump:
+ # Load deps
+ load_dep_db( cfg )
- if not ent or regex[-1] != '/':
- raise Panic( "Bad disambiguation rule %s" % (d) )
+ # Dump db
+ dep_dump( cfg )
+ else:
+ # More sanity checks
+ if args == []:
+ print "%s: no operation specified" % (prog_name())
+ print_help()
- # Trim trailing /
- regex = regex[:-1]
+ cfg.oper = args.pop(0)
- try:
- cfg.disambig[ent] = re.compile( regex )
- except re.error:
- raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex))
+ if not cfg.top_ent:
+ raise Panic("no top level module specified (-r)")
- # If cache-mode isn't specified, then assume use cached mode if the project
- # directory (containing project output) contains a cache file (.depends).
- # This is a somewhat arbitrary assumption, but it is probably correct most
- # of the time. It can always be overridden from the commandline.
- if cfg.cache_deps == None:
- cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir,
- tool_common.DEPS_FILE) )
- #
- # Do job
- #
+ if cfg.part == '<unspecified>':
+ sys.stderr.write( "%s: warning: no partspec specified (-p)\n" % prog_name() )
- # Build source list
- if cfg.cache_deps:
- sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) )
- cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath )
- else:
- cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath )
+ # Load deps
+ load_dep_db( cfg )
- # Try to resolve dependencies
- cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list,
- cfg.disambig )
+ # Check to make sure top level is known about
+ if cfg.top_ent not in cfg.dep_list:
+ raise Panic("top level entity %s unknown" % cfg.top_ent)
- if cfg.verbose:
- print '\n'.join( [hdl_src
- for ent, hdl_src, deps, core_src in cfg.resolved_list] )
- print '\n'.join( [constr
- for constr in cfg.constraints] )
-
- # Write out project
- try:
- exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) )
- except ImportError:
- raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) )
+ # Try to resolve dependencies
+ cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list,
+ cfg.disambig )
- tool.write_project( cfg )
+ if cfg.verbose:
+ print '\n'.join( [hdl_src
+ for ent, hdl_src, deps, core_src in cfg.resolved_list] )
+ print '\n'.join( [constr
+ for constr in cfg.constraints] )
+
+ # Write out project
+ try:
+ exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) )
+ except ImportError:
+ raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) )
+
+ tool.write_project( cfg )
if __name__ == '__main__':
Modified: trunk/bin/tool_common.py
===================================================================
--- trunk/bin/tool_common.py 2010-03-01 17:29:53 UTC (rev 84)
+++ trunk/bin/tool_common.py 2010-03-01 17:57:41 UTC (rev 85)
@@ -32,8 +32,9 @@
import sys
import os
+import re
+import tempfile
-
#
# Constants
#
@@ -67,6 +68,28 @@
self.relative_paths = True
self.constraints = []
+ def add_dar( self, rule ):
+ """
+ Add a disambiguation rule
+ """
+
+ try:
+ ent, regex = rule.split('/', 1)
+ except ValueError:
+ ent, regex = None, None
+
+ if not ent or regex[-1] != '/':
+ raise Panic( "Bad disambiguation rule %s" % (rule) )
+
+ # Trim trailing /
+ regex = regex[:-1]
+
+ try:
+ self.disambig[ent] = re.compile( regex )
+ except re.error:
+ raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex))
+
+
def rel_src(cfg, hdl_src):
"""
@@ -83,11 +106,81 @@
return hdl_src
+# Synthesis: FPGA Family name expansion from shorthand
+def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2
+ """
+ Parse a Xilinx partspec into (part, family, package, speed)
+ """
+
+ match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps.upper() )
+ if not match:
+ raise Panic( "unknown partspec %s" % (ps) )
+
+ part, family, pkg, speed = match.groups()
+
+ try:
+ family = {'2V' : 'VIRTEX2',
+ '2VP' : 'VIRTEX2P',
+ '2S' : 'SPARTAN2',
+ '2SE' : 'SPARTAN2E',
+ '3S' : 'SPARTAN3'}[family]
+ except:
+ if family[0].isdigit():
+ family = 'VIRTEX'+family[0]
+ else:
+ raise Panic( 'unknown family %s in partspec' % k )
+
+ return part, family, pkg, speed
+
+
+def copy_xco( src, dst, partspec ):
+ """
+ Copy an XCO project file, replacing the part spec info as appropriate
+ """
+ attrs = []
+ part, family, pkg, speed = parse_partspec( partspec )
+
+ with open(src) as s:
+ for line in s:
+ # Delete any comments.
+ try:
+ line = line[:line.index('#')]
+ except ValueError:
+ pass
+
+ kv = line.strip().replace('=', ' = ').split()
+ if kv: attrs.append( kv )
+
+ with open(dst, 'w') as d:
+ d.write( """\
+# Automatically generated by %s. Do not edit!
+# Source: %s
+# $Id$
+
+""" % (prog_name(), src) )
+
+ for attr in attrs:
+ if attr[0].upper() == 'SET':
+ if attr[1].upper() == 'DEVICE' : attr[3] = part
+ if attr[1].upper() == 'DEVICEFAMILY': attr[3] = family
+ if attr[1].upper() == 'PACKAGE' : attr[3] = pkg
+ if attr[1].upper() == 'SPEEDGRADE' : attr[3] = speed
+
+ d.write( ' '.join( attr ) + '\n' )
+
+ # Update atime and mtime on the newly created file to reflect its source
+ # This keeps make happy, and prevents unnecessary builds
+ st = os.stat( src )
+ os.utime( dst, (st.st_atime, st.st_mtime) )
+
def write_coregen_mf(cfg, cores):
"""
Write out a coregen makefile from the list of (hdl_src, core_src).
"""
+ # Make a temporary directory to hold the temporary coregen projects
+ xco_tmp_dir = tempfile.mkdtemp( prefix='%s-' % prog_name() )
+
# Make the cores directory, if it doesn't already exist
cores_dir = os.path.join( cfg.output_dir, CORES_DIR )
if not os.path.isdir( cores_dir ):
@@ -106,17 +199,30 @@
# default target
""" % (prog_name()) )
- # Write out default target
- mf.write( 'all: %s\n\n' % (' '.join( [hdl_src
- for hdl_src, core_src in cores] )) )
+ # Write out default and clean targets
+ mf.write( """\
+all: %s
+\t@echo ---
+\t@echo --- Made cores
+\t@echo ---
- # Write out coregen invocation rules
+clean:
+\trm -rf %s
+
+""" % (' '.join( [hdl_src for hdl_src, core_src in cores] ),
+ xco_tmp_dir) )
+
for hdl_src, core_src in cores:
+ # Copy coregen project file
+ out_xco = os.path.join( xco_tmp_dir, os.path.split( core_src )[1] )
+ copy_xco( core_src, out_xco, cfg.part )
+
+ # Write out coregen invocation rules
mf.write( """\
%s: %s
-\tcd %s; $(COREGEN) -b ../%s
+\tcd %s; $(COREGEN) -b %s
""" % (hdl_src, # the build target (sim source)
- rel_src(cfg, core_src), # its dependency (core description [.xco])
+ out_xco, # its dependency (core description [.xco])
CORES_DIR, # the cores subdirectory
- rel_src(cfg, core_src) ) ) # the core description file [.xco]
+ out_xco ) ) # the core description file [.xco]
Modified: trunk/bin/tool_synth_synplify.py
===================================================================
--- trunk/bin/tool_synth_synplify.py 2010-03-01 17:29:53 UTC (rev 84)
+++ trunk/bin/tool_synth_synplify.py 2010-03-01 17:57:41 UTC (rev 85)
@@ -31,7 +31,6 @@
from util import *
import os
-import re
import tool_common
@@ -53,29 +52,6 @@
raise Panic( 'unknown HDL source extension %s' % k )
-# Synthesis: FPGA Family name expansion from shorthand
-def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2
- match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps )
- if not match:
- raise Panic( "unknown partspec %s" % (cfg.part) )
-
- part, family, pkg, speed = match.groups()
-
- try:
- return {'2V' : 'VIRTEX2',
- '2VP' : 'VIRTEX2P',
- '2S' : 'SPARTAN2',
- '2SE' : 'SPARTAN2E',
- '3S' : 'SPARTAN3'}[family]
- except:
- if family[0].isdigit():
- family = 'VIRTEX'+family[0]
- else:
- raise Panic( 'unknown family %s in partspec' % k )
-
- return part, family, pkg, speed
-
-
def write_project(cfg):
"""
Write out a synplify synthesis project from the resolved sources
@@ -98,7 +74,7 @@
cores.append( (hdl_src, core_src) )
# Unpack partspec
- part, family, pkg, speed = parse_partspec( cfg.part.upper() )
+ part, family, pkg, speed = tool_common.parse_partspec( cfg.part )
# Write out project file.
#
@@ -165,7 +141,7 @@
#implementation attributes
set_option -vlog_std v2001
-set_option -synthesis_onoff_pragma 0
+set_option -synthesis_onoff_pragma 1
set_option -project_relative_includes 1
""" % (prog_name(),
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <dav...@us...> - 2010-03-02 17:39:31
|
Revision: 88
http://fbt.svn.sourceforge.net/fbt/?rev=88&view=rev
Author: dave_infj
Date: 2010-03-02 17:39:24 +0000 (Tue, 02 Mar 2010)
Log Message:
-----------
* Add new feature: entity aliases.
Since coregen objects are not parameterisable (from HDL source), this feature
provides a mechanism whereby HDL source can instantiate an entity name that
is in fact an /alias/ for some other coregen actually defined with a .XCO file
somewhere in the library path.
When alias pairs ($instantiated_entity=$actual_entity) are provided on the
command-line, the dependency resolver will use $actual_entity to satisfy
a dependency for $instantiated_entity - but only if $instantiated_entity is
not provided explicitly elsewhere.
The aliased entity name as derived from the actual entity name will appear
in coregen.mf and modelsim.mf. During the XCO copy phase, the alias entity
name is substituted for the actual entity name so that coregen will produce
appropriately named objects.
The variable ALIASES should be defined in the project Makefile as ie=ae pairs.
NB: No spaces allowed.
NB: This feature only works with coregen objects. Other HDL entities cannot
be aliased in this way.
* Clarity enhancement: Error messages and --dumpdeps will now output the
XCO path, instead of the resulting HDL source path, for coregen objects.
Modified Paths:
--------------
trunk/bin/Makefile.inc
trunk/bin/mkvdeps.py
trunk/bin/mkvproj.py
trunk/bin/tool_common.py
trunk/bin/tool_sim_modelsim.py
trunk/bin/tool_synth_synplify.py
Modified: trunk/bin/Makefile.inc
===================================================================
--- trunk/bin/Makefile.inc 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/Makefile.inc 2010-03-02 17:39:24 UTC (rev 88)
@@ -46,6 +46,8 @@
# SIM_DAR Simulation disambiguation rules
# SYN_DAR Synthesis disambiguation rules
#
+# ALIASES Coregen aliases
+#
# VCOM_OPTS Options for ModelSim VHDL compiler
# VLOG_OPTS Options for ModelSim Verilog compiler
#
@@ -99,12 +101,14 @@
#
# Add mandatory arguments
-SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach F,$(SIMLIBPATH) ,-l $(F)) \
- $(foreach F,$(SIM_DAR) ,-D $(F))
-SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach F,$(SYNTHLIBPATH),-l $(F)) \
- $(foreach F,$(SYN_CONSTR) ,-k $(F)) \
- $(foreach F,$(SYN_DAR) ,-D $(F))
-NGDBUILD_OPTS += -p $(PART) -a $(foreach F,$(NGOLIBPATH) ,-sd $(F))
+SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach P,$(SIMLIBPATH) ,-l $(P)) \
+ $(foreach R,$(SIM_DAR) ,-D $(R)) \
+ $(foreach A,$(ALIASES) ,-a $(A))
+SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach P,$(SYNTHLIBPATH),-l $(P)) \
+ $(foreach F,$(SYN_CONSTR) ,-k $(F)) \
+ $(foreach R,$(SYN_DAR) ,-D $(R)) \
+ $(foreach A,$(ALIASES) ,-a $(A))
+NGDBUILD_OPTS += -p $(PART) -a $(foreach P,$(NGOLIBPATH) ,-sd $(P))
MAP_OPTS += -w -p $(PART) -pr b
PAR_OPTS += -w
BITGEN_OPTS += -w
Modified: trunk/bin/mkvdeps.py
===================================================================
--- trunk/bin/mkvdeps.py 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/mkvdeps.py 2010-03-02 17:39:24 UTC (rev 88)
@@ -108,10 +108,12 @@
return dep_list
-def resolve_deps(top_ent, dep_list, disambig):
+def resolve_deps(cfg):
"""
Resolve the dependencies arising from the top module as specified in the
- configuration. Returns an ordered list of sources.
+ configuration, including any aliased entities. Returns an ordered list of
+ sources.
+ NB: At present, aliases are supported on XCO objects only.
"""
def what_srcs(what_ent):
@@ -119,22 +121,37 @@
Return a list of files that depend on the specified entity
"""
ws = []
- for ent, allcand in dep_list.iteritems():
+ for ent, allcand in cfg.dep_list.iteritems():
for hdl_src, deps, core_src in allcand:
if what_ent in deps:
ws.append(hdl_src)
return ws
- def disambiguate(ent):
+ def disambiguate(ent, alias = ''):
"""
Error check, select and return the dependencies for the given entity
- according to the disambiguation rules.
+ according to the disambiguation and aliasing rules.
"""
+ #
# Check to make sure we know about unresolved entity ent
- if ent not in dep_list:
- raise Panic( """\
+ #
+ if ent not in cfg.dep_list:
+ if ent in cfg.aliases: # ent might be an alias
+ return disambiguate( cfg.aliases[ent], ent )
+ else: # If not, raise an error
+ if alias:
+ raise Panic( """\
+real entity %s of alias %s unknown.
+The following sources depend on %s:
+\t%s""" %
+ (ent,
+ alias,
+ alias,
+ '\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(alias)] ) ) )
+ else:
+ raise Panic( """\
entity %s unknown.
The following sources depend on %s:
\t%s""" %
@@ -142,17 +159,19 @@
ent,
'\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(ent)] ) ) )
+ #
# Figure out which entity to return
- if len( dep_list[ent] ) == 1:
+ #
+ if len( cfg.dep_list[ent] ) == 1:
# When there is only one candidate, there likely will be no
# disambiguation rules that would match it, so just use it.
- return dep_list[ent][0]
+ resolved_ent = cfg.dep_list[ent][0]
else:
# Otherwise, use the disambiguation rules to select a candidate.
try:
filtered_list = []
- for hdl_src, deps, core_src in dep_list[ent]:
- if disambig[ent].search( os.path.abspath( hdl_src ) ):
+ for hdl_src, deps, core_src in cfg.dep_list[ent]:
+ if cfg.disambig[ent].search( os.path.abspath( hdl_src ) ):
filtered_list.append( (hdl_src, deps, core_src) )
except KeyError:
# There is no rule for this entity, which is an error.
@@ -160,8 +179,8 @@
no rule for disambiguating entity %s with multiple candidates:
\t%s
""" % (ent,
- '\n\t'.join( [relpath(hdl_src)
- for hdl_src, deps, core_src in dep_list[ent]] )) )
+ '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src)
+ for hdl_src, deps, core_src in cfg.dep_list[ent]] )) )
# Sanity: check that there is exactly one match
if len(filtered_list) == 0:
@@ -170,25 +189,36 @@
raise Panic( """\
no candidates match specified rule for entity %s:
\t%s
-""" % (ent, '\n\t'.join( [relpath(hdl_src)
- for hdl_src, deps, core_src in dep_list[ent]] )) )
+""" % (ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src)
+ for hdl_src, deps, core_src in cfg.dep_list[ent]] )) )
elif len(filtered_list) != 1:
raise Panic( """\
Still %d candidates left after applying disambiguation rule for entity %s:
\t%s
-""" % (len(filtered_list), ent, '\n\t'.join( [relpath(hdl_src)
+""" % (len(filtered_list), ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src)
for hdl_src, deps, core_src in filtered_list] )) )
# We have exactly one candidate left
- return filtered_list[0]
-
-
+ resolved_ent = filtered_list[0]
+
+ #
+ # If the resolved entity was an alias, then do sanity checking and
+ # translate the hdl_src name appropriately.
+ # NB: the coregen handling code must still translate the entity name
+ # within the XCO project file.
+ if alias:
+ hdl_src, deps, core_src = resolved_ent
+ if not core_src:
+ raise Panic( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) )
+ return subst_basename(hdl_src, alias), deps, core_src, alias
+ else:
+ return resolved_ent + ('',)
###########################################################################
# unresolved: list of entities yet to be resolved
- unresolved = [top_ent]
+ unresolved = [cfg.top_ent]
# resolved: list of ordered resolved dependencies in (ent, hdl_src) tuples
resolved = []
@@ -196,10 +226,10 @@
unres_ent = unresolved.pop(0)
# Disambiguate as required
- hdl_src, deps, core_src = disambiguate(unres_ent)
+ hdl_src, deps, core_src, alias = disambiguate(unres_ent)
# Prepend the source file which satisfies unresolved entity ent
- resolved.insert(0, (unres_ent, hdl_src, deps, core_src))
+ resolved.insert(0, (unres_ent, hdl_src, deps, core_src, alias))
# Append entity ent's own dependencies for later consideration
unresolved += deps
Modified: trunk/bin/mkvproj.py
===================================================================
--- trunk/bin/mkvproj.py 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/mkvproj.py 2010-03-02 17:39:24 UTC (rev 88)
@@ -63,7 +63,9 @@
-v, --verbose print filenames as they are processed
-V, --version print version
-h, --help print this message
-x
+
+ -a, --alias <ie=ae> XCO entity alias
+ instantiated entity=actual entity
-d, --dir output directory
-l, --libpath dependency search paths (required)
-r, --root name of top module (required)
@@ -129,9 +131,12 @@
for ent in entities:
print "Entity %s:" % (ent)
for hdl_src, deps, core_src in cfg.dep_list[ent]:
- if cfg.relative_paths:
- hdl_src = relpath(hdl_src)
- print "\tin %s:" % (hdl_src)
+ if core_src:
+ src = relpath(core_src) if cfg.relative_paths else core_src
+ else:
+ src = relpath(hdl_src) if cfg.relative_paths else hdl_src
+
+ print "\tin %s:" % (src)
if deps:
print "\t\t%s" % '\n\t\t'.join( deps )
else:
@@ -144,8 +149,9 @@
# Parse options
#
try:
- opts, args = getopt.gnu_getopt( argv[1:], 'd:D:l:p:r:t:k:cChvV',
- ['dir=',
+ opts, args = getopt.gnu_getopt( argv[1:], 'a:d:D:l:p:r:t:k:cChvV',
+ ['alias=',
+ 'dir=',
'disambiguate=',
'libpath=',
'part=',
@@ -168,6 +174,7 @@
cfg = tool_common.Config()
for arg, val in opts:
if arg in ['-v', '--verbose']: cfg.verbose = True
+ if arg in ['-a', '--alias']: cfg.add_alias(val)
if arg in ['-d', '--dir']: cfg.output_dir = val
if arg in ['-D', '--disambiguate']: cfg.add_dar(val)
if arg in ['-l', '--libpath']: cfg.libpath += val.split(':')
@@ -223,12 +230,11 @@
raise Panic("top level entity %s unknown" % cfg.top_ent)
# Try to resolve dependencies
- cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list,
- cfg.disambig )
+ cfg.resolved_list = mkvdeps.resolve_deps( cfg )
if cfg.verbose:
print '\n'.join( [hdl_src
- for ent, hdl_src, deps, core_src in cfg.resolved_list] )
+ for ent, hdl_src, deps, core_src, alias in cfg.resolved_list] )
print '\n'.join( [constr
for constr in cfg.constraints] )
Modified: trunk/bin/tool_common.py
===================================================================
--- trunk/bin/tool_common.py 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/tool_common.py 2010-03-02 17:39:24 UTC (rev 88)
@@ -61,6 +61,7 @@
self.output_dir = '.'
self.libpath = []
self.disambig = {}
+ self.aliases = {}
self.part = '<unspecified>'
self.top_ent = ''
self.tool = 'default'
@@ -68,6 +69,23 @@
self.relative_paths = True
self.constraints = []
+ def add_alias( self, alias ):
+ """
+ Add entity alias of the form "<ie>=<ae>" where
+ ie = instantiated entity
+ ae = actual entity to substitute
+
+ NB: This only works for entities implemented by an XCO core
+ """
+
+ alias_split = alias.split('=')
+ if len(alias_split) != 2:
+ raise Panic( "Bad alias %s" % alias )
+
+ ie, ae = alias_split
+ self.aliases[ie.strip()] = ae.strip()
+
+
def add_dar( self, rule ):
"""
Add a disambiguation rule
@@ -133,9 +151,10 @@
return part, family, pkg, speed
-def copy_xco( src, dst, partspec ):
+def copy_xco( src, dst, partspec, ent_name='' ):
"""
- Copy an XCO project file, replacing the part spec info as appropriate
+ Copy an XCO project file, replacing the part spec info as
+ appropriate, and the entity name (if set)
"""
attrs = []
part, family, pkg, speed = parse_partspec( partspec )
@@ -165,6 +184,10 @@
if attr[1].upper() == 'DEVICEFAMILY': attr[3] = family
if attr[1].upper() == 'PACKAGE' : attr[3] = pkg
if attr[1].upper() == 'SPEEDGRADE' : attr[3] = speed
+
+ if attr[0].upper() == 'CSET':
+ if attr[1].upper() == 'COMPONENT_NAME' \
+ and ent_name : attr[3] = ent_name
d.write( ' '.join( attr ) + '\n' )
@@ -175,7 +198,7 @@
def write_coregen_mf(cfg, cores):
"""
- Write out a coregen makefile from the list of (hdl_src, core_src).
+ Write out a coregen makefile from the list of (hdl_src, core_src, alias).
"""
makefile = os.path.join(cfg.output_dir, COREGEN_MK)
@@ -216,13 +239,15 @@
clean:
\trm -rf %s
-""" % (' '.join( [hdl_src for hdl_src, core_src in cores] ),
+""" % (' '.join( [hdl_src for hdl_src, core_src, alias in cores] ),
xco_tmp_dir) )
- for hdl_src, core_src in cores:
+ for hdl_src, core_src, alias in cores:
# Copy coregen project file
out_xco = os.path.join( xco_tmp_dir, os.path.split( core_src )[1] )
- copy_xco( core_src, out_xco, cfg.part )
+ if alias:
+ out_xco = subst_basename( out_xco, alias )
+ copy_xco( core_src, out_xco, cfg.part, alias )
# Write out coregen invocation rules
mf.write( """\
Modified: trunk/bin/tool_sim_modelsim.py
===================================================================
--- trunk/bin/tool_sim_modelsim.py 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/tool_sim_modelsim.py 2010-03-02 17:39:24 UTC (rev 88)
@@ -88,7 +88,7 @@
# Write out default target
mf.write( "all: %s\n\n" % (msim_lib(cfg.top_ent)) )
- for ent, hdl_src, deps, core_src in cfg.resolved_list:
+ for ent, hdl_src, deps, core_src, alias in cfg.resolved_list:
rel_hdl_src = tool_common.rel_src( cfg, hdl_src )
# Write rules to express this source's dependencies, if any
@@ -109,7 +109,7 @@
# If it's a core, add it to the cores list
if core_src:
- cores.append( (hdl_src, core_src) )
+ cores.append( (hdl_src, core_src, alias) )
# Include core rules, if any
if cores:
Modified: trunk/bin/tool_synth_synplify.py
===================================================================
--- trunk/bin/tool_synth_synplify.py 2010-03-02 17:20:17 UTC (rev 87)
+++ trunk/bin/tool_synth_synplify.py 2010-03-02 17:39:24 UTC (rev 88)
@@ -64,14 +64,14 @@
# Generate sources and cores list
srcs = []
cores = []
- for ent, hdl_src, deps, core_src in cfg.resolved_list:
+ for ent, hdl_src, deps, core_src, alias in cfg.resolved_list:
srcs.append( 'add_file %s "%s"' % (
lang_flag(os.path.splitext(hdl_src)[1]),
tool_common.rel_src(cfg, hdl_src) ) )
# If it's a core, add it to the cores list
if core_src:
- cores.append( (hdl_src, core_src) )
+ cores.append( (hdl_src, core_src, alias) )
# Unpack partspec
part, family, pkg, speed = tool_common.parse_partspec( cfg.part )
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <dav...@us...> - 2010-05-13 16:11:19
|
Revision: 95
http://fbt.svn.sourceforge.net/fbt/?rev=95&view=rev
Author: dave_infj
Date: 2010-05-13 16:11:12 +0000 (Thu, 13 May 2010)
Log Message:
-----------
Get rid of Panic() class, replace with 'exit()'
Modified Paths:
--------------
trunk/bin/DepList.py
trunk/bin/mkvdeps.py
trunk/bin/mkvproj.py
trunk/bin/parse_coregen.py
trunk/bin/parse_verilog.py
trunk/bin/parse_vhdl.py
trunk/bin/synplify_wrapper.py
trunk/bin/tool_common.py
trunk/bin/tool_synth_synplify.py
trunk/bin/tool_synth_xst.py
trunk/bin/util.py
Modified: trunk/bin/DepList.py
===================================================================
--- trunk/bin/DepList.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/DepList.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -118,7 +118,7 @@
# Parse components
match = self.m_deps.search(dep_line)
if not match:
- raise Panic("%s:%d: invalid dependency line" %
+ exit("%s:%d: invalid dependency line" %
( relpath(os.path.join(path,
tool_common.DEPS_FILE)),
(lno+1) )
Modified: trunk/bin/mkvdeps.py
===================================================================
--- trunk/bin/mkvdeps.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/mkvdeps.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -35,6 +35,7 @@
from util import *
import sys
+import types
import getopt
import re
import os
@@ -142,7 +143,7 @@
return disambiguate( cfg.aliases[ent], ent )
else: # If not, raise an error
if alias:
- raise Panic( """\
+ exit( """\
real entity %s of alias %s unknown.
The following sources depend on %s:
\t%s""" %
@@ -151,7 +152,7 @@
alias,
'\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(alias)] ) ) )
else:
- raise Panic( """\
+ exit( """\
entity %s unknown.
The following sources depend on %s:
\t%s""" %
@@ -175,7 +176,7 @@
filtered_list.append( (hdl_src, deps, core_src) )
except KeyError:
# There is no rule for this entity, which is an error.
- raise Panic( """\
+ exit( """\
no rule for disambiguating entity %s with multiple candidates:
\t%s
""" % (ent,
@@ -186,14 +187,14 @@
if len(filtered_list) == 0:
# Nothing matches, the ambiguity is unresolved, which is
# an error.
- raise Panic( """\
+ exit( """\
no candidates match specified rule for entity %s:
\t%s
""" % (ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src)
for hdl_src, deps, core_src in cfg.dep_list[ent]] )) )
elif len(filtered_list) != 1:
- raise Panic( """\
+ exit( """\
Still %d candidates left after applying disambiguation rule for entity %s:
\t%s
""" % (len(filtered_list), ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src)
@@ -210,7 +211,7 @@
if alias:
hdl_src, deps, core_src = resolved_ent
if not core_src:
- raise Panic( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) )
+ exit( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) )
return subst_basename(hdl_src, alias), deps, core_src, alias
else:
return resolved_ent + ('',)
@@ -271,7 +272,7 @@
$Id$
""" % (prog_name())
- sys.exit(0)
+ exit()
def main(argv):
@@ -285,7 +286,7 @@
'help',
'version'] )
except getopt.GetoptError, e:
- raise Panic( e )
+ exit( e )
for arg, val in opts:
if arg in ['-v', '--verbose']:
@@ -296,7 +297,7 @@
if arg in ['-V', '--version']:
print '$Id$'
- sys.exit(0)
+ exit()
dirs = args
if not dirs:
@@ -309,7 +310,10 @@
if __name__ == '__main__':
try:
main(sys.argv)
- except Panic, e:
- sys.stderr.write( '%s\n' % e )
- sys.exit( 1 )
-
+ except SystemExit, e:
+ if e.code:
+ if type(e.code) is types.IntType:
+ exit(e.code)
+ else:
+ sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) )
+ exit(1)
Modified: trunk/bin/mkvproj.py
===================================================================
--- trunk/bin/mkvproj.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/mkvproj.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -34,6 +34,7 @@
from util import *
import sys
+import types
import os
import re
import getopt
@@ -97,7 +98,7 @@
$Id$
""" % (prog_name())
- sys.exit(0)
+ exit()
def load_dep_db( cfg ):
@@ -166,7 +167,7 @@
'version',
'dumpdeps'] )
except getopt.GetoptError, e:
- raise Panic( e )
+ exit(e)
# Temporary flag
do_dump = False
@@ -189,14 +190,14 @@
if arg in ['--dumpdeps']: do_dump = True
if arg in ['-V', '--version']:
sys.stderr.write( '$Id$\n' )
- sys.exit(0)
+ exit()
# Sanity checks
if not cfg.libpath:
- raise Panic("no library paths specified (-l)")
+ exit("no library paths specified (-l)")
if not os.path.isdir(cfg.output_dir):
- raise Panic("output directory %s doesn't exist" % (cfg.output_dir) )
+ exit("output directory %s doesn't exist" % (cfg.output_dir) )
#
# Do job
@@ -217,7 +218,7 @@
cfg.oper = args.pop(0)
if not cfg.top_ent:
- raise Panic("no top level module specified (-r)")
+ exit("no top level module specified (-r)")
if cfg.part == '<unspecified>':
sys.stderr.write( "%s: warning: no partspec specified (-p)\n" % prog_name() )
@@ -227,7 +228,7 @@
# Check to make sure top level is known about
if cfg.top_ent not in cfg.dep_list:
- raise Panic("top level entity %s unknown" % cfg.top_ent)
+ exit("top level entity %s unknown" % cfg.top_ent)
# Try to resolve dependencies
cfg.resolved_list = mkvdeps.resolve_deps( cfg )
@@ -242,7 +243,7 @@
try:
exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) )
except ImportError:
- raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) )
+ exit( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) )
tool.write_project( cfg )
@@ -250,6 +251,10 @@
if __name__ == '__main__':
try:
main(sys.argv)
- except Panic, e:
- sys.stderr.write( '%s\n' % e )
- sys.exit( 1 )
+ except SystemExit, e:
+ if e.code:
+ if type(e.code) is types.IntType:
+ exit(e.code)
+ else:
+ sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) )
+ exit(1)
Modified: trunk/bin/parse_coregen.py
===================================================================
--- trunk/bin/parse_coregen.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/parse_coregen.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -70,10 +70,10 @@
use_vhdl = bool(attribs['vhdlsim'])
use_verilog = bool(attribs['verilogsim'])
except KeyError, e:
- raise Panic( '%s: missing parameter %s' % (hdl_src, e) )
+ exit('%s: missing parameter %s' % (hdl_src, e))
if not use_vhdl ^ use_verilog:
- raise Panic( "%s: exactly one simulation (VHDL or verilog) is required" % relpath(hdl_src) )
+ exit("%s: exactly one simulation (VHDL or verilog) is required" % relpath(hdl_src))
# The simulation source file will be the entity name with the appropriate
# extension attached
Modified: trunk/bin/parse_verilog.py
===================================================================
--- trunk/bin/parse_verilog.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/parse_verilog.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -53,7 +53,7 @@
try:
results = parser.parseFile( hdl_src, parseAll = True )
except pyparsing.ParseException, e:
- raise Panic("""\
+ exit("""\
%%s:%d: error: Parse exception:
%s
@@ -74,7 +74,7 @@
for mod_parse_data in results:
ent = mod_parse_data[1]
if ent in dep_list:
- raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % (
+ exit( "duplicate entity %s declaration found in %s (previous in %s)" % (
ent, relpath(hdl_src), relpath(dep_list[ent][0])
)
)
@@ -91,7 +91,7 @@
global _src
- raise Panic("""\
+ exit("""\
%s:%d: error: unexpected syntax:
%s
@@ -139,7 +139,7 @@
end_kw = CaselessKeyword('end')
func_begin = CaselessKeyword('function')
func_end = CaselessKeyword('endfunction')
- module_kw = CaselessKeyword( 'module' ) | CaselessKeyword( 'primitive' )
+ module_kw = CaselessKeyword('module' ) | CaselessKeyword('primitive')
endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive')
# () groups (with recursion)
@@ -188,6 +188,7 @@
statement.suppress() # Compound and simple statements
)
mod_footer = endmodule_kw # End module keyword
+
syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err )
module = Group( (mod_header | syntax_err) +
mod_body +
Modified: trunk/bin/parse_vhdl.py
===================================================================
--- trunk/bin/parse_vhdl.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/parse_vhdl.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -84,7 +84,7 @@
# Find out what we've matched and handle appropriately
if match.group('ent'):
if ent:
- raise Panic("""\
+ exit("""\
%s: unexpected entity %s found when processing entity %s. missing package imports?""" % (relpath(self.hdl_src),
e,
self.ent) )
@@ -93,7 +93,7 @@
ent = match.group('ent')
if ent in dep_list:
- raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) )
+ exit( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) )
# If it's a package import decl, then lib and pkg will be
# defined. Only add a package if the library is 'work'.
Modified: trunk/bin/synplify_wrapper.py
===================================================================
--- trunk/bin/synplify_wrapper.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/synplify_wrapper.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -33,6 +33,7 @@
from util import *
import sys
+import types
import os
import subprocess
import time
@@ -133,7 +134,7 @@
while not os.path.isfile( synth_input_log ):
time.sleep( 0.5 )
if syn_proc.poll() is not None:
- raise Panic( "synthesis log does not exist. %s may not have run properly." %
+ exit('synthesis log does not exist. %s may not have run properly.'%
(exe) )
# Parse synthesis log file
@@ -260,7 +261,7 @@
$Id$
""" % (prog_name(), DEF_SYN_EXE)
- sys.exit(0)
+ exit()
# main
@@ -278,7 +279,7 @@
'help',
'version'] )
except getopt.GetoptError, e:
- raise Panic( e )
+ exit(e)
for arg, val in opts:
if arg in ['-v', '--verbose']:
@@ -289,7 +290,7 @@
if arg in ['-V', '--version']:
print '$Id$'
- sys.exit(0)
+ exit()
if arg in ['-e', '--executable']:
exe = val
@@ -305,7 +306,10 @@
if __name__ == '__main__':
try:
main(sys.argv)
- except Panic, e:
- sys.stderr.write( '%s\n' % e )
- sys.exit( 1 )
-
+ except SystemExit, e:
+ if e.code:
+ if type(e.code) is types.IntType:
+ exit(e.code)
+ else:
+ sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) )
+ exit(1)
Modified: trunk/bin/tool_common.py
===================================================================
--- trunk/bin/tool_common.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/tool_common.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -80,7 +80,7 @@
alias_split = alias.split('=')
if len(alias_split) != 2:
- raise Panic( "Bad alias %s" % alias )
+ exit('bad alias %s' % alias)
ie, ae = alias_split
self.aliases[ie.strip()] = ae.strip()
@@ -97,7 +97,7 @@
ent, regex = None, None
if not ent or regex[-1] != '/':
- raise Panic( "Bad disambiguation rule %s" % (rule) )
+ exit('bad disambiguation rule %s' % (rule))
# Trim trailing /
regex = regex[:-1]
@@ -105,7 +105,7 @@
try:
self.disambig[ent] = re.compile( regex )
except re.error:
- raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex))
+ exit('bad regex in disambiguation rule %s: %s' % (d, regex))
@@ -119,7 +119,7 @@
try:
return relpath(hdl_src, cfg.output_dir)
except OSError:
- raise Panic( "missing source file %s. Is dependency cache out of date?" % (x[1]) )
+ exit('missing source file %s. Is dependency cache out of date?' % (x[1]))
else:
return hdl_src
@@ -132,7 +132,7 @@
match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps.upper() )
if not match:
- raise Panic( "unknown partspec %s" % (ps) )
+ exit('unknown partspec %s' % (ps))
part, family, pkg, speed = match.groups()
@@ -146,7 +146,7 @@
if family[0].isdigit():
family = 'VIRTEX'+family[0]
else:
- raise Panic( 'unknown family %s in partspec' % k )
+ exit('unknown family %s in partspec' % k)
return part, family, pkg, speed
Modified: trunk/bin/tool_synth_synplify.py
===================================================================
--- trunk/bin/tool_synth_synplify.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/tool_synth_synplify.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -49,7 +49,7 @@
'.vhdl':'-vhdl -lib work',
'.v' :'-verilog'}[k]
except:
- raise Panic( 'unknown HDL source extension %s' % k )
+ exit( 'unknown HDL source extension %s' % k )
def write_project(cfg):
Modified: trunk/bin/tool_synth_xst.py
===================================================================
--- trunk/bin/tool_synth_xst.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/tool_synth_xst.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -49,7 +49,7 @@
'.vhdl':'vhdl work',
'.v' :'verilog work'}[k]
except:
- raise Panic( 'unknown HDL source extension %s' % k )
+ exit( 'unknown HDL source extension %s' % k )
def write_project(cfg):
Modified: trunk/bin/util.py
===================================================================
--- trunk/bin/util.py 2010-04-17 18:43:23 UTC (rev 94)
+++ trunk/bin/util.py 2010-05-13 16:11:12 UTC (rev 95)
@@ -39,18 +39,6 @@
return os.path.basename(sys.argv[0])
-class Panic(Exception):
- """
- For equivalent of perl die "message"
- """
-
- def __init__(self, value):
- self.value = value
-
- def __str__(self):
- return "%s: panic: %s" % (prog_name(), self.value)
-
-
def subst_basename(pathspec, new_base):
"""
Substitute the basename component of pathspec with new_base
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|