fbt-commit Mailing List for FPGA Build Tool
Status: Beta
Brought to you by: dave_infj
| Year | Jan | Feb | Mar | Apr | May | Jun | Jul | Aug | Sep | Oct | Nov | Dec |
|------|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|-----|
| 2008 |     |     |     |     |     |     |     |     |     |     |     | (8) |
| 2009 | (1) | (3) |     |     |     |     |     |     |     |     |     |     |
| 2010 | (5) |     | (6) | (6) | (6) | (2) |     |     |     |     |     |     |
| 2011 |     |     |     |     |     |     |     |     |     |     |     | (1) |
| 2012 |     |     | (1) |     |     |     |     |     |     |     |     |     |
From: <dav...@us...> - 2012-03-19 16:09:19
Revision: 104
http://fbt.svn.sourceforge.net/fbt/?rev=104&view=rev
Author: dave_infj
Date: 2012-03-19 16:09:09 +0000 (Mon, 19 Mar 2012)

Log Message:
-----------
initial commit; make simulatable, synthesizable blackbox vhdl

Added Paths:
-----------
    trunk/bin/blackboxify_vhdl.py

Added: trunk/bin/blackboxify_vhdl.py
===================================================================
--- trunk/bin/blackboxify_vhdl.py                       (rev 0)
+++ trunk/bin/blackboxify_vhdl.py   2012-03-19 16:09:09 UTC (rev 104)
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+
+################################################################################
+#
+# FPGA Build Tool
+# Copyright (C) 2008 David Miller
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# MODULE:
+#
+#   blackboxify_vhdl.py
+#
+# PURPOSE:
+#
+#   Turn simulation VHDL netlist into a synthesizable blackbox (ie, enclose
+#   simulation-only objects in translate_{off,on} pragmas.)
+#
+# $Id$
+
+from __future__ import with_statement
+from util import *
+
+import os
+import re
+import sys
+import types
+
+translate    = re.compile( '^\s*--.*translate_(on|off)', re.I )
+architecture = re.compile( '^\s*architecture.*is'      , re.I )
+begin        = re.compile( '^\s*begin.*'               , re.I )
+end          = re.compile( '^\s*end.*;'                , re.I )
+
+SYNTH_OFF = '-- synthesis translate_off\n'
+SYNTH_ON  = '-- synthesis translate_on\n'
+
+
+def blackboxify( file ):
+    """
+    Make a simulation netlist a synthesizable blackbox as well as simulatable.
+    """
+
+    print file
+
+    # Load file, stripping out any translate pragmas
+    with open(file) as f:
+        text = [line for line in f.readlines() if not translate.search(line)]
+
+    # enclose signal declarations in translate pragmas
+    for i in xrange( len(text) ):
+        if architecture.search( text[i] ):
+            text.insert( i+1, SYNTH_OFF )
+            break
+    else:
+        print '%s: failed to find ARCHITECTURE declaration' % file
+        exit(1)
+
+    for i in xrange( i+2, len(text) ):
+        if begin.search( text[i] ):
+            text.insert( i+1, SYNTH_OFF )
+            text.insert( i  , SYNTH_ON )
+            break
+    else:
+        print '%s: failed to find BEGIN declaration' % file
+        exit(1)
+
+    # add translate_on before final END
+    for i in xrange( len(text)-1, i+3, -1 ):
+        if end.search( text[i] ):
+            text.insert( i, SYNTH_ON )
+            break
+    else:
+        print '%s: failed to find final END declaration' % file
+        exit(1)
+
+    with open( file+'x', 'w' ) as f:
+        f.writelines( text )
+
+
+def main( sources ):
+    if len( sources ) == 0:
+        print "usage: blackboxify.py <source> [...]"
+        exit(0)
+
+    for source in sources:
+        blackboxify( source )
+
+if __name__ == '__main__':
+    try:
+        main(sys.argv[1:])
+    except SystemExit, e:
+        if e.code:
+            if type(e.code) is types.IntType:
+                exit(e.code)
+            else:
+                sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) )
+                exit(1)

Property changes on: trunk/bin/blackboxify_vhdl.py
___________________________________________________________________
Added: svn:executable
   + *
Added: svn:keywords
   + Author Date Id Revision
Added: svn:eol-style
   + native

From: <dav...@us...> - 2011-12-05 19:41:24
Revision: 103
http://fbt.svn.sourceforge.net/fbt/?rev=103&view=rev
Author: dave_infj
Date: 2011-12-05 19:41:18 +0000 (Mon, 05 Dec 2011)

Log Message:
-----------
initial commit; coregen IP catalogue generator

Added Paths:
-----------
    trunk/bin/mkipcat.py

Added: trunk/bin/mkipcat.py
===================================================================
--- trunk/bin/mkipcat.py                        (rev 0)
+++ trunk/bin/mkipcat.py    2011-12-05 19:41:18 UTC (rev 103)
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+################################################################################
+#
+# FPGA Build Tool
+# Copyright (C) 2008 David Miller
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# MODULE:
+#
+#   mkipcat.py
+#
+# PURPOSE:
+#
+#   Generate catalogue of all available IP in coregen for $XILINX. The catalogue
+#   is used by the coregen writer in tool_common.py (via mkvproj.py) to rewrite
+#   IP version if required.
+#
+# $Id$
+
+from __future__ import with_statement
+
+import os
+import platform
+import subprocess
+import sys
+
+CAT_NAME = 'fbt_ip_catalogue.csv'
+
+def scan_ip( base, parent, ver_len ):
+    """
+    Scans directories under `base` looking for directories containing IP, which
+    are identified by parent directory named `parent`, and returns a list of
+    (name, version) where version is the last `ver_len` elements of the
+    directory name.
+    """
+    ip = []
+    for root, dirs, files in os.walk( base ):
+        if os.path.basename(root) == parent:
+            for dir in dirs:
+                elts = dir.rsplit( '_', ver_len )
+                if len(elts) != ver_len + 1 or elts[-ver_len][0] != 'v':
+                    continue
+                elts[-ver_len] = elts[-ver_len][1:]   # trim leading 'v'
+                ip.append( (elts[0], '.'.join(elts[-ver_len:])) )
+    return ip


+def main():
+    if len(sys.argv) > 1:
+        xilinx = sys.argv[1]
+    else:
+        if 'XILINX' not in os.environ:
+            print '$XILINX not set'
+            sys.exit(1)
+        else:
+            xilinx = os.environ['XILINX']
+
+    if platform.system().lower().find('cygwin') != -1:
+        # On cygwin - must convert $XILINX to cygwin path
+        xilinx = subprocess.Popen( ['cygpath', xilinx], stdout=subprocess.PIPE ).communicate()[0].strip()
+
+    cg_root = os.path.join( xilinx, 'coregen' )
+    catalogue = os.path.join( xilinx, CAT_NAME )
+    # Sanity check
+    for dir in [xilinx, cg_root]:
+        if not os.path.isdir( dir ):
+            print '%s not found' % dir
+            sys.exit(1)
+
+    print """\
+Xilinx installation at %s
+Coregen at %s
+IP catalogue writen to %s
+""" % (xilinx, cg_root, catalogue)
+
+    ip = scan_ip( os.path.join( cg_root, 'ip' ), 'ip', 2 ) + \
+         scan_ip( os.path.join( cg_root, 'iprepo' ), 'pcores', 3 )
+
+    with open( catalogue, 'w' ) as f:
+        f.writelines( ','.join(d)+'\n' for d in ip )
+
+    print 'Total cores: %d' % len(ip)


+if __name__ == '__main__':
+    main()

Property changes on: trunk/bin/mkipcat.py
___________________________________________________________________
Added: svn:executable
   + *
Added: svn:keywords
   + Author Date Id Revision
Added: svn:eol-style
   + native

From: <dav...@us...> - 2010-06-08 13:41:41
Revision: 102
http://fbt.svn.sourceforge.net/fbt/?rev=102&view=rev
Author: dave_infj
Date: 2010-06-08 13:41:35 +0000 (Tue, 08 Jun 2010)

Log Message:
-----------
hackish way to automate the generation of comp_filter.py

Added Paths:
-----------
    trunk/bin/mk_comp_filter.py

Added: trunk/bin/mk_comp_filter.py
===================================================================
--- trunk/bin/mk_comp_filter.py                 (rev 0)
+++ trunk/bin/mk_comp_filter.py 2010-06-08 13:41:35 UTC (rev 102)
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+################################################################################
+#
+# FPGA Build Tool
+# Copyright (C) 2008 David Miller
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# MODULE:
+#
+#   mk_comp_filter.py
+#
+# PURPOSE:
+#
+#   Generate the fragment of 'comp_filter.py' which contains the list of entities
+#   to ignore, from the symbols listed in the given file (to stdout)
+#
+# $Id$
+
+from __future__ import with_statement
+import sys
+
+if len(sys.argv) != 2:
+    print 'syntax: mk_comp_filter.py <source_list.txt>'
+    exit()
+
+setline = 'UNISIMS = frozenset( ['
+WIDTH = 80
+
+sys.stdout.write( setline )
+wid = len(setline)
+try:
+    with open( sys.argv[1] ) as f:
+        for ent in f:
+            ent = "'%s', " % ent.strip().lower()
+
+            if wid + len(ent) > WIDTH:
+                sys.stdout.write( '\n' + ' '*len(setline) )
+                wid = len(setline)
+
+            sys.stdout.write( ent )
+            wid += len(ent)
+
+    sys.stdout.write( '\n' + ' '*len(setline) + '] )\n' )
+except IOError, e:
+    sys.stderr.write( '\n\nIOError: %s\n' % e )
+    exit(1)

Property changes on: trunk/bin/mk_comp_filter.py
___________________________________________________________________
Added: svn:executable
   + *
Added: svn:keywords
   + Author Date Id Revision
Added: svn:eol-style
   + native

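For reference, this is the shape of fragment the script emits: names are lower-cased, quoted, and wrapped at roughly 80 columns so they line up under the opening bracket and can be pasted straight into comp_filter.py. The sketch below packages the same wrapping logic as a hypothetical function over a list of names rather than a file; the function name and the entity names passed to it are illustrative only.

```python
def format_unisims(names, width=80):
    """Hypothetical helper: same wrapping as mk_comp_filter.py, minus the file I/O."""
    setline = 'UNISIMS = frozenset( ['
    out, wid = [setline], len(setline)
    for name in names:
        ent = "'%s', " % name.strip().lower()
        if wid + len(ent) > width:            # wrap, aligning under the opening bracket
            out.append('\n' + ' ' * len(setline))
            wid = len(setline)
        out.append(ent)
        wid += len(ent)
    out.append('\n' + ' ' * len(setline) + '] )\n')
    return ''.join(out)

# Prints a one-line frozenset fragment with the closing '] )' on its own line
print(format_unisims(['AND2', 'FDCE', 'BUFG', 'RAMB16_S36_S36']))
```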
From: <dav...@us...> - 2010-06-08 13:40:47
Revision: 101 http://fbt.svn.sourceforge.net/fbt/?rev=101&view=rev Author: dave_infj Date: 2010-06-08 13:40:39 +0000 (Tue, 08 Jun 2010) Log Message: ----------- update against unisims from ISE 11.4 Modified Paths: -------------- trunk/bin/comp_filter.py Modified: trunk/bin/comp_filter.py =================================================================== --- trunk/bin/comp_filter.py 2010-05-28 15:03:58 UTC (rev 100) +++ trunk/bin/comp_filter.py 2010-06-08 13:40:39 UTC (rev 101) @@ -44,377 +44,371 @@ # Xilinx unified primative library -UNISIMS = frozenset( [ 'and2', 'and2b1', 'and2b2', 'and3', 'and3b1', 'and3b2', - 'and3b3', 'and4', 'and4b1', 'and4b2', 'and4b3', - 'and4b4', 'and5', 'and5b1', 'and5b2', 'and5b3', - 'and5b4', 'and5b5', 'and6', 'and7', 'and8', - 'bscan_fpgacore', 'bscan_spartan2', 'bscan_spartan3', - 'bscan_spartan3a', 'bscan_virtex', 'bscan_virtex2', - 'bscan_virtex4', 'bscan_virtex5', 'buf', 'bufcf', - 'bufe', 'buffoe', 'bufg', 'bufgce', 'bufgce_1', - 'bufgctrl', 'bufgdll', 'bufgmux', 'bufgmux_1', - 'bufgmux_ctrl', 'bufgmux_virtex4', 'bufgp', 'bufgsr', - 'bufgts', 'bufio', 'bufr', 'buft', 'capture_fpgacore', - 'capture_spartan2', 'capture_spartan3', - 'capture_spartan3a', 'capture_virtex', - 'capture_virtex2', 'capture_virtex4', 'capture_virtex5', - 'carry4', 'cfglut5', 'clk_div10', 'clk_div10r', - 'clk_div10rsd', 'clk_div10sd', 'clk_div12', - 'clk_div12r', 'clk_div12rsd', 'clk_div12sd', - 'clk_div14', 'clk_div14r', 'clk_div14rsd', - 'clk_div14sd', 'clk_div16', 'clk_div16r', - 'clk_div16rsd', 'clk_div16sd', 'clk_div2', 'clk_div2r', - 'clk_div2rsd', 'clk_div2sd', 'clk_div4', 'clk_div4r', - 'clk_div4rsd', 'clk_div4sd', 'clk_div6', 'clk_div6r', - 'clk_div6rsd', 'clk_div6sd', 'clk_div8', 'clk_div8r', - 'clk_div8rsd', 'clk_div8sd', 'clkdll', 'clkdlle', - 'clkdllhf', 'crc32', 'crc64', 'dcc_fpgacore', #'config', - 'dcireset', 'dcm', 'dcm_adv', 'dcm_base', 'dcm_ps', - 'dcm_sp', 'dna_port', 'dsp48', 'dsp48a', 'dsp48e', - 'emac', 'fd', 'fd_1', 'fdc', 'fdc_1', 'fdce', 'fdce_1', - 'fdcp', 'fdcp_1', 'fdcpe', 'fdcpe_1', 'fdcpx1', 'fdd', - 'fddc', 'fddce', 'fddcp', 'fddcpe', 'fddp', 'fddpe', - 'fddrcpe', 'fddrrse', 'fde', 'fde_1', 'fdp', 'fdp_1', - 'fdpe', 'fdpe_1', 'fdr', 'fdr_1', 'fdre', 'fdre_1', - 'fdrs', 'fdrs_1', 'fdrse', 'fdrse_1', 'fds', 'fds_1', - 'fdse', 'fdse_1', 'fifo16', 'fifo18', 'fifo18_36', - 'fifo36', 'fifo36_72', 'fifo36_72_exp', 'fifo36_exp', - 'fmap', 'frame_ecc_virtex4', 'frame_ecc_virtex5', 'ftc', - 'ftcp', 'ftp', 'gnd', 'gt', 'gt10', 'gt10_10ge_4', - 'gt10_10ge_8', 'gt10_10gfc_4', 'gt10_10gfc_8', - 'gt10_aurora_1', 'gt10_aurora_2', 'gt10_aurora_4', - 'gt10_aurorax_4', 'gt10_aurorax_8', 'gt10_custom', - 'gt10_infiniband_1', 'gt10_infiniband_2', - 'gt10_infiniband_4', 'gt10_oc192_4', 'gt10_oc192_8', - 'gt10_oc48_1', 'gt10_oc48_2', 'gt10_oc48_4', - 'gt10_pci_express_1', 'gt10_pci_express_2', - 'gt10_pci_express_4', 'gt10_xaui_1', 'gt10_xaui_2', - 'gt10_xaui_4', 'gt11', 'gt11_custom', 'gt11_dual', - 'gt11clk', 'gt11clk_mgt', 'gt_aurora_1', 'gt_aurora_2', - 'gt_aurora_4', 'gt_custom', 'gt_ethernet_1', - 'gt_ethernet_2', 'gt_ethernet_4', 'gt_fibre_chan_1', - 'gt_fibre_chan_2', 'gt_fibre_chan_4', 'gt_infiniband_1', - 'gt_infiniband_2', 'gt_infiniband_4', 'gt_xaui_1', - 'gt_xaui_2', 'gt_xaui_4', 'gtp_dual', 'ibuf', - 'ibuf_agp', 'ibuf_ctt', 'ibuf_dly_adj', 'ibuf_gtl', - 'ibuf_gtl_dci', 'ibuf_gtlp', 'ibuf_gtlp_dci', - 'ibuf_hstl_i', 'ibuf_hstl_i_18', 'ibuf_hstl_i_dci', - 'ibuf_hstl_i_dci_18', 'ibuf_hstl_ii', 'ibuf_hstl_ii_18', - 'ibuf_hstl_ii_dci', 'ibuf_hstl_ii_dci_18', - 
'ibuf_hstl_iii', 'ibuf_hstl_iii_18', - 'ibuf_hstl_iii_dci', 'ibuf_hstl_iii_dci_18', - 'ibuf_hstl_iv', 'ibuf_hstl_iv_18', 'ibuf_hstl_iv_dci', - 'ibuf_hstl_iv_dci_18', 'ibuf_lvcmos12', 'ibuf_lvcmos15', - 'ibuf_lvcmos18', 'ibuf_lvcmos2', 'ibuf_lvcmos25', - 'ibuf_lvcmos33', 'ibuf_lvdci_15', 'ibuf_lvdci_18', - 'ibuf_lvdci_25', 'ibuf_lvdci_33', 'ibuf_lvdci_dv2_15', - 'ibuf_lvdci_dv2_18', 'ibuf_lvdci_dv2_25', - 'ibuf_lvdci_dv2_33', 'ibuf_lvds', 'ibuf_lvpecl', - 'ibuf_lvttl', 'ibuf_pci33_3', 'ibuf_pci33_5', - 'ibuf_pci66_3', 'ibuf_pcix', 'ibuf_pcix66_3', - 'ibuf_sstl18_i', 'ibuf_sstl18_i_dci', 'ibuf_sstl18_ii', - 'ibuf_sstl18_ii_dci', 'ibuf_sstl2_i', - 'ibuf_sstl2_i_dci', 'ibuf_sstl2_ii', - 'ibuf_sstl2_ii_dci', 'ibuf_sstl3_i', 'ibuf_sstl3_i_dci', - 'ibuf_sstl3_ii', 'ibuf_sstl3_ii_dci', 'ibufds', - 'ibufds_blvds_25', 'ibufds_diff_out', 'ibufds_dly_adj', - 'ibufds_ldt_25', 'ibufds_lvds_25', 'ibufds_lvds_25_dci', - 'ibufds_lvds_33', 'ibufds_lvds_33_dci', - 'ibufds_lvdsext_25', 'ibufds_lvdsext_25_dci', - 'ibufds_lvdsext_33', 'ibufds_lvdsext_33_dci', - 'ibufds_lvpecl_25', 'ibufds_lvpecl_33', - 'ibufds_ulvds_25', 'ibufg', 'ibufg_agp', 'ibufg_ctt', - 'ibufg_gtl', 'ibufg_gtl_dci', 'ibufg_gtlp', - 'ibufg_gtlp_dci', 'ibufg_hstl_i', 'ibufg_hstl_i_18', - 'ibufg_hstl_i_dci', 'ibufg_hstl_i_dci_18', - 'ibufg_hstl_ii', 'ibufg_hstl_ii_18', - 'ibufg_hstl_ii_dci', 'ibufg_hstl_ii_dci_18', - 'ibufg_hstl_iii', 'ibufg_hstl_iii_18', - 'ibufg_hstl_iii_dci', 'ibufg_hstl_iii_dci_18', - 'ibufg_hstl_iv', 'ibufg_hstl_iv_18', - 'ibufg_hstl_iv_dci', 'ibufg_hstl_iv_dci_18', - 'ibufg_lvcmos12', 'ibufg_lvcmos15', 'ibufg_lvcmos18', - 'ibufg_lvcmos2', 'ibufg_lvcmos25', 'ibufg_lvcmos33', - 'ibufg_lvdci_15', 'ibufg_lvdci_18', 'ibufg_lvdci_25', - 'ibufg_lvdci_33', 'ibufg_lvdci_dv2_15', - 'ibufg_lvdci_dv2_18', 'ibufg_lvdci_dv2_25', - 'ibufg_lvdci_dv2_33', 'ibufg_lvds', 'ibufg_lvpecl', - 'ibufg_lvttl', 'ibufg_pci33_3', 'ibufg_pci33_5', - 'ibufg_pci66_3', 'ibufg_pcix', 'ibufg_pcix66_3', - 'ibufg_sstl18_i', 'ibufg_sstl18_i_dci', - 'ibufg_sstl18_ii', 'ibufg_sstl18_ii_dci', - 'ibufg_sstl2_i', 'ibufg_sstl2_i_dci', 'ibufg_sstl2_ii', - 'ibufg_sstl2_ii_dci', 'ibufg_sstl3_i', - 'ibufg_sstl3_i_dci', 'ibufg_sstl3_ii', - 'ibufg_sstl3_ii_dci', 'ibufgds', 'ibufgds_blvds_25', - 'ibufgds_diff_out', 'ibufgds_ldt_25', 'ibufgds_lvds_25', - 'ibufgds_lvds_25_dci', 'ibufgds_lvds_33', - 'ibufgds_lvds_33_dci', 'ibufgds_lvdsext_25', - 'ibufgds_lvdsext_25_dci', 'ibufgds_lvdsext_33', - 'ibufgds_lvdsext_33_dci', 'ibufgds_lvpecl_25', - 'ibufgds_lvpecl_33', 'ibufgds_ulvds_25', - 'icap_spartan3a', 'icap_virtex2', 'icap_virtex4', - 'icap_virtex5', 'iddr', 'iddr2', 'iddr_2clk', 'idelay', - 'idelayctrl', 'ifddrcpe', 'ifddrrse', 'ild', 'inv', - 'iobuf', 'iobuf_agp', 'iobuf_ctt', 'iobuf_f_12', - 'iobuf_f_16', 'iobuf_f_2', 'iobuf_f_24', 'iobuf_f_4', - 'iobuf_f_6', 'iobuf_f_8', 'iobuf_gtl', 'iobuf_gtl_dci', - 'iobuf_gtlp', 'iobuf_gtlp_dci', 'iobuf_hstl_i', - 'iobuf_hstl_i_18', 'iobuf_hstl_ii', 'iobuf_hstl_ii_18', - 'iobuf_hstl_ii_dci', 'iobuf_hstl_ii_dci_18', - 'iobuf_hstl_iii', 'iobuf_hstl_iii_18', 'iobuf_hstl_iv', - 'iobuf_hstl_iv_18', 'iobuf_hstl_iv_dci', - 'iobuf_hstl_iv_dci_18', 'iobuf_lvcmos12', - 'iobuf_lvcmos12_f_2', 'iobuf_lvcmos12_f_4', - 'iobuf_lvcmos12_f_6', 'iobuf_lvcmos12_f_8', - 'iobuf_lvcmos12_s_2', 'iobuf_lvcmos12_s_4', - 'iobuf_lvcmos12_s_6', 'iobuf_lvcmos12_s_8', - 'iobuf_lvcmos15', 'iobuf_lvcmos15_f_12', - 'iobuf_lvcmos15_f_16', 'iobuf_lvcmos15_f_2', - 'iobuf_lvcmos15_f_4', 'iobuf_lvcmos15_f_6', - 'iobuf_lvcmos15_f_8', 'iobuf_lvcmos15_s_12', - 
'iobuf_lvcmos15_s_16', 'iobuf_lvcmos15_s_2', - 'iobuf_lvcmos15_s_4', 'iobuf_lvcmos15_s_6', - 'iobuf_lvcmos15_s_8', 'iobuf_lvcmos18', - 'iobuf_lvcmos18_f_12', 'iobuf_lvcmos18_f_16', - 'iobuf_lvcmos18_f_2', 'iobuf_lvcmos18_f_4', - 'iobuf_lvcmos18_f_6', 'iobuf_lvcmos18_f_8', - 'iobuf_lvcmos18_s_12', 'iobuf_lvcmos18_s_16', - 'iobuf_lvcmos18_s_2', 'iobuf_lvcmos18_s_4', - 'iobuf_lvcmos18_s_6', 'iobuf_lvcmos18_s_8', - 'iobuf_lvcmos2', 'iobuf_lvcmos25', - 'iobuf_lvcmos25_f_12', 'iobuf_lvcmos25_f_16', - 'iobuf_lvcmos25_f_2', 'iobuf_lvcmos25_f_24', - 'iobuf_lvcmos25_f_4', 'iobuf_lvcmos25_f_6', - 'iobuf_lvcmos25_f_8', 'iobuf_lvcmos25_s_12', - 'iobuf_lvcmos25_s_16', 'iobuf_lvcmos25_s_2', - 'iobuf_lvcmos25_s_24', 'iobuf_lvcmos25_s_4', - 'iobuf_lvcmos25_s_6', 'iobuf_lvcmos25_s_8', - 'iobuf_lvcmos33', 'iobuf_lvcmos33_f_12', - 'iobuf_lvcmos33_f_16', 'iobuf_lvcmos33_f_2', - 'iobuf_lvcmos33_f_24', 'iobuf_lvcmos33_f_4', - 'iobuf_lvcmos33_f_6', 'iobuf_lvcmos33_f_8', - 'iobuf_lvcmos33_s_12', 'iobuf_lvcmos33_s_16', - 'iobuf_lvcmos33_s_2', 'iobuf_lvcmos33_s_24', - 'iobuf_lvcmos33_s_4', 'iobuf_lvcmos33_s_6', - 'iobuf_lvcmos33_s_8', 'iobuf_lvdci_15', - 'iobuf_lvdci_18', 'iobuf_lvdci_25', 'iobuf_lvdci_33', - 'iobuf_lvdci_dv2_15', 'iobuf_lvdci_dv2_18', - 'iobuf_lvdci_dv2_25', 'iobuf_lvdci_dv2_33', - 'iobuf_lvds', 'iobuf_lvpecl', 'iobuf_lvttl', - 'iobuf_lvttl_f_12', 'iobuf_lvttl_f_16', - 'iobuf_lvttl_f_2', 'iobuf_lvttl_f_24', - 'iobuf_lvttl_f_4', 'iobuf_lvttl_f_6', 'iobuf_lvttl_f_8', - 'iobuf_lvttl_s_12', 'iobuf_lvttl_s_16', - 'iobuf_lvttl_s_2', 'iobuf_lvttl_s_24', - 'iobuf_lvttl_s_4', 'iobuf_lvttl_s_6', 'iobuf_lvttl_s_8', - 'iobuf_pci33_3', 'iobuf_pci33_5', 'iobuf_pci66_3', - 'iobuf_pcix', 'iobuf_pcix66_3', 'iobuf_s_12', - 'iobuf_s_16', 'iobuf_s_2', 'iobuf_s_24', 'iobuf_s_4', - 'iobuf_s_6', 'iobuf_s_8', 'iobuf_sstl18_i', - 'iobuf_sstl18_ii', 'iobuf_sstl18_ii_dci', - 'iobuf_sstl2_i', 'iobuf_sstl2_ii', 'iobuf_sstl2_ii_dci', - 'iobuf_sstl3_i', 'iobuf_sstl3_ii', 'iobuf_sstl3_ii_dci', - 'iobufds', 'iobufds_blvds_25', 'iobufe', 'iobufe_f', - 'iobufe_s', 'iodelay', 'iserdes', 'iserdes_nodelay', - 'jtag_sim_spartan3a', 'jtag_sim_virtex4', - 'jtag_sim_virtex5', 'jtagppc', 'keep', 'keeper', - 'key_clear', 'ld', 'ld_1', 'ldc', 'ldc_1', 'ldce', - 'ldce_1', 'ldcp', 'ldcp_1', 'ldcpe', 'ldcpe_1', 'lde', - 'lde_1', 'ldg', 'ldp', 'ldp_1', 'ldpe', 'ldpe_1', - 'lut1', 'lut1_d', 'lut1_l', 'lut2', 'lut2_d', 'lut2_l', - 'lut3', 'lut3_d', 'lut3_l', 'lut4', 'lut4_d', 'lut4_l', - 'lut5', 'lut5_d', 'lut5_l', 'lut6', 'lut6_2', 'lut6_d', - 'lut6_l', 'merge', 'min_off', 'mult18x18', 'mult18x18s', - 'mult18x18sio', 'mult_and', 'muxcy', 'muxcy_d', - 'muxcy_l', 'muxf5', 'muxf5_d', 'muxf5_l', 'muxf6', - 'muxf6_d', 'muxf6_l', 'muxf7', 'muxf7_d', 'muxf7_l', - 'muxf8', 'muxf8_d', 'muxf8_l', 'nand2', 'nand2b1', - 'nand2b2', 'nand3', 'nand3b1', 'nand3b2', 'nand3b3', - 'nand4', 'nand4b1', 'nand4b2', 'nand4b3', 'nand4b4', - 'nand5', 'nand5b1', 'nand5b2', 'nand5b3', 'nand5b4', - 'nand5b5', 'nor2', 'nor2b1', 'nor2b2', 'nor3', 'nor3b1', - 'nor3b2', 'nor3b3', 'nor4', 'nor4b1', 'nor4b2', - 'nor4b3', 'nor4b4', 'nor5', 'nor5b1', 'nor5b2', - 'nor5b3', 'nor5b4', 'nor5b5', 'obuf', 'obuf_agp', - 'obuf_ctt', 'obuf_f_12', 'obuf_f_16', 'obuf_f_2', - 'obuf_f_24', 'obuf_f_4', 'obuf_f_6', 'obuf_f_8', - 'obuf_gtl', 'obuf_gtl_dci', 'obuf_gtlp', - 'obuf_gtlp_dci', 'obuf_hstl_i', 'obuf_hstl_i_18', - 'obuf_hstl_i_dci', 'obuf_hstl_i_dci_18', 'obuf_hstl_ii', - 'obuf_hstl_ii_18', 'obuf_hstl_ii_dci', - 'obuf_hstl_ii_dci_18', 'obuf_hstl_iii', - 'obuf_hstl_iii_18', 
'obuf_hstl_iii_dci', - 'obuf_hstl_iii_dci_18', 'obuf_hstl_iv', - 'obuf_hstl_iv_18', 'obuf_hstl_iv_dci', - 'obuf_hstl_iv_dci_18', 'obuf_lvcmos12', - 'obuf_lvcmos12_f_2', 'obuf_lvcmos12_f_4', - 'obuf_lvcmos12_f_6', 'obuf_lvcmos12_f_8', - 'obuf_lvcmos12_s_2', 'obuf_lvcmos12_s_4', - 'obuf_lvcmos12_s_6', 'obuf_lvcmos12_s_8', - 'obuf_lvcmos15', 'obuf_lvcmos15_f_12', - 'obuf_lvcmos15_f_16', 'obuf_lvcmos15_f_2', - 'obuf_lvcmos15_f_4', 'obuf_lvcmos15_f_6', - 'obuf_lvcmos15_f_8', 'obuf_lvcmos15_s_12', - 'obuf_lvcmos15_s_16', 'obuf_lvcmos15_s_2', - 'obuf_lvcmos15_s_4', 'obuf_lvcmos15_s_6', - 'obuf_lvcmos15_s_8', 'obuf_lvcmos18', - 'obuf_lvcmos18_f_12', 'obuf_lvcmos18_f_16', - 'obuf_lvcmos18_f_2', 'obuf_lvcmos18_f_4', - 'obuf_lvcmos18_f_6', 'obuf_lvcmos18_f_8', - 'obuf_lvcmos18_s_12', 'obuf_lvcmos18_s_16', - 'obuf_lvcmos18_s_2', 'obuf_lvcmos18_s_4', - 'obuf_lvcmos18_s_6', 'obuf_lvcmos18_s_8', - 'obuf_lvcmos2', 'obuf_lvcmos25', 'obuf_lvcmos25_f_12', - 'obuf_lvcmos25_f_16', 'obuf_lvcmos25_f_2', - 'obuf_lvcmos25_f_24', 'obuf_lvcmos25_f_4', - 'obuf_lvcmos25_f_6', 'obuf_lvcmos25_f_8', - 'obuf_lvcmos25_s_12', 'obuf_lvcmos25_s_16', - 'obuf_lvcmos25_s_2', 'obuf_lvcmos25_s_24', - 'obuf_lvcmos25_s_4', 'obuf_lvcmos25_s_6', - 'obuf_lvcmos25_s_8', 'obuf_lvcmos33', - 'obuf_lvcmos33_f_12', 'obuf_lvcmos33_f_16', - 'obuf_lvcmos33_f_2', 'obuf_lvcmos33_f_24', - 'obuf_lvcmos33_f_4', 'obuf_lvcmos33_f_6', - 'obuf_lvcmos33_f_8', 'obuf_lvcmos33_s_12', - 'obuf_lvcmos33_s_16', 'obuf_lvcmos33_s_2', - 'obuf_lvcmos33_s_24', 'obuf_lvcmos33_s_4', - 'obuf_lvcmos33_s_6', 'obuf_lvcmos33_s_8', - 'obuf_lvdci_15', 'obuf_lvdci_18', 'obuf_lvdci_25', - 'obuf_lvdci_33', 'obuf_lvdci_dv2_15', - 'obuf_lvdci_dv2_18', 'obuf_lvdci_dv2_25', - 'obuf_lvdci_dv2_33', 'obuf_lvds', 'obuf_lvpecl', - 'obuf_lvttl', 'obuf_lvttl_f_12', 'obuf_lvttl_f_16', - 'obuf_lvttl_f_2', 'obuf_lvttl_f_24', 'obuf_lvttl_f_4', - 'obuf_lvttl_f_6', 'obuf_lvttl_f_8', 'obuf_lvttl_s_12', - 'obuf_lvttl_s_16', 'obuf_lvttl_s_2', 'obuf_lvttl_s_24', - 'obuf_lvttl_s_4', 'obuf_lvttl_s_6', 'obuf_lvttl_s_8', - 'obuf_pci33_3', 'obuf_pci33_5', 'obuf_pci66_3', - 'obuf_pcix', 'obuf_pcix66_3', 'obuf_s_12', 'obuf_s_16', - 'obuf_s_2', 'obuf_s_24', 'obuf_s_4', 'obuf_s_6', - 'obuf_s_8', 'obuf_sstl18_i', 'obuf_sstl18_i_dci', - 'obuf_sstl18_ii', 'obuf_sstl18_ii_dci', 'obuf_sstl2_i', - 'obuf_sstl2_i_dci', 'obuf_sstl2_ii', - 'obuf_sstl2_ii_dci', 'obuf_sstl3_i', 'obuf_sstl3_i_dci', - 'obuf_sstl3_ii', 'obuf_sstl3_ii_dci', 'obufds', - 'obufds_blvds_25', 'obufds_ldt_25', 'obufds_lvds_25', - 'obufds_lvds_33', 'obufds_lvdsext_25', - 'obufds_lvdsext_33', 'obufds_lvpecl_25', - 'obufds_lvpecl_33', 'obufds_ulvds_25', 'obufe', 'obuft', - 'obuft_agp', 'obuft_ctt', 'obuft_f_12', 'obuft_f_16', - 'obuft_f_2', 'obuft_f_24', 'obuft_f_4', 'obuft_f_6', - 'obuft_f_8', 'obuft_gtl', 'obuft_gtl_dci', 'obuft_gtlp', - 'obuft_gtlp_dci', 'obuft_hstl_i', 'obuft_hstl_i_18', - 'obuft_hstl_i_dci', 'obuft_hstl_i_dci_18', - 'obuft_hstl_ii', 'obuft_hstl_ii_18', - 'obuft_hstl_ii_dci', 'obuft_hstl_ii_dci_18', - 'obuft_hstl_iii', 'obuft_hstl_iii_18', - 'obuft_hstl_iii_dci', 'obuft_hstl_iii_dci_18', - 'obuft_hstl_iv', 'obuft_hstl_iv_18', - 'obuft_hstl_iv_dci', 'obuft_hstl_iv_dci_18', - 'obuft_lvcmos12', 'obuft_lvcmos12_f_2', - 'obuft_lvcmos12_f_4', 'obuft_lvcmos12_f_6', - 'obuft_lvcmos12_f_8', 'obuft_lvcmos12_s_2', - 'obuft_lvcmos12_s_4', 'obuft_lvcmos12_s_6', - 'obuft_lvcmos12_s_8', 'obuft_lvcmos15', - 'obuft_lvcmos15_f_12', 'obuft_lvcmos15_f_16', - 'obuft_lvcmos15_f_2', 'obuft_lvcmos15_f_4', - 'obuft_lvcmos15_f_6', 'obuft_lvcmos15_f_8', 
- 'obuft_lvcmos15_s_12', 'obuft_lvcmos15_s_16', - 'obuft_lvcmos15_s_2', 'obuft_lvcmos15_s_4', - 'obuft_lvcmos15_s_6', 'obuft_lvcmos15_s_8', - 'obuft_lvcmos18', 'obuft_lvcmos18_f_12', - 'obuft_lvcmos18_f_16', 'obuft_lvcmos18_f_2', - 'obuft_lvcmos18_f_4', 'obuft_lvcmos18_f_6', - 'obuft_lvcmos18_f_8', 'obuft_lvcmos18_s_12', - 'obuft_lvcmos18_s_16', 'obuft_lvcmos18_s_2', - 'obuft_lvcmos18_s_4', 'obuft_lvcmos18_s_6', - 'obuft_lvcmos18_s_8', 'obuft_lvcmos2', 'obuft_lvcmos25', - 'obuft_lvcmos25_f_12', 'obuft_lvcmos25_f_16', - 'obuft_lvcmos25_f_2', 'obuft_lvcmos25_f_24', - 'obuft_lvcmos25_f_4', 'obuft_lvcmos25_f_6', - 'obuft_lvcmos25_f_8', 'obuft_lvcmos25_s_12', - 'obuft_lvcmos25_s_16', 'obuft_lvcmos25_s_2', - 'obuft_lvcmos25_s_24', 'obuft_lvcmos25_s_4', - 'obuft_lvcmos25_s_6', 'obuft_lvcmos25_s_8', - 'obuft_lvcmos33', 'obuft_lvcmos33_f_12', - 'obuft_lvcmos33_f_16', 'obuft_lvcmos33_f_2', - 'obuft_lvcmos33_f_24', 'obuft_lvcmos33_f_4', - 'obuft_lvcmos33_f_6', 'obuft_lvcmos33_f_8', - 'obuft_lvcmos33_s_12', 'obuft_lvcmos33_s_16', - 'obuft_lvcmos33_s_2', 'obuft_lvcmos33_s_24', - 'obuft_lvcmos33_s_4', 'obuft_lvcmos33_s_6', - 'obuft_lvcmos33_s_8', 'obuft_lvdci_15', - 'obuft_lvdci_18', 'obuft_lvdci_25', 'obuft_lvdci_33', - 'obuft_lvdci_dv2_15', 'obuft_lvdci_dv2_18', - 'obuft_lvdci_dv2_25', 'obuft_lvdci_dv2_33', - 'obuft_lvds', 'obuft_lvpecl', 'obuft_lvttl', - 'obuft_lvttl_f_12', 'obuft_lvttl_f_16', - 'obuft_lvttl_f_2', 'obuft_lvttl_f_24', - 'obuft_lvttl_f_4', 'obuft_lvttl_f_6', 'obuft_lvttl_f_8', - 'obuft_lvttl_s_12', 'obuft_lvttl_s_16', - 'obuft_lvttl_s_2', 'obuft_lvttl_s_24', - 'obuft_lvttl_s_4', 'obuft_lvttl_s_6', 'obuft_lvttl_s_8', - 'obuft_pci33_3', 'obuft_pci33_5', 'obuft_pci66_3', - 'obuft_pcix', 'obuft_pcix66_3', 'obuft_s_12', - 'obuft_s_16', 'obuft_s_2', 'obuft_s_24', 'obuft_s_4', - 'obuft_s_6', 'obuft_s_8', 'obuft_sstl18_i', - 'obuft_sstl18_i_dci', 'obuft_sstl18_ii', - 'obuft_sstl18_ii_dci', 'obuft_sstl2_i', - 'obuft_sstl2_i_dci', 'obuft_sstl2_ii', - 'obuft_sstl2_ii_dci', 'obuft_sstl3_i', - 'obuft_sstl3_i_dci', 'obuft_sstl3_ii', - 'obuft_sstl3_ii_dci', 'obuftds', 'obuftds_blvds_25', - 'obuftds_ldt_25', 'obuftds_lvds_25', 'obuftds_lvds_33', - 'obuftds_lvdsext_25', 'obuftds_lvdsext_33', - 'obuftds_lvpecl_25', 'obuftds_lvpecl_33', - 'obuftds_ulvds_25', 'oddr', 'oddr2', 'ofddrcpe', - 'ofddrrse', 'ofddrtcpe', 'ofddrtrse', 'opt_off', - 'opt_uim', 'or2', 'or2b1', 'or2b2', 'or3', 'or3b1', - 'or3b2', 'or3b3', 'or4', 'or4b1', 'or4b2', 'or4b3', - 'or4b4', 'or5', 'or5b1', 'or5b2', 'or5b3', 'or5b4', - 'or5b5', 'or6', 'or7', 'or8', 'orcy', 'oserdes', - 'pcie_ep', 'pcie_internal_1_1', 'pll_adv', 'pll_base', - 'pmcd', 'ppc405', 'ppc405_adv', 'pulldown', 'pullup', - 'ram128x1d', 'ram128x1s', 'ram128x1s_1', 'ram16x1d', - 'ram16x1d_1', 'ram16x1s', 'ram16x1s_1', 'ram16x2s', - 'ram16x4s', 'ram16x8s', 'ram256x1s', 'ram32m', - 'ram32x1d', 'ram32x1d_1', 'ram32x1s', 'ram32x1s_1', - 'ram32x2s', 'ram32x4s', 'ram32x8s', 'ram64m', - 'ram64x1d', 'ram64x1d_1', 'ram64x1s', 'ram64x1s_1', - 'ram64x2s', 'ramb16', 'ramb16_s1', 'ramb16_s18', - 'ramb16_s18_s18', 'ramb16_s18_s36', 'ramb16_s1_s1', - 'ramb16_s1_s18', 'ramb16_s1_s2', 'ramb16_s1_s36', - 'ramb16_s1_s4', 'ramb16_s1_s9', 'ramb16_s2', - 'ramb16_s2_s18', 'ramb16_s2_s2', 'ramb16_s2_s36', - 'ramb16_s2_s4', 'ramb16_s2_s9', 'ramb16_s36', - 'ramb16_s36_s36', 'ramb16_s4', 'ramb16_s4_s18', - 'ramb16_s4_s36', 'ramb16_s4_s4', 'ramb16_s4_s9', - 'ramb16_s9', 'ramb16_s9_s18', 'ramb16_s9_s36', - 'ramb16_s9_s9', 'ramb16bwe', 'ramb16bwe_s18', - 'ramb16bwe_s18_s18', 'ramb16bwe_s18_s9', - 
'ramb16bwe_s36', 'ramb16bwe_s36_s18', - 'ramb16bwe_s36_s36', 'ramb16bwe_s36_s9', 'ramb16bwer', - 'ramb18', 'ramb18sdp', 'ramb32_s64_ecc', 'ramb36', - 'ramb36_exp', 'ramb36sdp', 'ramb36sdp_exp', 'ramb4_s1', - 'ramb4_s16', 'ramb4_s16_s16', 'ramb4_s1_s1', - 'ramb4_s1_s16', 'ramb4_s1_s2', 'ramb4_s1_s4', - 'ramb4_s1_s8', 'ramb4_s2', 'ramb4_s2_s16', - 'ramb4_s2_s2', 'ramb4_s2_s4', 'ramb4_s2_s8', 'ramb4_s4', - 'ramb4_s4_s16', 'ramb4_s4_s4', 'ramb4_s4_s8', - 'ramb4_s8', 'ramb4_s8_s16', 'ramb4_s8_s8', 'roc', - 'rocbuf', 'rom128x1', 'rom16x1', 'rom256x1', 'rom32x1', - 'rom64x1', 'sim_config_s3a', 'spi_access', 'srl16', - 'srl16_1', 'srl16e', 'srl16e_1', 'srlc16', 'srlc16_1', - 'srlc16e', 'srlc16e_1', 'srlc32e', 'startbuf_fpgacore', - 'startbuf_spartan2', 'startbuf_spartan3', - 'startbuf_virtex', 'startbuf_virtex2', - 'startbuf_virtex4', 'startup_fpgacore', - 'startup_spartan2', 'startup_spartan3', - 'startup_spartan3a', 'startup_spartan3e', - 'startup_virtex', 'startup_virtex2', 'startup_virtex4', - 'startup_virtex5', 'sysmon', 'tblock', 'temac', - 'timegrp', 'timespec', 'toc', 'tocbuf', - 'usr_access_virtex4', 'usr_access_virtex5', 'vcc', - 'wireand', 'xnor2', 'xnor3', 'xnor4', 'xnor5', 'xor2', - 'xor3', 'xor4', 'xor5', 'xorcy', 'xorcy_d', 'xorcy_l' ] ) +UNISIMS = frozenset( ['and2', 'and2b1', 'and2b1l', 'and2b2', 'and3', 'and3b1', + 'and3b2', 'and3b3', 'and4', 'and4b1', 'and4b2', 'and4b3', + 'and4b4', 'and5', 'and5b1', 'and5b2', 'and5b3', 'and5b4', + 'and5b5', 'and6', 'and7', 'and8', 'autobuf', + 'bscan_fpgacore', 'bscan_spartan3', 'bscan_spartan3a', + 'bscan_spartan6', 'bscan_virtex4', 'bscan_virtex5', + 'bscan_virtex6', 'buf', 'bufcf', 'bufe', 'buffoe', + 'bufg', 'bufgce', 'bufgce_1', 'bufgctrl', 'bufgdll', + 'bufgmux', 'bufgmux_1', 'bufgmux_ctrl', + 'bufgmux_virtex4', 'bufgp', 'bufgsr', 'bufgts', 'bufh', + 'bufhce', 'bufio', 'bufio2', 'bufio2fb', 'bufio2_2clk', + 'bufiodqs', 'bufpll', 'bufpll_mcb', 'bufr', 'buft', + 'capture_fpgacore', 'capture_spartan3', + 'capture_spartan3a', 'capture_virtex4', + 'capture_virtex5', 'capture_virtex6', 'carry4', + 'cfglut5', 'clkdll', 'clkdlle', 'clkdllhf', 'clk_div10', + 'clk_div10r', 'clk_div10rsd', 'clk_div10sd', 'clk_div12', + 'clk_div12r', 'clk_div12rsd', 'clk_div12sd', 'clk_div14', + 'clk_div14r', 'clk_div14rsd', 'clk_div14sd', 'clk_div16', + 'clk_div16r', 'clk_div16rsd', 'clk_div16sd', 'clk_div2', + 'clk_div2r', 'clk_div2rsd', 'clk_div2sd', 'clk_div4', + 'clk_div4r', 'clk_div4rsd', 'clk_div4sd', 'clk_div6', + 'clk_div6r', 'clk_div6rsd', 'clk_div6sd', 'clk_div8', + 'clk_div8r', 'clk_div8rsd', 'clk_div8sd', 'config', + 'crc32', 'crc64', 'dcireset', 'dcm', 'dcm_adv', + 'dcm_base', 'dcm_clkgen', 'dcm_ps', 'dcm_sp', 'dna_port', + 'dsp48', 'dsp48a', 'dsp48a1', 'dsp48e', 'dsp48e1', + 'efuse_usr', 'emac', 'fd', 'fdc', 'fdce', 'fdce_1', + 'fdcp', 'fdcpe', 'fdcpe_1', 'fdcp_1', 'fdc_1', 'fdd', + 'fddc', 'fddce', 'fddcp', 'fddcpe', 'fddp', 'fddpe', + 'fddrcpe', 'fddrrse', 'fde', 'fde_1', 'fdp', 'fdpe', + 'fdpe_1', 'fdp_1', 'fdr', 'fdre', 'fdre_1', 'fdrs', + 'fdrse', 'fdrse_1', 'fdrs_1', 'fdr_1', 'fds', 'fdse', + 'fdse_1', 'fds_1', 'fd_1', 'fifo16', 'fifo18', + 'fifo18e1', 'fifo18_36', 'fifo36', 'fifo36e1', + 'fifo36_72', 'fifo36_72_exp', 'fifo36_exp', 'fmap', + 'frame_ecc_virtex4', 'frame_ecc_virtex5', + 'frame_ecc_virtex6', 'ftc', 'ftcp', 'ftp', 'gnd', 'gt11', + 'gt11clk', 'gt11clk_mgt', 'gt11_custom', 'gt11_dual', + 'gthe1_quad', 'gtpa1_dual', 'gtp_dual', 'gtxe1', + 'gtx_dual', 'ibuf', 'ibufds', 'ibufds_blvds_25', + 'ibufds_diff_out', 'ibufds_dly_adj', 
'ibufds_gthe1', + 'ibufds_gtxe1', 'ibufds_ldt_25', 'ibufds_lvdsext_25', + 'ibufds_lvdsext_25_dci', 'ibufds_lvdsext_33', + 'ibufds_lvdsext_33_dci', 'ibufds_lvds_25', + 'ibufds_lvds_25_dci', 'ibufds_lvds_33', + 'ibufds_lvds_33_dci', 'ibufds_lvpecl_25', + 'ibufds_lvpecl_33', 'ibufds_ulvds_25', 'ibufg', + 'ibufgds', 'ibufgds_blvds_25', 'ibufgds_diff_out', + 'ibufgds_ldt_25', 'ibufgds_lvdsext_25', + 'ibufgds_lvdsext_25_dci', 'ibufgds_lvdsext_33', + 'ibufgds_lvdsext_33_dci', 'ibufgds_lvds_25', + 'ibufgds_lvds_25_dci', 'ibufgds_lvds_33', + 'ibufgds_lvds_33_dci', 'ibufgds_lvpecl_25', + 'ibufgds_lvpecl_33', 'ibufgds_ulvds_25', 'ibufg_agp', + 'ibufg_ctt', 'ibufg_gtl', 'ibufg_gtlp', 'ibufg_gtlp_dci', + 'ibufg_gtl_dci', 'ibufg_hstl_i', 'ibufg_hstl_ii', + 'ibufg_hstl_iii', 'ibufg_hstl_iii_18', + 'ibufg_hstl_iii_dci', 'ibufg_hstl_iii_dci_18', + 'ibufg_hstl_ii_18', 'ibufg_hstl_ii_dci', + 'ibufg_hstl_ii_dci_18', 'ibufg_hstl_iv', + 'ibufg_hstl_iv_18', 'ibufg_hstl_iv_dci', + 'ibufg_hstl_iv_dci_18', 'ibufg_hstl_i_18', + 'ibufg_hstl_i_dci', 'ibufg_hstl_i_dci_18', + 'ibufg_lvcmos12', 'ibufg_lvcmos15', 'ibufg_lvcmos18', + 'ibufg_lvcmos2', 'ibufg_lvcmos25', 'ibufg_lvcmos33', + 'ibufg_lvdci_15', 'ibufg_lvdci_18', 'ibufg_lvdci_25', + 'ibufg_lvdci_33', 'ibufg_lvdci_dv2_15', + 'ibufg_lvdci_dv2_18', 'ibufg_lvdci_dv2_25', + 'ibufg_lvdci_dv2_33', 'ibufg_lvds', 'ibufg_lvpecl', + 'ibufg_lvttl', 'ibufg_pci33_3', 'ibufg_pci33_5', + 'ibufg_pci66_3', 'ibufg_pcix', 'ibufg_pcix66_3', + 'ibufg_sstl18_i', 'ibufg_sstl18_ii', + 'ibufg_sstl18_ii_dci', 'ibufg_sstl18_i_dci', + 'ibufg_sstl2_i', 'ibufg_sstl2_ii', 'ibufg_sstl2_ii_dci', + 'ibufg_sstl2_i_dci', 'ibufg_sstl3_i', 'ibufg_sstl3_ii', + 'ibufg_sstl3_ii_dci', 'ibufg_sstl3_i_dci', 'ibuf_agp', + 'ibuf_ctt', 'ibuf_dly_adj', 'ibuf_gtl', 'ibuf_gtlp', + 'ibuf_gtlp_dci', 'ibuf_gtl_dci', 'ibuf_hstl_i', + 'ibuf_hstl_ii', 'ibuf_hstl_iii', 'ibuf_hstl_iii_18', + 'ibuf_hstl_iii_dci', 'ibuf_hstl_iii_dci_18', + 'ibuf_hstl_ii_18', 'ibuf_hstl_ii_dci', + 'ibuf_hstl_ii_dci_18', 'ibuf_hstl_iv', 'ibuf_hstl_iv_18', + 'ibuf_hstl_iv_dci', 'ibuf_hstl_iv_dci_18', + 'ibuf_hstl_i_18', 'ibuf_hstl_i_dci', + 'ibuf_hstl_i_dci_18', 'ibuf_lvcmos12', 'ibuf_lvcmos15', + 'ibuf_lvcmos18', 'ibuf_lvcmos2', 'ibuf_lvcmos25', + 'ibuf_lvcmos33', 'ibuf_lvdci_15', 'ibuf_lvdci_18', + 'ibuf_lvdci_25', 'ibuf_lvdci_33', 'ibuf_lvdci_dv2_15', + 'ibuf_lvdci_dv2_18', 'ibuf_lvdci_dv2_25', + 'ibuf_lvdci_dv2_33', 'ibuf_lvds', 'ibuf_lvpecl', + 'ibuf_lvttl', 'ibuf_pci33_3', 'ibuf_pci33_5', + 'ibuf_pci66_3', 'ibuf_pcix', 'ibuf_pcix66_3', + 'ibuf_sstl18_i', 'ibuf_sstl18_ii', 'ibuf_sstl18_ii_dci', + 'ibuf_sstl18_i_dci', 'ibuf_sstl2_i', 'ibuf_sstl2_ii', + 'ibuf_sstl2_ii_dci', 'ibuf_sstl2_i_dci', 'ibuf_sstl3_i', + 'ibuf_sstl3_ii', 'ibuf_sstl3_ii_dci', 'ibuf_sstl3_i_dci', + 'icap_spartan3a', 'icap_spartan6', 'icap_virtex4', + 'icap_virtex5', 'icap_virtex6', 'iddr', 'iddr2', + 'iddr_2clk', 'idelay', 'idelayctrl', 'ifddrcpe', + 'ifddrrse', 'ild', 'inv', 'iobuf', 'iobufds', + 'iobufds_blvds_25', 'iobufds_diff_out', 'iobufe', + 'iobuf_agp', 'iobuf_ctt', 'iobuf_f_12', 'iobuf_f_16', + 'iobuf_f_2', 'iobuf_f_24', 'iobuf_f_4', 'iobuf_f_6', + 'iobuf_f_8', 'iobuf_gtl', 'iobuf_gtlp', 'iobuf_gtlp_dci', + 'iobuf_gtl_dci', 'iobuf_hstl_i', 'iobuf_hstl_ii', + 'iobuf_hstl_iii', 'iobuf_hstl_iii_18', + 'iobuf_hstl_ii_18', 'iobuf_hstl_ii_dci', + 'iobuf_hstl_ii_dci_18', 'iobuf_hstl_iv', + 'iobuf_hstl_iv_18', 'iobuf_hstl_iv_dci', + 'iobuf_hstl_iv_dci_18', 'iobuf_hstl_i_18', + 'iobuf_lvcmos12', 'iobuf_lvcmos12_f_2', + 'iobuf_lvcmos12_f_4', 'iobuf_lvcmos12_f_6', + 
'iobuf_lvcmos12_f_8', 'iobuf_lvcmos12_s_2', + 'iobuf_lvcmos12_s_4', 'iobuf_lvcmos12_s_6', + 'iobuf_lvcmos12_s_8', 'iobuf_lvcmos15', + 'iobuf_lvcmos15_f_12', 'iobuf_lvcmos15_f_16', + 'iobuf_lvcmos15_f_2', 'iobuf_lvcmos15_f_4', + 'iobuf_lvcmos15_f_6', 'iobuf_lvcmos15_f_8', + 'iobuf_lvcmos15_s_12', 'iobuf_lvcmos15_s_16', + 'iobuf_lvcmos15_s_2', 'iobuf_lvcmos15_s_4', + 'iobuf_lvcmos15_s_6', 'iobuf_lvcmos15_s_8', + 'iobuf_lvcmos18', 'iobuf_lvcmos18_f_12', + 'iobuf_lvcmos18_f_16', 'iobuf_lvcmos18_f_2', + 'iobuf_lvcmos18_f_4', 'iobuf_lvcmos18_f_6', + 'iobuf_lvcmos18_f_8', 'iobuf_lvcmos18_s_12', + 'iobuf_lvcmos18_s_16', 'iobuf_lvcmos18_s_2', + 'iobuf_lvcmos18_s_4', 'iobuf_lvcmos18_s_6', + 'iobuf_lvcmos18_s_8', 'iobuf_lvcmos2', 'iobuf_lvcmos25', + 'iobuf_lvcmos25_f_12', 'iobuf_lvcmos25_f_16', + 'iobuf_lvcmos25_f_2', 'iobuf_lvcmos25_f_24', + 'iobuf_lvcmos25_f_4', 'iobuf_lvcmos25_f_6', + 'iobuf_lvcmos25_f_8', 'iobuf_lvcmos25_s_12', + 'iobuf_lvcmos25_s_16', 'iobuf_lvcmos25_s_2', + 'iobuf_lvcmos25_s_24', 'iobuf_lvcmos25_s_4', + 'iobuf_lvcmos25_s_6', 'iobuf_lvcmos25_s_8', + 'iobuf_lvcmos33', 'iobuf_lvcmos33_f_12', + 'iobuf_lvcmos33_f_16', 'iobuf_lvcmos33_f_2', + 'iobuf_lvcmos33_f_24', 'iobuf_lvcmos33_f_4', + 'iobuf_lvcmos33_f_6', 'iobuf_lvcmos33_f_8', + 'iobuf_lvcmos33_s_12', 'iobuf_lvcmos33_s_16', + 'iobuf_lvcmos33_s_2', 'iobuf_lvcmos33_s_24', + 'iobuf_lvcmos33_s_4', 'iobuf_lvcmos33_s_6', + 'iobuf_lvcmos33_s_8', 'iobuf_lvdci_15', 'iobuf_lvdci_18', + 'iobuf_lvdci_25', 'iobuf_lvdci_33', 'iobuf_lvdci_dv2_15', + 'iobuf_lvdci_dv2_18', 'iobuf_lvdci_dv2_25', + 'iobuf_lvdci_dv2_33', 'iobuf_lvds', 'iobuf_lvpecl', + 'iobuf_lvttl', 'iobuf_lvttl_f_12', 'iobuf_lvttl_f_16', + 'iobuf_lvttl_f_2', 'iobuf_lvttl_f_24', 'iobuf_lvttl_f_4', + 'iobuf_lvttl_f_6', 'iobuf_lvttl_f_8', 'iobuf_lvttl_s_12', + 'iobuf_lvttl_s_16', 'iobuf_lvttl_s_2', + 'iobuf_lvttl_s_24', 'iobuf_lvttl_s_4', 'iobuf_lvttl_s_6', + 'iobuf_lvttl_s_8', 'iobuf_pci33_3', 'iobuf_pci33_5', + 'iobuf_pci66_3', 'iobuf_pcix', 'iobuf_pcix66_3', + 'iobuf_sstl18_i', 'iobuf_sstl18_ii', + 'iobuf_sstl18_ii_dci', 'iobuf_sstl2_i', 'iobuf_sstl2_ii', + 'iobuf_sstl2_ii_dci', 'iobuf_sstl3_i', 'iobuf_sstl3_ii', + 'iobuf_sstl3_ii_dci', 'iobuf_s_12', 'iobuf_s_16', + 'iobuf_s_2', 'iobuf_s_24', 'iobuf_s_4', 'iobuf_s_6', + 'iobuf_s_8', 'iodelay', 'iodelay2', 'iodelaye1', + 'iodrp2', 'iodrp2_mcb', 'iserdes', 'iserdes2', + 'iserdese1', 'iserdes_nodelay', 'jtagppc', 'jtagppc440', + 'jtag_sim_spartan3a', 'jtag_sim_spartan6', + 'jtag_sim_virtex4', 'jtag_sim_virtex5', + 'jtag_sim_virtex6', 'keep', 'keeper', 'key_clear', 'ld', + 'ldc', 'ldce', 'ldce_1', 'ldcp', 'ldcpe', 'ldcpe_1', + 'ldcp_1', 'ldc_1', 'lde', 'lde_1', 'ldg', 'ldp', 'ldpe', + 'ldpe_1', 'ldp_1', 'ld_1', 'lut1', 'lut1_d', 'lut1_l', + 'lut2', 'lut2_d', 'lut2_l', 'lut3', 'lut3_d', 'lut3_l', + 'lut4', 'lut4_d', 'lut4_l', 'lut5', 'lut5_d', 'lut5_l', + 'lut6', 'lut6_2', 'lut6_d', 'lut6_l', 'mcb', 'merge', + 'min_off', 'mmcm_adv', 'mmcm_base', 'mult18x18', + 'mult18x18s', 'mult18x18sio', 'mult_and', 'muxcy', + 'muxcy_d', 'muxcy_l', 'muxf5', 'muxf5_d', 'muxf5_l', + 'muxf6', 'muxf6_d', 'muxf6_l', 'muxf7', 'muxf7_d', + 'muxf7_l', 'muxf8', 'muxf8_d', 'muxf8_l', 'nand2', + 'nand2b1', 'nand2b2', 'nand3', 'nand3b1', 'nand3b2', + 'nand3b3', 'nand4', 'nand4b1', 'nand4b2', 'nand4b3', + 'nand4b4', 'nand5', 'nand5b1', 'nand5b2', 'nand5b3', + 'nand5b4', 'nand5b5', 'nor2', 'nor2b1', 'nor2b2', 'nor3', + 'nor3b1', 'nor3b2', 'nor3b3', 'nor4', 'nor4b1', 'nor4b2', + 'nor4b3', 'nor4b4', 'nor5', 'nor5b1', 'nor5b2', 'nor5b3', + 'nor5b4', 'nor5b5', 
'obuf', 'obufds', 'obufds_blvds_25', + 'obufds_ldt_25', 'obufds_lvdsext_25', + 'obufds_lvdsext_33', 'obufds_lvds_25', 'obufds_lvds_33', + 'obufds_lvpecl_25', 'obufds_lvpecl_33', + 'obufds_ulvds_25', 'obufe', 'obuft', 'obuftds', + 'obuftds_blvds_25', 'obuftds_ldt_25', + 'obuftds_lvdsext_25', 'obuftds_lvdsext_33', + 'obuftds_lvds_25', 'obuftds_lvds_33', + 'obuftds_lvpecl_25', 'obuftds_lvpecl_33', + 'obuftds_ulvds_25', 'obuft_agp', 'obuft_ctt', + 'obuft_f_12', 'obuft_f_16', 'obuft_f_2', 'obuft_f_24', + 'obuft_f_4', 'obuft_f_6', 'obuft_f_8', 'obuft_gtl', + 'obuft_gtlp', 'obuft_gtlp_dci', 'obuft_gtl_dci', + 'obuft_hstl_i', 'obuft_hstl_ii', 'obuft_hstl_iii', + 'obuft_hstl_iii_18', 'obuft_hstl_iii_dci', + 'obuft_hstl_iii_dci_18', 'obuft_hstl_ii_18', + 'obuft_hstl_ii_dci', 'obuft_hstl_ii_dci_18', + 'obuft_hstl_iv', 'obuft_hstl_iv_18', 'obuft_hstl_iv_dci', + 'obuft_hstl_iv_dci_18', 'obuft_hstl_i_18', + 'obuft_hstl_i_dci', 'obuft_hstl_i_dci_18', + 'obuft_lvcmos12', 'obuft_lvcmos12_f_2', + 'obuft_lvcmos12_f_4', 'obuft_lvcmos12_f_6', + 'obuft_lvcmos12_f_8', 'obuft_lvcmos12_s_2', + 'obuft_lvcmos12_s_4', 'obuft_lvcmos12_s_6', + 'obuft_lvcmos12_s_8', 'obuft_lvcmos15', + 'obuft_lvcmos15_f_12', 'obuft_lvcmos15_f_16', + 'obuft_lvcmos15_f_2', 'obuft_lvcmos15_f_4', + 'obuft_lvcmos15_f_6', 'obuft_lvcmos15_f_8', + 'obuft_lvcmos15_s_12', 'obuft_lvcmos15_s_16', + 'obuft_lvcmos15_s_2', 'obuft_lvcmos15_s_4', + 'obuft_lvcmos15_s_6', 'obuft_lvcmos15_s_8', + 'obuft_lvcmos18', 'obuft_lvcmos18_f_12', + 'obuft_lvcmos18_f_16', 'obuft_lvcmos18_f_2', + 'obuft_lvcmos18_f_4', 'obuft_lvcmos18_f_6', + 'obuft_lvcmos18_f_8', 'obuft_lvcmos18_s_12', + 'obuft_lvcmos18_s_16', 'obuft_lvcmos18_s_2', + 'obuft_lvcmos18_s_4', 'obuft_lvcmos18_s_6', + 'obuft_lvcmos18_s_8', 'obuft_lvcmos2', 'obuft_lvcmos25', + 'obuft_lvcmos25_f_12', 'obuft_lvcmos25_f_16', + 'obuft_lvcmos25_f_2', 'obuft_lvcmos25_f_24', + 'obuft_lvcmos25_f_4', 'obuft_lvcmos25_f_6', + 'obuft_lvcmos25_f_8', 'obuft_lvcmos25_s_12', + 'obuft_lvcmos25_s_16', 'obuft_lvcmos25_s_2', + 'obuft_lvcmos25_s_24', 'obuft_lvcmos25_s_4', + 'obuft_lvcmos25_s_6', 'obuft_lvcmos25_s_8', + 'obuft_lvcmos33', 'obuft_lvcmos33_f_12', + 'obuft_lvcmos33_f_16', 'obuft_lvcmos33_f_2', + 'obuft_lvcmos33_f_24', 'obuft_lvcmos33_f_4', + 'obuft_lvcmos33_f_6', 'obuft_lvcmos33_f_8', + 'obuft_lvcmos33_s_12', 'obuft_lvcmos33_s_16', + 'obuft_lvcmos33_s_2', 'obuft_lvcmos33_s_24', + 'obuft_lvcmos33_s_4', 'obuft_lvcmos33_s_6', + 'obuft_lvcmos33_s_8', 'obuft_lvdci_15', 'obuft_lvdci_18', + 'obuft_lvdci_25', 'obuft_lvdci_33', 'obuft_lvdci_dv2_15', + 'obuft_lvdci_dv2_18', 'obuft_lvdci_dv2_25', + 'obuft_lvdci_dv2_33', 'obuft_lvds', 'obuft_lvpecl', + 'obuft_lvttl', 'obuft_lvttl_f_12', 'obuft_lvttl_f_16', + 'obuft_lvttl_f_2', 'obuft_lvttl_f_24', 'obuft_lvttl_f_4', + 'obuft_lvttl_f_6', 'obuft_lvttl_f_8', 'obuft_lvttl_s_12', + 'obuft_lvttl_s_16', 'obuft_lvttl_s_2', + 'obuft_lvttl_s_24', 'obuft_lvttl_s_4', 'obuft_lvttl_s_6', + 'obuft_lvttl_s_8', 'obuft_pci33_3', 'obuft_pci33_5', + 'obuft_pci66_3', 'obuft_pcix', 'obuft_pcix66_3', + 'obuft_sstl18_i', 'obuft_sstl18_ii', + 'obuft_sstl18_ii_dci', 'obuft_sstl18_i_dci', + 'obuft_sstl2_i', 'obuft_sstl2_ii', 'obuft_sstl2_ii_dci', + 'obuft_sstl2_i_dci', 'obuft_sstl3_i', 'obuft_sstl3_ii', + 'obuft_sstl3_ii_dci', 'obuft_sstl3_i_dci', 'obuft_s_12', + 'obuft_s_16', 'obuft_s_2', 'obuft_s_24', 'obuft_s_4', + 'obuft_s_6', 'obuft_s_8', 'obuf_agp', 'obuf_ctt', + 'obuf_f_12', 'obuf_f_16', 'obuf_f_2', 'obuf_f_24', + 'obuf_f_4', 'obuf_f_6', 'obuf_f_8', 'obuf_gtl', + 'obuf_gtlp', 'obuf_gtlp_dci', 
'obuf_gtl_dci', + 'obuf_hstl_i', 'obuf_hstl_ii', 'obuf_hstl_iii', + 'obuf_hstl_iii_18', 'obuf_hstl_iii_dci', + 'obuf_hstl_iii_dci_18', 'obuf_hstl_ii_18', + 'obuf_hstl_ii_dci', 'obuf_hstl_ii_dci_18', + 'obuf_hstl_iv', 'obuf_hstl_iv_18', 'obuf_hstl_iv_dci', + 'obuf_hstl_iv_dci_18', 'obuf_hstl_i_18', + 'obuf_hstl_i_dci', 'obuf_hstl_i_dci_18', 'obuf_lvcmos12', + 'obuf_lvcmos12_f_2', 'obuf_lvcmos12_f_4', + 'obuf_lvcmos12_f_6', 'obuf_lvcmos12_f_8', + 'obuf_lvcmos12_s_2', 'obuf_lvcmos12_s_4', + 'obuf_lvcmos12_s_6', 'obuf_lvcmos12_s_8', + 'obuf_lvcmos15', 'obuf_lvcmos15_f_12', + 'obuf_lvcmos15_f_16', 'obuf_lvcmos15_f_2', + 'obuf_lvcmos15_f_4', 'obuf_lvcmos15_f_6', + 'obuf_lvcmos15_f_8', 'obuf_lvcmos15_s_12', + 'obuf_lvcmos15_s_16', 'obuf_lvcmos15_s_2', + 'obuf_lvcmos15_s_4', 'obuf_lvcmos15_s_6', + 'obuf_lvcmos15_s_8', 'obuf_lvcmos18', + 'obuf_lvcmos18_f_12', 'obuf_lvcmos18_f_16', + 'obuf_lvcmos18_f_2', 'obuf_lvcmos18_f_4', + 'obuf_lvcmos18_f_6', 'obuf_lvcmos18_f_8', + 'obuf_lvcmos18_s_12', 'obuf_lvcmos18_s_16', + 'obuf_lvcmos18_s_2', 'obuf_lvcmos18_s_4', + 'obuf_lvcmos18_s_6', 'obuf_lvcmos18_s_8', 'obuf_lvcmos2', + 'obuf_lvcmos25', 'obuf_lvcmos25_f_12', + 'obuf_lvcmos25_f_16', 'obuf_lvcmos25_f_2', + 'obuf_lvcmos25_f_24', 'obuf_lvcmos25_f_4', + 'obuf_lvcmos25_f_6', 'obuf_lvcmos25_f_8', + 'obuf_lvcmos25_s_12', 'obuf_lvcmos25_s_16', + 'obuf_lvcmos25_s_2', 'obuf_lvcmos25_s_24', + 'obuf_lvcmos25_s_4', 'obuf_lvcmos25_s_6', + 'obuf_lvcmos25_s_8', 'obuf_lvcmos33', + 'obuf_lvcmos33_f_12', 'obuf_lvcmos33_f_16', + 'obuf_lvcmos33_f_2', 'obuf_lvcmos33_f_24', + 'obuf_lvcmos33_f_4', 'obuf_lvcmos33_f_6', + 'obuf_lvcmos33_f_8', 'obuf_lvcmos33_s_12', + 'obuf_lvcmos33_s_16', 'obuf_lvcmos33_s_2', + 'obuf_lvcmos33_s_24', 'obuf_lvcmos33_s_4', + 'obuf_lvcmos33_s_6', 'obuf_lvcmos33_s_8', + 'obuf_lvdci_15', 'obuf_lvdci_18', 'obuf_lvdci_25', + 'obuf_lvdci_33', 'obuf_lvdci_dv2_15', + 'obuf_lvdci_dv2_18', 'obuf_lvdci_dv2_25', + 'obuf_lvdci_dv2_33', 'obuf_lvds', 'obuf_lvpecl', + 'obuf_lvttl', 'obuf_lvttl_f_12', 'obuf_lvttl_f_16', + 'obuf_lvttl_f_2', 'obuf_lvttl_f_24', 'obuf_lvttl_f_4', + 'obuf_lvttl_f_6', 'obuf_lvttl_f_8', 'obuf_lvttl_s_12', + 'obuf_lvttl_s_16', 'obuf_lvttl_s_2', 'obuf_lvttl_s_24', + 'obuf_lvttl_s_4', 'obuf_lvttl_s_6', 'obuf_lvttl_s_8', + 'obuf_pci33_3', 'obuf_pci33_5', 'obuf_pci66_3', + 'obuf_pcix', 'obuf_pcix66_3', 'obuf_sstl18_i', + 'obuf_sstl18_ii', 'obuf_sstl18_ii_dci', + 'obuf_sstl18_i_dci', 'obuf_sstl2_i', 'obuf_sstl2_ii', + 'obuf_sstl2_ii_dci', 'obuf_sstl2_i_dci', 'obuf_sstl3_i', + 'obuf_sstl3_ii', 'obuf_sstl3_ii_dci', 'obuf_sstl3_i_dci', + 'obuf_s_12', 'obuf_s_16', 'obuf_s_2', 'obuf_s_24', + 'obuf_s_4', 'obuf_s_6', 'obuf_s_8', 'oddr', 'oddr2', + 'ofddrcpe', 'ofddrrse', 'ofddrtcpe', 'ofddrtrse', + 'opt_off', 'opt_uim', 'or2', 'or2b1', 'or2b2', 'or2l', + 'or3', 'or3b1', 'or3b2', 'or3b3', 'or4', 'or4b1', + 'or4b2', 'or4b3', 'or4b4', 'or5', 'or5b1', 'or5b2', + 'or5b3', 'or5b4', 'or5b5', 'or6', 'or7', 'or8', 'orcy', + 'oserdes', 'oserdes2', 'oserdese1', 'pcie_2_0', + 'pcie_a1', 'pcie_ep', 'pcie_internal_1_1', 'pll_adv', + 'pll_base', 'pmcd', 'post_crc_internal', 'ppc405_adv', + 'ppc440', 'pulldown', 'pullup', 'ram128x1d', 'ram128x1s', + 'ram128x1s_1', 'ram16x1d', 'ram16x1d_1', 'ram16x1s', + 'ram16x1s_1', 'ram16x2s', 'ram16x4s', 'ram16x8s', + 'ram256x1s', 'ram32m', 'ram32x1d', 'ram32x1d_1', + 'ram32x1s', 'ram32x1s_1', 'ram32x2s', 'ram32x4s', + 'ram32x8s', 'ram64m', 'ram64x1d', 'ram64x1d_1', + 'ram64x1s', 'ram64x1s_1', 'ram64x2s', 'ramb16', + 'ramb16bwe', 'ramb16bwer', 'ramb16bwe_s18', + 
'ramb16bwe_s18_s18', 'ramb16bwe_s18_s9', 'ramb16bwe_s36', + 'ramb16bwe_s36_s18', 'ramb16bwe_s36_s36', + 'ramb16bwe_s36_s9', 'ramb16_s1', 'ramb16_s18', + 'ramb16_s18_s18', 'ramb16_s18_s36', 'ramb16_s1_s1', + 'ramb16_s1_s18', 'ramb16_s1_s2', 'ramb16_s1_s36', + 'ramb16_s1_s4', 'ramb16_s1_s9', 'ramb16_s2', + 'ramb16_s2_s18', 'ramb16_s2_s2', 'ramb16_s2_s36', + 'ramb16_s2_s4', 'ramb16_s2_s9', 'ramb16_s36', + 'ramb16_s36_s36', 'ramb16_s4', 'ramb16_s4_s18', + 'ramb16_s4_s36', 'ramb16_s4_s4', 'ramb16_s4_s9', + 'ramb16_s9', 'ramb16_s9_s18', 'ramb16_s9_s36', + 'ramb16_s9_s9', 'ramb18', 'ramb18e1', 'ramb18sdp', + 'ramb32_s64_ecc', 'ramb36', 'ramb36e1', 'ramb36sdp', + 'ramb36sdp_exp', 'ramb36_exp', 'ramb4_s1', 'ramb4_s16', + 'ramb4_s16_s16', 'ramb4_s1_s1', 'ramb4_s1_s16', + 'ramb4_s1_s2', 'ramb4_s1_s4', 'ramb4_s1_s8', 'ramb4_s2', + 'ramb4_s2_s16', 'ramb4_s2_s2', 'ramb4_s2_s4', + 'ramb4_s2_s8', 'ramb4_s4', 'ramb4_s4_s16', 'ramb4_s4_s4', + 'ramb4_s4_s8', 'ramb4_s8', 'ramb4_s8_s16', 'ramb4_s8_s8', + 'ramb8bwer', 'roc', 'rocbuf', 'rom128x1', 'rom16x1', + 'rom256x1', 'rom32x1', 'rom64x1', 'sim_config_s3a', + 'sim_config_s3a_serial', 'sim_config_s6', + 'sim_config_s6_serial', 'sim_config_v5', + 'sim_config_v5_serial', 'sim_config_v6_serial', + 'spi_access', 'srl16', 'srl16e', 'srl16e_1', 'srl16_1', + 'srlc16', 'srlc16e', 'srlc16e_1', 'srlc16_1', 'srlc32e', + 'startbuf_fpgacore', 'startbuf_spartan2', + 'startbuf_spartan3', 'startbuf_virtex', + 'startbuf_virtex2', 'startbuf_virtex4', + 'startup_fpgacore', 'startup_spartan3', + 'startup_spartan3a', 'startup_spartan3e', + 'startup_spartan6', 'startup_virtex4', 'startup_virtex5', + 'startup_virtex6', 'suspend_sync', 'sysmon', 'tblock', + 'temac', 'temac_single', 'timegrp', 'timespec', 'toc', + 'tocbuf', 'usr_access_virtex4', 'usr_access_virtex5', + 'usr_access_virtex6', 'vcc', 'wireand', 'xnor2', 'xnor3', + 'xnor4', 'xnor5', 'xor2', 'xor3', 'xor4', 'xor5', + 'xorcy', 'xorcy_d', 'xorcy_l', 'bscntrl_iserdese1_vhd', + 'ice_iserdese1_vhd', 'plg_oserdese1_vhd', + 'selfheal_oserdese1_vhd', + ] ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
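Revision 100 below shows how this set is consumed: the Verilog parser calls comp_filter.accept(ent) before recording an instantiated entity as a dependency, so Xilinx unisim primitives never end up in the dependency list. The accept() implementation itself is not part of this diff, only its call sites are; a minimal sketch consistent with that usage might look like the following.

```python
# Hypothetical sketch -- the real comp_filter.accept() is not shown in this
# commit; only its use from parse_verilog.py is visible.
def accept(entity):
    """Return True if `entity` should be tracked as a dependency,
    i.e. it is a user design unit rather than a Xilinx unisim primitive."""
    return entity.lower() not in UNISIMS
```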
From: <dav...@us...> - 2010-05-28 15:04:04
Revision: 100 http://fbt.svn.sourceforge.net/fbt/?rev=100&view=rev Author: dave_infj Date: 2010-05-28 15:03:58 +0000 (Fri, 28 May 2010) Log Message: ----------- first attempt at supporting `includes Modified Paths: -------------- trunk/bin/parse_verilog.py Modified: trunk/bin/parse_verilog.py =================================================================== --- trunk/bin/parse_verilog.py 2010-05-26 15:09:21 UTC (rev 99) +++ trunk/bin/parse_verilog.py 2010-05-28 15:03:58 UTC (rev 100) @@ -33,10 +33,11 @@ import pyparsing from pyparsing import Literal, CaselessKeyword, Word, ZeroOrMore, OneOrMore, Combine, \ Group, Optional, Forward, ParseException, alphas, alphanums, printables, \ - Regex, cppStyleComment, oneOf, nestedExpr, lineno, col, line + Regex, cppStyleComment, oneOf, nestedExpr, quotedString, lineno, col, line import DepList import comp_filter +import os debug = False @@ -45,22 +46,31 @@ Determine dependencies for a given file (verilog mode) """ - global _src + global main_src + global this_src global dep_list + global current_module + global includes - _src = hdl_src # for handle_syntax_err dep_list = DepList.DepList() + current_module = None + main_src = hdl_src + includes = [hdl_src] - # Parse the file - try: - results = parser.parseFile( hdl_src, parseAll = True ) - except pyparsing.ParseException, e: - exit("""\ + # Parse source(s) + for this_src in includes: + if this_src != hdl_src: + print '\t`include %s' % this_src + + try: + results = parser.parseFile( this_src, parseAll = True ) + except pyparsing.ParseException, e: + exit("""\ %%s:%d: error: Parse exception: %s %s -%s^""" % (relpath(hdl_src), +%s^""" % (relpath(this_src), e.lineno, e, e.line, @@ -93,19 +103,30 @@ print '\n%s,' % (' '*indent ), +def handle_include( s, loc, toks ): + """ + Handle include directives + """ + + include = toks[-1].strip( '"'+"'" ) + includes.append( os.path.join( os.path.dirname( this_src ), include ) ) + + def handle_module( s, loc, toks ): """ Handle module declarations """ global current_module + if this_src != main_src: + exit('%s: module declarations in includes not supported' % this_src) current_module = toks[1] if current_module in dep_list: print "%s: warning: duplicate entity %s declaration found in %s: previous in:\n\t%s" % ( - prog_name(), current_module, relpath(_src), + prog_name(), current_module, relpath(main_src), '\n\t'.join( relpath(dep[0]) for dep in dep_list[current_module] ) ) - dep_list.add_dep( current_module, _src, [] ) + dep_list.add_dep( current_module, main_src, [] ) def handle_inst( s, loc, toks ): @@ -114,8 +135,8 @@ """ ent, inst = toks[0], toks[-3] - if comp_filter.accept(ent): - dep_list.add_dep( current_module, _src, [ent] ) + if current_module is not None and comp_filter.accept(ent): + dep_list.add_dep( current_module, main_src, [ent] ) def handle_syntax_err( s, loc, toks ): @@ -123,8 +144,6 @@ Handle syntax errors """ - global _src - exit("""\ %s:%d: error: unexpected syntax: @@ -137,7 +156,7 @@ If you believe this is genuinely valid Verilog, see the documentation under Verilog support. 
-""" % ( relpath(_src), +""" % ( relpath(this_src), lineno( loc, s ), line( loc, s ), ' '*(col( loc, s )-1) @@ -165,7 +184,7 @@ ) directive = Group( Combine( "`" + \ oneOf("define undef ifdef ifndef else endif default_nettype " - "include resetall timescale unconnected_drive " + "resetall timescale unconnected_drive " "nounconnected_drive celldefine endcelldefine") + \ restOfLineWithCont ) ) @@ -198,6 +217,7 @@ func_kw = CaselessKeyword('function') generate_kw = CaselessKeyword('generate') if_kw = CaselessKeyword('if') + include_dr = Combine( '`' + CaselessKeyword( 'include' ) ) initial_kw = CaselessKeyword('initial') join_kw = CaselessKeyword('join') module_kw = CaselessKeyword('module' ) | CaselessKeyword('primitive') @@ -230,6 +250,7 @@ # Statements, block statements and compound statements statement = Forward() + include = t( (include_dr + quotedString).setParseAction( handle_include ), 'include' ) process = t( always_kw|initial_kw, 'proc' ) + statement evt_ctrl = t( (Literal('@') | Literal('#')), 'evt_ctrl' ) + (paren_group | word) + statement ifcond = (t( if_kw, 'if' ) + @@ -261,7 +282,7 @@ simple_stmt = t( OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';'), 'stmt' ) statement << Group( ~endmodule_kw + - (process | evt_ctrl | function | ifcond | case | loop | + (include | process | evt_ctrl | function | ifcond | case | loop | generate | block | fork | instance | simple_stmt ) ) # @@ -277,15 +298,11 @@ ).setParseAction( handle_module ), 'mod_hdr' ) mod_body = t( ZeroOrMore( statement ), 'mod_bdy' ) # Module body mod_footer = t( endmodule_kw, 'mod_end' ) # End module keyword + module = Group( mod_header + mod_body + mod_footer ) syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err ) - module = Group( (mod_header | syntax_err) + - mod_body + - (mod_footer | syntax_err) - ) + v = ZeroOrMore( include | module | statement | syntax_err ) - v = ZeroOrMore( module ) - # No comments, no compiler directives (which can appear anywhere in the input) v.ignore( cppStyleComment ) v.ignore( directive ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dav...@us...> - 2010-05-26 15:09:27
Revision: 99 http://fbt.svn.sourceforge.net/fbt/?rev=99&view=rev Author: dave_infj Date: 2010-05-26 15:09:21 +0000 (Wed, 26 May 2010) Log Message: ----------- Add fork/join support Add initial support Separate @() support from 'always', as is more accurate for the standard. Add delay support (#) Move Optional() from 'generics' to where generics is actually used - for the sake of consistency Make module formals optional Modified Paths: -------------- trunk/bin/parse_verilog.py Modified: trunk/bin/parse_verilog.py =================================================================== --- trunk/bin/parse_verilog.py 2010-05-24 17:04:05 UTC (rev 98) +++ trunk/bin/parse_verilog.py 2010-05-26 15:09:21 UTC (rev 99) @@ -194,9 +194,12 @@ endgen_kw = CaselessKeyword('endgenerate') endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive') for_kw = CaselessKeyword('for') + fork_kw = CaselessKeyword('fork') func_kw = CaselessKeyword('function') generate_kw = CaselessKeyword('generate') if_kw = CaselessKeyword('if') + initial_kw = CaselessKeyword('initial') + join_kw = CaselessKeyword('join') module_kw = CaselessKeyword('module' ) | CaselessKeyword('primitive') repeat_kw = CaselessKeyword('repeat') wait_kw = CaselessKeyword('wait') @@ -211,14 +214,14 @@ ) # Module instances - generics = Optional( Literal('#') + paren_group ) - instance = t( Group( ( t(ident, 'ent_name') + # Entity name - t(generics, 'inst_gens') + # Optional generics - t(ident, 'inst_name') + # Instance name - t(paren_group, 'ports') + # Ports - t(Literal( ';' ), ';') # terminal ; + generics = Literal('#') + paren_group + instance = t( Group( ( t(ident, 'ent_name') + # Entity name + Optional( t(generics, 'inst_gens') ) + # Optional generics + t(ident, 'inst_name') + # Instance name + t(paren_group, 'ports') + # Ports + t(Literal( ';' ), ';') # terminal ; ).setParseAction( handle_inst ) - ), 'inst' ) + ), 'inst' ) # # Complex contexts @@ -227,10 +230,11 @@ # Statements, block statements and compound statements statement = Forward() - process = t( always_kw, 'proc' ) + Literal('@') + (paren_group | word) + statement - ifcond = (t( if_kw, 'if' ) + + process = t( always_kw|initial_kw, 'proc' ) + statement + evt_ctrl = t( (Literal('@') | Literal('#')), 'evt_ctrl' ) + (paren_group | word) + statement + ifcond = (t( if_kw, 'if' ) + t( paren_group, 'cond' ) + statement + Optional( - t( else_kw, 'else' ) + statement ) + t( else_kw, 'else' ) + statement ) ) case = t( nestedExpr( case_kw + paren_group, endcase_kw, content=OneOrMore( ~case_kw + ~endcase_kw + word ) @@ -250,29 +254,33 @@ t( end_kw, 'end' ), content=OneOrMore( ~begin_kw + ~end_kw + statement ) ) - simple_stmt = t( OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';') - , 'stmt' ) + fork = nestedExpr( t( fork_kw, 'fork' ), + t( join_kw, 'join' ), + content=OneOrMore( ~fork_kw + ~join_kw + statement ) + ) + simple_stmt = t( OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';'), + 'stmt' ) statement << Group( ~endmodule_kw + - (process | function | ifcond | case | loop | - generate | block | instance | simple_stmt ) ) + (process | evt_ctrl | function | ifcond | case | loop | + generate | block | fork | instance | simple_stmt ) ) # # Module definition # # Module definitions. For these purposes, we can assume UDPs are the same as modules. 
- mod_header = t( (module_kw + # Module keyword - ident + # Entity's name - t( generics, 'mod_gens' ) + # Optional generic mappings - paren_group + # Port mappings - Literal(';') # Terminal ; + mod_header = t( ( t( module_kw, 'mod' ) + # Module keyword + t( ident, 'mod_name' ) + # Entity's name + Optional( t( generics, 'mod_gens' ) ) + # Optional generic mappings + Optional( t( paren_group, 'mod_ports' ) ) + # Optional port mappings + t( Literal(';'), ';' ) # Terminal ; ).setParseAction( handle_module ), 'mod_hdr' ) - mod_body = t( ZeroOrMore( statement ), 'mod_bdy' ) # Module body - mod_footer = t( endmodule_kw, 'mod_end' ) # End module keyword + mod_body = t( ZeroOrMore( statement ), 'mod_bdy' ) # Module body + mod_footer = t( endmodule_kw, 'mod_end' ) # End module keyword syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err ) module = Group( (mod_header | syntax_err) + - mod_body + + mod_body + (mod_footer | syntax_err) ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
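As an illustration of the fork/join handling added in this revision, here is a stand-alone sketch using pyparsing's nestedExpr the same way the diff does for begin/end blocks. The block contents are simplified to bare words instead of the full statement rule, and the Verilog snippet is invented:

from pyparsing import CaselessKeyword, OneOrMore, Word, nestedExpr, printables

fork_kw = CaselessKeyword('fork')
join_kw = CaselessKeyword('join')
word    = Word(printables)

fork_block = nestedExpr(fork_kw, join_kw,
                        content=OneOrMore(~fork_kw + ~join_kw + word))

print(fork_block.parseString('fork a = 1; b = 2; join'))
# -> [['a', '=', '1;', 'b', '=', '2;']]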
From: <dav...@us...> - 2010-05-24 17:04:11
|
Revision: 98 http://fbt.svn.sourceforge.net/fbt/?rev=98&view=rev Author: dave_infj Date: 2010-05-24 17:04:05 +0000 (Mon, 24 May 2010) Log Message: ----------- Consider this a rewrite: Add parseAction hooks to handle module and instance decls, rather than try to parse the weird results hierarchy that is returned. Wrap individual lexical units in new function t() ('term') to label those units, and also apply the debug flag as required. Add print_results() for inspecting parse output during debugging Fix tab/empty lines fix directive handling: now understands backslash line continuation. add 'ifndef' directive big grammar shakeup: - handle compound statements explicitly (while/repeat/case/etc) - add explicit if/else handling (which didn't fit the old catch-all when the else clause was present - add block label support - add generate support - functions now accept 'range' expressions, and also handles the old style of verilog in which there is no formal list given in () I don't know if nested functions are allowed, but they're handled too. - reimplement begin/end blocks, functions and instance handling - processes can now handle a bare sensitivity list outside of parens - instances decls and functions now incorporated into 'statement' - all of the above now defined in terms of recursive references to 'statement', so that instances within generate/for structs etc are picked up Possibly others I've forgotten. This change set took a long time to come about, and there were lots of subtle problems found and fixed (no doubt introducing new problems) Modified Paths: -------------- trunk/bin/parse_verilog.py Modified: trunk/bin/parse_verilog.py =================================================================== --- trunk/bin/parse_verilog.py 2010-05-24 16:04:51 UTC (rev 97) +++ trunk/bin/parse_verilog.py 2010-05-24 17:04:05 UTC (rev 98) @@ -33,21 +33,23 @@ import pyparsing from pyparsing import Literal, CaselessKeyword, Word, ZeroOrMore, OneOrMore, Combine, \ Group, Optional, Forward, ParseException, alphas, alphanums, printables, \ - restOfLine, cppStyleComment, oneOf, nestedExpr, lineno, col, line + Regex, cppStyleComment, oneOf, nestedExpr, lineno, col, line import DepList import comp_filter +debug = False def parse(hdl_src): """ Determine dependencies for a given file (verilog mode) """ - global parser - global _src + global dep_list + _src = hdl_src # for handle_syntax_err + dep_list = DepList.DepList() # Parse the file try: @@ -65,32 +67,64 @@ ' '*(e.col-1) ) ) - - # Process the results; make and return the dependency list - dep_list = DepList.DepList() - # Each element of results is formatted as follows: - # 'module', <mod_name>, [<entity, instance>, ...], 'endmodule' - for mod_parse_data in results: - ent = mod_parse_data[1] - if ent in dep_list: - exit( "duplicate entity %s declaration found in %s (previous in %s)" % ( - ent, relpath(hdl_src), relpath(dep_list[ent][0]) - ) - ) - dep_list.add_dep( ent, hdl_src, [ent for ent, inst in mod_parse_data[2:-1] - if comp_filter.accept(ent)] ) + if debug: + print_results( results ) + for ent in dep_list: + print '%s:' % ent + for dep in dep_list[ent]: + print '\t%s' % `dep` + print return dep_list +def print_results( list, indent=0 ): + """ + For debugging: Dump results tree + """ + + print '\n%s-' % (' '*indent ), + for elt in list: + if type(elt) is str: + print '%r,' % elt, + else: + print_results( elt, indent+1 ) + print '\n%s,' % (' '*indent ), + + +def handle_module( s, loc, toks ): + """ + Handle module declarations + """ + + global 
current_module + current_module = toks[1] + if current_module in dep_list: + print "%s: warning: duplicate entity %s declaration found in %s: previous in:\n\t%s" % ( + prog_name(), current_module, relpath(_src), + '\n\t'.join( relpath(dep[0]) for dep in dep_list[current_module] ) + ) + dep_list.add_dep( current_module, _src, [] ) + + +def handle_inst( s, loc, toks ): + """ + Handle intantiations + """ + + ent, inst = toks[0], toks[-3] + if comp_filter.accept(ent): + dep_list.add_dep( current_module, _src, [ent] ) + + def handle_syntax_err( s, loc, toks ): """ Handle syntax errors """ - + global _src - + exit("""\ %s:%d: error: unexpected syntax: @@ -109,21 +143,36 @@ ' '*(col( loc, s )-1) ) ) - - + + def verilog_grammar(): """ - Define the partial grammar used to parse Verilog sources + Define the partial grammar used to parse Verilog sources. + + NB: This will accept invalid verilog, but hopefully all valid verilog, + though it is pretty much certain that there is stuff + missing/incomplete/invalid. """ + def t( term, name ): + return term.setName( name ).setResultsName( name ).setDebug( debug ) + # Compiler directives (we don't care what they are, therefore we don't care about their # format.) + restOfLineWithCont = Forward() + restOfLineWithCont << ( (t(Regex(r".*\\").leaveWhitespace(), 'esc_eol') + restOfLineWithCont) | + t(Regex(r".*" ).leaveWhitespace(), 'eol') + ) directive = Group( Combine( "`" + \ - oneOf("define undef ifdef else endif default_nettype " + oneOf("define undef ifdef ifndef else endif default_nettype " "include resetall timescale unconnected_drive " "nounconnected_drive celldefine endcelldefine") + \ - restOfLine ) ) + restOfLineWithCont ) ) + # + # Context-free units + # + # Various character classes printable_less_parens = "".join([x for x in printables if x not in ['(',')']]) printable_less_semi = "".join([x for x in printables if x not in [';']]) @@ -135,59 +184,91 @@ ident = Word( alphanums+'_' ) # Keywords - begin_kw = CaselessKeyword('begin') + always_kw = CaselessKeyword('always') + begin_kw = CaselessKeyword('begin') + Optional( Literal(':') + ident ) + case_kw = CaselessKeyword('case') | CaselessKeyword('casex') | CaselessKeyword('casez') + else_kw = CaselessKeyword('else') end_kw = CaselessKeyword('end') - func_begin = CaselessKeyword('function') - func_end = CaselessKeyword('endfunction') + endcase_kw = CaselessKeyword('endcase') + endfunc_kw = CaselessKeyword('endfunction') + endgen_kw = CaselessKeyword('endgenerate') + endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive') + for_kw = CaselessKeyword('for') + func_kw = CaselessKeyword('function') + generate_kw = CaselessKeyword('generate') + if_kw = CaselessKeyword('if') module_kw = CaselessKeyword('module' ) | CaselessKeyword('primitive') - endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive') + repeat_kw = CaselessKeyword('repeat') + wait_kw = CaselessKeyword('wait') + while_kw = CaselessKeyword('while') + # + # Simple contexts + # # () groups (with recursion) paren_group = nestedExpr( Literal('('), Literal(')'), content=OneOrMore( Word(printable_less_parens) ) ) - # begin/end blocks - begin_block = nestedExpr( begin_kw, end_kw, - content=OneOrMore( ~begin_kw + ~end_kw + word ) - ) + # Module instances + generics = Optional( Literal('#') + paren_group ) + instance = t( Group( ( t(ident, 'ent_name') + # Entity name + t(generics, 'inst_gens') + # Optional generics + t(ident, 'inst_name') + # Instance name + t(paren_group, 'ports') + # Ports + t(Literal( ';' ), 
';') # terminal ; + ).setParseAction( handle_inst ) + ), 'inst' ) - # Functions - function = nestedExpr( func_begin, func_end, - content=OneOrMore( ~func_begin + ~func_end + word ) - ) + # + # Complex contexts + # # Statements, block statements and compound statements statement = Forward() - simple_stmt = OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';') - compnd_stmt = ( (CaselessKeyword('always') + Literal('@')) | # Special case: always @(...) - Word(alphas) # Any other case: kw( ... ) - ) + paren_group + statement - statement << Group( ~endmodule_kw + (compnd_stmt | begin_block | simple_stmt) ) - # Module and instance generics - generics = Optional( Literal('#') + paren_group ) + process = t( always_kw, 'proc' ) + Literal('@') + (paren_group | word) + statement + ifcond = (t( if_kw, 'if' ) + + t( paren_group, 'cond' ) + statement + Optional( + t( else_kw, 'else' ) + statement ) + ) + case = t( nestedExpr( case_kw + paren_group, endcase_kw, + content=OneOrMore( ~case_kw + ~endcase_kw + word ) + ), 'case' ) + loop = t( for_kw | repeat_kw | wait_kw | while_kw, 'loop' ) + paren_group + statement + function = nestedExpr( t( func_kw, 'func' ) + + t( ZeroOrMore( ~Literal(';') + word ), 'func_decl' ) + + t( Literal(';'), ';' ), + t( endfunc_kw, 'endfunc' ), + content=OneOrMore( ~func_kw + ~endfunc_kw + statement ) + ) + generate = nestedExpr( t( generate_kw, 'gen' ), + t( endgen_kw, 'endgen' ), + content=OneOrMore( ~generate_kw + ~endgen_kw + statement ) + ) + block = nestedExpr( t( begin_kw, 'begin' ), + t( end_kw, 'end' ), + content=OneOrMore( ~begin_kw + ~end_kw + statement ) + ) + simple_stmt = t( OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';') + , 'stmt' ) + statement << Group( ~endmodule_kw + + (process | function | ifcond | case | loop | + generate | block | instance | simple_stmt ) ) - # Module instances - instance = Group( ident + # Entity name - generics.suppress() + # Optional generics - ident + # Instance name - paren_group.suppress() + # Ports - Literal( ';' ).suppress() # terminal ; - ) + # + # Module definition + # # Module definitions. For these purposes, we can assume UDPs are the same as modules. - mod_header = (module_kw + # Module keyword - ident + # Entity's name - generics.suppress() + # Optional generic mappings - paren_group.suppress() + # Port mappings - Literal(';').suppress() # Terminal ; - ) - mod_body = ZeroOrMore( instance | # Entity instantiation - function.suppress() | # Function declarations - statement.suppress() # Compound and simple statements - ) - mod_footer = endmodule_kw # End module keyword + mod_header = t( (module_kw + # Module keyword + ident + # Entity's name + t( generics, 'mod_gens' ) + # Optional generic mappings + paren_group + # Port mappings + Literal(';') # Terminal ; + ).setParseAction( handle_module ), 'mod_hdr' ) + mod_body = t( ZeroOrMore( statement ), 'mod_bdy' ) # Module body + mod_footer = t( endmodule_kw, 'mod_end' ) # End module keyword syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err ) module = Group( (mod_header | syntax_err) + @@ -200,7 +281,7 @@ # No comments, no compiler directives (which can appear anywhere in the input) v.ignore( cppStyleComment ) v.ignore( directive ) - + return v parser = verilog_grammar() This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
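The t() wrapper introduced in this revision is small enough to demonstrate on its own. The sketch below reproduces its behaviour, naming a term for results access and applying a single module-level debug flag; the sample input line is invented:

from pyparsing import CaselessKeyword

debug = False

def t(term, name):
    # label the term for results access and apply the global debug flag
    return term.setName(name).setResultsName(name).setDebug(debug)

module_kw = t(CaselessKeyword('module') | CaselessKeyword('primitive'), 'mod')
print(module_kw.parseString('module counter (clk, rst);'))    # -> ['module']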
From: <dav...@us...> - 2010-05-24 16:04:57
|
Revision: 97 http://fbt.svn.sourceforge.net/fbt/?rev=97&view=rev Author: dave_infj Date: 2010-05-24 16:04:51 +0000 (Mon, 24 May 2010) Log Message: ----------- clean up tabs and empty lines fix error conditions 'unexpected entity' and 'duplicate entity'. Apparently missed out when this was refactored. demote duplicate entity entry to warning, now that there's a dependency resolution mechanism Modified Paths: -------------- trunk/bin/parse_vhdl.py Modified: trunk/bin/parse_vhdl.py =================================================================== --- trunk/bin/parse_vhdl.py 2010-05-24 16:01:51 UTC (rev 96) +++ trunk/bin/parse_vhdl.py 2010-05-24 16:04:51 UTC (rev 97) @@ -52,11 +52,11 @@ """ Determine dependencies for a given file (VHDL mode) """ - + ent = None deps = [] dep_list = DepList.DepList() - + with open(hdl_src) as vf: for line in vf: # Delete any comments. @@ -70,7 +70,7 @@ # HACK: Check that this isn't an attributes declaration, which confuses the main re if m_attribs.search(line): continue - + match = m_dep_vhdl.search(line) if match: # A package import decl after an entity or package body is @@ -84,17 +84,21 @@ # Find out what we've matched and handle appropriately if match.group('ent'): if ent: - exit("""\ -%s: unexpected entity %s found when processing entity %s. missing package imports?""" % (relpath(self.hdl_src), - e, - self.ent) ) - + exit('%s: unexpected entity %s found when processing entity %s. ' + 'missing package imports?' % (relpath(hdl_src), + match.group('ent'), + ent) ) + else: ent = match.group('ent') - + if ent in dep_list: - exit( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) ) - + print '%s: warning: duplicate entity %s declaration found in %s:' % ( + prog_name(), ent, relpath(hdl_src) ) + print 'previous in:\n\t%s' % ( + '\n\t'.join( relpath(dep[0]) for dep in dep_list[ent] ) + ) + # If it's a package import decl, then lib and pkg will be # defined. Only add a package if the library is 'work'. if match.group('lib') == 'work': @@ -112,5 +116,3 @@ dep_list.add_dep( ent, hdl_src, deps ) return dep_list - - This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
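parse_vhdl.py works line-by-line with regular expressions; the m_dep_vhdl pattern itself lies outside this hunk, so the following is only a hypothetical illustration of that style of matching, not the project's actual regex:

import re

m_ent = re.compile(r'^\s*entity\s+(?P<ent>\w+)\s+is', re.I)

m = m_ent.search('entity uart_tx is')
print(m.group('ent'))    # -> uart_tx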
From: <dav...@us...> - 2010-05-24 16:01:58
|
Revision: 96 http://fbt.svn.sourceforge.net/fbt/?rev=96&view=rev Author: dave_infj Date: 2010-05-24 16:01:51 +0000 (Mon, 24 May 2010) Log Message: ----------- Clean up tabs and empty lines add_dep() now will merge deps lists for dep records for the same ent/source file, rather than ending up with duplicate dep records merge() now simplified by calling add_dep() Modified Paths: -------------- trunk/bin/DepList.py Modified: trunk/bin/DepList.py =================================================================== --- trunk/bin/DepList.py 2010-05-13 16:11:12 UTC (rev 95) +++ trunk/bin/DepList.py 2010-05-24 16:01:51 UTC (rev 96) @@ -41,41 +41,46 @@ class DepList: """ Helper class for storing lists of entity dependencies - + Usage: Inputs are individual candidates added via DepList.add_dep() Outputs are always sets of candidates indexed by entity """ - + def __init__(self): self.list = {} # Regex for parsing dependency caches self.m_deps = re.compile( '(\w+)\s*\(\s*([\w.-]+)\s*\)\s*:(.*)' ) - + def add_dep(self, ent, hdl_src, deps, core_src = '' ): """ Add a dependency to the list """ - - try: - self.list[ent].append( (hdl_src, deps, core_src) ) - except KeyError: - self.list[ent] = [ (hdl_src, deps, core_src) ] - + if ent in self.list: + # If this source file is already known, merge the new deps list + # with the existing entry + for _src, _deps, _core_src in self.list[ent]: + if _src == hdl_src : + _deps.update(deps) + return + # Else add a new entry + self.list[ent].append( (hdl_src, set(deps), core_src) ) + else: + self.list[ent] = [ (hdl_src, set(deps), core_src) ] + + def merge(self, src): """ Update a dependencies dictionary with a new entry, merging if required. """ - - for ent, dep_rec in src.list.iteritems(): - try: - self.list[ent] = self.list[ent] + dep_rec - except KeyError: - self.list[ent] = dep_rec - + for ent, deps in src.list.iteritems(): + for dep in deps: + self.add_dep( ent, *dep ) + + def write_deps_cache(self, df): """ Write out the DepList in a linear .depends cache file. df is an handle @@ -97,13 +102,13 @@ os.path.basename(core_src), ' '.join(deps)) ) - + def read_deps_cache(self, df, path): """ Import depencency data from a .depends cache file. df is an handle and must already be open """ - + with df: for lno, dep_line in enumerate(df): # Delete any comments @@ -125,7 +130,7 @@ ) ent, hdl_src, deps = match.groups() deps = deps.split() - + # If deps contains a single object ending in '.xco', then it # is a core reference. if len(deps) == 1 and deps[0].endswith('.xco'): @@ -147,21 +152,18 @@ def iterkeys(self): return self.list.iterkeys() - + def iteritems(self): return self.list.iteritems() - + def __iter__(self): return self.iterkeys() - + def __contains__(self, ent): return ent in self.list def __getitem__(self, ent): return self.list[ent] - - - This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
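The merging behaviour described in the log message boils down to a few lines. This stand-alone sketch mirrors the new add_dep() logic; the entity and file names are invented for the example:

deps_by_ent = {}

def add_dep(ent, hdl_src, deps):
    # merge with an existing record for the same source file, if any
    for src, dep_set in deps_by_ent.setdefault(ent, []):
        if src == hdl_src:
            dep_set.update(deps)
            return
    deps_by_ent[ent].append((hdl_src, set(deps)))

add_dep('top', 'rtl/top.v', ['uart'])
add_dep('top', 'rtl/top.v', ['uart', 'fifo'])
print(deps_by_ent)    # one record for rtl/top.v with deps {'uart', 'fifo'}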
From: <dav...@us...> - 2010-05-13 16:11:19
|
Revision: 95 http://fbt.svn.sourceforge.net/fbt/?rev=95&view=rev Author: dave_infj Date: 2010-05-13 16:11:12 +0000 (Thu, 13 May 2010) Log Message: ----------- Get rid of Panic() class, replace with 'exit()' Modified Paths: -------------- trunk/bin/DepList.py trunk/bin/mkvdeps.py trunk/bin/mkvproj.py trunk/bin/parse_coregen.py trunk/bin/parse_verilog.py trunk/bin/parse_vhdl.py trunk/bin/synplify_wrapper.py trunk/bin/tool_common.py trunk/bin/tool_synth_synplify.py trunk/bin/tool_synth_xst.py trunk/bin/util.py Modified: trunk/bin/DepList.py =================================================================== --- trunk/bin/DepList.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/DepList.py 2010-05-13 16:11:12 UTC (rev 95) @@ -118,7 +118,7 @@ # Parse components match = self.m_deps.search(dep_line) if not match: - raise Panic("%s:%d: invalid dependency line" % + exit("%s:%d: invalid dependency line" % ( relpath(os.path.join(path, tool_common.DEPS_FILE)), (lno+1) ) Modified: trunk/bin/mkvdeps.py =================================================================== --- trunk/bin/mkvdeps.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/mkvdeps.py 2010-05-13 16:11:12 UTC (rev 95) @@ -35,6 +35,7 @@ from util import * import sys +import types import getopt import re import os @@ -142,7 +143,7 @@ return disambiguate( cfg.aliases[ent], ent ) else: # If not, raise an error if alias: - raise Panic( """\ + exit( """\ real entity %s of alias %s unknown. The following sources depend on %s: \t%s""" % @@ -151,7 +152,7 @@ alias, '\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(alias)] ) ) ) else: - raise Panic( """\ + exit( """\ entity %s unknown. The following sources depend on %s: \t%s""" % @@ -175,7 +176,7 @@ filtered_list.append( (hdl_src, deps, core_src) ) except KeyError: # There is no rule for this entity, which is an error. - raise Panic( """\ + exit( """\ no rule for disambiguating entity %s with multiple candidates: \t%s """ % (ent, @@ -186,14 +187,14 @@ if len(filtered_list) == 0: # Nothing matches, the ambiguity is unresolved, which is # an error. 
- raise Panic( """\ + exit( """\ no candidates match specified rule for entity %s: \t%s """ % (ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src) for hdl_src, deps, core_src in cfg.dep_list[ent]] )) ) elif len(filtered_list) != 1: - raise Panic( """\ + exit( """\ Still %d candidates left after applying disambiguation rule for entity %s: \t%s """ % (len(filtered_list), ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src) @@ -210,7 +211,7 @@ if alias: hdl_src, deps, core_src = resolved_ent if not core_src: - raise Panic( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) ) + exit( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) ) return subst_basename(hdl_src, alias), deps, core_src, alias else: return resolved_ent + ('',) @@ -271,7 +272,7 @@ $Id$ """ % (prog_name()) - sys.exit(0) + exit() def main(argv): @@ -285,7 +286,7 @@ 'help', 'version'] ) except getopt.GetoptError, e: - raise Panic( e ) + exit( e ) for arg, val in opts: if arg in ['-v', '--verbose']: @@ -296,7 +297,7 @@ if arg in ['-V', '--version']: print '$Id$' - sys.exit(0) + exit() dirs = args if not dirs: @@ -309,7 +310,10 @@ if __name__ == '__main__': try: main(sys.argv) - except Panic, e: - sys.stderr.write( '%s\n' % e ) - sys.exit( 1 ) - + except SystemExit, e: + if e.code: + if type(e.code) is types.IntType: + exit(e.code) + else: + sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) ) + exit(1) Modified: trunk/bin/mkvproj.py =================================================================== --- trunk/bin/mkvproj.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/mkvproj.py 2010-05-13 16:11:12 UTC (rev 95) @@ -34,6 +34,7 @@ from util import * import sys +import types import os import re import getopt @@ -97,7 +98,7 @@ $Id$ """ % (prog_name()) - sys.exit(0) + exit() def load_dep_db( cfg ): @@ -166,7 +167,7 @@ 'version', 'dumpdeps'] ) except getopt.GetoptError, e: - raise Panic( e ) + exit(e) # Temporary flag do_dump = False @@ -189,14 +190,14 @@ if arg in ['--dumpdeps']: do_dump = True if arg in ['-V', '--version']: sys.stderr.write( '$Id$\n' ) - sys.exit(0) + exit() # Sanity checks if not cfg.libpath: - raise Panic("no library paths specified (-l)") + exit("no library paths specified (-l)") if not os.path.isdir(cfg.output_dir): - raise Panic("output directory %s doesn't exist" % (cfg.output_dir) ) + exit("output directory %s doesn't exist" % (cfg.output_dir) ) # # Do job @@ -217,7 +218,7 @@ cfg.oper = args.pop(0) if not cfg.top_ent: - raise Panic("no top level module specified (-r)") + exit("no top level module specified (-r)") if cfg.part == '<unspecified>': sys.stderr.write( "%s: warning: no partspec specified (-p)\n" % prog_name() ) @@ -227,7 +228,7 @@ # Check to make sure top level is known about if cfg.top_ent not in cfg.dep_list: - raise Panic("top level entity %s unknown" % cfg.top_ent) + exit("top level entity %s unknown" % cfg.top_ent) # Try to resolve dependencies cfg.resolved_list = mkvdeps.resolve_deps( cfg ) @@ -242,7 +243,7 @@ try: exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) ) except ImportError: - raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) + exit( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) tool.write_project( cfg ) @@ -250,6 +251,10 @@ if __name__ == '__main__': try: main(sys.argv) - except Panic, e: - sys.stderr.write( '%s\n' % e ) - sys.exit( 1 ) + except SystemExit, e: + if e.code: + if type(e.code) is types.IntType: + exit(e.code) + else: + sys.stderr.write( '%s: panic: %s\n' % 
(prog_name(), e.code) ) + exit(1) Modified: trunk/bin/parse_coregen.py =================================================================== --- trunk/bin/parse_coregen.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/parse_coregen.py 2010-05-13 16:11:12 UTC (rev 95) @@ -70,10 +70,10 @@ use_vhdl = bool(attribs['vhdlsim']) use_verilog = bool(attribs['verilogsim']) except KeyError, e: - raise Panic( '%s: missing parameter %s' % (hdl_src, e) ) + exit('%s: missing parameter %s' % (hdl_src, e)) if not use_vhdl ^ use_verilog: - raise Panic( "%s: exactly one simulation (VHDL or verilog) is required" % relpath(hdl_src) ) + exit("%s: exactly one simulation (VHDL or verilog) is required" % relpath(hdl_src)) # The simulation source file will be the entity name with the appropriate # extension attached Modified: trunk/bin/parse_verilog.py =================================================================== --- trunk/bin/parse_verilog.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/parse_verilog.py 2010-05-13 16:11:12 UTC (rev 95) @@ -53,7 +53,7 @@ try: results = parser.parseFile( hdl_src, parseAll = True ) except pyparsing.ParseException, e: - raise Panic("""\ + exit("""\ %%s:%d: error: Parse exception: %s @@ -74,7 +74,7 @@ for mod_parse_data in results: ent = mod_parse_data[1] if ent in dep_list: - raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % ( + exit( "duplicate entity %s declaration found in %s (previous in %s)" % ( ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) ) @@ -91,7 +91,7 @@ global _src - raise Panic("""\ + exit("""\ %s:%d: error: unexpected syntax: %s @@ -139,7 +139,7 @@ end_kw = CaselessKeyword('end') func_begin = CaselessKeyword('function') func_end = CaselessKeyword('endfunction') - module_kw = CaselessKeyword( 'module' ) | CaselessKeyword( 'primitive' ) + module_kw = CaselessKeyword('module' ) | CaselessKeyword('primitive') endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive') # () groups (with recursion) @@ -188,6 +188,7 @@ statement.suppress() # Compound and simple statements ) mod_footer = endmodule_kw # End module keyword + syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err ) module = Group( (mod_header | syntax_err) + mod_body + Modified: trunk/bin/parse_vhdl.py =================================================================== --- trunk/bin/parse_vhdl.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/parse_vhdl.py 2010-05-13 16:11:12 UTC (rev 95) @@ -84,7 +84,7 @@ # Find out what we've matched and handle appropriately if match.group('ent'): if ent: - raise Panic("""\ + exit("""\ %s: unexpected entity %s found when processing entity %s. missing package imports?""" % (relpath(self.hdl_src), e, self.ent) ) @@ -93,7 +93,7 @@ ent = match.group('ent') if ent in dep_list: - raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) ) + exit( "duplicate entity %s declaration found in %s (previous in %s)" % (ent, relpath(hdl_src), relpath(dep_list[ent][0]) ) ) # If it's a package import decl, then lib and pkg will be # defined. Only add a package if the library is 'work'. 
Modified: trunk/bin/synplify_wrapper.py =================================================================== --- trunk/bin/synplify_wrapper.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/synplify_wrapper.py 2010-05-13 16:11:12 UTC (rev 95) @@ -33,6 +33,7 @@ from util import * import sys +import types import os import subprocess import time @@ -133,7 +134,7 @@ while not os.path.isfile( synth_input_log ): time.sleep( 0.5 ) if syn_proc.poll() is not None: - raise Panic( "synthesis log does not exist. %s may not have run properly." % + exit('synthesis log does not exist. %s may not have run properly.'% (exe) ) # Parse synthesis log file @@ -260,7 +261,7 @@ $Id$ """ % (prog_name(), DEF_SYN_EXE) - sys.exit(0) + exit() # main @@ -278,7 +279,7 @@ 'help', 'version'] ) except getopt.GetoptError, e: - raise Panic( e ) + exit(e) for arg, val in opts: if arg in ['-v', '--verbose']: @@ -289,7 +290,7 @@ if arg in ['-V', '--version']: print '$Id$' - sys.exit(0) + exit() if arg in ['-e', '--executable']: exe = val @@ -305,7 +306,10 @@ if __name__ == '__main__': try: main(sys.argv) - except Panic, e: - sys.stderr.write( '%s\n' % e ) - sys.exit( 1 ) - + except SystemExit, e: + if e.code: + if type(e.code) is types.IntType: + exit(e.code) + else: + sys.stderr.write( '%s: panic: %s\n' % (prog_name(), e.code) ) + exit(1) Modified: trunk/bin/tool_common.py =================================================================== --- trunk/bin/tool_common.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/tool_common.py 2010-05-13 16:11:12 UTC (rev 95) @@ -80,7 +80,7 @@ alias_split = alias.split('=') if len(alias_split) != 2: - raise Panic( "Bad alias %s" % alias ) + exit('bad alias %s' % alias) ie, ae = alias_split self.aliases[ie.strip()] = ae.strip() @@ -97,7 +97,7 @@ ent, regex = None, None if not ent or regex[-1] != '/': - raise Panic( "Bad disambiguation rule %s" % (rule) ) + exit('bad disambiguation rule %s' % (rule)) # Trim trailing / regex = regex[:-1] @@ -105,7 +105,7 @@ try: self.disambig[ent] = re.compile( regex ) except re.error: - raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex)) + exit('bad regex in disambiguation rule %s: %s' % (d, regex)) @@ -119,7 +119,7 @@ try: return relpath(hdl_src, cfg.output_dir) except OSError: - raise Panic( "missing source file %s. Is dependency cache out of date?" % (x[1]) ) + exit('missing source file %s. Is dependency cache out of date?' 
% (x[1])) else: return hdl_src @@ -132,7 +132,7 @@ match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps.upper() ) if not match: - raise Panic( "unknown partspec %s" % (ps) ) + exit('unknown partspec %s' % (ps)) part, family, pkg, speed = match.groups() @@ -146,7 +146,7 @@ if family[0].isdigit(): family = 'VIRTEX'+family[0] else: - raise Panic( 'unknown family %s in partspec' % k ) + exit('unknown family %s in partspec' % k) return part, family, pkg, speed Modified: trunk/bin/tool_synth_synplify.py =================================================================== --- trunk/bin/tool_synth_synplify.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/tool_synth_synplify.py 2010-05-13 16:11:12 UTC (rev 95) @@ -49,7 +49,7 @@ '.vhdl':'-vhdl -lib work', '.v' :'-verilog'}[k] except: - raise Panic( 'unknown HDL source extension %s' % k ) + exit( 'unknown HDL source extension %s' % k ) def write_project(cfg): Modified: trunk/bin/tool_synth_xst.py =================================================================== --- trunk/bin/tool_synth_xst.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/tool_synth_xst.py 2010-05-13 16:11:12 UTC (rev 95) @@ -49,7 +49,7 @@ '.vhdl':'vhdl work', '.v' :'verilog work'}[k] except: - raise Panic( 'unknown HDL source extension %s' % k ) + exit( 'unknown HDL source extension %s' % k ) def write_project(cfg): Modified: trunk/bin/util.py =================================================================== --- trunk/bin/util.py 2010-04-17 18:43:23 UTC (rev 94) +++ trunk/bin/util.py 2010-05-13 16:11:12 UTC (rev 95) @@ -39,18 +39,6 @@ return os.path.basename(sys.argv[0]) -class Panic(Exception): - """ - For equivalent of perl die "message" - """ - - def __init__(self, value): - self.value = value - - def __str__(self): - return "%s: panic: %s" % (prog_name(), self.value) - - def subst_basename(pathspec, new_base): """ Substitute the basename component of pathspec with new_base This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
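The convention adopted in this revision is worth spelling out: exit(message) raises SystemExit with a string code, and a single handler in each entry point decides how to report it. A minimal sketch, with prog_name() stubbed in place of the real helper from util.py and an invented error message:

import sys

def prog_name():
    return 'fbt-example'                      # stand-in for util.prog_name()

def main():
    exit('no library paths specified (-l)')   # may be called anywhere in the tool

if __name__ == '__main__':
    try:
        main()
    except SystemExit as e:
        if e.code and not isinstance(e.code, int):
            sys.stderr.write('%s: panic: %s\n' % (prog_name(), e.code))
            sys.exit(1)
        raise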
From: <dav...@us...> - 2010-04-17 18:43:29
|
Revision: 94
http://fbt.svn.sourceforge.net/fbt/?rev=94&view=rev
Author: dave_infj
Date: 2010-04-17 18:43:23 +0000 (Sat, 17 Apr 2010)

Log Message:
-----------
now have a poll-loop on the existence of the SRR file, in case synplify is slow to get started

Modified Paths:
--------------
    trunk/bin/synplify_wrapper.py

Modified: trunk/bin/synplify_wrapper.py
===================================================================
--- trunk/bin/synplify_wrapper.py	2010-04-14 14:59:39 UTC (rev 93)
+++ trunk/bin/synplify_wrapper.py	2010-04-17 18:43:23 UTC (rev 94)
@@ -130,10 +130,11 @@
 
     # Do synthesis run
     syn_proc = subprocess.Popen( '%s -batch %s' % (exe, project), shell=True )
-    time.sleep( 1 )
-    if not os.path.isfile( synth_input_log ):
-        raise Panic( "synthesis log does not exist. %s may not have run properly." %
-                     (exe) )
+    while not os.path.isfile( synth_input_log ):
+        time.sleep( 0.5 )
+        if syn_proc.poll() is not None:
+            raise Panic( "synthesis log does not exist. %s may not have run properly." %
+                         (exe) )
 
     # Parse synthesis log file
     m_parse_log = re.compile( """

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
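A stand-alone sketch of the same poll loop, with a shell one-liner standing in for the slow-to-start synthesis tool and a hypothetical log file name:

import os
import subprocess
import sys
import time

log = 'synth.srr'                        # hypothetical log file name
# shell one-liner standing in for the synthesis run
proc = subprocess.Popen('sleep 1 && touch synth.srr', shell=True)

while not os.path.isfile(log):
    time.sleep(0.5)
    if proc.poll() is not None and not os.path.isfile(log):
        sys.exit('synthesis log does not exist; the tool may not have run properly')

print('log found; tool still running: %s' % (proc.poll() is None))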
From: <dav...@us...> - 2010-04-14 14:59:45
|
Revision: 93 http://fbt.svn.sourceforge.net/fbt/?rev=93&view=rev Author: dave_infj Date: 2010-04-14 14:59:39 +0000 (Wed, 14 Apr 2010) Log Message: ----------- now implements the equivalent of tail -f on log file, so progress of synth can be watched. Add an exception to handle (naively) synplify subprocess ident lines Unknown log lines now result in a warning, rather than an error. Modified Paths: -------------- trunk/bin/synplify_wrapper.py Modified: trunk/bin/synplify_wrapper.py =================================================================== --- trunk/bin/synplify_wrapper.py 2010-04-13 16:53:18 UTC (rev 92) +++ trunk/bin/synplify_wrapper.py 2010-04-14 14:59:39 UTC (rev 93) @@ -34,6 +34,8 @@ import sys import os +import subprocess +import time import errno import re import getopt @@ -92,6 +94,19 @@ return (type, code, file, lno, msg) +def tail_f(file, run_condition): + interval = 0.5 + + while run_condition(): + where = file.tell() + line = file.readline() + if not line: + time.sleep(interval) + file.seek(where) + else: + yield line + + def exec_synplify( project, verbose, exe ): # various filenames path, entity = os.path.split( os.path.splitext(project)[0] ) @@ -114,7 +129,8 @@ raise e # Do synthesis run - syn_result = os.system( '%s -batch %s' % (exe, project) ) + syn_proc = subprocess.Popen( '%s -batch %s' % (exe, project), shell=True ) + time.sleep( 1 ) if not os.path.isfile( synth_input_log ): raise Panic( "synthesis log does not exist. %s may not have run properly." % (exe) ) @@ -164,15 +180,25 @@ """ % (prog_name(), exe, project, os.path.abspath(synth_input_log)) ) with open( synth_input_log ) as sil: - for line in sil: + for line in tail_f(sil, lambda: syn_proc.poll() is None): # Want only useful messages (^@), and not @END lines line = line.strip() + + # Print the sub-programme header, so one can see progress + if line.startswith('Synpl'): + line = line[:80] + print '\n%s\n%s\n%s\n' % ('-'*len(line), + line, + '-'*len(line)) + + # Otherwise, process only message lines if not line.startswith('@') or line in at_ignores: continue match = m_parse_log.search( line ) if not match: - raise Panic( "unknown log entry:\n%s" % (line) ) + print "%s: warning: unknown log entry:\n%s" % (prog_name(), line) + continue # Parse log line, doing any type conversions try: This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
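The tail_f() generator added in this revision is reusable on its own. Below is a usage sketch, with a hypothetical subprocess and log file standing in for Synplify and its SRR log:

import subprocess
import time

def tail_f(f, run_condition):
    # follow a growing file, tail -f style, while run_condition() holds
    while run_condition():
        where = f.tell()
        line = f.readline()
        if not line:
            time.sleep(0.5)
            f.seek(where)
        else:
            yield line

# hypothetical long-running tool appending to tool.log
proc = subprocess.Popen('for i in 1 2 3; do echo line $i >> tool.log; sleep 1; done',
                        shell=True)
with open('tool.log', 'a+') as log:      # a+ creates the file if it does not exist yet
    log.seek(0)
    for line in tail_f(log, lambda: proc.poll() is None):
        print(line.rstrip())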
From: <dav...@us...> - 2010-04-13 16:53:24
|
Revision: 92
http://fbt.svn.sourceforge.net/fbt/?rev=92&view=rev
Author: dave_infj
Date: 2010-04-13 16:53:18 +0000 (Tue, 13 Apr 2010)

Log Message:
-----------
add support for "advisories"

Modified Paths:
--------------
    trunk/bin/synplify_wrapper.py

Modified: trunk/bin/synplify_wrapper.py
===================================================================
--- trunk/bin/synplify_wrapper.py	2010-04-12 16:55:42 UTC (rev 91)
+++ trunk/bin/synplify_wrapper.py	2010-04-13 16:53:18 UTC (rev 92)
@@ -79,6 +79,10 @@
 def translate_log( log ):
     (type, code, file, lno, msg) = log
 
+    # "Advisory" messages are warnings
+    if type == 'A':
+        type = 'W'
+
     # Promote/demote messages
     if code in to_null: return None
     if code in to_note: type = 'N'

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
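For context, a condensed sketch of translate_log() with this change applied. The to_null/to_note codes shown are placeholders rather than real Synplify message codes, and the tuple fields are renamed to avoid shadowing Python builtins:

to_null = set(['XX100'])                 # placeholder codes, not real Synplify ones
to_note = set(['XX200'])

def translate_log(log):
    kind, code, src, lno, msg = log
    if kind == 'A':                      # "advisory" messages are warnings
        kind = 'W'
    if code in to_null:
        return None                      # silently dropped
    if code in to_note:
        kind = 'N'                       # demoted to a note
    return (kind, code, src, lno, msg)

print(translate_log(('A', 'XX300', 'top.v', 12, 'example message')))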
From: <dav...@us...> - 2010-04-12 16:55:50
|
Revision: 91 http://fbt.svn.sourceforge.net/fbt/?rev=91&view=rev Author: dave_infj Date: 2010-04-12 16:55:42 +0000 (Mon, 12 Apr 2010) Log Message: ----------- Add preliminary Verilog support Modified Paths: -------------- trunk/bin/parse_verilog.py Modified: trunk/bin/parse_verilog.py =================================================================== --- trunk/bin/parse_verilog.py 2010-04-09 16:29:04 UTC (rev 90) +++ trunk/bin/parse_verilog.py 2010-04-12 16:55:42 UTC (rev 91) @@ -30,8 +30,10 @@ from __future__ import with_statement from util import * -import sys -import re +import pyparsing +from pyparsing import Literal, CaselessKeyword, Word, ZeroOrMore, OneOrMore, Combine, \ + Group, Optional, Forward, ParseException, alphas, alphanums, printables, \ + restOfLine, cppStyleComment, oneOf, nestedExpr, lineno, col, line import DepList import comp_filter @@ -42,9 +44,162 @@ Determine dependencies for a given file (verilog mode) """ - sys.stderr.write('%s:%s: warning: unsupported verilog source ignored.\n' % - ( prog_name(), relpath(hdl_src) )) + global parser + + global _src + _src = hdl_src # for handle_syntax_err - return DepList.DepList() + # Parse the file + try: + results = parser.parseFile( hdl_src, parseAll = True ) + except pyparsing.ParseException, e: + raise Panic("""\ +%%s:%d: error: Parse exception: + %s +%s +%s^""" % (relpath(hdl_src), + e.lineno, + e, + e.line, + ' '*(e.col-1) + ) + ) + + # Process the results; make and return the dependency list + dep_list = DepList.DepList() + # Each element of results is formatted as follows: + # 'module', <mod_name>, [<entity, instance>, ...], 'endmodule' + for mod_parse_data in results: + ent = mod_parse_data[1] + if ent in dep_list: + raise Panic( "duplicate entity %s declaration found in %s (previous in %s)" % ( + ent, relpath(hdl_src), relpath(dep_list[ent][0]) + ) + ) + dep_list.add_dep( ent, hdl_src, [ent for ent, inst in mod_parse_data[2:-1] + if comp_filter.accept(ent)] ) + + return dep_list + + +def handle_syntax_err( s, loc, toks ): + """ + Handle syntax errors + """ + + global _src + + raise Panic("""\ +%s:%d: error: unexpected syntax: + +%s +%s^ + +Check all of the syntactically required components for this declaration are +both present and free of syntax errors. eg, 'module' must have a valid set of +port declarations. + +If you believe this is genuinely valid Verilog, see the documentation under +Verilog support. +""" % ( relpath(_src), + lineno( loc, s ), + line( loc, s ), + ' '*(col( loc, s )-1) + ) + ) + + +def verilog_grammar(): + """ + Define the partial grammar used to parse Verilog sources + """ + + # Compiler directives (we don't care what they are, therefore we don't care about their + # format.) 
+ directive = Group( Combine( "`" + \ + oneOf("define undef ifdef else endif default_nettype " + "include resetall timescale unconnected_drive " + "nounconnected_drive celldefine endcelldefine") + \ + restOfLine ) ) + + # Various character classes + printable_less_parens = "".join([x for x in printables if x not in ['(',')']]) + printable_less_semi = "".join([x for x in printables if x not in [';']]) + + # word: Any non-whitespace + word = Word( printables ) + + # object indentifiers + ident = Word( alphanums+'_' ) + + # Keywords + begin_kw = CaselessKeyword('begin') + end_kw = CaselessKeyword('end') + func_begin = CaselessKeyword('function') + func_end = CaselessKeyword('endfunction') + module_kw = CaselessKeyword( 'module' ) | CaselessKeyword( 'primitive' ) + endmodule_kw= CaselessKeyword('endmodule') | CaselessKeyword('endprimitive') + + # () groups (with recursion) + paren_group = nestedExpr( Literal('('), Literal(')'), + content=OneOrMore( Word(printable_less_parens) ) + ) + + # begin/end blocks + begin_block = nestedExpr( begin_kw, end_kw, + content=OneOrMore( ~begin_kw + ~end_kw + word ) + ) + + # Functions + function = nestedExpr( func_begin, func_end, + content=OneOrMore( ~func_begin + ~func_end + word ) + ) + + # Statements, block statements and compound statements + statement = Forward() + simple_stmt = OneOrMore(~endmodule_kw + Word(printable_less_semi)) + Literal(';') + compnd_stmt = ( (CaselessKeyword('always') + Literal('@')) | # Special case: always @(...) + Word(alphas) # Any other case: kw( ... ) + ) + paren_group + statement + statement << Group( ~endmodule_kw + (compnd_stmt | begin_block | simple_stmt) ) + + # Module and instance generics + generics = Optional( Literal('#') + paren_group ) + + # Module instances + instance = Group( ident + # Entity name + generics.suppress() + # Optional generics + ident + # Instance name + paren_group.suppress() + # Ports + Literal( ';' ).suppress() # terminal ; + ) + + # Module definitions. For these purposes, we can assume UDPs are the same as modules. + mod_header = (module_kw + # Module keyword + ident + # Entity's name + generics.suppress() + # Optional generic mappings + paren_group.suppress() + # Port mappings + Literal(';').suppress() # Terminal ; + ) + mod_body = ZeroOrMore( instance | # Entity instantiation + function.suppress() | # Function declarations + statement.suppress() # Compound and simple statements + ) + mod_footer = endmodule_kw # End module keyword + syntax_err = OneOrMore( Word(printables) ).setParseAction( handle_syntax_err ) + module = Group( (mod_header | syntax_err) + + mod_body + + (mod_footer | syntax_err) + ) + + v = ZeroOrMore( module ) + + # No comments, no compiler directives (which can appear anywhere in the input) + v.ignore( cppStyleComment ) + v.ignore( directive ) + + return v + +parser = verilog_grammar() This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
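Much of this grammar is built from pyparsing's nestedExpr. A minimal, runnable illustration of the paren_group building block defined above, applied to an invented Verilog fragment:

from pyparsing import Literal, OneOrMore, Word, nestedExpr, printables

printable_less_parens = ''.join(c for c in printables if c not in '()')
paren_group = nestedExpr(Literal('('), Literal(')'),
                         content=OneOrMore(Word(printable_less_parens)))

print(paren_group.parseString('(posedge clk or negedge (rst_n))'))
# -> [['posedge', 'clk', 'or', 'negedge', ['rst_n']]]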
From: <dav...@us...> - 2010-04-09 16:29:15
|
Revision: 90 http://fbt.svn.sourceforge.net/fbt/?rev=90&view=rev Author: dave_infj Date: 2010-04-09 16:29:04 +0000 (Fri, 09 Apr 2010) Log Message: ----------- Add support for XST. New symbol now must be defined: SYNTH_TOOL. Should be set to either xst or synplify. defaults to xst. Also add new target: run (for non-gui "non-interative" modelsim) - however, one still needs to manually tell the simulator to run for however long. Update make clean. Modified Paths: -------------- trunk/bin/Makefile.inc Modified: trunk/bin/Makefile.inc =================================================================== --- trunk/bin/Makefile.inc 2010-04-09 14:56:11 UTC (rev 89) +++ trunk/bin/Makefile.inc 2010-04-09 16:29:04 UTC (rev 90) @@ -36,6 +36,9 @@ # # Options parameters may optionally be defined: # +# SYNTH_TOOL Name of the synthesis tool to use (default: xst) +# Current legal values: xst, synplify +# # SIMLIBPATH Paths in which to search for source dependencies for # simulation # SYNTHLIBPATH Paths in which to search for source dependencies for @@ -80,6 +83,8 @@ SIMTOP ?= $(TOP) +SYNTH_TOOL ?= xst + SIMLIBPATH ?= +$(VROOT)/lib . SYNTHLIBPATH ?= +$(VROOT)/lib . NGOLIBPATH += cores @@ -90,6 +95,7 @@ MKVPROJ ?= mkvproj.py SYNWRAP ?= synplify_wrapper.py SYNPLIFY ?= synplify_premier_dp +XST ?= xflow NGDBUILD ?= ngdbuild MAP ?= map PAR ?= par @@ -104,10 +110,12 @@ SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach P,$(SIMLIBPATH) ,-l $(P)) \ $(foreach R,$(SIM_DAR) ,-D $(R)) \ $(foreach A,$(ALIASES) ,-a $(A)) -SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach P,$(SYNTHLIBPATH),-l $(P)) \ +SYNTH_OPTS += -r $(TOP) -p $(PART) -t $(SYNTH_TOOL) \ + $(foreach P,$(SYNTHLIBPATH),-l $(P)) \ $(foreach F,$(SYN_CONSTR) ,-k $(F)) \ $(foreach R,$(SYN_DAR) ,-D $(R)) \ $(foreach A,$(ALIASES) ,-a $(A)) +XST_OPTS ?= -p $(PART) -synth xst_mixed -wd build -ed .. NGDBUILD_OPTS += -p $(PART) -a $(foreach P,$(NGOLIBPATH) ,-sd $(P)) MAP_OPTS += -w -p $(PART) -pr b PAR_OPTS += -w @@ -154,6 +162,9 @@ irun: $(VSIM) -i $(SIMTOP) $(VSIM_OPTS) & +run: + $(VSIM) $(SIMTOP) $(VSIM_OPTS) + bitstream: $(TOP).bit synth_list: @@ -164,7 +175,7 @@ rm -rf *~ work/ transcript vsim.wlf modelsim.mk \ *.{prj,prd,edf,ncf,log,synlog} build/ \ *_{fpga_editor.out,pad.txt} xlnx_auto_0* \ - *.{lst,bgn,bld,drc,map,mrp,ncd,ng[dmo],pcf} \ + *.{ngc,lst,bgn,bld,drc,map,mrp,ncd,ng[dmo],pcf} \ *.{xml,pad,par,unroutes,xpi,csv,bit,xrpt,ptwx} mrproper: clean @@ -180,13 +191,25 @@ $(if $^,$(MKVDEPS) -v .) # Synplify synthesis +ifeq ($(SYNTH_TOOL), synplify) $(TOP).edf: $(shell test -e $(TOP).prj \ && perl -ne '/add_file.*"(.*)"/ && print "$$1\n"' $(TOP).prj \ || echo FORCE ) $(MKVPROJ) synth $(SYNTH_OPTS) [ -f coregen.mk ] && make -f coregen.mk || true $(SYNWRAP) -e $(SYNPLIFY) $(TOP).prj +endif +# XST synthesis +ifeq ($(SYNTH_TOOL), xst) +$(TOP).ngc: $(shell test -e $(TOP).prj \ + && perl -ne 'chomp; s/(.*)#.*/$$1/; @x = split; $$s = $$x[$$#x]; print "$$s\n" if $$s' $(TOP).prj \ + || echo FORCE ) + $(MKVPROJ) synth $(SYNTH_OPTS) + [ -f coregen.mk ] && make -f coregen.mk || true + $(XST) $(XST_OPTS) $(TOP).prj +endif + # The list of sources for synthesis is determined by mkvproj, so there's no # direct way to include them in the dependencies for the EDIF netlist. 
# @@ -199,6 +222,10 @@ %.ngd: %.edf $(NGDBUILD) $(NGDBUILD_OPTS) $< $@ +# Xilinx NGDBUILD stage (EDF Netlist format) +%.ngd: %.ngc + $(NGDBUILD) $(NGDBUILD_OPTS) $< $@ + # Xilinx MAP stage %.ncd: %.ngd $(MAP) $(MAP_OPTS) $< -o $@ @@ -213,6 +240,6 @@ $(BITGEN) $(BITGEN_OPTS) $< $@ $(if $(POSTBUILD), && $(POSTBUILD) $@) -.PHONY: default deps alldeps sim irun bitstream synth_list clean FORCE +.PHONY: default deps alldeps sim irun run bitstream synth_list clean mrproper FORCE .PRECIOUS: %.edf %.ngd %.ncd %-timed.ncd %.bit This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
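The XST rule's perl one-liner extracts the file name, i.e. the last whitespace-separated field, from each non-comment line of the .prj file. An equivalent sketch in Python, for readers who prefer it spelled out (the function name is my own):

def prj_sources(path):
    # last whitespace-separated field of every non-blank, non-comment line
    srcs = []
    with open(path) as f:
        for line in f:
            fields = line.split('#', 1)[0].split()
            if fields:
                srcs.append(fields[-1])
    return srcs

# e.g. a line "vhdl work ../lib/uart.vhd" yields "../lib/uart.vhd"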
From: <dav...@us...> - 2010-04-09 14:56:18
|
Revision: 89 http://fbt.svn.sourceforge.net/fbt/?rev=89&view=rev Author: dave_infj Date: 2010-04-09 14:56:11 +0000 (Fri, 09 Apr 2010) Log Message: ----------- initial commit; preliminary XST support (via XFLOW) Added Paths: ----------- trunk/bin/tool_synth_xst.py Added: trunk/bin/tool_synth_xst.py =================================================================== --- trunk/bin/tool_synth_xst.py (rev 0) +++ trunk/bin/tool_synth_xst.py 2010-04-09 14:56:11 UTC (rev 89) @@ -0,0 +1,99 @@ +################################################################################ +# +# FPGA Build Tool +# Copyright (C) 2008 David Miller +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +# +# MODULE: +# +# tool_synth_xst.py +# +# PURPOSE: +# +# Project writer module for Xilinx's XST synthesis tool. +# +# $Id$ + +from __future__ import with_statement +from util import * + +import os + +import tool_common + + +# +# Constants +# + +# Synthesis: synplify output directory +BUILD_DIR = 'build' + +# Synthesis: synthesis commands for adding HDL source files +def lang_flag( k ): + try: + return {'.vhd' :'vhdl work', + '.vhdl':'vhdl work', + '.v' :'verilog work'}[k] + except: + raise Panic( 'unknown HDL source extension %s' % k ) + + +def write_project(cfg): + """ + Write out an XST synthesis project from the resolved sources + """ + + # write out the synplify project file + proj_file = os.path.join( cfg.output_dir, '%s.prj' % (cfg.top_ent) ) + + with open( proj_file, 'w' ) as pf: + # Generate sources and cores list + srcs = [] + cores = [] + for ent, hdl_src, deps, core_src, alias in cfg.resolved_list: + srcs.append( '%s %s' % ( + lang_flag(os.path.splitext(hdl_src)[1]), + tool_common.rel_src(cfg, hdl_src) ) ) + + # If it's a core, add it to the cores list + if core_src: + cores.append( (hdl_src, core_src, alias) ) + + # Unpack partspec + part, family, pkg, speed = tool_common.parse_partspec( cfg.part ) + + # Write out project file. + # + # NB: Much of this is hard-coded. The intention is to add flags as + # required to avoid a proliferation of used arguments. + + pf.write( """\ +# XST project file automatically generated by %s. Do not edit! +# $Id$ +# + +# source files +%s + +""" % (prog_name(), + '\n'.join( srcs ) ) ) + + # Write out core rules, if any + if cores: + tool_common.write_coregen_mf( cfg, cores ) + + Property changes on: trunk/bin/tool_synth_xst.py ___________________________________________________________________ Added: svn:keywords + Author Date Id Revision Added: svn:eol-style + native This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
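The lang_flag() mapping above determines the per-source prefix that XST expects in the project file. A short usage sketch with invented source paths:

import os

def lang_flag(ext):
    return {'.vhd':  'vhdl work',
            '.vhdl': 'vhdl work',
            '.v':    'verilog work'}[ext]

for src in ['../lib/uart.vhd', '../lib/fifo.v']:     # invented source paths
    print('%s %s' % (lang_flag(os.path.splitext(src)[1]), src))
# -> vhdl work ../lib/uart.vhd
#    verilog work ../lib/fifo.v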
From: <dav...@us...> - 2010-03-02 17:39:31
|
Revision: 88 http://fbt.svn.sourceforge.net/fbt/?rev=88&view=rev Author: dave_infj Date: 2010-03-02 17:39:24 +0000 (Tue, 02 Mar 2010) Log Message: ----------- * Add new feature: entity aliases. Since coregen objects are not parameterisable (from HDL source), this feature provides a mechanism whereby HDL source can instantiate an entity name that is in fact an /alias/ for some other coregen actually defined with a .XCO file somewhere in the library path. When alias pairs ($instantiated_entity=$actual_entity) are provided on the command-line, the dependency resolver will use $actual_entity to satisfiy a dependency for $instantiated_entity - but only if $instantiated_entity is not provided explicitly elsewhere. The aliased entity name as derived from the actual entity name will appear in coregen.mf and modelsim.mf. During the XCO copy phase, the alias entity name is substituted for the actual entity name so that coregen will produce appropriately named objects. The variable ALIASES should be defined in the project Makefile as ie=ae pairs. NB: No spaces allowed. NB: This feature only works with coregen objects. Other HDL entities cannot be aliased in this way. * Clarity enhancement: Error messages and --dumpdeps will now output the XCO path, instead of the resulting HDL source path, for coregen objects. Modified Paths: -------------- trunk/bin/Makefile.inc trunk/bin/mkvdeps.py trunk/bin/mkvproj.py trunk/bin/tool_common.py trunk/bin/tool_sim_modelsim.py trunk/bin/tool_synth_synplify.py Modified: trunk/bin/Makefile.inc =================================================================== --- trunk/bin/Makefile.inc 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/Makefile.inc 2010-03-02 17:39:24 UTC (rev 88) @@ -46,6 +46,8 @@ # SIM_DAR Simulation disambiguation rules # SYN_DAR Synthesis disambiguation rules # +# ALIASES Coregen aliases +# # VCOM_OPTS Options for ModelSim VHDL compiler # VLOG_OPTS Options for ModelSim Verilog compiler # @@ -99,12 +101,14 @@ # # Add mandatory arguments -SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach F,$(SIMLIBPATH) ,-l $(F)) \ - $(foreach F,$(SIM_DAR) ,-D $(F)) -SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach F,$(SYNTHLIBPATH),-l $(F)) \ - $(foreach F,$(SYN_CONSTR) ,-k $(F)) \ - $(foreach F,$(SYN_DAR) ,-D $(F)) -NGDBUILD_OPTS += -p $(PART) -a $(foreach F,$(NGOLIBPATH) ,-sd $(F)) +SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach P,$(SIMLIBPATH) ,-l $(P)) \ + $(foreach R,$(SIM_DAR) ,-D $(R)) \ + $(foreach A,$(ALIASES) ,-a $(A)) +SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach P,$(SYNTHLIBPATH),-l $(P)) \ + $(foreach F,$(SYN_CONSTR) ,-k $(F)) \ + $(foreach R,$(SYN_DAR) ,-D $(R)) \ + $(foreach A,$(ALIASES) ,-a $(A)) +NGDBUILD_OPTS += -p $(PART) -a $(foreach P,$(NGOLIBPATH) ,-sd $(P)) MAP_OPTS += -w -p $(PART) -pr b PAR_OPTS += -w BITGEN_OPTS += -w Modified: trunk/bin/mkvdeps.py =================================================================== --- trunk/bin/mkvdeps.py 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/mkvdeps.py 2010-03-02 17:39:24 UTC (rev 88) @@ -108,10 +108,12 @@ return dep_list -def resolve_deps(top_ent, dep_list, disambig): +def resolve_deps(cfg): """ Resolve the dependencies arising from the top module as specified in the - configuration. Returns an ordered list of sources. + configuration, including any aliased entities. Returns an ordered list of + sources. + NB: At present, aliases are supported on XCO objects only. 
""" def what_srcs(what_ent): @@ -119,22 +121,37 @@ Return a list of files that depend on the specified entity """ ws = [] - for ent, allcand in dep_list.iteritems(): + for ent, allcand in cfg.dep_list.iteritems(): for hdl_src, deps, core_src in allcand: if what_ent in deps: ws.append(hdl_src) return ws - def disambiguate(ent): + def disambiguate(ent, alias = ''): """ Error check, select and return the dependencies for the given entity - according to the disambiguation rules. + according to the disambiguation and aliasing rules. """ + # # Check to make sure we know about unresolved entity ent - if ent not in dep_list: - raise Panic( """\ + # + if ent not in cfg.dep_list: + if ent in cfg.aliases: # ent might be an alias + return disambiguate( cfg.aliases[ent], ent ) + else: # If not, raise an error + if alias: + raise Panic( """\ +real entity %s of alias %s unknown. +The following sources depend on %s: +\t%s""" % + (ent, + alias, + alias, + '\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(alias)] ) ) ) + else: + raise Panic( """\ entity %s unknown. The following sources depend on %s: \t%s""" % @@ -142,17 +159,19 @@ ent, '\n\t'.join( [relpath(hdl_src) for hdl_src in what_srcs(ent)] ) ) ) + # # Figure out which entity to return - if len( dep_list[ent] ) == 1: + # + if len( cfg.dep_list[ent] ) == 1: # When there is only one candidate, there likely will be no # disambiguation rules that would match it, so just use it. - return dep_list[ent][0] + resolved_ent = cfg.dep_list[ent][0] else: # Otherwise, use the disambiguation rules to select a candidate. try: filtered_list = [] - for hdl_src, deps, core_src in dep_list[ent]: - if disambig[ent].search( os.path.abspath( hdl_src ) ): + for hdl_src, deps, core_src in cfg.dep_list[ent]: + if cfg.disambig[ent].search( os.path.abspath( hdl_src ) ): filtered_list.append( (hdl_src, deps, core_src) ) except KeyError: # There is no rule for this entity, which is an error. @@ -160,8 +179,8 @@ no rule for disambiguating entity %s with multiple candidates: \t%s """ % (ent, - '\n\t'.join( [relpath(hdl_src) - for hdl_src, deps, core_src in dep_list[ent]] )) ) + '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src) + for hdl_src, deps, core_src in cfg.dep_list[ent]] )) ) # Sanity: check that there is exactly one match if len(filtered_list) == 0: @@ -170,25 +189,36 @@ raise Panic( """\ no candidates match specified rule for entity %s: \t%s -""" % (ent, '\n\t'.join( [relpath(hdl_src) - for hdl_src, deps, core_src in dep_list[ent]] )) ) +""" % (ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src) + for hdl_src, deps, core_src in cfg.dep_list[ent]] )) ) elif len(filtered_list) != 1: raise Panic( """\ Still %d candidates left after applying disambiguation rule for entity %s: \t%s -""" % (len(filtered_list), ent, '\n\t'.join( [relpath(hdl_src) +""" % (len(filtered_list), ent, '\n\t'.join( [relpath(core_src) if core_src else relpath(hdl_src) for hdl_src, deps, core_src in filtered_list] )) ) # We have exactly one candidate left - return filtered_list[0] - - + resolved_ent = filtered_list[0] + + # + # If the resolved entity was an alias, then do sanity checking and + # translate the hdl_src name appropriately. + # NB: the coregen handling code must still translate the entity name + # within the XCO project file. 
+ if alias: + hdl_src, deps, core_src = resolved_ent + if not core_src: + raise Panic( 'aliased entity %s resolved to non-CoreGen object %s' % (alias, ent) ) + return subst_basename(hdl_src, alias), deps, core_src, alias + else: + return resolved_ent + ('',) ########################################################################### # unresolved: list of entities yet to be resolved - unresolved = [top_ent] + unresolved = [cfg.top_ent] # resolved: list of ordered resolved dependencies in (ent, hdl_src) tuples resolved = [] @@ -196,10 +226,10 @@ unres_ent = unresolved.pop(0) # Disambiguate as required - hdl_src, deps, core_src = disambiguate(unres_ent) + hdl_src, deps, core_src, alias = disambiguate(unres_ent) # Prepend the source file which satisfies unresolved entity ent - resolved.insert(0, (unres_ent, hdl_src, deps, core_src)) + resolved.insert(0, (unres_ent, hdl_src, deps, core_src, alias)) # Append entity ent's own dependencies for later consideration unresolved += deps Modified: trunk/bin/mkvproj.py =================================================================== --- trunk/bin/mkvproj.py 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/mkvproj.py 2010-03-02 17:39:24 UTC (rev 88) @@ -63,7 +63,9 @@ -v, --verbose print filenames as they are processed -V, --version print version -h, --help print this message -x + + -a, --alias <ie=ae> XCO entity alias + instantiated entity=actual entity -d, --dir output directory -l, --libpath dependency search paths (required) -r, --root name of top module (required) @@ -129,9 +131,12 @@ for ent in entities: print "Entity %s:" % (ent) for hdl_src, deps, core_src in cfg.dep_list[ent]: - if cfg.relative_paths: - hdl_src = relpath(hdl_src) - print "\tin %s:" % (hdl_src) + if core_src: + src = relpath(core_src) if cfg.relative_paths else core_src + else: + src = relpath(hdl_src) if cfg.relative_paths else hdl_src + + print "\tin %s:" % (src) if deps: print "\t\t%s" % '\n\t\t'.join( deps ) else: @@ -144,8 +149,9 @@ # Parse options # try: - opts, args = getopt.gnu_getopt( argv[1:], 'd:D:l:p:r:t:k:cChvV', - ['dir=', + opts, args = getopt.gnu_getopt( argv[1:], 'a:d:D:l:p:r:t:k:cChvV', + ['alias=', + 'dir=', 'disambiguate=', 'libpath=', 'part=', @@ -168,6 +174,7 @@ cfg = tool_common.Config() for arg, val in opts: if arg in ['-v', '--verbose']: cfg.verbose = True + if arg in ['-a', '--alias']: cfg.add_alias(val) if arg in ['-d', '--dir']: cfg.output_dir = val if arg in ['-D', '--disambiguate']: cfg.add_dar(val) if arg in ['-l', '--libpath']: cfg.libpath += val.split(':') @@ -223,12 +230,11 @@ raise Panic("top level entity %s unknown" % cfg.top_ent) # Try to resolve dependencies - cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list, - cfg.disambig ) + cfg.resolved_list = mkvdeps.resolve_deps( cfg ) if cfg.verbose: print '\n'.join( [hdl_src - for ent, hdl_src, deps, core_src in cfg.resolved_list] ) + for ent, hdl_src, deps, core_src, alias in cfg.resolved_list] ) print '\n'.join( [constr for constr in cfg.constraints] ) Modified: trunk/bin/tool_common.py =================================================================== --- trunk/bin/tool_common.py 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/tool_common.py 2010-03-02 17:39:24 UTC (rev 88) @@ -61,6 +61,7 @@ self.output_dir = '.' 
self.libpath = [] self.disambig = {} + self.aliases = {} self.part = '<unspecified>' self.top_ent = '' self.tool = 'default' @@ -68,6 +69,23 @@ self.relative_paths = True self.constraints = [] + def add_alias( self, alias ): + """ + Add entity alias of the form "<ie>=<ae>" where + ie = instantiated entity + ae = actual entity to substitute + + NB: This only works for entities implemented by an XCO core + """ + + alias_split = alias.split('=') + if len(alias_split) != 2: + raise Panic( "Bad alias %s" % alias ) + + ie, ae = alias_split + self.aliases[ie.strip()] = ae.strip() + + def add_dar( self, rule ): """ Add a disambiguation rule @@ -133,9 +151,10 @@ return part, family, pkg, speed -def copy_xco( src, dst, partspec ): +def copy_xco( src, dst, partspec, ent_name='' ): """ - Copy an XCO project file, replacing the part spec info as appropriate + Copy an XCO project file, replacing the part spec info as + appropriate, and the entity name (if set) """ attrs = [] part, family, pkg, speed = parse_partspec( partspec ) @@ -165,6 +184,10 @@ if attr[1].upper() == 'DEVICEFAMILY': attr[3] = family if attr[1].upper() == 'PACKAGE' : attr[3] = pkg if attr[1].upper() == 'SPEEDGRADE' : attr[3] = speed + + if attr[0].upper() == 'CSET': + if attr[1].upper() == 'COMPONENT_NAME' \ + and ent_name : attr[3] = ent_name d.write( ' '.join( attr ) + '\n' ) @@ -175,7 +198,7 @@ def write_coregen_mf(cfg, cores): """ - Write out a coregen makefile from the list of (hdl_src, core_src). + Write out a coregen makefile from the list of (hdl_src, core_src, alias). """ makefile = os.path.join(cfg.output_dir, COREGEN_MK) @@ -216,13 +239,15 @@ clean: \trm -rf %s -""" % (' '.join( [hdl_src for hdl_src, core_src in cores] ), +""" % (' '.join( [hdl_src for hdl_src, core_src, alias in cores] ), xco_tmp_dir) ) - for hdl_src, core_src in cores: + for hdl_src, core_src, alias in cores: # Copy coregen project file out_xco = os.path.join( xco_tmp_dir, os.path.split( core_src )[1] ) - copy_xco( core_src, out_xco, cfg.part ) + if alias: + out_xco = subst_basename( out_xco, alias ) + copy_xco( core_src, out_xco, cfg.part, alias ) # Write out coregen invocation rules mf.write( """\ Modified: trunk/bin/tool_sim_modelsim.py =================================================================== --- trunk/bin/tool_sim_modelsim.py 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/tool_sim_modelsim.py 2010-03-02 17:39:24 UTC (rev 88) @@ -88,7 +88,7 @@ # Write out default target mf.write( "all: %s\n\n" % (msim_lib(cfg.top_ent)) ) - for ent, hdl_src, deps, core_src in cfg.resolved_list: + for ent, hdl_src, deps, core_src, alias in cfg.resolved_list: rel_hdl_src = tool_common.rel_src( cfg, hdl_src ) # Write rules to express this source's dependencies, if any @@ -109,7 +109,7 @@ # If it's a core, add it to the cores list if core_src: - cores.append( (hdl_src, core_src) ) + cores.append( (hdl_src, core_src, alias) ) # Include core rules, if any if cores: Modified: trunk/bin/tool_synth_synplify.py =================================================================== --- trunk/bin/tool_synth_synplify.py 2010-03-02 17:20:17 UTC (rev 87) +++ trunk/bin/tool_synth_synplify.py 2010-03-02 17:39:24 UTC (rev 88) @@ -64,14 +64,14 @@ # Generate sources and cores list srcs = [] cores = [] - for ent, hdl_src, deps, core_src in cfg.resolved_list: + for ent, hdl_src, deps, core_src, alias in cfg.resolved_list: srcs.append( 'add_file %s "%s"' % ( lang_flag(os.path.splitext(hdl_src)[1]), tool_common.rel_src(cfg, hdl_src) ) ) # If it's a core, add it to the cores list if 
core_src: - cores.append( (hdl_src, core_src) ) + cores.append( (hdl_src, core_src, alias) ) # Unpack partspec part, family, pkg, speed = tool_common.parse_partspec( cfg.part ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
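For readers following the new alias feature end to end, here is a minimal sketch of the flow (the entity names fifo_rx and fifo_generic are invented for illustration and do not come from the tree):

    cfg = tool_common.Config()
    cfg.add_alias('fifo_rx=fifo_generic')   # as given by -a fifo_rx=fifo_generic
    # cfg.aliases is now {'fifo_rx': 'fifo_generic'}
    #
    # When disambiguate() meets the unknown entity 'fifo_rx', it retries with
    # cfg.aliases['fifo_rx'], insists the winning candidate is backed by an XCO
    # core, and renames the generated HDL to the instantiated name:
    #     subst_basename('cores/fifo_generic.vhd', 'fifo_rx')
    #     # -> 'cores/fifo_rx.vhd'
    # write_coregen_mf() then passes the alias to copy_xco(), which rewrites the
    # CSET COMPONENT_NAME attribute in the copied XCO project to match.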
From: <dav...@us...> - 2010-03-02 17:20:25
Revision: 87 http://fbt.svn.sourceforge.net/fbt/?rev=87&view=rev Author: dave_infj Date: 2010-03-02 17:20:17 +0000 (Tue, 02 Mar 2010) Log Message: ----------- add base-name substitution function Modified Paths: -------------- trunk/bin/util.py Modified: trunk/bin/util.py =================================================================== --- trunk/bin/util.py 2010-03-01 18:11:43 UTC (rev 86) +++ trunk/bin/util.py 2010-03-02 17:20:17 UTC (rev 87) @@ -50,7 +50,16 @@ def __str__(self): return "%s: panic: %s" % (prog_name(), self.value) + +def subst_basename(pathspec, new_base): + """ + Substitute the basename component of pathspec with new_base + """ + path, old_filename = os.path.split(pathspec) + _, old_ext = os.path.splitext(old_filename) + return os.path.join( path, new_base + old_ext ) + # relpath.py # R.Barran 30/08/2004 This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
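Two illustrative calls (paths invented) showing that only the base name changes while the directory and extension are preserved:

    subst_basename('cores/fifo_generic.xco', 'fifo_rx')
    # -> 'cores/fifo_rx.xco'
    subst_basename('/tmp/mkvproj-xyz/blk_mem.vhd', 'ram_2k')
    # -> '/tmp/mkvproj-xyz/ram_2k.vhd'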
From: <dav...@us...> - 2010-03-01 18:11:49
Revision: 86 http://fbt.svn.sourceforge.net/fbt/?rev=86&view=rev Author: dave_infj Date: 2010-03-01 18:11:43 +0000 (Mon, 01 Mar 2010) Log Message: ----------- write_coregen_mf() now calls 'make -f coregen.mk clean' before it overwrites it Modified Paths: -------------- trunk/bin/tool_common.py Modified: trunk/bin/tool_common.py =================================================================== --- trunk/bin/tool_common.py 2010-03-01 17:57:41 UTC (rev 85) +++ trunk/bin/tool_common.py 2010-03-01 18:11:43 UTC (rev 86) @@ -178,6 +178,13 @@ Write out a coregen makefile from the list of (hdl_src, core_src). """ + makefile = os.path.join(cfg.output_dir, COREGEN_MK) + + # If a makefile already exists, tell it to invoke its clean target to clear + # up its $TEMPDIR mess + if os.path.exists( makefile ): + os.system( "make -f %s clean" % makefile ) + # Make a temporary directory to hold the temporary coregen projects xco_tmp_dir = tempfile.mkdtemp( prefix='%s-' % prog_name() ) @@ -187,7 +194,7 @@ sys.stderr.write( 'mkdir %s\n' % (cores_dir) ) os.mkdir( cores_dir ) - with open( os.path.join(cfg.output_dir, COREGEN_MK), 'w' ) as mf: + with open( makefile, 'w' ) as mf: # Write out header mf.write( """\ # Coregen makefile automatically generated by %s. Do not edit! This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dav...@us...> - 2010-03-01 17:57:50
Revision: 85 http://fbt.svn.sourceforge.net/fbt/?rev=85&view=rev Author: dave_infj Date: 2010-03-01 17:57:41 +0000 (Mon, 01 Mar 2010) Log Message: ----------- Add --dumpdeps debugging flag: dumps dependencies database. This somewhat changes possible programme flow. Not everything needs to be done in a dumpdeps operation (like resolving dependencies), so: * Dependency loading code moved into its own method in mkvproj.py * Disambiguation handling code moved into cfg object * Control flow forks in main() depending on whether a dumpdeps operation or not * When dumping deps, don't enforce specifying -r etc; don't process disambiguation rules; don't resolve dependencies. Added a bit of extra sanity checking (specified root unknown etc) Added keys() method to DepList so that it can be iterated over during deps dump tool_common.py: When processing coregen descriptors - instead of pointing coregen straight at the source descriptor - make a copy of the descriptor, changing the part spec. This means that, where appropriate, cores targeted for one device can be retargeted for another on the fly. The modified XCO files are put in $TEMPDIR, and a 'clean' target is added into coregen.mk so that the temp directory is cleaned up. The timestamp on the copied file is preserved from the original so that 'make' won't rebuild all cores when nothing has changed in the real source XCO file. In aid of that, move partspec processing out of tool_synth_syn and into tool_common. Modified Paths: -------------- trunk/bin/DepList.py trunk/bin/mkvproj.py trunk/bin/tool_common.py trunk/bin/tool_synth_synplify.py Modified: trunk/bin/DepList.py =================================================================== --- trunk/bin/DepList.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/DepList.py 2010-03-01 17:57:41 UTC (rev 85) @@ -138,6 +138,11 @@ os.path.join(path, hdl_src), deps ) + def keys(self): + """ + Return a list of entities in the dependency db + """ + return self.list.keys() def iterkeys(self): return self.list.iterkeys() Modified: trunk/bin/mkvproj.py =================================================================== --- trunk/bin/mkvproj.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/mkvproj.py 2010-03-01 17:57:41 UTC (rev 85) @@ -73,6 +73,8 @@ -C, --no-cache-deps do not use cached dependency information --no-relative do not make path names relative (to output_dir) + --dumpdeps (debugging) dump dependency database + Synthesis options: -p, --part target FPGA spec (required) -k, --constraints constraints file @@ -96,6 +98,47 @@ sys.exit(0) +def load_dep_db( cfg ): + """ + Populate the dependencies database + """ + + # If cache-mode isn't specified, then assume use cached mode if the project + # directory (containing project output) contains a cache file (.depends). + # This is a somewhat arbitrary assumption, but it is probably correct most + # of the time. It can always be overridden from the commandline. 
+ if cfg.cache_deps == None: + cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir, + tool_common.DEPS_FILE) ) + # Build source list + if cfg.cache_deps: + sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) ) + cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath ) + else: + cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath ) + + + +def dep_dump( cfg ): + """ + Dump the dependency database + """ + + print " * Dependency database dump *\n" + entities = sorted( cfg.dep_list.keys() ) + for ent in entities: + print "Entity %s:" % (ent) + for hdl_src, deps, core_src in cfg.dep_list[ent]: + if cfg.relative_paths: + hdl_src = relpath(hdl_src) + print "\tin %s:" % (hdl_src) + if deps: + print "\t\t%s" % '\n\t\t'.join( deps ) + else: + print "\t\t(none)" + print + + def main(argv): # # Parse options @@ -114,18 +157,19 @@ 'no-relative', 'help', 'verbose', - 'version'] ) + 'version', + 'dumpdeps'] ) except getopt.GetoptError, e: raise Panic( e ) - # Temporary list - dlist = [] + # Temporary flag + do_dump = False cfg = tool_common.Config() for arg, val in opts: if arg in ['-v', '--verbose']: cfg.verbose = True if arg in ['-d', '--dir']: cfg.output_dir = val - if arg in ['-D', '--disambiguate']: dlist.append(val) + if arg in ['-D', '--disambiguate']: cfg.add_dar(val) if arg in ['-l', '--libpath']: cfg.libpath += val.split(':') if arg in ['-p', '--part']: cfg.part = val if arg in ['-r', '--root']: cfg.top_ent = val @@ -135,76 +179,66 @@ if arg in ['--no-relative']: cfg.relative_paths = False if arg in ['-k', '--constraints']: cfg.constraints += val.split(':') if arg in ['-h', '--help']: print_help() + if arg in ['--dumpdeps']: do_dump = True if arg in ['-V', '--version']: sys.stderr.write( '$Id$\n' ) sys.exit(0) # Sanity checks - if args == []: - print "%s: no operation specified" % (prog_name()) - print_help() + if not cfg.libpath: + raise Panic("no library paths specified (-l)") - cfg.oper = args.pop(0) - if not os.path.isdir(cfg.output_dir): raise Panic("output directory %s doesn't exist" % (cfg.output_dir) ) - if not cfg.top_ent: - raise Panic("no top level module specified") + # + # Do job + # - # Convert the list of disambiguation rules into a map indexed by entity - for d in dlist: - try: - ent, regex = d.split('/', 1) - except ValueError: - ent, regex = None, None + if do_dump: + # Load deps + load_dep_db( cfg ) - if not ent or regex[-1] != '/': - raise Panic( "Bad disambiguation rule %s" % (d) ) + # Dump db + dep_dump( cfg ) + else: + # More sanity checks + if args == []: + print "%s: no operation specified" % (prog_name()) + print_help() - # Trim trailing / - regex = regex[:-1] + cfg.oper = args.pop(0) - try: - cfg.disambig[ent] = re.compile( regex ) - except re.error: - raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex)) + if not cfg.top_ent: + raise Panic("no top level module specified (-r)") - # If cache-mode isn't specified, then assume use cached mode if the project - # directory (containing project output) contains a cache file (.depends). - # This is a somewhat arbitrary assumption, but it is probably correct most - # of the time. It can always be overridden from the commandline. 
- if cfg.cache_deps == None: - cfg.cache_deps = os.path.isfile( os.path.join(cfg.output_dir, - tool_common.DEPS_FILE) ) - # - # Do job - # + if cfg.part == '<unspecified>': + sys.stderr.write( "%s: warning: no partspec specified (-p)\n" % prog_name() ) - # Build source list - if cfg.cache_deps: - sys.stderr.write( '%s: using cached dependencies\n\n' % (prog_name()) ) - cfg.dep_list = mkvdeps.cache_get_all_deps( cfg.libpath ) - else: - cfg.dep_list = mkvdeps.source_get_all_deps( cfg.libpath ) + # Load deps + load_dep_db( cfg ) - # Try to resolve dependencies - cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list, - cfg.disambig ) + # Check to make sure top level is known about + if cfg.top_ent not in cfg.dep_list: + raise Panic("top level entity %s unknown" % cfg.top_ent) - if cfg.verbose: - print '\n'.join( [hdl_src - for ent, hdl_src, deps, core_src in cfg.resolved_list] ) - print '\n'.join( [constr - for constr in cfg.constraints] ) - - # Write out project - try: - exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) ) - except ImportError: - raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) + # Try to resolve dependencies + cfg.resolved_list = mkvdeps.resolve_deps( cfg.top_ent, cfg.dep_list, + cfg.disambig ) - tool.write_project( cfg ) + if cfg.verbose: + print '\n'.join( [hdl_src + for ent, hdl_src, deps, core_src in cfg.resolved_list] ) + print '\n'.join( [constr + for constr in cfg.constraints] ) + + # Write out project + try: + exec( 'import tool_%s_%s as tool' % (cfg.oper, cfg.tool) ) + except ImportError: + raise Panic( "Unknown combination %s/%s" % (cfg.oper, cfg.tool) ) + + tool.write_project( cfg ) if __name__ == '__main__': Modified: trunk/bin/tool_common.py =================================================================== --- trunk/bin/tool_common.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/tool_common.py 2010-03-01 17:57:41 UTC (rev 85) @@ -32,8 +32,9 @@ import sys import os +import re +import tempfile - # # Constants # @@ -67,6 +68,28 @@ self.relative_paths = True self.constraints = [] + def add_dar( self, rule ): + """ + Add a disambiguation rule + """ + + try: + ent, regex = rule.split('/', 1) + except ValueError: + ent, regex = None, None + + if not ent or regex[-1] != '/': + raise Panic( "Bad disambiguation rule %s" % (rule) ) + + # Trim trailing / + regex = regex[:-1] + + try: + self.disambig[ent] = re.compile( regex ) + except re.error: + raise Panic("Bad regex in disambiguation rule %s: %s" % (d, regex)) + + def rel_src(cfg, hdl_src): """ @@ -83,11 +106,81 @@ return hdl_src +# Synthesis: FPGA Family name expansion from shorthand +def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2 + """ + Parse a Xilinx partspec into (part, family, package, speed) + """ + + match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps.upper() ) + if not match: + raise Panic( "unknown partspec %s" % (ps) ) + + part, family, pkg, speed = match.groups() + + try: + family = {'2V' : 'VIRTEX2', + '2VP' : 'VIRTEX2P', + '2S' : 'SPARTAN2', + '2SE' : 'SPARTAN2E', + '3S' : 'SPARTAN3'}[family] + except: + if family[0].isdigit(): + family = 'VIRTEX'+family[0] + else: + raise Panic( 'unknown family %s in partspec' % k ) + + return part, family, pkg, speed + + +def copy_xco( src, dst, partspec ): + """ + Copy an XCO project file, replacing the part spec info as appropriate + """ + attrs = [] + part, family, pkg, speed = parse_partspec( partspec ) + + with open(src) as s: + for line in s: + # Delete any comments. 
+ try: + line = line[:line.index('#')] + except ValueError: + pass + + kv = line.strip().replace('=', ' = ').split() + if kv: attrs.append( kv ) + + with open(dst, 'w') as d: + d.write( """\ +# Automatically generated by %s. Do not edit! +# Source: %s +# $Id$ + +""" % (prog_name(), src) ) + + for attr in attrs: + if attr[0].upper() == 'SET': + if attr[1].upper() == 'DEVICE' : attr[3] = part + if attr[1].upper() == 'DEVICEFAMILY': attr[3] = family + if attr[1].upper() == 'PACKAGE' : attr[3] = pkg + if attr[1].upper() == 'SPEEDGRADE' : attr[3] = speed + + d.write( ' '.join( attr ) + '\n' ) + + # Update atime and mtime on the newly created file to reflect its source + # This keeps make happy, and prevents unnecessary builds + st = os.stat( src ) + os.utime( dst, (st.st_atime, st.st_mtime) ) + def write_coregen_mf(cfg, cores): """ Write out a coregen makefile from the list of (hdl_src, core_src). """ + # Make a temporary directory to hold the temporary coregen projects + xco_tmp_dir = tempfile.mkdtemp( prefix='%s-' % prog_name() ) + # Make the cores directory, if it doesn't already exist cores_dir = os.path.join( cfg.output_dir, CORES_DIR ) if not os.path.isdir( cores_dir ): @@ -106,17 +199,30 @@ # default target """ % (prog_name()) ) - # Write out default target - mf.write( 'all: %s\n\n' % (' '.join( [hdl_src - for hdl_src, core_src in cores] )) ) + # Write out default and clean targets + mf.write( """\ +all: %s +\t@echo --- +\t@echo --- Made cores +\t@echo --- - # Write out coregen invocation rules +clean: +\trm -rf %s + +""" % (' '.join( [hdl_src for hdl_src, core_src in cores] ), + xco_tmp_dir) ) + for hdl_src, core_src in cores: + # Copy coregen project file + out_xco = os.path.join( xco_tmp_dir, os.path.split( core_src )[1] ) + copy_xco( core_src, out_xco, cfg.part ) + + # Write out coregen invocation rules mf.write( """\ %s: %s -\tcd %s; $(COREGEN) -b ../%s +\tcd %s; $(COREGEN) -b %s """ % (hdl_src, # the build target (sim source) - rel_src(cfg, core_src), # its dependency (core description [.xco]) + out_xco, # its dependency (core description [.xco]) CORES_DIR, # the cores subdirectory - rel_src(cfg, core_src) ) ) # the core description file [.xco] + out_xco ) ) # the core description file [.xco] Modified: trunk/bin/tool_synth_synplify.py =================================================================== --- trunk/bin/tool_synth_synplify.py 2010-03-01 17:29:53 UTC (rev 84) +++ trunk/bin/tool_synth_synplify.py 2010-03-01 17:57:41 UTC (rev 85) @@ -31,7 +31,6 @@ from util import * import os -import re import tool_common @@ -53,29 +52,6 @@ raise Panic( 'unknown HDL source extension %s' % k ) -# Synthesis: FPGA Family name expansion from shorthand -def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2 - match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps ) - if not match: - raise Panic( "unknown partspec %s" % (cfg.part) ) - - part, family, pkg, speed = match.groups() - - try: - return {'2V' : 'VIRTEX2', - '2VP' : 'VIRTEX2P', - '2S' : 'SPARTAN2', - '2SE' : 'SPARTAN2E', - '3S' : 'SPARTAN3'}[family] - except: - if family[0].isdigit(): - family = 'VIRTEX'+family[0] - else: - raise Panic( 'unknown family %s in partspec' % k ) - - return part, family, pkg, speed - - def write_project(cfg): """ Write out a synplify synthesis project from the resolved sources @@ -98,7 +74,7 @@ cores.append( (hdl_src, core_src) ) # Unpack partspec - part, family, pkg, speed = parse_partspec( cfg.part.upper() ) + part, family, pkg, speed = tool_common.parse_partspec( 
cfg.part ) # Write out project file. # @@ -165,7 +141,7 @@ #implementation attributes set_option -vlog_std v2001 -set_option -synthesis_onoff_pragma 0 +set_option -synthesis_onoff_pragma 1 set_option -project_relative_includes 1 """ % (prog_name(), This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
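As a worked example of the relocated parse_partspec(): the xc5vlx330t device is the one named in the r81 log later in this digest, and its package and speed grade follow the comment in the function header; the Spartan-3 spec is invented.

    tool_common.parse_partspec('xc5vlx330tff1738-2')
    # -> ('XC5VLX330T', 'VIRTEX5', 'FF1738', '-2')
    tool_common.parse_partspec('xc3s1500fg456-4')
    # -> ('XC3S1500', 'SPARTAN3', 'FG456', '-4')

The family string '5VLX' is not in the shorthand table, so it falls through to the generic 'VIRTEX' + leading-digit rule and comes back as VIRTEX5.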
From: <dav...@us...> - 2010-03-01 17:30:00
Revision: 84 http://fbt.svn.sourceforge.net/fbt/?rev=84&view=rev Author: dave_infj Date: 2010-03-01 17:29:53 +0000 (Mon, 01 Mar 2010) Log Message: ----------- MAP now seems to require a -w flag (ISE 11) Separate out cores from make clean. Now include make mproper to clean cores as well. Fix placement of -r in SIMP_OPTS Modified Paths: -------------- trunk/bin/Makefile.inc Modified: trunk/bin/Makefile.inc =================================================================== --- trunk/bin/Makefile.inc 2010-03-01 16:58:05 UTC (rev 83) +++ trunk/bin/Makefile.inc 2010-03-01 17:29:53 UTC (rev 84) @@ -99,13 +99,13 @@ # # Add mandatory arguments -SIMP_OPTS += $(foreach F,$(SIMLIBPATH) ,-l $(F)) \ +SIMP_OPTS += -r $(SIMTOP) -p $(PART) $(foreach F,$(SIMLIBPATH) ,-l $(F)) \ $(foreach F,$(SIM_DAR) ,-D $(F)) SYNTH_OPTS += -r $(TOP) -p $(PART) $(foreach F,$(SYNTHLIBPATH),-l $(F)) \ $(foreach F,$(SYN_CONSTR) ,-k $(F)) \ $(foreach F,$(SYN_DAR) ,-D $(F)) NGDBUILD_OPTS += -p $(PART) -a $(foreach F,$(NGOLIBPATH) ,-sd $(F)) -MAP_OPTS += -p $(PART) -pr b +MAP_OPTS += -w -p $(PART) -pr b PAR_OPTS += -w BITGEN_OPTS += -w @@ -144,7 +144,7 @@ make -sf $(lastword $(MAKEFILE_LIST)) -C '{}' deps \; sim: - $(MKVPROJ) sim -r $(SIMTOP) $(SIMP_OPTS) + $(MKVPROJ) sim $(SIMP_OPTS) make -f modelsim.mk all irun: @@ -157,12 +157,16 @@ # Remove build intermediates clean: - rm -rf *~ work/ cores/ transcript vsim.wlf modelsim.mk coregen.mk \ + rm -rf *~ work/ transcript vsim.wlf modelsim.mk \ *.{prj,prd,edf,ncf,log,synlog} build/ \ *_{fpga_editor.out,pad.txt} xlnx_auto_0* \ *.{lst,bgn,bld,drc,map,mrp,ncd,ng[dmo],pcf} \ *.{xml,pad,par,unroutes,xpi,csv,bit,xrpt,ptwx} +mrproper: clean + [ -f coregen.mk ] && make -f coregen.mk clean || true + rm -rf cores/ coregen.mk + ################################################################################ # # Implicit rules This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dav...@us...> - 2010-03-01 16:58:13
Revision: 83 http://fbt.svn.sourceforge.net/fbt/?rev=83&view=rev Author: dave_infj Date: 2010-03-01 16:58:05 +0000 (Mon, 01 Mar 2010) Log Message: ----------- hack to ignore stupid "found ram pair" message. Modified Paths: -------------- trunk/bin/synplify_wrapper.py Modified: trunk/bin/synplify_wrapper.py =================================================================== --- trunk/bin/synplify_wrapper.py 2010-01-26 20:09:03 UTC (rev 82) +++ trunk/bin/synplify_wrapper.py 2010-03-01 16:58:05 UTC (rev 83) @@ -69,6 +69,11 @@ # them to errors to_error = ['BN105', # Cannot apply constraint x to y ] + +# at_ignores is a list of lines beginning with @ which should be ignored. +at_ignores = ['@END', + '@ found ram pair' # God knows what that's all about. + ] def translate_log( log ): @@ -158,7 +163,7 @@ for line in sil: # Want only useful messages (^@), and not @END lines line = line.strip() - if not line.startswith('@') or line == '@END': + if not line.startswith('@') or line in at_ignores: continue match = m_parse_log.search( line ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <dav...@us...> - 2010-01-26 20:09:16
Revision: 82 http://fbt.svn.sourceforge.net/fbt/?rev=82&view=rev Author: dave_infj Date: 2010-01-26 20:09:03 +0000 (Tue, 26 Jan 2010) Log Message: ----------- revert 'is' hack for components; rather, check that the line doesn't contain the keyword 'attributes' Modified Paths: -------------- trunk/bin/parse_vhdl.py Modified: trunk/bin/parse_vhdl.py =================================================================== --- trunk/bin/parse_vhdl.py 2010-01-17 18:39:28 UTC (rev 81) +++ trunk/bin/parse_vhdl.py 2010-01-26 20:09:03 UTC (rev 82) @@ -46,6 +46,8 @@ \w+ \s* : \s* entity \s+ (?: work\.)? (?P<inst> \w+) # instance decl """, re.I|re.X) +m_attribs = re.compile( '(?:\s|^)attribute\s', re.I ) + def parse(hdl_src): """ Determine dependencies for a given file (VHDL mode) @@ -64,6 +66,10 @@ pass # Since VHDL is case insensitive, convert everything to lowercase line = line.lower() + + # HACK: Check that this isn't an attributes declaration, which confuses the main re + if m_attribs.search(line): + continue match = m_dep_vhdl.search(line) if match: @@ -99,16 +105,9 @@ deps.append( match.group('inst') ) # If it's a component decl, add it so long as it's not - # marked to be ignored. - # - # Without writing a proper AST, it's not easy to discern - # between: - # component icon2 - # and - # attribute syn_noprune of icon2: component is true; - # hence 'is' is an explicitly excluded 'component'. + # marked to be ignored comp = match.group('comp') - if comp_filter.accept(comp) and comp != 'is': + if comp_filter.accept(comp): deps.append( comp ) dep_list.add_dep( ent, hdl_src, deps ) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
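To make the new guard concrete, here is how m_attribs behaves on the two kinds of line quoted in the removed comment:

    # skipped before the instantiation/component regex ever sees it:
    parse_vhdl.m_attribs.search('attribute syn_noprune of icon2: component is true;')
    # -> match object (line is ignored)

    # still considered for dependencies:
    parse_vhdl.m_attribs.search('component icon2')
    # -> None (line falls through to m_dep_vhdl)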
From: <dav...@us...> - 2010-01-17 18:39:35
Revision: 81 http://fbt.svn.sourceforge.net/fbt/?rev=81&view=rev Author: dave_infj Date: 2010-01-17 18:39:28 +0000 (Sun, 17 Jan 2010) Log Message: ----------- change lang_flag/family_tab to be functions, with a trap for bad input family_tab now parse_partspec; added support for virtex 5*t (eg, xc5vlx330t); other virtexes generically supported so long as they conform to the pattern 'xc\d' move source file processing outside of print statement, in case an exception is thrown (and a partial file is written) Updated project file options (syn 9/virtex5) Modified Paths: -------------- trunk/bin/tool_synth_synplify.py Modified: trunk/bin/tool_synth_synplify.py =================================================================== --- trunk/bin/tool_synth_synplify.py 2010-01-17 16:20:10 UTC (rev 80) +++ trunk/bin/tool_synth_synplify.py 2010-01-17 18:39:28 UTC (rev 81) @@ -44,53 +44,73 @@ BUILD_DIR = 'build' # Synthesis: synthesis commands for adding HDL source files -lang_flag = {'.vhd' :'-vhdl -lib work', - '.vhdl':'-vhdl -lib work', - '.v' :'-verilog'} +def lang_flag( k ): + try: + return {'.vhd' :'-vhdl -lib work', + '.vhdl':'-vhdl -lib work', + '.v' :'-verilog'}[k] + except: + raise Panic( 'unknown HDL source extension %s' % k ) + # Synthesis: FPGA Family name expansion from shorthand -family_tab = {'2V' : 'VIRTEX2', - '2VP' : 'VIRTEX2P', - '2S' : 'SPARTAN2', - '2SE' : 'SPARTAN2E', - '3S' : 'SPARTAN3'} +def parse_partspec( ps ): # (xc (5vlx) 330) (tff1738) -2 + match = re.search( "(XC(\d[A-Z]+).+)((?:CP|CS|FT|FG|BG|BF|SF|FF|PC|HQ|VQ|PQ|TQ)\d+)(-\d+)", ps ) + if not match: + raise Panic( "unknown partspec %s" % (cfg.part) ) + part, family, pkg, speed = match.groups() + + try: + return {'2V' : 'VIRTEX2', + '2VP' : 'VIRTEX2P', + '2S' : 'SPARTAN2', + '2SE' : 'SPARTAN2E', + '3S' : 'SPARTAN3'}[family] + except: + if family[0].isdigit(): + family = 'VIRTEX'+family[0] + else: + raise Panic( 'unknown family %s in partspec' % k ) + return part, family, pkg, speed + + def write_project(cfg): """ Write out a synplify synthesis project from the resolved sources """ + # write out the synplify project file - cores = [] proj_file = os.path.join( cfg.output_dir, '%s.prj' % (cfg.top_ent) ) + with open( proj_file, 'w' ) as pf: + # Generate sources and cores list + srcs = [] + cores = [] + for ent, hdl_src, deps, core_src in cfg.resolved_list: + srcs.append( 'add_file %s "%s"' % ( + lang_flag(os.path.splitext(hdl_src)[1]), + tool_common.rel_src(cfg, hdl_src) ) ) + + # If it's a core, add it to the cores list + if core_src: + cores.append( (hdl_src, core_src) ) + # Unpack partspec - match = re.search( "(XC(\d[A-Z]+)\d+)([A-Z]+\d+)(-\d+)", cfg.part.upper() ) - if not match: - raise Panic( "unknown partspec %s" % (cfg.part) ) - part, family, pkg, speed = match.groups() + part, family, pkg, speed = parse_partspec( cfg.part.upper() ) + + # Write out project file. + # + # NB: Much of this is hard-coded. The intention is to add flags as + # required to avoid a proliferation of used arguments. - # Write out header pf.write( """\ # Synplify project file automatically generated by %s. Do not edit! 
# $Id$ # # source files -""" % (prog_name()) ) - - # Write out source list - for ent, hdl_src, deps, core_src in cfg.resolved_list: - pf.write( 'add_file %s "%s"\n' % ( - lang_flag[os.path.splitext(hdl_src)[1]], - tool_common.rel_src(cfg, hdl_src) ) ) - - # If it's a core, add it to the cores list - if core_src: - cores.append( (hdl_src, core_src) ) - - # Write out the rest of the project - pf.write( """\ %s # output file @@ -119,11 +139,13 @@ set_option -disable_io_insertion 0 set_option -pipe 1 set_option -update_models_cp 0 +set_option -enable_prepacking 1 set_option -verification_mode 0 set_option -retiming 0 set_option -no_sequential_opt 0 set_option -fixgatedclocks 3 set_option -fixgeneratedclocks 3 +set_option -hier_report 1 #sequential_optimizations options @@ -146,13 +168,15 @@ set_option -synthesis_onoff_pragma 0 set_option -project_relative_includes 1 -""" % ('\n'.join( ['add_file -constraint "%s"' % ( +""" % (prog_name(), + '\n'.join( srcs + + ['add_file -constraint "%s"' % ( tool_common.rel_src(cfg, hdl_src) ) for hdl_src in cfg.constraints] ), os.path.join( BUILD_DIR, '%s.edf' % (cfg.top_ent) ), # Output netlist BUILD_DIR, # Implementation output directory BUILD_DIR, # Active implementation - family_tab[family], # Device family + family, # Device family part, # Device name pkg, # Device package speed ) ) # and speed grade This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
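A quick illustration of the lang_flag() trap (the .sv extension is simply an example of an input the table does not cover):

    lang_flag('.vhd')    # -> '-vhdl -lib work'
    lang_flag('.v')      # -> '-verilog'
    lang_flag('.sv')     # raises Panic: unknown HDL source extension .sv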
From: <dav...@us...> - 2010-01-17 16:20:17
Revision: 80 http://fbt.svn.sourceforge.net/fbt/?rev=80&view=rev Author: dave_infj Date: 2010-01-17 16:20:10 +0000 (Sun, 17 Jan 2010) Log Message: ----------- strip input lines to deal with \r and the like Modified Paths: -------------- trunk/bin/parse_coregen.py Modified: trunk/bin/parse_coregen.py =================================================================== --- trunk/bin/parse_coregen.py 2010-01-15 17:29:09 UTC (rev 79) +++ trunk/bin/parse_coregen.py 2010-01-17 16:20:10 UTC (rev 80) @@ -60,7 +60,7 @@ except ValueError: pass - match = m_parse_xco.search( line ) + match = m_parse_xco.search( line.strip() ) if match: attribs[match.group(1).lower()] = match.group(2) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |