|
From: <hc...@us...> - 2010-01-23 17:26:04
|
Revision: 1812
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1812&view=rev
Author: hchen30
Date: 2010-01-23 17:25:56 +0000 (Sat, 23 Jan 2010)
Log Message:
-----------
1. Enable Execution Order Tool in Build Tool project. This tool is used to statically scan C source code and FV image files to find the dispatch order of each PEIM/DXE DRIVER with their produced/consumed PPI/PROTOCOL.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Table/TableQuery.py
Added Paths:
-----------
trunk/BaseTools/Source/Python/Eot/
trunk/BaseTools/Source/Python/Eot/CLexer.py
trunk/BaseTools/Source/Python/Eot/CParser.py
trunk/BaseTools/Source/Python/Eot/CodeFragment.py
trunk/BaseTools/Source/Python/Eot/CodeFragmentCollector.py
trunk/BaseTools/Source/Python/Eot/Database.py
trunk/BaseTools/Source/Python/Eot/EfiCompressor.pyd
trunk/BaseTools/Source/Python/Eot/Eot.py
trunk/BaseTools/Source/Python/Eot/EotGlobalData.py
trunk/BaseTools/Source/Python/Eot/EotToolError.py
trunk/BaseTools/Source/Python/Eot/FileProfile.py
trunk/BaseTools/Source/Python/Eot/FvImage.py
trunk/BaseTools/Source/Python/Eot/InfParserLite.py
trunk/BaseTools/Source/Python/Eot/LzmaCompressor.pyd
trunk/BaseTools/Source/Python/Eot/Parser.py
trunk/BaseTools/Source/Python/Eot/ParserWarning.py
trunk/BaseTools/Source/Python/Eot/Report.py
trunk/BaseTools/Source/Python/Eot/__init__.py
trunk/BaseTools/Source/Python/Eot/c.py
Added: trunk/BaseTools/Source/Python/Eot/CLexer.py
===================================================================
--- trunk/BaseTools/Source/Python/Eot/CLexer.py (rev 0)
+++ trunk/BaseTools/Source/Python/Eot/CLexer.py 2010-01-23 17:25:56 UTC (rev 1812)
@@ -0,0 +1,4852 @@
+# $ANTLR 3.0.1 C.g 2008-06-04 15:55:54
+
+from antlr3 import *
+from antlr3.compat import set, frozenset
+
+
+# for convenience in actions
+HIDDEN = BaseRecognizer.HIDDEN
+
+# token types
+T114=114
+T115=115
+FloatTypeSuffix=16
+LETTER=11
+T29=29
+T28=28
+T27=27
+T26=26
+T25=25
+EOF=-1
+STRING_LITERAL=9
+FLOATING_POINT_LITERAL=10
+T38=38
+T37=37
+T39=39
+T34=34
+COMMENT=22
+T33=33
+T36=36
+T35=35
+T30=30
+T32=32
+T31=31
+LINE_COMMENT=23
+IntegerTypeSuffix=14
+CHARACTER_LITERAL=8
+T49=49
+T48=48
+T100=100
+T43=43
+T42=42
+T102=102
+T41=41
+T101=101
+T40=40
+T47=47
+T46=46
+T45=45
+T44=44
+T109=109
+T107=107
+T108=108
+T105=105
+WS=19
+T106=106
+T103=103
+T104=104
+T50=50
+LINE_COMMAND=24
+T59=59
+T113=113
+T52=52
+T112=112
+T51=51
+T111=111
+T54=54
+T110=110
+EscapeSequence=12
+DECIMAL_LITERAL=7
+T53=53
+T56=56
+T55=55
+T58=58
+T57=57
+T75=75
+T76=76
+T73=73
+T74=74
+T79=79
+T77=77
+T78=78
+Exponent=15
+HexDigit=13
+T72=72
+T71=71
+T70=70
+T62=62
+T63=63
+T64=64
+T65=65
+T66=66
+T67=67
+T68=68
+T69=69
+IDENTIFIER=4
+UnicodeVocabulary=21
+HEX_LITERAL=5
+T61=61
+T60=60
+T99=99
+T97=97
+BS=20
+T98=98
+T95=95
+T96=96
+OCTAL_LITERAL=6
+T94=94
+Tokens=116
+T93=93
+T92=92
+T91=91
+T90=90
+T88=88
+T89=89
+T84=84
+T85=85
+T86=86
+T87=87
+UnicodeEscape=18
+T81=81
+T80=80
+T83=83
+OctalEscape=17
+T82=82
+
+class CLexer(Lexer):
+
+ grammarFileName = "C.g"
+
+ def __init__(self, input=None):
+ Lexer.__init__(self, input)
+ self.dfa25 = self.DFA25(
+ self, 25,
+ eot = self.DFA25_eot,
+ eof = self.DFA25_eof,
+ min = self.DFA25_min,
+ max = self.DFA25_max,
+ accept = self.DFA25_accept,
+ special = self.DFA25_special,
+ transition = self.DFA25_transition
+ )
+ self.dfa35 = self.DFA35(
+ self, 35,
+ eot = self.DFA35_eot,
+ eof = self.DFA35_eof,
+ min = self.DFA35_min,
+ max = self.DFA35_max,
+ accept = self.DFA35_accept,
+ special = self.DFA35_special,
+ transition = self.DFA35_transition
+ )
+
+
+
+
+
+
+ # $ANTLR start T25
+ def mT25(self, ):
+
+ try:
+ self.type = T25
+
+ # C.g:7:5: ( ';' )
+ # C.g:7:7: ';'
+ self.match(u';')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T25
+
+
+
+ # $ANTLR start T26
+ def mT26(self, ):
+
+ try:
+ self.type = T26
+
+ # C.g:8:5: ( 'typedef' )
+ # C.g:8:7: 'typedef'
+ self.match("typedef")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T26
+
+
+
+ # $ANTLR start T27
+ def mT27(self, ):
+
+ try:
+ self.type = T27
+
+ # C.g:9:5: ( ',' )
+ # C.g:9:7: ','
+ self.match(u',')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T27
+
+
+
+ # $ANTLR start T28
+ def mT28(self, ):
+
+ try:
+ self.type = T28
+
+ # C.g:10:5: ( '=' )
+ # C.g:10:7: '='
+ self.match(u'=')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T28
+
+
+
+ # $ANTLR start T29
+ def mT29(self, ):
+
+ try:
+ self.type = T29
+
+ # C.g:11:5: ( 'extern' )
+ # C.g:11:7: 'extern'
+ self.match("extern")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T29
+
+
+
+ # $ANTLR start T30
+ def mT30(self, ):
+
+ try:
+ self.type = T30
+
+ # C.g:12:5: ( 'static' )
+ # C.g:12:7: 'static'
+ self.match("static")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T30
+
+
+
+ # $ANTLR start T31
+ def mT31(self, ):
+
+ try:
+ self.type = T31
+
+ # C.g:13:5: ( 'auto' )
+ # C.g:13:7: 'auto'
+ self.match("auto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T31
+
+
+
+ # $ANTLR start T32
+ def mT32(self, ):
+
+ try:
+ self.type = T32
+
+ # C.g:14:5: ( 'register' )
+ # C.g:14:7: 'register'
+ self.match("register")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T32
+
+
+
+ # $ANTLR start T33
+ def mT33(self, ):
+
+ try:
+ self.type = T33
+
+ # C.g:15:5: ( 'STATIC' )
+ # C.g:15:7: 'STATIC'
+ self.match("STATIC")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T33
+
+
+
+ # $ANTLR start T34
+ def mT34(self, ):
+
+ try:
+ self.type = T34
+
+ # C.g:16:5: ( 'void' )
+ # C.g:16:7: 'void'
+ self.match("void")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T34
+
+
+
+ # $ANTLR start T35
+ def mT35(self, ):
+
+ try:
+ self.type = T35
+
+ # C.g:17:5: ( 'char' )
+ # C.g:17:7: 'char'
+ self.match("char")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T35
+
+
+
+ # $ANTLR start T36
+ def mT36(self, ):
+
+ try:
+ self.type = T36
+
+ # C.g:18:5: ( 'short' )
+ # C.g:18:7: 'short'
+ self.match("short")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T36
+
+
+
+ # $ANTLR start T37
+ def mT37(self, ):
+
+ try:
+ self.type = T37
+
+ # C.g:19:5: ( 'int' )
+ # C.g:19:7: 'int'
+ self.match("int")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T37
+
+
+
+ # $ANTLR start T38
+ def mT38(self, ):
+
+ try:
+ self.type = T38
+
+ # C.g:20:5: ( 'long' )
+ # C.g:20:7: 'long'
+ self.match("long")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T38
+
+
+
+ # $ANTLR start T39
+ def mT39(self, ):
+
+ try:
+ self.type = T39
+
+ # C.g:21:5: ( 'float' )
+ # C.g:21:7: 'float'
+ self.match("float")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T39
+
+
+
+ # $ANTLR start T40
+ def mT40(self, ):
+
+ try:
+ self.type = T40
+
+ # C.g:22:5: ( 'double' )
+ # C.g:22:7: 'double'
+ self.match("double")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T40
+
+
+
+ # $ANTLR start T41
+ def mT41(self, ):
+
+ try:
+ self.type = T41
+
+ # C.g:23:5: ( 'signed' )
+ # C.g:23:7: 'signed'
+ self.match("signed")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T41
+
+
+
+ # $ANTLR start T42
+ def mT42(self, ):
+
+ try:
+ self.type = T42
+
+ # C.g:24:5: ( 'unsigned' )
+ # C.g:24:7: 'unsigned'
+ self.match("unsigned")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T42
+
+
+
+ # $ANTLR start T43
+ def mT43(self, ):
+
+ try:
+ self.type = T43
+
+ # C.g:25:5: ( '{' )
+ # C.g:25:7: '{'
+ self.match(u'{')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T43
+
+
+
+ # $ANTLR start T44
+ def mT44(self, ):
+
+ try:
+ self.type = T44
+
+ # C.g:26:5: ( '}' )
+ # C.g:26:7: '}'
+ self.match(u'}')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T44
+
+
+
+ # $ANTLR start T45
+ def mT45(self, ):
+
+ try:
+ self.type = T45
+
+ # C.g:27:5: ( 'struct' )
+ # C.g:27:7: 'struct'
+ self.match("struct")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T45
+
+
+
+ # $ANTLR start T46
+ def mT46(self, ):
+
+ try:
+ self.type = T46
+
+ # C.g:28:5: ( 'union' )
+ # C.g:28:7: 'union'
+ self.match("union")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T46
+
+
+
+ # $ANTLR start T47
+ def mT47(self, ):
+
+ try:
+ self.type = T47
+
+ # C.g:29:5: ( ':' )
+ # C.g:29:7: ':'
+ self.match(u':')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T47
+
+
+
+ # $ANTLR start T48
+ def mT48(self, ):
+
+ try:
+ self.type = T48
+
+ # C.g:30:5: ( 'enum' )
+ # C.g:30:7: 'enum'
+ self.match("enum")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T48
+
+
+
+ # $ANTLR start T49
+ def mT49(self, ):
+
+ try:
+ self.type = T49
+
+ # C.g:31:5: ( 'const' )
+ # C.g:31:7: 'const'
+ self.match("const")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T49
+
+
+
+ # $ANTLR start T50
+ def mT50(self, ):
+
+ try:
+ self.type = T50
+
+ # C.g:32:5: ( 'volatile' )
+ # C.g:32:7: 'volatile'
+ self.match("volatile")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T50
+
+
+
+ # $ANTLR start T51
+ def mT51(self, ):
+
+ try:
+ self.type = T51
+
+ # C.g:33:5: ( 'IN' )
+ # C.g:33:7: 'IN'
+ self.match("IN")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T51
+
+
+
+ # $ANTLR start T52
+ def mT52(self, ):
+
+ try:
+ self.type = T52
+
+ # C.g:34:5: ( 'OUT' )
+ # C.g:34:7: 'OUT'
+ self.match("OUT")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T52
+
+
+
+ # $ANTLR start T53
+ def mT53(self, ):
+
+ try:
+ self.type = T53
+
+ # C.g:35:5: ( 'OPTIONAL' )
+ # C.g:35:7: 'OPTIONAL'
+ self.match("OPTIONAL")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T53
+
+
+
+ # $ANTLR start T54
+ def mT54(self, ):
+
+ try:
+ self.type = T54
+
+ # C.g:36:5: ( 'CONST' )
+ # C.g:36:7: 'CONST'
+ self.match("CONST")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T54
+
+
+
+ # $ANTLR start T55
+ def mT55(self, ):
+
+ try:
+ self.type = T55
+
+ # C.g:37:5: ( 'UNALIGNED' )
+ # C.g:37:7: 'UNALIGNED'
+ self.match("UNALIGNED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T55
+
+
+
+ # $ANTLR start T56
+ def mT56(self, ):
+
+ try:
+ self.type = T56
+
+ # C.g:38:5: ( 'VOLATILE' )
+ # C.g:38:7: 'VOLATILE'
+ self.match("VOLATILE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T56
+
+
+
+ # $ANTLR start T57
+ def mT57(self, ):
+
+ try:
+ self.type = T57
+
+ # C.g:39:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
+ # C.g:39:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T57
+
+
+
+ # $ANTLR start T58
+ def mT58(self, ):
+
+ try:
+ self.type = T58
+
+ # C.g:40:5: ( 'EFIAPI' )
+ # C.g:40:7: 'EFIAPI'
+ self.match("EFIAPI")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T58
+
+
+
+ # $ANTLR start T59
+ def mT59(self, ):
+
+ try:
+ self.type = T59
+
+ # C.g:41:5: ( 'EFI_BOOTSERVICE' )
+ # C.g:41:7: 'EFI_BOOTSERVICE'
+ self.match("EFI_BOOTSERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T59
+
+
+
+ # $ANTLR start T60
+ def mT60(self, ):
+
+ try:
+ self.type = T60
+
+ # C.g:42:5: ( 'EFI_RUNTIMESERVICE' )
+ # C.g:42:7: 'EFI_RUNTIMESERVICE'
+ self.match("EFI_RUNTIMESERVICE")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T60
+
+
+
+ # $ANTLR start T61
+ def mT61(self, ):
+
+ try:
+ self.type = T61
+
+ # C.g:43:5: ( '(' )
+ # C.g:43:7: '('
+ self.match(u'(')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T61
+
+
+
+ # $ANTLR start T62
+ def mT62(self, ):
+
+ try:
+ self.type = T62
+
+ # C.g:44:5: ( ')' )
+ # C.g:44:7: ')'
+ self.match(u')')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T62
+
+
+
+ # $ANTLR start T63
+ def mT63(self, ):
+
+ try:
+ self.type = T63
+
+ # C.g:45:5: ( '[' )
+ # C.g:45:7: '['
+ self.match(u'[')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T63
+
+
+
+ # $ANTLR start T64
+ def mT64(self, ):
+
+ try:
+ self.type = T64
+
+ # C.g:46:5: ( ']' )
+ # C.g:46:7: ']'
+ self.match(u']')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T64
+
+
+
+ # $ANTLR start T65
+ def mT65(self, ):
+
+ try:
+ self.type = T65
+
+ # C.g:47:5: ( '*' )
+ # C.g:47:7: '*'
+ self.match(u'*')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T65
+
+
+
+ # $ANTLR start T66
+ def mT66(self, ):
+
+ try:
+ self.type = T66
+
+ # C.g:48:5: ( '...' )
+ # C.g:48:7: '...'
+ self.match("...")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T66
+
+
+
+ # $ANTLR start T67
+ def mT67(self, ):
+
+ try:
+ self.type = T67
+
+ # C.g:49:5: ( '+' )
+ # C.g:49:7: '+'
+ self.match(u'+')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T67
+
+
+
+ # $ANTLR start T68
+ def mT68(self, ):
+
+ try:
+ self.type = T68
+
+ # C.g:50:5: ( '-' )
+ # C.g:50:7: '-'
+ self.match(u'-')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T68
+
+
+
+ # $ANTLR start T69
+ def mT69(self, ):
+
+ try:
+ self.type = T69
+
+ # C.g:51:5: ( '/' )
+ # C.g:51:7: '/'
+ self.match(u'/')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T69
+
+
+
+ # $ANTLR start T70
+ def mT70(self, ):
+
+ try:
+ self.type = T70
+
+ # C.g:52:5: ( '%' )
+ # C.g:52:7: '%'
+ self.match(u'%')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T70
+
+
+
+ # $ANTLR start T71
+ def mT71(self, ):
+
+ try:
+ self.type = T71
+
+ # C.g:53:5: ( '++' )
+ # C.g:53:7: '++'
+ self.match("++")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T71
+
+
+
+ # $ANTLR start T72
+ def mT72(self, ):
+
+ try:
+ self.type = T72
+
+ # C.g:54:5: ( '--' )
+ # C.g:54:7: '--'
+ self.match("--")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T72
+
+
+
+ # $ANTLR start T73
+ def mT73(self, ):
+
+ try:
+ self.type = T73
+
+ # C.g:55:5: ( 'sizeof' )
+ # C.g:55:7: 'sizeof'
+ self.match("sizeof")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T73
+
+
+
+ # $ANTLR start T74
+ def mT74(self, ):
+
+ try:
+ self.type = T74
+
+ # C.g:56:5: ( '.' )
+ # C.g:56:7: '.'
+ self.match(u'.')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T74
+
+
+
+ # $ANTLR start T75
+ def mT75(self, ):
+
+ try:
+ self.type = T75
+
+ # C.g:57:5: ( '->' )
+ # C.g:57:7: '->'
+ self.match("->")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T75
+
+
+
+ # $ANTLR start T76
+ def mT76(self, ):
+
+ try:
+ self.type = T76
+
+ # C.g:58:5: ( '&' )
+ # C.g:58:7: '&'
+ self.match(u'&')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T76
+
+
+
+ # $ANTLR start T77
+ def mT77(self, ):
+
+ try:
+ self.type = T77
+
+ # C.g:59:5: ( '~' )
+ # C.g:59:7: '~'
+ self.match(u'~')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T77
+
+
+
+ # $ANTLR start T78
+ def mT78(self, ):
+
+ try:
+ self.type = T78
+
+ # C.g:60:5: ( '!' )
+ # C.g:60:7: '!'
+ self.match(u'!')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T78
+
+
+
+ # $ANTLR start T79
+ def mT79(self, ):
+
+ try:
+ self.type = T79
+
+ # C.g:61:5: ( '*=' )
+ # C.g:61:7: '*='
+ self.match("*=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T79
+
+
+
+ # $ANTLR start T80
+ def mT80(self, ):
+
+ try:
+ self.type = T80
+
+ # C.g:62:5: ( '/=' )
+ # C.g:62:7: '/='
+ self.match("/=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T80
+
+
+
+ # $ANTLR start T81
+ def mT81(self, ):
+
+ try:
+ self.type = T81
+
+ # C.g:63:5: ( '%=' )
+ # C.g:63:7: '%='
+ self.match("%=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T81
+
+
+
+ # $ANTLR start T82
+ def mT82(self, ):
+
+ try:
+ self.type = T82
+
+ # C.g:64:5: ( '+=' )
+ # C.g:64:7: '+='
+ self.match("+=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T82
+
+
+
+ # $ANTLR start T83
+ def mT83(self, ):
+
+ try:
+ self.type = T83
+
+ # C.g:65:5: ( '-=' )
+ # C.g:65:7: '-='
+ self.match("-=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T83
+
+
+
+ # $ANTLR start T84
+ def mT84(self, ):
+
+ try:
+ self.type = T84
+
+ # C.g:66:5: ( '<<=' )
+ # C.g:66:7: '<<='
+ self.match("<<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T84
+
+
+
+ # $ANTLR start T85
+ def mT85(self, ):
+
+ try:
+ self.type = T85
+
+ # C.g:67:5: ( '>>=' )
+ # C.g:67:7: '>>='
+ self.match(">>=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T85
+
+
+
+ # $ANTLR start T86
+ def mT86(self, ):
+
+ try:
+ self.type = T86
+
+ # C.g:68:5: ( '&=' )
+ # C.g:68:7: '&='
+ self.match("&=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T86
+
+
+
+ # $ANTLR start T87
+ def mT87(self, ):
+
+ try:
+ self.type = T87
+
+ # C.g:69:5: ( '^=' )
+ # C.g:69:7: '^='
+ self.match("^=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T87
+
+
+
+ # $ANTLR start T88
+ def mT88(self, ):
+
+ try:
+ self.type = T88
+
+ # C.g:70:5: ( '|=' )
+ # C.g:70:7: '|='
+ self.match("|=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T88
+
+
+
+ # $ANTLR start T89
+ def mT89(self, ):
+
+ try:
+ self.type = T89
+
+ # C.g:71:5: ( '?' )
+ # C.g:71:7: '?'
+ self.match(u'?')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T89
+
+
+
+ # $ANTLR start T90
+ def mT90(self, ):
+
+ try:
+ self.type = T90
+
+ # C.g:72:5: ( '||' )
+ # C.g:72:7: '||'
+ self.match("||")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T90
+
+
+
+ # $ANTLR start T91
+ def mT91(self, ):
+
+ try:
+ self.type = T91
+
+ # C.g:73:5: ( '&&' )
+ # C.g:73:7: '&&'
+ self.match("&&")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T91
+
+
+
+ # $ANTLR start T92
+ def mT92(self, ):
+
+ try:
+ self.type = T92
+
+ # C.g:74:5: ( '|' )
+ # C.g:74:7: '|'
+ self.match(u'|')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T92
+
+
+
+ # $ANTLR start T93
+ def mT93(self, ):
+
+ try:
+ self.type = T93
+
+ # C.g:75:5: ( '^' )
+ # C.g:75:7: '^'
+ self.match(u'^')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T93
+
+
+
+ # $ANTLR start T94
+ def mT94(self, ):
+
+ try:
+ self.type = T94
+
+ # C.g:76:5: ( '==' )
+ # C.g:76:7: '=='
+ self.match("==")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T94
+
+
+
+ # $ANTLR start T95
+ def mT95(self, ):
+
+ try:
+ self.type = T95
+
+ # C.g:77:5: ( '!=' )
+ # C.g:77:7: '!='
+ self.match("!=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T95
+
+
+
+ # $ANTLR start T96
+ def mT96(self, ):
+
+ try:
+ self.type = T96
+
+ # C.g:78:5: ( '<' )
+ # C.g:78:7: '<'
+ self.match(u'<')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T96
+
+
+
+ # $ANTLR start T97
+ def mT97(self, ):
+
+ try:
+ self.type = T97
+
+ # C.g:79:5: ( '>' )
+ # C.g:79:7: '>'
+ self.match(u'>')
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T97
+
+
+
+ # $ANTLR start T98
+ def mT98(self, ):
+
+ try:
+ self.type = T98
+
+ # C.g:80:5: ( '<=' )
+ # C.g:80:7: '<='
+ self.match("<=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T98
+
+
+
+ # $ANTLR start T99
+ def mT99(self, ):
+
+ try:
+ self.type = T99
+
+ # C.g:81:5: ( '>=' )
+ # C.g:81:7: '>='
+ self.match(">=")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T99
+
+
+
+ # $ANTLR start T100
+ def mT100(self, ):
+
+ try:
+ self.type = T100
+
+ # C.g:82:6: ( '<<' )
+ # C.g:82:8: '<<'
+ self.match("<<")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T100
+
+
+
+ # $ANTLR start T101
+ def mT101(self, ):
+
+ try:
+ self.type = T101
+
+ # C.g:83:6: ( '>>' )
+ # C.g:83:8: '>>'
+ self.match(">>")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T101
+
+
+
+ # $ANTLR start T102
+ def mT102(self, ):
+
+ try:
+ self.type = T102
+
+ # C.g:84:6: ( '_asm' )
+ # C.g:84:8: '_asm'
+ self.match("_asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T102
+
+
+
+ # $ANTLR start T103
+ def mT103(self, ):
+
+ try:
+ self.type = T103
+
+ # C.g:85:6: ( '__asm' )
+ # C.g:85:8: '__asm'
+ self.match("__asm")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T103
+
+
+
+ # $ANTLR start T104
+ def mT104(self, ):
+
+ try:
+ self.type = T104
+
+ # C.g:86:6: ( 'case' )
+ # C.g:86:8: 'case'
+ self.match("case")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T104
+
+
+
+ # $ANTLR start T105
+ def mT105(self, ):
+
+ try:
+ self.type = T105
+
+ # C.g:87:6: ( 'default' )
+ # C.g:87:8: 'default'
+ self.match("default")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T105
+
+
+
+ # $ANTLR start T106
+ def mT106(self, ):
+
+ try:
+ self.type = T106
+
+ # C.g:88:6: ( 'if' )
+ # C.g:88:8: 'if'
+ self.match("if")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T106
+
+
+
+ # $ANTLR start T107
+ def mT107(self, ):
+
+ try:
+ self.type = T107
+
+ # C.g:89:6: ( 'else' )
+ # C.g:89:8: 'else'
+ self.match("else")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T107
+
+
+
+ # $ANTLR start T108
+ def mT108(self, ):
+
+ try:
+ self.type = T108
+
+ # C.g:90:6: ( 'switch' )
+ # C.g:90:8: 'switch'
+ self.match("switch")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T108
+
+
+
+ # $ANTLR start T109
+ def mT109(self, ):
+
+ try:
+ self.type = T109
+
+ # C.g:91:6: ( 'while' )
+ # C.g:91:8: 'while'
+ self.match("while")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T109
+
+
+
+ # $ANTLR start T110
+ def mT110(self, ):
+
+ try:
+ self.type = T110
+
+ # C.g:92:6: ( 'do' )
+ # C.g:92:8: 'do'
+ self.match("do")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T110
+
+
+
+ # $ANTLR start T111
+ def mT111(self, ):
+
+ try:
+ self.type = T111
+
+ # C.g:93:6: ( 'for' )
+ # C.g:93:8: 'for'
+ self.match("for")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T111
+
+
+
+ # $ANTLR start T112
+ def mT112(self, ):
+
+ try:
+ self.type = T112
+
+ # C.g:94:6: ( 'goto' )
+ # C.g:94:8: 'goto'
+ self.match("goto")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T112
+
+
+
+ # $ANTLR start T113
+ def mT113(self, ):
+
+ try:
+ self.type = T113
+
+ # C.g:95:6: ( 'continue' )
+ # C.g:95:8: 'continue'
+ self.match("continue")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T113
+
+
+
+ # $ANTLR start T114
+ def mT114(self, ):
+
+ try:
+ self.type = T114
+
+ # C.g:96:6: ( 'break' )
+ # C.g:96:8: 'break'
+ self.match("break")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T114
+
+
+
+ # $ANTLR start T115
+ def mT115(self, ):
+
+ try:
+ self.type = T115
+
+ # C.g:97:6: ( 'return' )
+ # C.g:97:8: 'return'
+ self.match("return")
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end T115
+
+
+
+ # $ANTLR start IDENTIFIER
+ def mIDENTIFIER(self, ):
+
+ try:
+ self.type = IDENTIFIER
+
+ # C.g:534:2: ( LETTER ( LETTER | '0' .. '9' )* )
+ # C.g:534:4: LETTER ( LETTER | '0' .. '9' )*
+ self.mLETTER()
+
+ # C.g:534:11: ( LETTER | '0' .. '9' )*
+ while True: #loop1
+ alt1 = 2
+ LA1_0 = self.input.LA(1)
+
+ if (LA1_0 == u'$' or (u'0' <= LA1_0 <= u'9') or (u'A' <= LA1_0 <= u'Z') or LA1_0 == u'_' or (u'a' <= LA1_0 <= u'z')) :
+ alt1 = 1
+
+
+ if alt1 == 1:
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'):
+ self.input.consume();
+
+ else:
+ mse = MismatchedSetException(None, self.input)
+ self.recover(mse)
+ raise mse
+
+
+
+
+ else:
+ break #loop1
+
+
+
+
+
+
+ finally:
+
+ pass
+
+ # $ANTLR end IDENTIFIER
+
+
+
+ # $ANTLR start LETTER
+ def mLETTER(self, ):
+
+ try:
+ # C.g:539:2: ( '$' | 'A' .. 'Z' | 'a' .. 'z' | '_' )
+ # C.g:
+ if self.input.LA(1) == u'$' or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(...
[truncated message content] |
|
From: <lg...@us...> - 2010-01-28 01:46:39
|
Revision: 1819
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1819&view=rev
Author: lgao4
Date: 2010-01-28 01:46:33 +0000 (Thu, 28 Jan 2010)
Log Message:
-----------
1. Extend Platform Build Options to apply only to EDK modules, only to EDKII modules, or to both.
2. Support negative value setting for PCD value.
3. Check whether PCD value is valid for its data type.
4. Correct the generation for patchable PCD in AutoGen code.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/AutoGen/GenC.py
trunk/BaseTools/Source/Python/Common/BuildToolError.py
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-01-26 14:22:57 UTC (rev 1818)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-01-28 01:46:33 UTC (rev 1819)
@@ -355,6 +355,8 @@
self._ToolChainFamily = None
self._BuildRuleFamily = None
self._BuildOption = None # toolcode : option
+ self._EdkBuildOption = None # edktoolcode : option
+ self._EdkIIBuildOption = None # edkiitoolcode : option
self._PackageList = None
self._ModuleAutoGenList = None
self._LibraryAutoGenList = None
@@ -661,12 +663,24 @@
self._BuildRuleFamily = ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
return self._BuildRuleFamily
- ## Return the build options specific to this platform
+ ## Return the build options specific for all modules in this platform
def _GetBuildOptions(self):
if self._BuildOption == None:
self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
return self._BuildOption
+ ## Return the build options specific for EDK modules in this platform
+ def _GetEdkBuildOptions(self):
+ if self._EdkBuildOption == None:
+ self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
+ return self._EdkBuildOption
+
+ ## Return the build options specific for EDKII modules in this platform
+ def _GetEdkIIBuildOptions(self):
+ if self._EdkIIBuildOption == None:
+ self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
+ return self._EdkIIBuildOption
+
## Parse build_rule.txt in $(WORKSPACE)/Conf/build_rule.txt
#
# @retval BuildRule object
@@ -1072,11 +1086,18 @@
#
# @retval options Options expanded
#
- def _ExpandBuildOption(self, Options):
+ def _ExpandBuildOption(self, Options, ModuleStyle=None):
BuildOptions = {}
FamilyMatch = False
FamilyIsNull = True
for Key in Options:
+ if ModuleStyle != None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
@@ -1104,6 +1125,13 @@
return BuildOptions
for Key in Options:
+ if ModuleStyle != None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
@@ -1133,7 +1161,11 @@
# @retval options The options appended with build options in platform
#
def ApplyBuildOption(self, Module):
- PlatformOptions = self.BuildOption
+ # Get the different options for the different style module
+ if Module.AutoGenVersion < 0x00010005:
+ PlatformOptions = self.EdkBuildOption
+ else:
+ PlatformOptions = self.EdkIIBuildOption
ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
if Module in self.Platform.Modules:
PlatformModule = self.Platform.Modules[str(Module)]
@@ -1181,6 +1213,8 @@
ToolChainFamily = property(_GetToolChainFamily)
BuildRuleFamily = property(_GetBuildRuleFamily)
BuildOption = property(_GetBuildOptions) # toolcode : option
+ EdkBuildOption = property(_GetEdkBuildOptions) # edktoolcode : option
+ EdkIIBuildOption = property(_GetEdkIIBuildOptions) # edkiitoolcode : option
BuildCommand = property(_GetBuildCommand)
BuildRule = property(_GetBuildRule)
Modified: trunk/BaseTools/Source/Python/AutoGen/GenC.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-01-26 14:22:57 UTC (rev 1818)
+++ trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-01-28 01:46:33 UTC (rev 1819)
@@ -961,9 +961,46 @@
Array = ''
Value = Pcd.DefaultValue
Unicode = False
- if Pcd.DatumType == 'UINT64':
- if not Value.endswith('ULL'):
- Value += 'ULL'
+ ValueNumber = 0
+ if Pcd.DatumType in ['UINT64', 'UINT32', 'UINT16', 'UINT8']:
+ if Value.upper().startswith('0X'):
+ ValueNumber = int (Value, 16)
+ else:
+ ValueNumber = int (Value)
+ if Pcd.DatumType == 'UINT64':
+ if abs (ValueNumber) >= 0x10000000000000000:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ ValueNumber = 0x10000000000000000 + ValueNumber
+ Value = str (ValueNumber)
+ if not Value.endswith('ULL'):
+ Value += 'ULL'
+ elif Pcd.DatumType == 'UINT32':
+ if abs (ValueNumber) >= 0x100000000:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ ValueNumber = 0x100000000 + ValueNumber
+ Value = str (ValueNumber)
+ elif Pcd.DatumType == 'UINT16':
+ if abs (ValueNumber) >= 0x10000:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ ValueNumber = 0x10000 + ValueNumber
+ Value = str (ValueNumber)
+ elif Pcd.DatumType == 'UINT8':
+ if abs (ValueNumber) >= 0x100:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
+ if ValueNumber < 0:
+ ValueNumber = 0x100 + ValueNumber
+ Value = str (ValueNumber)
if Pcd.DatumType == 'VOID*':
if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
EdkLogger.error("build", AUTOGEN_ERROR,
@@ -1021,7 +1058,7 @@
AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
+ AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
else:
Modified: trunk/BaseTools/Source/Python/Common/BuildToolError.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/BuildToolError.py 2010-01-26 14:22:57 UTC (rev 1818)
+++ trunk/BaseTools/Source/Python/Common/BuildToolError.py 2010-01-28 01:46:33 UTC (rev 1819)
@@ -84,7 +84,7 @@
## Error message of each error code
gErrorMessage = {
- FILE_NOT_FOUND : "File/directory not found",
+ FILE_NOT_FOUND : "File/directory not found in workspace",
FILE_OPEN_FAILURE : "File open failure",
FILE_WRITE_FAILURE : "File write failure",
FILE_PARSE_FAILURE : "File parse failure",
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2010-01-26 14:22:57 UTC (rev 1818)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2010-01-28 01:46:33 UTC (rev 1819)
@@ -73,6 +73,8 @@
EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
+EDK_NAME = 'EDK'
+EDKII_NAME = 'EDKII'
BINARY_FILE_TYPE_FW = 'FW'
BINARY_FILE_TYPE_GUID = 'GUID'
@@ -230,6 +232,19 @@
TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, TAB_PCDS_DYNAMIC_HII_NULL]
TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL]
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE = 'PcdLoadFixAddressPeiCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE = 'PcdLoadFixAddressBootTimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE = 'PcdLoadFixAddressRuntimeCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE = 'PcdLoadFixAddressSmmCodePageNumber'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE_DATA_TYPE = 'UINT32'
+TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_LIST = [TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_PEI_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_DXE_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_RUNTIME_PAGE_SIZE, \
+ TAB_PCDS_PATCHABLE_LOAD_FIX_ADDRESS_SMM_PAGE_SIZE]
+
TAB_DEPEX = 'Depex'
TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
@@ -338,6 +353,7 @@
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
+TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#
# TargetTxt Definitions
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-01-26 14:22:57 UTC (rev 1818)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-01-28 01:46:33 UTC (rev 1819)
@@ -134,6 +134,7 @@
self._LibraryClasses = None
self._Pcds = None
self._BuildOptions = None
+ self._LoadFixAddress = None
## Get architecture
def _GetArch(self):
@@ -185,6 +186,8 @@
elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
if self._SkuName == None:
self._SkuName = Record[1]
+ elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
+ self._LoadFixAddress = Record[1]
# set _Header to non-None in order to avoid database re-querying
self._Header = 'DUMMY'
@@ -309,6 +312,15 @@
self._RtBaseAddress = ''
return self._RtBaseAddress
+ ## Retrieve the top address for the load fix address
+ def _GetLoadFixAddress(self):
+ if self._LoadFixAddress == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._LoadFixAddress == None:
+ self._LoadFixAddress = ''
+ return self._LoadFixAddress
+
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
if self._SkuIds == None:
@@ -513,9 +525,18 @@
def _GetBuildOptions(self):
if self._BuildOptions == None:
self._BuildOptions = {}
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION]
+ #
+ # Retrieve build option for EDKII style module
+ #
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, 'COMMON', EDKII_NAME]
for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- self._BuildOptions[ToolChainFamily, ToolChain] = Option
+ self._BuildOptions[ToolChainFamily, ToolChain, EDKII_NAME] = Option
+ #
+ # Retrieve build option for EDK style module
+ #
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, 'COMMON', EDK_NAME]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
+ self._BuildOptions[ToolChainFamily, ToolChain, EDK_NAME] = Option
return self._BuildOptions
## Retrieve non-dynamic PCD settings
@@ -730,6 +751,7 @@
MakefileName = property(_GetMakefileName)
BsBaseAddress = property(_GetBsBaseAddress)
RtBaseAddress = property(_GetRtBaseAddress)
+ LoadFixAddress = property(_GetLoadFixAddress)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-01-29 11:44:39
|
Revision: 1828
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1828&view=rev
Author: qhuang8
Date: 2010-01-29 11:44:32 +0000 (Fri, 29 Jan 2010)
Log Message:
-----------
1. Add loading fixed address report if the fixed address map file is generated.
2. Add driver execution order report by integrating EOT tool
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Eot/Eot.py
trunk/BaseTools/Source/Python/Eot/Report.py
trunk/BaseTools/Source/Python/build/BuildReport.py
Modified: trunk/BaseTools/Source/Python/Eot/Eot.py
===================================================================
--- trunk/BaseTools/Source/Python/Eot/Eot.py 2010-01-29 06:00:56 UTC (rev 1827)
+++ trunk/BaseTools/Source/Python/Eot/Eot.py 2010-01-29 11:44:32 UTC (rev 1828)
@@ -43,7 +43,7 @@
#
def __init__(self, CommandLineOption=True, IsInit=True, SourceFileList=None, \
IncludeDirList=None, DecFileList=None, GuidList=None, LogFile=None,
- FvFileList="", MapFileList="", Report='Report.html'):
+ FvFileList="", MapFileList="", Report='Report.html', Dispatch=None):
# Version and Copyright
self.VersionNumber = "0.02"
self.Version = "%prog Version " + self.VersionNumber
@@ -58,6 +58,7 @@
self.LogFile = LogFile
self.FvFileList = FvFileList
self.MapFileList = MapFileList
+ self.Dispatch = Dispatch
# Check workspace environment
if "EFI_SOURCE" not in os.environ:
@@ -289,7 +290,7 @@
#
def GenerateReport(self):
EdkLogger.quiet("Generating report file ... ")
- Rep = Report('Report.html', EotGlobalData.gFV)
+ Rep = Report(self.Report, EotGlobalData.gFV, self.Dispatch)
Rep.GenerateReport()
## LoadMapInfo() method
Modified: trunk/BaseTools/Source/Python/Eot/Report.py
===================================================================
--- trunk/BaseTools/Source/Python/Eot/Report.py 2010-01-29 06:00:56 UTC (rev 1827)
+++ trunk/BaseTools/Source/Python/Eot/Report.py 2010-01-29 11:44:32 UTC (rev 1828)
@@ -30,9 +30,12 @@
# @param ReportName: name of the report
# @param FvObj: FV object after parsing FV images
#
- def __init__(self, ReportName = 'Report.html', FvObj = None):
+ def __init__(self, ReportName = 'Report.html', FvObj = None, DispatchName=None):
self.ReportName = ReportName
self.Op = open(ReportName, 'w+')
+ self.DispatchList = None
+ if DispatchName:
+ self.DispatchList = open(DispatchName, 'w+')
self.FvObj = FvObj
self.FfsIndex = 0
self.PpiIndex = 0
@@ -230,7 +233,7 @@
#
def GenerateFfs(self, FfsObj):
self.FfsIndex = self.FfsIndex + 1
- if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08]:
+ if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
FfsGuid = FfsObj.Guid
FfsOffset = FfsObj._OFF_
FfsName = 'Unknonw Ffs Name'
@@ -260,7 +263,7 @@
and Model = %s""" % (FfsGuid, 5001, 1011)
RecordSet = EotGlobalData.gDb.TblReport.Exec(SqlCommand)
if RecordSet != []:
- FfsPath = RecordSet[0][0].replace(EotGlobalData.gMACRO['WORKSPACE'], '.')
+ FfsPath = RecordSet[0][0]
Content = """ <tr>
<tr class='styleFfs' id='FfsHeader%s'>
@@ -272,7 +275,13 @@
</tr>
<tr id='Ffs%s' style='display:none;'>
<td colspan="4"><table width="100%%" border="1">""" % (self.FfsIndex, self.FfsIndex, self.FfsIndex, FfsPath, FfsName, FfsGuid, FfsOffset, FfsType, self.FfsIndex)
-
+
+ if self.DispatchList:
+ if FfsObj.Type in [0x04, 0x06]:
+ self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "P", FfsName, FfsPath))
+ if FfsObj.Type in [0x05, 0x07, 0x08, 0x0A]:
+ self.DispatchList.write("%s %s %s %s\n" % (FfsGuid, "D", FfsName, FfsPath))
+
self.WriteLn(Content)
EotGlobalData.gOP_DISPATCH_ORDER.write('%s\n' %FfsName)
Modified: trunk/BaseTools/Source/Python/build/BuildReport.py
===================================================================
--- trunk/BaseTools/Source/Python/build/BuildReport.py 2010-01-29 06:00:56 UTC (rev 1827)
+++ trunk/BaseTools/Source/Python/build/BuildReport.py 2010-01-29 11:44:32 UTC (rev 1828)
@@ -23,9 +23,11 @@
from datetime import datetime
from Common import EdkLogger
from Common.Misc import GuidStructureByteArrayToGuidString
+from Common.Misc import GuidStructureStringToGuidString
from Common.InfClassObject import gComponentType2ModuleType
from Common.BuildToolError import FILE_OPEN_FAILURE
from Common.BuildToolError import FILE_WRITE_FAILURE
+from Eot.Eot import Eot
## Pattern to extract contents in EDK DXS files
@@ -45,6 +47,17 @@
## Pattern to collect offset, GUID value pair in the flash report intermediate file
gOffsetGuidPattern = re.compile(r"(0x[0-9A-Fa-f]+) ([-A-Fa-f0-9]+)")
+## Pattern to find module base address and entry point in fixed flash map file
+gModulePattern = r"\n\w+\s*\(([^,]+),\s*BaseAddress=%(Address)s,\s*EntryPoint=%(Address)s\)\s*\(GUID=([-0-9A-Fa-f]+)[^)]*\)"
+gMapFileItemPattern = re.compile(gModulePattern % {"Address" : "(-?0[xX][0-9A-Fa-f]+)"})
+
+## Pattern to find all module referenced header files in source files
+gIncludePattern = re.compile(r'#include\s*["<]([^">]+)[">]')
+gIncludePattern2 = re.compile(r"#include\s+EFI_([A-Z_]+)\s*[(]\s*(\w+)\s*[)]")
+
+## Pattern to find the entry point for EDK module using EDKII Glue library
+gGlueLibEntryPoint = re.compile(r"__EDKII_GLUE_MODULE_ENTRY_POINT__\s*=\s*(\w+)")
+
## Tags for section start, end and separator
gSectionStart = ">" + "=" * 118 + "<"
gSectionEnd = "<" + "=" * 118 + ">" + "\n"
@@ -98,8 +111,53 @@
String = textwrap.fill(String, 120)
File.write(String + "\n")
-
##
+# Find all the header file that the module source directly includes.
+#
+# This function scans source code to find all header files the module may
+# include. This is not accurate but very effective to find all the header
+# file the module might include with #include statement.
+#
+# @Source The source file name
+# @IncludePathList The list of include path to find the source file.
+# @IncludeFiles The dictionary of current found include files.
+#
+def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
+ FileContents = open(Source).read()
+ #
+ # Find header files with pattern #include "XXX.h" or #include <XXX.h>
+ #
+ for Match in gIncludePattern.finditer(FileContents):
+ FileName = Match.group(1).strip()
+ for Dir in [os.path.dirname(Source)] + IncludePathList:
+ FullFileName = os.path.normpath(os.path.join(Dir, FileName))
+ if os.path.exists(FullFileName):
+ IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
+ break
+
+ #
+ # Find header files with pattern like #include EFI_PPI_CONSUMER(XXX)
+ #
+ for Match in gIncludePattern2.finditer(FileContents):
+ Key = Match.group(2)
+ Type = Match.group(1)
+ if "ARCH_PROTOCOL" in Type:
+ FileName = "ArchProtocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif "PROTOCOL" in Type:
+ FileName = "Protocol/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif "PPI" in Type:
+ FileName = "Ppi/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ elif "GUID" in Type:
+ FileName = "Guid/%(Key)s/%(Key)s.h" % {"Key" : Key}
+ else:
+ continue
+ for Dir in IncludePathList:
+ FullFileName = os.path.normpath(os.path.join(Dir, FileName))
+ if os.path.exists(FullFileName):
+ IncludeFiles[FullFileName.lower().replace("\\", "/")] = FullFileName
+ break
+
+##
# Reports library information
#
# This class reports the module library subsection in the build report file.
@@ -387,7 +445,7 @@
# @param GlobalPcdReport The platform global PCD class object
# @param ReportType The kind of report items in the final report file
#
- def GenerateReport(self, File, GlobalPcdReport, ReportType):
+ def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, ReportType):
FileWrite(File, gSectionStart)
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")
@@ -438,7 +496,10 @@
if "BUILD_FLAGS" in ReportType:
self.BuildFlagsReport.GenerateReport(File)
-
+
+ if "PREDICTION" in ReportType:
+ GlobalPredictionReport.GenerateReport(File, self.FileGuid)
+
FileWrite(File, gSectionEnd)
##
@@ -640,7 +701,274 @@
FileWrite(File, gSubSectionEnd)
+
##
+# Reports platform and module Prediction information
+#
+# This class reports the platform execution order prediction section and
+# module load fixed address prediction subsection in the build report file.
+#
+class PredictionReport(object):
+ ##
+ # Constructor function for class PredictionReport
+ #
+ # This constructor function generates PredictionReport object for the platform.
+ #
+ # @param self: The object pointer
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Wa):
+ self._MapFileName = os.path.join(Wa.BuildDir, Wa.Name + ".map")
+ self._MapFileParsed = False
+ self._EotToolInvoked = False
+ self._FvDir = Wa.FvDir
+
+ self.FixedMapDict = {}
+ self.ItemList = []
+ self.MaxLen = 0
+
+ self._SourceFileList = os.path.join(Wa.BuildDir, Wa.Name + "_SourceFileList.txt")
+ SourceList = open(self._SourceFileList, "w+")
+
+ self._FfsEntryPoint = {}
+
+ #
+ # Collect all platform reference source files and write to the an intermediate file
+ # for EOT tool to parse.
+ #
+ GuidMap = {}
+ for Pa in Wa.AutoGenObjectList:
+ for Module in Pa.LibraryAutoGenList + Pa.ModuleAutoGenList:
+ #
+ # Add module referenced source files
+ #
+ SourceList.write(str(Module) + "\n")
+ IncludeList = {}
+ for Source in Module.SourceFileList:
+ if os.path.splitext(str(Source))[1].lower() == ".c":
+ SourceList.write(" " + str(Source) + "\n")
+ FindIncludeFiles(Source.Path, Module.IncludePathList, IncludeList)
+ for IncludeFile in IncludeList.values():
+ SourceList.write(" " + IncludeFile + "\n")
+
+ for Guid in Module.PpiList:
+ GuidMap[Guid] = GuidStructureStringToGuidString(Module.PpiList[Guid])
+ for Guid in Module.ProtocolList:
+ GuidMap[Guid] = GuidStructureStringToGuidString(Module.ProtocolList[Guid])
+ for Guid in Module.GuidList:
+ GuidMap[Guid] = GuidStructureStringToGuidString(Module.GuidList[Guid])
+
+ if Module.Guid and not Module.IsLibrary:
+ EntryPoint = " ".join(Module.Module.ModuleEntryPointList)
+ if int(str(Module.AutoGenVersion), 0) >= 0x00010005:
+ RealEntryPoint = "_ModuleEntryPoint"
+ else:
+ RealEntryPoint = EntryPoint
+ if EntryPoint == "_ModuleEntryPoint":
+ CCFlags = Module.BuildOption.get("CC", {}).get("FLAGS", "")
+ Match = gGlueLibEntryPoint.search(CCFlags)
+ if Match:
+ EntryPoint = Match.group(1)
+
+ self._FfsEntryPoint[Module.Guid.upper()] = (EntryPoint, RealEntryPoint)
+
+ SourceList.close()
+
+ #
+ # Write platform referenced GUID list as the input of EOT tool
+ # to calculate module dependency GUID
+ #
+ self._GuidList = os.path.join(Wa.BuildDir, Wa.Name + "_GuidList.txt")
+ GuidList = open(self._GuidList, "w+")
+ for Guid in GuidMap:
+ GuidList.write("%s %s\n" % (Guid, GuidMap[Guid]))
+ GuidList.close()
+
+ #
+ # Collect platform firmware volume list as the input of EOT.
+ #
+ self._FvList = []
+ for Fd in Wa.FdfProfile.FdDict:
+ for FdRegion in Wa.FdfProfile.FdDict[Fd].RegionList:
+ if FdRegion.RegionType != "FV":
+ continue
+ for FvName in FdRegion.RegionDataList:
+ if FvName in self._FvList:
+ continue
+ self._FvList.append(FvName)
+ for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
+ for Section in Ffs.SectionList:
+ try:
+ for FvSection in Section.SectionList:
+ if FvSection.FvName in self._FvList:
+ continue
+ self._FvList.append(FvSection.FvName)
+ except AttributeError:
+ pass
+
+
+ self._Dispatch = os.path.join(Wa.BuildDir, Wa.Name + "_Dispatch.log")
+
+ ##
+ # Parse platform fixed address map files
+ #
+ # This function parses the platform final fixed address map file to get
+ # the database of predicted fixed address for module image base, entry point
+ # etc.
+ #
+ # @param self: The object pointer
+ #
+ def _ParseMapFile(self):
+ if self._MapFileParsed:
+ return
+ self._MapFileParsed = True
+ if os.path.isfile(self._MapFileName):
+ try:
+ FileContents = open(self._MapFileName).read()
+ for Match in gMapFileItemPattern.finditer(FileContents):
+ AddressType = Match.group(1)
+ BaseAddress = Match.group(2)
+ EntryPoint = Match.group(3)
+ Guid = Match.group(4).upper()
+ List = self.FixedMapDict.setdefault(Guid, [])
+ List.append((AddressType, BaseAddress, "*I"))
+ List.append((AddressType, EntryPoint, "*E"))
+ except:
+ EdkLogger.warn(None, "Cannot open file to read", self._MapFileName)
+
+ ##
+ # Invokes EOT tool to get the predicted the execution order.
+ #
+ # This function invokes EOT tool to calculate the predicted dispatch order
+ #
+ # @param self: The object pointer
+ #
+ def _InvokeEotTool(self):
+ if self._EotToolInvoked:
+ return
+
+ self._EotToolInvoked = True
+ FvFileList = []
+ for FvName in self._FvList:
+ FvFile = os.path.join(self._FvDir, FvName + ".Fv")
+ if os.path.isfile(FvFile):
+ FvFileList.append(FvFile)
+
+ #
+ # Invoke EOT tool
+ #
+ Eot(CommandLineOption=False, SourceFileList=self._SourceFileList, GuidList=self._GuidList,
+ FvFileList=' '.join(FvFileList), Dispatch=self._Dispatch, IsInit=True)
+
+ #
+ # Parse the output of EOT tool
+ #
+ for Line in open(self._Dispatch):
+ (Guid, Phase, FfsName, FilePath) = Line.split()
+ Symbol = self._FfsEntryPoint.get(Guid, [FfsName, ""])[0]
+ if len(Symbol) > self.MaxLen:
+ self.MaxLen = len(Symbol)
+ self.ItemList.append((Phase, Symbol, FilePath))
+
+ ##
+ # Generate platform execution order report
+ #
+ # This function generates the predicted module execution order.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ #
+ def _GenerateExecutionOrderReport(self, File):
+ FileWrite(File, gSectionStart)
+ FileWrite(File, "Execution Order Prediction")
+ FileWrite(File, "*P PEI phase")
+ FileWrite(File, "*D DXE phase")
+ FileWrite(File, "*E Module INF entry point name")
+ FileWrite(File, "*N Module notification function name")
+
+ FileWrite(File, "Type %-*s %s" % (self.MaxLen, "Symbol", "Module INF Path"))
+ FileWrite(File, gSectionSep)
+ for Item in self.ItemList:
+ FileWrite(File, "*%sE %-*s %s" % (Item[0], self.MaxLen, Item[1], Item[2]))
+
+ FileWrite(File, gSectionStart)
+
+ ##
+ # Generate Fixed Address report.
+ #
+ # This function generate the predicted fixed address report for a module
+ # specified by Guid.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param Guid The module Guid value.
+ # @param NotifyList The list of all notify function in a module
+ #
+ def _GenerateFixedAddressReport(self, File, Guid, NotifyList):
+ FixedAddressList = self.FixedMapDict.get(Guid)
+ if not FixedAddressList:
+ return
+
+ FileWrite(File, gSubSectionStart)
+ FileWrite(File, "Fixed Address Prediction")
+ FileWrite(File, "*I Image Loading Address")
+ FileWrite(File, "*E Entry Point Address")
+ FileWrite(File, "*N Notification Function Address")
+ FileWrite(File, "*F Flash Address")
+ FileWrite(File, "*M Memory Address")
+ FileWrite(File, "*S SMM RAM Address")
+ FileWrite(File, "TOM Top of Memory")
+
+ FileWrite(File, "Type Address Name")
+ FileWrite(File, gSubSectionSep)
+ for Item in FixedAddressList:
+ Type = Item[0]
+ Value = Item[1]
+ Symbol = Item[2]
+ if Symbol == "*I":
+ Name = "(Image Base)"
+ elif Symbol == "*E":
+ Name = self._FfsEntryPoint.get(Guid, ["", "_ModuleEntryPoint"])[1]
+ elif Symbol in NotifyList:
+ Name = Symbol
+ Symbol = "*N"
+ else:
+ continue
+
+ if "Flash" in Type:
+ Symbol += "F"
+ elif "Memory" in Type:
+ Symbol += "M"
+ else:
+ Symbol += "S"
+
+ if Value[0] == "-":
+ Value = "TOM" + Value
+
+ FileWrite(File, "%s %-16s %s" % (Symbol, Value, Name))
+
+ ##
+ # Generate report for the prediction part
+ #
+ # This function generate the predicted fixed address report for a module or
+ # predicted module execution order for a platform.
+ # If the input Guid is None, then, it generates the predicted module execution order;
+ # otherwise it generated the module fixed loading address for the module specified by
+ # Guid.
+ #
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param Guid The module Guid value.
+ #
+ def GenerateReport(self, File, Guid):
+ self._ParseMapFile()
+ self._InvokeEotTool()
+ if Guid:
+ self._GenerateFixedAddressReport(File, Guid.upper(), [])
+ else:
+ self._GenerateExecutionOrderReport(File)
+
+##
# Reports FD region information
#
# This class reports the FD subsection in the build report file.
@@ -918,6 +1246,7 @@
FileWrite(File, gSectionEnd)
+
##
# Reports platform information
@@ -953,7 +1282,11 @@
if "FLASH" in ReportType and Wa.FdfProfile:
for Fd in Wa.FdfProfile.FdDict:
self.FdReportList.append(FdReport(Wa.FdfProfile.FdDict[Fd], Wa))
-
+
+ self.PredictionReport = None
+ if "PREDICTION" in ReportType:
+ self.PredictionReport = PredictionReport(Wa)
+
self.ModuleReportList = []
for Pa in Wa.AutoGenObjectList:
for ModuleKey in Pa.Platform.Modules:
@@ -965,6 +1298,8 @@
DscOverridePcds = {}
self.ModuleReportList.append(ModuleReport(Pa.Platform.Modules[ModuleKey].M, DscOverridePcds, ReportType))
+
+
##
# Generate report for the whole platform.
#
@@ -997,8 +1332,10 @@
FdReportListItem.GenerateReport(File)
for ModuleReportItem in self.ModuleReportList:
- ModuleReportItem.GenerateReport(File, self.PcdReport, ReportType)
+ ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, ReportType)
+ if "PREDICTION" in ReportType:
+ self.PredictionReport.GenerateReport(File, None)
## BuildReport class
#
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-02-02 10:06:21
|
Revision: 1835
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1835&view=rev
Author: qhuang8
Date: 2010-02-02 10:05:50 +0000 (Tue, 02 Feb 2010)
Log Message:
-----------
Enhance ECC tools:
1. Add same Include File Name checkpoint
2. Add -w option support to specify workspace environment
3. Add -e option support to specify an exception file list
4. Print out the error file, line number, and error info in the command output in a standard format so it can be captured by an editor when the ECC tool is run as a plug-in.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Ecc/Check.py
trunk/BaseTools/Source/Python/Ecc/Ecc.py
trunk/BaseTools/Source/Python/Table/TableReport.py
Modified: trunk/BaseTools/Source/Python/Ecc/Check.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Check.py 2010-02-02 08:42:48 UTC (rev 1834)
+++ trunk/BaseTools/Source/Python/Ecc/Check.py 2010-02-02 10:05:50 UTC (rev 1835)
@@ -1,7 +1,7 @@
## @file
# This file is used to define checkpoints used by ECC tool
#
-# Copyright (c) 2008, Intel Corporation
+# Copyright (c) 2008 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -298,7 +298,8 @@
for Key in RecordDict:
if len(RecordDict[Key]) > 1:
for Item in RecordDict[Key]:
- EccGlobalData.gDb.TblReport.Insert(ERROR_INCLUDE_FILE_CHECK_NAME, OtherMsg = "The file name for '%s' is duplicate" % (Item[1]), BelongsToTable = 'File', BelongsToItem = Item[0])
+ if not EccGlobalData.gException.IsException(ERROR_INCLUDE_FILE_CHECK_NAME, Item[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_INCLUDE_FILE_CHECK_NAME, OtherMsg = "The file name for [%s] is duplicate" % (Item[1]), BelongsToTable = 'File', BelongsToItem = Item[0])
# Check whether all include file contents is guarded by a #ifndef statement.
def IncludeFileCheckIfndef(self):
@@ -738,17 +739,17 @@
self.NamingConventionCheckDefineStatement(FileTable)
self.NamingConventionCheckTypedefStatement(FileTable)
self.NamingConventionCheckIfndefStatement(FileTable)
- self.NamingConventionCheckVariableName(FileTable)
+ self.NamingConventionCheckVariableName(FileTable)
self.NamingConventionCheckSingleCharacterVariable(FileTable)
self.NamingConventionCheckPathName()
self.NamingConventionCheckFunctionName()
-
+
# Check whether only capital letters are used for #define declarations
def NamingConventionCheckDefineStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking naming covention of #define statement ...")
-
+
SqlCommand = """select ID, Value from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_MACRO_DEFINE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
@@ -763,7 +764,7 @@
def NamingConventionCheckTypedefStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking naming covention of #typedef statement ...")
-
+
SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_TYPEDEF)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
@@ -783,7 +784,7 @@
def NamingConventionCheckIfndefStatement(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking naming covention of #ifndef statement ...")
-
+
SqlCommand = """select ID, Value from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_MACRO_IFNDEF)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
@@ -818,7 +819,7 @@
if EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking naming covention of variable name ...")
Pattern = re.compile(r'^[A-Zgm]+\S*[a-z]\S*$')
-
+
SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_VARIABLE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
@@ -846,7 +847,7 @@
def NamingConventionCheckSingleCharacterVariable(self, FileTable):
if EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking naming covention of single character variable name ...")
-
+
SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_VARIABLE)
RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Record in RecordSet:
Modified: trunk/BaseTools/Source/Python/Ecc/Ecc.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Ecc.py 2010-02-02 08:42:48 UTC (rev 1834)
+++ trunk/BaseTools/Source/Python/Ecc/Ecc.py 2010-02-02 10:05:50 UTC (rev 1835)
@@ -1,7 +1,7 @@
## @file
# This file is used to be the main entrance of ECC tool
#
-# Copyright (c) 2009, Intel Corporation
+# Copyright (c) 2009 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -225,6 +225,9 @@
EdkLogger.quiet("Loading ECC configuration ... done")
(Options, Target) = self.EccOptionParser()
+ if Options.Workspace:
+ os.environ["WORKSPACE"] = Options.Workspace
+
# Check workspace envirnoment
if "WORKSPACE" not in os.environ:
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
@@ -244,6 +247,8 @@
self.OutputFile = Options.OutputFile
if Options.ReportFile != None:
self.ReportFile = Options.ReportFile
+ if Options.ExceptionFile != None:
+ self.ExceptionFile = Options.ExceptionFile
if Options.Target != None:
if not os.path.isdir(Options.Target):
EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
@@ -294,6 +299,8 @@
help="Specify the name of an output file, if and only if one filename was specified.")
Parser.add_option("-r", "--reportfile filename", action="store", type="string", dest="ReportFile",
help="Specify the name of an report file, if and only if one filename was specified.")
+ Parser.add_option("-e", "--exceptionfile filename", action="store", type="string", dest="ExceptionFile",
+ help="Specify the name of an exception file, if and only if one filename was specified.")
Parser.add_option("-m", "--metadata", action="store_true", type=None, help="Only scan meta-data files information if this option is specified.")
Parser.add_option("-s", "--sourcecode", action="store_true", type=None, help="Only scan source code files information if this option is specified.")
Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Ecc database will not be cleaned except report information if this option is specified.")
@@ -307,6 +314,7 @@
"including library instances selected, final dependency expression, "\
"and warning messages, etc.")
Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-w", "--workspace", action="store", type="string", dest='Workspace', help="Specify workspace.")
(Opt, Args)=Parser.parse_args()
Modified: trunk/BaseTools/Source/Python/Table/TableReport.py
===================================================================
--- trunk/BaseTools/Source/Python/Table/TableReport.py 2010-02-02 08:42:48 UTC (rev 1834)
+++ trunk/BaseTools/Source/Python/Table/TableReport.py 2010-02-02 10:05:50 UTC (rev 1835)
@@ -1,7 +1,7 @@
## @file
# This file is used to create/update/query/erase table for ECC reports
#
-# Copyright (c) 2008, Intel Corporation
+# Copyright (c) 2008 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -114,7 +114,8 @@
NewRecord = self.Exec(SqlCommand)
if NewRecord != []:
File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
-
+ EdkLogger.quiet("%s(%s): [%s]%s %s" % (NewRecord[0][1], NewRecord[0][0], ErrorID, EccToolError.gEccErrorMessage[ErrorID], OtherMsg))
+
File.close()
except IOError:
NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-02-04 18:13:27
|
Revision: 1842
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1842&view=rev
Author: qhuang8
Date: 2010-02-04 12:25:41 +0000 (Thu, 04 Feb 2010)
Log Message:
-----------
1. Turn off prediction feature of build report by default.
2. Show absolute path instead of workspace-relative path
3. Show time stamp relative to local time zone instead of UTC time zone
4. Show DXE_SMM_DRIVER as SMM file type if the module spec version >= 1.1
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Eot/Report.py
trunk/BaseTools/Source/Python/build/BuildReport.py
Modified: trunk/BaseTools/Source/Python/Eot/Report.py
===================================================================
--- trunk/BaseTools/Source/Python/Eot/Report.py 2010-02-04 10:03:26 UTC (rev 1841)
+++ trunk/BaseTools/Source/Python/Eot/Report.py 2010-02-04 12:25:41 UTC (rev 1842)
@@ -236,7 +236,7 @@
if FfsObj != None and FfsObj.Type in [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0xA]:
FfsGuid = FfsObj.Guid
FfsOffset = FfsObj._OFF_
- FfsName = 'Unknonw Ffs Name'
+ FfsName = 'Unknown-Module'
FfsPath = FfsGuid
FfsType = FfsObj._TypeName[FfsObj.Type]
Modified: trunk/BaseTools/Source/Python/build/BuildReport.py
===================================================================
--- trunk/BaseTools/Source/Python/build/BuildReport.py 2010-02-04 10:03:26 UTC (rev 1841)
+++ trunk/BaseTools/Source/Python/build/BuildReport.py 2010-02-04 12:25:41 UTC (rev 1842)
@@ -93,6 +93,7 @@
'UEFI_DRIVER' : '0x7 (DRIVER)',
'UEFI_APPLICATION' : '0x9 (APPLICATION)',
'SMM_CORE' : '0xD (SMM_CORE)',
+ 'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
}
##
@@ -396,6 +397,13 @@
ModuleType = M.ModuleType
if not ModuleType:
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
+ #
+ # If a module complies to PI 1.1, promote Module type to "SMM_DRIVER"
+ #
+ if ModuleType == "DXE_SMM_DRIVER":
+ PiSpec = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "0x00010000")
+ if int(PiSpec, 0) >= 0x0001000A:
+ ModuleType = "SMM_DRIVER"
self.DriverType = gDriverTypeMap.get(ModuleType, "")
self.UefiSpecVersion = M.Module.Specification.get("UEFI_SPECIFICATION_VERSION", "")
self.PiSpecVersion = M.Module.Specification.get("PI_SPECIFICATION_VERSION", "")
@@ -457,7 +465,7 @@
Match = gTimeStampPattern.search(FileContents)
if Match:
- self.BuildTimeStamp = datetime.utcfromtimestamp(int(Match.group(1)))
+ self.BuildTimeStamp = datetime.fromtimestamp(int(Match.group(1)))
except IOError:
EdkLogger.warn(None, "Fail to read report file", FwReportFileName)
@@ -864,7 +872,7 @@
# Invoke EOT tool
#
Eot(CommandLineOption=False, SourceFileList=SourceList, GuidList=GuidList,
- FvFileList=' '.join(FvFileList), Dispatch=DispatchList, IsInit=True)
+ FvFileList=' '.join(FvFileList), Dispatch=DispatchList, IsInit=True)
#
# Parse the output of EOT tool
@@ -876,8 +884,7 @@
self.MaxLen = len(Symbol)
self.ItemList.append((Phase, Symbol, FilePath))
except:
- EdkLogger.warn(None, "Failed to generate execution order prediction report, \
- for some error occurred in executing EOT.")
+ EdkLogger.warn(None, "Failed to generate execution order prediction report, for some error occurred in executing EOT.")
##
@@ -1091,7 +1098,8 @@
for Pa in Wa.AutoGenObjectList:
for ModuleKey in Pa.Platform.Modules:
M = Pa.Platform.Modules[ModuleKey].M
- self._GuidsDb[M.Guid.upper()] = "%s (%s)" % (M.Module.BaseName, M.MetaFile.File)
+ InfPath = os.path.join(Wa.WorkspaceDir, M.MetaFile.File)
+ self._GuidsDb[M.Guid.upper()] = "%s (%s)" % (M.Module.BaseName, InfPath)
#
# Collect the GUID map in the FV firmware volume
@@ -1351,7 +1359,9 @@
if ReportFile:
self.ReportList = []
self.ReportType = []
- if ReportType == None or "ALL" in ReportType:
+ if ReportType == None:
+ self.ReportType = ["PCD", "LIBRARY", "BUILD_FLAGS", "DEPEX", "FLASH"]
+ elif "ALL" in ReportType:
self.ReportType = ["PCD", "LIBRARY", "BUILD_FLAGS", "DEPEX", "FLASH", "PREDICTION"]
else:
for ReportTypeItem in ReportType:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <lg...@us...> - 2010-02-05 08:09:01
|
Revision: 1848
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1848&view=rev
Author: lgao4
Date: 2010-02-05 06:59:58 +0000 (Fri, 05 Feb 2010)
Log Message:
-----------
Correct copyright time.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/GenDepex.py
trunk/BaseTools/Source/Python/Ecc/Ecc.py
trunk/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py
trunk/BaseTools/Source/Python/TargetTool/TargetTool.py
trunk/BaseTools/Source/Python/Trim/Trim.py
trunk/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py
Modified: trunk/BaseTools/Source/Python/AutoGen/GenDepex.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenDepex.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/AutoGen/GenDepex.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -1,7 +1,7 @@
## @file
# This file is used to generate DEPEX file for module's dependency expression
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -356,7 +356,7 @@
versionNumber = "0.04"
__version__ = "%prog Version " + versionNumber
-__copyright__ = "Copyright (c) 2007-2008, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007-2010, Intel Corporation All rights reserved."
__usage__ = "%prog [options] [dependency_expression_file]"
## Parse command line options
Modified: trunk/BaseTools/Source/Python/Ecc/Ecc.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Ecc.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/Ecc/Ecc.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -43,7 +43,7 @@
# Version and Copyright
self.VersionNumber = "0.01"
self.Version = "%prog Version " + self.VersionNumber
- self.Copyright = "Copyright (c) 2009, Intel Corporation All rights reserved."
+ self.Copyright = "Copyright (c) 2009 - 2010, Intel Corporation All rights reserved."
self.InitDefaultConfigIni()
self.OutputFile = 'output.txt'
Modified: trunk/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py
===================================================================
--- trunk/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -1,7 +1,7 @@
#!/usr/bin/env python
#
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -21,7 +21,7 @@
versionNumber = "0.9"
__version__ = "%prog Version " + versionNumber
-__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
commonHeaderFilename = "CommonHeader.h"
entryPointFilename = "EntryPoint.c"
Modified: trunk/BaseTools/Source/Python/TargetTool/TargetTool.py
===================================================================
--- trunk/BaseTools/Source/Python/TargetTool/TargetTool.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/TargetTool/TargetTool.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
#
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -175,7 +175,7 @@
VersionNumber = "0.01"
__version__ = "%prog Version " + VersionNumber
-__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
__usage__ = "%prog [options] {args} \
\nArgs: \
\n Clean clean the all default configuration of target.txt. \
Modified: trunk/BaseTools/Source/Python/Trim/Trim.py
===================================================================
--- trunk/BaseTools/Source/Python/Trim/Trim.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/Trim/Trim.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -1,7 +1,7 @@
## @file
# Trim files preprocessed by compiler
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -28,7 +28,7 @@
# Version and Copyright
__version_number__ = "0.10"
__version__ = "%prog Version " + __version_number__
-__copyright__ = "Copyright (c) 2007-2008, Intel Corporation. All rights reserved."
+__copyright__ = "Copyright (c) 2007-2010, Intel Corporation. All rights reserved."
## Regular expression for matching Line Control directive like "#line xxx"
gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"')
Modified: trunk/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py
===================================================================
--- trunk/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py 2010-02-05 06:50:09 UTC (rev 1847)
+++ trunk/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py 2010-02-05 06:59:58 UTC (rev 1848)
@@ -1,7 +1,7 @@
## @file
# Convert an XML-based FPD file to a text-based DSC file.
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -22,7 +22,7 @@
# Version and Copyright
__version_number__ = "1.0"
__version__ = "%prog Version " + __version_number__
-__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved."
+__copyright__ = "Copyright (c) 2007 - 2010, Intel Corporation All rights reserved."
## Parse command line options
#
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-02-09 10:16:07
|
Revision: 1856
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1856&view=rev
Author: qhuang8
Date: 2010-02-09 10:16:00 +0000 (Tue, 09 Feb 2010)
Log Message:
-----------
Enhance build tool to support "UEFI_SPECIFICATION_VERSION" in INF spec. It also supports the original "EFI_SPECIFICATION_VERSION" for backward compatibility.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/GenC.py
trunk/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
trunk/BaseTools/Source/Python/Common/InfClassObject.py
trunk/BaseTools/Source/Python/Common/InfClassObjectLight.py
trunk/BaseTools/Source/Python/CommonDataClass/ModuleClass.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
trunk/BaseTools/Source/Python/msa2inf/ConvertModule.py
trunk/BaseTools/Source/Python/msa2inf/StoreInf.py
Modified: trunk/BaseTools/Source/Python/AutoGen/GenC.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -453,7 +453,7 @@
gSmmCoreEntryPointString = TemplateString("""
${BEGIN}
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
@@ -482,7 +482,7 @@
gDxeSmmEntryPointString = [
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
@@ -497,7 +497,7 @@
}
"""),
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
@@ -550,7 +550,7 @@
gUefiDriverEntryPointString = [
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
@@ -564,7 +564,7 @@
}
"""),
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
${BEGIN}
@@ -592,7 +592,7 @@
}
"""),
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
const UINT32 _gDxeRevision = ${PiSpecVersion};
EFI_STATUS
@@ -645,7 +645,7 @@
gUefiApplicationEntryPointString = [
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
EFI_STATUS
EFIAPI
@@ -658,7 +658,7 @@
}
"""),
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
${BEGIN}
EFI_STATUS
@@ -685,7 +685,7 @@
}
"""),
TemplateString("""
-const UINT32 _gUefiDriverRevision = ${EfiSpecVersion};
+const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
EFI_STATUS
EFIAPI
@@ -1653,14 +1653,14 @@
PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
else:
PiSpecVersion = 0
- if 'EFI_SPECIFICATION_VERSION' in Info.Module.Specification:
- EfiSpecVersion = Info.Module.Specification['EFI_SPECIFICATION_VERSION']
+ if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification:
+ UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION']
else:
- EfiSpecVersion = 0
+ UefiSpecVersion = 0
Dict = {
- 'Function' : Info.Module.ModuleEntryPointList,
- 'PiSpecVersion' : PiSpecVersion,
- 'EfiSpecVersion': EfiSpecVersion
+ 'Function' : Info.Module.ModuleEntryPointList,
+ 'PiSpecVersion' : PiSpecVersion,
+ 'UefiSpecVersion': UefiSpecVersion
}
if Info.ModuleType in ['PEI_CORE', 'DXE_CORE', 'SMM_CORE']:
Modified: trunk/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -1,7 +1,7 @@
## @file
# This file is used to define each component of the build database
#
-# Copyright (c) 2007 ~ 2008, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -748,7 +748,8 @@
#
Pb.Specification = ModuleHeader.Specification
Pb.Specification[TAB_INF_DEFINES_EDK_RELEASE_VERSION] = ModuleHeader.EdkReleaseVersion
- Pb.Specification[TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION] = ModuleHeader.EfiSpecificationVersion
+ Pb.Specification[TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION] = ModuleHeader.UefiSpecificationVersion
+ Pb.Specification[TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION] = ModuleHeader.UefiSpecificationVersion
Pb.AutoGenVersion = int(ModuleHeader.InfVersion, 0)
#
Modified: trunk/BaseTools/Source/Python/Common/InfClassObject.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/InfClassObject.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/Common/InfClassObject.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -1,7 +1,7 @@
## @file
# This file is used to define each component of INF file
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -85,7 +85,8 @@
TAB_INF_DEFINES_BASE_NAME : "Name",
TAB_INF_DEFINES_FILE_GUID : "Guid",
TAB_INF_DEFINES_MODULE_TYPE : "ModuleType",
- TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "EfiSpecificationVersion",
+ TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "UefiSpecificationVersion",
+ TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : "UefiSpecificationVersion",
TAB_INF_DEFINES_EDK_RELEASE_VERSION : "EdkReleaseVersion",
#
# Optional Fields
@@ -452,7 +453,7 @@
print 'Guid =', M.Header[Arch].Guid
print 'Version =', M.Header[Arch].Version
print 'InfVersion =', M.Header[Arch].InfVersion
- print 'EfiSpecificationVersion =', M.Header[Arch].EfiSpecificationVersion
+ print 'UefiSpecificationVersion =', M.Header[Arch].UefiSpecificationVersion
print 'EdkReleaseVersion =', M.Header[Arch].EdkReleaseVersion
print 'ModuleType =', M.Header[Arch].ModuleType
print 'BinaryModule =', M.Header[Arch].BinaryModule
Modified: trunk/BaseTools/Source/Python/Common/InfClassObjectLight.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/InfClassObjectLight.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/Common/InfClassObjectLight.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -1,7 +1,7 @@
## @file
# This file is used to define each component of INF file
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -71,7 +71,8 @@
TAB_INF_DEFINES_BASE_NAME : "Name",
TAB_INF_DEFINES_FILE_GUID : "Guid",
TAB_INF_DEFINES_MODULE_TYPE : "ModuleType",
- TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "EfiSpecificationVersion",
+ TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "UefiSpecificationVersion",
+ TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION : "UefiSpecificationVersion",
TAB_INF_DEFINES_EDK_RELEASE_VERSION : "EdkReleaseVersion",
# Optional Fields
@@ -583,7 +584,7 @@
ModuleHeader.PcdIsDriver = Value
elif Name == TAB_INF_DEFINES_MODULE_TYPE:
ModuleHeader.ModuleType = Value
- elif Name == TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION:
+ elif Name in (TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION, TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION):
ModuleHeader.UefiSpecificationVersion = Value
elif Name == TAB_INF_DEFINES_PI_SPECIFICATION_VERSION:
ModuleHeader.PiSpecificationVersion = Value
Modified: trunk/BaseTools/Source/Python/CommonDataClass/ModuleClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/ModuleClass.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/CommonDataClass/ModuleClass.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -1,7 +1,7 @@
## @file
# This file is used to define a class object to describe a module
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -34,7 +34,7 @@
# PEI_PCD_DRIVER | DXE_PCD_DRIVER
# @var TianoR8FlashMap_h: To store value for TianoR8FlashMap_h
# @var InfVersion: To store value for InfVersion
-# @var EfiSpecificationVersion: To store value for EfiSpecificationVersion
+# @var UefiSpecificationVersion: To store value for UefiSpecificationVersion
# @var EdkReleaseVersion: To store value for EdkReleaseVersion
# @var LibraryClass: To store value for LibraryClass, it is a set structure as
# [ LibraryClassClass, ...]
@@ -65,7 +65,6 @@
self.PcdIsDriver = ''
self.TianoR8FlashMap_h = False
self.InfVersion = ''
- self.EfiSpecificationVersion = ''
self.PiSpecificationVersion = ''
self.UefiSpecificationVersion = ''
self.EdkReleaseVersion = ''
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-02-09 01:52:22 UTC (rev 1855)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-02-09 10:16:00 UTC (rev 1856)
@@ -1,2375 +1,2375 @@
-## @file
-# This file is used to create a database used by build tool
-#
-# Copyright (c) 2008 - 2009, Intel Corporation
-# All rights reserved. This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import sqlite3
-import os
-import os.path
-import pickle
-
-import Common.EdkLogger as EdkLogger
-import Common.GlobalData as GlobalData
-
-from Common.String import *
-from Common.DataType import *
-from Common.Misc import *
-from types import *
-
-from CommonDataClass.CommonClass import SkuInfoClass
-
-from MetaDataTable import *
-from MetaFileTable import *
-from MetaFileParser import *
-from BuildClassObject import *
-
-## Platform build information from DSC file
-#
-# This class is used to retrieve information stored in database and convert them
-# into PlatformBuildClassObject form for easier use for AutoGen.
-#
-class DscBuildData(PlatformBuildClassObject):
- # dict used to convert PCD type in database to string used by build tool
- _PCD_TYPE_STRING_ = {
- MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
- MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
- MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
- MODEL_PCD_DYNAMIC : "Dynamic",
- MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
- MODEL_PCD_DYNAMIC_HII : "DynamicHii",
- MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
- MODEL_PCD_DYNAMIC_EX : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
- MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
- }
-
- # dict used to convert part of [Defines] to members of DscBuildData directly
- _PROPERTY_ = {
- #
- # Required Fields
- #
- TAB_DSC_DEFINES_PLATFORM_NAME : "_PlatformName",
- TAB_DSC_DEFINES_PLATFORM_GUID : "_Guid",
- TAB_DSC_DEFINES_PLATFORM_VERSION : "_Version",
- TAB_DSC_DEFINES_DSC_SPECIFICATION : "_DscSpecification",
- #TAB_DSC_DEFINES_OUTPUT_DIRECTORY : "_OutputDirectory",
- #TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES : "_SupArchList",
- #TAB_DSC_DEFINES_BUILD_TARGETS : "_BuildTargets",
- #TAB_DSC_DEFINES_SKUID_IDENTIFIER : "_SkuName",
- #TAB_DSC_DEFINES_FLASH_DEFINITION : "_FlashDefinition",
- TAB_DSC_DEFINES_BUILD_NUMBER : "_BuildNumber",
- TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName",
- TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress",
- TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress",
- }
-
- # used to compose dummy library class name for those forced library instances
- _NullLibraryNumber = 0
-
- ## Constructor of DscBuildData
- #
- # Initialize object of DscBuildData
- #
- # @param FilePath The path of platform description file
- # @param RawData The raw data of DSC file
- # @param BuildDataBase Database used to retrieve module/package information
- # @param Arch The target architecture
- # @param Platform (not used for DscBuildData)
- # @param Macros Macros used for replacement in DSC file
- #
- def __init__(self, FilePath, RawData, BuildDataBase, Arch='COMMON', Platform='DUMMY', Macros={}):
- self.MetaFile = FilePath
- self._RawData = RawData
- self._Bdb = BuildDataBase
- self._Arch = Arch
- self._Macros = Macros
- self._Clear()
- RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
- for Record in RecordList:
- GlobalData.gEdkGlobal[Record[0]] = Record[1]
-
- ## XXX[key] = value
- def __setitem__(self, key, value):
- self.__dict__[self._PROPERTY_[key]] = value
-
- ## value = XXX[key]
- def __getitem__(self, key):
- return self.__dict__[self._PROPERTY_[key]]
-
- ## "in" test support
- def __contains__(self, key):
- return key in self._PROPERTY_
-
- ## Set all internal used members of DscBuildData to None
- def _Clear(self):
- self._Header = None
- self._PlatformName = None
- self._Guid = None
- self._Version = None
- self._DscSpecification = None
- self._OutputDirectory = None
- self._SupArchList = None
- self._BuildTargets = None
- self._SkuName = None
- self._FlashDefinition = None
- self._BuildNumber = None
- self._MakefileName = None
- self._BsBaseAddress = None
- self._RtBaseAddress = None
- self._SkuIds = None
- self._Modules = None
- self._LibraryInstances = None
- self._LibraryClasses = None
- self._Pcds = None
- self._BuildOptions = None
- self._LoadFixAddress = None
-
- ## Get architecture
- def _GetArch(self):
- return self._Arch
-
- ## Set architecture
- #
- # Changing the default ARCH to another may affect all other information
- # because all information in a platform may be ARCH-related. That's
- # why we need to clear all internal used members, in order to cause all
- # information to be re-retrieved.
- #
- # @param Value The value of ARCH
- #
- def _SetArch(self, Value):
- if self._Arch == Value:
- return
- self._Arch = Value
- self._Clear()
-
- ## Retrieve all information in [Defines] section
- #
- # (Retriving all [Defines] information in one-shot is just to save time.)
- #
- def _GetHeaderInfo(self):
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
- for Record in RecordList:
- Name = Record[0]
- # items defined _PROPERTY_ don't need additional processing
- if Name in self:
- self[Name] = Record[1]
- # some special items in [Defines] section need special treatment
- elif Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY:
- self._OutputDirectory = NormPath(Record[1], self._Macros)
- if ' ' in self._OutputDirectory:
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY",
- File=self.MetaFile, Line=Record[-1],
- ExtraData=self._OutputDirectory)
- elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION:
- self._FlashDefinition = PathClass(NormPath(Record[1], self._Macros), GlobalData.gWorkspace)
- ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1],
- ExtraData=ErrorInfo)
- elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES:
- self._SupArchList = GetSplitValueList(Record[1], TAB_VALUE_SPLIT)
- elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
- self._BuildTargets = GetSplitValueList(Record[1])
- elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
- if self._SkuName == None:
- self._SkuName = Record[1]
- elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
- self._LoadFixAddress = Record[1]
- # set _Header to non-None in order to avoid database re-querying
- self._Header = 'DUMMY'
-
- ## Retrieve platform name
- def _GetPlatformName(self):
- if self._PlatformName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._PlatformName == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
- return self._PlatformName
-
- ## Retrieve file guid
- def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Guid == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No FILE_GUID", File=self.MetaFile)
- return self._Guid
-
- ## Retrieve platform version
- def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Version == None:
- self._Version = ''
- return self._Version
-
- ## Retrieve platform description file version
- def _GetDscSpec(self):
- if self._DscSpecification == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._DscSpecification == None:
- self._DscSpecification = ''
- return self._DscSpecification
-
- ## Retrieve OUTPUT_DIRECTORY
- def _GetOutpuDir(self):
- if self._OutputDirectory == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._OutputDirectory == None:
- self._OutputDirectory = os.path.join("Build", self._PlatformName)
- return self._OutputDirectory
-
- ## Retrieve SUPPORTED_ARCHITECTURES
- def _GetSupArch(self):
- if self._SupArchList == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._SupArchList == None:
- self._SupArchList = ARCH_LIST
- return self._SupArchList
-
- ## Retrieve BUILD_TARGETS
- def _GetBuildTarget(self):
- if self._BuildTargets == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BuildTargets == None:
- self._BuildTargets = ['DEBUG', 'RELEASE']
- return self._BuildTargets
-
- ## Retrieve SKUID_IDENTIFIER
- def _GetSkuName(self):
- if self._SkuName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._SkuName == None or self._SkuName not in self.SkuIds:
- self._SkuName = 'DEFAULT'
- return self._SkuName
-
- ## Override SKUID_IDENTIFIER
- def _SetSkuName(self, Value):
- if Value in self.SkuIds:
- self._SkuName = Value
-
- def _GetFdfFile(self):
- if self._FlashDefinition == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._FlashDefinition == None:
- self._FlashDefinition = ''
- return self._FlashDefinition
-
- ## Retrieve FLASH_DEFINITION
- def _GetBuildNumber(self):
- if self._BuildNumber == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BuildNumber == None:
- self._BuildNumber = ''
- return self._BuildNumber
-
- ## Retrieve MAKEFILE_NAME
- def _GetMakefileName(self):
- if self._MakefileName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._MakefileName == None:
- self._MakefileName = ''
- return self._MakefileName
-
- ## Retrieve BsBaseAddress
- def _GetBsBaseAddress(self):
- if self._BsBaseAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BsBaseAddress == None:
- self._BsBaseAddress = ''
- return self._BsBaseAddress
-
- ## Retrieve RtBaseAddress
- def _GetRtBaseAddress(self):
- if self._RtBaseAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._RtBaseAddress == None:
- self._RtBaseAddress = ''
- return self._RtBaseAddress
-
- ## Retrieve the top address for the load fix address
- def _GetLoadFixAddress(self):
- if self._LoadFixAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._LoadFixAddress == None:
- self._LoadFixAddress = ''
- return self._LoadFixAddress
-
- ## Retrieve [SkuIds] section information
- def _GetSkuIds(self):
- if self._SkuIds == None:
- self._SkuIds = {}
- RecordList = self._RawData[MODEL_EFI_SKU_ID]
- for Record in RecordList:
- if Record[0] in [None, '']:
- EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number',
- File=self.MetaFile, Line=Record[-1])
- if Record[1] in [None, '']:
- EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
- File=self.MetaFile, Line=Record[-1])
- self._SkuIds[Record[1]] = Record[0]
- if 'DEFAULT' not in self._SkuIds:
- self._SkuIds['DEFAULT'] = 0
- return self._SkuIds
-
- ## Retrieve [Components] section information
- def _GetModules(self):
- if self._Modules != None:
- return self._Modules
-
- self._Modules = sdict()
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
- Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
- Macros.update(self._Macros)
- for Record in RecordList:
- ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- ModuleId = Record[5]
- LineNo = Record[6]
-
- # check the file validation
- ErrorCode, ErrorInfo = ModuleFile.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
- # Check duplication
- if ModuleFile in self._Modules:
- EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
-
- Module = ModuleBuildClassObject()
- Module.MetaFile = ModuleFile
-
- # get module override path
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId]
- if RecordList != []:
- Module.SourceOverridePath = os.path.join(GlobalData.gWorkspace, NormPath(RecordList[0][0], Macros))
-
- # Check if the source override path exists
- if not os.path.isdir(Module.SourceOverridePath):
- EdkLogger.error('build', FILE_NOT_FOUND, Message = 'Source override path does not exist:', File=self.MetaFile, ExtraData=Module.SourceOverridePath, Line=LineNo)
-
- #Add to GlobalData Variables
- GlobalData.gOverrideDir[ModuleFile.Key] = Module.SourceOverridePath
-
- # get module private library instance
- RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
- for Record in RecordList:
- LibraryClass = Record[0]
- LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- LineNo = Record[-1]
-
- # check the file validation
- ErrorCode, ErrorInfo = LibraryPath.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
-
- if LibraryClass == '' or LibraryClass == 'NULL':
- self._NullLibraryNumber += 1
- LibraryClass = 'NULL%d' % self._NullLibraryNumber
- EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass))
- Module.LibraryClasses[LibraryClass] = LibraryPath
- if LibraryPath not in self.LibraryInstances:
- self.LibraryInstances.append(LibraryPath)
-
- # get module private PCD setting
- for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
- MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
- RecordList = self._RawData[Type, self._Arch, None, ModuleId]
- for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- TokenList = GetSplitValueList(Setting)
- DefaultValue = TokenList[0]
- if len(TokenList) > 1:
- MaxDatumSize = TokenList[1]
- else:
- MaxDatumSize = ''
- TypeString = self._PCD_TYPE_STRING_[Type]
- Pcd = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- TypeString,
- '',
- DefaultValue,
- '',
- MaxDatumSize,
- {},
- None
- )
- Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
-
- # get module private build options
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
- for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
- Module.BuildOptions[ToolChainFamily, ToolChain] = Option
- else:
- OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
- Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
-
- self._Modules[ModuleFile] = Module
- return self._Modules
-
- ## Retrieve all possible library instances used in this platform
- def _GetLibraryInstances(self):
- if self._LibraryInstances == None:
- self._GetLibraryClasses()
- return self._LibraryInstances
-
- ## Retrieve [LibraryClasses] information
- def _GetLibraryClasses(self):
- if self._LibraryClasses == None:
- self._LibraryInstances = []
- #
- # tdict is a special dict kind of type, used for selecting correct
- # libr...
[truncated message content] |
|
From: <qh...@us...> - 2010-02-24 02:40:26
|
Revision: 1887
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1887&view=rev
Author: qhuang8
Date: 2010-02-24 02:40:19 +0000 (Wed, 24 Feb 2010)
Log Message:
-----------
1. Add file header for all __init__.py
2. Update ANTLR grammar file C.g to inject file header to the output CLexer.py and CParser.py
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/__init__.py
trunk/BaseTools/Source/Python/Common/__init__.py
trunk/BaseTools/Source/Python/CommonDataClass/__init__.py
trunk/BaseTools/Source/Python/Ecc/C.g
trunk/BaseTools/Source/Python/Ecc/CLexer.py
trunk/BaseTools/Source/Python/Ecc/CParser.py
trunk/BaseTools/Source/Python/Ecc/__init__.py
trunk/BaseTools/Source/Python/Eot/CLexer.py
trunk/BaseTools/Source/Python/Eot/CParser.py
trunk/BaseTools/Source/Python/Eot/__init__.py
trunk/BaseTools/Source/Python/Fdb/__init__.py
trunk/BaseTools/Source/Python/FixFlash/__init__.py
trunk/BaseTools/Source/Python/GenFds/__init__.py
trunk/BaseTools/Source/Python/GenPatchPcdTable/__init__.py
trunk/BaseTools/Source/Python/MigrationMsa2Inf/__init__.py
trunk/BaseTools/Source/Python/MkBOM/__init__.py
trunk/BaseTools/Source/Python/PatchPcdValue/__init__.py
trunk/BaseTools/Source/Python/Table/__init__.py
trunk/BaseTools/Source/Python/TargetTool/__init__.py
trunk/BaseTools/Source/Python/Workspace/__init__.py
trunk/BaseTools/Source/Python/build/__init__.py
trunk/BaseTools/Source/Python/fpd2dsc/__init__.py
trunk/BaseTools/Source/Python/msa2inf/__init__.py
trunk/BaseTools/Source/Python/spd2dec/__init__.py
Modified: trunk/BaseTools/Source/Python/AutoGen/__init__.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/__init__.py 2010-02-24 01:18:37 UTC (rev 1886)
+++ trunk/BaseTools/Source/Python/AutoGen/__init__.py 2010-02-24 02:40:19 UTC (rev 1887)
@@ -1,4 +1,10 @@
-# Copyright (c) 2007, Intel Corporation
+## @file
+# Python 'AutoGen' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation<BR>
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -6,5 +12,6 @@
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
__all__ = ["AutoGen"]
Modified: trunk/BaseTools/Source/Python/Common/__init__.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/__init__.py 2010-02-24 01:18:37 UTC (rev 1886)
+++ trunk/BaseTools/Source/Python/Common/__init__.py 2010-02-24 02:40:19 UTC (rev 1887)
@@ -0,0 +1,15 @@
+## @file
+# Python 'Common' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation<BR>
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
Modified: trunk/BaseTools/Source/Python/CommonDataClass/__init__.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/__init__.py 2010-02-24 01:18:37 UTC (rev 1886)
+++ trunk/BaseTools/Source/Python/CommonDataClass/__init__.py 2010-02-24 02:40:19 UTC (rev 1887)
@@ -0,0 +1,15 @@
+## @file
+# Python 'CommonDataClass' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation<BR>
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
Modified: trunk/BaseTools/Source/Python/Ecc/C.g
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/C.g 2010-02-24 01:18:37 UTC (rev 1886)
+++ trunk/BaseTools/Source/Python/Ecc/C.g 2010-02-24 02:40:19 UTC (rev 1887)
@@ -19,9 +19,49 @@
k=2;
}
+@lexer::header{
+## @file
+# The file defines the Lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+}
+
@header {
- import CodeFragment
- import FileProfile
+## @file
+# The file defines the parser for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+
+import CodeFragment
+import FileProfile
}
@members {
Modified: trunk/BaseTools/Source/Python/Ecc/CLexer.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/CLexer.py 2010-02-24 01:18:37 UTC (rev 1886)
+++ trunk/BaseTools/Source/Python/Ecc/CLexer.py 2010-02-24 02:40:19 UTC (rev 1887)
@@ -1,9 +1,29 @@
-# $ANTLR 3.0.1 C.g 2010-01-21 14:45:07
+# $ANTLR 3.0.1 C.g 2010-02-23 09:58:53
from antlr3 import *
from antlr3.compat import set, frozenset
+
+## @file
+# The file defines the Lexer for C source files.
+#
+# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY THIS FILE.
+# This file is generated by running:
+# java org.antlr.Tool C.g
+#
+# Copyright (c) 2009 - 2010, Intel Corporation All rights reserved.
+#
+# This program and the accompanying materials are licensed and made available
+# under the terms and conditions of the BSD License which accompanies this
+# distribution. The full text of the license may be found at:
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+##
+
# for convenience in actions
HIDDEN = BaseRecognizer.HIDDEN
@@ -163,8 +183,8 @@
try:
self.type = T25
- # C.g:7:5: ( ';' )
- # C.g:7:7: ';'
+ # C.g:27:5: ( ';' )
+ # C.g:27:7: ';'
self.match(u';')
@@ -185,8 +205,8 @@
try:
self.type = T26
- # C.g:8:5: ( 'typedef' )
- # C.g:8:7: 'typedef'
+ # C.g:28:5: ( 'typedef' )
+ # C.g:28:7: 'typedef'
self.match("typedef")
@@ -208,8 +228,8 @@
try:
self.type = T27
- # C.g:9:5: ( ',' )
- # C.g:9:7: ','
+ # C.g:29:5: ( ',' )
+ # C.g:29:7: ','
self.match(u',')
@@ -230,8 +250,8 @@
try:
self.type = T28
- # C.g:10:5: ( '=' )
- # C.g:10:7: '='
+ # C.g:30:5: ( '=' )
+ # C.g:30:7: '='
self.match(u'=')
@@ -252,8 +272,8 @@
try:
self.type = T29
- # C.g:11:5: ( 'extern' )
- # C.g:11:7: 'extern'
+ # C.g:31:5: ( 'extern' )
+ # C.g:31:7: 'extern'
self.match("extern")
@@ -275,8 +295,8 @@
try:
self.type = T30
- # C.g:12:5: ( 'static' )
- # C.g:12:7: 'static'
+ # C.g:32:5: ( 'static' )
+ # C.g:32:7: 'static'
self.match("static")
@@ -298,8 +318,8 @@
try:
self.type = T31
- # C.g:13:5: ( 'auto' )
- # C.g:13:7: 'auto'
+ # C.g:33:5: ( 'auto' )
+ # C.g:33:7: 'auto'
self.match("auto")
@@ -321,8 +341,8 @@
try:
self.type = T32
- # C.g:14:5: ( 'register' )
- # C.g:14:7: 'register'
+ # C.g:34:5: ( 'register' )
+ # C.g:34:7: 'register'
self.match("register")
@@ -344,8 +364,8 @@
try:
self.type = T33
- # C.g:15:5: ( 'STATIC' )
- # C.g:15:7: 'STATIC'
+ # C.g:35:5: ( 'STATIC' )
+ # C.g:35:7: 'STATIC'
self.match("STATIC")
@@ -367,8 +387,8 @@
try:
self.type = T34
- # C.g:16:5: ( 'void' )
- # C.g:16:7: 'void'
+ # C.g:36:5: ( 'void' )
+ # C.g:36:7: 'void'
self.match("void")
@@ -390,8 +410,8 @@
try:
self.type = T35
- # C.g:17:5: ( 'char' )
- # C.g:17:7: 'char'
+ # C.g:37:5: ( 'char' )
+ # C.g:37:7: 'char'
self.match("char")
@@ -413,8 +433,8 @@
try:
self.type = T36
- # C.g:18:5: ( 'short' )
- # C.g:18:7: 'short'
+ # C.g:38:5: ( 'short' )
+ # C.g:38:7: 'short'
self.match("short")
@@ -436,8 +456,8 @@
try:
self.type = T37
- # C.g:19:5: ( 'int' )
- # C.g:19:7: 'int'
+ # C.g:39:5: ( 'int' )
+ # C.g:39:7: 'int'
self.match("int")
@@ -459,8 +479,8 @@
try:
self.type = T38
- # C.g:20:5: ( 'long' )
- # C.g:20:7: 'long'
+ # C.g:40:5: ( 'long' )
+ # C.g:40:7: 'long'
self.match("long")
@@ -482,8 +502,8 @@
try:
self.type = T39
- # C.g:21:5: ( 'float' )
- # C.g:21:7: 'float'
+ # C.g:41:5: ( 'float' )
+ # C.g:41:7: 'float'
self.match("float")
@@ -505,8 +525,8 @@
try:
self.type = T40
- # C.g:22:5: ( 'double' )
- # C.g:22:7: 'double'
+ # C.g:42:5: ( 'double' )
+ # C.g:42:7: 'double'
self.match("double")
@@ -528,8 +548,8 @@
try:
self.type = T41
- # C.g:23:5: ( 'signed' )
- # C.g:23:7: 'signed'
+ # C.g:43:5: ( 'signed' )
+ # C.g:43:7: 'signed'
self.match("signed")
@@ -551,8 +571,8 @@
try:
self.type = T42
- # C.g:24:5: ( 'unsigned' )
- # C.g:24:7: 'unsigned'
+ # C.g:44:5: ( 'unsigned' )
+ # C.g:44:7: 'unsigned'
self.match("unsigned")
@@ -574,8 +594,8 @@
try:
self.type = T43
- # C.g:25:5: ( '{' )
- # C.g:25:7: '{'
+ # C.g:45:5: ( '{' )
+ # C.g:45:7: '{'
self.match(u'{')
@@ -596,8 +616,8 @@
try:
self.type = T44
- # C.g:26:5: ( '}' )
- # C.g:26:7: '}'
+ # C.g:46:5: ( '}' )
+ # C.g:46:7: '}'
self.match(u'}')
@@ -618,8 +638,8 @@
try:
self.type = T45
- # C.g:27:5: ( 'struct' )
- # C.g:27:7: 'struct'
+ # C.g:47:5: ( 'struct' )
+ # C.g:47:7: 'struct'
self.match("struct")
@@ -641,8 +661,8 @@
try:
self.type = T46
- # C.g:28:5: ( 'union' )
- # C.g:28:7: 'union'
+ # C.g:48:5: ( 'union' )
+ # C.g:48:7: 'union'
self.match("union")
@@ -664,8 +684,8 @@
try:
self.type = T47
- # C.g:29:5: ( ':' )
- # C.g:29:7: ':'
+ # C.g:49:5: ( ':' )
+ # C.g:49:7: ':'
self.match(u':')
@@ -686,8 +706,8 @@
try:
self.type = T48
- # C.g:30:5: ( 'enum' )
- # C.g:30:7: 'enum'
+ # C.g:50:5: ( 'enum' )
+ # C.g:50:7: 'enum'
self.match("enum")
@@ -709,8 +729,8 @@
try:
self.type = T49
- # C.g:31:5: ( 'const' )
- # C.g:31:7: 'const'
+ # C.g:51:5: ( 'const' )
+ # C.g:51:7: 'const'
self.match("const")
@@ -732,8 +752,8 @@
try:
self.type = T50
- # C.g:32:5: ( 'volatile' )
- # C.g:32:7: 'volatile'
+ # C.g:52:5: ( 'volatile' )
+ # C.g:52:7: 'volatile'
self.match("volatile")
@@ -755,8 +775,8 @@
try:
self.type = T51
- # C.g:33:5: ( 'IN' )
- # C.g:33:7: 'IN'
+ # C.g:53:5: ( 'IN' )
+ # C.g:53:7: 'IN'
self.match("IN")
@@ -778,8 +798,8 @@
try:
self.type = T52
- # C.g:34:5: ( 'OUT' )
- # C.g:34:7: 'OUT'
+ # C.g:54:5: ( 'OUT' )
+ # C.g:54:7: 'OUT'
self.match("OUT")
@@ -801,8 +821,8 @@
try:
self.type = T53
- # C.g:35:5: ( 'OPTIONAL' )
- # C.g:35:7: 'OPTIONAL'
+ # C.g:55:5: ( 'OPTIONAL' )
+ # C.g:55:7: 'OPTIONAL'
self.match("OPTIONAL")
@@ -824,8 +844,8 @@
try:
self.type = T54
- # C.g:36:5: ( 'CONST' )
- # C.g:36:7: 'CONST'
+ # C.g:56:5: ( 'CONST' )
+ # C.g:56:7: 'CONST'
self.match("CONST")
@@ -847,8 +867,8 @@
try:
self.type = T55
- # C.g:37:5: ( 'UNALIGNED' )
- # C.g:37:7: 'UNALIGNED'
+ # C.g:57:5: ( 'UNALIGNED' )
+ # C.g:57:7: 'UNALIGNED'
self.match("UNALIGNED")
@@ -870,8 +890,8 @@
try:
self.type = T56
- # C.g:38:5: ( 'VOLATILE' )
- # C.g:38:7: 'VOLATILE'
+ # C.g:58:5: ( 'VOLATILE' )
+ # C.g:58:7: 'VOLATILE'
self.match("VOLATILE")
@@ -893,8 +913,8 @@
try:
self.type = T57
- # C.g:39:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
- # C.g:39:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
+ # C.g:59:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' )
+ # C.g:59:7: 'GLOBAL_REMOVE_IF_UNREFERENCED'
self.match("GLOBAL_REMOVE_IF_UNREFERENCED")
@@ -916,8 +936,8 @@
try:
self.type = T58
- # C.g:40:5: ( 'EFIAPI' )
- # C.g:40:7: 'EFIAPI'
+ # C.g:60:5: ( 'EFIAPI' )
+ # C.g:60:7: 'EFIAPI'
self.match("EFIAPI")
@@ -939,8 +959,8 @@
try:
self.type = T59
- # C.g:41:5: ( 'EFI_BOOTSERVICE' )
- # C.g:41:7: 'EFI_BOOTSERVICE'
+ # C.g:61:5: ( 'EFI_BOOTSERVICE' )
+ # C.g:61:7: 'EFI_BOOTSERVICE'
self.match("EFI_BOOTSERVICE")
@@ -962,8 +982,8 @@
try:
self.type = T60
- # C.g:42:5: ( 'EFI_RUNTIMESERVICE' )
- # C.g:42:7: 'EFI_RUNTIMESERVICE'
+ # C.g:62:5: ( 'EFI_RUNTIMESERVICE' )
+ # C.g:62:7: 'EFI_RUNTIMESERVICE'
self.match("EFI_RUNTIMESERVICE")
@@ -985,8 +1005,8 @@
try:
self.type = T61
- # C.g:43:5: ( 'PACKED' )
- # C.g:43:7: 'PACKED'
+ # C.g:63:5: ( 'PACKED' )
+ # C.g:63:7: 'PACKED'
self.match("PACKED")
@@ -1008,8 +1028,8 @@
try:
self.type = T62
- # C.g:44:5: ( '(' )
- # C.g:44:7: '('
+ # C.g:64:5: ( '(' )
+ # C.g:64:7: '('
self.match(u'(')
@@ -1030,8 +1050,8 @@
try:
self.type = T63
- # C.g:45:5: ( ')' )
- # C.g:45:7: ')'
+ # C.g:65:5: ( ')' )
+ # C.g:65:7: ')'
self.match(u')')
@@ -1052,8 +1072,8 @@
try:
self.type = T64
- # C.g:46:5: ( '[' )
- # C.g:46:7: '['
+ # C.g:66:5: ( '[' )
+ # C.g:66:7: '['
self.match(u'[')
@@ -1074,8 +1094,8 @@
try:
self.type = T65
- # C.g:47:5: ( ']' )
- # C.g:47:7: ']'
+ # C.g:67:5: ( ']' )
+ # C.g:67:7: ']'
self.match(u']')
@@ -1096,8 +1116,8 @@
try:
self.type = T66
- # C.g:48:5: ( '*' )
- # C.g:48:7: '*'
+ # C.g:68:5: ( '*' )
+ # C.g:68:7: '*'
self.match(u'*')
@@ -1118,8 +1138,8 @@
try:
self.type = T67
- # C.g:49:5: ( '...' )
- # C.g:49:7: '...'
+ # C.g:69:5: ( '...' )
+ # C.g:69:7: '...'
self.match("...")
@@ -1141,8 +1161,8 @@
try:
self.type = T68
- # C.g:50:5: ( '+' )
- # C.g:50:7: '+'
+ # C.g:70:5: ( '+' )
+ # C.g:70:7: '+'
self.match(u'+')
@@ -1163,8 +1183,8 @@
try:
self.type = T69
- # C.g:51:5: ( '-' )
- # C.g:51:7: '-'
+ # C.g:71:5: ( '-' )
+ # C.g:71:7: '-'
self.match(u'-')
@@ -1185,8 +1205,8 @@
try:
self.type = T70
- # C.g:52:5: ( '/' )
- # C.g:52:7: '/'
+ # C.g:72:5: ( '/' )
+ # C.g:72:7: '/'
self.match(u'/')
@@ -1207,8 +1227,8 @@
try:
self.type = T71
- # C.g:53:5: ( '%' )
- # C.g:53:7: '%'
+ # C.g:73:5: ( '%' )
+ # C.g:73:7: '%'
self.match(u'%')
@@ -1229,8 +1249,8 @@
try:
self.type = T72
- # C.g:54:5: ( '++' )
- # C.g:54:7: '++'
+ # C.g:74:5: ( '++' )
+ # C.g:74:7: '++'
self.match("++")
@@ -1252,8 +1272,8 @@
try:
self.type = T73
- # C.g:55:5: ( '--' )
- # C.g:55:7: '--'
+ # C.g:75:5: ( '--' )
+ # C.g:75:7: '--'
self.match("--")
@@ -1275,8 +1295,8 @@
try:
self.type = T74
- # C.g:56:5: ( 'sizeof' )
- # C.g:56:7: 'sizeof'
+ # C.g:76:5: ( 'sizeof' )
+ # C.g:76:7: 'sizeof'
self.match("sizeof")
@@ -1298,8 +1318,8 @@
try:
self.type = T75
- # C.g:57:5: ( '.' )
- # C.g:57:7: '.'
+ # C.g:77:5: ( '.' )
+ # C.g:77:7: '.'
self.match(u'.')
@@ -1320,8 +1340,8 @@
try:
self.type = T76
- # C.g:58:5: ( '->' )
- # C.g:58:7: '->'
+ # C.g:78:5: ( '->' )
+ # C.g:78:7: '->'
self.match("->")
@@ -1343,8 +1363,8 @@
try:
self.type = T77
- # C.g:59:5: ( '&' )
- # C.g:59:7: '&'
+ # C.g:79:5: ( '&' )
+ # C.g:79:7: '&'
self.match(u'&')
@@ -1365,8 +1385,8 @@
try:
self.type = T78
- # C.g:60:5: ( '~' )
- # C.g:60:7: '~'
+ # C.g:80:5: ( '~' )
+ # C.g:80:7: '~'
self.match(u'~')
@@ -1387,8 +1407,8 @@
try:
self.type = T79
- # C.g:61:5: ( '!' )
- # C.g:61:7: '!'
+ # C.g:81:5: ( '!' )
+ # C.g:81:7: '!'
self.match(u'!')
@@ -1409,8 +1429,8 @@
try:
self.type = T80
- # C.g:62:5: ( '*=' )
- # C.g:62:7: '*='
+ # C.g:82:5: ( '*=' )
+ # C.g:82:7: '*='
self.match("*=")
@@ -1432,8 +1452,8 @@
try:
self.type = T81
- # C.g:63:5: ( '/=' )
- # C.g:63:7: '/='
+ # C.g:83:5: ( '/=' )
+ # C.g:83:7: '/='
self.match("/=")
@@ -1455,8 +1475,8 @@
try:
self.type = T82
- # C.g:64:5: ( '%=' )
- # C.g:64:7: '%='
+ # C.g:84:5: ( '%=' )
+ # C.g:84:7: '%='
self.match("%=")
@@ -1478,8 +1498,8 @@
try:
self.type = T83
- # C.g:65:5: ( '+=' )
- # C.g:65:7: '+='
+ # C.g:85:5: ( '+=' )
+ # C.g:85:7: '+='
self.match("+=")
@@ -1501,8 +1521,8 @@
try:
self.type = T84
- # C.g:66:5: ( '-=' )
- # C.g:66:7: '-='
+ # C.g:86:5: ( '-=' )
+ # C.g:86:7: '-='
self.match("-=")
@@ -1524,8 +1544,8 @@
try:
self.type = T85
- # C.g:67:5: ( '<<=' )
- # C.g:67:7: '<<='
+ # C.g:87:5: ( '<<=' )
+ # C.g:87:7: '<<='
self.match("<<=")
@@ -1547,8 +1567,8 @@
try:
self.type = T86
- # C.g:68:5: ( '>>=' )
- # C.g:68:7: '>>='
+ # C.g:88:5: ( '>>=' )
+ # C.g:88:7: '>>='
self.match(">>=")
@@ -1570,8 +1590,8 @@
try:
self.type = T87
- # C.g:69:5: ( '&=' )
- # C.g:69:7: '&='
+ # C.g:89:5: ( '&=' )
+ # C.g:89:7: '&='
self.match("&=")
@@ -1593,8 +1613,8 @@
try:
self.type = T88
- # C.g:70:5: ( '^=' )
- # C.g:70:7: '^='
+ # C.g:90:5: ( '^=' )
+ # C.g:90:7: '^='
self.match("^=")
@@ -1616,8 +1636,8 @@
try:
self.type = T89
- # C.g:71:5: ( '|=' )
- # C.g:71:7: '|='
+ # C.g:91:5: ( '|=' )
+ # C.g:91:7: '|='
self.match("|=")
@@ -1639,8 +1659,8 @@
try:
self.type = T90
- # C.g:72:5: ( '?' )
- # C.g:72:7: '?'
+ # C.g:92:5: ( '?' )
+ # C.g:92:7: '?'
self.match(u'?')
@@ -1661,8 +1681,8 @@
try:
self.type = T91
- # C.g:73:5: ( '||' )
- # C.g:73:7: '||'
+ # C.g:93:5: ( '||' )
+ # C.g:93:7: '||'
self.match("||")
@@ -1684,8 +1704,8 @@
try:
self.type = T92
- # C.g:74:5: ( '&&' )
- # C.g:74:7: '&&'
+ # C.g:94:5: ( '&&' )
+ # C.g:94:7: '&&'
self.match("&&")
@@ -1707,8 +1727,8 @@
try:
self.type = T93
- # C.g:75:5: ( '|' )
- # C.g:75:7: '|'
+ # C.g:95:5: ( '|' )
+ # C.g:95:7: '|'
self.match(u'|')
@@ -1729,8 +1749,8 @@
try:
self.type = T94
- # C.g:76:5: ( '^' )
- # C.g:76:7: '^'
+ # C.g:96:5: ( '^' )
+ # C.g:96:7: '^'
self.match(u'^')
@@ -1751,8 +1771,8 @@
try:
self.type = T95
- # C.g:77:5: ( '==' )
- # C.g:77:7: '=='
+ # C.g:97:5: ( '==' )
+ # C.g:97:7: '=='
self.match("==")
@@ -1774,8 +1794,8 @@
try:
self.type = T96
- # C.g:78:5: ( '!=' )
- # C.g:78:7: '!='
+ # C.g:98:5: ( '!=' )
+ # C.g:98:7: '!='
self.match("!=")
@@ -1797,8 +1817,8 @@
try:
self.type = T97
- # C.g:79:5: ( '<' )
- # C.g:79:7: '<'
+ # C.g:99:5: ( '<' )
+ # C.g:99:7: '<'
self.match(u'<')
@@ -1819,8 +1839,8 @@
try:
self.type = T98
- # C.g:80:5: ( '>' )
- # C.g:80:7: '>'
+ # C.g:100:5: ( '>' )
+ # C.g:100:7: '>'
self.match(u'>')
@@ -1841,8 +1861,8 @@
try:
self.type = T99
- # C.g:81:5: ( '<=' )
- # C.g:81:7: '<='
+ # C.g:101:5: ( '<=' )
+ # C.g:101:7: '<='
self.match("<=")
@@ -1864,8 +1884,8 @@
try:
self.type = T100
- # C.g:82:6: ( '>=' )
- # C.g:82:8: '>='
+ # C.g:102:6: ( '>=' )
+ # C.g:102:8: '>='
self.match(">=")
@@ -1887,8 +1907,8 @@
try:
self.type = T101
- # C.g:83:6: ( '<<' )
- # C.g:83:8: '<<'
+ # C.g:103:6: ( '<<' )
+ # C.g:103:8: '<<'
self.match("<<")
@@ -1910,8 +1930,8 @@
try:
self.type = T102
- # C.g:84:6: ( '>>' )
- # C.g:84:8: '>>'
+ # C.g:104:6: ( '>>' )
+ # C.g:104:8: '>>'
self.match(">>")
@@ -1933,8 +1953,8 @@
try:
self.type = T103
- # C.g:85:6: ( '__asm__' )
- # C.g:85:8: '__asm__'
+ # C.g:105:6: ( '__asm__' )
+ # C.g:105:8: '__asm__'
self.match("__asm__")
@@ -1956,8 +1976,8 @@
try:
self.type = T104
- # C.g:86:6: ( '_asm' )
- # C.g:86:8: '_asm'
+ # C.g:106:6: ( '_asm' )
+ # C.g:106:8: '_asm'
self.match("_asm")
@@ -1979,8 +1999,8 @@
try:
self.type = T105
- # C.g:87:6: ( '__asm' )
- # C.g:87:8: '__asm'
+ # C.g:107:6: ( '__asm' )
+ # C.g:107:8: '__asm'
self.match("__asm")
@@ -2002,8 +2022,8 @@
try:
self.type = T106
- # C.g:88:6: ( 'case' )
- # C.g:88:8: 'case'
+ # C.g:108:6: ( 'case' )
+ # C.g:108:8: 'case'
self.match("case")
@@ -2025,8 +2045,8 @@
try:
self.type = T107
- # C.g:89:6: ( 'default' )
- # C.g:89:8: 'default'
+ # C.g:109:6: ( 'default' )
+ # C.g:109:8: 'default'
self.match("default")
@@ -2048,8 +2068,8 @@
try:
self.type = T108
- # C.g:90:6: ( 'if' )
- # C.g:90:8: 'if'
+ # C.g:110:6: ( 'if' )
+ # C.g:110:8: 'if'
self.match("if")
@@ -2071,8 +2091,8 @@
try:
self.type = T109
- # C.g:91:6: ( 'else' )
- # C.g:91:8: 'else'
+ # C.g:111:6: ( 'else' )
+ # C.g:111:8: 'else'
self.match("else")
@@ -2094,8 +2114,8 @@
try:
self.type = T110
- # C.g:92:6: ( 'switch' )
- # C.g:92:8: 'switch'
+ # C.g:112:6: ( 'switch' )
+ # C.g:112:8: 'switch'
self.match("switch")
@@ -2117,8 +2137,8 @@
try:
self.type = T111
- # C.g:93:6: ( 'while' )
- # C.g:93:8: 'while'
+ # C.g:113:6: ( 'while' )
+ # C.g:113:8: 'while'
self.match("while")
@@ -2140,8 +2160,8 @@
try:
self.type = T112
- # C.g:94:6: ( 'do' )
- # C.g:94:8: 'do'
+ # C.g:114:6: ( 'do' )
+ # C.g:114:8: 'do'
self.match("do")
@@ -2163,8 +2183,8 @@
try:
self.type = T113
- # C.g:95:6: ( 'for' )
- # C.g:95:8: 'for'
+ # C.g:115:6: ( 'for' )
+ # C.g:115:8: 'for'
self.match("for")
@@ -2186,8 +2206,8 @@
try:
self.type = T114
- # C.g:96:6: ( 'goto' )
- # C.g:96:8: 'goto'
+ # C.g:116:6: ( 'goto' )
+ # C.g:116:8: 'goto'
self.match("goto")
@@ -2209,8 +2229,8 @@
try:
self.type = T115
- # C.g:97:6: ( 'continue' )
- # C.g:97:8: 'continue'
+ # C.g:117:6: ( 'continue' )
+ # C.g:117:8: 'continue'
self.match("continue")
@@ -2232,8 +2252,8 @@
try:
self.type = T116
- # C.g:98:6: ( 'break' )
- # C.g:98:8: 'break'
+ # C.g:118:6: ( 'break' )
+ # C.g:118:8: 'break'
self.match("break")
@@ -2255,8 +2275,8 @@
try:
self.type = T117
- # C.g:99:6: ( 'return' )
- # C.g:99:8: 'return'
+ # C.g:119:6: ( 'return' )
+ # C.g:119:8: 'return'
self.match("return")
@@ -2278,11 +2298,11 @@
try:
self.type = IDENTIFIER
- # C.g:534:2: ( LETTER ( LETTER | '0' .. '9' )* )
- # C.g:534:4: LETTER ( LETTER | '0' .. '9' )*
+ # C.g:586:2: ( LETTER ( LETTER | '0' .. '9' )* )
+ # C.g:586:4: LETTER ( LETTER | '0' .. '9' )*
self.mLETTER()
- # C.g...
[truncated message content] |
|
From: <qh...@us...> - 2010-02-24 03:27:19
|
Revision: 1891
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1891&view=rev
Author: qhuang8
Date: 2010-02-24 03:27:13 +0000 (Wed, 24 Feb 2010)
Log Message:
-----------
Update the build tools development mailing list address in the Python source.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/PackagingTool/InstallPkg.py
trunk/BaseTools/Source/Python/PackagingTool/MkPkg.py
trunk/BaseTools/Source/Python/PackagingTool/RmPkg.py
trunk/BaseTools/Source/Python/Trim/Trim.py
trunk/BaseTools/Source/Python/build/build.py
Modified: trunk/BaseTools/Source/Python/PackagingTool/InstallPkg.py
===================================================================
--- trunk/BaseTools/Source/Python/PackagingTool/InstallPkg.py 2010-02-24 03:16:52 UTC (rev 1890)
+++ trunk/BaseTools/Source/Python/PackagingTool/InstallPkg.py 2010-02-24 03:27:13 UTC (rev 1891)
@@ -288,7 +288,7 @@
"\nInstallPkg",
CODE_ERROR,
"Unknown fatal error when installing [%s]" % Options.PackageFile,
- ExtraData="\n(Please send email to de...@bu... for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk...@li... for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
Modified: trunk/BaseTools/Source/Python/PackagingTool/MkPkg.py
===================================================================
--- trunk/BaseTools/Source/Python/PackagingTool/MkPkg.py 2010-02-24 03:16:52 UTC (rev 1890)
+++ trunk/BaseTools/Source/Python/PackagingTool/MkPkg.py 2010-02-24 03:27:13 UTC (rev 1891)
@@ -281,7 +281,7 @@
"\nMkPkg",
CODE_ERROR,
"Unknown fatal error when creating [%s]" % Options.DistributionFile,
- ExtraData="\n(Please send email to de...@bu... for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk...@li... for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
Modified: trunk/BaseTools/Source/Python/PackagingTool/RmPkg.py
===================================================================
--- trunk/BaseTools/Source/Python/PackagingTool/RmPkg.py 2010-02-24 03:16:52 UTC (rev 1890)
+++ trunk/BaseTools/Source/Python/PackagingTool/RmPkg.py 2010-02-24 03:27:13 UTC (rev 1891)
@@ -206,7 +206,7 @@
"\nRmPkg",
CODE_ERROR,
"Unknown fatal error when removing package",
- ExtraData="\n(Please send email to de...@bu... for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk...@li... for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
Modified: trunk/BaseTools/Source/Python/Trim/Trim.py
===================================================================
--- trunk/BaseTools/Source/Python/Trim/Trim.py 2010-02-24 03:16:52 UTC (rev 1890)
+++ trunk/BaseTools/Source/Python/Trim/Trim.py 2010-02-24 03:27:13 UTC (rev 1891)
@@ -519,7 +519,7 @@
"\nTrim",
CODE_ERROR,
"Unknown fatal error when trimming [%s]" % InputFile,
- ExtraData="\n(Please send email to de...@bu... for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk...@li... for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
Modified: trunk/BaseTools/Source/Python/build/build.py
===================================================================
--- trunk/BaseTools/Source/Python/build/build.py 2010-02-24 03:16:52 UTC (rev 1890)
+++ trunk/BaseTools/Source/Python/build/build.py 2010-02-24 03:27:13 UTC (rev 1891)
@@ -1842,7 +1842,7 @@
"\nbuild",
CODE_ERROR,
"Unknown fatal error when processing [%s]" % MetaFile,
- ExtraData="\n(Please send email to de...@bu... for help, attaching following call stack trace!)\n",
+ ExtraData="\n(Please send email to edk...@li... for help, attaching following call stack trace!)\n",
RaiseError=False
)
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-03-04 02:45:26
|
Revision: 1910
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1910&view=rev
Author: qhuang8
Date: 2010-03-04 02:45:19 +0000 (Thu, 04 Mar 2010)
Log Message:
-----------
Update conditional statements (!if, !ifdef, !elseif, ...) in DSC/FDF files to require the mandatory $() around macro names.
This is a non-backward-compatible change that might require a manual update of existing DSC/FDF files. (The error message printed by the tool will help the user resolve the incompatibility.)
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/FdfParserLite.py
trunk/BaseTools/Source/Python/GenFds/FdfParser.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
Modified: trunk/BaseTools/Source/Python/Common/FdfParserLite.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/FdfParserLite.py 2010-03-03 02:48:39 UTC (rev 1909)
+++ trunk/BaseTools/Source/Python/Common/FdfParserLite.py 2010-03-04 02:45:19 UTC (rev 1910)
@@ -1,7 +1,7 @@
## @file
# parse FDF file
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
#
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -383,7 +383,22 @@
while Offset <= EndPos[1]:
self.Profile.FileLinesList[EndPos[0]][Offset] = Value
Offset += 1
-
+
+
+ def __GetMacroName(self):
+ if not self.__GetNextToken():
+ raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
+ MacroName = self.__Token
+ NotFlag = False
+ if MacroName.startswith('!'):
+ NotFlag = True
+ MacroName = MacroName[1:].strip()
+
+ if not MacroName.startswith('$(') or not MacroName.endswith(')'):
+ raise Warning("Macro name expected(Please use '$(%(Token)s)' if '%(Token)s' is a macro.)" % {"Token" : MacroName},
+ self.FileName, self.CurrentLineNumber)
+ MacroName = MacroName[2:-1]
+ return MacroName, NotFlag
## PreprocessFile() method
#
@@ -554,14 +569,7 @@
IfList.append([IfStartPos, None, None])
CondLabel = self.__Token
- if not self.__GetNextToken():
- raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
- MacroName = self.__Token
- NotFlag = False
- if MacroName.startswith('!'):
- NotFlag = True
- MacroName = MacroName[1:]
-
+ MacroName, NotFlag = self.__GetMacroName()
NotDefineFlag = False
if CondLabel == '!ifndef':
NotDefineFlag = True
@@ -615,14 +623,7 @@
self.__WipeOffArea.append((IfList[-1][0], ElseStartPos))
IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
if self.__Token == '!elseif':
- if not self.__GetNextToken():
- raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
- MacroName = self.__Token
- NotFlag = False
- if MacroName.startswith('!'):
- NotFlag = True
- MacroName = MacroName[1:]
-
+ MacroName, NotFlag = self.__GetMacroName()
if not self.__GetNextOp():
raise Warning("expected !endif At Line ", self.FileName, self.CurrentLineNumber)
Modified: trunk/BaseTools/Source/Python/GenFds/FdfParser.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/FdfParser.py 2010-03-03 02:48:39 UTC (rev 1909)
+++ trunk/BaseTools/Source/Python/GenFds/FdfParser.py 2010-03-04 02:45:19 UTC (rev 1910)
@@ -1,7 +1,7 @@
## @file
# parse FDF file
#
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
#
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -415,6 +415,21 @@
Offset += 1
+ def __GetMacroName(self):
+ if not self.__GetNextToken():
+ raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
+ MacroName = self.__Token
+ NotFlag = False
+ if MacroName.startswith('!'):
+ NotFlag = True
+ MacroName = MacroName[1:].strip()
+
+ if not MacroName.startswith('$(') or not MacroName.endswith(')'):
+ raise Warning("Macro name expected(Please use '$(%(Token)s)' if '%(Token)s' is a macro.)" % {"Token" : MacroName},
+ self.FileName, self.CurrentLineNumber)
+ MacroName = MacroName[2:-1]
+ return MacroName, NotFlag
+
## PreprocessFile() method
#
# Preprocess file contents, replace comments with spaces.
@@ -545,6 +560,7 @@
self.Rewind()
+
## PreprocessIncludeFile() method
#
# Preprocess file contents, replace !include statements with file contents.
@@ -583,15 +599,8 @@
IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
IfList.append([IfStartPos, None, None])
CondLabel = self.__Token
-
- if not self.__GetNextToken():
- raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
- MacroName = self.__Token
- NotFlag = False
- if MacroName.startswith('!'):
- NotFlag = True
- MacroName = MacroName[1:]
-
+
+ MacroName, NotFlag = self.__GetMacroName()
NotDefineFlag = False
if CondLabel == '!ifndef':
NotDefineFlag = True
@@ -645,14 +654,7 @@
self.__WipeOffArea.append((IfList[-1][0], ElseStartPos))
IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
if self.__Token == '!elseif':
- if not self.__GetNextToken():
- raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
- MacroName = self.__Token
- NotFlag = False
- if MacroName.startswith('!'):
- NotFlag = True
- MacroName = MacroName[1:]
-
+ MacroName, NotFlag = self.__GetMacroName()
if not self.__GetNextOp():
raise Warning("expected !endif", self.FileName, self.CurrentLineNumber)
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-03-03 02:48:39 UTC (rev 1909)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-03-04 02:45:19 UTC (rev 1910)
@@ -799,13 +799,24 @@
else:
self._Enabled = len(self._Eval)
- ## Evaludate the value of expression in "if/ifdef/ifndef" directives
+ ## Evaluate the Token for its value; for now only macros are supported.
+ def _EvaluateToken(self, TokenName, Expression):
+ if TokenName.startswith("$(") and TokenName.endswith(")"):
+ Name = TokenName[2:-1]
+ return self._Macros.get(Name)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Unknown operand '%(Token)s', "
+ "please use '$(%(Token)s)' if '%(Token)s' is a macro" % {"Token" : TokenName},
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=Expression)
+
+ ## Evaluate the value of expression in "if/ifdef/ifndef" directives
def _Evaluate(self, Expression):
TokenList = Expression.split()
TokenNumber = len(TokenList)
# one operand, guess it's just a macro name
if TokenNumber == 1:
- return TokenList[0] in self._Macros
+ TokenValue = self._EvaluateToken(TokenList[0], Expression)
+ return TokenValue != None
# two operands, suppose it's "!xxx" format
elif TokenNumber == 2:
Op = TokenList[0]
@@ -819,8 +830,8 @@
return self._OP_[Op](Value)
# three operands
elif TokenNumber == 3:
- Name = TokenList[0]
- if Name not in self._Macros:
+ TokenValue = self._EvaluateToken(TokenList[0], Expression)
+ if TokenValue == None:
return False
Value = TokenList[2]
if Value[0] in ["'", '"'] and Value[-1] in ["'", '"']:
@@ -829,7 +840,7 @@
if Op not in self._OP_:
EdkLogger.error('Parser', FORMAT_INVALID, "Unsupported operator [%s]" % Op, File=self.MetaFile,
Line=self._LineIndex+1, ExtraData=Expression)
- return self._OP_[Op](self._Macros[Name], Value)
+ return self._OP_[Op](TokenValue, Value)
else:
EdkLogger.error('Parser', FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
ExtraData=Expression)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <lg...@us...> - 2010-03-05 09:36:46
|
Revision: 1916
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1916&view=rev
Author: lgao4
Date: 2010-03-05 09:36:39 +0000 (Fri, 05 Mar 2010)
Log Message:
-----------
Update GenFds to support HexValue in DATA region.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/FdfParserLite.py
trunk/BaseTools/Source/Python/GenFds/FdfParser.py
Modified: trunk/BaseTools/Source/Python/Common/FdfParserLite.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/FdfParserLite.py 2010-03-05 09:35:36 UTC (rev 1915)
+++ trunk/BaseTools/Source/Python/Common/FdfParserLite.py 2010-03-05 09:36:39 UTC (rev 1916)
@@ -1760,8 +1760,8 @@
if not self.__GetNextHexNumber():
raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
DataString = self.__Token
DataString += ","
@@ -1792,8 +1792,8 @@
if not self.__GetNextHexNumber():
raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
DataString = self.__Token
DataString += ","
Modified: trunk/BaseTools/Source/Python/GenFds/FdfParser.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/FdfParser.py 2010-03-05 09:35:36 UTC (rev 1915)
+++ trunk/BaseTools/Source/Python/GenFds/FdfParser.py 2010-03-05 09:36:39 UTC (rev 1916)
@@ -1787,19 +1787,27 @@
if not self.__GetNextHexNumber():
raise Warning("expected Hex byte", self.FileName, self.CurrentLineNumber)
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
- DataString = self.__Token
- DataString += ","
+ # convert hex string value to byte hex string array
+ AllString = self.__Token
+ AllStrLen = len (AllString)
+ DataString = ""
+ while AllStrLen > 4:
+ DataString = DataString + "0x" + AllString[AllStrLen - 2: AllStrLen] + ","
+ AllStrLen = AllStrLen - 2
+ DataString = DataString + AllString[:AllStrLen] + ","
- while self.__IsToken(","):
- if not self.__GetNextHexNumber():
- raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
- DataString += self.__Token
- DataString += ","
+ # byte value array
+ if len (self.__Token) <= 4:
+ while self.__IsToken(","):
+ if not self.__GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
if not self.__IsToken( "}"):
raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
@@ -1819,18 +1827,27 @@
if not self.__GetNextHexNumber():
raise Warning("expected Hex byte", self.FileName, self.CurrentLineNumber)
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 18:
+ raise Warning("Hex string can't be converted to a valid UINT64 value", self.FileName, self.CurrentLineNumber)
- DataString = self.__Token
- DataString += ","
+ # convert hex string value to byte hex string array
+ AllString = self.__Token
+ AllStrLen = len (AllString)
+ DataString = ""
+ while AllStrLen > 4:
+ DataString = DataString + "0x" + AllString[AllStrLen - 2: AllStrLen] + ","
+ AllStrLen = AllStrLen - 2
+ DataString = DataString + AllString[:AllStrLen] + ","
- while self.__IsToken(","):
- self.__GetNextHexNumber()
- if len(self.__Token) > 4:
- raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
- DataString += self.__Token
- DataString += ","
+ # byte value array
+ if len (self.__Token) <= 4:
+ while self.__IsToken(","):
+ if not self.__GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
if not self.__IsToken( "}"):
raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-03-12 09:11:16
|
Revision: 1929
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1929&view=rev
Author: qhuang8
Date: 2010-03-12 09:11:10 +0000 (Fri, 12 Mar 2010)
Log Message:
-----------
Fix two parser related issues:
1. A /* block comment started in an INF file but left unclosed.
2. Allow "//" comments on the #langdef line in UNI files.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/UniClassObject.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
Modified: trunk/BaseTools/Source/Python/AutoGen/UniClassObject.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/UniClassObject.py 2010-03-12 07:53:17 UTC (rev 1928)
+++ trunk/BaseTools/Source/Python/AutoGen/UniClassObject.py 2010-03-12 09:11:10 UTC (rev 1929)
@@ -1,4 +1,4 @@
-# Copyright (c) 2007, Intel Corporation
+# Copyright (c) 2007 - 2010, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -193,7 +193,7 @@
# Get Language definition
#
def GetLangDef(self, File, Line):
- Lang = Line.split()
+ Lang = Line.split(u"//")[0].split()
if len(Lang) != 3:
try:
FileIn = codecs.open(File, mode='rb', encoding='utf-16').read()
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-03-12 07:53:17 UTC (rev 1928)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-03-12 09:11:10 UTC (rev 1929)
@@ -398,6 +398,9 @@
-1,
0
)
+ if IsFindBlockComment:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
+ File=self.MetaFile)
self._Done()
## Data parser for the format in which there's path
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <lg...@us...> - 2010-03-16 08:25:04
|
Revision: 1932
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1932&view=rev
Author: lgao4
Date: 2010-03-16 08:24:57 +0000 (Tue, 16 Mar 2010)
Log Message:
-----------
Add a Flag option to the build tool to specify the option used when parsing EDK UNI files.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/build/build.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-03-15 10:01:05 UTC (rev 1931)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-03-16 08:24:57 UTC (rev 1932)
@@ -138,7 +138,7 @@
# @param SkuId SKU id from command line
#
def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
- BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=[], Fvs=[], SkuId=''):
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=[], Fvs=[], SkuId='', UniFlag=None):
self.MetaFile = ActivePlatform.MetaFile
self.WorkspaceDir = WorkspaceDir
self.Platform = ActivePlatform
@@ -146,6 +146,7 @@
self.ToolChain = Toolchain
self.ArchList = ArchList
self.SkuId = SkuId
+ self.UniFlag = UniFlag
self.BuildDatabase = MetaFileDb
self.TargetTxt = BuildConfig
@@ -1191,6 +1192,13 @@
BuildOptions[Tool][Attr] = Value[1:]
else:
BuildOptions[Tool][Attr] += " " + Value
+ if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
+ #
+ # Override UNI flag only for EDK module.
+ #
+ if 'BUILD' not in BuildOptions:
+ BuildOptions['BUILD'] = {}
+ BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag
return BuildOptions
Platform = property(_GetPlatform)
Modified: trunk/BaseTools/Source/Python/build/build.py
===================================================================
--- trunk/BaseTools/Source/Python/build/build.py 2010-03-15 10:01:05 UTC (rev 1931)
+++ trunk/BaseTools/Source/Python/build/build.py 2010-03-16 08:24:57 UTC (rev 1932)
@@ -704,7 +704,7 @@
BuildTarget, FlashDefinition, FdList=[], FvList=[],
MakefileType="nmake", SilentMode=False, ThreadNumber=2,
SkipAutoGen=False, Reparse=False, SkuId=None,
- ReportFile=None, ReportType=None):
+ ReportFile=None, ReportType=None, UniFlag=None):
self.WorkspaceDir = WorkspaceDir
self.Target = Target
@@ -731,6 +731,7 @@
self.BuildDatabase = self.Db.BuildObject
self.Platform = None
self.LoadFixAddress = 0
+ self.UniFlag = UniFlag
# print dot charater during doing some time-consuming work
self.Progress = Utils.Progressor()
@@ -1291,7 +1292,8 @@
self.Fdf,
self.FdList,
self.FvList,
- self.SkuId
+ self.SkuId,
+ self.UniFlag
)
self.BuildReport.AddPlatformReport(Wa)
self.Progress.Stop("done!")
@@ -1358,7 +1360,8 @@
self.Fdf,
self.FdList,
self.FvList,
- self.SkuId
+ self.SkuId,
+ self.UniFlag
)
Wa.CreateMakeFile(False)
self.Progress.Stop("done!")
@@ -1435,7 +1438,8 @@
self.Fdf,
self.FdList,
self.FvList,
- self.SkuId
+ self.SkuId,
+ self.UniFlag
)
self.BuildReport.AddPlatformReport(Wa)
Wa.CreateMakeFile(False)
@@ -1710,6 +1714,10 @@
Parser.add_option("-Y", "--report-type", action="append", type="choice", choices=['PCD','LIBRARY','FLASH','DEPEX','BUILD_FLAGS','FIXED_ADDRESS', 'EXECUTION_ORDER'], dest="ReportType", default=[],
help="Flags that control the type of build report to generate. Must be one of: [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS, EXECUTION_ORDER]. "\
"To specify more than one flag, repeat this option on the command line and the default flag set is [PCD, LIBRARY, FLASH, DEPEX, BUILD_FLAGS, FIXED_ADDRESS]")
+ Parser.add_option("-F", "--flag", action="store", type="string", dest="Flag",
+ help="Specify the specific option to parse EDK UNI file. Must be one of: [-c, -s]. -c is for EDK framework UNI file, and -s is for EDK UEFI UNI file. "\
+ "This option can also be specified by setting *_*_*_BUILD_FLAGS in [BuildOptions] section of platform DSC. If they are both specified, this value "\
+ "will override the setting in [BuildOptions] section of platform DSC.")
(Opt, Args)=Parser.parse_args()
return (Opt, Args)
@@ -1821,12 +1829,15 @@
if ErrorCode != 0:
EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo)
+ if Option.Flag != None and Option.Flag not in ['-c', '-s']:
+ EdkLogger.error("build", OPTION_VALUE_INVALID, "UNI flag must be one of -c or -s")
+
MyBuild = Build(Target, Workspace, Option.PlatformFile, Option.ModuleFile,
Option.TargetArch, Option.ToolChain, Option.BuildTarget,
Option.FdfFile, Option.RomImage, Option.FvImage,
None, Option.SilentMode, Option.ThreadNumber,
Option.SkipAutoGen, Option.Reparse, Option.SkuId,
- Option.ReportFile, Option.ReportType)
+ Option.ReportFile, Option.ReportType, Option.Flag)
MyBuild.Launch()
#MyBuild.DumpBuildData()
except FatalError, X:
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <lg...@us...> - 2010-04-19 06:38:41
|
Revision: 1953
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1953&view=rev
Author: lgao4
Date: 2010-04-19 06:38:34 +0000 (Mon, 19 Apr 2010)
Log Message:
-----------
Update the output file (System map file and GUID XREF file) only when the file content is changed.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/GenFds/GenFds.py
trunk/BaseTools/Source/Python/build/build.py
Modified: trunk/BaseTools/Source/Python/GenFds/GenFds.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/GenFds.py 2010-04-16 05:30:08 UTC (rev 1952)
+++ trunk/BaseTools/Source/Python/GenFds/GenFds.py 2010-04-19 06:38:34 UTC (rev 1953)
@@ -35,6 +35,7 @@
from Common import EdkLogger
from Common.String import *
from Common.Misc import DirCache,PathClass
+from Common.Misc import SaveFileOnChange
## Version and Copyright
versionNumber = "1.0"
@@ -486,14 +487,15 @@
def GenerateGuidXRefFile(BuildDb, ArchList):
GuidXRefFileName = os.path.join(GenFdsGlobalVariable.FvDir, "Guid.xref")
- GuidXRefFile = open(GuidXRefFileName, "w+")
+ GuidXRefFile = StringIO.StringIO('')
for Arch in ArchList:
PlatformDataBase = BuildDb.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch]
for ModuleFile in PlatformDataBase.Modules:
Module = BuildDb.BuildObject[ModuleFile, Arch]
GuidXRefFile.write("%s %s\n" % (Module.Guid, Module.BaseName))
+ SaveFileOnChange(GuidXRefFileName, GuidXRefFile.getvalue(), False)
GuidXRefFile.close()
- GenFdsGlobalVariable.InfLogger("\nGUID cross reference file saved to %s" % GuidXRefFileName)
+ GenFdsGlobalVariable.InfLogger("\nGUID cross reference file can be found at %s" % GuidXRefFileName)
##Define GenFd as static function
GenFd = staticmethod(GenFd)
Modified: trunk/BaseTools/Source/Python/build/build.py
===================================================================
--- trunk/BaseTools/Source/Python/build/build.py 2010-04-16 05:30:08 UTC (rev 1952)
+++ trunk/BaseTools/Source/Python/build/build.py 2010-04-19 06:38:34 UTC (rev 1953)
@@ -1289,12 +1289,10 @@
#
# Save address map into MAP file.
#
- MapFile = open(MapFilePath, "wb")
- MapFile.write(MapBuffer.getvalue())
- MapFile.close()
- MapBuffer.close()
+ SaveFileOnChange(MapFilePath, MapBuffer.getvalue(), False)
+ MapBuffer.close()
if self.LoadFixAddress != 0:
- sys.stdout.write ("\nLoad Module At Fix Address Map file saved to %s\n" %(MapFilePath))
+ sys.stdout.write ("\nLoad Module At Fix Address Map file can be found at %s\n" %(MapFilePath))
sys.stdout.flush()
## Build active platform for different build targets and different tool chains
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-05-13 00:53:42
|
Revision: 1971
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1971&view=rev
Author: qhuang8
Date: 2010-05-13 00:53:35 +0000 (Thu, 13 May 2010)
Log Message:
-----------
Enhance ECC tool to add two new check items:
1. Check the doxygen file header for INF/DEC/DSC/FDF to start with "## @file"
2. Check the FILE_GUID duplication for all INFs scanned with new error code 10016.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Ecc/Check.py
trunk/BaseTools/Source/Python/Ecc/Configuration.py
trunk/BaseTools/Source/Python/Ecc/EccToolError.py
trunk/BaseTools/Source/Python/Ecc/config.ini
trunk/BaseTools/Source/Python/Table/TableReport.py
Modified: trunk/BaseTools/Source/Python/Ecc/Check.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Check.py 2010-05-12 01:33:12 UTC (rev 1970)
+++ trunk/BaseTools/Source/Python/Ecc/Check.py 2010-05-13 00:53:35 UTC (rev 1971)
@@ -341,9 +341,19 @@
for Dirpath, Dirnames, Filenames in self.WalkTree():
for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
+ Ext = os.path.splitext(F)[1]
+ if Ext in ('.h', '.c'):
FullName = os.path.join(Dirpath, F)
MsgList = c.CheckFileHeaderDoxygenComments(FullName)
+ elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
+ FullName = os.path.join(Dirpath, F)
+ if not open(FullName).read().startswith('## @file'):
+ SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
+ ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
+ for Result in ResultSet:
+ Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file""'
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
+
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
def DoxygenCheckFunctionHeader(self):
@@ -399,6 +409,7 @@
self.MetaDataFileCheckGuidDuplicate()
self.MetaDataFileCheckModuleFileNoUse()
self.MetaDataFileCheckPcdType()
+ self.MetaDataFileCheckModuleFileGuidDuplication()
# Check whether each file defined in meta-data exists
def MetaDataFileCheckPathName(self):
@@ -692,6 +703,38 @@
#ERROR_META_DATA_FILE_CHECK_PCD_TYPE
pass
+ # Internal worker function to get the INF workspace relative path from FileID
+ def GetInfFilePathFromID(self, FileID):
+ Table = EccGlobalData.gDb.TblFile
+ SqlCommand = """select A.FullPath from %s as A where A.ID = %s""" % (Table.Table, FileID)
+ RecordSet = Table.Exec(SqlCommand)
+ Path = ""
+ for Record in RecordSet:
+ Path = Record[0].replace(EccGlobalData.gWorkspace, '')
+ if Path.startswith('\\') or Path.startswith('/'):
+ Path = Path[1:]
+ return Path
+
+ # Check whether two module INFs under one workspace has the same FILE_GUID value
+ def MetaDataFileCheckModuleFileGuidDuplication(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for pcd type in c code function usage ...")
+ Table = EccGlobalData.gDb.TblInf
+ SqlCommand = """
+ select A.ID, A.Value2, A.BelongsToFile, B.BelongsToFile from %s as A, %s as B
+ where A.Value1 = 'FILE_GUID' and B.Value1 = 'FILE_GUID' and
+ A.Value2 = B.Value2 and A.ID <> B.ID group by A.ID
+ """ % (Table.Table, Table.Table)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ InfPath1 = self.GetInfFilePathFromID(Record[2])
+ InfPath2 = self.GetInfFilePathFromID(Record[3])
+ if InfPath1 and InfPath2:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):
+ Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg = Msg, BelongsToTable = Table.Table, BelongsToItem = Record[0])
+
+
# Check whether these is duplicate Guid/Ppi/Protocol name
def CheckGuidProtocolPpi(self, ErrorID, Model, Table):
Name = ''
Modified: trunk/BaseTools/Source/Python/Ecc/Configuration.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Configuration.py 2010-05-12 01:33:12 UTC (rev 1970)
+++ trunk/BaseTools/Source/Python/Ecc/Configuration.py 2010-05-13 00:53:35 UTC (rev 1971)
@@ -222,7 +222,9 @@
self.MetaDataFileCheckModuleFileNoUse = 1
# Check whether the PCD is correctly used in C function via its type
self.MetaDataFileCheckPcdType = 1
-
+ # Check whether there are FILE_GUID duplication among different INF files
+ self.MetaDataFileCheckModuleFileGuidDuplication = 1
+
#
# The check points in this section are reserved
#
Modified: trunk/BaseTools/Source/Python/Ecc/EccToolError.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/EccToolError.py 2010-05-12 01:33:12 UTC (rev 1970)
+++ trunk/BaseTools/Source/Python/Ecc/EccToolError.py 2010-05-13 00:53:35 UTC (rev 1971)
@@ -92,6 +92,7 @@
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI = 10013
ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE = 10014
ERROR_META_DATA_FILE_CHECK_PCD_TYPE = 10015
+ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION = 10016
ERROR_SPELLING_CHECK_ALL = 11000
@@ -177,7 +178,7 @@
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI : "Duplicate PPI found",
ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE : "No used module files found",
ERROR_META_DATA_FILE_CHECK_PCD_TYPE : "Wrong C code function used for this kind of PCD",
-
+ ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION : "Module file has FILE_GUID collision with other module file",
ERROR_SPELLING_CHECK_ALL : "",
}
Modified: trunk/BaseTools/Source/Python/Ecc/config.ini
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/config.ini 2010-05-12 01:33:12 UTC (rev 1970)
+++ trunk/BaseTools/Source/Python/Ecc/config.ini 2010-05-13 00:53:35 UTC (rev 1971)
@@ -2,7 +2,7 @@
# This file is used to set configuration of ECC tool
# For the items listed below, 1 means valid, 0 means invalid
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -233,6 +233,8 @@
MetaDataFileCheckModuleFileNoUse = 1
# Check whether the PCD is correctly used in C function via its type
MetaDataFileCheckPcdType = 1
+# Check whether there are FILE_GUID duplication among different INF files
+MetaDataFileCheckModuleFileGuidDuplication = 1
#
# The check points in this section are reserved
Modified: trunk/BaseTools/Source/Python/Table/TableReport.py
===================================================================
--- trunk/BaseTools/Source/Python/Table/TableReport.py 2010-05-12 01:33:12 UTC (rev 1970)
+++ trunk/BaseTools/Source/Python/Table/TableReport.py 2010-05-13 00:53:35 UTC (rev 1971)
@@ -105,7 +105,7 @@
IsCorrected = Record[5]
SqlCommand = ''
if BelongsToTable == 'File':
- SqlCommand = """select 0, FullPath from %s where ID = %s
+ SqlCommand = """select 1, FullPath from %s where ID = %s
""" % (BelongsToTable, BelongsToItem)
else:
SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-06-11 07:14:38
|
Revision: 1978
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=1978&view=rev
Author: qhuang8
Date: 2010-06-11 07:14:32 +0000 (Fri, 11 Jun 2010)
Log Message:
-----------
Fix the incremental build loophole with the !include statement:
Whenever an !include file is successfully parsed, an imaginary record named "MODEL_EXTERNAL_DEPENDENCY" is inserted to indicate that the meta data file may have an external dependency on another file. When we check the integrity of the meta data file, we need to consider all of its external dependencies.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-06-04 18:06:16 UTC (rev 1977)
+++ trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-06-11 07:14:32 UTC (rev 1978)
@@ -1,7 +1,7 @@
## @file
-# This file is used to define class for data sturcture used in ECC
+# This file is used to define class for data structure used in ECC
#
-# Copyright (c) 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -92,6 +92,8 @@
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
+MODEL_EXTERNAL_DEPENDENCY = 10000
+
MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
('MODEL_FILE_C', MODEL_FILE_C),
('MODEL_FILE_H', MODEL_FILE_H),
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-06-04 18:06:16 UTC (rev 1977)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-06-11 07:14:32 UTC (rev 1978)
@@ -774,6 +774,8 @@
except:
EdkLogger.error("Parser", PARSER_ERROR, File=self.MetaFile, Line=self._LineIndex+1,
ExtraData="Failed to parse content in file %s" % IncludedFile)
+ # insert an imaginary token in the DSC table to indicate its external dependency on another file
+ self._Store(MODEL_EXTERNAL_DEPENDENCY, IncludedFile, str(os.stat(IncludedFile)[8]), "")
# update current status with sub-parser's status
self._SectionName = Parser._SectionName
self._SectionType = Parser._SectionType
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-06-04 18:06:16 UTC (rev 1977)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-06-11 07:14:32 UTC (rev 1978)
@@ -2287,6 +2287,13 @@
Result = self.Cur.execute("select min(ID) from %s" % (TableName)).fetchall()
if Result[0][0] != -1:
return False
+ #
+ # Check whether the meta data file has external dependency by comparing the time stamp
+ #
+ Sql = "select Value1, Value2 from %s where Model=%d" % (TableName, MODEL_EXTERNAL_DEPENDENCY)
+ for Dependency in self.Cur.execute(Sql).fetchall():
+ if str(os.stat(Dependency[0])[8]) != Dependency[1]:
+ return False
except:
return False
return True
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <jw...@us...> - 2010-08-09 14:08:16
|
Revision: 2013
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2013&view=rev
Author: jwang36
Date: 2010-08-09 14:08:10 +0000 (Mon, 09 Aug 2010)
Log Message:
-----------
Updated tools code to support BSF file generation.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/AutoGen/GenC.py
trunk/BaseTools/Source/Python/Common/Misc.py
trunk/BaseTools/Source/Python/Common/String.py
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -770,10 +770,14 @@
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
+ if self._NonDynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
def _GetDynamicPcdList(self):
+ if self._DynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
Modified: trunk/BaseTools/Source/Python/AutoGen/GenC.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/AutoGen/GenC.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -1262,10 +1262,11 @@
VariableHeadValueList = []
Pcd.InitString = 'UNINIT'
- if Pcd.Type in ["DynamicVpd", "DynamicExVpd"]:
- Pcd.TokenTypeList = ['PCD_TYPE_VPD']
- elif Pcd.DatumType == 'VOID*':
- Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ if Pcd.DatumType == 'VOID*':
+ if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
+ Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ else:
+ Pcd.TokenTypeList = []
elif Pcd.DatumType == 'BOOLEAN':
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
else:
Modified: trunk/BaseTools/Source/Python/Common/Misc.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/Misc.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/Common/Misc.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -719,7 +719,7 @@
while Template:
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
if not MatchObj:
- if MatchEnd < len(Template):
+ if MatchEnd <= len(Template):
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
TemplateSectionList.append(TemplateSection)
break
Modified: trunk/BaseTools/Source/Python/Common/String.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/String.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/Common/String.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -296,6 +296,50 @@
return Line
+## CleanString2
+#
+# Split comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
+#
+# @retval Path Formatted path
+#
+def CleanString2(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip();
+ #
+ # Replace R8's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
+ #
+ # separate comments and statements
+ #
+ LineParts = Line.split(CommentCharacter, 1);
+ #
+ # remove whitespace again
+ #
+ Line = LineParts[0].strip();
+ if len(LineParts) > 1:
+ Comment = LineParts[1].strip()
+ # Remove prefixed and trailing comment characters
+ Start = 0
+ End = len(Comment)
+ while Start < End and Comment.startswith(CommentCharacter, Start, End):
+ Start += 1
+ while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
+ End -= 1
+ Comment = Comment[Start:End]
+ Comment = Comment.strip()
+ else:
+ Comment = ''
+
+ return Line, Comment
+
## GetMultipleValuesOfKeyFromLines
#
# Parse multiple strings to clean comment and spaces
Modified: trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -91,6 +91,7 @@
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
+MODEL_META_DATA_COMMENT = 5016
MODEL_EXTERNAL_DEPENDENCY = 10000
@@ -159,7 +160,8 @@
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
- ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)
+ ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
+ ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
]
## FunctionClass
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -564,6 +564,7 @@
# sections which allow "!include" directive
_IncludeAllowedSection = [
+ TAB_COMMON_DEFINES.upper(),
TAB_LIBRARIES.upper(),
TAB_LIBRARY_CLASSES.upper(),
TAB_SKUIDS.upper(),
@@ -781,6 +782,7 @@
self._SectionType = Parser._SectionType
self._Scope = Parser._Scope
self._Enabled = Parser._Enabled
+ self._Macros.update(Parser._Macros)
else:
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
# evaluate the expression
@@ -965,6 +967,7 @@
#
def __init__(self, FilePath, FileType, Table, Macro=None):
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
+ self._Comments = []
## Parser starter
def Start(self):
@@ -975,27 +978,34 @@
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
for Index in range(0, len(self._Content)):
- Line = CleanString(self._Content[Index])
+ Line, Comment = CleanString2(self._Content[Index])
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # save comment for later use
+ if Comment:
+ self._Comments.append((Comment, self._LineIndex+1))
# skip empty line
if Line == '':
continue
- self._CurrentLine = Line
- self._LineIndex = Index
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
+ self._Comments = []
continue
elif Line.startswith('DEFINE '):
self._MacroParser()
continue
elif len(self._SectionType) == 0:
+ self._Comments = []
continue
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
if self._ValueList == None:
+ self._Comments = []
continue
#
@@ -1017,6 +1027,22 @@
-1,
0
)
+ for Comment, LineNo in self._Comments:
+ self._Store(
+ MODEL_META_DATA_COMMENT,
+ Comment,
+ self._ValueList[0],
+ self._ValueList[1],
+ Arch,
+ ModuleType,
+ self._LastItem,
+ LineNo,
+ -1,
+ LineNo,
+ -1,
+ 0
+ )
+ self._Comments = []
self._Done()
## Section header parser
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-09 10:04:28 UTC (rev 2012)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-09 14:08:10 UTC (rev 2013)
@@ -280,6 +280,8 @@
def _SetSkuName(self, Value):
if Value in self.SkuIds:
self._SkuName = Value
+ # Needs to re-retrieve the PCD information
+ self._Pcds = None
def _GetFdfFile(self):
if self._FlashDefinition == None:
@@ -712,32 +714,22 @@
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
- ValueList = ['', '']
+ ValueList = ['', '', '']
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
- # The TokenList have optional data, process flow will base on it's length
- if len(TokenList) == 1:
- VpdOffset = TokenList[0]
- MaxDatumSize, PcdValue = None, ''
- elif len(TokenList) == 2:
- VpdOffset, MaxDatumSize = TokenList[0:len(TokenList)]
- PcdValue = ''
- elif len(TokenList) == 3:
- VpdOffset, MaxDatumSize, PcdValue = TokenList[0:len(TokenList)]
- # Error format of vpd definition
- else:
- EdkLogger.error("build", FORMAT_INVALID, "Error format of VPD pcd definition.", File=self.MetaFile)
-
- SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)
+ ValueList[0:len(TokenList)] = TokenList
+ VpdOffset, MaxDatumSize, InitialValue = ValueList
+
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
self._PCD_TYPE_STRING_[Type],
'',
- PcdValue,
'',
+ '',
MaxDatumSize,
{self.SkuName : SkuInfo},
False,
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <hc...@us...> - 2010-08-16 11:30:50
|
Revision: 2015
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2015&view=rev
Author: hchen30
Date: 2010-08-16 11:30:42 +0000 (Mon, 16 Aug 2010)
Log Message:
-----------
1. Add a new checkpoint for ECC to check whether a file contains non-ASCII characters.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/Ecc/Check.py
trunk/BaseTools/Source/Python/Ecc/Configuration.py
trunk/BaseTools/Source/Python/Ecc/EccToolError.py
trunk/BaseTools/Source/Python/Ecc/c.py
trunk/BaseTools/Source/Python/Ecc/config.ini
Modified: trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-16 11:30:42 UTC (rev 2015)
@@ -29,6 +29,7 @@
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
+MODEL_FILE_OTHERS = 1099
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
@@ -104,6 +105,8 @@
('MODEL_FILE_DSC', MODEL_FILE_DSC),
('MODEL_FILE_FDF', MODEL_FILE_FDF),
('MODEL_FILE_INC', MODEL_FILE_INC),
+ ('MODEL_FILE_CIF', MODEL_FILE_CIF),
+ ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
@@ -167,10 +170,10 @@
## FunctionClass
#
# This class defines a structure of a function
-#
+#
# @param ID: ID of a Function
# @param Header: Header of a Function
-# @param Modifier: Modifier of a Function
+# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function
@@ -185,7 +188,7 @@
#
# @var ID: ID of a Function
# @var Header: Header of a Function
-# @var Modifier: Modifier of a Function
+# @var Modifier: Modifier of a Function
# @var Name: Name of a Function
# @var ReturnStatement: ReturnStatement of a Funciont
# @var StartLine: StartLine of a Function
@@ -206,7 +209,7 @@
FunNameStartLine = -1, FunNameStartColumn = -1):
self.ID = ID
self.Header = Header
- self.Modifier = Modifier
+ self.Modifier = Modifier
self.Name = Name
self.ReturnStatement = ReturnStatement
self.StartLine = StartLine
@@ -218,14 +221,14 @@
self.BelongsToFile = BelongsToFile
self.FunNameStartLine = FunNameStartLine
self.FunNameStartColumn = FunNameStartColumn
-
+
self.IdentifierList = IdentifierList
self.PcdList = PcdList
## IdentifierClass
#
# This class defines a structure of a variable
-#
+#
# @param ID: ID of a Identifier
# @param Modifier: Modifier of a Identifier
# @param Type: Type of a Identifier
@@ -271,7 +274,7 @@
## PcdClass
#
# This class defines a structure of a Pcd
-#
+#
# @param ID: ID of a Pcd
# @param CName: CName of a Pcd
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
@@ -316,7 +319,7 @@
## FileClass
#
# This class defines a structure of a file
-#
+#
# @param ID: ID of a File
# @param Name: Name of a File
# @param ExtName: ExtName of a File
@@ -342,14 +345,14 @@
class FileClass(object):
def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
FunctionList = [], IdentifierList = [], PcdList = []):
- self.ID = ID
+ self.ID = ID
self.Name = Name
- self.ExtName = ExtName
+ self.ExtName = ExtName
self.Path = Path
self.FullPath = FullPath
self.Model = Model
self.TimeStamp = TimeStamp
-
+
self.FunctionList = FunctionList
self.IdentifierList = IdentifierList
self.PcdList = PcdList
Modified: trunk/BaseTools/Source/Python/Ecc/Check.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Check.py 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/Ecc/Check.py 2010-08-16 11:30:42 UTC (rev 2015)
@@ -30,6 +30,7 @@
# Check all required checkpoints
def Check(self):
+ self.GeneralCheck()
self.MetaDataFileCheck()
self.DoxygenCheck()
self.IncludeFileCheck()
@@ -38,6 +39,30 @@
self.FunctionLayoutCheck()
self.NamingConventionCheck()
+ # General Checking
+ def GeneralCheck(self):
+ self.GeneralCheckNonAcsii()
+
+ # Check whether file has non ACSII char
+ def GeneralCheckNonAcsii(self):
+ if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Non-ACSII char in file ...")
+ BinaryExtList = ['EXE', 'EFI', 'FV', 'ROM', 'DLL', 'COM', 'BMP', 'GIF', 'PYD', 'CMP', 'BIN', 'JPG', 'UNI', 'RAW', 'COM2', 'LIB', 'DEPEX']
+ SqlCommand = """select ID, FullPath, ExtName from File"""
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in BinaryExtList:
+ op = open(Record[1]).readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ IndexOfChar = 0
+ for Char in Line:
+ IndexOfChar += 1
+ if ord(Char) > 126:
+ OtherMsg = "File %s has Non-ASCII char at line %s column %s" %(Record[1], IndexOfLine, IndexOfChar)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg = OtherMsg, BelongsToTable = 'File', BelongsToItem = Record[0])
+
# C Function Layout Checking
def FunctionLayoutCheck(self):
self.FunctionLayoutCheckReturnType()
Modified: trunk/BaseTools/Source/Python/Ecc/Configuration.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/Configuration.py 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/Ecc/Configuration.py 2010-08-16 11:30:42 UTC (rev 2015)
@@ -28,7 +28,7 @@
class Configuration(object):
def __init__(self, Filename):
self.Filename = Filename
-
+
self.Version = 0.1
## Identify to if check all items
@@ -49,14 +49,14 @@
# SpaceCheckAll
#
self.AutoCorrect = 0
-
+
# List customized Modifer here, split with ','
# Defaultly use the definition in class DataType
self.ModifierList = MODIFIER_LIST
-
+
## General Checking
self.GeneralCheckAll = 0
-
+
# Check whether NO Tab is used, replaced with spaces
self.GeneralCheckNoTab = 1
# The width of Tab
@@ -77,31 +77,33 @@
self.GeneralCheckCarriageReturn = 1
# Check whether the file exists
self.GeneralCheckFileExistence = 1
-
+ # Check whether file has non ACSII char
+ self.GeneralCheckNonAcsii = 1
+
## Space Checking
self.SpaceCheckAll = 1
-
+
## Predicate Expression Checking
self.PredicateExpressionCheckAll = 0
-
+
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
self.PredicateExpressionCheckBooleanValue = 1
- # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+ # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
self.PredicateExpressionCheckNonBooleanOperator = 1
# Check whether a comparison of any pointer to zero must be done via the NULL type
self.PredicateExpressionCheckComparisonNullType = 1
-
+
## Headers Checking
self.HeaderCheckAll = 0
-
+
# Check whether File header exists
self.HeaderCheckFile = 1
# Check whether Function header exists
self.HeaderCheckFunction = 1
-
+
## C Function Layout Checking
self.CFunctionLayoutCheckAll = 0
-
+
# Check whether return type exists and in the first line
self.CFunctionLayoutCheckReturnType = 1
# Check whether any optional functional modifiers exist and next to the return type
@@ -119,10 +121,10 @@
self.CFunctionLayoutCheckNoInitOfVariable = 1
# Check whether no use of STATIC for functions
self.CFunctionLayoutCheckNoStatic = 1
-
+
## Include Files Checking
self.IncludeFileCheckAll = 0
-
+
#Check whether having include files with same name
self.IncludeFileCheckSameName = 1
# Check whether all include file contents is guarded by a #ifndef statement.
@@ -132,10 +134,10 @@
# Check whether include files contain only public or only private data
# Check whether include files NOT contain code or define data variables
self.IncludeFileCheckData = 1
-
+
## Declarations and Data Types Checking
self.DeclarationDataTypeCheckAll = 0
-
+
# Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
self.DeclarationDataTypeCheckNoUseCType = 1
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
@@ -150,10 +152,10 @@
self.DeclarationDataTypeCheckSameStructure = 1
# Check whether Union Type has a 'typedef' and the name is capital
self.DeclarationDataTypeCheckUnionType = 1
-
+
## Naming Conventions Checking
self.NamingConventionCheckAll = 0
-
+
# Check whether only capital letters are used for #define declarations
self.NamingConventionCheckDefineStatement = 1
# Check whether only capital letters are used for typedef declarations
@@ -172,33 +174,33 @@
self.NamingConventionCheckFunctionName = 1
# Check whether NO use short variable name with single character
self.NamingConventionCheckSingleCharacterVariable = 1
-
+
## Doxygen Checking
self.DoxygenCheckAll = 0
-
+
# Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
self.DoxygenCheckFileHeader = 1
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
self.DoxygenCheckFunctionHeader = 1
- # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
self.DoxygenCheckCommentDescription = 1
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
self.DoxygenCheckCommentFormat = 1
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
self.DoxygenCheckCommand = 1
-
+
## Meta-Data File Processing Checking
self.MetaDataFileCheckAll = 0
-
+
# Check whether each file defined in meta-data exists
self.MetaDataFileCheckPathName = 1
# Generate a list for all files defined in meta-data files
self.MetaDataFileCheckGenerateFileList = 1
# The path of log file
self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
- # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
- # Each Library Instance must specify the Supported Module Types in its INF file,
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+ # Each Library Instance must specify the Supported Module Types in its INF file,
# and any module specifying the library instance must be one of the supported types.
self.MetaDataFileCheckLibraryInstance = 1
# Check whether a Library Instance has been defined for all dependent library classes
@@ -236,13 +238,13 @@
self.SkipDirList = []
self.ParseConfig()
-
+
def ParseConfig(self):
Filepath = os.path.normpath(self.Filename)
if not os.path.isfile(Filepath):
ErrorMsg = "Can't find configuration file '%s'" % Filepath
EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)
-
+
LineNo = 0
for Line in open(Filepath, 'r'):
LineNo = LineNo + 1
@@ -259,7 +261,7 @@
if List[0] == 'SkipDirList':
List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
self.__dict__[List[0]] = List[1]
-
+
def ShowMe(self):
print self.Filename
for Key in self.__dict__.keys():
Modified: trunk/BaseTools/Source/Python/Ecc/EccToolError.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/EccToolError.py 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/Ecc/EccToolError.py 2010-08-16 11:30:42 UTC (rev 2015)
@@ -19,6 +19,7 @@
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
+ERROR_GENERAL_CHECK_NON_ACSII = 1008
ERROR_SPACE_CHECK_ALL = 2000
@@ -105,6 +106,7 @@
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
+ ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",
ERROR_SPACE_CHECK_ALL : "",
Modified: trunk/BaseTools/Source/Python/Ecc/c.py
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/c.py 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/Ecc/c.py 2010-08-16 11:30:42 UTC (rev 2015)
@@ -514,7 +514,9 @@
dirnames.append(Dirname)
for f in filenames:
+ collector = None
FullName = os.path.normpath(os.path.join(dirpath, f))
+ model = DataClass.MODEL_FILE_OTHERS
if os.path.splitext(f)[1] in ('.h', '.c'):
EdkLogger.info("Parsing " + FullName)
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
@@ -526,12 +528,13 @@
collector.CleanFileProfileBuffer()
collector.ParseFileWithClearedPPDirective()
# collector.PrintFragments()
- BaseName = os.path.basename(f)
- DirName = os.path.dirname(FullName)
- Ext = os.path.splitext(f)[1].lstrip('.')
- ModifiedTime = os.path.getmtime(FullName)
- FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
- FileObjList.append(FileObj)
+ BaseName = os.path.basename(f)
+ DirName = os.path.dirname(FullName)
+ Ext = os.path.splitext(f)[1].lstrip('.')
+ ModifiedTime = os.path.getmtime(FullName)
+ FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
+ FileObjList.append(FileObj)
+ if collector:
collector.CleanFileProfileBuffer()
if len(ParseErrorFileList) > 0:
Modified: trunk/BaseTools/Source/Python/Ecc/config.ini
===================================================================
--- trunk/BaseTools/Source/Python/Ecc/config.ini 2010-08-11 07:29:32 UTC (rev 2014)
+++ trunk/BaseTools/Source/Python/Ecc/config.ini 2010-08-16 11:30:42 UTC (rev 2015)
@@ -21,7 +21,7 @@
# Identify to if check all items
# 1 - Check all items and ignore all other detailed items
# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
-#
+#
CheckAll = 0
#
@@ -68,6 +68,8 @@
GeneralCheckCarriageReturn = 1
# Check whether the file exists
GeneralCheckFileExistence = 1
+# Check whether file has non ACSII char
+GeneralCheckNonAcsii = 1
#
# Space Checking
@@ -81,7 +83,7 @@
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
PredicateExpressionCheckBooleanValue = 1
-# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
PredicateExpressionCheckNonBooleanOperator = 1
# Check whether a comparison of any pointer to zero must be done via the NULL type
PredicateExpressionCheckComparisonNullType = 1
@@ -189,7 +191,7 @@
DoxygenCheckFileHeader = 1
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
DoxygenCheckFunctionHeader = 1
-# Check whether the first line of text in a comment block is a brief description of the element being documented.
+# Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
DoxygenCheckCommentDescription = 1
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
@@ -208,8 +210,8 @@
MetaDataFileCheckGenerateFileList = 1
# The path of log file
MetaDataFileCheckPathOfGenerateFileList = File.log
-# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
-# Each Library Instance must specify the Supported Module Types in its INF file,
+# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+# Each Library Instance must specify the Supported Module Types in its INF file,
# and any module specifying the library instance must be one of the supported types.
MetaDataFileCheckLibraryInstance = 1
# Check whether a Library Instance has been defined for all dependent library classes
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <gi...@us...> - 2010-08-18 03:22:38
|
Revision: 2016
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2016&view=rev
Author: gikidy
Date: 2010-08-18 03:22:30 +0000 (Wed, 18 Aug 2010)
Log Message:
-----------
Fix the following issues:
1. Added support for the VPD_FILENAME define in DSC files;
2. Enhanced version and help output;
3. Enhanced error handling.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/BPDG/BPDG.py
trunk/BaseTools/Source/Python/BPDG/GenVpd.py
trunk/BaseTools/Source/Python/BPDG/StringTable.py
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -516,7 +516,12 @@
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
- VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+ VpdFileName = self.Platform.VpdFileName
+ if VpdFileName == None :
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+ else :
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
+
if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
VpdFile.Write(VpdFilePath)
@@ -528,16 +533,18 @@
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
BPDGToolName = ToolDef["PATH"]
break
-
# Call third party GUID BPDG tool.
if BPDGToolName != None:
- VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ if VpdFileName == None :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ else :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
if os.path.exists(VpdMapFilePath):
VpdFile.Read(VpdMapFilePath)
Modified: trunk/BaseTools/Source/Python/BPDG/BPDG.py
===================================================================
--- trunk/BaseTools/Source/Python/BPDG/BPDG.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/BPDG/BPDG.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -44,11 +44,12 @@
#
def main():
global Options, Args
+
+ # Initialize log system
+ EdkLogger.Initialize()
Options, Args = myOptionParser()
ReturnCode = 0
- # Initialize log system
- EdkLogger.Initialize()
if Options.opt_slient:
EdkLogger.SetLevel(EdkLogger.ERROR)
@@ -56,24 +57,24 @@
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Options.opt_debug != None:
- EdkLogger.SetLevel(Options.opt_debug + 1)
+ elif Options.debug_level != None:
+ EdkLogger.SetLevel(Options.debug_level + 1)
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Options.opt_vpd_filename == None:
- EdkLogger.error("bpdg", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
- if Options.opt_map_file == None:
- EdkLogger.error("bpdg", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
+ if Options.vpd_filename == None:
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
+ if Options.filename == None:
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
Force = False
if Options.opt_force != None:
Force = True
if (Args[0] != None) :
- startBPDG(Args[0], Options.opt_map_file, Options.opt_vpd_filename, Force)
+ startBPDG(Args[0], Options.filename, Options.vpd_filename, Force)
else :
- EdkLogger.error("bpdg", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
+ EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
None)
return ReturnCode
@@ -83,11 +84,11 @@
# Process command line firstly.
#
parser = OptionParser(version="%s - Version %s\n" % (PROJECT_NAME, VERSION),
- description=PROJECT_NAME,
- prog='bpdg',
+ description='',
+ prog='BPDG',
usage=st.LBL_BPDG_USAGE
)
- parser.add_option('-d', '--debug', action='store', type="int", dest='opt_debug',
+ parser.add_option('-d', '--debug', action='store', type="int", dest='debug_level',
help=st.MSG_OPTION_DEBUG_LEVEL)
parser.add_option('-v', '--verbose', action='store_true', dest='opt_verbose',
help=st.MSG_OPTION_VERBOSE)
@@ -95,16 +96,17 @@
help=st.MSG_OPTION_SILENT)
parser.add_option('-q', '--quiet', action='store_true', dest='opt_quiet', default=False,
help=st.MSG_OPTION_QUIET)
- parser.add_option('-o', '--vpd-filename', action='store', dest='opt_vpd_filename',
+ parser.add_option('-o', '--vpd-filename', action='store', dest='vpd_filename',
help=st.MSG_OPTION_VPD_FILENAME)
- parser.add_option('-m', '--map-filename', action='store', dest='opt_map_file',
+ parser.add_option('-m', '--map-filename', action='store', dest='filename',
help=st.MSG_OPTION_MAP_FILENAME)
parser.add_option('-f', '--force', action='store_true', dest='opt_force',
help=st.MSG_OPTION_FORCE)
(options, args) = parser.parse_args()
if len(args) == 0:
- print parser.usage
+ EdkLogger.info("Please specify the filename.txt file which contain the VPD pcd info!")
+ EdkLogger.info(parser.usage)
sys.exit(1)
return options, args
@@ -126,7 +128,7 @@
GenVPD.FixVpdOffset()
GenVPD.GenerateVpdFile(MapFileName, VpdFileName)
- EdkLogger.info("- Done! -")
+ EdkLogger.info("- Vpd pcd fixed done! -")
if __name__ == '__main__':
r = main()
Modified: trunk/BaseTools/Source/Python/BPDG/GenVpd.py
===================================================================
--- trunk/BaseTools/Source/Python/BPDG/GenVpd.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/BPDG/GenVpd.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -1,4 +1,6 @@
## @file
+# This file include GenVpd class for fix the Vpd type PCD offset, and PcdEntry for describe
+# and process each entry of vpd type PCD.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
#
@@ -37,6 +39,15 @@
self.PcdBinOffset = PcdBinOffset
self.PcdBinSize = PcdBinSize
+ if self.PcdValue == '' :
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no Value specified!")
+
+ if self.PcdOffset == '' :
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no Offset specified!")
+
+ if self.PcdSize == '' :
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no PcdSize specified!")
+
self._GenOffsetValue ()
def _IsBoolean(self, ValueString):
@@ -52,7 +63,7 @@
try:
self.PcdBinOffset = int(self.PcdOffset, 16)
except:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid offset value %s for PCD %s" % (self.PcdOffset, self.PcdCName))
def _PackBooleanValue(self, ValueString):
@@ -63,7 +74,7 @@
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR.keys():
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD in integer datum size." % Size)
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
@@ -75,7 +86,7 @@
elif ValueString.startswith('"') and ValueString.endswith('"'):
self._PackString(ValueString, Size)
else:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid VOID* type PCD value %s" % ValueString)
def _PackString(self, ValueString, Size):
@@ -85,7 +96,7 @@
ValueString = ValueString[1:-1]
if len(ValueString) + 1 > Size:
- EdkLogger.error("bpdg", BuildToolError.RESOURCE_OVERFLOW,
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d" % (ValueString, Size))
self.PcdValue= pack('%ds' % Size, ValueString)
@@ -99,7 +110,7 @@
ValueList = [item.strip() for item in ValueList]
if len(ValueList) > Size:
- EdkLogger.error("bpdg", BuildToolError.RESOURCE_OVERFLOW,
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The byte array %s is too large for size %d" % (ValueString, Size))
ReturnArray = array.array('B')
@@ -111,7 +122,7 @@
try:
Value = int(ValueList[Index], 16)
except:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid HEX value." % \
(ValueList[Index], ValueString))
else:
@@ -119,12 +130,12 @@
try:
Value = int(ValueList[Index], 10)
except:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid DECIMAL value." % \
(ValueList[Index], ValueString))
if Value > 255:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s do not in range 0 ~ 0xFF" %\
(ValueList[Index], ValueString))
@@ -146,7 +157,7 @@
UnicodeString = UnicodeString[2:-1]
if (len(UnicodeString) + 1) * 2 > Size:
- EdkLogger.error("bpdg", BuildToolError.RESOURCE_OVERFLOW,
+ EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The size of unicode string %s is too larger for size %s" % \
(UnicodeString, Size))
@@ -156,7 +167,7 @@
ReturnArray.append(ord(Value))
ReturnArray.append(0)
except:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID,
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid unicode character %s in unicode string %s" % \
(Value, UnicodeString))
@@ -184,16 +195,15 @@
self.PcdFixedOffsetSizeList = []
self.PcdUnknownOffsetList = []
try:
- print InputFileName
fInputfile = open(InputFileName, "r", 0)
try:
self.FileLinesList = fInputfile.readlines()
except:
- EdkLogger.error("bpdg", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" %InputFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" %InputFileName,None)
finally:
fInputfile.close()
except:
- EdkLogger.error("bpdg", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %InputFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %InputFileName,None)
##
# Parser the input file which is generated by the build tool. Convert the value of each pcd's
@@ -233,7 +243,7 @@
# Report warning messages to user's.
#
if len(self.FileLinesList) == 0 :
- EdkLogger.warn('bpdg', BuildToolError.RESOURCE_NOT_AVAILABLE,
+ EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file, Please check it.")
# Process the pcds one by one base on the pcd's value and size
@@ -245,8 +255,8 @@
PCD.PcdCName = PCD.PcdCName.strip(' ')
PCD.PcdOffset = PCD.PcdOffset.strip(' ')
PCD.PcdSize = PCD.PcdSize.strip(' ')
- PCD.PcdValue = PCD.PcdValue.strip(' ')
-
+ PCD.PcdValue = PCD.PcdValue.strip(' ')
+
#
# Store the original pcd value.
# This information will be useful while generate the output map file.
@@ -264,7 +274,7 @@
PackSize = int(PCD.PcdSize, 16)
PCD.PcdBinSize = PackSize
except:
- EdkLogger.error("bpdg", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s" % PCD.PcdSize)
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s" % PCD.PcdSize)
if PCD._IsBoolean(PCD.PcdValue):
PCD._PackBooleanValue(PCD.PcdValue)
@@ -327,7 +337,7 @@
# Check the offset of VPD type pcd's offset start from 0.
if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
- EdkLogger.warn("bpdg", "The offset of VPD type pcd should start with 0, please check it.",
+ EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
None)
# Judge whether the offset in fixed pcd offset list is overlapped or not.
@@ -338,19 +348,19 @@
PcdNext = self.PcdFixedOffsetSizeList[count+1]
# Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
- EdkLogger.error("bpdg", BuildToolError.ATTRIBUTE_GET_FAILURE,
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s is same with %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
None)
# Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :
- EdkLogger.error("bpdg", BuildToolError.ATTRIBUTE_GET_FAILURE,
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s is overlapped with %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
None)
# Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :
- EdkLogger.warn("bpdg", BuildToolError.ATTRIBUTE_GET_FAILURE,
+ EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offsets have free space of between %s and %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
None)
count += 1
@@ -417,7 +427,7 @@
FixOffsetSizeListCount += 1
# Usually it will not enter into this thunk, if so, means it overlapped.
else :
- EdkLogger.error("bpdg", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
+ EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
"The offset value definition has overlapped at pcd: %s, it's offset is: %s" %(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset),
None)
FixOffsetSizeListCount += 1
@@ -451,13 +461,13 @@
fVpdFile = open (BinFileName, "wb", 0)
except:
# Open failed
- EdkLogger.error("bpdg", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.VpdFileName,None)
try :
fMapFile = open (MapFileName, "w", 0)
except:
# Open failed
- EdkLogger.error("bpdg", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" %self.MapFileName,None)
# Use a instance of StringIO to cache data
fStringIO = StringIO.StringIO('')
@@ -466,14 +476,14 @@
try :
fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
except:
- EdkLogger.error("bpdg", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
for eachPcd in self.PcdFixedOffsetSizeList :
# write map file
try :
fMapFile.write("%s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.PcdOffset, eachPcd.PcdSize,eachPcd.PcdUnpackValue))
except:
- EdkLogger.error("bpdg", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.MapFileName,None)
# Write Vpd binary file
fStringIO.seek (eachPcd.PcdBinOffset)
@@ -486,7 +496,7 @@
try :
fVpdFile.write (fStringIO.getvalue())
except:
- EdkLogger.error("bpdg", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None)
+ EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %self.VpdFileName,None)
fStringIO.close ()
fVpdFile.close ()
Modified: trunk/BaseTools/Source/Python/BPDG/StringTable.py
===================================================================
--- trunk/BaseTools/Source/Python/BPDG/StringTable.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/BPDG/StringTable.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -45,12 +45,12 @@
-LBL_BPDG_LONG_UNI = (u"Intel Binary Product Data Generation (BPDG) Application")
+LBL_BPDG_LONG_UNI = (u"Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)")
LBL_BPDG_VERSION = (u"0.1")
LBL_BPDG_USAGE = \
(
"""
-Usage: bpdg options -o Filename.bin -m Filename.map Filename.txt
+BPDG options -o Filename.bin -m Filename.map Filename.txt
Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
Copyright (c) 2010 Intel Corporation All Rights Reserved.
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -354,6 +354,7 @@
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
+TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#
Modified: trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -210,16 +210,20 @@
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
-def CallExtenalBPDGTool(ToolPath, VpdFileName):
+def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):
assert ToolPath != None, "Invalid parameter ToolPath"
- assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
+ assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
- OutputDir = os.path.dirname(VpdFileName)
- FileName = os.path.basename(VpdFileName)
- BaseName, ext = os.path.splitext(FileName)
- OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
- OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
-
+ OutputDir = os.path.dirname(VpdFilePath)
+ if (VpdFileName == None) :
+ FileName = os.path.basename(VpdFilePath)
+ BaseName, ext = os.path.splitext(FileName)
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
+ else :
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)
+
try:
PopenObject = subprocess.Popen([ToolPath,
'-o', OutputBinFileName,
@@ -227,7 +231,7 @@
'-s',
'-f',
'-v',
- VpdFileName],
+ VpdFilePath],
stdout=subprocess.PIPE,
stderr= subprocess.PIPE)
except Exception, X:
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-16 11:30:42 UTC (rev 2015)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-18 03:22:30 UTC (rev 2016)
@@ -137,6 +137,7 @@
self._BuildOptions = None
self._LoadFixAddress = None
self._VpdToolGuid = None
+ self._VpdFileName = None
## Get architecture
def _GetArch(self):
@@ -199,8 +200,9 @@
uuid.UUID(Record[1])
except:
EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
- self._VpdToolGuid = Record[1]
-
+ self._VpdToolGuid = Record[1]
+ elif Name == TAB_DSC_DEFINES_VPD_FILENAME:
+ self._VpdFileName = Record[1]
# set _Header to non-None in order to avoid database re-querying
self._Header = 'DUMMY'
@@ -344,6 +346,15 @@
if self._VpdToolGuid == None:
self._VpdToolGuid = ''
return self._VpdToolGuid
+
+ ## Retrieve the VPD file Name, this is optional in DSC file
+ def _GetVpdFileName(self):
+ if self._VpdFileName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._VpdFileName == None:
+ self._VpdFileName = ''
+ return self._VpdFileName
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
@@ -782,6 +793,7 @@
RtBaseAddress = property(_GetRtBaseAddress)
LoadFixAddress = property(_GetLoadFixAddress)
VpdToolGuid = property(_GetVpdToolGuid)
+ VpdFileName = property(_GetVpdFileName)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
LibraryInstances = property(_GetLibraryInstances)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-08-19 23:55:45
|
Revision: 2022
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2022&view=rev
Author: qhuang8
Date: 2010-08-19 23:55:38 +0000 (Thu, 19 Aug 2010)
Log Message:
-----------
1. Enhance the parser to support 'PKG_UNI_FILE' in DEC [Defines] section.
2. Update GenFds parser and build parser to expand $(WORKSPACE) macro in DSC and FDF file recursively so that $(WORKSPACE) macro can be used to define other macro.
3. Treat all elements and DEFINE statement in DSC [Defines] section as global macro so that they are also valid for FDF file.
4. Enhance the error handling to throw syntax error for PCD sections.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/GenFds/GenFds.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2010-08-19 12:55:45 UTC (rev 2021)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2010-08-19 23:55:38 UTC (rev 2022)
@@ -334,6 +334,7 @@
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
+TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
#
# Dsc Definitions
Modified: trunk/BaseTools/Source/Python/GenFds/GenFds.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/GenFds.py 2010-08-19 12:55:45 UTC (rev 2021)
+++ trunk/BaseTools/Source/Python/GenFds/GenFds.py 2010-08-19 23:55:38 UTC (rev 2022)
@@ -172,6 +172,7 @@
"""call Workspace build create database"""
os.environ["WORKSPACE"] = Workspace
+ FdfParser.InputMacroDict["WORKSPACE"] = Workspace
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
BuildWorkSpace.InitDatabase()
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-19 12:55:45 UTC (rev 2021)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-19 23:55:38 UTC (rev 2022)
@@ -19,6 +19,7 @@
import copy
import Common.EdkLogger as EdkLogger
+import Common.GlobalData as GlobalData
from CommonDataClass.DataClass import *
from Common.DataType import *
from Common.String import *
@@ -82,6 +83,7 @@
self.MetaFile = FilePath
self._FileDir = os.path.dirname(self.MetaFile)
self._Macros = copy.copy(Macros)
+ self._Macros["WORKSPACE"] = os.environ["WORKSPACE"]
# for recursive parsing
self._Owner = Owner
@@ -490,7 +492,12 @@
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
def _PcdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(ValueList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:1] = ValueList
if len(TokenList) > 1:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
@@ -649,7 +656,11 @@
continue
# file private macros
elif Line.upper().startswith('DEFINE '):
- self._MacroParser()
+ (Name, Value) = self._MacroParser()
+ # Make the defined macro in DSC [Defines] section also
+ # available for FDF file.
+ if self._SectionName == TAB_COMMON_DEFINES.upper():
+ GlobalData.gGlobalDefines.setdefault(Name, Value)
continue
elif Line.upper().startswith('EDK_GLOBAL '):
(Name, Value) = self._MacroParser()
@@ -716,6 +727,8 @@
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
TokenList[1] = NormPath(TokenList[1], self._Macros)
self._ValueList[0:len(TokenList)] = TokenList
+ # Treat elements in the [defines] section as global macros for FDF file.
+ GlobalData.gGlobalDefines.setdefault(TokenList[0], TokenList[1])
## <subsection_header> parser
def _SubsectionHeaderParser(self):
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-19 12:55:45 UTC (rev 2021)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-19 23:55:38 UTC (rev 2022)
@@ -830,6 +830,7 @@
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
+ TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
}
@@ -871,6 +872,7 @@
self._PackageName = None
self._Guid = None
self._Version = None
+ self._PkgUniFile = None
self._Protocols = None
self._Ppis = None
self._Guids = None
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <qh...@us...> - 2010-08-25 05:08:53
|
Revision: 2025
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2025&view=rev
Author: qhuang8
Date: 2010-08-25 05:08:47 +0000 (Wed, 25 Aug 2010)
Log Message:
-----------
Store the global definitions in the table so that they can be passed to the FDF file even if the DSC file itself is not changed.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-23 03:47:45 UTC (rev 2024)
+++ trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2010-08-25 05:08:47 UTC (rev 2025)
@@ -93,6 +93,7 @@
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
MODEL_META_DATA_COMMENT = 5016
+MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_EXTERNAL_DEPENDENCY = 10000
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-23 03:47:45 UTC (rev 2024)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2010-08-25 05:08:47 UTC (rev 2025)
@@ -19,7 +19,6 @@
import copy
import Common.EdkLogger as EdkLogger
-import Common.GlobalData as GlobalData
from CommonDataClass.DataClass import *
from Common.DataType import *
from Common.String import *
@@ -660,7 +659,21 @@
# Make the defined macro in DSC [Defines] section also
# available for FDF file.
if self._SectionName == TAB_COMMON_DEFINES.upper():
- GlobalData.gGlobalDefines.setdefault(Name, Value)
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ Name,
+ Value,
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
continue
elif Line.upper().startswith('EDK_GLOBAL '):
(Name, Value) = self._MacroParser()
@@ -728,7 +741,21 @@
TokenList[1] = NormPath(TokenList[1], self._Macros)
self._ValueList[0:len(TokenList)] = TokenList
# Treat elements in the [defines] section as global macros for FDF file.
- GlobalData.gGlobalDefines.setdefault(TokenList[0], TokenList[1])
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ TokenList[0],
+ TokenList[1],
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
## <subsection_header> parser
def _SubsectionHeaderParser(self):
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-23 03:47:45 UTC (rev 2024)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-08-25 05:08:47 UTC (rev 2025)
@@ -100,6 +100,10 @@
RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
for Record in RecordList:
GlobalData.gEdkGlobal[Record[0]] = Record[1]
+
+ RecordList = self._RawData[MODEL_META_DATA_GLOBAL_DEFINE, self._Arch]
+ for Record in RecordList:
+ GlobalData.gGlobalDefines[Record[0]] = Record[1]
## XXX[key] = value
def __setitem__(self, key, value):
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <gi...@us...> - 2010-08-27 01:09:23
|
Revision: 2028
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2028&view=rev
Author: gikidy
Date: 2010-08-27 01:09:17 +0000 (Fri, 27 Aug 2010)
Log Message:
-----------
Fix the missing-filename issue while generating VPD data.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-25 08:26:54 UTC (rev 2027)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-08-27 01:09:17 UTC (rev 2028)
@@ -518,7 +518,7 @@
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
VpdFileName = self.Platform.VpdFileName
- if VpdFileName == None :
+ if VpdFileName == None or VpdFileName == "" :
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
else :
VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
@@ -542,7 +542,7 @@
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
- if VpdFileName == None :
+ if VpdFileName == None or VpdFileName == "" :
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
else :
VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
Modified: trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-08-25 08:26:54 UTC (rev 2027)
+++ trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-08-27 01:09:17 UTC (rev 2028)
@@ -215,7 +215,7 @@
assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFilePath)
- if (VpdFileName == None) :
+ if (VpdFileName == None or VpdFileName == "") :
FileName = os.path.basename(VpdFilePath)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <gi...@us...> - 2010-09-01 04:09:03
|
Revision: 2032
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2032&view=rev
Author: gikidy
Date: 2010-09-01 03:40:27 +0000 (Wed, 01 Sep 2010)
Log Message:
-----------
Fix the STDOUT redirection issue when using command-line arguments.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/BPDG/BPDG.py
trunk/BaseTools/Source/Python/build/build.py
Modified: trunk/BaseTools/Source/Python/BPDG/BPDG.py
===================================================================
--- trunk/BaseTools/Source/Python/BPDG/BPDG.py 2010-08-31 00:40:33 UTC (rev 2031)
+++ trunk/BaseTools/Source/Python/BPDG/BPDG.py 2010-09-01 03:40:27 UTC (rev 2032)
@@ -22,6 +22,8 @@
#
import os
import sys
+import encodings.ascii
+
from optparse import OptionParser
from encodings import gbk
from Common import EdkLogger
Modified: trunk/BaseTools/Source/Python/build/build.py
===================================================================
--- trunk/BaseTools/Source/Python/build/build.py 2010-08-31 00:40:33 UTC (rev 2031)
+++ trunk/BaseTools/Source/Python/build/build.py 2010-09-01 03:40:27 UTC (rev 2032)
@@ -23,6 +23,7 @@
import time
import platform
import traceback
+import encodings.ascii
from struct import *
from threading import *
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <gi...@us...> - 2010-09-02 04:57:08
|
Revision: 2036
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2036&view=rev
Author: gikidy
Date: 2010-09-02 04:57:01 +0000 (Thu, 02 Sep 2010)
Log Message:
-----------
1. Fix bug in generating VPD data when the platform has multiple architectures;
2. Add support for using a VPD PCD as the VPD signature;
3. Fix bug so that a VPD PCD's value can be overridden in the DSC file.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-09-01 13:43:37 UTC (rev 2035)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-09-02 04:57:01 UTC (rev 2036)
@@ -245,7 +245,7 @@
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
- ## Create makefile for the platform and mdoules in it
+ ## Create makefile for the platform and modules in it
#
# @param CreateDepsMakeFile Flag indicating if the makefile for
# modules will be created as well
@@ -477,87 +477,135 @@
UnicodePcdArray = []
HiiPcdArray = []
OtherPcdArray = []
- VpdFile = VpdInfoFile.VpdInfoFile()
- NeedProcessVpdMapFile = False
+ VpdFile = VpdInfoFile.VpdInfoFile()
+ NeedProcessVpdMapFile = False
- for Pcd in self._DynamicPcdList:
- # just pick the a value to determine whether is unicode string type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
- PcdValue = Sku.DefaultValue
- if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
- # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
- UnicodePcdArray.append(Pcd)
- elif len(Sku.VariableName) > 0:
- # if found HII type PCD then insert to right of UnicodeIndex
- HiiPcdArray.append(Pcd)
- else:
- OtherPcdArray.append(Pcd)
+ if (self.Workspace.ArchList[-1] == self.Arch):
+ for Pcd in self._DynamicPcdList:
+
+ # just pick the a value to determine whether is unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
- VpdFile.Add(Pcd, Sku.VpdOffset)
- # if the offset of a VPD is *, then it need to be fixed up by third party tool.
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
-
- if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
- VpdFile.GetCount() != 0:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
- "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
-
- if VpdFile.GetCount() != 0:
- WorkspaceDb = self.BuildDatabase.WorkspaceDb
- DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
- FvPath = os.path.join(self.BuildDir, "FV")
- if not os.path.exists(FvPath):
- try:
- os.makedirs(FvPath)
- except:
- EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+ PcdValue = Sku.DefaultValue
+ if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
+ # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
+ UnicodePcdArray.append(Pcd)
+ elif len(Sku.VariableName) > 0:
+ # if found HII type PCD then insert to right of UnicodeIndex
+ HiiPcdArray.append(Pcd)
+ else:
+ OtherPcdArray.append(Pcd)
- VpdFileName = self.Platform.VpdFileName
- if VpdFileName == None or VpdFileName == "" :
- VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
- else :
- VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ #
+ # Fix the optional data of VPD PCD.
+ #
+ if (Pcd.DatumType.strip() != "VOID*" and Pcd.DatumType.strip() != "VOID *"):
+ if Sku.DefaultValue == '':
+ Sku.DefaultValue = Pcd.MaxDatumSize
+
+ VpdFile.Add(Pcd, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+
+ #
+ # Fix the PCDs define in VPD PCD section that never referenced by module.
+ # An example is PCD for signature usage.
+ #
+ for DscPcd in self.Platform.Pcds:
+ DscPcdEntry = self.Platform.Pcds[DscPcd]
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ FoundFlag = False
+ for VpdPcd in VpdFile._VpdArray.keys():
+ # This PCD has been referenced by module
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
+ FoundFlag = True
+
+ # Not found, it should be signature
+ if not FoundFlag :
+ # just pick the a value to determine whether is unicode string type
+ Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ # Need to iterate DEC pcd information to get the value & datumtype
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ Sku.DefaultValue = DecPcdEntry.DefaultValue
+
+ VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+
+
+ if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
+ VpdFile.GetCount() != 0:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
- if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
- VpdFile.Write(VpdFilePath)
-
- # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
- BPDGToolName = None
- for ToolDef in self.ToolDefinition.values():
- if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
- if not ToolDef.has_key("PATH"):
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
- BPDGToolName = ToolDef["PATH"]
- break
- # Call third party GUID BPDG tool.
- if BPDGToolName != None:
- VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
- else:
- EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
-
- # Process VPD map file generated by third party BPDG tool
- if NeedProcessVpdMapFile:
+ if VpdFile.GetCount() != 0:
+ WorkspaceDb = self.BuildDatabase.WorkspaceDb
+ DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
+ FvPath = os.path.join(self.BuildDir, "FV")
+ if not os.path.exists(FvPath):
+ try:
+ os.makedirs(FvPath)
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+
+ VpdFileName = self.Platform.VpdFileName
if VpdFileName == None or VpdFileName == "" :
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
else :
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
- if os.path.exists(VpdMapFilePath):
- VpdFile.Read(VpdMapFilePath)
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
- # Fixup "*" offset
- for Pcd in self._DynamicPcdList:
- # just pick the a value to determine whether is unicode string type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- if Sku.VpdOffset == "*":
- Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
- else:
- EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
- del self._DynamicPcdList[:]
+ if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
+ VpdFile.Write(VpdFilePath)
+
+ # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
+ BPDGToolName = None
+ for ToolDef in self.ToolDefinition.values():
+ if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
+ if not ToolDef.has_key("PATH"):
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
+ BPDGToolName = ToolDef["PATH"]
+ break
+ # Call third party GUID BPDG tool.
+ if BPDGToolName != None:
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
+ else:
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ # Process VPD map file generated by third party BPDG tool
+ if NeedProcessVpdMapFile:
+ if VpdFileName == None or VpdFileName == "" :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ else :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
+ if os.path.exists(VpdMapFilePath):
+ VpdFile.Read(VpdMapFilePath)
+
+ # Fixup "*" offset
+ for Pcd in self._DynamicPcdList:
+ # just pick the a value to determine whether is unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ if Sku.VpdOffset == "*":
+ Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
+ else:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ # Delete the DynamicPcdList At the last time enter into this function
+ del self._DynamicPcdList[:]
self._DynamicPcdList.extend(UnicodePcdArray)
self._DynamicPcdList.extend(HiiPcdArray)
self._DynamicPcdList.extend(OtherPcdArray)
Modified: trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-09-01 13:43:37 UTC (rev 2035)
+++ trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-09-02 04:57:01 UTC (rev 2036)
@@ -127,12 +127,15 @@
try:
# write file header
fd.write(FILE_COMMENT_TEMPLATE)
-
+
# write each of PCD in VPD type
for Pcd in self._VpdArray.keys():
for Offset in self._VpdArray[Pcd]:
- fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(),
- str(Pcd.MaxDatumSize).strip(), str(Pcd.DefaultValue).strip()))
+ PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
+ if PcdValue == "" :
+ PcdValue = Pcd.DefaultValue
+
+ fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_WRITE_FAILURE,
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-09-01 13:43:37 UTC (rev 2035)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2010-09-02 04:57:01 UTC (rev 2036)
@@ -735,6 +735,12 @@
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
+ #
+ # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
+ # For the Integer & Boolean type, the optional data can only be InitialValue.
+ # At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype
+ # until the DEC parser has been called.
+ #
VpdOffset, MaxDatumSize, InitialValue = ValueList
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
@@ -805,7 +811,7 @@
Pcds = property(_GetPcds)
BuildOptions = property(_GetBuildOptions)
-## Platform build information from DSC file
+## Platform build information from DEC file
#
# This class is used to retrieve information stored in database and convert them
# into PackageBuildClassObject form for easier use for AutoGen.
@@ -2043,7 +2049,7 @@
## Database
#
-# This class defined the build databse for all modules, packages and platform.
+# This class defined the build database for all modules, packages and platform.
# It will call corresponding parser for the given file if it cannot find it in
# the database.
#
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|
|
From: <gi...@us...> - 2010-09-02 08:16:52
|
Revision: 2037
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2037&view=rev
Author: gikidy
Date: 2010-09-02 08:16:46 +0000 (Thu, 02 Sep 2010)
Log Message:
-----------
Enhanced error handling and fixed a bug to get the correct value of a non-VOID* type VPD PCD.
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/BPDG/GenVpd.py
trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
trunk/BaseTools/Source/Python/build/build.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-09-02 04:57:01 UTC (rev 2036)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2010-09-02 08:16:46 UTC (rev 2037)
@@ -504,7 +504,8 @@
#
if (Pcd.DatumType.strip() != "VOID*" and Pcd.DatumType.strip() != "VOID *"):
if Sku.DefaultValue == '':
- Sku.DefaultValue = Pcd.MaxDatumSize
+ Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
+ Pcd.MaxDatumSize = None
VpdFile.Add(Pcd, Sku.VpdOffset)
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
Modified: trunk/BaseTools/Source/Python/BPDG/GenVpd.py
===================================================================
--- trunk/BaseTools/Source/Python/BPDG/GenVpd.py 2010-09-02 04:57:01 UTC (rev 2036)
+++ trunk/BaseTools/Source/Python/BPDG/GenVpd.py 2010-09-02 08:16:46 UTC (rev 2037)
@@ -29,24 +29,29 @@
}
class PcdEntry:
- def __init__(self, PcdCName, PcdOffset, PcdSize, PcdValue, PcdUnpackValue=None,
+ def __init__(self, PcdCName, PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
PcdBinOffset=None, PcdBinSize=None):
self.PcdCName = PcdCName.strip()
self.PcdOffset = PcdOffset.strip()
self.PcdSize = PcdSize.strip()
self.PcdValue = PcdValue.strip()
+ self.Lineno = Lineno.strip()
+ self.FileName = FileName.strip()
self.PcdUnpackValue = PcdUnpackValue
self.PcdBinOffset = PcdBinOffset
self.PcdBinSize = PcdBinSize
if self.PcdValue == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no Value specified!")
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" %(self.PcdCName, self.FileName, self.Lineno))
if self.PcdOffset == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no Offset specified!")
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" %(self.PcdCName, self.FileName, self.Lineno))
if self.PcdSize == '' :
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD format, no PcdSize specified!")
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno))
self._GenOffsetValue ()
@@ -64,7 +69,7 @@
self.PcdBinOffset = int(self.PcdOffset, 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid offset value %s for PCD %s" % (self.PcdOffset, self.PcdCName))
+ "Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE":
@@ -75,7 +80,7 @@
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR.keys():
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size %d for PCD in integer datum size." % Size)
+ "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
def _PackPtrValue(self, ValueString, Size):
@@ -87,22 +92,28 @@
self._PackString(ValueString, Size)
else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid VOID* type PCD value %s" % ValueString)
+ "Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
def _PackString(self, ValueString, Size):
- assert Size > 0, "Invalid parameter Size!"
- assert ValueString != "", "Invalid parameter ValueString"
- assert len(ValueString) >= 2, 'An ASCII string at least contains two "'
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+ if (ValueString == ""):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
+ if (len(ValueString) < 2):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" % (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
ValueString = ValueString[1:-1]
if len(ValueString) + 1 > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
- "PCD value string %s is exceed to size %d" % (ValueString, Size))
+ "PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
self.PcdValue= pack('%ds' % Size, ValueString)
def _PackByteArray(self, ValueString, Size):
- assert Size > 0, "Invalid parameter Size!"
- assert ValueString != "", "Invalid parameter ValueString"
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+ if (ValueString == ""):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
ValueString = ValueString.strip()
ValueString = ValueString.lstrip('{').strip('}')
@@ -111,7 +122,7 @@
if len(ValueList) > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
- "The byte array %s is too large for size %d" % (ValueString, Size))
+ "The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
@@ -123,21 +134,21 @@
Value = int(ValueList[Index], 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "The value item %s in byte array %s is an invalid HEX value." % \
- (ValueList[Index], ValueString))
+ "The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \
+ (ValueList[Index], ValueString, self.FileName, self.Lineno))
else:
# translate decimal value
try:
Value = int(ValueList[Index], 10)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "The value item %s in byte array %s is an invalid DECIMAL value." % \
- (ValueList[Index], ValueString))
+ "The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \
+ (ValueList[Index], ValueString, self.FileName, self.Lineno))
if Value > 255:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "The value item %s in byte array %s do not in range 0 ~ 0xFF" %\
- (ValueList[Index], ValueString))
+ "The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" %\
+ (ValueList[Index], ValueString, self.FileName, self.Lineno))
ReturnArray.append(Value)
@@ -151,15 +162,19 @@
# A unicode string for a PCD should be in format as L"".
#
def _PackUnicode(self, UnicodeString, Size):
- assert Size > 0, "Invalid parameter Size"
- assert len(UnicodeString) >= 3, "Invalid parameter UnicodeString"
+ if (Size < 0):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" %\
+ (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
+ if (len(UnicodeString) < 3):
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "For PCD: %s ,ASCII string %s at least contains two!(File: %s Line: %s)" %\
+ (self.PcdCName, self.PcdUnpackValue, self.FileName, self.Lineno))
UnicodeString = UnicodeString[2:-1]
if (len(UnicodeString) + 1) * 2 > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
- "The size of unicode string %s is too larger for size %s" % \
- (UnicodeString, Size))
+ "The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \
+ (UnicodeString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
for Value in UnicodeString:
@@ -168,8 +183,8 @@
ReturnArray.append(0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid unicode character %s in unicode string %s" % \
- (Value, UnicodeString))
+ "Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
+ (Value, UnicodeString, self.FileName, self.Lineno))
for Index in range(len(UnicodeString) * 2, Size):
ReturnArray.append(0)
@@ -219,6 +234,8 @@
# Skip the comment line
if (not line.startswith("#")) and len(line) > 1 :
self.FileLinesList[count] = line.split('|')
+ # Store the line number
+ self.FileLinesList[count].append(str(count+1))
elif len(line) <= 1 :
# Set the blank line to "None"
self.FileLinesList[count] = None
@@ -250,12 +267,13 @@
count = 0
for line in self.FileLinesList:
if line != None :
- PCD = PcdEntry(line[0], line[1], line[2], line[3])
+ PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4], self.InputFileName)
# Strip the space char
PCD.PcdCName = PCD.PcdCName.strip(' ')
PCD.PcdOffset = PCD.PcdOffset.strip(' ')
PCD.PcdSize = PCD.PcdSize.strip(' ')
PCD.PcdValue = PCD.PcdValue.strip(' ')
+ PCD.Lineno = PCD.Lineno.strip(' ')
#
# Store the original pcd value.
@@ -274,7 +292,7 @@
PackSize = int(PCD.PcdSize, 16)
PCD.PcdBinSize = PackSize
except:
- EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s" % PCD.PcdSize)
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))
if PCD._IsBoolean(PCD.PcdValue):
PCD._PackBooleanValue(PCD.PcdValue)
@@ -363,19 +381,22 @@
# Two pcd's offset is same
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
- "The offset of %s is same with %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
+ "The offset of %s at line: %s is same with %s at line: %s in file %s" %\
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize > PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
- "The offset of %s is overlapped with %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
+ "The offset of %s at line: %s is overlapped with %s at line: %s in file %s" %\
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdBinSize < PcdNext.PcdBinOffset :
EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
- "The offsets have free space of between %s and %s" % (PcdNow.PcdCName, PcdNext.PcdCName),
+ "The offsets have free space of between %s at line: %s and %s at line: %s in file %s" %\
+ (PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
count += 1
@@ -442,7 +463,8 @@
# Usually it will not enter into this thunk, if so, means it overlapped.
else :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
- "The offset value definition has overlapped at pcd: %s, it's offset is: %s" %(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset),
+ "The offset value definition has overlapped at pcd: %s, it's offset is: %s, in file: %s line: %s" %\
+ (eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None)
FixOffsetSizeListCount += 1
Modified: trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-09-02 04:57:01 UTC (rev 2036)
+++ trunk/BaseTools/Source/Python/Common/VpdInfoFile.py 2010-09-02 08:16:46 UTC (rev 2037)
@@ -85,17 +85,22 @@
# @param offset integer value for VPD's offset in specific SKU.
#
def Add(self, Vpd, Offset):
- assert Vpd != None, "Invalid VPD PCD entry."
- assert Offset >= 0 or Offset == "*", "Invalid offset parameter: %s." % Offset
+ if (Vpd == None):
+ EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
+ if not (Offset >= 0 or Offset == "*"):
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
+
if Vpd.DatumType == "VOID*":
if Vpd.MaxDatumSize <= 0:
- assert False, "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName)
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
else:
- assert False, "Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName)
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
if Vpd not in self._VpdArray.keys():
#
@@ -115,8 +120,9 @@
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
- assert FilePath != None or len(FilePath) != 0, "Invalid parameter FilePath: %s." % FilePath
-
+ if not (FilePath != None or len(FilePath) != 0):
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid parameter FilePath: %s." % FilePath)
try:
fd = open(FilePath, "w")
except:
Modified: trunk/BaseTools/Source/Python/build/build.py
===================================================================
--- trunk/BaseTools/Source/Python/build/build.py 2010-09-02 04:57:01 UTC (rev 2036)
+++ trunk/BaseTools/Source/Python/build/build.py 2010-09-02 08:16:46 UTC (rev 2037)
@@ -736,7 +736,7 @@
self.LoadFixAddress = 0
self.UniFlag = UniFlag
- # print dot charater during doing some time-consuming work
+ # print dot character during doing some time-consuming work
self.Progress = Utils.Progressor()
# parse target.txt, tools_def.txt, and platform file
@@ -1268,9 +1268,9 @@
if len (SmmModuleList) > 0:
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))
- PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
+ PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
BtBaseAddr = TopMemoryAddress - RtSize
- RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
+ RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
|