|
From: <hc...@us...> - 2012-06-01 05:12:47
|
Revision: 2531
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2531&view=rev
Author: hchen30
Date: 2012-06-01 05:12:41 +0000 (Fri, 01 Jun 2012)
Log Message:
-----------
Enhance the build tool to break when there is an invalid section name in a DEC file.
Reviewed-by: Yurui Zeng <yur...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2012-06-01 01:44:56 UTC (rev 2530)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2012-06-01 05:12:41 UTC (rev 2531)
@@ -1,7 +1,7 @@
## @file
# This file is used to define common static strings used by INF/DEC/DSC files
#
-# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2012, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -459,14 +459,18 @@
TAB_BUILD_RULE_VERSION = "build_rule_version"
# section name for PCDs
-TAB_PCDS_DYNAMIC_DEFAULT = "PcdsDynamicDefault"
-TAB_PCDS_DYNAMIC_VPD = "PcdsDynamicVpd"
-TAB_PCDS_DYNAMIC_HII = "PcdsDynamicHii"
-TAB_PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
-TAB_PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
-TAB_PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
+PCDS_DYNAMIC_DEFAULT = "PcdsDynamicDefault"
+PCDS_DYNAMIC_VPD = "PcdsDynamicVpd"
+PCDS_DYNAMIC_HII = "PcdsDynamicHii"
+PCDS_DYNAMICEX_DEFAULT = "PcdsDynamicExDefault"
+PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
+PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
# Section allowed to have items after arch
SECTIONS_HAVE_ITEM_AFTER_ARCH = [TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
- TAB_PCDS_DYNAMIC_DEFAULT.upper(), TAB_PCDS_DYNAMIC_VPD.upper(), TAB_PCDS_DYNAMIC_HII.upper(),
- TAB_PCDS_DYNAMICEX_DEFAULT.upper(), TAB_PCDS_DYNAMICEX_VPD.upper(), TAB_PCDS_DYNAMICEX_HII.upper()]
+ PCDS_DYNAMIC_DEFAULT.upper(),
+ PCDS_DYNAMIC_VPD.upper(),
+ PCDS_DYNAMIC_HII.upper(),
+ PCDS_DYNAMICEX_DEFAULT.upper(),
+ PCDS_DYNAMICEX_VPD.upper(),
+ PCDS_DYNAMICEX_HII.upper()]
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-06-01 01:44:56 UTC (rev 2530)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-06-01 05:12:41 UTC (rev 2531)
@@ -1,7 +1,7 @@
## @file
# This file is used to parse meta files
#
-# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2012, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -288,15 +288,17 @@
self._SectionType = self.DataType[self._SectionName]
else:
self._SectionType = MODEL_UNKNOWN
- EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
- Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+
+ # Check if the section name is valid
+ if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 2:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+
# S1 is always Arch
if len(ItemList) > 1:
S1 = ItemList[1].upper()
- if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 2:
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
-
else:
S1 = 'COMMON'
ArchList.add(S1)
@@ -1637,7 +1639,9 @@
ArchList = set()
for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
if Item == '':
- continue
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR,
+ "section name can NOT be empty or incorrectly use separator comma",
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
ItemList = GetSplitValueList(Item, TAB_SPLIT)
# different types of PCD are permissible in one section
@@ -1646,9 +1650,8 @@
if self.DataType[self._SectionName] not in self._SectionType:
self._SectionType.append(self.DataType[self._SectionName])
else:
- EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
- Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
- continue
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
EdkLogger.error(
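For illustration, a minimal standalone sketch of the stricter section-header check introduced by this revision. The whitelist contents and the helper below are simplified, hypothetical stand-ins for the real tables in DataType.py and the MetaFileParser/EdkLogger code:

# Hypothetical, simplified stand-ins for the real constants in DataType.py
SECTIONS_HAVE_ITEM_AFTER_ARCH = {
    "LIBRARYCLASSES", "DEPEX", "USEREXTENSIONS",
    "PCDSDYNAMICDEFAULT", "PCDSDYNAMICVPD", "PCDSDYNAMICHII",
    "PCDSDYNAMICEXDEFAULT", "PCDSDYNAMICEXVPD", "PCDSDYNAMICEXHII",
}
KNOWN_SECTIONS = {"DEFINES", "INCLUDES", "GUIDS",
                  "PCDSFIXEDATBUILD"} | SECTIONS_HAVE_ITEM_AFTER_ARCH

def check_section_header(header):
    """Reject the section headers the parser now treats as build errors.

    header is the text between '[' and ']', e.g. "PcdsFixedAtBuild.IA32".
    """
    for item in header.split(','):
        item = item.strip()
        parts = item.split('.')
        name = parts[0].upper()
        if name not in KNOWN_SECTIONS:
            # previously only a warning; the parser now stops the build
            raise ValueError("%s is not a valid section name" % item)
        # only the whitelisted sections may carry a third field after the arch
        if name not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(parts) > 2:
            raise ValueError("%s is not a valid section name" % item)

check_section_header("PcdsDynamicDefault.IA32.Extra")   # allowed: in the whitelist
check_section_header("Guids.common")                    # allowed
# check_section_header("PcdsFixedAtBuild.IA32.Foo")     # would now raise an error

Section names are compared case-insensitively, which is why the whitelist is stored upper-cased, matching the .upper() calls in the DataType.py hunk above.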
|
|
From: <hc...@us...> - 2012-06-08 01:50:05
|
Revision: 2537
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2537&view=rev
Author: hchen30
Date: 2012-06-08 01:49:59 +0000 (Fri, 08 Jun 2012)
Log Message:
-----------
1. Add a checkpoint for the PCD datum type in DSC files.
2. Add BuildOptions to the set of section names that may have additional items after ARCH.
Reviewed-by: Yurui Zeng <yur...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2012-06-07 16:28:29 UTC (rev 2536)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2012-06-08 01:49:59 UTC (rev 2537)
@@ -38,6 +38,7 @@
TAB_UINT16 = 'UINT16'
TAB_UINT32 = 'UINT32'
TAB_UINT64 = 'UINT64'
+TAB_VOID = 'VOID*'
TAB_EDK_SOURCE = '$(EDK_SOURCE)'
TAB_EFI_SOURCE = '$(EFI_SOURCE)'
@@ -473,4 +474,5 @@
PCDS_DYNAMIC_HII.upper(),
PCDS_DYNAMICEX_DEFAULT.upper(),
PCDS_DYNAMICEX_VPD.upper(),
- PCDS_DYNAMICEX_HII.upper()]
+ PCDS_DYNAMICEX_HII.upper(),
+ TAB_BUILD_OPTIONS.upper()]
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-06-07 16:28:29 UTC (rev 2536)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-06-08 01:49:59 UTC (rev 2537)
@@ -1043,6 +1043,14 @@
EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
File=self.MetaFile, Line=self._LineIndex + 1)
+
+ # Validate the datum type of Dynamic Default PCD and DynamicEx Default PCD
+ ValueList = GetSplitValueList(self._ValueList[2])
+ if len(ValueList) > 1 and ValueList[1] != TAB_VOID \
+ and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1],
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
+
# if value are 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace with integer 1 or 0.
DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
@@ -1050,6 +1058,7 @@
elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1);
+
## [components] section parser
@ParseMacro
def _ComponentParser(self):
@@ -1441,10 +1450,11 @@
def __ProcessPcd(self):
PcdValue = None
ValueList = GetSplitValueList(self._ValueList[2])
+
#
# PCD value can be an expression
#
- if len(ValueList) > 1 and ValueList[1] == 'VOID*':
+ if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
PcdValue = ValueList[0]
try:
ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
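As a rough illustration of the new checkpoint: in a [PcdsDynamicDefault] or [PcdsDynamicExDefault] DSC entry of the form TokenSpaceGuid.PcdName|Value[|DatumType[|MaxSize]], the only datum type that may be stated inline is VOID*. The helper name and the naive '|' split below are simplifications for the sketch; the real code uses GetSplitValueList and EdkLogger.error:

TAB_VOID = 'VOID*'

def check_dynamic_default_setting(setting):
    """setting is the text after 'TokenSpaceGuid.PcdName|', e.g. 'L"abc"|VOID*|12'."""
    fields = [field.strip() for field in setting.split('|')]
    # Only VOID* may be written as the inline datum type; UINTxx/BOOLEAN come from the DEC
    if len(fields) > 1 and fields[1] != TAB_VOID:
        raise ValueError("The datum type '%s' of PCD is wrong" % fields[1])
    return fields

check_dynamic_default_setting('L"Hello"|VOID*|12')   # accepted
check_dynamic_default_setting('0x10')                # accepted: no inline type given
# check_dynamic_default_setting('0x10|UINT32')       # would raise: only VOID* is allowed here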
|
|
From: <yi...@us...> - 2012-07-03 06:44:51
|
Revision: 2545
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2545&view=rev
Author: yingke
Date: 2012-07-03 06:44:40 +0000 (Tue, 03 Jul 2012)
Log Message:
-----------
Fix PCD bugs:
1. The PCD value in DSC does not match the data type declared in DEC files.
2. Expressions which contain "|" cannot be evaluated.
3. The max size of a PCD defined in the DSC is always ignored.
4. The PCD value defined in the DSC sometimes cannot be retrieved.
Reviewed-by: Su Jikui <jik...@in...>
Reviewed-by: Zeng Yurui <yur...@in...>
Signed-off-by: Liu Yingke <yin...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/Common/Misc.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Added Paths:
-----------
trunk/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2012-07-03 02:53:29 UTC (rev 2544)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2012-07-03 06:44:40 UTC (rev 2545)
@@ -300,7 +300,6 @@
for Pcd in Pkg.Pcds:
DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd]
DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))
- Platform.IsPlatformPcdDeclared(DecPcds)
Platform.SkuName = self.SkuId
for Name, Guid in PcdSet:
Modified: trunk/BaseTools/Source/Python/Common/Misc.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/Misc.py 2012-07-03 02:53:29 UTC (rev 2544)
+++ trunk/BaseTools/Source/Python/Common/Misc.py 2012-07-03 06:44:40 UTC (rev 2545)
@@ -30,6 +30,7 @@
from Common import GlobalData as GlobalData
from DataType import *
from BuildToolError import *
+from CommonDataClass.DataClass import *
## Regular expression used to find out place holders in string template
gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)
@@ -1176,6 +1177,113 @@
Opr.close()
Opw.close()
+## AnalyzeDscPcd
+#
+# Analyze DSC PCD value, since there is no data type info in DSC
+# This function is used to match the functions (AnalyzePcdData, AnalyzeHiiPcdData, AnalyzeVpdPcdData) used for retrieving PCD values from the database
+# 1. Feature flag: TokenSpace.PcdCName|PcdValue
+# 2. Fixed and Patchable: TokenSpace.PcdCName|PcdValue[|MaxSize]
+# 3. Dynamic default:
+# TokenSpace.PcdCName|PcdValue[|VOID*[|MaxSize]]
+# TokenSpace.PcdCName|PcdValue
+# 4. Dynamic VPD:
+# TokenSpace.PcdCName|VpdOffset[|VpdValue]
+# TokenSpace.PcdCName|VpdOffset[|MaxSize[|VpdValue]]
+# 5. Dynamic HII:
+# TokenSpace.PcdCName|HiiString|VariableGuid|VariableOffset[|HiiValue]
+# The PCD value needs to be located in such a string, and the PCD value might be an expression
+# that may itself contain the "|" operator, including inside string values.
+#
+# @param Setting: String containing the information described above, with "TokenSpace.PcdCName|" stripped
+# @param PcdType: PCD type: feature, fixed, dynamic default, VPD or HII
+# @param DataType: The datum type of the PCD: VOID*, UINT*, BOOLEAN
+# @retval:
+# ValueList: A list containing the fields described above
+# IsValid: True if the setting conforms to the format above, otherwise False
+# Index: The index where PcdValue is in ValueList
+#
+def AnalyzeDscPcd(Setting, PcdType, DataType=''):
+ Setting = Setting.strip()
+ # There might be escaped quote in a string: \", \\\"
+ Data = Setting.replace('\\\\', '//').replace('\\\"', '\\\'')
+ # There might be '|' in string and in ( ... | ... ), replace it with '-'
+ NewStr = ''
+ InStr = False
+ Pair = 0
+ for ch in Data:
+ if ch == '"':
+ InStr = not InStr
+ elif ch == '(' and not InStr:
+ Pair += 1
+ elif ch == ')' and not InStr:
+ Pair -= 1
+
+ if (Pair > 0 or InStr) and ch == TAB_VALUE_SPLIT:
+ NewStr += '-'
+ else:
+ NewStr += ch
+ FieldList = []
+ StartPos = 0
+ while True:
+ Pos = NewStr.find(TAB_VALUE_SPLIT, StartPos)
+ if Pos < 0:
+ FieldList.append(Setting[StartPos:].strip())
+ break
+ FieldList.append(Setting[StartPos:Pos].strip())
+ StartPos = Pos + 1
+
+ IsValid = True
+ if PcdType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_FEATURE_FLAG):
+ Value = FieldList[0]
+ Size = ''
+ if len(FieldList) > 1:
+ Size = FieldList[1]
+ if DataType == 'VOID*':
+ IsValid = (len(FieldList) <= 2)
+ else:
+ IsValid = (len(FieldList) <= 1)
+ return [Value, '', Size], IsValid, 0
+ elif PcdType in (MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT):
+ Value = FieldList[0]
+ Size = Type = ''
+ if len(FieldList) > 1:
+ Type = FieldList[1]
+ if len(FieldList) > 2:
+ Size = FieldList[2]
+ if DataType == 'VOID*':
+ IsValid = (len(FieldList) <= 3)
+ else:
+ IsValid = (len(FieldList) <= 1)
+ return [Value, Type, Size], IsValid, 0
+ elif PcdType in (MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_VPD):
+ VpdOffset = FieldList[0]
+ Value = Size = ''
+ if not DataType == 'VOID*':
+ if len(FieldList) > 1:
+ Value = FieldList[1]
+ else:
+ if len(FieldList) > 1:
+ Size = FieldList[1]
+ if len(FieldList) > 2:
+ Value = FieldList[2]
+ if DataType == 'VOID*':
+ IsValid = (len(FieldList) <= 3)
+ else:
+ IsValid = (len(FieldList) <= 2)
+ return [VpdOffset, Size, Value], IsValid, 2
+ elif PcdType in (MODEL_PCD_DYNAMIC_HII, MODEL_PCD_DYNAMIC_EX_HII):
+ HiiString = FieldList[0]
+ Guid = Offset = Value = ''
+ if len(FieldList) > 1:
+ Guid = FieldList[1]
+ if len(FieldList) > 2:
+ Offset = FieldList[2]
+ if len(FieldList) > 3:
+ Value = FieldList[3]
+ IsValid = (3 <= len(FieldList) <= 4)
+ return [HiiString, Guid, Offset, Value], IsValid, 3
+ return [], False, 0
+
## AnalyzePcdData
#
# Analyze the pcd Value, Datum type and TokenNumber.
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-07-03 02:53:29 UTC (rev 2544)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2012-07-03 06:44:40 UTC (rev 2545)
@@ -25,7 +25,7 @@
from CommonDataClass.DataClass import *
from Common.DataType import *
from Common.String import *
-from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData
+from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd
from Common.Expression import *
from CommonDataClass.Exceptions import *
@@ -1273,15 +1273,15 @@
def __RetrievePcdValue(self):
Records = self._RawTable.Query(MODEL_PCD_FEATURE_FLAG, BelongsToItem= -1.0)
for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
- Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
Name = TokenSpaceGuid + '.' + PcdName
- self._Symbols[Name] = Value
+ ValList, Valid, Index = AnalyzeDscPcd(Value, MODEL_PCD_FEATURE_FLAG)
+ self._Symbols[Name] = ValList[Index]
Records = self._RawTable.Query(MODEL_PCD_FIXED_AT_BUILD, BelongsToItem= -1.0)
for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
- Value, DatumType, MaxDatumSize = AnalyzePcdData(Value)
Name = TokenSpaceGuid + '.' + PcdName
- self._Symbols[Name] = Value
+ ValList, Valid, Index = AnalyzeDscPcd(Value, MODEL_PCD_FIXED_AT_BUILD)
+ self._Symbols[Name] = ValList[Index]
Content = open(str(self.MetaFile), 'r').readlines()
GlobalData.gPlatformOtherPcds['DSCFILE'] = str(self.MetaFile)
@@ -1448,50 +1448,28 @@
self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
def __ProcessPcd(self):
- PcdValue = None
- ValueList = GetSplitValueList(self._ValueList[2])
+ if self._ItemType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
+ self._ValueList[2] = ReplaceMacro(self._ValueList[2], self._Macros, RaiseError=True)
+ return
- #
- # PCD value can be an expression
- #
- if len(ValueList) > 1 and ValueList[1] == TAB_VOID:
- PcdValue = ValueList[0]
- try:
- ValueList[0] = ValueExpression(PcdValue, self._Macros)(True)
- except WrnExpression, Value:
- ValueList[0] = Value.result
- PcdValue = ValueList[0]
- else:
- #
- # Int*/Boolean VPD PCD
- # TokenSpace | PcdCName | Offset | [Value]
- #
- # VOID* VPD PCD
- # TokenSpace | PcdCName | Offset | [Size] | [Value]
- #
- if self._ItemType == MODEL_PCD_DYNAMIC_VPD:
- if len(ValueList) >= 4:
- PcdValue = ValueList[-1]
- else:
- PcdValue = ValueList[-1]
- #
- # For the VPD PCD, there may not have PcdValue data in DSC file
- #
- if PcdValue:
- try:
- ValueList[-1] = ValueExpression(PcdValue, self._Macros)(True)
- except WrnExpression, Value:
- ValueList[-1] = Value.result
+ ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
+ if not Valid:
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex+1,
+ ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
+ PcdValue = ValList[Index]
+ try:
+ ValList[Index] = ValueExpression(PcdValue, self._Macros)(True)
+ except WrnExpression, Value:
+ ValList[Index] = Value.result
- if ValueList[-1] == 'True':
- ValueList[-1] = '1'
- if ValueList[-1] == 'False':
- ValueList[-1] = '0'
- PcdValue = ValueList[-1]
- if PcdValue and self._ItemType in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
- GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
- self._ValueList[2] = '|'.join(ValueList)
+ if ValList[Index] == 'True':
+ ValList[Index] = '1'
+ if ValList[Index] == 'False':
+ ValList[Index] = '0'
+ GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
+ self._ValueList[2] = '|'.join(ValList)
+
def __ProcessComponent(self):
self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
Added: trunk/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceCommon.py (rev 0)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceCommon.py 2012-07-03 06:44:40 UTC (rev 2545)
@@ -0,0 +1,237 @@
+## @file
+# Common routines used by workspace
+#
+# Copyright (c) 2012, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+from Common.Misc import sdict
+from Common.DataType import SUP_MODULE_USER_DEFINED
+from BuildClassObject import LibraryClassObject
+
+## Get all packages from platform for specified arch, target and toolchain
+#
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database saves all data for all metafiles
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: List of packages which are DecBuildData instances
+#
+def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain):
+ PkgSet = set()
+ for ModuleFile in Platform.Modules:
+ Data = BuildDatabase[ModuleFile, Arch, Target, Toolchain]
+ PkgSet.update(Data.Packages)
+ for Lib in GetLiabraryInstances(Data, Platform, BuildDatabase, Arch, Target, Toolchain):
+ PkgSet.update(Lib.Packages)
+ return list(PkgSet)
+
+## Get all declared PCD from platform for specified arch, target and toolchain
+#
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database saves all data for all metafiles
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: A dictionary contains instances of PcdClassObject with key (PcdCName, TokenSpaceGuid)
+#
+def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain):
+ PkgList = GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain)
+ DecPcds = {}
+ for Pkg in PkgList:
+ for Pcd in Pkg.Pcds:
+ DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd]
+ return DecPcds
+
+## Get all dependent libraries for a module
+#
+# @param Module: InfBuildData instance
+# @param Platform: DscBuildData instance
+# @param BuildDatabase: The database saves all data for all metafiles
+# @param Arch: Current arch
+# @param Target: Current target
+# @param Toolchain: Current toolchain
+# @retval: List of dependent libraries which are InfBuildData instances
+#
+def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
+ if Module.AutoGenVersion >= 0x00010005:
+ return _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain)
+ else:
+ return _ResolveLibraryReference(Module, Platform)
+
+def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
+ ModuleType = Module.ModuleType
+
+ # for overriding library instances with module specific setting
+ PlatformModule = Platform.Modules[str(Module)]
+
+ # add forced library instances (specified under LibraryClasses sections)
+ #
+ # If a module has a MODULE_TYPE of USER_DEFINED,
+ # do not link in NULL library class instances from the global [LibraryClasses.*] sections.
+ #
+ if Module.ModuleType != SUP_MODULE_USER_DEFINED:
+ for LibraryClass in Platform.LibraryClasses.GetKeys():
+ if LibraryClass.startswith("NULL") and Platform.LibraryClasses[LibraryClass, Module.ModuleType]:
+ Module.LibraryClasses[LibraryClass] = Platform.LibraryClasses[LibraryClass, Module.ModuleType]
+
+ # add forced library instances (specified in module overrides)
+ for LibraryClass in PlatformModule.LibraryClasses:
+ if LibraryClass.startswith("NULL"):
+ Module.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
+
+ # EdkII module
+ LibraryConsumerList = [Module]
+ Constructor = []
+ ConsumedByList = sdict()
+ LibraryInstance = sdict()
+
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryClassName in M.LibraryClasses:
+ if LibraryClassName not in LibraryInstance:
+ # override library instance for this module
+ if LibraryClassName in PlatformModule.LibraryClasses:
+ LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
+ else:
+ LibraryPath = Platform.LibraryClasses[LibraryClassName, ModuleType]
+ if LibraryPath == None or LibraryPath == "":
+ LibraryPath = M.LibraryClasses[LibraryClassName]
+ if LibraryPath == None or LibraryPath == "":
+ return []
+
+ LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
+ # for those forced library instance (NULL library), add a fake library class
+ if LibraryClassName.startswith("NULL"):
+ LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
+ elif LibraryModule.LibraryClass == None \
+ or len(LibraryModule.LibraryClass) == 0 \
+ or (ModuleType != 'USER_DEFINED'
+ and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
+ # only USER_DEFINED can link against any library instance regardless of its SupModList
+ return []
+
+ LibraryInstance[LibraryClassName] = LibraryModule
+ LibraryConsumerList.append(LibraryModule)
+ else:
+ LibraryModule = LibraryInstance[LibraryClassName]
+
+ if LibraryModule == None:
+ continue
+
+ if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
+ Constructor.append(LibraryModule)
+
+ if LibraryModule not in ConsumedByList:
+ ConsumedByList[LibraryModule] = []
+ # don't add current module itself to consumer list
+ if M != Module:
+ if M in ConsumedByList[LibraryModule]:
+ continue
+ ConsumedByList[LibraryModule].append(M)
+ #
+ # Initialize the sorted output list to the empty set
+ #
+ SortedLibraryList = []
+ #
+ # Q <- Set of all nodes with no incoming edges
+ #
+ LibraryList = [] #LibraryInstance.values()
+ Q = []
+ for LibraryClassName in LibraryInstance:
+ M = LibraryInstance[LibraryClassName]
+ LibraryList.append(M)
+ if ConsumedByList[M] == []:
+ Q.append(M)
+
+ #
+ # start the DAG algorithm
+ #
+ while True:
+ EdgeRemoved = True
+ while Q == [] and EdgeRemoved:
+ EdgeRemoved = False
+ # for each node Item with a Constructor
+ for Item in LibraryList:
+ if Item not in Constructor:
+ continue
+ # for each Node without a constructor with an edge e from Item to Node
+ for Node in ConsumedByList[Item]:
+ if Node in Constructor:
+ continue
+ # remove edge e from the graph if Node has no constructor
+ ConsumedByList[Item].remove(Node)
+ EdgeRemoved = True
+ if ConsumedByList[Item] == []:
+ # insert Item into Q
+ Q.insert(0, Item)
+ break
+ if Q != []:
+ break
+ # DAG is done if there's no more incoming edge for all nodes
+ if Q == []:
+ break
+
+ # remove node from Q
+ Node = Q.pop()
+ # output Node
+ SortedLibraryList.append(Node)
+
+ # for each node Item with an edge e from Node to Item do
+ for Item in LibraryList:
+ if Node not in ConsumedByList[Item]:
+ continue
+ # remove edge e from the graph
+ ConsumedByList[Item].remove(Node)
+
+ if ConsumedByList[Item] != []:
+ continue
+ # insert Item into Q, if Item has no other incoming edges
+ Q.insert(0, Item)
+
+ #
+ # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
+ #
+ for Item in LibraryList:
+ if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
+ return []
+ if Item not in SortedLibraryList:
+ SortedLibraryList.append(Item)
+
+ #
+ # Build the list of constructor and destructor names
+ # The DAG topo sort produces the destructor order, so the list of constructors must be generated in the reverse order
+ #
+ SortedLibraryList.reverse()
+ return SortedLibraryList
+
+def _ResolveLibraryReference(Module, Platform):
+ LibraryConsumerList = [Module]
+
+ # "CompilerStub" is a must for Edk modules
+ if Module.Libraries:
+ Module.Libraries.append("CompilerStub")
+ LibraryList = []
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryName in M.Libraries:
+ Library = Platform.LibraryClasses[LibraryName, ':dummy:']
+ if Library == None:
+ for Key in Platform.LibraryClasses.data.keys():
+ if LibraryName.upper() == Key.upper():
+ Library = Platform.LibraryClasses[Key, ':dummy:']
+ break
+ if Library == None:
+ continue
+
+ if Library not in LibraryList:
+ LibraryList.append(Library)
+ LibraryConsumerList.append(Library)
+ return LibraryList
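The constructor ordering in _GetModuleLibraryInstances above is a topological sort over the "consumed-by" graph: a library must be constructed before every library that consumes it, and the sort naturally produces destructor order, which is then reversed. A compact, self-contained sketch of the same idea follows; the graph literal and library names are invented for illustration, and the real code additionally restricts edge removal to nodes with constructors and detects cycles:

def sort_library_constructors(consumed_by):
    """Kahn-style topological sort over a {library: [its consumers]} mapping.

    Returns the constructor order: a library comes before everything that consumes it.
    """
    pending = {lib: list(users) for lib, users in consumed_by.items()}
    # start with libraries nothing consumes; the destructor order starts there
    queue = [lib for lib, users in pending.items() if not users]
    destructor_order = []
    while queue:
        node = queue.pop()
        destructor_order.append(node)
        for lib, users in pending.items():
            if node in users:
                users.remove(node)
                if not users:
                    queue.append(lib)
    # reversing the destructor order yields the constructor order,
    # mirroring the SortedLibraryList.reverse() call above
    return list(reversed(destructor_order))

# BaseLib is consumed by DebugLib and MemoryLib; DebugLib is consumed by MemoryLib
example = {'BaseLib': ['DebugLib', 'MemoryLib'],
           'DebugLib': ['MemoryLib'],
           'MemoryLib': []}
print(sort_library_constructors(example))   # ['BaseLib', 'DebugLib', 'MemoryLib']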
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2012-07-03 02:53:29 UTC (rev 2544)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2012-07-03 06:44:40 UTC (rev 2545)
@@ -34,6 +34,8 @@
from MetaFileTable import *
from MetaFileParser import *
from BuildClassObject import *
+from WorkspaceCommon import GetDeclaredPcd
+from Common.Misc import AnalyzeDscPcd
## Platform build information from DSC file
#
@@ -134,6 +136,7 @@
self._LibraryInstances = None
self._LibraryClasses = None
self._Pcds = None
+ self._DecPcds = None
self._BuildOptions = None
self._LoadFixAddress = None
self._RFCLanguages = None
@@ -613,6 +616,45 @@
self._LibraryClasses[Library.BaseName, ':dummy:'] = Library
return self._LibraryClasses
+ def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
+ if self._DecPcds == None:
+ self._DecPcds = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain)
+ if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
+ EdkLogger.error('build', PARSER_ERROR,
+ "Pcd (%s.%s) defined in DSC is not declared in DEC files." % (TokenSpaceGuid, PcdCName),
+ File=self.MetaFile, Line=LineNo)
+ ValueList, IsValid, Index = AnalyzeDscPcd(Setting, PcdType, self._DecPcds[PcdCName, TokenSpaceGuid].DatumType)
+ if not IsValid and PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
+ EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
+ ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
+ if PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
+ try:
+ ValueList[Index] = ValueExpression(ValueList[Index], GlobalData.gPlatformPcds)(True)
+ except WrnExpression, Value:
+ ValueList[Index] = Value.result
+ except EvaluationException, Excpt:
+ if hasattr(Excpt, 'Pcd'):
+ if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
+ " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
+ " of the DSC file" % Excpt.Pcd,
+ File=self.MetaFile, Line=LineNo)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
+ File=self.MetaFile, Line=LineNo)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
+ File=self.MetaFile, Line=LineNo)
+ if ValueList[Index] == 'True':
+ ValueList[Index] = '1'
+ elif ValueList[Index] == 'False':
+ ValueList[Index] = '0'
+ Valid, ErrStr = CheckPcdDatum(self._DecPcds[PcdCName, TokenSpaceGuid].DatumType, ValueList[Index])
+ if not Valid:
+ EdkLogger.error('build', FORMAT_INVALID, ErrStr, File=self.MetaFile, Line=LineNo,
+ ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName))
+ return ValueList
+
## Retrieve all PCD settings in platform
def _GetPcds(self):
if self._Pcds == None:
@@ -663,14 +705,14 @@
# Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdSet.add((PcdCName, TokenSpaceGuid, Dummy4))
PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates
- for PcdCName, TokenSpaceGuid in PcdSet:
+ for PcdCName, TokenSpaceGuid, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
- PcdValue, DatumType, MaxDatumSize = AnalyzePcdData(Setting)
+ PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
@@ -702,15 +744,15 @@
# Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- PcdList.append((PcdCName, TokenSpaceGuid))
+ PcdList.append((PcdCName, TokenSpaceGuid, Dummy4))
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid in PcdList:
+ for PcdCName, TokenSpaceGuid, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
- PcdValue, DatumType, MaxDatumSize = AnalyzePcdData(Setting)
+ PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', '', PcdValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
@@ -744,14 +786,14 @@
RecordList = self._RawData[Type, self._Arch]
# Find out all possible PCD candidates for self._Arch
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdSet.add((PcdCName, TokenSpaceGuid, Dummy4))
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid in PcdSet:
+ for PcdCName, TokenSpaceGuid, Dummy4 in PcdSet:
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
- VariableName, VariableGuid, VariableOffset, DefaultValue = AnalyzeHiiPcdData(Setting)
+ VariableName, VariableGuid, VariableOffset, DefaultValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@@ -784,10 +826,10 @@
# Find out all possible PCD candidates for self._Arch
RecordList = self._RawData[Type, self._Arch]
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- PcdList.append((PcdCName, TokenSpaceGuid))
+ PcdList.append((PcdCName, TokenSpaceGuid, Dummy4))
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid in PcdList:
+ for PcdCName, TokenSpaceGuid, Dummy4 in PcdList:
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
@@ -797,7 +839,7 @@
# At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype
# until the DEC parser has been called.
#
- VpdOffset, MaxDatumSize, InitialValue = AnalyzeVpdPcdData(Setting)
+ VpdOffset, MaxDatumSize, InitialValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
@@ -842,46 +884,6 @@
self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
self.Pcds[Name, Guid].DefaultValue = Value
- def IsPlatformPcdD...
[truncated message content] |
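To make the AnalyzeDscPcd contract concrete, the hand-worked cases below show what the function added in this revision would return for a few representative DSC settings, derived by reading the diff above (assuming the DEC-declared datum type VOID* is passed for the string-valued entries; the token-space GUID name is invented). They are illustrative and were not executed against the real BaseTools modules:

# (category, setting after 'TokenSpace.PcdCName|') -> (ValueList, IsValid, Index)
examples = [
    ("FixedAtBuild",   'L"Platform"|17',
     (['L"Platform"', '', '17'], True, 0)),
    ("DynamicDefault", 'L"Name"|VOID*|20',
     (['L"Name"', 'VOID*', '20'], True, 0)),
    ("DynamicVpd",     '0x100|8|{0x00}',
     (['0x100', '8', '{0x00}'], True, 2)),
    ("DynamicHii",     'L"Var"|gSomeVarGuid|0x0|1',
     (['L"Var"', 'gSomeVarGuid', '0x0', '1'], True, 3)),
]

for kind, setting, (value_list, is_valid, index) in examples:
    # Index tells the caller which field holds the actual PCD value, so the
    # expression evaluation and True/False normalisation in __ProcessPcd and
    # _ValidatePcd only ever touch that one field.
    print("%-15s %-28s value field -> %-10s valid=%s"
          % (kind, setting, value_list[index], is_valid))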
|
From: <yi...@us...> - 2013-04-02 05:22:42
|
Revision: 2577
http://edk2-buildtools.svn.sourceforge.net/edk2-buildtools/?rev=2577&view=rev
Author: yingke
Date: 2013-04-02 05:22:30 +0000 (Tue, 02 Apr 2013)
Log Message:
-----------
1. Don't update *.ver files if the generated version files are the same as the existing files
2. When generating the build report, use the correct arch specified by the build option to get the necessary items, rather than getting all items from the DSC file
3. Ignore unknown sections for EDK style INF modules
Reviewed-by: Gao Liming <lim...@in...>
Signed-off-by: Liu Yingke <yin...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/build/BuildReport.py
Modified: trunk/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py 2013-03-27 01:49:07 UTC (rev 2576)
+++ trunk/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py 2013-04-02 05:22:30 UTC (rev 2577)
@@ -364,6 +364,7 @@
for SecAlign in InputAlign:
Cmd += ["--sectionalign", SecAlign]
+ CommandFile = Output + '.txt'
if Ui not in [None, '']:
#Cmd += ["-n", '"' + Ui + '"']
SectionData = array.array('B', [0,0,0,0])
@@ -378,12 +379,16 @@
if BuildNumber:
Cmd += ["-j", BuildNumber]
Cmd += ["-o", Output]
+
+ SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
+ if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
+ return
+
GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section")
else:
Cmd += ["-o", Output]
Cmd += Input
- CommandFile = Output + '.txt'
SaveFileOnChange(CommandFile, ' '.join(Cmd), False)
if not GenFdsGlobalVariable.NeedsUpdate(Output, list(Input) + [CommandFile]):
return
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-03-27 01:49:07 UTC (rev 2576)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-04-02 05:22:30 UTC (rev 2577)
@@ -286,16 +286,16 @@
self._SectionName = ItemList[0].upper()
if self._SectionName in self.DataType:
self._SectionType = self.DataType[self._SectionName]
+ # Check if the section name is valid
+ if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 2:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
+ elif self._Version >= 0x00010005:
+ EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
+ self.MetaFile, self._LineIndex + 1, self._CurrentLine)
else:
self._SectionType = MODEL_UNKNOWN
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
- # Check if the section name is valid
- if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 2:
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
-
# S1 is always Arch
if len(ItemList) > 1:
S1 = ItemList[1].upper()
Modified: trunk/BaseTools/Source/Python/build/BuildReport.py
===================================================================
--- trunk/BaseTools/Source/Python/build/BuildReport.py 2013-03-27 01:49:07 UTC (rev 2576)
+++ trunk/BaseTools/Source/Python/build/BuildReport.py 2013-04-02 05:22:30 UTC (rev 2577)
@@ -699,7 +699,8 @@
# Collect PCDs defined in DSC common section
#
self.DscPcdDefault = {}
- for Platform in Wa.BuildDatabase.WorkspaceDb.PlatformList:
+ for Arch in Wa.ArchList:
+ Platform = Wa.BuildDatabase[Wa.MetaFile, Arch, Wa.BuildTarget, Wa.ToolChain]
for (TokenCName, TokenSpaceGuidCName) in Platform.Pcds:
DscDefaultValue = Platform.Pcds[(TokenCName, TokenSpaceGuidCName)].DefaultValue
if DscDefaultValue:
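The GenSec handling above follows a write-only-on-change plus timestamp-check pattern: the command line is saved to a .txt file only if it changed, and regeneration is skipped when the output is newer than all of its inputs. The sketch below captures the idea with simplified stand-ins; save_file_on_change and needs_update are illustrative helpers, not the actual SaveFileOnChange and GenFdsGlobalVariable.NeedsUpdate implementations:

import os

def save_file_on_change(path, content):
    """Rewrite the file only when its content actually changed; return True if written."""
    if os.path.isfile(path):
        with open(path, 'r') as existing:
            if existing.read() == content:
                return False          # identical content: leave the timestamp alone
    with open(path, 'w') as out:
        out.write(content)
    return True

def needs_update(output, inputs):
    """True if the output is missing or older than any existing input file."""
    if not os.path.isfile(output):
        return True
    out_time = os.path.getmtime(output)
    return any(os.path.getmtime(path) > out_time
               for path in inputs if os.path.isfile(path))

Because the saved command file is listed among the inputs, a changed GenSec invocation still forces regeneration even when the source inputs themselves are untouched.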
|
|
From: <hc...@us...> - 2013-07-18 02:17:04
|
Revision: 2588
http://sourceforge.net/p/edk2-buildtools/code/2588
Author: hchen30
Date: 2013-07-18 02:17:01 +0000 (Thu, 18 Jul 2013)
Log Message:
-----------
Update BaseTools\Source\Python\Common\PyUtility.pyd, BaseTools\Source\Python\Eot\EfiCompressor.pyd and BaseTools\Source\Python\Eot\LzmaCompressor.pyd from Python 2.5 to 2.7.3
signed-by: Hess Chen(hes...@in...)
reviewed-by: Liming Gao(lim...@in...)
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/PyUtility.pyd
trunk/BaseTools/Source/Python/Eot/EfiCompressor.pyd
trunk/BaseTools/Source/Python/Eot/LzmaCompressor.pyd
Modified: trunk/BaseTools/Source/Python/Common/PyUtility.pyd
===================================================================
(Binary files differ)
Modified: trunk/BaseTools/Source/Python/Eot/EfiCompressor.pyd
===================================================================
(Binary files differ)
Modified: trunk/BaseTools/Source/Python/Eot/LzmaCompressor.pyd
===================================================================
(Binary files differ)
|
|
From: <lg...@us...> - 2013-11-18 06:34:15
|
Revision: 2610
http://sourceforge.net/p/edk2-buildtools/code/2610
Author: lgao4
Date: 2013-11-18 06:34:11 +0000 (Mon, 18 Nov 2013)
Log Message:
-----------
Enable PCD External Database, Sku and PcdInfo feature.
Signed-off-by: Feng, Bob C <bob...@in...>
Reviewed-by: Liu, Yingke D <yin...@in...>
Reviewed-by: Liu, Jiang A jia...@in...
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
trunk/BaseTools/Source/Python/AutoGen/GenC.py
trunk/BaseTools/Source/Python/BPDG/GenVpd.py
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Common/GlobalData.py
trunk/BaseTools/Source/Python/Common/Misc.py
trunk/BaseTools/Source/Python/Common/VpdInfoFile.py
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py
trunk/BaseTools/Source/Python/GenFds/FfsInfStatement.py
trunk/BaseTools/Source/Python/GenFds/Section.py
trunk/BaseTools/Source/Python/GenPatchPcdTable/GenPatchPcdTable.py
trunk/BaseTools/Source/Python/PatchPcdValue/PatchPcdValue.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/MetaFileTable.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Added Paths:
-----------
trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py
trunk/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
Modified: trunk/BaseTools/Source/Python/AutoGen/AutoGen.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2013-11-18 05:45:54 UTC (rev 2609)
+++ trunk/BaseTools/Source/Python/AutoGen/AutoGen.py 2013-11-18 06:34:11 UTC (rev 2610)
@@ -34,9 +34,12 @@
from GenFds.FdfParser import *
from CommonDataClass.CommonClass import SkuInfoClass
from Workspace.BuildClassObject import *
+from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
import Common.VpdInfoFile as VpdInfoFile
+from GenPcdDb import CreatePcdDatabaseCode
+from Workspace.MetaFileCommentParser import UsageList
-## Regular expression for splitting Dependency Expression stirng into tokens
+## Regular expression for splitting Dependency Expression string into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
## Mapping Makefile type
@@ -59,13 +62,7 @@
#
# Template string to generic AsBuilt INF
#
-gAsBuiltInfHeaderString = TemplateString("""## @file
-# ${module_name}
-#
-# DO NOT EDIT
-# FILE auto-generated Binary INF
-#
-##
+gAsBuiltInfHeaderString = TemplateString("""${header_comments}
[Defines]
INF_VERSION = 0x00010016
@@ -73,6 +70,7 @@
FILE_GUID = ${module_guid}
MODULE_TYPE = ${module_module_type}
VERSION_STRING = ${module_version_string}${BEGIN}
+ PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}
@@ -82,8 +80,21 @@
[Binaries.${module_arch}]${BEGIN}
${binary_item}${END}
-[PcdEx]${BEGIN}
- ${pcd_item}${END}
+[PatchPcd.${module_arch}]${BEGIN}
+ ${patchablepcd_item}
+${END}
+[Protocols.${module_arch}]${BEGIN}
+ ${protocol_item}
+${END}
+[Ppis.${module_arch}]${BEGIN}
+ ${ppi_item}
+${END}
+[Guids.${module_arch}]${BEGIN}
+ ${guid_item}
+${END}
+[PcdEx.${module_arch}]${BEGIN}
+ ${pcd_item}
+${END}
## @AsBuilt${BEGIN}
## ${flags_item}${END}
@@ -228,15 +239,6 @@
ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
% (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
- # Validate SKU ID
- if not self.SkuId:
- self.SkuId = 'DEFAULT'
-
- if self.SkuId not in self.Platform.SkuIds:
- EdkLogger.error("build", PARAMETER_INVALID,
- ExtraData="SKU-ID [%s] is not supported by the platform. [Valid SKU-ID: %s]"
- % (self.SkuId, " ".join(self.Platform.SkuIds.keys())))
-
# parse FDF file to get PCDs in it, if any
if not self.FdfFile:
self.FdfFile = self.Platform.FlashDefinition
@@ -867,7 +869,7 @@
for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:
# make sure that the "VOID*" kind of datum has MaxDatumSize set
- if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize == None:
+ if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
if PcdFromModule.Type in GenC.gDynamicPcd or PcdFromModule.Type in GenC.gDynamicExPcd:
@@ -938,19 +940,19 @@
# Add VPD type PCD into VpdFile and determine whether the VPD PCD need to be fixed up.
#
for PcdKey in PlatformPcds:
- Pcd = self.Platform.Pcds[PcdKey]
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- Pcd = VpdPcdDict[PcdKey]
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
- VpdFile.Add(Pcd, Sku.VpdOffset)
- # if the offset of a VPD is *, then it need to be fixed up by third party tool.
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
- if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
- EdkLogger.error("Build", FILE_NOT_FOUND, \
- "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+ Pcd = self.Platform.Pcds[PcdKey]
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
+ PcdKey in VpdPcdDict:
+ Pcd = VpdPcdDict[PcdKey]
+ for (SkuName,Sku) in Pcd.SkuInfoList.items():
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ VpdFile.Add(Pcd, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+ if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
+ EdkLogger.error("Build", FILE_NOT_FOUND, \
+ "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
#
@@ -971,32 +973,46 @@
# Not found, it should be signature
if not FoundFlag :
# just pick the a value to determine whether is unicode string type
- Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
- # Need to iterate DEC pcd information to get the value & datumtype
- for eachDec in self.PackageList:
- for DecPcd in eachDec.Pcds:
- DecPcdEntry = eachDec.Pcds[DecPcd]
- if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
- (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
- # Print warning message to let the developer make a determine.
- EdkLogger.warn("build", "Unreferenced vpd pcd used!",
- File=self.MetaFile, \
- ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
- %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
-
- DscPcdEntry.DatumType = DecPcdEntry.DatumType
- DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
- # Only fix the value while no value provided in DSC file.
- if (Sku.DefaultValue == "" or Sku.DefaultValue==None):
- DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
-
+ for (SkuName,Sku) in DscPcdEntry.SkuInfoList.items():
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ # Need to iterate DEC pcd information to get the value & datumtype
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ # Print warning message to let the developer make a determine.
+ EdkLogger.warn("build", "Unreferenced vpd pcd used!",
+ File=self.MetaFile, \
+ ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
+ DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
+ # Only fix the value while no value provided in DSC file.
+ if (Sku.DefaultValue == "" or Sku.DefaultValue==None):
+ DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
+
+ if DscPcdEntry not in self._DynamicPcdList:
+ self._DynamicPcdList.append(DscPcdEntry)
+# Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ PcdValue = Sku.DefaultValue
+ VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+ if DscPcdEntry.DatumType == 'VOID*' and PcdValue.startswith("L"):
+ UnicodePcdArray.append(DscPcdEntry)
+ elif len(Sku.VariableName) > 0:
+ HiiPcdArray.append(DscPcdEntry)
+ else:
+ OtherPcdArray.append(DscPcdEntry)
+
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
- VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
- # if the offset of a VPD is *, then it need to be fixed up by third party tool.
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
@@ -1043,9 +1059,11 @@
# Fixup "*" offset
for Pcd in self._DynamicPcdList:
# just pick the a value to determine whether is unicode string type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- if Sku.VpdOffset == "*":
- Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0].strip()
+ i = 0
+ for (SkuName,Sku) in Pcd.SkuInfoList.items():
+ if Sku.VpdOffset == "*":
+ Sku.VpdOffset = VpdFile.GetOffset(Pcd)[i].strip()
+ i += 1
else:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
@@ -1593,13 +1611,13 @@
% (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName))
Value = ToPcd.DefaultValue
if Value in [None, '']:
- ToPcd.MaxDatumSize = 1
+ ToPcd.MaxDatumSize = '1'
elif Value[0] == 'L':
- ToPcd.MaxDatumSize = str(len(Value) * 2)
+ ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
elif Value[0] == '{':
ToPcd.MaxDatumSize = str(len(Value.split(',')))
else:
- ToPcd.MaxDatumSize = str(len(Value))
+ ToPcd.MaxDatumSize = str(len(Value) - 1)
# apply default SKU for dynamic PCDS if specified one is not available
if (ToPcd.Type in PCD_DYNAMIC_TYPE_LIST or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_LIST) \
@@ -1992,9 +2010,14 @@
self._DerivedPackageList = None
self._ModulePcdList = None
self._LibraryPcdList = None
+ self._PcdComments = sdict()
self._GuidList = None
+ self._GuidsUsedByPcd = None
+ self._GuidComments = sdict()
self._ProtocolList = None
+ self._ProtocolComments = sdict()
self._PpiList = None
+ self._PpiComments = sdict()
self._DepexList = None
self._DepexExpressionList = None
self._BuildOption = None
@@ -2095,6 +2118,10 @@
self._LibraryFlag = False
return self._LibraryFlag
+ ## Check if the module is binary module or not
+ def _IsBinaryModule(self):
+ return self.Module.IsBinaryModule
+
## Return the directory to store intermediate files of the module
def _GetBuildDir(self):
if self._BuildDir == None:
@@ -2551,6 +2578,12 @@
self._DependentLibraryList = self.PlatformInfo.ApplyLibraryInstance(self.Module)
return self._DependentLibraryList
+ @staticmethod
+ def UpdateComments(Recver, Src):
+ for Key in Src:
+ if Key not in Recver:
+ Recver[Key] = []
+ Recver[Key].extend(Src[Key])
## Get the list of PCDs from current module
#
# @retval list The list of PCD
@@ -2559,6 +2592,7 @@
if self._ModulePcdList == None:
# apply PCD settings from platform
self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
+ self.UpdateComments(self._PcdComments, self.Module.PcdComments)
return self._ModulePcdList
## Get the list of PCDs from dependent libraries
@@ -2571,6 +2605,7 @@
if not self.IsLibrary:
# get PCDs from dependent libraries
for Library in self.DependentLibraryList:
+ self.UpdateComments(self._PcdComments, Library.PcdComments)
for Key in Library.Pcds:
# skip duplicated PCDs
if Key in self.Module.Pcds or Key in Pcds:
@@ -2591,8 +2626,17 @@
self._GuidList = self.Module.Guids
for Library in self.DependentLibraryList:
self._GuidList.update(Library.Guids)
+ self.UpdateComments(self._GuidComments, Library.GuidComments)
+ self.UpdateComments(self._GuidComments, self.Module.GuidComments)
return self._GuidList
+ def GetGuidsUsedByPcd(self):
+ if self._GuidsUsedByPcd == None:
+ self._GuidsUsedByPcd = sdict()
+ self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())
+ for Library in self.DependentLibraryList:
+ self._GuidsUsedByPcd.update(Library.GetGuidsUsedByPcd())
+ return self._GuidsUsedByPcd
## Get the protocol value mapping
#
# @retval dict The mapping between protocol cname and its value
@@ -2602,6 +2646,8 @@
self._ProtocolList = self.Module.Protocols
for Library in self.DependentLibraryList:
self._ProtocolList.update(Library.Protocols)
+ self.UpdateComments(self._ProtocolComments, Library.ProtocolComments)
+ self.UpdateComments(self._ProtocolComments, self.Module.ProtocolComments)
return self._ProtocolList
## Get the PPI value mapping
@@ -2613,6 +2659,8 @@
self._PpiList = self.Module.Ppis
for Library in self.DependentLibraryList:
self._PpiList.update(Library.Ppis)
+ self.UpdateComments(self._PpiComments, Library.PpiComments)
+ self.UpdateComments(self._PpiComments, self.Module.PpiComments)
return self._PpiList
## Get the list of include search path
@@ -2669,38 +2717,74 @@
### TODO: How to handles mixed source and binary modules
- # Find all DynamicEx PCDs used by this module and dependent libraries
+ # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
# Also find all packages that the DynamicEx PCDs depend on
Pcds = []
+ PatchablePcds = {}
Packages = []
+ PcdCheckList = []
+ PcdTokenSpaceList = []
for Pcd in self.ModulePcdList + self.LibraryPcdList:
- if Pcd.Type in GenC.gDynamicExPcd:
- if Pcd not in Pcds:
- Pcds += [Pcd]
- for Package in self.DerivedPackageList:
- if Package not in Packages:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'DynamicEx') in Package.Pcds:
- Packages += [Package]
- elif (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'Dynamic') in Package.Pcds:
- Packages += [Package]
+ if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
+ PatchablePcds[Pcd.TokenCName] = Pcd
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'PatchableInModule'))
+ elif Pcd.Type in GenC.gDynamicExPcd:
+ if Pcd not in Pcds:
+ Pcds += [Pcd]
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'DynamicEx'))
+ PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'Dynamic'))
+ PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
+ GuidList = sdict()
+ GuidList.update(self.GuidList)
+ for TokenSpace in self.GetGuidsUsedByPcd():
+ # If the token space is not referred to by a patch PCD or Ex PCD, remove the GUID from the GUID list
+ # The GUIDs in the GUIDs section should really be the GUIDs in the source INF or those referred to by Ex and patch PCDs
+ if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
+ GuidList.pop(TokenSpace)
+ CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
+ for Package in self.DerivedPackageList:
+ if Package in Packages:
+ continue
+ BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
+ Found = False
+ for Index in range(len(BeChecked)):
+ for Item in CheckList[Index]:
+ if Item in BeChecked[Index]:
+ Packages += [Package]
+ Found = True
+ break
+ if Found: break
ModuleType = self.ModuleType
if ModuleType == 'UEFI_DRIVER' and self.DepexGenerated:
- ModuleType = 'DXE_DRIVER'
+ ModuleType = 'DXE_DRIVER'
+ DriverType = ''
+ if self.PcdIsDriver != '':
+ DriverType = self.PcdIsDriver
+
AsBuiltInfDict = {
'module_name' : self.Name,
'module_guid' : self.Guid,
'module_module_type' : ModuleType,
'module_version_string' : self.Version,
+ 'pcd_is_driver_string' : [],
'module_uefi_specification_version' : [],
'module_pi_specification_version' : [],
'module_arch' : self.Arch,
'package_item' : ['%s' % (Package.MetaFile.File.replace('\\','/')) for Package in Packages],
'binary_item' : [],
+ 'patchablepcd_item' : [],
'pcd_item' : [],
- 'flags_item' : []
+ 'protocol_item' : [],
+ 'ppi_item' : [],
+ 'guid_item' : [],
+ 'flags_item' : [],
+ 'libraryclasses_item' : []
}
+ AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
+ if DriverType:
+ AsBuiltInfDict['pcd_is_driver_string'] += [DriverType]
if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
AsBuiltInfDict['module_uefi_specification_version'] += [self.Specification['UEFI_SPECIFICATION_VERSION']]
@@ -2732,9 +2816,125 @@
if self.ModuleType in ['DXE_SMM_DRIVER']:
AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name + '.depex']
+ for Root, Dirs, Files in os.walk(OutputDir):
+ for File in Files:
+ if File.lower().endswith('.pdb'):
+ AsBuiltInfDict['binary_item'] += ['DISPOSABLE|' + File]
+ HeaderComments = self.Module.HeaderComments
+ StartPos = 0
+ for Index in range(len(HeaderComments)):
+ if HeaderComments[Index].find('@BinaryHeader') != -1:
+ HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
+ StartPos = Index
+ break
+ AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
+ GenList = [
+ (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
+ (self.PpiList, self._PpiComments, 'ppi_item'),
+ (GuidList, self._GuidComments, 'guid_item')
+ ]
+ for Item in GenList:
+ for CName in Item[0]:
+ Comments = ''
+ if CName in Item[1]:
+ Comments = '\n '.join(Item[1][CName])
+ Entry = CName
+ if Comments:
+ Entry = Comments + '\n ' + CName
+ AsBuiltInfDict[Item[2]].append(Entry)
+ PatchList = parsePcdInfoFromMapFile(
+ os.path.join(self.OutputDir, self.Name + '.map'),
+ os.path.join(self.OutputDir, self.Name + '.efi')
+ )
+ if PatchList:
+ for PatchPcd in PatchList:
+ if PatchPcd[0] not in PatchablePcds:
+ continue
+ Pcd = PatchablePcds[PatchPcd[0]]
+ PcdValue = ''
+ if Pcd.DatumType != 'VOID*':
+ HexFormat = '0x%02x'
+ if Pcd.DatumType == 'UINT16':
+ HexFormat = '0x%04x'
+ elif Pcd.DatumType == 'UINT32':
+ HexFormat = '0x%08x'
+ elif Pcd.DatumType == 'UINT64':
+ HexFormat = '0x%016x'
+ PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
+ else:
+ if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ )
+ ArraySize = int(Pcd.MaxDatumSize, 0)
+ PcdValue = Pcd.DefaultValue
+ if PcdValue[0] != '{':
+ Unicode = False
+ if PcdValue[0] == 'L':
+ Unicode = True
+ PcdValue = PcdValue.lstrip('L')
+ PcdValue = eval(PcdValue)
+ NewValue = '{'
+ for Index in range(0, len(PcdValue)):
+ if Unicode:
+ CharVal = ord(PcdValue[Index])
+ NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
+ + '0x%02x' % (CharVal >> 8) + ', '
+ else:
+ NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
+ Padding = '0x00, '
+ if Unicode:
+ Padding = Padding * 2
+ ArraySize = ArraySize / 2
+ if ArraySize < (len(PcdValue) + 1):
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ )
+ if ArraySize > len(PcdValue) + 1:
+ NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
+ PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
+ elif len(PcdValue.split(',')) <= ArraySize:
+ PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
+ PcdValue += '}'
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName)
+ )
+ PcdItem = '%s.%s|%s|0x%X' % \
+ (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, PcdValue, PatchPcd[1])
+ PcdComments = ''
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
+ if PcdComments:
+ PcdItem = PcdComments + '\n ' + PcdItem
+ AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
for Pcd in Pcds:
- AsBuiltInfDict['pcd_item'] += [Pcd.TokenSpaceGuidCName + '.' + Pcd.TokenCName]
-
+ PcdComments = ''
+ PcdCommentList = []
+ HiiInfo = ''
+ if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
+ for SkuName in Pcd.SkuInfoList:
+ SkuInfo = Pcd.SkuInfoList[SkuName]
+ HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
+ break
+ if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
+ PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
+ if HiiInfo:
+ UsageIndex = -1
+ for Index, Comment in enumerate(PcdCommentList):
+ for Usage in UsageList:
+ if Comment.find(Usage) != -1:
+ UsageIndex = Index
+ break
+ if UsageIndex != -1:
+ PcdCommentList[UsageIndex] = PcdCommentList[UsageIndex] + ' ' + HiiInfo
+ else:
+ PcdCommentList.append('## ' + HiiInfo)
+ PcdComments = '\n '.join(PcdCommentList)
+ PcdEntry = Pcd.TokenSpaceGui...
[truncated message content] |
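[Editor's note] Illustration only, not part of the patch above: the patchable-PCD branch earlier in this hunk chooses a hex field width from the PCD datum type before writing the value into the as-built INF. A minimal sketch of that selection, with hypothetical names and inputs:

    # Hypothetical sketch of the HexFormat selection shown in the hunk above.
    def format_patchable_value(datum_type, default_value):
        width = {'UINT16': 4, 'UINT32': 8, 'UINT64': 16}.get(datum_type, 2)
        return ('0x%%0%dx' % width) % int(default_value, 0)

    # format_patchable_value('UINT32', '0x10') -> '0x00000010'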
|
From: <lg...@us...> - 2013-11-26 02:10:42
|
Revision: 2614
http://sourceforge.net/p/edk2-buildtools/code/2614
Author: lgao4
Date: 2013-11-26 02:10:38 +0000 (Tue, 26 Nov 2013)
Log Message:
-----------
1. Fixed the error that SkuName does not support the dot character.
2. Fixed the error that SkuName does not support lowercase characters.
3. Fixed the error that there is no comment mark in the as-built INF file.
Signed-off-by: Feng, Bob C bob...@in...
Reviewed-by: Liu, Yingke D <yin...@in...>
Reviewed-by: Hesheng Chen <hes...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/DataType.py
trunk/BaseTools/Source/Python/Common/String.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/Common/DataType.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/DataType.py 2013-11-25 03:04:13 UTC (rev 2613)
+++ trunk/BaseTools/Source/Python/Common/DataType.py 2013-11-26 02:10:38 UTC (rev 2614)
@@ -486,6 +486,8 @@
PCDS_DYNAMICEX_VPD = "PcdsDynamicExVpd"
PCDS_DYNAMICEX_HII = "PcdsDynamicExHii"
+SECTIONS_HAVE_ITEM_PCD = [PCDS_DYNAMIC_DEFAULT.upper(),PCDS_DYNAMIC_VPD.upper(),PCDS_DYNAMIC_HII.upper(), \
+ PCDS_DYNAMICEX_DEFAULT.upper(),PCDS_DYNAMICEX_VPD.upper(),PCDS_DYNAMICEX_HII.upper()]
# Section allowed to have items after arch
SECTIONS_HAVE_ITEM_AFTER_ARCH = [TAB_LIBRARY_CLASSES.upper(), TAB_DEPEX.upper(), TAB_USER_EXTENSIONS.upper(),
PCDS_DYNAMIC_DEFAULT.upper(),
Modified: trunk/BaseTools/Source/Python/Common/String.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/String.py 2013-11-25 03:04:13 UTC (rev 2613)
+++ trunk/BaseTools/Source/Python/Common/String.py 2013-11-26 02:10:38 UTC (rev 2614)
@@ -401,16 +401,6 @@
Comment = Line[Index:].strip()
Line = Line[0:Index].strip()
break
- if Comment:
- # Remove prefixed and trailing comment characters
- Start = 0
- End = len(Comment)
- while Start < End and Comment.startswith(CommentCharacter, Start, End):
- Start += 1
- while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
- End -= 1
- Comment = Comment[Start:End]
- Comment = Comment.strip()
return Line, Comment
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-11-25 03:04:13 UTC (rev 2613)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-11-26 02:10:38 UTC (rev 2614)
@@ -278,7 +278,7 @@
for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
if Item == '':
continue
- ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ ItemList = GetSplitValueList(Item, TAB_SPLIT,2)
# different section should not mix in one section
if self._SectionName != '' and self._SectionName != ItemList[0].upper():
EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
@@ -305,7 +305,10 @@
# S2 may be Platform or ModuleType
if len(ItemList) > 2:
- S2 = ItemList[2].upper()
+ if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD:
+ S2 = ItemList[2]
+ else:
+ S2 = ItemList[2].upper()
else:
S2 = 'COMMON'
self._Scope.append([S1, S2])
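[Editor's note] Illustration only: limiting the section-header split to two separators, together with skipping the .upper() call for PCD sections, is what lets a dotted, mixed-case SKU name pass through intact. A minimal sketch using plain str.split, assuming TAB_SPLIT is '.':

    # Hypothetical sketch of the new section-header handling for PCD sections.
    TAB_SPLIT = '.'

    def split_pcd_section_item(item):
        # Mirrors GetSplitValueList(Item, TAB_SPLIT, 2): at most three fields,
        # so everything after the second dot stays together as the SKU name.
        return item.split(TAB_SPLIT, 2)

    print(split_pcd_section_item('PcdsDynamicHii.common.Vendor.BoardA'))
    # ['PcdsDynamicHii', 'common', 'Vendor.BoardA'] -- case and dots preserved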
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-11-25 03:04:13 UTC (rev 2613)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-11-26 02:10:38 UTC (rev 2614)
@@ -460,7 +460,7 @@
if Record[1] in [None, '']:
EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
File=self.MetaFile, Line=Record[-1])
- self._SkuIds[Record[1].upper()] = Record[0]
+ self._SkuIds[Record[1]] = Record[0]
if 'DEFAULT' not in self._SkuIds:
self._SkuIds['DEFAULT'] = '0'
if 'COMMON' not in self._SkuIds:
@@ -731,7 +731,6 @@
RecordList = self._RawData[Type, self._Arch]
PcdValueDict = sdict()
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- SkuName = SkuName.upper()
if SkuName in (SkuObj.SystemSkuId,'DEFAULT','COMMON'):
PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
PcdDict[Arch, PcdCName, TokenSpaceGuid,SkuName] = Setting
@@ -798,7 +797,6 @@
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
@@ -881,7 +879,6 @@
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
@@ -954,7 +951,6 @@
AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- SkuName = SkuName.upper()
if SkuName not in AvailableSkuIdSet:
continue
|
|
From: <lg...@us...> - 2013-12-06 02:52:53
|
Revision: 2623
http://sourceforge.net/p/edk2-buildtools/code/2623
Author: lgao4
Date: 2013-12-06 02:52:50 +0000 (Fri, 06 Dec 2013)
Log Message:
-----------
1. Fixed the error of getting an incorrect PCD value for the DynamicHii type.
2. Fixed the build.exe '-x skuid' option error.
3. Fixed the 2-byte alignment of data in the string table of the external PCD database.
4. Fixed the issue that patchable PCDs are not listed in the as-built INF file.
Signed-off-by: Feng, Bob C <bob...@in...>
Reviewed-by: Liu, Yingke D <yin...@in...>
Reviewed-by: Chen, Hesheng hes...@in...
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py
trunk/BaseTools/Source/Python/Common/String.py
trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
trunk/BaseTools/Source/Python/GenFds/FfsInfStatement.py
trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -842,6 +842,7 @@
# resolve variable table offset
for VariableEntries in VariableTable:
+ skuindex = 0
for VariableEntryPerSku in VariableEntries:
(VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable) = VariableEntryPerSku[:]
DbIndex = 0
@@ -853,7 +854,9 @@
DbOffset += DbItemTotal[DbIndex].GetListSize()
else:
assert(False)
-
+ if isinstance(VariableRefTable[0],list):
+ DbOffset += skuindex * 4
+ skuindex += 1
if DbIndex >= InitTableNum:
assert(False)
@@ -995,10 +998,6 @@
DbFile.write(PcdDbBuffer)
Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
-
-def CArrayToArray(carray):
- return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in carray])
-
## Create PCD database in DXE or PEI phase
#
# @param Platform The platform object
@@ -1094,6 +1093,8 @@
Dict['PCD_TOKENSPACE_MAP'] = []
Dict['PCD_NAME_OFFSET'] = []
+ PCD_STRING_INDEX_MAP = {}
+
StringTableIndex = 0
StringTableSize = 0
NumberOfLocalTokens = 0
@@ -1172,10 +1173,7 @@
Pcd.TokenTypeList += ['PCD_TYPE_HII']
Pcd.InitString = 'INIT'
# store VariableName to stringTable and calculate the VariableHeadStringIndex
- if Sku.VariableName.startswith('{'):
- VariableNameStructure = CArrayToArray(Sku.VariableName)
- else:
- VariableNameStructure = StringToArray(Sku.VariableName)
+ VariableNameStructure = StringToArray(Sku.VariableName)
if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
Dict['STRING_TABLE_CNAME'].append(CName)
Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
@@ -1183,11 +1181,15 @@
Dict['STRING_TABLE_INDEX'].append('')
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
-
- Dict['STRING_TABLE_LENGTH'].append((len(Sku.VariableName) - 3 + 1) * 2 )
+ VarNameSize = len(VariableNameStructure.replace(',',' ').split())
+ Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
+ StringHeadOffsetList.append(str(StringTableSize) + 'U')
+ VarStringDbOffsetList = []
+ VarStringDbOffsetList.append(StringTableSize)
+ Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
StringTableIndex += 1
- StringTableSize += (len(Sku.VariableName) - 3 + 1) * 2
+ StringTableSize += len(VariableNameStructure.replace(',',' ').split())
VariableHeadStringIndex = 0
for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
@@ -1246,7 +1248,7 @@
# the Pcd default value was filled before
VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
- VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable])
+ VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable])
elif Sku.VpdOffset != '':
Pcd.TokenTypeList += ['PCD_TYPE_VPD']
@@ -1278,26 +1280,31 @@
else:
Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
if Sku.DefaultValue[0] == 'L':
- Size = (len(Sku.DefaultValue) - 3 + 1) * 2
- Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(DefaultValueBinStructure.replace(',',' ').split())
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"':
- Size = len(Sku.DefaultValue) - 2 + 1
- Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(DefaultValueBinStructure.replace(',',' ').split())
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '{':
- Size = len(Sku.DefaultValue.replace(',',' ').split())
- Dict['STRING_TABLE_VALUE'].append(Sku.DefaultValue)
+ DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
+ Size = len(DefaultValueBinStructure.replace(',',' ').split())
+ Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
Dict['SIZE_TABLE_CNAME'].append(CName)
- Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Size) + 'U')
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
+ Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
if MaxDatumSize < Size:
MaxDatumSize = Size
Size = MaxDatumSize
+ if Size % 2:
+ Size += 1
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Size) + 'U')
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
Dict['STRING_TABLE_LENGTH'].append(Size)
StringTableIndex += 1
StringTableSize += (Size)
@@ -1352,6 +1359,7 @@
Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
+ PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) -1 ] = len(Dict['STRING_DB_VALUE']) -1
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
@@ -1405,11 +1413,12 @@
TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
- Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCName) + 1 )
+ Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
- Dict['PCD_CNAME'][GeneratedTokenNumber] = StringToArray('"' + CName + '"' )
+ CNameBinArray = StringToArray('"' + CName + '"' )
+ Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
- Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CName) + 1
+ Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
@@ -1427,6 +1436,7 @@
if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
# Find index by CName, TokenSpaceGuid
Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
+ Offset = PCD_STRING_INDEX_MAP[Offset]
assert(Offset != -1)
Table = Dict['STRING_DB_VALUE']
if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
@@ -1475,13 +1485,13 @@
Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
for index in range(len(Dict['PCD_TOKENSPACE'])):
StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
-
+ StringTableIndex += 1
for index in range(len(Dict['PCD_CNAME'])):
Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
Dict['PCD_NAME_OFFSET'].append(StringTableSize)
StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
-
+ StringTableIndex += 1
if GuidList != []:
Dict['GUID_TABLE_EMPTY'] = 'FALSE'
Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
Modified: trunk/BaseTools/Source/Python/Common/String.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/String.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/Common/String.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -801,11 +801,25 @@
return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String[2:-1]])
elif String.startswith('"'):
if String == "\"\"":
- return "{0x00}";
+ return "{0x00,0x00}"
else:
- return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
+ StringLen = len(String[1:-1])
+ if StringLen % 2:
+ return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
+ else:
+ return "{%s, 0x00,0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
+ elif String.startswith('{'):
+ StringLen = len(String[1:-1])
+ if StringLen % 2:
+ return "{%s, 0x00}" % ", ".join([ C for C in String[1:-1].split(',')])
+ else:
+ return "{%s}" % ", ".join([ C for C in String[1:-1].split(',')])
+
else:
- return '{%s, 0}' % ', '.join(String.split())
+ if len(String.split()) % 2:
+ return '{%s, 0}' % ', '.join(String.split())
+ else:
+ return '{%s, 0,0}' % ', '.join(String.split())
def StringArrayLength(String):
if isinstance(String, unicode):
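[Editor's note] Illustration only: with the change above, an ASCII string whose character count is even gets a second 0x00 so the packed string-table entry stays 2-byte aligned. A small sketch of the expected results, assuming plain ASCII input:

    # Editor's sketch mirroring the patched '"' branch of StringToArray.
    def string_to_array_ascii(s):
        body = ", ".join("0x%02x" % ord(c) for c in s[1:-1])
        if len(s[1:-1]) % 2:
            return "{%s, 0x00}" % body      # odd char count: one NUL gives an even size
        return "{%s, 0x00,0x00}" % body     # even char count: pad to the next 2-byte boundary

    assert string_to_array_ascii('"AB"')  == "{0x41, 0x42, 0x00,0x00}"
    assert string_to_array_ascii('"ABC"') == "{0x41, 0x42, 0x43, 0x00}"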
Modified: trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py
===================================================================
--- trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/CommonDataClass/DataClass.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -97,6 +97,7 @@
MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_META_DATA_SECTION_HEADER = 5100
MODEL_META_DATA_SUBSECTION_HEADER = 5200
+MODEL_META_DATA_TAIL_COMMENT = 5300
MODEL_EXTERNAL_DEPENDENCY = 10000
Modified: trunk/BaseTools/Source/Python/GenFds/FfsInfStatement.py
===================================================================
--- trunk/BaseTools/Source/Python/GenFds/FfsInfStatement.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/GenFds/FfsInfStatement.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -43,6 +43,8 @@
#
#
class FfsInfStatement(FfsInfStatementClassObject):
+ ## The mapping dictionary from datum type to its maximum number.
+ _MAX_SIZE_TYPE = {"BOOLEAN":0x01, "UINT8":0xFF, "UINT16":0xFFFF, "UINT32":0xFFFFFFFF, "UINT64":0xFFFFFFFFFFFFFFFF}
## The constructor
#
# @param self The object pointer
@@ -204,10 +206,15 @@
if Inf._Defs != None and len(Inf._Defs) > 0:
self.OptRomDefs.update(Inf._Defs)
+
self.PatchPcds = []
InfPcds = Inf.Pcds
Platform = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, self.CurrentArch, GenFdsGlobalVariable.TargetName, GenFdsGlobalVariable.ToolChainTag]
FdfPcdDict = GenFdsGlobalVariable.FdfParser.Profile.PcdDict
+
+ # Workaround here: both build and GenFds tool convert the workspace path to lower case
+ # But INF file path in FDF and DSC file may have real case characters.
+ # Try to convert the path to lower case to see if PCDs value are override by DSC.
DscModules = {}
for DscModule in Platform.Modules:
DscModules[str(DscModule).lower()] = Platform.Modules[DscModule]
@@ -217,6 +224,7 @@
continue
if Pcd.Type != 'PatchableInModule':
continue
+ # Override Patchable PCD value by the value from DSC
PatchPcd = None
InfLowerPath = str(PathClassObj).lower()
if InfLowerPath in DscModules and PcdKey in DscModules[InfLowerPath].Pcds:
@@ -227,16 +235,22 @@
if PatchPcd and Pcd.Type == PatchPcd.Type:
DefaultValue = PatchPcd.DefaultValue
DscOverride = True
+
+ # Override Patchable PCD value by the value from FDF
FdfOverride = False
if PcdKey in FdfPcdDict:
DefaultValue = FdfPcdDict[PcdKey]
FdfOverride = True
+
if not DscOverride and not FdfOverride:
continue
+ # Check value, if value are equal, no need to patch
if Pcd.DatumType == "VOID*":
if Pcd.DefaultValue == DefaultValue or DefaultValue in [None, '']:
continue
+ # Get the string size from FDF or DSC
if DefaultValue[0] == 'L':
+ # Remove L"", but the '\0' must be appended
MaxDatumSize = str((len(DefaultValue) - 2) * 2)
elif DefaultValue[0] == '{':
MaxDatumSize = str(len(DefaultValue.split(',')))
@@ -244,6 +258,7 @@
MaxDatumSize = str(len(DefaultValue) - 1)
if DscOverride:
Pcd.MaxDatumSize = PatchPcd.MaxDatumSize
+ # If the maximum size is not defined in the DSC, try to get the current size from the INF
if Pcd.MaxDatumSize in ['', None]:
Pcd.MaxDatumSize = str(len(Pcd.DefaultValue.split(',')))
else:
@@ -259,6 +274,7 @@
continue
except:
continue
+ # Check the Pcd size and data type
if Pcd.DatumType == "VOID*":
if int(MaxDatumSize) > int(Pcd.MaxDatumSize):
EdkLogger.error("GenFds", GENFDS_ERROR, "The size of VOID* type PCD '%s.%s' exceeds its maximum size %d bytes." \
@@ -306,7 +322,7 @@
return EfiFile
Basename = os.path.basename(EfiFile)
Output = os.path.join(self.OutputPath, Basename)
- CopyLongFilePath(EfiFile, Output)
+ shutil.copy(EfiFile, Output)
for Pcd in self.PatchPcds:
RetVal, RetStr = PatchBinaryFile(Output, int(Pcd.Offset, 0), Pcd.DatumType, Pcd.DefaultValue, Pcd.MaxDatumSize)
if RetVal:
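[Editor's note] Illustration only: the _MAX_SIZE_TYPE table added above maps each fixed-width datum type to its largest value, which makes it straightforward to reject an override that no longer fits. A hypothetical range check built on that table:

    # Editor's sketch of a range check using the _MAX_SIZE_TYPE table above.
    _MAX_SIZE_TYPE = {"BOOLEAN": 0x01, "UINT8": 0xFF, "UINT16": 0xFFFF,
                      "UINT32": 0xFFFFFFFF, "UINT64": 0xFFFFFFFFFFFFFFFF}

    def fits_datum_type(datum_type, value):
        return int(value, 0) <= _MAX_SIZE_TYPE.get(datum_type, 0xFFFFFFFFFFFFFFFF)

    # fits_datum_type('UINT16', '0x1234')  -> True
    # fits_datum_type('UINT16', '0x12345') -> False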
Modified: trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/Workspace/MetaFileParser.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -499,6 +499,8 @@
# parse the file line by line
IsFindBlockComment = False
GetHeaderComment = False
+ TailComments = []
+ SectionComments = []
Comments = []
for Index in range(0, len(Content)):
@@ -510,6 +512,9 @@
if Line == '':
if Comment:
Comments.append((Comment, Index + 1))
+ elif GetHeaderComment:
+ SectionComments.extend(Comments)
+ Comments = []
continue
if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
IsFindBlockComment = True
@@ -530,6 +535,8 @@
self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', 'COMMON',
'COMMON', self._Owner[-1], LNo, -1, LNo, -1, 0)
GetHeaderComment = True
+ else:
+ TailComments.extend(SectionComments + Comments)
Comments = []
self._SectionHeaderParser()
# Check invalid sections
@@ -605,9 +612,16 @@
self._Store(MODEL_META_DATA_COMMENT, Comment, '', '', Arch, Platform,
LastItem, LineNo, -1, LineNo, -1, 0)
Comments = []
+ SectionComments = []
+ TailComments.extend(SectionComments + Comments)
if IsFindBlockComment:
EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
File=self.MetaFile)
+
+ # If there are tail comments in INF file, save to database whatever the comments are
+ for Comment in TailComments:
+ self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', 'COMMON',
+ 'COMMON', self._Owner[-1], -1, -1, -1, -1, 0)
self._Done()
## Data parser for the format in which there's path
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-12-06 01:44:58 UTC (rev 2622)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-12-06 02:52:50 UTC (rev 2623)
@@ -328,6 +328,8 @@
return False
def _GetSkuIdentifier(self):
+ if self._SkuName:
+ return self._SkuName
if self._SkuIdentifier == None:
if self._Header == None:
self._GetHeaderInfo()
@@ -337,16 +339,14 @@
if self._SkuName == None:
if self._Header == None:
self._GetHeaderInfo()
- if self._SkuName == None or self._SkuName not in self.SkuIds:
+ if (self._SkuName == None or self._SkuName not in self.SkuIds):
self._SkuName = 'DEFAULT'
return self._SkuName
## Override SKUID_IDENTIFIER
def _SetSkuName(self, Value):
- if Value in self.SkuIds:
- self._SkuName = Value
- # Needs to re-retrieve the PCD information
- self._Pcds = None
+ self._SkuName = Value
+ self._Pcds = None
def _GetFdfFile(self):
if self._FlashDefinition == None:
@@ -829,8 +829,8 @@
)
for pcd in Pcds.values():
- if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
+ pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
+ if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '', '', '', valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@@ -844,14 +844,24 @@
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
- if SkuObj.SkuUsageType == SkuObj.MULTIPLE:
- if pcd.DatumType == "VOID*":
- MaxSize = int(pcd.MaxDatumSize,0)
- for (skuname,skuobj) in pcd.SkuInfoList.items():
- datalen = len(skuobj.DefaultValue)
- if datalen>MaxSize:
- MaxSize = datalen
- pcd.MaxDatumSize = str(MaxSize)
+
+ if pcd.MaxDatumSize.strip():
+ MaxSize = int(pcd.MaxDatumSize,0)
+ else:
+ MaxSize = 0
+ if pcdDecObject.DatumType == 'VOID*':
+ for (skuname,skuobj) in pcd.SkuInfoList.items():
+ if skuobj.DefaultValue.startswith("L"):
+ datalen = len(skuobj.DefaultValue) * 2
+ elif skuobj.DefaultValue.startswith("{"):
+ datalen = len(skuobj.DefaultValue.split(","))
+ else:
+ datalen = len(skuobj.DefaultValue)
+ if datalen>MaxSize:
+ MaxSize = datalen
+ if MaxSize % 2:
+ MaxSize += 1
+ pcd.MaxDatumSize = str(MaxSize)
return Pcds
@@ -911,8 +921,8 @@
for pcd in Pcds.values():
SkuInfoObj = pcd.SkuInfoList.values()[0]
- if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
+ pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
+ if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@@ -926,6 +936,25 @@
if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
+
+
+ if pcd.MaxDatumSize.strip():
+ MaxSize = int(pcd.MaxDatumSize,0)
+ else:
+ MaxSize = 0
+ if pcdDecObject.DatumType == 'VOID*':
+ for (skuname,skuobj) in pcd.SkuInfoList.items():
+ if skuobj.DefaultValue.startswith("L"):
+ datalen = len(skuobj.DefaultValue) * 2
+ elif skuobj.DefaultValue.startswith("{"):
+ datalen = len(skuobj.DefaultValue.split(","))
+ else:
+ datalen = len(skuobj.DefaultValue)
+ if datalen>MaxSize:
+ MaxSize = datalen
+ if MaxSize % 2:
+ MaxSize += 1
+ pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Retrieve dynamic VPD PCD settings
@@ -987,8 +1016,8 @@
)
for pcd in Pcds.values():
SkuInfoObj = pcd.SkuInfoList.values()[0]
+ pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
valuefromDec = pcdDecObject.DefaultValue
SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '','',SkuInfoObj.VpdOffset, valuefromDec)
pcd.SkuInfoList['DEFAULT'] = SkuInfo
@@ -1002,14 +1031,23 @@
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
- if SkuObj.SkuUsageType == SkuObj.MULTIPLE:
- if pcd.MaxDatumSize.strip():
- MaxSize = int(pcd.MaxDatumSize,0)
- for (skuname,skuobj) in pcd.SkuInfoList.items():
- datalen = len(skuobj.DefaultValue)
- if datalen>MaxSize:
- MaxSize = datalen
- pcd.MaxDatumSize = str(MaxSize)
+ if pcd.MaxDatumSize.strip():
+ MaxSize = int(pcd.MaxDatumSize,0)
+ else:
+ MaxSize = 0
+ if pcdDecObject.DatumType == 'VOID*':
+ for (skuname,skuobj) in pcd.SkuInfoList.items():
+ if skuobj.DefaultValue.startswith("L"):
+ datalen = len(skuobj.DefaultValue) * 2
+ elif skuobj.DefaultValue.startswith("{"):
+ datalen = len(skuobj.DefaultValue.split(","))
+ else:
+ datalen = len(skuobj.DefaultValue)
+ if datalen>MaxSize:
+ MaxSize = datalen
+ if MaxSize % 2:
+ MaxSize += 1
+ pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Add external modules
@@ -1516,6 +1554,7 @@
## Set all internal used members of InfBuildData to None
def _Clear(self):
self._HeaderComments = None
+ self._TailComments = None
self._Header_ = None
self._AutoGenVersion = None
self._BaseName = None
@@ -1608,7 +1647,13 @@
for Record in RecordList:
self._HeaderComments.append(Record[0])
return self._HeaderComments
-
+ def _GetTailComments(self):
+ if not self._TailComments:
+ self._TailComments = []
+ RecordList = self._RawData[MODEL_META_DATA_TAIL_COMMENT]
+ for Record in RecordList:
+ self._TailComments.append(Record[0])
+ return self._TailComments
## Retrieve all information in [Defines] section
#
# (Retriving all [Defines] information in one-shot is just to save time.)
@@ -2443,6 +2488,7 @@
Platform = property(_GetPlatform, _SetPlatform)
HeaderComments = property(_GetHeaderComments)
+ TailComments = property(_GetTailComments)
AutoGenVersion = property(_GetInfVersion)
BaseName = property(_GetBaseName)
ModuleType = property(_GetModuleType)
|
|
From: <lg...@us...> - 2013-12-26 01:55:27
|
Revision: 2629
http://sourceforge.net/p/edk2-buildtools/code/2629
Author: lgao4
Date: 2013-12-26 01:55:21 +0000 (Thu, 26 Dec 2013)
Log Message:
-----------
Fixed the error that the PCD size is incorrect for VOID* PCDs.
Signed-off-by: Feng, Bob C <bob...@in...>
Reviewed-by: Hess Chen <hes...@in...>
Reviewed-by: Liu, Yingke D <yin...@in...>
Reviewed-by: Zeng, Star <sta...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py
trunk/BaseTools/Source/Python/Common/Misc.py
trunk/BaseTools/Source/Python/Common/String.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py
===================================================================
--- trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2013-12-18 08:52:05 UTC (rev 2628)
+++ trunk/BaseTools/Source/Python/AutoGen/GenPcdDb.py 2013-12-26 01:55:21 UTC (rev 2629)
@@ -528,13 +528,20 @@
if RawDataList is None:
RawDataList = []
DbItemList.__init__(self, ItemSize, DataList, RawDataList)
+ def GetListSize(self):
+ length = 0
+ for Data in self.RawDataList:
+ length += (1 + len(Data[1]))
+ return length * self.ItemSize
def PackData(self):
- PackStr = "=HH"
+ PackStr = "=H"
Buffer = ''
for Data in self.RawDataList:
Buffer += pack(PackStr,
- GetIntegerValue(Data[0]),
- GetIntegerValue(Data[1]))
+ GetIntegerValue(Data[0]))
+ for subData in Data[1]:
+ Buffer += pack(PackStr,
+ GetIntegerValue(subData))
return Buffer
## DbStringItemList
@@ -732,7 +739,7 @@
DbPcdNameOffsetTable = DbItemList(4,RawDataList = PcdNameOffsetTable)
SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
- DbSizeTableValue = DbSizeTableItemList(4, RawDataList = SizeTableValue)
+ DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
@@ -812,7 +819,7 @@
SkuIndexIndexTable = [(0) for i in xrange(len(Dict['SKU_INDEX_VALUE']))]
SkuIndexIndexTable[0] = 0 #Dict['SKU_INDEX_VALUE'][0][0]
for i in range(1,len(Dict['SKU_INDEX_VALUE'])):
- SkuIndexIndexTable[i] = SkuIndexIndexTable[i-1]+Dict['SKU_INDEX_VALUE'][i-1][0]
+ SkuIndexIndexTable[i] = SkuIndexIndexTable[i-1]+Dict['SKU_INDEX_VALUE'][i-1][0] + 1
for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
DbIndex = 0
DbOffset = FixedHeaderLen
@@ -829,7 +836,7 @@
LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
# if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
- SkuIndexTabalOffset = SkuIdTableOffset + Dict['SKUID_VALUE'][0]
+ SkuIndexTabalOffset = SkuIdTableOffset + Dict['SKUID_VALUE'][0] + 1
if (TokenTypeValue & (0x2 << 28)):
SkuTable[SkuHeaderIndex] = (DbOffset|int(TokenTypeValue & ~(0x2<<28)), SkuIndexTabalOffset + SkuIndexIndexTable[SkuHeaderIndex])
LocalTokenNumberTable[LocalTokenNumberTableIndex] = (SkuTableOffset + SkuHeaderIndex * 8) | int(TokenTypeValue)
@@ -1106,6 +1113,7 @@
GuidList = []
i = 0
for Pcd in Platform.DynamicPcdList:
+ VoidStarTypeCurrSize = []
i += 1
CName = Pcd.TokenCName
TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
@@ -1258,11 +1266,8 @@
# Also add the VOID* string of VPD PCD to SizeTable
if Pcd.DatumType == 'VOID*':
NumberOfSizeItems += 1
- Dict['SIZE_TABLE_CNAME'].append(CName)
- Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
# For VPD type of PCD, its current size is equal to its MAX size.
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
+ VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
continue
if Pcd.DatumType == 'VOID*':
@@ -1285,29 +1290,31 @@
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '"':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
- Size = len(DefaultValueBinStructure.replace(',',' ').split())
+ Size = len(Sku.DefaultValue) -2 + 1
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
elif Sku.DefaultValue[0] == '{':
DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
- Size = len(DefaultValueBinStructure.replace(',',' ').split())
+ Size = len(Sku.DefaultValue.split(","))
Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
StringHeadOffsetList.append(str(StringTableSize) + 'U')
StringDbOffsetList.append(StringTableSize)
- Dict['SIZE_TABLE_CNAME'].append(CName)
- Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
if MaxDatumSize < Size:
- MaxDatumSize = Size
- Size = MaxDatumSize
- if Size % 2:
- Size += 1
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append(str(Size) + 'U')
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
- Dict['STRING_TABLE_LENGTH'].append(Size)
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Platform))
+ else:
+ MaxDatumSize = Size
+ StringTabLen = MaxDatumSize
+ if StringTabLen % 2:
+ StringTabLen += 1
+ if Sku.VpdOffset == '':
+ VoidStarTypeCurrSize.append(str(Size) + 'U')
+ Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
StringTableIndex += 1
- StringTableSize += (Size)
+ StringTableSize += (StringTabLen)
else:
if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
Pcd.TokenTypeList += ['PCD_TYPE_DATA']
@@ -1333,8 +1340,14 @@
DbValueList.append(Sku.DefaultValue)
Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
+ if Pcd.DatumType == 'VOID*':
+ Dict['SIZE_TABLE_CNAME'].append(CName)
+ Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
+ Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
- SkuIndexTableTmp[0] = len(SkuIndexTableTmp)
+
+ SkuIndexTableTmp[0] = len(SkuIndexTableTmp) - 1
if len(Pcd.SkuInfoList) > 1:
Dict['SKU_INDEX_VALUE'].append(SkuIndexTableTmp)
@@ -1511,7 +1524,7 @@
if Dict['SIZE_TABLE_CNAME'] == []:
Dict['SIZE_TABLE_CNAME'].append('')
Dict['SIZE_TABLE_GUID'].append('')
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append('0U')
+ Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
if NumberOfLocalTokens != 0:
@@ -1534,7 +1547,7 @@
if NumberOfSkuEnabledPcd != 0:
Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
- Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE'])
+ Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
if NumberOfLocalTokens == 0:
Modified: trunk/BaseTools/Source/Python/Common/Misc.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/Misc.py 2013-12-18 08:52:05 UTC (rev 2628)
+++ trunk/BaseTools/Source/Python/Common/Misc.py 2013-12-26 01:55:21 UTC (rev 2629)
@@ -1255,7 +1255,12 @@
Size = FieldList[2]
else:
if Type == 'VOID*':
- Size = str(len(Value))
+ if Value.startswith("L"):
+ Size = str((len(Value)- 3 + 1) * 2)
+ elif Value.startswith("{"):
+ Size = str(len(Value.split(",")))
+ else:
+ Size = str(len(Value) -2 + 1 )
if DataType == 'VOID*':
IsValid = (len(FieldList) <= 3)
else:
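[Editor's note] Illustration of the size rule added above, with hypothetical inputs: the current size of a VOID* value is derived from its literal form, so UCS-2 strings count two bytes per character plus a terminator, byte arrays count their elements, and ASCII strings count their characters plus a terminator.

    # Editor's sketch of the VOID* size calculation from the hunk above.
    def void_star_size(value):
        if value.startswith("L"):           # L"abc": 2 bytes per char plus NUL
            return (len(value) - 3 + 1) * 2
        if value.startswith("{"):           # {0x01, 0x02}: number of elements
            return len(value.split(","))
        return len(value) - 2 + 1           # "abc": chars plus NUL

    assert void_star_size('L"abc"') == 8
    assert void_star_size('{0x01, 0x02}') == 2
    assert void_star_size('"abc"') == 4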
Modified: trunk/BaseTools/Source/Python/Common/String.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/String.py 2013-12-18 08:52:05 UTC (rev 2628)
+++ trunk/BaseTools/Source/Python/Common/String.py 2013-12-26 01:55:21 UTC (rev 2629)
@@ -809,7 +809,7 @@
else:
return "{%s, 0x00,0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
elif String.startswith('{'):
- StringLen = len(String[1:-1])
+ StringLen = len(String.split(","))
if StringLen % 2:
return "{%s, 0x00}" % ", ".join([ C for C in String[1:-1].split(',')])
else:
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-12-18 08:52:05 UTC (rev 2628)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2013-12-26 01:55:21 UTC (rev 2629)
@@ -814,6 +814,16 @@
if (PcdCName,TokenSpaceGuid) in Pcds.keys():
pcdObject = Pcds[PcdCName,TokenSpaceGuid]
pcdObject.SkuInfoList[SkuName] = SkuInfo
+ if MaxDatumSize.strip():
+ CurrentMaxSize = int(MaxDatumSize.strip(),0)
+ else:
+ CurrentMaxSize = 0
+ if pcdObject.MaxDatumSize:
+ PcdMaxSize = int(pcdObject.MaxDatumSize,0)
+ else:
+ PcdMaxSize = 0
+ if CurrentMaxSize > PcdMaxSize:
+ pcdObject.MaxDatumSize = str(CurrentMaxSize)
else:
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@@ -843,27 +853,7 @@
if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
-
-
- if pcd.MaxDatumSize.strip():
- MaxSize = int(pcd.MaxDatumSize,0)
- else:
- MaxSize = 0
- if pcdDecObject.DatumType == 'VOID*':
- for (skuname,skuobj) in pcd.SkuInfoList.items():
- if skuobj.DefaultValue.startswith("L"):
- datalen = len(skuobj.DefaultValue) * 2
- elif skuobj.DefaultValue.startswith("{"):
- datalen = len(skuobj.DefaultValue.split(","))
- else:
- datalen = len(skuobj.DefaultValue)
- if datalen>MaxSize:
- MaxSize = datalen
- if MaxSize % 2:
- MaxSize += 1
- pcd.MaxDatumSize = str(MaxSize)
-
-
+
return Pcds
## Retrieve dynamic HII PCD settings
@@ -944,16 +934,15 @@
MaxSize = 0
if pcdDecObject.DatumType == 'VOID*':
for (skuname,skuobj) in pcd.SkuInfoList.items():
- if skuobj.DefaultValue.startswith("L"):
- datalen = len(skuobj.DefaultValue) * 2
- elif skuobj.DefaultValue.startswith("{"):
- datalen = len(skuobj.DefaultValue.split(","))
+ datalen = 0
+ if skuobj.HiiDefaultValue.startswith("L"):
+ datalen = (len(skuobj.HiiDefaultValue)- 3 + 1) * 2
+ elif skuobj.HiiDefaultValue.startswith("{"):
+ datalen = len(skuobj.HiiDefaultValue.split(","))
else:
- datalen = len(skuobj.DefaultValue)
+ datalen = len(skuobj.HiiDefaultValue) -2 + 1
if datalen>MaxSize:
MaxSize = datalen
- if MaxSize % 2:
- MaxSize += 1
pcd.MaxDatumSize = str(MaxSize)
return Pcds
@@ -1001,6 +990,16 @@
if (PcdCName,TokenSpaceGuid) in Pcds.keys():
pcdObject = Pcds[PcdCName,TokenSpaceGuid]
pcdObject.SkuInfoList[SkuName] = SkuInfo
+ if MaxDatumSize.strip():
+ CurrentMaxSize = int(MaxDatumSize.strip(),0)
+ else:
+ CurrentMaxSize = 0
+ if pcdObject.MaxDatumSize:
+ PcdMaxSize = int(pcdObject.MaxDatumSize,0)
+ else:
+ PcdMaxSize = 0
+ if CurrentMaxSize > PcdMaxSize:
+ pcdObject.MaxDatumSize = str(CurrentMaxSize)
else:
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
@@ -1031,23 +1030,6 @@
pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
del(pcd.SkuInfoList['DEFAULT'])
- if pcd.MaxDatumSize.strip():
- MaxSize = int(pcd.MaxDatumSize,0)
- else:
- MaxSize = 0
- if pcdDecObject.DatumType == 'VOID*':
- for (skuname,skuobj) in pcd.SkuInfoList.items():
- if skuobj.DefaultValue.startswith("L"):
- datalen = len(skuobj.DefaultValue) * 2
- elif skuobj.DefaultValue.startswith("{"):
- datalen = len(skuobj.DefaultValue.split(","))
- else:
- datalen = len(skuobj.DefaultValue)
- if datalen>MaxSize:
- MaxSize = datalen
- if MaxSize % 2:
- MaxSize += 1
- pcd.MaxDatumSize = str(MaxSize)
return Pcds
## Add external modules
|
|
From: <hc...@us...> - 2014-04-30 01:08:29
|
Revision: 2663
http://sourceforge.net/p/edk2-buildtools/code/2663
Author: hchen30
Date: 2014-04-30 01:08:26 +0000 (Wed, 30 Apr 2014)
Log Message:
-----------
1. Added support for the <Word>.<Word> format of PCD variable offset.
Contributed-under: TianoCore Contribution Agreement 1.0
Signed-off-by: Hess Chen <hes...@in...>
Reviewed-by: Liu, Yingke D <yin...@in...>
Reviewed-by: Feng, Bob C <bob...@in...>
Modified Paths:
--------------
trunk/BaseTools/Source/Python/Common/Parsing.py
trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
Modified: trunk/BaseTools/Source/Python/Common/Parsing.py
===================================================================
--- trunk/BaseTools/Source/Python/Common/Parsing.py 2014-04-25 06:32:07 UTC (rev 2662)
+++ trunk/BaseTools/Source/Python/Common/Parsing.py 2014-04-30 01:08:26 UTC (rev 2663)
@@ -1,7 +1,7 @@
## @file
# This file is used to define common parsing related functions used in parsing INF/DEC/DSC process
#
-# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -877,3 +877,38 @@
List[Key] = [Value]
else:
List[Key].append(Value)
+
+## IsValidWord
+#
+# Check whether the word is valid.
+# <Word> ::= (a-zA-Z0-9_)(a-zA-Z0-9_-){0,} Alphanumeric characters with
+# optional
+# dash "-" and/or underscore "_" characters. No whitespace
+# characters are permitted.
+#
+# @param Word: The word string need to be checked.
+#
+def IsValidWord(Word):
+ if not Word:
+ return False
+ #
+ # The first char should be alpha, _ or Digit.
+ #
+ if not Word[0].isalnum() and \
+ not Word[0] == '_' and \
+ not Word[0].isdigit():
+ return False
+
+ LastChar = ''
+ for Char in Word[1:]:
+ if (not Char.isalpha()) and \
+ (not Char.isdigit()) and \
+ Char != '-' and \
+ Char != '_' and \
+ Char != '.':
+ return False
+ if Char == '.' and LastChar == '.':
+ return False
+ LastChar = Char
+
+ return True
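[Editor's note] A hypothetical usage sketch, not part of the patch: the WorkspaceDatabase change below accepts a variable offset written as <Word>.<Word> and validates each half. The stand-in below follows the <Word> grammar given in the comment above; names and inputs are illustrative only.

    import re

    # Simplified stand-in for the IsValidWord helper added above.
    def is_valid_word(word):
        return bool(re.match(r'^[A-Za-z0-9_][A-Za-z0-9_\-]*$', word))

    def offset_format_ok(variable_offset):
        parts = variable_offset.split(".")
        return len(parts) == 2 and all(is_valid_word(p) for p in parts)

    # offset_format_ok('SetupData.VideoMode') -> True
    # offset_format_ok('SetupData.')          -> False (empty second word)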
Modified: trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
===================================================================
--- trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2014-04-25 06:32:07 UTC (rev 2662)
+++ trunk/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py 2014-04-30 01:08:26 UTC (rev 2663)
@@ -37,6 +37,7 @@
from WorkspaceCommon import GetDeclaredPcd
from Common.Misc import AnalyzeDscPcd
import re
+from Common.Parsing import IsValidWord
## Platform build information from DSC file
#
@@ -893,13 +894,23 @@
VariableName, VariableGuid, VariableOffset, DefaultValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
ExceedMax = False
+ FormatCorrect = True
if VariableOffset.isdigit():
if int(VariableOffset,10) > 0xFFFF:
ExceedMax = True
elif re.match(r'[\t\s]*0[xX][a-fA-F0-9]+$',VariableOffset):
if int(VariableOffset,16) > 0xFFFF:
ExceedMax = True
+ # For Offset written in "A.B"
+ elif VariableOffset.find('.') > -1:
+ VariableOffsetList = VariableOffset.split(".")
+ if not (len(VariableOffsetList) == 2
+ and IsValidWord(VariableOffsetList[0])
+ and IsValidWord(VariableOffsetList[1])):
+ FormatCorrect = False
else:
+ FormatCorrect = False
+ if not FormatCorrect:
EdkLogger.error('Build', FORMAT_INVALID, "Invalid syntax or format of the variable offset value is incorrect for %s." % ".".join((TokenSpaceGuid,PcdCName)))
if ExceedMax:
|