author    | lgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524> | 2010-10-11 06:26:52 +0000
committer | lgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524> | 2010-10-11 06:26:52 +0000
commit    | 08dd311f5dc735c595d39faf2e6f7e2810bb79a9 (patch)
tree      | eb384e3139391ac0dabc69e40b4605d9d7315342 /BaseTools/Source/Python
parent    | d69bf66dc1ad8143260dcb8e095d7ed91b211dd7 (diff)
Sync EDKII BaseTools to BaseTools project r2065.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10915 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'BaseTools/Source/Python')
21 files changed, 358 insertions, 192 deletions
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index d95f40b..aaba768 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -162,6 +162,10 @@ class WorkspaceAutoGen(AutoGen):
# parse FDF file to get PCDs in it, if any
if self.FdfFile != None and self.FdfFile != '':
+ #
+ # Make global macros available when parsing FDF file
+ #
+ InputMacroDict.update(self.BuildDatabase.WorkspaceDb._GlobalMacros)
Fdf = FdfParser(self.FdfFile.Path)
Fdf.ParseFile()
PcdSet = Fdf.Profile.PcdDict
@@ -544,9 +548,18 @@ class PlatformAutoGen(AutoGen):
DecPcdEntry = eachDec.Pcds[DecPcd]
if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
(DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ # Print a warning message to let the developer make a decision.
+ EdkLogger.warn("build", "Unreferenced vpd pcd used!",
+ File=self.MetaFile, \
+ ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
DscPcdEntry.DatumType = DecPcdEntry.DatumType
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
- Sku.DefaultValue = DecPcdEntry.DefaultValue
+ # Only fix the value when no value is provided in the DSC file.
+ if (Sku.DefaultValue == "" or Sku.DefaultValue==None):
+ DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
+
VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
# if the offset of a VPD is *, then it need to be fixed up by third party tool.
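For readers following the hunk above, a minimal standalone sketch of the fallback rule it introduces (function and variable names are illustrative, not BaseTools API): the DEC default is used only when the DSC file supplies no value for the VPD PCD.

```python
# Hypothetical sketch of the default-value fallback; names are illustrative.
def resolve_vpd_default(dsc_sku_value, dec_default):
    # Keep the DSC-provided value when there is one; otherwise fall back
    # to the default declared in the DEC file.
    if dsc_sku_value in (None, ""):
        return dec_default
    return dsc_sku_value

assert resolve_vpd_default("", "0x5A") == "0x5A"      # DSC gives no value
assert resolve_vpd_default("0x01", "0x5A") == "0x01"  # DSC value wins
```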
@@ -569,11 +582,9 @@ class PlatformAutoGen(AutoGen):
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
- VpdFileName = self.Platform.VpdFileName
- if VpdFileName == None or VpdFileName == "" :
- VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
- else :
- VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
+
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+
if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
VpdFile.Write(VpdFilePath)
@@ -588,16 +599,13 @@ class PlatformAutoGen(AutoGen):
break
# Call third party GUID BPDG tool.
if BPDGToolName != None:
- VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
- if VpdFileName == None or VpdFileName == "" :
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
- else :
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
if os.path.exists(VpdMapFilePath):
VpdFile.Read(VpdMapFilePath)
@@ -1709,12 +1717,12 @@ class ModuleAutoGen(AutoGen):
self._SourceFileList = []
for F in self.Module.Sources:
# match tool chain
- if F.TagName != "" and F.TagName != self.ToolChain:
+ if F.TagName not in ("", "*", self.ToolChain):
EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
"but [%s] is needed" % (F.TagName, str(F), self.ToolChain))
continue
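The filter above now treats "*" as a wildcard tag in addition to the empty string. A minimal sketch of that membership test (helper name assumed, not part of BaseTools):

```python
# Hypothetical helper showing the wildcard-aware filter; not BaseTools code.
def tool_chain_matches(tag_name, tool_chain):
    # A source file is kept when its TagName is empty, the "*" wildcard,
    # or exactly the tool chain being used for this build.
    return tag_name in ("", "*", tool_chain)

assert tool_chain_matches("*", "GCC44")            # wildcard always matches
assert not tool_chain_matches("MYTOOLS", "GCC44")  # mismatched tag is skipped
```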
# match tool chain family
- if F.ToolChainFamily != "" and F.ToolChainFamily != self.ToolChainFamily:
+ if F.ToolChainFamily not in ("", "*", self.ToolChainFamily):
EdkLogger.debug(
EdkLogger.DEBUG_0,
"The file [%s] must be built by tools of [%s], " \
@@ -2128,14 +2136,6 @@ class ModuleAutoGen(AutoGen):
self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
return self._LibraryAutoGenList
- ## Return build command string
- #
- # @retval string Build command string
- #
- def _GetBuildCommand(self):
- return self.PlatformInfo.BuildCommand
-
-
Module = property(_GetModule)
Name = property(_GetBaseName)
Guid = property(_GetGuid)
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index 2e18f67..b1e4385 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -26,7 +26,7 @@ from BuildEngine import *
import Common.GlobalData as GlobalData
## Regular expression for finding header file inclusions
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:[\"<]?[ \t]*)([\w.\\/() \t]+)(?:[ \t]*[\">]?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)
+gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE)
## Regular expression for matching macro used in header file inclusion
gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
@@ -769,7 +769,7 @@ cleanlib:
Inc = os.path.normpath(Inc)
for SearchPath in [CurrentFilePath] + SearchPathList:
FilePath = os.path.join(SearchPath, Inc)
- if not os.path.exists(FilePath) or FilePath in CurrentFileDependencyList:
+ if not os.path.isfile(FilePath) or FilePath in CurrentFileDependencyList:
continue
FilePath = PathClass(FilePath)
CurrentFileDependencyList.append(FilePath)
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
index 76a0549..1eb65c1 100644
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py
@@ -444,6 +444,13 @@ class UniFileClassObject(object):
# Add a string to list
#
def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
+ for LangNameItem in self.LanguageDef:
+ if Language == LangNameItem[0]:
+ break
+ else:
+ EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
+ % (Language, Name, self.File))
+
if Language not in self.OrderedStringList:
self.OrderedStringList[Language] = []
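The new check relies on Python's for/else construct: the else branch runs only when the loop finishes without a break, i.e. the language was never declared. A self-contained sketch of the same pattern (names are illustrative):

```python
# Illustrative for/else lookup; LanguageDef entries are assumed to be
# (language-code, printable-name) pairs as in the .uni parser.
def check_language_defined(language, language_def):
    for lang_item in language_def:
        if language == lang_item[0]:
            break
    else:
        # Reached only if no break occurred: the language is undefined.
        raise ValueError("language '%s' is not defined" % language)

check_language_defined("en-US", [("en-US", "English"), ("fr-FR", "Francais")])
```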
diff --git a/BaseTools/Source/Python/BPDG/BPDG.py b/BaseTools/Source/Python/BPDG/BPDG.py
index 10692c4..f50e6f7 100644
--- a/BaseTools/Source/Python/BPDG/BPDG.py
+++ b/BaseTools/Source/Python/BPDG/BPDG.py
@@ -25,7 +25,6 @@ import sys
import encodings.ascii
from optparse import OptionParser
-from encodings import gbk
from Common import EdkLogger
from Common.BuildToolError import *
@@ -49,13 +48,11 @@ def main():
# Initialize log system
EdkLogger.Initialize()
- Options, Args = myOptionParser()
+ Options, Args = MyOptionParser()
ReturnCode = 0
- if Options.opt_slient:
- EdkLogger.SetLevel(EdkLogger.ERROR)
- elif Options.opt_verbose:
+ if Options.opt_verbose:
EdkLogger.SetLevel(EdkLogger.VERBOSE)
elif Options.opt_quiet:
EdkLogger.SetLevel(EdkLogger.QUIET)
@@ -64,7 +61,7 @@ def main():
else:
EdkLogger.SetLevel(EdkLogger.INFO)
- if Options.vpd_filename == None:
+ if Options.bin_filename == None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -o option to specify the file name for the VPD binary file")
if Options.filename == None:
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please use the -m option to specify the file name for the mapping file")
@@ -74,14 +71,22 @@ def main():
Force = True
if (Args[0] != None) :
- startBPDG(Args[0], Options.filename, Options.vpd_filename, Force)
+ StartBpdg(Args[0], Options.filename, Options.bin_filename, Force)
else :
EdkLogger.error("BPDG", ATTRIBUTE_NOT_AVAILABLE, "Please specify the file which contain the VPD pcd info.",
None)
return ReturnCode
-
-def myOptionParser():
+
+
+## Parse command line options
+#
+# Use the standard Python module optparse to parse the command line options of this tool.
+#
+# @retval options An optparse.Values object containing the parsed options
+# @retval args Target of BPDG command
+#
+def MyOptionParser():
#
# Process command line firstly.
#
@@ -94,11 +99,9 @@ def myOptionParser():
help=st.MSG_OPTION_DEBUG_LEVEL)
parser.add_option('-v', '--verbose', action='store_true', dest='opt_verbose',
help=st.MSG_OPTION_VERBOSE)
- parser.add_option('-s', '--silent', action='store_true', dest='opt_slient', default=False,
- help=st.MSG_OPTION_SILENT)
parser.add_option('-q', '--quiet', action='store_true', dest='opt_quiet', default=False,
help=st.MSG_OPTION_QUIET)
- parser.add_option('-o', '--vpd-filename', action='store', dest='vpd_filename',
+ parser.add_option('-o', '--vpd-filename', action='store', dest='bin_filename',
help=st.MSG_OPTION_VPD_FILENAME)
parser.add_option('-m', '--map-filename', action='store', dest='filename',
help=st.MSG_OPTION_MAP_FILENAME)
@@ -111,8 +114,22 @@ def myOptionParser():
EdkLogger.info(parser.usage)
sys.exit(1)
return options, args
-
-def startBPDG(InputFileName, MapFileName, VpdFileName, Force):
+
+
+## Start BPDG and call the main functions
+#
+# This method mainly calls GenVPD class member functions to complete
+# BPDG's task. It handles VPD file overriding and provides the interface file
+# information.
+#
+# @param InputFileName The file name that includes the VPD type PCD information
+# @param MapFileName The file name of the map file that stores VPD type PCD information.
+# This file will be generated by the BPDG tool after fixing the offset
+# and adjusting the offset to make the PCD data aligned.
+# @param VpdFileName The file name of the VPD file that holds VPD PCD information.
+# @param Force Whether to overwrite the existing VPD file.
+#
+def StartBpdg(InputFileName, MapFileName, VpdFileName, Force):
if os.path.exists(VpdFileName) and not Force:
print "\nFile %s already exist, Overwrite(Yes/No)?[Y]: " % VpdFileName
choice = sys.stdin.readline()
diff --git a/BaseTools/Source/Python/BPDG/GenVpd.py b/BaseTools/Source/Python/BPDG/GenVpd.py
index 05f5b6c..f0196e0 100644
--- a/BaseTools/Source/Python/BPDG/GenVpd.py
+++ b/BaseTools/Source/Python/BPDG/GenVpd.py
@@ -28,6 +28,10 @@ _FORMAT_CHAR = {1: 'B', 8: 'Q'
}
+## The VPD PCD data structure used to store and process each VPD PCD entry.
+#
+# This class contains methods to format and pack a PCD's value.
+#
class PcdEntry:
def __init__(self, PcdCName, PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
PcdBinOffset=None, PcdBinSize=None):
@@ -54,12 +58,29 @@ class PcdEntry:
"Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" %(self.PcdCName, self.FileName, self.Lineno))
self._GenOffsetValue ()
+
+ ## Analyze the string value to judge whether the PCD's datum type is Boolean or not.
+ #
+ # @param ValueString PCD's value
+ # @param Size PCD's size
+ #
+ # @retval True PCD's datum type is Boolean
+ # @retval False PCD's datum type is not Boolean.
+ #
+ def _IsBoolean(self, ValueString, Size):
+ if (Size == "1"):
+ if ValueString.upper() in ["TRUE", "FALSE"]:
+ return True
+ elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
+ return True
- def _IsBoolean(self, ValueString):
- if ValueString.upper() in ["TRUE", "FALSE"]:
- return True
return False
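In other words, a value is now treated as Boolean only when the PCD size is 1 and the value is either TRUE/FALSE or a 0/1 literal. A standalone sketch of that test (semantics assumed from the hunk above, not the BaseTools source):

```python
# Sketch of the size-aware Boolean test; mirrors the behavior described above.
def is_boolean(value_string, size):
    if size == "1":
        if value_string.upper() in ("TRUE", "FALSE"):
            return True
        if value_string in ("0", "1", "0x0", "0x1", "0x00", "0x01"):
            return True
    return False

assert is_boolean("0x01", "1")        # one-byte 0/1 literal is Boolean
assert not is_boolean("0x01", "2")    # wider PCDs stay integer-typed
```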
+ ## Convert the PCD's offset value from string to integer.
+ #
+ # This function will try to convert the Offset value from string to integer
+ # for both hexadecimal and decimal.
+ #
def _GenOffsetValue(self):
if self.PcdOffset != "*" :
try:
@@ -70,9 +91,14 @@ class PcdEntry:
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
-
+
+ ## Pack a Boolean type VPD PCD's value from string to binary type.
+ #
+ # @param ValueString The boolean type string for pack.
+ #
+ #
def _PackBooleanValue(self, ValueString):
- if ValueString.upper() == "TRUE":
+ if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try:
self.PcdValue = pack(_FORMAT_CHAR[1], 1)
except:
@@ -83,18 +109,65 @@ class PcdEntry:
self.PcdValue = pack(_FORMAT_CHAR[1], 0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
-
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
+ ## Pack an Integer type VPD PCD's value from string to binary type.
+ #
+ # @param ValueString The Integer type string for pack.
+ #
+ #
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR.keys():
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
+
+ if Size == 1:
+ if IntValue < 0:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "PCD can't be set to negative value %d for PCD %s in UINT8 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif IntValue >= 0x100:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT8 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif Size == 2:
+ if IntValue < 0:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "PCD can't be set to negative value %d for PCD %s in UINT16 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif IntValue >= 0x10000:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT16 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif Size == 4:
+ if IntValue < 0:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif IntValue >= 0x100000000:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif Size == 8:
+ if IntValue < 0:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "PCD can't be set to negative value %d for PCD %s in UINT32 datum type(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ elif IntValue >= 0x10000000000000000:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Too large PCD value %d for datum type UINT32 for PCD %s(File: %s Line: %s)." % (IntValue, self.PcdCName, self.FileName, self.Lineno))
+ else:
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
+ "Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
+
try:
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
-
+
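The added branches reject negative values and values that overflow the declared datum size before struct.pack is called, so the error message can name the UINT type. A hedged standalone sketch of the same range check (the size-to-format map is assumed to follow the struct module conventions):

```python
# Illustrative range check before packing; not the BaseTools implementation.
import struct

FORMAT_CHAR = {1: 'B', 2: 'H', 4: 'I', 8: 'Q'}  # assumed size-to-format map

def pack_int(value, size):
    if size not in FORMAT_CHAR:
        raise ValueError("unsupported integer size %d" % size)
    if value < 0:
        raise ValueError("value %d is negative for UINT%d" % (value, 8 * size))
    if value >= (1 << (8 * size)):
        raise ValueError("value %d is too large for UINT%d" % (value, 8 * size))
    return struct.pack(FORMAT_CHAR[size], value)

assert pack_int(255, 1) == struct.pack('B', 255)
```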
+ ## Pack a VOID* type VPD PCD's value from string to binary type.
+ #
+ # The VOID* type of string is divided into 3 sub-types:
+ # 1: L"String", Unicode type string.
+ # 2: "String", Ascii type string.
+ # 3: {bytearray}, only byte-array is supported.
+ #
+ # @param ValueString The VOID* type string to pack.
+ #
def _PackPtrValue(self, ValueString, Size):
if ValueString.startswith('L"'):
self._PackUnicode(ValueString, Size)
@@ -105,7 +178,11 @@ class PcdEntry:
else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
-
+
+ ## Pack an Ascii PCD value.
+ #
+ # An Ascii string for a PCD should be in a quoted format such as "string".
+ #
def _PackString(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
@@ -123,8 +200,12 @@ class PcdEntry:
self.PcdValue= pack('%ds' % Size, ValueString)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
- "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
-
+ "Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
+
+ ## Pack a byte-array PCD value.
+ #
+ # A byte-array for a PCD should be in format as {0x01, 0x02, ...}.
+ #
def _PackByteArray(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
@@ -206,9 +287,18 @@ class PcdEntry:
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
-
-class GenVPD :
-
+
+
+
+## The class implementing the BPDG VPD PCD offset fix process
+#
+# The VPD PCD offset fix process includes:
+# 1. Parse the input guided.txt file and store it in the data structure;
+# 2. Format the input file data to remove unused lines;
+# 3. Fix the offset if needed;
+# 4. Generate output file, including guided.map and guided.bin file;
+#
+class GenVPD :
## Constructor of DscBuildData
#
# Initialize object of GenVPD
@@ -310,7 +400,7 @@ class GenVPD :
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))
- if PCD._IsBoolean(PCD.PcdValue):
+ if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
PCD._PackBooleanValue(PCD.PcdValue)
self.FileLinesList[count] = PCD
count += 1
diff --git a/BaseTools/Source/Python/BPDG/StringTable.py b/BaseTools/Source/Python/BPDG/StringTable.py
index 0db282a..a661da0 100644
--- a/BaseTools/Source/Python/BPDG/StringTable.py
+++ b/BaseTools/Source/Python/BPDG/StringTable.py
@@ -55,7 +55,7 @@ Intel(r) Binary Product Data Generation Tool (Intel(r) BPDG)
Copyright (c) 2010 Intel Corporation All Rights Reserved.
Required Flags:
- -o VPD_FILENAME, --vpd-filename=VPD_FILENAME
+ -o BIN_FILENAME, --vpd-filename=BIN_FILENAME
Specify the file name for the VPD binary file
-m FILENAME, --map-filename=FILENAME
Generate file name for consumption during the build that contains
@@ -67,11 +67,10 @@ Required Flags:
MSG_OPTION_HELP = ("Show this help message and exit.")
MSG_OPTION_DEBUG_LEVEL = ("Print DEBUG statements, where DEBUG_LEVEL is 0-9.")
MSG_OPTION_VERBOSE = ("Print informational statements.")
-MSG_OPTION_SILENT = ("Only the exit code will be returned, all informational and error messages will not be displayed.")
MSG_OPTION_QUIET = ("Returns the exit code and will display only error messages.")
MSG_OPTION_VPD_FILENAME = ("Specify the file name for the VPD binary file.")
MSG_OPTION_MAP_FILENAME = ("Generate file name for consumption during the build that contains the mapping of Pcd name, offset, datum size and value derived from the input file and any automatic calculations.")
-MSG_OPTION_FORCE = ("Disable prompting the user for overwriting files as well as for missing input content.")
+MSG_OPTION_FORCE = ("Will force overwriting existing output files rather than returning an error message.")
ERR_INVALID_DEBUG_LEVEL = ("Invalid level for debug message. Only "
"'DEBUG', 'INFO', 'WARNING', 'ERROR', "
diff --git a/BaseTools/Source/Python/Common/BuildToolError.py b/BaseTools/Source/Python/Common/BuildToolError.py
index 9986ba2..b5dc371 100644
--- a/BaseTools/Source/Python/Common/BuildToolError.py
+++ b/BaseTools/Source/Python/Common/BuildToolError.py
@@ -1,7 +1,7 @@
## @file
# Standardized Error Hanlding infrastructures.
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -125,7 +125,7 @@ gErrorMessage = {
RESOURCE_FULL : "Full",
RESOURCE_OVERFLOW : "Overflow",
RESOURCE_UNDERRUN : "Underrun",
- RESOURCE_UNKNOWN_ERROR : "Unkown error",
+ RESOURCE_UNKNOWN_ERROR : "Unknown error",
ATTRIBUTE_NOT_AVAILABLE : "Not available",
ATTRIBUTE_GET_FAILURE : "Failed to retrieve",
diff --git a/BaseTools/Source/Python/Common/DataType.py b/BaseTools/Source/Python/Common/DataType.py
index 62a23ea..d9d1774 100644
--- a/BaseTools/Source/Python/Common/DataType.py
+++ b/BaseTools/Source/Python/Common/DataType.py
@@ -355,7 +355,6 @@ TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
-TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#
@@ -364,7 +363,6 @@ TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
-TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
TAB_TAT_DEFINES_TARGET = 'TARGET'
TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
diff --git a/BaseTools/Source/Python/Common/Dictionary.py b/BaseTools/Source/Python/Common/Dictionary.py
index e3460e9..de3556b 100644
--- a/BaseTools/Source/Python/Common/Dictionary.py
+++ b/BaseTools/Source/Python/Common/Dictionary.py
@@ -25,26 +25,26 @@ from DataType import *
# @retval 1 Open file failed
#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
- try:
- F = open(FileName,'r')
- Keys = []
- for Line in F:
- if Line.startswith(CommentCharacter):
- continue
- LineList = Line.split(KeySplitCharacter,1)
- if len(LineList) >= 2:
- Key = LineList[0].split()
- if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
- if ValueSplitFlag:
- Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
- else:
- Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
- Keys += [Key[0]]
- F.close()
- return 0
- except:
- EdkLogger.info('Open file failed')
- return 1
+ try:
+ F = open(FileName,'r')
+ Keys = []
+ for Line in F:
+ if Line.startswith(CommentCharacter):
+ continue
+ LineList = Line.split(KeySplitCharacter,1)
+ if len(LineList) >= 2:
+ Key = LineList[0].split()
+ if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
+ if ValueSplitFlag:
+ Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+ else:
+ Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+ Keys += [Key[0]]
+ F.close()
+ return 0
+ except:
+ EdkLogger.info('Open file failed')
+ return 1
## Print the dictionary
#
@@ -53,11 +53,11 @@ def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplit
# @param Dict: The dictionary to be printed
#
def printDict(Dict):
- if Dict != None:
- KeyList = Dict.keys()
- for Key in KeyList:
- if Dict[Key] != '':
- print Key + ' = ' + str(Dict[Key])
+ if Dict != None:
+ KeyList = Dict.keys()
+ for Key in KeyList:
+ if Dict[Key] != '':
+ print Key + ' = ' + str(Dict[Key])
## Print the dictionary
#
@@ -67,9 +67,9 @@ def printDict(Dict):
# @param key: The key of the item to be printed
#
def printList(Key, List):
- if type(List) == type([]):
- if len(List) > 0:
- if key.find(TAB_SPLIT) != -1:
- print "\n" + Key
- for Item in List:
- print Item
+ if type(List) == type([]):
+ if len(List) > 0:
+ if Key.find(TAB_SPLIT) != -1:
+ print "\n" + Key
+ for Item in List:
+ print Item
diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py
index 283e913..195fa5c 100644
--- a/BaseTools/Source/Python/Common/String.py
+++ b/BaseTools/Source/Python/Common/String.py
@@ -23,6 +23,9 @@ import EdkLogger as EdkLogger
from GlobalData import *
from BuildToolError import *
+gHexVerPatt = re.compile('0x[a-f0-9]{4}[a-f0-9]{4}$',re.IGNORECASE)
+gHumanReadableVerPatt = re.compile(r'([1-9][0-9]*|0)\.[0-9]{1,2}$')
+
## GetSplitValueList
#
# Get a value list from a string with multiple values splited with SplitTag
@@ -377,6 +380,34 @@ def GetDefineValue(String, Key, CommentCharacter):
String = CleanString(String)
return String[String.find(Key + ' ') + len(Key + ' ') : ]
+## GetHexVerValue
+#
+# Get a Hex Version Value
+#
+# @param VerString: The version string to be parsed
+#
+#
+# @retval: If VerString is incorrectly formatted, return "None" which will break the build.
+# If VerString is correctly formatted, return a Hex value of the Version Number (0xmmmmnnnn)
+# where mmmm is the major number and nnnn is the adjusted minor number.
+#
+def GetHexVerValue(VerString):
+ VerString = CleanString(VerString)
+
+ if gHumanReadableVerPatt.match(VerString):
+ ValueList = VerString.split('.')
+ Major = ValueList[0]
+ Minor = ValueList[1]
+ if len(Minor) == 1:
+ Minor += '0'
+ DeciValue = (int(Major) << 16) + int(Minor);
+ return "0x%08x"%DeciValue
+ elif gHexVerPatt.match(VerString):
+ return VerString
+ else:
+ return None
+
+
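A worked example of the conversion implemented above: a human-readable version such as "1.02" becomes 0xmmmmnnnn, where the single-digit minor part is extended on the right (so "2.5" is read as "2.50"), and an already-hex 0xXXXXXXXX value passes through unchanged. The standalone sketch below mirrors GetHexVerValue without the CleanString step:

```python
# Standalone mirror of the version conversion described above (no CleanString).
import re

hex_ver = re.compile(r'0x[a-f0-9]{8}$', re.IGNORECASE)
readable_ver = re.compile(r'([1-9][0-9]*|0)\.[0-9]{1,2}$')

def get_hex_ver_value(ver_string):
    if readable_ver.match(ver_string):
        major, minor = ver_string.split('.')
        if len(minor) == 1:
            minor += '0'                     # "2.5" is treated as "2.50", not "2.05"
        return "0x%08x" % ((int(major) << 16) + int(minor))
    if hex_ver.match(ver_string):
        return ver_string
    return None                              # unsupported format fails the build

assert get_hex_ver_value("1.02") == "0x00010002"
assert get_hex_ver_value("0x0002000a") == "0x0002000a"
assert get_hex_ver_value("bogus") is None
```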
## GetSingleValueOfKeyFromLines
#
# Parse multiple strings as below to get value of each definition line
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
index fc5d589..a7dec65 100644
--- a/BaseTools/Source/Python/Common/TargetTxtClassObject.py
+++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define each component of Target.txt file
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -37,7 +37,6 @@ class TargetTxtClassObject(object):
DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM : '',
DataType.TAB_TAT_DEFINES_ACTIVE_MODULE : '',
DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF : '',
- DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD : '',
DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : '',
DataType.TAB_TAT_DEFINES_TARGET : [],
DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG : [],
@@ -102,12 +101,6 @@ class TargetTxtClassObject(object):
elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
self.TargetTxtDictionary[Key] = Value.split()
- elif Key == DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD:
- if Value not in ["Enable", "Disable"]:
- EdkLogger.error("build", FORMAT_INVALID, "Invalid setting of [%s]: %s." % (Key, Value),
- ExtraData="\tSetting must be one of [Enable, Disable]",
- File=FileName)
- self.TargetTxtDictionary[Key] = Value
elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
try:
V = int(Value, 0)
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py
index 549f76c..b5cd5ee 100644
--- a/BaseTools/Source/Python/Common/ToolDefClassObject.py
+++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -23,7 +23,7 @@ from BuildToolError import *
from TargetTxtClassObject import *
##
-# Static vailabes used for pattern
+# Static variables used for pattern
#
gMacroRefPattern = re.compile('(DEF\([^\(\)]+\))')
gEnvRefPattern = re.compile('(ENV\([^\(\)]+\))')
diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py
index 0111744..5f92fa5 100644
--- a/BaseTools/Source/Python/Common/VpdInfoFile.py
+++ b/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -219,28 +219,23 @@ class VpdInfoFile:
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
-def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):
+def CallExtenalBPDGTool(ToolPath, VpdFileName):
assert ToolPath != None, "Invalid parameter ToolPath"
- assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
+ assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
- OutputDir = os.path.dirname(VpdFilePath)
- if (VpdFileName == None or VpdFileName == "") :
- FileName = os.path.basename(VpdFilePath)
- BaseName, ext = os.path.splitext(FileName)
- OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
- OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
- else :
- OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)
- OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)
+ OutputDir = os.path.dirname(VpdFileName)
+ FileName = os.path.basename(VpdFileName)
+ BaseName, ext = os.path.splitext(FileName)
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
try:
PopenObject = subprocess.Popen([ToolPath,
'-o', OutputBinFileName,
'-m', OutputMapFileName,
- '-s',
+ '-q',
'-f',
- '-v',
- VpdFilePath],
+ VpdFileName],
stdout=subprocess.PIPE,
stderr= subprocess.PIPE)
except Exception, X:
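With the VPD_FILENAME override gone, the .bin and .map names are always derived from the guid .txt path handed to the tool. A hedged sketch of the simplified invocation (wrapper name and return shape are illustrative; the real code lives in VpdInfoFile.CallExtenalBPDGTool):

```python
# Illustrative wrapper around the BPDG tool invocation; not BaseTools code.
import os
import subprocess

def call_bpdg(tool_path, vpd_file_name):
    output_dir = os.path.dirname(vpd_file_name)
    base_name = os.path.splitext(os.path.basename(vpd_file_name))[0]
    cmd = [tool_path,
           '-o', os.path.join(output_dir, "%s.bin" % base_name),
           '-m', os.path.join(output_dir, "%s.map" % base_name),
           '-q',                      # quiet replaces the removed -s/--silent flag
           '-f',
           vpd_file_name]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return proc.returncode, out, err
```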
diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
index 5096010..7a0123b 100644
--- a/BaseTools/Source/Python/CommonDataClass/CommonClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define common items of class object
#
-# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -53,7 +53,7 @@ class CommonClass(object):
self.HelpText = HelpText
self.HelpTextList = []
-## CommonClass
+## CommonHeaderClass
#
# This class defined common items used in Module/Platform/Package files
#
@@ -301,7 +301,7 @@ class SkuInfoClass(object):
# @retval Rtn Formatted String
#
def __str__(self):
- Rtn = Rtn = 'SkuId = ' + str(self.SkuId) + "," + \
+ Rtn = 'SkuId = ' + str(self.SkuId) + "," + \
'SkuIdName = ' + str(self.SkuIdName) + "," + \
'VariableName = ' + str(self.VariableName) + "," + \
'VariableGuid = ' + str(self.VariableGuid) + "," + \
diff --git a/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py b/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py
index b433299..8f12026 100644
--- a/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py
@@ -150,7 +150,6 @@ class DistributionPackageClass(object):
# script.
#
if __name__ == '__main__':
- pass
D = DistributionPackageClass()
D.GetDistributionPackage(os.getenv('WORKSPACE'), ['MdePkg/MdePkg.dec', 'TianoModulePkg/TianoModulePkg.dec'], ['MdeModulePkg/Application/HelloWorld/HelloWorld.inf'])
Xml = DistributionPackageXml()
diff --git a/BaseTools/Source/Python/PackagingTool/DependencyRules.py b/BaseTools/Source/Python/PackagingTool/DependencyRules.py
index 7956c8a..741736e 100644
--- a/BaseTools/Source/Python/PackagingTool/DependencyRules.py
+++ b/BaseTools/Source/Python/PackagingTool/DependencyRules.py
@@ -1,7 +1,7 @@
## @file
# This file is for installed package information database operations
#
-# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -25,7 +25,7 @@ DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)
## IpiDb
#
-# This class represents the installed package information databse
+# This class represents the installed package information database
# Add/Remove/Get installed distribution package information here.
#
#
@@ -57,7 +57,7 @@ class DependencyRules(object):
return False
- ## Check whether a module depex satified by current workspace.
+ ## Check whether a module depex satisfied by current workspace.
#
# @param ModuleObj:
# @param DpObj:
@@ -103,7 +103,7 @@ class DependencyRules(object):
EdkLogger.verbose("Check package exists in workspace ... DONE!")
- ## Check whether a package depex satified by current workspace.
+ ## Check whether a package depex satisfied by current workspace.
#
# @param ModuleObj:
# @param DpObj:
@@ -135,7 +135,7 @@ class DependencyRules(object):
EdkLogger.verbose("Check DP exists in workspace ... DONE!")
- ## Check whether a DP depex satified by current workspace.
+ ## Check whether a DP depex satisfied by current workspace.
#
# @param ModuleObj:
# @param DpObj:
@@ -158,7 +158,7 @@ class DependencyRules(object):
return True
- ## Check whether a DP depex satified by current workspace.
+ ## Check whether a DP depex satisfied by current workspace.
#
# @param ModuleObj:
# @param DpObj:
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py
index 68c5571..7993023 100644
--- a/BaseTools/Source/Python/TargetTool/TargetTool.py
+++ b/BaseTools/Source/Python/TargetTool/TargetTool.py
@@ -33,7 +33,6 @@ class TargetTool():
self.TargetTxtDictionary = {
TAB_TAT_DEFINES_ACTIVE_PLATFORM : None,
TAB_TAT_DEFINES_TOOL_CHAIN_CONF : None,
- TAB_TAT_DEFINES_MULTIPLE_THREAD : None,
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : None,
TAB_TAT_DEFINES_TARGET : None,
TAB_TAT_DEFINES_TOOL_CHAIN_TAG : None,
@@ -44,7 +43,7 @@ class TargetTool():
def LoadTargetTxtFile(self, filename):
if os.path.exists(filename) and os.path.isfile(filename):
- return self.ConvertTextFileToDict(filename, '#', '=')
+ return self.ConvertTextFileToDict(filename, '#', '=')
else:
raise ParseError('LoadTargetTxtFile() : No Target.txt file exists.')
return 1
@@ -64,7 +63,7 @@ class TargetTool():
Key = LineList[0].strip()
if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary.keys():
if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM or Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF \
- or Key == TAB_TAT_DEFINES_MULTIPLE_THREAD or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
+ or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
or Key == TAB_TAT_DEFINES_ACTIVE_MODULE:
self.TargetTxtDictionary[Key] = LineList[1].replace('\\', '/').strip()
elif Key == TAB_TAT_DEFINES_TARGET or Key == TAB_TAT_DEFINES_TARGET_ARCH \
@@ -149,15 +148,13 @@ def GetConfigureKeyValue(self, Key):
else:
EdkLogger.error("TagetTool", BuildToolError.FILE_NOT_FOUND,
"Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
- elif Key == TAB_TAT_DEFINES_MULTIPLE_THREAD and self.Opt.NUM != None:
- if self.Opt.NUM >= 2:
- Line = "%-30s = %s\n" % (Key, 'Enable')
- else:
- Line = "%-30s = %s\n" % (Key, 'Disable')
+
+ elif self.Opt.NUM >= 2:
+ Line = "%-30s = %s\n" % (Key, 'Enable')
+ elif self.Opt.NUM <= 1:
+ Line = "%-30s = %s\n" % (Key, 'Disable')
elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM != None:
Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
- elif Key == TAB_TAT_DEFINES_MULTIPLE_THREAD and self.Opt.ENABLE_MULTI_THREAD != None:
- Line = "%-30s = %s\n" % (Key, self.Opt.ENABLE_MULTI_THREAD)
elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET != None:
Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH != None:
@@ -216,8 +213,6 @@ def MyOptionParser():
help="Specify the build rule configure file, which replaces target.txt's BUILD_RULE_CONF definition. If not specified, the default value Conf/build_rule.txt will be set.")
parser.add_option("-m", "--multithreadnum", action="callback", type="int", dest="NUM", callback=RangeCheckCallback,
help="Specify the multi-thread number which replace target.txt's MAX_CONCURRENT_THREAD_NUMBER. If the value is less than 2, MULTIPLE_THREAD will be disabled. If the value is larger than 1, MULTIPLE_THREAD will be enabled.")
- parser.add_option("-e", "--enablemultithread", action="store", type="choice", choices=['Enable', 'Disable'], dest="ENABLE_MULTI_THREAD",
- help="Specify whether enable multi-thread! If Enable, multi-thread is enabled; If Disable, mutli-thread is disable")
(opt, args)=parser.parse_args()
return (opt, args)
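Per the --multithreadnum help text above, the thread-count value now determines the multi-thread setting on its own, since the -e/--enablemultithread option is removed. A minimal sketch of that rule (threshold taken from the help text; function name is illustrative):

```python
# Illustrative mapping from --multithreadnum to the Enable/Disable setting.
def multiple_thread_setting(thread_count):
    return "Enable" if thread_count >= 2 else "Disable"

assert multiple_thread_setting(4) == "Enable"
assert multiple_thread_setting(1) == "Disable"
```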
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py index 75c08bb..b3ad167 100644 --- a/BaseTools/Source/Python/Trim/Trim.py +++ b/BaseTools/Source/Python/Trim/Trim.py @@ -40,6 +40,10 @@ gPragmaPattern = re.compile("^\s*#pragma\s+pack", re.MULTILINE) gHexNumberPattern = re.compile("0[xX]([0-9a-fA-F]+)") ## Regular expression for matching "Include ()" in asl file gAslIncludePattern = re.compile("^(\s*)[iI]nclude\s*\(\"?([^\"\(\)]+)\"\)", re.MULTILINE) +## Regular expression for matching C style #include "XXX.asl" in asl file +gAslCIncludePattern = re.compile(r'^(\s*)#include\s*[<"]\s*([-\\/\w.]+)\s*[>"]', re.MULTILINE) +## Regular expression for matching constant with 'ULL' and 'UL', 'LL', 'L' postfix +gLongNumberPattern = re.compile("(0[xX][0-9a-fA-F]+|[0-9]+)U?LL", re.MULTILINE) ## Patterns used to convert EDK conventions to EDK2 ECP conventions gImportCodePatterns = [ [ @@ -118,7 +122,7 @@ gIncludedAslFile = [] # @param Target File to store the trimmed content # @param Convert If True, convert standard HEX format to MASM format # -def TrimPreprocessedFile(Source, Target, Convert): +def TrimPreprocessedFile(Source, Target, ConvertHex, TrimLong): CreateDirectory(os.path.dirname(Target)) try: f = open (Source, 'r') @@ -164,8 +168,10 @@ def TrimPreprocessedFile(Source, Target, Convert): % (LineIndexOfOriginalFile + 1)) # convert HEX number format if indicated - if Convert: + if ConvertHex: Line = gHexNumberPattern.sub(r"0\1h", Line) + if TrimLong: + Line = gLongNumberPattern.sub(r"\1", Line) if LineNumber != None: EdkLogger.verbose("Got line directive: line=%d" % LineNumber) @@ -264,31 +270,43 @@ def TrimPreprocessedVfr(Source, Target): ## Read the content ASL file, including ASL included, recursively # -# @param Source File to be read -# @param Indent Spaces before the Include() statement +# @param Source File to be read +# @param Indent Spaces before the Include() statement +# @param IncludePathList The list of external include file # -def DoInclude(Source, Indent=''): +def DoInclude(Source, Indent='', IncludePathList=[]): NewFileContent = [] - # avoid A "include" B and B "include" A - if Source in gIncludedAslFile: - EdkLogger.warn("Trim", "Circular include", - ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), Source)) - return [] - gIncludedAslFile.append(Source) try: - F = open(Source,'r') + for IncludePath in IncludePathList: + IncludeFile = os.path.join(IncludePath, Source) + if os.path.isfile(IncludeFile): + F = open(IncludeFile, "r") + break + else: + EdkLogger.error("Trim", "Failed to find include file %s" % Source) except: EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) + + # avoid A "include" B and B "include" A + IncludeFile = os.path.abspath(os.path.normpath(IncludeFile)) + if IncludeFile in gIncludedAslFile: + EdkLogger.warn("Trim", "Circular include", + ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), IncludeFile)) + return [] + gIncludedAslFile.append(IncludeFile) + for Line in F: Result = gAslIncludePattern.findall(Line) if len(Result) == 0: - NewFileContent.append("%s%s" % (Indent, Line)) - continue + Result = gAslCIncludePattern.findall(Line) + if len(Result) == 0 or os.path.splitext(Result[0][1])[1].lower() not in [".asl", ".asi"]: + NewFileContent.append("%s%s" % (Indent, Line)) + continue CurrentIndent = Indent + Result[0][0] IncludedFile = Result[0][1] - NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent)) + NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList)) 
NewFileContent.append("\n") gIncludedAslFile.pop() @@ -301,19 +319,44 @@ def DoInclude(Source, Indent=''): # # Replace ASL include statement with the content the included file # -# @param Source File to be trimmed -# @param Target File to store the trimmed content +# @param Source File to be trimmed +# @param Target File to store the trimmed content +# @param IncludePathFile The file to log the external include path # -def TrimAslFile(Source, Target): +def TrimAslFile(Source, Target, IncludePathFile): CreateDirectory(os.path.dirname(Target)) - Cwd = os.getcwd() SourceDir = os.path.dirname(Source) if SourceDir == '': SourceDir = '.' - os.chdir(SourceDir) - Lines = DoInclude(Source) - os.chdir(Cwd) + + # + # Add source directory as the first search directory + # + IncludePathList = [SourceDir] + + # + # If additional include path file is specified, append them all + # to the search directory list. + # + if IncludePathFile: + try: + LineNum = 0 + for Line in open(IncludePathFile,'r'): + LineNum += 1 + if Line.startswith("/I") or Line.startswith ("-I"): + IncludePathList.append(Line[2:].strip()) + else: + EdkLogger.warn("Trim", "Invalid include line in include list file.", IncludePathFile, LineNum) + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=IncludePathFile) + + Lines = DoInclude(Source, '', IncludePathList) + + # + # Undef MIN and MAX to avoid collision in ASL source code + # + Lines.insert(0, "#undef MIN\n#undef MAX\n") # save all lines trimmed try: @@ -437,6 +480,10 @@ def Options(): make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true", help="Convert standard hex format (0xabcd) to MASM format (abcdh)"), + make_option("-l", "--trim-long", dest="TrimLong", action="store_true", + help="Remove postfix of long number"), + make_option("-i", "--include-path-file", dest="IncludePathFile", + help="The input file is include path list to search for ASL include file"), make_option("-o", "--output", dest="OutputFile", help="File to store the trimmed content"), make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE, @@ -449,7 +496,7 @@ def Options(): ] # use clearer usage to override default usage message - UsageString = "%prog [-s|-r|-a] [-c] [-v|-d <debug_level>|-q] [-o <output_file>] <input_file>" + UsageString = "%prog [-s|-r|-a] [-c] [-v|-d <debug_level>|-q] [-i <include_path_file>] [-o <output_file>] <input_file>" Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString) Parser.set_defaults(FileType="Vfr") @@ -495,13 +542,13 @@ def Main(): elif CommandOptions.FileType == "Asl": if CommandOptions.OutputFile == None: CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii' - TrimAslFile(InputFile, CommandOptions.OutputFile) + TrimAslFile(InputFile, CommandOptions.OutputFile, CommandOptions.IncludePathFile) elif CommandOptions.FileType == "R8SourceCode": TrimR8Sources(InputFile, CommandOptions.OutputFile) else : if CommandOptions.OutputFile == None: CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii' - TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex) + TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex, CommandOptions.TrimLong) except FatalError, X: import platform import traceback diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py index fb66e41..3c7d7fd 100644 --- 
a/BaseTools/Source/Python/Workspace/MetaFileParser.py +++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py @@ -474,11 +474,11 @@ class InfParser(MetaFileParser): def _DefineParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) self._ValueList[0:len(TokenList)] = TokenList - self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False) if self._ValueList[1] == '': EdkLogger.error('Parser', FORMAT_INVALID, "No value specified", ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1) - + self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False) + ## [nmake] section parser (R8.x style only) def _NmakeParser(self): TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) @@ -655,6 +655,11 @@ class DscParser(MetaFileParser): continue # file private macros elif Line.upper().startswith('DEFINE '): + if self._Enabled < 0: + # Do not parse the macro and add it to self._Macros dictionary if directives + # statement is evaluated to false. + continue + (Name, Value) = self._MacroParser() # Make the defined macro in DSC [Defines] section also # available for FDF file. @@ -676,6 +681,11 @@ class DscParser(MetaFileParser): ) continue elif Line.upper().startswith('EDK_GLOBAL '): + if self._Enabled < 0: + # Do not parse the macro and add it to self._Macros dictionary + # if previous directives statement is evaluated to false. + continue + (Name, Value) = self._MacroParser() for Arch, ModuleType in self._Scope: self._LastItem = self._Store( @@ -802,8 +812,8 @@ class DscParser(MetaFileParser): if not self._SectionName in self._IncludeAllowedSection: EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1, ExtraData="'!include' is not allowed under section [%s]" % self._SectionName) - # the included file must be relative to the parsing file - IncludedFile = os.path.join(self._FileDir, NormPath(self._ValueList[1], self._Macros)) + # the included file must be relative to workspace + IncludedFile = os.path.join(os.environ["WORKSPACE"], NormPath(self._ValueList[1], self._Macros)) Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem) # set the parser status with current status Parser._SectionName = self._SectionName diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py index dad6ecd..9d53fa8 100644 --- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py +++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py @@ -141,7 +141,6 @@ class DscBuildData(PlatformBuildClassObject): self._BuildOptions = None
self._LoadFixAddress = None
self._VpdToolGuid = None
- self._VpdFileName = None
## Get architecture
def _GetArch(self):
@@ -204,9 +203,7 @@ class DscBuildData(PlatformBuildClassObject):
uuid.UUID(Record[1])
except:
EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
- self._VpdToolGuid = Record[1]
- elif Name == TAB_DSC_DEFINES_VPD_FILENAME:
- self._VpdFileName = Record[1]
+ self._VpdToolGuid = Record[1]
# set _Header to non-None in order to avoid database re-querying
self._Header = 'DUMMY'
@@ -350,16 +347,7 @@ class DscBuildData(PlatformBuildClassObject):
if self._VpdToolGuid == None:
self._VpdToolGuid = ''
return self._VpdToolGuid
-
- ## Retrieve the VPD file Name, this is optional in DSC file
- def _GetVpdFileName(self):
- if self._VpdFileName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._VpdFileName == None:
- self._VpdFileName = ''
- return self._VpdFileName
-
+
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
if self._SkuIds == None:
@@ -802,8 +790,7 @@ class DscBuildData(PlatformBuildClassObject):
BsBaseAddress = property(_GetBsBaseAddress)
RtBaseAddress = property(_GetRtBaseAddress)
LoadFixAddress = property(_GetLoadFixAddress)
- VpdToolGuid = property(_GetVpdToolGuid)
- VpdFileName = property(_GetVpdFileName)
+ VpdToolGuid = property(_GetVpdToolGuid)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
LibraryInstances = property(_GetLibraryInstances)
@@ -1330,18 +1317,16 @@ class InfBuildData(ModuleBuildClassObject):
if Name in self:
self[Name] = Record[1]
# some special items in [Defines] section need special treatment
- elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
- if self._Specification == None:
- self._Specification = sdict()
- self._Specification['UEFI_SPECIFICATION_VERSION'] = Record[1]
- elif Name == 'EDK_RELEASE_VERSION':
+ elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
+ if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
+ Name = 'UEFI_SPECIFICATION_VERSION'
if self._Specification == None:
self._Specification = sdict()
- self._Specification[Name] = Record[1]
- elif Name == 'PI_SPECIFICATION_VERSION':
- if self._Specification == None:
- self._Specification = sdict()
- self._Specification[Name] = Record[1]
+ self._Specification[Name] = GetHexVerValue(Record[1])
+ if self._Specification[Name] == None:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
+ "'%s' format is not supported for %s" % (Record[1], Name),
+ File=self.MetaFile, Line=Record[-1])
elif Name == 'LIBRARY_CLASS':
if self._LibraryClass == None:
self._LibraryClass = []
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index af9d3d5..5426595 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -305,7 +305,7 @@ class BuildUnit:
## str() method
#
- # It just returns the string representaion of self.BuildObject
+ # It just returns the string representation of self.BuildObject
#
# @param self The object pointer
#
@@ -943,7 +943,7 @@ class Build():
## Build a module or platform
#
- # Create autogen code and makfile for a module or platform, and the launch
+ # Create autogen code and makefile for a module or platform, and the launch
# "make" command to build it
#
# @param Target The target of build command