author     vanjeff <vanjeff@6f19259b-4bc3-4df7-8a09-765794883524>   2009-09-11 03:14:43 +0000
committer  vanjeff <vanjeff@6f19259b-4bc3-4df7-8a09-765794883524>   2009-09-11 03:14:43 +0000
commit     fd171542e0aa89ac12a09d79608173f48019b14b
tree       b4e1320ab3bc1cb59355f9fcbc361f84ae0c4f0a /BaseTools/Source/Python
parent     f22911b49e8be58d364f9e21f5af6bd3f0513cf7
Sync basetools' source and binary files with r1707 of the basetools project.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@9257 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'BaseTools/Source/Python')
21 files changed, 756 insertions, 351 deletions
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 028c4e3..647e1d0 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -1061,12 +1061,45 @@ class PlatformAutoGen(AutoGen):
#
def _ExpandBuildOption(self, Options):
BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
for Key in Options:
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
- if Family and Tool in self.ToolDefinition and Family != self.ToolDefinition[Tool]["FAMILY"]:
+ if Tool in self.ToolDefinition and Family != "":
+ FamilyIsNull = False
+ if self.ToolDefinition[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
+ if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ continue
+ elif Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
+ continue
+ FamilyMatch = True
+ # expand any wildcard
+ if Target == "*" or Target == self.BuildTarget:
+ if Tag == "*" or Tag == self.ToolChain:
+ if Arch == "*" or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool]:
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ # Build Option Family has been checked, which need't to be checked again for family.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Tool not in self.ToolDefinition or Family =="":
continue
+ # option has been added before
+ if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
+ continue
+
# expand any wildcard
if Target == "*" or Target == self.BuildTarget:
if Tag == "*" or Tag == self.ToolChain:
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
index f689a86..c5d8991 100644
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ b/BaseTools/Source/Python/AutoGen/GenMake.py
@@ -1308,8 +1308,14 @@ ${END}\t@cd $(BUILD_DIR)\n
if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":
FdfFileList = [PlatformInfo.FdfFile]
# macros passed to GenFds
+ # MacroList.append('"%s=%s"' % ("WORKSPACE", GlobalData.gWorkspace))
+ MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource))
+ MacroList.append('"%s=%s"' % ("EDK_SOURCE", GlobalData.gEdkSource))
for MacroName in GlobalData.gGlobalDefines:
- MacroList.append('"%s=%s"' % (MacroName, GlobalData.gGlobalDefines[MacroName]))
+ if GlobalData.gGlobalDefines[MacroName] != "":
+ MacroList.append('"%s=%s"' % (MacroName, GlobalData.gGlobalDefines[MacroName]))
+ else:
+ MacroList.append('"%s"' % MacroName)
else:
FdfFileList = []
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
index e82ad3a..0f64444 100644
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ b/BaseTools/Source/Python/AutoGen/StrGather.py
@@ -171,9 +171,15 @@ def CreateHFileContent(BaseName, UniObjectClass):
if Name != None:
Line = ''
if Referenced == True:
- Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
+ else:
+ Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
else:
- Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ else:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
Str = WriteLine(Str, Line)
Str = WriteLine(Str, '')
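The StrGather change above keeps CreateHFileContent from computing a negative pad width when a string token name overruns the fixed value column. A minimal standalone sketch of the same alignment rule (VALUE_START and format_define are illustrative names, not BaseTools code):

VALUE_START = 60

def format_define(name, token, referenced=True):
    # '#define' for referenced tokens, a commented-out define otherwise.
    prefix = '#define ' if referenced else '// #define '
    pad = VALUE_START - len(prefix + name)
    spacing = ' ' * pad if pad > 0 else ' '   # fall back to one space, as the patch does
    suffix = '' if referenced else ' // UNREFERENCED'
    return prefix + name + spacing + '0x%04X' % token + suffix

print(format_define('STR_SHORT', 0x0012))
print(format_define('STR_A_VERY_LONG_TOKEN_NAME_THAT_OVERRUNS_THE_FIXED_VALUE_COLUMN', 0x0013))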
diff --git a/BaseTools/Source/Python/Common/DscClassObject.py b/BaseTools/Source/Python/Common/DscClassObject.py
index ddccf65..50b6cc5 100644
--- a/BaseTools/Source/Python/Common/DscClassObject.py
+++ b/BaseTools/Source/Python/Common/DscClassObject.py
@@ -385,10 +385,11 @@ class Dsc(DscObject):
for IncludeFile in IncludeFiles:
if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_LIBRARIES, '', IncludeFile[2])
- for NewItem in open(Filename, 'r').readlines():
- if CleanString(NewItem) == '':
- continue
- MergeArches(Libraries, NewItem, Arch)
+ if os.path.exists(Filename):
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ MergeArches(Libraries, NewItem, Arch)
for Record in RecordSet:
if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
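The os.path.exists() guard added above keeps DSC parsing from raising an unhandled IOError when an included library list is missing from the workspace. The same defensive pattern in isolation (merge_arches stands in for the real MergeArches helper):

import os

def merge_include_file(filename, libraries, arch,
                       merge_arches=lambda d, k, a: d.setdefault(k, set()).add(a)):
    # Silently skip include files that do not exist on disk.
    if not os.path.exists(filename):
        return
    with open(filename, 'r') as include_file:
        for line in include_file:
            if line.strip() == '':
                continue
            merge_arches(libraries, line.strip(), arch)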
diff --git a/BaseTools/Source/Python/Common/FdfParserLite.py b/BaseTools/Source/Python/Common/FdfParserLite.py
index 59006fa..5099ed6 100644
--- a/BaseTools/Source/Python/Common/FdfParserLite.py
+++ b/BaseTools/Source/Python/Common/FdfParserLite.py
@@ -1599,7 +1599,7 @@ class FdfParser(object):
if not self.__GetNextWord():
return True
- if not self.__Token in ("SET", "FV", "FILE", "DATA"):
+ if not self.__Token in ("SET", "FV", "FILE", "DATA", "CAPSULE"):
self.__UndoToken()
RegionObj.PcdOffset = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdOffset] = RegionObj.Offset + long(Fd.BaseAddress, 0)
@@ -1620,10 +1620,14 @@ class FdfParser(object):
if not self.__GetNextWord():
return True
- if self.__Token == "FV":
+ elif self.__Token == "FV":
self.__UndoToken()
self.__GetRegionFvType( RegionObj)
+ elif self.__Token == "CAPSULE":
+ self.__UndoToken()
+ self.__GetRegionCapType( RegionObj)
+
elif self.__Token == "FILE":
self.__UndoToken()
self.__GetRegionFileType( RegionObj)
@@ -1664,7 +1668,38 @@ class FdfParser(object):
raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
RegionObj.RegionDataList.append(self.__Token)
-
+
+ ## __GetRegionCapType() method
+ #
+ # Get region capsule data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionCapType(self, RegionObj):
+
+ if not self.__IsKeyword("CAPSULE"):
+ raise Warning("expected Keyword 'CAPSULE' at line", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' at line", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected CAPSULE name at line", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "CAPSULE"
+ RegionObj.RegionDataList.append(self.__Token)
+
+ while self.__IsKeyword("CAPSULE"):
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' at line", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected CAPSULE name at line", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
## __GetRegionFileType() method
#
# Get region file data for region
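The __GetRegionCapType() method added above lets a region entry in the FDF read `CAPSULE = <UiCapsuleName>`, repeated once per capsule placed in that region, in the same way FV regions are written. A toy recognizer for that shape, independent of the real tokenizer (the capsule names are made up):

import re

_CAPSULE_REGION = re.compile(r'^\s*(?:CAPSULE\s*=\s*[\w.]+\s*)+$')

def looks_like_capsule_region(text):
    return bool(_CAPSULE_REGION.match(text))

print(looks_like_capsule_region('CAPSULE = FMP_CAPSULE CAPSULE = RECOVERY_CAPSULE'))  # True
print(looks_like_capsule_region('FV = FVMAIN'))                                       # False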
@@ -2713,7 +2748,7 @@ class FdfParser(object):
Arch = self.__SkippedChars.rstrip(".")
if Arch.upper() not in ("IA32", "X64", "IPF", "EBC", "ARM", "COMMON"):
- raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)
+ raise Warning("Unknown Arch '%s'" % Arch, self.FileName, self.CurrentLineNumber)
ModuleType = self.__GetModuleType()
@@ -2763,7 +2798,7 @@ class FdfParser(object):
"DXE_SMM_DRIVER", "DXE_RUNTIME_DRIVER", \
"UEFI_DRIVER", "UEFI_APPLICATION", "USER_DEFINED", "DEFAULT", "BASE", \
"SECURITY_CORE", "COMBINED_PEIM_DRIVER", "PIC_PEIM", "RELOCATABLE_PEIM", \
- "PE32_PEIM", "BS_DRIVER", "RT_DRIVER", "SAL_RT_DRIVER", "APPLICATION"):
+ "PE32_PEIM", "BS_DRIVER", "RT_DRIVER", "SAL_RT_DRIVER", "APPLICATION", "ACPITABLE", "SMM_DRIVER", "SMM_CORE"):
raise Warning("Unknown Module type At line ", self.FileName, self.CurrentLineNumber)
return self.__Token
@@ -2803,11 +2838,11 @@ class FdfParser(object):
raise Warning("expected FILE At Line ", self.FileName, self.CurrentLineNumber)
if not self.__GetNextWord():
- raise Warning("expected FV type At Line ", self.FileName, self.CurrentLineNumber)
+ raise Warning("expected FFS type At Line ", self.FileName, self.CurrentLineNumber)
Type = self.__Token.strip().upper()
if Type not in ("RAW", "FREEFORM", "SEC", "PEI_CORE", "PEIM",\
- "PEI_DXE_COMBO", "DRIVER", "DXE_CORE", "APPLICATION", "FV_IMAGE"):
+ "PEI_DXE_COMBO", "DRIVER", "DXE_CORE", "APPLICATION", "FV_IMAGE", "SMM_DXE_COMBO", "SMM", "SMM_CORE"):
raise Warning("Unknown FV type At line ", self.FileName, self.CurrentLineNumber)
if not self.__IsToken("="):
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 14f6550..2c1041c 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -418,6 +418,14 @@ def RealPath(File, Dir='', OverrideDir=''):
return NewFile

def RealPath2(File, Dir='', OverrideDir=''):
+ if OverrideDir:
+ NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
+ if NewFile:
+ if OverrideDir[-1] == os.path.sep:
+ return NewFile[len(OverrideDir):], NewFile[0:len(OverrideDir)]
+ else:
+ return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]
+
NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
if NewFile:
if Dir:
@@ -428,10 +436,6 @@ def RealPath2(File, Dir='', OverrideDir=''):
else:
return NewFile, ''
- if OverrideDir:
- NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
- if NewFile:
- return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)]
return None, None

## Check if gvien file exists or not
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py
index 4767645..ea9d0b3 100644
--- a/BaseTools/Source/Python/Ecc/Ecc.py
+++ b/BaseTools/Source/Python/Ecc/Ecc.py
@@ -29,6 +29,7 @@ from Common.FdfClassObject import Fdf
from Common.String import NormPath
from Common import BuildToolError
import c
+import re, string
from Exception import *
## Ecc
@@ -51,29 +52,29 @@ class Ecc(object):
self.IsInit = True
self.ScanSourceCode = True
self.ScanMetaData = True
-
+
# Parse the options and args
self.ParseOption()
# Generate checkpoints list
EccGlobalData.gConfig = Configuration(self.ConfigFile)
-
+
# Generate exception list
EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)
-
+
# Init Ecc database
EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
EccGlobalData.gDb.InitDatabase(self.IsInit)
-
+
# Build ECC database
self.BuildDatabase()
-
+
# Start to check
self.Check()
-
+
# Show report
self.GenReport()
-
+
# Close Database
EccGlobalData.gDb.Close()
@@ -94,7 +95,7 @@ class Ecc(object):
# Clean report table
EccGlobalData.gDb.TblReport.Drop()
EccGlobalData.gDb.TblReport.Create()
-
+
# Build database
if self.IsInit:
if self.ScanSourceCode:
@@ -103,9 +104,9 @@ class Ecc(object):
if self.ScanMetaData:
EdkLogger.quiet("Building database for source code done!")
self.BuildMetaDataFileDatabase()
-
+
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
-
+
## BuildMetaDataFileDatabase
#
# Build the database for meta data files
@@ -115,10 +116,11 @@ class Ecc(object):
Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
#SkipDirs = Read from config file
SkipDirs = EccGlobalData.gConfig.SkipDirList
+ SkipDirString = string.join(SkipDirs, '|')
+ p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % SkipDirString)
for Root, Dirs, Files in os.walk(EccGlobalData.gTarget):
- for Dir in Dirs:
- if Dir.upper() in SkipDirs:
- Dirs.remove(Dir)
+ if p.match(Root.upper()):
+ continue
for Dir in Dirs:
Dirname = os.path.join(Root, Dir)
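Instead of pruning Dirs in place, the rewritten walk above compiles the configured skip directories into a single regular expression and tests each visited root against it. A self-contained version of the pattern (the directory names are examples only):

import os, re

skip_dirs = ['CVS', 'SVN', 'BUILD']   # stands in for EccGlobalData.gConfig.SkipDirList
skip_pattern = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % '|'.join(skip_dirs))

def source_files(root):
    for current, dirs, files in os.walk(root):
        if skip_pattern.match(current.upper()):   # anything under a skipped directory
            continue
        for name in files:
            yield os.path.join(current, name)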
@@ -152,15 +154,15 @@ class Ecc(object):
Filename = os.path.normpath(os.path.join(Root, File))
EdkLogger.quiet("Parsing %s" % Filename)
Op.write("%s\r" % Filename)
- Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
+ Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
continue
Op.close()
-
+
# Commit to database
EccGlobalData.gDb.Conn.commit()
-
+
EdkLogger.quiet("Building database for meta data files done!")
-
+
##
#
# Check each checkpoint
@@ -170,7 +172,7 @@ class Ecc(object):
EccCheck = Check()
EccCheck.Check()
EdkLogger.quiet("Checking done!")
-
+
##
#
# Generate the scan report
@@ -179,7 +181,7 @@ class Ecc(object):
EdkLogger.quiet("Generating report ...")
EccGlobalData.gDb.TblReport.ToCSV(self.ReportFile)
EdkLogger.quiet("Generating report done!")
-
+
def GetRealPathCase(self, path):
TmpPath = path.rstrip(os.sep)
PathParts = TmpPath.split(os.sep)
@@ -193,7 +195,7 @@ class Ecc(object):
for Dir in Dirs:
if Dir.upper() == PathParts[0].upper():
return Dir
-
+
if PathParts[0].strip().endswith(':'):
PathParts[0] = PathParts[0].upper()
ParentDir = PathParts[0]
@@ -201,7 +203,7 @@ class Ecc(object):
if PathParts[0] == '':
RealPath = os.sep
ParentDir = os.sep
-
+
PathParts.remove(PathParts[0]) # need to remove the parent
for Part in PathParts:
Dirs = os.listdir(ParentDir + os.sep)
@@ -212,9 +214,9 @@ class Ecc(object):
break
ParentDir += os.sep
ParentDir += Dir
-
+
return RealPath
-
+
## ParseOption
#
# Parse options
@@ -222,10 +224,10 @@ class Ecc(object):
def ParseOption(self):
EdkLogger.quiet("Loading ECC configuration ... done")
(Options, Target) = self.EccOptionParser()
-
+
# Check workspace envirnoment
if "WORKSPACE" not in os.environ:
- EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
+ EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
ExtraData="WORKSPACE")
else:
EccGlobalData.gWorkspace = os.path.normpath(os.getenv("WORKSPACE"))
@@ -234,7 +236,7 @@ class Ecc(object):
os.environ["WORKSPACE"] = EccGlobalData.gWorkspace
# Set log level
self.SetLogLevel(Options)
-
+
# Set other options
if Options.ConfigFile != None:
self.ConfigFile = Options.ConfigFile
@@ -258,12 +260,12 @@ class Ecc(object):
self.ScanSourceCode = False
if Options.sourcecode != None:
self.ScanMetaData = False
-
+
## SetLogLevel
#
# Set current log level of the tool based on args
#
- # @param Option: The option list including log level setting
+ # @param Option: The option list including log level setting
#
def SetLogLevel(self, Option):
if Option.verbose != None:
@@ -295,19 +297,19 @@ class Ecc(object):
Parser.add_option("-m", "--metadata", action="store_true", type=None, help="Only scan meta-data files information if this option is specified.")
Parser.add_option("-s", "--sourcecode", action="store_true", type=None, help="Only scan source code files information if this option is specified.")
Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Ecc database will not be cleaned except report information if this option is specified.")
- Parser.add_option("-l", "--log filename", action="store", dest="LogFile", help="""If specified, the tool should emit the changes that
- were made by the tool after printing the result message.
- If filename, the emit to the file, otherwise emit to
- standard output. If no modifications were made, then do not
+ Parser.add_option("-l", "--log filename", action="store", dest="LogFile", help="""If specified, the tool should emit the changes that
+ were made by the tool after printing the result message.
+ If filename, the emit to the file, otherwise emit to
+ standard output. If no modifications were made, then do not
create a log file, or output a log message.""")
Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
"including library instances selected, final dependency expression, "\
"and warning messages, etc.")
Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
-
+
(Opt, Args)=Parser.parse_args()
-
+
return (Opt, Args)
##
diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py
index 7f17fcd..961b88a 100644
--- a/BaseTools/Source/Python/GenFds/Capsule.py
+++ b/BaseTools/Source/Python/GenFds/Capsule.py
@@ -21,6 +21,7 @@ import os
import subprocess
import StringIO
from Common.Misc import SaveFileOnChange
+from GenFds import GenFds
T_CHAR_LF = '\n'
@@ -39,17 +40,26 @@ class Capsule (CapsuleClassObject) :
self.BlockSize = None
# For GenFv
self.BlockNum = None
+ self.CapsuleName = None
## Generate capsule
#
# @param self The object pointer
+ # @retval string Generated Capsule file path
#
def GenCapsule(self):
+ if self.UiCapsuleName.upper() + 'cap' in GenFds.ImageBinDict.keys():
+ return GenFds.ImageBinDict[self.UiCapsuleName.upper() + 'cap']
+
+ GenFdsGlobalVariable.InfLogger( "\nGenerate %s Capsule" %self.UiCapsuleName)
CapInfFile = self.GenCapInf()
CapInfFile.writelines("[files]" + T_CHAR_LF)
-
+ CapFileList = []
for CapsuleDataObj in self.CapsuleDataList :
+ CapsuleDataObj.CapsuleName = self.CapsuleName
FileName = CapsuleDataObj.GenCapsuleSubItem()
+ CapsuleDataObj.CapsuleName = None
+ CapFileList.append(FileName)
CapInfFile.writelines("EFI_FILE_NAME = " + \
FileName + \
T_CHAR_LF)
@@ -63,9 +73,14 @@ class Capsule (CapsuleClassObject) :
GenFdsGlobalVariable.GenerateFirmwareVolume(
CapOutputFile,
[self.CapInfFileName],
- Capsule=True
+ Capsule=True,
+ FfsList=CapFileList
)
+
+ GenFdsGlobalVariable.VerboseLogger( "\nGenerate %s Capsule Successfully" %self.UiCapsuleName)
GenFdsGlobalVariable.SharpCounter = 0
+ GenFds.ImageBinDict[self.UiCapsuleName.upper() + 'cap'] = CapOutputFile
+ return CapOutputFile
## Generate inf file for capsule
#
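GenCapsule() above now caches its result in a shared dictionary keyed by the capsule name plus a 'cap' suffix, so a capsule referenced both directly and from an FD region is generated only once per run. The caching idea in isolation (generate is a stand-in for the real GenFv invocation):

image_cache = {}   # "<NAME>cap" / "<NAME>fv" / "<NAME>fd" -> generated file path

def gen_capsule(ui_name, generate):
    key = ui_name.upper() + 'cap'
    if key in image_cache:           # already built earlier in this run
        return image_cache[key]
    output_file = generate(ui_name)  # expensive tool invocation
    image_cache[key] = output_file
    return output_file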
diff --git a/BaseTools/Source/Python/GenFds/CapsuleData.py b/BaseTools/Source/Python/GenFds/CapsuleData.py
index db29737..0940094 100644
--- a/BaseTools/Source/Python/GenFds/CapsuleData.py
+++ b/BaseTools/Source/Python/GenFds/CapsuleData.py
@@ -45,6 +45,7 @@ class CapsuleFfs (CapsuleData):
#
def __init_(self) :
self.Ffs = None
+ self.FvName = None
## generate FFS capsule data
#
@@ -64,7 +65,9 @@ class CapsuleFv (CapsuleData):
# @param self The object pointer
#
def __init__(self) :
+ self.Ffs = None
self.FvName = None
+ self.CapsuleName = None
## generate FV capsule data
#
@@ -76,9 +79,11 @@ class CapsuleFv (CapsuleData):
if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
FdBuffer = StringIO.StringIO('')
+ FvObj.CapsuleName = self.CapsuleName
FvFile = FvObj.AddToBuffer(FdBuffer)
+ FvObj.CapsuleName = None
+ FdBuffer.close()
return FvFile
-
else:
FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)
return FvFile
diff --git a/BaseTools/Source/Python/GenFds/Fd.py b/BaseTools/Source/Python/GenFds/Fd.py
index 99baa6a..370008c 100644
--- a/BaseTools/Source/Python/GenFds/Fd.py
+++ b/BaseTools/Source/Python/GenFds/Fd.py
@@ -26,6 +26,7 @@ from CommonDataClass.FdfClass import FDClassObject
from Common import EdkLogger
from Common.BuildToolError import *
from Common.Misc import SaveFileOnChange
+from GenFds import GenFds
## generate FD
#
@@ -42,11 +43,12 @@ class FD(FDClassObject):
#
# Generate FD
#
- # @param self The object pointer
- # @param FvBinDict dictionary contains generated FV name and its file name
# @retval string Generated FD file name
#
- def GenFd (self, FvBinDict):
+ def GenFd (self):
+ if self.FdUiName.upper() + 'fd' in GenFds.ImageBinDict.keys():
+ return GenFds.ImageBinDict[self.FdUiName.upper() + 'fd']
+
#
# Print Information
#
@@ -80,7 +82,7 @@ class FD(FDClassObject):
PadRegion = Region.Region()
PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
PadRegion.Size = RegionObj.Offset - PadRegion.Offset
- PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, FvBinDict, self.vtfRawDict, self.DefineVarDict)
+ PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
PreviousRegionStart = RegionObj.Offset
PreviousRegionSize = RegionObj.Size
#
@@ -89,23 +91,19 @@ class FD(FDClassObject):
if PreviousRegionSize > self.Size:
EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s size too small' % self.FdUiName)
GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
- RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, FvBinDict, self.vtfRawDict, self.DefineVarDict)
+ RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, GenFds.ImageBinDict, self.vtfRawDict, self.DefineVarDict)
#
# Create a empty Fd file
#
GenFdsGlobalVariable.VerboseLogger ('Create an empty Fd file')
- FdFileName = os.path.join(GenFdsGlobalVariable.FvDir,
- self.FdUiName + '.fd')
- #FdFile = open(FdFileName, 'wb')
-
+ FdFileName = os.path.join(GenFdsGlobalVariable.FvDir,self.FdUiName + '.fd')
#
# Write the buffer contents to Fd file
#
GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')
SaveFileOnChange(FdFileName, FdBuffer.getvalue())
- #FdFile.write(FdBuffer.getvalue());
- #FdFile.close();
FdBuffer.close();
+ GenFds.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName
return FdFileName
## generate VTF
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index 0bf8f55..07de926 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -173,7 +173,7 @@ class FileProfile :
self.FdDict = {}
self.FvDict = {}
- self.CapsuleList = []
+ self.CapsuleDict = {}
self.VtfList = []
self.RuleDict = {}
self.OptRomDict = {}
@@ -1622,7 +1622,7 @@ class FdfParser:
if not self.__GetNextWord():
return True
- if not self.__Token in ("SET", "FV", "FILE", "DATA"):
+ if not self.__Token in ("SET", "FV", "FILE", "DATA", "CAPSULE"):
self.__UndoToken()
RegionObj.PcdOffset = self.__GetNextPcdName()
self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))
@@ -1639,10 +1639,14 @@ class FdfParser:
if not self.__GetNextWord():
return True
- if self.__Token == "FV":
+ elif self.__Token == "FV":
self.__UndoToken()
self.__GetRegionFvType( RegionObj)
+ elif self.__Token == "CAPSULE":
+ self.__UndoToken()
+ self.__GetRegionCapType( RegionObj)
+
elif self.__Token == "FILE":
self.__UndoToken()
self.__GetRegionFileType( RegionObj)
@@ -1684,6 +1688,37 @@ class FdfParser:
RegionObj.RegionDataList.append(self.__Token)
+ ## __GetRegionCapType() method
+ #
+ # Get region capsule data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionCapType(self, RegionObj):
+
+ if not self.__IsKeyword("CAPSULE"):
+ raise Warning("expected Keyword 'CAPSULE'", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected CAPSULE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "CAPSULE"
+ RegionObj.RegionDataList.append(self.__Token)
+
+ while self.__IsKeyword("CAPSULE"):
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected CAPSULE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
## __GetRegionFileType() method
#
# Get region file data for region
@@ -2624,7 +2659,7 @@ class FdfParser:
CapsuleObj.CreateFile = self.__Token
self.__GetCapsuleStatements(CapsuleObj)
- self.Profile.CapsuleList.append(CapsuleObj)
+ self.Profile.CapsuleDict[CapsuleObj.UiCapsuleName] = CapsuleObj
return True
## __GetCapsuleStatements() method
@@ -2638,10 +2673,9 @@ class FdfParser:
self.__GetCapsuleTokens(Obj)
self.__GetDefineStatements(Obj)
self.__GetSetStatements(Obj)
-
self.__GetCapsuleData(Obj)
- ## __GetCapsuleStatements() method
+ ## __GetCapsuleTokens() method
#
# Get token statements for capsule
#
@@ -3558,51 +3592,53 @@ class FdfParser:
def __GetOptRomOverrides(self, Obj):
if self.__IsToken('{'):
Overrides = OptionRom.OverrideAttribs()
- if self.__IsKeyword( "PCI_VENDOR_ID"):
- if not self.__IsToken( "="):
- raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
- if not self.__GetNextHexNumber():
- raise Warning("expected Hex vendor id", self.FileName, self.CurrentLineNumber)
- Overrides.PciVendorId = self.__Token
-
- if self.__IsKeyword( "PCI_CLASS_CODE"):
- if not self.__IsToken( "="):
- raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
- if not self.__GetNextHexNumber():
- raise Warning("expected Hex class code", self.FileName, self.CurrentLineNumber)
- Overrides.PciClassCode = self.__Token
-
- if self.__IsKeyword( "PCI_DEVICE_ID"):
- if not self.__IsToken( "="):
- raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
- if not self.__GetNextHexNumber():
- raise Warning("expected Hex device id", self.FileName, self.CurrentLineNumber)
-
- Overrides.PciDeviceId = self.__Token
-
- if self.__IsKeyword( "PCI_REVISION"):
- if not self.__IsToken( "="):
- raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
- if not self.__GetNextHexNumber():
- raise Warning("expected Hex revision", self.FileName, self.CurrentLineNumber)
- Overrides.PciRevision = self.__Token
-
- if self.__IsKeyword( "COMPRESS"):
- if not self.__IsToken( "="):
- raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
- if not self.__GetNextToken():
- raise Warning("expected TRUE/FALSE for compress", self.FileName, self.CurrentLineNumber)
-
- if self.__Token.upper() == 'TRUE':
- Overrides.NeedCompress = True
-
- if not self.__IsToken( "}"):
-
- if self.__Token not in ("PCI_CLASS_CODE", "PCI_VENDOR_ID", "PCI_DEVICE_ID", "PCI_REVISION", "COMPRESS"):
- raise Warning("unknown attribute %s" % self.__Token, self.FileName, self.CurrentLineNumber)
-
- raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
-
+ while True:
+ if self.__IsKeyword( "PCI_VENDOR_ID"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex vendor id", self.FileName, self.CurrentLineNumber)
+ Overrides.PciVendorId = self.__Token
+ continue
+
+ if self.__IsKeyword( "PCI_CLASS_CODE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex class code", self.FileName, self.CurrentLineNumber)
+ Overrides.PciClassCode = self.__Token
+ continue
+
+ if self.__IsKeyword( "PCI_DEVICE_ID"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex device id", self.FileName, self.CurrentLineNumber)
+
+ Overrides.PciDeviceId = self.__Token
+ continue
+
+ if self.__IsKeyword( "PCI_REVISION"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex revision", self.FileName, self.CurrentLineNumber)
+ Overrides.PciRevision = self.__Token
+ continue
+
+ if self.__IsKeyword( "COMPRESS"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected TRUE/FALSE for compress", self.FileName, self.CurrentLineNumber)
+ Overrides.NeedCompress = self.__Token.upper() == 'TRUE'
+ continue
+
+ if self.__IsToken( "}"):
+ break
+ else:
+ EdkLogger.error("FdfParser", FORMAT_INVALID, File=self.FileName, Line=self.CurrentLineNumber)
+
Obj.OverrideAttribs = Overrides
## __GetOptRomFileStatement() method
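The override block above is now parsed with one while loop that tries each keyword, continues after a successful match, and stops only at the closing brace, so the PCI_* and COMPRESS attributes may appear in any order and anything unrecognized is reported. The same dispatch pattern over a plain token list (tokens and error text are invented for the example):

def parse_overrides(tokens):
    # tokens example: ['PCI_VENDOR_ID', '=', '0x8086', 'COMPRESS', '=', 'TRUE', '}']
    keywords = ('PCI_VENDOR_ID', 'PCI_CLASS_CODE', 'PCI_DEVICE_ID', 'PCI_REVISION', 'COMPRESS')
    overrides, i = {}, 0
    while True:
        if i >= len(tokens):
            raise ValueError("expected '}'")
        token = tokens[i]
        if token == '}':                      # closing brace ends the block
            return overrides
        if token in keywords and i + 2 < len(tokens) and tokens[i + 1] == '=':
            overrides[token] = tokens[i + 2]  # keywords may come in any order
            i += 3
            continue
        raise ValueError('unknown attribute %s' % token)

print(parse_overrides(['PCI_VENDOR_ID', '=', '0x8086', 'COMPRESS', '=', 'TRUE', '}']))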
@@ -3635,8 +3671,52 @@ class FdfParser:
Obj.FfsList.append(FfsFileObj)
return True
-
-
+
+ ## __GetCapInFd() method
+ #
+ # Get Cap list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval CapList List of Capsule in FD
+ #
+ def __GetCapInFd (self, FdName):
+
+ CapList = []
+ if FdName.upper() in self.Profile.FdDict.keys():
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == 'CAPSULE':
+ for elementRegionData in elementRegion.RegionDataList:
+ if elementRegionData.endswith(".cap"):
+ continue
+ if elementRegionData != None and elementRegionData.upper() not in CapList:
+ CapList.append(elementRegionData.upper())
+ return CapList
+
+ ## __GetReferencedFdCapTuple() method
+ #
+ # Get FV and FD list referenced by a capsule image
+ #
+ # @param self The object pointer
+ # @param CapObj Capsule section to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+ def __GetReferencedFdCapTuple(self, CapObj, RefFdList = [], RefFvList = []):
+
+ for CapsuleDataObj in CapObj.CapsuleDataList :
+ if CapsuleDataObj.FvName != None and CapsuleDataObj.FvName.upper() not in RefFvList:
+ RefFvList.append (CapsuleDataObj.FvName.upper())
+ elif CapsuleDataObj.Ffs != None:
+ if isinstance(CapsuleDataObj.Ffs, FfsFileStatement.FileStatement):
+ if CapsuleDataObj.Ffs.FvName != None and CapsuleDataObj.Ffs.FvName.upper() not in RefFvList:
+ RefFvList.append(CapsuleDataObj.Ffs.FvName.upper())
+ elif CapsuleDataObj.Ffs.FdName != None and CapsuleDataObj.Ffs.FdName.upper() not in RefFdList:
+ RefFdList.append(CapsuleDataObj.Ffs.FdName.upper())
+ else:
+ self.__GetReferencedFdFvTupleFromSection(CapsuleDataObj.Ffs, RefFdList, RefFvList)
+
## __GetFvInFd() method
#
# Get FV list contained in FD
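__GetCapInFd() and __GetReferencedFdCapTuple() above gather, for the new cycle check, which capsules an FD region places and which FVs/FDs a capsule pulls in, skipping entries that name pre-built .cap binaries. Roughly, over simplified data (the region tuples below are stand-ins for the real Region objects):

def capsules_in_fd(regions):
    # regions: list of (region_type, [region_data, ...]) pairs
    caps = []
    for region_type, data_list in regions:
        if region_type != 'CAPSULE':
            continue
        for data in data_list:
            if data.endswith('.cap'):      # pre-built binary, nothing to recurse into
                continue
            if data.upper() not in caps:
                caps.append(data.upper())
    return caps

print(capsules_in_fd([('CAPSULE', ['MyCapsule', 'prebuilt.cap']), ('FV', ['FVMAIN'])]))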
@@ -3653,6 +3733,8 @@ class FdfParser:
for elementRegion in FdObj.RegionList:
if elementRegion.RegionType == 'FV':
for elementRegionData in elementRegion.RegionDataList:
+ if elementRegionData.endswith(".fv"):
+ continue
if elementRegionData != None and elementRegionData.upper() not in FvList:
FvList.append(elementRegionData.upper())
return FvList
@@ -3711,60 +3793,126 @@ class FdfParser:
# @retval False Not exists cycle reference
#
def CycleReferenceCheck(self):
+ #
+ # Check the cycle between FV and FD image
+ #
+ MaxLength = len (self.Profile.FvDict)
+ for FvName in self.Profile.FvDict.keys():
+ LogStr = "\nCycle Reference Checking for FV: %s\n" % FvName
+ RefFvStack = []
+ RefFvStack.append(FvName)
+ FdAnalyzedList = []
+
+ Index = 0
+ while RefFvStack != [] and Index < MaxLength:
+ Index = Index + 1
+ FvNameFromStack = RefFvStack.pop()
+ if FvNameFromStack.upper() in self.Profile.FvDict.keys():
+ FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
+ else:
+ continue
- CycleRefExists = False
+ RefFdList = []
+ RefFvList = []
+ self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
- try:
- for FvName in self.Profile.FvDict.keys():
- LogStr = "Cycle Reference Checking for FV: %s\n" % FvName
- RefFvStack = []
- RefFvStack.append(FvName)
- FdAnalyzedList = []
-
- while RefFvStack != []:
- FvNameFromStack = RefFvStack.pop()
- if FvNameFromStack.upper() in self.Profile.FvDict.keys():
- FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
- else:
+ for RefFdName in RefFdList:
+ if RefFdName in FdAnalyzedList:
continue
- RefFdList = []
- RefFvList = []
- self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
+ LogStr += "FV %s contains FD %s\n" % (FvNameFromStack, RefFdName)
+ FvInFdList = self.__GetFvInFd(RefFdName)
+ if FvInFdList != []:
+ for FvNameInFd in FvInFdList:
+ LogStr += "FD %s contains FV %s\n" % (RefFdName,FvNameInFd)
+ if FvNameInFd not in RefFvStack:
+ RefFvStack.append(FvNameInFd)
+
+ if FvName in RefFvStack or FvNameFromStack in RefFvStack:
+ EdkLogger.info(LogStr)
+ return True
+ FdAnalyzedList.append(RefFdName)
+ for RefFvName in RefFvList:
+ LogStr += "FV %s contains FV %s\n" % (FvNameFromStack, RefFvName)
+ if RefFvName not in RefFvStack:
+ RefFvStack.append(RefFvName)
+
+ if FvName in RefFvStack or FvNameFromStack in RefFvStack:
+ EdkLogger.info(LogStr)
+ return True
+
+ #
+ # Check the cycle between Capsule and FD image
+ #
+ MaxLength = len (self.Profile.CapsuleDict)
+ for CapName in self.Profile.CapsuleDict.keys():
+ #
+ # Capsule image to be checked.
+ #
+ LogStr = "\n\n\nCycle Reference Checking for Capsule: %s\n" % CapName
+ RefCapStack = []
+ RefCapStack.append(CapName)
+ FdAnalyzedList = []
+ FvAnalyzedList = []
+
+ Index = 0
+ while RefCapStack != [] and Index < MaxLength:
+ Index = Index + 1
+ CapNameFromStack = RefCapStack.pop()
+ if CapNameFromStack.upper() in self.Profile.CapsuleDict.keys():
+ CapObj = self.Profile.CapsuleDict[CapNameFromStack.upper()]
+ else:
+ continue
+
+ RefFvList = []
+ RefFdList = []
+ self.__GetReferencedFdCapTuple(CapObj, RefFdList, RefFvList)
+
+ FvListLength = 0
+ FdListLength = 0
+ while FvListLength < len (RefFvList) or FdListLength < len (RefFdList):
for RefFdName in RefFdList:
if RefFdName in FdAnalyzedList:
continue
- LogStr += "FD %s is referenced by FV %s\n" % (RefFdName, FvNameFromStack)
+ LogStr += "Capsule %s contains FD %s\n" % (CapNameFromStack, RefFdName)
+ CapInFdList = self.__GetCapInFd(RefFdName)
+ if CapInFdList != []:
+ for CapNameInFd in CapInFdList:
+ LogStr += "FD %s contains Capsule %s\n" % (RefFdName,CapNameInFd)
+ if CapNameInFd not in RefCapStack:
+ RefCapStack.append(CapNameInFd)
+
+ if CapName in RefCapStack or CapNameFromStack in RefCapStack:
+ EdkLogger.info(LogStr)
+ return True
+
FvInFdList = self.__GetFvInFd(RefFdName)
if FvInFdList != []:
- LogStr += "FD %s contains FV: " % RefFdName
- for FvObj in FvInFdList:
- LogStr += FvObj
- LogStr += ' \n'
- if FvObj not in RefFvStack:
- RefFvStack.append(FvObj)
-
- if FvName in RefFvStack:
- CycleRefExists = True
- raise Warning(LogStr)
- FdAnalyzedList.append(RefFdName)
+ for FvNameInFd in FvInFdList:
+ LogStr += "FD %s contains FV %s\n" % (RefFdName,FvNameInFd)
+ if FvNameInFd not in RefFvList:
+ RefFvList.append(FvNameInFd)
+ FdAnalyzedList.append(RefFdName)
+ #
+ # the number of the parsed FV and FD image
+ #
+ FvListLength = len (RefFvList)
+ FdListLength = len (RefFdList)
for RefFvName in RefFvList:
- LogStr += "FV %s is referenced by FV %s\n" % (RefFvName, FvNameFromStack)
- if RefFvName not in RefFvStack:
- RefFvStack.append(RefFvName)
-
- if FvName in RefFvStack:
- CycleRefExists = True
- raise Warning(LogStr)
-
- except Warning:
- print LogStr
+ if RefFvName in FvAnalyzedList:
+ continue
+ LogStr += "Capsule %s contains FV %s\n" % (CapNameFromStack, RefFvName)
+ if RefFvName.upper() in self.Profile.FvDict.keys():
+ FvObj = self.Profile.FvDict[RefFvName.upper()]
+ else:
+ continue
+ self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
+ FvAnalyzedList.append(RefFvName)
- finally:
- return CycleRefExists
+ return False
if __name__ == "__main__":
parser = FdfParser("..\LakeportX64Pkg.fdf")
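The rewritten CycleReferenceCheck() walks FV -> FD -> FV containment (and now Capsule -> FD -> FV/Capsule as well) with an explicit stack, bounded by the number of images, and reports a cycle as soon as the starting image reappears. At its core this is plain cycle detection over a containment graph; a generic sketch (the graph literal is hypothetical):

def has_cycle(containment, start):
    # containment: dict mapping an image name to the image names it directly contains.
    stack, seen = [start], set()
    while stack:
        node = stack.pop()
        for child in containment.get(node, []):
            if child == start:          # walked back to the starting image: cycle
                return True
            if child not in seen:
                seen.add(child)
                stack.append(child)
    return False

graph = {'FD_MAIN': ['FV_MAIN'], 'FV_MAIN': ['FD_MAIN']}   # an FV embedding the FD that holds it
print(has_cycle(graph, 'FV_MAIN'))   # True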
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
index ed778f3..e3f2e68 100644
--- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py
@@ -78,8 +78,7 @@ class FileStatement (FileStatementClassObject) :
if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
- FvBin = {}
- FileName = Fd.GenFd(FvBin)
+ FileName = Fd.GenFd()
SectionFiles = [FileName]
elif self.FileName != None:
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py
index 74248f7..23ec582 100644
--- a/BaseTools/Source/Python/GenFds/Fv.py
+++ b/BaseTools/Source/Python/GenFds/Fv.py
@@ -44,6 +44,7 @@ class FV (FvClassObject):
self.BaseAddress = None
self.InfFileName = None
self.FvAddressFileName = None
+ self.CapsuleName = None
## AddToBuffer()
#
@@ -61,10 +62,27 @@ class FV (FvClassObject):
#
def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict = {}) :
- if self.UiFvName.upper() in GenFds.FvBinDict.keys():
- return GenFds.FvBinDict[self.UiFvName.upper()]
-
- GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV ..." %self.UiFvName)
+ if self.UiFvName.upper() + 'fv' in GenFds.ImageBinDict.keys():
+ return GenFds.ImageBinDict[self.UiFvName.upper() + 'fv']
+
+ #
+ # Check whether FV in Capsule is in FD flash region.
+ # If yes, return error. Doesn't support FV in Capsule image is also in FD flash region.
+ #
+ if self.CapsuleName != None:
+ for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
+ FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName]
+ for RegionObj in FdObj.RegionList:
+ if RegionObj.RegionType == 'FV':
+ for RegionData in RegionObj.RegionDataList:
+ if RegionData.endswith(".fv"):
+ continue
+ elif RegionData.upper() + 'fv' in GenFds.ImageBinDict.keys():
+ continue
+ elif self.UiFvName.upper() == RegionData.upper():
+ GenFdsGlobalVariable.ErrorLogger("Capsule %s in FD region can't contain a FV %s in FD region." % (self.CapsuleName, self.UiFvName.upper()))
+
+ GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV" %self.UiFvName)
self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
#
@@ -115,12 +133,12 @@ class FV (FvClassObject):
#
FvFileObj = open ( FvOutputFile,'r+b')
- GenFdsGlobalVariable.InfLogger( "\nGenerate %s FV Successfully" %self.UiFvName)
+ GenFdsGlobalVariable.VerboseLogger( "\nGenerate %s FV Successfully" %self.UiFvName)
GenFdsGlobalVariable.SharpCounter = 0
Buffer.write(FvFileObj.read())
FvFileObj.close()
- GenFds.FvBinDict[self.UiFvName.upper()] = FvOutputFile
+ GenFds.ImageBinDict[self.UiFvName.upper() + 'fv'] = FvOutputFile
return FvOutputFile
## __InitializeInf__()
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py index 2bc416f..1df1910 100644 --- a/BaseTools/Source/Python/GenFds/GenFds.py +++ b/BaseTools/Source/Python/GenFds/GenFds.py @@ -20,7 +20,7 @@ import sys import os import linecache import FdfParser -from Common.BuildToolError import * +import Common.BuildToolError as BuildToolError from GenFdsGlobalVariable import GenFdsGlobalVariable from Workspace.WorkspaceDatabase import WorkspaceDatabase from Workspace.BuildClassObject import PcdClassObject @@ -77,10 +77,10 @@ def main(): EdkLogger.SetLevel(EdkLogger.INFO) if (Options.Workspace == None): - EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "WORKSPACE not defined", + EdkLogger.error("GenFds", OPTION_MISSING, "WORKSPACE not defined", ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.") elif not os.path.exists(Options.Workspace): - EdkLogger.error("GenFds", BuildToolError.PARAMETER_INVALID, "WORKSPACE is invalid", + EdkLogger.error("GenFds", PARAMETER_INVALID, "WORKSPACE is invalid", ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.") else: Workspace = os.path.normcase(Options.Workspace) @@ -95,17 +95,17 @@ def main(): FdfFilename = Options.filename FdfFilename = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdfFilename) else: - EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing FDF filename") + EdkLogger.error("GenFds", OPTION_MISSING, "Missing FDF filename") if (Options.BuildTarget): GenFdsGlobalVariable.TargetName = Options.BuildTarget else: - EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing build target") + EdkLogger.error("GenFds", OPTION_MISSING, "Missing build target") if (Options.ToolChain): GenFdsGlobalVariable.ToolChainTag = Options.ToolChain else: - EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing tool chain tag") + EdkLogger.error("GenFds", OPTION_MISSING, "Missing tool chain tag") if FdfFilename[0:2] == '..': FdfFilename = os.path.realpath(FdfFilename) @@ -113,7 +113,7 @@ def main(): FdfFilename = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename) if not os.path.exists(FdfFilename): - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=FdfFilename) + EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=FdfFilename) GenFdsGlobalVariable.FdfFile = FdfFilename GenFdsGlobalVariable.FdfFileTimeStamp = os.path.getmtime(FdfFilename) @@ -128,19 +128,19 @@ def main(): ActivePlatform = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ActivePlatform) if not os.path.exists(ActivePlatform) : - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist!") + EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist!") if ActivePlatform.find(Workspace) == -1: - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist in Workspace!") + EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist in Workspace!") ActivePlatform = ActivePlatform.replace(Workspace, '') if len(ActivePlatform) > 0 : if ActivePlatform[0] == '\\' or ActivePlatform[0] == '/': ActivePlatform = ActivePlatform[1:] else: - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist!") + EdkLogger.error("GenFds", FILE_NOT_FOUND, "ActivePlatform doesn't exist!") else : - EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing active platform") + EdkLogger.error("GenFds", OPTION_MISSING, "Missing active platform") 
GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform), Workspace) @@ -148,26 +148,28 @@ def main(): if os.path.isfile(BuildConfigurationFile) == True: TargetTxtClassObject.TargetTxtClassObject(BuildConfigurationFile) else: - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=BuildConfigurationFile) + EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile) if Options.Macros: for Pair in Options.Macros: Pair.strip('"') List = Pair.split('=') if len(List) == 2: - FdfParser.InputMacroDict[List[0].strip()] = List[1].strip() if List[0].strip() == "EFI_SOURCE": GlobalData.gEfiSource = List[1].strip() + continue elif List[0].strip() == "EDK_SOURCE": GlobalData.gEdkSource = List[1].strip() + continue else: GlobalData.gEdkGlobal[List[0].strip()] = List[1].strip() + FdfParser.InputMacroDict[List[0].strip()] = List[1].strip() else: - FdfParser.InputMacroDict[List[0].strip()] = None + FdfParser.InputMacroDict[List[0].strip()] = "" """call Workspace build create database""" os.environ["WORKSPACE"] = Workspace - BuildWorkSpace = WorkspaceDatabase(':memory:', GlobalData.gGlobalDefines) + BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict) BuildWorkSpace.InitDatabase() # @@ -179,7 +181,7 @@ def main(): if (Options.archList) : ArchList = Options.archList.split(',') else: -# EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing build ARCH") +# EdkLogger.error("GenFds", OPTION_MISSING, "Missing build ARCH") ArchList = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'COMMON'].SupArchList TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'COMMON'].SupArchList) & set(ArchList) @@ -206,7 +208,7 @@ def main(): OutputDir = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, OutputDir) if not os.path.exists(OutputDir): - EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=OutputDir) + EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=OutputDir) GenFdsGlobalVariable.OutputDirDict[Key] = OutputDir """ Parse Fdf file, has to place after build Workspace as FDF may contain macros from DSC file """ @@ -214,20 +216,20 @@ def main(): FdfParserObj.ParseFile() if FdfParserObj.CycleReferenceCheck(): - EdkLogger.error("GenFds", BuildToolError.FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file") + EdkLogger.error("GenFds", FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file") if (Options.uiFdName) : if Options.uiFdName.upper() in FdfParserObj.Profile.FdDict.keys(): GenFds.OnlyGenerateThisFd = Options.uiFdName else: - EdkLogger.error("GenFds", BuildToolError.OPTION_VALUE_INVALID, + EdkLogger.error("GenFds", OPTION_VALUE_INVALID, "No such an FD in FDF file: %s" % Options.uiFdName) if (Options.uiFvName) : if Options.uiFvName.upper() in FdfParserObj.Profile.FvDict.keys(): GenFds.OnlyGenerateThisFv = Options.uiFvName else: - EdkLogger.error("GenFds", BuildToolError.OPTION_VALUE_INVALID, + EdkLogger.error("GenFds", OPTION_VALUE_INVALID, "No such an FV in FDF file: %s" % Options.uiFvName) """Modify images from build output if the feature of loading driver at fixed address is on.""" @@ -240,8 +242,8 @@ def main(): GenFds.DisplayFvSpaceInfo(FdfParserObj) except FdfParser.Warning, X: - EdkLogger.error(X.ToolName, BuildToolError.FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) - ReturnCode = BuildToolError.FORMAT_INVALID + EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, 
RaiseError = False) + ReturnCode = FORMAT_INVALID except FatalError, X: if Options.debug != None: import traceback @@ -309,8 +311,8 @@ def myOptionParser(): # class GenFds : FdfParsef = None - # FvName in FDF, FvBinFile name - FvBinDict = {} + # FvName, FdName, CapName in FDF, Image file name + ImageBinDict = {} OnlyGenerateThisFd = None OnlyGenerateThisFv = None @@ -324,17 +326,17 @@ class GenFds : def GenFd (OutputDir, FdfParser, WorkSpace, ArchList): GenFdsGlobalVariable.SetDir ('', FdfParser, WorkSpace, ArchList) - GenFdsGlobalVariable.VerboseLogger(" Gen Fd !") + GenFdsGlobalVariable.VerboseLogger(" Generate all Fd images and their required FV and Capsule images!") if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(GenFds.OnlyGenerateThisFd.upper()) if FdObj != None: - FdObj.GenFd(GenFds.FvBinDict) - elif GenFds.OnlyGenerateThisFv == None: + FdObj.GenFd() + elif GenFds.OnlyGenerateThisFd == None: for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName] - FdObj.GenFd(GenFds.FvBinDict) + FdObj.GenFd() - GenFdsGlobalVariable.VerboseLogger(" Gen FV ! ") + GenFdsGlobalVariable.VerboseLogger("\n Generate other FV images! ") if GenFds.OnlyGenerateThisFv != None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper()) if FvObj != None: @@ -343,7 +345,7 @@ class GenFds : FvObj.AddToBuffer(Buffer, None, GenFds.GetFvBlockSize(FvObj)) Buffer.close() return - elif GenFds.OnlyGenerateThisFd == None: + elif GenFds.OnlyGenerateThisFv == None: for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): Buffer = StringIO.StringIO('') FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName] @@ -352,12 +354,14 @@ class GenFds : Buffer.close() if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None: - GenFdsGlobalVariable.VerboseLogger(" Gen Capsule !") - for CapsuleObj in GenFdsGlobalVariable.FdfParser.Profile.CapsuleList: - CapsuleObj.GenCapsule() - + if GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict != {}: + GenFdsGlobalVariable.VerboseLogger("\n Generate other Capsule images!") + for CapsuleName in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys(): + CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[CapsuleName] + CapsuleObj.GenCapsule() + if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}: - GenFdsGlobalVariable.VerboseLogger(" Gen Option ROM !") + GenFdsGlobalVariable.VerboseLogger("\n Generate all Option ROM!") for DriverName in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.keys(): OptRomObj = GenFdsGlobalVariable.FdfParser.Profile.OptRomDict[DriverName] OptRomObj.AddToBuffer(None) diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py index d556ce7..77c8821 100644 --- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py +++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py @@ -360,7 +360,7 @@ class GenFdsGlobalVariable: try: PopenObject = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr= subprocess.PIPE) except Exception, X: - EdkLogger.error("GenFds", BuildToolError.COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) + EdkLogger.error("GenFds", COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) 
(out, error) = PopenObject.communicate() while PopenObject.returncode == None : @@ -371,7 +371,7 @@ class GenFdsGlobalVariable: GenFdsGlobalVariable.InfLogger (error) if PopenObject.returncode != 0: print "###", cmd - EdkLogger.error("GenFds", BuildToolError.COMMAND_FAILURE, errorMess) + EdkLogger.error("GenFds", COMMAND_FAILURE, errorMess) def VerboseLogger (msg): EdkLogger.verbose(msg) @@ -380,7 +380,7 @@ class GenFdsGlobalVariable: EdkLogger.info(msg) def ErrorLogger (msg, File = None, Line = None, ExtraData = None): - EdkLogger.error('GenFds', BuildToolError.GENFDS_ERROR, msg, File, Line, ExtraData) + EdkLogger.error('GenFds', GENFDS_ERROR, msg, File, Line, ExtraData) def DebugLogger (Level, msg): EdkLogger.debug(Level, msg) diff --git a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py index b9f0af5..8cd7429 100644 --- a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py +++ b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py @@ -48,7 +48,15 @@ class OptRomInfStatement (FfsInfStatement): if self.OverrideAttribs == None:
self.OverrideAttribs = OptionRom.OverrideAttribs()
-
+
+ if self.OverrideAttribs.NeedCompress == None:
+ self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('COMPRESS')
+ if self.OverrideAttribs.NeedCompress is not None:
+ if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
+ GenFdsGlobalVariable.ErrorLogger( "Expected TRUE/FALSE for COMPRESS: %s" %self.InfFileName)
+ self.OverrideAttribs.NeedCompress = \
+ self.OverrideAttribs.NeedCompress.upper() == 'TRUE'
+
if self.OverrideAttribs.PciVendorId == None:
self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
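With NeedCompress defaulting to None, an INF-level COMPRESS define can now supply the value, which is validated and converted from the strings TRUE/FALSE to a boolean. The normalize-or-complain step on its own (to_bool is an illustrative helper, not BaseTools API):

def to_bool(value, default=None):
    if value is None:
        return default
    if isinstance(value, bool):
        return value
    if value.upper() not in ('TRUE', 'FALSE'):
        raise ValueError('Expected TRUE/FALSE for COMPRESS, got %r' % value)
    return value.upper() == 'TRUE'

print(to_bool('TRUE'), to_bool(None, default=False), to_bool('false'))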
diff --git a/BaseTools/Source/Python/GenFds/OptionRom.py b/BaseTools/Source/Python/GenFds/OptionRom.py
index e102e65..28e77aa 100644
--- a/BaseTools/Source/Python/GenFds/OptionRom.py
+++ b/BaseTools/Source/Python/GenFds/OptionRom.py
@@ -135,6 +135,6 @@ class OverrideAttribs:
self.PciClassCode = None
self.PciDeviceId = None
self.PciRevision = None
- self.NeedCompress = False
+ self.NeedCompress = None
\ No newline at end of file
diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py
index ed16c6f..99f1ac3 100644
--- a/BaseTools/Source/Python/GenFds/Region.py
+++ b/BaseTools/Source/Python/GenFds/Region.py
@@ -20,10 +20,10 @@ from GenFdsGlobalVariable import GenFdsGlobalVariable
import StringIO
from CommonDataClass.FdfClass import RegionClassObject
import os
+from stat import *
from Common import EdkLogger
from Common.BuildToolError import *
-
## generate Region
#
#
@@ -52,9 +52,9 @@ class Region(RegionClassObject):
# @retval string Generated FV file path
#
- def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, FvBinDict, vtfDict = None, MacroDict = {}):
+ def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, ImageBinDict, vtfDict = None, MacroDict = {}):
Size = self.Size
- GenFdsGlobalVariable.InfLogger('Generate Region at Offset 0x%X' % self.Offset)
+ GenFdsGlobalVariable.InfLogger('\nGenerate Region at Offset 0x%X' % self.Offset)
GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" %Size)
GenFdsGlobalVariable.SharpCounter = 0
@@ -62,15 +62,14 @@ class Region(RegionClassObject):
#
# Get Fv from FvDict
#
- FvBuffer = StringIO.StringIO('')
RegionBlockSize = self.BlockSizeOfRegion(BlockSizeList)
RegionBlockNum = self.BlockNumOfRegion(RegionBlockSize)
self.FvAddress = int(BaseAddress, 16) + self.Offset
- FvBaseAddress = '0x%X' %self.FvAddress
-
+ FvBaseAddress = '0x%X' %self.FvAddress
+ FvOffset = 0
for RegionData in self.RegionDataList:
-
+ FileName = None
if RegionData.endswith(".fv"):
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s'%RegionData)
@@ -79,83 +78,165 @@ class Region(RegionClassObject):
if not os.path.exists(RegionData):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
- BinFile = open (RegionData, 'r+b')
- FvBuffer.write(BinFile.read())
- if FvBuffer.len > Size:
+ FileName = RegionData
+ elif RegionData.upper() + 'fv' in ImageBinDict.keys():
+ GenFdsGlobalVariable.InfLogger(' Region Name = FV')
+ FileName = ImageBinDict[RegionData.upper() + 'fv']
+ else:
+ #
+ # Generate FvImage.
+ #
+ FvObj = None
+ if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
+ FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
+
+ if FvObj != None :
+ GenFdsGlobalVariable.InfLogger(' Region Name = FV')
+ #
+ # Call GenFv tool
+ #
+ BlockSize = RegionBlockSize
+ BlockNum = RegionBlockNum
+ if FvObj.BlockSizeList != []:
+ if FvObj.BlockSizeList[0][0] != None:
+ BlockSize = FvObj.BlockSizeList[0][0]
+ if FvObj.BlockSizeList[0][1] != None:
+ BlockNum = FvObj.BlockSizeList[0][1]
+ self.FvAddress = self.FvAddress + FvOffset
+ FvAlignValue = self.GetFvAlignValue(FvObj.FvAlignment)
+ if self.FvAddress % FvAlignValue != 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
+ FvBuffer = StringIO.StringIO('')
+ FvBaseAddress = '0x%X' %self.FvAddress
+ FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
+ if FvBuffer.len > Size:
+ FvBuffer.close()
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
+ #
+ # Put the generated image into FD buffer.
+ #
+ Buffer.write(FvBuffer.getvalue())
+ FvBuffer.close()
+ FvOffset = FvOffset + FvBuffer.len
+ Size = Size - FvBuffer.len
+ continue
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (RegionData))
+ #
+ # Add the exist Fv image into FD buffer
+ #
+ if FileName != None:
+ FileLength = os.stat(FileName)[ST_SIZE]
+ if FileLength > Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
"Size of FV File (%s) is larger than Region Size 0x%X specified." \
% (RegionData, Size))
- break
-
- if RegionData.upper() in FvBinDict.keys():
- continue
+ BinFile = open (FileName, 'r+b')
+ Buffer.write(BinFile.read())
+ BinFile.close()
+ Size = Size - FileLength
+ #
+ # Pad the left buffer
+ #
+ if Size > 0:
+ if (ErasePolarity == '1') :
+ PadData = 0xFF
+ else :
+ PadData = 0
+ for i in range(0, Size):
+ Buffer.write(pack('B', PadData))
- FvObj = None
- if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
- FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
+ if self.RegionType == 'CAPSULE':
+ #
+ # Get Capsule from Capsule Dict
+ #
+ for RegionData in self.RegionDataList:
+ if RegionData.endswith(".cap"):
+ RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
+ GenFdsGlobalVariable.InfLogger(' Region CAPSULE Image Name = .cap : %s'%RegionData)
+ if RegionData[1] != ':' :
+ RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
+ if not os.path.exists(RegionData):
+ EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
- if FvObj != None :
- GenFdsGlobalVariable.InfLogger(' Region Name = FV')
+ FileName = RegionData
+ elif RegionData.upper() + 'cap' in ImageBinDict.keys():
+ GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
+ FileName = ImageBinDict[RegionData.upper() + 'cap']
+ else:
#
- # Call GenFv tool
+ # Generate Capsule image and Put it into FD buffer
#
- BlockSize = RegionBlockSize
- BlockNum = RegionBlockNum
- if FvObj.BlockSizeList != []:
- if FvObj.BlockSizeList[0][0] != None:
- BlockSize = FvObj.BlockSizeList[0][0]
- if FvObj.BlockSizeList[0][1] != None:
- BlockNum = FvObj.BlockSizeList[0][1]
- self.FvAddress = self.FvAddress + FvBuffer.len
- FvAlignValue = self.GetFvAlignValue(FvObj.FvAlignment)
- if self.FvAddress % FvAlignValue != 0:
- EdkLogger.error("GenFds", GENFDS_ERROR,
- "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
- FvBaseAddress = '0x%X' %self.FvAddress
- FileName = FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)
-
- if FvBuffer.len > Size:
- EdkLogger.error("GenFds", GENFDS_ERROR,
- "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
- else:
- EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (RegionData))
+ CapsuleObj = None
+ if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict.keys():
+ CapsuleObj = GenFdsGlobalVariable.FdfParser.Profile.CapsuleDict[RegionData.upper()]
+ if CapsuleObj != None :
+ CapsuleObj.CapsuleName = RegionData.upper()
+ GenFdsGlobalVariable.InfLogger(' Region Name = CAPSULE')
+ #
+ # Call GenFv tool to generate Capsule Image
+ #
+ FileName = CapsuleObj.GenCapsule()
+ CapsuleObj.CapsuleName = None
+ else:
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Capsule (%s) is NOT described in FDF file!" % (RegionData))
- if FvBuffer.len > 0:
- Buffer.write(FvBuffer.getvalue())
- else:
- BinFile = open (FileName, 'rb')
+ #
+ # Add the capsule image into FD buffer
+ #
+ FileLength = os.stat(FileName)[ST_SIZE]
+ if FileLength > Size:
+ EdkLogger.error("GenFds", GENFDS_ERROR,
+ "Size 0x%X of Capsule File (%s) is larger than Region Size 0x%X specified." \
+ % (FileLength, RegionData, Size))
+ BinFile = open (FileName, 'r+b')
Buffer.write(BinFile.read())
-
- FvBuffer.close()
+ BinFile.close()
+ Size = Size - FileLength
+ #
+ # Pad the left buffer
+ #
+ if Size > 0:
+ if (ErasePolarity == '1') :
+ PadData = 0xFF
+ else :
+ PadData = 0
+ for i in range(0, Size):
+ Buffer.write(pack('B', PadData))
if self.RegionType == 'FILE':
- FvBuffer = StringIO.StringIO('')
for RegionData in self.RegionDataList:
RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
- GenFdsGlobalVariable.InfLogger(' Region File Name = FILE: %s'%RegionData)
if RegionData[1] != ':' :
RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
if not os.path.exists(RegionData):
EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)
-
- BinFile = open (RegionData, 'r+b')
- FvBuffer.write(BinFile.read())
- if FvBuffer.len > Size :
+ #
+ # Add the file image into FD buffer
+ #
+ FileLength = os.stat(RegionData)[ST_SIZE]
+ if FileLength > Size:
EdkLogger.error("GenFds", GENFDS_ERROR,
- "Size of File (%s) large than Region Size " % RegionData)
-
+ "Size of File (%s) is larger than Region Size 0x%X specified." \
+ % (RegionData, Size))
+ GenFdsGlobalVariable.InfLogger(' Region File Name = %s'%RegionData)
+ BinFile = open (RegionData, 'r+b')
+ Buffer.write(BinFile.read())
+ BinFile.close()
+ Size = Size - FileLength
#
- # If File contents less than region size, append "0xff" after it
+ # Pad the left buffer
#
- if FvBuffer.len < Size:
- for index in range(0, (Size-FvBuffer.len)):
- if (ErasePolarity == '1'):
- FvBuffer.write(pack('B', int('0xFF', 16)))
- else:
- FvBuffer.write(pack('B', int('0x00', 16)))
- Buffer.write(FvBuffer.getvalue())
- FvBuffer.close()
+ if Size > 0:
+ if (ErasePolarity == '1') :
+ PadData = 0xFF
+ else :
+ PadData = 0
+ for i in range(0, Size):
+ Buffer.write(pack('B', PadData))
if self.RegionType == 'DATA' :
GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
@@ -168,12 +249,16 @@ class Region(RegionClassObject):
else:
for item in Data :
Buffer.write(pack('B', int(item, 16)))
- if DataSize < Size:
- if (ErasePolarity == '1'):
+ Size = Size - DataSize
+ #
+ # Pad the left buffer
+ #
+ if Size > 0:
+ if (ErasePolarity == '1') :
PadData = 0xFF
- else:
+ else :
PadData = 0
- for i in range(Size - DataSize):
+ for i in range(0, Size):
Buffer.write(pack('B', PadData))
if self.RegionType == None:
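Note on the Region.py hunks above: every region type (FV, CAPSULE, FILE, DATA) now ends with the same step, padding whatever is left of the region with 0xFF or 0x00 according to the erase polarity. A minimal standalone sketch of that pattern, in the tool's Python 2 style (PadRegion and the sample values are illustrative, not BaseTools names):

    # Sketch of the padding pattern, assuming Python 2 (StringIO, str buffers)
    # to match the surrounding tool code; PadRegion is an illustrative helper.
    from struct import pack
    from StringIO import StringIO

    def PadRegion(Buffer, RemainingSize, ErasePolarity):
        # erased flash reads as all ones when polarity is '1', all zeros otherwise
        PadData = 0xFF if ErasePolarity == '1' else 0x00
        for _ in range(RemainingSize):
            Buffer.write(pack('B', PadData))

    # usage: a 16-byte region that holds a 10-byte image, erase polarity '1'
    Fd = StringIO('')
    Fd.write('\x5A' * 10)         # stand-in for the generated FV/capsule/file image
    PadRegion(Fd, 16 - 10, '1')   # fill the remaining 6 bytes with 0xFF
    assert len(Fd.getvalue()) == 16

The patch inlines this per branch rather than using a helper, but the effect is the same: the FD buffer always comes out exactly Size bytes long.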
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py index 294237d..40eb826 100644 --- a/BaseTools/Source/Python/Workspace/MetaFileParser.py +++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -16,6 +16,7 @@
#
import os
import time
+import copy
import Common.EdkLogger as EdkLogger
from CommonDataClass.DataClass import *
@@ -55,7 +56,7 @@ class MetaFileParser(object):
self._FileType = FileType
self.MetaFile = FilePath
self._FileDir = os.path.dirname(self.MetaFile)
- self._Macros = {}
+ self._Macros = copy.copy(Macros)
# for recursive parsing
self._Owner = Owner
@@ -87,7 +88,9 @@ class MetaFileParser(object):
## Set parsing complete flag in both class and table
def _Done(self):
self._Finished = True
- self._Table.SetEndFlag()
+ ## Do not set end flag when processing included files
+ if self._From == -1:
+ self._Table.SetEndFlag()
## Return the table containg parsed data
#
@@ -208,11 +211,14 @@ class MetaFileParser(object):
if TokenList[0] == '':
EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
- if len(TokenList) == 1:
- self._Macros[TokenList[0]] = ''
- else:
- # keep the macro definition for later use
- self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False)
+
+ # Macros defined in the command line override ones defined in the meta-data file
+ if not TokenList[0] in self._Macros:
+ if len(TokenList) == 1:
+ self._Macros[TokenList[0]] = ''
+ else:
+ # keep the macro definition for later use
+ self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False)
return TokenList[0], self._Macros[TokenList[0]]
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py index df0fa81..348d219 100644 --- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py +++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -17,6 +17,7 @@
import sqlite3
import os
import os.path
+import pickle
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
@@ -24,6 +25,7 @@ import Common.GlobalData as GlobalData
from Common.String import *
from Common.DataType import *
from Common.Misc import *
+from types import *
from CommonDataClass.CommonClass import SkuInfoClass
@@ -1109,6 +1111,7 @@ class InfBuildData(ModuleBuildClassObject):
"BS_DRIVER" : "DXE_DRIVER",
"RT_DRIVER" : "DXE_RUNTIME_DRIVER",
"SAL_RT_DRIVER" : "DXE_SAL_DRIVER",
+ "SMM_DRIVER" : "SMM_DRIVER",
# "BS_DRIVER" : "DXE_SMM_DRIVER",
# "BS_DRIVER" : "UEFI_DRIVER",
"APPLICATION" : "UEFI_APPLICATION",
@@ -2059,11 +2062,11 @@ class WorkspaceDatabase(object):
if DbPath != ':memory:':
DbDir = os.path.split(DbPath)[0]
if not os.path.exists(DbDir):
- os.makedirs(DbDir)
-
- # remove db file in case inconsistency between db and file in file system
- if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
- os.remove(DbPath)
+ os.makedirs(DbDir)
+
+ # remove db file in case inconsistency between db and file in file system
+ if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
+ os.remove(DbPath)
# create db with optimized parameters
self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
@@ -2084,60 +2087,95 @@ class WorkspaceDatabase(object):
# conversion object for build or file format conversion purpose
self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)
-
- ## Check whether workspace database need to be renew.
- # The renew reason maybe:
- # 1) If user force to renew;
- # 2) If user do not force renew, and
- # a) If the time of last modified python source is newer than database file;
- # b) If the time of last modified frozen executable file is newer than database file;
- #
- # @param force User force renew database
- # @param DbPath The absolute path of workspace database file
- #
- # @return Bool value for whether need renew workspace databse
- #
- def _CheckWhetherDbNeedRenew (self, force, DbPath):
- # if database does not exist, we need do nothing
- if not os.path.exists(DbPath): return False
-
- # if user force to renew database, then not check whether database is out of date
- if force: return True
-
- #
- # Check the time of last modified source file or build.exe
- # if is newer than time of database, then database need to be re-created.
- #
- timeOfToolModified = 0
- if hasattr(sys, "frozen"):
- exePath = os.path.abspath(sys.executable)
- timeOfToolModified = os.stat(exePath).st_mtime
- else:
- curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
- rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
- if rootPath == "" or rootPath == None:
- EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
-determine whether database file is out of date!\n")
-
- # walk the root path of source or build's binary to get the time last modified.
-
- for root, dirs, files in os.walk (rootPath):
- for dir in dirs:
- # bypass source control folder
- if dir.lower() in [".svn", "_svn", "cvs"]:
- dirs.remove(dir)
-
- for file in files:
- ext = os.path.splitext(file)[1]
- if ext.lower() == ".py": # only check .py files
- fd = os.stat(os.path.join(root, file))
- if timeOfToolModified < fd.st_mtime:
- timeOfToolModified = fd.st_mtime
- if timeOfToolModified > os.stat(DbPath).st_mtime:
- EdkLogger.verbose("\nWorkspace database is out of data!")
- return True
-
- return False
+
+ ## Check whether workspace database need to be renew.
+ # The renew reason maybe:
+ # 1) If user force to renew;
+ # 2) If user do not force renew, and
+ # a) If the time of last modified python source is newer than database file;
+ # b) If the time of last modified frozen executable file is newer than database file;
+ #
+ # @param force User force renew database
+ # @param DbPath The absolute path of workspace database file
+ #
+ # @return Bool value for whether need renew workspace databse
+ #
+ def _CheckWhetherDbNeedRenew (self, force, DbPath):
+ DbDir = os.path.split(DbPath)[0]
+ MacroFilePath = os.path.normpath(os.path.join(DbDir, "build.mac"))
+ MacroMatch = False
+ if os.path.exists(MacroFilePath) and os.path.isfile(MacroFilePath):
+ LastMacros = None
+ try:
+ f = open(MacroFilePath,'r')
+ LastMacros = pickle.load(f)
+ f.close()
+ except IOError:
+ pass
+ except:
+ f.close()
+
+ if LastMacros != None and type(LastMacros) is DictType:
+ if LastMacros == self._GlobalMacros:
+ MacroMatch = True
+ for Macro in LastMacros.keys():
+ if not (Macro in self._GlobalMacros and LastMacros[Macro] == self._GlobalMacros[Macro]):
+ MacroMatch = False;
+ break;
+
+ if not MacroMatch:
+ # save command line macros to file
+ try:
+ f = open(MacroFilePath,'w')
+ pickle.dump(self._GlobalMacros, f, 2)
+ f.close()
+ except IOError:
+ pass
+ except:
+ f.close()
+
+ force = True
+
+ # if database does not exist, we need do nothing
+ if not os.path.exists(DbPath): return False
+
+ # if user force to renew database, then not check whether database is out of date
+ if force: return True
+
+ #
+ # Check the time of last modified source file or build.exe
+ # if is newer than time of database, then database need to be re-created.
+ #
+ timeOfToolModified = 0
+ if hasattr(sys, "frozen"):
+ exePath = os.path.abspath(sys.executable)
+ timeOfToolModified = os.stat(exePath).st_mtime
+ else:
+ curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
+ rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
+ if rootPath == "" or rootPath == None:
+ EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
+determine whether database file is out of date!\n")
+
+ # walk the root path of source or build's binary to get the time last modified.
+
+ for root, dirs, files in os.walk (rootPath):
+ for dir in dirs:
+ # bypass source control folder
+ if dir.lower() in [".svn", "_svn", "cvs"]:
+ dirs.remove(dir)
+
+ for file in files:
+ ext = os.path.splitext(file)[1]
+ if ext.lower() == ".py": # only check .py files
+ fd = os.stat(os.path.join(root, file))
+ if timeOfToolModified < fd.st_mtime:
+ timeOfToolModified = fd.st_mtime
+ if timeOfToolModified > os.stat(DbPath).st_mtime:
+ EdkLogger.verbose("\nWorkspace database is out of data!")
+ return True
+
+ return False
## Initialize build database
def InitDatabase(self):
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py index c92b442..7d14e15 100644 --- a/BaseTools/Source/Python/build/build.py +++ b/BaseTools/Source/Python/build/build.py
@@ -158,13 +158,6 @@ def CheckEnvVariable():
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", ExtraData="PATH")
- # for macro replacement in R9 DSC/DEC/INF file
- GlobalData.gGlobalDefines["WORKSPACE"] = ""
-
- # for macro replacement in R8 INF file
- GlobalData.gGlobalDefines["EFI_SOURCE"] = EfiSourceDir
- GlobalData.gGlobalDefines["EDK_SOURCE"] = EdkSourceDir
-
GlobalData.gWorkspace = WorkspaceDir
GlobalData.gEfiSource = EfiSourceDir
GlobalData.gEdkSource = EdkSourceDir
@@ -705,8 +698,8 @@ class Build():
self.TargetTxt = TargetTxtClassObject()
self.ToolDef = ToolDefClassObject()
- #self.Db = WorkspaceDatabase(None, GlobalData.gGlobalDefines, self.Reparse)
- self.Db = WorkspaceDatabase(None, {}, self.Reparse)
+ self.Db = WorkspaceDatabase(None, GlobalData.gGlobalDefines, self.Reparse)
+ #self.Db = WorkspaceDatabase(None, {}, self.Reparse)
self.BuildDatabase = self.Db.BuildObject
self.Platform = None
@@ -1258,6 +1251,7 @@ def MyOptionParser():
"including library instances selected, final dependency expression, "\
"and warning messages, etc.")
Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".")
(Opt, Args)=Parser.parse_args()
return (Opt, Args)
@@ -1321,7 +1315,7 @@ def Main():
EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." % Target,
ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget)))
- # GlobalData.gGlobalDefines = ParseDefines(Option.Defines)
+ GlobalData.gGlobalDefines = ParseDefines(Option.Macros)
#
# Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH
#
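Taken together, the build.py and WorkspaceDatabase.py hunks let macros passed with the new -D/--define option reach the workspace database, and make a change in those macros invalidate it: the current macro set is pickled to a build.mac file beside the database, and a mismatch on the next invocation forces the database to be re-created. A rough sketch of that staleness check (MacrosChanged and the example paths are invented for illustration, not the actual _CheckWhetherDbNeedRenew code):

    # Rough sketch only: compare the current -D macro set against the one
    # pickled on the previous run; any difference marks the database stale.
    import os
    import pickle

    def MacrosChanged(MacroFilePath, CurrentMacros):
        LastMacros = None
        if os.path.isfile(MacroFilePath):
            try:
                f = open(MacroFilePath, 'rb')
                LastMacros = pickle.load(f)
                f.close()
            except (IOError, pickle.UnpicklingError):
                LastMacros = None
        if LastMacros == CurrentMacros:
            return False
        # remember the new macro set for the next invocation
        f = open(MacroFilePath, 'wb')
        pickle.dump(CurrentMacros, f, 2)
        f.close()
        return True

    # usage with hypothetical names: force a rebuild when the macros changed
    # if MacrosChanged(os.path.join(DbDir, "build.mac"), GlobalData.gGlobalDefines):
    #     os.remove(DbPath)

The real method additionally falls back to comparing the database's mtime against the Python sources or the frozen build.exe, as shown in the diff above.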