Diffstat (limited to 'BaseTools/Source/Python/AutoGen')
-rw-r--r--   BaseTools/Source/Python/AutoGen/AutoGen.py          2367
-rw-r--r--   BaseTools/Source/Python/AutoGen/BuildEngine.py       622
-rw-r--r--   BaseTools/Source/Python/AutoGen/GenC.py             2011
-rw-r--r--   BaseTools/Source/Python/AutoGen/GenDepex.py          448
-rw-r--r--   BaseTools/Source/Python/AutoGen/GenMake.py          1399
-rw-r--r--   BaseTools/Source/Python/AutoGen/StrGather.py         665
-rw-r--r--   BaseTools/Source/Python/AutoGen/UniClassObject.py    571
-rw-r--r--   BaseTools/Source/Python/AutoGen/__init__.py           17
8 files changed, 8100 insertions(+), 0 deletions(-)
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
new file mode 100644
index 0000000000..4ecf2eafe7
--- /dev/null
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -0,0 +1,2367 @@
+## @file
+# Generate AutoGen.h, AutoGen.c and *.depex files
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+## Import Modules
+#
+import os
+import re
+import os.path as path
+import copy
+
+import GenC
+import GenMake
+import GenDepex
+from StringIO import StringIO
+
+from StrGather import *
+from BuildEngine import BuildRule
+
+from Common.BuildToolError import *
+from Common.DataType import *
+from Common.Misc import *
+from Common.String import *
+import Common.GlobalData as GlobalData
+from GenFds.FdfParser import *
+from CommonDataClass.CommonClass import SkuInfoClass
+from Workspace.BuildClassObject import *
+import Common.VpdInfoFile as VpdInfoFile
+
+## Regular expression for splitting a Dependency Expression string into tokens
+gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
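+#
+# Illustrative sketch (not part of the original source; GUID names are
+# hypothetical): with findall-style matching the pattern tokenizes a depex
+# string such as
+#
+#   >>> gDepexTokenPattern.findall("(gGuidA AND gGuidB) OR gGuidC")
+#   ['(', 'gGuidA', 'AND', 'gGuidB', ')', 'OR', 'gGuidC']
+#
+# The " \S+\.inf" alternative additionally captures INF file paths that may
+# appear in an expression.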
+
+## Mapping Makefile type
+gMakeTypeMap = {"MSFT":"nmake", "GCC":"gmake"}
+
+
+## Build rule configuration file
+gBuildRuleFile = 'Conf/build_rule.txt'
+
+## default file name for AutoGen
+gAutoGenCodeFileName = "AutoGen.c"
+gAutoGenHeaderFileName = "AutoGen.h"
+gAutoGenStringFileName = "%(module_name)sStrDefs.h"
+gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
+gAutoGenDepexFileName = "%(module_name)s.depex"
+
+## Base class for AutoGen
+#
+# This class just implements the cache mechanism of AutoGen objects.
+#
+class AutoGen(object):
+ # database to maintain the objects of xxxAutoGen
+ _CACHE_ = {} # (BuildTarget, ToolChain) : {ARCH : {platform file: AutoGen object}}}
+
+ ## Factory method
+ #
+ # @param Class class object of real AutoGen class
+ # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
+ # @param Workspace Workspace directory or WorkspaceAutoGen object
+ # @param MetaFile The path of meta file
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param Arch Target arch
+ # @param *args The specific class related parameters
+ # @param **kwargs The specific class related dict parameters
+ #
+ def __new__(Class, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ # check if the object has been created
+ Key = (Target, Toolchain)
+ if Key not in Class._CACHE_ or Arch not in Class._CACHE_[Key] \
+ or MetaFile not in Class._CACHE_[Key][Arch]:
+ AutoGenObject = super(AutoGen, Class).__new__(Class)
+ # call real constructor
+ if not AutoGenObject._Init(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
+ return None
+ if Key not in Class._CACHE_:
+ Class._CACHE_[Key] = {}
+ if Arch not in Class._CACHE_[Key]:
+ Class._CACHE_[Key][Arch] = {}
+ Class._CACHE_[Key][Arch][MetaFile] = AutoGenObject
+ else:
+ AutoGenObject = Class._CACHE_[Key][Arch][MetaFile]
+
+ return AutoGenObject
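+ # Illustrative sketch (not part of the original source; Workspace and DscFile
+ # are hypothetical placeholders): thanks to the cache above, constructing an
+ # AutoGen object twice with the same (Target, Toolchain, Arch, MetaFile) key
+ # returns the same instance.
+ #
+ #   >>> PaA = PlatformAutoGen(Workspace, DscFile, "DEBUG", "MYTOOLS", "IA32")
+ #   >>> PaB = PlatformAutoGen(Workspace, DscFile, "DEBUG", "MYTOOLS", "IA32")
+ #   >>> PaA is PaB
+ #   True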
+
+ ## hash() operator
+ #
+ # The file path of platform file will be used to represent hash value of this object
+ #
+ # @retval int Hash value of the file path of platform file
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+ ## str() operator
+ #
+ # The file path of platform file will be used to represent this object
+ #
+ # @retval string String of platform file path
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## "==" operator
+ def __eq__(self, Other):
+ return Other and self.MetaFile == Other
+
+## Workspace AutoGen class
+#
+# This class is used mainly to control the whole platform build for different
+# architecture. This class will generate top level makefile.
+#
+class WorkspaceAutoGen(AutoGen):
+ ## Real constructor of WorkspaceAutoGen
+ #
+ # This method behaves the same as __init__ except that it must be invoked explicitly
+ # (in super class's __new__ method)
+ #
+ # @param WorkspaceDir Root directory of workspace
+ # @param ActivePlatform Meta-file of active platform
+ # @param Target Build target
+ # @param Toolchain Tool chain name
+ # @param ArchList List of architecture of current build
+ # @param MetaFileDb Database containing meta-files
+ # @param BuildConfig Configuration of build
+ # @param ToolDefinition Tool chain definitions
+ # @param FlashDefinitionFile File of flash definition
+ # @param Fds FD list to be generated
+ # @param Fvs FV list to be generated
+ # @param SkuId SKU id from command line
+ #
+ def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
+ BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=[], Fvs=[], SkuId='', UniFlag=None):
+ self.MetaFile = ActivePlatform.MetaFile
+ self.WorkspaceDir = WorkspaceDir
+ self.Platform = ActivePlatform
+ self.BuildTarget = Target
+ self.ToolChain = Toolchain
+ self.ArchList = ArchList
+ self.SkuId = SkuId
+ self.UniFlag = UniFlag
+
+ self.BuildDatabase = MetaFileDb
+ self.TargetTxt = BuildConfig
+ self.ToolDef = ToolDefinition
+ self.FdfFile = FlashDefinitionFile
+ self.FdTargetList = Fds
+ self.FvTargetList = Fvs
+ self.AutoGenObjectList = []
+
+ # there are many relative directory operations, so ...
+ os.chdir(self.WorkspaceDir)
+
+ # parse FDF file to get PCDs in it, if any
+ if self.FdfFile != None and self.FdfFile != '':
+ #
+ # Make global macros available when parsing FDF file
+ #
+ InputMacroDict.update(self.BuildDatabase.WorkspaceDb._GlobalMacros)
+ Fdf = FdfParser(self.FdfFile.Path)
+ Fdf.ParseFile()
+ PcdSet = Fdf.Profile.PcdDict
+ ModuleList = Fdf.Profile.InfList
+ self.FdfProfile = Fdf.Profile
+ else:
+ PcdSet = {}
+ ModuleList = []
+ self.FdfProfile = None
+
+ # apply SKU and inject PCDs from Flash Definition file
+ for Arch in self.ArchList:
+ Platform = self.BuildDatabase[self.MetaFile, Arch]
+ Platform.SkuName = self.SkuId
+ for Name, Guid in PcdSet:
+ Platform.AddPcd(Name, Guid, PcdSet[Name, Guid])
+
+ Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
+ #
+ # Explicitly collect platform's dynamic PCDs
+ #
+ Pa.CollectPlatformDynamicPcds()
+ self.AutoGenObjectList.append(Pa)
+
+ #
+ # Check PCDs token value conflict in each DEC file.
+ #
+ self._CheckAllPcdsTokenValueConflict()
+
+ self._BuildDir = None
+ self._FvDir = None
+ self._MakeFileDir = None
+ self._BuildCommand = None
+
+ return True
+
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))
+
+ ## Return the directory to store FV files
+ def _GetFvDir(self):
+ if self._FvDir == None:
+ self._FvDir = path.join(self.BuildDir, 'FV')
+ return self._FvDir
+
+ ## Return the directory to store all intermediate and final files built
+ def _GetBuildDir(self):
+ return self.AutoGenObjectList[0].BuildDir
+
+ ## Return the build output directory platform specifies
+ def _GetOutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return platform name
+ def _GetName(self):
+ return self.Platform.PlatformName
+
+ ## Return meta-file GUID
+ def _GetGuid(self):
+ return self.Platform.Guid
+
+ ## Return platform version
+ def _GetVersion(self):
+ return self.Platform.Version
+
+ ## Return paths of tools
+ def _GetToolDefinition(self):
+ return self.AutoGenObjectList[0].ToolDefinition
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ def _GetMakeFileDir(self):
+ if self._MakeFileDir == None:
+ self._MakeFileDir = self.BuildDir
+ return self._MakeFileDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ def _GetBuildCommand(self):
+ if self._BuildCommand == None:
+ # BuildCommand should be all the same. So just get one from platform AutoGen
+ self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
+ return self._BuildCommand
+
+ ## Check the PCDs token value conflict in each DEC file.
+ #
+ # Will break the build and raise an error message if two PCDs conflict.
+ #
+ # @return None
+ #
+ def _CheckAllPcdsTokenValueConflict(self):
+ if len(self.BuildDatabase.WorkspaceDb.PackageList) >= 1:
+ for Package in self.BuildDatabase.WorkspaceDb.PackageList:
+ PcdList = Package.Pcds.values()
+ PcdList.sort(lambda x, y: cmp(x.TokenValue, y.TokenValue))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+ # Make sure the TokenValue is unique within the same token space
+ #
+ if (Item.TokenValue == ItemNext.TokenValue):
+ SameTokenValuePcdList = []
+ SameTokenValuePcdList.append(Item)
+ SameTokenValuePcdList.append(ItemNext)
+ RemainPcdListLength = len(PcdList) - Count - 2
+ for ValueSameCount in range(RemainPcdListLength):
+ if PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue == Item.TokenValue:
+ SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
+ else:
+ break
+ #
+ # Sort same token value PCD list with TokenGuid and TokenCName
+ #
+ SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+ SameTokenValuePcdListCount = 0
+ while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
+ TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
+ TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
+
+ if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
+ % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
+ ExtraData=None
+ )
+ SameTokenValuePcdListCount += 1
+ Count += SameTokenValuePcdListCount
+ Count += 1
+
+ PcdList = Package.Pcds.values()
+ PcdList.sort(lambda x, y: cmp("%s.%s"%(x.TokenSpaceGuidCName, x.TokenCName), "%s.%s"%(y.TokenSpaceGuidCName, y.TokenCName)))
+ Count = 0
+ while (Count < len(PcdList) - 1) :
+ Item = PcdList[Count]
+ ItemNext = PcdList[Count + 1]
+ #
+ # Check PCDs with same TokenSpaceGuidCName.TokenCName have same token value as well.
+ #
+ if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (Item.TokenValue != ItemNext.TokenValue):
+ EdkLogger.error(
+ 'build',
+ FORMAT_INVALID,
+ "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
+ % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
+ ExtraData=None
+ )
+ Count += 1
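+ # Illustrative sketch (not part of the original source; names and values are
+ # hypothetical): if a DEC package declares gMyTokenSpaceGuid.PcdFoo and
+ # gMyTokenSpaceGuid.PcdBar with the same TokenValue (say 0x00000001), the
+ # first pass above reports a FORMAT_INVALID conflict; if the same
+ # gMyTokenSpaceGuid.PcdFoo is declared twice with different TokenValues, the
+ # second pass reports that instead.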
+
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateDepsMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateDepsMakeFile=False):
+ # create makefile for platform
+ Makefile = GenMake.TopLevelMakefile(self)
+ if Makefile.Generate():
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for platform [%s] %s\n" %
+ (self.MetaFile, self.ArchList))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for platform [%s] %s\n" %
+ (self.MetaFile, self.ArchList))
+
+ if CreateDepsMakeFile:
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateMakeFile(CreateDepsMakeFile)
+
+ ## Create autogen code for platform and modules
+ #
+ # Since there's no autogen code for platform, this method will do nothing
+ # if CreateModuleCodeFile is set to False.
+ #
+ # @param CreateDepsCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ def CreateCodeFile(self, CreateDepsCodeFile=False):
+ if not CreateDepsCodeFile:
+ return
+ for Pa in self.AutoGenObjectList:
+ Pa.CreateCodeFile(CreateDepsCodeFile)
+
+ Name = property(_GetName)
+ Guid = property(_GetGuid)
+ Version = property(_GetVersion)
+ OutputDir = property(_GetOutputDir)
+
+ ToolDefinition = property(_GetToolDefinition) # toolcode : tool path
+
+ BuildDir = property(_GetBuildDir)
+ FvDir = property(_GetFvDir)
+ MakeFileDir = property(_GetMakeFileDir)
+ BuildCommand = property(_GetBuildCommand)
+
+## AutoGen class for platform
+#
+# PlatformAutoGen class will process the original information in platform
+# file in order to generate makefile for platform.
+#
+class PlatformAutoGen(AutoGen):
+ #
+ # Used to store all PCDs for both PEI and DXE phase, in order to generate
+ # correct PCD database
+ #
+ _DynaPcdList_ = []
+ _NonDynaPcdList_ = []
+
+ #
+ # The priority list used when overriding build options
+ #
+ PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
+ "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
+ "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
+ "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
+ "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
+ "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
+ "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
+ "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
+ "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
+ "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
+
+ ## The real constructor of PlatformAutoGen
+ #
+ # This method is not supposed to be called by users of PlatformAutoGen. It's
+ # only used by factory method __new__() to do real initialization work for an
+ # object of PlatformAutoGen
+ #
+ # @param Workspace WorkspaceAutoGen object
+ # @param PlatformFile Platform file (DSC file)
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch Arch that the platform supports
+ #
+ def _Init(self, Workspace, PlatformFile, Target, Toolchain, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
+
+ self.MetaFile = PlatformFile
+ self.Workspace = Workspace
+ self.WorkspaceDir = Workspace.WorkspaceDir
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.SourceDir = PlatformFile.SubDir
+ self.SourceOverrideDir = None
+ self.FdTargetList = self.Workspace.FdTargetList
+ self.FvTargetList = self.Workspace.FvTargetList
+ self.AllPcdList = []
+
+ # flag indicating if the makefile/C-code file has been created or not
+ self.IsMakeFileCreated = False
+ self.IsCodeFileCreated = False
+
+ self._Platform = None
+ self._Name = None
+ self._Guid = None
+ self._Version = None
+
+ self._BuildRule = None
+ self._SourceDir = None
+ self._BuildDir = None
+ self._OutputDir = None
+ self._FvDir = None
+ self._MakeFileDir = None
+ self._FdfFile = None
+
+ self._PcdTokenNumber = None # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
+ self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+ self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+
+ self._ToolDefinitions = None
+ self._ToolDefFile = None # toolcode : tool path
+ self._ToolChainFamily = None
+ self._BuildRuleFamily = None
+ self._BuildOption = None # toolcode : option
+ self._EdkBuildOption = None # edktoolcode : option
+ self._EdkIIBuildOption = None # edkiitoolcode : option
+ self._PackageList = None
+ self._ModuleAutoGenList = None
+ self._LibraryAutoGenList = None
+ self._BuildCommand = None
+
+ # get the original module/package/platform objects
+ self.BuildDatabase = Workspace.BuildDatabase
+ return True
+
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ ## Create autogen code for platform and modules
+ #
+ # Since there's no autogen code for platform, this method will do nothing
+ # if CreateModuleCodeFile is set to False.
+ #
+ # @param CreateModuleCodeFile Flag indicating if creating module's
+ # autogen code file or not
+ #
+ def CreateCodeFile(self, CreateModuleCodeFile=False):
+ # only modules have code to be generated, so do nothing if CreateModuleCodeFile is False
+ if self.IsCodeFileCreated or not CreateModuleCodeFile:
+ return
+
+ for Ma in self.ModuleAutoGenList:
+ Ma.CreateCodeFile(True)
+
+ # don't do this twice
+ self.IsCodeFileCreated = True
+
+ ## Create makefile for the platform and modules in it
+ #
+ # @param CreateModuleMakeFile Flag indicating if the makefile for
+ # modules will be created as well
+ #
+ def CreateMakeFile(self, CreateModuleMakeFile=False):
+ if CreateModuleMakeFile:
+ for ModuleFile in self.Platform.Modules:
+ Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget,
+ self.ToolChain, self.Arch, self.MetaFile)
+ Ma.CreateMakeFile(True)
+
+ # no need to create makefile for the platform more than once
+ if self.IsMakeFileCreated:
+ return
+
+ # create makefile for platform
+ Makefile = GenMake.PlatformMakefile(self)
+ if Makefile.Generate():
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for platform [%s] [%s]\n" %
+ (self.MetaFile, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for platform [%s] [%s]\n" %
+ (self.MetaFile, self.Arch))
+ self.IsMakeFileCreated = True
+
+ ## Collect dynamic PCDs
+ #
+ # Gather the dynamic PCD list from each module and their settings from the platform.
+ # This interface should be invoked explicitly when the platform AutoGen object is created.
+ #
+ def CollectPlatformDynamicPcds(self):
+ # for gathering error information
+ NoDatumTypePcdList = set()
+
+ self._GuidValue = {}
+ for F in self.Platform.Modules.keys():
+ M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile)
+ #GuidValue.update(M.Guids)
+
+ self.Platform.Modules[F].M = M
+
+ for PcdFromModule in M.ModulePcdList+M.LibraryPcdList:
+ # make sure that the "VOID*" kind of datum has MaxDatumSize set
+ if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize == None:
+ NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
+
+ if PcdFromModule.Type in GenC.gDynamicPcd or PcdFromModule.Type in GenC.gDynamicExPcd:
+ #
+ # If a dynamic PCD is used by both a PEIM/PEI module and a DXE module,
+ # it should be stored in the PEI PCD database; if a dynamic PCD is only
+ # used by DXE modules, it should be stored in the DXE PCD database.
+ # The default Phase is DXE.
+ #
+ if M.ModuleType in ["PEIM", "PEI_CORE"]:
+ PcdFromModule.Phase = "PEI"
+ if PcdFromModule not in self._DynaPcdList_:
+ self._DynaPcdList_.append(PcdFromModule)
+ elif PcdFromModule.Phase == 'PEI':
+ # overwrite any existing identical PCD if Phase is PEI
+ Index = self._DynaPcdList_.index(PcdFromModule)
+ self._DynaPcdList_[Index] = PcdFromModule
+ elif PcdFromModule not in self._NonDynaPcdList_:
+ self._NonDynaPcdList_.append(PcdFromModule)
+
+ # print out error information and break the build, if error found
+ if len(NoDatumTypePcdList) > 0:
+ NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
+ % NoDatumTypePcdListString)
+ self._NonDynamicPcdList = self._NonDynaPcdList_
+ self._DynamicPcdList = self._DynaPcdList_
+ self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList
+
+ #
+ # Sort dynamic PCD list to:
+ # 1) If a PCD's datum type is VOID* and its value is a unicode string starting with L,
+ # the PCD item should be placed at the head of the dynamic list
+ # 2) If a PCD is of HII type, the PCD item should be placed after the unicode type PCDs
+ #
+ # The reason for sorting is to make sure the unicode strings are double-byte aligned in the string table.
+ #
+ UnicodePcdArray = []
+ HiiPcdArray = []
+ OtherPcdArray = []
+ VpdPcdDict = {}
+ VpdFile = VpdInfoFile.VpdInfoFile()
+ NeedProcessVpdMapFile = False
+
+ if (self.Workspace.ArchList[-1] == self.Arch):
+ for Pcd in self._DynamicPcdList:
+ # just pick a value to determine whether it is a unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ PcdValue = Sku.DefaultValue
+ if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
+ # if a PCD's datum value is a unicode string, insert it to the left of UnicodeIndex
+ UnicodePcdArray.append(Pcd)
+ elif len(Sku.VariableName) > 0:
+ # if an HII type PCD is found, insert it to the right of UnicodeIndex
+ HiiPcdArray.append(Pcd)
+ else:
+ OtherPcdArray.append(Pcd)
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
+
+ PlatformPcds = self.Platform.Pcds.keys()
+ PlatformPcds.sort()
+ #
+ # Add VPD type PCDs into VpdFile and determine whether each VPD PCD needs to be fixed up.
+ #
+ for PcdKey in PlatformPcds:
+ Pcd = self.Platform.Pcds[PcdKey]
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ Pcd = VpdPcdDict[PcdKey]
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+ #
+ # Fix the optional data of VPD PCD.
+ #
+ if (Pcd.DatumType.strip() != "VOID*"):
+ if Sku.DefaultValue == '':
+ Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
+ Pcd.MaxDatumSize = None
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
+
+ VpdFile.Add(Pcd, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it needs to be fixed up by a third-party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+ if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
+ EdkLogger.error("Build", FILE_NOT_FOUND, \
+ "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+
+ #
+ # Fix the PCDs defined in the VPD PCD section that are never referenced by any module.
+ # An example is PCD for signature usage.
+ #
+ for DscPcd in PlatformPcds:
+ DscPcdEntry = self.Platform.Pcds[DscPcd]
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ FoundFlag = False
+ for VpdPcd in VpdFile._VpdArray.keys():
+ # This PCD has been referenced by module
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
+ FoundFlag = True
+
+ # Not found; it should be a signature PCD
+ if not FoundFlag :
+ # just pick a value to determine whether it is a unicode string type
+ Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ # Need to iterate DEC pcd information to get the value & datumtype
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ # Print a warning message so the developer can decide how to handle it.
+ EdkLogger.warn("build", "Unreferenced vpd pcd used!",
+ File=self.MetaFile, \
+ ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
+ %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
+
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ # Only fix the value when no value is provided in the DSC file.
+ if (Sku.DefaultValue == "" or Sku.DefaultValue==None):
+ DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
+
+
+ VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it needs to be fixed up by a third-party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+
+
+ if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
+ VpdFile.GetCount() != 0:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
+
+ if VpdFile.GetCount() != 0:
+ WorkspaceDb = self.BuildDatabase.WorkspaceDb
+ DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
+ FvPath = os.path.join(self.BuildDir, "FV")
+ if not os.path.exists(FvPath):
+ try:
+ os.makedirs(FvPath)
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+
+
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+
+
+ if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
+ VpdFile.Write(VpdFilePath)
+
+ # retrieve the BPDG tool's path from tools_def.txt according to VPD_TOOL_GUID defined in the DSC file.
+ BPDGToolName = None
+ for ToolDef in self.ToolDefinition.values():
+ if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
+ if not ToolDef.has_key("PATH"):
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
+ BPDGToolName = ToolDef["PATH"]
+ break
+ # Call third party GUID BPDG tool.
+ if BPDGToolName != None:
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
+ else:
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ # Process VPD map file generated by third party BPDG tool
+ if NeedProcessVpdMapFile:
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ if os.path.exists(VpdMapFilePath):
+ VpdFile.Read(VpdMapFilePath)
+
+ # Fixup "*" offset
+ for Pcd in self._DynamicPcdList:
+ # just pick a value to determine whether it is a unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ if Sku.VpdOffset == "*":
+ Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
+ else:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ # Rebuild the DynamicPcdList in sorted order on the last pass through this function
+ del self._DynamicPcdList[:]
+ self._DynamicPcdList.extend(UnicodePcdArray)
+ self._DynamicPcdList.extend(HiiPcdArray)
+ self._DynamicPcdList.extend(OtherPcdArray)
+
+
+ ## Return the platform build data object
+ def _GetPlatform(self):
+ if self._Platform == None:
+ self._Platform = self.BuildDatabase[self.MetaFile, self.Arch]
+ return self._Platform
+
+ ## Return platform name
+ def _GetName(self):
+ return self.Platform.PlatformName
+
+ ## Return the meta file GUID
+ def _GetGuid(self):
+ return self.Platform.Guid
+
+ ## Return the platform version
+ def _GetVersion(self):
+ return self.Platform.Version
+
+ ## Return the FDF file name
+ def _GetFdfFile(self):
+ if self._FdfFile == None:
+ if self.Workspace.FdfFile != "":
+ self._FdfFile= path.join(self.WorkspaceDir, self.Workspace.FdfFile)
+ else:
+ self._FdfFile = ''
+ return self._FdfFile
+
+ ## Return the build output directory platform specifies
+ def _GetOutputDir(self):
+ return self.Platform.OutputDirectory
+
+ ## Return the directory to store all intermediate and final files built
+ def _GetBuildDir(self):
+ if self._BuildDir == None:
+ if os.path.isabs(self.OutputDir):
+ self._BuildDir = path.join(
+ path.abspath(self.OutputDir),
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ else:
+ self._BuildDir = path.join(
+ self.WorkspaceDir,
+ self.OutputDir,
+ self.BuildTarget + "_" + self.ToolChain,
+ )
+ return self._BuildDir
+
+ ## Return directory of platform makefile
+ #
+ # @retval string Makefile directory
+ #
+ def _GetMakeFileDir(self):
+ if self._MakeFileDir == None:
+ self._MakeFileDir = path.join(self.BuildDir, self.Arch)
+ return self._MakeFileDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ def _GetBuildCommand(self):
+ if self._BuildCommand == None:
+ self._BuildCommand = []
+ if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
+ self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])
+ if "FLAGS" in self.ToolDefinition["MAKE"]:
+ NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
+ if NewOption != '':
+ self._BuildCommand += SplitOption(NewOption)
+ return self._BuildCommand
+
+ ## Get tool chain definition
+ #
+ # Get each tool definition for the given tool chain from tools_def.txt and the platform
+ #
+ def _GetToolDefinition(self):
+ if self._ToolDefinitions == None:
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
+ if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
+ ExtraData="[%s]" % self.MetaFile)
+ self._ToolDefinitions = {}
+ DllPathList = set()
+ for Def in ToolDefinition:
+ Target, Tag, Arch, Tool, Attr = Def.split("_")
+ if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
+ continue
+
+ Value = ToolDefinition[Def]
+ # don't record the DLL
+ if Attr == "DLL":
+ DllPathList.add(Value)
+ continue
+
+ if Tool not in self._ToolDefinitions:
+ self._ToolDefinitions[Tool] = {}
+ self._ToolDefinitions[Tool][Attr] = Value
+
+ ToolsDef = ''
+ MakePath = ''
+ if GlobalData.gOptions.SilentMode and "MAKE" in self._ToolDefinitions:
+ if "FLAGS" not in self._ToolDefinitions["MAKE"]:
+ self._ToolDefinitions["MAKE"]["FLAGS"] = ""
+ self._ToolDefinitions["MAKE"]["FLAGS"] += " -s"
+ MakeFlags = ''
+ for Tool in self._ToolDefinitions:
+ for Attr in self._ToolDefinitions[Tool]:
+ Value = self._ToolDefinitions[Tool][Attr]
+ if Tool in self.BuildOption and Attr in self.BuildOption[Tool]:
+ # check if override is indicated
+ if self.BuildOption[Tool][Attr].startswith('='):
+ Value = self.BuildOption[Tool][Attr][1:]
+ else:
+ Value += " " + self.BuildOption[Tool][Attr]
+
+ if Attr == "PATH":
+ # Don't put MAKE definition in the file
+ if Tool == "MAKE":
+ MakePath = Value
+ else:
+ ToolsDef += "%s = %s\n" % (Tool, Value)
+ elif Attr != "DLL":
+ # Don't put MAKE definition in the file
+ if Tool == "MAKE":
+ if Attr == "FLAGS":
+ MakeFlags = Value
+ else:
+ ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
+ ToolsDef += "\n"
+
+ SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)
+ for DllPath in DllPathList:
+ os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
+ os.environ["MAKE_FLAGS"] = MakeFlags
+
+ return self._ToolDefinitions
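+ # Illustrative sketch (not part of the original source; paths are hypothetical):
+ # the TOOLS_DEF.<arch> file written above holds one "<TOOL> = <path>" line per
+ # PATH attribute and "<TOOL>_<ATTR> = <value>" lines for the other attributes,
+ # e.g.
+ #
+ #   CC = c:\tools\bin\cl.exe
+ #   CC_FLAGS = /nologo /W4
+ #   SLINK = c:\tools\bin\lib.exe
+ #
+ # MAKE is deliberately left out of the file: its path is kept in MakePath and
+ # its FLAGS are exported through the MAKE_FLAGS environment variable, while
+ # DLL attribute values are prepended to the PATH environment variable.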
+
+ ## Return the paths of tools
+ def _GetToolDefFile(self):
+ if self._ToolDefFile == None:
+ self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
+ return self._ToolDefFile
+
+ ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
+ def _GetToolChainFamily(self):
+ if self._ToolChainFamily == None:
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ self._ToolChainFamily = "MSFT"
+ else:
+ self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
+ return self._ToolChainFamily
+
+ def _GetBuildRuleFamily(self):
+ if self._BuildRuleFamily == None:
+ ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
+ if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
+ or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
+ or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:
+ EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
+ % self.ToolChain)
+ self._BuildRuleFamily = "MSFT"
+ else:
+ self._BuildRuleFamily = ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
+ return self._BuildRuleFamily
+
+ ## Return the build options specific for all modules in this platform
+ def _GetBuildOptions(self):
+ if self._BuildOption == None:
+ self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
+ return self._BuildOption
+
+ ## Return the build options specific for EDK modules in this platform
+ def _GetEdkBuildOptions(self):
+ if self._EdkBuildOption == None:
+ self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
+ return self._EdkBuildOption
+
+ ## Return the build options specific for EDKII modules in this platform
+ def _GetEdkIIBuildOptions(self):
+ if self._EdkIIBuildOption == None:
+ self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
+ return self._EdkIIBuildOption
+
+ ## Parse build_rule.txt in $(WORKSPACE)/Conf/build_rule.txt
+ #
+ # @retval BuildRule object
+ #
+ def _GetBuildRule(self):
+ if self._BuildRule == None:
+ BuildRuleFile = None
+ if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
+ BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
+ if BuildRuleFile in [None, '']:
+ BuildRuleFile = gBuildRuleFile
+ self._BuildRule = BuildRule(BuildRuleFile)
+ return self._BuildRule
+
+ ## Summarize the packages used by modules in this platform
+ def _GetPackageList(self):
+ if self._PackageList == None:
+ self._PackageList = set()
+ for La in self.LibraryAutoGenList:
+ self._PackageList.update(La.DependentPackageList)
+ for Ma in self.ModuleAutoGenList:
+ self._PackageList.update(Ma.DependentPackageList)
+ self._PackageList = list(self._PackageList)
+ return self._PackageList
+
+ ## Get list of non-dynamic PCDs
+ def _GetNonDynamicPcdList(self):
+ if self._NonDynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
+ return self._NonDynamicPcdList
+
+ ## Get list of dynamic PCDs
+ def _GetDynamicPcdList(self):
+ if self._DynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
+ return self._DynamicPcdList
+
+ ## Generate Token Number for all PCD
+ def _GetPcdTokenNumbers(self):
+ if self._PcdTokenNumber == None:
+ self._PcdTokenNumber = sdict()
+ TokenNumber = 1
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "PEI":
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.DynamicPcdList:
+ if Pcd.Phase == "DXE":
+ EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
+ self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+
+ for Pcd in self.NonDynamicPcdList:
+ self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
+ TokenNumber += 1
+ return self._PcdTokenNumber
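+ # Illustrative sketch (not part of the original source; PCD names are
+ # hypothetical): token numbers are handed out in three passes, PEI-phase
+ # dynamic PCDs first, then DXE-phase dynamic PCDs, then non-dynamic PCDs, e.g.
+ #
+ #   (PcdPeiA,   TokenSpaceGuidA) -> 1
+ #   (PcdPeiB,   TokenSpaceGuidA) -> 2
+ #   (PcdDxeC,   TokenSpaceGuidB) -> 3
+ #   (PcdFixedD, TokenSpaceGuidB) -> 4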
+
+ ## Summarize ModuleAutoGen objects of all modules/libraries to be built for this platform
+ def _GetAutoGenObjectList(self):
+ self._ModuleAutoGenList = []
+ self._LibraryAutoGenList = []
+ for ModuleFile in self.Platform.Modules:
+ Ma = ModuleAutoGen(
+ self.Workspace,
+ ModuleFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.MetaFile
+ )
+ if Ma not in self._ModuleAutoGenList:
+ self._ModuleAutoGenList.append(Ma)
+ for La in Ma.LibraryAutoGenList:
+ if La not in self._LibraryAutoGenList:
+ self._LibraryAutoGenList.append(La)
+
+ ## Summarize ModuleAutoGen objects of all modules to be built for this platform
+ def _GetModuleAutoGenList(self):
+ if self._ModuleAutoGenList == None:
+ self._GetAutoGenObjectList()
+ return self._ModuleAutoGenList
+
+ ## Summarize ModuleAutoGen objects of all libraries to be built for this platform
+ def _GetLibraryAutoGenList(self):
+ if self._LibraryAutoGenList == None:
+ self._GetAutoGenObjectList()
+ return self._LibraryAutoGenList
+
+ ## Test if a module is supported by the platform
+ #
+ # An error will be raised directly if the module or its arch is not supported
+ # by the platform or current configuration
+ #
+ def ValidModule(self, Module):
+ return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances
+
+ ## Resolve the library classes in a module to library instances
+ #
+ # This method will not only resolve library classes but also sort the library
+ # instances according to their dependencies.
+ #
+ # @param Module The module from which the library classes will be resolved
+ #
+ # @retval library_list List of library instances sorted
+ #
+ def ApplyLibraryInstance(self, Module):
+ ModuleType = Module.ModuleType
+
+ # for overriding library instances with module-specific settings
+ PlatformModule = self.Platform.Modules[str(Module)]
+
+ # add forced library instances (specified under LibraryClasses sections)
+ for LibraryClass in self.Platform.LibraryClasses.GetKeys():
+ if LibraryClass.startswith("NULL"):
+ Module.LibraryClasses[LibraryClass] = self.Platform.LibraryClasses[LibraryClass]
+
+ # add forced library instances (specified in module overrides)
+ for LibraryClass in PlatformModule.LibraryClasses:
+ if LibraryClass.startswith("NULL"):
+ Module.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
+
+ # R9 module
+ LibraryConsumerList = [Module]
+ Constructor = []
+ ConsumedByList = sdict()
+ LibraryInstance = sdict()
+
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryClassName in M.LibraryClasses:
+ if LibraryClassName not in LibraryInstance:
+ # override library instance for this module
+ if LibraryClassName in PlatformModule.LibraryClasses:
+ LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
+ else:
+ LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]
+ if LibraryPath == None or LibraryPath == "":
+ LibraryPath = M.LibraryClasses[LibraryClassName]
+ if LibraryPath == None or LibraryPath == "":
+ EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
+ "Instance of library class [%s] is not found" % LibraryClassName,
+ File=self.MetaFile,
+ ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), self.Arch, str(Module)))
+
+ LibraryModule = self.BuildDatabase[LibraryPath, self.Arch]
+ # for those forced library instance (NULL library), add a fake library class
+ if LibraryClassName.startswith("NULL"):
+ LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
+ elif LibraryModule.LibraryClass == None \
+ or len(LibraryModule.LibraryClass) == 0 \
+ or (ModuleType != 'USER_DEFINED'
+ and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
+ # only USER_DEFINED can link against any library instance regardless of its SupModList
+ EdkLogger.error("build", OPTION_MISSING,
+ "Module type [%s] is not supported by library instance [%s]" \
+ % (ModuleType, LibraryPath), File=self.MetaFile,
+ ExtraData="consumed by [%s]" % str(Module))
+
+ LibraryInstance[LibraryClassName] = LibraryModule
+ LibraryConsumerList.append(LibraryModule)
+ EdkLogger.verbose("\t" + str(LibraryClassName) + " : " + str(LibraryModule))
+ else:
+ LibraryModule = LibraryInstance[LibraryClassName]
+
+ if LibraryModule == None:
+ continue
+
+ if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
+ Constructor.append(LibraryModule)
+
+ if LibraryModule not in ConsumedByList:
+ ConsumedByList[LibraryModule] = []
+ # don't add current module itself to consumer list
+ if M != Module:
+ if M in ConsumedByList[LibraryModule]:
+ continue
+ ConsumedByList[LibraryModule].append(M)
+ #
+ # Initialize the sorted output list to the empty set
+ #
+ SortedLibraryList = []
+ #
+ # Q <- Set of all nodes with no incoming edges
+ #
+ LibraryList = [] #LibraryInstance.values()
+ Q = []
+ for LibraryClassName in LibraryInstance:
+ M = LibraryInstance[LibraryClassName]
+ LibraryList.append(M)
+ if ConsumedByList[M] == []:
+ Q.append(M)
+
+ #
+ # start the DAG algorithm
+ #
+ while True:
+ EdgeRemoved = True
+ while Q == [] and EdgeRemoved:
+ EdgeRemoved = False
+ # for each node Item with a Constructor
+ for Item in LibraryList:
+ if Item not in Constructor:
+ continue
+ # for each Node without a constructor with an edge e from Item to Node
+ for Node in ConsumedByList[Item]:
+ if Node in Constructor:
+ continue
+ # remove edge e from the graph if Node has no constructor
+ ConsumedByList[Item].remove(Node)
+ EdgeRemoved = True
+ if ConsumedByList[Item] == []:
+ # insert Item into Q
+ Q.insert(0, Item)
+ break
+ if Q != []:
+ break
+ # DAG is done when there are no more incoming edges for any node
+ if Q == []:
+ break
+
+ # remove node from Q
+ Node = Q.pop()
+ # output Node
+ SortedLibraryList.append(Node)
+
+ # for each node Item with an edge e from Node to Item do
+ for Item in LibraryList:
+ if Node not in ConsumedByList[Item]:
+ continue
+ # remove edge e from the graph
+ ConsumedByList[Item].remove(Node)
+
+ if ConsumedByList[Item] != []:
+ continue
+ # insert Item into Q, if Item has no other incoming edges
+ Q.insert(0, Item)
+
+ #
+ # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
+ #
+ for Item in LibraryList:
+ if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
+ ErrorMessage = "\tconsumed by " + "\n\tconsumed by ".join([str(L) for L in ConsumedByList[Item]])
+ EdkLogger.error("build", BUILD_ERROR, 'Library [%s] with constructors has a cycle' % str(Item),
+ ExtraData=ErrorMessage, File=self.MetaFile)
+ if Item not in SortedLibraryList:
+ SortedLibraryList.append(Item)
+
+ #
+ # Build the list of constructor and destructor names.
+ # The DAG topological sort produces the destructor order, so the list of constructors must be generated in the reverse order.
+ #
+ SortedLibraryList.reverse()
+ return SortedLibraryList
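+ # Illustrative sketch (not part of the original source; library names are
+ # hypothetical): ConsumedByList records, for each library, the libraries that
+ # consume it. If LibA consumes LibB and LibB consumes LibC, the loop above
+ # emits LibA, LibB, LibC, and the final reverse() returns
+ #
+ #   [LibC, LibB, LibA]
+ #
+ # so the constructor of a consumed library is generated before the
+ # constructors of its consumers.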
+
+
+ ## Override PCD setting (type, value, ...)
+ #
+ # @param ToPcd The PCD to be overridden
+ # @param FromPcd The PCD to override from
+ #
+ def _OverridePcd(self, ToPcd, FromPcd, Module=""):
+ #
+ # In case there are PCDs coming from the FDF file which have no type given,
+ # at this point ToPcd.Type has the type found in the dependent
+ # package
+ #
+ if FromPcd != None:
+ if ToPcd.Pending and FromPcd.Type not in [None, '']:
+ ToPcd.Type = FromPcd.Type
+ elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
+ and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
+ if ToPcd.Type.strip() == "DynamicEx":
+ ToPcd.Type = FromPcd.Type
+ elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
+ and ToPcd.Type != FromPcd.Type:
+ EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
+ ExtraData="%s.%s is defined as [%s] in module %s, but as [%s] in platform."\
+ % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName,
+ ToPcd.Type, Module, FromPcd.Type),
+ File=self.MetaFile)
+
+ if FromPcd.MaxDatumSize not in [None, '']:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ if FromPcd.DefaultValue not in [None, '']:
+ ToPcd.DefaultValue = FromPcd.DefaultValue
+ if FromPcd.TokenValue not in [None, '']:
+ ToPcd.TokenValue = FromPcd.TokenValue
+ if FromPcd.MaxDatumSize not in [None, '']:
+ ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
+ if FromPcd.DatumType not in [None, '']:
+ ToPcd.DatumType = FromPcd.DatumType
+ if FromPcd.SkuInfoList not in [None, '', []]:
+ ToPcd.SkuInfoList = FromPcd.SkuInfoList
+
+ # check the validity of the datum
+ IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
+ if not IsValid:
+ EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
+ ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName))
+
+ if ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
+ % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName))
+ Value = ToPcd.DefaultValue
+ if Value in [None, '']:
+ ToPcd.MaxDatumSize = 1
+ elif Value[0] == 'L':
+ ToPcd.MaxDatumSize = str(len(Value) * 2)
+ elif Value[0] == '{':
+ ToPcd.MaxDatumSize = str(len(Value.split(',')))
+ else:
+ ToPcd.MaxDatumSize = str(len(Value))
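+ # Illustrative sketch (not part of the original source): with the rules above
+ # a missing MaxDatumSize is derived from the default value, e.g.
+ #
+ #   DefaultValue = 'L"Boot"'      ->  MaxDatumSize = "14"  (len('L"Boot"') * 2)
+ #   DefaultValue = '{0x01, 0x02}' ->  MaxDatumSize = "2"   (number of bytes listed)
+ #   DefaultValue = '"abc"'        ->  MaxDatumSize = "5"   (len('"abc"'))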
+
+ # apply the default SKU for dynamic PCDs if the specified one is not available
+ if (ToPcd.Type in PCD_DYNAMIC_TYPE_LIST or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_LIST) \
+ and ToPcd.SkuInfoList in [None, {}, '']:
+ if self.Platform.SkuName in self.Platform.SkuIds:
+ SkuName = self.Platform.SkuName
+ else:
+ SkuName = 'DEFAULT'
+ ToPcd.SkuInfoList = {
+ SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName], '', '', '', '', '', ToPcd.DefaultValue)
+ }
+
+ ## Apply PCD settings defined in the platform to a module
+ #
+ # @param Module The module whose PCD settings will be overridden
+ #
+ # @retval PCD_list The list of PCDs with settings from the platform
+ #
+ def ApplyPcdSetting(self, Module, Pcds):
+ # for each PCD in module
+ for Name,Guid in Pcds:
+ PcdInModule = Pcds[Name,Guid]
+ # find out the PCD setting in platform
+ if (Name,Guid) in self.Platform.Pcds:
+ PcdInPlatform = self.Platform.Pcds[Name,Guid]
+ else:
+ PcdInPlatform = None
+ # then override the settings if any
+ self._OverridePcd(PcdInModule, PcdInPlatform, Module)
+ # resolve the VariableGuid value
+ for SkuId in PcdInModule.SkuInfoList:
+ Sku = PcdInModule.SkuInfoList[SkuId]
+ if Sku.VariableGuid == '': continue
+ Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList)
+ if Sku.VariableGuidValue == None:
+ PackageList = "\n\t".join([str(P) for P in self.PackageList])
+ EdkLogger.error(
+ 'build',
+ RESOURCE_NOT_AVAILABLE,
+ "Value of GUID [%s] is not found in" % Sku.VariableGuid,
+ ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
+ % (Guid, Name, str(Module)),
+ File=self.MetaFile
+ )
+
+ # override PCD settings with module specific setting
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ for Key in PlatformModule.Pcds:
+ if Key in Pcds:
+ self._OverridePcd(Pcds[Key], PlatformModule.Pcds[Key], Module)
+ return Pcds.values()
+
+ ## Resolve library names to library modules
+ #
+ # (for R8.x modules)
+ #
+ # @param Module The module from which the library names will be resolved
+ #
+ # @retval library_list The list of library modules
+ #
+ def ResolveLibraryReference(self, Module):
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
+ LibraryConsumerList = [Module]
+
+ # "CompilerStub" is a must for R8 modules
+ if Module.Libraries:
+ Module.Libraries.append("CompilerStub")
+ LibraryList = []
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryName in M.Libraries:
+ Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
+ if Library == None:
+ for Key in self.Platform.LibraryClasses.data.keys():
+ if LibraryName.upper() == Key.upper():
+ Library = self.Platform.LibraryClasses[Key, ':dummy:']
+ break
+ if Library == None:
+ EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
+ ExtraData="\t%s [%s]" % (str(Module), self.Arch))
+ continue
+
+ if Library not in LibraryList:
+ LibraryList.append(Library)
+ LibraryConsumerList.append(Library)
+ EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))
+ return LibraryList
+
+ ## Calculate the priority value of the build option
+ #
+ # @param Key Build option definition in the form TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ # @retval Value Priority value based on the priority list.
+ #
+ def CalculatePriorityValue(self, Key):
+ Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
+ PriorityValue = 0x11111
+ if Target == "*":
+ PriorityValue &= 0x01111
+ if ToolChain == "*":
+ PriorityValue &= 0x10111
+ if Arch == "*":
+ PriorityValue &= 0x11011
+ if CommandType == "*":
+ PriorityValue &= 0x11101
+ if Attr == "*":
+ PriorityValue &= 0x11110
+
+ return self.PrioList["0x%0.5x"%PriorityValue]
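+ # Illustrative sketch (not part of the original source; the tool chain tag is
+ # hypothetical): for the key "*_*_IA32_CC_FLAGS" the TARGET and TOOLCHAIN
+ # fields are wildcards, so the mask becomes
+ #
+ #   0x11111 & 0x01111 & 0x10111 == 0x00111
+ #
+ # and the returned priority is 13, while a fully qualified key such as
+ # "DEBUG_MYTOOLS_IA32_CC_FLAGS" keeps 0x11111 and gets the highest priority, 16.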
+
+
+ ## Expand * in build option key
+ #
+ # @param Options Options to be expanded
+ #
+ # @retval options Options expanded
+ #
+ def _ExpandBuildOption(self, Options, ModuleStyle=None):
+ BuildOptions = {}
+ FamilyMatch = False
+ FamilyIsNull = True
+
+ OverrideList = {}
+ #
+ # Construct a list containing the build options which need to be overridden.
+ #
+ for Key in Options:
+ #
+ # Key[0] -- tool family
+ # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
+ #
+ if Key[0] == self.BuildRuleFamily :
+ Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
+ if Target == self.BuildTarget or Target == "*":
+ if ToolChain == self.ToolChain or ToolChain == "*":
+ if Arch == self.Arch or Arch == "*":
+ if Options[Key].startswith("="):
+ if OverrideList.get(Key[1]) != None:
+ OverrideList.pop(Key[1])
+ OverrideList[Key[1]] = Options[Key]
+
+ #
+ # Use the highest priority value.
+ #
+ if (len(OverrideList) >= 2):
+ KeyList = OverrideList.keys()
+ for Index in range(len(KeyList)):
+ NowKey = KeyList[Index]
+ Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
+ for Index1 in range(len(KeyList) - Index - 1):
+ NextKey = KeyList[Index1 + Index + 1]
+ #
+ # Compare two Keys; if one is included by the other, keep the one with the higher priority
+ #
+ Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
+ if Target1 == Target2 or Target1 == "*" or Target2 == "*":
+ if ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*":
+ if Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*":
+ if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
+ if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
+ if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
+ if Options.get((self.BuildRuleFamily, NextKey)) != None:
+ Options.pop((self.BuildRuleFamily, NextKey))
+ else:
+ if Options.get((self.BuildRuleFamily, NowKey)) != None:
+ Options.pop((self.BuildRuleFamily, NowKey))
+
+
+ for Key in Options:
+ if ModuleStyle != None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Tool in self.ToolDefinition and Family != "":
+ FamilyIsNull = False
+ if self.ToolDefinition[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
+ if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
+ continue
+ elif Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
+ continue
+ FamilyMatch = True
+ # expand any wildcard
+ if Target == "*" or Target == self.BuildTarget:
+ if Tag == "*" or Tag == self.ToolChain:
+ if Arch == "*" or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool]:
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ # The build option family has been checked and needn't be checked again.
+ if FamilyMatch or FamilyIsNull:
+ return BuildOptions
+
+ for Key in Options:
+ if ModuleStyle != None and len (Key) > 2:
+ # Check Module style is EDK or EDKII.
+ # Only append build option for the matched style module.
+ if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
+ continue
+ elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
+ continue
+ Family = Key[0]
+ Target, Tag, Arch, Tool, Attr = Key[1].split("_")
+ # if tool chain family doesn't match, skip it
+ if Tool not in self.ToolDefinition or Family =="":
+ continue
+ # option has been added before
+ if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
+ continue
+
+ # expand any wildcard
+ if Target == "*" or Target == self.BuildTarget:
+ if Tag == "*" or Tag == self.ToolChain:
+ if Arch == "*" or Arch == self.Arch:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+ if Attr != "FLAGS" or Attr not in BuildOptions[Tool]:
+ BuildOptions[Tool][Attr] = Options[Key]
+ else:
+ # append options for the same tool
+ BuildOptions[Tool][Attr] += " " + Options[Key]
+ return BuildOptions
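+ # Illustrative sketch (not part of the original source; the option value is
+ # hypothetical): for an IA32 build a [BuildOptions] entry keyed
+ # ("MSFT", "*_*_IA32_CC_FLAGS") expands into the per-tool dictionary
+ #
+ #   {"CC": {"FLAGS": "/D MDEPKG_NDEBUG"}}
+ #
+ # provided "CC" appears in self.ToolDefinition and its tool chain family
+ # matches; "*" fields match any build target, tool chain tag or arch.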
+
+ ## Append build options in platform to a module
+ #
+ # @param Module The module to which the build options will be appended
+ #
+ # @retval options The options appended with build options in platform
+ #
+ def ApplyBuildOption(self, Module):
+ # Get the different options for the different style module
+ if Module.AutoGenVersion < 0x00010005:
+ PlatformOptions = self.EdkBuildOption
+ else:
+ PlatformOptions = self.EdkIIBuildOption
+ ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
+ if Module in self.Platform.Modules:
+ PlatformModule = self.Platform.Modules[str(Module)]
+ PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
+ else:
+ PlatformModuleOptions = {}
+
+ AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() + PlatformModuleOptions.keys() + self.ToolDefinition.keys())
+ BuildOptions = {}
+ for Tool in AllTools:
+ if Tool not in BuildOptions:
+ BuildOptions[Tool] = {}
+
+ for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, PlatformModuleOptions]:
+ if Tool not in Options:
+ continue
+ for Attr in Options[Tool]:
+ Value = Options[Tool][Attr]
+ if Attr not in BuildOptions[Tool]:
+ BuildOptions[Tool][Attr] = ""
+ # check if override is indicated
+ if Value.startswith('='):
+ BuildOptions[Tool][Attr] = Value[1:]
+ else:
+ BuildOptions[Tool][Attr] += " " + Value
+ if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
+ #
+ # Override UNI flag only for EDK module.
+ #
+ if 'BUILD' not in BuildOptions:
+ BuildOptions['BUILD'] = {}
+ BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag
+ return BuildOptions
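+ # Illustrative sketch (not part of the original source): ApplyBuildOption merges
+ # option values in the order tools_def -> module -> platform -> platform module
+ # override, appending by default; a value beginning with "=" replaces whatever
+ # has been accumulated so far, so a platform-level CC FLAGS entry of "=/Od"
+ # discards the module's own CC flags instead of being appended to them.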
+
+ Platform = property(_GetPlatform)
+ Name = property(_GetName)
+ Guid = property(_GetGuid)
+ Version = property(_GetVersion)
+
+ OutputDir = property(_GetOutputDir)
+ BuildDir = property(_GetBuildDir)
+ MakeFileDir = property(_GetMakeFileDir)
+ FdfFile = property(_GetFdfFile)
+
+ PcdTokenNumber = property(_GetPcdTokenNumbers) # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
+ DynamicPcdList = property(_GetDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+ NonDynamicPcdList = property(_GetNonDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
+ PackageList = property(_GetPackageList)
+
+ ToolDefinition = property(_GetToolDefinition) # toolcode : tool path
+ ToolDefinitionFile = property(_GetToolDefFile) # toolcode : lib path
+ ToolChainFamily = property(_GetToolChainFamily)
+ BuildRuleFamily = property(_GetBuildRuleFamily)
+ BuildOption = property(_GetBuildOptions) # toolcode : option
+ EdkBuildOption = property(_GetEdkBuildOptions) # edktoolcode : option
+ EdkIIBuildOption = property(_GetEdkIIBuildOptions) # edkiitoolcode : option
+
+ BuildCommand = property(_GetBuildCommand)
+ BuildRule = property(_GetBuildRule)
+ ModuleAutoGenList = property(_GetModuleAutoGenList)
+ LibraryAutoGenList = property(_GetLibraryAutoGenList)
+
+## ModuleAutoGen class
+#
+# This class encapsulates the AutoGen behaviors for the build tools. In addition to
+# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
+# to the [depex] section in module's inf file.
+#
+class ModuleAutoGen(AutoGen):
+ ## The real constructor of ModuleAutoGen
+ #
+ # This method is not supposed to be called by users of ModuleAutoGen. It's
+ # only used by factory method __new__() to do real initialization work for an
+ # object of ModuleAutoGen
+ #
+ # @param Workspace EdkIIWorkspaceBuild object
+ # @param ModuleFile The path of module file
+ # @param Target Build target (DEBUG, RELEASE)
+ # @param Toolchain Name of tool chain
+ # @param Arch The arch the module supports
+ # @param PlatformFile Platform meta-file
+ #
+ def _Init(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
+ GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
+
+ self.Workspace = Workspace
+ self.WorkspaceDir = Workspace.WorkspaceDir
+
+ self.MetaFile = ModuleFile
+ self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)
+ # check if this module is employed by active platform
+ if not self.PlatformInfo.ValidModule(self.MetaFile):
+ EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
+ % (self.MetaFile, Arch))
+ return False
+
+ self.SourceDir = self.MetaFile.SubDir
+ self.SourceOverrideDir = None
+ # use the overridden path defined in the DSC file
+ if self.MetaFile.Key in GlobalData.gOverrideDir:
+ self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]
+
+ self.ToolChain = Toolchain
+ self.BuildTarget = Target
+ self.Arch = Arch
+ self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
+ self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
+
+ self.IsMakeFileCreated = False
+ self.IsCodeFileCreated = False
+
+ self.BuildDatabase = self.Workspace.BuildDatabase
+
+ self._Module = None
+ self._Name = None
+ self._Guid = None
+ self._Version = None
+ self._ModuleType = None
+ self._ComponentType = None
+ self._PcdIsDriver = None
+ self._AutoGenVersion = None
+ self._LibraryFlag = None
+ self._CustomMakefile = None
+ self._Macro = None
+
+ self._BuildDir = None
+ self._OutputDir = None
+ self._DebugDir = None
+ self._MakeFileDir = None
+
+ self._IncludePathList = None
+ self._AutoGenFileList = None
+ self._UnicodeFileList = None
+ self._SourceFileList = None
+ self._ObjectFileList = None
+ self._BinaryFileList = None
+
+ self._DependentPackageList = None
+ self._DependentLibraryList = None
+ self._LibraryAutoGenList = None
+ self._DerivedPackageList = None
+ self._ModulePcdList = None
+ self._LibraryPcdList = None
+ self._GuidList = None
+ self._ProtocolList = None
+ self._PpiList = None
+ self._DepexList = None
+ self._DepexExpressionList = None
+ self._BuildOption = None
+ self._BuildTargets = None
+ self._IntroBuildTargetList = None
+ self._FinalBuildTargetList = None
+ self._FileTypes = None
+ self._BuildRules = None
+
+ return True
+
+ def __repr__(self):
+ return "%s [%s]" % (self.MetaFile, self.Arch)
+
+ # Macros that can be used in build_rule.txt (and in the Makefile)
+ def _GetMacros(self):
+ if self._Macro == None:
+ self._Macro = sdict()
+ self._Macro["WORKSPACE" ] = self.WorkspaceDir
+ self._Macro["MODULE_NAME" ] = self.Name
+ self._Macro["MODULE_GUID" ] = self.Guid
+ self._Macro["MODULE_VERSION" ] = self.Version
+ self._Macro["MODULE_TYPE" ] = self.ModuleType
+ self._Macro["MODULE_FILE" ] = str(self.MetaFile)
+ self._Macro["MODULE_FILE_BASE_NAME" ] = self.MetaFile.BaseName
+ self._Macro["MODULE_RELATIVE_DIR" ] = self.SourceDir
+ self._Macro["MODULE_DIR" ] = self.SourceDir
+
+ self._Macro["BASE_NAME" ] = self.Name
+
+ self._Macro["ARCH" ] = self.Arch
+ self._Macro["TOOLCHAIN" ] = self.ToolChain
+ self._Macro["TOOLCHAIN_TAG" ] = self.ToolChain
+ self._Macro["TARGET" ] = self.BuildTarget
+
+ self._Macro["BUILD_DIR" ] = self.PlatformInfo.BuildDir
+ self._Macro["BIN_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch)
+ self._Macro["LIB_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch)
+ self._Macro["MODULE_BUILD_DIR" ] = self.BuildDir
+ self._Macro["OUTPUT_DIR" ] = self.OutputDir
+ self._Macro["DEBUG_DIR" ] = self.DebugDir
+ return self._Macro
+
+ ## Return the module build data object
+ def _GetModule(self):
+ if self._Module == None:
+ self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch]
+ return self._Module
+
+ ## Return the module name
+ def _GetBaseName(self):
+ return self.Module.BaseName
+
+ ## Return the module SourceOverridePath
+ def _GetSourceOverridePath(self):
+ return self.Module.SourceOverridePath
+
+ ## Return the module meta-file GUID
+ def _GetGuid(self):
+ return self.Module.Guid
+
+ ## Return the module version
+ def _GetVersion(self):
+ return self.Module.Version
+
+ ## Return the module type
+ def _GetModuleType(self):
+ return self.Module.ModuleType
+
+ ## Return the component type (for R8.x style of module)
+ def _GetComponentType(self):
+ return self.Module.ComponentType
+
+ ## Return the build type
+ def _GetBuildType(self):
+ return self.Module.BuildType
+
+ ## Return the PCD_IS_DRIVER setting
+ def _GetPcdIsDriver(self):
+ return self.Module.PcdIsDriver
+
+ ## Return the autogen version, i.e. module meta-file version
+ def _GetAutoGenVersion(self):
+ return self.Module.AutoGenVersion
+
+ ## Check if the module is library or not
+ def _IsLibrary(self):
+ if self._LibraryFlag == None:
+ if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
+ self._LibraryFlag = True
+ else:
+ self._LibraryFlag = False
+ return self._LibraryFlag
+
+ ## Return the directory to store intermediate files of the module
+ def _GetBuildDir(self):
+ if self._BuildDir == None:
+ self._BuildDir = path.join(
+ self.PlatformInfo.BuildDir,
+ self.Arch,
+ self.SourceDir,
+ self.MetaFile.BaseName
+ )
+ CreateDirectory(self._BuildDir)
+ return self._BuildDir
+
+ ## Return the directory to store the intermediate object files of the module
+ def _GetOutputDir(self):
+ if self._OutputDir == None:
+ self._OutputDir = path.join(self.BuildDir, "OUTPUT")
+ CreateDirectory(self._OutputDir)
+ return self._OutputDir
+
+ ## Return the directory to store the auto-generated source files of the module
+ def _GetDebugDir(self):
+ if self._DebugDir == None:
+ self._DebugDir = path.join(self.BuildDir, "DEBUG")
+ CreateDirectory(self._DebugDir)
+ return self._DebugDir
+
+ ## Return the path of the custom makefile
+ def _GetCustomMakefile(self):
+ if self._CustomMakefile == None:
+ self._CustomMakefile = {}
+ for Type in self.Module.CustomMakefile:
+ if Type in gMakeTypeMap:
+ MakeType = gMakeTypeMap[Type]
+ else:
+ MakeType = 'nmake'
+ if self.SourceOverrideDir != None:
+ File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
+ if not os.path.exists(File):
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
+ else:
+ File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
+ self._CustomMakefile[MakeType] = File
+ return self._CustomMakefile
+
+ ## Return the directory of the makefile
+ #
+ # @retval string The directory string of module's makefile
+ #
+ def _GetMakeFileDir(self):
+ return self.BuildDir
+
+ ## Return build command string
+ #
+ # @retval string Build command string
+ #
+ def _GetBuildCommand(self):
+ return self.PlatformInfo.BuildCommand
+
+ ## Get object list of all packages the module and its dependent libraries belong to
+ #
+ # @retval list The list of package object
+ #
+ def _GetDerivedPackageList(self):
+ PackageList = []
+ for M in [self.Module] + self.DependentLibraryList:
+ for Package in M.Packages:
+ if Package in PackageList:
+ continue
+ PackageList.append(Package)
+ return PackageList
+
+ ## Merge dependency expression
+ #
+ # @retval list The token list of the dependency expression after parsing
+ #
+ def _GetDepexTokenList(self):
+ if self._DepexList == None:
+ self._DepexList = {}
+ if self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return self._DepexList
+
+ self._DepexList[self.ModuleType] = []
+
+ for ModuleType in self._DepexList:
+ DepexList = self._DepexList[ModuleType]
+ #
+ # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
+ #
+ for M in [self.Module] + self.DependentLibraryList:
+ Inherited = False
+ for D in M.Depex[self.Arch, ModuleType]:
+ if DepexList != []:
+ DepexList.append('AND')
+ DepexList.append('(')
+ DepexList.extend(D)
+ if DepexList[-1] == 'END': # no need for an END at this point
+ DepexList.pop()
+ DepexList.append(')')
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
+ if 'BEFORE' in DepexList or 'AFTER' in DepexList:
+ break
+ if len(DepexList) > 0:
+ EdkLogger.verbose('')
+ return self._DepexList
+
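The merge above wraps each contributor's dependency expression in parentheses, drops a trailing END token (END is emitted once when the .depex file is generated), and joins the contributions with AND. A self-contained sketch of that token-level merge, using made-up GUID token names rather than real INF [Depex] content:

# Sketch of the depex token merge performed by _GetDepexTokenList.
def merge_depex(contributions):
    merged = []
    for tokens in contributions:
        if not tokens:
            continue
        if merged:
            merged.append('AND')
        merged.append('(')
        merged.extend(tokens)
        if merged[-1] == 'END':                       # END is re-added later, when the file is written
            merged.pop()
        merged.append(')')
    return merged

module_depex  = ['gEfiPcdProtocolGuid', 'END']
library_depex = ['gEfiVariableArchProtocolGuid']
print(merge_depex([module_depex, library_depex]))
# ['(', 'gEfiPcdProtocolGuid', ')', 'AND', '(', 'gEfiVariableArchProtocolGuid', ')']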
+ ## Merge dependency expression
+ #
+ # @retval list The token list of the dependency expression after parsing
+ #
+ def _GetDepexExpressionTokenList(self):
+ if self._DepexExpressionList == None:
+ self._DepexExpressionList = {}
+ if self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
+ return self._DepexExpressionList
+
+ self._DepexExpressionList[self.ModuleType] = ''
+
+ for ModuleType in self._DepexExpressionList:
+ DepexExpressionList = self._DepexExpressionList[ModuleType]
+ #
+ # Append depex from dependent libraries, unless it is a "BEFORE" or "AFTER" expression
+ #
+ for M in [self.Module] + self.DependentLibraryList:
+ Inherited = False
+ for D in M.DepexExpression[self.Arch, ModuleType]:
+ if DepexExpressionList != '':
+ DepexExpressionList += ' AND '
+ DepexExpressionList += '('
+ DepexExpressionList += D
+ DepexExpressionList = DepexExpressionList.rstrip('END').strip()
+ DepexExpressionList += ')'
+ Inherited = True
+ if Inherited:
+ EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionList))
+ if 'BEFORE' in DepexExpressionList or 'AFTER' in DepexExpressionList:
+ break
+ if len(DepexExpressionList) > 0:
+ EdkLogger.verbose('')
+ self._DepexExpressionList[ModuleType] = DepexExpressionList
+ return self._DepexExpressionList
+
+ ## Return the list of specification versions required for the module
+ #
+ # @retval list The list of specification defined in module file
+ #
+ def _GetSpecification(self):
+ return self.Module.Specification
+
+ ## Tool option for the module build
+ #
+ # @param PlatformInfo The object of PlatformBuildInfo
+ # @retval dict The dict containing valid options
+ #
+ def _GetModuleBuildOption(self):
+ if self._BuildOption == None:
+ self._BuildOption = self.PlatformInfo.ApplyBuildOption(self.Module)
+ return self._BuildOption
+
+ ## Return a list of files which can be built from source
+ #
+ # What kind of files can be built is determined by build rules in
+ # $(WORKSPACE)/Conf/build_rule.txt and toolchain family.
+ #
+ def _GetSourceFileList(self):
+ if self._SourceFileList == None:
+ self._SourceFileList = []
+ for F in self.Module.Sources:
+ # match tool chain
+ if F.TagName not in ("", "*", self.ToolChain):
+ EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
+ "but [%s] is needed" % (F.TagName, str(F), self.ToolChain))
+ continue
+ # match tool chain family
+ if F.ToolChainFamily not in ("", "*", self.ToolChainFamily):
+ EdkLogger.debug(
+ EdkLogger.DEBUG_0,
+ "The file [%s] must be built by tools of [%s], " \
+ "but current toolchain family is [%s]" \
+ % (str(F), F.ToolChainFamily, self.ToolChainFamily))
+ continue
+
+ # add the file's directory into the include search path list
+ if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:
+ self.IncludePathList.insert(0, F.Dir)
+ self._SourceFileList.append(F)
+ self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
+ return self._SourceFileList
+
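The filtering above only keeps sources whose TagName and ToolChainFamily are empty, "*", or equal to the active toolchain settings. A tiny sketch of that selection, with invented file entries:

# Sketch of the per-source toolchain filtering in _GetSourceFileList.
# Each tuple is (file name, TagName, ToolChainFamily); values are examples only.
def select_sources(sources, tool_chain, tool_chain_family):
    selected = []
    for name, tag, family in sources:
        if tag not in ("", "*", tool_chain):
            continue
        if family not in ("", "*", tool_chain_family):
            continue
        selected.append(name)
    return selected

print(select_sources(
    [("Foo.c", "*", "*"), ("FooGcc.S", "*", "GCC"), ("FooMsft.asm", "*", "MSFT")],
    "MYTOOLS", "MSFT"))
# ['Foo.c', 'FooMsft.asm']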
+ ## Return the list of unicode files
+ def _GetUnicodeFileList(self):
+ if self._UnicodeFileList == None:
+ if TAB_UNICODE_FILE in self.FileTypes:
+ self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
+ else:
+ self._UnicodeFileList = []
+ return self._UnicodeFileList
+
+ ## Return a list of files which can be built from binary
+ #
+ # "Build" binary files are just to copy them to build directory.
+ #
+ # @retval list The list of files which can be built later
+ #
+ def _GetBinaryFiles(self):
+ if self._BinaryFileList == None:
+ self._BinaryFileList = []
+ for F in self.Module.Binaries:
+ if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
+ continue
+ self._BinaryFileList.append(F)
+ self._ApplyBuildRule(F, F.Type)
+ return self._BinaryFileList
+
+ def _GetBuildRules(self):
+ if self._BuildRules == None:
+ BuildRules = {}
+ BuildRuleDatabase = self.PlatformInfo.BuildRule
+ for Type in BuildRuleDatabase.FileTypeList:
+ #first try getting build rule by BuildRuleFamily
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
+ if not RuleObject:
+ # build type is always module type, but ...
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
+ #second try getting build rule by ToolChainFamily
+ if not RuleObject:
+ RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ # build type is always module type, but ...
+ if self.ModuleType != self.BuildType:
+ RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
+ if not RuleObject:
+ continue
+ RuleObject = RuleObject.Instantiate(self.Macros)
+ BuildRules[Type] = RuleObject
+ for Ext in RuleObject.SourceFileExtList:
+ BuildRules[Ext] = RuleObject
+ self._BuildRules = BuildRules
+ return self._BuildRules
+
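_GetBuildRules prefers a rule registered for the BuildRuleFamily over one for the ToolChainFamily, and within each family prefers the build type over the module type when the two differ. A hedged sketch of that lookup priority, using a plain dict in place of the tdict-backed rule database:

# Sketch of the rule lookup order used in _GetBuildRules; 'database' stands in for
# BuildRuleDatabase and maps (file type, build type, arch, family) keys to rule objects.
def find_rule(database, file_type, build_type, module_type, arch,
              build_rule_family, tool_chain_family):
    candidates = [
        (file_type, build_type, arch, build_rule_family),
        (file_type, module_type, arch, build_rule_family),
        (file_type, build_type, arch, tool_chain_family),
        (file_type, module_type, arch, tool_chain_family),
    ]
    # When build_type == module_type the duplicate keys are harmless; the real code
    # skips them explicitly.
    for key in candidates:
        rule = database.get(key)
        if rule:
            return rule
    return None

toy_db = {(".c", "DXE_DRIVER", "IA32", "MSFT"): "compile-rule"}
print(find_rule(toy_db, ".c", "DXE_DRIVER", "DXE_DRIVER", "IA32", "MSFT", "MSFT"))
# compile-rule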
+ def _ApplyBuildRule(self, File, FileType):
+ if self._BuildTargets == None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = {}
+ self._FileTypes = {}
+
+ LastTarget = None
+ RuleChain = []
+ SourceList = [File]
+ Index = 0
+ while Index < len(SourceList):
+ Source = SourceList[Index]
+ Index = Index + 1
+
+ if Source != File:
+ CreateDirectory(Source.Dir)
+
+ if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
+ RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
+ elif FileType in self.BuildRules:
+ RuleObject = self.BuildRules[FileType]
+ elif Source.Ext in self.BuildRules:
+ RuleObject = self.BuildRules[Source.Ext]
+ else:
+ # stop when no more rules apply
+ if LastTarget:
+ self._FinalBuildTargetList.add(LastTarget)
+ break
+
+ FileType = RuleObject.SourceFileType
+ if FileType not in self._FileTypes:
+ self._FileTypes[FileType] = set()
+ self._FileTypes[FileType].add(Source)
+
+ # stop at STATIC_LIBRARY for library
+ if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
+ if LastTarget:
+ self._FinalBuildTargetList.add(LastTarget)
+ break
+
+ Target = RuleObject.Apply(Source)
+ if not Target:
+ if LastTarget:
+ self._FinalBuildTargetList.add(LastTarget)
+ break
+ elif not Target.Outputs:
+ # a target with no further outputs is a final build target
+ self._FinalBuildTargetList.add(Target)
+
+ if FileType not in self._BuildTargets:
+ self._BuildTargets[FileType] = set()
+ self._BuildTargets[FileType].add(Target)
+
+ if not Source.IsBinary and Source == File:
+ self._IntroBuildTargetList.add(Target)
+
+ # to avoid cyclic rule
+ if FileType in RuleChain:
+ break
+
+ RuleChain.append(FileType)
+ SourceList.extend(Target.Outputs)
+ LastTarget = Target
+ FileType = TAB_UNKNOWN_FILE
+
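_ApplyBuildRule keeps feeding a rule's outputs back through the rule database, so a .c file can chain through .obj up to a library or image, stopping when no rule applies, when a library reaches its static-library stage, or when a file type repeats (the cycle guard). A much-simplified sketch of that chaining, driven by a toy extension-to-extension rule map:

# Simplified walk of the rule chain in _ApplyBuildRule. 'rules' maps a source extension
# to the extension its rule produces; file names and rules are purely illustrative.
def chain_targets(source, rules):
    chain = []
    seen_ext = []
    current = source
    while True:
        ext = "." + current.rsplit(".", 1)[-1]
        if ext not in rules or ext in seen_ext:       # no rule, or cyclic rule: stop
            break
        seen_ext.append(ext)
        current = current.rsplit(".", 1)[0] + rules[ext]
        chain.append(current)
    return chain

# .c -> .obj -> .lib, then no rule for .lib terminates the chain
print(chain_targets("Foo.c", {".c": ".obj", ".obj": ".lib"}))
# ['Foo.obj', 'Foo.lib']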
+ def _GetTargets(self):
+ if self._BuildTargets == None:
+ self._IntroBuildTargetList = set()
+ self._FinalBuildTargetList = set()
+ self._BuildTargets = {}
+ self._FileTypes = {}
+
+ #TRICK: call _GetSourceFileList to apply build rules to source files
+ if self.SourceFileList:
+ pass
+
+ #TRICK: call _GetBinaryFileList to apply build rule for binary files
+ if self.BinaryFileList:
+ pass
+
+ return self._BuildTargets
+
+ def _GetIntroTargetList(self):
+ self._GetTargets()
+ return self._IntroBuildTargetList
+
+ def _GetFinalTargetList(self):
+ self._GetTargets()
+ return self._FinalBuildTargetList
+
+ def _GetFileTypes(self):
+ self._GetTargets()
+ return self._FileTypes
+
+ ## Get the list of package object the module depends on
+ #
+ # @retval list The package object list
+ #
+ def _GetDependentPackageList(self):
+ return self.Module.Packages
+
+ ## Return the list of auto-generated code file
+ #
+ # @retval list The list of auto-generated file
+ #
+ def _GetAutoGenFileList(self):
+ UniStringAutoGenC = True
+ UniStringBinBuffer = None
+ if self.BuildType == 'UEFI_HII':
+ UniStringBinBuffer = StringIO()
+ UniStringAutoGenC = False
+ if self._AutoGenFileList == None:
+ self._AutoGenFileList = {}
+ AutoGenC = TemplateString()
+ AutoGenH = TemplateString()
+ StringH = TemplateString()
+ GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, UniStringAutoGenC, UniStringBinBuffer)
+ if str(AutoGenC) != "" and TAB_C_CODE_FILE in self.FileTypes:
+ AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
+ self._AutoGenFileList[AutoFile] = str(AutoGenC)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(AutoGenH) != "":
+ AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
+ self._AutoGenFileList[AutoFile] = str(AutoGenH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if str(StringH) != "":
+ AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
+ self._AutoGenFileList[AutoFile] = str(StringH)
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":
+ AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
+ self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()
+ AutoFile.IsBinary = True
+ self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
+ if UniStringBinBuffer != None:
+ UniStringBinBuffer.close()
+ return self._AutoGenFileList
+
+ ## Return the list of library modules explicitly or implicitly used by this module
+ def _GetLibraryList(self):
+ if self._DependentLibraryList == None:
+ # only merge library classes and PCD for non-library module
+ if self.IsLibrary:
+ self._DependentLibraryList = []
+ else:
+ if self.AutoGenVersion < 0x00010005:
+ self._DependentLibraryList = self.PlatformInfo.ResolveLibraryReference(self.Module)
+ else:
+ self._DependentLibraryList = self.PlatformInfo.ApplyLibraryInstance(self.Module)
+ return self._DependentLibraryList
+
+ ## Get the list of PCDs from current module
+ #
+ # @retval list The list of PCD
+ #
+ def _GetModulePcdList(self):
+ if self._ModulePcdList == None:
+ # apply PCD settings from platform
+ self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
+ return self._ModulePcdList
+
+ ## Get the list of PCDs from dependent libraries
+ #
+ # @retval list The list of PCD
+ #
+ def _GetLibraryPcdList(self):
+ if self._LibraryPcdList == None:
+ Pcds = {}
+ if not self.IsLibrary:
+ # get PCDs from dependent libraries
+ for Library in self.DependentLibraryList:
+ for Key in Library.Pcds:
+ # skip duplicated PCDs
+ if Key in self.Module.Pcds or Key in Pcds:
+ continue
+ Pcds[Key] = copy.copy(Library.Pcds[Key])
+ # apply PCD settings from platform
+ self._LibraryPcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, Pcds)
+ else:
+ self._LibraryPcdList = []
+ return self._LibraryPcdList
+
+ ## Get the GUID value mapping
+ #
+ # @retval dict The mapping between GUID cname and its value
+ #
+ def _GetGuidList(self):
+ if self._GuidList == None:
+ self._GuidList = self.Module.Guids
+ for Library in self.DependentLibraryList:
+ self._GuidList.update(Library.Guids)
+ return self._GuidList
+
+ ## Get the protocol value mapping
+ #
+ # @retval dict The mapping between protocol cname and its value
+ #
+ def _GetProtocolList(self):
+ if self._ProtocolList == None:
+ self._ProtocolList = self.Module.Protocols
+ for Library in self.DependentLibraryList:
+ self._ProtocolList.update(Library.Protocols)
+ return self._ProtocolList
+
+ ## Get the PPI value mapping
+ #
+ # @retval dict The mapping between PPI cname and its value
+ #
+ def _GetPpiList(self):
+ if self._PpiList == None:
+ self._PpiList = self.Module.Ppis
+ for Library in self.DependentLibraryList:
+ self._PpiList.update(Library.Ppis)
+ return self._PpiList
+
+ ## Get the list of include search path
+ #
+ # @retval list The list path
+ #
+ def _GetIncludePathList(self):
+ if self._IncludePathList == None:
+ self._IncludePathList = []
+ if self.AutoGenVersion < 0x00010005:
+ for Inc in self.Module.Includes:
+ if Inc not in self._IncludePathList:
+ self._IncludePathList.append(Inc)
+ # for r8 modules
+ Inc = path.join(Inc, self.Arch.capitalize())
+ if os.path.exists(Inc) and Inc not in self._IncludePathList:
+ self._IncludePathList.append(Inc)
+ # R8 modules need DEBUG_DIR at the end of the search path and should not always use SOURCE_DIR
+ self._IncludePathList.append(self.DebugDir)
+ else:
+ self._IncludePathList.append(self.MetaFile.Dir)
+ self._IncludePathList.append(self.DebugDir)
+
+ for Package in self.Module.Packages:
+ PackageDir = path.join(self.WorkspaceDir, Package.MetaFile.Dir)
+ if PackageDir not in self._IncludePathList:
+ self._IncludePathList.append(PackageDir)
+ for Inc in Package.Includes:
+ if Inc not in self._IncludePathList:
+ self._IncludePathList.append(str(Inc))
+ return self._IncludePathList
+
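For EDK II modules the search order above is: the module's own directory, its generated DEBUG directory, then each dependent package's directory followed by that package's declared include paths, with duplicates skipped. A short sketch of that ordering, with hypothetical paths:

# Sketch of the EDK II include path ordering built in _GetIncludePathList.
# All paths below are invented examples.
def build_include_paths(module_dir, debug_dir, packages):
    paths = [module_dir, debug_dir]
    for pkg_dir, pkg_includes in packages:
        if pkg_dir not in paths:
            paths.append(pkg_dir)
        for inc in pkg_includes:
            if inc not in paths:
                paths.append(inc)
    return paths

print(build_include_paths(
    "MyPkg/MyDriver",
    "Build/MyPkg/DEBUG_MYTOOLS/IA32/MyPkg/MyDriver/MyDriver/DEBUG",
    [("MdePkg", ["MdePkg/Include"])]))
# module dir, then DEBUG dir, then 'MdePkg', then 'MdePkg/Include'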
+ ## Create makefile for the module and its dependent libraries
+ #
+ # @param CreateLibraryMakeFile Flag indicating whether the makefiles of
+ # dependent libraries will be created
+ #
+ def CreateMakeFile(self, CreateLibraryMakeFile=True):
+ if self.IsMakeFileCreated:
+ return
+
+ if not self.IsLibrary and CreateLibraryMakeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateMakeFile()
+
+ if len(self.CustomMakefile) == 0:
+ Makefile = GenMake.ModuleMakefile(self)
+ else:
+ Makefile = GenMake.CustomMakefile(self)
+ if Makefile.Generate():
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
+ (self.Name, self.Arch))
+
+ self.IsMakeFileCreated = True
+
+ ## Create autogen code for the module and its dependent libraries
+ #
+ # @param CreateLibraryCodeFile Flag indicating whether the code of
+ # dependent libraries will be created
+ #
+ def CreateCodeFile(self, CreateLibraryCodeFile=True):
+ if self.IsCodeFileCreated:
+ return
+
+ if not self.IsLibrary and CreateLibraryCodeFile:
+ for LibraryAutoGen in self.LibraryAutoGenList:
+ LibraryAutoGen.CreateCodeFile()
+
+ AutoGenList = []
+ IgoredAutoGenList = []
+
+ for File in self.AutoGenFileList:
+ if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
+ #Ignore R8 AutoGen.c
+ if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c':
+ continue
+
+ AutoGenList.append(str(File))
+ else:
+ IgoredAutoGenList.append(str(File))
+
+ # Skip the following code for EDK I inf
+ if self.AutoGenVersion < 0x00010005:
+ return
+
+ for ModuleType in self.DepexList:
+ # Ignore empty [depex] section or [depex] section for "USER_DEFINED" module
+ if len(self.DepexList[ModuleType]) == 0 or ModuleType == "USER_DEFINED":
+ continue
+
+ Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
+ DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
+
+ if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
+ AutoGenList.append(str(DpxFile))
+ else:
+ IgoredAutoGenList.append(str(DpxFile))
+
+ if IgoredAutoGenList == []:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
+ (" ".join(AutoGenList), self.Name, self.Arch))
+ elif AutoGenList == []:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
+ (" ".join(IgoredAutoGenList), self.Name, self.Arch))
+ else:
+ EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
+ (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
+
+ self.IsCodeFileCreated = True
+ return AutoGenList
+
+ ## Summarize the ModuleAutoGen objects of all libraries used by this module
+ def _GetLibraryAutoGenList(self):
+ if self._LibraryAutoGenList == None:
+ self._LibraryAutoGenList = []
+ for Library in self.DependentLibraryList:
+ La = ModuleAutoGen(
+ self.Workspace,
+ Library.MetaFile,
+ self.BuildTarget,
+ self.ToolChain,
+ self.Arch,
+ self.PlatformInfo.MetaFile
+ )
+ if La not in self._LibraryAutoGenList:
+ self._LibraryAutoGenList.append(La)
+ for Lib in La.CodaTargetList:
+ self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
+ return self._LibraryAutoGenList
+
+ Module = property(_GetModule)
+ Name = property(_GetBaseName)
+ Guid = property(_GetGuid)
+ Version = property(_GetVersion)
+ ModuleType = property(_GetModuleType)
+ ComponentType = property(_GetComponentType)
+ BuildType = property(_GetBuildType)
+ PcdIsDriver = property(_GetPcdIsDriver)
+ AutoGenVersion = property(_GetAutoGenVersion)
+ Macros = property(_GetMacros)
+ Specification = property(_GetSpecification)
+
+ IsLibrary = property(_IsLibrary)
+
+ BuildDir = property(_GetBuildDir)
+ OutputDir = property(_GetOutputDir)
+ DebugDir = property(_GetDebugDir)
+ MakeFileDir = property(_GetMakeFileDir)
+ CustomMakefile = property(_GetCustomMakefile)
+
+ IncludePathList = property(_GetIncludePathList)
+ AutoGenFileList = property(_GetAutoGenFileList)
+ UnicodeFileList = property(_GetUnicodeFileList)
+ SourceFileList = property(_GetSourceFileList)
+ BinaryFileList = property(_GetBinaryFiles) # FileType : [File List]
+ Targets = property(_GetTargets)
+ IntroTargetList = property(_GetIntroTargetList)
+ CodaTargetList = property(_GetFinalTargetList)
+ FileTypes = property(_GetFileTypes)
+ BuildRules = property(_GetBuildRules)
+
+ DependentPackageList = property(_GetDependentPackageList)
+ DependentLibraryList = property(_GetLibraryList)
+ LibraryAutoGenList = property(_GetLibraryAutoGenList)
+ DerivedPackageList = property(_GetDerivedPackageList)
+
+ ModulePcdList = property(_GetModulePcdList)
+ LibraryPcdList = property(_GetLibraryPcdList)
+ GuidList = property(_GetGuidList)
+ ProtocolList = property(_GetProtocolList)
+ PpiList = property(_GetPpiList)
+ DepexList = property(_GetDepexTokenList)
+ DepexExpressionList = property(_GetDepexExpressionTokenList)
+ BuildOption = property(_GetModuleBuildOption)
+ BuildCommand = property(_GetBuildCommand)
+
+# This acts like the main() function for the script, unless it is 'import'ed into another script.
+if __name__ == '__main__':
+ pass
+
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py new file mode 100644 index 0000000000..73b4a97417 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py @@ -0,0 +1,622 @@ +## @file +# The engine for building files +# +# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR> +# This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import re +import copy +import string + +from Common.GlobalData import * +from Common.BuildToolError import * +from Common.Misc import tdict, PathClass +from Common.String import NormPath +from Common.DataType import * + +import Common.EdkLogger as EdkLogger + +## Convert file type to file list macro name +# +# @param FileType The name of file type +# +# @retval string The name of macro +# +def FileListMacro(FileType): + return "%sS" % FileType.replace("-", "_").upper() + +## Convert file type to list file macro name +# +# @param FileType The name of file type +# +# @retval string The name of macro +# +def ListFileMacro(FileType): + return "%s_LIST" % FileListMacro(FileType) + +class TargetDescBlock(object): + _Cache_ = {} # {TargetFile : TargetDescBlock object} + + # Factory method + def __new__(Class, Inputs, Outputs, Commands, Dependencies): + if Outputs[0] in Class._Cache_: + Tdb = Class._Cache_[Outputs[0]] + for File in Inputs: + Tdb.AddInput(File) + else: + Tdb = super(TargetDescBlock, Class).__new__(Class) + Tdb._Init(Inputs, Outputs, Commands, Dependencies) + #Class._Cache_[Outputs[0]] = Tdb + return Tdb + + def _Init(self, Inputs, Outputs, Commands, Dependencies): + self.Inputs = Inputs + self.Outputs = Outputs + self.Commands = Commands + self.Dependencies = Dependencies + if self.Outputs: + self.Target = self.Outputs[0] + else: + self.Target = None + + def __str__(self): + return self.Target.Path + + def __hash__(self): + return hash(self.Target.Path) + + def __eq__(self, Other): + if type(Other) == type(self): + return Other.Target.Path == self.Target.Path + else: + return str(Other) == self.Target.Path + + def AddInput(self, Input): + if Input not in self.Inputs: + self.Inputs.append(Input) + + def IsMultipleInput(self): + return len(self.Inputs) > 1 + + @staticmethod + def Renew(): + TargetDescBlock._Cache_ = {} + +## Class for one build rule +# +# This represents a build rule which can give out corresponding command list for +# building the given source file(s). The result can be used for generating the +# target for makefile. 
+# +class FileBuildRule: + INC_LIST_MACRO = "INC_LIST" + INC_MACRO = "INC" + + ## constructor + # + # @param Input The dictionary represeting input file(s) for a rule + # @param Output The list represeting output file(s) for a rule + # @param Command The list containing commands to generate the output from input + # + def __init__(self, Type, Input, Output, Command, ExtraDependency=None): + # The Input should not be empty + if not Input: + Input = [] + if not Output: + Output = [] + if not Command: + Command = [] + + self.FileListMacro = FileListMacro(Type) + self.ListFileMacro = ListFileMacro(Type) + self.IncListFileMacro = self.INC_LIST_MACRO + + self.SourceFileType = Type + # source files listed not in "*" or "?" pattern format + if not ExtraDependency: + self.ExtraSourceFileList = [] + else: + self.ExtraSourceFileList = ExtraDependency + + # + # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST. + # If found, generate a file to keep the input files used to get over the + # limitation of command line length + # + self.MacroList = [] + self.CommandList = [] + for CmdLine in Command: + self.MacroList.extend(gMacroPattern.findall(CmdLine)) + # replace path separator with native one + self.CommandList.append(CmdLine) + + # Indicate what should be generated + if self.FileListMacro in self.MacroList: + self.GenFileListMacro = True + else: + self.GenFileListMacro = False + + if self.ListFileMacro in self.MacroList: + self.GenListFile = True + self.GenFileListMacro = True + else: + self.GenListFile = False + + if self.INC_LIST_MACRO in self.MacroList: + self.GenIncListFile = True + else: + self.GenIncListFile = False + + # Check input files + self.IsMultipleInput = False + self.SourceFileExtList = [] + for File in Input: + Base, Ext = os.path.splitext(File) + if Base.find("*") >= 0: + # There's "*" in the file name + self.IsMultipleInput = True + self.GenFileListMacro = True + elif Base.find("?") < 0: + # There's no "*" and "?" 
in file name + self.ExtraSourceFileList.append(File) + continue + if Ext not in self.SourceFileExtList: + self.SourceFileExtList.append(Ext) + + # Check output files + self.DestFileList = [] + for File in Output: + self.DestFileList.append(File) + + # All build targets generated by this rule for a module + self.BuildTargets = {} + + ## str() function support + # + # @retval string + # + def __str__(self): + SourceString = "" + SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList) + DestString = ", ".join(self.DestFileList) + CommandString = "\n\t".join(self.CommandList) + return "%s : %s\n\t%s" % (DestString, SourceString, CommandString) + + ## Check if given file extension is supported by this rule + # + # @param FileExt The extension of a file + # + # @retval True If the extension is supported + # @retval False If the extension is not supported + # + def IsSupported(self, FileExt): + return FileExt in self.SourceFileExtList + + def Instantiate(self, Macros={}): + NewRuleObject = copy.copy(self) + NewRuleObject.BuildTargets = {} + NewRuleObject.DestFileList = [] + for File in self.DestFileList: + NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros))) + return NewRuleObject + + ## Apply the rule to given source file(s) + # + # @param SourceFile One file or a list of files to be built + # @param RelativeToDir The relative path of the source file + # @param PathSeparator Path separator + # + # @retval tuple (Source file in full path, List of individual sourcefiles, Destionation file, List of build commands) + # + def Apply(self, SourceFile): + if not self.CommandList or not self.DestFileList: + return None + + # source file + if self.IsMultipleInput: + SrcFileName = "" + SrcFileBase = "" + SrcFileExt = "" + SrcFileDir = "" + SrcPath = "" + # SourceFile must be a list + SrcFile = "$(%s)" % self.FileListMacro + else: + SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, SourceFile.Ext + if SourceFile.Root: + SrcFileDir = SourceFile.SubDir + if SrcFileDir == "": + SrcFileDir = "." + else: + SrcFileDir = "." 
+ SrcFile = SourceFile.Path + SrcPath = SourceFile.Dir + + # destination file (the first one) + if self.DestFileList: + DestFile = self.DestFileList[0].Path + DestPath = self.DestFileList[0].Dir + DestFileName = self.DestFileList[0].Name + DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext + else: + DestFile = "" + DestPath = "" + DestFileName = "" + DestFileBase = "" + DestFileExt = "" + + BuildRulePlaceholderDict = { + # source file + "src" : SrcFile, + "s_path" : SrcPath, + "s_dir" : SrcFileDir, + "s_name" : SrcFileName, + "s_base" : SrcFileBase, + "s_ext" : SrcFileExt, + # destination file + "dst" : DestFile, + "d_path" : DestPath, + "d_name" : DestFileName, + "d_base" : DestFileBase, + "d_ext" : DestFileExt, + } + + DstFile = [] + for File in self.DestFileList: + File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict) + File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict) + DstFile.append(PathClass(File, IsBinary=True)) + + if DstFile[0] in self.BuildTargets: + TargetDesc = self.BuildTargets[DstFile[0]] + TargetDesc.AddInput(SourceFile) + else: + CommandList = [] + for CommandString in self.CommandList: + CommandString = string.Template(CommandString).safe_substitute(BuildRulePlaceholderDict) + CommandString = string.Template(CommandString).safe_substitute(BuildRulePlaceholderDict) + CommandList.append(CommandString) + TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList) + TargetDesc.ListFileMacro = self.ListFileMacro + TargetDesc.FileListMacro = self.FileListMacro + TargetDesc.IncListFileMacro = self.IncListFileMacro + TargetDesc.GenFileListMacro = self.GenFileListMacro + TargetDesc.GenListFile = self.GenListFile + TargetDesc.GenIncListFile = self.GenIncListFile + self.BuildTargets[DstFile[0]] = TargetDesc + return TargetDesc + +## Class for build rules +# +# BuildRule class parses rules defined in a file or passed by caller, and converts +# the rule into FileBuildRule object. 
+# +class BuildRule: + _SectionHeader = "SECTIONHEADER" + _Section = "SECTION" + _SubSectionHeader = "SUBSECTIONHEADER" + _SubSection = "SUBSECTION" + _InputFile = "INPUTFILE" + _OutputFile = "OUTPUTFILE" + _ExtraDependency = "EXTRADEPENDENCY" + _Command = "COMMAND" + _UnknownSection = "UNKNOWNSECTION" + + _SubSectionList = [_InputFile, _OutputFile, _Command] + + _PATH_SEP = "(+)" + _FileTypePattern = re.compile("^[_a-zA-Z][_\-0-9a-zA-Z]*$") + _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")], + ["$(CP) ${src} ${dst}"], []) + + ## Constructor + # + # @param File The file containing build rules in a well defined format + # @param Content The string list of build rules in a well defined format + # @param LineIndex The line number from which the parsing will begin + # @param SupportedFamily The list of supported tool chain families + # + def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]): + self.RuleFile = File + # Read build rules from file if it's not none + if File != None: + try: + self.RuleContent = open(File, 'r').readlines() + except: + EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File) + elif Content != None: + self.RuleContent = Content + else: + EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given") + + self.SupportedToolChainFamilyList = SupportedFamily + self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object} + self.Ext2FileType = {} # {ext : file-type} + self.FileTypeList = set() + + self._LineIndex = LineIndex + self._State = "" + self._RuleInfo = tdict(True, 2) # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}} + self._FileType = '' + self._BuildTypeList = [] + self._ArchList = [] + self._FamilyList = [] + self._TotalToolChainFamilySet = set() + self._RuleObjectList = [] # FileBuildRule object list + + self.Parse() + + # some intrinsic rules + self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, "COMMON", "COMMON", "COMMON"] = self._BinaryFileRule + self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE) + + ## Parse the build rule strings + def Parse(self): + self._State = self._Section + for Index in range(self._LineIndex, len(self.RuleContent)): + # Clean up the line and replace path separator with native one + Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep) + self.RuleContent[Index] = Line + + # skip empty or comment line + if Line == "" or Line[0] == "#": + continue + + # find out section header, enclosed by [] + if Line[0] == '[' and Line[-1] == ']': + # merge last section information into rule database + self.EndOfSection() + self._State = self._SectionHeader + # find out sub-section header, enclosed by <> + elif Line[0] == '<' and Line[-1] == '>': + if self._State != self._UnknownSection: + self._State = self._SubSectionHeader + + # call section handler to parse each (sub)section + self._StateHandler[self._State](self, Index) + # merge last section information into rule database + self.EndOfSection() + + ## Parse definitions under a section + # + # @param LineIndex The line index of build rule text + # + def ParseSection(self, LineIndex): + pass + + ## Parse definitions under a subsection + # + # @param LineIndex The line index of build rule text + # + def ParseSubSection(self, LineIndex): + # currenly nothing here + pass + + ## Placeholder for not supported sections + # + # @param LineIndex The line index of build rule text + # + def 
SkipSection(self, LineIndex): + pass + + ## Merge section information just got into rule database + def EndOfSection(self): + Database = self.RuleDatabase + # if there's specific toochain family, 'COMMON' doesn't make sense any more + if len(self._TotalToolChainFamilySet) > 1 and 'COMMON' in self._TotalToolChainFamilySet: + self._TotalToolChainFamilySet.remove('COMMON') + for Family in self._TotalToolChainFamilySet: + Input = self._RuleInfo[Family, self._InputFile] + Output = self._RuleInfo[Family, self._OutputFile] + Command = self._RuleInfo[Family, self._Command] + ExtraDependency = self._RuleInfo[Family, self._ExtraDependency] + + BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency) + for BuildType in self._BuildTypeList: + for Arch in self._ArchList: + Database[self._FileType, BuildType, Arch, Family] = BuildRule + for FileExt in BuildRule.SourceFileExtList: + self.Ext2FileType[FileExt] = self._FileType + + ## Parse section header + # + # @param LineIndex The line index of build rule text + # + def ParseSectionHeader(self, LineIndex): + self._RuleInfo = tdict(True, 2) + self._BuildTypeList = [] + self._ArchList = [] + self._FamilyList = [] + self._TotalToolChainFamilySet = set() + FileType = '' + RuleNameList = self.RuleContent[LineIndex][1:-1].split(',') + for RuleName in RuleNameList: + Arch = 'COMMON' + BuildType = 'COMMON' + TokenList = [Token.strip().upper() for Token in RuleName.split('.')] + # old format: Build.File-Type + if TokenList[0] == "BUILD": + if len(TokenList) == 1: + EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + FileType = TokenList[1] + if FileType == '': + EdkLogger.error("build", FORMAT_INVALID, "No file type given", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if self._FileTypePattern.match(FileType) == None: + EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1, + ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type") + # new format: File-Type.Build-Type.Arch + else: + if FileType == '': + FileType = TokenList[0] + elif FileType != TokenList[0]: + EdkLogger.error("build", FORMAT_INVALID, + "Different file types are not allowed in the same rule section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if len(TokenList) > 1: + BuildType = TokenList[1] + if len(TokenList) > 2: + Arch = TokenList[2] + if BuildType not in self._BuildTypeList: + self._BuildTypeList.append(BuildType) + if Arch not in self._ArchList: + self._ArchList.append(Arch) + + if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific build types must not be mixed with common one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if 'COMMON' in self._ArchList and len(self._ArchList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific ARCH must not be mixed with common one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + self._FileType = FileType + self._State = self._Section + self.FileTypeList.add(FileType) + + ## Parse sub-section header + # + # @param LineIndex The line index of build rule text + # + def ParseSubSectionHeader(self, LineIndex): + SectionType = "" + List = self.RuleContent[LineIndex][1:-1].split(',') + FamilyList = [] + for Section in List: + TokenList = 
Section.split('.') + Type = TokenList[0].strip().upper() + + if SectionType == "": + SectionType = Type + elif SectionType != Type: + EdkLogger.error("build", FORMAT_INVALID, + "Two different section types are not allowed in the same sub-section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + if len(TokenList) > 1: + Family = TokenList[1].strip().upper() + else: + Family = "COMMON" + + if Family not in FamilyList: + FamilyList.append(Family) + + self._FamilyList = FamilyList + self._TotalToolChainFamilySet.update(FamilyList) + self._State = SectionType.upper() + if 'COMMON' in FamilyList and len(FamilyList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific tool chain family should not be mixed with general one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if self._State not in self._StateHandler: + EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1, + ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex]) + ## Parse <InputFile> sub-section + # + # @param LineIndex The line index of build rule text + # + def ParseInputFile(self, LineIndex): + FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")] + for ToolChainFamily in self._FamilyList: + InputFiles = self._RuleInfo[ToolChainFamily, self._State] + if InputFiles == None: + InputFiles = [] + self._RuleInfo[ToolChainFamily, self._State] = InputFiles + InputFiles.extend(FileList) + + ## Parse <ExtraDependency> sub-section + # + # @param LineIndex The line index of build rule text + # + def ParseCommon(self, LineIndex): + for ToolChainFamily in self._FamilyList: + Items = self._RuleInfo[ToolChainFamily, self._State] + if Items == None: + Items = [] + self._RuleInfo[ToolChainFamily, self._State] = Items + Items.append(self.RuleContent[LineIndex]) + + ## Get a build rule via [] operator + # + # @param FileExt The extension of a file + # @param ToolChainFamily The tool chain family name + # @param BuildVersion The build version number. "*" means any rule + # is applicalbe. + # + # @retval FileType The file type string + # @retval FileBuildRule The object of FileBuildRule + # + # Key = (FileExt, ModuleType, Arch, ToolChainFamily) + def __getitem__(self, Key): + if not Key: + return None + + if Key[0] in self.Ext2FileType: + Type = self.Ext2FileType[Key[0]] + elif Key[0].upper() in self.FileTypeList: + Type = Key[0].upper() + else: + return None + + if len(Key) > 1: + Key = (Type,) + Key[1:] + else: + Key = (Type,) + return self.RuleDatabase[Key] + + _StateHandler = { + _SectionHeader : ParseSectionHeader, + _Section : ParseSection, + _SubSectionHeader : ParseSubSectionHeader, + _SubSection : ParseSubSection, + _InputFile : ParseInputFile, + _OutputFile : ParseCommon, + _ExtraDependency : ParseCommon, + _Command : ParseCommon, + _UnknownSection : SkipSection, + } + +# This acts like the main() function for the script, unless it is 'import'ed into another +# script. 
+if __name__ == '__main__': + import sys + EdkLogger.Initialize() + if len(sys.argv) > 1: + Br = BuildRule(sys.argv[1]) + print str(Br[".c", "DXE_DRIVER", "IA32", "MSFT"][1]) + print + print str(Br[".c", "DXE_DRIVER", "IA32", "INTEL"][1]) + print + print str(Br[".c", "DXE_DRIVER", "IA32", "GCC"][1]) + print + print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1]) + print + print str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1]) + print + print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1]) + print + print str(Br[".s", "SEC", "IPF", "COMMON"][1]) + print + print str(Br[".s", "SEC"][1]) + diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py new file mode 100644 index 0000000000..3c256c8b74 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenC.py @@ -0,0 +1,2011 @@ +## @file +# Routines for generating AutoGen.h and AutoGen.c +# +# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR> +# This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## Import Modules +# +import string + +from Common import EdkLogger + +from Common.BuildToolError import * +from Common.DataType import * +from Common.Misc import * +from Common.String import StringToArray +from StrGather import * + +## PCD type string +gItemTypeStringDatabase = { + TAB_PCDS_FEATURE_FLAG : 'FixedAtBuild', + TAB_PCDS_FIXED_AT_BUILD : 'FixedAtBuild', + TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch', + TAB_PCDS_DYNAMIC : '', + TAB_PCDS_DYNAMIC_DEFAULT : '', + TAB_PCDS_DYNAMIC_VPD : '', + TAB_PCDS_DYNAMIC_HII : '', + TAB_PCDS_DYNAMIC_EX : '', + TAB_PCDS_DYNAMIC_EX_DEFAULT : '', + TAB_PCDS_DYNAMIC_EX_VPD : '', + TAB_PCDS_DYNAMIC_EX_HII : '', +} + +## Dynamic PCD types +gDynamicPcd = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII] + +## Dynamic-ex PCD types +gDynamicExPcd = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII] + +## Datum size +gDatumSizeStringDatabase = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOLEAN','VOID*':'8'} +gDatumSizeStringDatabaseH = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOL','VOID*':'PTR'} +gDatumSizeStringDatabaseLib = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'Bool','VOID*':'Ptr'} + +## Mapping between PCD driver type and EFI phase +gPcdPhaseMap = { + "PEI_PCD_DRIVER" : "PEI", + "DXE_PCD_DRIVER" : "DXE" +} + +gPcdDatabaseCommonAutoGenH = """ +// +// The following definition will be generated by build tool +// + +// +// Common definitions +// +typedef UINT8 SKU_ID; + +#define PCD_TYPE_SHIFT 28 + +#define PCD_TYPE_DATA (0x0 << PCD_TYPE_SHIFT) +#define PCD_TYPE_HII (0x8 << PCD_TYPE_SHIFT) +#define PCD_TYPE_VPD (0x4 << PCD_TYPE_SHIFT) +#define PCD_TYPE_SKU_ENABLED (0x2 << PCD_TYPE_SHIFT) +#define PCD_TYPE_STRING (0x1 << PCD_TYPE_SHIFT) + +#define PCD_TYPE_ALL_SET (PCD_TYPE_DATA | PCD_TYPE_HII | PCD_TYPE_VPD | PCD_TYPE_SKU_ENABLED | PCD_TYPE_STRING) + +#define PCD_DATUM_TYPE_SHIFT 24 + +#define PCD_DATUM_TYPE_POINTER (0x0 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT8 (0x1 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT16 
(0x2 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT32 (0x4 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT64 (0x8 << PCD_DATUM_TYPE_SHIFT) + +#define PCD_DATUM_TYPE_ALL_SET (PCD_DATUM_TYPE_POINTER | \\ + PCD_DATUM_TYPE_UINT8 | \\ + PCD_DATUM_TYPE_UINT16 | \\ + PCD_DATUM_TYPE_UINT32 | \\ + PCD_DATUM_TYPE_UINT64) + +#define PCD_DATABASE_OFFSET_MASK (~(PCD_TYPE_ALL_SET | PCD_DATUM_TYPE_ALL_SET)) + +typedef struct { + UINT32 ExTokenNumber; + UINT16 LocalTokenNumber; // PCD Number of this particular platform build + UINT16 ExGuidIndex; // Index of GuidTable +} DYNAMICEX_MAPPING; + +typedef struct { + UINT32 SkuDataStartOffset; //We have to use offsetof MACRO as we don't know padding done by compiler + UINT32 SkuIdTableOffset; //Offset from the PCD_DB +} SKU_HEAD; + +typedef struct { + UINT16 GuidTableIndex; // Offset in Guid Table in units of GUID. + UINT16 StringIndex; // Offset in String Table in units of UINT16. + UINT16 Offset; // Offset in Variable + UINT16 DefaultValueOffset; // Offset of the Default Value +} VARIABLE_HEAD; + +typedef struct { + UINT32 Offset; +} VPD_HEAD; + +typedef UINT16 STRING_HEAD; + +typedef UINT16 SIZE_INFO; + +#define offsetof(s,m) (UINT32) (UINTN) &(((s *)0)->m) + +""" + +gPcdDatabaseEpilogueAutoGenH = """ +typedef struct { + PEI_PCD_DATABASE PeiDb; + DXE_PCD_DATABASE DxeDb; +} PCD_DATABASE; + +#define PCD_TOTAL_TOKEN_NUMBER (PEI_LOCAL_TOKEN_NUMBER + DXE_LOCAL_TOKEN_NUMBER) + +""" + +gPcdDatabaseAutoGenH = TemplateString(""" +#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE} +#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE} +#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE} +#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE} +#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER} +#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE} +#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER} +#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE} +#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY} +#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY} +#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY} +#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY} +#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY} + +typedef struct { +${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}]; +${END} +${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64}; +${END} +${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}]; +${END} +${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32}; +${END} +${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}]; +${END} + DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE]; + UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE]; + GUID GuidTable[${PHASE}_GUID_TABLE_SIZE]; +${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}]; +${END} +${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}]; +${END} +${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */ +${END} + SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE]; +${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}]; +${END} +${BEGIN} UINT16 
${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16}; +${END} +${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}]; +${END} +${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8}; +${END} +${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}]; +${END} +${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN}; +${END} + UINT8 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE]; +${SYSTEM_SKU_ID} +} ${PHASE}_PCD_DATABASE_INIT; + +typedef struct { +${PCD_DATABASE_UNINIT_EMPTY} +${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}]; +${END} +${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}]; +${END} +${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}]; +${END} +${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}]; +${END} +${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}]; +${END} +} ${PHASE}_PCD_DATABASE_UNINIT; + +#define PCD_${PHASE}_SERVICE_DRIVER_VERSION 2 + +typedef struct { + ${PHASE}_PCD_DATABASE_INIT Init; + ${PHASE}_PCD_DATABASE_UNINIT Uninit; +} ${PHASE}_PCD_DATABASE; + +#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER) +""") + +gEmptyPcdDatabaseAutoGenC = TemplateString(""" +${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = { + /* ExMapTable */ + { + {0, 0, 0} + }, + /* LocalTokenNumberTable */ + { + 0 + }, + /* GuidTable */ + { + {0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}} + }, + /* StringTable */ + { 0 }, + /* SizeTable */ + { + 0, 0 + }, + /* SkuIdTable */ + { 0 }, + ${SYSTEM_SKU_ID_VALUE} +}; +""") + +gPcdDatabaseAutoGenC = TemplateString(""" +${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = { +${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */ +${END} +${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */ +${END} + /* VPD */ +${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */ +${END} + /* ExMapTable */ + { +${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} }, +${END} + }, + /* LocalTokenNumberTable */ + { +${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, +${END} + }, + /* GuidTable */ + { +${BEGIN} ${GUID_STRUCTURE}, +${END} + }, +${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */ +${END} +${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */ + { + ${VARIABLE_HEAD_VALUE} + }, +${END} + /* StringTable */ +${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */ +${END} + /* SizeTable */ + { +${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, 
${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */ +${END} + }, +${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */ +${END} +${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */ +${END} +${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */ +${END} +${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */ +${END} + /* SkuIdTable */ + { ${BEGIN}${SKUID_VALUE}, ${END} }, + ${SYSTEM_SKU_ID_VALUE} +}; +""") + + +## AutoGen File Header Templates +gAutoGenHeaderString = TemplateString("""\ +/** + DO NOT EDIT + FILE auto-generated + Module name: + ${FileName} + Abstract: Auto-generated ${FileName} for building module or library. +**/ +""") + +gAutoGenHPrologueString = TemplateString(""" +#ifndef _${File}_${Guid} +#define _${File}_${Guid} + +""") + +gAutoGenHEpilogueString = """ +#endif +""" + +## PEI Core Entry Point Templates +gPeiCoreEntryPointPrototype = TemplateString(""" +${BEGIN} +VOID +EFIAPI +${Function} ( + IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData, + IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList, + IN VOID *Context + ); +${END} +""") + +gPeiCoreEntryPointString = TemplateString(""" +${BEGIN} +VOID +EFIAPI +ProcessModuleEntryPointList ( + IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData, + IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList, + IN VOID *Context + ) + +{ + ${Function} (SecCoreData, PpiList, Context); +} +${END} +""") + + +## DXE Core Entry Point Templates +gDxeCoreEntryPointPrototype = TemplateString(""" +${BEGIN} +VOID +EFIAPI +${Function} ( + IN VOID *HobStart + ); +${END} +""") + +gDxeCoreEntryPointString = TemplateString(""" +${BEGIN} +VOID +EFIAPI +ProcessModuleEntryPointList ( + IN VOID *HobStart + ) + +{ + ${Function} (HobStart); +} +${END} +""") + +## PEIM Entry Point Templates +gPeimEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ); +${END} +""") + +gPeimEntryPointString = [ +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + return ${Function} (FileHandle, PeiServices); +} +${END} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + EFI_STATUS Status; + EFI_STATUS CombinedStatus; + + CombinedStatus = EFI_LOAD_ERROR; +${BEGIN} + Status = ${Function} (FileHandle, PeiServices); + if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) { + CombinedStatus = Status; + } 
+${END} + return CombinedStatus; +} +""") +] + +## SMM_CORE Entry Point Templates +gSmmCoreEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gSmmCoreEntryPointString = TemplateString(""" +${BEGIN} +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +""") + +## DXE SMM Entry Point Templates +gDxeSmmEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gDxeSmmEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus; + +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + mDriverEntryPointStatus = EFI_LOAD_ERROR; + +${BEGIN} + if (SetJump (&mJumpContext) == 0) { + ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } +${END} + + return mDriverEntryPointStatus; +} +""") +] + +## UEFI Driver Entry Point Templates +gUefiDriverEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gUefiDriverEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (EFI_ERROR (Status)) { + ProcessLibraryDestructorList (gImageHandle, gST); + } + gBS->Exit (gImageHandle, Status, 0, NULL); +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + mDriverEntryPointStatus = EFI_LOAD_ERROR; + ${BEGIN} + if (SetJump (&mJumpContext) == 0) { + ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } + ${END} + return mDriverEntryPointStatus; +} + 
+VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} +""") +] + + +## UEFI Application Entry Point Templates +gUefiApplicationEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gUefiApplicationEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (EFI_ERROR (Status)) { + ProcessLibraryDestructorList (gImageHandle, gST); + } + gBS->Exit (gImageHandle, Status, 0, NULL); +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${UefiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + ${BEGIN} + if (SetJump (&mJumpContext) == 0) { + ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } + ${END} + return mDriverEntryPointStatus; +} + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR; + +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} +""") +] + +## UEFI Unload Image Templates +gUefiUnloadImagePrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle + ); +${END} +""") + +gUefiUnloadImageString = [ +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +EFI_STATUS +EFIAPI +ProcessModuleUnloadList ( + IN EFI_HANDLE ImageHandle + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleUnloadList ( + IN EFI_HANDLE ImageHandle + ) +{ + return ${Function} (ImageHandle); +} +${END} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +EFI_STATUS +EFIAPI +ProcessModuleUnloadList ( + IN EFI_HANDLE ImageHandle + ) +{ + EFI_STATUS Status; + + Status = EFI_SUCCESS; +${BEGIN} + if (EFI_ERROR (Status)) { + ${Function} (ImageHandle); + } else { + Status = ${Function} (ImageHandle); + } +${END} + return Status; +} +""") +] + +gLibraryStructorPrototype = { +'BASE' : TemplateString("""${BEGIN} +RETURN_STATUS +EFIAPI +${Function} ( + VOID + );${END} +"""), + +'PEI' : TemplateString("""${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + );${END} +"""), + +'DXE' : TemplateString("""${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + );${END} +"""), +} + +gLibraryStructorCall = { +'BASE' : TemplateString("""${BEGIN} + Status = ${Function} (); + 
ASSERT_EFI_ERROR (Status);${END} +"""), + +'PEI' : TemplateString("""${BEGIN} + Status = ${Function} (FileHandle, PeiServices); + ASSERT_EFI_ERROR (Status);${END} +"""), + +'DXE' : TemplateString("""${BEGIN} + Status = ${Function} (ImageHandle, SystemTable); + ASSERT_EFI_ERROR (Status);${END} +"""), +} + +## Library Constructor and Destructor Templates +gLibraryString = { +'BASE' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + VOID + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), + +'PEI' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), + +'DXE' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), +} + +gBasicHeaderFile = "Base.h" + +gModuleTypeHeaderFile = { + "BASE" : [gBasicHeaderFile], + "SEC" : ["PiPei.h", "Library/DebugLib.h"], + "PEI_CORE" : ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"], + "PEIM" : ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"], + "DXE_CORE" : ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"], + "DXE_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_SMM_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_RUNTIME_DRIVER": ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_SAL_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "UEFI_DRIVER" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "UEFI_APPLICATION" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"], + "SMM_CORE" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiDriverEntryPoint.h"], + "USER_DEFINED" : [gBasicHeaderFile] +} + +## Create code for module PCDs +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param Pcd The PCD object +# +def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): + TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[Pcd.TokenSpaceGuidCName] + PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber + # + # Write PCDs + # + PcdTokenName = '_PCD_TOKEN_' + Pcd.TokenCName + if Pcd.Type in gDynamicExPcd: + TokenNumber = int(Pcd.TokenValue, 0) + else: + if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber: + EdkLogger.error("build", AUTOGEN_ERROR, + "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] + AutoGenH.Append('\n#define %s %d\n' % (PcdTokenName, TokenNumber)) + + EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + Pcd.TokenCName + "." 
+ Pcd.TokenSpaceGuidCName) + if Pcd.Type not in gItemTypeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if Pcd.DatumType not in gDatumSizeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + + DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] + DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] + GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + Pcd.TokenCName + SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + Pcd.TokenCName + + if Pcd.Type in gDynamicExPcd: + AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + elif Pcd.Type in gDynamicPcd: + AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName)) + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName)) + else: + PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + Pcd.TokenCName + Const = 'const' + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + Const = '' + Type = '' + Array = '' + Value = Pcd.DefaultValue + Unicode = False + ValueNumber = 0 + if Pcd.DatumType in ['UINT64', 'UINT32', 'UINT16', 'UINT8']: + try: + if Value.upper().startswith('0X'): + ValueNumber = int (Value, 16) + else: + ValueNumber = int (Value) + except: + EdkLogger.error("build", AUTOGEN_ERROR, + "PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if Pcd.DatumType == 'UINT64': + if ValueNumber < 0: + EdkLogger.error("build", AUTOGEN_ERROR, + "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif ValueNumber >= 0x10000000000000000: + EdkLogger.error("build", AUTOGEN_ERROR, + "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if not Value.endswith('ULL'): + Value += 'ULL' + elif Pcd.DatumType == 'UINT32': + if ValueNumber < 0: + EdkLogger.error("build", AUTOGEN_ERROR, + "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif ValueNumber >= 0x100000000: + EdkLogger.error("build", AUTOGEN_ERROR, + "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif Pcd.DatumType == 'UINT16': + if ValueNumber < 0: + EdkLogger.error("build", AUTOGEN_ERROR, + "PCD can't be set to negative value for datum type 
[%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif ValueNumber >= 0x10000: + EdkLogger.error("build", AUTOGEN_ERROR, + "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif Pcd.DatumType == 'UINT8': + if ValueNumber < 0: + EdkLogger.error("build", AUTOGEN_ERROR, + "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + elif ValueNumber >= 0x100: + EdkLogger.error("build", AUTOGEN_ERROR, + "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if Pcd.DatumType == 'VOID*': + if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '': + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + + ArraySize = int(Pcd.MaxDatumSize, 0) + if Value[0] == '{': + Type = '(VOID *)' + else: + if Value[0] == 'L': + Unicode = True + Value = Value.lstrip('L') #.strip('"') + Value = eval(Value) # translate escape character + NewValue = '{' + for Index in range(0,len(Value)): + if Unicode: + NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', ' + else: + NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', ' + if Unicode: + ArraySize = ArraySize / 2; + + if ArraySize < (len(Value) + 1): + EdkLogger.error("build", AUTOGEN_ERROR, + "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + Value = NewValue + '0 }' + Array = '[%d]' % ArraySize + # + # skip casting for fixed at build since it breaks ARM assembly. + # Long term we need PCD macros that work in assembly + # + elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD: + Value = "((%s)%s)" % (Pcd.DatumType, Value) + + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + PcdValueName = '_PCD_PATCHABLE_VALUE_' + Pcd.TokenCName + else: + PcdValueName = '_PCD_VALUE_' + Pcd.TokenCName + + if Pcd.DatumType == 'VOID*': + # + # For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed. 
+ # + if Unicode: + AutoGenH.Append('#define _PCD_PATCHABLE_%s_SIZE %s\n' % (Pcd.TokenCName, Pcd.MaxDatumSize)) + AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value)) + AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName)) + else: + AutoGenH.Append('#define _PCD_PATCHABLE_%s_SIZE %s\n' % (Pcd.TokenCName, Pcd.MaxDatumSize)) + AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value)) + AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName)) + elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value)) + AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName)) + AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName)) + else: + AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName)) + AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName)) + + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtr(_gPcd_BinaryPatch_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName)) + else: + AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName)) + else: + AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName) + +## Create code for library module PCDs +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param Pcd The PCD object +# +def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd): + PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber + TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName + TokenCName = Pcd.TokenCName + TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[TokenSpaceGuidCName] + if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber: + EdkLogger.error("build", AUTOGEN_ERROR, + "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + TokenNumber = PcdTokenNumber[TokenCName, TokenSpaceGuidCName] + + if Pcd.Type not in gItemTypeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if Pcd.DatumType not in gDatumSizeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + + DatumType = Pcd.DatumType + DatumSize = gDatumSizeStringDatabaseH[DatumType] + DatumSizeLib= gDatumSizeStringDatabaseLib[DatumType] + 
GetModeName = '_PCD_GET_MODE_' + DatumSize + '_' + TokenCName + SetModeName = '_PCD_SET_MODE_' + DatumSize + '_' + TokenCName + + Type = '' + Array = '' + if Pcd.DatumType == 'VOID*': + Type = '(VOID *)' + Array = '[]' + + AutoGenH.Append('#define _PCD_TOKEN_%s %d\n' % (TokenCName, TokenNumber)) + + PcdItemType = Pcd.Type + #if PcdItemType in gDynamicPcd: + # PcdItemType = TAB_PCDS_FIXED_AT_BUILD + # if (TokenCName, TokenSpaceGuidCName) in Info.PlatformInfo.Platform.Pcds: + # PcdItemType = Info.PlatformInfo.Platform.Pcds[TokenCName, TokenSpaceGuidCName].Type + if PcdItemType in gDynamicExPcd: + PcdTokenName = '_PCD_TOKEN_' + TokenCName + AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, TokenSpaceGuidCName, PcdTokenName)) + if DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName,DatumSizeLib, TokenSpaceGuidCName, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, TokenSpaceGuidCName, PcdTokenName)) + if PcdItemType in gDynamicPcd: + PcdTokenName = '_PCD_TOKEN_' + TokenCName + AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName)) + if DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName)) + if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE: + PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName + AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array) ) + AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName)) + AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName)) + if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG: + AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array)) + #AutoGenH.Append('#define _PCD_VALUE_%s _gPcd_FixedAtBuild_%s\n' %(TokenCName, TokenCName)) + AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName)) + AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName) + +## Create code for PCD database in DXE or PEI phase +# +# @param Platform The platform object +# @retval tuple Two TemplateString objects for C code and header file, +# respectively +# +def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase): + AutoGenC = TemplateString() + AutoGenH = TemplateString() + + Dict = { + 'PHASE' : Phase, + 'GUID_TABLE_SIZE' : '1', + 'STRING_TABLE_SIZE' : '1', + 'SKUID_TABLE_SIZE' : '1', + 'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '1', + 'LOCAL_TOKEN_NUMBER' : '0', + 'EXMAPPING_TABLE_SIZE' : '1', + 'EX_TOKEN_NUMBER' : '0', + 'SIZE_TABLE_SIZE' : '2', + 'GUID_TABLE_EMPTY' : 'TRUE', + 'STRING_TABLE_EMPTY' : 'TRUE', + 'SKUID_TABLE_EMPTY' : 'TRUE', + 'DATABASE_EMPTY' : 'TRUE', + 'EXMAP_TABLE_EMPTY' : 'TRUE', + 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is emptry */', + 'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;', + 'SYSTEM_SKU_ID_VALUE' : '0' + } + + for DatumType in ['UINT64','UINT32','UINT16','UINT8','BOOLEAN', "VOID*"]: + Dict['VARDEF_CNAME_' + DatumType] = [] + Dict['VARDEF_GUID_' + DatumType] = [] + 
Dict['VARDEF_SKUID_' + DatumType] = [] + Dict['VARDEF_VALUE_' + DatumType] = [] + for Init in ['INIT','UNINIT']: + Dict[Init+'_CNAME_DECL_' + DatumType] = [] + Dict[Init+'_GUID_DECL_' + DatumType] = [] + Dict[Init+'_NUMSKUS_DECL_' + DatumType] = [] + Dict[Init+'_VALUE_' + DatumType] = [] + + for Type in ['STRING_HEAD','VPD_HEAD','VARIABLE_HEAD']: + Dict[Type + '_CNAME_DECL'] = [] + Dict[Type + '_GUID_DECL'] = [] + Dict[Type + '_NUMSKUS_DECL'] = [] + Dict[Type + '_VALUE'] = [] + + Dict['STRING_TABLE_INDEX'] = [] + Dict['STRING_TABLE_LENGTH'] = [] + Dict['STRING_TABLE_CNAME'] = [] + Dict['STRING_TABLE_GUID'] = [] + Dict['STRING_TABLE_VALUE'] = [] + + Dict['SIZE_TABLE_CNAME'] = [] + Dict['SIZE_TABLE_GUID'] = [] + Dict['SIZE_TABLE_CURRENT_LENGTH'] = [] + Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = [] + + Dict['EXMAPPING_TABLE_EXTOKEN'] = [] + Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = [] + Dict['EXMAPPING_TABLE_GUID_INDEX'] = [] + + Dict['GUID_STRUCTURE'] = [] + + Dict['SKUID_VALUE'] = [] + Dict['VARDEF_HEADER'] = [] + if Phase == 'DXE': + Dict['SYSTEM_SKU_ID'] = '' + Dict['SYSTEM_SKU_ID_VALUE'] = '' + + StringTableIndex = 0 + StringTableSize = 0 + NumberOfLocalTokens = 0 + NumberOfPeiLocalTokens = 0 + NumberOfDxeLocalTokens = 0 + NumberOfExTokens = 0 + NumberOfSizeItems = 0 + GuidList = [] + + for Pcd in Platform.DynamicPcdList: + CName = Pcd.TokenCName + TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName + + EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase)) + if Pcd.DatumType not in gDatumSizeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Platform)) + + if Pcd.Phase == 'PEI': + NumberOfPeiLocalTokens += 1 + if Pcd.Phase == 'DXE': + NumberOfDxeLocalTokens += 1 + if Pcd.Phase != Phase: + continue + + # + # TODO: need GetGuidValue() definition + # + TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue + TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure) + if Pcd.Type in gDynamicExPcd: + if TokenSpaceGuid not in GuidList: + GuidList += [TokenSpaceGuid] + Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure) + NumberOfExTokens += 1 + + ValueList = [] + StringHeadOffsetList = [] + VpdHeadOffsetList = [] + VariableHeadValueList = [] + Pcd.InitString = 'UNINIT' + + if Pcd.DatumType == 'VOID*': + if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]: + Pcd.TokenTypeList = ['PCD_TYPE_STRING'] + else: + Pcd.TokenTypeList = [] + elif Pcd.DatumType == 'BOOLEAN': + Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8'] + else: + Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType] + + if len(Pcd.SkuInfoList) > 1: + Pcd.TokenTypeList += ['PCD_TYPE_SKU_ENABLED'] + + for SkuName in Pcd.SkuInfoList: + Sku = Pcd.SkuInfoList[SkuName] + SkuId = Sku.SkuId + if SkuId == None or SkuId == '': + continue + + if SkuId not in Dict['SKUID_VALUE']: + Dict['SKUID_VALUE'].append(SkuId) + + SkuIdIndex = Dict['SKUID_VALUE'].index(SkuId) + if len(Sku.VariableName) > 0: + Pcd.TokenTypeList += ['PCD_TYPE_HII'] + Pcd.InitString = 'INIT' + VariableNameStructure = StringToArray(Sku.VariableName) + if VariableNameStructure not in Dict['STRING_TABLE_VALUE']: + Dict['STRING_TABLE_CNAME'].append(CName) + Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid) + if StringTableIndex == 0: + Dict['STRING_TABLE_INDEX'].append('') + else: + Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex) + + Dict['STRING_TABLE_LENGTH'].append((len(Sku.VariableName) 
- 3 + 1) * 2) + Dict['STRING_TABLE_VALUE'].append(VariableNameStructure) + StringTableIndex += 1 + StringTableSize += (len(Sku.VariableName) - 3 + 1) * 2 + + VariableHeadStringIndex = 0 + for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)): + VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index] + + VariableGuidStructure = Sku.VariableGuidValue + VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure) + if VariableGuid not in GuidList: + GuidList += [VariableGuid] + Dict['GUID_STRUCTURE'].append(VariableGuidStructure) + VariableHeadGuidIndex = GuidList.index(VariableGuid) + + if "PCD_TYPE_STRING" in Pcd.TokenTypeList: + VariableHeadValueList.append('%d, %d, %s, offsetof(%s_PCD_DATABASE, Init.%s_%s)' % + (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, + Phase, CName, TokenSpaceGuid)) + else: + VariableHeadValueList.append('%d, %d, %s, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' % + (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, + Phase, CName, TokenSpaceGuid, SkuIdIndex)) + Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName) + Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid) + Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex) + if "PCD_TYPE_STRING" in Pcd.TokenTypeList: + Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex)) + else: + Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue) + elif Sku.VpdOffset != '': + Pcd.TokenTypeList += ['PCD_TYPE_VPD'] + Pcd.InitString = 'INIT' + VpdHeadOffsetList.append(Sku.VpdOffset) + continue + + if Pcd.DatumType == 'VOID*': + Pcd.TokenTypeList += ['PCD_TYPE_STRING'] + Pcd.InitString = 'INIT' + if Sku.HiiDefaultValue != '' and Sku.DefaultValue == '': + Sku.DefaultValue = Sku.HiiDefaultValue + if Sku.DefaultValue != '': + NumberOfSizeItems += 1 + Dict['STRING_TABLE_CNAME'].append(CName) + Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid) + + if StringTableIndex == 0: + Dict['STRING_TABLE_INDEX'].append('') + else: + Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex) + if Sku.DefaultValue[0] == 'L': + Size = (len(Sku.DefaultValue) - 3 + 1) * 2 + Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue)) + elif Sku.DefaultValue[0] == '"': + Size = len(Sku.DefaultValue) - 2 + 1 + Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue)) + elif Sku.DefaultValue[0] == '{': + Size = len(Sku.DefaultValue.replace(',',' ').split()) + Dict['STRING_TABLE_VALUE'].append(Sku.DefaultValue) + + StringHeadOffsetList.append(str(StringTableSize)) + Dict['SIZE_TABLE_CNAME'].append(CName) + Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid) + Dict['SIZE_TABLE_CURRENT_LENGTH'].append(Size) + Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize) + if Pcd.MaxDatumSize != '': + MaxDatumSize = int(Pcd.MaxDatumSize, 0) + if MaxDatumSize < Size: + EdkLogger.error("build", AUTOGEN_ERROR, + "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." 
% (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Platform)) + Size = MaxDatumSize + Dict['STRING_TABLE_LENGTH'].append(Size) + StringTableIndex += 1 + StringTableSize += (Size) + else: + if "PCD_TYPE_HII" not in Pcd.TokenTypeList: + Pcd.TokenTypeList += ['PCD_TYPE_DATA'] + if Sku.DefaultValue == 'TRUE': + Pcd.InitString = 'INIT' + else: + try: + if int(Sku.DefaultValue, 0) != 0: + Pcd.InitString = 'INIT' + except: + pass + + # + # For UNIT64 type PCD's value, ULL should be append to avoid + # warning under linux building environment. + # + if Pcd.DatumType == "UINT64": + ValueList.append(Sku.DefaultValue + "ULL") + else: + ValueList.append(Sku.DefaultValue) + + Pcd.TokenTypeList = list(set(Pcd.TokenTypeList)) + + + if 'PCD_TYPE_HII' in Pcd.TokenTypeList: + Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName) + Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid) + Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList)) + Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList)) + Dict['VARDEF_HEADER'].append('_Variable_Header') + else: + Dict['VARDEF_HEADER'].append('') + if 'PCD_TYPE_VPD' in Pcd.TokenTypeList: + Dict['VPD_HEAD_CNAME_DECL'].append(CName) + Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid) + Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList)) + Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList)) + if 'PCD_TYPE_STRING' in Pcd.TokenTypeList: + Dict['STRING_HEAD_CNAME_DECL'].append(CName) + Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid) + Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList)) + Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList)) + if 'PCD_TYPE_DATA' in Pcd.TokenTypeList: + Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName) + Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid) + Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList)) + if Pcd.InitString == 'UNINIT': + Dict['PCD_DATABASE_UNINIT_EMPTY'] = '' + else: + Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList)) + + if Phase == 'PEI': + NumberOfLocalTokens = NumberOfPeiLocalTokens + if Phase == 'DXE': + NumberOfLocalTokens = NumberOfDxeLocalTokens + + Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)] + Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)] + Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)] + Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)] + + for Pcd in Platform.DynamicPcdList: + CName = Pcd.TokenCName + TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName + if Pcd.Phase != Phase: + continue + + TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName)) + GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1 + if Phase == 'DXE': + GeneratedTokenNumber -= NumberOfPeiLocalTokens + + EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName)) + EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase) + EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber)) + + Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init' + if Pcd.InitString == 'UNINIT': + Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit' + Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName + Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid + Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList) + if Pcd.Type in gDynamicExPcd: + 
Dict['EXMAPPING_TABLE_EXTOKEN'].append(Pcd.TokenValue) + if Phase == 'DXE': + GeneratedTokenNumber += NumberOfPeiLocalTokens + # + # Per, PCD architecture specification, PCD Token Number is 1 based and 0 is defined as invalid token number. + # For each EX type PCD, a PCD Token Number is assigned. When the + # PCD Driver/PEIM map EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number, + # the non-EX Protocol/PPI interface can be called to get/set the value. This assumption is made by + # Pcd Driver/PEIM in MdeModulePkg. + # Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted + # to the EXMAPPING_TABLE. + # + Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(GeneratedTokenNumber + 1) + Dict['EXMAPPING_TABLE_GUID_INDEX'].append(GuidList.index(TokenSpaceGuid)) + + if GuidList != []: + Dict['GUID_TABLE_EMPTY'] = 'FALSE' + Dict['GUID_TABLE_SIZE'] = len(GuidList) + else: + Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')] + + if StringTableIndex == 0: + Dict['STRING_TABLE_INDEX'].append('') + Dict['STRING_TABLE_LENGTH'].append(1) + Dict['STRING_TABLE_CNAME'].append('') + Dict['STRING_TABLE_GUID'].append('') + Dict['STRING_TABLE_VALUE'].append('{ 0 }') + else: + Dict['STRING_TABLE_EMPTY'] = 'FALSE' + Dict['STRING_TABLE_SIZE'] = StringTableSize + + if Dict['SIZE_TABLE_CNAME'] == []: + Dict['SIZE_TABLE_CNAME'].append('') + Dict['SIZE_TABLE_GUID'].append('') + Dict['SIZE_TABLE_CURRENT_LENGTH'].append(0) + Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(0) + + if NumberOfLocalTokens != 0: + Dict['DATABASE_EMPTY'] = 'FALSE' + Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens + Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens + + if NumberOfExTokens != 0: + Dict['EXMAP_TABLE_EMPTY'] = 'FALSE' + Dict['EXMAPPING_TABLE_SIZE'] = NumberOfExTokens + Dict['EX_TOKEN_NUMBER'] = NumberOfExTokens + else: + Dict['EXMAPPING_TABLE_EXTOKEN'].append(0) + Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(0) + Dict['EXMAPPING_TABLE_GUID_INDEX'].append(0) + + if NumberOfSizeItems != 0: + Dict['SIZE_TABLE_SIZE'] = NumberOfSizeItems * 2 + + AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict)) + if NumberOfLocalTokens == 0: + AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict)) + else: + AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict)) + + return AutoGenH, AutoGenC + +## Create code for PCD database +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH): + if Info.PcdIsDriver == "": + return + if Info.PcdIsDriver not in gPcdPhaseMap: + EdkLogger.error("build", AUTOGEN_ERROR, "Not supported PcdIsDriver type:%s" % Info.PcdIsDriver, + ExtraData="[%s]" % str(Info)) + + AutoGenH.Append(gPcdDatabaseCommonAutoGenH) + AdditionalAutoGenH, AdditionalAutoGenC = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, 'PEI') + AutoGenH.Append(AdditionalAutoGenH.String) + + Phase = gPcdPhaseMap[Info.PcdIsDriver] + if Phase == 'PEI': + AutoGenC.Append(AdditionalAutoGenC.String) + + if Phase == 'DXE': + AdditionalAutoGenH, AdditionalAutoGenC = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, Phase) + AutoGenH.Append(AdditionalAutoGenH.String) + AutoGenC.Append(AdditionalAutoGenC.String) + AutoGenH.Append(gPcdDatabaseEpilogueAutoGenH) + +## Create code for library constructor +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param 
AutoGenH The TemplateString object for header file +# +def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH): + # + # Library Constructors + # + ConstructorPrototypeString = TemplateString() + ConstructorCallingString = TemplateString() + if Info.IsLibrary: + DependentLibraryList = [Info.Module] + else: + DependentLibraryList = Info.DependentLibraryList + for Lib in DependentLibraryList: + if len(Lib.ConstructorList) <= 0: + continue + Dict = {'Function':Lib.ConstructorList} + if Lib.ModuleType in ['BASE', 'SEC']: + ConstructorPrototypeString.Append(gLibraryStructorPrototype['BASE'].Replace(Dict)) + ConstructorCallingString.Append(gLibraryStructorCall['BASE'].Replace(Dict)) + elif Lib.ModuleType in ['PEI_CORE','PEIM']: + ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict)) + ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict)) + elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER', + 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']: + ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict)) + ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict)) + + if str(ConstructorPrototypeString) == '': + ConstructorPrototypeList = [] + else: + ConstructorPrototypeList = [str(ConstructorPrototypeString)] + if str(ConstructorCallingString) == '': + ConstructorCallingList = [] + else: + ConstructorCallingList = [str(ConstructorCallingString)] + + Dict = { + 'Type' : 'Constructor', + 'FunctionPrototype' : ConstructorPrototypeList, + 'FunctionCall' : ConstructorCallingList + } + if Info.IsLibrary: + AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict) + else: + if Info.ModuleType in ['BASE', 'SEC']: + AutoGenC.Append(gLibraryString['BASE'].Replace(Dict)) + elif Info.ModuleType in ['PEI_CORE','PEIM']: + AutoGenC.Append(gLibraryString['PEI'].Replace(Dict)) + elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER', + 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']: + AutoGenC.Append(gLibraryString['DXE'].Replace(Dict)) + +## Create code for library destructor +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH): + # + # Library Destructors + # + DestructorPrototypeString = TemplateString() + DestructorCallingString = TemplateString() + if Info.IsLibrary: + DependentLibraryList = [Info.Module] + else: + DependentLibraryList = Info.DependentLibraryList + for Index in range(len(DependentLibraryList)-1, -1, -1): + Lib = DependentLibraryList[Index] + if len(Lib.DestructorList) <= 0: + continue + Dict = {'Function':Lib.DestructorList} + if Lib.ModuleType in ['BASE', 'SEC']: + DestructorPrototypeString.Append(gLibraryStructorPrototype['BASE'].Replace(Dict)) + DestructorCallingString.Append(gLibraryStructorCall['BASE'].Replace(Dict)) + elif Lib.ModuleType in ['PEI_CORE','PEIM']: + DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict)) + DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict)) + elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER', + 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_CORE']: + DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict)) + DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict)) + + if 
str(DestructorPrototypeString) == '': + DestructorPrototypeList = [] + else: + DestructorPrototypeList = [str(DestructorPrototypeString)] + if str(DestructorCallingString) == '': + DestructorCallingList = [] + else: + DestructorCallingList = [str(DestructorCallingString)] + + Dict = { + 'Type' : 'Destructor', + 'FunctionPrototype' : DestructorPrototypeList, + 'FunctionCall' : DestructorCallingList + } + if Info.IsLibrary: + AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict) + else: + if Info.ModuleType in ['BASE', 'SEC']: + AutoGenC.Append(gLibraryString['BASE'].Replace(Dict)) + elif Info.ModuleType in ['PEI_CORE','PEIM']: + AutoGenC.Append(gLibraryString['PEI'].Replace(Dict)) + elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER', + 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']: + AutoGenC.Append(gLibraryString['DXE'].Replace(Dict)) + + +## Create code for ModuleEntryPoint +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']: + return + # + # Module Entry Points + # + NumEntryPoints = len(Info.Module.ModuleEntryPointList) + if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification: + PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION'] + else: + PiSpecVersion = 0 + if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification: + UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION'] + else: + UefiSpecVersion = 0 + Dict = { + 'Function' : Info.Module.ModuleEntryPointList, + 'PiSpecVersion' : PiSpecVersion, + 'UefiSpecVersion': UefiSpecVersion + } + + if Info.ModuleType in ['PEI_CORE', 'DXE_CORE', 'SMM_CORE']: + if NumEntryPoints != 1: + EdkLogger.error( + "build", + AUTOGEN_ERROR, + '%s must have exactly one entry point' % Info.ModuleType, + File=str(Info), + ExtraData= ", ".join(Info.Module.ModuleEntryPointList) + ) + if Info.ModuleType == 'PEI_CORE': + AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict)) + AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType == 'DXE_CORE': + AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict)) + AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType == 'SMM_CORE': + AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict)) + AutoGenH.Append(gSmmCoreEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType == 'PEIM': + if NumEntryPoints < 2: + AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict)) + else: + AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict)) + AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType in ['DXE_RUNTIME_DRIVER','DXE_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']: + if NumEntryPoints < 2: + AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict)) + else: + AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict)) + AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType == 'DXE_SMM_DRIVER': + if NumEntryPoints == 0: + AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict)) + else: + AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict)) + AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict)) + elif Info.ModuleType == 'UEFI_APPLICATION': + if NumEntryPoints < 2: + AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict)) + else: + 
AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict)) + AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict)) + +## Create code for ModuleUnloadImage +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']: + return + # + # Unload Image Handlers + # + NumUnloadImage = len(Info.Module.ModuleUnloadImageList) + Dict = {'Count':NumUnloadImage, 'Function':Info.Module.ModuleUnloadImageList} + if NumUnloadImage < 2: + AutoGenC.Append(gUefiUnloadImageString[NumUnloadImage].Replace(Dict)) + else: + AutoGenC.Append(gUefiUnloadImageString[2].Replace(Dict)) + AutoGenH.Append(gUefiUnloadImagePrototype.Replace(Dict)) + +## Create code for GUID +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary: + return + + if Info.ModuleType in ["USER_DEFINED", "BASE"]: + GuidType = "GUID" + else: + GuidType = "EFI_GUID" + + if Info.GuidList: + AutoGenC.Append("\n// Guids\n") + # + # GUIDs + # + for Key in Info.GuidList: + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.GuidList[Key])) + +## Create code for protocol +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary: + return + + if Info.ModuleType in ["USER_DEFINED", "BASE"]: + GuidType = "GUID" + else: + GuidType = "EFI_GUID" + + if Info.ProtocolList: + AutoGenC.Append("\n// Protocols\n") + # + # Protocol GUIDs + # + for Key in Info.ProtocolList: + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.ProtocolList[Key])) + +## Create code for PPI +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary: + return + + if Info.ModuleType in ["USER_DEFINED", "BASE"]: + GuidType = "GUID" + else: + GuidType = "EFI_GUID" + + if Info.PpiList: + AutoGenC.Append("\n// PPIs\n") + # + # PPI GUIDs + # + for Key in Info.PpiList: + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.PpiList[Key])) + +## Create code for PCD +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreatePcdCode(Info, AutoGenC, AutoGenH): + if Info.IsLibrary: + if Info.ModulePcdList: + AutoGenH.Append("\n// PCD definitions\n") + for Pcd in Info.ModulePcdList: + CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd) + else: + if Info.ModulePcdList: + AutoGenH.Append("\n// Definition of PCDs used in this module\n") + AutoGenC.Append("\n// Definition of PCDs used in this module\n") + for Pcd in Info.ModulePcdList: + CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd) + + if Info.LibraryPcdList: + AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n") + AutoGenC.Append("\n// Definition of PCDs used in libraries\n") + for Pcd in Info.LibraryPcdList: + 
CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd) + CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH) + +## Create code for unicode string definition +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True +# @param UniGenBinBuffer Buffer to store uni string package data +# +def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuffer): + WorkingDir = os.getcwd() + os.chdir(Info.WorkspaceDir) + + IncList = [Info.MetaFile.Dir] + # Get all files under [Sources] section in inf file for EDK-II module + EDK2Module = True + SrcList = [F for F in Info.SourceFileList] + if Info.AutoGenVersion < 0x00010005: + EDK2Module = False + # Get all files under the module directory for EDK-I module + Cwd = os.getcwd() + os.chdir(Info.MetaFile.Dir) + for Root, Dirs, Files in os.walk("."): + if 'CVS' in Dirs: + Dirs.remove('CVS') + if '.svn' in Dirs: + Dirs.remove('.svn') + for File in Files: + File = PathClass(os.path.join(Root, File), Info.MetaFile.Dir) + if File in SrcList: + continue + SrcList.append(File) + os.chdir(Cwd) + + if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1: + CompatibleMode = True + else: + CompatibleMode = False + + # + # -s is a temporary option dedicated for building .UNI files with ISO 639-2 language codes of EDK Shell in EDK2 + # + if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1: + if CompatibleMode: + EdkLogger.error("build", AUTOGEN_ERROR, + "-c and -s build options should be used exclusively", + ExtraData="[%s]" % str(Info)) + ShellMode = True + else: + ShellMode = False + + #RFC4646 is only for EDKII modules and ISO639-2 for EDK modules + if EDK2Module: + FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages] + else: + FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages] + Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo) + if CompatibleMode or UniGenCFlag: + AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n") + AutoGenC.Append(Code) + AutoGenC.Append("\n") + AutoGenH.Append("\n//\n//Unicode String ID\n//\n") + AutoGenH.Append(Header) + if CompatibleMode or UniGenCFlag: + AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name) + os.chdir(WorkingDir) + +## Create common code +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateHeaderCode(Info, AutoGenC, AutoGenH): + # file header + AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'})) + # header file Prologue + AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-','_')})) + if Info.AutoGenVersion >= 0x00010005: + # header files includes + AutoGenH.Append("#include <%s>\n" % gBasicHeaderFile) + if Info.ModuleType in gModuleTypeHeaderFile \ + and gModuleTypeHeaderFile[Info.ModuleType][0] != gBasicHeaderFile: + AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0]) + AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;\n\n') + + if Info.IsLibrary: + return + + AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid)) 
+ + if Info.IsLibrary: + return + # C file header + AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'})) + if Info.AutoGenVersion >= 0x00010005: + # C file header files includes + if Info.ModuleType in gModuleTypeHeaderFile: + for Inc in gModuleTypeHeaderFile[Info.ModuleType]: + AutoGenC.Append("#include <%s>\n" % Inc) + else: + AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile) + + # + # Publish the CallerId Guid + # + AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid)) + +## Create common code for header file +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateFooterCode(Info, AutoGenC, AutoGenH): + AutoGenH.Append(gAutoGenHEpilogueString) + +## Create code for a module +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True +# @param UniGenBinBuffer Buffer to store uni string package data +# +def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer): + CreateHeaderCode(Info, AutoGenC, AutoGenH) + + if Info.AutoGenVersion >= 0x00010005: + CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH) + CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH) + CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH) + CreatePcdCode(Info, AutoGenC, AutoGenH) + CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH) + CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH) + CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH) + CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH) + + if Info.UnicodeFileList: + FileName = "%sStrDefs.h" % Info.Name + StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName})) + StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-','_')})) + CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer) + StringH.Append("\n#endif\n") + AutoGenH.Append('#include "%s"\n' % FileName) + + CreateFooterCode(Info, AutoGenC, AutoGenH) + + # no generation of AutoGen.c for R8 modules without unicode file + if Info.AutoGenVersion < 0x00010005 and len(Info.UnicodeFileList) == 0: + AutoGenC.String = '' + +## Create the code file +# +# @param FilePath The path of code file +# @param Content The content of code file +# @param IsBinaryFile The flag indicating if the file is binary file or not +# +# @retval True If file content is changed or file doesn't exist +# @retval False If the file exists and the content is not changed +# +def Generate(FilePath, Content, IsBinaryFile): + return SaveFileOnChange(FilePath, Content, IsBinaryFile) + diff --git a/BaseTools/Source/Python/AutoGen/GenDepex.py b/BaseTools/Source/Python/AutoGen/GenDepex.py new file mode 100644 index 0000000000..f80824b744 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenDepex.py @@ -0,0 +1,448 @@ +## @file
+# This file is used to generate the DEPEX file for a module's dependency expression
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+## Import Modules
+#
+import sys
+import os
+import re
+import traceback
+
+from StringIO import StringIO
+from struct import pack
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+from Common.Misc import GuidStructureStringToGuidString
+from Common import EdkLogger as EdkLogger
+
+
+## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END"
+gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)
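# A minimal sketch (illustrative only; it assumes just the gStartClosePattern
# regex above, and the Sample* names are made up): re.S lets ".+" span newlines,
# so group(1) captures everything between DEPENDENCY_START and DEPENDENCY_END.
SampleText = "/* DEPENDENCY_START\ngEfiCpuArchProtocolGuid AND gEfiTimerArchProtocolGuid\nDEPENDENCY_END */"
SampleMatch = gStartClosePattern.match(SampleText)
if SampleMatch is not None:
    SampleExpression = SampleMatch.group(1).strip()    # "gEfiCpuArchProtocolGuid AND gEfiTimerArchProtocolGuid"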
+
+## Mapping between module type and EFI phase
+gType2Phase = {
+ "BASE" : None,
+ "SEC" : "PEI",
+ "PEI_CORE" : "PEI",
+ "PEIM" : "PEI",
+ "DXE_CORE" : "DXE",
+ "DXE_DRIVER" : "DXE",
+ "DXE_SMM_DRIVER" : "DXE",
+ "DXE_RUNTIME_DRIVER": "DXE",
+ "DXE_SAL_DRIVER" : "DXE",
+ "UEFI_DRIVER" : "DXE",
+ "UEFI_APPLICATION" : "DXE",
+ "SMM_CORE" : "DXE",
+}
+
+## Convert a dependency expression string into its EFI internal representation
+#
+# The DependencyExpression class parses a dependency expression string and
+# converts it into its binary form.
+#
+class DependencyExpression:
+
+ ArchProtocols = set([
+ '665e3ff6-46cc-11d4-9a38-0090273fc14d', # 'gEfiBdsArchProtocolGuid'
+ '26baccb1-6f42-11d4-bce7-0080c73c8881', # 'gEfiCpuArchProtocolGuid'
+ '26baccb2-6f42-11d4-bce7-0080c73c8881', # 'gEfiMetronomeArchProtocolGuid'
+ '1da97072-bddc-4b30-99f1-72a0b56fff2a', # 'gEfiMonotonicCounterArchProtocolGuid'
+ '27cfac87-46cc-11d4-9a38-0090273fc14d', # 'gEfiRealTimeClockArchProtocolGuid'
+ '27cfac88-46cc-11d4-9a38-0090273fc14d', # 'gEfiResetArchProtocolGuid'
+ 'b7dfb4e1-052f-449f-87be-9818fc91b733', # 'gEfiRuntimeArchProtocolGuid'
+ 'a46423e3-4617-49f1-b9ff-d1bfa9115839', # 'gEfiSecurityArchProtocolGuid'
+ '26baccb3-6f42-11d4-bce7-0080c73c8881', # 'gEfiTimerArchProtocolGuid'
+ '6441f818-6362-4e44-b570-7dba31dd2453', # 'gEfiVariableWriteArchProtocolGuid'
+ '1e5668e2-8481-11d4-bcf1-0080c73c8881', # 'gEfiVariableArchProtocolGuid'
+ '665e3ff5-46cc-11d4-9a38-0090273fc14d' # 'gEfiWatchdogTimerArchProtocolGuid'
+ ]
+ )
+
+ OpcodePriority = {
+ "AND" : 1,
+ "OR" : 1,
+ "NOT" : 2,
+ # "SOR" : 9,
+ # "BEFORE": 9,
+ # "AFTER" : 9,
+ }
+
+ Opcode = {
+ "PEI" : {
+ "PUSH" : 0x02,
+ "AND" : 0x03,
+ "OR" : 0x04,
+ "NOT" : 0x05,
+ "TRUE" : 0x06,
+ "FALSE" : 0x07,
+ "END" : 0x08
+ },
+
+ "DXE" : {
+ "BEFORE": 0x00,
+ "AFTER" : 0x01,
+ "PUSH" : 0x02,
+ "AND" : 0x03,
+ "OR" : 0x04,
+ "NOT" : 0x05,
+ "TRUE" : 0x06,
+ "FALSE" : 0x07,
+ "END" : 0x08,
+ "SOR" : 0x09
+ }
+ }
+
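+    # Illustrative encoding examples: a PEIM whose final expression is just
+    # "TRUE" is encoded with the PEI opcodes above as the two bytes 0x06 0x08
+    # (TRUE, END); a DXE driver that depends on a single protocol is encoded as
+    # 0x02 (PUSH) followed by the 16-byte GUID value and 0x08 (END).
+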
+ # all supported op codes and operands
+ SupportedOpcode = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR"]
+ SupportedOperand = ["TRUE", "FALSE"]
+
+ OpcodeWithSingleOperand = ['NOT', 'BEFORE', 'AFTER']
+ OpcodeWithTwoOperand = ['AND', 'OR']
+
+ # op code that should not be the last one
+ NonEndingOpcode = ["AND", "OR", "NOT", 'SOR']
+    # op codes that must not be present at the same time
+ ExclusiveOpcode = ["BEFORE", "AFTER"]
+    # op code that should be the first one if it is present
+ AboveAllOpcode = ["SOR", "BEFORE", "AFTER"]
+
+ #
+    # open and close parentheses must be taken as individual tokens; a GUID value in C structure form is one token
+ #
+ TokenPattern = re.compile("(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")
+
+ ## Constructor
+ #
+ # @param Expression The list or string of dependency expression
+ # @param ModuleType The type of the module using the dependency expression
+ #
+ def __init__(self, Expression, ModuleType, Optimize=False):
+ self.ModuleType = ModuleType
+ self.Phase = gType2Phase[ModuleType]
+ if type(Expression) == type([]):
+ self.ExpressionString = " ".join(Expression)
+ self.TokenList = Expression
+ else:
+ self.ExpressionString = Expression
+ self.GetExpressionTokenList()
+
+ self.PostfixNotation = []
+ self.OpcodeList = []
+
+ self.GetPostfixNotation()
+ self.ValidateOpcode()
+
+ EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
+ if Optimize:
+ self.Optimize()
+ EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))
+
+ def __str__(self):
+ return " ".join(self.TokenList)
+
+ def __repr__(self):
+ WellForm = ''
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode:
+ WellForm += "\n " + Token
+ else:
+ WellForm += ' ' + Token
+ return WellForm
+
+ ## Split the expression string into token list
+ def GetExpressionTokenList(self):
+ self.TokenList = self.TokenPattern.findall(self.ExpressionString)
+
+ ## Convert token list into postfix notation
+ def GetPostfixNotation(self):
+ Stack = []
+ LastToken = ''
+ for Token in self.TokenList:
+ if Token == "(":
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before open parentheses",
+ ExtraData="Near %s" % LastToken)
+ Stack.append(Token)
+ elif Token == ")":
+ if '(' not in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ elif LastToken in self.SupportedOpcode + ['', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before close parentheses",
+ ExtraData="Near %s" % LastToken)
+ while len(Stack) > 0:
+ if Stack[-1] == '(':
+ Stack.pop()
+ break
+ self.PostfixNotation.append(Stack.pop())
+ elif Token in self.OpcodePriority:
+ if Token == "NOT":
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
+ ExtraData="Near %s" % LastToken)
+ elif LastToken in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
+ ExtraData="Near %s" % LastToken)
+
+ while len(Stack) > 0:
+ if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
+ break
+ self.PostfixNotation.append(Stack.pop())
+ Stack.append(Token)
+ self.OpcodeList.append(Token)
+ else:
+ if Token not in self.SupportedOpcode:
+ # not OP, take it as GUID
+ if LastToken not in self.SupportedOpcode + ['(', '', None]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
+ ExtraData="Near %s" % LastToken)
+ if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
+ if Token not in self.SupportedOperand:
+ self.PostfixNotation.append("PUSH")
+ # check if OP is valid in this phase
+ elif Token in self.Opcode[self.Phase]:
+ if Token == "END":
+ break
+ self.OpcodeList.append(Token)
+ else:
+ EdkLogger.error("GenDepex", PARSER_ERROR,
+                                    "Opcode=%s is not supported in %s phase" % (Token, self.Phase),
+ ExtraData=str(self))
+ self.PostfixNotation.append(Token)
+ LastToken = Token
+
+ # there should not be parentheses in Stack
+ if '(' in Stack or ')' in Stack:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
+ ExtraData=str(self))
+ while len(Stack) > 0:
+ self.PostfixNotation.append(Stack.pop())
+ if self.PostfixNotation[-1] != 'END':
+ self.PostfixNotation.append("END")
+
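+    # Illustrative conversion (GUID_A and GUID_B stand for GUID operand tokens):
+    # the infix token list
+    #     GUID_A AND ( GUID_B OR TRUE )
+    # becomes the postfix list
+    #     PUSH GUID_A PUSH GUID_B TRUE OR AND END
+    # i.e. every GUID operand is preceded by a PUSH opcode, TRUE/FALSE are kept
+    # as-is, and an END opcode is appended if the expression lacks one.
+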
+ ## Validate the dependency expression
+ def ValidateOpcode(self):
+ for Op in self.AboveAllOpcode:
+ if Op in self.PostfixNotation:
+ if Op != self.PostfixNotation[0]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ for Op in self.ExclusiveOpcode:
+ if Op in self.OpcodeList:
+ if len(self.OpcodeList) > 1:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
+ ExtraData=str(self))
+ if len(self.PostfixNotation) < 3:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
+ ExtraData=str(self))
+ if self.TokenList[-1] != 'END' and self.TokenList[-1] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
+ ExtraData=str(self))
+ if self.TokenList[-1] == 'END' and self.TokenList[-2] in self.NonEndingOpcode:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
+ ExtraData=str(self))
+ if "END" in self.TokenList and "END" != self.TokenList[-1]:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
+ ExtraData=str(self))
+
+    ## Apply a simple optimization to the dependency expression by removing duplicated operands
+ def Optimize(self):
+ ValidOpcode = list(set(self.OpcodeList))
+ if len(ValidOpcode) != 1 or ValidOpcode[0] not in ['AND', 'OR']:
+ return
+ Op = ValidOpcode[0]
+ NewOperand = []
+ AllOperand = set()
+ for Token in self.PostfixNotation:
+ if Token in self.SupportedOpcode or Token in NewOperand:
+ continue
+ AllOperand.add(Token)
+ if Token == 'TRUE':
+ if Op == 'AND':
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ elif Token == 'FALSE':
+ if Op == 'OR':
+ continue
+ else:
+ NewOperand.append(Token)
+ break
+ NewOperand.append(Token)
+
+        # for PEIM modules, don't generate a depex if only the TRUE operand is left
+ if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and NewOperand[0] == 'TRUE':
+ self.PostfixNotation = []
+ return
+
+        # for DXE drivers, don't generate a depex if it is an AND of exactly the architectural protocols
+ if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \
+ Op == 'AND' and \
+ self.ArchProtocols == set([GuidStructureStringToGuidString(Guid) for Guid in AllOperand]):
+ self.PostfixNotation = []
+ return
+
+ if len(NewOperand) == 0:
+ self.TokenList = list(AllOperand)
+ else:
+ self.TokenList = []
+ while True:
+ self.TokenList.append(NewOperand.pop(0))
+ if NewOperand == []:
+ break
+ self.TokenList.append(Op)
+ self.PostfixNotation = []
+ self.GetPostfixNotation()
+
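+    # Illustrative effect of Optimize() (GUID_A and GUID_B stand for GUID operands):
+    #     GUID_A AND GUID_B AND GUID_A   ->   GUID_A AND GUID_B
+    #     GUID_A OR FALSE OR GUID_B      ->   GUID_A OR GUID_B
+    # Expressions mixing AND and OR, or using NOT/SOR/BEFORE/AFTER, are left
+    # untouched, since the optimization only applies to a single AND or OR chain.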
+
+ ## Convert a GUID value in C structure format into its binary form
+ #
+ # @param Guid The GUID value in C structure format
+ #
+ # @retval array The byte array representing the GUID value
+ #
+ def GetGuidValue(self, Guid):
+ GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
+ GuidValueList = GuidValueString.split(",")
+ if len(GuidValueList) != 11:
+ EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
+ return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))
+
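+    # For example, GetGuidValue on the placeholder value
+    #     "{ 0x12345678, 0x1234, 0x5678, { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8 } }"
+    # packs the eleven fields with pack("1I2H8B", ...), which on a little-endian
+    # host yields the 16 bytes 78 56 34 12 34 12 78 56 01 02 03 04 05 06 07 08.
+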
+ ## Save the binary form of dependency expression in file
+ #
+ # @param File The path of file. If None is given, put the data on console
+ #
+ # @retval True If the file doesn't exist or file is changed
+ # @retval False If file exists and is not changed.
+ #
+ def Generate(self, File=None):
+ Buffer = StringIO()
+ if len(self.PostfixNotation) == 0:
+ return False
+
+ for Item in self.PostfixNotation:
+ if Item in self.Opcode[self.Phase]:
+ Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
+ elif Item in self.SupportedOpcode:
+ EdkLogger.error("GenDepex", FORMAT_INVALID,
+ "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
+ ExtraData=self.ExpressionString)
+ else:
+ Buffer.write(self.GetGuidValue(Item))
+
+ FilePath = ""
+ FileChangeFlag = True
+ if File == None:
+ sys.stdout.write(Buffer.getvalue())
+ FilePath = "STDOUT"
+ else:
+ FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)
+
+ Buffer.close()
+ return FileChangeFlag
+
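+# Illustrative use of the class from another script (the GUID value and the
+# output file name below are placeholders):
+#
+#   Dpx = DependencyExpression(
+#             "{ 0x12345678, 0x1234, 0x5678, { 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8 } } AND TRUE",
+#             "DXE_DRIVER",
+#             Optimize=False
+#             )
+#   Dpx.Generate("MyDriver.depex")   # returns False if the file content is unchanged
+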
+versionNumber = "0.04"
+__version__ = "%prog Version " + versionNumber
+__copyright__ = "Copyright (c) 2007-2010, Intel Corporation All rights reserved."
+__usage__ = "%prog [options] [dependency_expression_file]"
+
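+# Typical invocations (file names are placeholders):
+#   GenDepex.py -t DXE_DRIVER -o MyDriver.depex MyDriver.dxs
+#   GenDepex.py -t PEIM -m -e "TRUE" -o MyPeim.depex
+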
+## Parse command line options
+#
+# @retval OptionParser
+#
+def GetOptions():
+ from optparse import OptionParser
+
+ Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)
+
+ Parser.add_option("-o", "--output", dest="OutputFile", default=None, metavar="FILE",
+ help="Specify the name of depex file to be generated")
+ Parser.add_option("-t", "--module-type", dest="ModuleType", default=None,
+                      help="The type of module that the dependency expression serves")
+ Parser.add_option("-e", "--dependency-expression", dest="Expression", default="",
+                      help="The string of dependency expression. If this option is present, the input file will be ignored.")
+ Parser.add_option("-m", "--optimize", dest="Optimize", default=False, action="store_true",
+ help="Do some simple optimization on the expression.")
+ Parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true",
+ help="build with verbose information")
+ Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
+ Parser.add_option("-q", "--quiet", dest="quiet", default=False, action="store_true",
+ help="build with little information")
+
+ return Parser.parse_args()
+
+
+## Entrance method
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Option, Input = GetOptions()
+
+ # Set log level
+ if Option.quiet:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.verbose:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.debug != None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ try:
+ if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
+            EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or not supported")
+
+ DxsFile = ''
+ if len(Input) > 0 and Option.Expression == "":
+ DxsFile = Input[0]
+ DxsString = open(DxsFile, 'r').read().replace("\n", " ").replace("\r", " ")
+ DxsString = gStartClosePattern.sub("\\1", DxsString)
+ elif Option.Expression != "":
+ if Option.Expression[0] == '"':
+ DxsString = Option.Expression[1:-1]
+ else:
+ DxsString = Option.Expression
+ else:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
+
+ Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
+ if Option.OutputFile != None:
+ FileChangeFlag = Dpx.Generate(Option.OutputFile)
+ if not FileChangeFlag and DxsFile:
+ #
+ # Touch the output file if its time stamp is older than the original
+                # DXS file to avoid re-invoking this tool for the dependency check in the build rule.
+ #
+ if os.stat(DxsFile)[8] > os.stat(Option.OutputFile)[8]:
+ os.utime(Option.OutputFile, None)
+ else:
+ Dpx.Generate()
+ except BaseException, X:
+ EdkLogger.quiet("")
+ if Option != None and Option.debug != None:
+ EdkLogger.quiet(traceback.format_exc())
+ else:
+ EdkLogger.quiet(str(X))
+ return 1
+
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(Main())
+
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py new file mode 100644 index 0000000000..49ac33f47f --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenMake.py @@ -0,0 +1,1399 @@ +## @file +# Create makefile for MS nmake and GNU make +# +# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR> +# This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## Import Modules +# +import os +import sys +import string +import re +import os.path as path + +from Common.BuildToolError import * +from Common.Misc import * +from Common.String import * +from BuildEngine import * +import Common.GlobalData as GlobalData + +## Regular expression for finding header file inclusions +gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE|re.UNICODE|re.IGNORECASE) + +## Regular expression for matching macro used in header file inclusion +gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE) + +## pattern for include style in R8.x code +gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h" +gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h" +gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h" +gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h" +gIncludeMacroConversion = { + "EFI_PROTOCOL_DEFINITION" : gProtocolDefinition, + "EFI_GUID_DEFINITION" : gGuidDefinition, + "EFI_ARCH_PROTOCOL_DEFINITION" : gArchProtocolDefinition, + "EFI_PROTOCOL_PRODUCER" : gProtocolDefinition, + "EFI_PROTOCOL_CONSUMER" : gProtocolDefinition, + "EFI_PROTOCOL_DEPENDENCY" : gProtocolDefinition, + "EFI_ARCH_PROTOCOL_PRODUCER" : gArchProtocolDefinition, + "EFI_ARCH_PROTOCOL_CONSUMER" : gArchProtocolDefinition, + "EFI_ARCH_PROTOCOL_DEPENDENCY" : gArchProtocolDefinition, + "EFI_PPI_DEFINITION" : gPpiDefinition, + "EFI_PPI_PRODUCER" : gPpiDefinition, + "EFI_PPI_CONSUMER" : gPpiDefinition, + "EFI_PPI_DEPENDENCY" : gPpiDefinition, +} + +## default makefile type +gMakeType = "" +if sys.platform == "win32": + gMakeType = "nmake" +else: + gMakeType = "gmake" + + +## BuildFile class +# +# This base class encapsules build file and its generation. It uses template to generate +# the content of build file. The content of build file will be got from AutoGen objects. +# +class BuildFile(object): + ## template used to generate the build file (i.e. 
makefile if using make) + _TEMPLATE_ = TemplateString('') + + _DEFAULT_FILE_NAME_ = "Makefile" + + ## default file name for each type of build file + _FILE_NAME_ = { + "nmake" : "Makefile", + "gmake" : "GNUmakefile" + } + + ## Fixed header string for makefile + _MAKEFILE_HEADER = '''# +# DO NOT EDIT +# This file is auto-generated by build utility +# +# Module Name: +# +# %s +# +# Abstract: +# +# Auto-generated makefile for building modules, libraries or platform +# + ''' + + ## Header string for each type of build file + _FILE_HEADER_ = { + "nmake" : _MAKEFILE_HEADER % _FILE_NAME_["nmake"], + "gmake" : _MAKEFILE_HEADER % _FILE_NAME_["gmake"] + } + + ## shell commands which can be used in build file in the form of macro + # $(CP) copy file command + # $(MV) move file command + # $(RM) remove file command + # $(MD) create dir command + # $(RD) remove dir command + # + _SHELL_CMD_ = { + "nmake" : { + "CP" : "copy /y", + "MV" : "move /y", + "RM" : "del /f /q", + "MD" : "mkdir", + "RD" : "rmdir /s /q", + }, + + "gmake" : { + "CP" : "cp -f", + "MV" : "mv -f", + "RM" : "rm -f", + "MD" : "mkdir -p", + "RD" : "rm -r -f", + } + } + + ## directory separator + _SEP_ = { + "nmake" : "\\", + "gmake" : "/" + } + + ## directory creation template + _MD_TEMPLATE_ = { + "nmake" : 'if not exist %(dir)s $(MD) %(dir)s', + "gmake" : "$(MD) %(dir)s" + } + + ## directory removal template + _RD_TEMPLATE_ = { + "nmake" : 'if exist %(dir)s $(RD) %(dir)s', + "gmake" : "$(RD) %(dir)s" + } + + _CD_TEMPLATE_ = { + "nmake" : 'if exist %(dir)s cd %(dir)s', + "gmake" : "test -e %(dir)s && cd %(dir)s" + } + + _MAKE_TEMPLATE_ = { + "nmake" : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s', + "gmake" : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s' + } + + _INCLUDE_CMD_ = { + "nmake" : '!INCLUDE', + "gmake" : "include" + } + + _INC_FLAG_ = {"MSFT" : "/I", "GCC" : "-I", "INTEL" : "-I", "RVCT" : "-I"} + + ## Constructor of BuildFile + # + # @param AutoGenObject Object of AutoGen class + # + def __init__(self, AutoGenObject): + self._AutoGenObject = AutoGenObject + self._FileType = gMakeType + + ## Create build file + # + # @param FileType Type of build file. Only nmake and gmake are supported now. 
+ # + # @retval TRUE The build file is created or re-created successfully + # @retval FALSE The build file exists and is the same as the one to be generated + # + def Generate(self, FileType=gMakeType): + if FileType not in self._FILE_NAME_: + EdkLogger.error("build", PARAMETER_INVALID, "Invalid build type [%s]" % FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + self._FileType = FileType + FileContent = self._TEMPLATE_.Replace(self._TemplateDict) + FileName = self._FILE_NAME_[FileType] + return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False) + + ## Return a list of directory creation command string + # + # @param DirList The list of directory to be created + # + # @retval list The directory creation command list + # + def GetCreateDirectoryCommand(self, DirList): + return [self._MD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList] + + ## Return a list of directory removal command string + # + # @param DirList The list of directory to be removed + # + # @retval list The directory removal command list + # + def GetRemoveDirectoryCommand(self, DirList): + return [self._RD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList] + + def PlaceMacro(self, Path, MacroDefinitions={}): + if Path.startswith("$("): + return Path + else: + PathLength = len(Path) + for MacroName in MacroDefinitions: + MacroValue = MacroDefinitions[MacroName] + MacroValueLength = len(MacroValue) + if MacroValueLength <= PathLength and Path.startswith(MacroValue): + Path = "$(%s)%s" % (MacroName, Path[MacroValueLength:]) + break + return Path + +## ModuleMakefile class +# +# This class encapsules makefie and its generation for module. It uses template to generate +# the content of makefile. The content of makefile will be got from ModuleAutoGen object. 
+# +class ModuleMakefile(BuildFile): + ## template used to generate the makefile for module + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_RELATIVE_DIR = ${platform_relative_directory} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Module Macro Definition +# +MODULE_NAME = ${module_name} +MODULE_GUID = ${module_guid} +MODULE_VERSION = ${module_version} +MODULE_TYPE = ${module_type} +MODULE_FILE = ${module_file} +MODULE_FILE_BASE_NAME = ${module_file_base_name} +BASE_NAME = $(MODULE_NAME) +MODULE_RELATIVE_DIR = ${module_relative_directory} +MODULE_DIR = $(WORKSPACE)${separator}${module_relative_directory} + +MODULE_ENTRY_POINT = ${module_entry_point} +ARCH_ENTRY_POINT = ${arch_entry_point} +IMAGE_ENTRY_POINT = ${image_entry_point} + +${BEGIN}${module_extra_defines} +${END} +# +# Build Configuration Macro Definition +# +ARCH = ${architecture} +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +# PLATFORM_BUILD_DIR = ${platform_build_directory} +BUILD_DIR = ${platform_build_directory} +BIN_DIR = $(BUILD_DIR)${separator}${architecture} +LIB_DIR = $(BIN_DIR) +MODULE_BUILD_DIR = ${module_build_directory} +OUTPUT_DIR = ${module_output_directory} +DEBUG_DIR = ${module_debug_directory} +DEST_DIR_OUTPUT = $(OUTPUT_DIR) +DEST_DIR_DEBUG = $(DEBUG_DIR) + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +# +# Tools definitions specific to this module +# +${BEGIN}${module_tool_definitions} +${END} +MAKE_FILE = ${makefile_path} + +# +# Build Macro +# +${BEGIN}${file_macro} +${END} + +COMMON_DEPS = ${BEGIN}${common_dependency_file} \\ + ${END} + +# +# Overridable Target Macro Definitions +# +FORCE_REBUILD = force_build +INIT_TARGET = init +PCH_TARGET = +BC_TARGET = ${BEGIN}${backward_compatible_target} ${END} +CODA_TARGET = ${BEGIN}${remaining_build_target} \\ + ${END} + +# +# Default target, which will build dependent libraries in addition to source files +# + +all: mbuild + + +# +# Target used when called from platform makefile, which will bypass the build of dependent libraries +# + +pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET) + +# +# ModuleTarget +# + +mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET) + +# +# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets +# + +tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET) + +# +# Phony target which is used to force executing commands for a target +# +force_build: +\t-@ + +# +# Target to update the FD +# + +fds: mbuild gen_fds + +# +# Initialization target: print build information and create necessary directories +# +init: info dirs + +info: +\t-@echo Building ... 
$(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)] + +dirs: +${BEGIN}\t-@${create_directory_command}\n${END} + +strdefs: +\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h + +# +# GenLibsTarget +# +gen_libs: +\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name} +\t${END}@cd $(MODULE_BUILD_DIR) + +# +# Build Flash Device Image +# +gen_fds: +\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds +\t@cd $(MODULE_BUILD_DIR) + +# +# Individual Object Build Targets +# +${BEGIN}${file_build_target} +${END} + +# +# clean all intermediate files +# +clean: +\t${BEGIN}${clean_command} +\t${END} + +# +# clean all generated files +# +cleanall: +${BEGIN}\t${cleanall_command} +${END}\t$(RM) *.pdb *.idb > NUL 2>&1 +\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi + +# +# clean all dependent libraries built +# +cleanlib: +\t${BEGIN}-@${library_build_command} cleanall +\t${END}@cd $(MODULE_BUILD_DIR)\n\n''') + + _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n ${source_file}${END}\n") + _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n") + + ## Constructor of ModuleMakefile + # + # @param ModuleAutoGen Object of ModuleAutoGen class + # + def __init__(self, ModuleAutoGen): + BuildFile.__init__(self, ModuleAutoGen) + self.PlatformInfo = self._AutoGenObject.PlatformInfo + + self.ResultFileList = [] + self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"] + + self.SourceFileDatabase = {} # {file type : file path} + self.DestFileDatabase = {} # {file type : file path} + self.FileBuildTargetList = [] # [(src, target string)] + self.BuildTargetList = [] # [target string] + self.PendingBuildTargetList = [] # [FileBuildRule objects] + self.CommonFileDependency = [] + self.FileListMacros = {} + self.ListFileMacros = {} + + self.FileDependency = [] + self.LibraryBuildCommandList = [] + self.LibraryFileList = [] + self.LibraryMakefileList = [] + self.LibraryBuildDirectoryList = [] + self.SystemLibraryList = [] + self.Macros = sdict() + self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"] + self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"] + self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"] + self.Macros["BIN_DIR" ] = self._AutoGenObject.Macros["BIN_DIR"] + self.Macros["BUILD_DIR" ] = self._AutoGenObject.Macros["BUILD_DIR"] + self.Macros["WORKSPACE" ] = self._AutoGenObject.Macros["WORKSPACE"] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + if self._FileType not in self._SEP_: + EdkLogger.error("build", PARAMETER_INVALID, "Invalid Makefile type [%s]" % self._FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + Separator = self._SEP_[self._FileType] + + # break build if no source files and binary files are found + if len(self._AutoGenObject.SourceFileList) == 0 and len(self._AutoGenObject.BinaryFileList) == 0: + EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]" + % (self._AutoGenObject.BuildTarget, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), + ExtraData="[%s]" % str(self._AutoGenObject)) + + # convert dependent libraries to build command + self.ProcessDependentLibrary() + if len(self._AutoGenObject.Module.ModuleEntryPointList) > 0: + ModuleEntryPoint = self._AutoGenObject.Module.ModuleEntryPointList[0] + else: + ModuleEntryPoint = "_ModuleEntryPoint" + + 
# Intel EBC compiler enforces EfiMain + if self._AutoGenObject.AutoGenVersion < 0x00010005 and self._AutoGenObject.Arch == "EBC": + ArchEntryPoint = "EfiMain" + else: + ArchEntryPoint = ModuleEntryPoint + + if self._AutoGenObject.Arch == "EBC": + # EBC compiler always use "EfiStart" as entry point. Only applies to R9 modules + ImageEntryPoint = "EfiStart" + elif self._AutoGenObject.AutoGenVersion < 0x00010005: + # R8 modules use entry point specified in INF file + ImageEntryPoint = ModuleEntryPoint + else: + # R9 modules always use "_ModuleEntryPoint" as entry point + ImageEntryPoint = "_ModuleEntryPoint" + + # tools definitions + ToolsDef = [] + IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily] + for Tool in self._AutoGenObject.BuildOption: + for Attr in self._AutoGenObject.BuildOption[Tool]: + Value = self._AutoGenObject.BuildOption[Tool][Attr] + if Attr == "FAMILY": + continue + elif Attr == "PATH": + ToolsDef.append("%s = %s" % (Tool, Value)) + else: + # Don't generate MAKE_FLAGS in makefile. It's put in environment variable. + if Tool == "MAKE": + continue + # Remove duplicated include path, if any + if Attr == "FLAGS": + Value = RemoveDupOption(Value, IncPrefix, self._AutoGenObject.IncludePathList) + ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value)) + ToolsDef.append("") + + # convert source files and binary files to build targets + self.ResultFileList = [str(T.Target) for T in self._AutoGenObject.CodaTargetList] + if len(self.ResultFileList) == 0: + EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build", + ExtraData="[%s]" % str(self._AutoGenObject)) + + self.ProcessBuildTargetList() + + # Generate macros used to represent input files + FileMacroList = [] # macro name = file list + for FileListMacro in self.FileListMacros: + FileMacro = self._FILE_MACRO_TEMPLATE.Replace( + { + "macro_name" : FileListMacro, + "source_file" : self.FileListMacros[FileListMacro] + } + ) + FileMacroList.append(FileMacro) + + # INC_LIST is special + FileMacro = "" + IncludePathList = [] + for P in self._AutoGenObject.IncludePathList: + IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros)) + if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros: + self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P) + FileMacro += self._FILE_MACRO_TEMPLATE.Replace( + { + "macro_name" : "INC", + "source_file" : IncludePathList + } + ) + FileMacroList.append(FileMacro) + + # Generate macros used to represent files containing list of input files + for ListFileMacro in self.ListFileMacros: + ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5]) + FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName)) + SaveFileOnChange( + ListFileName, + "\n".join(self.ListFileMacros[ListFileMacro]), + False + ) + + # R8 modules need <BaseName>StrDefs.h for string ID + #if self._AutoGenObject.AutoGenVersion < 0x00010005 and len(self._AutoGenObject.UnicodeFileList) > 0: + # BcTargetList = ['strdefs'] + #else: + # BcTargetList = [] + BcTargetList = [] + + MakefileName = self._FILE_NAME_[self._FileType] + LibraryMakeCommandList = [] + for D in self.LibraryBuildDirectoryList: + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join(D, MakefileName)} + LibraryMakeCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName), + "makefile_name" : MakefileName, + "platform_name" : 
self.PlatformInfo.Name, + "platform_guid" : self.PlatformInfo.Guid, + "platform_version" : self.PlatformInfo.Version, + "platform_relative_directory": self.PlatformInfo.SourceDir, + "platform_output_directory" : self.PlatformInfo.OutputDir, + + "module_name" : self._AutoGenObject.Name, + "module_guid" : self._AutoGenObject.Guid, + "module_version" : self._AutoGenObject.Version, + "module_type" : self._AutoGenObject.ModuleType, + "module_file" : self._AutoGenObject.MetaFile.Name, + "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName, + "module_relative_directory" : self._AutoGenObject.SourceDir, + "module_extra_defines" : ["%s = %s" % (k, v) for k,v in self._AutoGenObject.Module.Defines.iteritems()], + + "architecture" : self._AutoGenObject.Arch, + "toolchain_tag" : self._AutoGenObject.ToolChain, + "build_target" : self._AutoGenObject.BuildTarget, + + "platform_build_directory" : self.PlatformInfo.BuildDir, + "module_build_directory" : self._AutoGenObject.BuildDir, + "module_output_directory" : self._AutoGenObject.OutputDir, + "module_debug_directory" : self._AutoGenObject.DebugDir, + + "separator" : Separator, + "module_tool_definitions" : ToolsDef, + + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + + "module_entry_point" : ModuleEntryPoint, + "image_entry_point" : ImageEntryPoint, + "arch_entry_point" : ArchEntryPoint, + "remaining_build_target" : self.ResultFileList, + "common_dependency_file" : self.CommonFileDependency, + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "clean_command" : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]), + "cleanall_command" : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]), + "dependent_library_build_directory" : self.LibraryBuildDirectoryList, + "library_build_command" : LibraryMakeCommandList, + "file_macro" : FileMacroList, + "file_build_target" : self.BuildTargetList, + "backward_compatible_target": BcTargetList, + } + + return MakefileTemplateDict + + def ProcessBuildTargetList(self): + # + # Search dependency file list for each source file + # + ForceIncludedFile = [] + for File in self._AutoGenObject.AutoGenFileList: + if File.Ext == '.h': + ForceIncludedFile.append(File) + SourceFileList = [] + for Target in self._AutoGenObject.IntroTargetList: + SourceFileList.extend(Target.Inputs) + + self.FileDependency = self.GetFileDependency( + SourceFileList, + ForceIncludedFile, + self._AutoGenObject.IncludePathList + ) + DepSet = None + for File in self.FileDependency: + if not self.FileDependency[File]: + self.FileDependency[File] = ['$(FORCE_REBUILD)'] + continue + # skip non-C files + if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c": + continue + elif DepSet == None: + DepSet = set(self.FileDependency[File]) + else: + DepSet &= set(self.FileDependency[File]) + # in case nothing in SourceFileList + if DepSet == None: + DepSet = set() + # + # Extract common files list in the dependency files + # + for File in DepSet: + self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros)) + + for File in self.FileDependency: + # skip non-C files + if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c": + continue + NewDepSet = set(self.FileDependency[File]) + NewDepSet -= DepSet + self.FileDependency[File] = ["$(COMMON_DEPS)"] + list(NewDepSet) + + # Convert target description object to target string in makefile + for Type in self._AutoGenObject.Targets: + for T in 
self._AutoGenObject.Targets[Type]: + # Generate related macros if needed + if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros: + self.FileListMacros[T.FileListMacro] = [] + if T.GenListFile and T.ListFileMacro not in self.ListFileMacros: + self.ListFileMacros[T.ListFileMacro] = [] + if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros: + self.ListFileMacros[T.IncListFileMacro] = [] + + Deps = [] + # Add force-dependencies + for Dep in T.Dependencies: + Deps.append(self.PlaceMacro(str(Dep), self.Macros)) + # Add inclusion-dependencies + if len(T.Inputs) == 1 and T.Inputs[0] in self.FileDependency: + for F in self.FileDependency[T.Inputs[0]]: + Deps.append(self.PlaceMacro(str(F), self.Macros)) + # Add source-dependencies + for F in T.Inputs: + NewFile = self.PlaceMacro(str(F), self.Macros) + # In order to use file list macro as dependency + if T.GenListFile: + self.ListFileMacros[T.ListFileMacro].append(str(F)) + self.FileListMacros[T.FileListMacro].append(NewFile) + elif T.GenFileListMacro: + self.FileListMacros[T.FileListMacro].append(NewFile) + else: + Deps.append(NewFile) + + # Use file list macro as dependency + if T.GenFileListMacro: + Deps.append("$(%s)" % T.FileListMacro) + + TargetDict = { + "target" : self.PlaceMacro(T.Target.Path, self.Macros), + "cmd" : "\n\t".join(T.Commands), + "deps" : Deps + } + self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict)) + + ## For creating makefile targets for dependent libraries + def ProcessDependentLibrary(self): + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros)) + + ## Return a list containing source file's dependencies + # + # @param FileList The list of source files + # @param ForceInculeList The list of files which will be included forcely + # @param SearchPathList The list of search path + # + # @retval dict The mapping between source file path and its dependencies + # + def GetFileDependency(self, FileList, ForceInculeList, SearchPathList): + Dependency = {} + for F in FileList: + Dependency[F] = self.GetDependencyList(F, ForceInculeList, SearchPathList) + return Dependency + + ## Find dependencies for one source file + # + # By searching recursively "#include" directive in file, find out all the + # files needed by given source file. The dependecies will be only searched + # in given search path list. + # + # @param File The source file + # @param ForceInculeList The list of files which will be included forcely + # @param SearchPathList The list of search path + # + # @retval list The list of files the given source file depends on + # + def GetDependencyList(self, File, ForceList, SearchPathList): + EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File) + FileStack = [File] + ForceList + DependencySet = set() + MacroUsedByIncludedFile = False + + if self._AutoGenObject.Arch not in gDependencyDatabase: + gDependencyDatabase[self._AutoGenObject.Arch] = {} + DepDb = gDependencyDatabase[self._AutoGenObject.Arch] + + # add path of given source file into search path list. 
+ if File.Dir not in SearchPathList: + SearchPathList.append(File.Dir) + while len(FileStack) > 0: + F = FileStack.pop() + + CurrentFileDependencyList = [] + if F in DepDb: + CurrentFileDependencyList = DepDb[F] + for Dep in CurrentFileDependencyList: + if Dep not in FileStack and Dep not in DependencySet: + FileStack.append(Dep) + else: + try: + Fd = open(F.Path, 'r') + except BaseException, X: + EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path+"\n\t"+str(X)) + + FileContent = Fd.read() + Fd.close() + if len(FileContent) == 0: + continue + + if FileContent[0] == 0xff or FileContent[0] == 0xfe: + FileContent = unicode(FileContent, "utf-16") + IncludedFileList = gIncludePattern.findall(FileContent) + + CurrentFilePath = F.Dir + for Inc in IncludedFileList: + Inc = Inc.strip() + # if there's macro used to reference header file, expand it + HeaderList = gMacroPattern.findall(Inc) + if len(HeaderList) == 1 and len(HeaderList[0]) == 2: + HeaderType = HeaderList[0][0] + HeaderKey = HeaderList[0][1] + if HeaderType in gIncludeMacroConversion: + Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey} + else: + # not known macro used in #include + MacroUsedByIncludedFile = True + continue + Inc = os.path.normpath(Inc) + for SearchPath in [CurrentFilePath] + SearchPathList: + FilePath = os.path.join(SearchPath, Inc) + if not os.path.isfile(FilePath) or FilePath in CurrentFileDependencyList: + continue + FilePath = PathClass(FilePath) + CurrentFileDependencyList.append(FilePath) + if FilePath not in FileStack and FilePath not in DependencySet: + FileStack.append(FilePath) + break + else: + EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\ + "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList))) + + if not MacroUsedByIncludedFile: + if F == File: + CurrentFileDependencyList += ForceList + # + # Don't keep the file in cache if it uses macro in included file. + # So it will be scanned again if another file includes this file. + # + DepDb[F] = CurrentFileDependencyList + DependencySet.update(CurrentFileDependencyList) + + # + # If there's macro used in included file, always build the file by + # returning a empty dependency + # + if MacroUsedByIncludedFile: + DependencyList = [] + else: + DependencyList = list(DependencySet) # remove duplicate ones + + return DependencyList + + _TemplateDict = property(_CreateTemplateDict) + +## CustomMakefile class +# +# This class encapsules makefie and its generation for module. It uses template to generate +# the content of makefile. The content of makefile will be got from ModuleAutoGen object. 
+# +class CustomMakefile(BuildFile): + ## template used to generate the makefile for module with custom makefile + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_RELATIVE_DIR = ${platform_relative_directory} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Module Macro Definition +# +MODULE_NAME = ${module_name} +MODULE_GUID = ${module_guid} +MODULE_VERSION = ${module_version} +MODULE_TYPE = ${module_type} +MODULE_FILE = ${module_file} +MODULE_FILE_BASE_NAME = ${module_file_base_name} +BASE_NAME = $(MODULE_NAME) +MODULE_RELATIVE_DIR = ${module_relative_directory} +MODULE_DIR = $(WORKSPACE)${separator}${module_relative_directory} + +# +# Build Configuration Macro Definition +# +ARCH = ${architecture} +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +# PLATFORM_BUILD_DIR = ${platform_build_directory} +BUILD_DIR = ${platform_build_directory} +BIN_DIR = $(BUILD_DIR)${separator}${architecture} +LIB_DIR = $(BIN_DIR) +MODULE_BUILD_DIR = ${module_build_directory} +OUTPUT_DIR = ${module_output_directory} +DEBUG_DIR = ${module_debug_directory} +DEST_DIR_OUTPUT = $(OUTPUT_DIR) +DEST_DIR_DEBUG = $(DEBUG_DIR) + +# +# Tools definitions specific to this module +# +${BEGIN}${module_tool_definitions} +${END} +MAKE_FILE = ${makefile_path} + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +${custom_makefile_content} + +# +# Target used when called from platform makefile, which will bypass the build of dependent libraries +# + +pbuild: init all + + +# +# ModuleTarget +# + +mbuild: init all + +# +# Build Target used in multi-thread build mode, which no init target is needed +# + +tbuild: all + +# +# Initialization target: print build information and create necessary directories +# +init: +\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)] +${BEGIN}\t-@${create_directory_command}\n${END}\ + +''') + + ## Constructor of CustomMakefile + # + # @param ModuleAutoGen Object of ModuleAutoGen class + # + def __init__(self, ModuleAutoGen): + BuildFile.__init__(self, ModuleAutoGen) + self.PlatformInfo = self._AutoGenObject.PlatformInfo + self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + if self._FileType not in self._AutoGenObject.CustomMakefile: + EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + MakefilePath = os.path.join( + self._AutoGenObject.WorkspaceDir, + self._AutoGenObject.CustomMakefile[self._FileType] + ) + try: + CustomMakefile = open(MakefilePath, 'r').read() + except: + EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(self._AutoGenObject), + ExtraData=self._AutoGenObject.CustomMakefile[self._FileType]) + + # tools definitions + ToolsDef = [] + for Tool in self._AutoGenObject.BuildOption: + # Don't generate MAKE_FLAGS in makefile. It's put in environment variable. 
+ if Tool == "MAKE": + continue + for Attr in self._AutoGenObject.BuildOption[Tool]: + if Attr == "FAMILY": + continue + elif Attr == "PATH": + ToolsDef.append("%s = %s" % (Tool, self._AutoGenObject.BuildOption[Tool][Attr])) + else: + ToolsDef.append("%s_%s = %s" % (Tool, Attr, self._AutoGenObject.BuildOption[Tool][Attr])) + ToolsDef.append("") + + MakefileName = self._FILE_NAME_[self._FileType] + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName), + "platform_name" : self.PlatformInfo.Name, + "platform_guid" : self.PlatformInfo.Guid, + "platform_version" : self.PlatformInfo.Version, + "platform_relative_directory": self.PlatformInfo.SourceDir, + "platform_output_directory" : self.PlatformInfo.OutputDir, + + "module_name" : self._AutoGenObject.Name, + "module_guid" : self._AutoGenObject.Guid, + "module_version" : self._AutoGenObject.Version, + "module_type" : self._AutoGenObject.ModuleType, + "module_file" : self._AutoGenObject.MetaFile, + "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName, + "module_relative_directory" : self._AutoGenObject.SourceDir, + + "architecture" : self._AutoGenObject.Arch, + "toolchain_tag" : self._AutoGenObject.ToolChain, + "build_target" : self._AutoGenObject.BuildTarget, + + "platform_build_directory" : self.PlatformInfo.BuildDir, + "module_build_directory" : self._AutoGenObject.BuildDir, + "module_output_directory" : self._AutoGenObject.OutputDir, + "module_debug_directory" : self._AutoGenObject.DebugDir, + + "separator" : Separator, + "module_tool_definitions" : ToolsDef, + + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "custom_makefile_content" : CustomMakefile + } + + return MakefileTemplateDict + + _TemplateDict = property(_CreateTemplateDict) + +## PlatformMakefile class +# +# This class encapsules makefie and its generation for platform. It uses +# template to generate the content of makefile. The content of makefile will be +# got from PlatformAutoGen object. +# +class PlatformMakefile(BuildFile): + ## template used to generate the makefile for platform + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_FILE = ${platform_file} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Build Configuration Macro Definition +# +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +BUILD_DIR = ${platform_build_directory} +FV_DIR = ${platform_build_directory}${separator}FV + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +MAKE = ${make_path} +MAKE_FILE = ${makefile_path} + +# +# Default target +# +all: init build_libraries build_modules + +# +# Initialization target: print build information and create necessary directories +# +init: +\t-@echo Building ... 
$(PLATFORM_FILE) [${build_architecture_list}] +\t${BEGIN}-@${create_directory_command} +\t${END} +# +# library build target +# +libraries: init build_libraries + +# +# module build target +# +modules: init build_libraries build_modules + +# +# Build all libraries: +# +build_libraries: +${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild +${END}\t@cd $(BUILD_DIR) + +# +# Build all modules: +# +build_modules: +${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild +${END}\t@cd $(BUILD_DIR) + +# +# Clean intermediate files +# +clean: +\t${BEGIN}-@${library_build_command} clean +\t${END}${BEGIN}-@${module_build_command} clean +\t${END}@cd $(BUILD_DIR) + +# +# Clean all generated files except to makefile +# +cleanall: +${BEGIN}\t${cleanall_command} +${END} + +# +# Clean all library files +# +cleanlib: +\t${BEGIN}-@${library_build_command} cleanall +\t${END}@cd $(BUILD_DIR)\n +''') + + ## Constructor of PlatformMakefile + # + # @param ModuleAutoGen Object of PlatformAutoGen class + # + def __init__(self, PlatformAutoGen): + BuildFile.__init__(self, PlatformAutoGen) + self.ModuleBuildCommandList = [] + self.ModuleMakefileList = [] + self.IntermediateDirectoryList = [] + self.ModuleBuildDirectoryList = [] + self.LibraryBuildDirectoryList = [] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + + PlatformInfo = self._AutoGenObject + if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]: + EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!", + ExtraData="[%s]" % str(self._AutoGenObject)) + + self.IntermediateDirectoryList = ["$(BUILD_DIR)"] + self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList() + self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList() + + MakefileName = self._FILE_NAME_[self._FileType] + LibraryMakefileList = [] + LibraryMakeCommandList = [] + for D in self.LibraryBuildDirectoryList: + D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir}) + Makefile = os.path.join(D, MakefileName) + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile} + LibraryMakefileList.append(Makefile) + LibraryMakeCommandList.append(Command) + + ModuleMakefileList = [] + ModuleMakeCommandList = [] + for D in self.ModuleBuildDirectoryList: + D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir}) + Makefile = os.path.join(D, MakefileName) + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile} + ModuleMakefileList.append(Makefile) + ModuleMakeCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName), + "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"], + "makefile_name" : MakefileName, + "platform_name" : PlatformInfo.Name, + "platform_guid" : PlatformInfo.Guid, + "platform_version" : PlatformInfo.Version, + "platform_file" : self._AutoGenObject.MetaFile, + "platform_relative_directory": PlatformInfo.SourceDir, + "platform_output_directory" : PlatformInfo.OutputDir, + "platform_build_directory" : PlatformInfo.BuildDir, + + "toolchain_tag" : PlatformInfo.ToolChain, + "build_target" : PlatformInfo.BuildTarget, + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + "build_architecture_list" : self._AutoGenObject.Arch, + 
"architecture" : self._AutoGenObject.Arch, + "separator" : Separator, + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList), + "library_makefile_list" : LibraryMakefileList, + "module_makefile_list" : ModuleMakefileList, + "library_build_command" : LibraryMakeCommandList, + "module_build_command" : ModuleMakeCommandList, + } + + return MakefileTemplateDict + + ## Get the root directory list for intermediate files of all modules build + # + # @retval list The list of directory + # + def GetModuleBuildDirectoryList(self): + DirList = [] + for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir)) + return DirList + + ## Get the root directory list for intermediate files of all libraries build + # + # @retval list The list of directory + # + def GetLibraryBuildDirectoryList(self): + DirList = [] + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir)) + return DirList + + _TemplateDict = property(_CreateTemplateDict) + +## TopLevelMakefile class +# +# This class encapsules makefie and its generation for entrance makefile. It +# uses template to generate the content of makefile. The content of makefile +# will be got from WorkspaceAutoGen object. +# +class TopLevelMakefile(BuildFile): + ## template used to generate toplevel makefile + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} + +# +# Build Configuration Macro Definition +# +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +BUILD_DIR = ${platform_build_directory} +FV_DIR = ${platform_build_directory}${separator}FV + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +MAKE = ${make_path} +MAKE_FILE = ${makefile_path} + +# +# Default target +# +all: modules fds + +# +# Initialization target: print build information and create necessary directories +# +init: +\t-@ +\t${BEGIN}-@${create_directory_command} +\t${END} +# +# library build target +# +libraries: init +${BEGIN}\t@cd $(BUILD_DIR)${separator}${arch} && "$(MAKE)" $(MAKE_FLAGS) libraries +${END}\t@cd $(BUILD_DIR) + +# +# module build target +# +modules: init +${BEGIN}\t@cd $(BUILD_DIR)${separator}${arch} && "$(MAKE)" $(MAKE_FLAGS) modules +${END}\t@cd $(BUILD_DIR) + +# +# Flash Device Image Target +# +fds: init +\t-@cd $(FV_DIR) +${BEGIN}\tGenFds -f ${fdf_file} -o $(BUILD_DIR) -t $(TOOLCHAIN) -b $(TARGET) -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -D ${macro} ${END} + +# +# run command for emulator platform only +# +run: +\tcd $(BUILD_DIR)${separator}IA32 && ".${separator}SecMain" +\tcd $(BUILD_DIR) + +# +# Clean intermediate files +# +clean: +${BEGIN}\t-@${sub_build_command} clean +${END}\t@cd $(BUILD_DIR) + +# +# Clean all generated files except to makefile +# +cleanall: +${BEGIN}\t${cleanall_command} +${END} + +# +# Clean all library files +# +cleanlib: +${BEGIN}\t-@${sub_build_command} cleanlib +${END}\t@cd $(BUILD_DIR)\n +''') + + ## Constructor of TopLevelMakefile + # + # @param Workspace Object of WorkspaceAutoGen class + # + 
def __init__(self, Workspace): + BuildFile.__init__(self, Workspace) + self.IntermediateDirectoryList = [] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + + # any platform autogen object is ok because we just need common information + PlatformInfo = self._AutoGenObject + + if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]: + EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!", + ExtraData="[%s]" % str(self._AutoGenObject)) + + for Arch in PlatformInfo.ArchList: + self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch])) + self.IntermediateDirectoryList.append("$(FV_DIR)") + + # TRICK: for not generating GenFds call in makefile if no FDF file + MacroList = [] + if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "": + FdfFileList = [PlatformInfo.FdfFile] + # macros passed to GenFds + # MacroList.append('"%s=%s"' % ("WORKSPACE", GlobalData.gWorkspace)) + MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource)) + MacroList.append('"%s=%s"' % ("EDK_SOURCE", GlobalData.gEdkSource)) + for MacroName in GlobalData.gGlobalDefines: + if GlobalData.gGlobalDefines[MacroName] != "": + MacroList.append('"%s=%s"' % (MacroName, GlobalData.gGlobalDefines[MacroName])) + else: + MacroList.append('"%s"' % MacroName) + else: + FdfFileList = [] + + # pass extra common options to external program called in makefile, currently GenFds.exe + ExtraOption = '' + LogLevel = EdkLogger.GetLevel() + if LogLevel == EdkLogger.VERBOSE: + ExtraOption += " -v" + elif LogLevel <= EdkLogger.DEBUG_9: + ExtraOption += " -d %d" % (LogLevel - 1) + elif LogLevel == EdkLogger.QUIET: + ExtraOption += " -q" + + if GlobalData.gCaseInsensitive: + ExtraOption += " -c" + + MakefileName = self._FILE_NAME_[self._FileType] + SubBuildCommandList = [] + for A in PlatformInfo.ArchList: + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)} + SubBuildCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName), + "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"], + "platform_name" : PlatformInfo.Name, + "platform_guid" : PlatformInfo.Guid, + "platform_version" : PlatformInfo.Version, + "platform_build_directory" : PlatformInfo.BuildDir, + + "toolchain_tag" : PlatformInfo.ToolChain, + "build_target" : PlatformInfo.BuildTarget, + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + 'arch' : list(PlatformInfo.ArchList), + "build_architecture_list" : ','.join(PlatformInfo.ArchList), + "separator" : Separator, + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList), + "sub_build_command" : SubBuildCommandList, + "fdf_file" : FdfFileList, + "active_platform" : str(PlatformInfo), + "fd" : PlatformInfo.FdTargetList, + "fv" : PlatformInfo.FvTargetList, + "extra_options" : ExtraOption, + "macro" : MacroList, + } + + return MakefileTemplateDict + + ## Get the root directory list for intermediate files of all modules build + # + # @retval list The list of directory + # + def GetModuleBuildDirectoryList(self): + DirList = [] + for ModuleAutoGen 
in self._AutoGenObject.ModuleAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir)) + return DirList + + ## Get the root directory list for intermediate files of all libraries build + # + # @retval list The list of directory + # + def GetLibraryBuildDirectoryList(self): + DirList = [] + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir)) + return DirList + + _TemplateDict = property(_CreateTemplateDict) + +# This acts like the main() function for the script, unless it is 'import'ed into another script. +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py new file mode 100644 index 0000000000..3df493834c --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/StrGather.py @@ -0,0 +1,665 @@ +# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+#
+#This file is used to parse a strings file and create or add to a string database file.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from UniClassObject import *
+from StringIO import StringIO
+from struct import pack
+
+##
+# Static definitions
+#
+EFI_HII_SIBT_END = '0x00'
+EFI_HII_SIBT_STRING_SCSU = '0x10'
+EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
+EFI_HII_SIBT_STRINGS_SCSU = '0x12'
+EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
+EFI_HII_SIBT_STRING_UCS2 = '0x14'
+EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
+EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
+EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
+EFI_HII_SIBT_DUPLICATE = '0x20'
+EFI_HII_SIBT_SKIP2 = '0x21'
+EFI_HII_SIBT_SKIP1 = '0x22'
+EFI_HII_SIBT_EXT1 = '0x30'
+EFI_HII_SIBT_EXT2 = '0x31'
+EFI_HII_SIBT_EXT4 = '0x32'
+EFI_HII_SIBT_FONT = '0x40'
+
+EFI_HII_PACKAGE_STRINGS = '0x04'
+EFI_HII_PACKAGE_FORM = '0x02'
+
+StringPackageType = EFI_HII_PACKAGE_STRINGS
+StringPackageForm = EFI_HII_PACKAGE_FORM
+StringBlockType = EFI_HII_SIBT_STRING_UCS2
+StringSkipType = EFI_HII_SIBT_SKIP2
+
+HexHeader = '0x'
+
+COMMENT = '// '
+DEFINE_STR = '#define'
+COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
+NOT_REFERENCED = 'not referenced'
+COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
+CHAR_ARRAY_DEFIN = 'unsigned char'
+COMMON_FILE_NAME = 'Strings'
+OFFSET = 'offset'
+STRING = 'string'
+TO = 'to'
+STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+COMPATIBLE_STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Za-z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
+
+EFI_HII_ARRAY_SIZE_LENGTH = 4
+EFI_HII_PACKAGE_HEADER_LENGTH = 4
+EFI_HII_HDR_SIZE_LENGTH = 4
+EFI_HII_STRING_OFFSET_LENGTH = 4
+EFI_STRING_ID = 1
+EFI_STRING_ID_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW = 0
+EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
+EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
+EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH
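+# The fixed part of EFI_HII_STRING_PACKAGE_HDR therefore totals 4 + 4 + 4 + 2 * 16 + 2 = 46 bytes;
+# only the NULL-terminated language name that follows it is variable-length.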
+
+H_C_FILE_HEADER = ['//', \
+ '// DO NOT EDIT -- auto-generated file', \
+ '//', \
+ '// This file is generated by the StrGather utility', \
+ '//']
+LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
+PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
+
+## Convert a dec number to a hex string
+#
+# Convert a decimal number to a hex string with a fixed number of digits
+# The number of digits defaults to 8
+# The hex string starts with "0x"
+# DecToHexStr(1000) is '0x000003E8'
+# DecToHexStr(1000, 6) is '0x0003E8'
+#
+# @param Dec: The number in dec format
+# @param Digit: The needed digit of hex string
+#
+# @retval: The formatted hex string
+#
+def DecToHexStr(Dec, Digit = 8):
+ return eval("'0x%0" + str(Digit) + "X' % int(Dec)")
+
+## Convert a dec number to a hex list
+#
+# Convert a decimal number to a list of hex byte strings, least significant byte first
+# The number of hex digits defaults to 8 (i.e. four bytes)
+# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
+# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
+#
+# @param Dec: The number in dec format
+# @param Digit: The needed digit of hex list
+#
+# @retval: A list for formatted hex string
+#
+def DecToHexList(Dec, Digit = 8):
+ Hex = eval("'%0" + str(Digit) + "X' % int(Dec)" )
+ List = []
+ for Bit in range(Digit - 2, -1, -2):
+ List.append(HexHeader + Hex[Bit:Bit + 2])
+ return List
+
+## Convert an ASCII string to a hex list
+#
+# Convert an ASCII string to a formatted hex list
+# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
+#
+# @param Ascii: The ASCII string
+#
+# @retval: A list for formatted hex string
+#
+def AscToHexList(Ascii):
+ List = []
+ for Item in Ascii:
+ List.append('0x%2X' % ord(Item))
+
+ return List
+
+## Create header of .h file
+#
+# Create a header of .h file
+#
+# @param BaseName: The basename of strings
+#
+# @retval Str: A string for .h file header
+#
+def CreateHFileHeader(BaseName):
+ Str = ''
+ for Item in H_C_FILE_HEADER:
+ Str = WriteLine(Str, Item)
+ Str = WriteLine(Str, '#ifndef _' + BaseName.upper() + '_STRINGS_DEFINE_H_')
+ Str = WriteLine(Str, '#define _' + BaseName.upper() + '_STRINGS_DEFINE_H_')
+ return Str
+
+## Create content of .h file
+#
+# Create content of .h file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of .h file content
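+#
+# For example, a referenced token (STR_XXX is a placeholder name) produces a line like
+#   #define STR_XXX                                      0x0002
+# while an unreferenced token is emitted commented out and tagged '// not referenced'.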
+#
+def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ Str = ''
+ ValueStartPtr = 60
+ Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+ Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+
+ # Group the referenced STRING tokens together.
+ for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
+ StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
+ Name = StringItem.StringName
+ Token = StringItem.Token
+ Referenced = StringItem.Referenced
+ if Name != None:
+ Line = ''
+ if Referenced == True:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
+ else:
+ Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
+ Str = WriteLine(Str, Line)
+
+ # Group the unused STRING tokens together.
+ for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
+ StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
+ Name = StringItem.StringName
+ Token = StringItem.Token
+ Referenced = StringItem.Referenced
+ if Name != None:
+ Line = ''
+ if Referenced == False:
+ if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ else:
+ Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
+ Str = WriteLine(Str, Line)
+
+ Str = WriteLine(Str, '')
+ if IsCompatibleMode or UniGenCFlag:
+ Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
+ return Str
+
+## Create a complete .h file
+#
+# Create a complete .h file with file header and file content
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
+#
+# @retval Str: A string of complete .h file
+#
+def CreateHFile(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
+ HFile = WriteLine('', CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag))
+
+ return HFile
+
+## Create header of .c file
+#
+# Create a header of .c file
+#
+# @retval Str: A string for .c file header
+#
+def CreateCFileHeader():
+ Str = ''
+ for Item in H_C_FILE_HEADER:
+ Str = WriteLine(Str, Item)
+
+ return Str
+
+## Create a buffer to store all items in an array
+#
+# @param BinBuffer Buffer to contain Binary data.
+# @param Array: The array to be formatted
+#
+def CreateBinBuffer(BinBuffer, Array):
+ for Item in Array:
+ BinBuffer.write(pack("B", int(Item,16)))
+
+## Create a formatted string for all items in an array
+#
+# Use ',' to join the items in an array, breaking to a new line after every Width items (default is 16)
+#
+# @param Array: The array to be formatted
+# @param Width: The line length, the default value is set to 16
+#
+# @retval ArrayItem: A string for all formatted array items
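+#
+# For example, CreateArrayItem(['0x41', '0x00', '0x42', '0x00'], 2) yields
+#   0x41, 0x00,
+#   0x42, 0x00,
+# i.e. at most Width comma-separated items per line.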
+#
+def CreateArrayItem(Array, Width = 16):
+ MaxLength = Width
+ Index = 0
+ Line = ' '
+ ArrayItem = ''
+
+ for Item in Array:
+ if Index < MaxLength:
+ Line = Line + Item + ', '
+ Index = Index + 1
+ else:
+ ArrayItem = WriteLine(ArrayItem, Line)
+ Line = ' ' + Item + ', '
+ Index = 1
+ ArrayItem = Write(ArrayItem, Line.rstrip())
+
+ return ArrayItem
+
+## CreateCFileStringValue
+#
+# Create a line with string value
+#
+# @param Value: Value of the string
+#
+# @retval Str: A formatted string with string value
+#
+
+def CreateCFileStringValue(Value):
+ Value = [StringBlockType] + Value
+ Str = WriteLine('', CreateArrayItem(Value))
+
+ return Str
+
+## GetFilteredLanguage
+#
+# Apply the "get best language" rules to the UNI language code list
+#
+# @param UniLanguageList: language code definition list in *.UNI file
+# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
+#
+# @retval UniLanguageListFiltered: the filtered language code
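+#
+# For example, GetFilteredLanguage(['en-US', 'fr-FR'], ['en']) returns ['en-US']: there is no
+# exact match for 'en', so the first UNI language with the same primary tag is selected.
+# An empty filter list returns the input list unchanged.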
+#
+def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
+ UniLanguageListFiltered = []
+ # if filter list is empty, then consider there is no filter
+ if LanguageFilterList == []:
+ UniLanguageListFiltered = UniLanguageList
+ return UniLanguageListFiltered
+ for Language in LanguageFilterList:
+ # first check for exact match
+ if Language in UniLanguageList:
+ if Language not in UniLanguageListFiltered:
+ UniLanguageListFiltered += [Language]
+ # find the first one with the same/equivalent primary tag
+ else:
+ if Language.find('-') != -1:
+ PrimaryTag = Language[0:Language.find('-')].lower()
+ else:
+ PrimaryTag = Language
+
+ if len(PrimaryTag) == 3:
+ PrimaryTag = LangConvTable.get(PrimaryTag)
+
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.find('-') != -1:
+ UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
+ else:
+ UniLanguagePrimaryTag = UniLanguage
+
+ if len(UniLanguagePrimaryTag) == 3:
+ UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
+
+ if PrimaryTag == UniLanguagePrimaryTag:
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered += [UniLanguage]
+ break
+ else:
+ # Here is rule 3 for "get best language"
+ # If tag is not listed in the Unicode file, the default ("en") tag should be used for that language
+ # for better processing, find the tag that best suits it.
+ DefaultTag = 'en'
+ if DefaultTag not in UniLanguageListFiltered:
+ # check whether a language code whose primary tag is equivalent to DefaultTag is already in the list; if so, use it
+ for UniLanguage in UniLanguageList:
+ if UniLanguage.startswith('en-') or UniLanguage.startswith('eng-'):
+ if UniLanguage not in UniLanguageListFiltered:
+ UniLanguageListFiltered += [UniLanguage]
+ break
+ else:
+ UniLanguageListFiltered += [DefaultTag]
+ return UniLanguageListFiltered
+
+
+## Create content of .c file
+#
+# Create content of .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible mode
+# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
+# @param FilterInfo Platform language filter information
+#
+# @retval Str: A string of .c file content
+#
+def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
+ #
+ # Init array length
+ #
+ TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
+ Str = ''
+ Offset = 0
+
+ EDK2Module = FilterInfo[0]
+ if EDK2Module:
+ LanguageFilterList = FilterInfo[1]
+ else:
+ # EDK module is using ISO639-2 format filter, convert to the RFC4646 format
+ LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
+
+ UniLanguageList = []
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
+
+ UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
+
+
+ #
+ # Create lines for each language's strings
+ #
+ for IndexI in range(len(UniObjectClass.LanguageDef)):
+ Language = UniObjectClass.LanguageDef[IndexI][0]
+ LangPrintName = UniObjectClass.LanguageDef[IndexI][1]
+ if Language not in UniLanguageListFiltered:
+ continue
+
+ StringBuffer = StringIO()
+ StrStringValue = ''
+ ArrayLength = 0
+ NumberOfUseOtherLangDef = 0
+ Index = 0
+ for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
+ Item = UniObjectClass.FindByToken(IndexJ, Language)
+ Name = Item.StringName
+ Value = Item.StringValueByteList
+ Referenced = Item.Referenced
+ Token = Item.Token
+ Length = Item.Length
+ UseOtherLangDef = Item.UseOtherLangDef
+
+ if UseOtherLangDef != '' and Referenced:
+ NumberOfUseOtherLangDef = NumberOfUseOtherLangDef + 1
+ Index = Index + 1
+ else:
+ if NumberOfUseOtherLangDef > 0:
+ StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ CreateBinBuffer (StringBuffer, ([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
+ NumberOfUseOtherLangDef = 0
+ ArrayLength = ArrayLength + 3
+ if Referenced and Item.Token > 0:
+ Index = Index + 1
+ StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
+ StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
+ CreateBinBuffer (StringBuffer, [StringBlockType] + Value)
+ ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the length of string type
+
+ #
+ # EFI_HII_PACKAGE_HEADER
+ #
+ Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
+ ArrayLength = Offset + ArrayLength + 1
+
+ #
+ # Create PACKAGE HEADER
+ #
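+ # The list below follows the EFI_HII_STRING_PACKAGE_HDR layout: 24-bit package length and
+ # 8-bit package type, HdrSize, StringInfoOffset, the 16-entry CHAR16 LanguageWindow, the
+ # LanguageName string id, and the NULL-terminated ASCII language name.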
+ Str = WriteLine(Str, '// PACKAGE HEADER\n')
+ TotalLength = TotalLength + ArrayLength
+
+ List = DecToHexList(ArrayLength, 6) + \
+ [StringPackageType] + \
+ DecToHexList(Offset) + \
+ DecToHexList(Offset) + \
+ DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
+ DecToHexList(EFI_STRING_ID, 4) + \
+ AscToHexList(Language) + \
+ DecToHexList(0, 2)
+ Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')
+
+ #
+ # Create PACKAGE DATA
+ #
+ Str = WriteLine(Str, '// PACKAGE DATA\n')
+ Str = Write(Str, StrStringValue)
+
+ #
+ # Append an EFI_HII_SIBT_END block at the end
+ #
+ Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
+
+ #
+ # Create binary UNI string
+ #
+ if UniBinBuffer:
+ CreateBinBuffer (UniBinBuffer, List)
+ UniBinBuffer.write (StringBuffer.getvalue())
+ UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END,16)))
+ StringBuffer.close()
+
+ #
+ # Create line for string variable name
+ # "unsigned char $(BaseName)Strings[] = {"
+ #
+ AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n' )
+
+ if IsCompatibleMode:
+ #
+ # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
+ #
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
+ AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')
+ else:
+ #
+ # Create whole array length in UEFI mode
+ #
+ AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
+ AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
+
+ #
+ # Join package data
+ #
+ AllStr = Write(AllStr, Str)
+
+ return AllStr
+
+## Create end of .c file
+#
+# Create end of .c file
+#
+# @retval Str: A string for the .c file end
+#
+def CreateCFileEnd():
+ Str = Write('', '};')
+ return Str
+
+## Create a .c file
+#
+# Create a complete .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass A UniObjectClass instance
+# @param IsCompatibleMode Compatible Mode
+# @param FilterInfo Platform language filter information
+#
+# @retval CFile: A string of complete .c file
+#
+def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
+ CFile = ''
+ #CFile = WriteLine(CFile, CreateCFileHeader())
+ CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo))
+ CFile = WriteLine(CFile, CreateCFileEnd())
+ return CFile
+
+## GetFileList
+#
+# Get a list for all files
+#
+# @param IncludeList: A list of all path to be searched
+# @param SkipList: A list of file types to be skipped
+#
+# @retval FileList: A list of all files found
+#
+def GetFileList(SourceFileList, IncludeList, SkipList):
+ if IncludeList == None:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
+
+ FileList = []
+ if SkipList == None:
+ SkipList = []
+
+ for File in SourceFileList:
+ for Dir in IncludeList:
+ if not os.path.exists(Dir):
+ continue
+ File = os.path.join(Dir, File.Path)
+ #
+ # Ignore Dir
+ #
+ if os.path.isfile(File) != True:
+ continue
+ #
+ # Ignore file listed in skip list
+ #
+ IsSkip = False
+ for Skip in SkipList:
+ if os.path.splitext(File)[1].upper() == Skip.upper():
+ EdkLogger.verbose("Skipped %s for string token uses search" % File)
+ IsSkip = True
+ break
+
+ if not IsSkip:
+ FileList.append(File)
+
+ break
+
+ return FileList
+
+## SearchString
+#
+# Search whether all strings defined in UniObjectClass are referenced
+# Every string that is used will be marked as Referenced
+#
+# @param UniObjectClass: Input UniObjectClass
+# @param FileList: Search path list
+# @param IsCompatibleMode Compatible Mode
+#
+# @retval UniObjectClass: UniObjectClass after the search
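+#
+# For example, a source line such as 'Token = STRING_TOKEN (STR_XXX)' (placeholder name) marks
+# STR_XXX as referenced; COMPATIBLE_STRING_TOKEN additionally accepts lower-case identifiers.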
+#
+def SearchString(UniObjectClass, FileList, IsCompatibleMode):
+ if FileList == []:
+ return UniObjectClass
+
+ for File in FileList:
+ if os.path.isfile(File):
+ Lines = open(File, 'r')
+ for Line in Lines:
+ if not IsCompatibleMode:
+ StringTokenList = STRING_TOKEN.findall(Line)
+ else:
+ StringTokenList = COMPATIBLE_STRING_TOKEN.findall(Line)
+ for StrName in StringTokenList:
+ EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
+ UniObjectClass.SetStringReferenced(StrName)
+
+ UniObjectClass.ReToken()
+
+ return UniObjectClass
+
+## GetStringFiles
+#
+# This function is used for UEFI2.1 spec
+#
+#
+def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
+ Status = True
+ ErrorMessage = ''
+
+ if len(UniFilList) > 0:
+ if ShellMode:
+ #
+ # support ISO 639-2 codes in .UNI files of EDK Shell
+ #
+ Uni = UniFileClassObject(sorted (UniFilList), True, IncludePathList)
+ else:
+ Uni = UniFileClassObject(sorted (UniFilList), IsCompatibleMode, IncludePathList)
+ else:
+ EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
+
+ FileList = GetFileList(SourceFileList, IncludeList, SkipList)
+
+ Uni = SearchString(Uni, sorted (FileList), IsCompatibleMode)
+
+ HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
+ CFile = None
+ if IsCompatibleMode or UniGenCFlag:
+ CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
+ if UniGenBinBuffer:
+ CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
+
+ return HFile, CFile
+
+#
+# Write an item
+#
+def Write(Target, Item):
+ return Target + Item
+
+#
+# Write an item with a break line
+#
+def WriteLine(Target, Item):
+ return Target + Item + '\n'
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ EdkLogger.info('start')
+
+ UniFileList = [
+ r'C:\\Edk\\Strings2.uni',
+ r'C:\\Edk\\Strings.uni'
+ ]
+
+ SrcFileList = []
+ for Root, Dirs, Files in os.walk('C:\\Edk'):
+ for File in Files:
+ SrcFileList.append(File)
+
+ IncludeList = [
+ r'C:\\Edk'
+ ]
+
+ SkipList = ['.inf', '.uni']
+ BaseName = 'DriverSample'
+ # IncludeList is reused here as the .uni include search path (IncludePathList argument)
+ (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, IncludeList, SkipList, BaseName, True)
+ hfile = open('unistring.h', 'w')
+ cfile = open('unistring.c', 'w')
+ hfile.write(h)
+ cfile.write(c)
+
+ EdkLogger.info('end')
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py new file mode 100644 index 0000000000..1eb65c1e9a --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py @@ -0,0 +1,571 @@ +# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+#
+#This file is used to collect all defined strings in multiple uni files
+#
+
+##
+# Import Modules
+#
+import os, codecs, re
+import distutils.util
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.String import GetLineNo
+from Common.Misc import PathClass
+
+##
+# Static definitions
+#
+UNICODE_WIDE_CHAR = u'\\wide'
+UNICODE_NARROW_CHAR = u'\\narrow'
+UNICODE_NON_BREAKING_CHAR = u'\\nbr'
+UNICODE_UNICODE_CR = '\r'
+UNICODE_UNICODE_LF = '\n'
+
+NARROW_CHAR = u'\uFFF0'
+WIDE_CHAR = u'\uFFF1'
+NON_BREAKING_CHAR = u'\uFFF2'
+CR = u'\u000D'
+LF = u'\u000A'
+NULL = u'\u0000'
+TAB = u'\t'
+BACK_SPLASH = u'\\'
+
+gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
+
+## Convert a python unicode string to a normal string
+#
+# Convert a python unicode string to a normal string
+# UniToStr(u'I am a string') is 'I am a string'
+#
+# @param Uni: The python unicode string
+#
+# @retval: The formatted normal string
+#
+def UniToStr(Uni):
+ return repr(Uni)[2:-1]
+
+## Convert a unicode string to a Hex list
+#
+# Convert a unicode string to a Hex list
+# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
+#
+# @param Uni: The python unicode string
+#
+# @retval List: The formatted hex list
+#
+def UniToHexList(Uni):
+ List = []
+ for Item in Uni:
+ Temp = '%04X' % ord(Item)
+ List.append('0x' + Temp[2:4])
+ List.append('0x' + Temp[0:2])
+ return List
+
+LangConvTable = {'eng':'en', 'fra':'fr', \
+ 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
+ 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
+ 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
+ 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
+ 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
+ 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
+ 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
+ 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
+ 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
+ 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
+ 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
+ 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
+ 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
+ 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
+ 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
+ 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
+ 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
+ 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
+ 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
+ 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
+ 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
+ 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
+ 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
+ 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
+ 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
+ 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
+ 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
+ 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
+ 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
+ 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
+ 'zho':'zh', 'zul':'zu'}
+
+## GetLanguageCode
+#
+# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
+# ISO 639-2 language codes supported in compatibility mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid language code in RFC 4646 format or None
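+#
+# For example, GetLanguageCode('eng', True, File) returns 'en' (ISO 639-2 mapped through
+# LangConvTable in compatible mode), while GetLanguageCode('en-US', False, File) is returned unchanged.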
+#
+def GetLanguageCode(LangName, IsCompatibleMode, File):
+ global LangConvTable
+
+ length = len(LangName)
+ if IsCompatibleMode:
+ if length == 3 and LangName.isalpha():
+ TempLangName = LangConvTable.get(LangName.lower())
+ if TempLangName != None:
+ return TempLangName
+ return LangName
+ else:
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)
+
+ if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
+ return LangName
+ if length == 2:
+ if LangName.isalpha():
+ return LangName
+ elif length == 3:
+ if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
+ return LangName
+ elif length == 5:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ elif length >= 6:
+ if LangName[0:2].isalpha() and LangName[2] == '-':
+ return LangName
+ if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
+ return LangName
+
+ EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
+
+## StringDefClassObject
+#
+# A structure for language definition
+#
+class StringDefClassObject(object):
+ def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
+ self.StringName = ''
+ self.StringNameByteList = []
+ self.StringValue = ''
+ self.StringValueByteList = ''
+ self.Token = 0
+ self.Referenced = Referenced
+ self.UseOtherLangDef = UseOtherLangDef
+ self.Length = 0
+
+ if Name != None:
+ self.StringName = Name
+ self.StringNameByteList = UniToHexList(Name)
+ if Value != None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+ if Token != None:
+ self.Token = Token
+
+ def __str__(self):
+ return repr(self.StringName) + ' ' + \
+ repr(self.Token) + ' ' + \
+ repr(self.Referenced) + ' ' + \
+ repr(self.StringValue) + ' ' + \
+ repr(self.UseOtherLangDef)
+
+ def UpdateValue(self, Value = None):
+ if Value != None:
+ self.StringValue = Value + u'\x00' # Add a NULL at string tail
+ self.StringValueByteList = UniToHexList(self.StringValue)
+ self.Length = len(self.StringValueByteList)
+
+## UniFileClassObject
+#
+# A structure for .uni file definition
+#
+class UniFileClassObject(object):
+ def __init__(self, FileList = [], IsCompatibleMode = False, IncludePathList = []):
+ self.FileList = FileList
+ self.Token = 2
+ self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
+ self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
+ self.IsCompatibleMode = IsCompatibleMode
+ self.IncludePathList = IncludePathList
+ if len(self.FileList) > 0:
+ self.LoadUniFiles(FileList)
+
+ #
+ # Get Language definition
+ #
+ def GetLangDef(self, File, Line):
+ Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
+ if len(Lang) != 3:
+ try:
+ FileIn = codecs.open(File.Path, mode='rb', encoding='utf-16').read()
+ except UnicodeError, X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
+ LineNo = GetLineNo(FileIn, Line, False)
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
+ ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File = File, Line = LineNo)
+ else:
+ LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
+ LangPrintName = Lang[2]
+
+ IsLangInDef = False
+ for Item in self.LanguageDef:
+ if Item[0] == LangName:
+ IsLangInDef = True
+ break;
+
+ if not IsLangInDef:
+ self.LanguageDef.append([LangName, LangPrintName])
+
+ #
+ # Add language string
+ #
+ self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
+ self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
+
+ if not IsLangInDef:
+ #
+ # The found STRING tokens will be added into new language string list
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+ if LangName != FirstLangName:
+ for Index in range (2, len (self.OrderedStringList[FirstLangName])):
+ Item = self.OrderedStringList[FirstLangName][Index]
+ if Item.UseOtherLangDef != '':
+ OtherLang = Item.UseOtherLangDef
+ else:
+ OtherLang = FirstLangName
+ self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName, '', Item.Referenced, Item.Token, OtherLang))
+
+ return True
+
+ #
+ # Get String name and value
+ #
+ def GetStringObject(self, Item):
+ Name = ''
+ Language = ''
+ Value = ''
+
+ Name = Item.split()[1]
+ # Check that the string name contains only upper-case characters
+ if not self.IsCompatibleMode and Name != '':
+ MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
+ if MatchString == None or MatchString.end(0) != len(Name):
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))
+ LanguageList = Item.split(u'#language ')
+ for IndexI in range(len(LanguageList)):
+ if IndexI == 0:
+ continue
+ else:
+ Language = LanguageList[IndexI].split()[0]
+ Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ self.AddStringToList(Name, Language, Value)
+
+ #
+ # Get include file list and load them
+ #
+ def GetIncludeFile(self, Item, Dir):
+ FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
+ self.LoadUniFile(FileName)
+
+ #
+ # Pre-process before parse .uni file
+ #
+ def PreProcess(self, File):
+ if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)
+
+ try:
+ FileIn = codecs.open(File.Path, mode='rb', encoding='utf-16').readlines()
+ except UnicodeError, X:
+ EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);
+
+ Lines = []
+ #
+ # Normalize each line, using U+0006 as a unique placeholder for escaped back-slashes
+ #
+ for Line in FileIn:
+ Line = Line.strip()
+ #
+ # Ignore comment line and empty line
+ #
+ if Line == u'' or Line.startswith(u'//'):
+ continue
+ Line = Line.replace(u'/langdef', u'#langdef')
+ Line = Line.replace(u'/string', u'#string')
+ Line = Line.replace(u'/language', u'#language')
+ Line = Line.replace(u'/include', u'#include')
+
+ Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
+ Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
+ Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
+
+ Line = Line.replace(u'\\\\', u'\u0006')
+ Line = Line.replace(u'\\r\\n', CR + LF)
+ Line = Line.replace(u'\\n', CR + LF)
+ Line = Line.replace(u'\\r', CR)
+ Line = Line.replace(u'\\t', u'\t')
+ Line = Line.replace(u'''\"''', u'''"''')
+ Line = Line.replace(u'\t', u' ')
+ Line = Line.replace(u'\u0006', u'\\')
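+ # At this point '\n' and '\r\n' in the source text have become CR+LF, '\wide' / '\narrow' /
+ # '\nbr' have become their control characters, and a doubled back-slash is a single back-slash.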
+
+# if Line.find(u'\\x'):
+# hex = Line[Line.find(u'\\x') + 2 : Line.find(u'\\x') + 6]
+# hex = "u'\\u" + hex + "'"
+
+ IncList = gIncludePattern.findall(Line)
+ if len(IncList) == 1:
+ for Dir in [File.Dir] + self.IncludePathList:
+ IncFile = PathClass(str(IncList[0]), Dir)
+ if os.path.isfile(IncFile.Path):
+ Lines.extend(self.PreProcess(IncFile))
+ break
+ else:
+ EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, Message="Cannot find include file", ExtraData=str(IncList[0]))
+ continue
+
+ Lines.append(Line)
+
+ return Lines
+
+ #
+ # Load a .uni file
+ #
+ def LoadUniFile(self, File = None):
+ if File == None:
+ EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
+ self.File = File
+ #
+ # Process special char in file
+ #
+ Lines = self.PreProcess(File)
+
+ #
+ # Get Unicode Information
+ #
+ for IndexI in range(len(Lines)):
+ Line = Lines[IndexI]
+ if (IndexI + 1) < len(Lines):
+ SecondLine = Lines[IndexI + 1]
+ if (IndexI + 2) < len(Lines):
+ ThirdLine = Lines[IndexI + 2]
+
+ #
+ # Get Language def information
+ #
+ if Line.find(u'#langdef ') >= 0:
+ self.GetLangDef(File, Line)
+ continue
+
+ Name = ''
+ Language = ''
+ Value = ''
+ #
+ # Get string def information format 1 as below
+ #
+ # #string MY_STRING_1
+ # #language eng
+ # My first English string line 1
+ # My first English string line 2
+ # #string MY_STRING_1
+ # #language spa
+ # Mi segunda secuencia 1
+ # Mi segunda secuencia 2
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
+ SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
+ ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
+ Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
+ Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
+ for IndexJ in range(IndexI + 2, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
+ Value = Value + Lines[IndexJ]
+ else:
+ IndexI = IndexJ
+ break
+ # Value = Value.replace(u'\r\n', u'')
+ Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
+ # Check that the string name contains only upper-case characters
+ if not self.IsCompatibleMode and Name != '':
+ MatchString = re.match('[A-Z0-9_]+', Name, re.UNICODE)
+ if MatchString == None or MatchString.end(0) != len(Name):
+ EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains the invalid lower case character.' %(Name, self.File))
+ self.AddStringToList(Name, Language, Value)
+ continue
+
+ #
+ # Get string def information format 2 as below
+ #
+ # #string MY_STRING_1 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ # #string MY_STRING_2 #language eng "My first English string line 1"
+ # "My first English string line 2"
+ # #string MY_STRING_2 #language spa "Mi segunda secuencia 1"
+ # "Mi segunda secuencia 2"
+ #
+ if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
+ StringItem = Line
+ for IndexJ in range(IndexI + 1, len(Lines)):
+ if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ IndexI = IndexJ
+ break
+ elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
+ StringItem = StringItem + Lines[IndexJ]
+ elif Lines[IndexJ].count(u'\"') >= 2:
+ StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
+ self.GetStringObject(StringItem)
+ continue
+
+ #
+ # Load multiple .uni files
+ #
+ def LoadUniFiles(self, FileList):
+ if len(FileList) > 0:
+ for File in FileList:
+ self.LoadUniFile(File)
+
+ #
+ # Add a string to list
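+ # A token added for one language is also mirrored into every other defined language (with an
+ # empty value and UseOtherLangDef pointing at the defining language) so token numbering stays aligned.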
+ #
+ def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
+ for LangNameItem in self.LanguageDef:
+ if Language == LangNameItem[0]:
+ break
+ else:
+ EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
+ % (Language, Name, self.File))
+
+ if Language not in self.OrderedStringList:
+ self.OrderedStringList[Language] = []
+
+ IsAdded = True
+ for Item in self.OrderedStringList[Language]:
+ if Name == Item.StringName:
+ IsAdded = False
+ if Value != None:
+ Item.UpdateValue(Value)
+ Item.UseOtherLangDef = ''
+ break
+
+ if IsAdded:
+ Token = len(self.OrderedStringList[Language])
+ if Index == -1:
+ self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+ for LangName in self.LanguageDef:
+ #
+ # New STRING token will be added into all language string lists.
+ # so that the unique STRING identifier is reserved for all languages in the package list.
+ #
+ if LangName[0] != Language:
+ if UseOtherLangDef != '':
+ OtherLangDef = UseOtherLangDef
+ else:
+ OtherLangDef = Language
+ self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name, '', Referenced, Token, OtherLangDef))
+ else:
+ self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
+
+ #
+ # Set the string as referenced
+ #
+ def SetStringReferenced(self, Name):
+ #
+ # String tokens are added in the same order to all language string lists,
+ # so only the status of the string token in the first language string list needs updating.
+ #
+ Lang = self.LanguageDef[0][0]
+ for Item in self.OrderedStringList[Lang]:
+ if Name == Item.StringName:
+ Item.Referenced = True
+ break
+ #
+ # Search the string in language definition by Name
+ #
+ def FindStringValue(self, Name, Lang):
+ for Item in self.OrderedStringList[Lang]:
+ if Item.StringName == Name:
+ return Item
+
+ return None
+
+ #
+ # Search the string in language definition by Token
+ #
+ def FindByToken(self, Token, Lang):
+ for Item in self.OrderedStringList[Lang]:
+ if Item.Token == Token:
+ return Item
+
+ return None
+
+ #
+ # Re-order strings and re-generate tokens
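+ # For example, if the first language list holds three strings A, B and C (placeholder names)
+ # where only A and C are referenced, the result is A=0, C=1 and then B=2, applied identically
+ # to every language list.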
+ #
+ def ReToken(self):
+ #
+ # Re-token all language strings according to the referenced status of the string tokens in the first language string list.
+ #
+ FirstLangName = self.LanguageDef[0][0]
+
+ #
+ # Assign small token values to all referenced string tokens.
+ #
+ RefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+ if FirstLangItem.Referenced == True:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Referenced = True
+ OtherLangItem.Token = RefToken
+ RefToken = RefToken + 1
+
+ #
+ # Assign large token values to all unreferenced string tokens.
+ #
+ UnRefToken = 0
+ for Index in range (0, len (self.OrderedStringList[FirstLangName])):
+ FirstLangItem = self.OrderedStringList[FirstLangName][Index]
+ if FirstLangItem.Referenced == False:
+ for LangNameItem in self.LanguageDef:
+ LangName = LangNameItem[0]
+ OtherLangItem = self.OrderedStringList[LangName][Index]
+ OtherLangItem.Token = RefToken + UnRefToken
+ UnRefToken = UnRefToken + 1
+
+ #
+ # Show the instance itself
+ #
+ def ShowMe(self):
+ print self.LanguageDef
+ #print self.OrderedStringList
+ for Item in self.OrderedStringList:
+ print Item
+ for Member in self.OrderedStringList[Item]:
+ print str(Member)
+
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+ a = UniFileClassObject(['C:\\Edk\\Strings.uni', 'C:\\Edk\\Strings2.uni'])
+ a.ReToken()
+ a.ShowMe()
diff --git a/BaseTools/Source/Python/AutoGen/__init__.py b/BaseTools/Source/Python/AutoGen/__init__.py new file mode 100644 index 0000000000..d2de425732 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/__init__.py @@ -0,0 +1,17 @@ +## @file
+# Python 'AutoGen' package initialization file.
+#
+# This file is required to make Python interpreter treat the directory
+# as containing package.
+#
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+__all__ = ["AutoGen"]
|