path: root/BaseTools/Source/Python/AutoGen
author     Guo Mang <mang.guo@intel.com>  2018-04-25 17:24:58 +0800
committer  Guo Mang <mang.guo@intel.com>  2018-04-25 17:26:11 +0800
commit     6e3789d7424660b14ef3d7123221c97db5d8aff5 (patch)
tree       6a5a7f1e0bc5a5296f2de0c8f02091c85e3443b7 /BaseTools/Source/Python/AutoGen
parent     d33896d88d9d32d516129e92e25b80f8fddc6f7b (diff)
download   edk2-platforms-6e3789d7424660b14ef3d7123221c97db5d8aff5.tar.xz
Remove unused files
Contributed-under: TianoCore Contribution Agreement 1.1
Signed-off-by: Guo Mang <mang.guo@intel.com>
Diffstat (limited to 'BaseTools/Source/Python/AutoGen')
-rw-r--r--  BaseTools/Source/Python/AutoGen/AutoGen.py                  4302
-rw-r--r--  BaseTools/Source/Python/AutoGen/BuildEngine.py               643
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenC.py                     2023
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenDepex.py                  448
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenMake.py                  1530
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenPcdDb.py                 1703
-rw-r--r--  BaseTools/Source/Python/AutoGen/IdfClassObject.py            162
-rw-r--r--  BaseTools/Source/Python/AutoGen/InfSectionParser.py          107
-rw-r--r--  BaseTools/Source/Python/AutoGen/StrGather.py                 656
-rw-r--r--  BaseTools/Source/Python/AutoGen/UniClassObject.py            701
-rw-r--r--  BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py   350
-rw-r--r--  BaseTools/Source/Python/AutoGen/__init__.py                   17
12 files changed, 0 insertions, 12642 deletions
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
deleted file mode 100644
index 356eb21310..0000000000
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ /dev/null
@@ -1,4302 +0,0 @@
-## @file
-# Generate AutoGen.h, AutoGen.c and *.depex files
-#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-## Import Modules
-#
-import Common.LongFilePathOs as os
-import re
-import os.path as path
-import copy
-import uuid
-
-import GenC
-import GenMake
-import GenDepex
-from StringIO import StringIO
-
-from StrGather import *
-from BuildEngine import BuildRule
-
-from Common.LongFilePathSupport import CopyLongFilePath
-from Common.BuildToolError import *
-from Common.DataType import *
-from Common.Misc import *
-from Common.String import *
-import Common.GlobalData as GlobalData
-from GenFds.FdfParser import *
-from CommonDataClass.CommonClass import SkuInfoClass
-from Workspace.BuildClassObject import *
-from GenPatchPcdTable.GenPatchPcdTable import parsePcdInfoFromMapFile
-import Common.VpdInfoFile as VpdInfoFile
-from GenPcdDb import CreatePcdDatabaseCode
-from Workspace.MetaFileCommentParser import UsageList
-from Common.MultipleWorkspace import MultipleWorkspace as mws
-import InfSectionParser
-import datetime
-
-## Regular expression for splitting Dependency Expression string into tokens
-gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
-
-#
-# Match name = variable
-#
-gEfiVarStoreNamePattern = re.compile("\s*name\s*=\s*(\w+)")
-#
-# The guid in an efivarstore statement must use the following format:
-# guid = {0xA04A27f4, 0xDF00, 0x4D42, {0xB5, 0x52, 0x39, 0x51, 0x13, 0x02, 0x11, 0x3D}}
-#
-gEfiVarStoreGuidPattern = re.compile("\s*guid\s*=\s*({.*?{.*?}\s*})")
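(As an aside, not part of the original file: a minimal, self-contained sketch of what the depex and efivarstore-name patterns above match; the sample strings below are hypothetical.)

    import re

    # Same expressions as above, written as raw strings.
    gDepexTokenPattern = re.compile(r"(\(|\)|\w+| \S+\.inf)")
    gEfiVarStoreNamePattern = re.compile(r"\s*name\s*=\s*(\w+)")

    # Tokenize a hypothetical dependency expression.
    print(gDepexTokenPattern.findall("( gEfiPcdProtocolGuid AND gEfiHiiDatabaseProtocolGuid )"))
    # -> ['(', 'gEfiPcdProtocolGuid', 'AND', 'gEfiHiiDatabaseProtocolGuid', ')']

    # Extract the variable name from a hypothetical efivarstore line.
    print(gEfiVarStoreNamePattern.match("  name = MyIfrNVData").group(1))
    # -> 'MyIfrNVData'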
-
-## Mapping Makefile type
-gMakeTypeMap = {"MSFT":"nmake", "GCC":"gmake"}
-
-
-## Build rule configuration file
-gDefaultBuildRuleFile = 'build_rule.txt'
-
-## Tools definition configuration file
-gDefaultToolsDefFile = 'tools_def.txt'
-
-## Build rule default version
-AutoGenReqBuildRuleVerNum = "0.1"
-
-## default file name for AutoGen
-gAutoGenCodeFileName = "AutoGen.c"
-gAutoGenHeaderFileName = "AutoGen.h"
-gAutoGenStringFileName = "%(module_name)sStrDefs.h"
-gAutoGenStringFormFileName = "%(module_name)sStrDefs.hpk"
-gAutoGenDepexFileName = "%(module_name)s.depex"
-gAutoGenImageDefFileName = "%(module_name)sImgDefs.h"
-gAutoGenIdfFileName = "%(module_name)sIdf.hpk"
-gInfSpecVersion = "0x00010017"
-
-#
-# Template string to generate the As-Built INF
-#
-gAsBuiltInfHeaderString = TemplateString("""${header_comments}
-
-# DO NOT EDIT
-# FILE auto-generated
-
-[Defines]
- INF_VERSION = ${module_inf_version}
- BASE_NAME = ${module_name}
- FILE_GUID = ${module_guid}
- MODULE_TYPE = ${module_module_type}${BEGIN}
- VERSION_STRING = ${module_version_string}${END}${BEGIN}
- PCD_IS_DRIVER = ${pcd_is_driver_string}${END}${BEGIN}
- UEFI_SPECIFICATION_VERSION = ${module_uefi_specification_version}${END}${BEGIN}
- PI_SPECIFICATION_VERSION = ${module_pi_specification_version}${END}${BEGIN}
- ENTRY_POINT = ${module_entry_point}${END}${BEGIN}
- UNLOAD_IMAGE = ${module_unload_image}${END}${BEGIN}
- CONSTRUCTOR = ${module_constructor}${END}${BEGIN}
- DESTRUCTOR = ${module_destructor}${END}${BEGIN}
- SHADOW = ${module_shadow}${END}${BEGIN}
- PCI_VENDOR_ID = ${module_pci_vendor_id}${END}${BEGIN}
- PCI_DEVICE_ID = ${module_pci_device_id}${END}${BEGIN}
- PCI_CLASS_CODE = ${module_pci_class_code}${END}${BEGIN}
- PCI_REVISION = ${module_pci_revision}${END}${BEGIN}
- BUILD_NUMBER = ${module_build_number}${END}${BEGIN}
- SPEC = ${module_spec}${END}${BEGIN}
- UEFI_HII_RESOURCE_SECTION = ${module_uefi_hii_resource_section}${END}${BEGIN}
- MODULE_UNI_FILE = ${module_uni_file}${END}
-
-[Packages.${module_arch}]${BEGIN}
- ${package_item}${END}
-
-[Binaries.${module_arch}]${BEGIN}
- ${binary_item}${END}
-
-[PatchPcd.${module_arch}]${BEGIN}
- ${patchablepcd_item}
-${END}
-
-[Protocols.${module_arch}]${BEGIN}
- ${protocol_item}
-${END}
-
-[Ppis.${module_arch}]${BEGIN}
- ${ppi_item}
-${END}
-
-[Guids.${module_arch}]${BEGIN}
- ${guid_item}
-${END}
-
-[PcdEx.${module_arch}]${BEGIN}
- ${pcd_item}
-${END}
-
-[LibraryClasses.${module_arch}]
-## @LIB_INSTANCES${BEGIN}
-# ${libraryclasses_item}${END}
-
-${depexsection_item}
-
-${tail_comments}
-
-[BuildOptions.${module_arch}]
-## @AsBuilt${BEGIN}
-## ${flags_item}${END}
-""")
-
-## Base class for AutoGen
-#
-# This class just implements the cache mechanism of AutoGen objects.
-#
-class AutoGen(object):
- # database to maintain the objects of xxxAutoGen
- _CACHE_ = {} # (BuildTarget, ToolChain) : {ARCH : {platform file: AutoGen object}}}
-
- ## Factory method
- #
- # @param Class class object of real AutoGen class
- # (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
- # @param Workspace Workspace directory or WorkspaceAutoGen object
- # @param MetaFile The path of meta file
- # @param Target Build target
- # @param Toolchain Tool chain name
- # @param Arch Target arch
- # @param *args The specific class related parameters
- # @param **kwargs The specific class related dict parameters
- #
- def __new__(Class, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
- # check if the object has been created
- Key = (Target, Toolchain)
- if Key not in Class._CACHE_ or Arch not in Class._CACHE_[Key] \
- or MetaFile not in Class._CACHE_[Key][Arch]:
- AutoGenObject = super(AutoGen, Class).__new__(Class)
- # call real constructor
- if not AutoGenObject._Init(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
- return None
- if Key not in Class._CACHE_:
- Class._CACHE_[Key] = {}
- if Arch not in Class._CACHE_[Key]:
- Class._CACHE_[Key][Arch] = {}
- Class._CACHE_[Key][Arch][MetaFile] = AutoGenObject
- else:
- AutoGenObject = Class._CACHE_[Key][Arch][MetaFile]
-
- return AutoGenObject
-
- ## hash() operator
- #
- # The file path of the platform file is used as the hash value of this object
- #
- # @retval int Hash value of the file path of platform file
- #
- def __hash__(self):
- return hash(self.MetaFile)
-
- ## str() operator
- #
- # The file path of the platform file is used to represent this object
- #
- # @retval string String of platform file path
- #
- def __str__(self):
- return str(self.MetaFile)
-
- ## "==" operator
- def __eq__(self, Other):
- return Other and self.MetaFile == Other
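(A standalone sketch, not from the original file, of the keyed-cache factory pattern that __new__() above implements; the class name, key fields and argument values here are illustrative only.)

    class CachedAutoGenLike(object):
        # (target, toolchain) -> {arch: {metafile: instance}}, mirroring _CACHE_ above
        _CACHE_ = {}

        def __new__(cls, metafile, target, toolchain, arch):
            key = (target, toolchain)
            cached = cls._CACHE_.get(key, {}).get(arch, {}).get(metafile)
            if cached is not None:
                return cached          # reuse the object created earlier
            obj = super(CachedAutoGenLike, cls).__new__(cls)
            obj.MetaFile = metafile
            cls._CACHE_.setdefault(key, {}).setdefault(arch, {})[metafile] = obj
            return obj

        def __hash__(self):
            return hash(self.MetaFile)

    a = CachedAutoGenLike("Platform.dsc", "DEBUG", "GCC5", "X64")
    b = CachedAutoGenLike("Platform.dsc", "DEBUG", "GCC5", "X64")
    assert a is b   # the second call returns the cached instance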
-
-## Workspace AutoGen class
-#
-# This class is used mainly to control the whole platform build for different
-# architectures. This class generates the top-level makefile.
-#
-class WorkspaceAutoGen(AutoGen):
- ## Real constructor of WorkspaceAutoGen
- #
- # This method behaves the same as __init__ except that it must be invoked explicitly
- # (from the super class's __new__ method)
- #
- # @param WorkspaceDir Root directory of workspace
- # @param ActivePlatform Meta-file of active platform
- # @param Target Build target
- # @param Toolchain Tool chain name
- # @param ArchList List of architecture of current build
- # @param MetaFileDb Database containing meta-files
- # @param BuildConfig Configuration of build
- # @param ToolDefinition Tool chain definitions
- # @param FlashDefinitionFile File of flash definition
- # @param Fds FD list to be generated
- # @param Fvs FV list to be generated
- # @param Caps Capsule list to be generated
- # @param SkuId SKU id from command line
- #
- def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
- BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, Caps=None, SkuId='', UniFlag=None,
- Progress=None, BuildModule=None):
- if Fds is None:
- Fds = []
- if Fvs is None:
- Fvs = []
- if Caps is None:
- Caps = []
- self.BuildDatabase = MetaFileDb
- self.MetaFile = ActivePlatform
- self.WorkspaceDir = WorkspaceDir
- self.Platform = self.BuildDatabase[self.MetaFile, 'COMMON', Target, Toolchain]
- GlobalData.gActivePlatform = self.Platform
- self.BuildTarget = Target
- self.ToolChain = Toolchain
- self.ArchList = ArchList
- self.SkuId = SkuId
- self.UniFlag = UniFlag
-
- self.TargetTxt = BuildConfig
- self.ToolDef = ToolDefinition
- self.FdfFile = FlashDefinitionFile
- self.FdTargetList = Fds
- self.FvTargetList = Fvs
- self.CapTargetList = Caps
- self.AutoGenObjectList = []
-
- # there are many relative directory operations, so ...
- os.chdir(self.WorkspaceDir)
-
- #
- # Merge Arch
- #
- if not self.ArchList:
- ArchList = set(self.Platform.SupArchList)
- else:
- ArchList = set(self.ArchList) & set(self.Platform.SupArchList)
- if not ArchList:
- EdkLogger.error("build", PARAMETER_INVALID,
- ExtraData = "Invalid ARCH specified. [Valid ARCH: %s]" % (" ".join(self.Platform.SupArchList)))
- elif self.ArchList and len(ArchList) != len(self.ArchList):
- SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList))
- EdkLogger.verbose("\nArch [%s] is ignored because the platform supports [%s] only!"
- % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList)))
- self.ArchList = tuple(ArchList)
-
- # Validate build target
- if self.BuildTarget not in self.Platform.BuildTargets:
- EdkLogger.error("build", PARAMETER_INVALID,
- ExtraData="Build target [%s] is not supported by the platform. [Valid target: %s]"
- % (self.BuildTarget, " ".join(self.Platform.BuildTargets)))
-
-
- # parse FDF file to get PCDs in it, if any
- if not self.FdfFile:
- self.FdfFile = self.Platform.FlashDefinition
-
- EdkLogger.info("")
- if self.ArchList:
- EdkLogger.info('%-16s = %s' % ("Architecture(s)", ' '.join(self.ArchList)))
- EdkLogger.info('%-16s = %s' % ("Build target", self.BuildTarget))
- EdkLogger.info('%-16s = %s' % ("Toolchain", self.ToolChain))
-
- EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.Platform))
- if BuildModule:
- EdkLogger.info('%-24s = %s' % ("Active Module", BuildModule))
-
- if self.FdfFile:
- EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.FdfFile))
-
- EdkLogger.verbose("\nFLASH_DEFINITION = %s" % self.FdfFile)
-
- if Progress:
- Progress.Start("\nProcessing meta-data")
-
- if self.FdfFile:
- #
- # Mark now build in AutoGen Phase
- #
- GlobalData.gAutoGenPhase = True
- Fdf = FdfParser(self.FdfFile.Path)
- Fdf.ParseFile()
- GlobalData.gFdfParser = Fdf
- GlobalData.gAutoGenPhase = False
- PcdSet = Fdf.Profile.PcdDict
- if Fdf.CurrentFdName and Fdf.CurrentFdName in Fdf.Profile.FdDict:
- FdDict = Fdf.Profile.FdDict[Fdf.CurrentFdName]
- for FdRegion in FdDict.RegionList:
- if str(FdRegion.RegionType) == 'FILE' and self.Platform.VpdToolGuid in str(FdRegion.RegionDataList):
- if int(FdRegion.Offset) % 8 != 0:
- EdkLogger.error("build", FORMAT_INVALID, 'The VPD Base Address %s must be 8-byte aligned.' % (FdRegion.Offset))
- ModuleList = Fdf.Profile.InfList
- self.FdfProfile = Fdf.Profile
- for fvname in self.FvTargetList:
- if fvname.upper() not in self.FdfProfile.FvDict:
- EdkLogger.error("build", OPTION_VALUE_INVALID,
- "No such an FV in FDF file: %s" % fvname)
-
- # The DSC file may use FILE_GUID to override a module; in that case Platform.Modules uses FILE_GUIDmodule.inf as the key,
- # but the path (self.MetaFile.Path) is the real path
- for key in self.FdfProfile.InfDict:
- if key == 'ArchTBD':
- Platform_cache = {}
- MetaFile_cache = {}
- for Arch in self.ArchList:
- Platform_cache[Arch] = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]
- MetaFile_cache[Arch] = []
- for Pkey in Platform_cache[Arch].Modules.keys():
- MetaFile_cache[Arch].append(Platform_cache[Arch].Modules[Pkey].MetaFile)
- for Inf in self.FdfProfile.InfDict[key]:
- ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
- for Arch in self.ArchList:
- if ModuleFile in MetaFile_cache[Arch]:
- break
- else:
- ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]
- if not ModuleData.IsBinaryModule:
- EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
-
- else:
- for Arch in self.ArchList:
- if Arch == key:
- Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]
- MetaFileList = []
- for Pkey in Platform.Modules.keys():
- MetaFileList.append(Platform.Modules[Pkey].MetaFile)
- for Inf in self.FdfProfile.InfDict[key]:
- ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
- if ModuleFile in MetaFileList:
- continue
- ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]
- if not ModuleData.IsBinaryModule:
- EdkLogger.error('build', PARSER_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % ModuleFile)
-
- else:
- PcdSet = {}
- ModuleList = []
- self.FdfProfile = None
- if self.FdTargetList:
- EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdTargetList))
- self.FdTargetList = []
- if self.FvTargetList:
- EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvTargetList))
- self.FvTargetList = []
- if self.CapTargetList:
- EdkLogger.info("No flash definition file found. Capsule [%s] will be ignored." % " ".join(self.CapTargetList))
- self.CapTargetList = []
-
- # apply SKU and inject PCDs from Flash Definition file
- for Arch in self.ArchList:
- Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]
-
- DecPcds = {}
- DecPcdsKey = set()
- PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
- if GlobalData.BuildOptionPcd:
- for i, pcd in enumerate(GlobalData.BuildOptionPcd):
- if type(pcd) is tuple:
- continue
- (pcdname, pcdvalue) = pcd.split('=')
- if not pcdvalue:
- EdkLogger.error('build', AUTOGEN_ERROR, "No Value specified for the PCD %s." % (pcdname))
- if '.' in pcdname:
- (TokenSpaceGuidCName, TokenCName) = pcdname.split('.')
- HasTokenSpace = True
- else:
- TokenCName = pcdname
- TokenSpaceGuidCName = ''
- HasTokenSpace = False
- TokenSpaceGuidCNameList = []
- FoundFlag = False
- PcdDatumType = ''
- NewValue = ''
- for package in PGen.PackageList:
- for key in package.Pcds:
- PcdItem = package.Pcds[key]
- if HasTokenSpace:
- if (PcdItem.TokenCName, PcdItem.TokenSpaceGuidCName) == (TokenCName, TokenSpaceGuidCName):
- PcdDatumType = PcdItem.DatumType
- NewValue = BuildOptionPcdValueFormat(TokenSpaceGuidCName, TokenCName, PcdDatumType, pcdvalue)
- FoundFlag = True
- else:
- if PcdItem.TokenCName == TokenCName:
- if not PcdItem.TokenSpaceGuidCName in TokenSpaceGuidCNameList:
- if len (TokenSpaceGuidCNameList) < 1:
- TokenSpaceGuidCNameList.append(PcdItem.TokenSpaceGuidCName)
- PcdDatumType = PcdItem.DatumType
- TokenSpaceGuidCName = PcdItem.TokenSpaceGuidCName
- NewValue = BuildOptionPcdValueFormat(TokenSpaceGuidCName, TokenCName, PcdDatumType, pcdvalue)
- FoundFlag = True
- else:
- EdkLogger.error(
- 'build',
- AUTOGEN_ERROR,
- "The Pcd %s is found under multiple different TokenSpaceGuid: %s and %s." % (TokenCName, PcdItem.TokenSpaceGuidCName, TokenSpaceGuidCNameList[0])
- )
-
- GlobalData.BuildOptionPcd[i] = (TokenSpaceGuidCName, TokenCName, NewValue)
-
- if not FoundFlag:
- if HasTokenSpace:
- EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s.%s is not found in the DEC file." % (TokenSpaceGuidCName, TokenCName))
- else:
- EdkLogger.error('build', AUTOGEN_ERROR, "The Pcd %s is not found in the DEC file." % (TokenCName))
-
- for BuildData in PGen.BuildDatabase._CACHE_.values():
- if BuildData.Arch != Arch:
- continue
- if BuildData.MetaFile.Ext == '.dec':
- continue
- for key in BuildData.Pcds:
- PcdItem = BuildData.Pcds[key]
- if (TokenSpaceGuidCName, TokenCName) == (PcdItem.TokenSpaceGuidCName, PcdItem.TokenCName):
- PcdItem.DefaultValue = NewValue
-
- if (TokenCName, TokenSpaceGuidCName) in PcdSet:
- PcdSet[(TokenCName, TokenSpaceGuidCName)] = NewValue
-
- SourcePcdDict = {'DynamicEx':[], 'PatchableInModule':[],'Dynamic':[],'FixedAtBuild':[]}
- BinaryPcdDict = {'DynamicEx':[], 'PatchableInModule':[]}
- SourcePcdDict_Keys = SourcePcdDict.keys()
- BinaryPcdDict_Keys = BinaryPcdDict.keys()
-
- # generate the SourcePcdDict and BinaryPcdDict
- for BuildData in PGen.BuildDatabase._CACHE_.values():
- if BuildData.Arch != Arch:
- continue
- if BuildData.MetaFile.Ext == '.inf':
- for key in BuildData.Pcds:
- if BuildData.Pcds[key].Pending:
- if key in Platform.Pcds:
- PcdInPlatform = Platform.Pcds[key]
- if PcdInPlatform.Type not in [None, '']:
- BuildData.Pcds[key].Type = PcdInPlatform.Type
-
- if BuildData.MetaFile in Platform.Modules:
- PlatformModule = Platform.Modules[str(BuildData.MetaFile)]
- if key in PlatformModule.Pcds:
- PcdInPlatform = PlatformModule.Pcds[key]
- if PcdInPlatform.Type not in [None, '']:
- BuildData.Pcds[key].Type = PcdInPlatform.Type
-
- if 'DynamicEx' in BuildData.Pcds[key].Type:
- if BuildData.IsBinaryModule:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in BinaryPcdDict['DynamicEx']:
- BinaryPcdDict['DynamicEx'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
- else:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in SourcePcdDict['DynamicEx']:
- SourcePcdDict['DynamicEx'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
-
- elif 'PatchableInModule' in BuildData.Pcds[key].Type:
- if BuildData.MetaFile.Ext == '.inf':
- if BuildData.IsBinaryModule:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in BinaryPcdDict['PatchableInModule']:
- BinaryPcdDict['PatchableInModule'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
- else:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in SourcePcdDict['PatchableInModule']:
- SourcePcdDict['PatchableInModule'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
-
- elif 'Dynamic' in BuildData.Pcds[key].Type:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in SourcePcdDict['Dynamic']:
- SourcePcdDict['Dynamic'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
- elif 'FixedAtBuild' in BuildData.Pcds[key].Type:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) not in SourcePcdDict['FixedAtBuild']:
- SourcePcdDict['FixedAtBuild'].append((BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName))
- else:
- pass
- #
- # A PCD can only use one type for all source modules
- #
- for i in SourcePcdDict_Keys:
- for j in SourcePcdDict_Keys:
- if i != j:
- IntersectionList = list(set(SourcePcdDict[i]).intersection(set(SourcePcdDict[j])))
- if len(IntersectionList) > 0:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "Building modules from source INFs, following PCD use %s and %s access method. It must be corrected to use only one access method." % (i, j),
- ExtraData="%s" % '\n\t'.join([str(P[1]+'.'+P[0]) for P in IntersectionList])
- )
- else:
- pass
-
- #
- # intersect the binary PCD sets to find mixed PCDs
- #
- for i in BinaryPcdDict_Keys:
- for j in BinaryPcdDict_Keys:
- if i != j:
- IntersectionList = list(set(BinaryPcdDict[i]).intersection(set(BinaryPcdDict[j])))
- for item in IntersectionList:
- NewPcd1 = (item[0] + '_' + i, item[1])
- NewPcd2 = (item[0] + '_' + j, item[1])
- if item not in GlobalData.MixedPcd:
- GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
- else:
- if NewPcd1 not in GlobalData.MixedPcd[item]:
- GlobalData.MixedPcd[item].append(NewPcd1)
- if NewPcd2 not in GlobalData.MixedPcd[item]:
- GlobalData.MixedPcd[item].append(NewPcd2)
- else:
- pass
-
- #
- # intersect the source and binary PCD sets to find mixed PCDs
- #
- for i in SourcePcdDict_Keys:
- for j in BinaryPcdDict_Keys:
- if i != j:
- IntersectionList = list(set(SourcePcdDict[i]).intersection(set(BinaryPcdDict[j])))
- for item in IntersectionList:
- NewPcd1 = (item[0] + '_' + i, item[1])
- NewPcd2 = (item[0] + '_' + j, item[1])
- if item not in GlobalData.MixedPcd:
- GlobalData.MixedPcd[item] = [NewPcd1, NewPcd2]
- else:
- if NewPcd1 not in GlobalData.MixedPcd[item]:
- GlobalData.MixedPcd[item].append(NewPcd1)
- if NewPcd2 not in GlobalData.MixedPcd[item]:
- GlobalData.MixedPcd[item].append(NewPcd2)
- else:
- pass
-
- for BuildData in PGen.BuildDatabase._CACHE_.values():
- if BuildData.Arch != Arch:
- continue
- for key in BuildData.Pcds:
- for SinglePcd in GlobalData.MixedPcd:
- if (BuildData.Pcds[key].TokenCName, BuildData.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
- for item in GlobalData.MixedPcd[SinglePcd]:
- Pcd_Type = item[0].split('_')[-1]
- if (Pcd_Type == BuildData.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and BuildData.Pcds[key].Type in GenC.gDynamicExPcd) or \
- (Pcd_Type == TAB_PCDS_DYNAMIC and BuildData.Pcds[key].Type in GenC.gDynamicPcd):
- Value = BuildData.Pcds[key]
- Value.TokenCName = BuildData.Pcds[key].TokenCName + '_' + Pcd_Type
- if len(key) == 2:
- newkey = (Value.TokenCName, key[1])
- elif len(key) == 3:
- newkey = (Value.TokenCName, key[1], key[2])
- del BuildData.Pcds[key]
- BuildData.Pcds[newkey] = Value
- break
- else:
- pass
- break
- else:
- pass
-
- # handle the mixed pcd in FDF file
- for key in PcdSet:
- if key in GlobalData.MixedPcd:
- Value = PcdSet[key]
- del PcdSet[key]
- for item in GlobalData.MixedPcd[key]:
- PcdSet[item] = Value
-
- #Collect package set information from INF of FDF
- PkgSet = set()
- for Inf in ModuleList:
- ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch)
- if ModuleFile in Platform.Modules:
- continue
- ModuleData = self.BuildDatabase[ModuleFile, Arch, Target, Toolchain]
- PkgSet.update(ModuleData.Packages)
- Pkgs = list(PkgSet) + list(PGen.PackageList)
- for Pkg in Pkgs:
- for Pcd in Pkg.Pcds:
- DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd]
- DecPcdsKey.add((Pcd[0], Pcd[1], Pcd[2]))
-
- Platform.SkuName = self.SkuId
- for Name, Guid in PcdSet:
- if (Name, Guid) not in DecPcds:
- EdkLogger.error(
- 'build',
- PARSER_ERROR,
- "PCD (%s.%s) used in FDF is not declared in DEC files." % (Guid, Name),
- File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],
- Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]
- )
- else:
- # Check whether a Dynamic or DynamicEx PCD is used in the FDF file. If so, break the build and report an error.
- if (Name, Guid, TAB_PCDS_FIXED_AT_BUILD) in DecPcdsKey \
- or (Name, Guid, TAB_PCDS_PATCHABLE_IN_MODULE) in DecPcdsKey \
- or (Name, Guid, TAB_PCDS_FEATURE_FLAG) in DecPcdsKey:
- Platform.AddPcd(Name, Guid, PcdSet[Name, Guid])
- continue
- elif (Name, Guid, TAB_PCDS_DYNAMIC) in DecPcdsKey or (Name, Guid, TAB_PCDS_DYNAMIC_EX) in DecPcdsKey:
- EdkLogger.error(
- 'build',
- PARSER_ERROR,
- "Using Dynamic or DynamicEx type of PCD [%s.%s] in FDF file is not allowed." % (Guid, Name),
- File = self.FdfProfile.PcdFileLineDict[Name, Guid][0],
- Line = self.FdfProfile.PcdFileLineDict[Name, Guid][1]
- )
-
- Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
- #
- # Explicitly collect platform's dynamic PCDs
- #
- Pa.CollectPlatformDynamicPcds()
- Pa.CollectFixedAtBuildPcds()
- self.AutoGenObjectList.append(Pa)
-
- #
- # Check PCDs token value conflict in each DEC file.
- #
- self._CheckAllPcdsTokenValueConflict()
-
- #
- # Check PCD type and definition between DSC and DEC
- #
- self._CheckPcdDefineAndType()
-
-# if self.FdfFile:
-# self._CheckDuplicateInFV(Fdf)
-
- self._BuildDir = None
- self._FvDir = None
- self._MakeFileDir = None
- self._BuildCommand = None
-
- #
- # Create BuildOptions Macro & PCD metafile, also add the Active Platform and FDF file.
- #
- content = 'gCommandLineDefines: '
- content += str(GlobalData.gCommandLineDefines)
- content += os.linesep
- content += 'BuildOptionPcd: '
- content += str(GlobalData.BuildOptionPcd)
- content += os.linesep
- content += 'Active Platform: '
- content += str(self.Platform)
- content += os.linesep
- if self.FdfFile:
- content += 'Flash Image Definition: '
- content += str(self.FdfFile)
- SaveFileOnChange(os.path.join(self.BuildDir, 'BuildOptions'), content, False)
-
- #
- # Create PcdToken Number file for Dynamic/DynamicEx Pcd.
- #
- PcdTokenNumber = 'PcdTokenNumber: '
- if Pa.PcdTokenNumber:
- if Pa.DynamicPcdList:
- for Pcd in Pa.DynamicPcdList:
- PcdTokenNumber += os.linesep
- PcdTokenNumber += str((Pcd.TokenCName, Pcd.TokenSpaceGuidCName))
- PcdTokenNumber += ' : '
- PcdTokenNumber += str(Pa.PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName])
- SaveFileOnChange(os.path.join(self.BuildDir, 'PcdTokenNumber'), PcdTokenNumber, False)
-
- #
- # Get set of workspace metafiles
- #
- AllWorkSpaceMetaFiles = self._GetMetaFiles(Target, Toolchain, Arch)
-
- #
- # Retrieve latest modified time of all metafiles
- #
- SrcTimeStamp = 0
- for f in AllWorkSpaceMetaFiles:
- if os.stat(f)[8] > SrcTimeStamp:
- SrcTimeStamp = os.stat(f)[8]
- self._SrcTimeStamp = SrcTimeStamp
-
- #
- # Write metafile list to build directory
- #
- AutoGenFilePath = os.path.join(self.BuildDir, 'AutoGen')
- if os.path.exists (AutoGenFilePath):
- os.remove(AutoGenFilePath)
- if not os.path.exists(self.BuildDir):
- os.makedirs(self.BuildDir)
- with open(os.path.join(self.BuildDir, 'AutoGen'), 'w+') as file:
- for f in AllWorkSpaceMetaFiles:
- print >> file, f
- return True
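(An aside on the timestamp loop above: os.stat(f)[8] is st_mtime, so the loop simply keeps the newest modification time of all metafiles. A minimal equivalent sketch with hypothetical file names:)

    import os

    def latest_mtime(meta_files):
        """Return the newest modification time among the given files (0 if none)."""
        newest = 0
        for f in meta_files:
            newest = max(newest, os.path.getmtime(f))
        return newest

    # Hypothetical usage:
    # src_time_stamp = latest_mtime(["Platform.dsc", "Platform.fdf", "Conf/tools_def.txt"])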
-
-
- def _GetMetaFiles(self, Target, Toolchain, Arch):
- AllWorkSpaceMetaFiles = set()
- #
- # add fdf
- #
- if self.FdfFile:
- AllWorkSpaceMetaFiles.add (self.FdfFile.Path)
- if self.FdfFile:
- FdfFiles = GlobalData.gFdfParser.GetAllIncludedFile()
- for f in FdfFiles:
- AllWorkSpaceMetaFiles.add (f.FileName)
- #
- # add dsc
- #
- AllWorkSpaceMetaFiles.add(self.MetaFile.Path)
-
- #
- # add build_rule.txt & tools_def.txt
- #
- AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultBuildRuleFile))
- AllWorkSpaceMetaFiles.add(os.path.join(GlobalData.gConfDirectory, gDefaultToolsDefFile))
-
- # add BuildOption metafile
- #
- AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'BuildOptions'))
-
- # add PcdToken Number file for Dynamic/DynamicEx Pcd
- #
- AllWorkSpaceMetaFiles.add(os.path.join(self.BuildDir, 'PcdTokenNumber'))
-
- for Arch in self.ArchList:
- Platform = self.BuildDatabase[self.MetaFile, Arch, Target, Toolchain]
- PGen = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
-
- #
- # add dec
- #
- for Package in PGen.PackageList:
- AllWorkSpaceMetaFiles.add(Package.MetaFile.Path)
-
- #
- # add included dsc
- #
- for filePath in Platform._RawData.IncludedFiles:
- AllWorkSpaceMetaFiles.add(filePath.Path)
-
- return AllWorkSpaceMetaFiles
-
- ## _CheckDuplicateInFV() method
- #
- # Check whether duplicate modules/files exist in an FV section.
- # The check is based on the file GUID.
- #
- def _CheckDuplicateInFV(self, Fdf):
- for Fv in Fdf.Profile.FvDict:
- _GuidDict = {}
- for FfsFile in Fdf.Profile.FvDict[Fv].FfsList:
- if FfsFile.InfFileName and FfsFile.NameGuid == None:
- #
- # Get INF file GUID
- #
- InfFoundFlag = False
- for Pa in self.AutoGenObjectList:
- if InfFoundFlag:
- break
- for Module in Pa.ModuleAutoGenList:
- if path.normpath(Module.MetaFile.File) == path.normpath(FfsFile.InfFileName):
- InfFoundFlag = True
- if not Module.Guid.upper() in _GuidDict.keys():
- _GuidDict[Module.Guid.upper()] = FfsFile
- break
- else:
- EdkLogger.error("build",
- FORMAT_INVALID,
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
- FfsFile.CurrentLineContent,
- _GuidDict[Module.Guid.upper()].CurrentLineNum,
- _GuidDict[Module.Guid.upper()].CurrentLineContent,
- Module.Guid.upper()),
- ExtraData=self.FdfFile)
- #
- # Some INF files have no entry in the DSC file.
- #
- if not InfFoundFlag:
- if FfsFile.InfFileName.find('$') == -1:
- InfPath = NormPath(FfsFile.InfFileName)
- if not os.path.exists(InfPath):
- EdkLogger.error('build', GENFDS_ERROR, "Non-existent Module %s !" % (FfsFile.InfFileName))
-
- PathClassObj = PathClass(FfsFile.InfFileName, self.WorkspaceDir)
- #
- # Here we only need to get FILE_GUID from the INF file, so use 'COMMON' as the ARCH attribute;
- # using the BuildObject from any member of AutoGenObjectList is enough.
- #
- InfObj = self.AutoGenObjectList[0].BuildDatabase.WorkspaceDb.BuildObject[PathClassObj, 'COMMON', self.BuildTarget, self.ToolChain]
- if not InfObj.Guid.upper() in _GuidDict.keys():
- _GuidDict[InfObj.Guid.upper()] = FfsFile
- else:
- EdkLogger.error("build",
- FORMAT_INVALID,
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
- FfsFile.CurrentLineContent,
- _GuidDict[InfObj.Guid.upper()].CurrentLineNum,
- _GuidDict[InfObj.Guid.upper()].CurrentLineContent,
- InfObj.Guid.upper()),
- ExtraData=self.FdfFile)
- InfFoundFlag = False
-
- if FfsFile.NameGuid != None:
- _CheckPCDAsGuidPattern = re.compile("^PCD\(.+\..+\)$")
-
- #
- # If the NameGuid references a PCD name,
- # the style must match: PCD(xxxx.yyy)
- #
- if _CheckPCDAsGuidPattern.match(FfsFile.NameGuid):
- #
- # Replace the PCD value.
- #
- _PcdName = FfsFile.NameGuid.lstrip("PCD(").rstrip(")")
- PcdFoundFlag = False
- for Pa in self.AutoGenObjectList:
- if not PcdFoundFlag:
- for PcdItem in Pa.AllPcdList:
- if (PcdItem.TokenSpaceGuidCName + "." + PcdItem.TokenCName) == _PcdName:
- #
- # First convert from CFormatGuid to GUID string
- #
- _PcdGuidString = GuidStructureStringToGuidString(PcdItem.DefaultValue)
-
- if not _PcdGuidString:
- #
- # Then try Byte array.
- #
- _PcdGuidString = GuidStructureByteArrayToGuidString(PcdItem.DefaultValue)
-
- if not _PcdGuidString:
- #
- # Not Byte array or CFormat GUID, raise error.
- #
- EdkLogger.error("build",
- FORMAT_INVALID,
- "The format of PCD value is incorrect. PCD: %s , Value: %s\n" % (_PcdName, PcdItem.DefaultValue),
- ExtraData=self.FdfFile)
-
- if not _PcdGuidString.upper() in _GuidDict.keys():
- _GuidDict[_PcdGuidString.upper()] = FfsFile
- PcdFoundFlag = True
- break
- else:
- EdkLogger.error("build",
- FORMAT_INVALID,
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
- FfsFile.CurrentLineContent,
- _GuidDict[_PcdGuidString.upper()].CurrentLineNum,
- _GuidDict[_PcdGuidString.upper()].CurrentLineContent,
- FfsFile.NameGuid.upper()),
- ExtraData=self.FdfFile)
-
- if not FfsFile.NameGuid.upper() in _GuidDict.keys():
- _GuidDict[FfsFile.NameGuid.upper()] = FfsFile
- else:
- #
- # Two raw file GUID conflict.
- #
- EdkLogger.error("build",
- FORMAT_INVALID,
- "Duplicate GUID found for these lines: Line %d: %s and Line %d: %s. GUID: %s" % (FfsFile.CurrentLineNum,
- FfsFile.CurrentLineContent,
- _GuidDict[FfsFile.NameGuid.upper()].CurrentLineNum,
- _GuidDict[FfsFile.NameGuid.upper()].CurrentLineContent,
- FfsFile.NameGuid.upper()),
- ExtraData=self.FdfFile)
-
-
- def _CheckPcdDefineAndType(self):
- PcdTypeList = [
- "FixedAtBuild", "PatchableInModule", "FeatureFlag",
- "Dynamic", #"DynamicHii", "DynamicVpd",
- "DynamicEx", # "DynamicExHii", "DynamicExVpd"
- ]
-
- # This dict stores PCDs that are not used by any module with the specified arches
- UnusedPcd = sdict()
- for Pa in self.AutoGenObjectList:
- # Key of DSC's Pcds dictionary is PcdCName, TokenSpaceGuid
- for Pcd in Pa.Platform.Pcds:
- PcdType = Pa.Platform.Pcds[Pcd].Type
-
- # If no PCD type, this PCD comes from FDF
- if not PcdType:
- continue
-
- # Try to remove Hii and Vpd suffix
- if PcdType.startswith("DynamicEx"):
- PcdType = "DynamicEx"
- elif PcdType.startswith("Dynamic"):
- PcdType = "Dynamic"
-
- for Package in Pa.PackageList:
- # Key of DEC's Pcds dictionary is PcdCName, TokenSpaceGuid, PcdType
- if (Pcd[0], Pcd[1], PcdType) in Package.Pcds:
- break
- for Type in PcdTypeList:
- if (Pcd[0], Pcd[1], Type) in Package.Pcds:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "Type [%s] of PCD [%s.%s] in DSC file doesn't match the type [%s] defined in DEC file." \
- % (Pa.Platform.Pcds[Pcd].Type, Pcd[1], Pcd[0], Type),
- ExtraData=None
- )
- return
- else:
- UnusedPcd.setdefault(Pcd, []).append(Pa.Arch)
-
- for Pcd in UnusedPcd:
- EdkLogger.warn(
- 'build',
- "The PCD was not specified by any INF module in the platform for the given architecture.\n"
- "\tPCD: [%s.%s]\n\tPlatform: [%s]\n\tArch: %s"
- % (Pcd[1], Pcd[0], os.path.basename(str(self.MetaFile)), str(UnusedPcd[Pcd])),
- ExtraData=None
- )
-
- def __repr__(self):
- return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))
-
- ## Return the directory to store FV files
- def _GetFvDir(self):
- if self._FvDir == None:
- self._FvDir = path.join(self.BuildDir, 'FV')
- return self._FvDir
-
- ## Return the directory to store all intermediate and final files built
- def _GetBuildDir(self):
- return self.AutoGenObjectList[0].BuildDir
-
- ## Return the build output directory platform specifies
- def _GetOutputDir(self):
- return self.Platform.OutputDirectory
-
- ## Return platform name
- def _GetName(self):
- return self.Platform.PlatformName
-
- ## Return meta-file GUID
- def _GetGuid(self):
- return self.Platform.Guid
-
- ## Return platform version
- def _GetVersion(self):
- return self.Platform.Version
-
- ## Return paths of tools
- def _GetToolDefinition(self):
- return self.AutoGenObjectList[0].ToolDefinition
-
- ## Return directory of platform makefile
- #
- # @retval string Makefile directory
- #
- def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
- self._MakeFileDir = self.BuildDir
- return self._MakeFileDir
-
- ## Return build command string
- #
- # @retval string Build command string
- #
- def _GetBuildCommand(self):
- if self._BuildCommand == None:
- # BuildCommand should be all the same. So just get one from platform AutoGen
- self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
- return self._BuildCommand
-
- ## Check for PCD token value conflicts in each DEC file.
- #
- # Breaks the build and raises an error message when two PCDs conflict.
- #
- # @return None
- #
- def _CheckAllPcdsTokenValueConflict(self):
- for Pa in self.AutoGenObjectList:
- for Package in Pa.PackageList:
- PcdList = Package.Pcds.values()
- PcdList.sort(lambda x, y: cmp(int(x.TokenValue, 0), int(y.TokenValue, 0)))
- Count = 0
- while (Count < len(PcdList) - 1) :
- Item = PcdList[Count]
- ItemNext = PcdList[Count + 1]
- #
- # Make sure the TokenValue is unique within the same token space
- #
- if (int(Item.TokenValue, 0) == int(ItemNext.TokenValue, 0)):
- SameTokenValuePcdList = []
- SameTokenValuePcdList.append(Item)
- SameTokenValuePcdList.append(ItemNext)
- RemainPcdListLength = len(PcdList) - Count - 2
- for ValueSameCount in range(RemainPcdListLength):
- if int(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount].TokenValue, 0) == int(Item.TokenValue, 0):
- SameTokenValuePcdList.append(PcdList[len(PcdList) - RemainPcdListLength + ValueSameCount])
- else:
- break
- #
- # Sort same token value PCD list with TokenGuid and TokenCName
- #
- SameTokenValuePcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
- SameTokenValuePcdListCount = 0
- while (SameTokenValuePcdListCount < len(SameTokenValuePcdList) - 1):
- Flag = False
- TemListItem = SameTokenValuePcdList[SameTokenValuePcdListCount]
- TemListItemNext = SameTokenValuePcdList[SameTokenValuePcdListCount + 1]
-
- if (TemListItem.TokenSpaceGuidCName == TemListItemNext.TokenSpaceGuidCName) and (TemListItem.TokenCName != TemListItemNext.TokenCName):
- for PcdItem in GlobalData.MixedPcd:
- if (TemListItem.TokenCName, TemListItem.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem] or \
- (TemListItemNext.TokenCName, TemListItemNext.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- Flag = True
- if not Flag:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "The TokenValue [%s] of PCD [%s.%s] is conflict with: [%s.%s] in %s"\
- % (TemListItem.TokenValue, TemListItem.TokenSpaceGuidCName, TemListItem.TokenCName, TemListItemNext.TokenSpaceGuidCName, TemListItemNext.TokenCName, Package),
- ExtraData=None
- )
- SameTokenValuePcdListCount += 1
- Count += SameTokenValuePcdListCount
- Count += 1
-
- PcdList = Package.Pcds.values()
- PcdList.sort(lambda x, y: cmp("%s.%s" % (x.TokenSpaceGuidCName, x.TokenCName), "%s.%s" % (y.TokenSpaceGuidCName, y.TokenCName)))
- Count = 0
- while (Count < len(PcdList) - 1) :
- Item = PcdList[Count]
- ItemNext = PcdList[Count + 1]
- #
- # Check that PCDs with the same TokenSpaceGuidCName.TokenCName also have the same token value.
- #
- if (Item.TokenSpaceGuidCName == ItemNext.TokenSpaceGuidCName) and (Item.TokenCName == ItemNext.TokenCName) and (int(Item.TokenValue, 0) != int(ItemNext.TokenValue, 0)):
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "The TokenValue [%s] of PCD [%s.%s] in %s defined in two places should be same as well."\
- % (Item.TokenValue, Item.TokenSpaceGuidCName, Item.TokenCName, Package),
- ExtraData=None
- )
- Count += 1
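(A compact sketch, with illustrative names rather than the build objects above, of the same duplicate-token-value check done by sorting and grouping; the MixedPcd exception handling above is intentionally left out.)

    from itertools import groupby

    def find_token_value_conflicts(pcds):
        """pcds: iterable of (token_space_guid_cname, token_cname, token_value) tuples.
        Return token values shared by two or more differently named PCDs."""
        conflicts = {}
        token = lambda p: int(p[2], 0)
        for value, group in groupby(sorted(pcds, key=token), key=token):
            names = set((g, n) for g, n, _ in group)
            if len(names) > 1:
                conflicts[value] = sorted(names)
        return conflicts

    # Hypothetical DEC contents: PcdFoo and PcdBar collide on token value 0x10.
    print(find_token_value_conflicts([
        ("gExampleTokenSpaceGuid", "PcdFoo", "0x10"),
        ("gExampleTokenSpaceGuid", "PcdBar", "0x00000010"),
        ("gExampleTokenSpaceGuid", "PcdBaz", "0x20"),
    ]))
    # -> {16: [('gExampleTokenSpaceGuid', 'PcdBar'), ('gExampleTokenSpaceGuid', 'PcdFoo')]}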
- ## Generate fds command
- def _GenFdsCommand(self):
- return (GenMake.TopLevelMakefile(self)._TEMPLATE_.Replace(GenMake.TopLevelMakefile(self)._TemplateDict)).strip()
-
- ## Create makefile for the platform and modules in it
- #
- # @param CreateDepsMakeFile Flag indicating if the makefile for
- # modules will be created as well
- #
- def CreateMakeFile(self, CreateDepsMakeFile=False):
- if CreateDepsMakeFile:
- for Pa in self.AutoGenObjectList:
- Pa.CreateMakeFile(CreateDepsMakeFile)
-
- ## Create autogen code for platform and modules
- #
- # Since there's no autogen code for the platform, this method will do nothing
- # if CreateDepsCodeFile is set to False.
- #
- # @param CreateDepsCodeFile Flag indicating whether to create the modules'
- # autogen code files
- #
- def CreateCodeFile(self, CreateDepsCodeFile=False):
- if not CreateDepsCodeFile:
- return
- for Pa in self.AutoGenObjectList:
- Pa.CreateCodeFile(CreateDepsCodeFile)
-
- ## Create the As-Built INF file for the platform
- #
- def CreateAsBuiltInf(self):
- return
-
- Name = property(_GetName)
- Guid = property(_GetGuid)
- Version = property(_GetVersion)
- OutputDir = property(_GetOutputDir)
-
- ToolDefinition = property(_GetToolDefinition) # toolcode : tool path
-
- BuildDir = property(_GetBuildDir)
- FvDir = property(_GetFvDir)
- MakeFileDir = property(_GetMakeFileDir)
- BuildCommand = property(_GetBuildCommand)
- GenFdsCommand = property(_GenFdsCommand)
-
-## AutoGen class for platform
-#
-# PlatformAutoGen class will process the original information in platform
-# file in order to generate makefile for platform.
-#
-class PlatformAutoGen(AutoGen):
- #
- # Used to store all PCDs for both PEI and DXE phase, in order to generate
- # correct PCD database
- #
- _DynaPcdList_ = []
- _NonDynaPcdList_ = []
- _PlatformPcds = {}
-
- #
- # The priority list used when overriding build options
- #
- PrioList = {"0x11111" : 16, # TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE (Highest)
- "0x01111" : 15, # ******_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
- "0x10111" : 14, # TARGET_*********_ARCH_COMMANDTYPE_ATTRIBUTE
- "0x00111" : 13, # ******_*********_ARCH_COMMANDTYPE_ATTRIBUTE
- "0x11011" : 12, # TARGET_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
- "0x01011" : 11, # ******_TOOLCHAIN_****_COMMANDTYPE_ATTRIBUTE
- "0x10011" : 10, # TARGET_*********_****_COMMANDTYPE_ATTRIBUTE
- "0x00011" : 9, # ******_*********_****_COMMANDTYPE_ATTRIBUTE
- "0x11101" : 8, # TARGET_TOOLCHAIN_ARCH_***********_ATTRIBUTE
- "0x01101" : 7, # ******_TOOLCHAIN_ARCH_***********_ATTRIBUTE
- "0x10101" : 6, # TARGET_*********_ARCH_***********_ATTRIBUTE
- "0x00101" : 5, # ******_*********_ARCH_***********_ATTRIBUTE
- "0x11001" : 4, # TARGET_TOOLCHAIN_****_***********_ATTRIBUTE
- "0x01001" : 3, # ******_TOOLCHAIN_****_***********_ATTRIBUTE
- "0x10001" : 2, # TARGET_*********_****_***********_ATTRIBUTE
- "0x00001" : 1} # ******_*********_****_***********_ATTRIBUTE (Lowest)
-
- ## The real constructor of PlatformAutoGen
- #
- # This method is not supposed to be called by users of PlatformAutoGen. It's
- # only used by factory method __new__() to do real initialization work for an
- # object of PlatformAutoGen
- #
- # @param Workspace WorkspaceAutoGen object
- # @param PlatformFile Platform file (DSC file)
- # @param Target Build target (DEBUG, RELEASE)
- # @param Toolchain Name of tool chain
- # @param Arch One of the archs the platform supports
- #
- def _Init(self, Workspace, PlatformFile, Target, Toolchain, Arch):
- EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
- GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
-
- self.MetaFile = PlatformFile
- self.Workspace = Workspace
- self.WorkspaceDir = Workspace.WorkspaceDir
- self.ToolChain = Toolchain
- self.BuildTarget = Target
- self.Arch = Arch
- self.SourceDir = PlatformFile.SubDir
- self.SourceOverrideDir = None
- self.FdTargetList = self.Workspace.FdTargetList
- self.FvTargetList = self.Workspace.FvTargetList
- self.AllPcdList = []
- # get the original module/package/platform objects
- self.BuildDatabase = Workspace.BuildDatabase
-
- # flag indicating if the makefile/C-code file has been created or not
- self.IsMakeFileCreated = False
- self.IsCodeFileCreated = False
-
- self._Platform = None
- self._Name = None
- self._Guid = None
- self._Version = None
-
- self._BuildRule = None
- self._SourceDir = None
- self._BuildDir = None
- self._OutputDir = None
- self._FvDir = None
- self._MakeFileDir = None
- self._FdfFile = None
-
- self._PcdTokenNumber = None # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
- self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
- self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
- self._NonDynamicPcdDict = {}
-
- self._ToolDefinitions = None
- self._ToolDefFile = None # toolcode : tool path
- self._ToolChainFamily = None
- self._BuildRuleFamily = None
- self._BuildOption = None # toolcode : option
- self._EdkBuildOption = None # edktoolcode : option
- self._EdkIIBuildOption = None # edkiitoolcode : option
- self._PackageList = None
- self._ModuleAutoGenList = None
- self._LibraryAutoGenList = None
- self._BuildCommand = None
- self._AsBuildInfList = []
- self._AsBuildModuleList = []
- if GlobalData.gFdfParser != None:
- self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
- for Inf in self._AsBuildInfList:
- InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
- M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
- if not M.IsSupportedArch:
- continue
- self._AsBuildModuleList.append(InfClass)
- # get library/modules for build
- self.LibraryBuildDirectoryList = []
- self.ModuleBuildDirectoryList = []
- return True
-
- def __repr__(self):
- return "%s [%s]" % (self.MetaFile, self.Arch)
-
- ## Create autogen code for platform and modules
- #
- # Since there's no autogen code for platform, this method will do nothing
- # if CreateModuleCodeFile is set to False.
- #
- # @param CreateModuleCodeFile Flag indicating if creating module's
- # autogen code file or not
- #
- def CreateCodeFile(self, CreateModuleCodeFile=False):
- # only modules have code to be generated, so do nothing if CreateModuleCodeFile is False
- if self.IsCodeFileCreated or not CreateModuleCodeFile:
- return
-
- for Ma in self.ModuleAutoGenList:
- Ma.CreateCodeFile(True)
-
- # don't do this twice
- self.IsCodeFileCreated = True
-
- ## Generate Fds Command
- def _GenFdsCommand(self):
- return self.Workspace.GenFdsCommand
-
- ## Create makefile for the platform and modules in it
- #
- # @param CreateModuleMakeFile Flag indicating if the makefile for
- # modules will be created as well
- #
- def CreateMakeFile(self, CreateModuleMakeFile=False):
- if CreateModuleMakeFile:
- for ModuleFile in self.Platform.Modules:
- Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget,
- self.ToolChain, self.Arch, self.MetaFile)
- Ma.CreateMakeFile(True)
- #Ma.CreateAsBuiltInf()
-
- # no need to create makefile for the platform more than once
- if self.IsMakeFileCreated:
- return
-
- # create library/module build dirs for platform
- Makefile = GenMake.PlatformMakefile(self)
- self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
- self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
-
- self.IsMakeFileCreated = True
-
- ## Deal with Shared FixedAtBuild Pcds
- #
- def CollectFixedAtBuildPcds(self):
- for LibAuto in self.LibraryAutoGenList:
- FixedAtBuildPcds = {}
- ShareFixedAtBuildPcdsSameValue = {}
- for Module in LibAuto._ReferenceModules:
- for Pcd in Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds:
- key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
- if key not in FixedAtBuildPcds:
- ShareFixedAtBuildPcdsSameValue[key] = True
- FixedAtBuildPcds[key] = Pcd.DefaultValue
- else:
- if FixedAtBuildPcds[key] != Pcd.DefaultValue:
- ShareFixedAtBuildPcdsSameValue[key] = False
- for Pcd in LibAuto.FixedAtBuildPcds:
- key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
- if (Pcd.TokenCName,Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
- continue
- else:
- DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)]
- if DscPcd.Type != "FixedAtBuild":
- continue
- if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
- LibAuto.ConstPcd[key] = Pcd.DefaultValue
-
- ## Collect dynamic PCDs
- #
- # Gather the dynamic PCD list from each module and their settings from the platform.
- # This interface should be invoked explicitly when the platform action is created.
- #
- def CollectPlatformDynamicPcds(self):
- # Override the platform Pcd's value by build option
- if GlobalData.BuildOptionPcd:
- for key in self.Platform.Pcds:
- PlatformPcd = self.Platform.Pcds[key]
- for PcdItem in GlobalData.BuildOptionPcd:
- if (PlatformPcd.TokenSpaceGuidCName, PlatformPcd.TokenCName) == (PcdItem[0], PcdItem[1]):
- PlatformPcd.DefaultValue = PcdItem[2]
- if PlatformPcd.SkuInfoList:
- Sku = PlatformPcd.SkuInfoList[PlatformPcd.SkuInfoList.keys()[0]]
- Sku.DefaultValue = PcdItem[2]
- break
-
- for key in self.Platform.Pcds:
- for SinglePcd in GlobalData.MixedPcd:
- if (self.Platform.Pcds[key].TokenCName, self.Platform.Pcds[key].TokenSpaceGuidCName) == SinglePcd:
- for item in GlobalData.MixedPcd[SinglePcd]:
- Pcd_Type = item[0].split('_')[-1]
- if (Pcd_Type == self.Platform.Pcds[key].Type) or (Pcd_Type == TAB_PCDS_DYNAMIC_EX and self.Platform.Pcds[key].Type in GenC.gDynamicExPcd) or \
- (Pcd_Type == TAB_PCDS_DYNAMIC and self.Platform.Pcds[key].Type in GenC.gDynamicPcd):
- Value = self.Platform.Pcds[key]
- Value.TokenCName = self.Platform.Pcds[key].TokenCName + '_' + Pcd_Type
- if len(key) == 2:
- newkey = (Value.TokenCName, key[1])
- elif len(key) == 3:
- newkey = (Value.TokenCName, key[1], key[2])
- del self.Platform.Pcds[key]
- self.Platform.Pcds[newkey] = Value
- break
- else:
- pass
- break
- else:
- pass
-
- # for gathering error information
- NoDatumTypePcdList = set()
- PcdNotInDb = []
- self._GuidValue = {}
- FdfModuleList = []
- for InfName in self._AsBuildInfList:
- InfName = mws.join(self.WorkspaceDir, InfName)
- FdfModuleList.append(os.path.normpath(InfName))
- for F in self.Platform.Modules.keys():
- M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile)
- #GuidValue.update(M.Guids)
-
- self.Platform.Modules[F].M = M
-
- for PcdFromModule in M.ModulePcdList + M.LibraryPcdList:
- # make sure that the "VOID*" kind of datum has MaxDatumSize set
- if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
- NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F))
-
- # Check the PCD from Binary INF or Source INF
- if M.IsBinaryModule == True:
- PcdFromModule.IsFromBinaryInf = True
-
- # Check the PCD from DSC or not
- if (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds.keys():
- PcdFromModule.IsFromDsc = True
- else:
- PcdFromModule.IsFromDsc = False
- if PcdFromModule.Type in GenC.gDynamicPcd or PcdFromModule.Type in GenC.gDynamicExPcd:
- if F.Path not in FdfModuleList:
- # If one of the source-built modules listed in the DSC is not listed
- # in the FDF modules, and the INF lists a PCD that can only use the PcdsDynamic
- # access method (it is only listed in the DEC file that declares the
- # PCD as PcdsDynamic), then the build tool will report a warning message to
- # notify the PI that they are attempting to build a module that must
- # be included in a flash image in order to be functional. These Dynamic
- # PCDs will not be added to the Database unless they are used by other
- # modules that are included in the FDF file.
- if PcdFromModule.Type in GenC.gDynamicPcd and \
- PcdFromModule.IsFromBinaryInf == False:
- # Print a warning message to let the developer decide.
- if PcdFromModule not in PcdNotInDb:
- PcdNotInDb.append(PcdFromModule)
- continue
- # If one of the Source built modules listed in the DSC is not listed in
- # FDF modules, and the INF lists a PCD that can only use the PcdsDynamicEx
- # access method (it is only listed in the DEC file that declares the
- # PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
- # PCD to the Platform's PCD Database.
- if PcdFromModule.Type in GenC.gDynamicExPcd:
- if PcdFromModule not in PcdNotInDb:
- PcdNotInDb.append(PcdFromModule)
- continue
- #
- # If a dynamic PCD is used by both a PEIM/PEI module and a DXE module,
- # it should be stored in the PEI PCD database; if a dynamic PCD is only
- # used by a DXE module, it should be stored in the DXE PCD database.
- # The default Phase is DXE
- #
- if M.ModuleType in ["PEIM", "PEI_CORE"]:
- PcdFromModule.Phase = "PEI"
- if PcdFromModule not in self._DynaPcdList_:
- self._DynaPcdList_.append(PcdFromModule)
- elif PcdFromModule.Phase == 'PEI':
- # overwrite any existing identical PCD, if Phase is PEI
- Index = self._DynaPcdList_.index(PcdFromModule)
- self._DynaPcdList_[Index] = PcdFromModule
- elif PcdFromModule not in self._NonDynaPcdList_:
- self._NonDynaPcdList_.append(PcdFromModule)
- elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:
- Index = self._NonDynaPcdList_.index(PcdFromModule)
- if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:
- #The PCD from Binary INF will override the same one from source INF
- self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])
- PcdFromModule.Pending = False
- self._NonDynaPcdList_.append (PcdFromModule)
- # Parse the DynamicEx PCD from the AsBuild INF module list of FDF.
- DscModuleList = []
- for ModuleInf in self.Platform.Modules.keys():
- DscModuleList.append (os.path.normpath(ModuleInf.Path))
- # add the PCDs from modules that are listed in the FDF but not in the DSC to the Database
- for InfName in FdfModuleList:
- if InfName not in DscModuleList:
- InfClass = PathClass(InfName)
- M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
- # If a module INF is in the FDF but not in the current arch's DSC module list, it must be a module (either binary or source)
- # for a different Arch. PCDs in source modules for a different Arch were already added above, so skip source modules here.
- # For a binary module in the current arch, we need to add its PCDs into the database.
- if not M.IsSupportedArch:
- continue
- # Override the module PCD setting by platform setting
- ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)
- for PcdFromModule in ModulePcdList:
- PcdFromModule.IsFromBinaryInf = True
- PcdFromModule.IsFromDsc = False
- # Only allow the DynamicEx and Patchable PCD in AsBuild INF
- if PcdFromModule.Type not in GenC.gDynamicExPcd and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:
- EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
- File=self.MetaFile,
- ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"
- % (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))
- # make sure that the "VOID*" kind of datum has MaxDatumSize set
- if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize in [None, '']:
- NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))
- if M.ModuleType in ["PEIM", "PEI_CORE"]:
- PcdFromModule.Phase = "PEI"
- if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in GenC.gDynamicExPcd:
- self._DynaPcdList_.append(PcdFromModule)
- elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
- self._NonDynaPcdList_.append(PcdFromModule)
- if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in GenC.gDynamicExPcd:
- # Overwrite the phase of any existing identical PCD, if Phase is PEI.
- # This solves the case where a dynamic PCD is used by a PEIM/PEI
- # module and a DXE module at the same time.
- # Overwrite the type of the PCDs in source INF by the type of AsBuild
- # INF file as DynamicEx.
- Index = self._DynaPcdList_.index(PcdFromModule)
- self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
- self._DynaPcdList_[Index].Type = PcdFromModule.Type
- for PcdFromModule in self._NonDynaPcdList_:
- # If a PCD is not listed in the DSC file, but binary INF files used by
- # this platform all (that use this PCD) list the PCD in a [PatchPcds]
- # section, AND all source INF files used by this platform build
- # that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
- # section, then the tools must NOT add the PCD to the Platform's PCD
- # Database; the build must assign the access method for this PCD as
- # PcdsPatchableInModule.
- if PcdFromModule not in self._DynaPcdList_:
- continue
- Index = self._DynaPcdList_.index(PcdFromModule)
- if PcdFromModule.IsFromDsc == False and \
- PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \
- PcdFromModule.IsFromBinaryInf == True and \
- self._DynaPcdList_[Index].IsFromBinaryInf == False:
- Index = self._DynaPcdList_.index(PcdFromModule)
- self._DynaPcdList_.remove (self._DynaPcdList_[Index])
-
- # print out error information and break the build, if error found
- if len(NoDatumTypePcdList) > 0:
- NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
- EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
- File=self.MetaFile,
- ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
- % NoDatumTypePcdListString)
- self._NonDynamicPcdList = self._NonDynaPcdList_
- self._DynamicPcdList = self._DynaPcdList_
- #
- # Sort the dynamic PCD list so that:
- # 1) if a PCD's datum type is VOID* and its value is a unicode string starting with L, the PCD item
- # is put at the head of the dynamic list;
- # 2) if a PCD is HII type, the PCD item is put after the unicode-type PCDs.
- #
- # The reason for sorting is to make sure the unicode strings are double-byte aligned in the string table.
- #
- UnicodePcdArray = []
- HiiPcdArray = []
- OtherPcdArray = []
- VpdPcdDict = {}
- VpdFile = VpdInfoFile.VpdInfoFile()
- NeedProcessVpdMapFile = False
-
- for pcd in self.Platform.Pcds.keys():
- if pcd not in self._PlatformPcds.keys():
- self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]
-
- if (self.Workspace.ArchList[-1] == self.Arch):
- for Pcd in self._DynamicPcdList:
- # just pick a value to determine whether it is a unicode string type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
- PcdValue = Sku.DefaultValue
- if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
- # if a PCD whose datum value is a unicode string is found, insert it to the left of UnicodeIndex
- UnicodePcdArray.append(Pcd)
- elif len(Sku.VariableName) > 0:
- # if an HII type PCD is found, insert it to the right of UnicodeIndex
- HiiPcdArray.append(Pcd)
- else:
- OtherPcdArray.append(Pcd)
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
-
- PlatformPcds = self._PlatformPcds.keys()
- PlatformPcds.sort()
- #
- # Add VPD type PCDs into VpdFile and determine whether each VPD PCD needs to be fixed up.
- #
- for PcdKey in PlatformPcds:
- Pcd = self._PlatformPcds[PcdKey]
- if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
- PcdKey in VpdPcdDict:
- Pcd = VpdPcdDict[PcdKey]
- for (SkuName,Sku) in Pcd.SkuInfoList.items():
- Sku.VpdOffset = Sku.VpdOffset.strip()
- PcdValue = Sku.DefaultValue
- if PcdValue == "":
- PcdValue = Pcd.DefaultValue
- if Sku.VpdOffset != '*':
- if PcdValue.startswith("{"):
- Alignment = 8
- elif PcdValue.startswith("L"):
- Alignment = 2
- else:
- Alignment = 1
- try:
- VpdOffset = int(Sku.VpdOffset)
- except:
- try:
- VpdOffset = int(Sku.VpdOffset, 16)
- except:
- EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
- if VpdOffset % Alignment != 0:
- if PcdValue.startswith("{"):
- EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)
- else:
- EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
- VpdFile.Add(Pcd, Sku.VpdOffset)
- # if the offset of a VPD is *, then it needs to be fixed up by the third-party tool.
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
- if self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == '':
- EdkLogger.error("Build", FILE_NOT_FOUND, \
- "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
-
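# [Editor's note -- illustrative sketch, not part of the deleted file] The alignment rule
# enforced by the loop above, restated as a standalone helper. Assumptions: byte-array
# values ("{...}") must sit on 8-byte boundaries, unicode strings ("L...") on 2-byte
# boundaries, anything else on 1-byte boundaries; "*" means the offset is assigned later
# by the BPDG tool. The function names are hypothetical.
def _vpd_alignment_for(pcd_value):
    if pcd_value.startswith("{"):
        return 8
    if pcd_value.startswith("L"):
        return 2
    return 1

def _is_vpd_offset_aligned(pcd_value, vpd_offset):
    if vpd_offset == "*":
        return True
    try:
        offset = int(vpd_offset)        # decimal offset
    except ValueError:
        offset = int(vpd_offset, 16)    # hex offset, e.g. "0x100"
    return offset % _vpd_alignment_for(pcd_value) == 0

# Example: _is_vpd_offset_aligned('L"Setup"', "0x3") returns False (2-byte alignment needed).
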
-
- #
- # Fix up the PCDs defined in the VPD PCD section that are never referenced by any module.
- # An example is a PCD used for signature data.
- #
- for DscPcd in PlatformPcds:
- DscPcdEntry = self._PlatformPcds[DscPcd]
- if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
- if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
- FoundFlag = False
- for VpdPcd in VpdFile._VpdArray.keys():
- # This PCD has been referenced by module
- if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
- (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
- FoundFlag = True
-
- # Not found; it should be a signature PCD
- if not FoundFlag :
- # just pick a value to determine whether it is a unicode string type
- for (SkuName,Sku) in DscPcdEntry.SkuInfoList.items():
- Sku.VpdOffset = Sku.VpdOffset.strip()
-
- # Need to iterate the DEC PCD information to get the value & datum type
- for eachDec in self.PackageList:
- for DecPcd in eachDec.Pcds:
- DecPcdEntry = eachDec.Pcds[DecPcd]
- if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
- (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
- # Print a warning message so the developer can make a determination.
- EdkLogger.warn("build", "Unreferenced vpd pcd used!",
- File=self.MetaFile, \
- ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
- %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
-
- DscPcdEntry.DatumType = DecPcdEntry.DatumType
- DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
- DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
- DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
- # Only fix the value when no value is provided in the DSC file.
- if (Sku.DefaultValue == "" or Sku.DefaultValue==None):
- DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]].DefaultValue = DecPcdEntry.DefaultValue
-
- if DscPcdEntry not in self._DynamicPcdList:
- self._DynamicPcdList.append(DscPcdEntry)
-# Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
- Sku.VpdOffset = Sku.VpdOffset.strip()
- PcdValue = Sku.DefaultValue
- if PcdValue == "":
- PcdValue = DscPcdEntry.DefaultValue
- if Sku.VpdOffset != '*':
- if PcdValue.startswith("{"):
- Alignment = 8
- elif PcdValue.startswith("L"):
- Alignment = 2
- else:
- Alignment = 1
- try:
- VpdOffset = int(Sku.VpdOffset)
- except:
- try:
- VpdOffset = int(Sku.VpdOffset, 16)
- except:
- EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))
- if VpdOffset % Alignment != 0:
- if PcdValue.startswith("{"):
- EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)
- else:
- EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
- VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
- if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
- NeedProcessVpdMapFile = True
- if DscPcdEntry.DatumType == 'VOID*' and PcdValue.startswith("L"):
- UnicodePcdArray.append(DscPcdEntry)
- elif len(Sku.VariableName) > 0:
- HiiPcdArray.append(DscPcdEntry)
- else:
- OtherPcdArray.append(DscPcdEntry)
-
- # if the offset of a VPD is *, then it needs to be fixed up by the third-party tool.
-
-
-
- if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
- VpdFile.GetCount() != 0:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
- "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
-
- if VpdFile.GetCount() != 0:
- FvPath = os.path.join(self.BuildDir, "FV")
- if not os.path.exists(FvPath):
- try:
- os.makedirs(FvPath)
- except:
- EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
-
- VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
-
- if VpdFile.Write(VpdFilePath):
- # retrieve the BPDG tool's path from tools_def.txt according to the VPD_TOOL_GUID defined in the DSC file.
- BPDGToolName = None
- for ToolDef in self.ToolDefinition.values():
- if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
- if not ToolDef.has_key("PATH"):
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
- BPDGToolName = ToolDef["PATH"]
- break
- # Call third party GUID BPDG tool.
- if BPDGToolName != None:
- VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
- else:
- EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
-
- # Process VPD map file generated by third party BPDG tool
- if NeedProcessVpdMapFile:
- VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
- if os.path.exists(VpdMapFilePath):
- VpdFile.Read(VpdMapFilePath)
-
- # Fixup "*" offset
- for Pcd in self._DynamicPcdList:
- # just pick a value to determine whether it is a unicode string type
- i = 0
- for (SkuName,Sku) in Pcd.SkuInfoList.items():
- if Sku.VpdOffset == "*":
- Sku.VpdOffset = VpdFile.GetOffset(Pcd)[i].strip()
- i += 1
- else:
- EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
-
- # Clear the DynamicPcdList and rebuild it in the sorted order
- del self._DynamicPcdList[:]
- self._DynamicPcdList.extend(UnicodePcdArray)
- self._DynamicPcdList.extend(HiiPcdArray)
- self._DynamicPcdList.extend(OtherPcdArray)
- self.AllPcdList = self._NonDynamicPcdList + self._DynamicPcdList
-
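# [Editor's note -- illustrative sketch, not part of the deleted file] The ordering applied
# when self._DynamicPcdList is rebuilt above: unicode-string PCDs first (so their defaults
# stay double-byte aligned in the string table), then HII PCDs, then all other dynamic PCDs.
# The helper below is a hypothetical restatement; like the original, it inspects only the
# first SKU of each PCD.
def _dynamic_pcd_sort_key(pcd):
    sku = pcd.SkuInfoList[list(pcd.SkuInfoList.keys())[0]]
    if pcd.DatumType == "VOID*" and sku.DefaultValue.startswith("L"):
        return 0    # unicode string PCD
    if sku.VariableName:
        return 1    # HII PCD
    return 2        # any other dynamic PCD

# Because Python's sort is stable, sorted(DynamicPcdList, key=_dynamic_pcd_sort_key)
# reproduces the UnicodePcdArray + HiiPcdArray + OtherPcdArray concatenation.
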
- ## Return the platform build data object
- def _GetPlatform(self):
- if self._Platform == None:
- self._Platform = self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
- return self._Platform
-
- ## Return platform name
- def _GetName(self):
- return self.Platform.PlatformName
-
- ## Return the meta file GUID
- def _GetGuid(self):
- return self.Platform.Guid
-
- ## Return the platform version
- def _GetVersion(self):
- return self.Platform.Version
-
- ## Return the FDF file name
- def _GetFdfFile(self):
- if self._FdfFile == None:
- if self.Workspace.FdfFile != "":
- self._FdfFile= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
- else:
- self._FdfFile = ''
- return self._FdfFile
-
- ## Return the build output directory the platform specifies
- def _GetOutputDir(self):
- return self.Platform.OutputDirectory
-
- ## Return the directory to store all intermediate and final files built
- def _GetBuildDir(self):
- if self._BuildDir == None:
- if os.path.isabs(self.OutputDir):
- self._BuildDir = path.join(
- path.abspath(self.OutputDir),
- self.BuildTarget + "_" + self.ToolChain,
- )
- else:
- self._BuildDir = path.join(
- self.WorkspaceDir,
- self.OutputDir,
- self.BuildTarget + "_" + self.ToolChain,
- )
- return self._BuildDir
-
- ## Return directory of platform makefile
- #
- # @retval string Makefile directory
- #
- def _GetMakeFileDir(self):
- if self._MakeFileDir == None:
- self._MakeFileDir = path.join(self.BuildDir, self.Arch)
- return self._MakeFileDir
-
- ## Return build command string
- #
- # @retval string Build command string
- #
- def _GetBuildCommand(self):
- if self._BuildCommand == None:
- self._BuildCommand = []
- if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
- self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"])
- if "FLAGS" in self.ToolDefinition["MAKE"]:
- NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
- if NewOption != '':
- self._BuildCommand += SplitOption(NewOption)
- return self._BuildCommand
-
- ## Get tool chain definition
- #
- # Get each tool definition for the given tool chain from tools_def.txt and the platform
- #
- def _GetToolDefinition(self):
- if self._ToolDefinitions == None:
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
- if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
- ExtraData="[%s]" % self.MetaFile)
- self._ToolDefinitions = {}
- DllPathList = set()
- for Def in ToolDefinition:
- Target, Tag, Arch, Tool, Attr = Def.split("_")
- if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
- continue
-
- Value = ToolDefinition[Def]
- # don't record the DLL
- if Attr == "DLL":
- DllPathList.add(Value)
- continue
-
- if Tool not in self._ToolDefinitions:
- self._ToolDefinitions[Tool] = {}
- self._ToolDefinitions[Tool][Attr] = Value
-
- ToolsDef = ''
- MakePath = ''
- if GlobalData.gOptions.SilentMode and "MAKE" in self._ToolDefinitions:
- if "FLAGS" not in self._ToolDefinitions["MAKE"]:
- self._ToolDefinitions["MAKE"]["FLAGS"] = ""
- self._ToolDefinitions["MAKE"]["FLAGS"] += " -s"
- MakeFlags = ''
- for Tool in self._ToolDefinitions:
- for Attr in self._ToolDefinitions[Tool]:
- Value = self._ToolDefinitions[Tool][Attr]
- if Tool in self.BuildOption and Attr in self.BuildOption[Tool]:
- # check if override is indicated
- if self.BuildOption[Tool][Attr].startswith('='):
- Value = self.BuildOption[Tool][Attr][1:]
- else:
- if Attr != 'PATH':
- Value += " " + self.BuildOption[Tool][Attr]
- else:
- Value = self.BuildOption[Tool][Attr]
-
- if Attr == "PATH":
- # Don't put MAKE definition in the file
- if Tool == "MAKE":
- MakePath = Value
- else:
- ToolsDef += "%s = %s\n" % (Tool, Value)
- elif Attr != "DLL":
- # Don't put MAKE definition in the file
- if Tool == "MAKE":
- if Attr == "FLAGS":
- MakeFlags = Value
- else:
- ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
- ToolsDef += "\n"
-
- SaveFileOnChange(self.ToolDefinitionFile, ToolsDef)
- for DllPath in DllPathList:
- os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
- os.environ["MAKE_FLAGS"] = MakeFlags
-
- return self._ToolDefinitions
-
- ## Return the path of the tool definition file
- def _GetToolDefFile(self):
- if self._ToolDefFile == None:
- self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
- return self._ToolDefFile
-
- ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
- def _GetToolChainFamily(self):
- if self._ToolChainFamily == None:
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
- if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
- or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
- or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:
- EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
- % self.ToolChain)
- self._ToolChainFamily = "MSFT"
- else:
- self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
- return self._ToolChainFamily
-
- def _GetBuildRuleFamily(self):
- if self._BuildRuleFamily == None:
- ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
- if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
- or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
- or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:
- EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
- % self.ToolChain)
- self._BuildRuleFamily = "MSFT"
- else:
- self._BuildRuleFamily = ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
- return self._BuildRuleFamily
-
- ## Return the build options specific for all modules in this platform
- def _GetBuildOptions(self):
- if self._BuildOption == None:
- self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions)
- return self._BuildOption
-
- ## Return the build options specific for EDK modules in this platform
- def _GetEdkBuildOptions(self):
- if self._EdkBuildOption == None:
- self._EdkBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
- return self._EdkBuildOption
-
- ## Return the build options specific for EDKII modules in this platform
- def _GetEdkIIBuildOptions(self):
- if self._EdkIIBuildOption == None:
- self._EdkIIBuildOption = self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
- return self._EdkIIBuildOption
-
- ## Parse build_rule.txt in Conf Directory.
- #
- # @retval BuildRule object
- #
- def _GetBuildRule(self):
- if self._BuildRule == None:
- BuildRuleFile = None
- if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
- BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
- if BuildRuleFile in [None, '']:
- BuildRuleFile = gDefaultBuildRuleFile
- self._BuildRule = BuildRule(BuildRuleFile)
- if self._BuildRule._FileVersion == "":
- self._BuildRule._FileVersion = AutoGenReqBuildRuleVerNum
- else:
- if self._BuildRule._FileVersion < AutoGenReqBuildRuleVerNum :
- # If the build rule's version is less than the version number required by the tools, halt the build.
- EdkLogger.error("build", AUTOGEN_ERROR,
- ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
- % (self._BuildRule._FileVersion, AutoGenReqBuildRuleVerNum))
-
- return self._BuildRule
-
- ## Summarize the packages used by modules in this platform
- def _GetPackageList(self):
- if self._PackageList == None:
- self._PackageList = set()
- for La in self.LibraryAutoGenList:
- self._PackageList.update(La.DependentPackageList)
- for Ma in self.ModuleAutoGenList:
- self._PackageList.update(Ma.DependentPackageList)
- # Collect package set information from the INFs listed in the FDF
- PkgSet = set()
- for ModuleFile in self._AsBuildModuleList:
- if ModuleFile in self.Platform.Modules:
- continue
- ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]
- PkgSet.update(ModuleData.Packages)
- self._PackageList = list(self._PackageList) + list (PkgSet)
- return self._PackageList
-
- def _GetNonDynamicPcdDict(self):
- if self._NonDynamicPcdDict:
- return self._NonDynamicPcdDict
- for Pcd in self.NonDynamicPcdList:
- self._NonDynamicPcdDict[(Pcd.TokenCName,Pcd.TokenSpaceGuidCName)] = Pcd
- return self._NonDynamicPcdDict
-
- ## Get list of non-dynamic PCDs
- def _GetNonDynamicPcdList(self):
- if self._NonDynamicPcdList == None:
- self.CollectPlatformDynamicPcds()
- return self._NonDynamicPcdList
-
- ## Get list of dynamic PCDs
- def _GetDynamicPcdList(self):
- if self._DynamicPcdList == None:
- self.CollectPlatformDynamicPcds()
- return self._DynamicPcdList
-
- ## Generate Token Number for all PCD
- def _GetPcdTokenNumbers(self):
- if self._PcdTokenNumber == None:
- self._PcdTokenNumber = sdict()
- TokenNumber = 1
- #
- # Make the Dynamic and DynamicEx PCDs use different TokenNumber ranges.
- # For example:
- #
- # Dynamic PCD:
- # TokenNumber 0 ~ 10
- # DynamicEx PCD:
- # TokenNumber 11 ~ 20
- #
- for Pcd in self.DynamicPcdList:
- if Pcd.Phase == "PEI":
- if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
- self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
- TokenNumber += 1
-
- for Pcd in self.DynamicPcdList:
- if Pcd.Phase == "PEI":
- if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
- self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
- TokenNumber += 1
-
- for Pcd in self.DynamicPcdList:
- if Pcd.Phase == "DXE":
- if Pcd.Type in ["Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii"]:
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
- self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
- TokenNumber += 1
-
- for Pcd in self.DynamicPcdList:
- if Pcd.Phase == "DXE":
- if Pcd.Type in ["DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii"]:
- EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
- self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
- TokenNumber += 1
-
- for Pcd in self.NonDynamicPcdList:
- self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
- TokenNumber += 1
- return self._PcdTokenNumber
-
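# [Editor's note -- illustrative sketch, not part of the deleted file] A compact restatement
# of the numbering pass above, assuming the same phase/type buckets: one counter runs across
# PEI Dynamic, PEI DynamicEx, DXE Dynamic, DXE DynamicEx and finally the non-dynamic PCDs,
# so no two PCDs ever share a token number. All names below are hypothetical.
DYNAMIC_TYPES = ("Dynamic", "DynamicDefault", "DynamicVpd", "DynamicHii")
DYNAMIC_EX_TYPES = ("DynamicEx", "DynamicExDefault", "DynamicExVpd", "DynamicExHii")

def assign_token_numbers(dynamic_pcds, non_dynamic_pcds):
    token_numbers = {}
    counter = 1
    for phase, types in (("PEI", DYNAMIC_TYPES), ("PEI", DYNAMIC_EX_TYPES),
                         ("DXE", DYNAMIC_TYPES), ("DXE", DYNAMIC_EX_TYPES)):
        for pcd in dynamic_pcds:
            if pcd.Phase == phase and pcd.Type in types:
                token_numbers[pcd.TokenCName, pcd.TokenSpaceGuidCName] = counter
                counter += 1
    for pcd in non_dynamic_pcds:
        token_numbers[pcd.TokenCName, pcd.TokenSpaceGuidCName] = counter
        counter += 1
    return token_numbers
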
- ## Summarize ModuleAutoGen objects of all modules/libraries to be built for this platform
- def _GetAutoGenObjectList(self):
- self._ModuleAutoGenList = []
- self._LibraryAutoGenList = []
- for ModuleFile in self.Platform.Modules:
- Ma = ModuleAutoGen(
- self.Workspace,
- ModuleFile,
- self.BuildTarget,
- self.ToolChain,
- self.Arch,
- self.MetaFile
- )
- if Ma not in self._ModuleAutoGenList:
- self._ModuleAutoGenList.append(Ma)
- for La in Ma.LibraryAutoGenList:
- if La not in self._LibraryAutoGenList:
- self._LibraryAutoGenList.append(La)
- if Ma not in La._ReferenceModules:
- La._ReferenceModules.append(Ma)
-
- ## Summarize ModuleAutoGen objects of all modules to be built for this platform
- def _GetModuleAutoGenList(self):
- if self._ModuleAutoGenList == None:
- self._GetAutoGenObjectList()
- return self._ModuleAutoGenList
-
- ## Summarize ModuleAutoGen objects of all libraries to be built for this platform
- def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
- self._GetAutoGenObjectList()
- return self._LibraryAutoGenList
-
- ## Test if a module is supported by the platform
- #
- # An error will be raised directly if the module or its arch is not supported
- # by the platform or current configuration
- #
- def ValidModule(self, Module):
- return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
- or Module in self._AsBuildModuleList
-
- ## Resolve the library classes in a module to library instances
- #
- # This method will not only resolve library classes but also sort the library
- # instances according to their dependencies.
- #
- # @param Module The module from which the library classes will be resolved
- #
- # @retval library_list List of library instances sorted
- #
- def ApplyLibraryInstance(self, Module):
- # Cover the case where the binary INF file is listed in the FDF file but not in the DSC file; return an empty list directly
- if str(Module) not in self.Platform.Modules:
- return []
-
- ModuleType = Module.ModuleType
-
- # for overriding library instances with module-specific settings
- PlatformModule = self.Platform.Modules[str(Module)]
-
- # add forced library instances (specified under LibraryClasses sections)
- #
- # If a module has a MODULE_TYPE of USER_DEFINED,
- # do not link in NULL library class instances from the global [LibraryClasses.*] sections.
- #
- if Module.ModuleType != SUP_MODULE_USER_DEFINED:
- for LibraryClass in self.Platform.LibraryClasses.GetKeys():
- if LibraryClass.startswith("NULL") and self.Platform.LibraryClasses[LibraryClass, Module.ModuleType]:
- Module.LibraryClasses[LibraryClass] = self.Platform.LibraryClasses[LibraryClass, Module.ModuleType]
-
- # add forced library instances (specified in module overrides)
- for LibraryClass in PlatformModule.LibraryClasses:
- if LibraryClass.startswith("NULL"):
- Module.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
-
- # EdkII module
- LibraryConsumerList = [Module]
- Constructor = []
- ConsumedByList = sdict()
- LibraryInstance = sdict()
-
- EdkLogger.verbose("")
- EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
- while len(LibraryConsumerList) > 0:
- M = LibraryConsumerList.pop()
- for LibraryClassName in M.LibraryClasses:
- if LibraryClassName not in LibraryInstance:
- # override library instance for this module
- if LibraryClassName in PlatformModule.LibraryClasses:
- LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
- else:
- LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
- LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
- EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
- "Instance of library class [%s] is not found" % LibraryClassName,
- File=self.MetaFile,
- ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), self.Arch, str(Module)))
-
- LibraryModule = self.BuildDatabase[LibraryPath, self.Arch, self.BuildTarget, self.ToolChain]
- # for forced library instances (NULL libraries), add a fake library class
- if LibraryClassName.startswith("NULL"):
- LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
- or len(LibraryModule.LibraryClass) == 0 \
- or (ModuleType != 'USER_DEFINED'
- and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
- # only USER_DEFINED can link against any library instance regardless of its SupModList
- EdkLogger.error("build", OPTION_MISSING,
- "Module type [%s] is not supported by library instance [%s]" \
- % (ModuleType, LibraryPath), File=self.MetaFile,
- ExtraData="consumed by [%s]" % str(Module))
-
- LibraryInstance[LibraryClassName] = LibraryModule
- LibraryConsumerList.append(LibraryModule)
- EdkLogger.verbose("\t" + str(LibraryClassName) + " : " + str(LibraryModule))
- else:
- LibraryModule = LibraryInstance[LibraryClassName]
-
- if LibraryModule == None:
- continue
-
- if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
- Constructor.append(LibraryModule)
-
- if LibraryModule not in ConsumedByList:
- ConsumedByList[LibraryModule] = []
- # don't add current module itself to consumer list
- if M != Module:
- if M in ConsumedByList[LibraryModule]:
- continue
- ConsumedByList[LibraryModule].append(M)
- #
- # Initialize the sorted output list to the empty set
- #
- SortedLibraryList = []
- #
- # Q <- Set of all nodes with no incoming edges
- #
- LibraryList = [] #LibraryInstance.values()
- Q = []
- for LibraryClassName in LibraryInstance:
- M = LibraryInstance[LibraryClassName]
- LibraryList.append(M)
- if ConsumedByList[M] == []:
- Q.append(M)
-
- #
- # start the DAG algorithm
- #
- while True:
- EdgeRemoved = True
- while Q == [] and EdgeRemoved:
- EdgeRemoved = False
- # for each node Item with a Constructor
- for Item in LibraryList:
- if Item not in Constructor:
- continue
- # for each Node without a constructor with an edge e from Item to Node
- for Node in ConsumedByList[Item]:
- if Node in Constructor:
- continue
- # remove edge e from the graph if Node has no constructor
- ConsumedByList[Item].remove(Node)
- EdgeRemoved = True
- if ConsumedByList[Item] == []:
- # insert Item into Q
- Q.insert(0, Item)
- break
- if Q != []:
- break
- # DAG is done if there's no more incoming edge for all nodes
- if Q == []:
- break
-
- # remove node from Q
- Node = Q.pop()
- # output Node
- SortedLibraryList.append(Node)
-
- # for each node Item with an edge e from Node to Item do
- for Item in LibraryList:
- if Node not in ConsumedByList[Item]:
- continue
- # remove edge e from the graph
- ConsumedByList[Item].remove(Node)
-
- if ConsumedByList[Item] != []:
- continue
- # insert Item into Q, if Item has no other incoming edges
- Q.insert(0, Item)
-
- #
- # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
- #
- for Item in LibraryList:
- if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
- ErrorMessage = "\tconsumed by " + "\n\tconsumed by ".join([str(L) for L in ConsumedByList[Item]])
- EdkLogger.error("build", BUILD_ERROR, 'Library [%s] with constructors has a cycle' % str(Item),
- ExtraData=ErrorMessage, File=self.MetaFile)
- if Item not in SortedLibraryList:
- SortedLibraryList.append(Item)
-
- #
- # Build the list of constructor and destructor names.
- # The DAG topological sort produces the destructor order, so the list of constructors must be generated in the reverse order
- #
- SortedLibraryList.reverse()
- return SortedLibraryList
-
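# [Editor's note -- illustrative sketch, not part of the deleted file] The heart of the
# ordering above is Kahn's topological sort over the "consumed-by" graph, with the result
# reversed at the end because the sort itself yields destructor order. The generic version
# below ignores the constructor-less edge-pruning special case handled in the original code.
def topo_sort_libraries(consumed_by):
    # consumed_by maps each library to the list of modules/libraries that consume it
    ready = [lib for lib, consumers in consumed_by.items() if not consumers]
    ordered = []
    while ready:
        node = ready.pop()
        ordered.append(node)
        for lib, consumers in consumed_by.items():
            if node in consumers:
                consumers.remove(node)
                if not consumers:
                    ready.append(lib)
    if any(consumed_by.values()):
        raise ValueError("cycle detected among libraries with constructors")
    ordered.reverse()    # constructor order is the reverse of destructor order
    return ordered
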
-
- ## Override PCD setting (type, value, ...)
- #
- # @param ToPcd The PCD to be overridden
- # @param FromPcd The PCD to override from
- #
- def _OverridePcd(self, ToPcd, FromPcd, Module=""):
- #
- # In case there are PCDs coming from the FDF file which have no type given,
- # at this point ToPcd.Type has the type found from the dependent
- # package
- #
- TokenCName = ToPcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- if FromPcd != None:
- if GlobalData.BuildOptionPcd:
- for pcd in GlobalData.BuildOptionPcd:
- if (FromPcd.TokenSpaceGuidCName, FromPcd.TokenCName) == (pcd[0], pcd[1]):
- FromPcd.DefaultValue = pcd[2]
- break
- if ToPcd.Pending and FromPcd.Type not in [None, '']:
- ToPcd.Type = FromPcd.Type
- elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
- and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
- if ToPcd.Type.strip() == "DynamicEx":
- ToPcd.Type = FromPcd.Type
- elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
- and ToPcd.Type != FromPcd.Type:
- EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
- ExtraData="%s.%s is defined as [%s] in module %s, but as [%s] in platform."\
- % (ToPcd.TokenSpaceGuidCName, TokenCName,
- ToPcd.Type, Module, FromPcd.Type),
- File=self.MetaFile)
-
- if FromPcd.MaxDatumSize not in [None, '']:
- ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
- if FromPcd.DefaultValue not in [None, '']:
- ToPcd.DefaultValue = FromPcd.DefaultValue
- if FromPcd.TokenValue not in [None, '']:
- ToPcd.TokenValue = FromPcd.TokenValue
- if FromPcd.MaxDatumSize not in [None, '']:
- ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
- if FromPcd.DatumType not in [None, '']:
- ToPcd.DatumType = FromPcd.DatumType
- if FromPcd.SkuInfoList not in [None, '', []]:
- ToPcd.SkuInfoList = FromPcd.SkuInfoList
-
- # check the validity of the datum value
- IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
- if not IsValid:
- EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
- ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
- ToPcd.validateranges = FromPcd.validateranges
- ToPcd.validlists = FromPcd.validlists
- ToPcd.expressions = FromPcd.expressions
-
- if ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]:
- EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
- % (ToPcd.TokenSpaceGuidCName, TokenCName))
- Value = ToPcd.DefaultValue
- if Value in [None, '']:
- ToPcd.MaxDatumSize = '1'
- elif Value[0] == 'L':
- ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
- elif Value[0] == '{':
- ToPcd.MaxDatumSize = str(len(Value.split(',')))
- else:
- ToPcd.MaxDatumSize = str(len(Value) - 1)
-
- # apply the default SKU for dynamic PCDs if the specified one is not available
- if (ToPcd.Type in PCD_DYNAMIC_TYPE_LIST or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_LIST) \
- and ToPcd.SkuInfoList in [None, {}, '']:
- if self.Platform.SkuName in self.Platform.SkuIds:
- SkuName = self.Platform.SkuName
- else:
- SkuName = 'DEFAULT'
- ToPcd.SkuInfoList = {
- SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName], '', '', '', '', '', ToPcd.DefaultValue)
- }
-
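# [Editor's note -- illustrative sketch, not part of the deleted file] The MaxDatumSize
# fallback applied above when a VOID* PCD carries no explicit size, extracted as a
# hypothetical helper. It mirrors the original arithmetic: unicode strings count two bytes
# per character plus the terminator, byte arrays count their comma-separated elements,
# ASCII strings count one byte per character plus the terminator.
def _default_max_datum_size(default_value):
    if not default_value:
        return "1"
    if default_value[0] == "L":            # L"..." unicode string
        return str((len(default_value) - 2) * 2)
    if default_value[0] == "{":            # {0x01, 0x02, ...} byte array
        return str(len(default_value.split(",")))
    return str(len(default_value) - 1)     # "..." ASCII string

# Example: _default_max_datum_size('L"Setup"') returns "12" (5 characters + NUL, 2 bytes each).
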
- ## Apply the PCD settings defined in the platform to a module
- #
- # @param Module The module whose PCD settings will be overridden
- #
- # @retval PCD_list The list of PCDs with settings from the platform
- #
- def ApplyPcdSetting(self, Module, Pcds):
- # for each PCD in module
- for Name, Guid in Pcds:
- PcdInModule = Pcds[Name, Guid]
- # find out the PCD setting in platform
- if (Name, Guid) in self.Platform.Pcds:
- PcdInPlatform = self.Platform.Pcds[Name, Guid]
- else:
- PcdInPlatform = None
- # then override the settings if any
- self._OverridePcd(PcdInModule, PcdInPlatform, Module)
- # resolve the VariableGuid value
- for SkuId in PcdInModule.SkuInfoList:
- Sku = PcdInModule.SkuInfoList[SkuId]
- if Sku.VariableGuid == '': continue
- Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
- if Sku.VariableGuidValue == None:
- PackageList = "\n\t".join([str(P) for P in self.PackageList])
- EdkLogger.error(
- 'build',
- RESOURCE_NOT_AVAILABLE,
- "Value of GUID [%s] is not found in" % Sku.VariableGuid,
- ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
- % (Guid, Name, str(Module)),
- File=self.MetaFile
- )
-
- # override PCD settings with module specific setting
- if Module in self.Platform.Modules:
- PlatformModule = self.Platform.Modules[str(Module)]
- for Key in PlatformModule.Pcds:
- if Key in Pcds:
- self._OverridePcd(Pcds[Key], PlatformModule.Pcds[Key], Module)
- return Pcds.values()
-
- ## Resolve library names to library modules
- #
- # (for Edk.x modules)
- #
- # @param Module The module from which the library names will be resolved
- #
- # @retval library_list The list of library modules
- #
- def ResolveLibraryReference(self, Module):
- EdkLogger.verbose("")
- EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch))
- LibraryConsumerList = [Module]
-
- # "CompilerStub" is a must for Edk modules
- if Module.Libraries:
- Module.Libraries.append("CompilerStub")
- LibraryList = []
- while len(LibraryConsumerList) > 0:
- M = LibraryConsumerList.pop()
- for LibraryName in M.Libraries:
- Library = self.Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
- for Key in self.Platform.LibraryClasses.data.keys():
- if LibraryName.upper() == Key.upper():
- Library = self.Platform.LibraryClasses[Key, ':dummy:']
- break
- if Library == None:
- EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M),
- ExtraData="\t%s [%s]" % (str(Module), self.Arch))
- continue
-
- if Library not in LibraryList:
- LibraryList.append(Library)
- LibraryConsumerList.append(Library)
- EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library)))
- return LibraryList
-
- ## Calculate the priority value of the build option
- #
- # @param Key Build option definition containing: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
- #
- # @retval Value Priority value based on the priority list.
- #
- def CalculatePriorityValue(self, Key):
- Target, ToolChain, Arch, CommandType, Attr = Key.split('_')
- PriorityValue = 0x11111
- if Target == "*":
- PriorityValue &= 0x01111
- if ToolChain == "*":
- PriorityValue &= 0x10111
- if Arch == "*":
- PriorityValue &= 0x11011
- if CommandType == "*":
- PriorityValue &= 0x11101
- if Attr == "*":
- PriorityValue &= 0x11110
-
- return self.PrioList["0x%0.5x" % PriorityValue]
-
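# [Editor's note -- illustrative sketch, not part of the deleted file] The priority encoding
# computed above: each of the five key fields contributes one hex digit, scoring 1 for a
# literal value and 0 for a "*" wildcard, so fully qualified keys always beat wildcarded
# ones. The helper name is hypothetical.
def priority_mask(key):
    value = 0x11111
    masks = (0x01111, 0x10111, 0x11011, 0x11101, 0x11110)
    for field, mask in zip(key.split("_"), masks):
        if field == "*":
            value &= mask
    return "0x%0.5x" % value    # used as the index into the PrioList lookup table

# Example: priority_mask("*_*_*_CC_FLAGS") returns "0x00011".
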
-
- ## Expand * in build option key
- #
- # @param Options Options to be expanded
- #
- # @retval options Options expanded
- #
- def _ExpandBuildOption(self, Options, ModuleStyle=None):
- BuildOptions = {}
- FamilyMatch = False
- FamilyIsNull = True
-
- OverrideList = {}
- #
- # Construct a list containing the build options which need to be overridden.
- #
- for Key in Options:
- #
- # Key[0] -- tool family
- # Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
- #
- if (Key[0] == self.BuildRuleFamily and
- (ModuleStyle == None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
- Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
- if Target == self.BuildTarget or Target == "*":
- if ToolChain == self.ToolChain or ToolChain == "*":
- if Arch == self.Arch or Arch == "*":
- if Options[Key].startswith("="):
- if OverrideList.get(Key[1]) != None:
- OverrideList.pop(Key[1])
- OverrideList[Key[1]] = Options[Key]
-
- #
- # Use the highest priority value.
- #
- if (len(OverrideList) >= 2):
- KeyList = OverrideList.keys()
- for Index in range(len(KeyList)):
- NowKey = KeyList[Index]
- Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
- for Index1 in range(len(KeyList) - Index - 1):
- NextKey = KeyList[Index1 + Index + 1]
- #
- # Compare two keys; if one is included by the other, choose the higher priority one
- #
- Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
- if Target1 == Target2 or Target1 == "*" or Target2 == "*":
- if ToolChain1 == ToolChain2 or ToolChain1 == "*" or ToolChain2 == "*":
- if Arch1 == Arch2 or Arch1 == "*" or Arch2 == "*":
- if CommandType1 == CommandType2 or CommandType1 == "*" or CommandType2 == "*":
- if Attr1 == Attr2 or Attr1 == "*" or Attr2 == "*":
- if self.CalculatePriorityValue(NowKey) > self.CalculatePriorityValue(NextKey):
- if Options.get((self.BuildRuleFamily, NextKey)) != None:
- Options.pop((self.BuildRuleFamily, NextKey))
- else:
- if Options.get((self.BuildRuleFamily, NowKey)) != None:
- Options.pop((self.BuildRuleFamily, NowKey))
-
- for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
- # Check whether the module style is EDK or EDKII.
- # Only append build options for modules of the matching style.
- if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
- continue
- elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
- continue
- Family = Key[0]
- Target, Tag, Arch, Tool, Attr = Key[1].split("_")
- # if tool chain family doesn't match, skip it
- if Tool in self.ToolDefinition and Family != "":
- FamilyIsNull = False
- if self.ToolDefinition[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
- if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
- continue
- elif Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
- continue
- FamilyMatch = True
- # expand any wildcard
- if Target == "*" or Target == self.BuildTarget:
- if Tag == "*" or Tag == self.ToolChain:
- if Arch == "*" or Arch == self.Arch:
- if Tool not in BuildOptions:
- BuildOptions[Tool] = {}
- if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
- BuildOptions[Tool][Attr] = Options[Key]
- else:
- # append options for the same tool except PATH
- if Attr != 'PATH':
- BuildOptions[Tool][Attr] += " " + Options[Key]
- else:
- BuildOptions[Tool][Attr] = Options[Key]
- # The build option family has been checked and needn't be checked again.
- if FamilyMatch or FamilyIsNull:
- return BuildOptions
-
- for Key in Options:
- if ModuleStyle != None and len (Key) > 2:
- # Check whether the module style is EDK or EDKII.
- # Only append build options for modules of the matching style.
- if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
- continue
- elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
- continue
- Family = Key[0]
- Target, Tag, Arch, Tool, Attr = Key[1].split("_")
- # if tool chain family doesn't match, skip it
- if Tool not in self.ToolDefinition or Family == "":
- continue
- # option has been added before
- if Family != self.ToolDefinition[Tool][TAB_TOD_DEFINES_FAMILY]:
- continue
-
- # expand any wildcard
- if Target == "*" or Target == self.BuildTarget:
- if Tag == "*" or Tag == self.ToolChain:
- if Arch == "*" or Arch == self.Arch:
- if Tool not in BuildOptions:
- BuildOptions[Tool] = {}
- if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
- BuildOptions[Tool][Attr] = Options[Key]
- else:
- # append options for the same tool except PATH
- if Attr != 'PATH':
- BuildOptions[Tool][Attr] += " " + Options[Key]
- else:
- BuildOptions[Tool][Attr] = Options[Key]
- return BuildOptions
-
- ## Append build options in platform to a module
- #
- # @param Module The module to which the build options will be appended
- #
- # @retval options The options appended with build options in platform
- #
- def ApplyBuildOption(self, Module):
- # Get the different options for the different style module
- if Module.AutoGenVersion < 0x00010005:
- PlatformOptions = self.EdkBuildOption
- ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDK_NAME, Module.ModuleType)
- else:
- PlatformOptions = self.EdkIIBuildOption
- ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
- ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
- ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
- if Module in self.Platform.Modules:
- PlatformModule = self.Platform.Modules[str(Module)]
- PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
- else:
- PlatformModuleOptions = {}
-
- BuildRuleOrder = None
- for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
- for Tool in Options:
- for Attr in Options[Tool]:
- if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
- BuildRuleOrder = Options[Tool][Attr]
-
- AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() +
- PlatformModuleOptions.keys() + ModuleTypeOptions.keys() +
- self.ToolDefinition.keys())
- BuildOptions = {}
- for Tool in AllTools:
- if Tool not in BuildOptions:
- BuildOptions[Tool] = {}
-
- for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
- if Tool not in Options:
- continue
- for Attr in Options[Tool]:
- Value = Options[Tool][Attr]
- #
- # Do not generate it in Makefile
- #
- if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
- continue
- if Attr not in BuildOptions[Tool]:
- BuildOptions[Tool][Attr] = ""
- # check if override is indicated
- if Value.startswith('='):
- ToolPath = Value[1:]
- ToolPath = mws.handleWsMacro(ToolPath)
- BuildOptions[Tool][Attr] = ToolPath
- else:
- Value = mws.handleWsMacro(Value)
- if Attr != 'PATH':
- BuildOptions[Tool][Attr] += " " + Value
- else:
- BuildOptions[Tool][Attr] = Value
- if Module.AutoGenVersion < 0x00010005 and self.Workspace.UniFlag != None:
- #
- # Override UNI flag only for EDK module.
- #
- if 'BUILD' not in BuildOptions:
- BuildOptions['BUILD'] = {}
- BuildOptions['BUILD']['FLAGS'] = self.Workspace.UniFlag
- return BuildOptions, BuildRuleOrder
-
- Platform = property(_GetPlatform)
- Name = property(_GetName)
- Guid = property(_GetGuid)
- Version = property(_GetVersion)
-
- OutputDir = property(_GetOutputDir)
- BuildDir = property(_GetBuildDir)
- MakeFileDir = property(_GetMakeFileDir)
- FdfFile = property(_GetFdfFile)
-
- PcdTokenNumber = property(_GetPcdTokenNumbers) # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber
- DynamicPcdList = property(_GetDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
- NonDynamicPcdList = property(_GetNonDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
- NonDynamicPcdDict = property(_GetNonDynamicPcdDict)
- PackageList = property(_GetPackageList)
-
- ToolDefinition = property(_GetToolDefinition) # toolcode : tool path
- ToolDefinitionFile = property(_GetToolDefFile) # toolcode : lib path
- ToolChainFamily = property(_GetToolChainFamily)
- BuildRuleFamily = property(_GetBuildRuleFamily)
- BuildOption = property(_GetBuildOptions) # toolcode : option
- EdkBuildOption = property(_GetEdkBuildOptions) # edktoolcode : option
- EdkIIBuildOption = property(_GetEdkIIBuildOptions) # edkiitoolcode : option
-
- BuildCommand = property(_GetBuildCommand)
- BuildRule = property(_GetBuildRule)
- ModuleAutoGenList = property(_GetModuleAutoGenList)
- LibraryAutoGenList = property(_GetLibraryAutoGenList)
- GenFdsCommand = property(_GenFdsCommand)
-
-## ModuleAutoGen class
-#
- # This class encapsulates the AutoGen behaviors for the build tools. In addition to
- # the generation of AutoGen.h and AutoGen.c, it will generate the *.depex file according
- # to the [depex] section in the module's INF file.
-#
-class ModuleAutoGen(AutoGen):
- ## Cache the timestamps of metafiles of every module in a class variable
- #
- TimeDict = {}
-
- ## The real constructor of ModuleAutoGen
- #
- # This method is not supposed to be called by users of ModuleAutoGen. It's
- # only used by factory method __new__() to do real initialization work for an
- # object of ModuleAutoGen
- #
- # @param Workspace EdkIIWorkspaceBuild object
- # @param ModuleFile The path of module file
- # @param Target Build target (DEBUG, RELEASE)
- # @param Toolchain Name of tool chain
- # @param Arch The arch the module supports
- # @param PlatformFile Platform meta-file
- #
- def _Init(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
- EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
- GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)
-
- self.Workspace = Workspace
- self.WorkspaceDir = Workspace.WorkspaceDir
-
- self.MetaFile = ModuleFile
- self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)
- # check if this module is employed by active platform
- if not self.PlatformInfo.ValidModule(self.MetaFile):
- EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
- % (self.MetaFile, Arch))
- return False
-
- self.SourceDir = self.MetaFile.SubDir
- self.SourceDir = mws.relpath(self.SourceDir, self.WorkspaceDir)
-
- self.SourceOverrideDir = None
- # use the overridden path defined in the DSC file
- if self.MetaFile.Key in GlobalData.gOverrideDir:
- self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]
-
- self.ToolChain = Toolchain
- self.BuildTarget = Target
- self.Arch = Arch
- self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
- self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily
-
- self.IsMakeFileCreated = False
- self.IsCodeFileCreated = False
- self.IsAsBuiltInfCreated = False
- self.DepexGenerated = False
-
- self.BuildDatabase = self.Workspace.BuildDatabase
- self.BuildRuleOrder = None
-
- self._Module = None
- self._Name = None
- self._Guid = None
- self._Version = None
- self._ModuleType = None
- self._ComponentType = None
- self._PcdIsDriver = None
- self._AutoGenVersion = None
- self._LibraryFlag = None
- self._CustomMakefile = None
- self._Macro = None
-
- self._BuildDir = None
- self._OutputDir = None
- self._DebugDir = None
- self._MakeFileDir = None
-
- self._IncludePathList = None
- self._IncludePathLength = 0
- self._AutoGenFileList = None
- self._UnicodeFileList = None
- self._VfrFileList = None
- self._IdfFileList = None
- self._SourceFileList = None
- self._ObjectFileList = None
- self._BinaryFileList = None
-
- self._DependentPackageList = None
- self._DependentLibraryList = None
- self._LibraryAutoGenList = None
- self._DerivedPackageList = None
- self._ModulePcdList = None
- self._LibraryPcdList = None
- self._PcdComments = sdict()
- self._GuidList = None
- self._GuidsUsedByPcd = None
- self._GuidComments = sdict()
- self._ProtocolList = None
- self._ProtocolComments = sdict()
- self._PpiList = None
- self._PpiComments = sdict()
- self._DepexList = None
- self._DepexExpressionList = None
- self._BuildOption = None
- self._BuildOptionIncPathList = None
- self._BuildTargets = None
- self._IntroBuildTargetList = None
- self._FinalBuildTargetList = None
- self._FileTypes = None
- self._BuildRules = None
-
- self._TimeStampPath = None
-
- self.AutoGenDepSet = set()
-
-
- ## The Modules referenced to this Library
- # Only Library has this attribute
- self._ReferenceModules = []
-
- ## Store the FixedAtBuild Pcds
- #
- self._FixedAtBuildPcds = []
- self.ConstPcd = {}
- return True
-
- def __repr__(self):
- return "%s [%s]" % (self.MetaFile, self.Arch)
-
- # Get FixedAtBuild Pcds of this Module
- def _GetFixedAtBuildPcds(self):
- if self._FixedAtBuildPcds:
- return self._FixedAtBuildPcds
- for Pcd in self.ModulePcdList:
- if Pcd.Type != "FixedAtBuild":
- continue
- if Pcd not in self._FixedAtBuildPcds:
- self._FixedAtBuildPcds.append(Pcd)
-
- return self._FixedAtBuildPcds
-
- def _GetUniqueBaseName(self):
- BaseName = self.Name
- for Module in self.PlatformInfo.ModuleAutoGenList:
- if Module.MetaFile == self.MetaFile:
- continue
- if Module.Name == self.Name:
- if uuid.UUID(Module.Guid) == uuid.UUID(self.Guid):
- EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
- ' %s\n %s' % (Module.MetaFile, self.MetaFile))
- BaseName = '%s_%s' % (self.Name, self.Guid)
- return BaseName
-
- # Macros could be used in build_rule.txt (also Makefile)
- def _GetMacros(self):
- if self._Macro == None:
- self._Macro = sdict()
- self._Macro["WORKSPACE" ] = self.WorkspaceDir
- self._Macro["MODULE_NAME" ] = self.Name
- self._Macro["MODULE_NAME_GUID" ] = self._GetUniqueBaseName()
- self._Macro["MODULE_GUID" ] = self.Guid
- self._Macro["MODULE_VERSION" ] = self.Version
- self._Macro["MODULE_TYPE" ] = self.ModuleType
- self._Macro["MODULE_FILE" ] = str(self.MetaFile)
- self._Macro["MODULE_FILE_BASE_NAME" ] = self.MetaFile.BaseName
- self._Macro["MODULE_RELATIVE_DIR" ] = self.SourceDir
- self._Macro["MODULE_DIR" ] = self.SourceDir
-
- self._Macro["BASE_NAME" ] = self.Name
-
- self._Macro["ARCH" ] = self.Arch
- self._Macro["TOOLCHAIN" ] = self.ToolChain
- self._Macro["TOOLCHAIN_TAG" ] = self.ToolChain
- self._Macro["TOOL_CHAIN_TAG" ] = self.ToolChain
- self._Macro["TARGET" ] = self.BuildTarget
-
- self._Macro["BUILD_DIR" ] = self.PlatformInfo.BuildDir
- self._Macro["BIN_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch)
- self._Macro["LIB_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch)
- self._Macro["MODULE_BUILD_DIR" ] = self.BuildDir
- self._Macro["OUTPUT_DIR" ] = self.OutputDir
- self._Macro["DEBUG_DIR" ] = self.DebugDir
- self._Macro["DEST_DIR_OUTPUT" ] = self.OutputDir
- self._Macro["DEST_DIR_DEBUG" ] = self.DebugDir
- self._Macro["PLATFORM_NAME" ] = self.PlatformInfo.Name
- self._Macro["PLATFORM_GUID" ] = self.PlatformInfo.Guid
- self._Macro["PLATFORM_VERSION" ] = self.PlatformInfo.Version
- self._Macro["PLATFORM_RELATIVE_DIR" ] = self.PlatformInfo.SourceDir
- self._Macro["PLATFORM_DIR" ] = mws.join(self.WorkspaceDir, self.PlatformInfo.SourceDir)
- self._Macro["PLATFORM_OUTPUT_DIR" ] = self.PlatformInfo.OutputDir
- return self._Macro
-
- ## Return the module build data object
- def _GetModule(self):
- if self._Module == None:
- self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
- return self._Module
-
- ## Return the module name
- def _GetBaseName(self):
- return self.Module.BaseName
-
- ## Return the module DxsFile if exist
- def _GetDxsFile(self):
- return self.Module.DxsFile
-
- ## Return the module SourceOverridePath
- def _GetSourceOverridePath(self):
- return self.Module.SourceOverridePath
-
- ## Return the module meta-file GUID
- def _GetGuid(self):
- #
- # To build the same module more than once, the module path with FILE_GUID overridden has
- # the file name FILE_GUIDmodule.inf, but the relative path (self.MetaFile.File) is the real path
- # in the DSC. The overridden GUID can be retrieved from the file name
- #
- if os.path.basename(self.MetaFile.File) != os.path.basename(self.MetaFile.Path):
- #
- # Length of GUID is 36
- #
- return os.path.basename(self.MetaFile.Path)[:36]
- return self.Module.Guid
-
- ## Return the module version
- def _GetVersion(self):
- return self.Module.Version
-
- ## Return the module type
- def _GetModuleType(self):
- return self.Module.ModuleType
-
- ## Return the component type (for Edk.x style of module)
- def _GetComponentType(self):
- return self.Module.ComponentType
-
- ## Return the build type
- def _GetBuildType(self):
- return self.Module.BuildType
-
- ## Return the PCD_IS_DRIVER setting
- def _GetPcdIsDriver(self):
- return self.Module.PcdIsDriver
-
- ## Return the autogen version, i.e. module meta-file version
- def _GetAutoGenVersion(self):
- return self.Module.AutoGenVersion
-
- ## Check if the module is library or not
- def _IsLibrary(self):
- if self._LibraryFlag == None:
- if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
- self._LibraryFlag = True
- else:
- self._LibraryFlag = False
- return self._LibraryFlag
-
- ## Check if the module is binary module or not
- def _IsBinaryModule(self):
- return self.Module.IsBinaryModule
-
- ## Return the directory to store intermediate files of the module
- def _GetBuildDir(self):
- if self._BuildDir == None:
- self._BuildDir = path.join(
- self.PlatformInfo.BuildDir,
- self.Arch,
- self.SourceDir,
- self.MetaFile.BaseName
- )
- CreateDirectory(self._BuildDir)
- return self._BuildDir
-
- ## Return the directory to store the intermediate object files of the module
- def _GetOutputDir(self):
- if self._OutputDir == None:
- self._OutputDir = path.join(self.BuildDir, "OUTPUT")
- CreateDirectory(self._OutputDir)
- return self._OutputDir
-
- ## Return the directory to store the auto-generated source files of the module
- def _GetDebugDir(self):
- if self._DebugDir == None:
- self._DebugDir = path.join(self.BuildDir, "DEBUG")
- CreateDirectory(self._DebugDir)
- return self._DebugDir
-
- ## Return the path of the custom makefile
- def _GetCustomMakefile(self):
- if self._CustomMakefile == None:
- self._CustomMakefile = {}
- for Type in self.Module.CustomMakefile:
- if Type in gMakeTypeMap:
- MakeType = gMakeTypeMap[Type]
- else:
- MakeType = 'nmake'
- if self.SourceOverrideDir != None:
- File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
- if not os.path.exists(File):
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
- else:
- File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
- self._CustomMakefile[MakeType] = File
- return self._CustomMakefile
-
- ## Return the directory of the makefile
- #
- # @retval string The directory string of module's makefile
- #
- def _GetMakeFileDir(self):
- return self.BuildDir
-
- ## Return build command string
- #
- # @retval string Build command string
- #
- def _GetBuildCommand(self):
- return self.PlatformInfo.BuildCommand
-
- ## Get object list of all packages the module and its dependent libraries belong to
- #
- # @retval list The list of package object
- #
- def _GetDerivedPackageList(self):
- PackageList = []
- for M in [self.Module] + self.DependentLibraryList:
- for Package in M.Packages:
- if Package in PackageList:
- continue
- PackageList.append(Package)
- return PackageList
-
- ## Get the depex string
- #
- # @return : a string containing all depex expressions.
- def _GetDepexExpresionString(self):
- DepexStr = ''
- DepexList = []
- ## DPX_SOURCE IN Define section.
- if self.Module.DxsFile:
- return DepexStr
- for M in [self.Module] + self.DependentLibraryList:
- Filename = M.MetaFile.Path
- InfObj = InfSectionParser.InfSectionParser(Filename)
- DepexExpresionList = InfObj.GetDepexExpresionList()
- for DepexExpresion in DepexExpresionList:
- for key in DepexExpresion.keys():
- Arch, ModuleType = key
- DepexExpr = [x for x in DepexExpresion[key] if not str(x).startswith('#')]
- # If the type of the module being built is USER_DEFINED, all the different
- # DEPEX section tags are copied into the as-built INF file as separate
- # DEPEX section tags
- if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
- if (Arch.upper() == self.Arch.upper()) and (ModuleType.upper() != TAB_ARCH_COMMON):
- DepexList.append({(Arch, ModuleType): DepexExpr})
- else:
- if Arch.upper() == TAB_ARCH_COMMON or \
- (Arch.upper() == self.Arch.upper() and \
- ModuleType.upper() in [TAB_ARCH_COMMON, self.ModuleType.upper()]):
- DepexList.append({(Arch, ModuleType): DepexExpr})
-
- # the type of the module being built is USER_DEFINED.
- if self.ModuleType.upper() == SUP_MODULE_USER_DEFINED:
- for Depex in DepexList:
- for key in Depex.keys():
- DepexStr += '[Depex.%s.%s]\n' % key
- DepexStr += '\n'.join(['# '+ val for val in Depex[key]])
- DepexStr += '\n\n'
- if not DepexStr:
- return '[Depex.%s]\n' % self.Arch
- return DepexStr
-
- # the type of the module being built is not USER_DEFINED.
- Count = 0
- for Depex in DepexList:
- Count += 1
- if DepexStr != '':
- DepexStr += ' AND '
- DepexStr += '('
- for D in Depex.values():
- DepexStr += ' '.join([val for val in D])
- Index = DepexStr.find('END')
- if Index > -1 and Index == len(DepexStr) - 3:
- DepexStr = DepexStr[:-3]
- DepexStr = DepexStr.strip()
- DepexStr += ')'
- if Count == 1:
- DepexStr = DepexStr.lstrip('(').rstrip(')').strip()
- if not DepexStr:
- return '[Depex.%s]\n' % self.Arch
- return '[Depex.%s]\n# ' % self.Arch + DepexStr
-
- ## Merge dependency expression
- #
- # @retval list The token list of the dependency expression after parsing
- #
- def _GetDepexTokenList(self):
- if self._DepexList == None:
- self._DepexList = {}
- if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
- return self._DepexList
-
- self._DepexList[self.ModuleType] = []
-
- for ModuleType in self._DepexList:
- DepexList = self._DepexList[ModuleType]
- #
- # Append depex from dependent libraries, if it is not a "BEFORE" or "AFTER" expression
- #
- for M in [self.Module] + self.DependentLibraryList:
- Inherited = False
- for D in M.Depex[self.Arch, ModuleType]:
- if DepexList != []:
- DepexList.append('AND')
- DepexList.append('(')
- DepexList.extend(D)
- if DepexList[-1] == 'END': # no need for an END at this time
- DepexList.pop()
- DepexList.append(')')
- Inherited = True
- if Inherited:
- EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
- if 'BEFORE' in DepexList or 'AFTER' in DepexList:
- break
- if len(DepexList) > 0:
- EdkLogger.verbose('')
- return self._DepexList
-
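# [Editor's note -- illustrative sketch, not part of the deleted file] How the per-library
# dependency expressions are merged above: each contributing DEPEX token list is wrapped in
# parentheses, stripped of its trailing END, and the wrapped groups are joined with AND.
# The function name is hypothetical.
def merge_depex(token_lists):
    merged = []
    for tokens in token_lists:
        if merged:
            merged.append("AND")
        merged.append("(")
        merged.extend(tokens[:-1] if tokens and tokens[-1] == "END" else tokens)
        merged.append(")")
    return merged

# Example: merge_depex([["gEfiPcdProtocolGuid", "END"], ["gEfiHiiDatabaseProtocolGuid", "END"]])
# returns ['(', 'gEfiPcdProtocolGuid', ')', 'AND', '(', 'gEfiHiiDatabaseProtocolGuid', ')'].
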
- ## Merge dependency expression
- #
- # @retval list The token list of the dependency expression after parsing
- #
- def _GetDepexExpressionTokenList(self):
- if self._DepexExpressionList == None:
- self._DepexExpressionList = {}
- if self.DxsFile or self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
- return self._DepexExpressionList
-
- self._DepexExpressionList[self.ModuleType] = ''
-
- for ModuleType in self._DepexExpressionList:
- DepexExpressionList = self._DepexExpressionList[ModuleType]
- #
- # Append depex from dependent libraries, if it is not a "BEFORE" or "AFTER" expression
- #
- for M in [self.Module] + self.DependentLibraryList:
- Inherited = False
- for D in M.DepexExpression[self.Arch, ModuleType]:
- if DepexExpressionList != '':
- DepexExpressionList += ' AND '
- DepexExpressionList += '('
- DepexExpressionList += D
- DepexExpressionList = DepexExpressionList.rstrip('END').strip()
- DepexExpressionList += ')'
- Inherited = True
- if Inherited:
- EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexExpressionList))
- if 'BEFORE' in DepexExpressionList or 'AFTER' in DepexExpressionList:
- break
- if len(DepexExpressionList) > 0:
- EdkLogger.verbose('')
- self._DepexExpressionList[ModuleType] = DepexExpressionList
- return self._DepexExpressionList
-
-    ## Return the list of specification versions required for the module
-    #
-    # @retval list The list of specifications defined in the module file
- #
- def _GetSpecification(self):
- return self.Module.Specification
-
- ## Tool option for the module build
- #
- # @param PlatformInfo The object of PlatformBuildInfo
- # @retval dict The dict containing valid options
- #
- def _GetModuleBuildOption(self):
- if self._BuildOption == None:
- self._BuildOption, self.BuildRuleOrder = self.PlatformInfo.ApplyBuildOption(self.Module)
- if self.BuildRuleOrder:
- self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
- return self._BuildOption
-
- ## Get include path list from tool option for the module build
- #
- # @retval list The include path list
- #
- def _GetBuildOptionIncPathList(self):
- if self._BuildOptionIncPathList == None:
- #
-            # Regular expression for finding include directories; the difference between MSFT and INTEL/GCC/RVCT
-            # is that the former uses /I while the latter use -I to specify include directories
- #
- if self.PlatformInfo.ToolChainFamily in ('MSFT'):
- gBuildOptIncludePattern = re.compile(r"(?:.*?)/I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
- elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC', 'RVCT'):
- gBuildOptIncludePattern = re.compile(r"(?:.*?)-I[ \t]*([^ ]*)", re.MULTILINE | re.DOTALL)
- else:
- #
-                # New ToolChainFamily; it is unknown whether there is an option to specify include directories
- #
- self._BuildOptionIncPathList = []
- return self._BuildOptionIncPathList
-
- BuildOptionIncPathList = []
- for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
- Attr = 'FLAGS'
- try:
- FlagOption = self.BuildOption[Tool][Attr]
- except KeyError:
- FlagOption = ''
-
- if self.PlatformInfo.ToolChainFamily != 'RVCT':
- IncPathList = [NormPath(Path, self.Macros) for Path in gBuildOptIncludePattern.findall(FlagOption)]
- else:
- #
-                    # RVCT may specify a list of directories separated by commas
- #
- IncPathList = []
- for Path in gBuildOptIncludePattern.findall(FlagOption):
- PathList = GetSplitList(Path, TAB_COMMA_SPLIT)
- IncPathList += [NormPath(PathEntry, self.Macros) for PathEntry in PathList]
-
- #
-                # EDK II modules must only reference header files from the packages they depend on or
-                # from within the module's own directory tree. Report an error on violation.
- #
- if self.AutoGenVersion >= 0x00010005 and len(IncPathList) > 0:
- for Path in IncPathList:
- if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
- ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
- EdkLogger.error("build",
- PARAMETER_INVALID,
- ExtraData=ErrMsg,
- File=str(self.MetaFile))
-
-
- BuildOptionIncPathList += IncPathList
-
- self._BuildOptionIncPathList = BuildOptionIncPathList
-
- return self._BuildOptionIncPathList
-
- ## Return a list of files which can be built from source
- #
- # What kind of files can be built is determined by build rules in
- # $(CONF_DIRECTORY)/build_rule.txt and toolchain family.
- #
- def _GetSourceFileList(self):
- if self._SourceFileList == None:
- self._SourceFileList = []
- for F in self.Module.Sources:
- # match tool chain
- if F.TagName not in ("", "*", self.ToolChain):
- EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
- "but [%s] is needed" % (F.TagName, str(F), self.ToolChain))
- continue
- # match tool chain family
- if F.ToolChainFamily not in ("", "*", self.ToolChainFamily):
- EdkLogger.debug(
- EdkLogger.DEBUG_0,
- "The file [%s] must be built by tools of [%s], " \
- "but current toolchain family is [%s]" \
- % (str(F), F.ToolChainFamily, self.ToolChainFamily))
- continue
-
- # add the file path into search path list for file including
- if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:
- self.IncludePathList.insert(0, F.Dir)
- self._SourceFileList.append(F)
-
- self._MatchBuildRuleOrder(self._SourceFileList)
-
- for F in self._SourceFileList:
- self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
- return self._SourceFileList
-
- def _MatchBuildRuleOrder(self, FileList):
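-        # When several source files share the same base name and their extensions
-        # all appear in BuildRuleOrder, keep only the file whose extension has the
-        # highest priority and drop the others from FileList.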
- Order_Dict = {}
- self._GetModuleBuildOption()
- for SingleFile in FileList:
- if self.BuildRuleOrder and SingleFile.Ext in self.BuildRuleOrder and SingleFile.Ext in self.BuildRules:
- key = SingleFile.Path.split(SingleFile.Ext)[0]
- if key in Order_Dict:
- Order_Dict[key].append(SingleFile.Ext)
- else:
- Order_Dict[key] = [SingleFile.Ext]
-
- RemoveList = []
- for F in Order_Dict:
- if len(Order_Dict[F]) > 1:
- Order_Dict[F].sort(key=lambda i: self.BuildRuleOrder.index(i))
- for Ext in Order_Dict[F][1:]:
- RemoveList.append(F + Ext)
-
- for item in RemoveList:
- FileList.remove(item)
-
- return FileList
-
- ## Return the list of unicode files
- def _GetUnicodeFileList(self):
- if self._UnicodeFileList == None:
- if TAB_UNICODE_FILE in self.FileTypes:
- self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
- else:
- self._UnicodeFileList = []
- return self._UnicodeFileList
-
- ## Return the list of vfr files
- def _GetVfrFileList(self):
- if self._VfrFileList == None:
- if TAB_VFR_FILE in self.FileTypes:
- self._VfrFileList = self.FileTypes[TAB_VFR_FILE]
- else:
- self._VfrFileList = []
- return self._VfrFileList
-
- ## Return the list of Image Definition files
- def _GetIdfFileList(self):
- if self._IdfFileList == None:
- if TAB_IMAGE_FILE in self.FileTypes:
- self._IdfFileList = self.FileTypes[TAB_IMAGE_FILE]
- else:
- self._IdfFileList = []
- return self._IdfFileList
-
- ## Return a list of files which can be built from binary
- #
-    #  "Building" a binary file just copies it to the build directory.
- #
- # @retval list The list of files which can be built later
- #
- def _GetBinaryFiles(self):
- if self._BinaryFileList == None:
- self._BinaryFileList = []
- for F in self.Module.Binaries:
- if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
- continue
- self._BinaryFileList.append(F)
- self._ApplyBuildRule(F, F.Type)
- return self._BinaryFileList
-
- def _GetBuildRules(self):
- if self._BuildRules == None:
- BuildRules = {}
- BuildRuleDatabase = self.PlatformInfo.BuildRule
- for Type in BuildRuleDatabase.FileTypeList:
- #first try getting build rule by BuildRuleFamily
- RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
- if not RuleObject:
-                    # the build type is usually the same as the module type, but not always; fall back to the module type
- if self.ModuleType != self.BuildType:
- RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
- #second try getting build rule by ToolChainFamily
- if not RuleObject:
- RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
- if not RuleObject:
-                        # the build type is usually the same as the module type, but not always; fall back to the module type
- if self.ModuleType != self.BuildType:
- RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
- if not RuleObject:
- continue
- RuleObject = RuleObject.Instantiate(self.Macros)
- BuildRules[Type] = RuleObject
- for Ext in RuleObject.SourceFileExtList:
- BuildRules[Ext] = RuleObject
- self._BuildRules = BuildRules
- return self._BuildRules
-
- def _ApplyBuildRule(self, File, FileType):
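-        # Walk the build-rule chain for the given file: apply the matching rule,
-        # feed its outputs back in as new sources, and repeat until no further rule
-        # applies, collecting intermediate and final build targets along the way.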
- if self._BuildTargets == None:
- self._IntroBuildTargetList = set()
- self._FinalBuildTargetList = set()
- self._BuildTargets = {}
- self._FileTypes = {}
-
- SubDirectory = os.path.join(self.OutputDir, File.SubDir)
- if not os.path.exists(SubDirectory):
- CreateDirectory(SubDirectory)
- LastTarget = None
- RuleChain = []
- SourceList = [File]
- Index = 0
- #
- # Make sure to get build rule order value
- #
- self._GetModuleBuildOption()
-
- while Index < len(SourceList):
- Source = SourceList[Index]
- Index = Index + 1
-
- if Source != File:
- CreateDirectory(Source.Dir)
-
- if File.IsBinary and File == Source and self._BinaryFileList != None and File in self._BinaryFileList:
- # Skip all files that are not binary libraries
- if not self.IsLibrary:
- continue
- RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
- elif FileType in self.BuildRules:
- RuleObject = self.BuildRules[FileType]
- elif Source.Ext in self.BuildRules:
- RuleObject = self.BuildRules[Source.Ext]
- else:
- # stop at no more rules
- if LastTarget:
- self._FinalBuildTargetList.add(LastTarget)
- break
-
- FileType = RuleObject.SourceFileType
- if FileType not in self._FileTypes:
- self._FileTypes[FileType] = set()
- self._FileTypes[FileType].add(Source)
-
- # stop at STATIC_LIBRARY for library
- if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
- if LastTarget:
- self._FinalBuildTargetList.add(LastTarget)
- break
-
- Target = RuleObject.Apply(Source, self.BuildRuleOrder)
- if not Target:
- if LastTarget:
- self._FinalBuildTargetList.add(LastTarget)
- break
- elif not Target.Outputs:
- # Only do build for target with outputs
- self._FinalBuildTargetList.add(Target)
-
- if FileType not in self._BuildTargets:
- self._BuildTargets[FileType] = set()
- self._BuildTargets[FileType].add(Target)
-
- if not Source.IsBinary and Source == File:
- self._IntroBuildTargetList.add(Target)
-
- # to avoid cyclic rule
- if FileType in RuleChain:
- break
-
- RuleChain.append(FileType)
- SourceList.extend(Target.Outputs)
- LastTarget = Target
- FileType = TAB_UNKNOWN_FILE
-
- def _GetTargets(self):
- if self._BuildTargets == None:
- self._IntroBuildTargetList = set()
- self._FinalBuildTargetList = set()
- self._BuildTargets = {}
- self._FileTypes = {}
-
- #TRICK: call _GetSourceFileList to apply build rule for source files
- if self.SourceFileList:
- pass
-
- #TRICK: call _GetBinaryFileList to apply build rule for binary files
- if self.BinaryFileList:
- pass
-
- return self._BuildTargets
-
- def _GetIntroTargetList(self):
- self._GetTargets()
- return self._IntroBuildTargetList
-
- def _GetFinalTargetList(self):
- self._GetTargets()
- return self._FinalBuildTargetList
-
- def _GetFileTypes(self):
- self._GetTargets()
- return self._FileTypes
-
- ## Get the list of package object the module depends on
- #
- # @retval list The package object list
- #
- def _GetDependentPackageList(self):
- return self.Module.Packages
-
-    ## Return the list of auto-generated code files
-    #
-    # @retval list The list of auto-generated files
- #
- def _GetAutoGenFileList(self):
- UniStringAutoGenC = True
- IdfStringAutoGenC = True
- UniStringBinBuffer = StringIO()
- IdfGenBinBuffer = StringIO()
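-        # For UEFI_HII modules the string and image data is not embedded in AutoGen.c;
-        # it is emitted as separate binary files instead.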
- if self.BuildType == 'UEFI_HII':
- UniStringAutoGenC = False
- IdfStringAutoGenC = False
- if self._AutoGenFileList == None:
- self._AutoGenFileList = {}
- AutoGenC = TemplateString()
- AutoGenH = TemplateString()
- StringH = TemplateString()
- StringIdf = TemplateString()
- GenC.CreateCode(self, AutoGenC, AutoGenH, StringH, UniStringAutoGenC, UniStringBinBuffer, StringIdf, IdfStringAutoGenC, IdfGenBinBuffer)
- #
- # AutoGen.c is generated if there are library classes in inf, or there are object files
- #
- if str(AutoGenC) != "" and (len(self.Module.LibraryClasses) > 0
- or TAB_OBJECT_FILE in self.FileTypes):
- AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
- self._AutoGenFileList[AutoFile] = str(AutoGenC)
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if str(AutoGenH) != "":
- AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
- self._AutoGenFileList[AutoFile] = str(AutoGenH)
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if str(StringH) != "":
- AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
- self._AutoGenFileList[AutoFile] = str(StringH)
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None and UniStringBinBuffer.getvalue() != "":
- AutoFile = PathClass(gAutoGenStringFormFileName % {"module_name":self.Name}, self.OutputDir)
- self._AutoGenFileList[AutoFile] = UniStringBinBuffer.getvalue()
- AutoFile.IsBinary = True
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if UniStringBinBuffer != None:
- UniStringBinBuffer.close()
- if str(StringIdf) != "":
- AutoFile = PathClass(gAutoGenImageDefFileName % {"module_name":self.Name}, self.DebugDir)
- self._AutoGenFileList[AutoFile] = str(StringIdf)
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None and IdfGenBinBuffer.getvalue() != "":
- AutoFile = PathClass(gAutoGenIdfFileName % {"module_name":self.Name}, self.OutputDir)
- self._AutoGenFileList[AutoFile] = IdfGenBinBuffer.getvalue()
- AutoFile.IsBinary = True
- self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
- if IdfGenBinBuffer != None:
- IdfGenBinBuffer.close()
- return self._AutoGenFileList
-
-    ## Return the list of library modules explicitly or implicitly used by this module
- def _GetLibraryList(self):
- if self._DependentLibraryList == None:
- # only merge library classes and PCD for non-library module
- if self.IsLibrary:
- self._DependentLibraryList = []
- else:
- if self.AutoGenVersion < 0x00010005:
- self._DependentLibraryList = self.PlatformInfo.ResolveLibraryReference(self.Module)
- else:
- self._DependentLibraryList = self.PlatformInfo.ApplyLibraryInstance(self.Module)
- return self._DependentLibraryList
-
- @staticmethod
- def UpdateComments(Recver, Src):
- for Key in Src:
- if Key not in Recver:
- Recver[Key] = []
- Recver[Key].extend(Src[Key])
- ## Get the list of PCDs from current module
- #
- # @retval list The list of PCD
- #
- def _GetModulePcdList(self):
- if self._ModulePcdList == None:
- # apply PCD settings from platform
- self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds)
- self.UpdateComments(self._PcdComments, self.Module.PcdComments)
- return self._ModulePcdList
-
- ## Get the list of PCDs from dependent libraries
- #
- # @retval list The list of PCD
- #
- def _GetLibraryPcdList(self):
- if self._LibraryPcdList == None:
- Pcds = sdict()
- if not self.IsLibrary:
- # get PCDs from dependent libraries
- for Library in self.DependentLibraryList:
- self.UpdateComments(self._PcdComments, Library.PcdComments)
- for Key in Library.Pcds:
- # skip duplicated PCDs
- if Key in self.Module.Pcds or Key in Pcds:
- continue
- Pcds[Key] = copy.copy(Library.Pcds[Key])
- # apply PCD settings from platform
- self._LibraryPcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, Pcds)
- else:
- self._LibraryPcdList = []
- return self._LibraryPcdList
-
- ## Get the GUID value mapping
- #
- # @retval dict The mapping between GUID cname and its value
- #
- def _GetGuidList(self):
- if self._GuidList == None:
- self._GuidList = sdict()
- self._GuidList.update(self.Module.Guids)
- for Library in self.DependentLibraryList:
- self._GuidList.update(Library.Guids)
- self.UpdateComments(self._GuidComments, Library.GuidComments)
- self.UpdateComments(self._GuidComments, self.Module.GuidComments)
- return self._GuidList
-
- def GetGuidsUsedByPcd(self):
- if self._GuidsUsedByPcd == None:
- self._GuidsUsedByPcd = sdict()
- self._GuidsUsedByPcd.update(self.Module.GetGuidsUsedByPcd())
- for Library in self.DependentLibraryList:
- self._GuidsUsedByPcd.update(Library.GetGuidsUsedByPcd())
- return self._GuidsUsedByPcd
- ## Get the protocol value mapping
- #
- # @retval dict The mapping between protocol cname and its value
- #
- def _GetProtocolList(self):
- if self._ProtocolList == None:
- self._ProtocolList = sdict()
- self._ProtocolList.update(self.Module.Protocols)
- for Library in self.DependentLibraryList:
- self._ProtocolList.update(Library.Protocols)
- self.UpdateComments(self._ProtocolComments, Library.ProtocolComments)
- self.UpdateComments(self._ProtocolComments, self.Module.ProtocolComments)
- return self._ProtocolList
-
- ## Get the PPI value mapping
- #
- # @retval dict The mapping between PPI cname and its value
- #
- def _GetPpiList(self):
- if self._PpiList == None:
- self._PpiList = sdict()
- self._PpiList.update(self.Module.Ppis)
- for Library in self.DependentLibraryList:
- self._PpiList.update(Library.Ppis)
- self.UpdateComments(self._PpiComments, Library.PpiComments)
- self.UpdateComments(self._PpiComments, self.Module.PpiComments)
- return self._PpiList
-
-    ## Get the list of include search paths
-    #
-    # @retval list The list of include search paths
- #
- def _GetIncludePathList(self):
- if self._IncludePathList == None:
- self._IncludePathList = []
- if self.AutoGenVersion < 0x00010005:
- for Inc in self.Module.Includes:
- if Inc not in self._IncludePathList:
- self._IncludePathList.append(Inc)
- # for Edk modules
- Inc = path.join(Inc, self.Arch.capitalize())
- if os.path.exists(Inc) and Inc not in self._IncludePathList:
- self._IncludePathList.append(Inc)
-                # An EDK module needs DEBUG_DIR at the end of the search path and should not always use SOURCE_DIR
- self._IncludePathList.append(self.DebugDir)
- else:
- self._IncludePathList.append(self.MetaFile.Dir)
- self._IncludePathList.append(self.DebugDir)
-
- for Package in self.Module.Packages:
- PackageDir = mws.join(self.WorkspaceDir, Package.MetaFile.Dir)
- if PackageDir not in self._IncludePathList:
- self._IncludePathList.append(PackageDir)
- IncludesList = Package.Includes
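-                # Private include paths of a package are only visible to modules located
-                # inside that package; filter them out for all other modules.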
- if Package._PrivateIncludes:
- if not self.MetaFile.Path.startswith(PackageDir):
- IncludesList = list(set(Package.Includes).difference(set(Package._PrivateIncludes)))
- for Inc in IncludesList:
- if Inc not in self._IncludePathList:
- self._IncludePathList.append(str(Inc))
- return self._IncludePathList
-
- def _GetIncludePathLength(self):
- self._IncludePathLength = 0
- if self._IncludePathList:
- for inc in self._IncludePathList:
- self._IncludePathLength += len(' ' + inc)
- return self._IncludePathLength
-
-    ## Get HII EX PCDs which may be used by VFR
-    #
-    #  An efivarstore used by VFR may relate to HII EX PCDs.
-    #  Get the variable name and GUID from the efivarstore and the HII EX PCD.
-    #  List the HII EX PCDs in the As Built INF if both name and GUID match.
- #
- # @retval list HII EX PCDs
- #
- def _GetPcdsMaybeUsedByVfr(self):
- if not self.SourceFileList:
- return []
-
- NameGuids = []
- for SrcFile in self.SourceFileList:
- if SrcFile.Ext.lower() != '.vfr':
- continue
- Vfri = os.path.join(self.OutputDir, SrcFile.BaseName + '.i')
- if not os.path.exists(Vfri):
- continue
- VfriFile = open(Vfri, 'r')
- Content = VfriFile.read()
- VfriFile.close()
- Pos = Content.find('efivarstore')
- while Pos != -1:
- #
-                # Make sure 'efivarstore' is the start of an efivarstore statement,
-                # in case the value of 'name' (name = efivarstore) is itself 'efivarstore'
- #
- Index = Pos - 1
- while Index >= 0 and Content[Index] in ' \t\r\n':
- Index -= 1
- if Index >= 0 and Content[Index] != ';':
- Pos = Content.find('efivarstore', Pos + len('efivarstore'))
- continue
- #
- # 'efivarstore' must be followed by name and guid
- #
- Name = gEfiVarStoreNamePattern.search(Content, Pos)
- if not Name:
- break
- Guid = gEfiVarStoreGuidPattern.search(Content, Pos)
- if not Guid:
- break
- NameArray = ConvertStringToByteArray('L"' + Name.group(1) + '"')
- NameGuids.append((NameArray, GuidStructureStringToGuidString(Guid.group(1))))
- Pos = Content.find('efivarstore', Name.end())
- if not NameGuids:
- return []
- HiiExPcds = []
- for Pcd in self.PlatformInfo.Platform.Pcds.values():
- if Pcd.Type != TAB_PCDS_DYNAMIC_EX_HII:
- continue
- for SkuName in Pcd.SkuInfoList:
- SkuInfo = Pcd.SkuInfoList[SkuName]
- Name = ConvertStringToByteArray(SkuInfo.VariableName)
- Value = GuidValue(SkuInfo.VariableGuid, self.PlatformInfo.PackageList, self.MetaFile.Path)
- if not Value:
- continue
- Guid = GuidStructureStringToGuidString(Value)
- if (Name, Guid) in NameGuids and Pcd not in HiiExPcds:
- HiiExPcds.append(Pcd)
- break
-
- return HiiExPcds
-
- def _GenOffsetBin(self):
- VfrUniBaseName = {}
- for SourceFile in self.Module.Sources:
- if SourceFile.Type.upper() == ".VFR" :
- #
- # search the .map file to find the offset of vfr binary in the PE32+/TE file.
- #
- VfrUniBaseName[SourceFile.BaseName] = (SourceFile.BaseName + "Bin")
- if SourceFile.Type.upper() == ".UNI" :
- #
- # search the .map file to find the offset of Uni strings binary in the PE32+/TE file.
- #
- VfrUniBaseName["UniOffsetName"] = (self.Name + "Strings")
-
- if len(VfrUniBaseName) == 0:
- return None
- MapFileName = os.path.join(self.OutputDir, self.Name + ".map")
- EfiFileName = os.path.join(self.OutputDir, self.Name + ".efi")
- VfrUniOffsetList = GetVariableOffset(MapFileName, EfiFileName, VfrUniBaseName.values())
- if not VfrUniOffsetList:
- return None
-
- OutputName = '%sOffset.bin' % self.Name
- UniVfrOffsetFileName = os.path.join( self.OutputDir, OutputName)
-
- try:
- fInputfile = open(UniVfrOffsetFileName, "wb+", 0)
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, "File open failed for %s" % UniVfrOffsetFileName,None)
-
-        # Use an instance of StringIO to cache data
- fStringIO = StringIO('')
-
- for Item in VfrUniOffsetList:
- if (Item[0].find("Strings") != -1):
- #
- # UNI offset in image.
- # GUID + Offset
- # { 0x8913c5e0, 0x33f6, 0x4d86, { 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66 } }
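-                # (stored below as its little-endian byte sequence)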
- #
- UniGuid = [0xe0, 0xc5, 0x13, 0x89, 0xf6, 0x33, 0x86, 0x4d, 0x9b, 0xf1, 0x43, 0xef, 0x89, 0xfc, 0x6, 0x66]
- UniGuid = [chr(ItemGuid) for ItemGuid in UniGuid]
- fStringIO.write(''.join(UniGuid))
- UniValue = pack ('Q', int (Item[1], 16))
- fStringIO.write (UniValue)
- else:
- #
- # VFR binary offset in image.
- # GUID + Offset
- # { 0xd0bc7cb4, 0x6a47, 0x495f, { 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2 } };
- #
- VfrGuid = [0xb4, 0x7c, 0xbc, 0xd0, 0x47, 0x6a, 0x5f, 0x49, 0xaa, 0x11, 0x71, 0x7, 0x46, 0xda, 0x6, 0xa2]
- VfrGuid = [chr(ItemGuid) for ItemGuid in VfrGuid]
- fStringIO.write(''.join(VfrGuid))
- type (Item[1])
- VfrValue = pack ('Q', int (Item[1], 16))
- fStringIO.write (VfrValue)
- #
- # write data into file.
- #
- try :
- fInputfile.write (fStringIO.getvalue())
- except:
- EdkLogger.error("build", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the "
- "file been locked or using by other applications." %UniVfrOffsetFileName,None)
-
- fStringIO.close ()
- fInputfile.close ()
- return OutputName
-
-    ## Create the As Built INF file for the module
- #
- def CreateAsBuiltInf(self):
- if self.IsAsBuiltInfCreated:
- return
-
- # Skip the following code for EDK I inf
- if self.AutoGenVersion < 0x00010005:
- return
-
- # Skip the following code for libraries
- if self.IsLibrary:
- return
-
- # Skip the following code for modules with no source files
- if self.SourceFileList == None or self.SourceFileList == []:
- return
-
- # Skip the following code for modules without any binary files
- if self.BinaryFileList <> None and self.BinaryFileList <> []:
- return
-
-        ### TODO: How to handle mixed source and binary modules
-
- # Find all DynamicEx and PatchableInModule PCDs used by this module and dependent libraries
- # Also find all packages that the DynamicEx PCDs depend on
- Pcds = []
- PatchablePcds = []
- Packages = []
- PcdCheckList = []
- PcdTokenSpaceList = []
- for Pcd in self.ModulePcdList + self.LibraryPcdList:
- if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- PatchablePcds += [Pcd]
- PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'PatchableInModule'))
- elif Pcd.Type in GenC.gDynamicExPcd:
- if Pcd not in Pcds:
- Pcds += [Pcd]
- PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'DynamicEx'))
- PcdCheckList.append((Pcd.TokenCName, Pcd.TokenSpaceGuidCName, 'Dynamic'))
- PcdTokenSpaceList.append(Pcd.TokenSpaceGuidCName)
- GuidList = sdict()
- GuidList.update(self.GuidList)
- for TokenSpace in self.GetGuidsUsedByPcd():
-            # If the token space is not referenced by a patchable or Ex PCD, remove the GUID from the GUID list.
-            # The GUIDs in the GUIDs section should really be the GUIDs in the source INF or those referenced by Ex and patchable PCDs.
- if TokenSpace not in PcdTokenSpaceList and TokenSpace in GuidList:
- GuidList.pop(TokenSpace)
- CheckList = (GuidList, self.PpiList, self.ProtocolList, PcdCheckList)
- for Package in self.DerivedPackageList:
- if Package in Packages:
- continue
- BeChecked = (Package.Guids, Package.Ppis, Package.Protocols, Package.Pcds)
- Found = False
- for Index in range(len(BeChecked)):
- for Item in CheckList[Index]:
- if Item in BeChecked[Index]:
- Packages += [Package]
- Found = True
- break
- if Found: break
-
- VfrPcds = self._GetPcdsMaybeUsedByVfr()
- for Pkg in self.PlatformInfo.PackageList:
- if Pkg in Packages:
- continue
- for VfrPcd in VfrPcds:
- if ((VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, 'DynamicEx') in Pkg.Pcds or
- (VfrPcd.TokenCName, VfrPcd.TokenSpaceGuidCName, 'Dynamic') in Pkg.Pcds):
- Packages += [Pkg]
- break
-
- ModuleType = self.ModuleType
- if ModuleType == 'UEFI_DRIVER' and self.DepexGenerated:
- ModuleType = 'DXE_DRIVER'
-
- DriverType = ''
- if self.PcdIsDriver != '':
- DriverType = self.PcdIsDriver
-
- Guid = self.Guid
- MDefs = self.Module.Defines
-
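-        # Collect the values that will be substituted into the gAsBuiltInfHeaderString
-        # template when the As Built INF is written out below.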
- AsBuiltInfDict = {
- 'module_name' : self.Name,
- 'module_guid' : Guid,
- 'module_module_type' : ModuleType,
- 'module_version_string' : [MDefs['VERSION_STRING']] if 'VERSION_STRING' in MDefs else [],
- 'pcd_is_driver_string' : [],
- 'module_uefi_specification_version' : [],
- 'module_pi_specification_version' : [],
- 'module_entry_point' : self.Module.ModuleEntryPointList,
- 'module_unload_image' : self.Module.ModuleUnloadImageList,
- 'module_constructor' : self.Module.ConstructorList,
- 'module_destructor' : self.Module.DestructorList,
- 'module_shadow' : [MDefs['SHADOW']] if 'SHADOW' in MDefs else [],
- 'module_pci_vendor_id' : [MDefs['PCI_VENDOR_ID']] if 'PCI_VENDOR_ID' in MDefs else [],
- 'module_pci_device_id' : [MDefs['PCI_DEVICE_ID']] if 'PCI_DEVICE_ID' in MDefs else [],
- 'module_pci_class_code' : [MDefs['PCI_CLASS_CODE']] if 'PCI_CLASS_CODE' in MDefs else [],
- 'module_pci_revision' : [MDefs['PCI_REVISION']] if 'PCI_REVISION' in MDefs else [],
- 'module_build_number' : [MDefs['BUILD_NUMBER']] if 'BUILD_NUMBER' in MDefs else [],
- 'module_spec' : [MDefs['SPEC']] if 'SPEC' in MDefs else [],
- 'module_uefi_hii_resource_section' : [MDefs['UEFI_HII_RESOURCE_SECTION']] if 'UEFI_HII_RESOURCE_SECTION' in MDefs else [],
- 'module_uni_file' : [MDefs['MODULE_UNI_FILE']] if 'MODULE_UNI_FILE' in MDefs else [],
- 'module_arch' : self.Arch,
- 'package_item' : ['%s' % (Package.MetaFile.File.replace('\\', '/')) for Package in Packages],
- 'binary_item' : [],
- 'patchablepcd_item' : [],
- 'pcd_item' : [],
- 'protocol_item' : [],
- 'ppi_item' : [],
- 'guid_item' : [],
- 'flags_item' : [],
- 'libraryclasses_item' : []
- }
-
- if self.AutoGenVersion > int(gInfSpecVersion, 0):
- AsBuiltInfDict['module_inf_version'] = '0x%08x' % self.AutoGenVersion
- else:
- AsBuiltInfDict['module_inf_version'] = gInfSpecVersion
-
- if DriverType:
- AsBuiltInfDict['pcd_is_driver_string'] += [DriverType]
-
- if 'UEFI_SPECIFICATION_VERSION' in self.Specification:
- AsBuiltInfDict['module_uefi_specification_version'] += [self.Specification['UEFI_SPECIFICATION_VERSION']]
- if 'PI_SPECIFICATION_VERSION' in self.Specification:
- AsBuiltInfDict['module_pi_specification_version'] += [self.Specification['PI_SPECIFICATION_VERSION']]
-
- OutputDir = self.OutputDir.replace('\\', '/').strip('/')
-
- for Item in self.CodaTargetList:
- File = Item.Target.Path.replace('\\', '/').strip('/').replace(OutputDir, '').strip('/')
- if Item.Target.Ext.lower() == '.aml':
- AsBuiltInfDict['binary_item'] += ['ASL|' + File]
- elif Item.Target.Ext.lower() == '.acpi':
- AsBuiltInfDict['binary_item'] += ['ACPI|' + File]
- elif Item.Target.Ext.lower() == '.efi':
- AsBuiltInfDict['binary_item'] += ['PE32|' + self.Name + '.efi']
- else:
- AsBuiltInfDict['binary_item'] += ['BIN|' + File]
- if self.DepexGenerated:
- if self.ModuleType in ['PEIM']:
- AsBuiltInfDict['binary_item'] += ['PEI_DEPEX|' + self.Name + '.depex']
- if self.ModuleType in ['DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'UEFI_DRIVER']:
- AsBuiltInfDict['binary_item'] += ['DXE_DEPEX|' + self.Name + '.depex']
- if self.ModuleType in ['DXE_SMM_DRIVER']:
- AsBuiltInfDict['binary_item'] += ['SMM_DEPEX|' + self.Name + '.depex']
-
- Bin = self._GenOffsetBin()
- if Bin:
- AsBuiltInfDict['binary_item'] += ['BIN|%s' % Bin]
-
- for Root, Dirs, Files in os.walk(OutputDir):
- for File in Files:
- if File.lower().endswith('.pdb'):
- AsBuiltInfDict['binary_item'] += ['DISPOSABLE|' + File]
- HeaderComments = self.Module.HeaderComments
- StartPos = 0
- for Index in range(len(HeaderComments)):
- if HeaderComments[Index].find('@BinaryHeader') != -1:
- HeaderComments[Index] = HeaderComments[Index].replace('@BinaryHeader', '@file')
- StartPos = Index
- break
- AsBuiltInfDict['header_comments'] = '\n'.join(HeaderComments[StartPos:]).replace(':#', '://')
- AsBuiltInfDict['tail_comments'] = '\n'.join(self.Module.TailComments)
-
- GenList = [
- (self.ProtocolList, self._ProtocolComments, 'protocol_item'),
- (self.PpiList, self._PpiComments, 'ppi_item'),
- (GuidList, self._GuidComments, 'guid_item')
- ]
- for Item in GenList:
- for CName in Item[0]:
- Comments = ''
- if CName in Item[1]:
- Comments = '\n '.join(Item[1][CName])
- Entry = CName
- if Comments:
- Entry = Comments + '\n ' + CName
- AsBuiltInfDict[Item[2]].append(Entry)
- PatchList = parsePcdInfoFromMapFile(
- os.path.join(self.OutputDir, self.Name + '.map'),
- os.path.join(self.OutputDir, self.Name + '.efi')
- )
- if PatchList:
- for Pcd in PatchablePcds:
- TokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- for PatchPcd in PatchList:
- if TokenCName == PatchPcd[0]:
- break
- else:
- continue
- PcdValue = ''
- if Pcd.DatumType == 'BOOLEAN':
- BoolValue = Pcd.DefaultValue.upper()
- if BoolValue == 'TRUE':
- Pcd.DefaultValue = '1'
- elif BoolValue == 'FALSE':
- Pcd.DefaultValue = '0'
-
- if Pcd.DatumType != 'VOID*':
- HexFormat = '0x%02x'
- if Pcd.DatumType == 'UINT16':
- HexFormat = '0x%04x'
- elif Pcd.DatumType == 'UINT32':
- HexFormat = '0x%08x'
- elif Pcd.DatumType == 'UINT64':
- HexFormat = '0x%016x'
- PcdValue = HexFormat % int(Pcd.DefaultValue, 0)
- else:
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName)
- )
- ArraySize = int(Pcd.MaxDatumSize, 0)
- PcdValue = Pcd.DefaultValue
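-                    # A quoted string default (e.g. L"abc" or "abc") is expanded below into a
-                    # comma-separated byte array and padded with 0x00 up to MaxDatumSize.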
- if PcdValue[0] != '{':
- Unicode = False
- if PcdValue[0] == 'L':
- Unicode = True
- PcdValue = PcdValue.lstrip('L')
- PcdValue = eval(PcdValue)
- NewValue = '{'
- for Index in range(0, len(PcdValue)):
- if Unicode:
- CharVal = ord(PcdValue[Index])
- NewValue = NewValue + '0x%02x' % (CharVal & 0x00FF) + ', ' \
- + '0x%02x' % (CharVal >> 8) + ', '
- else:
- NewValue = NewValue + '0x%02x' % (ord(PcdValue[Index]) % 0x100) + ', '
- Padding = '0x00, '
- if Unicode:
- Padding = Padding * 2
- ArraySize = ArraySize / 2
- if ArraySize < (len(PcdValue) + 1):
- EdkLogger.error("build", AUTOGEN_ERROR,
- "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
- )
- if ArraySize > len(PcdValue) + 1:
- NewValue = NewValue + Padding * (ArraySize - len(PcdValue) - 1)
- PcdValue = NewValue + Padding.strip().rstrip(',') + '}'
- elif len(PcdValue.split(',')) <= ArraySize:
- PcdValue = PcdValue.rstrip('}') + ', 0x00' * (ArraySize - len(PcdValue.split(',')))
- PcdValue += '}'
- else:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName)
- )
- PcdItem = '%s.%s|%s|0x%X' % \
- (Pcd.TokenSpaceGuidCName, TokenCName, PcdValue, PatchPcd[1])
- PcdComments = ''
- if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
- PcdComments = '\n '.join(self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName])
- if PcdComments:
- PcdItem = PcdComments + '\n ' + PcdItem
- AsBuiltInfDict['patchablepcd_item'].append(PcdItem)
-
- HiiPcds = []
- for Pcd in Pcds + VfrPcds:
- PcdComments = ''
- PcdCommentList = []
- HiiInfo = ''
- SkuId = ''
- TokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- if Pcd.Type == TAB_PCDS_DYNAMIC_EX_HII:
- for SkuName in Pcd.SkuInfoList:
- SkuInfo = Pcd.SkuInfoList[SkuName]
- SkuId = SkuInfo.SkuId
- HiiInfo = '## %s|%s|%s' % (SkuInfo.VariableName, SkuInfo.VariableGuid, SkuInfo.VariableOffset)
- break
- if SkuId:
- #
- # Don't generate duplicated HII PCD
- #
- if (SkuId, Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in HiiPcds:
- continue
- else:
- HiiPcds.append((SkuId, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
- if (Pcd.TokenSpaceGuidCName, Pcd.TokenCName) in self._PcdComments:
- PcdCommentList = self._PcdComments[Pcd.TokenSpaceGuidCName, Pcd.TokenCName][:]
- if HiiInfo:
- UsageIndex = -1
- UsageStr = ''
- for Index, Comment in enumerate(PcdCommentList):
- for Usage in UsageList:
- if Comment.find(Usage) != -1:
- UsageStr = Usage
- UsageIndex = Index
- break
- if UsageIndex != -1:
- PcdCommentList[UsageIndex] = '## %s %s %s' % (UsageStr, HiiInfo, PcdCommentList[UsageIndex].replace(UsageStr, ''))
- else:
- PcdCommentList.append('## UNDEFINED ' + HiiInfo)
- PcdComments = '\n '.join(PcdCommentList)
- PcdEntry = Pcd.TokenSpaceGuidCName + '.' + TokenCName
- if PcdComments:
- PcdEntry = PcdComments + '\n ' + PcdEntry
- AsBuiltInfDict['pcd_item'] += [PcdEntry]
- for Item in self.BuildOption:
- if 'FLAGS' in self.BuildOption[Item]:
- AsBuiltInfDict['flags_item'] += ['%s:%s_%s_%s_%s_FLAGS = %s' % (self.ToolChainFamily, self.BuildTarget, self.ToolChain, self.Arch, Item, self.BuildOption[Item]['FLAGS'].strip())]
-
-        # Generate the LibraryClasses section in comments.
- for Library in self.LibraryAutoGenList:
- AsBuiltInfDict['libraryclasses_item'] += [Library.MetaFile.File.replace('\\', '/')]
-
-        # Generate the depex expression section in comments.
- AsBuiltInfDict['depexsection_item'] = ''
- DepexExpresion = self._GetDepexExpresionString()
- if DepexExpresion:
- AsBuiltInfDict['depexsection_item'] = DepexExpresion
-
- AsBuiltInf = TemplateString()
- AsBuiltInf.Append(gAsBuiltInfHeaderString.Replace(AsBuiltInfDict))
-
- SaveFileOnChange(os.path.join(self.OutputDir, self.Name + '.inf'), str(AsBuiltInf), False)
-
- self.IsAsBuiltInfCreated = True
-
- ## Create makefile for the module and its dependent libraries
- #
-    # @param CreateLibraryMakeFile Flag indicating whether or not the makefiles of
- # dependent libraries will be created
- #
- def CreateMakeFile(self, CreateLibraryMakeFile=True):
- # Ignore generating makefile when it is a binary module
- if self.IsBinaryModule:
- return
-
- if self.IsMakeFileCreated:
- return
- if self.CanSkip():
- return
-
- if not self.IsLibrary and CreateLibraryMakeFile:
- for LibraryAutoGen in self.LibraryAutoGenList:
- LibraryAutoGen.CreateMakeFile()
-
- if len(self.CustomMakefile) == 0:
- Makefile = GenMake.ModuleMakefile(self)
- else:
- Makefile = GenMake.CustomMakefile(self)
- if Makefile.Generate():
- EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" %
- (self.Name, self.Arch))
- else:
- EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" %
- (self.Name, self.Arch))
-
- self.CreateTimeStamp(Makefile)
- self.IsMakeFileCreated = True
-
- def CopyBinaryFiles(self):
- for File in self.Module.Binaries:
- SrcPath = File.Path
- DstPath = os.path.join(self.OutputDir , os.path.basename(SrcPath))
- CopyLongFilePath(SrcPath, DstPath)
- ## Create autogen code for the module and its dependent libraries
- #
-    # @param CreateLibraryCodeFile Flag indicating whether or not the code of
- # dependent libraries will be created
- #
- def CreateCodeFile(self, CreateLibraryCodeFile=True):
- if self.IsCodeFileCreated:
- return
- if self.CanSkip():
- return
-
-        # Need to generate the PcdDatabase even if the PCD driver is a binary module
- if self.IsBinaryModule and self.PcdIsDriver != '':
- CreatePcdDatabaseCode(self, TemplateString(), TemplateString())
- return
- if self.IsBinaryModule:
- if self.IsLibrary:
- self.CopyBinaryFiles()
- return
-
- if not self.IsLibrary and CreateLibraryCodeFile:
- for LibraryAutoGen in self.LibraryAutoGenList:
- LibraryAutoGen.CreateCodeFile()
-
- AutoGenList = []
- IgoredAutoGenList = []
-
- for File in self.AutoGenFileList:
- if GenC.Generate(File.Path, self.AutoGenFileList[File], File.IsBinary):
- #Ignore Edk AutoGen.c
- if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c':
- continue
-
- AutoGenList.append(str(File))
- else:
- IgoredAutoGenList.append(str(File))
-
- # Skip the following code for EDK I inf
- if self.AutoGenVersion < 0x00010005:
- return
-
- for ModuleType in self.DepexList:
- # Ignore empty [depex] section or [depex] section for "USER_DEFINED" module
- if len(self.DepexList[ModuleType]) == 0 or ModuleType == "USER_DEFINED":
- continue
-
- Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True)
- DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name}
-
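-            # A non-empty postfix notation means a .depex binary will be generated;
-            # record that so the As Built INF can later list it as a binary item.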
- if len(Dpx.PostfixNotation) <> 0:
- self.DepexGenerated = True
-
- if Dpx.Generate(path.join(self.OutputDir, DpxFile)):
- AutoGenList.append(str(DpxFile))
- else:
- IgoredAutoGenList.append(str(DpxFile))
-
- if IgoredAutoGenList == []:
- EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" %
- (" ".join(AutoGenList), self.Name, self.Arch))
- elif AutoGenList == []:
- EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" %
- (" ".join(IgoredAutoGenList), self.Name, self.Arch))
- else:
- EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" %
- (" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch))
-
- self.IsCodeFileCreated = True
- return AutoGenList
-
- ## Summarize the ModuleAutoGen objects of all libraries used by this module
- def _GetLibraryAutoGenList(self):
- if self._LibraryAutoGenList == None:
- self._LibraryAutoGenList = []
- for Library in self.DependentLibraryList:
- La = ModuleAutoGen(
- self.Workspace,
- Library.MetaFile,
- self.BuildTarget,
- self.ToolChain,
- self.Arch,
- self.PlatformInfo.MetaFile
- )
- if La not in self._LibraryAutoGenList:
- self._LibraryAutoGenList.append(La)
- for Lib in La.CodaTargetList:
- self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE)
- return self._LibraryAutoGenList
-
- ## Decide whether we can skip the ModuleAutoGen process
-    #  If any source file is newer than the module, then we cannot skip it
- #
- def CanSkip(self):
- if not os.path.exists(self.GetTimeStampPath()):
- return False
- #last creation time of the module
- DstTimeStamp = os.stat(self.GetTimeStampPath())[8]
-
- SrcTimeStamp = self.Workspace._SrcTimeStamp
- if SrcTimeStamp > DstTimeStamp:
- return False
-
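-        # The time stamp file lists one dependency path per line; regeneration is
-        # required if any listed file is missing or newer than the stamp itself.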
- with open(self.GetTimeStampPath(),'r') as f:
- for source in f:
- source = source.rstrip('\n')
- if not os.path.exists(source):
- return False
- if source not in ModuleAutoGen.TimeDict :
- ModuleAutoGen.TimeDict[source] = os.stat(source)[8]
- if ModuleAutoGen.TimeDict[source] > DstTimeStamp:
- return False
- return True
-
- def GetTimeStampPath(self):
- if self._TimeStampPath == None:
- self._TimeStampPath = os.path.join(self.MakeFileDir, 'AutoGenTimeStamp')
- return self._TimeStampPath
- def CreateTimeStamp(self, Makefile):
-
- FileSet = set()
-
- FileSet.add (self.MetaFile.Path)
-
- for SourceFile in self.Module.Sources:
- FileSet.add (SourceFile.Path)
-
- for Lib in self.DependentLibraryList:
- FileSet.add (Lib.MetaFile.Path)
-
- for f in self.AutoGenDepSet:
- FileSet.add (f.Path)
-
- if os.path.exists (self.GetTimeStampPath()):
- os.remove (self.GetTimeStampPath())
- with open(self.GetTimeStampPath(), 'w+') as file:
- for f in FileSet:
- print >> file, f
-
- Module = property(_GetModule)
- Name = property(_GetBaseName)
- Guid = property(_GetGuid)
- Version = property(_GetVersion)
- ModuleType = property(_GetModuleType)
- ComponentType = property(_GetComponentType)
- BuildType = property(_GetBuildType)
- PcdIsDriver = property(_GetPcdIsDriver)
- AutoGenVersion = property(_GetAutoGenVersion)
- Macros = property(_GetMacros)
- Specification = property(_GetSpecification)
-
- IsLibrary = property(_IsLibrary)
- IsBinaryModule = property(_IsBinaryModule)
- BuildDir = property(_GetBuildDir)
- OutputDir = property(_GetOutputDir)
- DebugDir = property(_GetDebugDir)
- MakeFileDir = property(_GetMakeFileDir)
- CustomMakefile = property(_GetCustomMakefile)
-
- IncludePathList = property(_GetIncludePathList)
- IncludePathLength = property(_GetIncludePathLength)
- AutoGenFileList = property(_GetAutoGenFileList)
- UnicodeFileList = property(_GetUnicodeFileList)
- VfrFileList = property(_GetVfrFileList)
- SourceFileList = property(_GetSourceFileList)
- BinaryFileList = property(_GetBinaryFiles) # FileType : [File List]
- Targets = property(_GetTargets)
- IntroTargetList = property(_GetIntroTargetList)
- CodaTargetList = property(_GetFinalTargetList)
- FileTypes = property(_GetFileTypes)
- BuildRules = property(_GetBuildRules)
- IdfFileList = property(_GetIdfFileList)
-
- DependentPackageList = property(_GetDependentPackageList)
- DependentLibraryList = property(_GetLibraryList)
- LibraryAutoGenList = property(_GetLibraryAutoGenList)
- DerivedPackageList = property(_GetDerivedPackageList)
-
- ModulePcdList = property(_GetModulePcdList)
- LibraryPcdList = property(_GetLibraryPcdList)
- GuidList = property(_GetGuidList)
- ProtocolList = property(_GetProtocolList)
- PpiList = property(_GetPpiList)
- DepexList = property(_GetDepexTokenList)
- DxsFile = property(_GetDxsFile)
- DepexExpressionList = property(_GetDepexExpressionTokenList)
- BuildOption = property(_GetModuleBuildOption)
- BuildOptionIncPathList = property(_GetBuildOptionIncPathList)
- BuildCommand = property(_GetBuildCommand)
-
- FixedAtBuildPcds = property(_GetFixedAtBuildPcds)
-
-# This acts like the main() function for the script, unless it is 'import'ed into another script.
-if __name__ == '__main__':
- pass
-
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py
deleted file mode 100644
index 63ed47d94b..0000000000
--- a/BaseTools/Source/Python/AutoGen/BuildEngine.py
+++ /dev/null
@@ -1,643 +0,0 @@
-## @file
-# The engine for building files
-#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import Common.LongFilePathOs as os
-import re
-import copy
-import string
-from Common.LongFilePathSupport import OpenLongFilePath as open
-
-from Common.GlobalData import *
-from Common.BuildToolError import *
-from Common.Misc import tdict, PathClass
-from Common.String import NormPath
-from Common.DataType import *
-
-import Common.EdkLogger as EdkLogger
-
-## Convert file type to file list macro name
-#
-# @param FileType The name of file type
-#
-# @retval string The name of macro
-#
-def FileListMacro(FileType):
- return "%sS" % FileType.replace("-", "_").upper()
-
-## Convert file type to list file macro name
-#
-# @param FileType The name of file type
-#
-# @retval string The name of macro
-#
-def ListFileMacro(FileType):
- return "%s_LIST" % FileListMacro(FileType)
-
-class TargetDescBlock(object):
- _Cache_ = {} # {TargetFile : TargetDescBlock object}
-
- # Factory method
- def __new__(Class, Inputs, Outputs, Commands, Dependencies):
- if Outputs[0] in Class._Cache_:
- Tdb = Class._Cache_[Outputs[0]]
- for File in Inputs:
- Tdb.AddInput(File)
- else:
- Tdb = super(TargetDescBlock, Class).__new__(Class)
- Tdb._Init(Inputs, Outputs, Commands, Dependencies)
- #Class._Cache_[Outputs[0]] = Tdb
- return Tdb
-
- def _Init(self, Inputs, Outputs, Commands, Dependencies):
- self.Inputs = Inputs
- self.Outputs = Outputs
- self.Commands = Commands
- self.Dependencies = Dependencies
- if self.Outputs:
- self.Target = self.Outputs[0]
- else:
- self.Target = None
-
- def __str__(self):
- return self.Target.Path
-
- def __hash__(self):
- return hash(self.Target.Path)
-
- def __eq__(self, Other):
- if type(Other) == type(self):
- return Other.Target.Path == self.Target.Path
- else:
- return str(Other) == self.Target.Path
-
- def AddInput(self, Input):
- if Input not in self.Inputs:
- self.Inputs.append(Input)
-
- def IsMultipleInput(self):
- return len(self.Inputs) > 1
-
- @staticmethod
- def Renew():
- TargetDescBlock._Cache_ = {}
-
-## Class for one build rule
-#
-# This represents a build rule which can give out the corresponding command list for
-# building the given source file(s). The result can be used for generating the
-# target in the makefile.
-#
-class FileBuildRule:
- INC_LIST_MACRO = "INC_LIST"
- INC_MACRO = "INC"
-
- ## constructor
- #
-    # @param Input The dictionary representing input file(s) for a rule
-    # @param Output The list representing output file(s) for a rule
- # @param Command The list containing commands to generate the output from input
- #
- def __init__(self, Type, Input, Output, Command, ExtraDependency=None):
- # The Input should not be empty
- if not Input:
- Input = []
- if not Output:
- Output = []
- if not Command:
- Command = []
-
- self.FileListMacro = FileListMacro(Type)
- self.ListFileMacro = ListFileMacro(Type)
- self.IncListFileMacro = self.INC_LIST_MACRO
-
- self.SourceFileType = Type
-        # source files not listed in "*" or "?" pattern format
- if not ExtraDependency:
- self.ExtraSourceFileList = []
- else:
- self.ExtraSourceFileList = ExtraDependency
-
- #
- # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST.
- # If found, generate a file to keep the input files used to get over the
- # limitation of command line length
- #
- self.MacroList = []
- self.CommandList = []
- for CmdLine in Command:
- self.MacroList.extend(gMacroRefPattern.findall(CmdLine))
- # replace path separator with native one
- self.CommandList.append(CmdLine)
-
- # Indicate what should be generated
- if self.FileListMacro in self.MacroList:
- self.GenFileListMacro = True
- else:
- self.GenFileListMacro = False
-
- if self.ListFileMacro in self.MacroList:
- self.GenListFile = True
- self.GenFileListMacro = True
- else:
- self.GenListFile = False
-
- if self.INC_LIST_MACRO in self.MacroList:
- self.GenIncListFile = True
- else:
- self.GenIncListFile = False
-
- # Check input files
- self.IsMultipleInput = False
- self.SourceFileExtList = []
- for File in Input:
- Base, Ext = os.path.splitext(File)
- if Base.find("*") >= 0:
- # There's "*" in the file name
- self.IsMultipleInput = True
- self.GenFileListMacro = True
- elif Base.find("?") < 0:
- # There's no "*" and "?" in file name
- self.ExtraSourceFileList.append(File)
- continue
- if Ext not in self.SourceFileExtList:
- self.SourceFileExtList.append(Ext)
-
- # Check output files
- self.DestFileList = []
- for File in Output:
- self.DestFileList.append(File)
-
- # All build targets generated by this rule for a module
- self.BuildTargets = {}
-
- ## str() function support
- #
- # @retval string
- #
- def __str__(self):
- SourceString = ""
- SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList)
- DestString = ", ".join(self.DestFileList)
- CommandString = "\n\t".join(self.CommandList)
- return "%s : %s\n\t%s" % (DestString, SourceString, CommandString)
-
- ## Check if given file extension is supported by this rule
- #
- # @param FileExt The extension of a file
- #
- # @retval True If the extension is supported
- # @retval False If the extension is not supported
- #
- def IsSupported(self, FileExt):
- return FileExt in self.SourceFileExtList
-
- def Instantiate(self, Macros={}):
- NewRuleObject = copy.copy(self)
- NewRuleObject.BuildTargets = {}
- NewRuleObject.DestFileList = []
- for File in self.DestFileList:
- NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros)))
- return NewRuleObject
-
- ## Apply the rule to given source file(s)
- #
- # @param SourceFile One file or a list of files to be built
- # @param RelativeToDir The relative path of the source file
- # @param PathSeparator Path separator
- #
-    # @retval tuple (Source file in full path, List of individual source files, Destination file, List of build commands)
- #
- def Apply(self, SourceFile, BuildRuleOrder=None):
- if not self.CommandList or not self.DestFileList:
- return None
-
- # source file
- if self.IsMultipleInput:
- SrcFileName = ""
- SrcFileBase = ""
- SrcFileExt = ""
- SrcFileDir = ""
- SrcPath = ""
- # SourceFile must be a list
- SrcFile = "$(%s)" % self.FileListMacro
- else:
- SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, SourceFile.Ext
- if SourceFile.Root:
- SrcFileDir = SourceFile.SubDir
- if SrcFileDir == "":
- SrcFileDir = "."
- else:
- SrcFileDir = "."
- SrcFile = SourceFile.Path
- SrcPath = SourceFile.Dir
-
- # destination file (the first one)
- if self.DestFileList:
- DestFile = self.DestFileList[0].Path
- DestPath = self.DestFileList[0].Dir
- DestFileName = self.DestFileList[0].Name
- DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext
- else:
- DestFile = ""
- DestPath = ""
- DestFileName = ""
- DestFileBase = ""
- DestFileExt = ""
-
- BuildRulePlaceholderDict = {
- # source file
- "src" : SrcFile,
- "s_path" : SrcPath,
- "s_dir" : SrcFileDir,
- "s_name" : SrcFileName,
- "s_base" : SrcFileBase,
- "s_ext" : SrcFileExt,
- # destination file
- "dst" : DestFile,
- "d_path" : DestPath,
- "d_name" : DestFileName,
- "d_base" : DestFileBase,
- "d_ext" : DestFileExt,
- }
-
- DstFile = []
- for File in self.DestFileList:
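-            # The template is substituted twice so that placeholders which expand to
-            # other placeholders are also resolved.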
- File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
- File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict)
- DstFile.append(PathClass(File, IsBinary=True))
-
- if DstFile[0] in self.BuildTargets:
- TargetDesc = self.BuildTargets[DstFile[0]]
- if BuildRuleOrder and SourceFile.Ext in BuildRuleOrder:
- Index = BuildRuleOrder.index(SourceFile.Ext)
- for Input in TargetDesc.Inputs:
- if Input.Ext not in BuildRuleOrder or BuildRuleOrder.index(Input.Ext) > Index:
- #
- # Command line should be regenerated since some macros are different
- #
- CommandList = self._BuildCommand(BuildRulePlaceholderDict)
- TargetDesc._Init([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
- break
- else:
- TargetDesc.AddInput(SourceFile)
- else:
- CommandList = self._BuildCommand(BuildRulePlaceholderDict)
- TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList)
- TargetDesc.ListFileMacro = self.ListFileMacro
- TargetDesc.FileListMacro = self.FileListMacro
- TargetDesc.IncListFileMacro = self.IncListFileMacro
- TargetDesc.GenFileListMacro = self.GenFileListMacro
- TargetDesc.GenListFile = self.GenListFile
- TargetDesc.GenIncListFile = self.GenIncListFile
- self.BuildTargets[DstFile[0]] = TargetDesc
- return TargetDesc
-
- def _BuildCommand(self, Macros):
- CommandList = []
- for CommandString in self.CommandList:
- CommandString = string.Template(CommandString).safe_substitute(Macros)
- CommandString = string.Template(CommandString).safe_substitute(Macros)
- CommandList.append(CommandString)
- return CommandList
-
-## Class for build rules
-#
-# BuildRule class parses rules defined in a file or passed by caller, and converts
-# the rule into FileBuildRule object.
-#
-class BuildRule:
- _SectionHeader = "SECTIONHEADER"
- _Section = "SECTION"
- _SubSectionHeader = "SUBSECTIONHEADER"
- _SubSection = "SUBSECTION"
- _InputFile = "INPUTFILE"
- _OutputFile = "OUTPUTFILE"
- _ExtraDependency = "EXTRADEPENDENCY"
- _Command = "COMMAND"
- _UnknownSection = "UNKNOWNSECTION"
-
- _SubSectionList = [_InputFile, _OutputFile, _Command]
-
- _PATH_SEP = "(+)"
- _FileTypePattern = re.compile("^[_a-zA-Z][_\-0-9a-zA-Z]*$")
- _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")],
- ["$(CP) ${src} ${dst}"], [])
-
- ## Constructor
- #
- # @param File The file containing build rules in a well defined format
- # @param Content The string list of build rules in a well defined format
- # @param LineIndex The line number from which the parsing will begin
- # @param SupportedFamily The list of supported tool chain families
- #
- def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]):
- self.RuleFile = File
- # Read build rules from file if it's not none
- if File != None:
- try:
- self.RuleContent = open(File, 'r').readlines()
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
- elif Content != None:
- self.RuleContent = Content
- else:
- EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
-
- self.SupportedToolChainFamilyList = SupportedFamily
- self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
- self.Ext2FileType = {} # {ext : file-type}
- self.FileTypeList = set()
-
- self._LineIndex = LineIndex
- self._State = ""
- self._RuleInfo = tdict(True, 2) # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}}
- self._FileType = ''
- self._BuildTypeList = []
- self._ArchList = []
- self._FamilyList = []
- self._TotalToolChainFamilySet = set()
- self._RuleObjectList = [] # FileBuildRule object list
- self._FileVersion = ""
-
- self.Parse()
-
- # some intrinsic rules
- self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, "COMMON", "COMMON", "COMMON"] = self._BinaryFileRule
- self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE)
-
- ## Parse the build rule strings
- def Parse(self):
- self._State = self._Section
- for Index in range(self._LineIndex, len(self.RuleContent)):
- # Clean up the line and replace path separator with native one
- Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep)
- self.RuleContent[Index] = Line
-
- # find the build_rule_version
- if Line and Line[0] == "#" and Line.find(TAB_BUILD_RULE_VERSION) <> -1:
- if Line.find("=") <> -1 and Line.find("=") < (len(Line) - 1) and (Line[(Line.find("=") + 1):]).split():
- self._FileVersion = (Line[(Line.find("=") + 1):]).split()[0]
- # skip empty or comment line
- if Line == "" or Line[0] == "#":
- continue
-
- # find out section header, enclosed by []
- if Line[0] == '[' and Line[-1] == ']':
- # merge last section information into rule database
- self.EndOfSection()
- self._State = self._SectionHeader
- # find out sub-section header, enclosed by <>
- elif Line[0] == '<' and Line[-1] == '>':
- if self._State != self._UnknownSection:
- self._State = self._SubSectionHeader
-
- # call section handler to parse each (sub)section
- self._StateHandler[self._State](self, Index)
- # merge last section information into rule database
- self.EndOfSection()
-
- ## Parse definitions under a section
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseSection(self, LineIndex):
- pass
-
- ## Parse definitions under a subsection
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseSubSection(self, LineIndex):
-        # currently nothing here
- pass
-
- ## Placeholder for not supported sections
- #
- # @param LineIndex The line index of build rule text
- #
- def SkipSection(self, LineIndex):
- pass
-
- ## Merge section information just got into rule database
- def EndOfSection(self):
- Database = self.RuleDatabase
-        # if there's a specific toolchain family, 'COMMON' doesn't make sense any more
- if len(self._TotalToolChainFamilySet) > 1 and 'COMMON' in self._TotalToolChainFamilySet:
- self._TotalToolChainFamilySet.remove('COMMON')
- for Family in self._TotalToolChainFamilySet:
- Input = self._RuleInfo[Family, self._InputFile]
- Output = self._RuleInfo[Family, self._OutputFile]
- Command = self._RuleInfo[Family, self._Command]
- ExtraDependency = self._RuleInfo[Family, self._ExtraDependency]
-
- BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency)
- for BuildType in self._BuildTypeList:
- for Arch in self._ArchList:
- Database[self._FileType, BuildType, Arch, Family] = BuildRule
- for FileExt in BuildRule.SourceFileExtList:
- self.Ext2FileType[FileExt] = self._FileType
-
- ## Parse section header
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseSectionHeader(self, LineIndex):
- self._RuleInfo = tdict(True, 2)
- self._BuildTypeList = []
- self._ArchList = []
- self._FamilyList = []
- self._TotalToolChainFamilySet = set()
- FileType = ''
- RuleNameList = self.RuleContent[LineIndex][1:-1].split(',')
- for RuleName in RuleNameList:
- Arch = 'COMMON'
- BuildType = 'COMMON'
- TokenList = [Token.strip().upper() for Token in RuleName.split('.')]
- # old format: Build.File-Type
- if TokenList[0] == "BUILD":
- if len(TokenList) == 1:
- EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
-
- FileType = TokenList[1]
- if FileType == '':
- EdkLogger.error("build", FORMAT_INVALID, "No file type given",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
- if self._FileTypePattern.match(FileType) == None:
- EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
- ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type")
- # new format: File-Type.Build-Type.Arch
- else:
- if FileType == '':
- FileType = TokenList[0]
- elif FileType != TokenList[0]:
- EdkLogger.error("build", FORMAT_INVALID,
- "Different file types are not allowed in the same rule section",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
- if len(TokenList) > 1:
- BuildType = TokenList[1]
- if len(TokenList) > 2:
- Arch = TokenList[2]
- if BuildType not in self._BuildTypeList:
- self._BuildTypeList.append(BuildType)
- if Arch not in self._ArchList:
- self._ArchList.append(Arch)
-
- if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1:
- EdkLogger.error("build", FORMAT_INVALID,
- "Specific build types must not be mixed with common one",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
- if 'COMMON' in self._ArchList and len(self._ArchList) > 1:
- EdkLogger.error("build", FORMAT_INVALID,
- "Specific ARCH must not be mixed with common one",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
-
- self._FileType = FileType
- self._State = self._Section
- self.FileTypeList.add(FileType)
-
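# A minimal standalone sketch (not part of the deleted file) of the new-style header
# tokenization performed by ParseSectionHeader() above: each comma-separated entry of a
# section header such as "[C-Code-File.DXE_DRIVER.IA32, C-Code-File.DXE_DRIVER.X64]"
# (an illustrative header, not necessarily one from build_rule.txt) is split on '.' into
# (FileType, BuildType, Arch), with 'COMMON' filling any omitted field.
def _SplitRuleSectionHeader(HeaderLine):
    Triples = []
    for RuleName in HeaderLine.strip()[1:-1].split(','):   # drop the enclosing '[' and ']'
        TokenList = [Token.strip().upper() for Token in RuleName.split('.')]
        FileType = TokenList[0]
        BuildType = TokenList[1] if len(TokenList) > 1 else 'COMMON'
        Arch = TokenList[2] if len(TokenList) > 2 else 'COMMON'
        Triples.append((FileType, BuildType, Arch))
    return Triples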
- ## Parse sub-section header
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseSubSectionHeader(self, LineIndex):
- SectionType = ""
- List = self.RuleContent[LineIndex][1:-1].split(',')
- FamilyList = []
- for Section in List:
- TokenList = Section.split('.')
- Type = TokenList[0].strip().upper()
-
- if SectionType == "":
- SectionType = Type
- elif SectionType != Type:
- EdkLogger.error("build", FORMAT_INVALID,
- "Two different section types are not allowed in the same sub-section",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
-
- if len(TokenList) > 1:
- Family = TokenList[1].strip().upper()
- else:
- Family = "COMMON"
-
- if Family not in FamilyList:
- FamilyList.append(Family)
-
- self._FamilyList = FamilyList
- self._TotalToolChainFamilySet.update(FamilyList)
- self._State = SectionType.upper()
- if 'COMMON' in FamilyList and len(FamilyList) > 1:
- EdkLogger.error("build", FORMAT_INVALID,
- "Specific tool chain family should not be mixed with general one",
- File=self.RuleFile, Line=LineIndex + 1,
- ExtraData=self.RuleContent[LineIndex])
- if self._State not in self._StateHandler:
- EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex + 1,
- ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex])
- ## Parse <InputFile> sub-section
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseInputFile(self, LineIndex):
- FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")]
- for ToolChainFamily in self._FamilyList:
- InputFiles = self._RuleInfo[ToolChainFamily, self._State]
- if InputFiles == None:
- InputFiles = []
- self._RuleInfo[ToolChainFamily, self._State] = InputFiles
- InputFiles.extend(FileList)
-
- ## Parse <ExtraDependency> sub-section
- #
- # @param LineIndex The line index of build rule text
- #
- def ParseCommon(self, LineIndex):
- for ToolChainFamily in self._FamilyList:
- Items = self._RuleInfo[ToolChainFamily, self._State]
- if Items == None:
- Items = []
- self._RuleInfo[ToolChainFamily, self._State] = Items
- Items.append(self.RuleContent[LineIndex])
-
- ## Get a build rule via [] operator
- #
- # @param FileExt The extension of a file
- # @param ToolChainFamily The tool chain family name
- # @param BuildVersion The build version number. "*" means any rule
-    #                               is applicable.
- #
- # @retval FileType The file type string
- # @retval FileBuildRule The object of FileBuildRule
- #
- # Key = (FileExt, ModuleType, Arch, ToolChainFamily)
- def __getitem__(self, Key):
- if not Key:
- return None
-
- if Key[0] in self.Ext2FileType:
- Type = self.Ext2FileType[Key[0]]
- elif Key[0].upper() in self.FileTypeList:
- Type = Key[0].upper()
- else:
- return None
-
- if len(Key) > 1:
- Key = (Type,) + Key[1:]
- else:
- Key = (Type,)
- return self.RuleDatabase[Key]
-
- _StateHandler = {
- _SectionHeader : ParseSectionHeader,
- _Section : ParseSection,
- _SubSectionHeader : ParseSubSectionHeader,
- _SubSection : ParseSubSection,
- _InputFile : ParseInputFile,
- _OutputFile : ParseCommon,
- _ExtraDependency : ParseCommon,
- _Command : ParseCommon,
- _UnknownSection : SkipSection,
- }
-
-# This acts like the main() function for the script, unless it is 'import'ed into another
-# script.
-if __name__ == '__main__':
- import sys
- EdkLogger.Initialize()
- if len(sys.argv) > 1:
- Br = BuildRule(sys.argv[1])
- print str(Br[".c", "DXE_DRIVER", "IA32", "MSFT"][1])
- print
- print str(Br[".c", "DXE_DRIVER", "IA32", "INTEL"][1])
- print
- print str(Br[".c", "DXE_DRIVER", "IA32", "GCC"][1])
- print
- print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1])
- print
- print str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1])
- print
- print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1])
- print
- print str(Br[".s", "SEC", "IPF", "COMMON"][1])
- print
- print str(Br[".s", "SEC"][1])
-
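The __main__ demonstration above also shows the lookup contract of BuildRule: the key is
(file extension or file type, module type, arch, tool chain family); the extension is first
mapped to a registered file type through Ext2FileType, and the wildcard-capable RuleDatabase
(a tdict keyed on file type, build type, arch and family) then resolves a matching
FileBuildRule. Below is a rough standalone sketch of that resolution order, using a plain
dict with an explicit 'COMMON' fallback chain instead of tdict; the helper name and the
exact fallback order are illustrative, not the BaseTools API.

    # Hypothetical, simplified stand-in for BuildRule.__getitem__ key resolution.
    def LookupBuildRule(RuleDatabase, Ext2FileType, FileTypeList, Key):
        FileExt, BuildType, Arch, Family = Key
        if FileExt in Ext2FileType:
            FileType = Ext2FileType[FileExt]        # e.g. ".c" -> its registered file type
        elif FileExt.upper() in FileTypeList:
            FileType = FileExt.upper()              # caller already passed a file type name
        else:
            return None
        # tdict treats 'COMMON' as a wildcard; emulate that (approximately) with an
        # explicit list of progressively more generic candidate keys.
        for Candidate in [(FileType, BuildType, Arch, Family),
                          (FileType, BuildType, 'COMMON', Family),
                          (FileType, 'COMMON', 'COMMON', Family),
                          (FileType, 'COMMON', 'COMMON', 'COMMON')]:
            if Candidate in RuleDatabase:
                return RuleDatabase[Candidate]
        return None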
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
deleted file mode 100644
index 67aaef70a1..0000000000
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ /dev/null
@@ -1,2023 +0,0 @@
-## @file
-# Routines for generating AutoGen.h and AutoGen.c
-#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-## Import Modules
-#
-import string
-import collections
-import struct
-from Common import EdkLogger
-
-from Common.BuildToolError import *
-from Common.DataType import *
-from Common.Misc import *
-from Common.String import StringToArray
-from StrGather import *
-from GenPcdDb import CreatePcdDatabaseCode
-from IdfClassObject import *
-
-## PCD type string
-gItemTypeStringDatabase = {
- TAB_PCDS_FEATURE_FLAG : 'FixedAtBuild',
- TAB_PCDS_FIXED_AT_BUILD : 'FixedAtBuild',
- TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch',
- TAB_PCDS_DYNAMIC : '',
- TAB_PCDS_DYNAMIC_DEFAULT : '',
- TAB_PCDS_DYNAMIC_VPD : '',
- TAB_PCDS_DYNAMIC_HII : '',
- TAB_PCDS_DYNAMIC_EX : '',
- TAB_PCDS_DYNAMIC_EX_DEFAULT : '',
- TAB_PCDS_DYNAMIC_EX_VPD : '',
- TAB_PCDS_DYNAMIC_EX_HII : '',
-}
-
-## Dynamic PCD types
-gDynamicPcd = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
-
-## Dynamic-ex PCD types
-gDynamicExPcd = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
-
-## Datum size
-gDatumSizeStringDatabase = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOLEAN','VOID*':'8'}
-gDatumSizeStringDatabaseH = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOL','VOID*':'PTR'}
-gDatumSizeStringDatabaseLib = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'Bool','VOID*':'Ptr'}
-
-## AutoGen File Header Templates
-gAutoGenHeaderString = TemplateString("""\
-/**
- DO NOT EDIT
- FILE auto-generated
- Module name:
- ${FileName}
- Abstract: Auto-generated ${FileName} for building module or library.
-**/
-""")
-
-gAutoGenHPrologueString = TemplateString("""
-#ifndef _${File}_${Guid}
-#define _${File}_${Guid}
-
-""")
-
-gAutoGenHCppPrologueString = """\
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-"""
-
-gAutoGenHEpilogueString = """
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif
-"""
-
-## PEI Core Entry Point Templates
-gPeiCoreEntryPointPrototype = TemplateString("""
-${BEGIN}
-VOID
-EFIAPI
-${Function} (
- IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
- IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
- IN VOID *Context
- );
-${END}
-""")
-
-gPeiCoreEntryPointString = TemplateString("""
-${BEGIN}
-VOID
-EFIAPI
-ProcessModuleEntryPointList (
- IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData,
- IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList,
- IN VOID *Context
- )
-
-{
- ${Function} (SecCoreData, PpiList, Context);
-}
-${END}
-""")
-
-
-## DXE Core Entry Point Templates
-gDxeCoreEntryPointPrototype = TemplateString("""
-${BEGIN}
-VOID
-EFIAPI
-${Function} (
- IN VOID *HobStart
- );
-${END}
-""")
-
-gDxeCoreEntryPointString = TemplateString("""
-${BEGIN}
-VOID
-EFIAPI
-ProcessModuleEntryPointList (
- IN VOID *HobStart
- )
-
-{
- ${Function} (HobStart);
-}
-${END}
-""")
-
-## PEIM Entry Point Templates
-gPeimEntryPointPrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- );
-${END}
-""")
-
-gPeimEntryPointString = [
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- )
-
-{
- return EFI_SUCCESS;
-}
-"""),
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
-${BEGIN}
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- )
-
-{
- return ${Function} (FileHandle, PeiServices);
-}
-${END}
-"""),
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- )
-
-{
- EFI_STATUS Status;
- EFI_STATUS CombinedStatus;
-
- CombinedStatus = EFI_LOAD_ERROR;
-${BEGIN}
- Status = ${Function} (FileHandle, PeiServices);
- if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) {
- CombinedStatus = Status;
- }
-${END}
- return CombinedStatus;
-}
-""")
-]
-
-## SMM_CORE Entry Point Templates
-gSmmCoreEntryPointPrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- );
-${END}
-""")
-
-gSmmCoreEntryPointString = TemplateString("""
-${BEGIN}
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
- return ${Function} (ImageHandle, SystemTable);
-}
-${END}
-""")
-
-## DXE SMM Entry Point Templates
-gDxeSmmEntryPointPrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- );
-${END}
-""")
-
-gDxeSmmEntryPointString = [
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-
-{
- return EFI_SUCCESS;
-}
-"""),
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
-static EFI_STATUS mDriverEntryPointStatus;
-
-VOID
-EFIAPI
-ExitDriver (
- IN EFI_STATUS Status
- )
-{
- if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
- mDriverEntryPointStatus = Status;
- }
- LongJump (&mJumpContext, (UINTN)-1);
- ASSERT (FALSE);
-}
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
- mDriverEntryPointStatus = EFI_LOAD_ERROR;
-
-${BEGIN}
- if (SetJump (&mJumpContext) == 0) {
- ExitDriver (${Function} (ImageHandle, SystemTable));
- ASSERT (FALSE);
- }
-${END}
-
- return mDriverEntryPointStatus;
-}
-""")
-]
-
-## UEFI Driver Entry Point Templates
-gUefiDriverEntryPointPrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- );
-${END}
-""")
-
-gUefiDriverEntryPointString = [
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
- return EFI_SUCCESS;
-}
-"""),
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-${BEGIN}
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-
-{
- return ${Function} (ImageHandle, SystemTable);
-}
-${END}
-VOID
-EFIAPI
-ExitDriver (
- IN EFI_STATUS Status
- )
-{
- if (EFI_ERROR (Status)) {
- ProcessLibraryDestructorList (gImageHandle, gST);
- }
- gBS->Exit (gImageHandle, Status, 0, NULL);
-}
-"""),
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-const UINT32 _gDxeRevision = ${PiSpecVersion};
-
-static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
-static EFI_STATUS mDriverEntryPointStatus;
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
- mDriverEntryPointStatus = EFI_LOAD_ERROR;
- ${BEGIN}
- if (SetJump (&mJumpContext) == 0) {
- ExitDriver (${Function} (ImageHandle, SystemTable));
- ASSERT (FALSE);
- }
- ${END}
- return mDriverEntryPointStatus;
-}
-
-VOID
-EFIAPI
-ExitDriver (
- IN EFI_STATUS Status
- )
-{
- if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
- mDriverEntryPointStatus = Status;
- }
- LongJump (&mJumpContext, (UINTN)-1);
- ASSERT (FALSE);
-}
-""")
-]
-
-
-## UEFI Application Entry Point Templates
-gUefiApplicationEntryPointPrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- );
-${END}
-""")
-
-gUefiApplicationEntryPointString = [
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
- return EFI_SUCCESS;
-}
-"""),
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-
-${BEGIN}
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-
-{
- return ${Function} (ImageHandle, SystemTable);
-}
-${END}
-VOID
-EFIAPI
-ExitDriver (
- IN EFI_STATUS Status
- )
-{
- if (EFI_ERROR (Status)) {
- ProcessLibraryDestructorList (gImageHandle, gST);
- }
- gBS->Exit (gImageHandle, Status, 0, NULL);
-}
-"""),
-TemplateString("""
-const UINT32 _gUefiDriverRevision = ${UefiSpecVersion};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleEntryPointList (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-
-{
- ${BEGIN}
- if (SetJump (&mJumpContext) == 0) {
- ExitDriver (${Function} (ImageHandle, SystemTable));
- ASSERT (FALSE);
- }
- ${END}
- return mDriverEntryPointStatus;
-}
-
-static BASE_LIBRARY_JUMP_BUFFER mJumpContext;
-static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR;
-
-VOID
-EFIAPI
-ExitDriver (
- IN EFI_STATUS Status
- )
-{
- if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) {
- mDriverEntryPointStatus = Status;
- }
- LongJump (&mJumpContext, (UINTN)-1);
- ASSERT (FALSE);
-}
-""")
-]
-
-## UEFI Unload Image Templates
-gUefiUnloadImagePrototype = TemplateString("""
-${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle
- );
-${END}
-""")
-
-gUefiUnloadImageString = [
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleUnloadList (
- IN EFI_HANDLE ImageHandle
- )
-{
- return EFI_SUCCESS;
-}
-"""),
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
-
-${BEGIN}
-EFI_STATUS
-EFIAPI
-ProcessModuleUnloadList (
- IN EFI_HANDLE ImageHandle
- )
-{
- return ${Function} (ImageHandle);
-}
-${END}
-"""),
-TemplateString("""
-GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count};
-
-EFI_STATUS
-EFIAPI
-ProcessModuleUnloadList (
- IN EFI_HANDLE ImageHandle
- )
-{
- EFI_STATUS Status;
-
- Status = EFI_SUCCESS;
-${BEGIN}
- if (EFI_ERROR (Status)) {
- ${Function} (ImageHandle);
- } else {
- Status = ${Function} (ImageHandle);
- }
-${END}
- return Status;
-}
-""")
-]
-
-gLibraryStructorPrototype = {
-'BASE' : TemplateString("""${BEGIN}
-RETURN_STATUS
-EFIAPI
-${Function} (
- VOID
- );${END}
-"""),
-
-'PEI' : TemplateString("""${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- );${END}
-"""),
-
-'DXE' : TemplateString("""${BEGIN}
-EFI_STATUS
-EFIAPI
-${Function} (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- );${END}
-"""),
-}
-
-gLibraryStructorCall = {
-'BASE' : TemplateString("""${BEGIN}
- Status = ${Function} ();
- ASSERT_EFI_ERROR (Status);${END}
-"""),
-
-'PEI' : TemplateString("""${BEGIN}
- Status = ${Function} (FileHandle, PeiServices);
- ASSERT_EFI_ERROR (Status);${END}
-"""),
-
-'DXE' : TemplateString("""${BEGIN}
- Status = ${Function} (ImageHandle, SystemTable);
- ASSERT_EFI_ERROR (Status);${END}
-"""),
-}
-
-## Library Constructor and Destructor Templates
-gLibraryString = {
-'BASE' : TemplateString("""
-${BEGIN}${FunctionPrototype}${END}
-
-VOID
-EFIAPI
-ProcessLibrary${Type}List (
- VOID
- )
-{
-${BEGIN} EFI_STATUS Status;
-${FunctionCall}${END}
-}
-"""),
-
-'PEI' : TemplateString("""
-${BEGIN}${FunctionPrototype}${END}
-
-VOID
-EFIAPI
-ProcessLibrary${Type}List (
- IN EFI_PEI_FILE_HANDLE FileHandle,
- IN CONST EFI_PEI_SERVICES **PeiServices
- )
-{
-${BEGIN} EFI_STATUS Status;
-${FunctionCall}${END}
-}
-"""),
-
-'DXE' : TemplateString("""
-${BEGIN}${FunctionPrototype}${END}
-
-VOID
-EFIAPI
-ProcessLibrary${Type}List (
- IN EFI_HANDLE ImageHandle,
- IN EFI_SYSTEM_TABLE *SystemTable
- )
-{
-${BEGIN} EFI_STATUS Status;
-${FunctionCall}${END}
-}
-"""),
-}
-
-gBasicHeaderFile = "Base.h"
-
-gModuleTypeHeaderFile = {
- "BASE" : [gBasicHeaderFile],
- "SEC" : ["PiPei.h", "Library/DebugLib.h"],
- "PEI_CORE" : ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"],
- "PEIM" : ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"],
- "DXE_CORE" : ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"],
- "DXE_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
- "DXE_SMM_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
- "DXE_RUNTIME_DRIVER": ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
- "DXE_SAL_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
- "UEFI_DRIVER" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"],
- "UEFI_APPLICATION" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"],
- "SMM_CORE" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiDriverEntryPoint.h"],
- "USER_DEFINED" : [gBasicHeaderFile]
-}
-
-## Generate the AutoGen internal worker macros that define a DynamicEx PCD name including both the TokenSpaceGuidName
-#  and the TokenName, using a Guid comparison to avoid #define name collisions.
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenH The TemplateString object for header file
-#
-#
-def DynExPcdTokenNumberMapping(Info, AutoGenH):
- ExTokenCNameList = []
- PcdExList = []
-    # Even if it is a Library, the PCD is saved in the ModulePcdList
- PcdList = Info.ModulePcdList
- for Pcd in PcdList:
- if Pcd.Type in gDynamicExPcd:
- ExTokenCNameList.append(Pcd.TokenCName)
- PcdExList.append(Pcd)
- if len(ExTokenCNameList) == 0:
- return
- AutoGenH.Append('\n#define COMPAREGUID(Guid1, Guid2) (BOOLEAN)(*(CONST UINT64*)Guid1 == *(CONST UINT64*)Guid2 && *((CONST UINT64*)Guid1 + 1) == *((CONST UINT64*)Guid2 + 1))\n')
- # AutoGen for each PCD listed in a [PcdEx] section of a Module/Lib INF file.
- # Auto generate a macro for each TokenName that takes a Guid pointer as a parameter.
- # Use the Guid pointer to see if it matches any of the token space GUIDs.
- TokenCNameList = []
- for TokenCName in ExTokenCNameList:
- if TokenCName in TokenCNameList:
- continue
- Index = 0
- Count = ExTokenCNameList.count(TokenCName)
- for Pcd in PcdExList:
- RealTokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- RealTokenCName = PcdItem[0]
- break
- if Pcd.TokenCName == TokenCName:
- Index = Index + 1
- if Index == 1:
- AutoGenH.Append('\n#define __PCD_%s_ADDR_CMP(GuidPtr) (' % (RealTokenCName))
- AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
- % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
- else:
- AutoGenH.Append('\\\n (GuidPtr == &%s) ? _PCD_TOKEN_%s_%s:'
- % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
- if Index == Count:
- AutoGenH.Append('0 \\\n )\n')
- TokenCNameList.append(TokenCName)
-
- TokenCNameList = []
- for TokenCName in ExTokenCNameList:
- if TokenCName in TokenCNameList:
- continue
- Index = 0
- Count = ExTokenCNameList.count(TokenCName)
- for Pcd in PcdExList:
- RealTokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- RealTokenCName = PcdItem[0]
- break
- if Pcd.Type in gDynamicExPcd and Pcd.TokenCName == TokenCName:
- Index = Index + 1
- if Index == 1:
- AutoGenH.Append('\n#define __PCD_%s_VAL_CMP(GuidPtr) (' % (RealTokenCName))
- AutoGenH.Append('\\\n (GuidPtr == NULL) ? 0:')
- AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
- % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
- else:
- AutoGenH.Append('\\\n COMPAREGUID (GuidPtr, &%s) ? _PCD_TOKEN_%s_%s:'
- % (Pcd.TokenSpaceGuidCName, Pcd.TokenSpaceGuidCName, RealTokenCName))
- if Index == Count:
- AutoGenH.Append('0 \\\n )\n')
-    # Autogen internal worker macro to compare GUIDs. Guid1 is a pointer to a GUID,
-    # and Guid2 is a C name for a GUID. Compare pointers first because an optimizing compiler
-    # can do this at build time on CONST GUID pointers and optimize away the call to COMPAREGUID().
-    # COMPAREGUID() will only be used if the Guid passed in is local to the module.
- AutoGenH.Append('#define _PCD_TOKEN_EX_%s(GuidPtr) __PCD_%s_ADDR_CMP(GuidPtr) ? __PCD_%s_ADDR_CMP(GuidPtr) : __PCD_%s_VAL_CMP(GuidPtr) \n'
- % (RealTokenCName, RealTokenCName, RealTokenCName, RealTokenCName))
- TokenCNameList.append(TokenCName)
-
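# A rough standalone sketch (not part of the deleted file) of the lookup order that the
# generated _PCD_TOKEN_EX_<Name>(GuidPtr) macros implement: the cheap address comparison
# (__PCD_<Name>_ADDR_CMP) is tried first, and the value comparison via COMPAREGUID
# (__PCD_<Name>_VAL_CMP) is only used when the address does not match. The dictionaries
# below are hypothetical illustrations, not BaseTools data structures.
def _ResolveDynamicExToken(GuidPtr, TokenByGuidAddress, TokenByGuidValue):
    Token = TokenByGuidAddress.get(id(GuidPtr), 0)       # address match (0 means "no match")
    if Token == 0 and GuidPtr is not None:
        Token = TokenByGuidValue.get(tuple(GuidPtr), 0)  # value match, like COMPAREGUID()
    return Token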
-def GetPcdSize(Pcd):
- if Pcd.DatumType == 'VOID*':
- Value = Pcd.DefaultValue
- if Value in [None, '']:
- return 1
- elif Value[0] == 'L':
- return (len(Value) - 2) * 2
- elif Value[0] == '{':
- return len(Value.split(','))
- else:
- return len(Value) - 1
- if Pcd.DatumType == 'UINT64':
- return 8
- if Pcd.DatumType == 'UINT32':
- return 4
- if Pcd.DatumType == 'UINT16':
- return 2
- if Pcd.DatumType == 'UINT8':
- return 1
- if Pcd.DatumType == 'BOOLEAN':
- return 1
-
-
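# A quick standalone check (not part of the deleted file) that mirrors the VOID* branch
# of GetPcdSize() above, applied to hypothetical default values.
def _VoidStarPcdSize(Value):
    if Value in (None, ''):
        return 1
    if Value[0] == 'L':
        return (len(Value) - 2) * 2     # UCS-2 string: characters plus NUL terminator
    if Value[0] == '{':
        return len(Value.split(','))    # byte array: one byte per comma-separated item
    return len(Value) - 1               # ASCII string: characters plus NUL terminator

assert _VoidStarPcdSize('L"abc"') == 8
assert _VoidStarPcdSize('{0x01, 0x02}') == 2
assert _VoidStarPcdSize('"abc"') == 4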
-## Create code for module PCDs
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-# @param Pcd The PCD object
-#
-def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
- TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[Pcd.TokenSpaceGuidCName]
- PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
- #
- # Write PCDs
- #
- TokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- PcdTokenName = '_PCD_TOKEN_' + TokenCName
- PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
- PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
- FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
-
- if GlobalData.BuildOptionPcd:
- for PcdItem in GlobalData.BuildOptionPcd:
- if (Pcd.TokenSpaceGuidCName, TokenCName) == (PcdItem[0], PcdItem[1]):
- Pcd.DefaultValue = PcdItem[2]
- break
-
- if Pcd.Type in gDynamicExPcd:
- TokenNumber = int(Pcd.TokenValue, 0)
-        # Append the TokenSpaceGuidCName to PcdTokenName to discriminate DynamicEx PCDs that have
-        # different Guids but the same TokenCName
- PcdExTokenName = '_PCD_TOKEN_' + Pcd.TokenSpaceGuidCName + '_' + TokenCName
- AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
- else:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
-            # If one of the source-built modules listed in the DSC is not listed in the FDF modules,
-            # and the INF lists a PCD that can only use the PcdsDynamic access method (it is only
-            # listed in the DEC file that declares the PCD as PcdsDynamic), then the build tool will
-            # report a warning message to notify the PI that they are attempting to build a module
-            # that must be included in a flash image in order to be functional. Such Dynamic PCDs
-            # will not be added into the database unless they are used by other modules that are
-            # included in the FDF file.
-            # In this case, just assign an invalid token number so the build can pass.
- if Pcd.Type in PCD_DYNAMIC_TYPE_LIST:
- TokenNumber = 0
- else:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- else:
- TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
- AutoGenH.Append('\n#define %s %dU\n' % (PcdTokenName, TokenNumber))
-
- EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + TokenCName + "." + Pcd.TokenSpaceGuidCName)
- if Pcd.Type not in gItemTypeStringDatabase:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if Pcd.DatumType not in gDatumSizeStringDatabase:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
-
- DatumSize = gDatumSizeStringDatabase[Pcd.DatumType]
- DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType]
- GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName
- SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + TokenCName
- SetModeStatusName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_S_' + TokenCName
- GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
-
- PcdExCNameList = []
- if Pcd.Type in gDynamicExPcd:
- if Info.IsLibrary:
- PcdList = Info.LibraryPcdList
- else:
- PcdList = Info.ModulePcdList
- for PcdModule in PcdList:
- if PcdModule.Type in gDynamicExPcd:
- PcdExCNameList.append(PcdModule.TokenCName)
-        # Be compatible with the current code, which uses PcdToken and PcdGet/Set for DynamicEx PCDs.
-        # If PcdToken and PcdGet/Set are only used for PCDs with different CNames, the build should succeed.
-        # If PcdToken and PcdGet/Set are used for PCDs with different Guids but the same CName, the build should fail.
- if PcdExCNameList.count(Pcd.TokenCName) > 1:
- AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
- AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
- AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
- AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- elif Pcd.Type in gDynamicPcd:
- PcdList = []
- PcdCNameList = []
- PcdList.extend(Info.LibraryPcdList)
- PcdList.extend(Info.ModulePcdList)
- for PcdModule in PcdList:
- if PcdModule.Type in gDynamicPcd:
- PcdCNameList.append(PcdModule.TokenCName)
- if PcdCNameList.count(Pcd.TokenCName) > 1:
- EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName. They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
- else:
- AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
- else:
- AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
- else:
- PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
- Const = 'const'
- if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- Const = ''
- Type = ''
- Array = ''
- Value = Pcd.DefaultValue
- Unicode = False
- ValueNumber = 0
-
- if Pcd.DatumType == 'BOOLEAN':
- BoolValue = Value.upper()
- if BoolValue == 'TRUE' or BoolValue == '1':
- Value = '1U'
- elif BoolValue == 'FALSE' or BoolValue == '0':
- Value = '0U'
-
- if Pcd.DatumType in ['UINT64', 'UINT32', 'UINT16', 'UINT8']:
- try:
- if Value.upper().startswith('0X'):
- ValueNumber = int (Value, 16)
- else:
- ValueNumber = int (Value)
- except:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "PCD value is not valid dec or hex number for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if Pcd.DatumType == 'UINT64':
- if ValueNumber < 0:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- elif ValueNumber >= 0x10000000000000000:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if not Value.endswith('ULL'):
- Value += 'ULL'
- elif Pcd.DatumType == 'UINT32':
- if ValueNumber < 0:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- elif ValueNumber >= 0x100000000:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if not Value.endswith('U'):
- Value += 'U'
- elif Pcd.DatumType == 'UINT16':
- if ValueNumber < 0:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- elif ValueNumber >= 0x10000:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if not Value.endswith('U'):
- Value += 'U'
- elif Pcd.DatumType == 'UINT8':
- if ValueNumber < 0:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "PCD can't be set to negative value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- elif ValueNumber >= 0x100:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Too large PCD value for datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if not Value.endswith('U'):
- Value += 'U'
- if Pcd.DatumType == 'VOID*':
- if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '':
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
-
- ArraySize = int(Pcd.MaxDatumSize, 0)
- if Value[0] == '{':
- Type = '(VOID *)'
- else:
- if Value[0] == 'L':
- Unicode = True
- Value = Value.lstrip('L') #.strip('"')
- Value = eval(Value) # translate escape character
- NewValue = '{'
- for Index in range(0,len(Value)):
- if Unicode:
- NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
- else:
- NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', '
- if Unicode:
- ArraySize = ArraySize / 2;
-
- if ArraySize < (len(Value) + 1):
- EdkLogger.error("build", AUTOGEN_ERROR,
- "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- Value = NewValue + '0 }'
- Array = '[%d]' % ArraySize
- #
- # skip casting for fixed at build since it breaks ARM assembly.
- # Long term we need PCD macros that work in assembly
- #
- elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD:
- Value = "((%s)%s)" % (Pcd.DatumType, Value)
-
- if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- PcdValueName = '_PCD_PATCHABLE_VALUE_' + TokenCName
- else:
- PcdValueName = '_PCD_VALUE_' + TokenCName
-
- if Pcd.DatumType == 'VOID*':
- #
- # For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
- #
- if Unicode:
- AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
- AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array))
- AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
- else:
- AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
- AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array))
- AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
-
- PcdDataSize = GetPcdSize(Pcd)
- if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
- AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
- AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,FixPcdSizeTokenName))
-
- if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
- AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,PatchPcdSizeVariableName))
- AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName,PcdDataSize))
- elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
- AutoGenC.Append('volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
- AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
- AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
-
- PcdDataSize = GetPcdSize(Pcd)
- AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
-
- AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,PatchPcdSizeVariableName))
- AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED UINTN %s = %s;\n' % (PatchPcdSizeVariableName,PcdDataSize))
- else:
- PcdDataSize = GetPcdSize(Pcd)
- AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
- AutoGenH.Append('#define %s %s \n' % (GetModeSizeName,FixPcdSizeTokenName))
-
- AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value))
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName))
- AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array))
- AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName))
-
- if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE:
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, Pcd.TokenCName, Pcd.TokenCName, Pcd.TokenCName))
- else:
- AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
- AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS) \n' % (SetModeStatusName, PcdVariableName))
- else:
- AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
-
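# A minimal standalone sketch (not part of the deleted file) of the integer-value
# normalization performed above for UINT8/16/32/64 PCDs: parse the text value, range-check
# it against the datum type, and append the C suffix expected in the generated AutoGen.h.
# The helper and table names are illustrative only.
_INT_PCD_LIMITS = {'UINT8': (0x100, 'U'), 'UINT16': (0x10000, 'U'),
                   'UINT32': (0x100000000, 'U'), 'UINT64': (0x10000000000000000, 'ULL')}

def _NormalizeIntPcdValue(DatumType, Value):
    Limit, Suffix = _INT_PCD_LIMITS[DatumType]
    Number = int(Value, 16) if Value.upper().startswith('0X') else int(Value)
    if Number < 0 or Number >= Limit:
        raise ValueError('PCD value %s is out of range for %s' % (Value, DatumType))
    return Value if Value.endswith(Suffix) else Value + Suffix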
-## Create code for library module PCDs
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-# @param Pcd The PCD object
-#
-def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
- PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
- TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
- TokenCName = Pcd.TokenCName
- for PcdItem in GlobalData.MixedPcd:
- if (TokenCName, TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- PcdTokenName = '_PCD_TOKEN_' + TokenCName
- FixPcdSizeTokenName = '_PCD_SIZE_' + TokenCName
- PatchPcdSizeTokenName = '_PCD_PATCHABLE_' + TokenCName +'_SIZE'
- PatchPcdSizeVariableName = '_gPcd_BinaryPatch_Size_' + TokenCName
-
- if GlobalData.BuildOptionPcd:
- for PcdItem in GlobalData.BuildOptionPcd:
- if (Pcd.TokenSpaceGuidCName, TokenCName) == (PcdItem[0], PcdItem[1]):
- Pcd.DefaultValue = PcdItem[2]
- break
-
- #
- # Write PCDs
- #
- if Pcd.Type in gDynamicExPcd:
- TokenNumber = int(Pcd.TokenValue, 0)
- else:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
-            # If one of the source-built modules listed in the DSC is not listed in the FDF modules,
-            # and the INF lists a PCD that can only use the PcdsDynamic access method (it is only
-            # listed in the DEC file that declares the PCD as PcdsDynamic), then the build tool will
-            # report a warning message to notify the PI that they are attempting to build a module
-            # that must be included in a flash image in order to be functional. Such Dynamic PCDs
-            # will not be added into the database unless they are used by other modules that are
-            # included in the FDF file.
-            # In this case, just assign an invalid token number so the build can pass.
- if Pcd.Type in PCD_DYNAMIC_TYPE_LIST:
- TokenNumber = 0
- else:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- else:
- TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]
-
- if Pcd.Type not in gItemTypeStringDatabase:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
- if Pcd.DatumType not in gDatumSizeStringDatabase:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, TokenCName),
- ExtraData="[%s]" % str(Info))
-
- DatumType = Pcd.DatumType
- DatumSize = gDatumSizeStringDatabaseH[DatumType]
- DatumSizeLib= gDatumSizeStringDatabaseLib[DatumType]
- GetModeName = '_PCD_GET_MODE_' + DatumSize + '_' + TokenCName
- SetModeName = '_PCD_SET_MODE_' + DatumSize + '_' + TokenCName
- SetModeStatusName = '_PCD_SET_MODE_' + DatumSize + '_S_' + TokenCName
- GetModeSizeName = '_PCD_GET_MODE_SIZE' + '_' + TokenCName
-
- Type = ''
- Array = ''
- if Pcd.DatumType == 'VOID*':
- if Pcd.DefaultValue[0]== '{':
- Type = '(VOID *)'
- Array = '[]'
- PcdItemType = Pcd.Type
- PcdExCNameList = []
- if PcdItemType in gDynamicExPcd:
- PcdExTokenName = '_PCD_TOKEN_' + TokenSpaceGuidCName + '_' + TokenCName
- AutoGenH.Append('\n#define %s %dU\n' % (PcdExTokenName, TokenNumber))
-
- if Info.IsLibrary:
- PcdList = Info.LibraryPcdList
- else:
- PcdList = Info.ModulePcdList
- for PcdModule in PcdList:
- if PcdModule.Type in gDynamicExPcd:
- PcdExCNameList.append(PcdModule.TokenCName)
-        # Be compatible with the current code, which uses PcdGet/Set for DynamicEx PCDs.
-        # If PcdGet/Set are only used for PCDs with different CNames, the build should succeed.
-        # If PcdGet/Set are used for PCDs with different Guids but the same CName, the build should fail.
- if PcdExCNameList.count(Pcd.TokenCName) > 1:
- AutoGenH.Append('// Disabled the macros, as PcdToken and PcdGet/Set are not allowed in the case that more than one DynamicEx Pcds are different Guids but same CName.\n')
- AutoGenH.Append('// #define %s %s\n' % (PcdTokenName, PcdExTokenName))
- AutoGenH.Append('// #define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('// #define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('// #define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('#define %s %s\n' % (PcdTokenName, PcdExTokenName))
- AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s LibPcdGetExSize(&%s, %s)\n' % (GetModeSizeName,Pcd.TokenSpaceGuidCName, PcdTokenName))
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%sS(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- AutoGenH.Append('#define %s(Value) LibPcdSetEx%sS(&%s, %s, (Value))\n' % (SetModeStatusName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName))
- else:
- AutoGenH.Append('#define _PCD_TOKEN_%s %dU\n' % (TokenCName, TokenNumber))
- if PcdItemType in gDynamicPcd:
- PcdList = []
- PcdCNameList = []
- PcdList.extend(Info.LibraryPcdList)
- PcdList.extend(Info.ModulePcdList)
- for PcdModule in PcdList:
- if PcdModule.Type in gDynamicPcd:
- PcdCNameList.append(PcdModule.TokenCName)
- if PcdCNameList.count(Pcd.TokenCName) > 1:
- EdkLogger.error("build", AUTOGEN_ERROR, "More than one Dynamic Pcds [%s] are different Guids but same CName.They need to be changed to DynamicEx type to avoid the confliction.\n" % (TokenCName), ExtraData="[%s]" % str(Info.MetaFile.Path))
- else:
- AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s LibPcdGetSize(%s)\n' % (GetModeSizeName, PcdTokenName))
- if DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%sS(%s, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
- else:
- AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
- AutoGenH.Append('#define %s(Value) LibPcdSet%sS(%s, (Value))\n' % (SetModeStatusName, DatumSizeLib, PcdTokenName))
- if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE:
- PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName
- if DatumType == 'VOID*':
- ArraySize = int(Pcd.MaxDatumSize, 0)
- if Pcd.DefaultValue[0] == 'L':
- ArraySize = ArraySize / 2
- Array = '[%d]' % ArraySize
- DatumType = ['UINT8', 'UINT16'][Pcd.DefaultValue[0] == 'L']
- AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array))
- else:
- AutoGenH.Append('extern volatile %s %s%s;\n' % (DatumType, PcdVariableName, Array))
- AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
- PcdDataSize = GetPcdSize(Pcd)
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSize((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, TokenCName, TokenCName, TokenCName))
- AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtrAndSizeS((VOID *)_gPcd_BinaryPatch_%s, &_gPcd_BinaryPatch_Size_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeStatusName, TokenCName, TokenCName, TokenCName))
- AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, Pcd.MaxDatumSize))
- else:
- AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
- AutoGenH.Append('#define %s(Value) ((%s = (Value)), RETURN_SUCCESS)\n' % (SetModeStatusName, PcdVariableName))
- AutoGenH.Append('#define %s %s\n' % (PatchPcdSizeTokenName, PcdDataSize))
-
- AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,PatchPcdSizeVariableName))
- AutoGenH.Append('extern UINTN %s; \n' % PatchPcdSizeVariableName)
-
- if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
- key = ".".join((Pcd.TokenSpaceGuidCName,Pcd.TokenCName))
- PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + TokenCName
- if DatumType == 'VOID*' and Array == '[]':
- DatumType = ['UINT8', 'UINT16'][Pcd.DefaultValue[0] == 'L']
- AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
- AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
- AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
-
- if PcdItemType == TAB_PCDS_FIXED_AT_BUILD and (key in Info.ConstPcd or (Info.IsLibrary and not Info._ReferenceModules)):
- if Pcd.DatumType == 'VOID*':
- AutoGenH.Append('#define _PCD_VALUE_%s %s%s\n' %(TokenCName, Type, PcdVariableName))
- else:
- AutoGenH.Append('#define _PCD_VALUE_%s %s\n' %(TokenCName, Pcd.DefaultValue))
-
- if PcdItemType == TAB_PCDS_FIXED_AT_BUILD:
- PcdDataSize = GetPcdSize(Pcd)
- AutoGenH.Append('#define %s %s\n' % (FixPcdSizeTokenName, PcdDataSize))
- AutoGenH.Append('#define %s %s\n' % (GetModeSizeName,FixPcdSizeTokenName))
-
-## Create code for library constructor
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
- #
- # Library Constructors
- #
- ConstructorPrototypeString = TemplateString()
- ConstructorCallingString = TemplateString()
- if Info.IsLibrary:
- DependentLibraryList = [Info.Module]
- else:
- DependentLibraryList = Info.DependentLibraryList
- for Lib in DependentLibraryList:
- if len(Lib.ConstructorList) <= 0:
- continue
- Dict = {'Function':Lib.ConstructorList}
- if Lib.ModuleType in ['BASE', 'SEC']:
- ConstructorPrototypeString.Append(gLibraryStructorPrototype['BASE'].Replace(Dict))
- ConstructorCallingString.Append(gLibraryStructorCall['BASE'].Replace(Dict))
- elif Lib.ModuleType in ['PEI_CORE','PEIM']:
- ConstructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
- ConstructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
- elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
- 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']:
- ConstructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
- ConstructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
-
- if str(ConstructorPrototypeString) == '':
- ConstructorPrototypeList = []
- else:
- ConstructorPrototypeList = [str(ConstructorPrototypeString)]
- if str(ConstructorCallingString) == '':
- ConstructorCallingList = []
- else:
- ConstructorCallingList = [str(ConstructorCallingString)]
-
- Dict = {
- 'Type' : 'Constructor',
- 'FunctionPrototype' : ConstructorPrototypeList,
- 'FunctionCall' : ConstructorCallingList
- }
- if Info.IsLibrary:
- AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
- else:
- if Info.ModuleType in ['BASE', 'SEC']:
- AutoGenC.Append(gLibraryString['BASE'].Replace(Dict))
- elif Info.ModuleType in ['PEI_CORE','PEIM']:
- AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
- elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
- 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']:
- AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
-
-## Create code for library destructor
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
- #
- # Library Destructors
- #
- DestructorPrototypeString = TemplateString()
- DestructorCallingString = TemplateString()
- if Info.IsLibrary:
- DependentLibraryList = [Info.Module]
- else:
- DependentLibraryList = Info.DependentLibraryList
- for Index in range(len(DependentLibraryList)-1, -1, -1):
- Lib = DependentLibraryList[Index]
- if len(Lib.DestructorList) <= 0:
- continue
- Dict = {'Function':Lib.DestructorList}
- if Lib.ModuleType in ['BASE', 'SEC']:
- DestructorPrototypeString.Append(gLibraryStructorPrototype['BASE'].Replace(Dict))
- DestructorCallingString.Append(gLibraryStructorCall['BASE'].Replace(Dict))
- elif Lib.ModuleType in ['PEI_CORE','PEIM']:
- DestructorPrototypeString.Append(gLibraryStructorPrototype['PEI'].Replace(Dict))
- DestructorCallingString.Append(gLibraryStructorCall['PEI'].Replace(Dict))
- elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
- 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_CORE']:
- DestructorPrototypeString.Append(gLibraryStructorPrototype['DXE'].Replace(Dict))
- DestructorCallingString.Append(gLibraryStructorCall['DXE'].Replace(Dict))
-
- if str(DestructorPrototypeString) == '':
- DestructorPrototypeList = []
- else:
- DestructorPrototypeList = [str(DestructorPrototypeString)]
- if str(DestructorCallingString) == '':
- DestructorCallingList = []
- else:
- DestructorCallingList = [str(DestructorCallingString)]
-
- Dict = {
- 'Type' : 'Destructor',
- 'FunctionPrototype' : DestructorPrototypeList,
- 'FunctionCall' : DestructorCallingList
- }
- if Info.IsLibrary:
- AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
- else:
- if Info.ModuleType in ['BASE', 'SEC']:
- AutoGenC.Append(gLibraryString['BASE'].Replace(Dict))
- elif Info.ModuleType in ['PEI_CORE','PEIM']:
- AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
- elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
- 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION','SMM_CORE']:
- AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
-
-
-## Create code for ModuleEntryPoint
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
- if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']:
- return
- #
- # Module Entry Points
- #
- NumEntryPoints = len(Info.Module.ModuleEntryPointList)
- if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification:
- PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
- else:
- PiSpecVersion = '0x00000000'
- if 'UEFI_SPECIFICATION_VERSION' in Info.Module.Specification:
- UefiSpecVersion = Info.Module.Specification['UEFI_SPECIFICATION_VERSION']
- else:
- UefiSpecVersion = '0x00000000'
- Dict = {
- 'Function' : Info.Module.ModuleEntryPointList,
- 'PiSpecVersion' : PiSpecVersion + 'U',
- 'UefiSpecVersion': UefiSpecVersion + 'U'
- }
-
- if Info.ModuleType in ['PEI_CORE', 'DXE_CORE', 'SMM_CORE']:
- if Info.SourceFileList <> None and Info.SourceFileList <> []:
- if NumEntryPoints != 1:
- EdkLogger.error(
- "build",
- AUTOGEN_ERROR,
- '%s must have exactly one entry point' % Info.ModuleType,
- File=str(Info),
- ExtraData= ", ".join(Info.Module.ModuleEntryPointList)
- )
- if Info.ModuleType == 'PEI_CORE':
- AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict))
- AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType == 'DXE_CORE':
- AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict))
- AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType == 'SMM_CORE':
- AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict))
- AutoGenH.Append(gSmmCoreEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType == 'PEIM':
- if NumEntryPoints < 2:
- AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict))
- else:
- AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
- AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType in ['DXE_RUNTIME_DRIVER','DXE_DRIVER','DXE_SAL_DRIVER','UEFI_DRIVER']:
- if NumEntryPoints < 2:
- AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
- else:
- AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict))
- AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType == 'DXE_SMM_DRIVER':
- if NumEntryPoints == 0:
- AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict))
- else:
- AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict))
- AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict))
- elif Info.ModuleType == 'UEFI_APPLICATION':
- if NumEntryPoints < 2:
- AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict))
- else:
- AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict))
- AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict))
-
-## Create code for ModuleUnloadImage
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
- if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']:
- return
- #
- # Unload Image Handlers
- #
- NumUnloadImage = len(Info.Module.ModuleUnloadImageList)
- Dict = {'Count':str(NumUnloadImage) + 'U', 'Function':Info.Module.ModuleUnloadImageList}
- if NumUnloadImage < 2:
- AutoGenC.Append(gUefiUnloadImageString[NumUnloadImage].Replace(Dict))
- else:
- AutoGenC.Append(gUefiUnloadImageString[2].Replace(Dict))
- AutoGenH.Append(gUefiUnloadImagePrototype.Replace(Dict))
-
-## Create code for GUID
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH):
- if Info.ModuleType in ["USER_DEFINED", "BASE"]:
- GuidType = "GUID"
- else:
- GuidType = "EFI_GUID"
-
- if Info.GuidList:
- if not Info.IsLibrary:
- AutoGenC.Append("\n// Guids\n")
- AutoGenH.Append("\n// Guids\n")
- #
- # GUIDs
- #
- for Key in Info.GuidList:
- if not Info.IsLibrary:
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.GuidList[Key]))
- AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
-
-## Create code for protocol
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH):
- if Info.ModuleType in ["USER_DEFINED", "BASE"]:
- GuidType = "GUID"
- else:
- GuidType = "EFI_GUID"
-
- if Info.ProtocolList:
- if not Info.IsLibrary:
- AutoGenC.Append("\n// Protocols\n")
- AutoGenH.Append("\n// Protocols\n")
- #
- # Protocol GUIDs
- #
- for Key in Info.ProtocolList:
- if not Info.IsLibrary:
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.ProtocolList[Key]))
- AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
-
-## Create code for PPI
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH):
- if Info.ModuleType in ["USER_DEFINED", "BASE"]:
- GuidType = "GUID"
- else:
- GuidType = "EFI_GUID"
-
- if Info.PpiList:
- if not Info.IsLibrary:
- AutoGenC.Append("\n// PPIs\n")
- AutoGenH.Append("\n// PPIs\n")
- #
- # PPI GUIDs
- #
- for Key in Info.PpiList:
- if not Info.IsLibrary:
- AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.PpiList[Key]))
- AutoGenH.Append('extern %s %s;\n' % (GuidType, Key))
-
-## Create code for PCD
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreatePcdCode(Info, AutoGenC, AutoGenH):
-
-    # Collect Token Space GUIDs used by DynamicEx PCDs
- TokenSpaceList = []
- for Pcd in Info.ModulePcdList:
- if Pcd.Type in gDynamicExPcd and Pcd.TokenSpaceGuidCName not in TokenSpaceList:
- TokenSpaceList += [Pcd.TokenSpaceGuidCName]
-
- # Add extern declarations to AutoGen.h if one or more Token Space GUIDs were found
-    if TokenSpaceList != []:
- AutoGenH.Append("\n// Definition of PCD Token Space GUIDs used in this module\n\n")
- if Info.ModuleType in ["USER_DEFINED", "BASE"]:
- GuidType = "GUID"
- else:
- GuidType = "EFI_GUID"
- for Item in TokenSpaceList:
- AutoGenH.Append('extern %s %s;\n' % (GuidType, Item))
-
- if Info.IsLibrary:
- if Info.ModulePcdList:
- AutoGenH.Append("\n// PCD definitions\n")
- for Pcd in Info.ModulePcdList:
- CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd)
- DynExPcdTokenNumberMapping (Info, AutoGenH)
- else:
- if Info.ModulePcdList:
- AutoGenH.Append("\n// Definition of PCDs used in this module\n")
- AutoGenC.Append("\n// Definition of PCDs used in this module\n")
- for Pcd in Info.ModulePcdList:
- CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd)
- DynExPcdTokenNumberMapping (Info, AutoGenH)
- if Info.LibraryPcdList:
- AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n")
- AutoGenC.Append("\n// Definition of PCDs used in libraries\n")
- for Pcd in Info.LibraryPcdList:
- CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd)
- CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH)
-
-## Create code for unicode string definition
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
-# @param UniGenBinBuffer Buffer to store uni string package data
-#
-def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH, UniGenCFlag, UniGenBinBuffer):
- WorkingDir = os.getcwd()
- os.chdir(Info.WorkspaceDir)
-
- IncList = [Info.MetaFile.Dir]
- # Get all files under [Sources] section in inf file for EDK-II module
- EDK2Module = True
- SrcList = [F for F in Info.SourceFileList]
- if Info.AutoGenVersion < 0x00010005:
- EDK2Module = False
- # Get all files under the module directory for EDK-I module
- Cwd = os.getcwd()
- os.chdir(Info.MetaFile.Dir)
- for Root, Dirs, Files in os.walk("."):
- if 'CVS' in Dirs:
- Dirs.remove('CVS')
- if '.svn' in Dirs:
- Dirs.remove('.svn')
- for File in Files:
- File = PathClass(os.path.join(Root, File), Info.MetaFile.Dir)
- if File in SrcList:
- continue
- SrcList.append(File)
- os.chdir(Cwd)
-
- if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1:
- CompatibleMode = True
- else:
- CompatibleMode = False
-
- #
-    # -s is a temporary option dedicated to building .UNI files with ISO 639-2 language codes for the EDK Shell in EDK2
- #
- if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
- if CompatibleMode:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "-c and -s build options should be used exclusively",
- ExtraData="[%s]" % str(Info))
- ShellMode = True
- else:
- ShellMode = False
-
-    # RFC 4646 language codes are only for EDK II modules; ISO 639-2 codes are only for EDK modules
- if EDK2Module:
- FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.RFCLanguages]
- else:
- FilterInfo = [EDK2Module] + [Info.PlatformInfo.Platform.ISOLanguages]
- Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, Info.IncludePathList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode, UniGenCFlag, UniGenBinBuffer, FilterInfo)
- if CompatibleMode or UniGenCFlag:
- AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
- AutoGenC.Append(Code)
- AutoGenC.Append("\n")
- AutoGenH.Append("\n//\n//Unicode String ID\n//\n")
- AutoGenH.Append(Header)
- if CompatibleMode or UniGenCFlag:
- AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name)
- os.chdir(WorkingDir)
-
-def CreateIdfFileCode(Info, AutoGenC, StringH, IdfGenCFlag, IdfGenBinBuffer):
- if len(Info.IdfFileList) > 0:
- ImageFiles = IdfFileClassObject(sorted (Info.IdfFileList))
- if ImageFiles.ImageFilesDict:
- Index = 1
- PaletteIndex = 1
- IncList = [Info.MetaFile.Dir]
- SrcList = [F for F in Info.SourceFileList]
- SkipList = ['.jpg', '.png', '.bmp', '.inf', '.idf']
- FileList = GetFileList(SrcList, IncList, SkipList)
- ValueStartPtr = 60
- StringH.Append("\n//\n//Image ID\n//\n")
- ImageInfoOffset = 0
- PaletteInfoOffset = 0
- ImageBuffer = pack('x')
- PaletteBuffer = pack('x')
- BufferStr = ''
- PaletteStr = ''
- FileDict = {}
- for Idf in ImageFiles.ImageFilesDict:
- if ImageFiles.ImageFilesDict[Idf]:
- for FileObj in ImageFiles.ImageFilesDict[Idf]:
- for sourcefile in Info.SourceFileList:
- if FileObj.FileName == sourcefile.File:
- if not sourcefile.Ext.upper() in ['.PNG', '.BMP', '.JPG']:
- EdkLogger.error("build", AUTOGEN_ERROR, "The %s's postfix must be one of .bmp, .jpg, .png" % (FileObj.FileName), ExtraData="[%s]" % str(Info))
- FileObj.File = sourcefile
- break
- else:
- EdkLogger.error("build", AUTOGEN_ERROR, "The %s in %s is not defined in the driver's [Sources] section" % (FileObj.FileName, Idf), ExtraData="[%s]" % str(Info))
-
- for FileObj in ImageFiles.ImageFilesDict[Idf]:
- ID = FileObj.ImageID
- File = FileObj.File
- if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
- EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=File.Path)
- SearchImageID (FileObj, FileList)
- if FileObj.Referenced:
- if (ValueStartPtr - len(DEFINE_STR + ID)) <= 0:
- Line = DEFINE_STR + ' ' + ID + ' ' + DecToHexStr(Index, 4) + '\n'
- else:
- Line = DEFINE_STR + ' ' + ID + ' ' * (ValueStartPtr - len(DEFINE_STR + ID)) + DecToHexStr(Index, 4) + '\n'
-
- if File not in FileDict:
- FileDict[File] = Index
- else:
- DuplicateBlock = pack('B', EFI_HII_IIBT_DUPLICATE)
- DuplicateBlock += pack('H', FileDict[File])
- ImageBuffer += DuplicateBlock
- BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
- TempBufferList = AscToHexList(DuplicateBlock)
- BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
- StringH.Append(Line)
- Index += 1
- continue
-
- TmpFile = open(File.Path, 'rb')
- Buffer = TmpFile.read()
- TmpFile.close()
- if File.Ext.upper() == '.PNG':
- TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_PNG)
- TempBuffer += pack('I', len(Buffer))
- TempBuffer += Buffer
- elif File.Ext.upper() == '.JPG':
- ImageType, = struct.unpack('4s', Buffer[6:10])
- if ImageType != 'JFIF':
- EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard JPG file." % File.Path)
- TempBuffer = pack('B', EFI_HII_IIBT_IMAGE_JPEG)
- TempBuffer += pack('I', len(Buffer))
- TempBuffer += Buffer
- elif File.Ext.upper() == '.BMP':
- TempBuffer, TempPalette = BmpImageDecoder(File, Buffer, PaletteIndex, FileObj.TransParent)
- if len(TempPalette) > 1:
- PaletteIndex += 1
- NewPalette = pack('H', len(TempPalette))
- NewPalette += TempPalette
- PaletteBuffer += NewPalette
- PaletteStr = WriteLine(PaletteStr, '// %s: %s: %s' % (DecToHexStr(PaletteIndex - 1, 4), ID, DecToHexStr(PaletteIndex - 1, 4)))
- TempPaletteList = AscToHexList(NewPalette)
- PaletteStr = WriteLine(PaletteStr, CreateArrayItem(TempPaletteList, 16) + '\n')
- ImageBuffer += TempBuffer
- BufferStr = WriteLine(BufferStr, '// %s: %s: %s' % (DecToHexStr(Index, 4), ID, DecToHexStr(Index, 4)))
- TempBufferList = AscToHexList(TempBuffer)
- BufferStr = WriteLine(BufferStr, CreateArrayItem(TempBufferList, 16) + '\n')
-
- StringH.Append(Line)
- Index += 1
-
- BufferStr = WriteLine(BufferStr, '// End of the Image Info')
- BufferStr = WriteLine(BufferStr, CreateArrayItem(DecToHexList(EFI_HII_IIBT_END, 2)) + '\n')
- ImageEnd = pack('B', EFI_HII_IIBT_END)
- ImageBuffer += ImageEnd
-
- if len(ImageBuffer) > 1:
- ImageInfoOffset = 12
- if len(PaletteBuffer) > 1:
- PaletteInfoOffset = 12 + len(ImageBuffer) - 1 # -1 is for the first empty pad byte of ImageBuffer
-
- IMAGE_PACKAGE_HDR = pack('=II', ImageInfoOffset, PaletteInfoOffset)
- # PACKAGE_HEADER_Length = PACKAGE_HEADER + ImageInfoOffset + PaletteInfoOffset + ImageBuffer Length + PaletteCount + PaletteBuffer Length
- if len(PaletteBuffer) > 1:
- PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1 + 2 + len(PaletteBuffer) - 1
- else:
- PACKAGE_HEADER_Length = 4 + 4 + 4 + len(ImageBuffer) - 1
- if PaletteIndex > 1:
- PALETTE_INFO_HEADER = pack('H', PaletteIndex - 1)
- # EFI_HII_PACKAGE_HEADER length max value is 0xFFFFFF
- Hex_Length = '%06X' % PACKAGE_HEADER_Length
- if PACKAGE_HEADER_Length > 0xFFFFFF:
-                EdkLogger.error("build", AUTOGEN_ERROR, "The length of EFI_HII_PACKAGE_HEADER exceeds its maximum value", ExtraData="[%s]" % str(Info))
- PACKAGE_HEADER = pack('=HBB', int('0x' + Hex_Length[2:], 16), int('0x' + Hex_Length[0:2], 16), EFI_HII_PACKAGE_IMAGES)
-
- IdfGenBinBuffer.write(PACKAGE_HEADER)
- IdfGenBinBuffer.write(IMAGE_PACKAGE_HDR)
- if len(ImageBuffer) > 1 :
- IdfGenBinBuffer.write(ImageBuffer[1:])
- if PaletteIndex > 1:
- IdfGenBinBuffer.write(PALETTE_INFO_HEADER)
- if len(PaletteBuffer) > 1:
- IdfGenBinBuffer.write(PaletteBuffer[1:])
-
- if IdfGenCFlag:
- TotalLength = EFI_HII_ARRAY_SIZE_LENGTH + PACKAGE_HEADER_Length
- AutoGenC.Append("\n//\n//Image Pack Definition\n//\n")
- AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + Info.Module.BaseName + 'Images' + '[] = {\n')
- AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
- AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
- AllStr = WriteLine(AllStr, '// Image PACKAGE HEADER\n')
- IMAGE_PACKAGE_HDR_List = AscToHexList(PACKAGE_HEADER)
- IMAGE_PACKAGE_HDR_List += AscToHexList(IMAGE_PACKAGE_HDR)
- AllStr = WriteLine(AllStr, CreateArrayItem(IMAGE_PACKAGE_HDR_List, 16) + '\n')
- AllStr = WriteLine(AllStr, '// Image DATA\n')
- if BufferStr:
- AllStr = WriteLine(AllStr, BufferStr)
- if PaletteStr:
- AllStr = WriteLine(AllStr, '// Palette Header\n')
- PALETTE_INFO_HEADER_List = AscToHexList(PALETTE_INFO_HEADER)
- AllStr = WriteLine(AllStr, CreateArrayItem(PALETTE_INFO_HEADER_List, 16) + '\n')
- AllStr = WriteLine(AllStr, '// Palette Data\n')
- AllStr = WriteLine(AllStr, PaletteStr)
- AllStr = WriteLine(AllStr, '};')
- AutoGenC.Append(AllStr)
- AutoGenC.Append("\n")
- StringH.Append('\nextern unsigned char ' + Info.Module.BaseName + 'Images[];\n')
- StringH.Append("\n#define IMAGE_ARRAY_NAME %sImages\n" % Info.Module.BaseName)
-
-# typedef struct _EFI_HII_IMAGE_PACKAGE_HDR {
-# EFI_HII_PACKAGE_HEADER Header; # Standard package header, where Header.Type = EFI_HII_PACKAGE_IMAGES
-# UINT32 ImageInfoOffset;
-# UINT32 PaletteInfoOffset;
-# } EFI_HII_IMAGE_PACKAGE_HDR;
-
-# typedef struct {
-# UINT32 Length:24;
-# UINT32 Type:8;
-# UINT8 Data[];
-# } EFI_HII_PACKAGE_HEADER;
-
-# typedef struct _EFI_HII_IMAGE_BLOCK {
-# UINT8 BlockType;
-# UINT8 BlockBody[];
-# } EFI_HII_IMAGE_BLOCK;
-
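[Editor note] For reference, the 24-bit Length / 8-bit Type split packed above with '=HBB' can be reproduced with a small standalone sketch. This is illustrative only and not part of the deleted file; the type value 0x06 below is a placeholder argument, the real constant (EFI_HII_PACKAGE_IMAGES) is defined elsewhere in BaseTools.

    import struct
    import binascii

    def PackHiiPackageHeader(Length, Type):
        # EFI_HII_PACKAGE_HEADER keeps Length in 24 bits and Type in 8 bits.
        # Mirror the '=HBB' packing used above: low 16 bits, high 8 bits, then Type.
        if Length > 0xFFFFFF:
            raise ValueError("package length exceeds the 24-bit maximum")
        return struct.pack('=HBB', Length & 0xFFFF, (Length >> 16) & 0xFF, Type)

    # A 0x1234-byte package with an illustrative type value of 0x06.
    assert binascii.hexlify(PackHiiPackageHeader(0x1234, 0x06)) == '34120006'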
-def BmpImageDecoder(File, Buffer, PaletteIndex, TransParent):
- ImageType, = struct.unpack('2s', Buffer[0:2])
-    if ImageType != 'BM': # BMP file type is 'BM'
- EdkLogger.error("build", FILE_TYPE_MISMATCH, "The file %s is not a standard BMP file." % File.Path)
- BMP_IMAGE_HEADER = collections.namedtuple('BMP_IMAGE_HEADER', ['bfSize','bfReserved1','bfReserved2','bfOffBits','biSize','biWidth','biHeight','biPlanes','biBitCount', 'biCompression', 'biSizeImage','biXPelsPerMeter','biYPelsPerMeter','biClrUsed','biClrImportant'])
- BMP_IMAGE_HEADER_STRUCT = struct.Struct('IHHIIIIHHIIIIII')
- BmpHeader = BMP_IMAGE_HEADER._make(BMP_IMAGE_HEADER_STRUCT.unpack_from(Buffer[2:]))
- #
-    # Compressed BMP files are not supported.
-    #
-    if BmpHeader.biCompression != 0:
-        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The compressed BMP file %s is not supported." % File.Path)
-
-    # The Width and Height are UINT16 fields in the Image Package
-    if BmpHeader.biWidth > 0xFFFF:
-        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The BMP file %s width exceeds 0xFFFF." % File.Path)
-    if BmpHeader.biHeight > 0xFFFF:
-        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "The BMP file %s height exceeds 0xFFFF." % File.Path)
-
- PaletteBuffer = pack('x')
- if BmpHeader.biBitCount == 1:
- if TransParent:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT_TRANS)
- else:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_1BIT)
- ImageBuffer += pack('B', PaletteIndex)
- Width = (BmpHeader.biWidth + 7)/8
- if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
- PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
- elif BmpHeader.biBitCount == 4:
- if TransParent:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT_TRANS)
- else:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_4BIT)
- ImageBuffer += pack('B', PaletteIndex)
- Width = (BmpHeader.biWidth + 1)/2
- if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
- PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
- elif BmpHeader.biBitCount == 8:
- if TransParent:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT_TRANS)
- else:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_8BIT)
- ImageBuffer += pack('B', PaletteIndex)
- Width = BmpHeader.biWidth
- if BmpHeader.bfOffBits > BMP_IMAGE_HEADER_STRUCT.size + 2:
- PaletteBuffer = Buffer[BMP_IMAGE_HEADER_STRUCT.size + 2 : BmpHeader.bfOffBits]
- elif BmpHeader.biBitCount == 24:
- if TransParent:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT_TRANS)
- else:
- ImageBuffer = pack('B', EFI_HII_IIBT_IMAGE_24BIT)
- Width = BmpHeader.biWidth * 3
- else:
-        EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "Only 1-bit, 4-bit, 8-bit, and 24-bit BMP files are supported.", ExtraData="[%s]" % str(File.Path))
-
- ImageBuffer += pack('H', BmpHeader.biWidth)
- ImageBuffer += pack('H', BmpHeader.biHeight)
- Start = BmpHeader.bfOffBits
- End = BmpHeader.bfSize - 1
- for Height in range(0, BmpHeader.biHeight):
- if Width % 4 != 0:
- Start = End + (Width % 4) - 4 - Width
- else:
- Start = End - Width
- ImageBuffer += Buffer[Start + 1 : Start + Width + 1]
- End = Start
-
-    # Handle the palette info: BMP uses 4 bytes per entry (R, G, B and Reserved) while EFI_HII_RGB_PIXEL only has the R, G, B info
- if PaletteBuffer and len(PaletteBuffer) > 1:
- PaletteTemp = pack('x')
- for Index in range(0, len(PaletteBuffer)):
- if Index % 4 == 3:
- continue
- PaletteTemp += PaletteBuffer[Index]
- PaletteBuffer = PaletteTemp[1:]
- return ImageBuffer, PaletteBuffer
-
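[Editor note] A minimal sketch of the palette conversion at the end of BmpImageDecoder above: BMP palettes store 4 bytes per entry (Blue, Green, Red, Reserved) while the HII image package expects 3-byte EFI_HII_RGB_PIXEL entries, so every fourth byte is dropped. The helper name is hypothetical and the code is not part of the deleted file.

    def StripBmpPaletteReservedBytes(Palette):
        # Drop every fourth (Reserved) byte of a BMP palette, keeping B, G, R in file order.
        return ''.join(Palette[Index] for Index in range(len(Palette)) if Index % 4 != 3)

    # Two 4-byte BMP palette entries become two 3-byte HII pixels.
    assert StripBmpPaletteReservedBytes('\x01\x02\x03\x00\x0a\x0b\x0c\x00') == '\x01\x02\x03\x0a\x0b\x0c'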
-## Create common code
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateHeaderCode(Info, AutoGenC, AutoGenH):
- # file header
- AutoGenH.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.h'}))
- # header file Prologue
- AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-','_')}))
- AutoGenH.Append(gAutoGenHCppPrologueString)
- if Info.AutoGenVersion >= 0x00010005:
- # header files includes
- AutoGenH.Append("#include <%s>\n" % gBasicHeaderFile)
- if Info.ModuleType in gModuleTypeHeaderFile \
- and gModuleTypeHeaderFile[Info.ModuleType][0] != gBasicHeaderFile:
- AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0])
- #
-        # If PcdLib is in the [LibraryClasses] section or the module has any PCD section, add PcdLib.h,
-        # because a module that only uses FixedPcd does not need PcdLib in its [LibraryClasses] section.
- #
- if 'PcdLib' in Info.Module.LibraryClasses or Info.Module.Pcds:
- AutoGenH.Append("#include <Library/PcdLib.h>\n")
-
- AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;')
- AutoGenH.Append('\nextern CHAR8 *gEfiCallerBaseName;\n\n')
-
- if Info.IsLibrary:
- return
-
- AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid))
-
- if Info.IsLibrary:
- return
- # C file header
- AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'}))
- if Info.AutoGenVersion >= 0x00010005:
- # C file header files includes
- if Info.ModuleType in gModuleTypeHeaderFile:
- for Inc in gModuleTypeHeaderFile[Info.ModuleType]:
- AutoGenC.Append("#include <%s>\n" % Inc)
- else:
- AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile)
-
- #
- # Publish the CallerId Guid
- #
- AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid))
- AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED CHAR8 *gEfiCallerBaseName = "%s";\n' % Info.Name)
-
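[Editor note] GuidStringToGuidStructureString comes from Common.Misc; the sketch below only illustrates the kind of conversion it is used for here (registry-format GUID to the C initializer emitted for EFI_CALLER_ID_GUID and gEfiCallerIdGuid). The exact whitespace produced by the real helper may differ, and the function name is hypothetical.

    def GuidRegistryToCStructure(Guid):
        # "665e3ff6-46cc-11d4-9a38-0090273fc14d" ->
        # "{0x665e3ff6, 0x46cc, 0x11d4, {0x9a, 0x38, 0x00, 0x90, 0x27, 0x3f, 0xc1, 0x4d}}"
        Parts = Guid.split('-')
        Node = Parts[3] + Parts[4]
        Tail = ", ".join("0x" + Node[Index:Index + 2] for Index in range(0, 16, 2))
        return "{0x%s, 0x%s, 0x%s, {%s}}" % (Parts[0], Parts[1], Parts[2], Tail)

    assert GuidRegistryToCStructure("665e3ff6-46cc-11d4-9a38-0090273fc14d") == \
        "{0x665e3ff6, 0x46cc, 0x11d4, {0x9a, 0x38, 0x00, 0x90, 0x27, 0x3f, 0xc1, 0x4d}}"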
-## Create footer code for the header file
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreateFooterCode(Info, AutoGenC, AutoGenH):
- AutoGenH.Append(gAutoGenHEpilogueString)
-
-## Create code for a module
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-# @param StringH The TemplateString object for header file
-# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
-# @param UniGenBinBuffer Buffer to store uni string package data
-# @param StringIdf The TemplateString object for header file
-# @param IdfGenCFlag IdfString is generated into AutoGen C file when it is set to True
-# @param IdfGenBinBuffer Buffer to store Idf string package data
-#
-def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, StringIdf, IdfGenCFlag, IdfGenBinBuffer):
- CreateHeaderCode(Info, AutoGenC, AutoGenH)
-
- if Info.AutoGenVersion >= 0x00010005:
- CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH)
- CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH)
- CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH)
- CreatePcdCode(Info, AutoGenC, AutoGenH)
- CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH)
- CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH)
- CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH)
- CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH)
-
- if Info.UnicodeFileList:
- FileName = "%sStrDefs.h" % Info.Name
- StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
- StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-','_')}))
- CreateUnicodeStringCode(Info, AutoGenC, StringH, UniGenCFlag, UniGenBinBuffer)
-
- GuidMacros = []
- for Guid in Info.Module.Guids:
- if Guid in Info.Module.GetGuidsUsedByPcd():
- continue
- GuidMacros.append('#define %s %s' % (Guid, Info.Module.Guids[Guid]))
- for Guid, Value in Info.Module.Protocols.items() + Info.Module.Ppis.items():
- GuidMacros.append('#define %s %s' % (Guid, Value))
- # supports FixedAtBuild usage in VFR file
- if Info.VfrFileList and Info.ModulePcdList:
- GuidMacros.append('#define %s %s' % ('FixedPcdGetBool(TokenName)', '_PCD_VALUE_##TokenName'))
- GuidMacros.append('#define %s %s' % ('FixedPcdGet8(TokenName)', '_PCD_VALUE_##TokenName'))
- GuidMacros.append('#define %s %s' % ('FixedPcdGet16(TokenName)', '_PCD_VALUE_##TokenName'))
- GuidMacros.append('#define %s %s' % ('FixedPcdGet32(TokenName)', '_PCD_VALUE_##TokenName'))
- GuidMacros.append('#define %s %s' % ('FixedPcdGet64(TokenName)', '_PCD_VALUE_##TokenName'))
- for Pcd in Info.ModulePcdList:
- if Pcd.Type == TAB_PCDS_FIXED_AT_BUILD:
- TokenCName = Pcd.TokenCName
- Value = Pcd.DefaultValue
- if Pcd.DatumType == 'BOOLEAN':
- BoolValue = Value.upper()
- if BoolValue == 'TRUE':
- Value = '1'
- elif BoolValue == 'FALSE':
- Value = '0'
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- TokenCName = PcdItem[0]
- break
- GuidMacros.append('#define %s %s' % ('_PCD_VALUE_'+TokenCName, Value))
-
- if Info.IdfFileList:
- GuidMacros.append('#include "%sImgDefs.h"' % Info.Name)
-
- if GuidMacros:
- StringH.Append('\n#ifdef VFRCOMPILE\n%s\n#endif\n' % '\n'.join(GuidMacros))
-
- StringH.Append("\n#endif\n")
- AutoGenH.Append('#include "%s"\n' % FileName)
-
- if Info.IdfFileList:
- FileName = "%sImgDefs.h" % Info.Name
- StringIdf.Append(gAutoGenHeaderString.Replace({'FileName':FileName}))
- StringIdf.Append(gAutoGenHPrologueString.Replace({'File':'IMAGEDEFS', 'Guid':Info.Guid.replace('-','_')}))
- CreateIdfFileCode(Info, AutoGenC, StringIdf, IdfGenCFlag, IdfGenBinBuffer)
-
- StringIdf.Append("\n#endif\n")
- AutoGenH.Append('#include "%s"\n' % FileName)
-
- CreateFooterCode(Info, AutoGenC, AutoGenH)
-
- # no generation of AutoGen.c for Edk modules without unicode file
- if Info.AutoGenVersion < 0x00010005 and len(Info.UnicodeFileList) == 0:
- AutoGenC.String = ''
-
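[Editor note] The VFRCOMPILE macro generation in CreateCode above normalizes FixedAtBuild PCD values so the VFR compiler sees plain numbers; the sketch below condenses that normalization step. It is illustrative only, with a hypothetical helper name, and is not part of the deleted file.

    def NormalizeFixedPcdValue(DatumType, Value):
        # BOOLEAN PCD values are emitted as 1/0 so the FixedPcdGet* macros expand to plain numbers.
        if DatumType == 'BOOLEAN':
            if Value.upper() == 'TRUE':
                return '1'
            if Value.upper() == 'FALSE':
                return '0'
        return Value

    assert NormalizeFixedPcdValue('BOOLEAN', 'TRUE') == '1'
    assert NormalizeFixedPcdValue('UINT32', '0x1000') == '0x1000'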
-## Create the code file
-#
-# @param FilePath The path of code file
-# @param Content The content of code file
-# @param IsBinaryFile The flag indicating if the file is binary file or not
-#
-# @retval True If file content is changed or file doesn't exist
-# @retval False If the file exists and the content is not changed
-#
-def Generate(FilePath, Content, IsBinaryFile):
- return SaveFileOnChange(FilePath, Content, IsBinaryFile)
-
diff --git a/BaseTools/Source/Python/AutoGen/GenDepex.py b/BaseTools/Source/Python/AutoGen/GenDepex.py
deleted file mode 100644
index 5923a75ab7..0000000000
--- a/BaseTools/Source/Python/AutoGen/GenDepex.py
+++ /dev/null
@@ -1,448 +0,0 @@
-## @file
-# This file is used to generate the DEPEX file for a module's dependency expression
-#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-## Import Modules
-#
-import sys
-import Common.LongFilePathOs as os
-import re
-import traceback
-from Common.LongFilePathSupport import OpenLongFilePath as open
-from StringIO import StringIO
-from struct import pack
-from Common.BuildToolError import *
-from Common.Misc import SaveFileOnChange
-from Common.Misc import GuidStructureStringToGuidString
-from Common import EdkLogger as EdkLogger
-from Common.BuildVersion import gBUILD_VERSION
-
-## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END"
-gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)
-
-## Mapping between module type and EFI phase
-gType2Phase = {
- "BASE" : None,
- "SEC" : "PEI",
- "PEI_CORE" : "PEI",
- "PEIM" : "PEI",
- "DXE_CORE" : "DXE",
- "DXE_DRIVER" : "DXE",
- "DXE_SMM_DRIVER" : "DXE",
- "DXE_RUNTIME_DRIVER": "DXE",
- "DXE_SAL_DRIVER" : "DXE",
- "UEFI_DRIVER" : "DXE",
- "UEFI_APPLICATION" : "DXE",
- "SMM_CORE" : "DXE",
-}
-
-## Convert dependency expression string into EFI internal representation
-#
-# DependencyExpression class is used to parse dependency expression string and
-# convert it into its binary form.
-#
-class DependencyExpression:
-
- ArchProtocols = set([
- '665e3ff6-46cc-11d4-9a38-0090273fc14d', # 'gEfiBdsArchProtocolGuid'
- '26baccb1-6f42-11d4-bce7-0080c73c8881', # 'gEfiCpuArchProtocolGuid'
- '26baccb2-6f42-11d4-bce7-0080c73c8881', # 'gEfiMetronomeArchProtocolGuid'
- '1da97072-bddc-4b30-99f1-72a0b56fff2a', # 'gEfiMonotonicCounterArchProtocolGuid'
- '27cfac87-46cc-11d4-9a38-0090273fc14d', # 'gEfiRealTimeClockArchProtocolGuid'
- '27cfac88-46cc-11d4-9a38-0090273fc14d', # 'gEfiResetArchProtocolGuid'
- 'b7dfb4e1-052f-449f-87be-9818fc91b733', # 'gEfiRuntimeArchProtocolGuid'
- 'a46423e3-4617-49f1-b9ff-d1bfa9115839', # 'gEfiSecurityArchProtocolGuid'
- '26baccb3-6f42-11d4-bce7-0080c73c8881', # 'gEfiTimerArchProtocolGuid'
- '6441f818-6362-4e44-b570-7dba31dd2453', # 'gEfiVariableWriteArchProtocolGuid'
- '1e5668e2-8481-11d4-bcf1-0080c73c8881', # 'gEfiVariableArchProtocolGuid'
- '665e3ff5-46cc-11d4-9a38-0090273fc14d' # 'gEfiWatchdogTimerArchProtocolGuid'
- ]
- )
-
- OpcodePriority = {
- "AND" : 1,
- "OR" : 1,
- "NOT" : 2,
- # "SOR" : 9,
- # "BEFORE": 9,
- # "AFTER" : 9,
- }
-
- Opcode = {
- "PEI" : {
- "PUSH" : 0x02,
- "AND" : 0x03,
- "OR" : 0x04,
- "NOT" : 0x05,
- "TRUE" : 0x06,
- "FALSE" : 0x07,
- "END" : 0x08
- },
-
- "DXE" : {
- "BEFORE": 0x00,
- "AFTER" : 0x01,
- "PUSH" : 0x02,
- "AND" : 0x03,
- "OR" : 0x04,
- "NOT" : 0x05,
- "TRUE" : 0x06,
- "FALSE" : 0x07,
- "END" : 0x08,
- "SOR" : 0x09
- }
- }
-
- # all supported op codes and operands
- SupportedOpcode = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR"]
- SupportedOperand = ["TRUE", "FALSE"]
-
- OpcodeWithSingleOperand = ['NOT', 'BEFORE', 'AFTER']
- OpcodeWithTwoOperand = ['AND', 'OR']
-
- # op code that should not be the last one
- NonEndingOpcode = ["AND", "OR", "NOT", 'SOR']
- # op code must not present at the same time
- ExclusiveOpcode = ["BEFORE", "AFTER"]
- # op code that should be the first one if it presents
- AboveAllOpcode = ["SOR", "BEFORE", "AFTER"]
-
- #
- # open and close brace must be taken as individual tokens
- #
- TokenPattern = re.compile("(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")
-
- ## Constructor
- #
- # @param Expression The list or string of dependency expression
- # @param ModuleType The type of the module using the dependency expression
- #
- def __init__(self, Expression, ModuleType, Optimize=False):
- self.ModuleType = ModuleType
- self.Phase = gType2Phase[ModuleType]
- if type(Expression) == type([]):
- self.ExpressionString = " ".join(Expression)
- self.TokenList = Expression
- else:
- self.ExpressionString = Expression
- self.GetExpressionTokenList()
-
- self.PostfixNotation = []
- self.OpcodeList = []
-
- self.GetPostfixNotation()
- self.ValidateOpcode()
-
- EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
- if Optimize:
- self.Optimize()
- EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))
-
- def __str__(self):
- return " ".join(self.TokenList)
-
- def __repr__(self):
- WellForm = ''
- for Token in self.PostfixNotation:
- if Token in self.SupportedOpcode:
- WellForm += "\n " + Token
- else:
- WellForm += ' ' + Token
- return WellForm
-
- ## Split the expression string into token list
- def GetExpressionTokenList(self):
- self.TokenList = self.TokenPattern.findall(self.ExpressionString)
-
- ## Convert token list into postfix notation
- def GetPostfixNotation(self):
- Stack = []
- LastToken = ''
- for Token in self.TokenList:
- if Token == "(":
- if LastToken not in self.SupportedOpcode + ['(', '', None]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before open parentheses",
- ExtraData="Near %s" % LastToken)
- Stack.append(Token)
- elif Token == ")":
- if '(' not in Stack:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
- ExtraData=str(self))
- elif LastToken in self.SupportedOpcode + ['', None]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before close parentheses",
- ExtraData="Near %s" % LastToken)
- while len(Stack) > 0:
- if Stack[-1] == '(':
- Stack.pop()
- break
- self.PostfixNotation.append(Stack.pop())
- elif Token in self.OpcodePriority:
- if Token == "NOT":
- if LastToken not in self.SupportedOpcode + ['(', '', None]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
- ExtraData="Near %s" % LastToken)
- elif LastToken in self.SupportedOpcode + ['(', '', None]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
- ExtraData="Near %s" % LastToken)
-
- while len(Stack) > 0:
- if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
- break
- self.PostfixNotation.append(Stack.pop())
- Stack.append(Token)
- self.OpcodeList.append(Token)
- else:
- if Token not in self.SupportedOpcode:
- # not OP, take it as GUID
- if LastToken not in self.SupportedOpcode + ['(', '', None]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
- ExtraData="Near %s" % LastToken)
- if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
- if Token not in self.SupportedOperand:
- self.PostfixNotation.append("PUSH")
- # check if OP is valid in this phase
- elif Token in self.Opcode[self.Phase]:
- if Token == "END":
- break
- self.OpcodeList.append(Token)
- else:
- EdkLogger.error("GenDepex", PARSER_ERROR,
-                                    "Opcode=%s is not supported in %s stage" % (Token, self.Phase),
- ExtraData=str(self))
- self.PostfixNotation.append(Token)
- LastToken = Token
-
- # there should not be parentheses in Stack
- if '(' in Stack or ')' in Stack:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
- ExtraData=str(self))
- while len(Stack) > 0:
- self.PostfixNotation.append(Stack.pop())
- if self.PostfixNotation[-1] != 'END':
- self.PostfixNotation.append("END")
-
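[Editor note] GetPostfixNotation above is a shunting-yard pass specialized for depex operators (with PUSH insertion and phase checks). The stripped-down sketch below, which omits the error handling and PUSH handling and is not part of the deleted file, shows how the operator-priority rule turns "A AND ( B OR C )" into postfix order.

    def ToPostfix(TokenList, Priority={'AND': 1, 'OR': 1, 'NOT': 2}):
        Output = []
        Stack = []
        for Token in TokenList:
            if Token == '(':
                Stack.append(Token)
            elif Token == ')':
                while Stack and Stack[-1] != '(':
                    Output.append(Stack.pop())
                Stack.pop()                              # discard the matching '('
            elif Token in Priority:
                while Stack and Stack[-1] != '(' and Priority[Token] < Priority[Stack[-1]]:
                    Output.append(Stack.pop())
                Stack.append(Token)
            else:
                Output.append(Token)                     # operand: GUID C name, TRUE or FALSE
        while Stack:
            Output.append(Stack.pop())
        return Output

    assert ToPostfix('A AND ( B OR C )'.split()) == ['A', 'B', 'C', 'OR', 'AND']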
- ## Validate the dependency expression
- def ValidateOpcode(self):
- for Op in self.AboveAllOpcode:
- if Op in self.PostfixNotation:
- if Op != self.PostfixNotation[0]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
- ExtraData=str(self))
- if len(self.PostfixNotation) < 3:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
- ExtraData=str(self))
- for Op in self.ExclusiveOpcode:
- if Op in self.OpcodeList:
- if len(self.OpcodeList) > 1:
- EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
- ExtraData=str(self))
- if len(self.PostfixNotation) < 3:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
- ExtraData=str(self))
- if self.TokenList[-1] != 'END' and self.TokenList[-1] in self.NonEndingOpcode:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
- ExtraData=str(self))
- if self.TokenList[-1] == 'END' and self.TokenList[-2] in self.NonEndingOpcode:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
- ExtraData=str(self))
- if "END" in self.TokenList and "END" != self.TokenList[-1]:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
- ExtraData=str(self))
-
- ## Simply optimize the dependency expression by removing duplicated operands
- def Optimize(self):
- ValidOpcode = list(set(self.OpcodeList))
- if len(ValidOpcode) != 1 or ValidOpcode[0] not in ['AND', 'OR']:
- return
- Op = ValidOpcode[0]
- NewOperand = []
- AllOperand = set()
- for Token in self.PostfixNotation:
- if Token in self.SupportedOpcode or Token in NewOperand:
- continue
- AllOperand.add(Token)
- if Token == 'TRUE':
- if Op == 'AND':
- continue
- else:
- NewOperand.append(Token)
- break
- elif Token == 'FALSE':
- if Op == 'OR':
- continue
- else:
- NewOperand.append(Token)
- break
- NewOperand.append(Token)
-
- # don't generate depex if only TRUE operand left
- if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and NewOperand[0] == 'TRUE':
- self.PostfixNotation = []
- return
-
- # don't generate depex if all operands are architecture protocols
- if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \
- Op == 'AND' and \
- self.ArchProtocols == set([GuidStructureStringToGuidString(Guid) for Guid in AllOperand]):
- self.PostfixNotation = []
- return
-
- if len(NewOperand) == 0:
- self.TokenList = list(AllOperand)
- else:
- self.TokenList = []
- while True:
- self.TokenList.append(NewOperand.pop(0))
- if NewOperand == []:
- break
- self.TokenList.append(Op)
- self.PostfixNotation = []
- self.GetPostfixNotation()
-
-
- ## Convert a GUID value in C structure format into its binary form
- #
- # @param Guid The GUID value in C structure format
- #
- # @retval array The byte array representing the GUID value
- #
- def GetGuidValue(self, Guid):
- GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
- GuidValueList = GuidValueString.split(",")
- if len(GuidValueList) != 11:
- EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
- return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))
-
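[Editor note] The 16-byte packing done by GetGuidValue can be checked in isolation; the sketch below uses gEfiBdsArchProtocolGuid (listed in ArchProtocols above) written in the C-structure form the method expects. It is illustrative only and not part of the deleted file.

    from struct import pack
    import binascii

    GuidText = "{0x665e3ff6, 0x46cc, 0x11d4, {0x9a, 0x38, 0x00, 0x90, 0x27, 0x3f, 0xc1, 0x4d}}"
    Fields = GuidText.replace("{", "").replace("}", "").replace(" ", "").split(",")
    assert len(Fields) == 11
    Binary = pack("1I2H8B", *(int(Value, 16) for Value in Fields))   # same format string as above
    # On a little-endian host this prints f63f5e66cc46d4119a380090273fc14d.
    print binascii.hexlify(Binary)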
- ## Save the binary form of dependency expression in file
- #
- # @param File The path of file. If None is given, put the data on console
- #
- # @retval True If the file doesn't exist or file is changed
- # @retval False If file exists and is not changed.
- #
- def Generate(self, File=None):
- Buffer = StringIO()
- if len(self.PostfixNotation) == 0:
- return False
-
- for Item in self.PostfixNotation:
- if Item in self.Opcode[self.Phase]:
- Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
- elif Item in self.SupportedOpcode:
- EdkLogger.error("GenDepex", FORMAT_INVALID,
- "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
- ExtraData=self.ExpressionString)
- else:
- Buffer.write(self.GetGuidValue(Item))
-
- FilePath = ""
- FileChangeFlag = True
- if File == None:
- sys.stdout.write(Buffer.getvalue())
- FilePath = "STDOUT"
- else:
- FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)
-
- Buffer.close()
- return FileChangeFlag
-
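[Editor note] For context, a typical invocation of the class follows. This is a sketch, not part of the deleted file: it assumes EdkLogger has been initialized as in Main() below, and that GUID names have already been resolved to C-structure values (as the build does before creating the expression); the output file name is hypothetical.

    # Build the PEI-phase depex "<gEfiBdsArchProtocolGuid> AND TRUE" and save its binary form.
    Expression = [
        "{0x665e3ff6, 0x46cc, 0x11d4, {0x9a, 0x38, 0x00, 0x90, 0x27, 0x3f, 0xc1, 0x4d}}",
        "AND", "TRUE", "END"
    ]
    Dpx = DependencyExpression(Expression, "PEIM", Optimize=False)
    Changed = Dpx.Generate("Sample.depex")   # True if the file was created or its content changed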
-versionNumber = ("0.04" + " " + gBUILD_VERSION)
-__version__ = "%prog Version " + versionNumber
-__copyright__ = "Copyright (c) 2007-2010, Intel Corporation All rights reserved."
-__usage__ = "%prog [options] [dependency_expression_file]"
-
-## Parse command line options
-#
-# @retval OptionParser
-#
-def GetOptions():
- from optparse import OptionParser
-
- Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)
-
- Parser.add_option("-o", "--output", dest="OutputFile", default=None, metavar="FILE",
- help="Specify the name of depex file to be generated")
- Parser.add_option("-t", "--module-type", dest="ModuleType", default=None,
- help="The type of module for which the dependency expression serves")
- Parser.add_option("-e", "--dependency-expression", dest="Expression", default="",
-                      help="The dependency expression string. If this option is present, the input file will be ignored.")
- Parser.add_option("-m", "--optimize", dest="Optimize", default=False, action="store_true",
- help="Do some simple optimization on the expression.")
- Parser.add_option("-v", "--verbose", dest="verbose", default=False, action="store_true",
- help="build with verbose information")
- Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")
- Parser.add_option("-q", "--quiet", dest="quiet", default=False, action="store_true",
- help="build with little information")
-
- return Parser.parse_args()
-
-
-## Entrance method
-#
-# @retval 0 Tool was successful
-# @retval 1 Tool failed
-#
-def Main():
- EdkLogger.Initialize()
- Option, Input = GetOptions()
-
- # Set log level
- if Option.quiet:
- EdkLogger.SetLevel(EdkLogger.QUIET)
- elif Option.verbose:
- EdkLogger.SetLevel(EdkLogger.VERBOSE)
- elif Option.debug != None:
- EdkLogger.SetLevel(Option.debug + 1)
- else:
- EdkLogger.SetLevel(EdkLogger.INFO)
-
- try:
- if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
- EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
-
- DxsFile = ''
- if len(Input) > 0 and Option.Expression == "":
- DxsFile = Input[0]
- DxsString = open(DxsFile, 'r').read().replace("\n", " ").replace("\r", " ")
- DxsString = gStartClosePattern.sub("\\1", DxsString)
- elif Option.Expression != "":
- if Option.Expression[0] == '"':
- DxsString = Option.Expression[1:-1]
- else:
- DxsString = Option.Expression
- else:
- EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
-
- Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
- if Option.OutputFile != None:
- FileChangeFlag = Dpx.Generate(Option.OutputFile)
- if not FileChangeFlag and DxsFile:
- #
-                # Touch the output file if its time stamp is older than the original
-                # DXS file, to avoid re-invoking this tool for the dependency check in the build rule.
- #
- if os.stat(DxsFile)[8] > os.stat(Option.OutputFile)[8]:
- os.utime(Option.OutputFile, None)
- else:
- Dpx.Generate()
- except BaseException, X:
- EdkLogger.quiet("")
- if Option != None and Option.debug != None:
- EdkLogger.quiet(traceback.format_exc())
- else:
- EdkLogger.quiet(str(X))
- return 1
-
- return 0
-
-if __name__ == '__main__':
- sys.exit(Main())
-
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py
deleted file mode 100644
index 0f3ddd5dd4..0000000000
--- a/BaseTools/Source/Python/AutoGen/GenMake.py
+++ /dev/null
@@ -1,1530 +0,0 @@
-## @file
-# Create makefile for MS nmake and GNU make
-#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-## Import Modules
-#
-import Common.LongFilePathOs as os
-import sys
-import string
-import re
-import os.path as path
-from Common.LongFilePathSupport import OpenLongFilePath as open
-from Common.MultipleWorkspace import MultipleWorkspace as mws
-from Common.BuildToolError import *
-from Common.Misc import *
-from Common.String import *
-from BuildEngine import *
-import Common.GlobalData as GlobalData
-
-## Regular expression for finding header file inclusions
-gIncludePattern = re.compile(r"^[ \t]*#?[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:\(?[\"<]?[ \t]*)([-\w.\\/() \t]+)(?:[ \t]*[\">]?\)?)", re.MULTILINE | re.UNICODE | re.IGNORECASE)
-
-## Regular expression for matching macro used in header file inclusion
-gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE)
-
-gIsFileMap = {}
-
-## pattern for include style in Edk.x code
-gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h"
-gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h"
-gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h"
-gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h"
-gIncludeMacroConversion = {
- "EFI_PROTOCOL_DEFINITION" : gProtocolDefinition,
- "EFI_GUID_DEFINITION" : gGuidDefinition,
- "EFI_ARCH_PROTOCOL_DEFINITION" : gArchProtocolDefinition,
- "EFI_PROTOCOL_PRODUCER" : gProtocolDefinition,
- "EFI_PROTOCOL_CONSUMER" : gProtocolDefinition,
- "EFI_PROTOCOL_DEPENDENCY" : gProtocolDefinition,
- "EFI_ARCH_PROTOCOL_PRODUCER" : gArchProtocolDefinition,
- "EFI_ARCH_PROTOCOL_CONSUMER" : gArchProtocolDefinition,
- "EFI_ARCH_PROTOCOL_DEPENDENCY" : gArchProtocolDefinition,
- "EFI_PPI_DEFINITION" : gPpiDefinition,
- "EFI_PPI_PRODUCER" : gPpiDefinition,
- "EFI_PPI_CONSUMER" : gPpiDefinition,
- "EFI_PPI_DEPENDENCY" : gPpiDefinition,
-}
-
-## default makefile type
-gMakeType = ""
-if sys.platform == "win32":
- gMakeType = "nmake"
-else:
- gMakeType = "gmake"
-
-
-## BuildFile class
-#
-# This base class encapsulates a build file and its generation. It uses a template to generate
-# the content of the build file. The content of the build file is obtained from AutoGen objects.
-#
-class BuildFile(object):
- ## template used to generate the build file (i.e. makefile if using make)
- _TEMPLATE_ = TemplateString('')
-
- _DEFAULT_FILE_NAME_ = "Makefile"
-
- ## default file name for each type of build file
- _FILE_NAME_ = {
- "nmake" : "Makefile",
- "gmake" : "GNUmakefile"
- }
-
- ## Fixed header string for makefile
- _MAKEFILE_HEADER = '''#
-# DO NOT EDIT
-# This file is auto-generated by build utility
-#
-# Module Name:
-#
-# %s
-#
-# Abstract:
-#
-# Auto-generated makefile for building modules, libraries or platform
-#
- '''
-
- ## Header string for each type of build file
- _FILE_HEADER_ = {
- "nmake" : _MAKEFILE_HEADER % _FILE_NAME_["nmake"],
- "gmake" : _MAKEFILE_HEADER % _FILE_NAME_["gmake"]
- }
-
-    ## shell commands which can be used in the build file in the form of macros
- # $(CP) copy file command
- # $(MV) move file command
- # $(RM) remove file command
- # $(MD) create dir command
- # $(RD) remove dir command
- #
- _SHELL_CMD_ = {
- "nmake" : {
- "CP" : "copy /y",
- "MV" : "move /y",
- "RM" : "del /f /q",
- "MD" : "mkdir",
- "RD" : "rmdir /s /q",
- },
-
- "gmake" : {
- "CP" : "cp -f",
- "MV" : "mv -f",
- "RM" : "rm -f",
- "MD" : "mkdir -p",
- "RD" : "rm -r -f",
- }
- }
-
- ## directory separator
- _SEP_ = {
- "nmake" : "\\",
- "gmake" : "/"
- }
-
- ## directory creation template
- _MD_TEMPLATE_ = {
- "nmake" : 'if not exist %(dir)s $(MD) %(dir)s',
- "gmake" : "$(MD) %(dir)s"
- }
-
- ## directory removal template
- _RD_TEMPLATE_ = {
- "nmake" : 'if exist %(dir)s $(RD) %(dir)s',
- "gmake" : "$(RD) %(dir)s"
- }
-
- _CD_TEMPLATE_ = {
- "nmake" : 'if exist %(dir)s cd %(dir)s',
- "gmake" : "test -e %(dir)s && cd %(dir)s"
- }
-
- _MAKE_TEMPLATE_ = {
- "nmake" : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s',
- "gmake" : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s'
- }
-
- _INCLUDE_CMD_ = {
- "nmake" : '!INCLUDE',
- "gmake" : "include"
- }
-
- _INC_FLAG_ = {"MSFT" : "/I", "GCC" : "-I", "INTEL" : "-I", "RVCT" : "-I"}
-
- ## Constructor of BuildFile
- #
- # @param AutoGenObject Object of AutoGen class
- #
- def __init__(self, AutoGenObject):
- self._AutoGenObject = AutoGenObject
- self._FileType = gMakeType
-
- ## Create build file
- #
- # @param FileType Type of build file. Only nmake and gmake are supported now.
- #
- # @retval TRUE The build file is created or re-created successfully
- # @retval FALSE The build file exists and is the same as the one to be generated
- #
- def Generate(self, FileType=gMakeType):
- if FileType not in self._FILE_NAME_:
- EdkLogger.error("build", PARAMETER_INVALID, "Invalid build type [%s]" % FileType,
- ExtraData="[%s]" % str(self._AutoGenObject))
- self._FileType = FileType
- FileContent = self._TEMPLATE_.Replace(self._TemplateDict)
- FileName = self._FILE_NAME_[FileType]
- return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False)
-
- ## Return a list of directory creation command string
- #
- # @param DirList The list of directory to be created
- #
- # @retval list The directory creation command list
- #
- def GetCreateDirectoryCommand(self, DirList):
- return [self._MD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList]
-
- ## Return a list of directory removal command string
- #
- # @param DirList The list of directory to be removed
- #
- # @retval list The directory removal command list
- #
- def GetRemoveDirectoryCommand(self, DirList):
- return [self._RD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList]
-
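[Editor note] As a quick illustration of the per-generator templates above (a sketch using the class attributes directly, not part of the deleted file): nmake guards the mkdir with an existence test, while gmake relies on 'mkdir -p'.

    for MakeType in ("nmake", "gmake"):
        print MakeType, "->", BuildFile._MD_TEMPLATE_[MakeType] % {'dir': '$(OUTPUT_DIR)'}
    # nmake -> if not exist $(OUTPUT_DIR) $(MD) $(OUTPUT_DIR)
    # gmake -> $(MD) $(OUTPUT_DIR)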
- def PlaceMacro(self, Path, MacroDefinitions={}):
- if Path.startswith("$("):
- return Path
- else:
- PathLength = len(Path)
- for MacroName in MacroDefinitions:
- MacroValue = MacroDefinitions[MacroName]
- MacroValueLength = len(MacroValue)
- if MacroValueLength <= PathLength and Path.startswith(MacroValue):
- Path = "$(%s)%s" % (MacroName, Path[MacroValueLength:])
- break
- return Path
-
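[Editor note] PlaceMacro above rewrites only the first matching prefix, so a path under the workspace comes out macro-ized while an already macro-ized path is returned untouched. The standalone sketch below mirrors that behavior with a hypothetical helper name and example paths; it is not part of the deleted file.

    def PlaceMacroSketch(Path, MacroDefinitions):
        if Path.startswith("$("):                        # already expressed with a macro
            return Path
        for Name, Value in MacroDefinitions.items():
            if Path.startswith(Value):                   # first matching prefix wins
                return "$(%s)%s" % (Name, Path[len(Value):])
        return Path

    assert PlaceMacroSketch("/work/edk2/MdePkg/MdePkg.dec", {"WORKSPACE": "/work/edk2"}) == "$(WORKSPACE)/MdePkg/MdePkg.dec"
    assert PlaceMacroSketch("$(OUTPUT_DIR)/foo.obj", {"WORKSPACE": "/work/edk2"}) == "$(OUTPUT_DIR)/foo.obj"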
-## ModuleMakefile class
-#
-# This class encapsulates the makefile and its generation for a module. It uses a template to generate
-# the content of the makefile. The content of the makefile is obtained from the ModuleAutoGen object.
-#
-class ModuleMakefile(BuildFile):
- ## template used to generate the makefile for module
- _TEMPLATE_ = TemplateString('''\
-${makefile_header}
-
-#
-# Platform Macro Definition
-#
-PLATFORM_NAME = ${platform_name}
-PLATFORM_GUID = ${platform_guid}
-PLATFORM_VERSION = ${platform_version}
-PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
-PLATFORM_DIR = ${platform_dir}
-PLATFORM_OUTPUT_DIR = ${platform_output_directory}
-
-#
-# Module Macro Definition
-#
-MODULE_NAME = ${module_name}
-MODULE_GUID = ${module_guid}
-MODULE_NAME_GUID = ${module_name_guid}
-MODULE_VERSION = ${module_version}
-MODULE_TYPE = ${module_type}
-MODULE_FILE = ${module_file}
-MODULE_FILE_BASE_NAME = ${module_file_base_name}
-BASE_NAME = $(MODULE_NAME)
-MODULE_RELATIVE_DIR = ${module_relative_directory}
-PACKAGE_RELATIVE_DIR = ${package_relative_directory}
-MODULE_DIR = ${module_dir}
-
-MODULE_ENTRY_POINT = ${module_entry_point}
-ARCH_ENTRY_POINT = ${arch_entry_point}
-IMAGE_ENTRY_POINT = ${image_entry_point}
-
-${BEGIN}${module_extra_defines}
-${END}
-#
-# Build Configuration Macro Definition
-#
-ARCH = ${architecture}
-TOOLCHAIN = ${toolchain_tag}
-TOOLCHAIN_TAG = ${toolchain_tag}
-TARGET = ${build_target}
-
-#
-# Build Directory Macro Definition
-#
-# PLATFORM_BUILD_DIR = ${platform_build_directory}
-BUILD_DIR = ${platform_build_directory}
-BIN_DIR = $(BUILD_DIR)${separator}${architecture}
-LIB_DIR = $(BIN_DIR)
-MODULE_BUILD_DIR = ${module_build_directory}
-OUTPUT_DIR = ${module_output_directory}
-DEBUG_DIR = ${module_debug_directory}
-DEST_DIR_OUTPUT = $(OUTPUT_DIR)
-DEST_DIR_DEBUG = $(DEBUG_DIR)
-
-#
-# Shell Command Macro
-#
-${BEGIN}${shell_command_code} = ${shell_command}
-${END}
-
-#
-# Tools definitions specific to this module
-#
-${BEGIN}${module_tool_definitions}
-${END}
-MAKE_FILE = ${makefile_path}
-
-#
-# Build Macro
-#
-${BEGIN}${file_macro}
-${END}
-
-COMMON_DEPS = ${BEGIN}${common_dependency_file} \\
- ${END}
-
-#
-# Overridable Target Macro Definitions
-#
-FORCE_REBUILD = force_build
-INIT_TARGET = init
-PCH_TARGET =
-BC_TARGET = ${BEGIN}${backward_compatible_target} ${END}
-CODA_TARGET = ${BEGIN}${remaining_build_target} \\
- ${END}
-
-#
-# Default target, which will build dependent libraries in addition to source files
-#
-
-all: mbuild
-
-
-#
-# Target used when called from platform makefile, which will bypass the build of dependent libraries
-#
-
-pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
-
-#
-# ModuleTarget
-#
-
-mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET)
-
-#
-# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets
-#
-
-tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET)
-
-#
-# Phony target which is used to force executing commands for a target
-#
-force_build:
-\t-@
-
-#
-# Target to update the FD
-#
-
-fds: mbuild gen_fds
-
-#
-# Initialization target: print build information and create necessary directories
-#
-init: info dirs
-
-info:
-\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
-
-dirs:
-${BEGIN}\t-@${create_directory_command}\n${END}
-
-strdefs:
-\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h
-
-#
-# GenLibsTarget
-#
-gen_libs:
-\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name}
-\t${END}@cd $(MODULE_BUILD_DIR)
-
-#
-# Build Flash Device Image
-#
-gen_fds:
-\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds
-\t@cd $(MODULE_BUILD_DIR)
-
-#
-# Individual Object Build Targets
-#
-${BEGIN}${file_build_target}
-${END}
-
-#
-# clean all intermediate files
-#
-clean:
-\t${BEGIN}${clean_command}
-\t${END}\t$(RM) AutoGenTimeStamp
-
-#
-# clean all generated files
-#
-cleanall:
-${BEGIN}\t${cleanall_command}
-${END}\t$(RM) *.pdb *.idb > NUL 2>&1
-\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi
-\t$(RM) AutoGenTimeStamp
-
-#
-# clean all dependent libraries built
-#
-cleanlib:
-\t${BEGIN}-@${library_build_command} cleanall
-\t${END}@cd $(MODULE_BUILD_DIR)\n\n''')
-
- _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n ${source_file}${END}\n")
- _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n")
-
- ## Constructor of ModuleMakefile
- #
- # @param ModuleAutoGen Object of ModuleAutoGen class
- #
- def __init__(self, ModuleAutoGen):
- BuildFile.__init__(self, ModuleAutoGen)
- self.PlatformInfo = self._AutoGenObject.PlatformInfo
-
- self.ResultFileList = []
- self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
-
- self.SourceFileDatabase = {} # {file type : file path}
- self.DestFileDatabase = {} # {file type : file path}
- self.FileBuildTargetList = [] # [(src, target string)]
- self.BuildTargetList = [] # [target string]
- self.PendingBuildTargetList = [] # [FileBuildRule objects]
- self.CommonFileDependency = []
- self.FileListMacros = {}
- self.ListFileMacros = {}
-
- self.FileCache = {}
- self.FileDependency = []
- self.LibraryBuildCommandList = []
- self.LibraryFileList = []
- self.LibraryMakefileList = []
- self.LibraryBuildDirectoryList = []
- self.SystemLibraryList = []
- self.Macros = sdict()
- self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"]
- self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"]
- self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"]
- self.Macros["BIN_DIR" ] = self._AutoGenObject.Macros["BIN_DIR"]
- self.Macros["BUILD_DIR" ] = self._AutoGenObject.Macros["BUILD_DIR"]
- self.Macros["WORKSPACE" ] = self._AutoGenObject.Macros["WORKSPACE"]
-
- # Compose a dict object containing information used to do replacement in template
- def _CreateTemplateDict(self):
- if self._FileType not in self._SEP_:
- EdkLogger.error("build", PARAMETER_INVALID, "Invalid Makefile type [%s]" % self._FileType,
- ExtraData="[%s]" % str(self._AutoGenObject))
- Separator = self._SEP_[self._FileType]
-
- # break build if no source files and binary files are found
- if len(self._AutoGenObject.SourceFileList) == 0 and len(self._AutoGenObject.BinaryFileList) == 0:
- EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]"
- % (self._AutoGenObject.BuildTarget, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch),
- ExtraData="[%s]" % str(self._AutoGenObject))
-
- # convert dependent libraries to build command
- self.ProcessDependentLibrary()
- if len(self._AutoGenObject.Module.ModuleEntryPointList) > 0:
- ModuleEntryPoint = self._AutoGenObject.Module.ModuleEntryPointList[0]
- else:
- ModuleEntryPoint = "_ModuleEntryPoint"
-
- # Intel EBC compiler enforces EfiMain
- if self._AutoGenObject.AutoGenVersion < 0x00010005 and self._AutoGenObject.Arch == "EBC":
- ArchEntryPoint = "EfiMain"
- else:
- ArchEntryPoint = ModuleEntryPoint
-
- if self._AutoGenObject.Arch == "EBC":
- # EBC compiler always use "EfiStart" as entry point. Only applies to EdkII modules
- ImageEntryPoint = "EfiStart"
- elif self._AutoGenObject.AutoGenVersion < 0x00010005:
- # Edk modules use entry point specified in INF file
- ImageEntryPoint = ModuleEntryPoint
- else:
- # EdkII modules always use "_ModuleEntryPoint" as entry point
- ImageEntryPoint = "_ModuleEntryPoint"
-
- for k, v in self._AutoGenObject.Module.Defines.iteritems():
- if k not in self._AutoGenObject.Macros.keys():
- self._AutoGenObject.Macros[k] = v
-
- if 'MODULE_ENTRY_POINT' not in self._AutoGenObject.Macros.keys():
- self._AutoGenObject.Macros['MODULE_ENTRY_POINT'] = ModuleEntryPoint
- if 'ARCH_ENTRY_POINT' not in self._AutoGenObject.Macros.keys():
- self._AutoGenObject.Macros['ARCH_ENTRY_POINT'] = ArchEntryPoint
- if 'IMAGE_ENTRY_POINT' not in self._AutoGenObject.Macros.keys():
- self._AutoGenObject.Macros['IMAGE_ENTRY_POINT'] = ImageEntryPoint
-
- PCI_COMPRESS_Flag = False
- for k, v in self._AutoGenObject.Module.Defines.iteritems():
- if 'PCI_COMPRESS' == k and 'TRUE' == v:
- PCI_COMPRESS_Flag = True
-
- # tools definitions
- ToolsDef = []
- IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily]
- for Tool in self._AutoGenObject.BuildOption:
- for Attr in self._AutoGenObject.BuildOption[Tool]:
- Value = self._AutoGenObject.BuildOption[Tool][Attr]
- if Attr == "FAMILY":
- continue
- elif Attr == "PATH":
- ToolsDef.append("%s = %s" % (Tool, Value))
- else:
- # Don't generate MAKE_FLAGS in makefile. It's put in environment variable.
- if Tool == "MAKE":
- continue
- # Remove duplicated include path, if any
- if Attr == "FLAGS":
- Value = RemoveDupOption(Value, IncPrefix, self._AutoGenObject.IncludePathList)
- if Tool == "OPTROM" and PCI_COMPRESS_Flag:
- ValueList = Value.split()
- if ValueList:
- for i, v in enumerate(ValueList):
- if '-e' == v:
- ValueList[i] = '-ec'
- Value = ' '.join(ValueList)
-
- ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value))
- ToolsDef.append("")
-
- # generate the Response file and Response flag
- RespDict = self.CommandExceedLimit()
- RespFileList = os.path.join(self._AutoGenObject.OutputDir, 'respfilelist.txt')
- if RespDict:
- RespFileListContent = ''
- for Resp in RespDict.keys():
- RespFile = os.path.join(self._AutoGenObject.OutputDir, str(Resp).lower() + '.txt')
- StrList = RespDict[Resp].split(' ')
- UnexpandMacro = []
- NewStr = []
- for Str in StrList:
- if '$' in Str:
- UnexpandMacro.append(Str)
- else:
- NewStr.append(Str)
- UnexpandMacroStr = ' '.join(UnexpandMacro)
- NewRespStr = ' '.join(NewStr)
- SaveFileOnChange(RespFile, NewRespStr, False)
- ToolsDef.append("%s = %s" % (Resp, UnexpandMacroStr + ' @' + RespFile))
- RespFileListContent += '@' + RespFile + os.linesep
- RespFileListContent += NewRespStr + os.linesep
- SaveFileOnChange(RespFileList, RespFileListContent, False)
- else:
- if os.path.exists(RespFileList):
- os.remove(RespFileList)
-
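- # Illustrative note (not part of the original file; file names shown are the ones
- # built above): for each tool whose command line is too long, the loop above writes
- # the expanded flags to "<OutputDir>/<tool>_resp.txt", emits a tool definition such as
- #   CC_RESP = <unexpanded $(...) macros> @<OutputDir>/cc_resp.txt
- # and records every "@<respfile>" reference together with its content in
- # respfilelist.txt so the response files can be inspected after the build.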
- # convert source files and binary files to build targets
- self.ResultFileList = [str(T.Target) for T in self._AutoGenObject.CodaTargetList]
- if len(self.ResultFileList) == 0 and len(self._AutoGenObject.SourceFileList) <> 0:
- EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build",
- ExtraData="[%s]" % str(self._AutoGenObject))
-
- self.ProcessBuildTargetList()
-
- # Generate macros used to represent input files
- FileMacroList = [] # macro name = file list
- for FileListMacro in self.FileListMacros:
- FileMacro = self._FILE_MACRO_TEMPLATE.Replace(
- {
- "macro_name" : FileListMacro,
- "source_file" : self.FileListMacros[FileListMacro]
- }
- )
- FileMacroList.append(FileMacro)
-
- # INC_LIST is special
- FileMacro = ""
- IncludePathList = []
- for P in self._AutoGenObject.IncludePathList:
- IncludePathList.append(IncPrefix + self.PlaceMacro(P, self.Macros))
- if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros:
- self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix + P)
- FileMacro += self._FILE_MACRO_TEMPLATE.Replace(
- {
- "macro_name" : "INC",
- "source_file" : IncludePathList
- }
- )
- FileMacroList.append(FileMacro)
-
- # Generate macros used to represent files containing list of input files
- for ListFileMacro in self.ListFileMacros:
- ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro) - 5])
- FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName))
- SaveFileOnChange(
- ListFileName,
- "\n".join(self.ListFileMacros[ListFileMacro]),
- False
- )
-
- # Edk modules need <BaseName>StrDefs.h for string ID
- #if self._AutoGenObject.AutoGenVersion < 0x00010005 and len(self._AutoGenObject.UnicodeFileList) > 0:
- # BcTargetList = ['strdefs']
- #else:
- # BcTargetList = []
- BcTargetList = []
-
- MakefileName = self._FILE_NAME_[self._FileType]
- LibraryMakeCommandList = []
- for D in self.LibraryBuildDirectoryList:
- Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join(D, MakefileName)}
- LibraryMakeCommandList.append(Command)
-
- package_rel_dir = self._AutoGenObject.SourceDir
- current_dir = self.Macros["WORKSPACE"]
- found = False
- while not found and os.sep in package_rel_dir:
- index = package_rel_dir.index(os.sep)
- current_dir = mws.join(current_dir, package_rel_dir[:index])
- if os.path.exists(current_dir):
- for fl in os.listdir(current_dir):
- if fl.endswith('.dec'):
- found = True
- break
- package_rel_dir = package_rel_dir[index + 1:]
-
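- # Illustrative example (hypothetical paths, not from the original file): if SourceDir
- # is "SomePkg/Module/Foo", the loop above probes "SomePkg", then "SomePkg/Module", ...
- # under the workspace until a directory containing a .dec file is found; the remainder,
- # e.g. "Module/Foo", is then emitted below as "package_relative_directory".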
- MakefileTemplateDict = {
- "makefile_header" : self._FILE_HEADER_[self._FileType],
- "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
- "makefile_name" : MakefileName,
- "platform_name" : self.PlatformInfo.Name,
- "platform_guid" : self.PlatformInfo.Guid,
- "platform_version" : self.PlatformInfo.Version,
- "platform_relative_directory": self.PlatformInfo.SourceDir,
- "platform_output_directory" : self.PlatformInfo.OutputDir,
- "platform_dir" : self._AutoGenObject.Macros["PLATFORM_DIR"],
-
- "module_name" : self._AutoGenObject.Name,
- "module_guid" : self._AutoGenObject.Guid,
- "module_name_guid" : self._AutoGenObject._GetUniqueBaseName(),
- "module_version" : self._AutoGenObject.Version,
- "module_type" : self._AutoGenObject.ModuleType,
- "module_file" : self._AutoGenObject.MetaFile.Name,
- "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName,
- "module_relative_directory" : self._AutoGenObject.SourceDir,
- "module_dir" : mws.join (self.Macros["WORKSPACE"], self._AutoGenObject.SourceDir),
- "package_relative_directory": package_rel_dir,
- "module_extra_defines" : ["%s = %s" % (k, v) for k, v in self._AutoGenObject.Module.Defines.iteritems()],
-
- "architecture" : self._AutoGenObject.Arch,
- "toolchain_tag" : self._AutoGenObject.ToolChain,
- "build_target" : self._AutoGenObject.BuildTarget,
-
- "platform_build_directory" : self.PlatformInfo.BuildDir,
- "module_build_directory" : self._AutoGenObject.BuildDir,
- "module_output_directory" : self._AutoGenObject.OutputDir,
- "module_debug_directory" : self._AutoGenObject.DebugDir,
-
- "separator" : Separator,
- "module_tool_definitions" : ToolsDef,
-
- "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
- "shell_command" : self._SHELL_CMD_[self._FileType].values(),
-
- "module_entry_point" : ModuleEntryPoint,
- "image_entry_point" : ImageEntryPoint,
- "arch_entry_point" : ArchEntryPoint,
- "remaining_build_target" : self.ResultFileList,
- "common_dependency_file" : self.CommonFileDependency,
- "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
- "clean_command" : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]),
- "cleanall_command" : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]),
- "dependent_library_build_directory" : self.LibraryBuildDirectoryList,
- "library_build_command" : LibraryMakeCommandList,
- "file_macro" : FileMacroList,
- "file_build_target" : self.BuildTargetList,
- "backward_compatible_target": BcTargetList,
- }
-
- return MakefileTemplateDict
-
- def CommandExceedLimit(self):
- FlagDict = {
- 'CC' : { 'Macro' : '$(CC_FLAGS)', 'Value' : False},
- 'PP' : { 'Macro' : '$(PP_FLAGS)', 'Value' : False},
- 'APP' : { 'Macro' : '$(APP_FLAGS)', 'Value' : False},
- 'ASLPP' : { 'Macro' : '$(ASLPP_FLAGS)', 'Value' : False},
- 'VFRPP' : { 'Macro' : '$(VFRPP_FLAGS)', 'Value' : False},
- 'ASM' : { 'Macro' : '$(ASM_FLAGS)', 'Value' : False},
- 'ASLCC' : { 'Macro' : '$(ASLCC_FLAGS)', 'Value' : False},
- }
-
- RespDict = {}
- FileTypeList = []
- IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily]
-
- # decide the file types based on the source files
- for File in self._AutoGenObject.SourceFileList:
- for type in self._AutoGenObject.FileTypes:
- if File in self._AutoGenObject.FileTypes[type]:
- if type not in FileTypeList:
- FileTypeList.append(type)
-
- # calculate the command-line length
- if FileTypeList:
- for type in FileTypeList:
- BuildTargets = self._AutoGenObject.BuildRules[type].BuildTargets
- for Target in BuildTargets:
- CommandList = BuildTargets[Target].Commands
- for SingleCommand in CommandList:
- Tool = ''
- SingleCommandLength = len(SingleCommand)
- SingleCommandList = SingleCommand.split()
- if len(SingleCommandList) > 0:
- for Flag in FlagDict.keys():
- if '$('+ Flag +')' in SingleCommandList[0]:
- Tool = Flag
- break
- if Tool:
- SingleCommandLength += len(self._AutoGenObject._BuildOption[Tool]['PATH'])
- for item in SingleCommandList[1:]:
- if FlagDict[Tool]['Macro'] in item:
- Str = self._AutoGenObject._BuildOption[Tool]['FLAGS']
- for Option in self._AutoGenObject.BuildOption.keys():
- for Attr in self._AutoGenObject.BuildOption[Option]:
- if Str.find(Option + '_' + Attr) != -1:
- Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
- while(Str.find('$(') != -1):
- for macro in self._AutoGenObject.Macros.keys():
- MacroName = '$('+ macro + ')'
- if (Str.find(MacroName) != -1):
- Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
- break
- else:
- break
- SingleCommandLength += len(Str)
- elif '$(INC)' in item:
- SingleCommandLength += self._AutoGenObject.IncludePathLength + len(IncPrefix) * len(self._AutoGenObject._IncludePathList)
- elif item.find('$(') != -1:
- Str = item
- for Option in self._AutoGenObject.BuildOption.keys():
- for Attr in self._AutoGenObject.BuildOption[Option]:
- if Str.find(Option + '_' + Attr) != -1:
- Str = Str.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
- while(Str.find('$(') != -1):
- for macro in self._AutoGenObject.Macros.keys():
- MacroName = '$('+ macro + ')'
- if (Str.find(MacroName) != -1):
- Str = Str.replace(MacroName, self._AutoGenObject.Macros[macro])
- break
- else:
- break
- SingleCommandLength += len(Str)
-
- if SingleCommandLength > GlobalData.gCommandMaxLength:
- FlagDict[Tool]['Value'] = True
-
- # generate the response file content by combining the FLAGS and INC
- for Flag in FlagDict.keys():
- if FlagDict[Flag]['Value']:
- Key = Flag + '_RESP'
- RespMacro = FlagDict[Flag]['Macro'].replace('FLAGS', 'RESP')
- Value = self._AutoGenObject.BuildOption[Flag]['FLAGS']
- for inc in self._AutoGenObject._IncludePathList:
- Value += ' ' + IncPrefix + inc
- for Option in self._AutoGenObject.BuildOption.keys():
- for Attr in self._AutoGenObject.BuildOption[Option]:
- if Value.find(Option + '_' + Attr) != -1:
- Value = Value.replace('$(' + Option + '_' + Attr + ')', self._AutoGenObject.BuildOption[Option][Attr])
- while (Value.find('$(') != -1):
- for macro in self._AutoGenObject.Macros.keys():
- MacroName = '$('+ macro + ')'
- if (Value.find(MacroName) != -1):
- Value = Value.replace(MacroName, self._AutoGenObject.Macros[macro])
- break
- else:
- break
-
- if self._AutoGenObject.ToolChainFamily == 'GCC':
- RespDict[Key] = Value.replace('\\', '/')
- else:
- RespDict[Key] = Value
- for Target in BuildTargets:
- for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
- if FlagDict[Flag]['Macro'] in SingleCommand:
- BuildTargets[Target].Commands[i] = SingleCommand.replace('$(INC)','').replace(FlagDict[Flag]['Macro'], RespMacro)
- return RespDict
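- # Illustrative summary (not part of the original file): if the fully expanded command
- # line for, say, the CC tool exceeds GlobalData.gCommandMaxLength, the returned dict
- # maps 'CC_RESP' to the expanded CC_FLAGS plus every include path, and each affected
- # build command has '$(CC_FLAGS)' rewritten to '$(CC_RESP)' with '$(INC)' removed, so
- # _CreateTemplateDict can place those options into a response file instead.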
-
- def ProcessBuildTargetList(self):
- #
- # Search dependency file list for each source file
- #
- ForceIncludedFile = []
- for File in self._AutoGenObject.AutoGenFileList:
- if File.Ext == '.h':
- ForceIncludedFile.append(File)
- SourceFileList = []
- for Target in self._AutoGenObject.IntroTargetList:
- SourceFileList.extend(Target.Inputs)
-
- self.FileDependency = self.GetFileDependency(
- SourceFileList,
- ForceIncludedFile,
- self._AutoGenObject.IncludePathList + self._AutoGenObject.BuildOptionIncPathList
- )
- DepSet = None
- for File in self.FileDependency:
- if not self.FileDependency[File]:
- self.FileDependency[File] = ['$(FORCE_REBUILD)']
- continue
-
- self._AutoGenObject.AutoGenDepSet |= set(self.FileDependency[File])
-
- # skip non-C files
- if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":
- continue
- elif DepSet == None:
- DepSet = set(self.FileDependency[File])
- else:
- DepSet &= set(self.FileDependency[File])
- # in case nothing in SourceFileList
- if DepSet == None:
- DepSet = set()
- #
- # Extract the list of common files from the dependency files
- #
- for File in DepSet:
- self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros))
-
- for File in self.FileDependency:
- # skip non-C files
- if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c":
- continue
- NewDepSet = set(self.FileDependency[File])
- NewDepSet -= DepSet
- self.FileDependency[File] = ["$(COMMON_DEPS)"] + list(NewDepSet)
-
- # Convert target description object to target string in makefile
- for Type in self._AutoGenObject.Targets:
- for T in self._AutoGenObject.Targets[Type]:
- # Generate related macros if needed
- if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros:
- self.FileListMacros[T.FileListMacro] = []
- if T.GenListFile and T.ListFileMacro not in self.ListFileMacros:
- self.ListFileMacros[T.ListFileMacro] = []
- if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros:
- self.ListFileMacros[T.IncListFileMacro] = []
-
- Deps = []
- # Add force-dependencies
- for Dep in T.Dependencies:
- Deps.append(self.PlaceMacro(str(Dep), self.Macros))
- # Add inclusion-dependencies
- if len(T.Inputs) == 1 and T.Inputs[0] in self.FileDependency:
- for F in self.FileDependency[T.Inputs[0]]:
- Deps.append(self.PlaceMacro(str(F), self.Macros))
- # Add source-dependencies
- for F in T.Inputs:
- NewFile = self.PlaceMacro(str(F), self.Macros)
- # In order to use file list macro as dependency
- if T.GenListFile:
- # gnu tools need forward slash path separator, even on Windows
- self.ListFileMacros[T.ListFileMacro].append(str(F).replace ('\\', '/'))
- self.FileListMacros[T.FileListMacro].append(NewFile)
- elif T.GenFileListMacro:
- self.FileListMacros[T.FileListMacro].append(NewFile)
- else:
- Deps.append(NewFile)
-
- # Use file list macro as dependency
- if T.GenFileListMacro:
- Deps.append("$(%s)" % T.FileListMacro)
-
- TargetDict = {
- "target" : self.PlaceMacro(T.Target.Path, self.Macros),
- "cmd" : "\n\t".join(T.Commands),
- "deps" : Deps
- }
- self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict))
-
- ## For creating makefile targets for dependent libraries
- def ProcessDependentLibrary(self):
- for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
- if not LibraryAutoGen.IsBinaryModule:
- self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))
-
- ## Return a list containing source file's dependencies
- #
- # @param FileList The list of source files
- # @param ForceInculeList The list of files which will be forcibly included
- # @param SearchPathList The list of search paths
- #
- # @retval dict The mapping between source file path and its dependencies
- #
- def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):
- Dependency = {}
- for F in FileList:
- Dependency[F] = self.GetDependencyList(F, ForceInculeList, SearchPathList)
- return Dependency
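- # Illustrative result (hypothetical paths, not from the original file):
- # GetFileDependency([Foo.c], [AutoGen.h], SearchPaths) might return
- #   { Foo.c : [AutoGen.h, Foo.h, Library/Bar.h] }
- # i.e. each source file mapped to every header reachable through #include plus the
- # force-included files.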
-
- ## Find dependencies for one source file
- #
- # By recursively searching the "#include" directives in the file, find all the
- # files needed by the given source file. The dependencies will only be searched
- # for in the given search path list.
- #
- # @param File The source file
- # @param ForceList The list of files which will be forcibly included
- # @param SearchPathList The list of search paths
- #
- # @retval list The list of files the given source file depends on
- #
- def GetDependencyList(self, File, ForceList, SearchPathList):
- EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File)
- FileStack = [File] + ForceList
- DependencySet = set()
-
- if self._AutoGenObject.Arch not in gDependencyDatabase:
- gDependencyDatabase[self._AutoGenObject.Arch] = {}
- DepDb = gDependencyDatabase[self._AutoGenObject.Arch]
-
- while len(FileStack) > 0:
- F = FileStack.pop()
-
- FullPathDependList = []
- if F in self.FileCache:
- for CacheFile in self.FileCache[F]:
- FullPathDependList.append(CacheFile)
- if CacheFile not in DependencySet:
- FileStack.append(CacheFile)
- DependencySet.update(FullPathDependList)
- continue
-
- CurrentFileDependencyList = []
- if F in DepDb:
- CurrentFileDependencyList = DepDb[F]
- else:
- try:
- Fd = open(F.Path, 'r')
- except BaseException, X:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path + "\n\t" + str(X))
-
- FileContent = Fd.read()
- Fd.close()
- if len(FileContent) == 0:
- continue
-
- # Detect a UTF-16 BOM; FileContent is a byte string here, so compare characters
- if FileContent[0] == '\xff' or FileContent[0] == '\xfe':
- FileContent = unicode(FileContent, "utf-16")
- IncludedFileList = gIncludePattern.findall(FileContent)
-
- for Inc in IncludedFileList:
- Inc = Inc.strip()
- # if a macro is used to reference the header file, expand it
- HeaderList = gMacroPattern.findall(Inc)
- if len(HeaderList) == 1 and len(HeaderList[0]) == 2:
- HeaderType = HeaderList[0][0]
- HeaderKey = HeaderList[0][1]
- if HeaderType in gIncludeMacroConversion:
- Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey}
- else:
- # an unknown macro is used in #include; always rebuild the file by
- # returning an empty dependency list
- self.FileCache[File] = []
- return []
- Inc = os.path.normpath(Inc)
- CurrentFileDependencyList.append(Inc)
- DepDb[F] = CurrentFileDependencyList
-
- CurrentFilePath = F.Dir
- PathList = [CurrentFilePath] + SearchPathList
- for Inc in CurrentFileDependencyList:
- for SearchPath in PathList:
- FilePath = os.path.join(SearchPath, Inc)
- if FilePath in gIsFileMap:
- if not gIsFileMap[FilePath]:
- continue
- # Calling isfile too many times slows things down, so cache the result in gIsFileMap.
- elif not os.path.isfile(FilePath):
- gIsFileMap[FilePath] = False
- continue
- else:
- gIsFileMap[FilePath] = True
- FilePath = PathClass(FilePath)
- FullPathDependList.append(FilePath)
- if FilePath not in DependencySet:
- FileStack.append(FilePath)
- break
- else:
- EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found "\
- "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList)))
-
- self.FileCache[F] = FullPathDependList
- DependencySet.update(FullPathDependList)
-
- DependencySet.update(ForceList)
- if File in DependencySet:
- DependencySet.remove(File)
- DependencyList = list(DependencySet) # remove duplicate ones
-
- return DependencyList
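- # Illustrative note (not part of the original file): the returned list is de-duplicated,
- # includes the force-included files, never contains File itself, and per-file results are
- # cached in self.FileCache and the per-arch gDependencyDatabase so repeated scans of the
- # same header are avoided.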
-
- _TemplateDict = property(_CreateTemplateDict)
-
-## CustomMakefile class
-#
- # This class encapsulates the makefile and its generation for a module. It uses a template
- # to generate the content of the makefile, which is obtained from the ModuleAutoGen object.
-#
-class CustomMakefile(BuildFile):
- ## template used to generate the makefile for module with custom makefile
- _TEMPLATE_ = TemplateString('''\
-${makefile_header}
-
-#
-# Platform Macro Definition
-#
-PLATFORM_NAME = ${platform_name}
-PLATFORM_GUID = ${platform_guid}
-PLATFORM_VERSION = ${platform_version}
-PLATFORM_RELATIVE_DIR = ${platform_relative_directory}
-PLATFORM_DIR = ${platform_dir}
-PLATFORM_OUTPUT_DIR = ${platform_output_directory}
-
-#
-# Module Macro Definition
-#
-MODULE_NAME = ${module_name}
-MODULE_GUID = ${module_guid}
-MODULE_NAME_GUID = ${module_name_guid}
-MODULE_VERSION = ${module_version}
-MODULE_TYPE = ${module_type}
-MODULE_FILE = ${module_file}
-MODULE_FILE_BASE_NAME = ${module_file_base_name}
-BASE_NAME = $(MODULE_NAME)
-MODULE_RELATIVE_DIR = ${module_relative_directory}
-MODULE_DIR = ${module_dir}
-
-#
-# Build Configuration Macro Definition
-#
-ARCH = ${architecture}
-TOOLCHAIN = ${toolchain_tag}
-TOOLCHAIN_TAG = ${toolchain_tag}
-TARGET = ${build_target}
-
-#
-# Build Directory Macro Definition
-#
-# PLATFORM_BUILD_DIR = ${platform_build_directory}
-BUILD_DIR = ${platform_build_directory}
-BIN_DIR = $(BUILD_DIR)${separator}${architecture}
-LIB_DIR = $(BIN_DIR)
-MODULE_BUILD_DIR = ${module_build_directory}
-OUTPUT_DIR = ${module_output_directory}
-DEBUG_DIR = ${module_debug_directory}
-DEST_DIR_OUTPUT = $(OUTPUT_DIR)
-DEST_DIR_DEBUG = $(DEBUG_DIR)
-
-#
-# Tools definitions specific to this module
-#
-${BEGIN}${module_tool_definitions}
-${END}
-MAKE_FILE = ${makefile_path}
-
-#
-# Shell Command Macro
-#
-${BEGIN}${shell_command_code} = ${shell_command}
-${END}
-
-${custom_makefile_content}
-
-#
- # Target used when called from the platform makefile, which bypasses the build of dependent libraries
-#
-
-pbuild: init all
-
-
-#
-# ModuleTarget
-#
-
-mbuild: init all
-
-#
- # Build target used in multi-thread build mode, in which no init target is needed
-#
-
-tbuild: all
-
-#
-# Initialization target: print build information and create necessary directories
-#
-init:
-\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)]
-${BEGIN}\t-@${create_directory_command}\n${END}\
-
-''')
-
- ## Constructor of CustomMakefile
- #
- # @param ModuleAutoGen Object of ModuleAutoGen class
- #
- def __init__(self, ModuleAutoGen):
- BuildFile.__init__(self, ModuleAutoGen)
- self.PlatformInfo = self._AutoGenObject.PlatformInfo
- self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
-
- # Compose a dict object containing information used to do replacement in template
- def _CreateTemplateDict(self):
- Separator = self._SEP_[self._FileType]
- if self._FileType not in self._AutoGenObject.CustomMakefile:
- EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType,
- ExtraData="[%s]" % str(self._AutoGenObject))
- MakefilePath = mws.join(
- self._AutoGenObject.WorkspaceDir,
- self._AutoGenObject.CustomMakefile[self._FileType]
- )
- try:
- CustomMakefile = open(MakefilePath, 'r').read()
- except:
- EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(self._AutoGenObject),
- ExtraData=self._AutoGenObject.CustomMakefile[self._FileType])
-
- # tools definitions
- ToolsDef = []
- for Tool in self._AutoGenObject.BuildOption:
- # Don't generate MAKE_FLAGS in the makefile. It's put in an environment variable.
- if Tool == "MAKE":
- continue
- for Attr in self._AutoGenObject.BuildOption[Tool]:
- if Attr == "FAMILY":
- continue
- elif Attr == "PATH":
- ToolsDef.append("%s = %s" % (Tool, self._AutoGenObject.BuildOption[Tool][Attr]))
- else:
- ToolsDef.append("%s_%s = %s" % (Tool, Attr, self._AutoGenObject.BuildOption[Tool][Attr]))
- ToolsDef.append("")
-
- MakefileName = self._FILE_NAME_[self._FileType]
- MakefileTemplateDict = {
- "makefile_header" : self._FILE_HEADER_[self._FileType],
- "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName),
- "platform_name" : self.PlatformInfo.Name,
- "platform_guid" : self.PlatformInfo.Guid,
- "platform_version" : self.PlatformInfo.Version,
- "platform_relative_directory": self.PlatformInfo.SourceDir,
- "platform_output_directory" : self.PlatformInfo.OutputDir,
- "platform_dir" : self._AutoGenObject.Macros["PLATFORM_DIR"],
-
- "module_name" : self._AutoGenObject.Name,
- "module_guid" : self._AutoGenObject.Guid,
- "module_name_guid" : self._AutoGenObject._GetUniqueBaseName(),
- "module_version" : self._AutoGenObject.Version,
- "module_type" : self._AutoGenObject.ModuleType,
- "module_file" : self._AutoGenObject.MetaFile,
- "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName,
- "module_relative_directory" : self._AutoGenObject.SourceDir,
- "module_dir" : mws.join (self._AutoGenObject.WorkspaceDir, self._AutoGenObject.SourceDir),
-
- "architecture" : self._AutoGenObject.Arch,
- "toolchain_tag" : self._AutoGenObject.ToolChain,
- "build_target" : self._AutoGenObject.BuildTarget,
-
- "platform_build_directory" : self.PlatformInfo.BuildDir,
- "module_build_directory" : self._AutoGenObject.BuildDir,
- "module_output_directory" : self._AutoGenObject.OutputDir,
- "module_debug_directory" : self._AutoGenObject.DebugDir,
-
- "separator" : Separator,
- "module_tool_definitions" : ToolsDef,
-
- "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
- "shell_command" : self._SHELL_CMD_[self._FileType].values(),
-
- "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
- "custom_makefile_content" : CustomMakefile
- }
-
- return MakefileTemplateDict
-
- _TemplateDict = property(_CreateTemplateDict)
-
-## PlatformMakefile class
-#
- # This class encapsulates the makefile and its generation for a platform. It uses
- # a template to generate the content of the makefile, which is obtained from the
- # PlatformAutoGen object.
-#
-class PlatformMakefile(BuildFile):
- ## template used to generate the makefile for platform
- _TEMPLATE_ = TemplateString('''\
-${makefile_header}
-
-#
-# Platform Macro Definition
-#
-PLATFORM_NAME = ${platform_name}
-PLATFORM_GUID = ${platform_guid}
-PLATFORM_VERSION = ${platform_version}
-PLATFORM_FILE = ${platform_file}
-PLATFORM_DIR = ${platform_dir}
-PLATFORM_OUTPUT_DIR = ${platform_output_directory}
-
-#
-# Build Configuration Macro Definition
-#
-TOOLCHAIN = ${toolchain_tag}
-TOOLCHAIN_TAG = ${toolchain_tag}
-TARGET = ${build_target}
-
-#
-# Build Directory Macro Definition
-#
-BUILD_DIR = ${platform_build_directory}
-FV_DIR = ${platform_build_directory}${separator}FV
-
-#
-# Shell Command Macro
-#
-${BEGIN}${shell_command_code} = ${shell_command}
-${END}
-
-MAKE = ${make_path}
-MAKE_FILE = ${makefile_path}
-
-#
-# Default target
-#
-all: init build_libraries build_modules
-
-#
-# Initialization target: print build information and create necessary directories
-#
-init:
-\t-@echo Building ... $(PLATFORM_FILE) [${build_architecture_list}]
-\t${BEGIN}-@${create_directory_command}
-\t${END}
-#
-# library build target
-#
-libraries: init build_libraries
-
-#
-# module build target
-#
-modules: init build_libraries build_modules
-
-#
-# Build all libraries:
-#
-build_libraries:
-${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild
-${END}\t@cd $(BUILD_DIR)
-
-#
-# Build all modules:
-#
-build_modules:
-${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild
-${END}\t@cd $(BUILD_DIR)
-
-#
-# Clean intermediate files
-#
-clean:
-\t${BEGIN}-@${library_build_command} clean
-\t${END}${BEGIN}-@${module_build_command} clean
-\t${END}@cd $(BUILD_DIR)
-
-#
- # Clean all generated files except the makefile
-#
-cleanall:
-${BEGIN}\t${cleanall_command}
-${END}
-
-#
-# Clean all library files
-#
-cleanlib:
-\t${BEGIN}-@${library_build_command} cleanall
-\t${END}@cd $(BUILD_DIR)\n
-''')
-
- ## Constructor of PlatformMakefile
- #
- # @param PlatformAutoGen Object of PlatformAutoGen class
- #
- def __init__(self, PlatformAutoGen):
- BuildFile.__init__(self, PlatformAutoGen)
- self.ModuleBuildCommandList = []
- self.ModuleMakefileList = []
- self.IntermediateDirectoryList = []
- self.ModuleBuildDirectoryList = []
- self.LibraryBuildDirectoryList = []
- self.LibraryMakeCommandList = []
-
- # Compose a dict object containing information used to do replacement in template
- def _CreateTemplateDict(self):
- Separator = self._SEP_[self._FileType]
-
- PlatformInfo = self._AutoGenObject
- if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]:
- EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
- ExtraData="[%s]" % str(self._AutoGenObject))
-
- self.IntermediateDirectoryList = ["$(BUILD_DIR)"]
- self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList()
- self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList()
-
- MakefileName = self._FILE_NAME_[self._FileType]
- LibraryMakefileList = []
- LibraryMakeCommandList = []
- for D in self.LibraryBuildDirectoryList:
- D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir})
- Makefile = os.path.join(D, MakefileName)
- Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile}
- LibraryMakefileList.append(Makefile)
- LibraryMakeCommandList.append(Command)
- self.LibraryMakeCommandList = LibraryMakeCommandList
-
- ModuleMakefileList = []
- ModuleMakeCommandList = []
- for D in self.ModuleBuildDirectoryList:
- D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir})
- Makefile = os.path.join(D, MakefileName)
- Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile}
- ModuleMakefileList.append(Makefile)
- ModuleMakeCommandList.append(Command)
-
- MakefileTemplateDict = {
- "makefile_header" : self._FILE_HEADER_[self._FileType],
- "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
- "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"],
- "makefile_name" : MakefileName,
- "platform_name" : PlatformInfo.Name,
- "platform_guid" : PlatformInfo.Guid,
- "platform_version" : PlatformInfo.Version,
- "platform_file" : self._AutoGenObject.MetaFile,
- "platform_relative_directory": PlatformInfo.SourceDir,
- "platform_output_directory" : PlatformInfo.OutputDir,
- "platform_build_directory" : PlatformInfo.BuildDir,
- "platform_dir" : self._AutoGenObject.Macros["PLATFORM_DIR"],
-
- "toolchain_tag" : PlatformInfo.ToolChain,
- "build_target" : PlatformInfo.BuildTarget,
- "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
- "shell_command" : self._SHELL_CMD_[self._FileType].values(),
- "build_architecture_list" : self._AutoGenObject.Arch,
- "architecture" : self._AutoGenObject.Arch,
- "separator" : Separator,
- "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
- "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
- "library_makefile_list" : LibraryMakefileList,
- "module_makefile_list" : ModuleMakefileList,
- "library_build_command" : LibraryMakeCommandList,
- "module_build_command" : ModuleMakeCommandList,
- }
-
- return MakefileTemplateDict
-
- ## Get the list of root directories for the intermediate files of all module builds
- #
- # @retval list The list of directories
- #
- def GetModuleBuildDirectoryList(self):
- DirList = []
- for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
- if not ModuleAutoGen.IsBinaryModule:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
- return DirList
-
- ## Get the list of root directories for the intermediate files of all library builds
- #
- # @retval list The list of directories
- #
- def GetLibraryBuildDirectoryList(self):
- DirList = []
- for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
- if not LibraryAutoGen.IsBinaryModule:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
- return DirList
-
- _TemplateDict = property(_CreateTemplateDict)
-
-## TopLevelMakefile class
-#
- # This class encapsulates the makefile and its generation for the entrance (top-level)
- # makefile. It uses a template to generate the content of the makefile, which is
- # obtained from the WorkspaceAutoGen object.
-#
-class TopLevelMakefile(BuildFile):
- ## template used to generate toplevel makefile
- _TEMPLATE_ = TemplateString('''${BEGIN}\tGenFds -f ${fdf_file} --conf=${conf_directory} -o ${platform_build_directory} -t ${toolchain_tag} -b ${build_target} -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -C ${cap} ${END}${BEGIN} -D ${macro} ${END}''')
-
- ## Constructor of TopLevelMakefile
- #
- # @param Workspace Object of WorkspaceAutoGen class
- #
- def __init__(self, Workspace):
- BuildFile.__init__(self, Workspace)
- self.IntermediateDirectoryList = []
-
- # Compose a dict object containing information used to do replacement in template
- def _CreateTemplateDict(self):
- Separator = self._SEP_[self._FileType]
-
- # any platform autogen object is ok because we just need common information
- PlatformInfo = self._AutoGenObject
-
- if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]:
- EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!",
- ExtraData="[%s]" % str(self._AutoGenObject))
-
- for Arch in PlatformInfo.ArchList:
- self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch]))
- self.IntermediateDirectoryList.append("$(FV_DIR)")
-
- # TRICK: do not generate the GenFds call in the makefile if there is no FDF file
- MacroList = []
- if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "":
- FdfFileList = [PlatformInfo.FdfFile]
- # macros passed to GenFds
- MacroList.append('"%s=%s"' % ("EFI_SOURCE", GlobalData.gEfiSource.replace('\\', '\\\\')))
- MacroList.append('"%s=%s"' % ("EDK_SOURCE", GlobalData.gEdkSource.replace('\\', '\\\\')))
- MacroDict = {}
- MacroDict.update(GlobalData.gGlobalDefines)
- MacroDict.update(GlobalData.gCommandLineDefines)
- MacroDict.pop("EFI_SOURCE", "dummy")
- MacroDict.pop("EDK_SOURCE", "dummy")
- for MacroName in MacroDict:
- if MacroDict[MacroName] != "":
- MacroList.append('"%s=%s"' % (MacroName, MacroDict[MacroName].replace('\\', '\\\\')))
- else:
- MacroList.append('"%s"' % MacroName)
- else:
- FdfFileList = []
-
- # pass extra common options to external program called in makefile, currently GenFds.exe
- ExtraOption = ''
- LogLevel = EdkLogger.GetLevel()
- if LogLevel == EdkLogger.VERBOSE:
- ExtraOption += " -v"
- elif LogLevel <= EdkLogger.DEBUG_9:
- ExtraOption += " -d %d" % (LogLevel - 1)
- elif LogLevel == EdkLogger.QUIET:
- ExtraOption += " -q"
-
- if GlobalData.gCaseInsensitive:
- ExtraOption += " -c"
-
- if GlobalData.gIgnoreSource:
- ExtraOption += " --ignore-sources"
-
- if GlobalData.BuildOptionPcd:
- for index, option in enumerate(GlobalData.gCommand):
- if "--pcd" == option and GlobalData.gCommand[index+1]:
- pcdName, pcdValue = GlobalData.gCommand[index+1].split('=')
- if pcdValue.startswith('H'):
- pcdValue = 'H' + '"' + pcdValue[1:] + '"'
- ExtraOption += " --pcd " + pcdName + '=' + pcdValue
- elif pcdValue.startswith('L'):
- pcdValue = 'L' + '"' + pcdValue[1:] + '"'
- ExtraOption += " --pcd " + pcdName + '=' + pcdValue
- else:
- ExtraOption += " --pcd " + GlobalData.gCommand[index+1]
-
- MakefileName = self._FILE_NAME_[self._FileType]
- SubBuildCommandList = []
- for A in PlatformInfo.ArchList:
- Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)}
- SubBuildCommandList.append(Command)
-
- MakefileTemplateDict = {
- "makefile_header" : self._FILE_HEADER_[self._FileType],
- "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName),
- "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"],
- "platform_name" : PlatformInfo.Name,
- "platform_guid" : PlatformInfo.Guid,
- "platform_version" : PlatformInfo.Version,
- "platform_build_directory" : PlatformInfo.BuildDir,
- "conf_directory" : GlobalData.gConfDirectory,
-
- "toolchain_tag" : PlatformInfo.ToolChain,
- "build_target" : PlatformInfo.BuildTarget,
- "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(),
- "shell_command" : self._SHELL_CMD_[self._FileType].values(),
- 'arch' : list(PlatformInfo.ArchList),
- "build_architecture_list" : ','.join(PlatformInfo.ArchList),
- "separator" : Separator,
- "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList),
- "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList),
- "sub_build_command" : SubBuildCommandList,
- "fdf_file" : FdfFileList,
- "active_platform" : str(PlatformInfo),
- "fd" : PlatformInfo.FdTargetList,
- "fv" : PlatformInfo.FvTargetList,
- "cap" : PlatformInfo.CapTargetList,
- "extra_options" : ExtraOption,
- "macro" : MacroList,
- }
-
- return MakefileTemplateDict
-
- ## Get the list of root directories for the intermediate files of all module builds
- #
- # @retval list The list of directories
- #
- def GetModuleBuildDirectoryList(self):
- DirList = []
- for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList:
- if not ModuleAutoGen.IsBinaryModule:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir))
- return DirList
-
- ## Get the list of root directories for the intermediate files of all library builds
- #
- # @retval list The list of directories
- #
- def GetLibraryBuildDirectoryList(self):
- DirList = []
- for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList:
- if not LibraryAutoGen.IsBinaryModule:
- DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir))
- return DirList
-
- _TemplateDict = property(_CreateTemplateDict)
-
-# This acts like the main() function for the script, unless it is 'import'ed into another script.
-if __name__ == '__main__':
- pass
-
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py
deleted file mode 100644
index fc9ac7178f..0000000000
--- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py
+++ /dev/null
@@ -1,1703 +0,0 @@
-## @file
-# Routines for generating Pcd Database
-#
-# Copyright (c) 2013 - 2016, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-from StringIO import StringIO
-from Common.Misc import *
-from Common.String import StringToArray
-from struct import pack
-from ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER
-from ValidCheckingInfoObject import VAR_CHECK_PCD_VARIABLE_TAB
-from ValidCheckingInfoObject import VAR_VALID_OBJECT_FACTORY
-from Common.VariableAttributes import VariableAttributes
-
-DATABASE_VERSION = 6
-
-gPcdDatabaseAutoGenC = TemplateString("""
-//
-// External PCD database debug information
-//
-#if 0
-${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
- /* SkuIdTable */
- { ${BEGIN}${SKUID_VALUE}, ${END} },
-${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */
-${END}
-${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */
-${END}
-${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */
-${END}
-${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */
-${END}
- /* VPD */
-${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */
-${END}
- /* ExMapTable */
- {
-${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} },
-${END}
- },
- /* LocalTokenNumberTable */
- {
-${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE},
-${END}
- },
- /* GuidTable */
- {
-${BEGIN} ${GUID_STRUCTURE},
-${END}
- },
-${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */
-${END}
-${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}] */
- {
- ${VARIABLE_HEAD_VALUE}
- },
-${END}
-/* SkuHead */
- {
- ${BEGIN} offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}${VARDEF_HEADER}) | ${TOKEN_TYPE}, /* */
- offsetof (${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.SkuHead) /* */
- ${END}
- },
- /* StringTable */
-${BEGIN} ${STRING_TABLE_VALUE}, /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
-${END}
- /* SizeTable */
- {
-${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */
-${END}
- },
-${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */
-${END}
-${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */
-${END}
-${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */
-${END}
-${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */
-${END}
-${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */
-${END}
-${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */
-${END}
- ${SYSTEM_SKU_ID_VALUE}
-};
-#endif
-""")
-
-## Mapping between PCD driver type and EFI phase
-gPcdPhaseMap = {
- "PEI_PCD_DRIVER" : "PEI",
- "DXE_PCD_DRIVER" : "DXE"
-}
-
-gPcdDatabaseAutoGenH = TemplateString("""
-#define PCD_${PHASE}_SERVICE_DRIVER_VERSION ${SERVICE_DRIVER_VERSION}
-
-//
-// External PCD database debug information
-//
-#if 0
-#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE}
-#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE}
-#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE}
-#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE}
-#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER}
-#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE}
-#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER}
-#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE}
-#define ${PHASE}_SKU_HEAD_SIZE ${SKU_HEAD_SIZE}
-#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY}
-#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY}
-#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY}
-#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY}
-#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY}
-
-typedef struct {
- UINT64 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE];
-${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}];
-${END}
-${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64};
-${END}
-${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}];
-${END}
-${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32};
-${END}
-${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}];
-${END}
- DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE];
- UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE];
- GUID GuidTable[${PHASE}_GUID_TABLE_SIZE];
-${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}];
-${END}
-${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}_Variable_Header[${VARIABLE_HEAD_NUMSKUS_DECL}];
-${END}
-${BEGIN} SKU_HEAD SkuHead[${PHASE}_SKU_HEAD_SIZE];
-${END}
-${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */
-${END}
- SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE];
-${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}];
-${END}
-${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16};
-${END}
-${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}];
-${END}
-${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8};
-${END}
-${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}];
-${END}
-${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN};
-${END}
-${SYSTEM_SKU_ID}
-} ${PHASE}_PCD_DATABASE_INIT;
-
-typedef struct {
-${PCD_DATABASE_UNINIT_EMPTY}
-${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}];
-${END}
-${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}];
-${END}
-${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}];
-${END}
-${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}];
-${END}
-${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}];
-${END}
-} ${PHASE}_PCD_DATABASE_UNINIT;
-
-typedef struct {
- //GUID Signature; // PcdDataBaseGuid
- //UINT32 BuildVersion;
- //UINT32 Length;
- //SKU_ID SystemSkuId; // Current SkuId value.
- //UINT32 UninitDataBaseSize;// Total size for PCD those default value with 0.
- //TABLE_OFFSET LocalTokenNumberTableOffset;
- //TABLE_OFFSET ExMapTableOffset;
- //TABLE_OFFSET GuidTableOffset;
- //TABLE_OFFSET StringTableOffset;
- //TABLE_OFFSET SizeTableOffset;
- //TABLE_OFFSET SkuIdTableOffset;
- //TABLE_OFFSET PcdNameTableOffset;
- //UINT16 LocalTokenCount; // LOCAL_TOKEN_NUMBER for all
- //UINT16 ExTokenCount; // EX_TOKEN_NUMBER for DynamicEx
- //UINT16 GuidTableCount; // The Number of Guid in GuidTable
- //UINT8 Pad[2];
- ${PHASE}_PCD_DATABASE_INIT Init;
- ${PHASE}_PCD_DATABASE_UNINIT Uninit;
-} ${PHASE}_PCD_DATABASE;
-
-#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER)
-#endif
-""")
-
-
-gEmptyPcdDatabaseAutoGenC = TemplateString("""
-//
-// External PCD database debug information
-//
-#if 0
-${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = {
- /* SkuIdTable */
- { 0 },
- /* ExMapTable */
- {
- {0, 0, 0}
- },
- /* LocalTokenNumberTable */
- {
- 0
- },
- /* GuidTable */
- {
- {0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}}
- },
- /* StringTable */
- { 0 },
- /* SkuHead */
- {
- 0, 0
- },
- /* SizeTable */
- {
- 0, 0
- },
- ${SYSTEM_SKU_ID_VALUE}
-};
-#endif
-""")
-
-## PackGuid
-#
- # Pack the GUID value given in C structure format into a data array
-#
-# @param GuidStructureValue: The GUID value in C structure format
-#
- # @retval Buffer: a data array containing the GUID
-#
-def PackGuid(GuidStructureValue):
- GuidString = GuidStructureStringToGuidString(GuidStructureValue)
- Guid = GuidString.split('-')
- Buffer = pack('=LHHBBBBBBBB',
- int(Guid[0], 16),
- int(Guid[1], 16),
- int(Guid[2], 16),
- int(Guid[3][-4:-2], 16),
- int(Guid[3][-2:], 16),
- int(Guid[4][-12:-10], 16),
- int(Guid[4][-10:-8], 16),
- int(Guid[4][-8:-6], 16),
- int(Guid[4][-6:-4], 16),
- int(Guid[4][-4:-2], 16),
- int(Guid[4][-2:], 16)
- )
- return Buffer
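- # Example (illustrative, not part of the original file):
- # PackGuid("{0x12345678, 0x1234, 0x5678, {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}}")
- # returns a 16-byte buffer: the first three fields packed in native byte order
- # (little-endian on typical build hosts) followed by the eight tail bytes verbatim.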
-
-def toHex(s):
- lst = []
- for ch in s:
- hv = hex(ord(ch)).replace('0x', ' ')
- if len(hv) == 1:
- hv = '0'+hv
- lst.append(hv)
- if lst:
- return reduce(lambda x,y:x+y, lst)
- else:
- return 'empty'
-## DbItemList
-#
- # The class holds the Pcd database items. ItemSize, if not zero, should match the item datum type in the C structure.
- # When the structure is changed, remember to check the ItemSize and the related PackStr in PackData().
- # RawDataList is the raw data that may need some kind of calculation or transformation;
- # DataList corresponds to the data that needs to be written to the database. If DataList is not present,
- # RawDataList will be written to the database instead.
-#
-class DbItemList:
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- self.ItemSize = ItemSize
- self.DataList = DataList
- self.RawDataList = RawDataList
- self.ListSize = 0
-
- def GetInterOffset(self, Index):
- Offset = 0
- if self.ItemSize == 0:
- #
- # Variable length, need to calculate one by one
- #
- assert(Index < len(self.RawDataList))
- for ItemIndex in xrange(Index):
- Offset += len(self.RawDataList[ItemIndex])
- else:
- for Datas in self.RawDataList:
- Offset = self.ItemSize * Index
-
- return Offset
-
- def GetListSize(self):
- if self.ListSize:
- return self.ListSize
- if len(self.RawDataList) == 0:
- self.ListSize = 0
- return self.ListSize
- if self.ItemSize == 0:
- self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
- else:
- self.ListSize = self.ItemSize * len(self.RawDataList)
- return self.ListSize
-
- def PackData(self):
- if self.ItemSize == 8:
- PackStr = "=Q"
- elif self.ItemSize == 4:
- PackStr = "=L"
- elif self.ItemSize == 2:
- PackStr = "=H"
- elif self.ItemSize == 1:
- PackStr = "=B"
- elif self.ItemSize == 0:
- PackStr = "=B"
- elif self.ItemSize == 16:
- # pack Guid
- PackStr = ''
- else:
- # should not reach here
- assert(False)
-
- Buffer = ''
- for Datas in self.RawDataList:
- if type(Datas) in (list, tuple):
- for Data in Datas:
- if PackStr:
- Buffer += pack(PackStr, GetIntegerValue(Data))
- else:
- Buffer += PackGuid(Data)
- else:
- if PackStr:
- Buffer += pack(PackStr, GetIntegerValue(Datas))
- else:
- Buffer += PackGuid(Datas)
-
- return Buffer
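- # Example (illustrative, not part of the original file):
- # DbItemList(4, RawDataList=[1, 2]).PackData() packs each item with "=L" and yields
- # 8 bytes, matching GetListSize() == 4 * 2.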
-
-## DbExMapTblItemList
-#
-# The class holds the ExMap table
-#
-class DbExMapTblItemList (DbItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
- def PackData(self):
- Buffer = ''
- PackStr = "=LHH"
- for Datas in self.RawDataList:
- Buffer += pack(PackStr,
- GetIntegerValue(Datas[0]),
- GetIntegerValue(Datas[1]),
- GetIntegerValue(Datas[2]))
- return Buffer
-
-## DbComItemList
-#
- # The DbComItemList is a special kind of DbItemList for the case where the size of the list
- # cannot be computed as ItemSize multiplied by the item count.
-#
-class DbComItemList (DbItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
- def GetInterOffset(self, Index):
- Offset = 0
- if self.ItemSize == 0:
- #
- # Variable length, need to calculate one by one.
- # The only variable-length table is the string table, which is not a composite item, so this should not be reached
- #
- assert(False)
- else:
- assert(Index < len(self.RawDataList))
- for ItemIndex in xrange(Index):
- Offset += len(self.RawDataList[ItemIndex]) * self.ItemSize
-
- return Offset
-
- def GetListSize(self):
- if self.ListSize:
- return self.ListSize
- if self.ItemSize == 0:
- assert(False)
- else:
- if len(self.RawDataList) == 0:
- self.ListSize = 0
- else:
- self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1]) * self.ItemSize
-
- return self.ListSize
-
- def PackData(self):
- if self.ItemSize == 8:
- PackStr = "=Q"
- elif self.ItemSize == 4:
- PackStr = "=L"
- elif self.ItemSize == 2:
- PackStr = "=H"
- elif self.ItemSize == 1:
- PackStr = "=B"
- elif self.ItemSize == 0:
- PackStr = "=B"
- else:
- assert(False)
-
- Buffer = ''
- for DataList in self.RawDataList:
- for Data in DataList:
- if type(Data) in (list, tuple):
- for SingleData in Data:
- Buffer += pack(PackStr, GetIntegerValue(SingleData))
- else:
- Buffer += pack(PackStr, GetIntegerValue(Data))
-
- return Buffer
-
-## DbVariableTableItemList
-#
-# The class holds the Variable header value table
-#
-class DbVariableTableItemList (DbComItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
- def PackData(self):
- PackStr = "=LLHHLHH"
- Buffer = ''
- for DataList in self.RawDataList:
- for Data in DataList:
- Buffer += pack(PackStr,
- GetIntegerValue(Data[0]),
- GetIntegerValue(Data[1]),
- GetIntegerValue(Data[2]),
- GetIntegerValue(Data[3]),
- GetIntegerValue(Data[4]),
- GetIntegerValue(Data[5]),
- GetIntegerValue(0))
- return Buffer
-
-class DbStringHeadTableItemList(DbItemList):
- def __init__(self,ItemSize,DataList=None,RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
-
- def GetInterOffset(self, Index):
- Offset = 0
- if self.ItemSize == 0:
- #
- # Variable length, need to calculate one by one
- #
- assert(Index < len(self.RawDataList))
- for ItemIndex in xrange(Index):
- Offset += len(self.RawDataList[ItemIndex])
- else:
- for innerIndex in range(Index):
- if type(self.RawDataList[innerIndex]) in (list, tuple):
- Offset += len(self.RawDataList[innerIndex]) * self.ItemSize
- else:
- Offset += self.ItemSize
-
- return Offset
-
- def GetListSize(self):
- if self.ListSize:
- return self.ListSize
- if len(self.RawDataList) == 0:
- self.ListSize = 0
- return self.ListSize
- if self.ItemSize == 0:
- self.ListSize = self.GetInterOffset(len(self.RawDataList) - 1) + len(self.RawDataList[len(self.RawDataList)-1])
- else:
- for Datas in self.RawDataList:
- if type(Datas) in (list, tuple):
- self.ListSize += len(Datas) * self.ItemSize
- else:
- self.ListSize += self.ItemSize
- return self.ListSize
-
-## DbSkuHeadTableItemList
-#
-# The class holds the Sku header value table
-#
-class DbSkuHeadTableItemList (DbItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
- def PackData(self):
- PackStr = "=LL"
- Buffer = ''
- for Data in self.RawDataList:
- Buffer += pack(PackStr,
- GetIntegerValue(Data[0]),
- GetIntegerValue(Data[1]))
- return Buffer
-
-## DbSizeTableItemList
-#
-# The class holds the size table
-#
-class DbSizeTableItemList (DbItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
- def GetListSize(self):
- length = 0
- for Data in self.RawDataList:
- length += (1 + len(Data[1]))
- return length * self.ItemSize
- def PackData(self):
- PackStr = "=H"
- Buffer = ''
- for Data in self.RawDataList:
- Buffer += pack(PackStr,
- GetIntegerValue(Data[0]))
- for subData in Data[1]:
- Buffer += pack(PackStr,
- GetIntegerValue(subData))
- return Buffer
-
-## DbStringItemList
-#
-# The class holds the string table
-#
-class DbStringItemList (DbComItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None, LenList=None):
- if DataList is None:
- DataList = []
- if RawDataList is None:
- RawDataList = []
- if LenList is None:
- LenList = []
-
- assert(len(RawDataList) == len(LenList))
- DataList = []
- # adjust DataList according to the LenList
- for Index in xrange(len(RawDataList)):
- Len = LenList[Index]
- RawDatas = RawDataList[Index]
- assert(Len >= len(RawDatas))
- ActualDatas = []
- for i in xrange(len(RawDatas)):
- ActualDatas.append(RawDatas[i])
- for i in xrange(len(RawDatas), Len):
- ActualDatas.append(0)
- DataList.append(ActualDatas)
- self.LenList = LenList
- DbComItemList.__init__(self, ItemSize, DataList, RawDataList)
- def GetInterOffset(self, Index):
- Offset = 0
-
- assert(Index < len(self.LenList))
- for ItemIndex in xrange(Index):
- Offset += self.LenList[ItemIndex]
-
- return Offset
-
- def GetListSize(self):
- if self.ListSize:
- return self.ListSize
-
- if len(self.LenList) == 0:
- self.ListSize = 0
- else:
- self.ListSize = self.GetInterOffset(len(self.LenList) - 1) + self.LenList[len(self.LenList)-1]
-
- return self.ListSize
-
- def PackData(self):
- self.RawDataList = self.DataList
- return DbComItemList.PackData(self)
-
-
-
- ## Find the index at which the items in two lists match their keys respectively
- #
- # @param Key1 The key used to search List1
- # @param List1 The list in which Key1 will be searched
- # @param Key2 The key used to search List2
- # @param List2 The list in which Key2 will be searched
-#
-# @retval Index The position inside the list where list1[Index] == Key1 and list2[Index] == Key2
-#
-def GetMatchedIndex(Key1, List1, Key2, List2):
- StartPos = 0
- while StartPos < len(List1):
- Index = List1.index(Key1, StartPos)
- if List2[Index] == Key2:
- return Index
- else:
- StartPos = Index + 1
-
- return -1
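- # Example (illustrative, not part of the original file):
- # GetMatchedIndex('a', ['a', 'b', 'a'], 2, [1, 2, 2]) returns 2: index 0 matches Key1
- # only, so the search resumes until both lists match at the same index.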
-
-
- ## Get the integer value from a string like "14U" or an integer like 2
- #
- # @param Input The object that may be either an integer value or a string
-#
-# @retval Value The integer value that the input represents
-#
-def GetIntegerValue(Input):
- if type(Input) in (int, long):
- return Input
- String = Input
- if String.endswith("U"):
- String = String[:-1]
- if String.endswith("ULL"):
- String = String[:-3]
- if String.endswith("LL"):
- String = String[:-2]
-
- if String.startswith("0x") or String.startswith("0X"):
- return int(String, 16)
- elif String == '':
- return 0
- else:
- return int(String)
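- # Examples (illustrative, not part of the original file):
- # GetIntegerValue("14U") == 14, GetIntegerValue("0x100ULL") == 256,
- # GetIntegerValue(2) == 2, GetIntegerValue("") == 0.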
-
-
- ## Convert a StringArray like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
- # to a list like [0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00]
-#
-# @param StringArray A string array like {0x36, 0x00, 0x34, 0x00, 0x21, 0x00, 0x36, 0x00, 0x34, 0x00, 0x00, 0x00}
-#
-# @retval A list object of integer items
-#
-def StringArrayToList(StringArray):
- StringArray = StringArray[1:-1]
- StringArray = '[' + StringArray + ']'
- return eval(StringArray)
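- # Example (illustrative, not part of the original file):
- # StringArrayToList("{0x36, 0x00, 0x34}") == [0x36, 0x00, 0x34]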
-
-
-## Convert TokenType String like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII" to TokenType value
-#
-# @param TokenType A TokenType string like "PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII"
-#
- # @retval An integer representation of the TokenType
-#
-def GetTokenTypeValue(TokenType):
- TokenTypeDict = {
- "PCD_TYPE_SHIFT":28,
- "PCD_TYPE_DATA":(0x0 << 28),
- "PCD_TYPE_HII":(0x8 << 28),
- "PCD_TYPE_VPD":(0x4 << 28),
- "PCD_TYPE_SKU_ENABLED":(0x2 << 28),
- "PCD_TYPE_STRING":(0x1 << 28),
-
- "PCD_DATUM_TYPE_SHIFT":24,
- "PCD_DATUM_TYPE_POINTER":(0x0 << 24),
- "PCD_DATUM_TYPE_UINT8":(0x1 << 24),
- "PCD_DATUM_TYPE_UINT16":(0x2 << 24),
- "PCD_DATUM_TYPE_UINT32":(0x4 << 24),
- "PCD_DATUM_TYPE_UINT64":(0x8 << 24),
-
- "PCD_DATUM_TYPE_SHIFT2":20,
- "PCD_DATUM_TYPE_UINT8_BOOLEAN":(0x1 << 20 | 0x1 << 24),
- }
- return eval(TokenType, TokenTypeDict)
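- # Example (illustrative, not part of the original file):
- # GetTokenTypeValue("PCD_DATUM_TYPE_UINT32 | PCD_TYPE_HII")
- # == (0x4 << 24) | (0x8 << 28) == 0x84000000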
-
- ## Construct the external Pcd database using data from Dict
- #
- # @param Dict A dictionary containing Pcd-related tables
-#
-# @retval Buffer A byte stream of the Pcd database
-#
-def BuildExDataBase(Dict):
- # init Db items
- InitValueUint64 = Dict['INIT_DB_VALUE_UINT64']
- DbInitValueUint64 = DbComItemList(8, RawDataList = InitValueUint64)
- VardefValueUint64 = Dict['VARDEF_DB_VALUE_UINT64']
- DbVardefValueUint64 = DbItemList(8, RawDataList = VardefValueUint64)
- InitValueUint32 = Dict['INIT_DB_VALUE_UINT32']
- DbInitValueUint32 = DbComItemList(4, RawDataList = InitValueUint32)
- VardefValueUint32 = Dict['VARDEF_DB_VALUE_UINT32']
- DbVardefValueUint32 = DbItemList(4, RawDataList = VardefValueUint32)
- VpdHeadValue = Dict['VPD_DB_VALUE']
- DbVpdHeadValue = DbComItemList(4, RawDataList = VpdHeadValue)
- ExMapTable = zip(Dict['EXMAPPING_TABLE_EXTOKEN'], Dict['EXMAPPING_TABLE_LOCAL_TOKEN'], Dict['EXMAPPING_TABLE_GUID_INDEX'])
- DbExMapTable = DbExMapTblItemList(8, RawDataList = ExMapTable)
- LocalTokenNumberTable = Dict['LOCAL_TOKEN_NUMBER_DB_VALUE']
- DbLocalTokenNumberTable = DbItemList(4, RawDataList = LocalTokenNumberTable)
- GuidTable = Dict['GUID_STRUCTURE']
- DbGuidTable = DbItemList(16, RawDataList = GuidTable)
- StringHeadValue = Dict['STRING_DB_VALUE']
- # DbItemList to DbStringHeadTableItemList
- DbStringHeadValue = DbStringHeadTableItemList(4, RawDataList = StringHeadValue)
- VariableTable = Dict['VARIABLE_DB_VALUE']
- DbVariableTable = DbVariableTableItemList(20, RawDataList = VariableTable)
- NumberOfSkuEnabledPcd = GetIntegerValue(Dict['SKU_HEAD_SIZE'])
- Dict['SKUHEAD_TABLE_VALUE'] = [(0,0) for i in xrange(NumberOfSkuEnabledPcd)]
- SkuTable = Dict['SKUHEAD_TABLE_VALUE'] # Generated later
- DbSkuTable = DbSkuHeadTableItemList(8, RawDataList = SkuTable)
- Dict['STRING_TABLE_DB_VALUE'] = [StringArrayToList(x) for x in Dict['STRING_TABLE_VALUE']]
-
- StringTableValue = Dict['STRING_TABLE_DB_VALUE']
- # when calculating the offset, StringTableLen should be used instead of StringTableValue, as a string's maximum length may differ from its actual length
- StringTableLen = Dict['STRING_TABLE_LENGTH']
- DbStringTableLen = DbStringItemList(0, RawDataList = StringTableValue, LenList = StringTableLen)
-
-
- PcdTokenTable = Dict['PCD_TOKENSPACE']
- PcdTokenLen = Dict['PCD_TOKENSPACE_LENGTH']
- PcdTokenTableValue = [StringArrayToList(x) for x in Dict['PCD_TOKENSPACE']]
- DbPcdTokenTable = DbStringItemList(0, RawDataList = PcdTokenTableValue, LenList = PcdTokenLen)
-
- PcdCNameTable = Dict['PCD_CNAME']
- PcdCNameLen = Dict['PCD_CNAME_LENGTH']
- PcdCNameTableValue = [StringArrayToList(x) for x in Dict['PCD_CNAME']]
- DbPcdCNameTable = DbStringItemList(0, RawDataList = PcdCNameTableValue, LenList = PcdCNameLen)
-
- PcdNameOffsetTable = Dict['PCD_NAME_OFFSET']
- DbPcdNameOffsetTable = DbItemList(4,RawDataList = PcdNameOffsetTable)
-
- SizeTableValue = zip(Dict['SIZE_TABLE_MAXIMUM_LENGTH'], Dict['SIZE_TABLE_CURRENT_LENGTH'])
- DbSizeTableValue = DbSizeTableItemList(2, RawDataList = SizeTableValue)
- InitValueUint16 = Dict['INIT_DB_VALUE_UINT16']
- DbInitValueUint16 = DbComItemList(2, RawDataList = InitValueUint16)
- VardefValueUint16 = Dict['VARDEF_DB_VALUE_UINT16']
- DbVardefValueUint16 = DbItemList(2, RawDataList = VardefValueUint16)
- InitValueUint8 = Dict['INIT_DB_VALUE_UINT8']
- DbInitValueUint8 = DbComItemList(1, RawDataList = InitValueUint8)
- VardefValueUint8 = Dict['VARDEF_DB_VALUE_UINT8']
- DbVardefValueUint8 = DbItemList(1, RawDataList = VardefValueUint8)
- InitValueBoolean = Dict['INIT_DB_VALUE_BOOLEAN']
- DbInitValueBoolean = DbComItemList(1, RawDataList = InitValueBoolean)
- VardefValueBoolean = Dict['VARDEF_DB_VALUE_BOOLEAN']
- DbVardefValueBoolean = DbItemList(1, RawDataList = VardefValueBoolean)
- SkuidValue = Dict['SKUID_VALUE']
- DbSkuidValue = DbItemList(8, RawDataList = SkuidValue)
- SkuIndexValue = Dict['SKU_INDEX_VALUE']
- DbSkuIndexValue = DbItemList(8,RawDataList = SkuIndexValue)
-
- # Uninit Db Items
- UnInitValueUint64 = Dict['UNINIT_GUID_DECL_UINT64']
- DbUnInitValueUint64 = DbItemList(8, RawDataList = UnInitValueUint64)
- UnInitValueUint32 = Dict['UNINIT_GUID_DECL_UINT32']
- DbUnInitValueUint32 = DbItemList(4, RawDataList = UnInitValueUint32)
- UnInitValueUint16 = Dict['UNINIT_GUID_DECL_UINT16']
- DbUnInitValueUint16 = DbItemList(2, RawDataList = UnInitValueUint16)
- UnInitValueUint8 = Dict['UNINIT_GUID_DECL_UINT8']
- DbUnInitValueUint8 = DbItemList(1, RawDataList = UnInitValueUint8)
- UnInitValueBoolean = Dict['UNINIT_GUID_DECL_BOOLEAN']
- DbUnInitValueBoolean = DbItemList(1, RawDataList = UnInitValueBoolean)
- PcdTokenNumberMap = Dict['PCD_ORDER_TOKEN_NUMBER_MAP']
-
- DbNameTotle = ["SkuidValue", "SkuIndexValue", "InitValueUint64", "VardefValueUint64", "InitValueUint32", "VardefValueUint32", "VpdHeadValue", "ExMapTable",
- "LocalTokenNumberTable", "GuidTable", "StringHeadValue", "PcdNameOffsetTable","VariableTable","SkuTable", "StringTableLen", "PcdTokenTable", "PcdCNameTable",
- "SizeTableValue", "InitValueUint16", "VardefValueUint16", "InitValueUint8", "VardefValueUint8", "InitValueBoolean",
- "VardefValueBoolean", "UnInitValueUint64", "UnInitValueUint32", "UnInitValueUint16", "UnInitValueUint8", "UnInitValueBoolean"]
-
- DbTotal = [SkuidValue, SkuIndexValue, InitValueUint64, VardefValueUint64, InitValueUint32, VardefValueUint32, VpdHeadValue, ExMapTable,
- LocalTokenNumberTable, GuidTable, StringHeadValue, PcdNameOffsetTable,VariableTable,SkuTable, StringTableLen, PcdTokenTable,PcdCNameTable,
- SizeTableValue, InitValueUint16, VardefValueUint16, InitValueUint8, VardefValueUint8, InitValueBoolean,
- VardefValueBoolean, UnInitValueUint64, UnInitValueUint32, UnInitValueUint16, UnInitValueUint8, UnInitValueBoolean]
- DbItemTotal = [DbSkuidValue, DbSkuIndexValue, DbInitValueUint64, DbVardefValueUint64, DbInitValueUint32, DbVardefValueUint32, DbVpdHeadValue, DbExMapTable,
- DbLocalTokenNumberTable, DbGuidTable, DbStringHeadValue, DbPcdNameOffsetTable,DbVariableTable,DbSkuTable, DbStringTableLen, DbPcdTokenTable, DbPcdCNameTable,
- DbSizeTableValue, DbInitValueUint16, DbVardefValueUint16, DbInitValueUint8, DbVardefValueUint8, DbInitValueBoolean,
- DbVardefValueBoolean, DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean]
-
- # VardefValueBoolean is the last table in the init table items
- InitTableNum = DbNameTotle.index("VardefValueBoolean") + 1
- # The FixedHeader length of the PCD_DATABASE_INIT, from Signature to Pad
- FixedHeaderLen = 72
-
- # Get offset of SkuId table in the database
- SkuIdTableOffset = FixedHeaderLen
- for DbIndex in xrange(len(DbTotal)):
- if DbTotal[DbIndex] is SkuidValue:
- break
- SkuIdTableOffset += DbItemTotal[DbIndex].GetListSize()
-
-
- # Get offset of SkuValue table in the database
- SkuTableOffset = FixedHeaderLen
- for DbIndex in xrange(len(DbTotal)):
- if DbTotal[DbIndex] is SkuTable:
- break
- elif DbItemTotal[DbIndex] is DbSkuIndexValue:
- if DbItemTotal[DbIndex].RawDataList:
- Count = 0
- for item in DbItemTotal[DbIndex].RawDataList:
- Count += len(item)
- SkuTableOffset += DbItemTotal[DbIndex].ItemSize * Count
- continue
- SkuTableOffset += DbItemTotal[DbIndex].GetListSize()
-
- # Fix up the LocalTokenNumberTable, SkuHeader table
- SkuHeaderIndex = 0
- if len(Dict['SKU_INDEX_VALUE']) > 0:
- SkuIndexIndexTable = [(0) for i in xrange(len(Dict['SKU_INDEX_VALUE']))]
- SkuIndexIndexTable[0] = 0 #Dict['SKU_INDEX_VALUE'][0][0]
- for i in range(1,len(Dict['SKU_INDEX_VALUE'])):
- SkuIndexIndexTable[i] = SkuIndexIndexTable[i-1]+Dict['SKU_INDEX_VALUE'][i-1][0] + 1
- for (LocalTokenNumberTableIndex, (Offset, Table)) in enumerate(LocalTokenNumberTable):
- DbIndex = 0
- DbOffset = FixedHeaderLen
- for DbIndex in xrange(len(DbTotal)):
- if DbTotal[DbIndex] is Table:
- DbOffset += DbItemTotal[DbIndex].GetInterOffset(Offset)
- break
- elif DbItemTotal[DbIndex] is DbSkuIndexValue:
- if DbItemTotal[DbIndex].RawDataList:
- Count = 0
- for item in DbItemTotal[DbIndex].RawDataList:
- Count += len(item)
- DbOffset += DbItemTotal[DbIndex].ItemSize * Count
- continue
- DbOffset += DbItemTotal[DbIndex].GetListSize()
- if DbIndex + 1 == InitTableNum:
- if DbOffset % 8:
- DbOffset += (8 - DbOffset % 8)
- else:
- assert(False)
-
- TokenTypeValue = Dict['TOKEN_TYPE'][LocalTokenNumberTableIndex]
- TokenTypeValue = GetTokenTypeValue(TokenTypeValue)
- LocalTokenNumberTable[LocalTokenNumberTableIndex] = DbOffset|int(TokenTypeValue)
- # if PCD_TYPE_SKU_ENABLED, then we need to fix up the SkuTable
-
- SkuIndexTabalOffset = SkuIdTableOffset + len(Dict['SKUID_VALUE']) * 8
- if (TokenTypeValue & (0x2 << 28)):
- SkuTable[SkuHeaderIndex] = (DbOffset|int(TokenTypeValue & ~(0x2<<28)), SkuIndexTabalOffset + SkuIndexIndexTable[PcdTokenNumberMap[LocalTokenNumberTableIndex]] * 8)
- LocalTokenNumberTable[LocalTokenNumberTableIndex] = (SkuTableOffset + SkuHeaderIndex * 8) | int(TokenTypeValue)
- SkuHeaderIndex += 1
-
-
- if SkuHeaderIndex == 0:
- SkuHeaderIndex = 1
- assert(SkuHeaderIndex == NumberOfSkuEnabledPcd)
-
- # resolve variable table offset
- for VariableEntries in VariableTable:
- skuindex = 0
- for VariableEntryPerSku in VariableEntries:
- (VariableHeadGuidIndex, VariableHeadStringIndex, SKUVariableOffset, VariableOffset, VariableRefTable, VariableAttribute) = VariableEntryPerSku[:]
- DbIndex = 0
- DbOffset = FixedHeaderLen
- for DbIndex in xrange(len(DbTotal)):
- if DbTotal[DbIndex] is VariableRefTable:
- DbOffset += DbItemTotal[DbIndex].GetInterOffset(VariableOffset)
- break
- elif DbItemTotal[DbIndex] is DbSkuIndexValue:
- if DbItemTotal[DbIndex].RawDataList:
- Count = 0
- for item in DbItemTotal[DbIndex].RawDataList:
- Count += len(item)
- DbOffset += DbItemTotal[DbIndex].ItemSize * Count
- continue
- DbOffset += DbItemTotal[DbIndex].GetListSize()
- if DbIndex + 1 == InitTableNum:
- if DbOffset % 8:
- DbOffset += (8 - DbOffset % 8)
- else:
- assert(False)
- if isinstance(VariableRefTable[0],list):
- DbOffset += skuindex * 4
- skuindex += 1
- if DbIndex >= InitTableNum:
- assert(False)
- VarAttr, VarProp = VariableAttributes.GetVarAttributes(VariableAttribute)
- VariableEntryPerSku[:] = (VariableHeadStringIndex, DbOffset, VariableHeadGuidIndex, SKUVariableOffset, VarAttr, VarProp)
-
- # calculate various table offset now
- DbTotalLength = FixedHeaderLen
- for DbIndex in xrange(len(DbItemTotal)):
- if DbItemTotal[DbIndex] is DbLocalTokenNumberTable:
- LocalTokenNumberTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbExMapTable:
- ExMapTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbGuidTable:
- GuidTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbStringTableLen:
- StringTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbSizeTableValue:
- SizeTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbSkuidValue:
- SkuIdTableOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbPcdNameOffsetTable:
- DbPcdNameOffset = DbTotalLength
- elif DbItemTotal[DbIndex] is DbSkuIndexValue:
- if DbItemTotal[DbIndex].RawDataList:
- Count = 0
- for item in DbItemTotal[DbIndex].RawDataList:
- Count += len(item)
- DbTotalLength += DbItemTotal[DbIndex].ItemSize * Count
- continue
-
- DbTotalLength += DbItemTotal[DbIndex].GetListSize()
- if not Dict['PCD_INFO_FLAG']:
- DbPcdNameOffset = 0
- LocalTokenCount = GetIntegerValue(Dict['LOCAL_TOKEN_NUMBER'])
- ExTokenCount = GetIntegerValue(Dict['EX_TOKEN_NUMBER'])
- GuidTableCount = GetIntegerValue(Dict['GUID_TABLE_SIZE'])
- SystemSkuId = GetIntegerValue(Dict['SYSTEM_SKU_ID_VALUE'])
- Pad = 0xDA
-
- UninitDataBaseSize = 0
- for Item in (DbUnInitValueUint64, DbUnInitValueUint32, DbUnInitValueUint16, DbUnInitValueUint8, DbUnInitValueBoolean):
- UninitDataBaseSize += Item.GetListSize()
-
- if (DbTotalLength - UninitDataBaseSize) % 8:
- DbTotalLength += (8 - (DbTotalLength - UninitDataBaseSize) % 8)
- # Construct the database buffer
- Guid = "{0x3c7d193c, 0x682c, 0x4c14, 0xa6, 0x8f, 0x55, 0x2d, 0xea, 0x4f, 0x43, 0x7e}"
- Guid = StringArrayToList(Guid)
- Buffer = pack('=LHHBBBBBBBB',
- Guid[0],
- Guid[1],
- Guid[2],
- Guid[3],
- Guid[4],
- Guid[5],
- Guid[6],
- Guid[7],
- Guid[8],
- Guid[9],
- Guid[10],
- )
-
- b = pack("=L", DATABASE_VERSION)
- Buffer += b
-
- b = pack('=L', DbTotalLength - UninitDataBaseSize)
-
- Buffer += b
- b = pack('=Q', SystemSkuId)
-
- Buffer += b
- b = pack('=L', UninitDataBaseSize)
-
- Buffer += b
- b = pack('=L', LocalTokenNumberTableOffset)
-
- Buffer += b
- b = pack('=L', ExMapTableOffset)
-
- Buffer += b
- b = pack('=L', GuidTableOffset)
-
- Buffer += b
- b = pack('=L', StringTableOffset)
-
- Buffer += b
- b = pack('=L', SizeTableOffset)
-
- Buffer += b
- b = pack('=L', SkuIdTableOffset)
-
- Buffer += b
- b = pack('=L', DbPcdNameOffset)
-
- Buffer += b
- b = pack('=H', LocalTokenCount)
-
- Buffer += b
- b = pack('=H', ExTokenCount)
-
- Buffer += b
- b = pack('=H', GuidTableCount)
-
- Buffer += b
- b = pack('=B', Pad)
-
- Buffer += b
- b = pack('=B', Pad)
-
- Buffer += b
-
- Index = 0
- for Item in DbItemTotal:
- Index +=1
- b = Item.PackData()
- Buffer += b
- if Index == InitTableNum:
- if len(Buffer) % 8:
- for num in range(8 - len(Buffer) % 8):
- b = pack('=B', Pad)
- Buffer += b
- break
- return Buffer
-
-## Create code for PCD database
-#
-# @param Info The ModuleAutoGen object
-# @param AutoGenC The TemplateString object for C code
-# @param AutoGenH The TemplateString object for header file
-#
-def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
- if Info.PcdIsDriver == "":
- return
- if Info.PcdIsDriver not in gPcdPhaseMap:
- EdkLogger.error("build", AUTOGEN_ERROR, "Not supported PcdIsDriver type:%s" % Info.PcdIsDriver,
- ExtraData="[%s]" % str(Info))
-
- AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, 'PEI')
- AutoGenH.Append(AdditionalAutoGenH.String)
-
- Phase = gPcdPhaseMap[Info.PcdIsDriver]
- if Phase == 'PEI':
- AutoGenC.Append(AdditionalAutoGenC.String)
-
- if Phase == 'DXE':
- AdditionalAutoGenH, AdditionalAutoGenC, PcdDbBuffer = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, Phase)
- AutoGenH.Append(AdditionalAutoGenH.String)
- AutoGenC.Append(AdditionalAutoGenC.String)
-
- if Info.IsBinaryModule:
- DbFileName = os.path.join(Info.PlatformInfo.BuildDir, "FV", Phase + "PcdDataBase.raw")
- else:
- DbFileName = os.path.join(Info.OutputDir, Phase + "PcdDataBase.raw")
- DbFile = StringIO()
- DbFile.write(PcdDbBuffer)
- Changed = SaveFileOnChange(DbFileName, DbFile.getvalue(), True)
-
-## Create PCD database in DXE or PEI phase
-#
-# @param Platform The platform object
-# @retval tuple Two TemplateString objects for C code and header file,
-# respectively
-#
-def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
- AutoGenC = TemplateString()
- AutoGenH = TemplateString()
-
- Dict = {
- 'PHASE' : Phase,
- 'SERVICE_DRIVER_VERSION' : DATABASE_VERSION,
- 'GUID_TABLE_SIZE' : '1U',
- 'STRING_TABLE_SIZE' : '1U',
- 'SKUID_TABLE_SIZE' : '1U',
- 'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '0U',
- 'LOCAL_TOKEN_NUMBER' : '0U',
- 'EXMAPPING_TABLE_SIZE' : '1U',
- 'EX_TOKEN_NUMBER' : '0U',
- 'SIZE_TABLE_SIZE' : '2U',
- 'SKU_HEAD_SIZE' : '1U',
- 'GUID_TABLE_EMPTY' : 'TRUE',
- 'STRING_TABLE_EMPTY' : 'TRUE',
- 'SKUID_TABLE_EMPTY' : 'TRUE',
- 'DATABASE_EMPTY' : 'TRUE',
- 'EXMAP_TABLE_EMPTY' : 'TRUE',
- 'PCD_DATABASE_UNINIT_EMPTY' : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is empty */',
- 'SYSTEM_SKU_ID' : ' SKU_ID SystemSkuId;',
- 'SYSTEM_SKU_ID_VALUE' : '0U'
- }
-
-
- SkuObj = SkuClass(Platform.Platform.AvilableSkuIds, Platform.Platform.SkuIds)
- Dict['SYSTEM_SKU_ID_VALUE'] = Platform.Platform.SkuIds[SkuObj.SystemSkuId]
-
- Dict['PCD_INFO_FLAG'] = Platform.Platform.PcdInfoFlag
-
- for DatumType in ['UINT64','UINT32','UINT16','UINT8','BOOLEAN', "VOID*"]:
- Dict['VARDEF_CNAME_' + DatumType] = []
- Dict['VARDEF_GUID_' + DatumType] = []
- Dict['VARDEF_SKUID_' + DatumType] = []
- Dict['VARDEF_VALUE_' + DatumType] = []
- Dict['VARDEF_DB_VALUE_' + DatumType] = []
- for Init in ['INIT','UNINIT']:
- Dict[Init+'_CNAME_DECL_' + DatumType] = []
- Dict[Init+'_GUID_DECL_' + DatumType] = []
- Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
- Dict[Init+'_VALUE_' + DatumType] = []
- Dict[Init+'_DB_VALUE_'+DatumType] = []
-
- for Type in ['STRING_HEAD','VPD_HEAD','VARIABLE_HEAD']:
- Dict[Type + '_CNAME_DECL'] = []
- Dict[Type + '_GUID_DECL'] = []
- Dict[Type + '_NUMSKUS_DECL'] = []
- Dict[Type + '_VALUE'] = []
-
- Dict['STRING_DB_VALUE'] = []
- Dict['VPD_DB_VALUE'] = []
- Dict['VARIABLE_DB_VALUE'] = []
-
- Dict['STRING_TABLE_INDEX'] = []
- Dict['STRING_TABLE_LENGTH'] = []
- Dict['STRING_TABLE_CNAME'] = []
- Dict['STRING_TABLE_GUID'] = []
- Dict['STRING_TABLE_VALUE'] = []
- Dict['STRING_TABLE_DB_VALUE'] = []
-
- Dict['SIZE_TABLE_CNAME'] = []
- Dict['SIZE_TABLE_GUID'] = []
- Dict['SIZE_TABLE_CURRENT_LENGTH'] = []
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = []
-
- Dict['EXMAPPING_TABLE_EXTOKEN'] = []
- Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = []
- Dict['EXMAPPING_TABLE_GUID_INDEX'] = []
-
- Dict['GUID_STRUCTURE'] = []
- Dict['SKUID_VALUE'] = [0] # init Dict length
- Dict['VARDEF_HEADER'] = []
-
- Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = []
- Dict['VARIABLE_DB_VALUE'] = []
- Dict['SKUHEAD_TABLE_VALUE'] = []
- Dict['SKU_INDEX_VALUE'] = []
-
- Dict['PCD_TOKENSPACE'] = []
- Dict['PCD_CNAME'] = []
- Dict['PCD_TOKENSPACE_LENGTH'] = []
- Dict['PCD_CNAME_LENGTH'] = []
- Dict['PCD_TOKENSPACE_OFFSET'] = []
- Dict['PCD_CNAME_OFFSET'] = []
- Dict['PCD_TOKENSPACE_MAP'] = []
- Dict['PCD_NAME_OFFSET'] = []
-
- Dict['PCD_ORDER_TOKEN_NUMBER_MAP'] = {}
- PCD_STRING_INDEX_MAP = {}
-
- StringTableIndex = 0
- StringTableSize = 0
- NumberOfLocalTokens = 0
- NumberOfPeiLocalTokens = 0
- NumberOfDxeLocalTokens = 0
- NumberOfExTokens = 0
- NumberOfSizeItems = 0
- NumberOfSkuEnabledPcd = 0
- GuidList = []
- VarCheckTab = VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER()
- i = 0
- ReorderedDynPcdList = GetOrderedDynamicPcdList(Platform.DynamicPcdList, Platform.PcdTokenNumber)
- for Pcd in ReorderedDynPcdList:
- VoidStarTypeCurrSize = []
- i += 1
- CName = Pcd.TokenCName
- TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
-
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- CName = PcdItem[0]
-
- if GlobalData.BuildOptionPcd:
- for PcdItem in GlobalData.BuildOptionPcd:
- if (Pcd.TokenSpaceGuidCName, CName) == (PcdItem[0], PcdItem[1]):
- Pcd.DefaultValue = PcdItem[2]
- break
-
- EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase))
-
- if Pcd.Phase == 'PEI':
- NumberOfPeiLocalTokens += 1
- if Pcd.Phase == 'DXE':
- NumberOfDxeLocalTokens += 1
- if Pcd.Phase != Phase:
- continue
-
- #
- # TODO: need GetGuidValue() definition
- #
- TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue
- TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure)
- if Pcd.Type in gDynamicExPcd:
- if TokenSpaceGuid not in GuidList:
- GuidList += [TokenSpaceGuid]
- Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure)
- NumberOfExTokens += 1
-
- ValueList = []
- DbValueList = []
- StringHeadOffsetList = []
- StringDbOffsetList = []
- VpdHeadOffsetList = []
- VpdDbOffsetList = []
- VariableHeadValueList = []
- VariableDbValueList = []
- Pcd.InitString = 'UNINIT'
-
- if Pcd.DatumType == 'VOID*':
- if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
- Pcd.TokenTypeList = ['PCD_TYPE_STRING']
- else:
- Pcd.TokenTypeList = []
- elif Pcd.DatumType == 'BOOLEAN':
- Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8_BOOLEAN']
- else:
- Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType]
-
- if len(Pcd.SkuInfoList) > 1:
- Pcd.TokenTypeList += ['PCD_TYPE_SKU_ENABLED']
- NumberOfSkuEnabledPcd += 1
-
- SkuIndexTableTmp = []
- SkuIndexTableTmp.append(0)
- SkuIdIndex = 1
- VariableHeadList = []
- for SkuName in Pcd.SkuInfoList:
- Sku = Pcd.SkuInfoList[SkuName]
- SkuId = Sku.SkuId
- if SkuId == None or SkuId == '':
- continue
-
- if (SkuId + 'U') not in Dict['SKUID_VALUE']:
- Dict['SKUID_VALUE'].append(SkuId + 'U')
-
- SkuIndexTableTmp.append(SkuId+'U')
- SkuIdIndex += 1
-
- if len(Sku.VariableName) > 0:
- VariableGuidStructure = Sku.VariableGuidValue
- VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
- if Platform.Platform.VarCheckFlag:
- var_check_obj = VAR_CHECK_PCD_VARIABLE_TAB(VariableGuidStructure, StringToArray(Sku.VariableName))
- try:
- var_check_obj.push_back(VAR_VALID_OBJECT_FACTORY.Get_valid_object(Pcd, Sku.VariableOffset))
- VarAttr, _ = VariableAttributes.GetVarAttributes(Sku.VariableAttribute)
- var_check_obj.SetAttributes(VarAttr)
- var_check_obj.UpdateSize()
- VarCheckTab.push_back(var_check_obj)
- except Exception:
- ValidInfo = ''
- if Pcd.validateranges:
- ValidInfo = Pcd.validateranges[0]
- if Pcd.validlists:
- ValidInfo = Pcd.validlists[0]
- if ValidInfo:
- EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
- "The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
- ExtraData = "[%s]" % str(ValidInfo))
- else:
- EdkLogger.error("build", PCD_VALIDATION_INFO_ERROR,
- "The PCD '%s.%s' Validation information defined in DEC file has incorrect format." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
- Pcd.TokenTypeList += ['PCD_TYPE_HII']
- Pcd.InitString = 'INIT'
- # Store all variable names of one HII PCD under different SKUs in the string table
- # and calculate the VariableHeadStringIndex
- if SkuIdIndex - 2 == 0:
- for SkuName2 in Pcd.SkuInfoList:
- SkuInfo = Pcd.SkuInfoList[SkuName2]
- if SkuInfo.SkuId == None or SkuInfo.SkuId == '':
- continue
- VariableNameStructure = StringToArray(SkuInfo.VariableName)
- if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
- Dict['STRING_TABLE_CNAME'].append(CName)
- Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
- if StringTableIndex == 0:
- Dict['STRING_TABLE_INDEX'].append('')
- else:
- Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
- VarNameSize = len(VariableNameStructure.replace(',',' ').split())
- Dict['STRING_TABLE_LENGTH'].append(VarNameSize )
- Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
- StringHeadOffsetList.append(str(StringTableSize) + 'U')
- VarStringDbOffsetList = []
- VarStringDbOffsetList.append(StringTableSize)
- Dict['STRING_DB_VALUE'].append(VarStringDbOffsetList)
- StringTableIndex += 1
- StringTableSize += len(VariableNameStructure.replace(',',' ').split())
- VariableHeadStringIndex = 0
- for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
- VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]
- VariableHeadList.append(VariableHeadStringIndex)
-
- VariableHeadStringIndex = VariableHeadList[SkuIdIndex - 2]
- # store VariableGuid to GuidTable and get the VariableHeadGuidIndex
-
- if VariableGuid not in GuidList:
- GuidList += [VariableGuid]
- Dict['GUID_STRUCTURE'].append(VariableGuidStructure)
- VariableHeadGuidIndex = GuidList.index(VariableGuid)
-
- if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
- VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s), %dU, %sU' %
- (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid,
- VariableHeadGuidIndex, Sku.VariableOffset))
- else:
- VariableHeadValueList.append('%dU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s), %dU, %sU' %
- (VariableHeadStringIndex, Phase, CName, TokenSpaceGuid, SkuIdIndex,
- VariableHeadGuidIndex, Sku.VariableOffset))
- Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
- Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
- Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex)
- if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
- Dict['VARDEF_VALUE_' + Pcd.DatumType].append("%s_%s[%d]" % (Pcd.TokenCName, TokenSpaceGuid, SkuIdIndex))
- else:
- #
- # ULL (for UINT64) or U (for other integer types) should be appended to avoid
- # warnings under the Linux build environment.
- #
- Dict['VARDEF_DB_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
-
- if Pcd.DatumType == "UINT64":
- Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "ULL")
- elif Pcd.DatumType in ("UINT32", "UINT16", "UINT8"):
- Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue + "U")
- elif Pcd.DatumType == "BOOLEAN":
- if eval(Sku.HiiDefaultValue) in [1,0]:
- Dict['VARDEF_VALUE_'+Pcd.DatumType].append(str(eval(Sku.HiiDefaultValue)) + "U")
- else:
- Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
-
- # construct the VariableHeader value
- if "PCD_TYPE_STRING" in Pcd.TokenTypeList:
- VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s)' %
- (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
- Phase, CName, TokenSpaceGuid))
- # the Pcd default value will be filled later on
- VariableOffset = len(Dict['STRING_DB_VALUE'])
- VariableRefTable = Dict['STRING_DB_VALUE']
- else:
- VariableHeadValueList.append('%dU, %dU, %sU, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
- (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
- Phase, CName, TokenSpaceGuid, SkuIdIndex))
- # the Pcd default value was filled before
- VariableOffset = len(Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]) - 1
- VariableRefTable = Dict['VARDEF_DB_VALUE_' + Pcd.DatumType]
- VariableDbValueList.append([VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset, VariableOffset, VariableRefTable, Sku.VariableAttribute])
-
- elif Sku.VpdOffset != '':
- Pcd.TokenTypeList += ['PCD_TYPE_VPD']
- Pcd.InitString = 'INIT'
- VpdHeadOffsetList.append(str(Sku.VpdOffset) + 'U')
- VpdDbOffsetList.append(Sku.VpdOffset)
- # Also add the VOID* string of VPD PCD to SizeTable
- if Pcd.DatumType == 'VOID*':
- NumberOfSizeItems += 1
- # For VPD type of PCD, its current size is equal to its MAX size.
- VoidStarTypeCurrSize = [str(Pcd.MaxDatumSize) + 'U']
- continue
-
- if Pcd.DatumType == 'VOID*':
- Pcd.TokenTypeList += ['PCD_TYPE_STRING']
- Pcd.InitString = 'INIT'
- if Sku.HiiDefaultValue != '' and Sku.DefaultValue == '':
- Sku.DefaultValue = Sku.HiiDefaultValue
- if Sku.DefaultValue != '':
- NumberOfSizeItems += 1
- Dict['STRING_TABLE_CNAME'].append(CName)
- Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
-
- if StringTableIndex == 0:
- Dict['STRING_TABLE_INDEX'].append('')
- else:
- Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
- if Sku.DefaultValue[0] == 'L':
- DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
- Size = len(DefaultValueBinStructure.replace(',',' ').split())
- Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
- elif Sku.DefaultValue[0] == '"':
- DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
- Size = len(Sku.DefaultValue) - 2 + 1
- Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
- elif Sku.DefaultValue[0] == '{':
- DefaultValueBinStructure = StringToArray(Sku.DefaultValue)
- Size = len(Sku.DefaultValue.split(","))
- Dict['STRING_TABLE_VALUE'].append(DefaultValueBinStructure)
-
- StringHeadOffsetList.append(str(StringTableSize) + 'U')
- StringDbOffsetList.append(StringTableSize)
- if Pcd.MaxDatumSize != '':
- MaxDatumSize = int(Pcd.MaxDatumSize, 0)
- if MaxDatumSize < Size:
- EdkLogger.error("build", AUTOGEN_ERROR,
- "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
- ExtraData="[%s]" % str(Platform))
- else:
- MaxDatumSize = Size
- StringTabLen = MaxDatumSize
- if StringTabLen % 2:
- StringTabLen += 1
- if Sku.VpdOffset == '':
- VoidStarTypeCurrSize.append(str(Size) + 'U')
- Dict['STRING_TABLE_LENGTH'].append(StringTabLen)
- StringTableIndex += 1
- StringTableSize += (StringTabLen)
- else:
- if "PCD_TYPE_HII" not in Pcd.TokenTypeList:
- Pcd.TokenTypeList += ['PCD_TYPE_DATA']
- if Sku.DefaultValue == 'TRUE':
- Pcd.InitString = 'INIT'
- else:
- if int(Sku.DefaultValue, 0) != 0:
- Pcd.InitString = 'INIT'
- #
- # For a UINT64 type PCD's value, ULL should be appended to avoid
- # warnings under the Linux build environment.
- #
- if Pcd.DatumType == "UINT64":
- ValueList.append(Sku.DefaultValue + "ULL")
- elif Pcd.DatumType in ("UINT32", "UINT16", "UINT8"):
- ValueList.append(Sku.DefaultValue + "U")
- elif Pcd.DatumType == "BOOLEAN":
- if Sku.DefaultValue in ["1", "0"]:
- ValueList.append(Sku.DefaultValue + "U")
- else:
- ValueList.append(Sku.DefaultValue)
-
- DbValueList.append(Sku.DefaultValue)
-
- Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
- if Pcd.DatumType == 'VOID*':
- Dict['SIZE_TABLE_CNAME'].append(CName)
- Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(str(Pcd.MaxDatumSize) + 'U')
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append(VoidStarTypeCurrSize)
-
-
- SkuIndexTableTmp[0] = len(SkuIndexTableTmp) - 1
- if len(Pcd.SkuInfoList) > 1:
- Dict['SKU_INDEX_VALUE'].append(SkuIndexTableTmp)
-
- if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
- Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
- Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid)
- Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
- Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList))
- Dict['VARDEF_HEADER'].append('_Variable_Header')
- Dict['VARIABLE_DB_VALUE'].append(VariableDbValueList)
- else:
- Dict['VARDEF_HEADER'].append('')
- if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
- Dict['VPD_HEAD_CNAME_DECL'].append(CName)
- Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid)
- Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
- Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList))
- Dict['VPD_DB_VALUE'].append(VpdDbOffsetList)
- if 'PCD_TYPE_STRING' in Pcd.TokenTypeList:
- Dict['STRING_HEAD_CNAME_DECL'].append(CName)
- Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid)
- Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
- Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
- Dict['STRING_DB_VALUE'].append(StringDbOffsetList)
- PCD_STRING_INDEX_MAP[len(Dict['STRING_HEAD_CNAME_DECL']) -1 ] = len(Dict['STRING_DB_VALUE']) -1
- if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
- Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
- Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
- Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList))
- if Pcd.InitString == 'UNINIT':
- Dict['PCD_DATABASE_UNINIT_EMPTY'] = ''
- else:
- Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))
- Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType].append(DbValueList)
-
- if Phase == 'PEI':
- NumberOfLocalTokens = NumberOfPeiLocalTokens
- if Phase == 'DXE':
- NumberOfLocalTokens = NumberOfDxeLocalTokens
-
- Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['PCD_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['PCD_TOKENSPACE_MAP'] = ['' for x in range(NumberOfLocalTokens)]
- Dict['PCD_CNAME_LENGTH'] = [0 for x in range(NumberOfLocalTokens)]
- SkuEnablePcdIndex = 0
- for Pcd in ReorderedDynPcdList:
- CName = Pcd.TokenCName
- TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
- if Pcd.Phase != Phase:
- continue
-
- TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName))
- GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1
- if Phase == 'DXE':
- GeneratedTokenNumber -= NumberOfPeiLocalTokens
-
- if len(Pcd.SkuInfoList) > 1:
- Dict['PCD_ORDER_TOKEN_NUMBER_MAP'][GeneratedTokenNumber] = SkuEnablePcdIndex
- SkuEnablePcdIndex += 1
-
- for PcdItem in GlobalData.MixedPcd:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
- CName = PcdItem[0]
-
- if GlobalData.BuildOptionPcd:
- for PcdItem in GlobalData.BuildOptionPcd:
- if (Pcd.TokenSpaceGuidCName, CName) == (PcdItem[0], PcdItem[1]):
- Pcd.DefaultValue = PcdItem[2]
- break
-
- EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
- EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
- EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))
-
- #
- # The following four Dict items hold the information for LocalTokenNumberTable
- #
- Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init'
- if Pcd.InitString == 'UNINIT':
- Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit'
-
- Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
- Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
- Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
-
- if Platform.Platform.PcdInfoFlag:
- TokenSpaceGuidCNameArray = StringToArray('"' + TokenSpaceGuidCName + '"' )
- if TokenSpaceGuidCNameArray not in Dict['PCD_TOKENSPACE']:
- Dict['PCD_TOKENSPACE'].append(TokenSpaceGuidCNameArray)
- Dict['PCD_TOKENSPACE_LENGTH'].append( len(TokenSpaceGuidCNameArray.split(",")) )
- Dict['PCD_TOKENSPACE_MAP'][GeneratedTokenNumber] = Dict['PCD_TOKENSPACE'].index(TokenSpaceGuidCNameArray)
- CNameBinArray = StringToArray('"' + CName + '"' )
- Dict['PCD_CNAME'][GeneratedTokenNumber] = CNameBinArray
-
- Dict['PCD_CNAME_LENGTH'][GeneratedTokenNumber] = len(CNameBinArray.split(","))
-
-
- Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))
-
- # search the Offset and Table, used by LocalTokenNumberTableOffset
- if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
- # Find index by CName, TokenSpaceGuid
- Offset = GetMatchedIndex(CName, Dict['VARIABLE_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VARIABLE_HEAD_GUID_DECL'])
- assert(Offset != -1)
- Table = Dict['VARIABLE_DB_VALUE']
- if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
- Offset = GetMatchedIndex(CName, Dict['VPD_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['VPD_HEAD_GUID_DECL'])
- assert(Offset != -1)
- Table = Dict['VPD_DB_VALUE']
- if 'PCD_TYPE_STRING' in Pcd.TokenTypeList and 'PCD_TYPE_HII' not in Pcd.TokenTypeList:
- # Find index by CName, TokenSpaceGuid
- Offset = GetMatchedIndex(CName, Dict['STRING_HEAD_CNAME_DECL'], TokenSpaceGuid, Dict['STRING_HEAD_GUID_DECL'])
- Offset = PCD_STRING_INDEX_MAP[Offset]
- assert(Offset != -1)
- Table = Dict['STRING_DB_VALUE']
- if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
- # need to store whether it is in init table or not
- Offset = GetMatchedIndex(CName, Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType], TokenSpaceGuid, Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType])
- assert(Offset != -1)
- if Pcd.InitString == 'UNINIT':
- Table = Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType]
- else:
- Table = Dict[Pcd.InitString+'_DB_VALUE_'+Pcd.DatumType]
- Dict['LOCAL_TOKEN_NUMBER_DB_VALUE'][GeneratedTokenNumber] = (Offset, Table)
-
- #
- # Update VARDEF_HEADER
- #
- if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
- Dict['VARDEF_HEADER'][GeneratedTokenNumber] = '_Variable_Header'
- else:
- Dict['VARDEF_HEADER'][GeneratedTokenNumber] = ''
-
-
- if Pcd.Type in gDynamicExPcd:
-
- if Phase == 'DXE':
- GeneratedTokenNumber += NumberOfPeiLocalTokens
- #
- # Per the PCD architecture specification, the PCD Token Number is 1-based and 0 is defined as an invalid token number.
- # For each EX type PCD, a PCD Token Number is assigned. When the
- # PCD Driver/PEIM maps EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number,
- # the non-EX Protocol/PPI interface can be called to get/set the value. This assumption is made by
- # the PCD Driver/PEIM in MdeModulePkg.
- # Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before it is inserted
- # into the EXMAPPING_TABLE.
- #
-
-
- Dict['EXMAPPING_TABLE_EXTOKEN'].append(str(Pcd.TokenValue) + 'U')
- Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(str(GeneratedTokenNumber + 1) + 'U')
- Dict['EXMAPPING_TABLE_GUID_INDEX'].append(str(GuidList.index(TokenSpaceGuid)) + 'U')
-
- if Platform.Platform.PcdInfoFlag:
- for index in range(len(Dict['PCD_TOKENSPACE_MAP'])):
- TokenSpaceIndex = StringTableSize
- for i in range(Dict['PCD_TOKENSPACE_MAP'][index]):
- TokenSpaceIndex += Dict['PCD_TOKENSPACE_LENGTH'][i]
- Dict['PCD_TOKENSPACE_OFFSET'].append(TokenSpaceIndex)
- for index in range(len(Dict['PCD_TOKENSPACE'])):
- StringTableSize += Dict['PCD_TOKENSPACE_LENGTH'][index]
- StringTableIndex += 1
- for index in range(len(Dict['PCD_CNAME'])):
- Dict['PCD_CNAME_OFFSET'].append(StringTableSize)
- Dict['PCD_NAME_OFFSET'].append(Dict['PCD_TOKENSPACE_OFFSET'][index])
- Dict['PCD_NAME_OFFSET'].append(StringTableSize)
- StringTableSize += Dict['PCD_CNAME_LENGTH'][index]
- StringTableIndex += 1
- if GuidList != []:
- Dict['GUID_TABLE_EMPTY'] = 'FALSE'
- Dict['GUID_TABLE_SIZE'] = str(len(GuidList)) + 'U'
- else:
- Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]
-
- if StringTableIndex == 0:
- Dict['STRING_TABLE_INDEX'].append('')
- Dict['STRING_TABLE_LENGTH'].append(1)
- Dict['STRING_TABLE_CNAME'].append('')
- Dict['STRING_TABLE_GUID'].append('')
- Dict['STRING_TABLE_VALUE'].append('{ 0 }')
- else:
- Dict['STRING_TABLE_EMPTY'] = 'FALSE'
- Dict['STRING_TABLE_SIZE'] = str(StringTableSize) + 'U'
-
- if Dict['SIZE_TABLE_CNAME'] == []:
- Dict['SIZE_TABLE_CNAME'].append('')
- Dict['SIZE_TABLE_GUID'].append('')
- Dict['SIZE_TABLE_CURRENT_LENGTH'].append(['0U'])
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append('0U')
-
- if NumberOfLocalTokens != 0:
- Dict['DATABASE_EMPTY'] = 'FALSE'
- Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens
- Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens
-
- if NumberOfExTokens != 0:
- Dict['EXMAP_TABLE_EMPTY'] = 'FALSE'
- Dict['EXMAPPING_TABLE_SIZE'] = str(NumberOfExTokens) + 'U'
- Dict['EX_TOKEN_NUMBER'] = str(NumberOfExTokens) + 'U'
- else:
- Dict['EXMAPPING_TABLE_EXTOKEN'].append('0U')
- Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append('0U')
- Dict['EXMAPPING_TABLE_GUID_INDEX'].append('0U')
-
- if NumberOfSizeItems != 0:
- Dict['SIZE_TABLE_SIZE'] = str(NumberOfSizeItems * 2) + 'U'
-
- if NumberOfSkuEnabledPcd != 0:
- Dict['SKU_HEAD_SIZE'] = str(NumberOfSkuEnabledPcd) + 'U'
-
- for AvailableSkuNumber in SkuObj.SkuIdNumberSet:
- if AvailableSkuNumber not in Dict['SKUID_VALUE']:
- Dict['SKUID_VALUE'].append(AvailableSkuNumber)
- Dict['SKUID_VALUE'][0] = len(Dict['SKUID_VALUE']) - 1
-
- AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
- if NumberOfLocalTokens == 0:
- AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
- else:
- #
- # Update the Size Table to the right order; it should be in the same order as LocalTokenNumberTable
- #
- SizeCNameTempList = []
- SizeGuidTempList = []
- SizeCurLenTempList = []
- SizeMaxLenTempList = []
- ReOrderFlag = True
-
- if len(Dict['SIZE_TABLE_CNAME']) == 1:
- if not (Dict['SIZE_TABLE_CNAME'][0] and Dict['SIZE_TABLE_GUID'][0]):
- ReOrderFlag = False
-
- if ReOrderFlag:
- for Count in range(len(Dict['TOKEN_CNAME'])):
- for Count1 in range(len(Dict['SIZE_TABLE_CNAME'])):
- if Dict['TOKEN_CNAME'][Count] == Dict['SIZE_TABLE_CNAME'][Count1] and \
- Dict['TOKEN_GUID'][Count] == Dict['SIZE_TABLE_GUID'][Count1]:
- SizeCNameTempList.append(Dict['SIZE_TABLE_CNAME'][Count1])
- SizeGuidTempList.append(Dict['SIZE_TABLE_GUID'][Count1])
- SizeCurLenTempList.append(Dict['SIZE_TABLE_CURRENT_LENGTH'][Count1])
- SizeMaxLenTempList.append(Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count1])
-
- for Count in range(len(Dict['SIZE_TABLE_CNAME'])):
- Dict['SIZE_TABLE_CNAME'][Count] = SizeCNameTempList[Count]
- Dict['SIZE_TABLE_GUID'][Count] = SizeGuidTempList[Count]
- Dict['SIZE_TABLE_CURRENT_LENGTH'][Count] = SizeCurLenTempList[Count]
- Dict['SIZE_TABLE_MAXIMUM_LENGTH'][Count] = SizeMaxLenTempList[Count]
-
- AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))
-
- if Platform.Platform.VarCheckFlag:
- dest = os.path.join(Platform.BuildDir, 'FV')
- VarCheckTab.dump(dest, Phase)
- Buffer = BuildExDataBase(Dict)
- return AutoGenH, AutoGenC, Buffer
-
-def GetOrderedDynamicPcdList(DynamicPcdList, PcdTokenNumberList):
- ReorderedDyPcdList = [None for i in range(len(DynamicPcdList))]
- for Pcd in DynamicPcdList:
- if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) in PcdTokenNumberList:
- ReorderedDyPcdList[PcdTokenNumberList[Pcd.TokenCName, Pcd.TokenSpaceGuidCName]-1] = Pcd
- return ReorderedDyPcdList
-
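[Illustrative sketch, not from the original file: token numbers are 1-based, so a PCD whose assigned token number is N lands at list index N-1. The token-number map and PCD objects below are hypothetical; FooPcd/BarPcd stand for objects whose TokenCName is 'PcdFoo'/'PcdBar' in token space 'gTokenSpaceGuid'.]
    PcdTokenNumber = {('PcdFoo', 'gTokenSpaceGuid'): 2,
                      ('PcdBar', 'gTokenSpaceGuid'): 1}
    GetOrderedDynamicPcdList([FooPcd, BarPcd], PcdTokenNumber)  # -> [BarPcd, FooPcd]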
diff --git a/BaseTools/Source/Python/AutoGen/IdfClassObject.py b/BaseTools/Source/Python/AutoGen/IdfClassObject.py
deleted file mode 100644
index d6d4703370..0000000000
--- a/BaseTools/Source/Python/AutoGen/IdfClassObject.py
+++ /dev/null
@@ -1,162 +0,0 @@
-## @file
-# This file is used to collect all defined strings in Image Definition files
-#
-# Copyright (c) 2016, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-##
-# Import Modules
-#
-import Common.EdkLogger as EdkLogger
-import StringIO
-from Common.BuildToolError import *
-from Common.String import GetLineNo
-from Common.Misc import PathClass
-from Common.LongFilePathSupport import LongFilePath
-import re
-import os
-
-IMAGE_TOKEN = re.compile('IMAGE_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
-
-#
-# Value of different image information block types
-#
-EFI_HII_IIBT_END = 0x00
-EFI_HII_IIBT_IMAGE_1BIT = 0x10
-EFI_HII_IIBT_IMAGE_1BIT_TRANS = 0x11
-EFI_HII_IIBT_IMAGE_4BIT = 0x12
-EFI_HII_IIBT_IMAGE_4BIT_TRANS = 0x13
-EFI_HII_IIBT_IMAGE_8BIT = 0x14
-EFI_HII_IIBT_IMAGE_8BIT_TRANS = 0x15
-EFI_HII_IIBT_IMAGE_24BIT = 0x16
-EFI_HII_IIBT_IMAGE_24BIT_TRANS = 0x17
-EFI_HII_IIBT_IMAGE_JPEG = 0x18
-EFI_HII_IIBT_IMAGE_PNG = 0x19
-EFI_HII_IIBT_DUPLICATE = 0x20
-EFI_HII_IIBT_SKIP2 = 0x21
-EFI_HII_IIBT_SKIP1 = 0x22
-EFI_HII_IIBT_EXT1 = 0x30
-EFI_HII_IIBT_EXT2 = 0x31
-EFI_HII_IIBT_EXT4 = 0x32
-
-#
-# Value of HII package type
-#
-EFI_HII_PACKAGE_TYPE_ALL = 0x00
-EFI_HII_PACKAGE_TYPE_GUID = 0x01
-EFI_HII_PACKAGE_FORMS = 0x02
-EFI_HII_PACKAGE_STRINGS = 0x04
-EFI_HII_PACKAGE_FONTS = 0x05
-EFI_HII_PACKAGE_IMAGES = 0x06
-EFI_HII_PACKAGE_SIMPLE_FONTS = 0x07
-EFI_HII_PACKAGE_DEVICE_PATH = 0x08
-EFI_HII_PACKAGE_KEYBOARD_LAYOUT = 0x09
-EFI_HII_PACKAGE_ANIMATIONS = 0x0A
-EFI_HII_PACKAGE_END = 0xDF
-EFI_HII_PACKAGE_TYPE_SYSTEM_BEGIN = 0xE0
-EFI_HII_PACKAGE_TYPE_SYSTEM_END = 0xFF
-
-class IdfFileClassObject(object):
- def __init__(self, FileList = []):
- self.FileList = FileList
- self.ImageFilesDict = {}
- self.ImageIDList = []
- if len(self.FileList) > 0:
- self.LoadIdfFiles(FileList)
-
- def LoadIdfFiles(self, FileList):
- if len(FileList) > 0:
- for File in FileList:
- self.LoadIdfFile(File)
-
- def LoadIdfFile(self, File = None):
- if File == None:
- EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'No Image definition file is given.')
- self.File = File
-
- try:
- IdfFile = open(LongFilePath(File.Path), mode='r')
- FileIn = IdfFile.read()
- IdfFile.close()
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File)
-
- ImageFileList = []
- for Line in FileIn.splitlines():
- Line = Line.strip()
- Line = self.StripComments(Line)
- if len(Line) == 0:
- continue
-
- LineNo = GetLineNo(FileIn, Line, False)
- if not Line.startswith('#image '):
- EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is invalid.' % (Line, LineNo, File.Path))
-
- if Line.find('#image ') >= 0:
- LineDetails = Line.split()
- Len = len(LineDetails)
- if Len != 3 and Len != 4:
- EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The format is not match #image IMAGE_ID [TRANSPARENT] ImageFileName in Line %s of File %s.' % (LineNo, File.Path))
- if Len == 4 and LineDetails[2] != 'TRANSPARENT':
- EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'Please use the keyword "TRANSPARENT" to describe the transparency setting in Line %s of File %s.' % (LineNo, File.Path))
- MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', LineDetails[1], re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(LineDetails[1]):
- EdkLogger.error('Image Definition File Parser', FORMAT_INVALID, 'The Image token name %s defined in Idf file %s contains an invalid character.' % (LineDetails[1], File.Path))
- if LineDetails[1] not in self.ImageIDList:
- self.ImageIDList.append(LineDetails[1])
- else:
- EdkLogger.error("Image Definition File Parser", PARSER_ERROR, 'The %s in Line %s of File %s is already defined.' % (LineDetails[1], LineNo, File.Path))
- if Len == 4:
- ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], True)
- else:
- ImageFile = ImageFileObject(LineDetails[Len-1], LineDetails[1], False)
- ImageFileList.append(ImageFile)
- if ImageFileList:
- self.ImageFilesDict[File] = ImageFileList
-
- def StripComments(self, Line):
- Comment = '//'
- CommentPos = Line.find(Comment)
- while CommentPos >= 0:
- # if there are unmatched quotes before the comment marker
- # then we are in the middle of a string
- # but we need to ignore the escaped quotes and backslashes.
- if ((Line.count('"', 0, CommentPos) - Line.count('\\"', 0, CommentPos)) & 1) == 1:
- CommentPos = Line.find (Comment, CommentPos + 1)
- else:
- return Line[:CommentPos].strip()
- return Line.strip()
-
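[Illustrative usage, not from the original file: StripComments removes '//' comments but leaves '//' sequences inside double-quoted strings untouched.]
    Parser = IdfFileClassObject()                # no files needed just to strip comments
    Parser.StripComments('foo // comment')       # -> 'foo'
    Parser.StripComments('"a//b" // trailing')   # -> '"a//b"'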
- def ImageDecoder(self, File):
- pass
-
-def SearchImageID(ImageFileObject, FileList):
- if FileList == []:
- return ImageFileObject
-
- for File in FileList:
- if os.path.isfile(File):
- Lines = open(File, 'r')
- for Line in Lines:
- ImageIdList = IMAGE_TOKEN.findall(Line)
- for ID in ImageIdList:
- EdkLogger.debug(EdkLogger.DEBUG_5, "Found ImageID identifier: " + ID)
- ImageFileObject.SetImageIDReferenced(ID)
-
-class ImageFileObject(object):
- def __init__(self, FileName, ImageID, TransParent = False):
- self.FileName = FileName
- self.File = ''
- self.ImageID = ImageID
- self.TransParent = TransParent
- self.Referenced = False
-
- def SetImageIDReferenced(self, ImageID):
- if ImageID == self.ImageID:
- self.Referenced = True
diff --git a/BaseTools/Source/Python/AutoGen/InfSectionParser.py b/BaseTools/Source/Python/AutoGen/InfSectionParser.py
deleted file mode 100644
index 7f78236548..0000000000
--- a/BaseTools/Source/Python/AutoGen/InfSectionParser.py
+++ /dev/null
@@ -1,107 +0,0 @@
-## @file
-# Parse an INF file and get data from the specified sections.
-#
-# Copyright (c) 2007 - 2012, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-## Import Modules
-#
-
-import Common.EdkLogger as EdkLogger
-from Common.BuildToolError import *
-from Common.DataType import *
-
-
-class InfSectionParser():
- def __init__(self, FilePath):
- self._FilePath = FilePath
- self._FileSectionDataList = []
- self._ParserInf()
-
- def _ParserInf(self):
- Filename = self._FilePath
- FileLinesList = []
- UserExtFind = False
- FindEnd = True
- FileLastLine = False
- SectionLine = ''
- SectionData = []
-
- try:
- FileLinesList = open(Filename, "r", 0).readlines()
- except BaseException:
- EdkLogger.error("build", AUTOGEN_ERROR, 'File %s is opened failed.' % Filename)
-
- for Index in range(0, len(FileLinesList)):
- line = str(FileLinesList[Index]).strip()
- if Index + 1 == len(FileLinesList):
- FileLastLine = True
- NextLine = ''
- else:
- NextLine = str(FileLinesList[Index + 1]).strip()
- if UserExtFind and FindEnd == False:
- if line:
- SectionData.append(line)
- if line.lower().startswith(TAB_SECTION_START) and line.lower().endswith(TAB_SECTION_END):
- SectionLine = line
- UserExtFind = True
- FindEnd = False
-
- if (NextLine != '' and NextLine[0] == TAB_SECTION_START and \
- NextLine[-1] == TAB_SECTION_END) or FileLastLine:
- UserExtFind = False
- FindEnd = True
- self._FileSectionDataList.append({SectionLine: SectionData[:]})
- SectionData = []
- SectionLine = ''
-
-
- # Get depex expression
- #
- # @return: a list of dictionaries whose key is the section name and whose value is a list containing all data.
- def GetDepexExpresionList(self):
- DepexExpresionList = []
- if not self._FileSectionDataList:
- return DepexExpresionList
- for SectionDataDict in self._FileSectionDataList:
- for key in SectionDataDict.keys():
- if key.lower() == "[depex]" or key.lower().startswith("[depex."):
- SectionLine = key.lstrip(TAB_SECTION_START).rstrip(TAB_SECTION_END)
- SubSectionList = [SectionLine]
- if str(SectionLine).find(TAB_COMMA_SPLIT) > -1:
- SubSectionList = str(SectionLine).split(TAB_COMMA_SPLIT)
- for SubSection in SubSectionList:
- SectionList = SubSection.split(TAB_SPLIT)
- SubKey = ()
- if len(SectionList) == 1:
- SubKey = (TAB_ARCH_COMMON, TAB_ARCH_COMMON)
- elif len(SectionList) == 2:
- SubKey = (SectionList[1], TAB_ARCH_COMMON)
- elif len(SectionList) == 3:
- SubKey = (SectionList[1], SectionList[2])
- else:
- EdkLogger.error("build", AUTOGEN_ERROR, 'Section %s is invalid.' % key)
- DepexExpresionList.append({SubKey: SectionDataDict[key]})
- return DepexExpresionList
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py
deleted file mode 100644
index ed33554cd7..0000000000
--- a/BaseTools/Source/Python/AutoGen/StrGather.py
+++ /dev/null
@@ -1,656 +0,0 @@
-## @file
-# This file is used to parse a strings file and create or add to a string database
-# file.
-#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-##
-# Import Modules
-#
-import re
-import Common.EdkLogger as EdkLogger
-from Common.BuildToolError import *
-from UniClassObject import *
-from StringIO import StringIO
-from struct import pack, unpack
-from Common.LongFilePathSupport import OpenLongFilePath as open
-
-##
-# Static definitions
-#
-EFI_HII_SIBT_END = '0x00'
-EFI_HII_SIBT_STRING_SCSU = '0x10'
-EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
-EFI_HII_SIBT_STRINGS_SCSU = '0x12'
-EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
-EFI_HII_SIBT_STRING_UCS2 = '0x14'
-EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
-EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
-EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
-EFI_HII_SIBT_DUPLICATE = '0x20'
-EFI_HII_SIBT_SKIP2 = '0x21'
-EFI_HII_SIBT_SKIP1 = '0x22'
-EFI_HII_SIBT_EXT1 = '0x30'
-EFI_HII_SIBT_EXT2 = '0x31'
-EFI_HII_SIBT_EXT4 = '0x32'
-EFI_HII_SIBT_FONT = '0x40'
-
-EFI_HII_PACKAGE_STRINGS = '0x04'
-EFI_HII_PACKAGE_FORM = '0x02'
-
-StringPackageType = EFI_HII_PACKAGE_STRINGS
-StringPackageForm = EFI_HII_PACKAGE_FORM
-StringBlockType = EFI_HII_SIBT_STRING_UCS2
-StringSkipType = EFI_HII_SIBT_SKIP2
-
-HexHeader = '0x'
-
-COMMENT = '// '
-DEFINE_STR = '#define'
-COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
-NOT_REFERENCED = 'not referenced'
-COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
-CHAR_ARRAY_DEFIN = 'unsigned char'
-COMMON_FILE_NAME = 'Strings'
-OFFSET = 'offset'
-STRING = 'string'
-TO = 'to'
-STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
-COMPATIBLE_STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)
-
-EFI_HII_ARRAY_SIZE_LENGTH = 4
-EFI_HII_PACKAGE_HEADER_LENGTH = 4
-EFI_HII_HDR_SIZE_LENGTH = 4
-EFI_HII_STRING_OFFSET_LENGTH = 4
-EFI_STRING_ID = 1
-EFI_STRING_ID_LENGTH = 2
-EFI_HII_LANGUAGE_WINDOW = 0
-EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
-EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
-EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH
-
-H_C_FILE_HEADER = ['//', \
- '// DO NOT EDIT -- auto-generated file', \
- '//', \
- '// This file is generated by the StrGather utility', \
- '//']
-LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
-PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
-
-## Convert a dec number to a hex string
-#
-# Convert a decimal number to a formatted hex string with the given number of digits
-# The digit count defaults to 8
-# The hex string starts with "0x"
-# DecToHexStr(1000) is '0x000003E8'
-# DecToHexStr(1000, 6) is '0x0003E8'
-#
-# @param Dec: The number in dec format
-# @param Digit: The needed digit of hex string
-#
-# @retval: The formatted hex string
-#
-def DecToHexStr(Dec, Digit = 8):
- return eval("'0x%0" + str(Digit) + "X' % int(Dec)")
-
-## Convert a dec number to a hex list
-#
-# Convert a decimal number to a formatted hex list with the given number of digits
-# The digit count defaults to 8
-# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
-# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
-#
-# @param Dec: The number in dec format
-# @param Digit: The needed digit of hex list
-#
-# @retval: A list for formatted hex string
-#
-def DecToHexList(Dec, Digit = 8):
- Hex = eval("'%0" + str(Digit) + "X' % int(Dec)")
- List = []
- for Bit in range(Digit - 2, -1, -2):
- List.append(HexHeader + Hex[Bit:Bit + 2])
- return List
-
-## Convert an ASCII string to a hex list
-#
-# Convert an ASCII string to a formatted hex list
-# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
-#
-# @param Ascii: The ASCII string
-#
-# @retval: A list for formatted hex string
-#
-def AscToHexList(Ascii):
- List = []
- for Item in Ascii:
- List.append('0x%02X' % ord(Item))
-
- return List
-
-## Create header of .h file
-#
-# Create a header of .h file
-#
-# @param BaseName: The basename of strings
-#
-# @retval Str: A string for .h file header
-#
-def CreateHFileHeader(BaseName):
- Str = ''
- for Item in H_C_FILE_HEADER:
- Str = WriteLine(Str, Item)
- Str = WriteLine(Str, '#ifndef _' + BaseName.upper() + '_STRINGS_DEFINE_H_')
- Str = WriteLine(Str, '#define _' + BaseName.upper() + '_STRINGS_DEFINE_H_')
- return Str
-
-## Create content of .h file
-#
-# Create content of .h file
-#
-# @param BaseName: The basename of strings
-# @param UniObjectClass A UniObjectClass instance
-# @param IsCompatibleMode Compatible mode
-# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
-#
-# @retval Str: A string of .h file content
-#
-def CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
- Str = ''
- ValueStartPtr = 60
- Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
- Str = WriteLine(Str, Line)
- Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
- Str = WriteLine(Str, Line)
- UnusedStr = ''
-
- # Group the referenced/unused STRING tokens together.
- for Index in range(2, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]])):
- StringItem = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]][Index]
- Name = StringItem.StringName
- Token = StringItem.Token
- Referenced = StringItem.Referenced
- if Name != None:
- Line = ''
- if Referenced == True:
- if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
- Line = DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4)
- else:
- Line = DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4)
- Str = WriteLine(Str, Line)
- else:
- if (ValueStartPtr - len(DEFINE_STR + Name)) <= 0:
- Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
- else:
- Line = COMMENT_DEFINE_STR + ' ' + Name + ' ' * (ValueStartPtr - len(DEFINE_STR + Name)) + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
- UnusedStr = WriteLine(UnusedStr, Line)
-
- Str = ''.join([Str, UnusedStr])
-
- Str = WriteLine(Str, '')
- if IsCompatibleMode or UniGenCFlag:
- Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
- return Str
-
-## Create a complete .h file
-#
-# Create a complete .h file with file header and file content
-#
-# @param BaseName: The basename of strings
-# @param UniObjectClass A UniObjectClass instance
-# @param IsCompatibleMode Compatible mode
-# @param UniGenCFlag UniString is generated into AutoGen C file when it is set to True
-#
-# @retval Str: A string of complete .h file
-#
-def CreateHFile(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag):
- HFile = WriteLine('', CreateHFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniGenCFlag))
-
- return HFile
-
-## Create header of .c file
-#
-# Create a header of .c file
-#
-# @retval Str: A string for .c file header
-#
-def CreateCFileHeader():
- Str = ''
- for Item in H_C_FILE_HEADER:
- Str = WriteLine(Str, Item)
-
- return Str
-
-## Create a buffer to store all items in an array
-#
-# @param BinBuffer Buffer to contain Binary data.
-# @param Array: The array to be formatted
-#
-def CreateBinBuffer(BinBuffer, Array):
- for Item in Array:
- BinBuffer.write(pack("B", int(Item, 16)))
-
-## Create a formatted string all items in an array
-#
-# Use ',' to join the items in an array, and start a new line when the width (default 16) is reached
-#
-# @param Array: The array to be formatted
-# @param Width: The line length; the default value is 16
-#
-# @retval ArrayItem: A string for all formatted array items
-#
-def CreateArrayItem(Array, Width = 16):
- MaxLength = Width
- Index = 0
- Line = ' '
- ArrayItem = ''
-
- for Item in Array:
- if Index < MaxLength:
- Line = Line + Item + ', '
- Index = Index + 1
- else:
- ArrayItem = WriteLine(ArrayItem, Line)
- Line = ' ' + Item + ', '
- Index = 1
- ArrayItem = Write(ArrayItem, Line.rstrip())
-
- return ArrayItem
-
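-# A hypothetical worked example of CreateArrayItem (illustrative values only, not
-# used anywhere in the build flow):
-#   CreateArrayItem(['0x01', '0x02', '0x03', '0x04']) keeps all four items on one
-#   indented line, while CreateArrayItem(['0x00'] * 20, 16) starts a new line
-#   after every 16 items.
-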
-## CreateCFileStringValue
-#
-# Create a line with string value
-#
-# @param Value: Value of the string
-#
-# @retval Str: A formatted string with string value
-#
-
-def CreateCFileStringValue(Value):
- Value = [StringBlockType] + Value
- Str = WriteLine('', CreateArrayItem(Value))
-
- return Str
-
-## GetFilteredLanguage
-#
-# Apply the "get best language" rules to the UNI language code list
-#
-# @param UniLanguageList: language code definition list in *.UNI file
-# @param LanguageFilterList: language code filter list of RFC4646 format in DSC file
-#
-# @retval UniLanguageListFiltered: the filtered language code
-#
-def GetFilteredLanguage(UniLanguageList, LanguageFilterList):
- UniLanguageListFiltered = []
- # if filter list is empty, then consider there is no filter
- if LanguageFilterList == []:
- UniLanguageListFiltered = UniLanguageList
- return UniLanguageListFiltered
- for Language in LanguageFilterList:
- # first check for exact match
- if Language in UniLanguageList:
- if Language not in UniLanguageListFiltered:
- UniLanguageListFiltered += [Language]
- # find the first one with the same/equivalent primary tag
- else:
- if Language.find('-') != -1:
- PrimaryTag = Language[0:Language.find('-')].lower()
- else:
- PrimaryTag = Language
-
- if len(PrimaryTag) == 3:
- PrimaryTag = LangConvTable.get(PrimaryTag)
-
- for UniLanguage in UniLanguageList:
- if UniLanguage.find('-') != -1:
- UniLanguagePrimaryTag = UniLanguage[0:UniLanguage.find('-')].lower()
- else:
- UniLanguagePrimaryTag = UniLanguage
-
- if len(UniLanguagePrimaryTag) == 3:
- UniLanguagePrimaryTag = LangConvTable.get(UniLanguagePrimaryTag)
-
- if PrimaryTag == UniLanguagePrimaryTag:
- if UniLanguage not in UniLanguageListFiltered:
- UniLanguageListFiltered += [UniLanguage]
- break
- else:
- # Here is rule 3 for "get best language"
- # If tag is not listed in the Unicode file, the default ("en") tag should be used for that language
- # for better processing, find the one that best suits it.
- DefaultTag = 'en'
- if DefaultTag not in UniLanguageListFiltered:
- # check whether language code with primary code equivalent with DefaultTag already in the list, if so, use that
- for UniLanguage in UniLanguageList:
- if UniLanguage.startswith('en-') or UniLanguage.startswith('eng-'):
- if UniLanguage not in UniLanguageListFiltered:
- UniLanguageListFiltered += [UniLanguage]
- break
- else:
- UniLanguageListFiltered += [DefaultTag]
- return UniLanguageListFiltered
-
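-# A minimal, hypothetical sketch of how GetFilteredLanguage behaves; the language
-# lists below are made-up examples and are not read from any platform description.
-ExampleUniLangs = ['en-US', 'fr-FR']
-ExampleAll = GetFilteredLanguage(ExampleUniLangs, [])            # empty filter keeps every language
-ExampleExact = GetFilteredLanguage(ExampleUniLangs, ['fr-FR'])   # exact RFC 4646 match -> ['fr-FR']
-ExamplePrimary = GetFilteredLanguage(ExampleUniLangs, ['fr'])    # primary-tag match    -> ['fr-FR']
-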
-
-## Create content of .c file
-#
-# Create content of .c file
-#
-# @param BaseName: The basename of strings
-# @param UniObjectClass A UniObjectClass instance
-# @param IsCompatibleMode Compatible mode
-# @param UniBinBuffer UniBinBuffer to contain UniBinary data.
-# @param FilterInfo Platform language filter information
-#
-# @retval Str: A string of .c file content
-#
-def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, UniBinBuffer, FilterInfo):
- #
- # Init array length
- #
- TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
- Str = ''
- Offset = 0
-
- EDK2Module = FilterInfo[0]
- if EDK2Module:
- LanguageFilterList = FilterInfo[1]
- else:
- # EDK module is using ISO639-2 format filter, convert to the RFC4646 format
- LanguageFilterList = [LangConvTable.get(F.lower()) for F in FilterInfo[1]]
-
- UniLanguageList = []
- for IndexI in range(len(UniObjectClass.LanguageDef)):
- UniLanguageList += [UniObjectClass.LanguageDef[IndexI][0]]
-
- UniLanguageListFiltered = GetFilteredLanguage(UniLanguageList, LanguageFilterList)
-
-
- #
- # Create lines for each language's strings
- #
- for IndexI in range(len(UniObjectClass.LanguageDef)):
- Language = UniObjectClass.LanguageDef[IndexI][0]
- if Language not in UniLanguageListFiltered:
- continue
-
- StringBuffer = StringIO()
- StrStringValue = ''
- ArrayLength = 0
- NumberOfUseOtherLangDef = 0
- Index = 0
- for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
- Item = UniObjectClass.OrderedStringListByToken[Language][IndexJ]
-
- Name = Item.StringName
- Value = Item.StringValueByteList
- Referenced = Item.Referenced
- Token = Item.Token
- UseOtherLangDef = Item.UseOtherLangDef
-
- if UseOtherLangDef != '' and Referenced:
- NumberOfUseOtherLangDef = NumberOfUseOtherLangDef + 1
- Index = Index + 1
- else:
- if NumberOfUseOtherLangDef > 0:
- StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
- CreateBinBuffer (StringBuffer, ([StringSkipType] + DecToHexList(NumberOfUseOtherLangDef, 4)))
- NumberOfUseOtherLangDef = 0
- ArrayLength = ArrayLength + 3
- if Referenced and Item.Token > 0:
- Index = Index + 1
- StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
- StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
- CreateBinBuffer (StringBuffer, [StringBlockType] + Value)
- ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the length of string type
-
- #
- # EFI_HII_PACKAGE_HEADER
- #
- Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
- ArrayLength = Offset + ArrayLength + 1
-
- #
- # Create PACKAGE HEADER
- #
- Str = WriteLine(Str, '// PACKAGE HEADER\n')
- TotalLength = TotalLength + ArrayLength
-
- List = DecToHexList(ArrayLength, 6) + \
- [StringPackageType] + \
- DecToHexList(Offset) + \
- DecToHexList(Offset) + \
- DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
- DecToHexList(EFI_STRING_ID, 4) + \
- AscToHexList(Language) + \
- DecToHexList(0, 2)
- Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')
-
- #
- # Create PACKAGE DATA
- #
- Str = WriteLine(Str, '// PACKAGE DATA\n')
- Str = Write(Str, StrStringValue)
-
- #
- # Add an EFI_HII_SIBT_END at last
- #
- Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")
-
- #
- # Create binary UNI string
- #
- if UniBinBuffer:
- CreateBinBuffer (UniBinBuffer, List)
- UniBinBuffer.write (StringBuffer.getvalue())
- UniBinBuffer.write (pack("B", int(EFI_HII_SIBT_END, 16)))
- StringBuffer.close()
-
- #
- # Create line for string variable name
- # "unsigned char $(BaseName)Strings[] = {"
- #
- AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n')
-
- if IsCompatibleMode:
- #
- # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
- #
- AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
- AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
- AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
- AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')
- else:
- #
- # Create whole array length in UEFI mode
- #
- AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
- AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')
-
- #
- # Join package data
- #
- AllStr = Write(AllStr, Str)
-
- return AllStr
-
-## Create end of .c file
-#
-# Create end of .c file
-#
-# @retval Str: A string of .c file end
-#
-def CreateCFileEnd():
- Str = Write('', '};')
- return Str
-
-## Create a .c file
-#
-# Create a complete .c file
-#
-# @param BaseName: The basename of strings
-# @param UniObjectClass A UniObjectClass instance
-# @param IsCompatibleMode Compatible Mode
-# @param FilterInfo Platform language filter information
-#
-# @retval CFile: A string of complete .c file
-#
-def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode, FilterInfo):
- CFile = ''
- #CFile = WriteLine(CFile, CreateCFileHeader())
- CFile = WriteLine(CFile, CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode, None, FilterInfo))
- CFile = WriteLine(CFile, CreateCFileEnd())
- return CFile
-
-## GetFileList
-#
-# Get a list of all files found under the given include paths
-#
-# @param SourceFileList: A list of source files to be searched
-# @param IncludeList: A list of all paths to be searched
-# @param SkipList: A list of file types (extensions) to be skipped
-#
-# @retval FileList: A list of all files found
-#
-def GetFileList(SourceFileList, IncludeList, SkipList):
- if IncludeList == None:
- EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")
-
- FileList = []
- if SkipList == None:
- SkipList = []
-
- for File in SourceFileList:
- for Dir in IncludeList:
- if not os.path.exists(Dir):
- continue
- File = os.path.join(Dir, File.Path)
- #
- # Ignore Dir
- #
- if os.path.isfile(File) != True:
- continue
- #
- # Ignore file listed in skip list
- #
- IsSkip = False
- for Skip in SkipList:
- if os.path.splitext(File)[1].upper() == Skip.upper():
- EdkLogger.verbose("Skipped %s for string token uses search" % File)
- IsSkip = True
- break
-
- if not IsSkip:
- FileList.append(File)
-
- break
-
- return FileList
-
-## SearchString
-#
-# Search whether all strings defined in UniObjectClass are referenced;
-# every string that is used is marked as Referenced
-#
-# @param UniObjectClass: Input UniObjectClass
-# @param FileList: Search path list
-# @param IsCompatibleMode Compatible Mode
-#
-# @retval UniObjectClass: UniObjectClass after searched
-#
-def SearchString(UniObjectClass, FileList, IsCompatibleMode):
- if FileList == []:
- return UniObjectClass
-
- for File in FileList:
- if os.path.isfile(File):
- Lines = open(File, 'r')
- for Line in Lines:
- if not IsCompatibleMode:
- StringTokenList = STRING_TOKEN.findall(Line)
- else:
- StringTokenList = COMPATIBLE_STRING_TOKEN.findall(Line)
- for StrName in StringTokenList:
- EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
- UniObjectClass.SetStringReferenced(StrName)
-
- UniObjectClass.ReToken()
-
- return UniObjectClass
-
-## GetStringFiles
-#
-# This function is used for the UEFI 2.1 spec
-#
-#
-def GetStringFiles(UniFilList, SourceFileList, IncludeList, IncludePathList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False, UniGenCFlag = True, UniGenBinBuffer = None, FilterInfo = [True, []]):
- if len(UniFilList) > 0:
- if ShellMode:
- #
- # support ISO 639-2 codes in .UNI files of EDK Shell
- #
- Uni = UniFileClassObject(sorted (UniFilList), True, IncludePathList)
- else:
- Uni = UniFileClassObject(sorted (UniFilList), IsCompatibleMode, IncludePathList)
- else:
- EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')
-
- FileList = GetFileList(SourceFileList, IncludeList, SkipList)
-
- Uni = SearchString(Uni, sorted (FileList), IsCompatibleMode)
-
- HFile = CreateHFile(BaseName, Uni, IsCompatibleMode, UniGenCFlag)
- CFile = None
- if IsCompatibleMode or UniGenCFlag:
- CFile = CreateCFile(BaseName, Uni, IsCompatibleMode, FilterInfo)
- if UniGenBinBuffer:
- CreateCFileContent(BaseName, Uni, IsCompatibleMode, UniGenBinBuffer, FilterInfo)
-
- return HFile, CFile
-
-#
-# Write an item
-#
-def Write(Target, Item):
- return ''.join([Target, Item])
-
-#
-# Write an item with a break line
-#
-def WriteLine(Target, Item):
- return ''.join([Target, Item, '\n'])
-
-# This acts like the main() function for the script, unless it is 'import'ed into another
-# script.
-if __name__ == '__main__':
- EdkLogger.info('start')
-
- UniFileList = [
- r'C:\\Edk\\Strings2.uni',
- r'C:\\Edk\\Strings.uni'
- ]
-
- SrcFileList = []
- for Root, Dirs, Files in os.walk('C:\\Edk'):
- for File in Files:
- SrcFileList.append(File)
-
- IncludeList = [
- r'C:\\Edk'
- ]
-
- SkipList = ['.inf', '.uni']
- BaseName = 'DriverSample'
- (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, IncludeList, SkipList, BaseName, True)
- hfile = open('unistring.h', 'w')
- cfile = open('unistring.c', 'w')
- hfile.write(h)
- cfile.write(c)
-
- EdkLogger.info('end')
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py
deleted file mode 100644
index 856d19cda2..0000000000
--- a/BaseTools/Source/Python/AutoGen/UniClassObject.py
+++ /dev/null
@@ -1,701 +0,0 @@
-## @file
-# This file is used to collect all defined strings in multiple uni files
-#
-#
-# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.<BR>
-#
-# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-##
-# Import Modules
-#
-import Common.LongFilePathOs as os, codecs, re
-import distutils.util
-import Common.EdkLogger as EdkLogger
-import StringIO
-from Common.BuildToolError import *
-from Common.String import GetLineNo
-from Common.Misc import PathClass
-from Common.LongFilePathSupport import LongFilePath
-##
-# Static definitions
-#
-UNICODE_WIDE_CHAR = u'\\wide'
-UNICODE_NARROW_CHAR = u'\\narrow'
-UNICODE_NON_BREAKING_CHAR = u'\\nbr'
-UNICODE_UNICODE_CR = '\r'
-UNICODE_UNICODE_LF = '\n'
-
-NARROW_CHAR = u'\uFFF0'
-WIDE_CHAR = u'\uFFF1'
-NON_BREAKING_CHAR = u'\uFFF2'
-CR = u'\u000D'
-LF = u'\u000A'
-NULL = u'\u0000'
-TAB = u'\t'
-BACK_SLASH_PLACEHOLDER = u'\u0006'
-
-gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
-
-## Convert a python unicode string to a normal string
-#
-# Convert a python unicode string to a normal string
-# UniToStr(u'I am a string') is 'I am a string'
-#
-# @param Uni: The python unicode string
-#
-# @retval: The formatted normal string
-#
-def UniToStr(Uni):
- return repr(Uni)[2:-1]
-
-## Convert a unicode string to a Hex list
-#
-# Convert a unicode string to a Hex list
-# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
-#
-# @param Uni: The python unicode string
-#
-# @retval List: The formatted hex list
-#
-def UniToHexList(Uni):
- List = []
- for Item in Uni:
- Temp = '%04X' % ord(Item)
- List.append('0x' + Temp[2:4])
- List.append('0x' + Temp[0:2])
- return List
-
-LangConvTable = {'eng':'en', 'fra':'fr', \
- 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
- 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
- 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
- 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
- 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
- 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
- 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
- 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
- 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
- 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
- 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
- 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
- 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
- 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
- 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
- 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
- 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
- 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
- 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
- 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
- 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
- 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
- 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
- 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
- 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
- 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
- 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
- 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
- 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
- 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
- 'zho':'zh', 'zul':'zu'}
-
-## GetLanguageCode
-#
-# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
-# ISO 639-2 language codes supported in compatibility mode
-# RFC 4646 language codes supported in native mode
-#
-# @param LangName: Language codes read from .UNI file
-#
-# @retval LangName: Valid language code in RFC 4646 format or None
-#
-def GetLanguageCode(LangName, IsCompatibleMode, File):
- global LangConvTable
-
- length = len(LangName)
- if IsCompatibleMode:
- if length == 3 and LangName.isalpha():
- TempLangName = LangConvTable.get(LangName.lower())
- if TempLangName != None:
- return TempLangName
- return LangName
- else:
- EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)
-
- if (LangName[0] == 'X' or LangName[0] == 'x') and LangName[1] == '-':
- return LangName
- if length == 2:
- if LangName.isalpha():
- return LangName
- elif length == 3:
- if LangName.isalpha() and LangConvTable.get(LangName.lower()) == None:
- return LangName
- elif length == 5:
- if LangName[0:2].isalpha() and LangName[2] == '-':
- return LangName
- elif length >= 6:
- if LangName[0:2].isalpha() and LangName[2] == '-':
- return LangName
- if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) == None and LangName[3] == '-':
- return LangName
-
- EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
-
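-# A minimal, hypothetical sketch of GetLanguageCode; the file name argument is only
-# used for error reporting, and the inputs below are illustrative examples.
-ExampleCompat = GetLanguageCode('eng', True, 'Example.uni')      # ISO 639-2 'eng' -> RFC 4646 'en'
-ExampleNative = GetLanguageCode('en-US', False, 'Example.uni')   # valid RFC 4646 code is returned as-is
-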
-## Ucs2Codec
-#
-# This is only a partial codec implementation. It only supports
-# encoding, and is primarily used to check that all the characters are
-# valid for UCS-2.
-#
-class Ucs2Codec(codecs.Codec):
- def __init__(self):
- self.__utf16 = codecs.lookup('utf-16')
-
- def encode(self, input, errors='strict'):
- for Char in input:
- CodePoint = ord(Char)
- if CodePoint >= 0xd800 and CodePoint <= 0xdfff:
- raise ValueError("Code Point is in range reserved for " +
- "UTF-16 surrogate pairs")
- elif CodePoint > 0xffff:
- raise ValueError("Code Point too large to encode in UCS-2")
- return self.__utf16.encode(input)
-
-TheUcs2Codec = Ucs2Codec()
-def Ucs2Search(name):
- if name == 'ucs-2':
- return codecs.CodecInfo(
- name=name,
- encode=TheUcs2Codec.encode,
- decode=TheUcs2Codec.decode)
- else:
- return None
-codecs.register(Ucs2Search)
-
-## StringDefClassObject
-#
-# A structure for language definition
-#
-class StringDefClassObject(object):
- def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
- self.StringName = ''
- self.StringNameByteList = []
- self.StringValue = ''
- self.StringValueByteList = ''
- self.Token = 0
- self.Referenced = Referenced
- self.UseOtherLangDef = UseOtherLangDef
- self.Length = 0
-
- if Name != None:
- self.StringName = Name
- self.StringNameByteList = UniToHexList(Name)
- if Value != None:
- self.StringValue = Value + u'\x00' # Add a NULL at string tail
- self.StringValueByteList = UniToHexList(self.StringValue)
- self.Length = len(self.StringValueByteList)
- if Token != None:
- self.Token = Token
-
- def __str__(self):
- return repr(self.StringName) + ' ' + \
- repr(self.Token) + ' ' + \
- repr(self.Referenced) + ' ' + \
- repr(self.StringValue) + ' ' + \
- repr(self.UseOtherLangDef)
-
- def UpdateValue(self, Value = None):
- if Value != None:
- self.StringValue = Value + u'\x00' # Add a NULL at string tail
- self.StringValueByteList = UniToHexList(self.StringValue)
- self.Length = len(self.StringValueByteList)
-
-## UniFileClassObject
-#
-# A structure for .uni file definition
-#
-class UniFileClassObject(object):
- def __init__(self, FileList = [], IsCompatibleMode = False, IncludePathList = []):
- self.FileList = FileList
- self.Token = 2
- self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
- self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
- self.OrderedStringDict = {} #{ u'LanguageIdentifier' : {StringName:(IndexInList)} }
- self.OrderedStringListByToken = {} #{ u'LanguageIdentifier' : {Token: StringDefClassObject} }
- self.IsCompatibleMode = IsCompatibleMode
- self.IncludePathList = IncludePathList
- if len(self.FileList) > 0:
- self.LoadUniFiles(FileList)
-
- #
- # Get Language definition
- #
- def GetLangDef(self, File, Line):
- Lang = distutils.util.split_quoted((Line.split(u"//")[0]))
- if len(Lang) != 3:
- try:
- FileIn = self.OpenUniFile(LongFilePath(File.Path))
- except UnicodeError, X:
- EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
- LineNo = GetLineNo(FileIn, Line, False)
- EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
- ExtraData="""%s\n\t*Correct format is like '#langdef en-US "English"'""" % Line, File=File, Line=LineNo)
- else:
- LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
- LangPrintName = Lang[2]
-
- IsLangInDef = False
- for Item in self.LanguageDef:
- if Item[0] == LangName:
- IsLangInDef = True
- break;
-
- if not IsLangInDef:
- self.LanguageDef.append([LangName, LangPrintName])
-
- #
- # Add language string
- #
- self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
- self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)
-
- if not IsLangInDef:
- #
- # The found STRING tokens will be added into new language string list
- # so that the unique STRING identifier is reserved for all languages in the package list.
- #
- FirstLangName = self.LanguageDef[0][0]
- if LangName != FirstLangName:
- for Index in range (2, len (self.OrderedStringList[FirstLangName])):
- Item = self.OrderedStringList[FirstLangName][Index]
- if Item.UseOtherLangDef != '':
- OtherLang = Item.UseOtherLangDef
- else:
- OtherLang = FirstLangName
- self.OrderedStringList[LangName].append (StringDefClassObject(Item.StringName, '', Item.Referenced, Item.Token, OtherLang))
- self.OrderedStringDict[LangName][Item.StringName] = len(self.OrderedStringList[LangName]) - 1
- return True
-
- def OpenUniFile(self, FileName):
- #
- # Read file
- #
- try:
- UniFile = open(FileName, mode='rb')
- FileIn = UniFile.read()
- UniFile.close()
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
-
- #
- # Detect Byte Order Mark at beginning of file. Default to UTF-8
- #
- Encoding = 'utf-8'
- if (FileIn.startswith(codecs.BOM_UTF16_BE) or
- FileIn.startswith(codecs.BOM_UTF16_LE)):
- Encoding = 'utf-16'
-
- self.VerifyUcs2Data(FileIn, FileName, Encoding)
-
- UniFile = StringIO.StringIO(FileIn)
- Info = codecs.lookup(Encoding)
- (Reader, Writer) = (Info.streamreader, Info.streamwriter)
- return codecs.StreamReaderWriter(UniFile, Reader, Writer)
-
- def VerifyUcs2Data(self, FileIn, FileName, Encoding):
- Ucs2Info = codecs.lookup('ucs-2')
- #
- # Convert to unicode
- #
- try:
- FileDecoded = codecs.decode(FileIn, Encoding)
- Ucs2Info.encode(FileDecoded)
- except:
- UniFile = StringIO.StringIO(FileIn)
- Info = codecs.lookup(Encoding)
- (Reader, Writer) = (Info.streamreader, Info.streamwriter)
- File = codecs.StreamReaderWriter(UniFile, Reader, Writer)
- LineNumber = 0
- ErrMsg = lambda Encoding, LineNumber: \
- '%s contains invalid %s characters on line %d.' % \
- (FileName, Encoding, LineNumber)
- while True:
- LineNumber = LineNumber + 1
- try:
- Line = File.readline()
- if Line == '':
- EdkLogger.error('Unicode File Parser', PARSER_ERROR,
- ErrMsg(Encoding, LineNumber))
- Ucs2Info.encode(Line)
- except:
- EdkLogger.error('Unicode File Parser', PARSER_ERROR,
- ErrMsg('UCS-2', LineNumber))
-
- #
- # Get String name and value
- #
- def GetStringObject(self, Item):
- Language = ''
- Value = ''
-
- Name = Item.split()[1]
- # Check the string name
- if Name != '':
- MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains invalid characters.' % (Name, self.File))
- LanguageList = Item.split(u'#language ')
- for IndexI in range(len(LanguageList)):
- if IndexI == 0:
- continue
- else:
- Language = LanguageList[IndexI].split()[0]
- Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
- Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
- self.AddStringToList(Name, Language, Value)
-
- #
- # Get include file list and load them
- #
- def GetIncludeFile(self, Item, Dir):
- FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
- self.LoadUniFile(FileName)
-
- def StripComments(self, Line):
- Comment = u'//'
- CommentPos = Line.find(Comment)
- while CommentPos >= 0:
- # if there are non matched quotes before the comment header
- # then we are in the middle of a string
- # but we need to ignore the escaped quotes and backslashes.
- if ((Line.count(u'"', 0, CommentPos) - Line.count(u'\\"', 0, CommentPos)) & 1) == 1:
- CommentPos = Line.find (Comment, CommentPos + 1)
- else:
- return Line[:CommentPos].strip()
- return Line.strip()
-
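- # A hypothetical worked example of StripComments (illustrative lines only):
- #   '#langdef en-US "English" // note'  ->  '#langdef en-US "English"'
- #   '#string STR_URL #language en-US "http://x"' is returned unchanged,
- #   because the '//' sits inside the quoted string value.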
-
- #
- # Pre-process before parse .uni file
- #
- def PreProcess(self, File):
- if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
- EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)
-
- try:
- FileIn = self.OpenUniFile(LongFilePath(File.Path))
- except UnicodeError, X:
- EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
- except:
- EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);
-
- Lines = []
- #
- # Use unique identifier
- #
- for Line in FileIn:
- Line = Line.strip()
- Line = Line.replace(u'\\\\', BACK_SLASH_PLACEHOLDER)
- Line = self.StripComments(Line)
-
- #
- # Ignore empty line
- #
- if len(Line) == 0:
- continue
-
-
- Line = Line.replace(u'/langdef', u'#langdef')
- Line = Line.replace(u'/string', u'#string')
- Line = Line.replace(u'/language', u'#language')
- Line = Line.replace(u'/include', u'#include')
-
- Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
- Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
- Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)
-
- Line = Line.replace(u'\\r\\n', CR + LF)
- Line = Line.replace(u'\\n', CR + LF)
- Line = Line.replace(u'\\r', CR)
- Line = Line.replace(u'\\t', u' ')
- Line = Line.replace(u'\t', u' ')
- Line = Line.replace(u'\\"', u'"')
- Line = Line.replace(u"\\'", u"'")
- Line = Line.replace(BACK_SLASH_PLACEHOLDER, u'\\')
-
- StartPos = Line.find(u'\\x')
- while (StartPos != -1):
- EndPos = Line.find(u'\\', StartPos + 1, StartPos + 7)
- if EndPos != -1 and EndPos - StartPos == 6 :
- if re.match('[a-fA-F0-9]{4}', Line[StartPos + 2 : EndPos], re.UNICODE):
- EndStr = Line[EndPos: ]
- UniStr = ('\u' + (Line[StartPos + 2 : EndPos])).decode('unicode_escape')
- if EndStr.startswith(u'\\x') and len(EndStr) >= 7:
- if EndStr[6] == u'\\' and re.match('[a-fA-F0-9]{4}', EndStr[2 : 6], re.UNICODE):
- Line = Line[0 : StartPos] + UniStr + EndStr
- else:
- Line = Line[0 : StartPos] + UniStr + EndStr[1:]
- StartPos = Line.find(u'\\x', StartPos + 1)
-
- IncList = gIncludePattern.findall(Line)
- if len(IncList) == 1:
- for Dir in [File.Dir] + self.IncludePathList:
- IncFile = PathClass(str(IncList[0]), Dir)
- if os.path.isfile(IncFile.Path):
- Lines.extend(self.PreProcess(IncFile))
- break
- else:
- EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, Message="Cannot find include file", ExtraData=str(IncList[0]))
- continue
-
- Lines.append(Line)
-
- return Lines
-
- #
- # Load a .uni file
- #
- def LoadUniFile(self, File = None):
- if File == None:
- EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
- self.File = File
- #
- # Process special char in file
- #
- Lines = self.PreProcess(File)
-
- #
- # Get Unicode Information
- #
- for IndexI in range(len(Lines)):
- Line = Lines[IndexI]
- if (IndexI + 1) < len(Lines):
- SecondLine = Lines[IndexI + 1]
- if (IndexI + 2) < len(Lines):
- ThirdLine = Lines[IndexI + 2]
-
- #
- # Get Language def information
- #
- if Line.find(u'#langdef ') >= 0:
- self.GetLangDef(File, Line)
- continue
-
- Name = ''
- Language = ''
- Value = ''
- #
- # Get string def information format 1 as below
- #
- # #string MY_STRING_1
- # #language eng
- # My first English string line 1
- # My first English string line 2
- # #string MY_STRING_1
- # #language spa
- # Mi segunda secuencia 1
- # Mi segunda secuencia 2
- #
- if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
- SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
- ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
- Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
- Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
- for IndexJ in range(IndexI + 2, len(Lines)):
- if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
- Value = Value + Lines[IndexJ]
- else:
- IndexI = IndexJ
- break
- # Value = Value.replace(u'\r\n', u'')
- Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
- # Check the string name
- if not self.IsCompatibleMode and Name != '':
- MatchString = re.match('^[a-zA-Z][a-zA-Z0-9_]*$', Name, re.UNICODE)
- if MatchString == None or MatchString.end(0) != len(Name):
- EdkLogger.error('Unicode File Parser', FORMAT_INVALID, 'The string token name %s defined in UNI file %s contains invalid characters.' % (Name, self.File))
- self.AddStringToList(Name, Language, Value)
- continue
-
- #
- # Get string def information format 2 as below
- #
- # #string MY_STRING_1 #language eng "My first English string line 1"
- # "My first English string line 2"
- # #language spa "Mi segunda secuencia 1"
- # "Mi segunda secuencia 2"
- # #string MY_STRING_2 #language eng "My first English string line 1"
- # "My first English string line 2"
- # #string MY_STRING_2 #language spa "Mi segunda secuencia 1"
- # "Mi segunda secuencia 2"
- #
- if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
- StringItem = Line
- for IndexJ in range(IndexI + 1, len(Lines)):
- if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
- IndexI = IndexJ
- break
- elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
- StringItem = StringItem + Lines[IndexJ]
- elif Lines[IndexJ].count(u'\"') >= 2:
- StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
- self.GetStringObject(StringItem)
- continue
-
- #
- # Load multiple .uni files
- #
- def LoadUniFiles(self, FileList):
- if len(FileList) > 0:
- for File in FileList:
- self.LoadUniFile(File)
-
- #
- # Add a string to list
- #
- def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
- for LangNameItem in self.LanguageDef:
- if Language == LangNameItem[0]:
- break
- else:
- EdkLogger.error('Unicode File Parser', FORMAT_NOT_SUPPORTED, "The language '%s' for %s is not defined in Unicode file %s." \
- % (Language, Name, self.File))
-
- if Language not in self.OrderedStringList:
- self.OrderedStringList[Language] = []
- self.OrderedStringDict[Language] = {}
-
- IsAdded = True
- if Name in self.OrderedStringDict[Language]:
- IsAdded = False
- if Value != None:
- ItemIndexInList = self.OrderedStringDict[Language][Name]
- Item = self.OrderedStringList[Language][ItemIndexInList]
- Item.UpdateValue(Value)
- Item.UseOtherLangDef = ''
-
- if IsAdded:
- Token = len(self.OrderedStringList[Language])
- if Index == -1:
- self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
- self.OrderedStringDict[Language][Name] = Token
- for LangName in self.LanguageDef:
- #
- # New STRING token will be added into all language string lists.
- # so that the unique STRING identifier is reserved for all languages in the package list.
- #
- if LangName[0] != Language:
- if UseOtherLangDef != '':
- OtherLangDef = UseOtherLangDef
- else:
- OtherLangDef = Language
- self.OrderedStringList[LangName[0]].append(StringDefClassObject(Name, '', Referenced, Token, OtherLangDef))
- self.OrderedStringDict[LangName[0]][Name] = len(self.OrderedStringList[LangName[0]]) - 1
- else:
- self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
- self.OrderedStringDict[Language][Name] = Index
-
- #
- # Set the string as referenced
- #
- def SetStringReferenced(self, Name):
- #
- # String tokens are added in the same order in all language string lists.
- # So, only update the status of the string token in the first language string list.
- #
- Lang = self.LanguageDef[0][0]
- if Name in self.OrderedStringDict[Lang]:
- ItemIndexInList = self.OrderedStringDict[Lang][Name]
- Item = self.OrderedStringList[Lang][ItemIndexInList]
- Item.Referenced = True
-
- #
- # Search the string in language definition by Name
- #
- def FindStringValue(self, Name, Lang):
- if Name in self.OrderedStringDict[Lang]:
- ItemIndexInList = self.OrderedStringDict[Lang][Name]
- return self.OrderedStringList[Lang][ItemIndexInList]
-
- return None
-
- #
- # Search the string in language definition by Token
- #
- def FindByToken(self, Token, Lang):
- for Item in self.OrderedStringList[Lang]:
- if Item.Token == Token:
- return Item
-
- return None
-
- #
- # Re-order strings and re-generate tokens
- #
- def ReToken(self):
- #
- # Re-token all language strings according to the status of the string tokens in the first language string list.
- #
- FirstLangName = self.LanguageDef[0][0]
-
- # Convert OrderedStringList to OrderedStringListByToken in order to facilitate future searches by token
- for LangNameItem in self.LanguageDef:
- self.OrderedStringListByToken[LangNameItem[0]] = {}
-
- #
- # Use small token values for all referenced string tokens.
- #
- RefToken = 0
- for Index in range (0, len (self.OrderedStringList[FirstLangName])):
- FirstLangItem = self.OrderedStringList[FirstLangName][Index]
- if FirstLangItem.Referenced == True:
- for LangNameItem in self.LanguageDef:
- LangName = LangNameItem[0]
- OtherLangItem = self.OrderedStringList[LangName][Index]
- OtherLangItem.Referenced = True
- OtherLangItem.Token = RefToken
- self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
- RefToken = RefToken + 1
-
- #
- # Use big token values for all unreferenced string tokens.
- #
- UnRefToken = 0
- for Index in range (0, len (self.OrderedStringList[FirstLangName])):
- FirstLangItem = self.OrderedStringList[FirstLangName][Index]
- if FirstLangItem.Referenced == False:
- for LangNameItem in self.LanguageDef:
- LangName = LangNameItem[0]
- OtherLangItem = self.OrderedStringList[LangName][Index]
- OtherLangItem.Token = RefToken + UnRefToken
- self.OrderedStringListByToken[LangName][OtherLangItem.Token] = OtherLangItem
- UnRefToken = UnRefToken + 1
-
- #
- # Show the instance itself
- #
- def ShowMe(self):
- print self.LanguageDef
- #print self.OrderedStringList
- for Item in self.OrderedStringList:
- print Item
- for Member in self.OrderedStringList[Item]:
- print str(Member)
-
-# This acts like the main() function for the script, unless it is 'import'ed into another
-# script.
-if __name__ == '__main__':
- EdkLogger.Initialize()
- EdkLogger.SetLevel(EdkLogger.DEBUG_0)
- a = UniFileClassObject([PathClass("C:\\Edk\\Strings.uni"), PathClass("C:\\Edk\\Strings2.uni")])
- a.ReToken()
- a.ShowMe()
diff --git a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
deleted file mode 100644
index 92ede7a823..0000000000
--- a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Copyright (c) 2015, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-#
-# This file is used to collect the Variable checking information
-#
-
-# #
-# Import Modules
-#
-import os
-from Common.RangeExpression import RangeExpression
-from Common.Misc import *
-from StringIO import StringIO
-from struct import pack
-
-class VAR_CHECK_PCD_VARIABLE_TAB_CONTAINER(object):
- def __init__(self):
- self.var_check_info = []
-
- def push_back(self, var_check_tab):
- for tab in self.var_check_info:
- if tab.equal(var_check_tab):
- tab.merge(var_check_tab)
- break
- else:
- self.var_check_info.append(var_check_tab)
-
- def dump(self, dest, Phase):
-
- FormatMap = {}
- FormatMap[1] = "=B"
- FormatMap[2] = "=H"
- FormatMap[4] = "=L"
- FormatMap[8] = "=Q"
-
- if not os.path.isabs(dest):
- return
- if not os.path.exists(dest):
- os.mkdir(dest)
- BinFileName = "PcdVarCheck.bin"
- BinFilePath = os.path.join(dest, BinFileName)
- Buffer = ''
- index = 0
- for var_check_tab in self.var_check_info:
- index += 1
- realLength = 0
- realLength += 32
- Name = var_check_tab.Name[1:-1]
- NameChars = Name.split(",")
- realLength += len(NameChars)
- if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
- realLength += (4 - (realLength % 4))
- itemIndex = 0
- for item in var_check_tab.validtab:
- itemIndex += 1
- realLength += 5
- for v_data in item.data:
- if type(v_data) in (int, long):
- realLength += item.StorageWidth
- else:
- realLength += item.StorageWidth
- realLength += item.StorageWidth
- if (index == len(self.var_check_info)) :
- if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
- realLength += (4 - (realLength % 4))
- else:
- if realLength % 4:
- realLength += (4 - (realLength % 4))
- var_check_tab.Length = realLength
- realLength = 0
- index = 0
- for var_check_tab in self.var_check_info:
- index += 1
-
- b = pack("=H", var_check_tab.Revision)
- Buffer += b
- realLength += 2
-
- b = pack("=H", var_check_tab.HeaderLength)
- Buffer += b
- realLength += 2
-
- b = pack("=L", var_check_tab.Length)
- Buffer += b
- realLength += 4
-
- b = pack("=B", var_check_tab.Type)
- Buffer += b
- realLength += 1
-
- for i in range(0, 3):
- b = pack("=B", var_check_tab.Reserved)
- Buffer += b
- realLength += 1
-
- b = pack("=L", var_check_tab.Attributes)
- Buffer += b
- realLength += 4
-
- Guid = var_check_tab.Guid
- b = pack('=LHHBBBBBBBB',
- Guid[0],
- Guid[1],
- Guid[2],
- Guid[3],
- Guid[4],
- Guid[5],
- Guid[6],
- Guid[7],
- Guid[8],
- Guid[9],
- Guid[10],
- )
- Buffer += b
- realLength += 16
-
- Name = var_check_tab.Name[1:-1]
- NameChars = Name.split(",")
- for NameChar in NameChars:
- NameCharNum = int(NameChar, 16)
- b = pack("=B", NameCharNum)
- Buffer += b
- realLength += 1
-
- if (index < len(self.var_check_info) and realLength % 4) or (index == len(self.var_check_info) and len(var_check_tab.validtab) > 0 and realLength % 4):
- for i in range(4 - (realLength % 4)):
- b = pack("=B", var_check_tab.pad)
- Buffer += b
- realLength += 1
- itemIndex = 0
- for item in var_check_tab.validtab:
- itemIndex += 1
-
- b = pack("=B", item.Type)
- Buffer += b
- realLength += 1
-
- b = pack("=B", item.Length)
- Buffer += b
- realLength += 1
-
- b = pack("=H", int(item.VarOffset, 16))
- Buffer += b
- realLength += 2
-
- b = pack("=B", item.StorageWidth)
- Buffer += b
- realLength += 1
- for v_data in item.data:
- if type(v_data) in (int, long):
- b = pack(FormatMap[item.StorageWidth], v_data)
- Buffer += b
- realLength += item.StorageWidth
- else:
- b = pack(FormatMap[item.StorageWidth], v_data[0])
- Buffer += b
- realLength += item.StorageWidth
- b = pack(FormatMap[item.StorageWidth], v_data[1])
- Buffer += b
- realLength += item.StorageWidth
-
- if (index == len(self.var_check_info)) :
- if (itemIndex < len(var_check_tab.validtab)) and realLength % 4:
- for i in range(4 - (realLength % 4)):
- b = pack("=B", var_check_tab.pad)
- Buffer += b
- realLength += 1
- else:
- if realLength % 4:
- for i in range(4 - (realLength % 4)):
- b = pack("=B", var_check_tab.pad)
- Buffer += b
- realLength += 1
-
- DbFile = StringIO()
- if Phase == 'DXE' and os.path.exists(BinFilePath):
- BinFile = open(BinFilePath, "rb")
- BinBuffer = BinFile.read()
- BinFile.close()
- BinBufferSize = len(BinBuffer)
- if (BinBufferSize % 4):
- for i in range(4 - (BinBufferSize % 4)):
- b = pack("=B", VAR_CHECK_PCD_VARIABLE_TAB.pad)
- BinBuffer += b
- Buffer = BinBuffer + Buffer
- DbFile.write(Buffer)
- SaveFileOnChange(BinFilePath, DbFile.getvalue(), True)
-
-
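-# A minimal, hypothetical sketch of the 4-byte alignment used by dump(): a 32-byte
-# header plus a variable name occupying 5 bytes needs 3 pad bytes (value 0xDA, the
-# class pad byte) to reach the next 4-byte boundary. The names below are
-# illustrative only and are not part of the database layout itself.
-ExampleLength = 32 + 5
-ExamplePad = (4 - (ExampleLength % 4)) % 4      # -> 3
-ExamplePadBytes = pack("=B", 0xDA) * ExamplePad # same "=B" encoding dump() uses for pad bytes
-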
-class VAR_CHECK_PCD_VARIABLE_TAB(object):
- pad = 0xDA
- def __init__(self, TokenSpaceGuid, PcdCName):
- self.Revision = 0x0001
- self.HeaderLength = 0
- self.Length = 0 # Length include this header
- self.Type = 0
- self.Reserved = 0
- self.Attributes = 0x00000000
- self.Guid = eval("[" + TokenSpaceGuid.replace("{", "").replace("}", "") + "]")
- self.Name = PcdCName
- self.validtab = []
-
- def UpdateSize(self):
- self.HeaderLength = 32 + len(self.Name.split(","))
- self.Length = 32 + len(self.Name.split(",")) + self.GetValidTabLen()
-
- def GetValidTabLen(self):
- validtablen = 0
- for item in self.validtab:
- validtablen += item.Length
- return validtablen
-
- def SetAttributes(self, attributes):
- self.Attributes = attributes
-
- def push_back(self, valid_obj):
- if valid_obj is not None:
- self.validtab.append(valid_obj)
-
- def equal(self, varchecktab):
- if self.Guid == varchecktab.Guid and self.Name == varchecktab.Name:
- return True
- else:
- return False
-
- def merge(self, varchecktab):
- for validobj in varchecktab.validtab:
- if validobj in self.validtab:
- continue
- self.validtab.append(validobj)
- self.UpdateSize()
-
-
-class VAR_CHECK_PCD_VALID_OBJ(object):
- def __init__(self, VarOffset, data, PcdDataType):
- self.Type = 1
- self.Length = 0 # Length include this header
- self.VarOffset = VarOffset
- self.StorageWidth = 0
- self.PcdDataType = PcdDataType.strip()
- self.rawdata = data
- self.data = set()
- self.ValidData = True
- self.updateStorageWidth()
- def updateStorageWidth(self):
- if self.PcdDataType == "UINT8" or self.PcdDataType == "BOOLEAN":
- self.StorageWidth = 1
- elif self.PcdDataType == "UINT16":
- self.StorageWidth = 2
- elif self.PcdDataType == "UINT32":
- self.StorageWidth = 4
- elif self.PcdDataType == "UINT64":
- self.StorageWidth = 8
- else:
- self.StorageWidth = 0
- self.ValidData = False
-
- def __eq__(self, validObj):
- if self.VarOffset == validObj.VarOffset:
- return True
- else:
- return False
-
-class VAR_CHECK_PCD_VALID_LIST(VAR_CHECK_PCD_VALID_OBJ):
- def __init__(self, VarOffset, validlist, PcdDataType):
- super(VAR_CHECK_PCD_VALID_LIST, self).__init__(VarOffset, validlist, PcdDataType)
- self.Type = 1
- self.update_data()
- self.update_size()
- def update_data(self):
- valid_num_list = []
- data_list = []
- for item in self.rawdata:
- valid_num_list.extend(item.split(','))
-
- for valid_num in valid_num_list:
- valid_num = valid_num.strip()
-
- if valid_num.startswith('0x') or valid_num.startswith('0X'):
- data_list.append(int(valid_num, 16))
- else:
- data_list.append(int(valid_num))
-
-
- self.data = set(data_list)
-
- def update_size(self):
- self.Length = 5 + len(self.data) * self.StorageWidth
-
-
-class VAR_CHECK_PCD_VALID_RANGE(VAR_CHECK_PCD_VALID_OBJ):
- def __init__(self, VarOffset, validrange, PcdDataType):
- super(VAR_CHECK_PCD_VALID_RANGE, self).__init__(VarOffset, validrange, PcdDataType)
- self.Type = 2
- self.update_data()
- self.update_size()
- def update_data(self):
- RangeExpr = ""
- data_list = []
- # OR together every raw range expression; 'i' distinguishes the first item
- i = 0
- for item in self.rawdata:
- if i == 0:
- RangeExpr = "( " + item + " )"
- else:
- RangeExpr = RangeExpr + " OR ( " + item + " )"
- i += 1
- range_result = RangeExpression(RangeExpr, self.PcdDataType)(True)
- for rangelist in range_result:
- for obj in rangelist.pop():
- data_list.append((obj.start, obj.end))
- self.data = set(data_list)
-
- def update_size(self):
- self.Length = 5 + len(self.data) * 2 * self.StorageWidth
-
-
-class VAR_VALID_OBJECT_FACTORY(object):
- def __init__(self):
- pass
- @staticmethod
- def Get_valid_object(PcdClass, VarOffset):
- if PcdClass.validateranges:
- return VAR_CHECK_PCD_VALID_RANGE(VarOffset, PcdClass.validateranges, PcdClass.DatumType)
- if PcdClass.validlists:
- return VAR_CHECK_PCD_VALID_LIST(VarOffset, PcdClass.validlists, PcdClass.DatumType)
- else:
- return None
-
-if __name__ == "__main__":
- class TestObj(object):
- def __init__(self, number1):
- self.number_1 = number1
- def __eq__(self, testobj):
- if self.number_1 == testobj.number_1:
- return True
- else:
- return False
- test1 = TestObj(1)
- test2 = TestObj(2)
-
- testarr = [test1, test2]
- print TestObj(2) in testarr
- print TestObj(2) == test2
-
diff --git a/BaseTools/Source/Python/AutoGen/__init__.py b/BaseTools/Source/Python/AutoGen/__init__.py
deleted file mode 100644
index d2de425732..0000000000
--- a/BaseTools/Source/Python/AutoGen/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-## @file
-# Python 'AutoGen' package initialization file.
-#
-# This file is required to make the Python interpreter treat the directory
-# as containing a package.
-#
-# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-__all__ = ["AutoGen"]