diff options
Diffstat (limited to 'BaseTools/Source/Python')
148 files changed, 83492 insertions, 0 deletions
## @file
# Generate AutoGen.h, AutoGen.c and *.depex files
#
# Copyright (c) 2007, Intel Corporation
# All rights reserved. This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#

## Import Modules
#
import os
import re
import os.path as path
import copy

import GenC
import GenMake
import GenDepex

from StrGather import *
from BuildEngine import BuildRule

from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
from Common.String import *
import Common.GlobalData as GlobalData
from GenFds.FdfParser import *
from CommonDataClass.CommonClass import SkuInfoClass
from Workspace.BuildClassObject import *

## Regular expression for splitting Dependency Expression string into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")

## Mapping Makefile type
gMakeTypeMap = {"MSFT":"nmake", "GCC":"gmake"}


## Build rule configuration file
gBuildRuleFile = 'Conf/build_rule.txt'

## default file name for AutoGen
gAutoGenCodeFileName = "AutoGen.c"
gAutoGenHeaderFileName = "AutoGen.h"
gAutoGenStringFileName = "%(module_name)sStrDefs.h"
gAutoGenDepexFileName = "%(module_name)s.depex"
gAutoGenSmmDepexFileName = "%(module_name)s.smm"

## Base class for AutoGen
#
#  This class just implements the cache mechanism of AutoGen objects.
#
class AutoGen(object):
    # database to maintain the objects of xxxAutoGen
    _CACHE_ = {}    # (BuildTarget, ToolChain) : {ARCH : {platform file: AutoGen object}}}

    ## Factory method
    #
    #  Objects are cached per (Target, Toolchain)/Arch/MetaFile, so asking for
    #  the same combination twice returns the very same instance instead of
    #  constructing (and re-parsing) a new one.
    #
    #   @param  Class           class object of real AutoGen class
    #                           (WorkspaceAutoGen, ModuleAutoGen or PlatformAutoGen)
    #   @param  Workspace       Workspace directory or WorkspaceAutoGen object
    #   @param  MetaFile        The path of meta file
    #   @param  Target          Build target
    #   @param  Toolchain       Tool chain name
    #   @param  Arch            Target arch
    #   @param  *args           The specific class related parameters
    #   @param  **kwargs        The specific class related dict parameters
    #
    def __new__(Class, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
        CacheKey = (Target, Toolchain)
        # serve the object from the cache when one was created before
        if CacheKey in Class._CACHE_ \
           and Arch in Class._CACHE_[CacheKey] \
           and MetaFile in Class._CACHE_[CacheKey][Arch]:
            return Class._CACHE_[CacheKey][Arch][MetaFile]

        NewObject = super(AutoGen, Class).__new__(Class)
        # _Init() is the real constructor; a failed initialization produces no object
        if not NewObject._Init(Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
            return None

        # remember the freshly built object for later requests
        if CacheKey not in Class._CACHE_:
            Class._CACHE_[CacheKey] = {}
        if Arch not in Class._CACHE_[CacheKey]:
            Class._CACHE_[CacheKey][Arch] = {}
        Class._CACHE_[CacheKey][Arch][MetaFile] = NewObject

        return NewObject

    ## hash() operator
    #
    #  The file path of platform file will be used to represent hash value of this object
    #
    #   @retval int Hash value of the file path of platform file
    #
    def __hash__(self):
        return hash(self.MetaFile)

    ## str() operator
    #
    #  The file path of platform file will be used to represent this object
    #
    #   @retval string String of platform file path
    #
    def __str__(self):
        return str(self.MetaFile)

    ## "==" operator
    #
    #  NOTE(review): compares self.MetaFile against Other directly (not against
    #  Other.MetaFile), so an AutoGen object compares equal to a meta-file value.
    def __eq__(self, Other):
        return Other and self.MetaFile == Other

## Workspace AutoGen class
#
#  This class is used mainly to control the whole platform build for different
#  architecture. This class will generate top level makefile.
#
class WorkspaceAutoGen(AutoGen):
    ## Real constructor of WorkspaceAutoGen
    #
    # This method behaves the same as __init__ except that it needs explicit invoke
    # (in super class's __new__ method)
    #
    #   @param  WorkspaceDir            Root directory of workspace
    #   @param  ActivePlatform          Meta-file of active platform
    #   @param  Target                  Build target
    #   @param  Toolchain               Tool chain name
    #   @param  ArchList                List of architecture of current build
    #   @param  MetaFileDb              Database containing meta-files
    #   @param  BuildConfig             Configuration of build
    #   @param  ToolDefinition          Tool chain definitions
    #   @param  FlashDefinitionFile     File of flash definition
    #   @param  Fds                     FD list to be generated (default: none)
    #   @param  Fvs                     FV list to be generated (default: none)
    #   @param  SkuId                   SKU id from command line
    #
    #   @retval True                    Initialization succeeded
    #
    def _Init(self, WorkspaceDir, ActivePlatform, Target, Toolchain, ArchList, MetaFileDb,
              BuildConfig, ToolDefinition, FlashDefinitionFile='', Fds=None, Fvs=None, SkuId=''):
        # Use None as the default and create fresh lists here: a mutable default
        # argument ([]) would be one shared list across every call of this method.
        if Fds is None:
            Fds = []
        if Fvs is None:
            Fvs = []

        self.MetaFile       = ActivePlatform.MetaFile
        self.WorkspaceDir   = WorkspaceDir
        self.Platform       = ActivePlatform
        self.BuildTarget    = Target
        self.ToolChain      = Toolchain
        self.ArchList       = ArchList
        self.SkuId          = SkuId

        self.BuildDatabase  = MetaFileDb
        self.TargetTxt      = BuildConfig
        self.ToolDef        = ToolDefinition
        self.FdfFile        = FlashDefinitionFile
        self.FdTargetList   = Fds
        self.FvTargetList   = Fvs
        self.AutoGenObjectList = []

        # there's many relative directory operations, so ...
        os.chdir(self.WorkspaceDir)

        # parse FDF file to get PCDs in it, if any
        if self.FdfFile is not None and self.FdfFile != '':
            Fdf = FdfParser(self.FdfFile.Path)
            Fdf.ParseFile()
            PcdSet = Fdf.Profile.PcdDict
            # NOTE(review): ModuleList is collected but not used in this method
            ModuleList = Fdf.Profile.InfList
        else:
            PcdSet = {}
            ModuleList = []

        # apply SKU and inject PCDs from Flash Definition file
        for Arch in self.ArchList:
            Platform = self.BuildDatabase[self.MetaFile, Arch]
            Platform.SkuName = self.SkuId
            for Name, Guid in PcdSet:
                Platform.AddPcd(Name, Guid, PcdSet[Name, Guid])

            Pa = PlatformAutoGen(self, self.MetaFile, Target, Toolchain, Arch)
            #
            # Explicitly collect platform's dynamic PCDs
            #
            Pa.CollectPlatformDynamicPcds()
            self.AutoGenObjectList.append(Pa)

        # lazily-computed attributes, filled in by the _Get* accessors below
        self._BuildDir      = None
        self._FvDir         = None
        self._MakeFileDir   = None
        self._BuildCommand  = None

        return True

    def __repr__(self):
        return "%s [%s]" % (self.MetaFile, ", ".join(self.ArchList))

    ## Return the directory to store FV files
    def _GetFvDir(self):
        if self._FvDir is None:
            self._FvDir = path.join(self.BuildDir, 'FV')
        return self._FvDir

    ## Return the directory to store all intermediate and final files built
    def _GetBuildDir(self):
        # every platform AutoGen object shares the same build directory
        return self.AutoGenObjectList[0].BuildDir

    ## Return the build output directory platform specifies
    def _GetOutputDir(self):
        return self.Platform.OutputDirectory

    ## Return platform name
    def _GetName(self):
        return self.Platform.PlatformName

    ## Return meta-file GUID
    def _GetGuid(self):
        return self.Platform.Guid

    ## Return platform version
    def _GetVersion(self):
        return self.Platform.Version

    ## Return paths of tools
    def _GetToolDefinition(self):
        # tool definitions are the same for all archs; take them from the first one
        return self.AutoGenObjectList[0].ToolDefinition

    ## Return directory of platform makefile
    #
    #   @retval     string  Makefile directory
    #
    def _GetMakeFileDir(self):
        if self._MakeFileDir is None:
            self._MakeFileDir = self.BuildDir
        return self._MakeFileDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    def _GetBuildCommand(self):
        if self._BuildCommand is None:
            # BuildCommand should be all the same. So just get one from platform AutoGen
            self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
        return self._BuildCommand

    ## Create makefile for the platform and modules in it
    #
    #   @param      CreateDepsMakeFile      Flag indicating if the makefile for
    #                                       modules will be created as well
    #
    def CreateMakeFile(self, CreateDepsMakeFile=False):
        # create makefile for platform
        Makefile = GenMake.TopLevelMakefile(self)
        if Makefile.Generate():
            EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for platform [%s] %s\n" %
                            (self.MetaFile, self.ArchList))
        else:
            EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for platform [%s] %s\n" %
                            (self.MetaFile, self.ArchList))

        if CreateDepsMakeFile:
            for Pa in self.AutoGenObjectList:
                Pa.CreateMakeFile(CreateDepsMakeFile)

    ## Create autogen code for platform and modules
    #
    #  Since there's no autogen code for platform, this method will do nothing
    #  if CreateModuleCodeFile is set to False.
    #
    #   @param      CreateDepsCodeFile      Flag indicating if creating module's
    #                                       autogen code file or not
    #
    def CreateCodeFile(self, CreateDepsCodeFile=False):
        if not CreateDepsCodeFile:
            return
        for Pa in self.AutoGenObjectList:
            Pa.CreateCodeFile(CreateDepsCodeFile)

    Name                = property(_GetName)
    Guid                = property(_GetGuid)
    Version             = property(_GetVersion)
    OutputDir           = property(_GetOutputDir)

    ToolDefinition      = property(_GetToolDefinition)       # toolcode : tool path

    BuildDir            = property(_GetBuildDir)
    FvDir               = property(_GetFvDir)
    MakeFileDir         = property(_GetMakeFileDir)
    BuildCommand        = property(_GetBuildCommand)

## AutoGen class for platform
#
#  PlatformAutoGen class will process the original information in platform
#  file in order to generate makefile for platform.
+# +class PlatformAutoGen(AutoGen): + # + # Used to store all PCDs for both PEI and DXE phase, in order to generate + # correct PCD database + # + _DynaPcdList_ = [] + _NonDynaPcdList_ = [] + + ## The real constructor of PlatformAutoGen + # + # This method is not supposed to be called by users of PlatformAutoGen. It's + # only used by factory method __new__() to do real initialization work for an + # object of PlatformAutoGen + # + # @param Workspace WorkspaceAutoGen object + # @param PlatformFile Platform file (DSC file) + # @param Target Build target (DEBUG, RELEASE) + # @param Toolchain Name of tool chain + # @param Arch arch of the platform supports + # + def _Init(self, Workspace, PlatformFile, Target, Toolchain, Arch): + EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch)) + GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target) + + self.MetaFile = PlatformFile + self.Workspace = Workspace + self.WorkspaceDir = Workspace.WorkspaceDir + self.ToolChain = Toolchain + self.BuildTarget = Target + self.Arch = Arch + self.SourceDir = PlatformFile.SubDir + self.SourceOverrideDir = None + self.FdTargetList = self.Workspace.FdTargetList + self.FvTargetList = self.Workspace.FvTargetList + + # flag indicating if the makefile/C-code file has been created or not + self.IsMakeFileCreated = False + self.IsCodeFileCreated = False + + self._Platform = None + self._Name = None + self._Guid = None + self._Version = None + + self._BuildRule = None + self._SourceDir = None + self._BuildDir = None + self._OutputDir = None + self._FvDir = None + self._MakeFileDir = None + self._FdfFile = None + + self._PcdTokenNumber = None # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber + self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...] + self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...] 
+ + self._ToolDefinitions = None + self._ToolDefFile = None # toolcode : tool path + self._ToolChainFamily = None + self._BuildRuleFamily = None + self._BuildOption = None # toolcode : option + self._PackageList = None + self._ModuleAutoGenList = None + self._LibraryAutoGenList = None + self._BuildCommand = None + + # get the original module/package/platform objects + self.BuildDatabase = Workspace.BuildDatabase + return True + + def __repr__(self): + return "%s [%s]" % (self.MetaFile, self.Arch) + + ## Create autogen code for platform and modules + # + # Since there's no autogen code for platform, this method will do nothing + # if CreateModuleCodeFile is set to False. + # + # @param CreateModuleCodeFile Flag indicating if creating module's + # autogen code file or not + # + def CreateCodeFile(self, CreateModuleCodeFile=False): + # only module has code to be greated, so do nothing if CreateModuleCodeFile is False + if self.IsCodeFileCreated or not CreateModuleCodeFile: + return + + for Ma in self.ModuleAutoGenList: + Ma.CreateCodeFile(True) + + # don't do this twice + self.IsCodeFileCreated = True + + ## Create makefile for the platform and mdoules in it + # + # @param CreateModuleMakeFile Flag indicating if the makefile for + # modules will be created as well + # + def CreateMakeFile(self, CreateModuleMakeFile=False): + if CreateModuleMakeFile: + for ModuleFile in self.Platform.Modules: + Ma = ModuleAutoGen(self.Workspace, ModuleFile, self.BuildTarget, + self.ToolChain, self.Arch, self.MetaFile) + Ma.CreateMakeFile(True) + + # no need to create makefile for the platform more than once + if self.IsMakeFileCreated: + return + + # create makefile for platform + Makefile = GenMake.PlatformMakefile(self) + if Makefile.Generate(): + EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for platform [%s] [%s]\n" % + (self.MetaFile, self.Arch)) + else: + EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for platform [%s] [%s]\n" % + (self.MetaFile, 
self.Arch)) + self.IsMakeFileCreated = True + + ## Collect dynamic PCDs + # + # Gather dynamic PCDs list from each module and their settings from platform + # This interface should be invoked explicitly when platform action is created. + # + def CollectPlatformDynamicPcds(self): + # for gathering error information + NoDatumTypePcdList = set() + + self._GuidValue = {} + for F in self.Platform.Modules.keys(): + M = ModuleAutoGen(self.Workspace, F, self.BuildTarget, self.ToolChain, self.Arch, self.MetaFile) + #GuidValue.update(M.Guids) + for PcdFromModule in M.ModulePcdList+M.LibraryPcdList: + # make sure that the "VOID*" kind of datum has MaxDatumSize set + if PcdFromModule.DatumType == "VOID*" and PcdFromModule.MaxDatumSize == None: + NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, F)) + + if PcdFromModule.Type in GenC.gDynamicPcd or PcdFromModule.Type in GenC.gDynamicExPcd: + # + # If a dynamic PCD used by a PEM module/PEI module & DXE module, + # it should be stored in Pcd PEI database, If a dynamic only + # used by DXE module, it should be stored in DXE PCD database. 
+ # The default Phase is DXE + # + if M.ModuleType in ["PEIM", "PEI_CORE"]: + PcdFromModule.Phase = "PEI" + if PcdFromModule not in self._DynaPcdList_: + self._DynaPcdList_.append(PcdFromModule) + elif PcdFromModule.Phase == 'PEI': + # overwrite any the same PCD existing, if Phase is PEI + Index = self._DynaPcdList_.index(PcdFromModule) + self._DynaPcdList_[Index] = PcdFromModule + elif PcdFromModule not in self._NonDynaPcdList_: + self._NonDynaPcdList_.append(PcdFromModule) + + # print out error information and break the build, if error found + if len(NoDatumTypePcdList) > 0: + NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList) + EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error", + File=self.MetaFile, + ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n" + % NoDatumTypePcdListString) + self._NonDynamicPcdList = self._NonDynaPcdList_ + self._DynamicPcdList = self._DynaPcdList_ + + # + # Sort dynamic PCD list to: + # 1) If PCD's datum type is VOID* and value is unicode string which starts with L, the PCD item should + # try to be put header of dynamicd List + # 2) If PCD is HII type, the PCD item should be put after unicode type PCD + # + # The reason of sorting is make sure the unicode string is in double-byte alignment in string table. 
+ # + UnicodePcdArray = [] + HiiPcdArray = [] + OtherPcdArray = [] + for Pcd in self._DynamicPcdList: + # just pick the a value to determine whether is unicode string type + Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]] + PcdValue = Sku.DefaultValue + if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"): + # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex + UnicodePcdArray.append(Pcd) + elif len(Sku.VariableName) > 0: + # if found HII type PCD then insert to right of UnicodeIndex + HiiPcdArray.append(Pcd) + else: + OtherPcdArray.append(Pcd) + del self._DynamicPcdList[:] + self._DynamicPcdList.extend(UnicodePcdArray) + self._DynamicPcdList.extend(HiiPcdArray) + self._DynamicPcdList.extend(OtherPcdArray) + + + ## Return the platform build data object + def _GetPlatform(self): + if self._Platform == None: + self._Platform = self.BuildDatabase[self.MetaFile, self.Arch] + return self._Platform + + ## Return platform name + def _GetName(self): + return self.Platform.PlatformName + + ## Return the meta file GUID + def _GetGuid(self): + return self.Platform.Guid + + ## Return the platform version + def _GetVersion(self): + return self.Platform.Version + + ## Return the FDF file name + def _GetFdfFile(self): + if self._FdfFile == None: + if self.Workspace.FdfFile != "": + self._FdfFile= path.join(self.WorkspaceDir, self.Workspace.FdfFile) + else: + self._FdfFile = '' + return self._FdfFile + + ## Return the build output directory platform specifies + def _GetOutputDir(self): + return self.Platform.OutputDirectory + + ## Return the directory to store all intermediate and final files built + def _GetBuildDir(self): + if self._BuildDir == None: + if os.path.isabs(self.OutputDir): + self._BuildDir = path.join( + path.abspath(self.OutputDir), + self.BuildTarget + "_" + self.ToolChain, + ) + else: + self._BuildDir = path.join( + self.WorkspaceDir, + self.OutputDir, + self.BuildTarget + "_" + self.ToolChain, + ) + return 
self._BuildDir + + ## Return directory of platform makefile + # + # @retval string Makefile directory + # + def _GetMakeFileDir(self): + if self._MakeFileDir == None: + self._MakeFileDir = path.join(self.BuildDir, self.Arch) + return self._MakeFileDir + + ## Return build command string + # + # @retval string Build command string + # + def _GetBuildCommand(self): + if self._BuildCommand == None: + self._BuildCommand = [] + if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]: + self._BuildCommand += SplitOption(self.ToolDefinition["MAKE"]["PATH"]) + if "FLAGS" in self.ToolDefinition["MAKE"]: + NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip() + if NewOption != '': + self._BuildCommand += SplitOption(NewOption) + return self._BuildCommand + + ## Get tool chain definition + # + # Get each tool defition for given tool chain from tools_def.txt and platform + # + def _GetToolDefinition(self): + if self._ToolDefinitions == None: + ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary + if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase: + EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration", + ExtraData="[%s]" % self.MetaFile) + self._ToolDefinitions = {} + DllPathList = set() + for Def in ToolDefinition: + Target, Tag, Arch, Tool, Attr = Def.split("_") + if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch: + continue + + Value = ToolDefinition[Def] + # don't record the DLL + if Attr == "DLL": + DllPathList.add(Value) + continue + + if Tool not in self._ToolDefinitions: + self._ToolDefinitions[Tool] = {} + self._ToolDefinitions[Tool][Attr] = Value + + ToolsDef = '' + MakePath = '' + if GlobalData.gOptions.SilentMode and "MAKE" in self._ToolDefinitions: + if "FLAGS" not in self._ToolDefinitions["MAKE"]: + self._ToolDefinitions["MAKE"]["FLAGS"] = "" + self._ToolDefinitions["MAKE"]["FLAGS"] += " -s" + MakeFlags = '' + for Tool in self._ToolDefinitions: + 
for Attr in self._ToolDefinitions[Tool]: + Value = self._ToolDefinitions[Tool][Attr] + if Tool in self.BuildOption and Attr in self.BuildOption[Tool]: + # check if override is indicated + if self.BuildOption[Tool][Attr].startswith('='): + Value = self.BuildOption[Tool][Attr][1:] + else: + Value += " " + self.BuildOption[Tool][Attr] + + if Attr == "PATH": + # Don't put MAKE definition in the file + if Tool == "MAKE": + MakePath = Value + else: + ToolsDef += "%s = %s\n" % (Tool, Value) + elif Attr != "DLL": + # Don't put MAKE definition in the file + if Tool == "MAKE": + if Attr == "FLAGS": + MakeFlags = Value + else: + ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value) + ToolsDef += "\n" + + SaveFileOnChange(self.ToolDefinitionFile, ToolsDef) + for DllPath in DllPathList: + os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"] + os.environ["MAKE_FLAGS"] = MakeFlags + + return self._ToolDefinitions + + ## Return the paths of tools + def _GetToolDefFile(self): + if self._ToolDefFile == None: + self._ToolDefFile = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch) + return self._ToolDefFile + + ## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'. + def _GetToolChainFamily(self): + if self._ToolChainFamily == None: + ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase + if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \ + or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \ + or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]: + EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." 
\ + % self.ToolChain) + self._ToolChainFamily = "MSFT" + else: + self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain] + return self._ToolChainFamily + + def _GetBuildRuleFamily(self): + if self._BuildRuleFamily == None: + ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase + if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \ + or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \ + or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]: + EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \ + % self.ToolChain) + self._BuildRuleFamily = "MSFT" + else: + self._BuildRuleFamily = ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain] + return self._BuildRuleFamily + + ## Return the build options specific to this platform + def _GetBuildOptions(self): + if self._BuildOption == None: + self._BuildOption = self._ExpandBuildOption(self.Platform.BuildOptions) + return self._BuildOption + + ## Parse build_rule.txt in $(WORKSPACE)/Conf/build_rule.txt + # + # @retval BuildRule object + # + def _GetBuildRule(self): + if self._BuildRule == None: + BuildRuleFile = None + if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary: + BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF] + if BuildRuleFile in [None, '']: + BuildRuleFile = gBuildRuleFile + self._BuildRule = BuildRule(BuildRuleFile) + return self._BuildRule + + ## Summarize the packages used by modules in this platform + def _GetPackageList(self): + if self._PackageList == None: + self._PackageList = set() + for La in self.LibraryAutoGenList: + self._PackageList.update(La.DependentPackageList) + for Ma in self.ModuleAutoGenList: + self._PackageList.update(Ma.DependentPackageList) + self._PackageList = list(self._PackageList) + return self._PackageList + + ## Get list of non-dynamic PCDs + def _GetNonDynamicPcdList(self): + return 
self._NonDynamicPcdList + + ## Get list of dynamic PCDs + def _GetDynamicPcdList(self): + return self._DynamicPcdList + + ## Generate Token Number for all PCD + def _GetPcdTokenNumbers(self): + if self._PcdTokenNumber == None: + self._PcdTokenNumber = sdict() + TokenNumber = 1 + for Pcd in self.DynamicPcdList: + if Pcd.Phase == "PEI": + EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber)) + self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber + TokenNumber += 1 + + for Pcd in self.DynamicPcdList: + if Pcd.Phase == "DXE": + EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber)) + self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber + TokenNumber += 1 + + for Pcd in self.NonDynamicPcdList: + self._PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber + TokenNumber += 1 + return self._PcdTokenNumber + + ## Summarize ModuleAutoGen objects of all modules/libraries to be built for this platform + def _GetAutoGenObjectList(self): + self._ModuleAutoGenList = [] + self._LibraryAutoGenList = [] + for ModuleFile in self.Platform.Modules: + Ma = ModuleAutoGen( + self.Workspace, + ModuleFile, + self.BuildTarget, + self.ToolChain, + self.Arch, + self.MetaFile + ) + if Ma not in self._ModuleAutoGenList: + self._ModuleAutoGenList.append(Ma) + for La in Ma.LibraryAutoGenList: + if La not in self._LibraryAutoGenList: + self._LibraryAutoGenList.append(La) + + ## Summarize ModuleAutoGen objects of all modules to be built for this platform + def _GetModuleAutoGenList(self): + if self._ModuleAutoGenList == None: + self._GetAutoGenObjectList() + return self._ModuleAutoGenList + + ## Summarize ModuleAutoGen objects of all libraries to be built for this platform + def _GetLibraryAutoGenList(self): + if self._LibraryAutoGenList == None: + self._GetAutoGenObjectList() + return 
self._LibraryAutoGenList + + ## Test if a module is supported by the platform + # + # An error will be raised directly if the module or its arch is not supported + # by the platform or current configuration + # + def ValidModule(self, Module): + return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances + + ## Resolve the library classes in a module to library instances + # + # This method will not only resolve library classes but also sort the library + # instances according to the dependency-ship. + # + # @param Module The module from which the library classes will be resolved + # + # @retval library_list List of library instances sorted + # + def ApplyLibraryInstance(self, Module): + ModuleType = Module.ModuleType + + # for overridding library instances with module specific setting + PlatformModule = self.Platform.Modules[str(Module)] + + # add forced library instance + for LibraryClass in PlatformModule.LibraryClasses: + if LibraryClass.startswith("NULL"): + Module.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass] + + # R9 module + LibraryConsumerList = [Module] + Constructor = [] + ConsumedByList = sdict() + LibraryInstance = sdict() + + EdkLogger.verbose("") + EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch)) + while len(LibraryConsumerList) > 0: + M = LibraryConsumerList.pop() + for LibraryClassName in M.LibraryClasses: + if LibraryClassName not in LibraryInstance: + # override library instance for this module + if LibraryClassName in PlatformModule.LibraryClasses: + LibraryPath = PlatformModule.LibraryClasses[LibraryClassName] + else: + LibraryPath = self.Platform.LibraryClasses[LibraryClassName, ModuleType] + if LibraryPath == None or LibraryPath == "": + LibraryPath = M.LibraryClasses[LibraryClassName] + if LibraryPath == None or LibraryPath == "": + EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, + "Instance of library class [%s] is not found" % LibraryClassName, + 
File=self.MetaFile, + ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), self.Arch, str(Module))) + + LibraryModule = self.BuildDatabase[LibraryPath, self.Arch] + # for those forced library instance (NULL library), add a fake library class + if LibraryClassName.startswith("NULL"): + LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType])) + elif LibraryModule.LibraryClass == None \ + or len(LibraryModule.LibraryClass) == 0 \ + or (ModuleType != 'USER_DEFINED' + and ModuleType not in LibraryModule.LibraryClass[0].SupModList): + # only USER_DEFINED can link against any library instance despite of its SupModList + EdkLogger.error("build", OPTION_MISSING, + "Module type [%s] is not supported by library instance [%s]" \ + % (ModuleType, LibraryPath), File=self.MetaFile, + ExtraData="consumed by [%s]" % str(Module)) + + LibraryInstance[LibraryClassName] = LibraryModule + LibraryConsumerList.append(LibraryModule) + EdkLogger.verbose("\t" + str(LibraryClassName) + " : " + str(LibraryModule)) + else: + LibraryModule = LibraryInstance[LibraryClassName] + + if LibraryModule == None: + continue + + if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor: + Constructor.append(LibraryModule) + + if LibraryModule not in ConsumedByList: + ConsumedByList[LibraryModule] = [] + # don't add current module itself to consumer list + if M != Module: + if M in ConsumedByList[LibraryModule]: + continue + ConsumedByList[LibraryModule].append(M) + # + # Initialize the sorted output list to the empty set + # + SortedLibraryList = [] + # + # Q <- Set of all nodes with no incoming edges + # + LibraryList = [] #LibraryInstance.values() + Q = [] + for LibraryClassName in LibraryInstance: + M = LibraryInstance[LibraryClassName] + LibraryList.append(M) + if ConsumedByList[M] == []: + Q.insert(0, M) + + # + # start the DAG algorithm + # + while True: + EdgeRemoved = True + while Q == [] and EdgeRemoved: + EdgeRemoved = False + # for each 
node Item with a Constructor + for Item in LibraryList: + if Item not in Constructor: + continue + # for each Node without a constructor with an edge e from Item to Node + for Node in ConsumedByList[Item]: + if Node in Constructor: + continue + # remove edge e from the graph if Node has no constructor + ConsumedByList[Item].remove(Node) + EdgeRemoved = True + if ConsumedByList[Item] == []: + # insert Item into Q + Q.insert(0, Item) + break + if Q != []: + break + # DAG is done if there's no more incoming edge for all nodes + if Q == []: + break + + # remove node from Q + Node = Q.pop() + # output Node + SortedLibraryList.append(Node) + + # for each node Item with an edge e from Node to Item do + for Item in LibraryList: + if Node not in ConsumedByList[Item]: + continue + # remove edge e from the graph + ConsumedByList[Item].remove(Node) + + if ConsumedByList[Item] != []: + continue + # insert Item into Q, if Item has no other incoming edges + Q.insert(0, Item) + + # + # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle + # + for Item in LibraryList: + if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1: + ErrorMessage = "\tconsumed by " + "\n\tconsumed by ".join([str(L) for L in ConsumedByList[Item]]) + EdkLogger.error("build", BUILD_ERROR, 'Library [%s] with constructors has a cycle' % str(Item), + ExtraData=ErrorMessage, File=self.MetaFile) + if Item not in SortedLibraryList: + SortedLibraryList.append(Item) + + # + # Build the list of constructor and destructir names + # The DAG Topo sort produces the destructor order, so the list of constructors must generated in the reverse order + # + SortedLibraryList.reverse() + return SortedLibraryList + + + ## Override PCD setting (type, value, ...) 
+ # + # @param ToPcd The PCD to be overrided + # @param FromPcd The PCD overrideing from + # + def _OverridePcd(self, ToPcd, FromPcd, Module=""): + # + # in case there's PCDs coming from FDF file, which have no type given. + # at this point, ToPcd.Type has the type found from dependent + # package + # + if FromPcd != None: + if ToPcd.Pending and FromPcd.Type not in [None, '']: + ToPcd.Type = FromPcd.Type + elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \ + and ToPcd.Type != FromPcd.Type: + EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type", + ExtraData="%s.%s is defined as [%s] in module %s, but as [%s] in platform."\ + % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, + ToPcd.Type, Module, FromPcd.Type), + File=self.MetaFile) + + if FromPcd.MaxDatumSize not in [None, '']: + ToPcd.MaxDatumSize = FromPcd.MaxDatumSize + if FromPcd.DefaultValue not in [None, '']: + ToPcd.DefaultValue = FromPcd.DefaultValue + if FromPcd.TokenValue not in [None, '']: + ToPcd.TokenValue = FromPcd.TokenValue + if FromPcd.MaxDatumSize not in [None, '']: + ToPcd.MaxDatumSize = FromPcd.MaxDatumSize + if FromPcd.DatumType not in [None, '']: + ToPcd.DatumType = FromPcd.DatumType + if FromPcd.SkuInfoList not in [None, '', []]: + ToPcd.SkuInfoList = FromPcd.SkuInfoList + + # check the validation of datum + IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue) + if not IsValid: + EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile, + ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName)) + + if ToPcd.DatumType == "VOID*" and ToPcd.MaxDatumSize in ['', None]: + EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \ + % (ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName)) + Value = ToPcd.DefaultValue + if Value in [None, '']: + ToPcd.MaxDatumSize = 1 + elif Value[0] == 'L': + ToPcd.MaxDatumSize = str(len(Value) * 2) + elif Value[0] == '{': + ToPcd.MaxDatumSize = str(len(Value.split(','))) + else: + 
ToPcd.MaxDatumSize = str(len(Value)) + + # apply default SKU for dynamic PCDS if specified one is not available + if (ToPcd.Type in PCD_DYNAMIC_TYPE_LIST or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_LIST) \ + and ToPcd.SkuInfoList in [None, {}, '']: + if self.Platform.SkuName in self.Platform.SkuIds: + SkuName = self.Platform.SkuName + else: + SkuName = 'DEFAULT' + ToPcd.SkuInfoList = { + SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName], '', '', '', '', '', ToPcd.DefaultValue) + } + + ## Apply PCD setting defined platform to a module + # + # @param Module The module from which the PCD setting will be overrided + # + # @retval PCD_list The list PCDs with settings from platform + # + def ApplyPcdSetting(self, Module, Pcds): + # for each PCD in module + for Name,Guid in Pcds: + PcdInModule = Pcds[Name,Guid] + # find out the PCD setting in platform + if (Name,Guid) in self.Platform.Pcds: + PcdInPlatform = self.Platform.Pcds[Name,Guid] + else: + PcdInPlatform = None + # then override the settings if any + self._OverridePcd(PcdInModule, PcdInPlatform, Module) + # resolve the VariableGuid value + for SkuId in PcdInModule.SkuInfoList: + Sku = PcdInModule.SkuInfoList[SkuId] + if Sku.VariableGuid == '': continue + Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList) + if Sku.VariableGuidValue == None: + PackageList = "\n\t".join([str(P) for P in self.PackageList]) + EdkLogger.error( + 'build', + RESOURCE_NOT_AVAILABLE, + "Value of GUID [%s] is not found in" % Sku.VariableGuid, + ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \ + % (Guid, Name, str(Module)), + File=self.MetaFile + ) + + # override PCD settings with module specific setting + if Module in self.Platform.Modules: + PlatformModule = self.Platform.Modules[str(Module)] + for Key in PlatformModule.Pcds: + if Key in Pcds: + self._OverridePcd(Pcds[Key], PlatformModule.Pcds[Key], Module) + return Pcds.values() + + ## Resolve library names to library modules + # + # (for R8.x 
modules) + # + # @param Module The module from which the library names will be resolved + # + # @retval library_list The list of library modules + # + def ResolveLibraryReference(self, Module): + EdkLogger.verbose("") + EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), self.Arch)) + LibraryConsumerList = [Module] + + # "CompilerStub" is a must for R8 modules + if Module.Libraries: + Module.Libraries.append("CompilerStub") + LibraryList = [] + while len(LibraryConsumerList) > 0: + M = LibraryConsumerList.pop() + for LibraryName in M.Libraries: + Library = self.Platform.LibraryClasses[LibraryName, ':dummy:'] + if Library == None: + for Key in self.Platform.LibraryClasses.data.keys(): + if LibraryName.upper() == Key.upper(): + Library = self.Platform.LibraryClasses[Key, ':dummy:'] + break + if Library == None: + EdkLogger.warn("build", "Library [%s] is not found" % LibraryName, File=str(M), + ExtraData="\t%s [%s]" % (str(Module), self.Arch)) + continue + + if Library not in LibraryList: + LibraryList.append(Library) + LibraryConsumerList.append(Library) + EdkLogger.verbose("\t" + LibraryName + " : " + str(Library) + ' ' + str(type(Library))) + return LibraryList + + ## Expand * in build option key + # + # @param Options Options to be expanded + # + # @retval options Options expanded + # + def _ExpandBuildOption(self, Options): + BuildOptions = {} + for Key in Options: + Family = Key[0] + Target, Tag, Arch, Tool, Attr = Key[1].split("_") + # if tool chain family doesn't match, skip it + if Family and Tool in self.ToolDefinition and Family != self.ToolDefinition[Tool]["FAMILY"]: + continue + # expand any wildcard + if Target == "*" or Target == self.BuildTarget: + if Tag == "*" or Tag == self.ToolChain: + if Arch == "*" or Arch == self.Arch: + if Tool not in BuildOptions: + BuildOptions[Tool] = {} + if Attr != "FLAGS" or Attr not in BuildOptions[Tool]: + BuildOptions[Tool][Attr] = Options[Key] + else: + # append options for the same tool + 
BuildOptions[Tool][Attr] += " " + Options[Key] + return BuildOptions + + ## Append build options in platform to a module + # + # @param Module The module to which the build options will be appened + # + # @retval options The options appended with build options in platform + # + def ApplyBuildOption(self, Module): + PlatformOptions = self.BuildOption + ModuleOptions = self._ExpandBuildOption(Module.BuildOptions) + if Module in self.Platform.Modules: + PlatformModule = self.Platform.Modules[str(Module)] + PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions) + else: + PlatformModuleOptions = {} + + AllTools = set(ModuleOptions.keys() + PlatformOptions.keys() + PlatformModuleOptions.keys() + self.ToolDefinition.keys()) + BuildOptions = {} + for Tool in AllTools: + if Tool not in BuildOptions: + BuildOptions[Tool] = {} + + for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, PlatformModuleOptions]: + if Tool not in Options: + continue + for Attr in Options[Tool]: + Value = Options[Tool][Attr] + if Attr not in BuildOptions[Tool]: + BuildOptions[Tool][Attr] = "" + # check if override is indicated + if Value.startswith('='): + BuildOptions[Tool][Attr] = Value[1:] + else: + BuildOptions[Tool][Attr] += " " + Value + return BuildOptions + + Platform = property(_GetPlatform) + Name = property(_GetName) + Guid = property(_GetGuid) + Version = property(_GetVersion) + + OutputDir = property(_GetOutputDir) + BuildDir = property(_GetBuildDir) + MakeFileDir = property(_GetMakeFileDir) + FdfFile = property(_GetFdfFile) + + PcdTokenNumber = property(_GetPcdTokenNumbers) # (TokenCName, TokenSpaceGuidCName) : GeneratedTokenNumber + DynamicPcdList = property(_GetDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...] + NonDynamicPcdList = property(_GetNonDynamicPcdList) # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...] 
    PackageList = property(_GetPackageList)

    ToolDefinition = property(_GetToolDefinition)       # toolcode : tool path
    ToolDefinitionFile = property(_GetToolDefFile)      # toolcode : lib path
    ToolChainFamily = property(_GetToolChainFamily)
    BuildRuleFamily = property(_GetBuildRuleFamily)
    BuildOption = property(_GetBuildOptions)            # toolcode : option

    BuildCommand = property(_GetBuildCommand)
    BuildRule = property(_GetBuildRule)
    ModuleAutoGenList = property(_GetModuleAutoGenList)
    LibraryAutoGenList = property(_GetLibraryAutoGenList)

## ModuleAutoGen class
#
# This class encapsulates the AutoGen behaviors for the build tools. In addition to
# the generation of AutoGen.h and AutoGen.c, it will generate *.depex file according
# to the [depex] section in module's inf file.
#
class ModuleAutoGen(AutoGen):
    ## The real constructor of ModuleAutoGen
    #
    # This method is not supposed to be called by users of ModuleAutoGen. It's
    # only used by factory method __new__() to do real initialization work for an
    # object of ModuleAutoGen
    #
    #   @param      Workspace       EdkIIWorkspaceBuild object
    #   @param      ModuleFile      The path of module file
    #   @param      Target          Build target (DEBUG, RELEASE)
    #   @param      Toolchain       Name of tool chain
    #   @param      Arch            The arch the module supports
    #   @param      PlatformFile    Platform meta-file
    #
    #   @retval     True if the module is employed by the active platform, False otherwise
    #
    def _Init(self, Workspace, ModuleFile, Target, Toolchain, Arch, PlatformFile):
        EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen module [%s] [%s]" % (ModuleFile, Arch))
        GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (ModuleFile, Arch, Toolchain, Target)

        self.Workspace = Workspace
        self.WorkspaceDir = Workspace.WorkspaceDir

        self.MetaFile = ModuleFile
        self.PlatformInfo = PlatformAutoGen(Workspace, PlatformFile, Target, Toolchain, Arch)
        # check if this module is employed by active platform
        if not self.PlatformInfo.ValidModule(self.MetaFile):
            EdkLogger.verbose("Module [%s] for [%s] is not employed by active platform\n" \
                              % (self.MetaFile, Arch))
            return False

        self.SourceDir = self.MetaFile.SubDir
        self.SourceOverrideDir = None
        # use overridden path defined in DSC file, if any
        if self.MetaFile.Key in GlobalData.gOverrideDir:
            self.SourceOverrideDir = GlobalData.gOverrideDir[self.MetaFile.Key]

        self.ToolChain = Toolchain
        self.BuildTarget = Target
        self.Arch = Arch
        self.ToolChainFamily = self.PlatformInfo.ToolChainFamily
        self.BuildRuleFamily = self.PlatformInfo.BuildRuleFamily

        self.IsMakeFileCreated = False
        self.IsCodeFileCreated = False

        self.BuildDatabase = self.Workspace.BuildDatabase

        # caches backing the lazily-evaluated properties of this class;
        # None means "not computed yet"
        self._Module = None
        self._Name = None
        self._Guid = None
        self._Version = None
        self._ModuleType = None
        self._ComponentType = None
        self._PcdIsDriver = None
        self._AutoGenVersion = None
        self._LibraryFlag = None
        self._CustomMakefile = None
        self._Macro = None

        self._BuildDir = None
        self._OutputDir = None
        self._DebugDir = None
        self._MakeFileDir = None

        self._IncludePathList = None
        self._AutoGenFileList = None
        self._UnicodeFileList = None
        self._SourceFileList = None
        self._ObjectFileList = None
        self._BinaryFileList = None

        self._DependentPackageList = None
        self._DependentLibraryList = None
        self._LibraryAutoGenList = None
        self._DerivedPackageList = None
        self._ModulePcdList = None
        self._LibraryPcdList = None
        self._GuidList = None
        self._ProtocolList = None
        self._PpiList = None
        self._DepexList = None
        self._BuildOption = None
        self._BuildTargets = None
        self._IntroBuildTargetList = None
        self._FinalBuildTargetList = None
        self._FileTypes = None
        self._BuildRules = None

        return True

    ## String representation: module path plus target architecture
    def __repr__(self):
        return "%s [%s]" % (self.MetaFile, self.Arch)

    # Macros could be used in build_rule.txt (also Makefile)
    def _GetMacros(self):
        if self._Macro == None:
            self._Macro = sdict()
            self._Macro["WORKSPACE"] = self.WorkspaceDir
            self._Macro["MODULE_NAME"] = self.Name
            self._Macro["MODULE_GUID"] = self.Guid
self._Macro["MODULE_VERSION" ] = self.Version + self._Macro["MODULE_TYPE" ] = self.ModuleType + self._Macro["MODULE_FILE" ] = str(self.MetaFile) + self._Macro["MODULE_FILE_BASE_NAME" ] = self.MetaFile.BaseName + self._Macro["MODULE_RELATIVE_DIR" ] = self.SourceDir + self._Macro["MODULE_DIR" ] = self.SourceDir + + self._Macro["BASE_NAME" ] = self.Name + + self._Macro["ARCH" ] = self.Arch + self._Macro["TOOLCHAIN" ] = self.ToolChain + self._Macro["TOOLCHAIN_TAG" ] = self.ToolChain + self._Macro["TARGET" ] = self.BuildTarget + + self._Macro["BUILD_DIR" ] = self.PlatformInfo.BuildDir + self._Macro["BIN_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch) + self._Macro["LIB_DIR" ] = os.path.join(self.PlatformInfo.BuildDir, self.Arch) + self._Macro["MODULE_BUILD_DIR" ] = self.BuildDir + self._Macro["OUTPUT_DIR" ] = self.OutputDir + self._Macro["DEBUG_DIR" ] = self.DebugDir + return self._Macro + + ## Return the module build data object + def _GetModule(self): + if self._Module == None: + self._Module = self.Workspace.BuildDatabase[self.MetaFile, self.Arch] + return self._Module + + ## Return the module name + def _GetBaseName(self): + return self.Module.BaseName + + ## Return the module SourceOverridePath + def _GetSourceOverridePath(self): + return self.Module.SourceOverridePath + + ## Return the module meta-file GUID + def _GetGuid(self): + return self.Module.Guid + + ## Return the module version + def _GetVersion(self): + return self.Module.Version + + ## Return the module type + def _GetModuleType(self): + return self.Module.ModuleType + + ## Return the component type (for R8.x style of module) + def _GetComponentType(self): + return self.Module.ComponentType + + ## Return the build type + def _GetBuildType(self): + return self.Module.BuildType + + ## Return the PCD_IS_DRIVER setting + def _GetPcdIsDriver(self): + return self.Module.PcdIsDriver + + ## Return the autogen version, i.e. 
# module meta-file version
    def _GetAutoGenVersion(self):
        return self.Module.AutoGenVersion

    ## Check if the module is library or not
    #
    #   A module is a library if it declares any LIBRARY_CLASS. The result is
    #   cached in self._LibraryFlag after the first call.
    def _IsLibrary(self):
        if self._LibraryFlag == None:
            if self.Module.LibraryClass != None and self.Module.LibraryClass != []:
                self._LibraryFlag = True
            else:
                self._LibraryFlag = False
        return self._LibraryFlag

    ## Return the directory to store intermediate files of the module
    #
    #   $(BUILD_DIR)/$(ARCH)/<module source dir>/<module base name>.
    #   The directory is created on first access.
    def _GetBuildDir(self):
        if self._BuildDir == None:
            self._BuildDir = path.join(
                                    self.PlatformInfo.BuildDir,
                                    self.Arch,
                                    self.SourceDir,
                                    self.MetaFile.BaseName
                                    )
            CreateDirectory(self._BuildDir)
        return self._BuildDir

    ## Return the directory to store the intermediate object files of the module
    #
    #   $(MODULE_BUILD_DIR)/OUTPUT, created on first access.
    def _GetOutputDir(self):
        if self._OutputDir == None:
            self._OutputDir = path.join(self.BuildDir, "OUTPUT")
            CreateDirectory(self._OutputDir)
        return self._OutputDir

    ## Return the directory to store auto-gened source files of the module
    #
    #   $(MODULE_BUILD_DIR)/DEBUG, created on first access.
    def _GetDebugDir(self):
        if self._DebugDir == None:
            self._DebugDir = path.join(self.BuildDir, "DEBUG")
            CreateDirectory(self._DebugDir)
        return self._DebugDir

    ## Return the path of custom makefile
    #
    #   @retval     dict    {makefile type ('nmake'/'gmake') : makefile path}
    #
    def _GetCustomMakefile(self):
        if self._CustomMakefile == None:
            self._CustomMakefile = {}
            for Type in self.Module.CustomMakefile:
                if Type in gMakeTypeMap:
                    MakeType = gMakeTypeMap[Type]
                else:
                    # unknown tool chain name: fall back to nmake style
                    MakeType = 'nmake'
                # a makefile under SourceOverrideDir, when present, takes
                # precedence over the one in the module's own source directory
                if self.SourceOverrideDir != None:
                    File = os.path.join(self.SourceOverrideDir, self.Module.CustomMakefile[Type])
                    if not os.path.exists(File):
                        File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
                else:
                    File = os.path.join(self.SourceDir, self.Module.CustomMakefile[Type])
                self._CustomMakefile[MakeType] = File
        return self._CustomMakefile

    ## Return the directory of the makefile
    #
    #   @retval     string  The directory string of module's makefile
    #
    def _GetMakeFileDir(self):
        return self.BuildDir

    ## Return build command string
    #
    #   @retval     string  Build command string
    #
    def _GetBuildCommand(self):
        return self.PlatformInfo.BuildCommand

    ## Get object list of all packages the module and its dependent libraries belong to
    #
    #   @retval     list    The list of package object (order preserved, duplicates dropped)
    #
    def _GetDerivedPackageList(self):
        PackageList = []
        for M in [self.Module] + self.DependentLibraryList:
            for Package in M.Packages:
                if Package in PackageList:
                    continue
                PackageList.append(Package)
        return PackageList

    ## Merge dependency expression
    #
    #   @retval     dict    {module type : token list of the merged dependency expression}
    #
    def _GetDepexTokenList(self):
        if self._DepexList == None:
            self._DepexList = {}
            # libraries, and modules carrying an explicit depex file, get no merged depex
            if self.IsLibrary or TAB_DEPENDENCY_EXPRESSION_FILE in self.FileTypes:
                return self._DepexList

            if self.ModuleType == "DXE_SMM_DRIVER":
                # an SMM driver gets both a DXE depex and an SMM depex
                self._DepexList["DXE_DRIVER"] = []
                self._DepexList["SMM_DRIVER"] = []
            else:
                self._DepexList[self.ModuleType] = []

            for ModuleType in self._DepexList:
                DepexList = self._DepexList[ModuleType]
                #
                # Append depex from dependent libraries, if not "BEFORE", "AFTER" expresion
                #
                for M in [self.Module] + self.DependentLibraryList:
                    Inherited = False
                    for D in M.Depex[self.Arch, ModuleType]:
                        # each contribution is AND-ed and parenthesized
                        if DepexList != []:
                            DepexList.append('AND')
                        DepexList.append('(')
                        DepexList.extend(D)
                        if DepexList[-1] == 'END':  # no need of a END at this time
                            DepexList.pop()
                        DepexList.append(')')
                        Inherited = True
                    if Inherited:
                        EdkLogger.verbose("DEPEX[%s] (+%s) = %s" % (self.Name, M.BaseName, DepexList))
                    # BEFORE/AFTER ordering expressions terminate the merge
                    if 'BEFORE' in DepexList or 'AFTER' in DepexList:
                        break
                if len(DepexList) > 0:
                    EdkLogger.verbose('')
        return self._DepexList

    ## Return the list of specification version required for the module
    #
    #   @retval     list    The list of specification defined in module file
    #
    def _GetSpecification(self):
        return self.Module.Specification

    ## Tool option for the module build
    #
    #   @retval     dict    The dict containing valid options
    #
    def _GetModuleBuildOption(self):
        if self._BuildOption == None:
            self._BuildOption = self.PlatformInfo.ApplyBuildOption(self.Module)
        return self._BuildOption

    ## Return a list of files which can be built from source
    #
    #   What kind of files can be built is determined by build rules in
    #   $(WORKSPACE)/Conf/build_rule.txt and toolchain family.
    #
    def _GetSourceFileList(self):
        if self._SourceFileList == None:
            self._SourceFileList = []
            for F in self.Module.Sources:
                # skip source files tagged for a different tool chain
                if F.TagName != "" and F.TagName != self.ToolChain:
                    EdkLogger.debug(EdkLogger.DEBUG_9, "The toolchain [%s] for processing file [%s] is found, "
                                    "but [%s] is needed" % (F.TagName, str(F), self.ToolChain))
                    continue
                # skip source files bound to a different tool chain family
                if F.ToolChainFamily != "" and F.ToolChainFamily != self.ToolChainFamily:
                    EdkLogger.debug(
                            EdkLogger.DEBUG_0,
                            "The file [%s] must be built by tools of [%s], " \
                            "but current toolchain family is [%s]" \
                                % (str(F), F.ToolChainFamily, self.ToolChainFamily))
                    continue

                # add the file path into search path list for file including
                if F.Dir not in self.IncludePathList and self.AutoGenVersion >= 0x00010005:
                    self.IncludePathList.insert(0, F.Dir)
                self._SourceFileList.append(F)
                # side effect: registers build targets for this file
                self._ApplyBuildRule(F, TAB_UNKNOWN_FILE)
        return self._SourceFileList

    ## Return the list of unicode files
    def _GetUnicodeFileList(self):
        if self._UnicodeFileList == None:
            if TAB_UNICODE_FILE in self.FileTypes:
                self._UnicodeFileList = self.FileTypes[TAB_UNICODE_FILE]
            else:
                self._UnicodeFileList = []
        return self._UnicodeFileList

    ## Return a list of files which can be built from binary
    #
    # "Build" binary files are just to copy them to build directory.
    #
    # @retval     list        The list of files which can be built later
    #
    def _GetBinaryFiles(self):
        if self._BinaryFileList == None:
            self._BinaryFileList = []
            for F in self.Module.Binaries:
                # keep only binaries matching the current build target
                if F.Target not in ['COMMON', '*'] and F.Target != self.BuildTarget:
                    continue
                self._BinaryFileList.append(F)
                # side effect: registers (copy) build targets for this binary
                self._ApplyBuildRule(F, F.Type)
        return self._BinaryFileList

    ## Get the build rules of this module, keyed by file type and source extension
    #
    #   For each file type, a rule is looked up by (type, build type, arch,
    #   family), trying BuildRuleFamily first, then ToolChainFamily; each
    #   attempt retries with ModuleType when it differs from BuildType.
    def _GetBuildRules(self):
        if self._BuildRules == None:
            BuildRules = {}
            BuildRuleDatabase = self.PlatformInfo.BuildRule
            for Type in BuildRuleDatabase.FileTypeList:
                #first try getting build rule by BuildRuleFamily
                RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.BuildRuleFamily]
                if not RuleObject:
                    # build type is always module type, but ...
                    if self.ModuleType != self.BuildType:
                        RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.BuildRuleFamily]
                #second try getting build rule by ToolChainFamily
                if not RuleObject:
                    RuleObject = BuildRuleDatabase[Type, self.BuildType, self.Arch, self.ToolChainFamily]
                    if not RuleObject:
                        # build type is always module type, but ...
                        if self.ModuleType != self.BuildType:
                            RuleObject = BuildRuleDatabase[Type, self.ModuleType, self.Arch, self.ToolChainFamily]
                if not RuleObject:
                    # no rule for this file type: such files simply won't be built
                    continue
                # bind the rule to this module's macros ($(OUTPUT_DIR) etc.)
                RuleObject = RuleObject.Instantiate(self.Macros)
                BuildRules[Type] = RuleObject
                for Ext in RuleObject.SourceFileExtList:
                    BuildRules[Ext] = RuleObject
            self._BuildRules = BuildRules
        return self._BuildRules

    ## Apply the chain of matching build rules to one file
    #
    #   Walks from the given file through successive rules (each rule's outputs
    #   become candidate sources for the next) until no rule matches, recording
    #   intro targets (directly from the file), intermediate targets, and final
    #   (coda) targets.  Statement order here is significant.
    #
    #   @param      File        The file to build
    #   @param      FileType    The type of the file, or TAB_UNKNOWN_FILE to
    #                           match by extension
    #
    def _ApplyBuildRule(self, File, FileType):
        if self._BuildTargets == None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = {}
            self._FileTypes = {}

        LastTarget = None
        RuleChain = []
        SourceList = [File]
        Index = 0
        while Index < len(SourceList):
            Source = SourceList[Index]
            Index = Index + 1

            # intermediate outputs may live in directories not created yet
            if Source != File:
                CreateDirectory(Source.Dir)

            if FileType in self.BuildRules:
                RuleObject = self.BuildRules[FileType]
            elif Source.Ext in self.BuildRules:
                RuleObject = self.BuildRules[Source.Ext]
            elif File.IsBinary and File == Source:
                RuleObject = self.BuildRules[TAB_DEFAULT_BINARY_FILE]
            else:
                # stop at no more rules
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break

            FileType = RuleObject.SourceFileType
            if FileType not in self._FileTypes:
                self._FileTypes[FileType] = set()
            self._FileTypes[FileType].add(Source)

            # stop at STATIC_LIBRARY for library
            if self.IsLibrary and FileType == TAB_STATIC_LIBRARY:
                self._FinalBuildTargetList.add(LastTarget)
                break

            Target = RuleObject.Apply(Source)
            if not Target:
                if LastTarget:
                    self._FinalBuildTargetList.add(LastTarget)
                break
            elif not Target.Outputs:
                # Only do build for target with outputs
                # NOTE(review): the condition adds targets WITHOUT outputs,
                # which contradicts this inherited comment -- confirm intent
                self._FinalBuildTargetList.add(Target)

            if FileType not in self._BuildTargets:
                self._BuildTargets[FileType] = set()
            self._BuildTargets[FileType].add(Target)

            if not Source.IsBinary and Source == File:
                self._IntroBuildTargetList.add(Target)

            # to avoid cyclic rule
            if FileType in RuleChain:
                break

            RuleChain.append(FileType)
            # outputs of this rule feed the next iteration of the chain
            SourceList.extend(Target.Outputs)
            LastTarget = Target
            FileType = TAB_UNKNOWN_FILE

    ## Return {file type : set of build targets}, populating it on first use
    def _GetTargets(self):
        if self._BuildTargets == None:
            self._IntroBuildTargetList = set()
            self._FinalBuildTargetList = set()
            self._BuildTargets = {}
            self._FileTypes = {}

        #TRICK: call _GetSourceFileList to apply build rule for source files
        if self.SourceFileList:
            pass

        #TRICK: call _GetBinaryFileList to apply build rule for binary files
        if self.BinaryFileList:
            pass

        return self._BuildTargets

    ## Return targets built directly from the module's own files
    def _GetIntroTargetList(self):
        self._GetTargets()
        return self._IntroBuildTargetList

    ## Return the final (coda) targets at the end of each rule chain
    def _GetFinalTargetList(self):
        self._GetTargets()
        return self._FinalBuildTargetList

    ## Return {file type : set of source files of that type}
    def _GetFileTypes(self):
        self._GetTargets()
        return self._FileTypes

    ## Get the list of package object the module depends on
    #
    #   @retval     list    The package object list
    #
    def _GetDependentPackageList(self):
        return self.Module.Packages

    ## Return the list of auto-generated code file
    #
    #   @retval     list    The list of auto-generated file
    #
    def _GetAutoGenFileList(self):
        if self._AutoGenFileList == None:
            self._AutoGenFileList = {}
            AutoGenC = TemplateString()
            AutoGenH = TemplateString()
            StringH = TemplateString()
            GenC.CreateCode(self, AutoGenC, AutoGenH, StringH)
            # AutoGen.c is only generated when the module has C sources to link it with
            if str(AutoGenC) != "" and TAB_C_CODE_FILE in self.FileTypes:
                AutoFile = PathClass(gAutoGenCodeFileName, self.DebugDir)
                self._AutoGenFileList[AutoFile] = str(AutoGenC)
                self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
            if str(AutoGenH) != "":
                AutoFile = PathClass(gAutoGenHeaderFileName, self.DebugDir)
                self._AutoGenFileList[AutoFile] = str(AutoGenH)
                self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
            if str(StringH) != "":
                AutoFile = PathClass(gAutoGenStringFileName % {"module_name":self.Name}, self.DebugDir)
                self._AutoGenFileList[AutoFile] = str(StringH)
                self._ApplyBuildRule(AutoFile, TAB_UNKNOWN_FILE)
        return self._AutoGenFileList

    ## Return the list of library modules
explicitly or implicityly used by this module + def _GetLibraryList(self): + if self._DependentLibraryList == None: + # only merge library classes and PCD for non-library module + if self.IsLibrary: + self._DependentLibraryList = [] + else: + if self.AutoGenVersion < 0x00010005: + self._DependentLibraryList = self.PlatformInfo.ResolveLibraryReference(self.Module) + else: + self._DependentLibraryList = self.PlatformInfo.ApplyLibraryInstance(self.Module) + return self._DependentLibraryList + + ## Get the list of PCDs from current module + # + # @retval list The list of PCD + # + def _GetModulePcdList(self): + if self._ModulePcdList == None: + # apply PCD settings from platform + self._ModulePcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, self.Module.Pcds) + return self._ModulePcdList + + ## Get the list of PCDs from dependent libraries + # + # @retval list The list of PCD + # + def _GetLibraryPcdList(self): + if self._LibraryPcdList == None: + Pcds = {} + if not self.IsLibrary: + # get PCDs from dependent libraries + for Library in self.DependentLibraryList: + for Key in Library.Pcds: + # skip duplicated PCDs + if Key in self.Module.Pcds or Key in Pcds: + continue + Pcds[Key] = copy.copy(Library.Pcds[Key]) + # apply PCD settings from platform + self._LibraryPcdList = self.PlatformInfo.ApplyPcdSetting(self.Module, Pcds) + else: + self._LibraryPcdList = [] + return self._LibraryPcdList + + ## Get the GUID value mapping + # + # @retval dict The mapping between GUID cname and its value + # + def _GetGuidList(self): + if self._GuidList == None: + self._GuidList = self.Module.Guids + for Library in self.DependentLibraryList: + self._GuidList.update(Library.Guids) + return self._GuidList + + ## Get the protocol value mapping + # + # @retval dict The mapping between protocol cname and its value + # + def _GetProtocolList(self): + if self._ProtocolList == None: + self._ProtocolList = self.Module.Protocols + for Library in self.DependentLibraryList: + 
self._ProtocolList.update(Library.Protocols) + return self._ProtocolList + + ## Get the PPI value mapping + # + # @retval dict The mapping between PPI cname and its value + # + def _GetPpiList(self): + if self._PpiList == None: + self._PpiList = self.Module.Ppis + for Library in self.DependentLibraryList: + self._PpiList.update(Library.Ppis) + return self._PpiList + + ## Get the list of include search path + # + # @retval list The list path + # + def _GetIncludePathList(self): + if self._IncludePathList == None: + self._IncludePathList = [] + if self.AutoGenVersion < 0x00010005: + for Inc in self.Module.Includes: + if Inc not in self._IncludePathList: + self._IncludePathList.append(Inc) + # for r8 modules + Inc = path.join(Inc, self.Arch.capitalize()) + if os.path.exists(Inc) and Inc not in self._IncludePathList: + self._IncludePathList.append(Inc) + # r8 module needs to put DEBUG_DIR at the end of search path and not to use SOURCE_DIR all the time + self._IncludePathList.append(self.DebugDir) + else: + self._IncludePathList.append(self.MetaFile.Dir) + self._IncludePathList.append(self.DebugDir) + + for Package in self.Module.Packages: + PackageDir = path.join(self.WorkspaceDir, Package.MetaFile.Dir) + if PackageDir not in self._IncludePathList: + self._IncludePathList.append(PackageDir) + for Inc in Package.Includes: + if Inc not in self._IncludePathList: + self._IncludePathList.append(str(Inc)) + return self._IncludePathList + + ## Create makefile for the module and its dependent libraries + # + # @param CreateLibraryMakeFile Flag indicating if or not the makefiles of + # dependent libraries will be created + # + def CreateMakeFile(self, CreateLibraryMakeFile=True): + if self.IsMakeFileCreated: + return + + if not self.IsLibrary and CreateLibraryMakeFile: + for LibraryAutoGen in self.LibraryAutoGenList: + LibraryAutoGen.CreateMakeFile() + + if len(self.CustomMakefile) == 0: + Makefile = GenMake.ModuleMakefile(self) + else: + Makefile = 
GenMake.CustomMakefile(self) + if Makefile.Generate(): + EdkLogger.debug(EdkLogger.DEBUG_9, "Generated makefile for module %s [%s]" % + (self.Name, self.Arch)) + else: + EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of makefile for module %s [%s]" % + (self.Name, self.Arch)) + + self.IsMakeFileCreated = True + + ## Create autogen code for the module and its dependent libraries + # + # @param CreateLibraryCodeFile Flag indicating if or not the code of + # dependent libraries will be created + # + def CreateCodeFile(self, CreateLibraryCodeFile=True): + if self.IsCodeFileCreated: + return + + if not self.IsLibrary and CreateLibraryCodeFile: + for LibraryAutoGen in self.LibraryAutoGenList: + LibraryAutoGen.CreateCodeFile() + + AutoGenList = [] + IgoredAutoGenList = [] + + for File in self.AutoGenFileList: + if GenC.Generate(File.Path, self.AutoGenFileList[File]): + #Ignore R8 AutoGen.c + if self.AutoGenVersion < 0x00010005 and File.Name == 'AutoGen.c': + continue + + AutoGenList.append(str(File)) + else: + IgoredAutoGenList.append(str(File)) + + for ModuleType in self.DepexList: + if len(self.DepexList[ModuleType]) == 0: + continue + Dpx = GenDepex.DependencyExpression(self.DepexList[ModuleType], ModuleType, True) + if ModuleType == 'SMM_DRIVER': + DpxFile = gAutoGenSmmDepexFileName % {"module_name" : self.Name} + else: + DpxFile = gAutoGenDepexFileName % {"module_name" : self.Name} + + if Dpx.Generate(path.join(self.OutputDir, DpxFile)): + AutoGenList.append(str(DpxFile)) + else: + IgoredAutoGenList.append(str(DpxFile)) + + if IgoredAutoGenList == []: + EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] files for module %s [%s]" % + (" ".join(AutoGenList), self.Name, self.Arch)) + elif AutoGenList == []: + EdkLogger.debug(EdkLogger.DEBUG_9, "Skipped the generation of [%s] files for module %s [%s]" % + (" ".join(IgoredAutoGenList), self.Name, self.Arch)) + else: + EdkLogger.debug(EdkLogger.DEBUG_9, "Generated [%s] (skipped %s) files for module %s [%s]" % + 
(" ".join(AutoGenList), " ".join(IgoredAutoGenList), self.Name, self.Arch)) + + self.IsCodeFileCreated = True + return AutoGenList + + ## Summarize the ModuleAutoGen objects of all libraries used by this module + def _GetLibraryAutoGenList(self): + if self._LibraryAutoGenList == None: + self._LibraryAutoGenList = [] + for Library in self.DependentLibraryList: + La = ModuleAutoGen( + self.Workspace, + Library.MetaFile, + self.BuildTarget, + self.ToolChain, + self.Arch, + self.PlatformInfo.MetaFile + ) + if La not in self._LibraryAutoGenList: + self._LibraryAutoGenList.append(La) + for Lib in La.CodaTargetList: + self._ApplyBuildRule(Lib.Target, TAB_UNKNOWN_FILE) + return self._LibraryAutoGenList + + ## Return build command string + # + # @retval string Build command string + # + def _GetBuildCommand(self): + return self.PlatformInfo.BuildCommand + + + Module = property(_GetModule) + Name = property(_GetBaseName) + Guid = property(_GetGuid) + Version = property(_GetVersion) + ModuleType = property(_GetModuleType) + ComponentType = property(_GetComponentType) + BuildType = property(_GetBuildType) + PcdIsDriver = property(_GetPcdIsDriver) + AutoGenVersion = property(_GetAutoGenVersion) + Macros = property(_GetMacros) + Specification = property(_GetSpecification) + + IsLibrary = property(_IsLibrary) + + BuildDir = property(_GetBuildDir) + OutputDir = property(_GetOutputDir) + DebugDir = property(_GetDebugDir) + MakeFileDir = property(_GetMakeFileDir) + CustomMakefile = property(_GetCustomMakefile) + + IncludePathList = property(_GetIncludePathList) + AutoGenFileList = property(_GetAutoGenFileList) + UnicodeFileList = property(_GetUnicodeFileList) + SourceFileList = property(_GetSourceFileList) + BinaryFileList = property(_GetBinaryFiles) # FileType : [File List] + Targets = property(_GetTargets) + IntroTargetList = property(_GetIntroTargetList) + CodaTargetList = property(_GetFinalTargetList) + FileTypes = property(_GetFileTypes) + BuildRules = property(_GetBuildRules) 
+ + DependentPackageList = property(_GetDependentPackageList) + DependentLibraryList = property(_GetLibraryList) + LibraryAutoGenList = property(_GetLibraryAutoGenList) + DerivedPackageList = property(_GetDerivedPackageList) + + ModulePcdList = property(_GetModulePcdList) + LibraryPcdList = property(_GetLibraryPcdList) + GuidList = property(_GetGuidList) + ProtocolList = property(_GetProtocolList) + PpiList = property(_GetPpiList) + DepexList = property(_GetDepexTokenList) + BuildOption = property(_GetModuleBuildOption) + BuildCommand = property(_GetBuildCommand) + +# This acts like the main() function for the script, unless it is 'import'ed into another script. +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py new file mode 100644 index 0000000000..cbe7d60f3f --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py @@ -0,0 +1,622 @@ +## @file +# The engine for building files +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. 
#

##
# Import Modules
#
import os
import re
import copy
import string

from Common.GlobalData import *
from Common.BuildToolError import *
from Common.Misc import tdict, PathClass
from Common.String import NormPath
from Common.DataType import *

import Common.EdkLogger as EdkLogger

## Convert file type to file list macro name
#
#   @param      FileType    The name of file type
#
#   @retval     string      The name of macro, e.g. "C-Code" -> "C_CODES"
#
def FileListMacro(FileType):
    MacroBase = FileType.replace("-", "_").upper()
    return MacroBase + "S"

## Convert file type to list file macro name
#
#   @param      FileType    The name of file type
#
#   @retval     string      The name of macro, derived from the file list macro
#
def ListFileMacro(FileType):
    return FileListMacro(FileType) + "_LIST"

## Description of one build target: its inputs, outputs and the commands in between
class TargetDescBlock(object):
    # {TargetFile : TargetDescBlock object}
    _Cache_ = {}

    ## Factory method
    #
    #   When a target with the same first output is already cached, merge the
    #   new inputs into it; otherwise build a fresh instance.  (The store into
    #   the cache below is commented out, so with the code as-is the lookup
    #   never hits and a new instance is created on every call.)
    def __new__(Class, Inputs, Outputs, Commands, Dependencies):
        if Outputs[0] not in Class._Cache_:
            Desc = super(TargetDescBlock, Class).__new__(Class)
            Desc._Init(Inputs, Outputs, Commands, Dependencies)
            #Class._Cache_[Outputs[0]] = Desc
        else:
            Desc = Class._Cache_[Outputs[0]]
            for InputFile in Inputs:
                Desc.AddInput(InputFile)
        return Desc

    def _Init(self, Inputs, Outputs, Commands, Dependencies):
        self.Inputs = Inputs
        self.Outputs = Outputs
        self.Commands = Commands
        self.Dependencies = Dependencies
        # the first output file identifies this target
        self.Target = self.Outputs[0] if self.Outputs else None

    def __str__(self):
        return self.Target.Path

    def __hash__(self):
        return hash(self.Target.Path)

    ## Two descriptions are equal when they produce the same target path;
    #  a non-TargetDescBlock operand is compared through its string form.
    def __eq__(self, Other):
        if type(Other) == type(self):
            return self.Target.Path == Other.Target.Path
        return str(Other) == self.Target.Path

    ## Record one more input file, skipping duplicates
    def AddInput(self, Input):
        if Input not in self.Inputs:
            self.Inputs.append(Input)

    ## Whether more than one input feeds this target
    def IsMultipleInput(self):
        return len(self.Inputs) > 1

    ## Drop all cached target descriptions
    @staticmethod
    def Renew():
        TargetDescBlock._Cache_ = {}

## Class for one build rule
#
# This represents a build rule which can give out corresponding
command list for +# building the given source file(s). The result can be used for generating the +# target for makefile. +# +class FileBuildRule: + INC_LIST_MACRO = "INC_LIST" + INC_MACRO = "INC" + + ## constructor + # + # @param Input The dictionary represeting input file(s) for a rule + # @param Output The list represeting output file(s) for a rule + # @param Command The list containing commands to generate the output from input + # + def __init__(self, Type, Input, Output, Command, ExtraDependency=None): + # The Input should not be empty + if not Input: + Input = [] + if not Output: + Output = [] + if not Command: + Command = [] + + self.FileListMacro = FileListMacro(Type) + self.ListFileMacro = ListFileMacro(Type) + self.IncListFileMacro = self.INC_LIST_MACRO + + self.SourceFileType = Type + # source files listed not in "*" or "?" pattern format + if not ExtraDependency: + self.ExtraSourceFileList = [] + else: + self.ExtraSourceFileList = ExtraDependency + + # + # Search macros used in command lines for <FILE_TYPE>_LIST and INC_LIST. 
+ # If found, generate a file to keep the input files used to get over the + # limitation of command line length + # + self.MacroList = [] + self.CommandList = [] + for CmdLine in Command: + self.MacroList.extend(gMacroPattern.findall(CmdLine)) + # replace path separator with native one + self.CommandList.append(CmdLine) + + # Indicate what should be generated + if self.FileListMacro in self.MacroList: + self.GenFileListMacro = True + else: + self.GenFileListMacro = False + + if self.ListFileMacro in self.MacroList: + self.GenListFile = True + self.GenFileListMacro = True + else: + self.GenListFile = False + + if self.INC_LIST_MACRO in self.MacroList: + self.GenIncListFile = True + else: + self.GenIncListFile = False + + # Check input files + self.IsMultipleInput = False + self.SourceFileExtList = [] + for File in Input: + Base, Ext = os.path.splitext(File) + if Base.find("*") >= 0: + # There's "*" in the file name + self.IsMultipleInput = True + self.GenFileListMacro = True + elif Base.find("?") < 0: + # There's no "*" and "?" 
in file name + self.ExtraSourceFileList.append(File) + continue + if Ext not in self.SourceFileExtList: + self.SourceFileExtList.append(Ext) + + # Check output files + self.DestFileList = [] + for File in Output: + self.DestFileList.append(File) + + # All build targets generated by this rule for a module + self.BuildTargets = {} + + ## str() function support + # + # @retval string + # + def __str__(self): + SourceString = "" + SourceString += " %s %s %s" % (self.SourceFileType, " ".join(self.SourceFileExtList), self.ExtraSourceFileList) + DestString = ", ".join(self.DestFileList) + CommandString = "\n\t".join(self.CommandList) + return "%s : %s\n\t%s" % (DestString, SourceString, CommandString) + + ## Check if given file extension is supported by this rule + # + # @param FileExt The extension of a file + # + # @retval True If the extension is supported + # @retval False If the extension is not supported + # + def IsSupported(self, FileExt): + return FileExt in self.SourceFileExtList + + def Instantiate(self, Macros={}): + NewRuleObject = copy.copy(self) + NewRuleObject.BuildTargets = {} + NewRuleObject.DestFileList = [] + for File in self.DestFileList: + NewRuleObject.DestFileList.append(PathClass(NormPath(File, Macros))) + return NewRuleObject + + ## Apply the rule to given source file(s) + # + # @param SourceFile One file or a list of files to be built + # @param RelativeToDir The relative path of the source file + # @param PathSeparator Path separator + # + # @retval tuple (Source file in full path, List of individual sourcefiles, Destionation file, List of build commands) + # + def Apply(self, SourceFile): + if not self.CommandList or not self.DestFileList: + return None + + # source file + if self.IsMultipleInput: + SrcFileName = "" + SrcFileBase = "" + SrcFileExt = "" + SrcFileDir = "" + SrcPath = "" + # SourceFile must be a list + SrcFile = "$(%s)" % self.FileListMacro + else: + SrcFileName, SrcFileBase, SrcFileExt = SourceFile.Name, SourceFile.BaseName, 
SourceFile.Ext + if SourceFile.Root: + SrcFileDir = SourceFile.SubDir + if SrcFileDir == "": + SrcFileDir = "." + else: + SrcFileDir = "." + SrcFile = SourceFile.Path + SrcPath = SourceFile.Dir + + # destination file (the first one) + if self.DestFileList: + DestFile = self.DestFileList[0].Path + DestPath = self.DestFileList[0].Dir + DestFileName = self.DestFileList[0].Name + DestFileBase, DestFileExt = self.DestFileList[0].BaseName, self.DestFileList[0].Ext + else: + DestFile = "" + DestPath = "" + DestFileName = "" + DestFileBase = "" + DestFileExt = "" + + BuildRulePlaceholderDict = { + # source file + "src" : SrcFile, + "s_path" : SrcPath, + "s_dir" : SrcFileDir, + "s_name" : SrcFileName, + "s_base" : SrcFileBase, + "s_ext" : SrcFileExt, + # destination file + "dst" : DestFile, + "d_path" : DestPath, + "d_name" : DestFileName, + "d_base" : DestFileBase, + "d_ext" : DestFileExt, + } + + DstFile = [] + for File in self.DestFileList: + File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict) + File = string.Template(str(File)).safe_substitute(BuildRulePlaceholderDict) + DstFile.append(PathClass(File, IsBinary=True)) + + if DstFile[0] in self.BuildTargets: + TargetDesc = self.BuildTargets[DstFile[0]] + TargetDesc.AddInput(SourceFile) + else: + CommandList = [] + for CommandString in self.CommandList: + CommandString = string.Template(CommandString).safe_substitute(BuildRulePlaceholderDict) + CommandString = string.Template(CommandString).safe_substitute(BuildRulePlaceholderDict) + CommandList.append(CommandString) + TargetDesc = TargetDescBlock([SourceFile], DstFile, CommandList, self.ExtraSourceFileList) + TargetDesc.ListFileMacro = self.ListFileMacro + TargetDesc.FileListMacro = self.FileListMacro + TargetDesc.IncListFileMacro = self.IncListFileMacro + TargetDesc.GenFileListMacro = self.GenFileListMacro + TargetDesc.GenListFile = self.GenListFile + TargetDesc.GenIncListFile = self.GenIncListFile + self.BuildTargets[DstFile[0]] = TargetDesc + 
return TargetDesc + +## Class for build rules +# +# BuildRule class parses rules defined in a file or passed by caller, and converts +# the rule into FileBuildRule object. +# +class BuildRule: + _SectionHeader = "SECTIONHEADER" + _Section = "SECTION" + _SubSectionHeader = "SUBSECTIONHEADER" + _SubSection = "SUBSECTION" + _InputFile = "INPUTFILE" + _OutputFile = "OUTPUTFILE" + _ExtraDependency = "EXTRADEPENDENCY" + _Command = "COMMAND" + _UnknownSection = "UNKNOWNSECTION" + + _SubSectionList = [_InputFile, _OutputFile, _Command] + + _PATH_SEP = "(+)" + _FileTypePattern = re.compile("^[_a-zA-Z][_\-0-9a-zA-Z]*$") + _BinaryFileRule = FileBuildRule(TAB_DEFAULT_BINARY_FILE, [], [os.path.join("$(OUTPUT_DIR)", "${s_name}")], + ["$(CP) ${src} ${dst}"], []) + + ## Constructor + # + # @param File The file containing build rules in a well defined format + # @param Content The string list of build rules in a well defined format + # @param LineIndex The line number from which the parsing will begin + # @param SupportedFamily The list of supported tool chain families + # + def __init__(self, File=None, Content=None, LineIndex=0, SupportedFamily=["MSFT", "INTEL", "GCC", "RVCT"]): + self.RuleFile = File + # Read build rules from file if it's not none + if File != None: + try: + self.RuleContent = open(File, 'r').readlines() + except: + EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File) + elif Content != None: + self.RuleContent = Content + else: + EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given") + + self.SupportedToolChainFamilyList = SupportedFamily + self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object} + self.Ext2FileType = {} # {ext : file-type} + self.FileTypeList = set() + + self._LineIndex = LineIndex + self._State = "" + self._RuleInfo = tdict(True, 2) # {toolchain family : {"InputFile": {}, "OutputFile" : [], "Command" : []}} + self._FileType = '' + self._BuildTypeList = [] + 
self._ArchList = [] + self._FamilyList = [] + self._TotalToolChainFamilySet = set() + self._RuleObjectList = [] # FileBuildRule object list + + self.Parse() + + # some intrinsic rules + self.RuleDatabase[TAB_DEFAULT_BINARY_FILE, "COMMON", "COMMON", "COMMON"] = self._BinaryFileRule + self.FileTypeList.add(TAB_DEFAULT_BINARY_FILE) + + ## Parse the build rule strings + def Parse(self): + self._State = self._Section + for Index in range(self._LineIndex, len(self.RuleContent)): + # Clean up the line and replace path separator with native one + Line = self.RuleContent[Index].strip().replace(self._PATH_SEP, os.path.sep) + self.RuleContent[Index] = Line + + # skip empty or comment line + if Line == "" or Line[0] == "#": + continue + + # find out section header, enclosed by [] + if Line[0] == '[' and Line[-1] == ']': + # merge last section information into rule database + self.EndOfSection() + self._State = self._SectionHeader + # find out sub-section header, enclosed by <> + elif Line[0] == '<' and Line[-1] == '>': + if self._State != self._UnknownSection: + self._State = self._SubSectionHeader + + # call section handler to parse each (sub)section + self._StateHandler[self._State](self, Index) + # merge last section information into rule database + self.EndOfSection() + + ## Parse definitions under a section + # + # @param LineIndex The line index of build rule text + # + def ParseSection(self, LineIndex): + pass + + ## Parse definitions under a subsection + # + # @param LineIndex The line index of build rule text + # + def ParseSubSection(self, LineIndex): + # currenly nothing here + pass + + ## Placeholder for not supported sections + # + # @param LineIndex The line index of build rule text + # + def SkipSection(self, LineIndex): + pass + + ## Merge section information just got into rule database + def EndOfSection(self): + Database = self.RuleDatabase + # if there's specific toochain family, 'COMMON' doesn't make sense any more + if len(self._TotalToolChainFamilySet) > 
1 and 'COMMON' in self._TotalToolChainFamilySet: + self._TotalToolChainFamilySet.remove('COMMON') + for Family in self._TotalToolChainFamilySet: + Input = self._RuleInfo[Family, self._InputFile] + Output = self._RuleInfo[Family, self._OutputFile] + Command = self._RuleInfo[Family, self._Command] + ExtraDependency = self._RuleInfo[Family, self._ExtraDependency] + + BuildRule = FileBuildRule(self._FileType, Input, Output, Command, ExtraDependency) + for BuildType in self._BuildTypeList: + for Arch in self._ArchList: + Database[self._FileType, BuildType, Arch, Family] = BuildRule + for FileExt in BuildRule.SourceFileExtList: + self.Ext2FileType[FileExt] = self._FileType + + ## Parse section header + # + # @param LineIndex The line index of build rule text + # + def ParseSectionHeader(self, LineIndex): + self._RuleInfo = tdict(True, 2) + self._BuildTypeList = [] + self._ArchList = [] + self._FamilyList = [] + self._TotalToolChainFamilySet = set() + FileType = '' + RuleNameList = self.RuleContent[LineIndex][1:-1].split(',') + for RuleName in RuleNameList: + Arch = 'COMMON' + BuildType = 'COMMON' + TokenList = [Token.strip().upper() for Token in RuleName.split('.')] + # old format: Build.File-Type + if TokenList[0] == "BUILD": + if len(TokenList) == 1: + EdkLogger.error("build", FORMAT_INVALID, "Invalid rule section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + FileType = TokenList[1] + if FileType == '': + EdkLogger.error("build", FORMAT_INVALID, "No file type given", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if self._FileTypePattern.match(FileType) == None: + EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1, + ExtraData="Only character, number (non-first character), '_' and '-' are allowed in file type") + # new format: File-Type.Build-Type.Arch + else: + if FileType == '': + FileType = TokenList[0] + elif FileType != TokenList[0]: + EdkLogger.error("build", 
FORMAT_INVALID, + "Different file types are not allowed in the same rule section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if len(TokenList) > 1: + BuildType = TokenList[1] + if len(TokenList) > 2: + Arch = TokenList[2] + if BuildType not in self._BuildTypeList: + self._BuildTypeList.append(BuildType) + if Arch not in self._ArchList: + self._ArchList.append(Arch) + + if 'COMMON' in self._BuildTypeList and len(self._BuildTypeList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific build types must not be mixed with common one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if 'COMMON' in self._ArchList and len(self._ArchList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific ARCH must not be mixed with common one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + self._FileType = FileType + self._State = self._Section + self.FileTypeList.add(FileType) + + ## Parse sub-section header + # + # @param LineIndex The line index of build rule text + # + def ParseSubSectionHeader(self, LineIndex): + SectionType = "" + List = self.RuleContent[LineIndex][1:-1].split(',') + FamilyList = [] + for Section in List: + TokenList = Section.split('.') + Type = TokenList[0].strip().upper() + + if SectionType == "": + SectionType = Type + elif SectionType != Type: + EdkLogger.error("build", FORMAT_INVALID, + "Two different section types are not allowed in the same sub-section", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + + if len(TokenList) > 1: + Family = TokenList[1].strip().upper() + else: + Family = "COMMON" + + if Family not in FamilyList: + FamilyList.append(Family) + + self._FamilyList = FamilyList + self._TotalToolChainFamilySet.update(FamilyList) + self._State = SectionType.upper() + if 'COMMON' in FamilyList and len(FamilyList) > 1: + EdkLogger.error("build", FORMAT_INVALID, + "Specific tool chain family 
should not be mixed with general one", + File=self.RuleFile, Line=LineIndex+1, + ExtraData=self.RuleContent[LineIndex]) + if self._State not in self._StateHandler: + EdkLogger.error("build", FORMAT_INVALID, File=self.RuleFile, Line=LineIndex+1, + ExtraData="Unknown subsection: %s" % self.RuleContent[LineIndex]) + ## Parse <InputFile> sub-section + # + # @param LineIndex The line index of build rule text + # + def ParseInputFile(self, LineIndex): + FileList = [File.strip() for File in self.RuleContent[LineIndex].split(",")] + for ToolChainFamily in self._FamilyList: + InputFiles = self._RuleInfo[ToolChainFamily, self._State] + if InputFiles == None: + InputFiles = [] + self._RuleInfo[ToolChainFamily, self._State] = InputFiles + InputFiles.extend(FileList) + + ## Parse <ExtraDependency> sub-section + # + # @param LineIndex The line index of build rule text + # + def ParseCommon(self, LineIndex): + for ToolChainFamily in self._FamilyList: + Items = self._RuleInfo[ToolChainFamily, self._State] + if Items == None: + Items = [] + self._RuleInfo[ToolChainFamily, self._State] = Items + Items.append(self.RuleContent[LineIndex]) + + ## Get a build rule via [] operator + # + # @param FileExt The extension of a file + # @param ToolChainFamily The tool chain family name + # @param BuildVersion The build version number. "*" means any rule + # is applicalbe. 
+ # + # @retval FileType The file type string + # @retval FileBuildRule The object of FileBuildRule + # + # Key = (FileExt, ModuleType, Arch, ToolChainFamily) + def __getitem__(self, Key): + if not Key: + return None + + if Key[0] in self.Ext2FileType: + Type = self.Ext2FileType[Key[0]] + elif Key[0].upper() in self.FileTypeList: + Type = Key[0].upper() + else: + return None + + if len(Key) > 1: + Key = (Type,) + Key[1:] + else: + Key = (Type,) + return self.RuleDatabase[Key] + + _StateHandler = { + _SectionHeader : ParseSectionHeader, + _Section : ParseSection, + _SubSectionHeader : ParseSubSectionHeader, + _SubSection : ParseSubSection, + _InputFile : ParseInputFile, + _OutputFile : ParseCommon, + _ExtraDependency : ParseCommon, + _Command : ParseCommon, + _UnknownSection : SkipSection, + } + +# This acts like the main() function for the script, unless it is 'import'ed into another +# script. +if __name__ == '__main__': + import sys + EdkLogger.Initialize() + if len(sys.argv) > 1: + Br = BuildRule(sys.argv[1]) + print str(Br[".c", "DXE_DRIVER", "IA32", "MSFT"][1]) + print + print str(Br[".c", "DXE_DRIVER", "IA32", "INTEL"][1]) + print + print str(Br[".c", "DXE_DRIVER", "IA32", "GCC"][1]) + print + print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1]) + print + print str(Br[".h", "ACPI_TABLE", "IA32", "INTEL"][1]) + print + print str(Br[".ac", "ACPI_TABLE", "IA32", "MSFT"][1]) + print + print str(Br[".s", "SEC", "IPF", "COMMON"][1]) + print + print str(Br[".s", "SEC"][1]) + diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py new file mode 100644 index 0000000000..b62a12708b --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenC.py @@ -0,0 +1,1931 @@ +## @file +# Routines for generating AutoGen.h and AutoGen.c +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. 
This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## Import Modules +# +import string + +from Common import EdkLogger + +from Common.BuildToolError import * +from Common.DataType import * +from Common.Misc import * +from Common.String import StringToArray +from StrGather import * + +## PCD type string +gItemTypeStringDatabase = { + TAB_PCDS_FEATURE_FLAG : 'FixedAtBuild', + TAB_PCDS_FIXED_AT_BUILD : 'FixedAtBuild', + TAB_PCDS_PATCHABLE_IN_MODULE: 'BinaryPatch', + TAB_PCDS_DYNAMIC : '', + TAB_PCDS_DYNAMIC_DEFAULT : '', + TAB_PCDS_DYNAMIC_VPD : '', + TAB_PCDS_DYNAMIC_HII : '', + TAB_PCDS_DYNAMIC_EX : '', + TAB_PCDS_DYNAMIC_EX_DEFAULT : '', + TAB_PCDS_DYNAMIC_EX_VPD : '', + TAB_PCDS_DYNAMIC_EX_HII : '', +} + +## Dynamic PCD types +gDynamicPcd = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII] + +## Dynamic-ex PCD types +gDynamicExPcd = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII] + +## Datum size +gDatumSizeStringDatabase = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOLEAN','VOID*':'8'} +gDatumSizeStringDatabaseH = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'BOOL','VOID*':'PTR'} +gDatumSizeStringDatabaseLib = {'UINT8':'8','UINT16':'16','UINT32':'32','UINT64':'64','BOOLEAN':'Bool','VOID*':'Ptr'} + +## Mapping between PCD driver type and EFI phase +gPcdPhaseMap = { + "PEI_PCD_DRIVER" : "PEI", + "DXE_PCD_DRIVER" : "DXE" +} + +gPcdDatabaseCommonAutoGenH = """ +// +// The following definition will be generated by build tool +// + +// +// Common definitions +// 
+typedef UINT8 SKU_ID; + +#define PCD_TYPE_SHIFT 28 + +#define PCD_TYPE_DATA (0x0 << PCD_TYPE_SHIFT) +#define PCD_TYPE_HII (0x8 << PCD_TYPE_SHIFT) +#define PCD_TYPE_VPD (0x4 << PCD_TYPE_SHIFT) +#define PCD_TYPE_SKU_ENABLED (0x2 << PCD_TYPE_SHIFT) +#define PCD_TYPE_STRING (0x1 << PCD_TYPE_SHIFT) + +#define PCD_TYPE_ALL_SET (PCD_TYPE_DATA | PCD_TYPE_HII | PCD_TYPE_VPD | PCD_TYPE_SKU_ENABLED | PCD_TYPE_STRING) + +#define PCD_DATUM_TYPE_SHIFT 24 + +#define PCD_DATUM_TYPE_POINTER (0x0 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT8 (0x1 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT16 (0x2 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT32 (0x4 << PCD_DATUM_TYPE_SHIFT) +#define PCD_DATUM_TYPE_UINT64 (0x8 << PCD_DATUM_TYPE_SHIFT) + +#define PCD_DATUM_TYPE_ALL_SET (PCD_DATUM_TYPE_POINTER | \\ + PCD_DATUM_TYPE_UINT8 | \\ + PCD_DATUM_TYPE_UINT16 | \\ + PCD_DATUM_TYPE_UINT32 | \\ + PCD_DATUM_TYPE_UINT64) + +#define PCD_DATABASE_OFFSET_MASK (~(PCD_TYPE_ALL_SET | PCD_DATUM_TYPE_ALL_SET)) + +typedef struct { + UINT32 ExTokenNumber; + UINT16 LocalTokenNumber; // PCD Number of this particular platform build + UINT16 ExGuidIndex; // Index of GuidTable +} DYNAMICEX_MAPPING; + +typedef struct { + UINT32 SkuDataStartOffset; //We have to use offsetof MACRO as we don't know padding done by compiler + UINT32 SkuIdTableOffset; //Offset from the PCD_DB +} SKU_HEAD; + +typedef struct { + UINT16 GuidTableIndex; // Offset in Guid Table in units of GUID. + UINT16 StringIndex; // Offset in String Table in units of UINT16. 
+ UINT16 Offset; // Offset in Variable + UINT16 DefaultValueOffset; // Offset of the Default Value +} VARIABLE_HEAD; + +typedef struct { + UINT32 Offset; +} VPD_HEAD; + +typedef UINT16 STRING_HEAD; + +typedef UINT16 SIZE_INFO; + +#define offsetof(s,m) (UINT32) (UINTN) &(((s *)0)->m) + +""" + +gPcdDatabaseEpilogueAutoGenH = """ +typedef struct { + PEI_PCD_DATABASE PeiDb; + DXE_PCD_DATABASE DxeDb; +} PCD_DATABASE; + +#define PCD_TOTAL_TOKEN_NUMBER (PEI_LOCAL_TOKEN_NUMBER + DXE_LOCAL_TOKEN_NUMBER) + +""" + +gPcdDatabaseAutoGenH = TemplateString(""" +#define ${PHASE}_GUID_TABLE_SIZE ${GUID_TABLE_SIZE} +#define ${PHASE}_STRING_TABLE_SIZE ${STRING_TABLE_SIZE} +#define ${PHASE}_SKUID_TABLE_SIZE ${SKUID_TABLE_SIZE} +#define ${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE ${LOCAL_TOKEN_NUMBER_TABLE_SIZE} +#define ${PHASE}_LOCAL_TOKEN_NUMBER ${LOCAL_TOKEN_NUMBER} +#define ${PHASE}_EXMAPPING_TABLE_SIZE ${EXMAPPING_TABLE_SIZE} +#define ${PHASE}_EX_TOKEN_NUMBER ${EX_TOKEN_NUMBER} +#define ${PHASE}_SIZE_TABLE_SIZE ${SIZE_TABLE_SIZE} +#define ${PHASE}_GUID_TABLE_EMPTY ${GUID_TABLE_EMPTY} +#define ${PHASE}_STRING_TABLE_EMPTY ${STRING_TABLE_EMPTY} +#define ${PHASE}_SKUID_TABLE_EMPTY ${SKUID_TABLE_EMPTY} +#define ${PHASE}_DATABASE_EMPTY ${DATABASE_EMPTY} +#define ${PHASE}_EXMAP_TABLE_EMPTY ${EXMAP_TABLE_EMPTY} + +typedef struct { +${BEGIN} UINT64 ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}]; +${END} +${BEGIN} UINT64 ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64}; +${END} +${BEGIN} UINT32 ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}]; +${END} +${BEGIN} UINT32 ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32}; +${END} +${BEGIN} VPD_HEAD ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}]; +${END} + DYNAMICEX_MAPPING ExMapTable[${PHASE}_EXMAPPING_TABLE_SIZE]; + UINT32 LocalTokenNumberTable[${PHASE}_LOCAL_TOKEN_NUMBER_TABLE_SIZE]; + GUID 
GuidTable[${PHASE}_GUID_TABLE_SIZE]; +${BEGIN} STRING_HEAD ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}]; +${END} +${BEGIN} VARIABLE_HEAD ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}[${VARIABLE_HEAD_NUMSKUS_DECL}]; +${END} +${BEGIN} UINT8 StringTable${STRING_TABLE_INDEX}[${STRING_TABLE_LENGTH}]; /* ${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */ +${END} + SIZE_INFO SizeTable[${PHASE}_SIZE_TABLE_SIZE]; +${BEGIN} UINT16 ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}]; +${END} +${BEGIN} UINT16 ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16}; +${END} +${BEGIN} UINT8 ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}]; +${END} +${BEGIN} UINT8 ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8}; +${END} +${BEGIN} BOOLEAN ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}]; +${END} +${BEGIN} BOOLEAN ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN}; +${END} + UINT8 SkuIdTable[${PHASE}_SKUID_TABLE_SIZE]; +${SYSTEM_SKU_ID} +} ${PHASE}_PCD_DATABASE_INIT; + +typedef struct { +${PCD_DATABASE_UNINIT_EMPTY} +${BEGIN} UINT64 ${UNINIT_CNAME_DECL_UINT64}_${UNINIT_GUID_DECL_UINT64}[${UNINIT_NUMSKUS_DECL_UINT64}]; +${END} +${BEGIN} UINT32 ${UNINIT_CNAME_DECL_UINT32}_${UNINIT_GUID_DECL_UINT32}[${UNINIT_NUMSKUS_DECL_UINT32}]; +${END} +${BEGIN} UINT16 ${UNINIT_CNAME_DECL_UINT16}_${UNINIT_GUID_DECL_UINT16}[${UNINIT_NUMSKUS_DECL_UINT16}]; +${END} +${BEGIN} UINT8 ${UNINIT_CNAME_DECL_UINT8}_${UNINIT_GUID_DECL_UINT8}[${UNINIT_NUMSKUS_DECL_UINT8}]; +${END} +${BEGIN} BOOLEAN ${UNINIT_CNAME_DECL_BOOLEAN}_${UNINIT_GUID_DECL_BOOLEAN}[${UNINIT_NUMSKUS_DECL_BOOLEAN}]; +${END} +} ${PHASE}_PCD_DATABASE_UNINIT; + +#define PCD_${PHASE}_SERVICE_DRIVER_VERSION 2 + +typedef struct { + ${PHASE}_PCD_DATABASE_INIT Init; + ${PHASE}_PCD_DATABASE_UNINIT Uninit; +} 
${PHASE}_PCD_DATABASE; + +#define ${PHASE}_NEX_TOKEN_NUMBER (${PHASE}_LOCAL_TOKEN_NUMBER - ${PHASE}_EX_TOKEN_NUMBER) +""") + +gEmptyPcdDatabaseAutoGenC = TemplateString(""" +${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = { + /* ExMapTable */ + { + {0, 0, 0} + }, + /* LocalTokenNumberTable */ + { + 0 + }, + /* GuidTable */ + { + {0x00000000, 0x0000, 0x0000, {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}} + }, + /* StringTable */ + { 0 }, + /* SizeTable */ + { + 0, 0 + }, + /* SkuIdTable */ + { 0 }, + ${SYSTEM_SKU_ID_VALUE} +}; +""") + +gPcdDatabaseAutoGenC = TemplateString(""" +${PHASE}_PCD_DATABASE_INIT g${PHASE}PcdDbInit = { +${BEGIN} { ${INIT_VALUE_UINT64} }, /* ${INIT_CNAME_DECL_UINT64}_${INIT_GUID_DECL_UINT64}[${INIT_NUMSKUS_DECL_UINT64}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT64}, /* ${VARDEF_CNAME_UINT64}_${VARDEF_GUID_UINT64}_VariableDefault_${VARDEF_SKUID_UINT64} */ +${END} +${BEGIN} { ${INIT_VALUE_UINT32} }, /* ${INIT_CNAME_DECL_UINT32}_${INIT_GUID_DECL_UINT32}[${INIT_NUMSKUS_DECL_UINT32}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT32}, /* ${VARDEF_CNAME_UINT32}_${VARDEF_GUID_UINT32}_VariableDefault_${VARDEF_SKUID_UINT32} */ +${END} + /* VPD */ +${BEGIN} { ${VPD_HEAD_VALUE} }, /* ${VPD_HEAD_CNAME_DECL}_${VPD_HEAD_GUID_DECL}[${VPD_HEAD_NUMSKUS_DECL}] */ +${END} + /* ExMapTable */ + { +${BEGIN} { ${EXMAPPING_TABLE_EXTOKEN}, ${EXMAPPING_TABLE_LOCAL_TOKEN}, ${EXMAPPING_TABLE_GUID_INDEX} }, +${END} + }, + /* LocalTokenNumberTable */ + { +${BEGIN} offsetof(${PHASE}_PCD_DATABASE, ${TOKEN_INIT}.${TOKEN_CNAME}_${TOKEN_GUID}) | ${TOKEN_TYPE}, +${END} + }, + /* GuidTable */ + { +${BEGIN} ${GUID_STRUCTURE}, +${END} + }, +${BEGIN} { ${STRING_HEAD_VALUE} }, /* ${STRING_HEAD_CNAME_DECL}_${STRING_HEAD_GUID_DECL}[${STRING_HEAD_NUMSKUS_DECL}] */ +${END} +${BEGIN} /* ${VARIABLE_HEAD_CNAME_DECL}_${VARIABLE_HEAD_GUID_DECL}[${VARIABLE_HEAD_NUMSKUS_DECL}] */ + { + ${VARIABLE_HEAD_VALUE} + }, +${END} + /* StringTable */ +${BEGIN} ${STRING_TABLE_VALUE}, /* 
${STRING_TABLE_CNAME}_${STRING_TABLE_GUID} */ +${END} + /* SizeTable */ + { +${BEGIN} ${SIZE_TABLE_MAXIMUM_LENGTH}, ${SIZE_TABLE_CURRENT_LENGTH}, /* ${SIZE_TABLE_CNAME}_${SIZE_TABLE_GUID} */ +${END} + }, +${BEGIN} { ${INIT_VALUE_UINT16} }, /* ${INIT_CNAME_DECL_UINT16}_${INIT_GUID_DECL_UINT16}[${INIT_NUMSKUS_DECL_UINT16}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT16}, /* ${VARDEF_CNAME_UINT16}_${VARDEF_GUID_UINT16}_VariableDefault_${VARDEF_SKUID_UINT16} */ +${END} +${BEGIN} { ${INIT_VALUE_UINT8} }, /* ${INIT_CNAME_DECL_UINT8}_${INIT_GUID_DECL_UINT8}[${INIT_NUMSKUS_DECL_UINT8}] */ +${END} +${BEGIN} ${VARDEF_VALUE_UINT8}, /* ${VARDEF_CNAME_UINT8}_${VARDEF_GUID_UINT8}_VariableDefault_${VARDEF_SKUID_UINT8} */ +${END} +${BEGIN} { ${INIT_VALUE_BOOLEAN} }, /* ${INIT_CNAME_DECL_BOOLEAN}_${INIT_GUID_DECL_BOOLEAN}[${INIT_NUMSKUS_DECL_BOOLEAN}] */ +${END} +${BEGIN} ${VARDEF_VALUE_BOOLEAN}, /* ${VARDEF_CNAME_BOOLEAN}_${VARDEF_GUID_BOOLEAN}_VariableDefault_${VARDEF_SKUID_BOOLEAN} */ +${END} + /* SkuIdTable */ + { ${BEGIN}${SKUID_VALUE}, ${END} }, + ${SYSTEM_SKU_ID_VALUE} +}; +""") + + +## AutoGen File Header Templates +gAutoGenHeaderString = TemplateString("""\ +/** + DO NOT EDIT + FILE auto-generated + Module name: + ${FileName} + Abstract: Auto-generated ${FileName} for building module or library. 
+**/ +""") + +gAutoGenHPrologueString = TemplateString(""" +#ifndef _${File}_${Guid} +#define _${File}_${Guid} + +""") + +gAutoGenHEpilogueString = """ +#endif +""" + +## PEI Core Entry Point Templates +gPeiCoreEntryPointPrototype = TemplateString(""" +${BEGIN} +VOID +EFIAPI +${Function} ( + IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData, + IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList, + IN VOID *Context + ); +${END} +""") + +gPeiCoreEntryPointString = TemplateString(""" +${BEGIN} +VOID +EFIAPI +ProcessModuleEntryPointList ( + IN CONST EFI_SEC_PEI_HAND_OFF *SecCoreData, + IN CONST EFI_PEI_PPI_DESCRIPTOR *PpiList, + IN VOID *Context + ) + +{ + ${Function} (SecCoreData, PpiList, Context); +} +${END} +""") + + +## DXE Core Entry Point Templates +gDxeCoreEntryPointPrototype = TemplateString(""" +${BEGIN} +VOID +EFIAPI +${Function} ( + IN VOID *HobStart + ); +${END} +""") + +gDxeCoreEntryPointString = TemplateString(""" +${BEGIN} +VOID +EFIAPI +ProcessModuleEntryPointList ( + IN VOID *HobStart + ) + +{ + ${Function} (HobStart); +} +${END} +""") + +## PEIM Entry Point Templates +gPeimEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ); +${END} +""") + +## SMM_CORE Entry Point Templates +gSmmCoreEntryPointString = TemplateString(""" +const UINT32 _gUefiDriverRevision = 0; +${BEGIN} +EFI_STATUS +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +""") + +gPeimEntryPointString = [ +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + return EFI_SUCCESS; +} +"""), 
+TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + return ${Function} (FileHandle, PeiServices); +} +${END} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT32 _gPeimRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + ) + +{ + EFI_STATUS Status; + EFI_STATUS CombinedStatus; + + CombinedStatus = EFI_LOAD_ERROR; +${BEGIN} + Status = ${Function} (FileHandle, PeiServices); + if (!EFI_ERROR (Status) || EFI_ERROR (CombinedStatus)) { + CombinedStatus = Status; + } +${END} + return CombinedStatus; +} +""") +] + +## DXE SMM Entry Point Templates +gDxeSmmEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gDxeSmmEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR; + +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ +${BEGIN} + if (SetJump (&mJumpContext) == 0) { + 
ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } +${END} + + return mDriverEntryPointStatus; +} +""") +] + +## UEFI Driver Entry Point Templates +gUefiDriverEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gUefiDriverEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (EFI_ERROR (Status)) { + ProcessLibraryDestructorList (gImageHandle, gST); + } + gBS->Exit (gImageHandle, Status, 0, NULL); +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; +const UINT32 _gDxeRevision = ${PiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + ${BEGIN} + if (SetJump (&mJumpContext) == 0) { + ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } + ${END} + return mDriverEntryPointStatus; +} + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR; + +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} +""") +] + + +## UEFI Application Entry Point Templates 
+gUefiApplicationEntryPointPrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ); +${END} +""") + +gUefiApplicationEntryPointString = [ +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + return ${Function} (ImageHandle, SystemTable); +} +${END} +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (EFI_ERROR (Status)) { + ProcessLibraryDestructorList (gImageHandle, gST); + } + gBS->Exit (gImageHandle, Status, 0, NULL); +} +"""), +TemplateString(""" +const UINT32 _gUefiDriverRevision = ${EfiSpecVersion}; + +EFI_STATUS +EFIAPI +ProcessModuleEntryPointList ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) + +{ + ${BEGIN} + if (SetJump (&mJumpContext) == 0) { + ExitDriver (${Function} (ImageHandle, SystemTable)); + ASSERT (FALSE); + } + ${END} + return mDriverEntryPointStatus; +} + +static BASE_LIBRARY_JUMP_BUFFER mJumpContext; +static EFI_STATUS mDriverEntryPointStatus = EFI_LOAD_ERROR; + +VOID +EFIAPI +ExitDriver ( + IN EFI_STATUS Status + ) +{ + if (!EFI_ERROR (Status) || EFI_ERROR (mDriverEntryPointStatus)) { + mDriverEntryPointStatus = Status; + } + LongJump (&mJumpContext, (UINTN)-1); + ASSERT (FALSE); +} +""") +] + +## UEFI Unload Image Templates +gUefiUnloadImagePrototype = TemplateString(""" +${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle + ); +${END} +""") + +gUefiUnloadImageString = [ +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +EFI_STATUS +EFIAPI +ProcessModuleUnloadList 
( + IN EFI_HANDLE ImageHandle + ) +{ + return EFI_SUCCESS; +} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +${BEGIN} +EFI_STATUS +EFIAPI +ProcessModuleUnloadList ( + IN EFI_HANDLE ImageHandle + ) +{ + return ${Function} (ImageHandle); +} +${END} +"""), +TemplateString(""" +GLOBAL_REMOVE_IF_UNREFERENCED const UINT8 _gDriverUnloadImageCount = ${Count}; + +EFI_STATUS +EFIAPI +ProcessModuleUnloadList ( + IN EFI_HANDLE ImageHandle + ) +{ + EFI_STATUS Status; + + Status = EFI_SUCCESS; +${BEGIN} + if (EFI_ERROR (Status)) { + ${Function} (ImageHandle); + } else { + Status = ${Function} (ImageHandle); + } +${END} + return Status; +} +""") +] + +gLibraryStructorPrototype = { +'BASE' : TemplateString("""${BEGIN} +RETURN_STATUS +EFIAPI +${Function} ( + VOID + );${END} +"""), + +'PEI' : TemplateString("""${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES **PeiServices + );${END} +"""), + +'DXE' : TemplateString("""${BEGIN} +EFI_STATUS +EFIAPI +${Function} ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + );${END} +"""), +} + +gLibraryStructorCall = { +'BASE' : TemplateString("""${BEGIN} + Status = ${Function} (); + ASSERT_EFI_ERROR (Status);${END} +"""), + +'PEI' : TemplateString("""${BEGIN} + Status = ${Function} (FileHandle, PeiServices); + ASSERT_EFI_ERROR (Status);${END} +"""), + +'DXE' : TemplateString("""${BEGIN} + Status = ${Function} (ImageHandle, SystemTable); + ASSERT_EFI_ERROR (Status);${END} +"""), +} + +## Library Constructor and Destructor Templates +gLibraryString = { +'BASE' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + VOID + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), + +'PEI' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + IN EFI_PEI_FILE_HANDLE FileHandle, + IN CONST EFI_PEI_SERVICES 
**PeiServices + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), + +'DXE' : TemplateString(""" +${BEGIN}${FunctionPrototype}${END} + +VOID +EFIAPI +ProcessLibrary${Type}List ( + IN EFI_HANDLE ImageHandle, + IN EFI_SYSTEM_TABLE *SystemTable + ) +{ +${BEGIN} EFI_STATUS Status; +${FunctionCall}${END} +} +"""), +} + +gSpecificationString = TemplateString(""" +${BEGIN} +#undef ${SpecificationName} +#define ${SpecificationName} ${SpecificationValue} +${END} +""") + +gBasicHeaderFile = "Base.h" + +gModuleTypeHeaderFile = { + "BASE" : [gBasicHeaderFile], + "SEC" : ["PiPei.h", "Library/DebugLib.h"], + "PEI_CORE" : ["PiPei.h", "Library/DebugLib.h", "Library/PeiCoreEntryPoint.h"], + "PEIM" : ["PiPei.h", "Library/DebugLib.h", "Library/PeimEntryPoint.h"], + "DXE_CORE" : ["PiDxe.h", "Library/DebugLib.h", "Library/DxeCoreEntryPoint.h"], + "DXE_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_SMM_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_RUNTIME_DRIVER": ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "DXE_SAL_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "UEFI_DRIVER" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiDriverEntryPoint.h"], + "UEFI_APPLICATION" : ["Uefi.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/UefiBootServicesTableLib.h", "Library/UefiApplicationEntryPoint.h"], + "SMM_DRIVER" : ["PiDxe.h", "Library/BaseLib.h", "Library/DebugLib.h", "Library/SmmDriverEntryPoint.h"], + "SMM_CORE" : ["PiDxe.h", "Library/DebugLib.h"], + "USER_DEFINED" : [gBasicHeaderFile] +} + +## Create code for module PCDs +# +# @param Info The 
ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param Pcd The PCD object +# +def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd): + TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[Pcd.TokenSpaceGuidCName] + PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber + # + # Write PCDs + # + PcdTokenName = '_PCD_TOKEN_' + Pcd.TokenCName + if Pcd.Type in gDynamicExPcd: + TokenNumber = int(Pcd.TokenValue, 0) + else: + if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber: + EdkLogger.error("build", AUTOGEN_ERROR, + "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + TokenNumber = PcdTokenNumber[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] + AutoGenH.Append('\n#define %s %d\n' % (PcdTokenName, TokenNumber)) + + EdkLogger.debug(EdkLogger.DEBUG_3, "Creating code for " + Pcd.TokenCName + "." + Pcd.TokenSpaceGuidCName) + if Pcd.Type not in gItemTypeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + if Pcd.DatumType not in gDatumSizeStringDatabase: + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + + DatumSize = gDatumSizeStringDatabase[Pcd.DatumType] + DatumSizeLib = gDatumSizeStringDatabaseLib[Pcd.DatumType] + GetModeName = '_PCD_GET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + Pcd.TokenCName + SetModeName = '_PCD_SET_MODE_' + gDatumSizeStringDatabaseH[Pcd.DatumType] + '_' + Pcd.TokenCName + + if Pcd.Type in gDynamicExPcd: + AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) 
LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, Pcd.TokenSpaceGuidCName, PcdTokenName)) + elif Pcd.Type in gDynamicPcd: + AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName)) + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName)) + else: + AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName)) + else: + PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[Pcd.Type] + '_' + Pcd.TokenCName + Const = 'const' + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + Const = '' + Type = '' + Array = '' + Value = Pcd.DefaultValue
+ Unicode = False + if Pcd.DatumType == 'UINT64': + if not Value.endswith('ULL'): + Value += 'ULL' + if Pcd.DatumType == 'VOID*': + if Pcd.MaxDatumSize == None or Pcd.MaxDatumSize == '': + EdkLogger.error("build", AUTOGEN_ERROR, + "Unknown [MaxDatumSize] of PCD [%s.%s]" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName), + ExtraData="[%s]" % str(Info)) + + ArraySize = int(Pcd.MaxDatumSize, 0) + if Value[0] == '{': + Type = '(VOID *)' + else:
+ if Value[0] == 'L': + Unicode = True + Value = Value.lstrip('L') #.strip('"') + Value = eval(Value) # translate escape character + NewValue = '{' + for Index in range(0,len(Value)): + if Unicode: + NewValue = NewValue + str(ord(Value[Index]) % 0x10000) + ', '
+ else:
+ NewValue = NewValue + str(ord(Value[Index]) % 0x100) + ', ' + if Unicode:
+ ArraySize = ArraySize / 2;
+
+ if ArraySize < (len(Value) + 1): + ArraySize = len(Value) + 1 + Value = NewValue + '0 }'
+ Array = '[%d]' % ArraySize + # + # skip casting for fixed at build since it breaks ARM assembly. + # Long term we need PCD macros that work in assembly + # + elif Pcd.Type != TAB_PCDS_FIXED_AT_BUILD: + Value = "((%s)%s)" % (Pcd.DatumType, Value) + + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + PcdValueName = '_PCD_PATCHABLE_VALUE_' + Pcd.TokenCName + else: + PcdValueName = '_PCD_VALUE_' + Pcd.TokenCName + + if Pcd.DatumType == 'VOID*':
+ #
+ # For unicode, UINT16 array will be generated, so the alignment of unicode is guaranteed.
+ #
+ if Unicode:
+ AutoGenH.Append('#define _PCD_PATCHABLE_%s_SIZE %s\n' % (Pcd.TokenCName, Pcd.MaxDatumSize))
+ AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName))
+ AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT16 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value))
+ AutoGenH.Append('extern %s UINT16 %s%s;\n' %(Const, PcdVariableName, Array))
+ AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName))
+ else: + AutoGenH.Append('#define _PCD_PATCHABLE_%s_SIZE %s\n' % (Pcd.TokenCName, Pcd.MaxDatumSize)) + AutoGenH.Append('#define %s %s%s\n' %(PcdValueName, Type, PcdVariableName)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s UINT8 %s%s = %s;\n' % (Const, PcdVariableName, Array, Value)) + AutoGenH.Append('extern %s UINT8 %s%s;\n' %(Const, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' %(GetModeName, Type, PcdVariableName)) + elif Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED volatile %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName)) + AutoGenH.Append('extern volatile %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName)) + else: + AutoGenH.Append('#define %s %s\n' %(PcdValueName, Value)) + AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s %s = %s;\n' %(Const, Pcd.DatumType, PcdVariableName, PcdValueName)) + AutoGenH.Append('extern %s %s %s%s;\n' % (Const, Pcd.DatumType, PcdVariableName, Array)) + AutoGenH.Append('#define %s %s%s\n' % (GetModeName, Type, PcdVariableName)) + + if Pcd.Type == TAB_PCDS_PATCHABLE_IN_MODULE: + if Pcd.DatumType == 'VOID*': + AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPatchPcdSetPtr(_gPcd_BinaryPatch_%s, (UINTN)_PCD_PATCHABLE_%s_SIZE, (SizeOfBuffer), (Buffer))\n' % (SetModeName, Pcd.TokenCName, Pcd.TokenCName)) + else: + AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName)) + else: + AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName) + +## Create code for library module PCDs +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# @param Pcd The PCD object +# +def 
## Create code for library module PCDs
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#   @param      Pcd         The PCD object
#
def CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd):
    # Token numbers are assigned platform-wide; a PCD without one cannot be
    # referenced from generated code, so fail the build here.
    PcdTokenNumber = Info.PlatformInfo.PcdTokenNumber
    TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
    TokenCName = Pcd.TokenCName
    TokenSpaceGuidValue = Pcd.TokenSpaceGuidValue #Info.GuidList[TokenSpaceGuidCName]
    if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in PcdTokenNumber:
        EdkLogger.error("build", AUTOGEN_ERROR,
                        "No generated token number for %s.%s\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
                        ExtraData="[%s]" % str(Info))
    TokenNumber = PcdTokenNumber[TokenCName, TokenSpaceGuidCName]

    # Reject PCD/datum types the generator has no string mapping for.
    if Pcd.Type not in gItemTypeStringDatabase:
        EdkLogger.error("build", AUTOGEN_ERROR,
                        "Unknown PCD type [%s] of PCD %s.%s" % (Pcd.Type, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
                        ExtraData="[%s]" % str(Info))
    if Pcd.DatumType not in gDatumSizeStringDatabase:
        EdkLogger.error("build", AUTOGEN_ERROR,
                        "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
                        ExtraData="[%s]" % str(Info))

    DatumType   = Pcd.DatumType
    DatumSize   = gDatumSizeStringDatabaseH[DatumType]
    DatumSizeLib= gDatumSizeStringDatabaseLib[DatumType]
    GetModeName = '_PCD_GET_MODE_' + DatumSize + '_' + TokenCName
    SetModeName = '_PCD_SET_MODE_' + DatumSize + '_' + TokenCName

    # VOID* PCDs are accessed through a pointer cast and declared as arrays.
    Type = ''
    Array = ''
    if Pcd.DatumType == 'VOID*':
        Type = '(VOID *)'
        Array = '[]'

    AutoGenH.Append('#define _PCD_TOKEN_%s %d\n' % (TokenCName, TokenNumber))

    PcdItemType = Pcd.Type
    #if PcdItemType in gDynamicPcd:
    #    PcdItemType = TAB_PCDS_FIXED_AT_BUILD
    #    if (TokenCName, TokenSpaceGuidCName) in Info.PlatformInfo.Platform.Pcds:
    #        PcdItemType = Info.PlatformInfo.Platform.Pcds[TokenCName, TokenSpaceGuidCName].Type
    # DynamicEx PCDs: access macros route through the LibPcd*Ex services,
    # keyed by token-space GUID plus token number.
    if PcdItemType in gDynamicExPcd:
        PcdTokenName = '_PCD_TOKEN_' + TokenCName
        AutoGenH.Append('#define %s LibPcdGetEx%s(&%s, %s)\n' % (GetModeName, DatumSizeLib, TokenSpaceGuidCName, PcdTokenName))
        if DatumType == 'VOID*':
            AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSetEx%s(&%s, %s, (SizeOfBuffer), (Buffer))\n' % (SetModeName, DatumSizeLib, TokenSpaceGuidCName, PcdTokenName))
        else:
            AutoGenH.Append('#define %s(Value) LibPcdSetEx%s(&%s, %s, (Value))\n' % (SetModeName, DatumSizeLib, TokenSpaceGuidCName, PcdTokenName))
    # Dynamic PCDs: the local token number alone identifies the PCD.
    if PcdItemType in gDynamicPcd:
        PcdTokenName = '_PCD_TOKEN_' + TokenCName
        AutoGenH.Append('#define %s LibPcdGet%s(%s)\n' % (GetModeName, DatumSizeLib, PcdTokenName))
        if DatumType == 'VOID*':
            AutoGenH.Append('#define %s(SizeOfBuffer, Buffer) LibPcdSet%s(%s, (SizeOfBuffer), (Buffer))\n' %(SetModeName, DatumSizeLib, PcdTokenName))
        else:
            AutoGenH.Append('#define %s(Value) LibPcdSet%s(%s, (Value))\n' % (SetModeName, DatumSizeLib, PcdTokenName))
    # Patchable-in-module PCDs reference the _gPcd_BinaryPatch_* variable that
    # the consuming module defines; the library only declares it extern.
    if PcdItemType == TAB_PCDS_PATCHABLE_IN_MODULE:
        PcdVariableName = '_gPcd_' + gItemTypeStringDatabase[TAB_PCDS_PATCHABLE_IN_MODULE] + '_' + TokenCName
        AutoGenH.Append('extern %s _gPcd_BinaryPatch_%s%s;\n' %(DatumType, TokenCName, Array) )
        AutoGenH.Append('#define %s %s_gPcd_BinaryPatch_%s\n' %(GetModeName, Type, TokenCName))
        AutoGenH.Append('#define %s(Value) (%s = (Value))\n' % (SetModeName, PcdVariableName))
    # Fixed-at-build and feature-flag PCDs are read-only constants defined by
    # the consuming module; the set macro is emitted commented-out on purpose.
    if PcdItemType == TAB_PCDS_FIXED_AT_BUILD or PcdItemType == TAB_PCDS_FEATURE_FLAG:
        AutoGenH.Append('extern const %s _gPcd_FixedAtBuild_%s%s;\n' %(DatumType, TokenCName, Array))
        #AutoGenH.Append('#define _PCD_VALUE_%s _gPcd_FixedAtBuild_%s\n' %(TokenCName, TokenCName))
        AutoGenH.Append('#define %s %s_gPcd_FixedAtBuild_%s\n' %(GetModeName, Type, TokenCName))
        AutoGenH.Append('//#define %s ASSERT(FALSE) // It is not allowed to set value for a FIXED_AT_BUILD PCD\n' % SetModeName)
## Create code for PCD database in DXE or PEI phase
#
#   @param      Platform    The platform object
#   @param      Phase       'PEI' or 'DXE'
#   @retval     tuple       Two TemplateString objects for C code and header
#                           file, respectively
#
def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
    AutoGenC = TemplateString()
    AutoGenH = TemplateString()

    # Template substitution dictionary, pre-loaded with the values for an
    # empty database; entries are overwritten below as PCDs are discovered.
    Dict = {
        'PHASE'                         : Phase,
        'GUID_TABLE_SIZE'               : '1',
        'STRING_TABLE_SIZE'             : '1',
        'SKUID_TABLE_SIZE'              : '1',
        'LOCAL_TOKEN_NUMBER_TABLE_SIZE' : '1',
        'LOCAL_TOKEN_NUMBER'            : '0',
        'EXMAPPING_TABLE_SIZE'          : '1',
        'EX_TOKEN_NUMBER'               : '0',
        'SIZE_TABLE_SIZE'               : '2',
        'GUID_TABLE_EMPTY'              : 'TRUE',
        'STRING_TABLE_EMPTY'            : 'TRUE',
        'SKUID_TABLE_EMPTY'             : 'TRUE',
        'DATABASE_EMPTY'                : 'TRUE',
        'EXMAP_TABLE_EMPTY'             : 'TRUE',
        'PCD_DATABASE_UNINIT_EMPTY'     : ' UINT8 dummy; /* PCD_DATABASE_UNINIT is emptry */',
        'SYSTEM_SKU_ID'                 : ' SKU_ID SystemSkuId;',
        'SYSTEM_SKU_ID_VALUE'           : '0'
    }

    # Per-datum-type value/declaration lists, split by INIT/UNINIT storage.
    for DatumType in ['UINT64','UINT32','UINT16','UINT8','BOOLEAN']:
        Dict['VARDEF_CNAME_' + DatumType] = []
        Dict['VARDEF_GUID_' + DatumType] = []
        Dict['VARDEF_SKUID_' + DatumType] = []
        Dict['VARDEF_VALUE_' + DatumType] = []
        for Init in ['INIT','UNINIT']:
            Dict[Init+'_CNAME_DECL_' + DatumType] = []
            Dict[Init+'_GUID_DECL_' + DatumType] = []
            Dict[Init+'_NUMSKUS_DECL_' + DatumType] = []
            Dict[Init+'_VALUE_' + DatumType] = []

    for Type in ['STRING_HEAD','VPD_HEAD','VARIABLE_HEAD']:
        Dict[Type + '_CNAME_DECL'] = []
        Dict[Type + '_GUID_DECL'] = []
        Dict[Type + '_NUMSKUS_DECL'] = []
        Dict[Type + '_VALUE'] = []

    Dict['STRING_TABLE_INDEX'] = []
    Dict['STRING_TABLE_LENGTH'] = []
    Dict['STRING_TABLE_CNAME'] = []
    Dict['STRING_TABLE_GUID'] = []
    Dict['STRING_TABLE_VALUE'] = []

    Dict['SIZE_TABLE_CNAME'] = []
    Dict['SIZE_TABLE_GUID'] = []
    Dict['SIZE_TABLE_CURRENT_LENGTH'] = []
    Dict['SIZE_TABLE_MAXIMUM_LENGTH'] = []

    Dict['EXMAPPING_TABLE_EXTOKEN'] = []
    Dict['EXMAPPING_TABLE_LOCAL_TOKEN'] = []
    Dict['EXMAPPING_TABLE_GUID_INDEX'] = []

    Dict['GUID_STRUCTURE'] = []

    Dict['SKUID_VALUE'] = []

    # Only the PEI database carries the system SKU id field.
    if Phase == 'DXE':
        Dict['SYSTEM_SKU_ID'] = ''
        Dict['SYSTEM_SKU_ID_VALUE'] = ''

    StringTableIndex = 0
    StringTableSize = 0
    NumberOfLocalTokens = 0
    NumberOfPeiLocalTokens = 0
    NumberOfDxeLocalTokens = 0
    NumberOfExTokens = 0
    NumberOfSizeItems = 0
    GuidList = []

    # First pass: classify every dynamic PCD, fill the string/size/variable
    # tables and count tokens per phase.
    for Pcd in Platform.DynamicPcdList:
        CName = Pcd.TokenCName
        TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName

        EdkLogger.debug(EdkLogger.DEBUG_3, "PCD: %s %s (%s : %s)" % (CName, TokenSpaceGuidCName, Pcd.Phase, Phase))
        if Pcd.DatumType not in gDatumSizeStringDatabase:
            EdkLogger.error("build", AUTOGEN_ERROR,
                            "Unknown datum type [%s] of PCD %s.%s" % (Pcd.DatumType, Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
                            ExtraData="[%s]" % str(Platform))

        # Phase totals are counted over ALL PCDs (needed for DXE re-basing in
        # the second pass), then PCDs of the other phase are skipped.
        if Pcd.Phase == 'PEI':
            NumberOfPeiLocalTokens += 1
        if Pcd.Phase == 'DXE':
            NumberOfDxeLocalTokens += 1
        if Pcd.Phase != Phase:
            continue

        #
        # TODO: need GetGuidValue() definition
        #
        TokenSpaceGuidStructure = Pcd.TokenSpaceGuidValue
        TokenSpaceGuid = GuidStructureStringToGuidValueName(TokenSpaceGuidStructure)
        if Pcd.Type in gDynamicExPcd:
            if TokenSpaceGuid not in GuidList:
                GuidList += [TokenSpaceGuid]
                Dict['GUID_STRUCTURE'].append(TokenSpaceGuidStructure)
            NumberOfExTokens += 1

        ValueList = []
        StringHeadOffsetList = []
        VpdHeadOffsetList = []
        VariableHeadValueList = []
        Pcd.InitString = 'UNINIT'

        if Pcd.DatumType == 'VOID*':
            Pcd.TokenTypeList = ['PCD_DATUM_TYPE_POINTER']
        elif Pcd.DatumType == 'BOOLEAN':
            # BOOLEAN is stored as a UINT8 in the database.
            Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
        else:
            Pcd.TokenTypeList = ['PCD_DATUM_TYPE_' + Pcd.DatumType]

        if len(Pcd.SkuInfoList) > 1:
            Pcd.TokenTypeList += ['PCD_TYPE_SKU_ENABLED']

        # Walk each SKU of this PCD: HII (variable-backed), VPD, string/data.
        for SkuName in Pcd.SkuInfoList:
            Sku = Pcd.SkuInfoList[SkuName]
            SkuId = Sku.SkuId
            if SkuId == None or SkuId == '':
                continue

            if SkuId not in Dict['SKUID_VALUE']:
                Dict['SKUID_VALUE'].append(SkuId)

            SkuIdIndex = Dict['SKUID_VALUE'].index(SkuId)
            if len(Sku.VariableName) > 0:
                # HII PCD: default lives in an EFI variable.  The variable
                # name goes into the (deduplicated) string table.
                Pcd.TokenTypeList += ['PCD_TYPE_HII']
                Pcd.InitString = 'INIT'
                VariableNameStructure = StringToArray(Sku.VariableName)
                if VariableNameStructure not in Dict['STRING_TABLE_VALUE']:
                    Dict['STRING_TABLE_CNAME'].append(CName)
                    Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)
                    if StringTableIndex == 0:
                        Dict['STRING_TABLE_INDEX'].append('')
                    else:
                        Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)

                    # VariableName is an L"..." literal: strip L and quotes
                    # (-3), add the NUL (+1), two bytes per UCS-2 character.
                    Dict['STRING_TABLE_LENGTH'].append((len(Sku.VariableName) - 3 + 1) * 2)
                    Dict['STRING_TABLE_VALUE'].append(VariableNameStructure)
                    StringTableIndex += 1
                    StringTableSize += (len(Sku.VariableName) - 3 + 1) * 2

                # Byte offset of this variable name inside the string table.
                VariableHeadStringIndex = 0
                for Index in range(Dict['STRING_TABLE_VALUE'].index(VariableNameStructure)):
                    VariableHeadStringIndex += Dict['STRING_TABLE_LENGTH'][Index]

                VariableGuidStructure = Sku.VariableGuidValue
                VariableGuid = GuidStructureStringToGuidValueName(VariableGuidStructure)
                if VariableGuid not in GuidList:
                    GuidList += [VariableGuid]
                    Dict['GUID_STRUCTURE'].append(VariableGuidStructure)
                VariableHeadGuidIndex = GuidList.index(VariableGuid)

                VariableHeadValueList.append('%d, %d, %s, offsetof(%s_PCD_DATABASE, Init.%s_%s_VariableDefault_%s)' %
                                             (VariableHeadGuidIndex, VariableHeadStringIndex, Sku.VariableOffset,
                                              Phase, CName, TokenSpaceGuid, SkuIdIndex))
                Dict['VARDEF_CNAME_'+Pcd.DatumType].append(CName)
                Dict['VARDEF_GUID_'+Pcd.DatumType].append(TokenSpaceGuid)
                Dict['VARDEF_SKUID_'+Pcd.DatumType].append(SkuIdIndex)
                Dict['VARDEF_VALUE_'+Pcd.DatumType].append(Sku.HiiDefaultValue)
            elif Sku.VpdOffset != '':
                # VPD PCD: only the offset into the VPD region is recorded.
                Pcd.TokenTypeList += ['PCD_TYPE_VPD']
                Pcd.InitString = 'INIT'
                VpdHeadOffsetList.append(Sku.VpdOffset)
            else:
                if Pcd.DatumType == 'VOID*':
                    # Default-valued pointer PCD: value goes to string table,
                    # current/max length pair goes to the size table.
                    Pcd.TokenTypeList += ['PCD_TYPE_STRING']
                    Pcd.InitString = 'INIT'
                    if Sku.DefaultValue != '':
                        NumberOfSizeItems += 1
                        Dict['STRING_TABLE_CNAME'].append(CName)
                        Dict['STRING_TABLE_GUID'].append(TokenSpaceGuid)

                        if StringTableIndex == 0:
                            Dict['STRING_TABLE_INDEX'].append('')
                        else:
                            Dict['STRING_TABLE_INDEX'].append('_%d' % StringTableIndex)
                        # Size depends on the literal form: L"..." (UCS-2),
                        # "..." (ASCII) or {...} (byte array).
                        if Sku.DefaultValue[0] == 'L':
                            Size = (len(Sku.DefaultValue) - 3 + 1) * 2
                            Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
                        elif Sku.DefaultValue[0] == '"':
                            Size = len(Sku.DefaultValue) - 2 + 1
                            Dict['STRING_TABLE_VALUE'].append(StringToArray(Sku.DefaultValue))
                        elif Sku.DefaultValue[0] == '{':
                            Size = len(Sku.DefaultValue.replace(',',' ').split())
                            Dict['STRING_TABLE_VALUE'].append(Sku.DefaultValue)

                        StringHeadOffsetList.append(str(StringTableSize))
                        Dict['SIZE_TABLE_CNAME'].append(CName)
                        Dict['SIZE_TABLE_GUID'].append(TokenSpaceGuid)
                        Dict['SIZE_TABLE_CURRENT_LENGTH'].append(Size)
                        Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
                        # Reserve MaxDatumSize in the string table when it
                        # exceeds the default value's size.
                        if Pcd.MaxDatumSize != '':
                            MaxDatumSize = int(Pcd.MaxDatumSize, 0)
                            if MaxDatumSize > Size:
                                Size = MaxDatumSize
                        Dict['STRING_TABLE_LENGTH'].append(Size)
                        StringTableIndex += 1
                        StringTableSize += (Size)
                else:
                    # Plain data PCD: zero defaults may live in the UNINIT
                    # (zero-filled) part of the database.
                    Pcd.TokenTypeList += ['PCD_TYPE_DATA']
                    if Sku.DefaultValue == 'TRUE':
                        Pcd.InitString = 'INIT'
                    else:
                        try:
                            if int(Sku.DefaultValue, 0) != 0:
                                Pcd.InitString = 'INIT'
                        except:
                            # Non-numeric default: deliberately left UNINIT.
                            pass

                    #
                    # For UINT64 type PCD's value, ULL should be appended to
                    # avoid warnings under a Linux build environment.
                    #
                    if Pcd.DatumType == "UINT64":
                        ValueList.append(Sku.DefaultValue + "ULL")
                    else:
                        ValueList.append(Sku.DefaultValue)

        # Deduplicate the accumulated type flags for this PCD.
        Pcd.TokenTypeList = list(set(Pcd.TokenTypeList))

        if 'PCD_TYPE_HII' in Pcd.TokenTypeList:
            Dict['VARIABLE_HEAD_CNAME_DECL'].append(CName)
            Dict['VARIABLE_HEAD_GUID_DECL'].append(TokenSpaceGuid)
            Dict['VARIABLE_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
            Dict['VARIABLE_HEAD_VALUE'].append('{ %s }\n' % ' },\n { '.join(VariableHeadValueList))
        if 'PCD_TYPE_VPD' in Pcd.TokenTypeList:
            Dict['VPD_HEAD_CNAME_DECL'].append(CName)
            Dict['VPD_HEAD_GUID_DECL'].append(TokenSpaceGuid)
            Dict['VPD_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
            Dict['VPD_HEAD_VALUE'].append('{ %s }' % ' }, { '.join(VpdHeadOffsetList))
        if 'PCD_TYPE_STRING' in Pcd.TokenTypeList:
            Dict['STRING_HEAD_CNAME_DECL'].append(CName)
            Dict['STRING_HEAD_GUID_DECL'].append(TokenSpaceGuid)
            Dict['STRING_HEAD_NUMSKUS_DECL'].append(len(Pcd.SkuInfoList))
            Dict['STRING_HEAD_VALUE'].append(', '.join(StringHeadOffsetList))
        if 'PCD_TYPE_DATA' in Pcd.TokenTypeList:
            Dict[Pcd.InitString+'_CNAME_DECL_'+Pcd.DatumType].append(CName)
            Dict[Pcd.InitString+'_GUID_DECL_'+Pcd.DatumType].append(TokenSpaceGuid)
            Dict[Pcd.InitString+'_NUMSKUS_DECL_'+Pcd.DatumType].append(len(Pcd.SkuInfoList))
            if Pcd.InitString == 'UNINIT':
                Dict['PCD_DATABASE_UNINIT_EMPTY'] = ''
            else:
                Dict[Pcd.InitString+'_VALUE_'+Pcd.DatumType].append(', '.join(ValueList))

    if Phase == 'PEI':
        NumberOfLocalTokens = NumberOfPeiLocalTokens
    if Phase == 'DXE':
        NumberOfLocalTokens = NumberOfDxeLocalTokens

    # Second pass fills these slots by generated token number.
    Dict['TOKEN_INIT'] = ['' for x in range(NumberOfLocalTokens)]
    Dict['TOKEN_CNAME'] = ['' for x in range(NumberOfLocalTokens)]
    Dict['TOKEN_GUID'] = ['' for x in range(NumberOfLocalTokens)]
    Dict['TOKEN_TYPE'] = ['' for x in range(NumberOfLocalTokens)]

    for Pcd in Platform.DynamicPcdList:
        CName = Pcd.TokenCName
        TokenSpaceGuidCName = Pcd.TokenSpaceGuidCName
        if Pcd.Phase != Phase:
            continue

        TokenSpaceGuid = GuidStructureStringToGuidValueName(Pcd.TokenSpaceGuidValue) #(Platform.PackageList, TokenSpaceGuidCName))
        # Platform token numbers are 1-based; DXE tokens are additionally
        # re-based past the PEI tokens to index the phase-local table.
        GeneratedTokenNumber = Platform.PcdTokenNumber[CName, TokenSpaceGuidCName] - 1
        if Phase == 'DXE':
            GeneratedTokenNumber -= NumberOfPeiLocalTokens

        EdkLogger.debug(EdkLogger.DEBUG_1, "PCD = %s.%s" % (CName, TokenSpaceGuidCName))
        EdkLogger.debug(EdkLogger.DEBUG_1, "phase = %s" % Phase)
        EdkLogger.debug(EdkLogger.DEBUG_1, "GeneratedTokenNumber = %s" % str(GeneratedTokenNumber))

        Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Init'
        if Pcd.InitString == 'UNINIT':
            Dict['TOKEN_INIT'][GeneratedTokenNumber] = 'Uninit'
        Dict['TOKEN_CNAME'][GeneratedTokenNumber] = CName
        Dict['TOKEN_GUID'][GeneratedTokenNumber] = TokenSpaceGuid
        Dict['TOKEN_TYPE'][GeneratedTokenNumber] = ' | '.join(Pcd.TokenTypeList)
        if Pcd.Type in gDynamicExPcd:
            Dict['EXMAPPING_TABLE_EXTOKEN'].append(Pcd.TokenValue)
            if Phase == 'DXE':
                GeneratedTokenNumber += NumberOfPeiLocalTokens
            #
            # Per, PCD architecture specification, PCD Token Number is 1 based and 0 is defined as invalid token number.
            # For each EX type PCD, a PCD Token Number is assigned. When the
            # PCD Driver/PEIM map EX_GUID and EX_TOKEN_NUMBER to the PCD Token Number,
            # the non-EX Protocol/PPI interface can be called to get/set the value. This assumption is made by
            # Pcd Driver/PEIM in MdeModulePkg.
            # Therefore, 1 is added to GeneratedTokenNumber to generate a PCD Token Number before being inserted
            # to the EXMAPPING_TABLE.
            #
            Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(GeneratedTokenNumber + 1)
            Dict['EXMAPPING_TABLE_GUID_INDEX'].append(GuidList.index(TokenSpaceGuid))

    # Empty tables still need one placeholder entry so the generated C arrays
    # are never zero-sized.
    if GuidList != []:
        Dict['GUID_TABLE_EMPTY'] = 'FALSE'
        Dict['GUID_TABLE_SIZE'] = len(GuidList)
    else:
        Dict['GUID_STRUCTURE'] = [GuidStringToGuidStructureString('00000000-0000-0000-0000-000000000000')]

    if StringTableIndex == 0:
        Dict['STRING_TABLE_INDEX'].append('')
        Dict['STRING_TABLE_LENGTH'].append(1)
        Dict['STRING_TABLE_CNAME'].append('')
        Dict['STRING_TABLE_GUID'].append('')
        Dict['STRING_TABLE_VALUE'].append('{ 0 }')
    else:
        Dict['STRING_TABLE_EMPTY'] = 'FALSE'
        Dict['STRING_TABLE_SIZE'] = StringTableSize

    if Dict['SIZE_TABLE_CNAME'] == []:
        Dict['SIZE_TABLE_CNAME'].append('')
        Dict['SIZE_TABLE_GUID'].append('')
        Dict['SIZE_TABLE_CURRENT_LENGTH'].append(0)
        Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(0)

    if NumberOfLocalTokens != 0:
        Dict['DATABASE_EMPTY'] = 'FALSE'
        Dict['LOCAL_TOKEN_NUMBER_TABLE_SIZE'] = NumberOfLocalTokens
        Dict['LOCAL_TOKEN_NUMBER'] = NumberOfLocalTokens

    if NumberOfExTokens != 0:
        Dict['EXMAP_TABLE_EMPTY'] = 'FALSE'
        Dict['EXMAPPING_TABLE_SIZE'] = NumberOfExTokens
        Dict['EX_TOKEN_NUMBER'] = NumberOfExTokens
    else:
        Dict['EXMAPPING_TABLE_EXTOKEN'].append(0)
        Dict['EXMAPPING_TABLE_LOCAL_TOKEN'].append(0)
        Dict['EXMAPPING_TABLE_GUID_INDEX'].append(0)

    if NumberOfSizeItems != 0:
        # Each size-table item occupies two entries (current + maximum).
        Dict['SIZE_TABLE_SIZE'] = NumberOfSizeItems * 2

    AutoGenH.Append(gPcdDatabaseAutoGenH.Replace(Dict))
    if NumberOfLocalTokens == 0:
        AutoGenC.Append(gEmptyPcdDatabaseAutoGenC.Replace(Dict))
    else:
        AutoGenC.Append(gPcdDatabaseAutoGenC.Replace(Dict))

    return AutoGenH, AutoGenC
## Create code for PCD database
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreatePcdDatabaseCode (Info, AutoGenC, AutoGenH):
    # Only the PCD driver/PEIM carries a database; every other module is a
    # no-op here.
    if Info.PcdIsDriver == "":
        return
    if Info.PcdIsDriver not in gPcdPhaseMap:
        EdkLogger.error("build", AUTOGEN_ERROR, "Not supported PcdIsDriver type:%s" % Info.PcdIsDriver,
                        ExtraData="[%s]" % str(Info))

    # The PEI declarations are generated unconditionally: the DXE database
    # header depends on the PEI token layout.
    AutoGenH.Append(gPcdDatabaseCommonAutoGenH)
    PeiAutoGenH, PeiAutoGenC = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, 'PEI')
    AutoGenH.Append(PeiAutoGenH.String)

    Phase = gPcdPhaseMap[Info.PcdIsDriver]
    if Phase == 'PEI':
        # The PEI C payload goes out only when this module IS the PEI driver.
        AutoGenC.Append(PeiAutoGenC.String)

    if Phase == 'DXE':
        DxeAutoGenH, DxeAutoGenC = CreatePcdDatabasePhaseSpecificAutoGen (Info.PlatformInfo, Phase)
        AutoGenH.Append(DxeAutoGenH.String)
        AutoGenC.Append(DxeAutoGenC.String)
    AutoGenH.Append(gPcdDatabaseEpilogueAutoGenH)
## Create code for library constructor
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH):
    #
    # Library Constructors
    #
    ConstructorPrototypeString = TemplateString()
    ConstructorCallingString = TemplateString()
    # A library documents only its own constructors; a module pulls in those
    # of every dependent library, in dependency order.
    if Info.IsLibrary:
        DependentLibraryList = [Info.Module]
    else:
        DependentLibraryList = Info.DependentLibraryList
    for Lib in DependentLibraryList:
        if not Lib.ConstructorList:
            continue
        Dict = {'Function':Lib.ConstructorList}
        # Map the module type onto the template family (BASE/PEI/DXE);
        # unknown types contribute nothing.
        if Lib.ModuleType in ['BASE', 'SEC']:
            TemplateKey = 'BASE'
        elif Lib.ModuleType in ['PEI_CORE','PEIM']:
            TemplateKey = 'PEI'
        elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
                                'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_DRIVER', 'SMM_CORE']:
            TemplateKey = 'DXE'
        else:
            continue
        ConstructorPrototypeString.Append(gLibraryStructorPrototype[TemplateKey].Replace(Dict))
        ConstructorCallingString.Append(gLibraryStructorCall[TemplateKey].Replace(Dict))

    # Wrap the accumulated text in single-element lists for the ${BEGIN}/${END}
    # template expansion; empty text yields an empty list (section dropped).
    ConstructorPrototypeList = [str(ConstructorPrototypeString)] if str(ConstructorPrototypeString) != '' else []
    ConstructorCallingList = [str(ConstructorCallingString)] if str(ConstructorCallingString) != '' else []

    Dict = {
        'Type'              :   'Constructor',
        'FunctionPrototype' :   ConstructorPrototypeList,
        'FunctionCall'      :   ConstructorCallingList
    }
    if Info.IsLibrary:
        # A library only exposes the prototypes in its header.
        AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
    else:
        if Info.ModuleType in ['BASE', 'SEC']:
            AutoGenC.Append(gLibraryString['BASE'].Replace(Dict))
        elif Info.ModuleType in ['PEI_CORE','PEIM']:
            AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
        elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
                                 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_DRIVER', 'SMM_CORE']:
            AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
## Create code for library destructor
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH):
    #
    # Library Destructors
    #
    DestructorPrototypeString = TemplateString()
    DestructorCallingString = TemplateString()
    if Info.IsLibrary:
        DependentLibraryList = [Info.Module]
    else:
        DependentLibraryList = Info.DependentLibraryList
    # Destructors run in reverse dependency order (mirror of construction).
    for Lib in reversed(DependentLibraryList):
        if not Lib.DestructorList:
            continue
        Dict = {'Function':Lib.DestructorList}
        # Select the template family from the library's module type;
        # unrecognized types are skipped.
        if Lib.ModuleType in ['BASE', 'SEC']:
            TemplateKey = 'BASE'
        elif Lib.ModuleType in ['PEI_CORE','PEIM']:
            TemplateKey = 'PEI'
        elif Lib.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
                                'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_DRIVER', 'SMM_CORE']:
            TemplateKey = 'DXE'
        else:
            continue
        DestructorPrototypeString.Append(gLibraryStructorPrototype[TemplateKey].Replace(Dict))
        DestructorCallingString.Append(gLibraryStructorCall[TemplateKey].Replace(Dict))

    # Empty text -> empty list so the template's ${BEGIN}/${END} section is
    # omitted entirely.
    DestructorPrototypeList = [str(DestructorPrototypeString)] if str(DestructorPrototypeString) != '' else []
    DestructorCallingList = [str(DestructorCallingString)] if str(DestructorCallingString) != '' else []

    Dict = {
        'Type'              :   'Destructor',
        'FunctionPrototype' :   DestructorPrototypeList,
        'FunctionCall'      :   DestructorCallingList
    }
    if Info.IsLibrary:
        AutoGenH.Append("${BEGIN}${FunctionPrototype}${END}", Dict)
    else:
        if Info.ModuleType in ['BASE', 'SEC']:
            AutoGenC.Append(gLibraryString['BASE'].Replace(Dict))
        elif Info.ModuleType in ['PEI_CORE','PEIM']:
            AutoGenC.Append(gLibraryString['PEI'].Replace(Dict))
        elif Info.ModuleType in ['DXE_CORE','DXE_DRIVER','DXE_SMM_DRIVER','DXE_RUNTIME_DRIVER',
                                 'DXE_SAL_DRIVER','UEFI_DRIVER','UEFI_APPLICATION', 'SMM_DRIVER', 'SMM_CORE']:
            AutoGenC.Append(gLibraryString['DXE'].Replace(Dict))
## Create code for ModuleEntryPoint
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH):
    # Libraries and SEC/USER_DEFINED modules have no generated entry point.
    if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']:
        return
    #
    # Module Entry Points
    #
    NumEntryPoints = len(Info.Module.ModuleEntryPointList)
    # Spec versions default to 0 when the INF does not declare them.
    if 'PI_SPECIFICATION_VERSION' in Info.Module.Specification:
        PiSpecVersion = Info.Module.Specification['PI_SPECIFICATION_VERSION']
    else:
        PiSpecVersion = 0
    if 'EFI_SPECIFICATION_VERSION' in Info.Module.Specification:
        EfiSpecVersion = Info.Module.Specification['EFI_SPECIFICATION_VERSION']
    else:
        EfiSpecVersion = 0
    Dict = {
        'Function'      :   Info.Module.ModuleEntryPointList,
        'PiSpecVersion' :   PiSpecVersion,
        'EfiSpecVersion':   EfiSpecVersion
    }

    # Core modules must declare exactly one entry point.
    if Info.ModuleType in ['PEI_CORE', 'DXE_CORE', 'SMM_CORE']:
        if NumEntryPoints != 1:
            EdkLogger.error(
                "build",
                AUTOGEN_ERROR,
                '%s must have exactly one entry point' % Info.ModuleType,
                File=str(Info),
                ExtraData= ", ".join(Info.Module.ModuleEntryPointList)
                )
    # Pick the entry-point template per module type; the indexed template
    # lists use index 2 for "two or more" entry points.
    if Info.ModuleType == 'PEI_CORE':
        AutoGenC.Append(gPeiCoreEntryPointString.Replace(Dict))
        AutoGenH.Append(gPeiCoreEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == 'DXE_CORE':
        AutoGenC.Append(gDxeCoreEntryPointString.Replace(Dict))
        AutoGenH.Append(gDxeCoreEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == 'SMM_CORE':
        AutoGenC.Append(gSmmCoreEntryPointString.Replace(Dict))
    elif Info.ModuleType == 'PEIM':
        if NumEntryPoints < 2:
            AutoGenC.Append(gPeimEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gPeimEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gPeimEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType in ['DXE_RUNTIME_DRIVER','DXE_DRIVER','DXE_SMM_DRIVER',
                             'DXE_SAL_DRIVER','UEFI_DRIVER', 'SMM_DRIVER']:
        if Info.ModuleType in ['DXE_SMM_DRIVER', 'SMM_DRIVER']:
            # SMM drivers: only zero-or-one template variants exist.
            if NumEntryPoints == 0:
                AutoGenC.Append(gDxeSmmEntryPointString[0].Replace(Dict))
            else:
                AutoGenC.Append(gDxeSmmEntryPointString[1].Replace(Dict))
            AutoGenH.Append(gDxeSmmEntryPointPrototype.Replace(Dict))
        else:
            if NumEntryPoints < 2:
                AutoGenC.Append(gUefiDriverEntryPointString[NumEntryPoints].Replace(Dict))
            else:
                AutoGenC.Append(gUefiDriverEntryPointString[2].Replace(Dict))
            AutoGenH.Append(gUefiDriverEntryPointPrototype.Replace(Dict))
    elif Info.ModuleType == 'UEFI_APPLICATION':
        if NumEntryPoints < 2:
            AutoGenC.Append(gUefiApplicationEntryPointString[NumEntryPoints].Replace(Dict))
        else:
            AutoGenC.Append(gUefiApplicationEntryPointString[2].Replace(Dict))
        AutoGenH.Append(gUefiApplicationEntryPointPrototype.Replace(Dict))
## Create code for ModuleUnloadImage
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH):
    if Info.IsLibrary or Info.ModuleType in ['USER_DEFINED', 'SEC']:
        return
    #
    # Unload Image Handlers
    #
    NumUnloadImage = len(Info.Module.ModuleUnloadImageList)
    Dict = {'Count':NumUnloadImage, 'Function':Info.Module.ModuleUnloadImageList}
    # Templates exist for 0, 1 and "2 or more" handlers; clamp the index.
    AutoGenC.Append(gUefiUnloadImageString[min(NumUnloadImage, 2)].Replace(Dict))
    AutoGenH.Append(gUefiUnloadImagePrototype.Replace(Dict))

## Create code for GUID
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH):
    # GUID definitions are emitted only into a module's AutoGen.c.
    if Info.IsLibrary:
        return

    # USER_DEFINED/BASE modules may not have EFI types available.
    GuidType = "GUID" if Info.ModuleType in ["USER_DEFINED", "BASE"] else "EFI_GUID"

    if Info.GuidList:
        AutoGenC.Append("\n// Guids\n")
    #
    # GUIDs
    #
    for Key in Info.GuidList:
        AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.GuidList[Key]))
## Create code for protocol
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH):
    if Info.IsLibrary:
        return

    # USER_DEFINED/BASE modules may not have EFI types available.
    GuidType = "GUID" if Info.ModuleType in ["USER_DEFINED", "BASE"] else "EFI_GUID"

    if Info.ProtocolList:
        AutoGenC.Append("\n// Protocols\n")
    #
    # Protocol GUIDs
    #
    for Key in Info.ProtocolList:
        AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.ProtocolList[Key]))

## Create code for PPI
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH):
    if Info.IsLibrary:
        return

    GuidType = "GUID" if Info.ModuleType in ["USER_DEFINED", "BASE"] else "EFI_GUID"

    if Info.PpiList:
        AutoGenC.Append("\n// PPIs\n")
    #
    # PPI GUIDs
    #
    for Key in Info.PpiList:
        AutoGenC.Append('GLOBAL_REMOVE_IF_UNREFERENCED %s %s = %s;\n' % (GuidType, Key, Info.PpiList[Key]))

## Create code for PCD
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreatePcdCode(Info, AutoGenC, AutoGenH):
    if Info.IsLibrary:
        # Libraries only get access macros in their header.
        if Info.ModulePcdList:
            AutoGenH.Append("\n// PCD definitions\n")
            for Pcd in Info.ModulePcdList:
                CreateLibraryPcdCode(Info, AutoGenC, AutoGenH, Pcd)
    else:
        if Info.ModulePcdList:
            AutoGenH.Append("\n// Definition of PCDs used in this module\n")
            AutoGenC.Append("\n// Definition of PCDs used in this module\n")
            for Pcd in Info.ModulePcdList:
                CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd)

        if Info.LibraryPcdList:
            AutoGenH.Append("\n// Definition of PCDs used in libraries is in AutoGen.c\n")
            AutoGenC.Append("\n// Definition of PCDs used in libraries\n")
            # Deliberate: both the declaration and definition of library PCDs
            # go into AutoGen.c (AutoGenC passed twice), matching the header
            # comment emitted just above.
            for Pcd in Info.LibraryPcdList:
                CreateModulePcdCode(Info, AutoGenC, AutoGenC, Pcd)
        CreatePcdDatabaseCode(Info, AutoGenC, AutoGenH)
## Create code for unicode string definition
#
#   @param      Info        The ModuleAutoGen object
#   @param      AutoGenC    The TemplateString object for C code
#   @param      AutoGenH    The TemplateString object for header file
#
def CreateUnicodeStringCode(Info, AutoGenC, AutoGenH):
    WorkingDir = os.getcwd()
    os.chdir(Info.WorkspaceDir)
    # FIX: the original left the process in WorkspaceDir (or MetaFile.Dir) if
    # anything below raised — EdkLogger.error() raises — so both chdir sites
    # are now guarded with try/finally to always restore the caller's cwd.
    try:
        IncList = [Info.MetaFile.Dir]
        # Get all files under [Sources] section in inf file for EDK-II module
        SrcList = [F for F in Info.SourceFileList]
        if Info.AutoGenVersion < 0x00010005:
            # Get all files under the module directory for EDK-I module
            Cwd = os.getcwd()
            os.chdir(Info.MetaFile.Dir)
            try:
                for Root, Dirs, Files in os.walk("."):
                    # Skip version-control metadata directories.
                    if 'CVS' in Dirs:
                        Dirs.remove('CVS')
                    if '.svn' in Dirs:
                        Dirs.remove('.svn')
                    for File in Files:
                        File = PathClass(os.path.join(Root, File), Info.MetaFile.Dir)
                        if File in SrcList:
                            continue
                        SrcList.append(File)
            finally:
                os.chdir(Cwd)

        # -c selects the EDK-I compatible string-gathering mode.
        if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-c') > -1:
            CompatibleMode = True
        else:
            CompatibleMode = False

        #
        # -s is a temporary option dedicated for building .UNI files with ISO 639-2 language codes of EDK Shell in EDK2
        #
        if 'BUILD' in Info.BuildOption and Info.BuildOption['BUILD']['FLAGS'].find('-s') > -1:
            if CompatibleMode:
                EdkLogger.error("build", AUTOGEN_ERROR,
                                "-c and -s build options should be used exclusively",
                                ExtraData="[%s]" % str(Info))
            ShellMode = True
        else:
            ShellMode = False

        Header, Code = GetStringFiles(Info.UnicodeFileList, SrcList, IncList, ['.uni', '.inf'], Info.Name, CompatibleMode, ShellMode)
        AutoGenC.Append("\n//\n//Unicode String Pack Definition\n//\n")
        AutoGenC.Append(Code)
        AutoGenC.Append("\n")
        AutoGenH.Append("\n//\n//Unicode String ID\n//\n")
        AutoGenH.Append(Header)
        AutoGenH.Append("\n#define STRING_ARRAY_NAME %sStrings\n" % Info.Name)
    finally:
        os.chdir(WorkingDir)
header file Prologue + AutoGenH.Append(gAutoGenHPrologueString.Replace({'File':'AUTOGENH','Guid':Info.Guid.replace('-','_')})) + if Info.AutoGenVersion >= 0x00010005: + # specification macros + AutoGenH.Append(gSpecificationString.Replace({'SpecificationName':Info.Specification.keys(), + 'SpecificationValue':Info.Specification.values()})) + # header files includes + AutoGenH.Append("#include <%s>\n" % gBasicHeaderFile) + if Info.ModuleType in gModuleTypeHeaderFile \ + and gModuleTypeHeaderFile[Info.ModuleType][0] != gBasicHeaderFile: + AutoGenH.Append("#include <%s>\n" % gModuleTypeHeaderFile[Info.ModuleType][0]) + AutoGenH.Append('\nextern GUID gEfiCallerIdGuid;\n\n') + + if Info.IsLibrary: + return + + AutoGenH.Append("#define EFI_CALLER_ID_GUID \\\n %s\n" % GuidStringToGuidStructureString(Info.Guid)) + + if Info.IsLibrary: + return + # C file header + AutoGenC.Append(gAutoGenHeaderString.Replace({'FileName':'AutoGen.c'})) + if Info.AutoGenVersion >= 0x00010005: + # C file header files includes + if Info.ModuleType in gModuleTypeHeaderFile: + for Inc in gModuleTypeHeaderFile[Info.ModuleType]: + AutoGenC.Append("#include <%s>\n" % Inc) + else: + AutoGenC.Append("#include <%s>\n" % gBasicHeaderFile) + + # + # Publish the CallerId Guid + # + AutoGenC.Append('\nGLOBAL_REMOVE_IF_UNREFERENCED GUID gEfiCallerIdGuid = %s;\n' % GuidStringToGuidStructureString(Info.Guid)) + +## Create common code for header file +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateFooterCode(Info, AutoGenC, AutoGenH): + AutoGenH.Append(gAutoGenHEpilogueString) + +## Create code for a module +# +# @param Info The ModuleAutoGen object +# @param AutoGenC The TemplateString object for C code +# @param AutoGenH The TemplateString object for header file +# +def CreateCode(Info, AutoGenC, AutoGenH, StringH): + CreateHeaderCode(Info, AutoGenC, AutoGenH) + + if 
Info.AutoGenVersion >= 0x00010005: + CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH) + CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH) + CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH) + CreatePcdCode(Info, AutoGenC, AutoGenH) + CreateLibraryConstructorCode(Info, AutoGenC, AutoGenH) + CreateLibraryDestructorCode(Info, AutoGenC, AutoGenH) + CreateModuleEntryPointCode(Info, AutoGenC, AutoGenH) + CreateModuleUnloadImageCode(Info, AutoGenC, AutoGenH) + + if Info.UnicodeFileList: + FileName = "%sStrDefs.h" % Info.Name + StringH.Append(gAutoGenHeaderString.Replace({'FileName':FileName})) + StringH.Append(gAutoGenHPrologueString.Replace({'File':'STRDEFS', 'Guid':Info.Guid.replace('-','_')})) + CreateUnicodeStringCode(Info, AutoGenC, StringH) + StringH.Append("\n#endif\n") + AutoGenH.Append('#include "%s"\n' % FileName) + + CreateFooterCode(Info, AutoGenC, AutoGenH) + + # no generation of AutoGen.c for R8 modules without unicode file + if Info.AutoGenVersion < 0x00010005 and len(Info.UnicodeFileList) == 0: + AutoGenC.String = '' + +## Create the code file +# +# @param FilePath The path of code file +# @param Content The content of code file +# +# @retval True If file content is changed or file doesn't exist +# @retval False If the file exists and the content is not changed +# +def Generate(FilePath, Content): + return SaveFileOnChange(FilePath, Content, False) + diff --git a/BaseTools/Source/Python/AutoGen/GenDepex.py b/BaseTools/Source/Python/AutoGen/GenDepex.py new file mode 100644 index 0000000000..a3d07b83f2 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenDepex.py @@ -0,0 +1,441 @@ +## @file
+# This file is used to generate DEPEX file for module's dependency expression
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+## Import Modules
+#
+import sys
+import os
+import re
+import traceback
+
+from StringIO import StringIO
+from struct import pack
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+from Common.Misc import GuidStructureStringToGuidString
+from Common import EdkLogger as EdkLogger
+
+
## Regular expression for matching "DEPENDENCY_START ... DEPENDENCY_END"
#  (compiled with re.S so the expression may span multiple lines in a .dxs file)
gStartClosePattern = re.compile(".*DEPENDENCY_START(.+)DEPENDENCY_END.*", re.S)

## Mapping between module type and EFI phase
#
#  The phase selects which opcode set (see DependencyExpression.Opcode) is
#  used to encode the expression.  BASE modules have no dispatch phase, so
#  they map to None; all DXE-and-later module types (including SMM drivers
#  here) share the DXE opcode set.
gType2Phase = {
    "BASE"              :   None,
    "SEC"               :   "PEI",
    "PEI_CORE"          :   "PEI",
    "PEIM"              :   "PEI",
    "DXE_CORE"          :   "DXE",
    "DXE_DRIVER"        :   "DXE",
    "DXE_SMM_DRIVER"    :   "DXE",
    "DXE_RUNTIME_DRIVER":   "DXE",
    "DXE_SAL_DRIVER"    :   "DXE",
    "UEFI_DRIVER"       :   "DXE",
    "UEFI_APPLICATION"  :   "DXE",
    "SMM_DRIVER"        :   "DXE",
}
+
+## Convert dependency expression string into EFI internal representation
+#
+# DependencyExpression class is used to parse dependency expression string and
+# convert it into its binary form.
+#
class DependencyExpression:
    """Parse a dependency expression (depex) string and convert it into the
    binary opcode/GUID stream consumed by the PEI/DXE dispatchers.

    The input is tokenized, converted to postfix notation (shunting-yard),
    validated, optionally optimized, and finally serialized by Generate().
    """

    ## GUIDs of the DXE architectural protocols.
    #  A driver depex that is exactly the AND of this whole set is dropped by
    #  Optimize() (see the check there), since such a depex adds no dispatch
    #  constraint beyond what the DXE core already enforces.
    ArchProtocols = set([
                        '665e3ff6-46cc-11d4-9a38-0090273fc14d',     #   'gEfiBdsArchProtocolGuid'
                        '26baccb1-6f42-11d4-bce7-0080c73c8881',     #   'gEfiCpuArchProtocolGuid'
                        '26baccb2-6f42-11d4-bce7-0080c73c8881',     #   'gEfiMetronomeArchProtocolGuid'
                        '1da97072-bddc-4b30-99f1-72a0b56fff2a',     #   'gEfiMonotonicCounterArchProtocolGuid'
                        '27cfac87-46cc-11d4-9a38-0090273fc14d',     #   'gEfiRealTimeClockArchProtocolGuid'
                        '27cfac88-46cc-11d4-9a38-0090273fc14d',     #   'gEfiResetArchProtocolGuid'
                        'b7dfb4e1-052f-449f-87be-9818fc91b733',     #   'gEfiRuntimeArchProtocolGuid'
                        'a46423e3-4617-49f1-b9ff-d1bfa9115839',     #   'gEfiSecurityArchProtocolGuid'
                        '26baccb3-6f42-11d4-bce7-0080c73c8881',     #   'gEfiTimerArchProtocolGuid'
                        '6441f818-6362-4e44-b570-7dba31dd2453',     #   'gEfiVariableWriteArchProtocolGuid'
                        '1e5668e2-8481-11d4-bcf1-0080c73c8881',     #   'gEfiVariableArchProtocolGuid'
                        '665e3ff5-46cc-11d4-9a38-0090273fc14d'      #   'gEfiWatchdogTimerArchProtocolGuid'
                        ]
                        )

    ## Infix-to-postfix conversion priority of each operator.
    #  Higher number binds tighter; NOT outranks AND/OR.
    OpcodePriority = {
        "AND"   :   1,
        "OR"    :   1,
        "NOT"   :   2,
        # "SOR"   :   9,
        # "BEFORE":   9,
        # "AFTER" :   9,
    }

    ## Byte value of each opcode, per dispatch phase.
    #  BEFORE/AFTER/SOR exist only in the DXE encoding.
    Opcode = {
        "PEI"   : {
            "PUSH"  :   0x02,
            "AND"   :   0x03,
            "OR"    :   0x04,
            "NOT"   :   0x05,
            "TRUE"  :   0x06,
            "FALSE" :   0x07,
            "END"   :   0x08
        },

        "DXE"   : {
            "BEFORE":   0x00,
            "AFTER" :   0x01,
            "PUSH"  :   0x02,
            "AND"   :   0x03,
            "OR"    :   0x04,
            "NOT"   :   0x05,
            "TRUE"  :   0x06,
            "FALSE" :   0x07,
            "END"   :   0x08,
            "SOR"   :   0x09
        }
    }

    # all supported op codes and operands
    SupportedOpcode = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR"]
    SupportedOperand = ["TRUE", "FALSE"]

    # operand arity of the operators (used only as documentation of the grammar)
    OpcodeWithSingleOperand = ['NOT', 'BEFORE', 'AFTER']
    OpcodeWithTwoOperand = ['AND', 'OR']

    # op code that should not be the last one
    NonEndingOpcode = ["AND", "OR", "NOT", 'SOR']
    # op code must not present at the same time
    ExclusiveOpcode = ["BEFORE", "AFTER"]
    # op code that should be the first one if it presents
    AboveAllOpcode = ["SOR", "BEFORE", "AFTER"]

    #
    # Tokenizer pattern: open/close parentheses are individual tokens, a GUID
    # in C-structure form ("{...{...}}") is one token, and any other word
    # (opcode, operand or GUID C name) is one token.
    #
    TokenPattern = re.compile("(\(|\)|\{[^{}]+\{?[^{}]+\}?[ ]*\}|\w+)")

    ## Constructor
    #
    #   @param  Expression  The list or string of dependency expression
    #   @param  ModuleType  The type of the module using the dependency expression
    #   @param  Optimize    If True, run Optimize() on the parsed expression
    #
    def __init__(self, Expression, ModuleType, Optimize=False):
        self.ModuleType = ModuleType
        self.Phase = gType2Phase[ModuleType]
        # accept either a pre-split token list or a raw expression string
        if type(Expression) == type([]):
            self.ExpressionString = " ".join(Expression)
            self.TokenList = Expression
        else:
            self.ExpressionString = Expression
            self.GetExpressionTokenList()

        self.PostfixNotation = []   # tokens rearranged into postfix (RPN) order
        self.OpcodeList = []        # operators seen, in scan order

        self.GetPostfixNotation()
        self.ValidateOpcode()

        EdkLogger.debug(EdkLogger.DEBUG_8, repr(self))
        if Optimize:
            self.Optimize()
            EdkLogger.debug(EdkLogger.DEBUG_8, "\n Optimized: " + repr(self))

    ## Return the original (infix) token stream as a string
    def __str__(self):
        return " ".join(self.TokenList)

    ## Return a readable dump of the postfix form: one operator per line,
    #  operands appended on the operator's line
    def __repr__(self):
        WellForm = ''
        for Token in self.PostfixNotation:
            if Token in self.SupportedOpcode:
                WellForm += "\n " + Token
            else:
                WellForm += ' ' + Token
        return WellForm

    ## Split the expression string into token list
    def GetExpressionTokenList(self):
        self.TokenList = self.TokenPattern.findall(self.ExpressionString)

    ## Convert token list into postfix notation (shunting-yard algorithm)
    #
    #  Also inserts an explicit PUSH before every GUID operand (except the
    #  operand of BEFORE/AFTER, and the TRUE/FALSE literals) and guarantees
    #  the result ends with END.  Malformed input aborts via EdkLogger.error.
    def GetPostfixNotation(self):
        Stack = []              # pending operators / open parentheses
        LastToken = ''          # previous token, for adjacency validation
        for Token in self.TokenList:
            if Token == "(":
                # '(' may only follow an operator, another '(' or the start
                if LastToken not in self.SupportedOpcode + ['(', '', None]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before open parentheses",
                                    ExtraData="Near %s" % LastToken)
                Stack.append(Token)
            elif Token == ")":
                if '(' not in Stack:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
                                    ExtraData=str(self))
                elif LastToken in self.SupportedOpcode + ['', None]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before close parentheses",
                                    ExtraData="Near %s" % LastToken)
                # flush pending operators back to the matching '('
                while len(Stack) > 0:
                    if Stack[-1] == '(':
                        Stack.pop()
                        break
                    self.PostfixNotation.append(Stack.pop())
            elif Token in self.OpcodePriority:
                if Token == "NOT":
                    # unary NOT may only follow an operator, '(' or the start
                    if LastToken not in self.SupportedOpcode + ['(', '', None]:
                        EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before NOT",
                                        ExtraData="Near %s" % LastToken)
                elif LastToken in self.SupportedOpcode + ['(', '', None]:
                    # binary AND/OR needs an operand on its left
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operand before " + Token,
                                    ExtraData="Near %s" % LastToken)

                # pop operators of strictly higher priority; '>=' in the break
                # condition means equal-priority operators stay on the stack
                # (harmless for AND/OR, which are logically associative)
                while len(Stack) > 0:
                    if Stack[-1] == "(" or self.OpcodePriority[Token] >= self.OpcodePriority[Stack[-1]]:
                        break
                    self.PostfixNotation.append(Stack.pop())
                Stack.append(Token)
                self.OpcodeList.append(Token)
            else:
                if Token not in self.SupportedOpcode:
                    # not OP, take it as GUID
                    if LastToken not in self.SupportedOpcode + ['(', '', None]:
                        EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: missing operator before %s" % Token,
                                        ExtraData="Near %s" % LastToken)
                    # a GUID operand is preceded by PUSH, except the single
                    # operand of BEFORE/AFTER and the TRUE/FALSE literals
                    if len(self.OpcodeList) == 0 or self.OpcodeList[-1] not in self.ExclusiveOpcode:
                        if Token not in self.SupportedOperand:
                            self.PostfixNotation.append("PUSH")
                # check if OP is valid in this phase
                elif Token in self.Opcode[self.Phase]:
                    if Token == "END":
                        break
                    self.OpcodeList.append(Token)
                else:
                    EdkLogger.error("GenDepex", PARSER_ERROR,
                                    "Opcode=%s doesn't supported in %s stage " % (Token, self.Phase),
                                    ExtraData=str(self))
                self.PostfixNotation.append(Token)
            LastToken = Token

        # there should not be parentheses in Stack
        if '(' in Stack or ')' in Stack:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid dependency expression: mismatched parentheses",
                            ExtraData=str(self))
        # flush remaining operators and make sure the stream ends with END
        while len(Stack) > 0:
            self.PostfixNotation.append(Stack.pop())
        if self.PostfixNotation[-1] != 'END':
            self.PostfixNotation.append("END")

    ## Validate the dependency expression
    #
    #  Structural rules: SOR/BEFORE/AFTER must be the first opcode and have an
    #  operand; BEFORE/AFTER must be the only opcode; no dangling operator at
    #  the end; nothing may follow END.  Violations abort via EdkLogger.error.
    def ValidateOpcode(self):
        for Op in self.AboveAllOpcode:
            if Op in self.PostfixNotation:
                if Op != self.PostfixNotation[0]:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the first opcode in the expression" % Op,
                                    ExtraData=str(self))
                if len(self.PostfixNotation) < 3:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
                                    ExtraData=str(self))
        for Op in self.ExclusiveOpcode:
            if Op in self.OpcodeList:
                if len(self.OpcodeList) > 1:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "%s should be the only opcode in the expression" % Op,
                                    ExtraData=str(self))
                if len(self.PostfixNotation) < 3:
                    EdkLogger.error("GenDepex", PARSER_ERROR, "Missing operand for %s" % Op,
                                    ExtraData=str(self))
        if self.TokenList[-1] != 'END' and self.TokenList[-1] in self.NonEndingOpcode:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-1],
                            ExtraData=str(self))
        if self.TokenList[-1] == 'END' and self.TokenList[-2] in self.NonEndingOpcode:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra %s at the end of the dependency expression" % self.TokenList[-2],
                            ExtraData=str(self))
        if "END" in self.TokenList and "END" != self.TokenList[-1]:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Extra expressions after END",
                            ExtraData=str(self))

    ## Simply optimize the dependency expression by removing duplicated operands
    #
    #  Only applies when the whole expression uses exactly one opcode, AND or
    #  OR.  Duplicate operands are dropped; TRUE/FALSE literals are folded
    #  (x AND TRUE == x, x OR TRUE == TRUE, x OR FALSE == x, x AND FALSE ==
    #  FALSE); a PEIM depex reduced to TRUE, or a DXE-driver depex that is the
    #  AND of all architectural protocols, is removed entirely.
    def Optimize(self):
        ValidOpcode = list(set(self.OpcodeList))
        if len(ValidOpcode) != 1 or ValidOpcode[0] not in ['AND', 'OR']:
            # not a single-opcode AND/OR expression: leave it untouched
            return
        Op = ValidOpcode[0]
        NewOperand = []             # surviving operands, in original order
        AllOperand = set()          # every distinct operand seen
        for Token in self.PostfixNotation:
            if Token in self.SupportedOpcode or Token in NewOperand:
                continue
            AllOperand.add(Token)
            if Token == 'TRUE':
                if Op == 'AND':
                    continue        # x AND TRUE == x: drop the literal
                else:
                    NewOperand.append(Token)    # x OR TRUE == TRUE: short-circuit
                    break
            elif Token == 'FALSE':
                if Op == 'OR':
                    continue        # x OR FALSE == x: drop the literal
                else:
                    NewOperand.append(Token)    # x AND FALSE == FALSE: short-circuit
                    break
            NewOperand.append(Token)

        # don't generate depex if only TRUE operand left
        if self.ModuleType == 'PEIM' and len(NewOperand) == 1 and NewOperand[0] == 'TRUE':
            self.PostfixNotation = []
            return

        # don't generate depex if all operands are architecture protocols
        if self.ModuleType in ['UEFI_DRIVER', 'DXE_DRIVER', 'DXE_RUNTIME_DRIVER', 'DXE_SAL_DRIVER', 'DXE_SMM_DRIVER'] and \
           Op == 'AND' and \
           self.ArchProtocols == set([GuidStructureStringToGuidString(Guid) for Guid in AllOperand]):
            self.PostfixNotation = []
            return

        # rebuild the token list as "op1 OP op2 OP ..." and re-derive postfix
        if len(NewOperand) == 0:
            self.TokenList = list(AllOperand)
        else:
            self.TokenList = []
            while True:
                self.TokenList.append(NewOperand.pop(0))
                if NewOperand == []:
                    break
                self.TokenList.append(Op)
        self.PostfixNotation = []
        self.GetPostfixNotation()


    ## Convert a GUID value in C structure format into its binary form
    #
    #   @param  Guid    The GUID value in C structure format, i.e. eleven
    #                   comma-separated hex fields, optionally wrapped in braces
    #
    #   @retval array   The byte array representing the GUID value
    #
    def GetGuidValue(self, Guid):
        GuidValueString = Guid.replace("{", "").replace("}", "").replace(" ", "")
        GuidValueList = GuidValueString.split(",")
        if len(GuidValueList) != 11:
            EdkLogger.error("GenDepex", PARSER_ERROR, "Invalid GUID value string or opcode: %s" % Guid)
        # EFI_GUID layout: one UINT32, two UINT16, eight UINT8 (native byte order)
        return pack("1I2H8B", *(int(value, 16) for value in GuidValueList))

    ## Save the binary form of dependency expression in file
    #
    #   @param  File    The path of file. If None is given, put the data on console
    #
    #   @retval True    If the file doesn't exist or file is changed
    #   @retval False   If file exists and is not changed.
    #
    def Generate(self, File=None):
        Buffer = StringIO()
        if len(self.PostfixNotation) == 0:
            # depex was optimized away entirely: nothing to emit
            return False

        # serialize: one opcode byte per operator/literal, 16 raw bytes per GUID
        for Item in self.PostfixNotation:
            if Item in self.Opcode[self.Phase]:
                Buffer.write(pack("B", self.Opcode[self.Phase][Item]))
            elif Item in self.SupportedOpcode:
                EdkLogger.error("GenDepex", FORMAT_INVALID,
                                "Opcode [%s] is not expected in %s phase" % (Item, self.Phase),
                                ExtraData=self.ExpressionString)
            else:
                Buffer.write(self.GetGuidValue(Item))

        FilePath = ""
        FileChangeFlag = True
        if File == None:
            sys.stdout.write(Buffer.getvalue())
            FilePath = "STDOUT"
        else:
            FileChangeFlag = SaveFileOnChange(File, Buffer.getvalue(), True)

        Buffer.close()
        return FileChangeFlag
+
# Tool version and optparse metadata; "%prog" is expanded by OptionParser
# to the program name at runtime.
versionNumber = "0.04"
__version__ = "%prog Version " + versionNumber
__copyright__ = "Copyright (c) 2007-2008, Intel Corporation All rights reserved."
__usage__ = "%prog [options] [dependency_expression_file]"
+
+## Parse command line options
+#
+# @retval OptionParser
+#
## Parse command line options
#
#   @retval (Options, Args)    The parsed option object and the list of
#                              positional arguments, as returned by optparse
#
def GetOptions():
    from optparse import OptionParser

    Parser = OptionParser(description=__copyright__, version=__version__, usage=__usage__)

    # (option strings, keyword arguments) for every supported switch
    OptionTable = [
        (("-o", "--output"),
         {"dest": "OutputFile", "default": None, "metavar": "FILE",
          "help": "Specify the name of depex file to be generated"}),
        (("-t", "--module-type"),
         {"dest": "ModuleType", "default": None,
          "help": "The type of module for which the dependency expression serves"}),
        (("-e", "--dependency-expression"),
         {"dest": "Expression", "default": "",
          "help": "The string of dependency expression. If this option presents, the input file will be ignored."}),
        (("-m", "--optimize"),
         {"dest": "Optimize", "default": False, "action": "store_true",
          "help": "Do some simple optimization on the expression."}),
        (("-v", "--verbose"),
         {"dest": "verbose", "default": False, "action": "store_true",
          "help": "build with verbose information"}),
        (("-d", "--debug"),
         {"action": "store", "type": "int",
          "help": "Enable debug messages at specified level."}),
        (("-q", "--quiet"),
         {"dest": "quiet", "default": False, "action": "store_true",
          "help": "build with little information"}),
    ]
    for Switches, Keywords in OptionTable:
        Parser.add_option(*Switches, **Keywords)

    return Parser.parse_args()
+
+
+## Entrance method
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Option, Input = GetOptions()
+
+ # Set log level
+ if Option.quiet:
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ elif Option.verbose:
+ EdkLogger.SetLevel(EdkLogger.VERBOSE)
+ elif Option.debug != None:
+ EdkLogger.SetLevel(Option.debug + 1)
+ else:
+ EdkLogger.SetLevel(EdkLogger.INFO)
+
+ try:
+ if Option.ModuleType == None or Option.ModuleType not in gType2Phase:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "Module type is not specified or supported")
+
+ DxsFile = ''
+ if len(Input) > 0 and Option.Expression == "":
+ DxsFile = Input[0]
+ DxsString = open(DxsFile, 'r').read().replace("\n", " ").replace("\r", " ")
+ DxsString = gStartClosePattern.sub("\\1", DxsString)
+ elif Option.Expression != "":
+ if Option.Expression[0] == '"':
+ DxsString = Option.Expression[1:-1]
+ else:
+ DxsString = Option.Expression
+ else:
+ EdkLogger.error("GenDepex", OPTION_MISSING, "No expression string or file given")
+
+ Dpx = DependencyExpression(DxsString, Option.ModuleType, Option.Optimize)
+ if Option.OutputFile != None:
+ Dpx.Generate(Option.OutputFile)
+ else:
+ Dpx.Generate()
+ except BaseException, X:
+ EdkLogger.quiet("")
+ if Option != None and Option.debug != None:
+ EdkLogger.quiet(traceback.format_exc())
+ else:
+ EdkLogger.quiet(str(X))
+ return 1
+
+ return 0
+
# Script entry point: propagate Main()'s status (0 = success, 1 = failure)
# as the process exit code.
if __name__ == '__main__':
    sys.exit(Main())
+
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py new file mode 100644 index 0000000000..f689a8692d --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/GenMake.py @@ -0,0 +1,1389 @@ +## @file +# Create makefile for MS nmake and GNU make +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## Import Modules +# +import os +import sys +import string +import re +import os.path as path + +from Common.BuildToolError import * +from Common.Misc import * +from Common.String import * +from BuildEngine import * +import Common.GlobalData as GlobalData + +## Regular expression for finding header file inclusions +gIncludePattern = re.compile(r"^[ \t]*#[ \t]*include(?:[ \t]*(?:\\(?:\r\n|\r|\n))*[ \t]*)*(?:[\"<][ \t]*)([\w.\\/]+)(?:[ \t]*[\">])", re.MULTILINE|re.UNICODE) + +## Regular expression for matching macro used in header file inclusion +gMacroPattern = re.compile("([_A-Z][_A-Z0-9]*)[ \t]*\((.+)\)", re.UNICODE) + +## pattern for include style in R8.x code +gProtocolDefinition = "Protocol/%(HeaderKey)s/%(HeaderKey)s.h" +gGuidDefinition = "Guid/%(HeaderKey)s/%(HeaderKey)s.h" +gArchProtocolDefinition = "ArchProtocol/%(HeaderKey)s/%(HeaderKey)s.h" +gPpiDefinition = "Ppi/%(HeaderKey)s/%(HeaderKey)s.h" +gIncludeMacroConversion = { + "EFI_PROTOCOL_DEFINITION" : gProtocolDefinition, + "EFI_GUID_DEFINITION" : gGuidDefinition, + "EFI_ARCH_PROTOCOL_DEFINITION" : gArchProtocolDefinition, + "EFI_PROTOCOL_PRODUCER" : gProtocolDefinition, + "EFI_PROTOCOL_CONSUMER" : gProtocolDefinition, + 
"EFI_PROTOCOL_DEPENDENCY" : gProtocolDefinition, + "EFI_ARCH_PROTOCOL_PRODUCER" : gArchProtocolDefinition, + "EFI_ARCH_PROTOCOL_CONSUMER" : gArchProtocolDefinition, + "EFI_ARCH_PROTOCOL_DEPENDENCY" : gArchProtocolDefinition, + "EFI_PPI_DEFINITION" : gPpiDefinition, + "EFI_PPI_PRODUCER" : gPpiDefinition, + "EFI_PPI_CONSUMER" : gPpiDefinition, + "EFI_PPI_DEPENDENCY" : gPpiDefinition, +} + +## default makefile type +gMakeType = "" +if sys.platform == "win32": + gMakeType = "nmake" +else: + gMakeType = "gmake" + + +## BuildFile class +# +# This base class encapsules build file and its generation. It uses template to generate +# the content of build file. The content of build file will be got from AutoGen objects. +# +class BuildFile(object): + ## template used to generate the build file (i.e. makefile if using make) + _TEMPLATE_ = TemplateString('') + + _DEFAULT_FILE_NAME_ = "Makefile" + + ## default file name for each type of build file + _FILE_NAME_ = { + "nmake" : "Makefile", + "gmake" : "GNUmakefile" + } + + ## Fixed header string for makefile + _MAKEFILE_HEADER = '''# +# DO NOT EDIT +# This file is auto-generated by build utility +# +# Module Name: +# +# %s +# +# Abstract: +# +# Auto-generated makefile for building modules, libraries or platform +# + ''' + + ## Header string for each type of build file + _FILE_HEADER_ = { + "nmake" : _MAKEFILE_HEADER % _FILE_NAME_["nmake"], + "gmake" : _MAKEFILE_HEADER % _FILE_NAME_["gmake"] + } + + ## shell commands which can be used in build file in the form of macro + # $(CP) copy file command + # $(MV) move file command + # $(RM) remove file command + # $(MD) create dir command + # $(RD) remove dir command + # + _SHELL_CMD_ = { + "nmake" : { + "CP" : "copy /y", + "MV" : "move /y", + "RM" : "del /f /q", + "MD" : "mkdir", + "RD" : "rmdir /s /q", + }, + + "gmake" : { + "CP" : "cp -f", + "MV" : "mv -f", + "RM" : "rm -f", + "MD" : "mkdir -p", + "RD" : "rm -r -f", + } + } + + ## directory separator + _SEP_ = { + "nmake" : "\\", + 
"gmake" : "/" + } + + ## directory creation template + _MD_TEMPLATE_ = { + "nmake" : 'if not exist %(dir)s $(MD) %(dir)s', + "gmake" : "$(MD) %(dir)s" + } + + ## directory removal template + _RD_TEMPLATE_ = { + "nmake" : 'if exist %(dir)s $(RD) %(dir)s', + "gmake" : "$(RD) %(dir)s" + } + + _CD_TEMPLATE_ = { + "nmake" : 'if exist %(dir)s cd %(dir)s', + "gmake" : "test -e %(dir)s && cd %(dir)s" + } + + _MAKE_TEMPLATE_ = { + "nmake" : 'if exist %(file)s "$(MAKE)" $(MAKE_FLAGS) -f %(file)s', + "gmake" : 'test -e %(file)s && "$(MAKE)" $(MAKE_FLAGS) -f %(file)s' + } + + _INCLUDE_CMD_ = { + "nmake" : '!INCLUDE', + "gmake" : "include" + } + + _INC_FLAG_ = {"MSFT" : "/I", "GCC" : "-I", "INTEL" : "-I", "RVCT" : "-I"} + + ## Constructor of BuildFile + # + # @param AutoGenObject Object of AutoGen class + # + def __init__(self, AutoGenObject): + self._AutoGenObject = AutoGenObject + self._FileType = gMakeType + + ## Create build file + # + # @param FileType Type of build file. Only nmake and gmake are supported now. 
+ # + # @retval TRUE The build file is created or re-created successfully + # @retval FALSE The build file exists and is the same as the one to be generated + # + def Generate(self, FileType=gMakeType): + if FileType not in self._FILE_NAME_: + EdkLogger.error("build", PARAMETER_INVALID, "Invalid build type [%s]" % FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + self._FileType = FileType + FileContent = self._TEMPLATE_.Replace(self._TemplateDict) + FileName = self._FILE_NAME_[FileType] + return SaveFileOnChange(os.path.join(self._AutoGenObject.MakeFileDir, FileName), FileContent, False) + + ## Return a list of directory creation command string + # + # @param DirList The list of directory to be created + # + # @retval list The directory creation command list + # + def GetCreateDirectoryCommand(self, DirList): + return [self._MD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList] + + ## Return a list of directory removal command string + # + # @param DirList The list of directory to be removed + # + # @retval list The directory removal command list + # + def GetRemoveDirectoryCommand(self, DirList): + return [self._RD_TEMPLATE_[self._FileType] % {'dir':Dir} for Dir in DirList] + + def PlaceMacro(self, Path, MacroDefinitions={}): + if Path.startswith("$("): + return Path + else: + PathLength = len(Path) + for MacroName in MacroDefinitions: + MacroValue = MacroDefinitions[MacroName] + MacroValueLength = len(MacroValue) + if MacroValueLength <= PathLength and Path.startswith(MacroValue): + Path = "$(%s)%s" % (MacroName, Path[MacroValueLength:]) + break + return Path + +## ModuleMakefile class +# +# This class encapsules makefie and its generation for module. It uses template to generate +# the content of makefile. The content of makefile will be got from ModuleAutoGen object. 
+# +class ModuleMakefile(BuildFile): + ## template used to generate the makefile for module + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_RELATIVE_DIR = ${platform_relative_directory} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Module Macro Definition +# +MODULE_NAME = ${module_name} +MODULE_GUID = ${module_guid} +MODULE_VERSION = ${module_version} +MODULE_TYPE = ${module_type} +MODULE_FILE = ${module_file} +MODULE_FILE_BASE_NAME = ${module_file_base_name} +BASE_NAME = $(MODULE_NAME) +MODULE_RELATIVE_DIR = ${module_relative_directory} +MODULE_DIR = $(WORKSPACE)${separator}${module_relative_directory} + +MODULE_ENTRY_POINT = ${module_entry_point} +ARCH_ENTRY_POINT = ${arch_entry_point} +IMAGE_ENTRY_POINT = ${image_entry_point} + +${BEGIN}${module_extra_defines} +${END} +# +# Build Configuration Macro Definition +# +ARCH = ${architecture} +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +# PLATFORM_BUILD_DIR = ${platform_build_directory} +BUILD_DIR = ${platform_build_directory} +BIN_DIR = $(BUILD_DIR)${separator}${architecture} +LIB_DIR = $(BIN_DIR) +MODULE_BUILD_DIR = ${module_build_directory} +OUTPUT_DIR = ${module_output_directory} +DEBUG_DIR = ${module_debug_directory} +DEST_DIR_OUTPUT = $(OUTPUT_DIR) +DEST_DIR_DEBUG = $(DEBUG_DIR) + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +# +# Tools definitions specific to this module +# +${BEGIN}${module_tool_definitions} +${END} +MAKE_FILE = ${makefile_path} + +# +# Build Macro +# +${BEGIN}${file_macro} +${END} + +COMMON_DEPS = ${BEGIN}${common_dependency_file} \\ + ${END} + +# +# Overridable Target Macro Definitions +# +FORCE_REBUILD = force_build 
+INIT_TARGET = init +PCH_TARGET = +BC_TARGET = ${BEGIN}${backward_compatible_target} ${END} +CODA_TARGET = ${BEGIN}${remaining_build_target} \\ + ${END} + +# +# Default target, which will build dependent libraries in addition to source files +# + +all: mbuild + + +# +# Target used when called from platform makefile, which will bypass the build of dependent libraries +# + +pbuild: $(INIT_TARGET) $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET) + +# +# ModuleTarget +# + +mbuild: $(INIT_TARGET) $(BC_TARGET) gen_libs $(PCH_TARGET) $(CODA_TARGET) + +# +# Build Target used in multi-thread build mode, which will bypass the init and gen_libs targets +# + +tbuild: $(BC_TARGET) $(PCH_TARGET) $(CODA_TARGET) + +# +# Phony target which is used to force executing commands for a target +# +force_build: +\t-@ + +# +# Target to update the FD +# + +fds: mbuild gen_fds + +# +# Initialization target: print build information and create necessary directories +# +init: info dirs + +info: +\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)] + +dirs: +${BEGIN}\t-@${create_directory_command}\n${END} + +strdefs: +\t-@$(CP) $(DEBUG_DIR)${separator}AutoGen.h $(DEBUG_DIR)${separator}$(MODULE_NAME)StrDefs.h + +# +# GenLibsTarget +# +gen_libs: +\t${BEGIN}@"$(MAKE)" $(MAKE_FLAGS) -f ${dependent_library_build_directory}${separator}${makefile_name} +\t${END}@cd $(MODULE_BUILD_DIR) + +# +# Build Flash Device Image +# +gen_fds: +\t@"$(MAKE)" $(MAKE_FLAGS) -f $(BUILD_DIR)${separator}${makefile_name} fds +\t@cd $(MODULE_BUILD_DIR) + +# +# Individual Object Build Targets +# +${BEGIN}${file_build_target} +${END} + +# +# clean all intermediate files +# +clean: +\t${BEGIN}${clean_command} +\t${END} + +# +# clean all generated files +# +cleanall: +${BEGIN}\t${cleanall_command} +${END}\t$(RM) *.pdb *.idb > NUL 2>&1 +\t$(RM) $(BIN_DIR)${separator}$(MODULE_NAME).efi + +# +# clean all dependent libraries built +# +cleanlib: +\t${BEGIN}-@${library_build_command} cleanall +\t${END}@cd 
$(MODULE_BUILD_DIR)\n\n''') + + _FILE_MACRO_TEMPLATE = TemplateString("${macro_name} = ${BEGIN} \\\n ${source_file}${END}\n") + _BUILD_TARGET_TEMPLATE = TemplateString("${BEGIN}${target} : ${deps}\n${END}\t${cmd}\n") + + ## Constructor of ModuleMakefile + # + # @param ModuleAutoGen Object of ModuleAutoGen class + # + def __init__(self, ModuleAutoGen): + BuildFile.__init__(self, ModuleAutoGen) + self.PlatformInfo = self._AutoGenObject.PlatformInfo + + self.ResultFileList = [] + self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"] + + self.SourceFileDatabase = {} # {file type : file path} + self.DestFileDatabase = {} # {file type : file path} + self.FileBuildTargetList = [] # [(src, target string)] + self.BuildTargetList = [] # [target string] + self.PendingBuildTargetList = [] # [FileBuildRule objects] + self.CommonFileDependency = [] + self.FileListMacros = {} + self.ListFileMacros = {} + + self.FileDependency = [] + self.LibraryBuildCommandList = [] + self.LibraryFileList = [] + self.LibraryMakefileList = [] + self.LibraryBuildDirectoryList = [] + self.SystemLibraryList = [] + self.Macros = sdict() + self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"] + self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"] + self.Macros["MODULE_BUILD_DIR"] = self._AutoGenObject.Macros["MODULE_BUILD_DIR"] + self.Macros["BIN_DIR" ] = self._AutoGenObject.Macros["BIN_DIR"] + self.Macros["BUILD_DIR" ] = self._AutoGenObject.Macros["BUILD_DIR"] + self.Macros["WORKSPACE" ] = self._AutoGenObject.Macros["WORKSPACE"] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + if self._FileType not in self._SEP_: + EdkLogger.error("build", PARAMETER_INVALID, "Invalid Makefile type [%s]" % self._FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + Separator = self._SEP_[self._FileType] + + # break build if no source files and binary files are found + if 
len(self._AutoGenObject.SourceFileList) == 0 and len(self._AutoGenObject.BinaryFileList) == 0: + EdkLogger.error("build", AUTOGEN_ERROR, "No files to be built in module [%s, %s, %s]" + % (self._AutoGenObject.BuildTarget, self._AutoGenObject.ToolChain, self._AutoGenObject.Arch), + ExtraData="[%s]" % str(self._AutoGenObject)) + + # convert dependent libaries to build command + self.ProcessDependentLibrary() + if len(self._AutoGenObject.Module.ModuleEntryPointList) > 0: + ModuleEntryPoint = self._AutoGenObject.Module.ModuleEntryPointList[0] + else: + ModuleEntryPoint = "_ModuleEntryPoint" + + # Intel EBC compiler enforces EfiMain + if self._AutoGenObject.AutoGenVersion < 0x00010005 and self._AutoGenObject.Arch == "EBC": + ArchEntryPoint = "EfiMain" + else: + ArchEntryPoint = ModuleEntryPoint + + if self._AutoGenObject.Arch == "EBC": + # EBC compiler always use "EfiStart" as entry point. Only applies to R9 modules + ImageEntryPoint = "EfiStart" + elif self._AutoGenObject.AutoGenVersion < 0x00010005: + # R8 modules use entry point specified in INF file + ImageEntryPoint = ModuleEntryPoint + else: + # R9 modules always use "_ModuleEntryPoint" as entry point + ImageEntryPoint = "_ModuleEntryPoint" + + # tools definitions + ToolsDef = [] + IncPrefix = self._INC_FLAG_[self._AutoGenObject.ToolChainFamily] + for Tool in self._AutoGenObject.BuildOption: + for Attr in self._AutoGenObject.BuildOption[Tool]: + Value = self._AutoGenObject.BuildOption[Tool][Attr] + if Attr == "FAMILY": + continue + elif Attr == "PATH": + ToolsDef.append("%s = %s" % (Tool, Value)) + else: + # Don't generate MAKE_FLAGS in makefile. It's put in environment variable. 
+ if Tool == "MAKE": + continue + # Remove duplicated include path, if any + if Attr == "FLAGS": + Value = RemoveDupOption(Value, IncPrefix, self._AutoGenObject.IncludePathList) + ToolsDef.append("%s_%s = %s" % (Tool, Attr, Value)) + ToolsDef.append("") + + # convert source files and binary files to build targets + self.ResultFileList = [str(T.Target) for T in self._AutoGenObject.CodaTargetList] + if len(self.ResultFileList) == 0: + EdkLogger.error("build", AUTOGEN_ERROR, "Nothing to build", + ExtraData="[%s]" % str(self._AutoGenObject)) + + self.ProcessBuildTargetList() + + # Generate macros used to represent input files + FileMacroList = [] # macro name = file list + for FileListMacro in self.FileListMacros: + FileMacro = self._FILE_MACRO_TEMPLATE.Replace( + { + "macro_name" : FileListMacro, + "source_file" : self.FileListMacros[FileListMacro] + } + ) + FileMacroList.append(FileMacro) + + # INC_LIST is special + FileMacro = "" + IncludePathList = [] + for P in self._AutoGenObject.IncludePathList: + IncludePathList.append(IncPrefix+self.PlaceMacro(P, self.Macros)) + if FileBuildRule.INC_LIST_MACRO in self.ListFileMacros: + self.ListFileMacros[FileBuildRule.INC_LIST_MACRO].append(IncPrefix+P) + FileMacro += self._FILE_MACRO_TEMPLATE.Replace( + { + "macro_name" : "INC", + "source_file" : IncludePathList + } + ) + FileMacroList.append(FileMacro) + + # Generate macros used to represent files containing list of input files + for ListFileMacro in self.ListFileMacros: + ListFileName = os.path.join(self._AutoGenObject.OutputDir, "%s.lst" % ListFileMacro.lower()[:len(ListFileMacro)-5]) + FileMacroList.append("%s = %s" % (ListFileMacro, ListFileName)) + SaveFileOnChange( + ListFileName, + "\n".join(self.ListFileMacros[ListFileMacro]), + False + ) + + # R8 modules need <BaseName>StrDefs.h for string ID + #if self._AutoGenObject.AutoGenVersion < 0x00010005 and len(self._AutoGenObject.UnicodeFileList) > 0: + # BcTargetList = ['strdefs'] + #else: + # BcTargetList = [] + 
BcTargetList = [] + + MakefileName = self._FILE_NAME_[self._FileType] + LibraryMakeCommandList = [] + for D in self.LibraryBuildDirectoryList: + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join(D, MakefileName)} + LibraryMakeCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName), + "makefile_name" : MakefileName, + "platform_name" : self.PlatformInfo.Name, + "platform_guid" : self.PlatformInfo.Guid, + "platform_version" : self.PlatformInfo.Version, + "platform_relative_directory": self.PlatformInfo.SourceDir, + "platform_output_directory" : self.PlatformInfo.OutputDir, + + "module_name" : self._AutoGenObject.Name, + "module_guid" : self._AutoGenObject.Guid, + "module_version" : self._AutoGenObject.Version, + "module_type" : self._AutoGenObject.ModuleType, + "module_file" : self._AutoGenObject.MetaFile.Name, + "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName, + "module_relative_directory" : self._AutoGenObject.SourceDir, + "module_extra_defines" : ["%s = %s" % (k, v) for k,v in self._AutoGenObject.Module.Defines.iteritems()], + + "architecture" : self._AutoGenObject.Arch, + "toolchain_tag" : self._AutoGenObject.ToolChain, + "build_target" : self._AutoGenObject.BuildTarget, + + "platform_build_directory" : self.PlatformInfo.BuildDir, + "module_build_directory" : self._AutoGenObject.BuildDir, + "module_output_directory" : self._AutoGenObject.OutputDir, + "module_debug_directory" : self._AutoGenObject.DebugDir, + + "separator" : Separator, + "module_tool_definitions" : ToolsDef, + + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + + "module_entry_point" : ModuleEntryPoint, + "image_entry_point" : ImageEntryPoint, + "arch_entry_point" : ArchEntryPoint, + "remaining_build_target" : self.ResultFileList, + "common_dependency_file" 
: self.CommonFileDependency, + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "clean_command" : self.GetRemoveDirectoryCommand(["$(OUTPUT_DIR)"]), + "cleanall_command" : self.GetRemoveDirectoryCommand(["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]), + "dependent_library_build_directory" : self.LibraryBuildDirectoryList, + "library_build_command" : LibraryMakeCommandList, + "file_macro" : FileMacroList, + "file_build_target" : self.BuildTargetList, + "backward_compatible_target": BcTargetList, + } + + return MakefileTemplateDict + + def ProcessBuildTargetList(self): + # + # Search dependency file list for each source file + # + ForceIncludedFile = [] + for File in self._AutoGenObject.AutoGenFileList: + if File.Ext == '.h': + ForceIncludedFile.append(File) + SourceFileList = [] + for Target in self._AutoGenObject.IntroTargetList: + SourceFileList.extend(Target.Inputs) + + self.FileDependency = self.GetFileDependency( + SourceFileList, + ForceIncludedFile, + self._AutoGenObject.IncludePathList + ) + DepSet = None + for File in self.FileDependency: + if not self.FileDependency[File]: + self.FileDependency[File] = ['$(FORCE_REBUILD)'] + continue + # skip non-C files + if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c": + continue + elif DepSet == None: + DepSet = set(self.FileDependency[File]) + else: + DepSet &= set(self.FileDependency[File]) + # in case nothing in SourceFileList + if DepSet == None: + DepSet = set() + # + # Extract comman files list in the dependency files + # + for File in DepSet: + self.CommonFileDependency.append(self.PlaceMacro(File.Path, self.Macros)) + + for File in self.FileDependency: + # skip non-C files + if File.Ext not in [".c", ".C"] or File.Name == "AutoGen.c": + continue + NewDepSet = set(self.FileDependency[File]) + NewDepSet -= DepSet + self.FileDependency[File] = ["$(COMMON_DEPS)"] + list(NewDepSet) + + # Convert target description object to target string in makefile + for Type in 
self._AutoGenObject.Targets: + for T in self._AutoGenObject.Targets[Type]: + # Generate related macros if needed + if T.GenFileListMacro and T.FileListMacro not in self.FileListMacros: + self.FileListMacros[T.FileListMacro] = [] + if T.GenListFile and T.ListFileMacro not in self.ListFileMacros: + self.ListFileMacros[T.ListFileMacro] = [] + if T.GenIncListFile and T.IncListFileMacro not in self.ListFileMacros: + self.ListFileMacros[T.IncListFileMacro] = [] + + Deps = [] + # Add force-dependencies + for Dep in T.Dependencies: + Deps.append(self.PlaceMacro(str(Dep), self.Macros)) + # Add inclusion-dependencies + if len(T.Inputs) == 1 and T.Inputs[0] in self.FileDependency: + for F in self.FileDependency[T.Inputs[0]]: + Deps.append(self.PlaceMacro(str(F), self.Macros)) + # Add source-dependencies + for F in T.Inputs: + NewFile = self.PlaceMacro(str(F), self.Macros) + # In order to use file list macro as dependency + if T.GenListFile: + self.ListFileMacros[T.ListFileMacro].append(str(F)) + self.FileListMacros[T.FileListMacro].append(NewFile) + elif T.GenFileListMacro: + self.FileListMacros[T.FileListMacro].append(NewFile) + else: + Deps.append(NewFile) + + # Use file list macro as dependency + if T.GenFileListMacro: + Deps.append("$(%s)" % T.FileListMacro) + + TargetDict = { + "target" : self.PlaceMacro(T.Target.Path, self.Macros), + "cmd" : "\n\t".join(T.Commands), + "deps" : Deps + } + self.BuildTargetList.append(self._BUILD_TARGET_TEMPLATE.Replace(TargetDict)) + + ## For creating makefile targets for dependent libraries + def ProcessDependentLibrary(self): + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros)) + + ## Return a list containing source file's dependencies + # + # @param FileList The list of source files + # @param ForceInculeList The list of files which will be included forcely + # @param SearchPathList The list of search path + # + # @retval dict The 
mapping between source file path and its dependencies + # + def GetFileDependency(self, FileList, ForceInculeList, SearchPathList): + Dependency = {} + for F in FileList: + Dependency[F] = self.GetDependencyList(F, ForceInculeList, SearchPathList) + return Dependency + + ## Find dependencies for one source file + # + # By searching recursively "#include" directive in file, find out all the + # files needed by given source file. The dependecies will be only searched + # in given search path list. + # + # @param File The source file + # @param ForceInculeList The list of files which will be included forcely + # @param SearchPathList The list of search path + # + # @retval list The list of files the given source file depends on + # + def GetDependencyList(self, File, ForceList, SearchPathList): + EdkLogger.debug(EdkLogger.DEBUG_1, "Try to get dependency files for %s" % File) + FileStack = [File] + ForceList + DependencySet = set() + MacroUsedByIncludedFile = False + + if self._AutoGenObject.Arch not in gDependencyDatabase: + gDependencyDatabase[self._AutoGenObject.Arch] = {} + DepDb = gDependencyDatabase[self._AutoGenObject.Arch] + + while len(FileStack) > 0: + F = FileStack.pop() + + CurrentFileDependencyList = [] + if F in DepDb: + CurrentFileDependencyList = DepDb[F] + for Dep in CurrentFileDependencyList: + if Dep not in FileStack and Dep not in DependencySet: + FileStack.append(Dep) + else: + try: + Fd = open(F.Path, 'r') + except BaseException, X: + EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=F.Path+"\n\t"+str(X)) + + FileContent = Fd.read() + Fd.close() + if len(FileContent) == 0: + continue + + if FileContent[0] == 0xff or FileContent[0] == 0xfe: + FileContent = unicode(FileContent, "utf-16") + IncludedFileList = gIncludePattern.findall(FileContent) + + CurrentFilePath = F.Dir + for Inc in IncludedFileList: + # if there's macro used to reference header file, expand it + HeaderList = gMacroPattern.findall(Inc) + if len(HeaderList) == 1 and 
len(HeaderList[0]) == 2: + HeaderType = HeaderList[0][0] + HeaderKey = HeaderList[0][1] + if HeaderType in gIncludeMacroConversion: + Inc = gIncludeMacroConversion[HeaderType] % {"HeaderKey" : HeaderKey} + else: + # not known macro used in #include + MacroUsedByIncludedFile = True + continue + Inc = os.path.normpath(Inc) + for SearchPath in [CurrentFilePath] + SearchPathList: + FilePath = os.path.join(SearchPath, Inc) + if not os.path.exists(FilePath) or FilePath in CurrentFileDependencyList: + continue + FilePath = PathClass(FilePath) + CurrentFileDependencyList.append(FilePath) + if FilePath not in FileStack and FilePath not in DependencySet: + FileStack.append(FilePath) + break + else: + EdkLogger.debug(EdkLogger.DEBUG_9, "%s included by %s was not found"\ + "in any given path:\n\t%s" % (Inc, F, "\n\t".join(SearchPathList))) + + if not MacroUsedByIncludedFile: + if F == File: + CurrentFileDependencyList += ForceList + # + # Don't keep the file in cache if it uses macro in included file. + # So it will be scanned again if another file includes this file. + # + DepDb[F] = CurrentFileDependencyList + DependencySet.update(CurrentFileDependencyList) + + # + # If there's macro used in included file, always build the file by + # returning a empty dependency + # + if MacroUsedByIncludedFile: + DependencyList = [] + else: + DependencyList = list(DependencySet) # remove duplicate ones + + return DependencyList + + _TemplateDict = property(_CreateTemplateDict) + +## CustomMakefile class +# +# This class encapsules makefie and its generation for module. It uses template to generate +# the content of makefile. The content of makefile will be got from ModuleAutoGen object. 
+# +class CustomMakefile(BuildFile): + ## template used to generate the makefile for module with custom makefile + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_RELATIVE_DIR = ${platform_relative_directory} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Module Macro Definition +# +MODULE_NAME = ${module_name} +MODULE_GUID = ${module_guid} +MODULE_VERSION = ${module_version} +MODULE_TYPE = ${module_type} +MODULE_FILE = ${module_file} +MODULE_FILE_BASE_NAME = ${module_file_base_name} +BASE_NAME = $(MODULE_NAME) +MODULE_RELATIVE_DIR = ${module_relative_directory} +MODULE_DIR = $(WORKSPACE)${separator}${module_relative_directory} + +# +# Build Configuration Macro Definition +# +ARCH = ${architecture} +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +# PLATFORM_BUILD_DIR = ${platform_build_directory} +BUILD_DIR = ${platform_build_directory} +BIN_DIR = $(BUILD_DIR)${separator}${architecture} +LIB_DIR = $(BIN_DIR) +MODULE_BUILD_DIR = ${module_build_directory} +OUTPUT_DIR = ${module_output_directory} +DEBUG_DIR = ${module_debug_directory} +DEST_DIR_OUTPUT = $(OUTPUT_DIR) +DEST_DIR_DEBUG = $(DEBUG_DIR) + +# +# Tools definitions specific to this module +# +${BEGIN}${module_tool_definitions} +${END} +MAKE_FILE = ${makefile_path} + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +${custom_makefile_content} + +# +# Target used when called from platform makefile, which will bypass the build of dependent libraries +# + +pbuild: init all + + +# +# ModuleTarget +# + +mbuild: init all + +# +# Build Target used in multi-thread build mode, which no init target is needed +# + +tbuild: all + +# +# Initialization target: 
print build information and create necessary directories +# +init: +\t-@echo Building ... $(MODULE_DIR)${separator}$(MODULE_FILE) [$(ARCH)] +${BEGIN}\t-@${create_directory_command}\n${END}\ + +''') + + ## Constructor of CustomMakefile + # + # @param ModuleAutoGen Object of ModuleAutoGen class + # + def __init__(self, ModuleAutoGen): + BuildFile.__init__(self, ModuleAutoGen) + self.PlatformInfo = self._AutoGenObject.PlatformInfo + self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + if self._FileType not in self._AutoGenObject.CustomMakefile: + EdkLogger.error('build', OPTION_NOT_SUPPORTED, "No custom makefile for %s" % self._FileType, + ExtraData="[%s]" % str(self._AutoGenObject)) + MakefilePath = os.path.join( + self._AutoGenObject.WorkspaceDir, + self._AutoGenObject.CustomMakefile[self._FileType] + ) + try: + CustomMakefile = open(MakefilePath, 'r').read() + except: + EdkLogger.error('build', FILE_OPEN_FAILURE, File=str(self._AutoGenObject), + ExtraData=self._AutoGenObject.CustomMakefile[self._FileType]) + + # tools definitions + ToolsDef = [] + for Tool in self._AutoGenObject.BuildOption: + # Don't generate MAKE_FLAGS in makefile. It's put in environment variable. 
+ if Tool == "MAKE": + continue + for Attr in self._AutoGenObject.BuildOption[Tool]: + if Attr == "FAMILY": + continue + elif Attr == "PATH": + ToolsDef.append("%s = %s" % (Tool, self._AutoGenObject.BuildOption[Tool][Attr])) + else: + ToolsDef.append("%s_%s = %s" % (Tool, Attr, self._AutoGenObject.BuildOption[Tool][Attr])) + ToolsDef.append("") + + MakefileName = self._FILE_NAME_[self._FileType] + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(MODULE_BUILD_DIR)", MakefileName), + "platform_name" : self.PlatformInfo.Name, + "platform_guid" : self.PlatformInfo.Guid, + "platform_version" : self.PlatformInfo.Version, + "platform_relative_directory": self.PlatformInfo.SourceDir, + "platform_output_directory" : self.PlatformInfo.OutputDir, + + "module_name" : self._AutoGenObject.Name, + "module_guid" : self._AutoGenObject.Guid, + "module_version" : self._AutoGenObject.Version, + "module_type" : self._AutoGenObject.ModuleType, + "module_file" : self._AutoGenObject.MetaFile, + "module_file_base_name" : self._AutoGenObject.MetaFile.BaseName, + "module_relative_directory" : self._AutoGenObject.SourceDir, + + "architecture" : self._AutoGenObject.Arch, + "toolchain_tag" : self._AutoGenObject.ToolChain, + "build_target" : self._AutoGenObject.BuildTarget, + + "platform_build_directory" : self.PlatformInfo.BuildDir, + "module_build_directory" : self._AutoGenObject.BuildDir, + "module_output_directory" : self._AutoGenObject.OutputDir, + "module_debug_directory" : self._AutoGenObject.DebugDir, + + "separator" : Separator, + "module_tool_definitions" : ToolsDef, + + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "custom_makefile_content" : CustomMakefile + } + + return MakefileTemplateDict + + _TemplateDict = 
property(_CreateTemplateDict) + +## PlatformMakefile class +# +# This class encapsules makefie and its generation for platform. It uses +# template to generate the content of makefile. The content of makefile will be +# got from PlatformAutoGen object. +# +class PlatformMakefile(BuildFile): + ## template used to generate the makefile for platform + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} +PLATFORM_FILE = ${platform_file} +PLATFORM_DIR = $(WORKSPACE)${separator}${platform_relative_directory} +PLATFORM_OUTPUT_DIR = ${platform_output_directory} + +# +# Build Configuration Macro Definition +# +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +BUILD_DIR = ${platform_build_directory} +FV_DIR = ${platform_build_directory}${separator}FV + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +MAKE = ${make_path} +MAKE_FILE = ${makefile_path} + +# +# Default target +# +all: init build_libraries build_modules + +# +# Initialization target: print build information and create necessary directories +# +init: +\t-@echo Building ... 
$(PLATFORM_FILE) [${build_architecture_list}] +\t${BEGIN}-@${create_directory_command} +\t${END} +# +# library build target +# +libraries: init build_libraries + +# +# module build target +# +modules: init build_libraries build_modules + +# +# Build all libraries: +# +build_libraries: +${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${library_makefile_list} pbuild +${END}\t@cd $(BUILD_DIR) + +# +# Build all modules: +# +build_modules: +${BEGIN}\t@"$(MAKE)" $(MAKE_FLAGS) -f ${module_makefile_list} pbuild +${END}\t@cd $(BUILD_DIR) + +# +# Clean intermediate files +# +clean: +\t${BEGIN}-@${library_build_command} clean +\t${END}${BEGIN}-@${module_build_command} clean +\t${END}@cd $(BUILD_DIR) + +# +# Clean all generated files except to makefile +# +cleanall: +${BEGIN}\t${cleanall_command} +${END} + +# +# Clean all library files +# +cleanlib: +\t${BEGIN}-@${library_build_command} cleanall +\t${END}@cd $(BUILD_DIR)\n +''') + + ## Constructor of PlatformMakefile + # + # @param ModuleAutoGen Object of PlatformAutoGen class + # + def __init__(self, PlatformAutoGen): + BuildFile.__init__(self, PlatformAutoGen) + self.ModuleBuildCommandList = [] + self.ModuleMakefileList = [] + self.IntermediateDirectoryList = [] + self.ModuleBuildDirectoryList = [] + self.LibraryBuildDirectoryList = [] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + + PlatformInfo = self._AutoGenObject + if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]: + EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. 
Please check your tools_def.txt!", + ExtraData="[%s]" % str(self._AutoGenObject)) + + self.IntermediateDirectoryList = ["$(BUILD_DIR)"] + self.ModuleBuildDirectoryList = self.GetModuleBuildDirectoryList() + self.LibraryBuildDirectoryList = self.GetLibraryBuildDirectoryList() + + MakefileName = self._FILE_NAME_[self._FileType] + LibraryMakefileList = [] + LibraryMakeCommandList = [] + for D in self.LibraryBuildDirectoryList: + D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir}) + Makefile = os.path.join(D, MakefileName) + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile} + LibraryMakefileList.append(Makefile) + LibraryMakeCommandList.append(Command) + + ModuleMakefileList = [] + ModuleMakeCommandList = [] + for D in self.ModuleBuildDirectoryList: + D = self.PlaceMacro(D, {"BUILD_DIR":PlatformInfo.BuildDir}) + Makefile = os.path.join(D, MakefileName) + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":Makefile} + ModuleMakefileList.append(Makefile) + ModuleMakeCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName), + "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"], + "makefile_name" : MakefileName, + "platform_name" : PlatformInfo.Name, + "platform_guid" : PlatformInfo.Guid, + "platform_version" : PlatformInfo.Version, + "platform_file" : self._AutoGenObject.MetaFile, + "platform_relative_directory": PlatformInfo.SourceDir, + "platform_output_directory" : PlatformInfo.OutputDir, + "platform_build_directory" : PlatformInfo.BuildDir, + + "toolchain_tag" : PlatformInfo.ToolChain, + "build_target" : PlatformInfo.BuildTarget, + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + "build_architecture_list" : self._AutoGenObject.Arch, + "architecture" : self._AutoGenObject.Arch, + "separator" : Separator, + "create_directory_command" : 
self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList), + "library_makefile_list" : LibraryMakefileList, + "module_makefile_list" : ModuleMakefileList, + "library_build_command" : LibraryMakeCommandList, + "module_build_command" : ModuleMakeCommandList, + } + + return MakefileTemplateDict + + ## Get the root directory list for intermediate files of all modules build + # + # @retval list The list of directory + # + def GetModuleBuildDirectoryList(self): + DirList = [] + for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir)) + return DirList + + ## Get the root directory list for intermediate files of all libraries build + # + # @retval list The list of directory + # + def GetLibraryBuildDirectoryList(self): + DirList = [] + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir)) + return DirList + + _TemplateDict = property(_CreateTemplateDict) + +## TopLevelMakefile class +# +# This class encapsules makefie and its generation for entrance makefile. It +# uses template to generate the content of makefile. The content of makefile +# will be got from WorkspaceAutoGen object. 
+# +class TopLevelMakefile(BuildFile): + ## template used to generate toplevel makefile + _TEMPLATE_ = TemplateString('''\ +${makefile_header} + +# +# Platform Macro Definition +# +PLATFORM_NAME = ${platform_name} +PLATFORM_GUID = ${platform_guid} +PLATFORM_VERSION = ${platform_version} + +# +# Build Configuration Macro Definition +# +TOOLCHAIN = ${toolchain_tag} +TOOLCHAIN_TAG = ${toolchain_tag} +TARGET = ${build_target} + +# +# Build Directory Macro Definition +# +BUILD_DIR = ${platform_build_directory} +FV_DIR = ${platform_build_directory}${separator}FV + +# +# Shell Command Macro +# +${BEGIN}${shell_command_code} = ${shell_command} +${END} + +MAKE = ${make_path} +MAKE_FILE = ${makefile_path} + +# +# Default target +# +all: modules fds + +# +# Initialization target: print build information and create necessary directories +# +init: +\t-@ +\t${BEGIN}-@${create_directory_command} +\t${END} +# +# library build target +# +libraries: init +${BEGIN}\t@cd $(BUILD_DIR)${separator}${arch} && "$(MAKE)" $(MAKE_FLAGS) libraries +${END}\t@cd $(BUILD_DIR) + +# +# module build target +# +modules: init +${BEGIN}\t@cd $(BUILD_DIR)${separator}${arch} && "$(MAKE)" $(MAKE_FLAGS) modules +${END}\t@cd $(BUILD_DIR) + +# +# Flash Device Image Target +# +fds: init +\t-@cd $(FV_DIR) +${BEGIN}\tGenFds -f ${fdf_file} -o $(BUILD_DIR) -t $(TOOLCHAIN) -b $(TARGET) -p ${active_platform} -a ${build_architecture_list} ${extra_options}${END}${BEGIN} -r ${fd} ${END}${BEGIN} -i ${fv} ${END}${BEGIN} -D ${macro} ${END} + +# +# run command for emulator platform only +# +run: +\tcd $(BUILD_DIR)${separator}IA32 && ".${separator}SecMain" +\tcd $(BUILD_DIR) + +# +# Clean intermediate files +# +clean: +${BEGIN}\t-@${sub_build_command} clean +${END}\t@cd $(BUILD_DIR) + +# +# Clean all generated files except to makefile +# +cleanall: +${BEGIN}\t${cleanall_command} +${END} + +# +# Clean all library files +# +cleanlib: +${BEGIN}\t-@${sub_build_command} cleanlib +${END}\t@cd $(BUILD_DIR)\n +''') + + ## 
Constructor of TopLevelMakefile + # + # @param Workspace Object of WorkspaceAutoGen class + # + def __init__(self, Workspace): + BuildFile.__init__(self, Workspace) + self.IntermediateDirectoryList = [] + + # Compose a dict object containing information used to do replacement in template + def _CreateTemplateDict(self): + Separator = self._SEP_[self._FileType] + + # any platform autogen object is ok because we just need common information + PlatformInfo = self._AutoGenObject + + if "MAKE" not in PlatformInfo.ToolDefinition or "PATH" not in PlatformInfo.ToolDefinition["MAKE"]: + EdkLogger.error("build", OPTION_MISSING, "No MAKE command defined. Please check your tools_def.txt!", + ExtraData="[%s]" % str(self._AutoGenObject)) + + for Arch in PlatformInfo.ArchList: + self.IntermediateDirectoryList.append(Separator.join(["$(BUILD_DIR)", Arch])) + self.IntermediateDirectoryList.append("$(FV_DIR)") + + # TRICK: for not generating GenFds call in makefile if no FDF file + MacroList = [] + if PlatformInfo.FdfFile != None and PlatformInfo.FdfFile != "": + FdfFileList = [PlatformInfo.FdfFile] + # macros passed to GenFds + for MacroName in GlobalData.gGlobalDefines: + MacroList.append('"%s=%s"' % (MacroName, GlobalData.gGlobalDefines[MacroName])) + else: + FdfFileList = [] + + # pass extra common options to external program called in makefile, currently GenFds.exe + ExtraOption = '' + LogLevel = EdkLogger.GetLevel() + if LogLevel == EdkLogger.VERBOSE: + ExtraOption += " -v" + elif LogLevel <= EdkLogger.DEBUG_9: + ExtraOption += " -d %d" % (LogLevel - 1) + elif LogLevel == EdkLogger.QUIET: + ExtraOption += " -q" + + if GlobalData.gCaseInsensitive: + ExtraOption += " -c" + + MakefileName = self._FILE_NAME_[self._FileType] + SubBuildCommandList = [] + for A in PlatformInfo.ArchList: + Command = self._MAKE_TEMPLATE_[self._FileType] % {"file":os.path.join("$(BUILD_DIR)", A, MakefileName)} + SubBuildCommandList.append(Command) + + MakefileTemplateDict = { + "makefile_header" : 
self._FILE_HEADER_[self._FileType], + "makefile_path" : os.path.join("$(BUILD_DIR)", MakefileName), + "make_path" : PlatformInfo.ToolDefinition["MAKE"]["PATH"], + "platform_name" : PlatformInfo.Name, + "platform_guid" : PlatformInfo.Guid, + "platform_version" : PlatformInfo.Version, + "platform_build_directory" : PlatformInfo.BuildDir, + + "toolchain_tag" : PlatformInfo.ToolChain, + "build_target" : PlatformInfo.BuildTarget, + "shell_command_code" : self._SHELL_CMD_[self._FileType].keys(), + "shell_command" : self._SHELL_CMD_[self._FileType].values(), + 'arch' : list(PlatformInfo.ArchList), + "build_architecture_list" : ','.join(PlatformInfo.ArchList), + "separator" : Separator, + "create_directory_command" : self.GetCreateDirectoryCommand(self.IntermediateDirectoryList), + "cleanall_command" : self.GetRemoveDirectoryCommand(self.IntermediateDirectoryList), + "sub_build_command" : SubBuildCommandList, + "fdf_file" : FdfFileList, + "active_platform" : str(PlatformInfo), + "fd" : PlatformInfo.FdTargetList, + "fv" : PlatformInfo.FvTargetList, + "extra_options" : ExtraOption, + "macro" : MacroList, + } + + return MakefileTemplateDict + + ## Get the root directory list for intermediate files of all modules build + # + # @retval list The list of directory + # + def GetModuleBuildDirectoryList(self): + DirList = [] + for ModuleAutoGen in self._AutoGenObject.ModuleAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, ModuleAutoGen.BuildDir)) + return DirList + + ## Get the root directory list for intermediate files of all libraries build + # + # @retval list The list of directory + # + def GetLibraryBuildDirectoryList(self): + DirList = [] + for LibraryAutoGen in self._AutoGenObject.LibraryAutoGenList: + DirList.append(os.path.join(self._AutoGenObject.BuildDir, LibraryAutoGen.BuildDir)) + return DirList + + _TemplateDict = property(_CreateTemplateDict) + +# This acts like the main() function for the script, unless it is 'import'ed into another script. 
+if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/AutoGen/StrGather.py b/BaseTools/Source/Python/AutoGen/StrGather.py new file mode 100644 index 0000000000..e82ad3a10b --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/StrGather.py @@ -0,0 +1,532 @@ +# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+#
+#This file is used to parse a strings file and create or add to a string database file.
+#
+
+##
+# Import Modules
+#
+import re
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from UniClassObject import *
+
+##
+# Static definitions
+#
# EFI_HII_SIBT_* opcodes: HII string information block types emitted when
# building the string package binary (values per the UEFI HII specification).
EFI_HII_SIBT_END = '0x00'
EFI_HII_SIBT_STRING_SCSU = '0x10'
EFI_HII_SIBT_STRING_SCSU_FONT = '0x11'
EFI_HII_SIBT_STRINGS_SCSU = '0x12'
EFI_HII_SIBT_STRINGS_SCSU_FONT = '0x13'
EFI_HII_SIBT_STRING_UCS2 = '0x14'
EFI_HII_SIBT_STRING_UCS2_FONT = '0x15'
EFI_HII_SIBT_STRINGS_UCS2 = '0x16'
EFI_HII_SIBT_STRINGS_UCS2_FONT = '0x17'
EFI_HII_SIBT_DUPLICATE = '0x20'
EFI_HII_SIBT_SKIP2 = '0x21'
EFI_HII_SIBT_SKIP1 = '0x22'
EFI_HII_SIBT_EXT1 = '0x30'
EFI_HII_SIBT_EXT2 = '0x31'
EFI_HII_SIBT_EXT4 = '0x32'
EFI_HII_SIBT_FONT = '0x40'

# HII package type codes.
EFI_HII_PACKAGE_STRINGS = '0x04'
EFI_HII_PACKAGE_FORM = '0x02'

# Defaults used by this module when generating output packages/blocks.
StringPackageType = EFI_HII_PACKAGE_STRINGS
StringPackageForm = EFI_HII_PACKAGE_FORM
StringBlockType = EFI_HII_SIBT_STRING_UCS2
StringSkipType = EFI_HII_SIBT_SKIP2

# Prefix prepended to every generated hex byte string.
HexHeader = '0x'

# Text fragments used when composing the generated .h/.c file content.
COMMENT = '// '
DEFINE_STR = '#define'
COMMENT_DEFINE_STR = COMMENT + DEFINE_STR
NOT_REFERENCED = 'not referenced'
COMMENT_NOT_REFERENCED = ' ' + COMMENT + NOT_REFERENCED
CHAR_ARRAY_DEFIN = 'unsigned char'
COMMON_FILE_NAME = 'Strings'
OFFSET = 'offset'
STRING = 'string'
TO = 'to'
# Matches STRING_TOKEN(STR_XXX) references in source files; group 1 is the
# token identifier.
STRING_TOKEN = re.compile('STRING_TOKEN *\(([A-Z0-9_]+) *\)', re.MULTILINE | re.UNICODE)

# Byte sizes of the fields that make up the HII string package header.
EFI_HII_ARRAY_SIZE_LENGTH = 4
EFI_HII_PACKAGE_HEADER_LENGTH = 4
EFI_HII_HDR_SIZE_LENGTH = 4
EFI_HII_STRING_OFFSET_LENGTH = 4
EFI_STRING_ID = 1
EFI_STRING_ID_LENGTH = 2
EFI_HII_LANGUAGE_WINDOW = 0
EFI_HII_LANGUAGE_WINDOW_LENGTH = 2
EFI_HII_LANGUAGE_WINDOW_NUMBER = 16
# Total string package header length, derived from the field sizes above.
EFI_HII_STRING_PACKAGE_HDR_LENGTH = EFI_HII_PACKAGE_HEADER_LENGTH + EFI_HII_HDR_SIZE_LENGTH + EFI_HII_STRING_OFFSET_LENGTH + EFI_HII_LANGUAGE_WINDOW_LENGTH * EFI_HII_LANGUAGE_WINDOW_NUMBER + EFI_STRING_ID_LENGTH

# Boilerplate comment header emitted at the top of each generated file.
H_C_FILE_HEADER = ['//', \
                   '// DO NOT EDIT -- auto-generated file', \
                   '//', \
                   '// This file is generated by the StrGather utility', \
                   '//']
# Names of the two mandatory strings expected for every language.
LANGUAGE_NAME_STRING_NAME = '$LANGUAGE_NAME'
PRINTABLE_LANGUAGE_NAME_STRING_NAME = '$PRINTABLE_LANGUAGE_NAME'
+
+## Convert a dec number to a hex string
+#
+# Convert a dec number to a formatted hex string in length digit
+# The digit is set to default 8
+# The hex string starts with "0x"
+# DecToHexStr(1000) is '0x000003E8'
+# DecToHexStr(1000, 6) is '0x0003E8'
+#
+# @param Dec: The number in dec format
+# @param Digit: The needed digit of hex string
+#
+# @retval: The formatted hex string
+#
def DecToHexStr(Dec, Digit = 8):
    # Convert a decimal number to a zero-padded hex string of Digit digits,
    # prefixed with "0x".  DecToHexStr(1000) -> '0x000003E8'.
    #
    # @param Dec:   The number (or numeric string) to convert
    # @param Digit: The number of hex digits to emit (default 8)
    #
    # @retval: The formatted hex string
    #
    # Use the '*' width-from-argument form instead of building a format
    # string and eval()-ing it (the original approach was needlessly unsafe).
    return '0x%0*X' % (Digit, int(Dec))
+
+## Convert a dec number to a hex list
+#
+# Convert a dec number to a formatted hex list in size digit
+# The digit is set to default 8
+# DecToHexList(1000) is ['0xE8', '0x03', '0x00', '0x00']
+# DecToHexList(1000, 6) is ['0xE8', '0x03', '0x00']
+#
+# @param Dec: The number in dec format
+# @param Digit: The needed digit of hex list
+#
+# @retval: A list for formatted hex string
+#
def DecToHexList(Dec, Digit = 8):
    # Convert a decimal number to a list of per-byte hex strings, least
    # significant byte first.  DecToHexList(1000) ->
    # ['0xE8', '0x03', '0x00', '0x00'].
    #
    # @param Dec:   The number (or numeric string) to convert
    # @param Digit: The number of hex digits to cover (default 8 = 4 bytes)
    #
    # @retval: A list of formatted per-byte hex strings
    #
    # Format directly with the '*' width specifier instead of eval()-ing a
    # constructed format string (safer, same output).
    Hex = '%0*X' % (Digit, int(Dec))
    return ['0x' + Hex[Bit:Bit + 2] for Bit in range(Digit - 2, -1, -2)]
+
+## Convert a acsii string to a hex list
+#
+# Convert a acsii string to a formatted hex list
+# AscToHexList('en-US') is ['0x65', '0x6E', '0x2D', '0x55', '0x53']
+#
+# @param Ascii: The acsii string
+#
+# @retval: A list for formatted hex string
+#
def AscToHexList(Ascii):
    # Convert an ASCII string to a list of per-character hex strings.
    # AscToHexList('en-US') -> ['0x65', '0x6E', '0x2D', '0x55', '0x53'].
    #
    # @param Ascii: The ASCII string
    #
    # @retval: A list of formatted hex strings
    #
    # Use %02X (zero padding) rather than the original %2X, which space-padded
    # and produced invalid tokens like '0x A' for code points below 0x10.
    # Output is unchanged for all printable characters (>= 0x10).
    return ['0x%02X' % ord(Item) for Item in Ascii]
+
+## Create header of .h file
+#
+# Create a header of .h file
+#
+# @param BaseName: The basename of strings
+#
+# @retval Str: A string for .h file header
+#
def CreateHFileHeader(BaseName):
    ## Build the .h file preamble: the auto-generated banner followed by an
    #  include guard derived from the module base name.
    #
    # @param BaseName: The basename of strings
    #
    # @retval: The header text
    Guard = '_' + BaseName.upper() + '_STRINGS_DEFINE_H_'
    Str = ''
    for BannerLine in H_C_FILE_HEADER:
        Str = WriteLine(Str, BannerLine)
    Str = WriteLine(Str, '#ifndef ' + Guard)
    Str = WriteLine(Str, '#define ' + Guard)
    return Str
+
+## Create content of .h file
+#
+# Create content of .h file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass: A UniObjectClass instance
+#
+# @retval Str: A string of .h file content
+#
def CreateHFileContent(BaseName, UniObjectClass):
    # Emit one '#define STR_NAME  0xNNNN' line per string token plus the
    # extern declaration of the generated string array.  Tokens 0 and 1
    # ($LANGUAGE_NAME / $PRINTABLE_LANGUAGE_NAME) are always emitted as
    # comments since code never references them directly.
    #
    # @param BaseName:       The basename of strings
    # @param UniObjectClass: A UniObjectClass instance
    #
    # @retval Str: A string of .h file content
    Str = ''
    ValueStartPtr = 60  # column at which the token value starts
    Line = COMMENT_DEFINE_STR + ' ' + LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(0, 4) + COMMENT_NOT_REFERENCED
    Str = WriteLine(Str, Line)
    Line = COMMENT_DEFINE_STR + ' ' + PRINTABLE_LANGUAGE_NAME_STRING_NAME + ' ' * (ValueStartPtr - len(DEFINE_STR + PRINTABLE_LANGUAGE_NAME_STRING_NAME)) + DecToHexStr(1, 4) + COMMENT_NOT_REFERENCED
    Str = WriteLine(Str, Line)
    # The first language's list defines names and tokens for all languages.
    FirstLangStringList = UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[0][0]]
    for Index in range(2, len(FirstLangStringList)):
        StringItem = FirstLangStringList[Index]
        Name = StringItem.StringName
        Token = StringItem.Token
        Referenced = StringItem.Referenced
        if Name is not None:  # 'is not None' instead of '!= None'
            Padding = ' ' * (ValueStartPtr - len(DEFINE_STR + Name))
            if Referenced:
                Line = DEFINE_STR + ' ' + Name + Padding + DecToHexStr(Token, 4)
            else:
                # Unreferenced strings stay visible but commented out.
                Line = COMMENT_DEFINE_STR + ' ' + Name + Padding + DecToHexStr(Token, 4) + COMMENT_NOT_REFERENCED
            Str = WriteLine(Str, Line)

    Str = WriteLine(Str, '')
    Str = WriteLine(Str, 'extern unsigned char ' + BaseName + 'Strings[];')
    return Str
+
+## Create a complete .h file
+#
+# Create a complet .h file with file header and file content
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass: A UniObjectClass instance
+#
+# @retval Str: A string of complete .h file
+#
def CreateHFile(BaseName, UniObjectClass):
    ## Produce the complete .h file body: the content followed by a
    #  trailing newline.
    #
    # @param BaseName:       The basename of strings
    # @param UniObjectClass: A UniObjectClass instance
    #
    # @retval: The complete .h file text
    return WriteLine('', CreateHFileContent(BaseName, UniObjectClass))
+
+## Create header of .c file
+#
+# Create a header of .c file
+#
+# @retval Str: A string for .c file header
+#
def CreateCFileHeader():
    ## Build the .c file banner: every H_C_FILE_HEADER line terminated by a
    #  newline.  Equivalent to appending each line via WriteLine.
    #
    # @retval: The banner text
    return '\n'.join(H_C_FILE_HEADER) + '\n'
+
+## Create a formatted string all items in an array
+#
+# Use ',' to join each item in an array, and break an new line when reaching the width (default is 16)
+#
+# @param Array: The array need to be formatted
+# @param Width: The line length, the default value is set to 16
+#
+# @retval ArrayItem: A string for all formatted array items
+#
def CreateArrayItem(Array, Width = 16):
    ## Join the items of Array with ', ', starting each line with a single
    #  space and wrapping after Width items per line.  The final line is
    #  right-stripped (keeping the trailing comma).
    #
    # @param Array: The items to format
    # @param Width: Items per line (default 16)
    #
    # @retval: The formatted text
    ItemsOnLine = 0
    CurrentLine = ' '
    Output = ''
    for Element in Array:
        if ItemsOnLine >= Width:
            # Line is full: flush it (with its trailing ', ') and start anew.
            Output = WriteLine(Output, CurrentLine)
            CurrentLine = ' '
            ItemsOnLine = 0
        CurrentLine = CurrentLine + Element + ', '
        ItemsOnLine += 1
    return Write(Output, CurrentLine.rstrip())
+
+## CreateCFileStringValue
+#
+# Create a line with string value
+#
+# @param Value: Value of the string
+#
+# @retval Str: A formatted string with string value
+#
+
def CreateCFileStringValue(Value):
    ## Emit one string block: the UCS-2 block-type byte followed by the
    #  string's byte list, formatted as array items on its own line.
    #
    # @param Value: Byte list of the string value
    #
    # @retval: The formatted line (newline-terminated)
    return WriteLine('', CreateArrayItem([StringBlockType] + Value))
+
+
+## Create content of .c file
+#
+# Create content of .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass: A UniObjectClass instance
+#
+# @retval Str: A string of .c file content
+#
def CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode):
    # Build the body of the generated .c file: one HII string package per
    # language in UniObjectClass, preceded by either a framework package
    # header (compatible mode) or a total-size word (UEFI mode).
    #
    # @param BaseName:         Base name used for the generated array symbol
    # @param UniObjectClass:   Parsed .uni string database
    # @param IsCompatibleMode: True to emit the framework-compatible header
    #
    # @retval AllStr: Complete array-initializer text
    #
    # Init array length
    #
    TotalLength = EFI_HII_ARRAY_SIZE_LENGTH
    Str = ''
    Offset = 0

    #
    # Create lines for each language's strings
    #
    for IndexI in range(len(UniObjectClass.LanguageDef)):
        Language = UniObjectClass.LanguageDef[IndexI][0]
        LangPrintName = UniObjectClass.LanguageDef[IndexI][1]  # NOTE(review): unused here

        StrStringValue = ''
        ArrayLength = 0
        # Counts a run of consecutive strings borrowed from another language;
        # each run is emitted as one SKIP2 block instead of string data.
        NumberOfUseOhterLangDef = 0
        Index = 0
        # Tokens start at 1; token 0 handling is folded into the header.
        for IndexJ in range(1, len(UniObjectClass.OrderedStringList[UniObjectClass.LanguageDef[IndexI][0]])):
            Item = UniObjectClass.FindByToken(IndexJ, Language)
            Name = Item.StringName
            Value = Item.StringValueByteList
            Referenced = Item.Referenced
            Token = Item.Token
            Length = Item.Length
            UseOtherLangDef = Item.UseOtherLangDef

            if UseOtherLangDef != '' and Referenced:
                # Borrowed string: extend the pending skip run.
                NumberOfUseOhterLangDef = NumberOfUseOhterLangDef + 1
                Index = Index + 1
            else:
                if NumberOfUseOhterLangDef > 0:
                    # Flush the pending run as an EFI_HII_SIBT_SKIP2 block.
                    StrStringValue = WriteLine(StrStringValue, CreateArrayItem([StringSkipType] + DecToHexList(NumberOfUseOhterLangDef, 4)))
                    NumberOfUseOhterLangDef = 0
                    ArrayLength = ArrayLength + 3  # SKIP2: 1 type byte + 2 count bytes
                if Referenced and Item.Token > 0:
                    Index = Index + 1
                    StrStringValue = WriteLine(StrStringValue, "// %s: %s:%s" % (DecToHexStr(Index, 4), Name, DecToHexStr(Token, 4)))
                    StrStringValue = Write(StrStringValue, CreateCFileStringValue(Value))
                    # NOTE(review): this Offset accumulation is overwritten
                    # unconditionally below; it appears to be dead code.
                    Offset = Offset + Length
                    ArrayLength = ArrayLength + Item.Length + 1 # 1 is for the length of string type
        # NOTE(review): a skip run still pending when the loop ends (borrowed
        # strings at the end of the list) is never flushed -- confirm that
        # case cannot occur.

        #
        # EFI_HII_PACKAGE_HEADER
        #
        # Offset of the string data: fixed header size plus the
        # NUL-terminated language-name string.
        Offset = EFI_HII_STRING_PACKAGE_HDR_LENGTH + len(Language) + 1
        ArrayLength = Offset + ArrayLength + 1

        #
        # Create PACKAGE HEADER
        #
        Str = WriteLine(Str, '// PACKAGE HEADER\n')
        TotalLength = TotalLength + ArrayLength

        List = DecToHexList(ArrayLength, 6) + \
               [StringPackageType] + \
               DecToHexList(Offset) + \
               DecToHexList(Offset) + \
               DecToHexList(EFI_HII_LANGUAGE_WINDOW, EFI_HII_LANGUAGE_WINDOW_LENGTH * 2) * EFI_HII_LANGUAGE_WINDOW_NUMBER + \
               DecToHexList(EFI_STRING_ID, 4) + \
               AscToHexList(Language) + \
               DecToHexList(0, 2)
        Str = WriteLine(Str, CreateArrayItem(List, 16) + '\n')

        #
        # Create PACKAGE DATA
        #
        Str = WriteLine(Str, '// PACKAGE DATA\n')
        Str = Write(Str, StrStringValue)

        #
        # Add an EFI_HII_SIBT_END at last
        #
        Str = WriteLine(Str, ' ' + EFI_HII_SIBT_END + ",")

    #
    # Create line for string variable name
    # "unsigned char $(BaseName)Strings[] = {"
    #
    AllStr = WriteLine('', CHAR_ARRAY_DEFIN + ' ' + BaseName + COMMON_FILE_NAME + '[] = {\n' )

    #
    # Create FRAMEWORK_EFI_HII_PACK_HEADER in compatible mode
    #
    if IsCompatibleMode:
        AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Length')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength + 2)) + '\n')
        AllStr = WriteLine(AllStr, '// FRAMEWORK PACKAGE HEADER Type')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(2, 4)) + '\n')

    #
    # Create whole array length in UEFI mode
    #
    if not IsCompatibleMode:
        AllStr = WriteLine(AllStr, '// STRGATHER_OUTPUT_HEADER')
        AllStr = WriteLine(AllStr, CreateArrayItem(DecToHexList(TotalLength)) + '\n')

    #
    # Join package data
    #
    AllStr = Write(AllStr, Str)

    return AllStr
+
+## Create end of .c file
+#
+# Create end of .c file
+#
+# @retval Str: A string of .h file end
+#
def CreateCFileEnd():
    ## Close the generated array definition.
    #
    # @retval: The closing-brace text (no trailing newline)
    Terminator = '};'
    return Write('', Terminator)
+
+## Create a .c file
+#
+# Create a complete .c file
+#
+# @param BaseName: The basename of strings
+# @param UniObjectClass: A UniObjectClass instance
+#
+# @retval CFile: A string of complete .c file
+#
def CreateCFile(BaseName, UniObjectClass, IsCompatibleMode):
    ## Assemble the complete .c file: package content followed by the
    #  closing brace, each newline-terminated.  (Banner emission via
    #  CreateCFileHeader is intentionally omitted, as before.)
    #
    # @param BaseName:         The basename of strings
    # @param UniObjectClass:   A UniObjectClass instance
    # @param IsCompatibleMode: Framework-compatible output mode flag
    #
    # @retval: The complete .c file text
    Body = CreateCFileContent(BaseName, UniObjectClass, IsCompatibleMode)
    return WriteLine(WriteLine('', Body), CreateCFileEnd())
+
+## GetFileList
+#
+# Get a list for all files
+#
+# @param IncludeList: A list of all path to be searched
+# @param SkipList: A list of all types of file could be skipped
+#
+# @retval FileList: A list of all files found
+#
def GetFileList(SourceFileList, IncludeList, SkipList):
    # Resolve each source file against the include directory list, returning
    # full paths of the files that exist and are not in the skip list.
    #
    # @param SourceFileList: Source file objects (each with a .Path attribute)
    # @param IncludeList:    Directories to search
    # @param SkipList:       File extensions to exclude (e.g. ['.uni'])
    #
    # @retval FileList: Full paths of all files found
    if IncludeList is None:
        EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, "Include path for unicode file is not defined")

    FileList = []
    if SkipList is None:
        SkipList = []

    for File in SourceFileList:
        for Dir in IncludeList:
            if not os.path.exists(Dir):
                continue
            # Use a separate name for the joined path.  The original code
            # rebound the loop variable File here, so on the next include
            # directory File.Path dereferenced a string and raised
            # AttributeError.
            FullPath = os.path.join(Dir, File.Path)
            #
            # Ignore directories
            #
            if not os.path.isfile(FullPath):
                continue
            #
            # Ignore file listed in skip list
            #
            IsSkip = False
            for Skip in SkipList:
                if os.path.splitext(FullPath)[1].upper() == Skip.upper():
                    EdkLogger.verbose("Skipped %s for string token uses search" % FullPath)
                    IsSkip = True
                    break

            if not IsSkip:
                FileList.append(FullPath)

            # First matching directory wins; stop searching for this file.
            break

    return FileList
+
+## SearchString
+#
+# Search whether all string defined in UniObjectClass are referenced
+# All string used should be set to Referenced
+#
+# @param UniObjectClass: Input UniObjectClass
+# @param FileList: Search path list
+#
+# @retval UniObjectClass: UniObjectClass after searched
+#
def SearchString(UniObjectClass, FileList):
    # Scan each file in FileList for STRING_TOKEN(...) references and mark
    # every referenced string in UniObjectClass, then re-token the database.
    #
    # @param UniObjectClass: Input UniObjectClass
    # @param FileList:       Paths of source files to scan
    #
    # @retval UniObjectClass: The same object, updated in place
    if not FileList:
        # Nothing to scan (empty or None list): leave the database untouched.
        return UniObjectClass

    for File in FileList:
        if os.path.isfile(File):
            Lines = open(File, 'r')
            # Close the handle even if parsing raises (the original code
            # leaked one file descriptor per scanned file).
            try:
                for Line in Lines:
                    for StrName in STRING_TOKEN.findall(Line):
                        EdkLogger.debug(EdkLogger.DEBUG_5, "Found string identifier: " + StrName)
                        UniObjectClass.SetStringReferenced(StrName)
            finally:
                Lines.close()

    UniObjectClass.ReToken()

    return UniObjectClass
+
+## GetStringFiles
+#
+# This function is used for UEFI2.1 spec
+#
+#
def GetStringFiles(UniFilList, SourceFileList, IncludeList, SkipList, BaseName, IsCompatibleMode = False, ShellMode = False):
    ## Top-level driver for UEFI 2.1 string gathering: parse the .uni files,
    #  mark referenced tokens by scanning the source files, and return the
    #  generated (.h, .c) text pair.
    #
    # @param UniFilList:       .uni files to parse
    # @param SourceFileList:   Source files to scan for STRING_TOKEN uses
    # @param IncludeList:      Include directories for resolving source files
    # @param SkipList:         File extensions to skip during the scan
    # @param BaseName:         Base name for the generated symbols
    # @param IsCompatibleMode: Framework-compatible output mode flag
    # @param ShellMode:        Accept ISO 639-2 codes as used by EDK Shell
    if len(UniFilList) > 0:
        # ShellMode forces compatible-mode parsing regardless of the flag.
        Uni = UniFileClassObject(UniFilList, True if ShellMode else IsCompatibleMode)
    else:
        EdkLogger.error("UnicodeStringGather", AUTOGEN_ERROR, 'No unicode files given')

    ScanFileList = GetFileList(SourceFileList, IncludeList, SkipList)
    Uni = SearchString(Uni, ScanFileList)

    return CreateHFile(BaseName, Uni), CreateCFile(BaseName, Uni, IsCompatibleMode)
+
+#
+# Write an item
+#
def Write(Target, Item):
    ## Append Item to Target and return the combined string.
    Combined = Target + Item
    return Combined
+
+#
+# Write an item with a break line
+#
def WriteLine(Target, Item):
    ## Append Item plus a newline terminator to Target.
    Appended = Item + '\n'
    return Target + Appended
+
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':
    # Ad-hoc smoke test with developer-local paths: gathers the strings from
    # two hard-coded .uni files and writes unistring.h / unistring.c.
    EdkLogger.info('start')

    UniFileList = [
        r'C:\\Edk\\Strings2.uni',
        r'C:\\Edk\\Strings.uni'
    ]

    SrcFileList = []
    for Root, Dirs, Files in os.walk('C:\\Edk'):
        for File in Files:
            SrcFileList.append(File)

    IncludeList = [
        r'C:\\Edk'
    ]

    SkipList = ['.inf', '.uni']
    BaseName = 'DriverSample'
    (h, c) = GetStringFiles(UniFileList, SrcFileList, IncludeList, SkipList, BaseName, True)
    # Close the output handles explicitly; the original left them dangling.
    hfile = open('unistring.h', 'w')
    cfile = open('unistring.c', 'w')
    try:
        hfile.write(h)
        cfile.write(c)
    finally:
        hfile.close()
        cfile.close()

    EdkLogger.info('end')
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py new file mode 100644 index 0000000000..412fa72df0 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py @@ -0,0 +1,530 @@ +# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+#
+#This file is used to collect all defined strings in multiple uni files
+#
+
+##
+# Import Modules
+#
+import os, codecs, re
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.String import GetLineNo
+from Common.Misc import PathClass
+
+##
+# Static definitions
+#
# Escape directives as they appear in .uni source text
UNICODE_WIDE_CHAR = u'\\wide'
UNICODE_NARROW_CHAR = u'\\narrow'
UNICODE_NON_BREAKING_CHAR = u'\\nbr'
UNICODE_UNICODE_CR = '\r'
UNICODE_UNICODE_LF = '\n'

# Code points the directives are translated to during pre-processing
# (U+FFF0..U+FFF2 are from the Unicode specials block)
NARROW_CHAR = u'\uFFF0'
WIDE_CHAR = u'\uFFF1'
NON_BREAKING_CHAR = u'\uFFF2'
CR = u'\u000D'
LF = u'\u000A'
NULL = u'\u0000'
TAB = u'\t'
BACK_SPLASH = u'\\'

# Matches a whole-line '#include "file"' / '#include <file>' directive
gIncludePattern = re.compile("^#include +[\"<]+([^\"< >]+)[>\"]+$", re.MULTILINE | re.UNICODE)
+
+## Convert a python unicode string to a normal string
+#
+# Convert a python unicode string to a normal string
+# UniToStr(u'I am a string') is 'I am a string'
+#
+# @param Uni: The python unicode string
+#
+# @retval: The formatted normal string
+#
def UniToStr(Uni):
    # Convert a Python unicode string to a plain (byte) string:
    # UniToStr(u'I am a string') is 'I am a string'.
    #
    # NOTE(review): this relies on the Python 2 repr of a unicode literal
    # ("u'...'") -- the slice strips the leading u-quote and trailing quote,
    # and any escape sequences remain escaped.  Not portable to Python 3.
    return repr(Uni)[2:-1]
+
+## Convert a unicode string to a Hex list
+#
+# Convert a unicode string to a Hex list
+# UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00']
+#
+# @param Uni: The python unicode string
+#
+# @retval List: The formatted hex list
+#
def UniToHexList(Uni):
    ## Convert a unicode string to its UCS-2 little-endian byte list:
    #  low byte first, then high byte, for each character.
    #  UniToHexList('ABC') is ['0x41', '0x00', '0x42', '0x00', '0x43', '0x00'].
    #
    # @param Uni: The python unicode string
    #
    # @retval List: The formatted hex list
    List = []
    for Char in Uni:
        HexDigits = '%04X' % ord(Char)
        List.extend(['0x' + HexDigits[2:4], '0x' + HexDigits[0:2]])
    return List
+
# Mapping from three-letter ISO 639-2 language codes to their two-letter
# ISO 639-1 / RFC 4646 equivalents, used by GetLanguageCode to normalize
# codes read in compatible mode and to reject ambiguous three-letter codes
# in native mode.
LangConvTable = {'eng':'en', 'fra':'fr', \
                 'aar':'aa', 'abk':'ab', 'ave':'ae', 'afr':'af', 'aka':'ak', 'amh':'am', \
                 'arg':'an', 'ara':'ar', 'asm':'as', 'ava':'av', 'aym':'ay', 'aze':'az', \
                 'bak':'ba', 'bel':'be', 'bul':'bg', 'bih':'bh', 'bis':'bi', 'bam':'bm', \
                 'ben':'bn', 'bod':'bo', 'bre':'br', 'bos':'bs', 'cat':'ca', 'che':'ce', \
                 'cha':'ch', 'cos':'co', 'cre':'cr', 'ces':'cs', 'chu':'cu', 'chv':'cv', \
                 'cym':'cy', 'dan':'da', 'deu':'de', 'div':'dv', 'dzo':'dz', 'ewe':'ee', \
                 'ell':'el', 'epo':'eo', 'spa':'es', 'est':'et', 'eus':'eu', 'fas':'fa', \
                 'ful':'ff', 'fin':'fi', 'fij':'fj', 'fao':'fo', 'fry':'fy', 'gle':'ga', \
                 'gla':'gd', 'glg':'gl', 'grn':'gn', 'guj':'gu', 'glv':'gv', 'hau':'ha', \
                 'heb':'he', 'hin':'hi', 'hmo':'ho', 'hrv':'hr', 'hat':'ht', 'hun':'hu', \
                 'hye':'hy', 'her':'hz', 'ina':'ia', 'ind':'id', 'ile':'ie', 'ibo':'ig', \
                 'iii':'ii', 'ipk':'ik', 'ido':'io', 'isl':'is', 'ita':'it', 'iku':'iu', \
                 'jpn':'ja', 'jav':'jv', 'kat':'ka', 'kon':'kg', 'kik':'ki', 'kua':'kj', \
                 'kaz':'kk', 'kal':'kl', 'khm':'km', 'kan':'kn', 'kor':'ko', 'kau':'kr', \
                 'kas':'ks', 'kur':'ku', 'kom':'kv', 'cor':'kw', 'kir':'ky', 'lat':'la', \
                 'ltz':'lb', 'lug':'lg', 'lim':'li', 'lin':'ln', 'lao':'lo', 'lit':'lt', \
                 'lub':'lu', 'lav':'lv', 'mlg':'mg', 'mah':'mh', 'mri':'mi', 'mkd':'mk', \
                 'mal':'ml', 'mon':'mn', 'mar':'mr', 'msa':'ms', 'mlt':'mt', 'mya':'my', \
                 'nau':'na', 'nob':'nb', 'nde':'nd', 'nep':'ne', 'ndo':'ng', 'nld':'nl', \
                 'nno':'nn', 'nor':'no', 'nbl':'nr', 'nav':'nv', 'nya':'ny', 'oci':'oc', \
                 'oji':'oj', 'orm':'om', 'ori':'or', 'oss':'os', 'pan':'pa', 'pli':'pi', \
                 'pol':'pl', 'pus':'ps', 'por':'pt', 'que':'qu', 'roh':'rm', 'run':'rn', \
                 'ron':'ro', 'rus':'ru', 'kin':'rw', 'san':'sa', 'srd':'sc', 'snd':'sd', \
                 'sme':'se', 'sag':'sg', 'sin':'si', 'slk':'sk', 'slv':'sl', 'smo':'sm', \
                 'sna':'sn', 'som':'so', 'sqi':'sq', 'srp':'sr', 'ssw':'ss', 'sot':'st', \
                 'sun':'su', 'swe':'sv', 'swa':'sw', 'tam':'ta', 'tel':'te', 'tgk':'tg', \
                 'tha':'th', 'tir':'ti', 'tuk':'tk', 'tgl':'tl', 'tsn':'tn', 'ton':'to', \
                 'tur':'tr', 'tso':'ts', 'tat':'tt', 'twi':'tw', 'tah':'ty', 'uig':'ug', \
                 'ukr':'uk', 'urd':'ur', 'uzb':'uz', 'ven':'ve', 'vie':'vi', 'vol':'vo', \
                 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
                 'zho':'zh', 'zul':'zu'}
+
+## GetLanguageCode
+#
+# Check the language code read from .UNI file and convert ISO 639-2 codes to RFC 4646 codes if appropriate
+# ISO 639-2 language codes supported in compatiblity mode
+# RFC 4646 language codes supported in native mode
+#
+# @param LangName: Language codes read from .UNI file
+#
+# @retval LangName: Valid lanugage code in RFC 4646 format or None
+#
def GetLanguageCode(LangName, IsCompatibleMode, File):
    # Validate a language code read from a .uni file and normalize it.
    #
    # In compatible mode only three-letter ISO 639-2 codes are accepted and
    # are mapped to their RFC 4646 (ISO 639-1) equivalent when one exists.
    # In native mode RFC 4646 codes are accepted as-is; three-letter codes
    # that have an ISO 639-1 mapping are rejected as ambiguous.
    #
    # @param LangName:         Language code read from the .uni file
    # @param IsCompatibleMode: True to accept ISO 639-2 codes
    # @param File:             Source file, for error reporting only
    #
    # @retval LangName: Valid language code in RFC 4646 form (otherwise
    #                   EdkLogger.error is raised)
    #
    # (The original declared 'global LangConvTable'; the table is only read,
    # so the declaration was unnecessary and has been dropped.)
    Length = len(LangName)
    if IsCompatibleMode:
        if Length == 3 and LangName.isalpha():
            TempLangName = LangConvTable.get(LangName.lower())
            if TempLangName is not None:
                return TempLangName
            return LangName
        else:
            EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid ISO 639-2 language code : %s" % LangName, File)

    if Length == 2:
        if LangName.isalpha():
            return LangName
    elif Length == 3:
        # A bare three-letter code is valid only when it has no two-letter form.
        if LangName.isalpha() and LangConvTable.get(LangName.lower()) is None:
            return LangName
    elif Length == 5:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
    elif Length >= 6:
        if LangName[0:2].isalpha() and LangName[2] == '-':
            return LangName
        if LangName[0:3].isalpha() and LangConvTable.get(LangName.lower()) is None and LangName[3] == '-':
            return LangName

    EdkLogger.error("Unicode File Parser", FORMAT_INVALID, "Invalid RFC 4646 language code : %s" % LangName, File)
+
+## StringDefClassObject
+#
+# A structure for language definition
+#
class StringDefClassObject(object):
    ## One string definition: name, UCS-2 value bytes, token and bookkeeping
    #  flags.  UseOtherLangDef names the language this entry borrows its
    #  value from (empty string when the value is its own).
    def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
        self.StringName = ''
        self.StringNameByteList = []
        self.StringValue = ''
        # Byte list of the NUL-terminated value.  Initialized to an empty
        # list (the original used '' here, inconsistent with every other use
        # of this attribute as a list).
        self.StringValueByteList = []
        self.Token = 0
        self.Referenced = Referenced
        self.UseOtherLangDef = UseOtherLangDef
        self.Length = 0

        if Name is not None:
            self.StringName = Name
            self.StringNameByteList = UniToHexList(Name)
        if Value is not None:
            self.StringValue = Value + u'\x00' # Add a NULL at string tail
            self.StringValueByteList = UniToHexList(self.StringValue)
            self.Length = len(self.StringValueByteList)
        if Token is not None:
            self.Token = Token

    def __str__(self):
        # Debug representation: name, token, referenced flag, value, source language.
        return repr(self.StringName) + ' ' + \
               repr(self.Token) + ' ' + \
               repr(self.Referenced) + ' ' + \
               repr(self.StringValue) + ' ' + \
               repr(self.UseOtherLangDef)
+
+## UniFileClassObject
+#
+# A structure for .uni file definition
+#
class UniFileClassObject(object):
    ## Database of strings collected from one or more .uni files.
    #
    #  LanguageDef holds [identifier, printable name] pairs in #langdef
    #  order; OrderedStringList maps each language identifier to its ordered
    #  list of StringDefClassObject entries.
    #
    #  NOTE: written for Python 2 (old 'except E, X' syntax; 'print'
    #  statements in ShowMe).
    def __init__(self, FileList = [], IsCompatibleMode = False):
        # NOTE(review): mutable default argument -- harmless here because
        # FileList is never mutated, but worth keeping in mind.
        self.FileList = FileList
        self.Token = 2
        self.LanguageDef = [] #[ [u'LanguageIdentifier', u'PrintableName'], ... ]
        self.OrderedStringList = {} #{ u'LanguageIdentifier' : [StringDefClassObject] }
        self.IsCompatibleMode = IsCompatibleMode

        if len(self.FileList) > 0:
            self.LoadUniFiles(FileList)

    #
    # Get Language definition
    #
    def GetLangDef(self, File, Line):
        # Parse a '#langdef <code> "<printable name>"' line and register the
        # language.  On a malformed line the file is re-read only to compute
        # the line number for the error report.
        Lang = Line.split()
        if len(Lang) != 3:
            try:
                FileIn = codecs.open(File, mode='rb', encoding='utf-16').read()
            except UnicodeError, X:
                EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File);
            except:
                EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File);
            LineNo = GetLineNo(FileIn, Line, False)
            EdkLogger.error("Unicode File Parser", PARSER_ERROR, "Wrong language definition",
                            ExtraData="""%s\n\t*Correct format is like '#langdef eng "English"'""" % Line, File = File, Line = LineNo)
        else:
            LangName = GetLanguageCode(Lang[1], self.IsCompatibleMode, self.File)
            LangPrintName = Lang[2][1:-1]

            IsLangInDef = False
            for Item in self.LanguageDef:
                if Item[0] == LangName:
                    IsLangInDef = True
                    break;

            if not IsLangInDef:
                self.LanguageDef.append([LangName, LangPrintName])

            #
            # Add language string
            #
            # Tokens 0 and 1 are reserved for the language name strings.
            self.AddStringToList(u'$LANGUAGE_NAME', LangName, LangName, 0, True, Index=0)
            self.AddStringToList(u'$PRINTABLE_LANGUAGE_NAME', LangName, LangPrintName, 1, True, Index=1)

        return True

    #
    # Get String name and value
    #
    def GetStringObject(self, Item):
        # Parse a single-line-format entry:
        #   #string NAME #language lang "value" [#language lang2 "value2" ...]
        # The value is the text between the first and last double quote of
        # each '#language' section.
        Name = ''
        Language = ''
        Value = ''

        Name = Item.split()[1]
        LanguageList = Item.split(u'#language ')
        for IndexI in range(len(LanguageList)):
            if IndexI == 0:
                # First split element is the '#string NAME' part, not a language.
                continue
            else:
                Language = LanguageList[IndexI].split()[0]
                Value = LanguageList[IndexI][LanguageList[IndexI].find(u'\"') + len(u'\"') : LanguageList[IndexI].rfind(u'\"')] #.replace(u'\r\n', u'')
                Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
                self.AddStringToList(Name, Language, Value)

    #
    # Get include file list and load them
    #
    def GetIncludeFile(self, Item, Dir):
        # NOTE(review): Dir is unused and LoadUniFile is handed a bare string
        # (PreProcess expects an object with .Path/.Dir).  Includes are in
        # practice resolved inside PreProcess via gIncludePattern, so this
        # method appears to be dead code -- confirm before relying on it.
        FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
        self.LoadUniFile(FileName)

    #
    # Pre-process before parse .uni file
    #
    def PreProcess(self, File):
        # Read a UTF-16 .uni file, drop comments/blank lines, normalize the
        # legacy '/directive' spellings and escape sequences, and expand
        # '#include' directives recursively.  Returns the cleaned line list.
        #
        # @param File: PathClass-like object with .Path and .Dir
        if not os.path.exists(File.Path) or not os.path.isfile(File.Path):
            EdkLogger.error("Unicode File Parser", FILE_NOT_FOUND, ExtraData=File.Path)

        Dir = File.Dir
        try:
            FileIn = codecs.open(File.Path, mode='rb', encoding='utf-16').readlines()
        except UnicodeError, X:
            EdkLogger.error("build", FILE_READ_FAILURE, "File read failure: %s" % str(X), ExtraData=File.Path);
        except:
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=File.Path);

        Lines = []
        #
        # Use unique identifier
        #
        for Line in FileIn:
            Line = Line.strip()
            #
            # Ignore comment line and empty line
            #
            if Line == u'' or Line.startswith(u'//'):
                continue
            # Legacy EDK syntax uses '/' instead of '#' for directives.
            Line = Line.replace(u'/langdef', u'#langdef')
            Line = Line.replace(u'/string', u'#string')
            Line = Line.replace(u'/language', u'#language')
            Line = Line.replace(u'/include', u'#include')

            Line = Line.replace(UNICODE_WIDE_CHAR, WIDE_CHAR)
            Line = Line.replace(UNICODE_NARROW_CHAR, NARROW_CHAR)
            Line = Line.replace(UNICODE_NON_BREAKING_CHAR, NON_BREAKING_CHAR)

            # U+0006 is used as a temporary placeholder so that escaped
            # backslashes survive the escape-sequence substitutions below.
            Line = Line.replace(u'\\\\', u'\u0006')
            Line = Line.replace(u'\\r\\n', CR + LF)
            # NOTE(review): '\n' alone is also expanded to CR+LF, same as '\r\n'.
            Line = Line.replace(u'\\n', CR + LF)
            Line = Line.replace(u'\\r', CR)
            Line = Line.replace(u'\\t', u'\t')
            Line = Line.replace(u'''\"''', u'''"''')
            Line = Line.replace(u'\t', u' ')
            Line = Line.replace(u'\u0006', u'\\')

#            if Line.find(u'\\x'):
#                hex = Line[Line.find(u'\\x') + 2 : Line.find(u'\\x') + 6]
#                hex = "u'\\u" + hex + "'"

            # Recursively splice in '#include'd .uni files, relative to this
            # file's directory.
            IncList = gIncludePattern.findall(Line)
            if len(IncList) == 1:
                Lines.extend(self.PreProcess(PathClass(str(IncList[0]), Dir)))
                continue

            Lines.append(Line)

        return Lines

    #
    # Load a .uni file
    #
    def LoadUniFile(self, File = None):
        # Parse one .uni file into the string database.  Two entry formats
        # are recognized; see the inline comments below.
        if File == None:
            EdkLogger.error("Unicode File Parser", PARSER_ERROR, 'No unicode file is given')
        self.File = File
        #
        # Process special char in file
        #
        Lines = self.PreProcess(File)

        #
        # Get Unicode Information
        #
        for IndexI in range(len(Lines)):
            Line = Lines[IndexI]
            # NOTE(review): SecondLine/ThirdLine keep their previous values
            # near the end of the list, and the 'IndexI = IndexJ' assignments
            # below do not advance this for-loop, so consumed lines are
            # revisited; duplicates are filtered later by AddStringToList.
            if (IndexI + 1) < len(Lines):
                SecondLine = Lines[IndexI + 1]
            if (IndexI + 2) < len(Lines):
                ThirdLine = Lines[IndexI + 2]

            #
            # Get Language def information
            #
            if Line.find(u'#langdef ') >= 0:
                self.GetLangDef(File, Line)
                continue

            Name = ''
            Language = ''
            Value = ''
            #
            # Get string def information format 1 as below
            #
            #     #string MY_STRING_1
            #     #language eng
            #     My first English string line 1
            #     My first English string line 2
            #     #string MY_STRING_1
            #     #language spa
            #     Mi segunda secuencia 1
            #     Mi segunda secuencia 2
            #
            if Line.find(u'#string ') >= 0 and Line.find(u'#language ') < 0 and \
                SecondLine.find(u'#string ') < 0 and SecondLine.find(u'#language ') >= 0 and \
                ThirdLine.find(u'#string ') < 0 and ThirdLine.find(u'#language ') < 0:
                Name = Line[Line.find(u'#string ') + len(u'#string ') : ].strip(' ')
                Language = SecondLine[SecondLine.find(u'#language ') + len(u'#language ') : ].strip(' ')
                # Concatenate the value lines until the next directive.
                for IndexJ in range(IndexI + 2, len(Lines)):
                    if Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') < 0:
                        Value = Value + Lines[IndexJ]
                    else:
                        IndexI = IndexJ
                        break
                # Value = Value.replace(u'\r\n', u'')
                Language = GetLanguageCode(Language, self.IsCompatibleMode, self.File)
                self.AddStringToList(Name, Language, Value)
                continue

            #
            # Get string def information format 2 as below
            #
            #    #string MY_STRING_1     #language eng     "My first English string line 1"
            #                                              "My first English string line 2"
            #                            #language spa     "Mi segunda secuencia 1"
            #                                              "Mi segunda secuencia 2"
            #    #string MY_STRING_2     #language eng     "My first English string line 1"
            #                                              "My first English string line 2"
            #    #string MY_STRING_2     #language spa     "Mi segunda secuencia 1"
            #                                              "Mi segunda secuencia 2"
            #
            if Line.find(u'#string ') >= 0 and Line.find(u'#language ') >= 0:
                StringItem = Line
                for IndexJ in range(IndexI + 1, len(Lines)):
                    if Lines[IndexJ].find(u'#string ') >= 0 and Lines[IndexJ].find(u'#language ') >= 0:
                        # Next '#string' entry: stop collecting.
                        IndexI = IndexJ
                        break
                    elif Lines[IndexJ].find(u'#string ') < 0 and Lines[IndexJ].find(u'#language ') >= 0:
                        # Additional '#language' section for the same string.
                        StringItem = StringItem + Lines[IndexJ]
                    elif Lines[IndexJ].count(u'\"') >= 2:
                        # Continuation line: merge across the quote boundary.
                        StringItem = StringItem[ : StringItem.rfind(u'\"')] + Lines[IndexJ][Lines[IndexJ].find(u'\"') + len(u'\"') : ]
                self.GetStringObject(StringItem)
                continue

    #
    # Load multiple .uni files
    #
    def LoadUniFiles(self, FileList = []):
        # NOTE(review): when more than one file is given the copy of the list
        # is sorted first, so load order differs from the caller's order.
        if len(FileList) > 0:
            if len(FileList) > 1:
                NewList = [];
                for File in FileList:
                    NewList.append (File)
                NewList.sort()
                for File in NewList:
                    self.LoadUniFile(File)
            else:
                for File in FileList:
                    self.LoadUniFile(File)

    #
    # Add a string to list
    #
    def AddStringToList(self, Name, Language, Value, Token = None, Referenced = False, UseOtherLangDef = '', Index = -1):
        # Append (or insert at Index) a string definition for the given
        # language; duplicates by name are silently ignored.
        # NOTE(review): the Token parameter is always overwritten with the
        # current list length below, so the value passed in has no effect.
        if Language not in self.OrderedStringList:
            self.OrderedStringList[Language] = []

        IsAdded = False
        for Item in self.OrderedStringList[Language]:
            if Name == Item.StringName:
                IsAdded = True
                break
        if not IsAdded:
            Token = len(self.OrderedStringList[Language])
            if Index == -1:
                self.OrderedStringList[Language].append(StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))
            else:
                self.OrderedStringList[Language].insert(Index, StringDefClassObject(Name, Value, Referenced, Token, UseOtherLangDef))

    #
    # Set the string as referenced
    #
    def SetStringReferenced(self, Name):
        # Mark the first entry matching Name in every language as referenced.
        for Lang in self.OrderedStringList:
            for Item in self.OrderedStringList[Lang]:
                if Name == Item.StringName:
                    Item.Referenced = True
                    break
    #
    # Search the string in language definition by Name
    #
    def FindStringValue(self, Name, Lang):
        for Item in self.OrderedStringList[Lang]:
            if Item.StringName == Name:
                return Item

        return None

    #
    # Search the string in language definition by Token
    #
    def FindByToken(self, Token, Lang):
        for Item in self.OrderedStringList[Lang]:
            if Item.Token == Token:
                return Item

        return None

    #
    # Re-order strings and re-generate tokens
    #
    def ReToken(self):
        #
        # Search each string to find if it is defined for each language
        # Use secondary language value to replace if missing in any one language
        #
        for IndexI in range(0, len(self.LanguageDef)):
            LangKey = self.LanguageDef[IndexI][0]
            for Item in self.OrderedStringList[LangKey]:
                Name = Item.StringName
                Value = Item.StringValue[0:-1]  # strip the trailing NUL
                Referenced = Item.Referenced
                Index = self.OrderedStringList[LangKey].index(Item)
                for IndexJ in range(0, len(self.LanguageDef)):
                    LangFind = self.LanguageDef[IndexJ][0]
                    if self.FindStringValue(Name, LangFind) == None:
                        EdkLogger.debug(EdkLogger.DEBUG_5, Name)
                        Token = len(self.OrderedStringList[LangFind])
                        # Record LangKey as the borrowed-from language.
                        self.AddStringToList(Name, LangFind, Value, Token, Referenced, LangKey, Index)

        #
        # Retoken
        #
        # First re-token the first language: referenced strings get the low
        # tokens (in order), unreferenced strings follow.
        LangName = self.LanguageDef[0][0]
        ReferencedStringList = []
        NotReferencedStringList = []
        Token = 0
        for Item in self.OrderedStringList[LangName]:
            if Item.Referenced == True:
                Item.Token = Token
                ReferencedStringList.append(Item)
                Token = Token + 1
            else:
                NotReferencedStringList.append(Item)
        self.OrderedStringList[LangName] = ReferencedStringList
        for Index in range(len(NotReferencedStringList)):
            NotReferencedStringList[Index].Token = Token + Index
            self.OrderedStringList[LangName].append(NotReferencedStringList[Index])

        #
        # Adjust the orders of other languages
        #
        # Copy the first language's token for each name to every other
        # language so all packages agree on token values.
        for IndexOfLanguage in range(1, len(self.LanguageDef)):
            for OrderedString in self.OrderedStringList[LangName]:
                for UnOrderedString in self.OrderedStringList[self.LanguageDef[IndexOfLanguage][0]]:
                    if OrderedString.StringName == UnOrderedString.StringName:
                        UnOrderedString.Token = OrderedString.Token
                        break

    #
    # Show the instance itself
    #
    def ShowMe(self):
        # Debug dump (Python 2 print statements).
        print self.LanguageDef
        #print self.OrderedStringList
        for Item in self.OrderedStringList:
            print Item
            for Member in self.OrderedStringList[Item]:
                print str(Member)
+
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':
    # Ad-hoc smoke test: parses two hard-coded .uni files and dumps the
    # resulting database.  NOTE(review): paths are developer-specific and
    # this will only run on a machine with C:\Edk present.
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    a = UniFileClassObject(['C:\\Edk\\Strings.uni', 'C:\\Edk\\Strings2.uni'])
    a.ReToken()
    a.ShowMe()
diff --git a/BaseTools/Source/Python/AutoGen/__init__.py b/BaseTools/Source/Python/AutoGen/__init__.py new file mode 100644 index 0000000000..d6fa5ec126 --- /dev/null +++ b/BaseTools/Source/Python/AutoGen/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+__all__ = ["AutoGen"]
diff --git a/BaseTools/Source/Python/Common/BuildToolError.py b/BaseTools/Source/Python/Common/BuildToolError.py new file mode 100644 index 0000000000..982ea93659 --- /dev/null +++ b/BaseTools/Source/Python/Common/BuildToolError.py @@ -0,0 +1,152 @@ +## @file +# Standardized Error Hanlding infrastructures. +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +FILE_OPEN_FAILURE = 1 +FILE_WRITE_FAILURE = 2 +FILE_PARSE_FAILURE = 3 +FILE_READ_FAILURE = 4 +FILE_CREATE_FAILURE = 5 +FILE_CHECKSUM_FAILURE = 6 +FILE_COMPRESS_FAILURE = 7 +FILE_DECOMPRESS_FAILURE = 8 +FILE_MOVE_FAILURE = 9 +FILE_DELETE_FAILURE = 10 +FILE_COPY_FAILURE = 11 +FILE_POSITIONING_FAILURE = 12 +FILE_ALREADY_EXIST = 13 +FILE_NOT_FOUND = 14 +FILE_TYPE_MISMATCH = 15 +FILE_CASE_MISMATCH = 16 +FILE_DUPLICATED = 17 +FILE_UNKNOWN_ERROR = 0x0FFF + +OPTION_UNKNOWN = 0x1000 +OPTION_MISSING = 0x1001 +OPTION_CONFLICT = 0x1002 +OPTION_VALUE_INVALID = 0x1003 +OPTION_DEPRECATED = 0x1004 +OPTION_NOT_SUPPORTED = 0x1005 +OPTION_UNKNOWN_ERROR = 0x1FFF + +PARAMETER_INVALID = 0x2000 +PARAMETER_MISSING = 0x2001 +PARAMETER_UNKNOWN_ERROR =0x2FFF + +FORMAT_INVALID = 0x3000 +FORMAT_NOT_SUPPORTED = 0x3001 +FORMAT_UNKNOWN = 0x3002 +FORMAT_UNKNOWN_ERROR = 0x3FFF + +RESOURCE_NOT_AVAILABLE = 0x4000 +RESOURCE_ALLOCATE_FAILURE = 0x4001 +RESOURCE_FULL = 0x4002 +RESOURCE_OVERFLOW = 0x4003 +RESOURCE_UNDERRUN = 0x4004 +RESOURCE_UNKNOWN_ERROR = 0x4FFF + +ATTRIBUTE_NOT_AVAILABLE = 0x5000 +ATTRIBUTE_GET_FAILURE = 0x5001 +ATTRIBUTE_SET_FAILURE = 0x5002 +ATTRIBUTE_UPDATE_FAILURE = 0x5003 
+ATTRIBUTE_ACCESS_DENIED = 0x5004 +ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF + +IO_NOT_READY = 0x6000 +IO_BUSY = 0x6001 +IO_TIMEOUT = 0x6002 +IO_UNKNOWN_ERROR = 0x6FFF + +COMMAND_FAILURE = 0x7000 + +CODE_ERROR = 0xC0DE + +AUTOGEN_ERROR = 0xF000 +PARSER_ERROR = 0xF001 +BUILD_ERROR = 0xF002 +GENFDS_ERROR = 0xF003 +ECC_ERROR = 0xF004 +EOT_ERROR = 0xF005 +DDC_ERROR = 0xF009 +WARNING_AS_ERROR = 0xF006 +MIGRATION_ERROR = 0xF010 +ABORT_ERROR = 0xFFFE +UNKNOWN_ERROR = 0xFFFF + +## Error message of each error code +gErrorMessage = { + FILE_NOT_FOUND : "File/directory not found", + FILE_OPEN_FAILURE : "File open failure", + FILE_WRITE_FAILURE : "File write failure", + FILE_PARSE_FAILURE : "File parse failure", + FILE_READ_FAILURE : "File read failure", + FILE_CREATE_FAILURE : "File create failure", + FILE_CHECKSUM_FAILURE : "Invalid checksum of file", + FILE_COMPRESS_FAILURE : "File compress failure", + FILE_DECOMPRESS_FAILURE : "File decompress failure", + FILE_MOVE_FAILURE : "File move failure", + FILE_DELETE_FAILURE : "File delete failure", + FILE_COPY_FAILURE : "File copy failure", + FILE_POSITIONING_FAILURE: "Failed to seeking position", + FILE_ALREADY_EXIST : "File or directory already exists", + FILE_TYPE_MISMATCH : "Incorrect file type", + FILE_CASE_MISMATCH : "File name case mismatch", + FILE_DUPLICATED : "Duplicated file found", + FILE_UNKNOWN_ERROR : "Unknown error encountered on file", + + OPTION_UNKNOWN : "Unknown option", + OPTION_MISSING : "Missing option", + OPTION_CONFLICT : "Conflict options", + OPTION_VALUE_INVALID : "Invalid value of option", + OPTION_DEPRECATED : "Deprecated option", + OPTION_NOT_SUPPORTED : "Unsupported option", + OPTION_UNKNOWN_ERROR : "Unknown error when processing options", + + PARAMETER_INVALID : "Invalid parameter", + PARAMETER_MISSING : "Missing parameter", + PARAMETER_UNKNOWN_ERROR : "Unknown error in parameters", + + FORMAT_INVALID : "Invalid syntax/format", + FORMAT_NOT_SUPPORTED : "Not supported syntax/format", + FORMAT_UNKNOWN : 
"Unknown format", + FORMAT_UNKNOWN_ERROR : "Unknown error in syntax/format ", + + RESOURCE_NOT_AVAILABLE : "Not available", + RESOURCE_ALLOCATE_FAILURE : "Allocate failure", + RESOURCE_FULL : "Full", + RESOURCE_OVERFLOW : "Overflow", + RESOURCE_UNDERRUN : "Underrun", + RESOURCE_UNKNOWN_ERROR : "Unkown error", + + ATTRIBUTE_NOT_AVAILABLE : "Not available", + ATTRIBUTE_GET_FAILURE : "Failed to retrieve", + ATTRIBUTE_SET_FAILURE : "Failed to set", + ATTRIBUTE_UPDATE_FAILURE: "Failed to update", + ATTRIBUTE_ACCESS_DENIED : "Access denied", + ATTRIBUTE_UNKNOWN_ERROR : "Unknown error when accessing", + + COMMAND_FAILURE : "Failed to execute command", + + IO_NOT_READY : "Not ready", + IO_BUSY : "Busy", + IO_TIMEOUT : "Timeout", + IO_UNKNOWN_ERROR : "Unknown error in IO operation", + + UNKNOWN_ERROR : "Unknown error", +} + +## Exception indicating a fatal error +class FatalError(Exception): + pass + +if __name__ == "__main__": + pass diff --git a/BaseTools/Source/Python/Common/DataType.py b/BaseTools/Source/Python/Common/DataType.py new file mode 100644 index 0000000000..8b6c4e4921 --- /dev/null +++ b/BaseTools/Source/Python/Common/DataType.py @@ -0,0 +1,401 @@ +## @file
+# This file is used to define common static strings used by INF/DEC/DSC files
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Common Definitions
+#
# Separator / delimiter characters used when parsing INF/DEC/DSC meta files.
TAB_SPLIT = '.'
TAB_COMMENT_R8_START = '/*'
TAB_COMMENT_R8_END = '*/'
TAB_COMMENT_R8_SPLIT = '//'
TAB_COMMENT_SPLIT = '#'
TAB_EQUAL_SPLIT = '='
TAB_VALUE_SPLIT = '|'
TAB_COMMA_SPLIT = ','
TAB_SPACE_SPLIT = ' '
TAB_SECTION_START = '['
TAB_SECTION_END = ']'
TAB_OPTION_START = '<'
TAB_OPTION_END = '>'
# NOTE(review): the two names below look semantically swapped (TAB_SLASH is a
# backslash, TAB_BACK_SLASH a forward slash), but both names and values are
# part of the established interface of this module and must not be changed.
TAB_SLASH = '\\'
TAB_BACK_SLASH = '/'

# Macro references resolved against the build environment.
TAB_EDK_SOURCE = '$(EDK_SOURCE)'
TAB_EFI_SOURCE = '$(EFI_SOURCE)'
TAB_WORKSPACE = '$(WORKSPACE)'

# Architecture identifiers used to qualify section names (e.g. Sources.IA32).
TAB_ARCH_NULL = ''
TAB_ARCH_COMMON = 'COMMON'
TAB_ARCH_IA32 = 'IA32'
TAB_ARCH_X64 = 'X64'
TAB_ARCH_IPF = 'IPF'
TAB_ARCH_ARM = 'ARM'
TAB_ARCH_EBC = 'EBC'

ARCH_LIST = [TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_IPF, TAB_ARCH_ARM, TAB_ARCH_EBC]
ARCH_LIST_FULL = [TAB_ARCH_COMMON] + ARCH_LIST

# MODULE_TYPE values accepted in the [Defines] section of an INF file.
SUP_MODULE_BASE = 'BASE'
SUP_MODULE_SEC = 'SEC'
SUP_MODULE_PEI_CORE = 'PEI_CORE'
SUP_MODULE_PEIM = 'PEIM'
SUP_MODULE_DXE_CORE = 'DXE_CORE'
SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
SUP_MODULE_SMM_DRIVER = 'SMM_DRIVER'
SUP_MODULE_SMM_CORE = 'SMM_CORE'

SUP_MODULE_LIST = [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
                   SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
                   SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_SMM_DRIVER, SUP_MODULE_SMM_CORE]
# join() accepts the list directly; the original wrapped it in a needless
# generator expression ("join(l for l in SUP_MODULE_LIST)").
SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(SUP_MODULE_LIST)
+
# COMPONENT_TYPE values used by EDK (R8) style INF files.
# NOTE(review): 'SECUARITY_CORE' looks like a misspelling of 'SECURITY_CORE',
# but both the name and the literal are part of the existing grammar/interface
# and are intentionally left unchanged.
EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
EDK_COMPONENT_TYPE_SECUARITY_CORE = 'SECUARITY_CORE'
EDK_COMPONENT_TYPE_PEI_CORE = 'PEI_CORE'
EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'

# File type labels for entries in a module's [Binaries] section.
BINARY_FILE_TYPE_FW = 'FW'
BINARY_FILE_TYPE_GUID = 'GUID'
BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
BINARY_FILE_TYPE_LIB = 'LIB'
BINARY_FILE_TYPE_PE32 = 'PE32'
BINARY_FILE_TYPE_PIC = 'PIC'
BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
BINARY_FILE_TYPE_TE = 'TE'
BINARY_FILE_TYPE_VER = 'VER'
BINARY_FILE_TYPE_UI = 'UI'
BINARY_FILE_TYPE_BIN = 'BIN'
BINARY_FILE_TYPE_FV = 'FV'

# Component categories recognized in a platform (DSC) description.
PLATFORM_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'

TAB_LIBRARIES = 'Libraries'

# For each metadata section name below, one arch-qualified variant is built
# per architecture, e.g. 'Sources.IA32' — used when matching section headers.
TAB_SOURCES = 'Sources'
TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
TAB_SOURCES_IPF = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IPF
TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC

TAB_BINARIES = 'Binaries'
TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_BINARIES_IPF = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC

TAB_INCLUDES = 'Includes'
TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
TAB_INCLUDES_IPF = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IPF
TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC

TAB_GUIDS = 'Guids'
TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
TAB_GUIDS_IPF = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IPF
TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC

TAB_PROTOCOLS = 'Protocols'
TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
TAB_PROTOCOLS_IPF = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC

TAB_PPIS = 'Ppis'
TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
TAB_PPIS_IPF = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IPF
TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC

TAB_LIBRARY_CLASSES = 'LibraryClasses'
TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARY_CLASSES_IPF = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC

TAB_PACKAGES = 'Packages'
TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
TAB_PACKAGES_IPF = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IPF
TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
+
# PCD section names and the storage-type suffixes that follow 'Pcds'.
TAB_PCDS = 'Pcds'
TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
TAB_PCDS_DYNAMIC = 'Dynamic'
TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
TAB_PCDS_DYNAMIC_HII = 'DynamicHii'

PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]

## Dynamic-ex PCD types
# Derived as an independent copy of PCD_DYNAMIC_EX_TYPE_LIST instead of
# repeating the same literal (the original duplicated the list element by
# element); list() keeps the two objects distinct, as before.
gDynamicExPcd = list(PCD_DYNAMIC_EX_TYPE_LIST)
+
# Full PCD section names: '<Pcds><Type>' plus arch-qualified variants.
TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FIXED_AT_BUILD_IPF = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_PATCHABLE_IN_MODULE_IPF = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_FEATURE_FLAG_IPF = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_EX_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_COMMON
TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
TAB_PCDS_DYNAMIC_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IPF
TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC

TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, TAB_PCDS_DYNAMIC_HII_NULL]
TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL]

TAB_DEPEX = 'Depex'
TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
TAB_DEPEX_IPF = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IPF
TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC

TAB_SKUIDS = 'SkuIds'

# NOTE(review): TAB_LIBRARIES is also defined earlier in this file with the
# identical value; this re-assignment is redundant but harmless.
TAB_LIBRARIES = 'Libraries'
TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
TAB_LIBRARIES_IPF = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IPF
TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC

TAB_COMPONENTS = 'Components'
TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
TAB_COMPONENTS_IPF = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IPF
TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC

TAB_COMPONENTS_SOURCE_OVERRIDE_PATH = 'SOURCE_OVERRIDE_PATH'

TAB_BUILD_OPTIONS = 'BuildOptions'

TAB_DEFINE = 'DEFINE'
TAB_NMAKE = 'Nmake'
TAB_USER_EXTENSIONS = 'UserExtensions'
TAB_INCLUDE = '!include'

#
# Common Define
#
TAB_COMMON_DEFINES = 'Defines'

#
# Inf Definitions
#
TAB_INF_DEFINES = TAB_COMMON_DEFINES
TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
TAB_INF_DEFINES_VERSION = 'VERSION' # for R8 inf, the same as VERSION_NUMBER
TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H = 'TIANO_R8_FLASHMAP_H'
TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
TAB_INF_DEFINES_DEFINE = 'DEFINE'
TAB_INF_DEFINES_SPEC = 'SPEC'
TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
TAB_INF_DEFINES_MACRO = '__MACROS__'
TAB_INF_DEFINES_SHADOW = 'SHADOW'
TAB_INF_FIXED_PCD = 'FixedPcd'
TAB_INF_FEATURE_PCD = 'FeaturePcd'
TAB_INF_PATCH_PCD = 'PatchPcd'
TAB_INF_PCD = 'Pcd'
TAB_INF_PCD_EX = 'PcdEx'

#
# Dec Definitions
#
TAB_DEC_DEFINES = TAB_COMMON_DEFINES
TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'

#
# Dsc Definitions
#
TAB_DSC_DEFINES = TAB_COMMON_DEFINES
TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'

#
# TargetTxt Definitions
#
TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
TAB_TAT_DEFINES_TARGET = 'TARGET'
TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"

#
# ToolDef Definitions
#
TAB_TOD_DEFINES_TARGET = 'TARGET'
TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
TAB_TOD_DEFINES_FAMILY = 'FAMILY'
TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'

#
# Conditional Statements
#
TAB_IF = '!if'
TAB_END_IF = '!endif'
TAB_ELSE_IF = '!elseif'
TAB_ELSE = '!else'
TAB_IF_DEF = '!ifdef'
TAB_IF_N_DEF = '!ifndef'
TAB_IF_EXIST = '!if exist'

#
# Unknown section
#
TAB_UNKNOWN = 'UNKNOWN'

#
# Build database path
#
DATABASE_PATH = ":memory:" #"BuildDatabase.db"

# used by ECC
MODIFIER_LIST = ['IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', 'EFI_BOOTSERVICE', 'EFIAPI']

# Dependency Expression
DEPEX_SUPPORTED_OPCODE = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR", "TRUE", "FALSE", '(', ')']

# Logical file-type tags used by the build rule engine.
TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
TAB_C_CODE_FILE = "C-CODE-FILE"
TAB_C_HEADER_FILE = "C-HEADER-FILE"
TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
+
diff --git a/BaseTools/Source/Python/Common/Database.py b/BaseTools/Source/Python/Common/Database.py new file mode 100644 index 0000000000..e645337a39 --- /dev/null +++ b/BaseTools/Source/Python/Common/Database.py @@ -0,0 +1,120 @@ +## @file
+# This file is used to create a database used by ECC tool
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os
+
+import EdkLogger as EdkLogger
+from CommonDataClass.DataClass import *
+from String import *
+from DataType import *
+
+from Table.TableDataModel import TableDataModel
+from Table.TableFile import TableFile
+from Table.TableInf import TableInf
+from Table.TableDec import TableDec
+from Table.TableDsc import TableDsc
+
## Database
#
# ECC build database facade. Construction deletes any stale database file at
# DbPath and opens a fresh SQLite connection; table wrapper objects share one
# cursor. Call InitDatabase() before use and Close() when done.
#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#                (DATABASE_PATH is ":memory:", i.e. an in-memory database)
#
# @var Conn:         Connection of the ECC database
# @var Cur:          Cursor of the connection
# @var TblDataModel: Local instance for TableDataModel
#
class Database(object):
    def __init__(self, DbPath):
        # Always rebuild from scratch: remove a leftover database file first.
        if os.path.exists(DbPath):
            os.remove(DbPath)
        self.Conn = sqlite3.connect(DbPath, isolation_level = 'DEFERRED')
        # Larger pages and synchronous=OFF trade durability for bulk-insert
        # speed; acceptable because the database is regenerated on every run.
        self.Conn.execute("PRAGMA page_size=8192")
        self.Conn.execute("PRAGMA synchronous=OFF")
        self.Cur = self.Conn.cursor()
        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblInf = TableInf(self.Cur)
        self.TblDec = TableDec(self.Cur)
        self.TblDsc = TableDsc(self.Cur)

    ## Initialize build database
    #
    # Create all tables and fill table DataModel with its static records.
    #
    def InitDatabase(self):
        EdkLogger.verbose("\nInitialize ECC database started ...")
        self.TblDataModel.Create()
        self.TblFile.Create()
        self.TblInf.Create()
        self.TblDec.Create()
        self.TblDsc.Create()
        self.TblDataModel.InitTable()
        EdkLogger.verbose("Initialize ECC database ... DONE!")

    ## Query a table
    #
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()

    ## Close entire database
    #
    # Commit all pending changes first, then close cursor and connection.
    #
    def Close(self):
        self.Conn.commit()
        self.Cur.close()
        self.Conn.close()
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
# Smoke test: build the ECC database, dump a few tables, then close it.
# DATABASE_PATH is ":memory:" (star-imported from DataType), so no file is
# written to disk.
#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    Db = Database(DATABASE_PATH)
    Db.InitDatabase()
    Db.QueryTable(Db.TblDataModel)
    Db.QueryTable(Db.TblFile)
    Db.QueryTable(Db.TblDsc)
    Db.Close()
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/Common/DecClassObject.py b/BaseTools/Source/Python/Common/DecClassObject.py new file mode 100644 index 0000000000..b95ff621cc --- /dev/null +++ b/BaseTools/Source/Python/Common/DecClassObject.py @@ -0,0 +1,563 @@ +## @file
+# This file is used to define each component of DEC file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from String import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass.CommonClass import PcdClass
+from BuildToolError import *
+from Table.TableDec import TableDec
+import Database
+from Parsing import *
+import GlobalData
+
#
# Global variable
#
# Maps each upper-cased DEC section name to its data model constant
# (MODEL_* values are star-imported from CommonDataClass.DataClass).
# Keys are upper-cased so section-header matching is case insensitive.
#
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
           TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
           TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
           TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
           TAB_GUIDS.upper() : MODEL_EFI_GUID,
           TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
           TAB_PPIS.upper() : MODEL_EFI_PPI,
           TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
           TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
           TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
           TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
           TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
           }
+
+
## DecObject
#
# Base class from which DEC-related classes inherit.
#
# @param object: Inherited from object class
#
class DecObject(object):
    def __init__(self):
        # The original code called object.__init__() without the instance,
        # which raises TypeError if DecObject() is ever instantiated directly;
        # pass self explicitly.
        object.__init__(self)
+
+## Dec
+#
+# This class defined the structure used in Dec object
+#
+# @param DecObject: Inherited from DecObject class
+# @param Filename: Input value for Filename of Dec file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# False is not to merge all arches
+# default is False
+# @param IsToPackage: Input value for IsToPackage
+# True is to transfer to PackageObject automatically
+# False is not to transfer to PackageObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var Defines: To store value for Defines, it is a structure as DecDefines
+# @var UserExtensions: To store value for UserExtensions
+# @var Package: To store value for Package, it is a structure as PackageClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var Contents: To store value for Contents, it is a structure as DecContents
+# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
+#
+class Dec(DecObject):
+ def __init__(self, Filename = None, IsToDatabase = False, IsToPackage = False, WorkspaceDir = None, Database = None, SupArchList = DataType.ARCH_LIST):
+ self.Identification = Identification()
+ self.Package = PackageClass()
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = SupArchList
+ self.IsToDatabase = IsToDatabase
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblDec = Database.TblDec
+ self.FileID = -1
+
+ self.KeyList = [
+ TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
+ TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
+ TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
+ ]
+ #
+ # Upper all KEYs to ignore case sensitive when parsing
+ #
+ self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+ #
+ # Init RecordSet
+ #
+ self.RecordSet = {}
+ for Key in self.KeyList:
+ self.RecordSet[Section[Key]] = []
+
+ #
+ # Load Dec file if filename is not None
+ #
+ if Filename != None:
+ self.LoadDecFile(Filename)
+
+ #
+ # Transfer to Package Object if IsToPackage is True
+ #
+ if IsToPackage:
+ self.DecToPackage()
+
+ ## Load Dec file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Dec file
+ #
+ def LoadDecFile(self, Filename):
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(Filename)
+ self.Identification.FileFullPath = Filename
+ (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
+
+ #
+ # Init DecTable
+ #
+ #self.TblDec.Table = "Dec%s" % self.FileID
+ #self.TblDec.Create()
+
+ #
+ # Init common datas
+ #
+ IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+ [], [], TAB_UNKNOWN, [], [], []
+ LineNo = 0
+
+ #
+ # Parse file content
+ #
+ IsFindBlockComment = False
+ ReservedLine = ''
+ for Line in open(Filename, 'r'):
+ LineNo = LineNo + 1
+ #
+ # Remove comment block
+ #
+ if Line.find(TAB_COMMENT_R8_START) > -1:
+ ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+ IsFindBlockComment = True
+ if Line.find(TAB_COMMENT_R8_END) > -1:
+ Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ continue
+
+ #
+ # Remove comments at tail and remove spaces again
+ #
+ Line = CleanString(Line)
+ if Line == '':
+ continue
+
+ #
+ # Find a new section tab
+ # First insert previous section items
+ # And then parse the content of the new section
+ #
+ if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+ #
+ # Insert items data of previous section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+ #
+ # Parse the new section
+ #
+ SectionItemList = []
+ ArchList = []
+ ThirdList = []
+
+ CurrentSection = ''
+ LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+ for Item in LineList:
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ if CurrentSection == '':
+ CurrentSection = ItemList[0]
+ else:
+ if CurrentSection != ItemList[0]:
+ EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ if CurrentSection.upper() not in self.KeyList:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ ItemList.append('')
+ ItemList.append('')
+ if len(ItemList) > 5:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ else:
+ if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+ EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ ArchList.append(ItemList[1].upper())
+ ThirdList.append(ItemList[2])
+
+ continue
+
+ #
+ # Not in any defined section
+ #
+ if CurrentSection == TAB_UNKNOWN:
+ ErrorMsg = "%s is not in any defined section" % Line
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+ #
+ # Add a section item
+ #
+ SectionItemList.append([Line, LineNo])
+ # End of parse
+ #End of For
+
+ #
+ # Insert items data of last section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+ #
+ # Replace all DEFINE macros with its actual values
+ #
+ ParseDefineMacro2(self.TblDec, self.RecordSet, GlobalData.gGlobalDefines)
+
+ ## Transfer to Package Object
+ #
+ # Transfer all contents of a Dec file to a standard Package Object
+ #
+ def DecToPackage(self):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = self.Identification.FileFullPath
+
+ #
+ # Generate Package Header
+ #
+ self.GenPackageHeader(ContainerFile)
+
+ #
+ # Generate Includes
+ #
+ self.GenIncludes(ContainerFile)
+
+ #
+ # Generate Guids
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+
+ #
+ # Generate Protocols
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+
+ #
+ # Generate Ppis
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+ #
+ # Generate LibraryClasses
+ #
+ self.GenLibraryClasses(ContainerFile)
+
+ #
+ # Generate Pcds
+ #
+ self.GenPcds(ContainerFile)
+
+ ## Get Package Header
+ #
+ # Gen Package Header of Dec as <Key> = <Value>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
    def GenPackageHeader(self, ContainerFile):
        EdkLogger.debug(2, "Generate PackageHeader ...")
        #
        # Update all defines item in database
        #
        # Each [Defines] record must be a single '<Key> = <Value>' pair; an
        # extra or missing '=' makes GetSplitValueList return != 2 parts and
        # is reported as a parse error.
        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
        for Record in RecordSet:
            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
            if len(ValueList) != 2:
                RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2])
            ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2]
            # Write the split key/value back into this record's DB row.
            SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
                            where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
            self.TblDec.Exec(SqlCommand)

        #
        # Get detailed information
        #
        # One PackageHeaderClass per supported arch, filled from the
        # database rows updated above.
        for Arch in self.SupArchList:
            PackageHeader = PackageHeaderClass()

            PackageHeader.Name = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_NAME, Arch, self.FileID)[0]
            PackageHeader.Guid = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_GUID, Arch, self.FileID)[0]
            PackageHeader.Version = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_VERSION, Arch, self.FileID)[0]
            PackageHeader.FileName = self.Identification.FileName
            PackageHeader.FullPath = self.Identification.FileFullPath
            PackageHeader.DecSpecification = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_DEC_SPECIFICATION, Arch, self.FileID)[0]

            self.Package.Header[Arch] = PackageHeader
+
+ ## GenIncludes
+ #
+ # Gen Includes of Dec
+ #
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenIncludes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+ Includes = {}
+ #
+ # Get all Includes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ MergeArches(Includes, Record[0], Arch)
+
+ for Key in Includes.keys():
+ Include = IncludeClass()
+ Include.FilePath = NormPath(Key)
+ Include.SupArchList = Includes[Key]
+ self.Package.Includes.append(Include)
+
+ ## GenGuidProtocolPpis
+ #
+ # Gen Guids/Protocols/Ppis of Dec
+ # <CName>=<GuidValue>
+ #
+ # @param Type: One of TAB_GUIDS, TAB_PROTOCOLS or TAB_PPIS
+ # @param ContainerFile: The Dec file full path
+ #
    def GenGuidProtocolPpis(self, Type, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % Type)
        Lists = {}
        #
        # Get all Items
        #
        RecordSet = self.RecordSet[Section[Type.upper()]]

        #
        # Go through each arch
        #
        for Arch in self.SupArchList:
            for Record in RecordSet:
                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
                    # Record[0] is the raw '<CName>=<GuidValue>' text;
                    # Record[2] is the source line number for error reporting.
                    (Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])
                    MergeArches(Lists, (Name, Value), Arch)
                    if self.IsToDatabase:
                        # Store the parsed name/value back into the record's DB row.
                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
                                        where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3])
                        self.TblDec.Exec(SqlCommand)

        # Select the destination declaration list on the Package object.
        # NOTE(review): for any other Type value ListMember stays None and the
        # append below would raise AttributeError; callers pass only these three.
        ListMember = None
        if Type == TAB_GUIDS:
            ListMember = self.Package.GuidDeclarations
        elif Type == TAB_PROTOCOLS:
            ListMember = self.Package.ProtocolDeclarations
        elif Type == TAB_PPIS:
            ListMember = self.Package.PpiDeclarations

        for Key in Lists.keys():
            ListClass = GuidProtocolPpiCommonClass()
            ListClass.CName = Key[0]
            ListClass.Guid = Key[1]
            ListClass.SupArchList = Lists[Key]
            ListMember.append(ListClass)
+
+
+ ## GenLibraryClasses
+ #
+ # Gen LibraryClasses of Dec
+ # <LibraryClassName>|<LibraryClassInstanceFilename>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
    def GenLibraryClasses(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
        LibraryClasses = {}
        #
        # Get all LibraryClass records
        #
        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]

        #
        # Go through each arch
        #
        for Arch in self.SupArchList:
            for Record in RecordSet:
                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
                    # Each record must be '<LibraryClassName>|<InstanceFilename>'.
                    List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
                    if len(List) != 2:
                        RaiseParserError(Record[0], 'LibraryClasses', ContainerFile, '<LibraryClassName>|<LibraryClassInstanceFilename>', Record[2])
                    else:
                        # Verify the recommended instance file actually exists.
                        CheckFileExist(self.Identification.FileRelativePath, List[1], ContainerFile, 'LibraryClasses', Record[0])
                    MergeArches(LibraryClasses, (List[0], List[1]), Arch)
                    if self.IsToDatabase:
                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
                                        where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(List[0]), ConvertToSqlString2(List[1]), SUP_MODULE_LIST_STRING, Record[3])
                        self.TblDec.Exec(SqlCommand)


        for Key in LibraryClasses.keys():
            LibraryClass = LibraryClassClass()
            LibraryClass.LibraryClass = Key[0]
            LibraryClass.RecommendedInstance = NormPath(Key[1])
            # DEC declarations always get the full supported-module-type list.
            LibraryClass.SupModuleList = SUP_MODULE_LIST
            LibraryClass.SupArchList = LibraryClasses[Key]
            self.Package.LibraryClassDeclarations.append(LibraryClass)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Dec
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPcds(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+ Pcds = {}
+ PcdToken = {}
+ #
+ # Get all Guids
+ #
+ RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+ RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+ RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+ RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+ RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet1:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet2:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet3:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet4:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet5:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ #
+ # Update to database
+ #
+ if self.IsToDatabase:
+ for Key in PcdToken.keys():
+ SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblDec.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key)
+ self.TblDec.Exec(SqlCommand)
+
+ for Key in Pcds.keys():
+ Pcd = PcdClass()
+ Pcd.CName = Key[1]
+ Pcd.Token = Key[4]
+ Pcd.TokenSpaceGuidCName = Key[0]
+ Pcd.DatumType = Key[3]
+ Pcd.DefaultValue = Key[2]
+ Pcd.ItemType = Key[5]
+ Pcd.SupArchList = Pcds[Key]
+ self.Package.PcdDeclarations.append(Pcd)
+
+ ## Show detailed information of Package
+ #
+ # Print all members and their values of Package class
+ #
    def ShowPackage(self):
        # Debug helper: dump every field of the generated PackageClass to stdout.
        M = self.Package
        # Header information is stored per arch.
        for Arch in M.Header.keys():
            print '\nArch =', Arch
            print 'Filename =', M.Header[Arch].FileName
            print 'FullPath =', M.Header[Arch].FullPath
            print 'BaseName =', M.Header[Arch].Name
            print 'Guid =', M.Header[Arch].Guid
            print 'Version =', M.Header[Arch].Version
            print 'DecSpecification =', M.Header[Arch].DecSpecification
        print '\nIncludes =', M.Includes
        for Item in M.Includes:
            print Item.FilePath, Item.SupArchList
        print '\nGuids =', M.GuidDeclarations
        for Item in M.GuidDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nProtocols =', M.ProtocolDeclarations
        for Item in M.ProtocolDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nPpis =', M.PpiDeclarations
        for Item in M.PpiDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nLibraryClasses =', M.LibraryClassDeclarations
        for Item in M.LibraryClassDeclarations:
            print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
        print '\nPcds =', M.PcdDeclarations
        for Item in M.PcdDeclarations:
            print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    # Self-test: parse Nt32Pkg.dec from the workspace into a local 'Dec.db'
    # database and dump the resulting package object.
    # NOTE(review): assumes the WORKSPACE environment variable is set;
    # os.path.join fails with None otherwise.
    W = os.getenv('WORKSPACE')
    F = os.path.join(W, 'Nt32Pkg/Nt32Pkg.dec')

    Db = Database.Database('Dec.db')
    Db.InitDatabase()

    P = Dec(os.path.normpath(F), True, True, W, Db)
    P.ShowPackage()

    Db.Close()
diff --git a/BaseTools/Source/Python/Common/DecClassObjectLight.py b/BaseTools/Source/Python/Common/DecClassObjectLight.py new file mode 100644 index 0000000000..7c572a56f0 --- /dev/null +++ b/BaseTools/Source/Python/Common/DecClassObjectLight.py @@ -0,0 +1,580 @@ +## @file
+# This file is used to define each component of DEC file in light mode
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from Misc import GetFiles
+from String import *
+from DataType import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass import CommonClass
+from BuildToolError import *
+from Parsing import *
+
# Global variable: maps an upper-cased DEC section name (e.g. 'DEFINES',
# 'INCLUDES') to its MODEL_* identifier.  Used as the key space for
# Dec.RecordSet and Dec.SectionHeaderCommentDict.
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
           TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
           TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
           TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
           TAB_GUIDS.upper() : MODEL_EFI_GUID,
           TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
           TAB_PPIS.upper() : MODEL_EFI_PPI,
           TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
           TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
           TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
           TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
           TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
           }
+
+## DecObject
+#
+# This class defined basic Dec object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
class DecObject(object):
    ## Constructor
    #
    # Bug fix: the original called 'object.__init__()' without an instance,
    # which raises "TypeError: descriptor '__init__' of 'object' object
    # needs an argument" the moment a DecObject is instantiated directly.
    # The instance must be passed explicitly.
    #
    def __init__(self):
        object.__init__(self)
+
+## Dec
+#
+# This class defined the structure used in Dec object
+#
+# @param DecObject: Inherited from DecObject class
+# @param Filename: Input value for Filename of Dec file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# Fales is not to merge all arches
+# default is False
+# @param IsToPackage: Input value for IsToPackage
+# True is to transfer to PackageObject automatically
+# False is not to transfer to PackageObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var Defines: To store value for Defines, it is a structure as DecDefines
+# @var UserExtensions: To store value for UserExtensions
+# @var Package: To store value for Package, it is a structure as PackageClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var Contents: To store value for Contents, it is a structure as DecContents
+# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
+#
class Dec(DecObject):
    def __init__(self, Filename = None, IsToPackage = False, WorkspaceDir = None, AllGuidVersionDict = None, SupArchList = DataType.ARCH_LIST):
        self.Identification = IdentificationClass()
        self.Package = PackageClass()
        self.UserExtensions = ''
        self.WorkspaceDir = WorkspaceDir
        # NOTE(review): the default is the shared DataType.ARCH_LIST object;
        # self.SupArchList must not be mutated in place when the default is used.
        self.SupArchList = SupArchList
        self.AllGuidVersionDict = {}
        if AllGuidVersionDict:
            self.AllGuidVersionDict = AllGuidVersionDict

        self.KeyList = [
            TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
            TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
            TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
        ]
        # Upper all KEYs to ignore case sensitive when parsing
        self.KeyList = map(lambda c: c.upper(), self.KeyList)

        # Init RecordSet: one empty record list per known section model
        self.RecordSet = {}
        for Key in self.KeyList:
            self.RecordSet[Section[Key]] = []

        # Init Comment: section model -> comment text found above that section
        self.SectionHeaderCommentDict = {}

        # Load Dec file if filename is not None
        if Filename != None:
            self.LoadDecFile(Filename)

        # Transfer to Package Object if IsToPackage is True
        if IsToPackage:
            self.DecToPackage()

    ## Load Dec file
    #
    # Load the file if it exists
    #
    # @param Filename: Input value for filename of Dec file
    #
    def LoadDecFile(self, Filename):
        # Insert a record for file
        Filename = NormPath(Filename)
        self.Identification.FullPath = Filename
        (self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
        if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
            self.Identification.PackagePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])

        # Init common datas
        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
        [], [], TAB_UNKNOWN, [], [], []
        LineNo = 0

        # Parse file content
        # NOTE(review): the file handle from open() below is never closed
        # explicitly; it is released only when garbage-collected.
        IsFindBlockComment = False
        ReservedLine = ''
        Comment = ''
        for Line in open(Filename, 'r'):
            LineNo = LineNo + 1
            # Remove comment block: text before the block-comment start marker
            # is kept in ReservedLine and re-joined when the end marker is seen.
            if Line.find(TAB_COMMENT_R8_START) > -1:
                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
                if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
                    Comment = Comment + Line.strip() + '\n'
                    ReservedLine = ''
                else:
                    Comment = Comment + Line[len(ReservedLine):] + '\n'
                IsFindBlockComment = True
                if not ReservedLine:
                    continue
            if Line.find(TAB_COMMENT_R8_END) > -1:
                Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
                ReservedLine = ''
                IsFindBlockComment = False
            if IsFindBlockComment:
                Comment = Comment + Line.strip() + '\n'
                continue

            # Remove comments at tail and remove spaces again
            if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
                Comment = Comment + Line.strip() + '\n'
            Line = CleanString(Line)
            if Line == '':
                continue

            ## Find a new section tab
            # First insert previous section items
            # And then parse the content of the new section
            #
            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
                # Insert items data of previous section
                Model = Section[CurrentSection.upper()]
                InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
                # Parse the new section
                SectionItemList = []
                ArchList = []
                ThirdList = []

                CurrentSection = ''
                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
                for Item in LineList:
                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
                    if CurrentSection == '':
                        CurrentSection = ItemList[0]
                    else:
                        # Every comma-separated entry of one header must share
                        # the same section name.
                        if CurrentSection != ItemList[0]:
                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                    if CurrentSection.upper() not in self.KeyList:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    # Pad so ItemList[1]/[2] are always indexable below.
                    ItemList.append('')
                    ItemList.append('')
                    if len(ItemList) > 5:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    else:
                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                        ArchList.append(ItemList[1].upper())
                        ThirdList.append(ItemList[2])

                # Comment text gathered right above a section header belongs
                # to that section.
                if Comment:
                    if Comment.endswith('\n'):
                        Comment = Comment[:len(Comment) - len('\n')]
                    self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
                    Comment = ''
                continue

            # Not in any defined section
            if CurrentSection == TAB_UNKNOWN:
                ErrorMsg = "%s is not in any defined section" % Line
                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)

            # Add a section item, keeping the comment that preceded it
            SectionItemList.append([Line, LineNo, Comment])
            Comment = ''
            # End of parse
        #End of For

        #
        # Insert items data of last section
        #
        Model = Section[CurrentSection.upper()]
        InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
        if Comment != '':
            self.SectionHeaderCommentDict[Model] = Comment
            Comment = ''

    ## Package Object to DEC file
    #
    # Render a PackageClass object back into DEC file text.
    #
    def PackageToDec(self, Package):
        # Accumulated DEC text
        Dec = ''
        DecList = sdict()
        SectionHeaderCommentDict = {}
        if Package == None:
            return Dec

        PackageHeader = Package.PackageHeader
        TmpList = []
        if PackageHeader.Name:
            TmpList.append(TAB_DEC_DEFINES_PACKAGE_NAME + ' = ' + PackageHeader.Name)
        if PackageHeader.Guid:
            TmpList.append(TAB_DEC_DEFINES_PACKAGE_GUID + ' = ' + PackageHeader.Guid)
        if PackageHeader.Version:
            TmpList.append(TAB_DEC_DEFINES_PACKAGE_VERSION + ' = ' + PackageHeader.Version)
        if PackageHeader.DecSpecification:
            TmpList.append(TAB_DEC_DEFINES_DEC_SPECIFICATION + ' = ' + PackageHeader.DecSpecification)
        if Package.UserExtensions != None:
            for Item in Package.UserExtensions.Defines:
                TmpList.append(Item)
        DecList['Defines'] = TmpList
        if PackageHeader.Description != '':
            SectionHeaderCommentDict['Defines'] = PackageHeader.Description

        for Item in Package.Includes:
            Key = 'Includes.' + Item.SupArchList
            Value = Item.FilePath
            GenMetaDatSectionItem(Key, Value, DecList)

        for Item in Package.GuidDeclarations:
            Key = 'Guids.' + Item.SupArchList
            Value = Item.CName + '=' + Item.Guid
            GenMetaDatSectionItem(Key, Value, DecList)

        for Item in Package.ProtocolDeclarations:
            Key = 'Protocols.' + Item.SupArchList
            Value = Item.CName + '=' + Item.Guid
            GenMetaDatSectionItem(Key, Value, DecList)

        for Item in Package.PpiDeclarations:
            Key = 'Ppis.' + Item.SupArchList
            Value = Item.CName + '=' + Item.Guid
            GenMetaDatSectionItem(Key, Value, DecList)

        for Item in Package.LibraryClassDeclarations:
            Key = 'LibraryClasses.' + Item.SupArchList
            Value = Item.LibraryClass + '|' + Item.RecommendedInstance
            GenMetaDatSectionItem(Key, Value, DecList)

        for Item in Package.PcdDeclarations:
            Key = 'Pcds' + Item.ItemType + '.' + Item.SupArchList
            Value = Item.TokenSpaceGuidCName + '.' + Item.CName
            # Optional fields are appended in '|<Value>|<DatumType>|<Token>' order
            if Item.DefaultValue != '':
                Value = Value + '|' + Item.DefaultValue
            if Item.DatumType != '':
                Value = Value + '|' + Item.DatumType
            if Item.Token != '':
                Value = Value + '|' + Item.Token
            GenMetaDatSectionItem(Key, Value, DecList)

        # Transfer Package to Inf
        for Key in DecList:
            if Key in SectionHeaderCommentDict:
                List = SectionHeaderCommentDict[Key].split('\r')
                for Item in List:
                    Dec = Dec + Item + '\n'
            Dec = Dec + '[' + Key + ']' + '\n'
            for Value in DecList[Key]:
                if type(Value) == type([]):
                    for SubValue in Value:
                        Dec = Dec + ' ' + SubValue + '\n'
                else:
                    Dec = Dec + ' ' + Value + '\n'
            Dec = Dec + '\n'

        return Dec

    ## Transfer to Package Object
    #
    # Transfer all contents of a Dec file to a standard Package Object
    #
    def DecToPackage(self):
        # Init global information for the file
        ContainerFile = self.Identification.FullPath

        # Generate Package Header
        self.GenPackageHeader(ContainerFile)

        # Generate Includes
        # Only for R8
        self.GenIncludes(ContainerFile)

        # Generate Guids
        self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)

        # Generate Protocols
        self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)

        # Generate Ppis
        self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)

        # Generate LibraryClasses
        self.GenLibraryClasses(ContainerFile)

        # Generate Pcds
        self.GenPcds(ContainerFile)

        # Init MiscFiles
        self.GenMiscFiles(ContainerFile)

    ## GenMiscFiles
    #
    # Record every file under the package directory (skipping version-control
    # metadata directories) in self.Package.MiscFiles.
    #
    def GenMiscFiles(self, ContainerFile):
        MiscFiles = MiscFileClass()
        MiscFiles.Name = 'ModuleFiles'
        for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
            File = CommonClass.FileClass()
            File.Filename = Item
            MiscFiles.Files.append(File)
        self.Package.MiscFiles = MiscFiles

    ## Get Package Header
    #
    # Gen Package Header of Dec as <Key> = <Value>
    #
    # @param ContainerFile: The Dec file full path
    #
    def GenPackageHeader(self, ContainerFile):
        EdkLogger.debug(2, "Generate PackageHeader ...")
        #
        # Walk every [Defines] record and pick out the well-known keys;
        # anything unrecognized is preserved as a user-extension define.
        #
        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
        PackageHeader = PackageHeaderClass()
        OtherDefines = []
        for Record in RecordSet:
            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
            if len(ValueList) != 2:
                OtherDefines.append(Record[0])
            else:
                Name = ValueList[0]
                Value = ValueList[1]
                if Name == TAB_DEC_DEFINES_PACKAGE_NAME:
                    PackageHeader.Name = Value
                elif Name == TAB_DEC_DEFINES_PACKAGE_GUID:
                    PackageHeader.Guid = Value
                elif Name == TAB_DEC_DEFINES_PACKAGE_VERSION:
                    PackageHeader.Version = Value
                elif Name == TAB_DEC_DEFINES_DEC_SPECIFICATION:
                    PackageHeader.DecSpecification = Value
                else:
                    OtherDefines.append(Record[0])

        PackageHeader.FileName = self.Identification.FileName
        PackageHeader.FullPath = self.Identification.FullPath
        PackageHeader.RelaPath = self.Identification.RelaPath
        PackageHeader.PackagePath = self.Identification.PackagePath
        PackageHeader.ModulePath = self.Identification.ModulePath
        PackageHeader.CombinePath = os.path.normpath(os.path.join(PackageHeader.PackagePath, PackageHeader.ModulePath, PackageHeader.FileName))

        if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
            PackageHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]

        self.Package.PackageHeader = PackageHeader
        # Unrecognized defines survive as user extensions
        UE = UserExtensionsClass()
        UE.Defines = OtherDefines
        self.Package.UserExtensions = UE


    ## GenIncludes
    #
    # Gen Includes of Dec
    #
    # @param ContainerFile: The Dec file full path
    #
    def GenIncludes(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
        # NOTE(review): 'Includes' is never used below; it appears to be a
        # leftover from the arch-merging variant of this method.
        Includes = {}
        # Get all Includes
        RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]

        # Go through each record; each record carries its own arch in Record[1]
        for Record in RecordSet:
            Arch = Record[1]
            Key = Record[0]
            Include = IncludeClass()
            Include.FilePath = NormPath(Key)
            Include.SupArchList = Arch
            self.Package.Includes.append(Include)

    ## GenGuidProtocolPpis
    #
    # Gen Guids/Protocols/Ppis of Dec
    # <CName>=<GuidValue>
    #
    # @param Type: One of TAB_GUIDS, TAB_PROTOCOLS or TAB_PPIS
    # @param ContainerFile: The Dec file full path
    #
    def GenGuidProtocolPpis(self, Type, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % Type)
        # NOTE(review): 'Lists' is never used below; leftover from the
        # arch-merging variant of this method.
        Lists = {}
        # Get all Items
        RecordSet = self.RecordSet[Section[Type.upper()]]

        # Go through each record
        for Record in RecordSet:
            Arch = Record[1]
            (Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])

            # Pick the destination declaration list for this Type
            ListMember = None
            if Type == TAB_GUIDS:
                ListMember = self.Package.GuidDeclarations
            elif Type == TAB_PROTOCOLS:
                ListMember = self.Package.ProtocolDeclarations
            elif Type == TAB_PPIS:
                ListMember = self.Package.PpiDeclarations

            ListClass = GuidProtocolPpiCommonClass()
            ListClass.CName = Name
            ListClass.Guid = Value
            ListClass.SupArchList = Arch
            ListMember.append(ListClass)

    ## GenLibraryClasses
    #
    # Gen LibraryClasses of Dec
    # <LibraryClassName>|<LibraryClassInstanceFilename>
    #
    # @param ContainerFile: The Dec file full path
    #
    def GenLibraryClasses(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
        # NOTE(review): 'LibraryClasses' is never used below; leftover from
        # the arch-merging variant of this method.
        LibraryClasses = {}
        # Get all LibraryClass records
        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]

        # Go through each record; malformed entries are silently skipped here
        for Record in RecordSet:
            Arch = Record[1]
            List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
            if len(List) != 2:
                continue
            LibraryClass = LibraryClassClass()
            LibraryClass.LibraryClass = List[0]
            LibraryClass.RecommendedInstance = NormPath(List[1])
            LibraryClass.SupArchList = Arch
            self.Package.LibraryClassDeclarations.append(LibraryClass)

    ## AddPcd
    #
    # Helper shared by GenPcds: build one PcdClass from the given fields and
    # append it to self.Package.PcdDeclarations.
    #
    def AddPcd(self, CName, Token, TokenSpaceGuidCName, DatumType, DefaultValue, ItemType, Arch):
        Pcd = CommonClass.PcdClass()
        Pcd.CName = CName
        Pcd.Token = Token
        Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
        Pcd.DatumType = DatumType
        Pcd.DefaultValue = DefaultValue
        Pcd.ItemType = ItemType
        Pcd.SupArchList = Arch
        self.Package.PcdDeclarations.append(Pcd)

    ## GenPcds
    #
    # Gen Pcds of Dec
    # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
    #
    # @param ContainerFile: The Dec file full path
    #
    def GenPcds(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
        # NOTE(review): 'Pcds'/'PcdToken' are never used below; leftovers
        # from the database-backed variant of this method.
        Pcds = {}
        PcdToken = {}
        # Get all Pcds
        RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
        RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
        RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
        RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
        RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]

        # Go through each pcd
        for Record in RecordSet1:
            Arch = Record[1]
            (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
            self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
        for Record in RecordSet2:
            Arch = Record[1]
            (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
            self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
        for Record in RecordSet3:
            Arch = Record[1]
            (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
            self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
        for Record in RecordSet4:
            Arch = Record[1]
            (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
            self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
        for Record in RecordSet5:
            Arch = Record[1]
            (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
            self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)

    ## Show detailed information of Package
    #
    # Print all members and their values of Package class
    #
    def ShowPackage(self):
        M = self.Package
        print 'Filename =', M.PackageHeader.FileName
        print 'FullPath =', M.PackageHeader.FullPath
        print 'RelaPath =', M.PackageHeader.RelaPath
        print 'PackagePath =', M.PackageHeader.PackagePath
        print 'ModulePath =', M.PackageHeader.ModulePath
        print 'CombinePath =', M.PackageHeader.CombinePath

        print 'BaseName =', M.PackageHeader.Name
        print 'Guid =', M.PackageHeader.Guid
        print 'Version =', M.PackageHeader.Version
        print 'DecSpecification =', M.PackageHeader.DecSpecification

        print '\nIncludes ='#, M.Includes
        for Item in M.Includes:
            print Item.FilePath, Item.SupArchList
        print '\nGuids ='#, M.GuidDeclarations
        for Item in M.GuidDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nProtocols ='#, M.ProtocolDeclarations
        for Item in M.ProtocolDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nPpis ='#, M.PpiDeclarations
        for Item in M.PpiDeclarations:
            print Item.CName, Item.Guid, Item.SupArchList
        print '\nLibraryClasses ='#, M.LibraryClassDeclarations
        for Item in M.LibraryClassDeclarations:
            print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
        print '\nPcds ='#, M.PcdDeclarations
        for Item in M.PcdDeclarations:
            print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
        print '\nUserExtensions =', M.UserExtensions.Defines
        print '\n*** FileList ***'
        for Item in M.MiscFiles.Files:
            print Item.Filename
        print '****************\n'
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.QUIET)

    # Self-test: parse MdeModulePkg.dec from the workspace, dump the parsed
    # package, then print the DEC text regenerated from it.
    # NOTE(review): assumes the WORKSPACE environment variable is set.
    W = os.getenv('WORKSPACE')
    F = os.path.join(W, 'MdeModulePkg/MdeModulePkg.dec')

    P = Dec(os.path.normpath(F), True, W)
    P.ShowPackage()
    print P.PackageToDec(P.Package)
diff --git a/BaseTools/Source/Python/Common/Dictionary.py b/BaseTools/Source/Python/Common/Dictionary.py new file mode 100644 index 0000000000..3c968f5ec6 --- /dev/null +++ b/BaseTools/Source/Python/Common/Dictionary.py @@ -0,0 +1,75 @@ +## @file
+# Define a dictionary structure
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import EdkLogger
+from DataType import *
+
## Convert a text file to a dictionary
#
# Parse a text file of (name, value) pairs into Dictionary.  Lines starting
# with CommentCharacter are skipped; the first KeySplitCharacter separates
# key from value; only single-word keys are accepted and the first occurrence
# of a key wins.  Backslashes in values are normalized to forward slashes.
#
# @param FileName:            Path of the text file to parse
# @param Dictionary:          Dict to receive the parsed (key: value) pairs
# @param CommentCharacter:    Character that marks a comment line (e.g. '#')
# @param KeySplitCharacter:   Character separating key from value (e.g. '=')
# @param ValueSplitFlag:      If True, split the value into a list
# @param ValueSplitCharacter: Separator used when ValueSplitFlag is True
#
# @retval 0 Convert successful
# @retval 1 Open file failed
#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    try:
        # Guard only the open(): the original wrapped the whole parse in a
        # bare 'except', misreporting any parsing error as "Open file failed"
        # and leaking the file handle on the error path.
        F = open(FileName, 'r')
    except:
        EdkLogger.info('Open file failed')
        return 1
    try:
        Keys = []
        for Line in F:
            if Line.startswith(CommentCharacter):
                continue
            LineList = Line.split(KeySplitCharacter, 1)
            if len(LineList) >= 2:
                Key = LineList[0].split()
                # Accept only a single-word, non-comment key seen for the first time
                if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
                    if ValueSplitFlag:
                        Dictionary[Key[0]] = LineList[1].replace('\\', '/').split(ValueSplitCharacter)
                    else:
                        Dictionary[Key[0]] = LineList[1].strip().replace('\\', '/')
                    Keys += [Key[0]]
    finally:
        # Always release the handle, even if parsing raises
        F.close()
    return 0
+
## Print the dictionary
#
# Print every (key, value) pair of the dictionary whose value is not the
# empty string, one "key = value" line per pair.
#
# @param Dict: The dictionary to be printed; None is tolerated and ignored
#
def printDict(Dict):
    # 'is not None' is the correct identity check (was 'Dict != None')
    if Dict is not None:
        # Iterating the dict directly yields its keys; no .keys() list needed
        for Key in Dict:
            if Dict[Key] != '':
                print(Key + ' = ' + str(Dict[Key]))
+
## Print a list
#
# Print all items of the list, preceded by the key as a heading when the
# key contains a TAB_SPLIT separator.  Non-list or empty inputs print nothing.
#
# @param Key:  The key associated with the list
# @param List: The list to be printed
#
def printList(Key, List):
    # isinstance is the idiomatic type check (was 'type(List) == type([])')
    if isinstance(List, list):
        if len(List) > 0:
            # BUGFIX: the original tested 'key.find(...)' with a lower-case
            # 'key' that does not exist (the parameter is 'Key'), raising
            # NameError for every non-empty list.
            if Key.find(TAB_SPLIT) != -1:
                print("\n" + Key)
            for Item in List:
                print(Item)
diff --git a/BaseTools/Source/Python/Common/DscClassObject.py b/BaseTools/Source/Python/Common/DscClassObject.py new file mode 100644 index 0000000000..ddccf6507d --- /dev/null +++ b/BaseTools/Source/Python/Common/DscClassObject.py @@ -0,0 +1,1434 @@ +## @file
+# This file is used to define each component of DSC file
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import EdkLogger as EdkLogger
+import Database
+from String import *
+from Parsing import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from CommonDataClass.PlatformClass import *
+from CommonDataClass.CommonClass import SkuInfoClass
+from BuildToolError import *
+from Misc import sdict
+import GlobalData
+from Table.TableDsc import TableDsc
+
+#
+# Global variable
+#
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
+ TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
+ TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
+ TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
## DscObject
#
# Base class for all DSC-related classes; exists only to root the
# inheritance hierarchy.
#
# @param object: Inherited from object class
#
class DscObject(object):
    def __init__(self):
        # BUGFIX: the original called object.__init__() without 'self',
        # which raises TypeError as soon as a DscObject is instantiated.
        object.__init__(self)
+
## Dsc
#
# This class defines the structure used for a parsed DSC file
#
# @param DscObject:    Inherited from DscObject class
# @param Filename:     Full path of the DSC file to load, default is None
# @param IsToDatabase: True to write parsed items back to the workspace
#                      database, default is False
# @param IsToPlatform: True to transfer the contents to a PlatformClass
#                      object automatically, default is False
# @param WorkspaceDir: Current workspace directory, default is None
# @param Database:     Workspace database object providing Cur, TblFile and
#                      TblDsc.  NOTE(review): despite the None default this
#                      parameter is effectively required -- its attributes
#                      are read unconditionally below.
#
# @var _NullClassIndex: Class-level counter used to generate unique NULL
#                       library class names
# @var Identification:  Identification structure of the parsed file
# @var Platform:        PlatformClass object the DSC contents transfer to
# @var WorkspaceDir:    Workspace directory
# @var KeyList:         All section keys recognized in a DSC file (upper-cased)
# @var PcdToken:        Maps DSC-table record IDs to
#                       (TokenSpaceGuidCName, TokenCName) pairs
#
class Dsc(DscObject):
    _NullClassIndex = 0

    def __init__(self, Filename=None, IsToDatabase=False, IsToPlatform=False, WorkspaceDir=None, Database=None):
        self.Identification = Identification()
        self.Platform = PlatformClass()
        self.UserExtensions = ''
        self.WorkspaceDir = WorkspaceDir
        self.IsToDatabase = IsToDatabase

        # Handles into the workspace database
        self.Cur = Database.Cur
        self.TblFile = Database.TblFile
        self.TblDsc = Database.TblDsc

        self.KeyList = [
            TAB_SKUIDS, TAB_LIBRARIES, TAB_LIBRARY_CLASSES, TAB_BUILD_OPTIONS, TAB_PCDS_FIXED_AT_BUILD_NULL, \
            TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
            TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_HII_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, \
            TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, \
            TAB_COMPONENTS, TAB_DSC_DEFINES
        ]

        self.PcdToken = {}

        # Upper-case all keys so section parsing is case-insensitive
        self.KeyList = [Key.upper() for Key in self.KeyList]

        # Load the DSC file if a filename was given
        if Filename is not None:
            self.LoadDscFile(Filename)

        # Transfer contents to the Platform object on request
        if IsToPlatform:
            self.DscToPlatform()
+
+ ## Transfer to Platform Object
+ #
+ # Transfer all contents of an Inf file to a standard Module Object
+ #
+ def DscToPlatform(self):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = self.Identification.FileFullPath
+
+ #
+ # Generate Platform Header
+ #
+ self.GenPlatformHeader(ContainerFile)
+
+ #
+ # Generate BuildOptions
+ #
+ self.GenBuildOptions(ContainerFile)
+
+ #
+ # Generate SkuInfos
+ #
+ self.GenSkuInfos(ContainerFile)
+
+ #
+ # Generate Libraries
+ #
+ self.GenLibraries(ContainerFile)
+
+ #
+ # Generate LibraryClasses
+ #
+ self.GenLibraryClasses(ContainerFile)
+
+ #
+ # Generate Pcds
+ #
+ self.GenPcds(DataType.TAB_PCDS_FIXED_AT_BUILD, ContainerFile)
+ self.GenPcds(DataType.TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile)
+ self.GenFeatureFlagPcds(DataType.TAB_PCDS_FEATURE_FLAG, ContainerFile)
+ self.GenDynamicDefaultPcds(DataType.TAB_PCDS_DYNAMIC_DEFAULT, ContainerFile)
+ self.GenDynamicDefaultPcds(DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT, ContainerFile)
+ self.GenDynamicHiiPcds(DataType.TAB_PCDS_DYNAMIC_HII, ContainerFile)
+ self.GenDynamicHiiPcds(DataType.TAB_PCDS_DYNAMIC_EX_HII, ContainerFile)
+ self.GenDynamicVpdPcds(DataType.TAB_PCDS_DYNAMIC_VPD, ContainerFile)
+ self.GenDynamicVpdPcds(DataType.TAB_PCDS_DYNAMIC_EX_VPD, ContainerFile)
+
+ #
+ # Generate Components
+ #
+ self.GenComponents(ContainerFile)
+
+ #
+ # Update to database
+ #
+ if self.IsToDatabase:
+ for Key in self.PcdToken.keys():
+ SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblDsc.Table, ".".join((self.PcdToken[Key][0], self.PcdToken[Key][1])), Key)
+ self.TblDsc.Exec(SqlCommand)
+ #End of DscToPlatform
+
    ## Get Platform Header
    #
    # Gen Platform Header of Dsc as <Key> = <Value>
    #
    # First normalizes every [Defines] record in the database into separate
    # key/value columns, then builds one PlatformHeaderClass per architecture
    # from the stored defines.
    #
    # @param ContainerFile: The Dsc file full path, used for error report
    #
    def GenPlatformHeader(self, ContainerFile):
        EdkLogger.debug(2, "Generate PlatformHeader ...")
        #
        # Update all defines item in database
        #
        SqlCommand = """select ID, Value1, Arch, StartLine from %s
                        where Model = %s
                        and BelongsToFile = %s
                        and Enabled > -1""" % (self.TblDsc.Table, MODEL_META_DATA_HEADER, self.FileID)
        RecordSet = self.TblDsc.Exec(SqlCommand)
        for Record in RecordSet:
            # Each record's Value1 holds the raw "<Key> = <Value>" text
            ValueList = GetSplitValueList(Record[1], TAB_EQUAL_SPLIT)
            if len(ValueList) != 2:
                RaiseParserError(Record[1], 'Defines', ContainerFile, '<Key> = <Value>', Record[3])
            ID, Value1, Value2, Arch = Record[0], ValueList[0], ValueList[1], Record[2]
            SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
                            where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
            self.TblDsc.Exec(SqlCommand)

        #
        # Get detailed information
        #
        for Arch in DataType.ARCH_LIST:
            PlatformHeader = PlatformHeaderClass()

            # Single-valued defines: take the first query result
            PlatformHeader.Name = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_NAME, Arch, self.FileID)[0]
            PlatformHeader.Guid = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_GUID, Arch, self.FileID)[0]
            PlatformHeader.Version = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_VERSION, Arch, self.FileID)[0]
            PlatformHeader.FileName = self.Identification.FileName
            PlatformHeader.FullPath = self.Identification.FileFullPath
            PlatformHeader.DscSpecification = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_DSC_SPECIFICATION, Arch, self.FileID)[0]

            # Multi-valued defines: keep the whole list
            PlatformHeader.SkuIdName = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_SKUID_IDENTIFIER, Arch, self.FileID)
            PlatformHeader.SupArchList = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES, Arch, self.FileID)
            PlatformHeader.BuildTargets = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BUILD_TARGETS, Arch, self.FileID)
            PlatformHeader.OutputDirectory = NormPath(QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_OUTPUT_DIRECTORY, Arch, self.FileID)[0])
            PlatformHeader.BuildNumber = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BUILD_NUMBER, Arch, self.FileID)[0]
            PlatformHeader.MakefileName = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_MAKEFILE_NAME, Arch, self.FileID)[0]

            PlatformHeader.BsBaseAddress = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BS_BASE_ADDRESS, Arch, self.FileID)[0]
            PlatformHeader.RtBaseAddress = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_RT_BASE_ADDRESS, Arch, self.FileID)[0]

            self.Platform.Header[Arch] = PlatformHeader
        # NOTE(review): the FDF lookup below runs once after the loop and
        # reuses the loop's last Arch value -- confirm this is intentional
        Fdf = PlatformFlashDefinitionFileClass()
        Fdf.FilePath = NormPath(QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_FLASH_DEFINITION, Arch, self.FileID)[0])
        self.Platform.FlashDefinitionFile = Fdf
+
    ## GenBuildOptions
    #
    # Gen BuildOptions of Dsc
    # [<Family>:]<ToolFlag>=Flag
    #
    # Collects build options from the [BuildOptions] section and from any
    # !include'd files, merges them per architecture, and appends each unique
    # (Family, ToolChain, Flag) tuple to the platform's build option list.
    #
    # @param ContainerFile: The Dsc file full path, used for error report
    #
    def GenBuildOptions(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
        BuildOptions = {}
        #
        # Get all include files
        #
        IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_META_DATA_BUILD_OPTION, self.FileID)

        #
        # Get all BuildOptions
        #
        RecordSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_BUILD_OPTION, -1, self.FileID)

        #
        # Go through each arch
        #
        for Arch in DataType.ARCH_LIST:
            # Options coming from !include'd files (matched by arch or COMMON)
            for IncludeFile in IncludeFiles:
                if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
                    Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_BUILD_OPTIONS, '', IncludeFile[2])
                    for NewItem in open(Filename, 'r').readlines():
                        if CleanString(NewItem) == '':
                            continue
                        (Family, ToolChain, Flag) = GetBuildOption(NewItem, Filename, -1)
                        MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)

            # Options recorded directly in the DSC table
            for Record in RecordSet:
                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
                    (Family, ToolChain, Flag) = GetBuildOption(Record[0], ContainerFile, Record[2])
                    MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)
                    #
                    # Update to Database
                    #
                    if self.IsToDatabase:
                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
                                        where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(Family), ConvertToSqlString2(ToolChain), ConvertToSqlString2(Flag), Record[3])
                        self.TblDsc.Exec(SqlCommand)

        # One BuildOptionClass per unique option tuple, carrying its arch set
        for Key in BuildOptions.keys():
            BuildOption = BuildOptionClass(Key[0], Key[1], Key[2])
            BuildOption.SupArchList = BuildOptions[Key]
            self.Platform.BuildOptions.BuildOptionList.append(BuildOption)
+
    ## GenSkuInfos
    #
    # Gen SkuInfos of Dsc
    # <Integer>|<UiName>
    #
    # Collects SKU definitions from the [SkuIds] section and any !include'd
    # files into self.Platform.SkuInfos.SkuInfoList (UiName -> Integer).
    #
    # @param ContainerFile: The Dsc file full path, used for error report
    #
    def GenSkuInfos(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_SKUIDS)
        #
        # SkuIds
        # <Integer>|<UiName>
        #
        # The DEFAULT SKU (id 0) is always present
        self.Platform.SkuInfos.SkuInfoList['DEFAULT'] = '0'

        #
        # Get all include files
        #
        IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_SKU_ID, self.FileID)

        #
        # Get all SkuInfos
        #
        RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_SKU_ID, -1, self.FileID)

        #
        # Go through each arch
        #
        for Arch in DataType.ARCH_LIST:
            # SKUs coming from !include'd files (matched by arch or COMMON)
            for IncludeFile in IncludeFiles:
                if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
                    Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_SKUIDS, '', IncludeFile[2])
                    for NewItem in open(Filename, 'r').readlines():
                        if CleanString(NewItem) == '':
                            continue
                        List = GetSplitValueList(NewItem)
                        if len(List) != 2:
                            RaiseParserError(NewItem, TAB_SKUIDS, Filename, '<Integer>|<UiName>')
                        else:
                            self.Platform.SkuInfos.SkuInfoList[List[1]] = List[0]

            # SKUs recorded directly in the DSC table
            for Record in RecordSet:
                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
                    List = GetSplitValueList(Record[0])
                    if len(List) != 2:
                        RaiseParserError(Record[0], TAB_SKUIDS, ContainerFile, '<Integer>|<UiName>')
                    else:
                        self.Platform.SkuInfos.SkuInfoList[List[1]] = List[0]
                        #
                        # Update to Database
                        #
                        if self.IsToDatabase:
                            SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
                                            where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(List[0]), ConvertToSqlString2(List[1]), Record[3])
                            self.TblDsc.Exec(SqlCommand)
+
+ ## GenLibraries
+ #
+ # Gen Libraries of Dsc
+ # <PathAndFilename>
+ #
+ # @param ContainerFile: The Dsc file full path
+ #
+ def GenLibraries(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARIES)
+ Libraries = {}
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_LIBRARY_INSTANCE, self.FileID)
+
+ #
+ # Get all Libraries
+ #
+ RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_INSTANCE, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_LIBRARIES, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ MergeArches(Libraries, NewItem, Arch)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ MergeArches(Libraries, Record[0], Arch)
+
+ for Key in Libraries.keys():
+ Library = PlatformLibraryClass()
+ Library.FilePath = NormPath(Key)
+ Library.SupArchList = Libraries[Key]
+ self.Platform.Libraries.LibraryList.append(Library)
+
    ## GenLibraryClasses
    #
    # Get LibraryClasses of Dsc
    # <LibraryClassKeyWord>|<LibraryInstance>
    #
    # Collects library class mappings from the [LibraryClasses] section and
    # any !include'd files, merges them per architecture, and appends one
    # PlatformLibraryClass per unique mapping to the platform's class list.
    #
    # @param ContainerFile: The Dsc file full path, used for error report
    #
    def GenLibraryClasses(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
        LibraryClasses = {}
        #
        # Get all include files
        #
        IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_LIBRARY_CLASS, self.FileID)

        #
        # Get all LibraryClasses
        #
        RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_CLASS, -1, self.FileID)

        #
        # Go through each arch
        #
        for Arch in DataType.ARCH_LIST:
            # Mappings coming from !include'd files (matched by arch or COMMON)
            for IncludeFile in IncludeFiles:
                if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
                    Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_LIBRARY_CLASSES, '', IncludeFile[2])
                    for NewItem in open(Filename, 'r').readlines():
                        if CleanString(NewItem) == '':
                            continue
                        # IncludeFile[4] carries the section's module-type restriction
                        MergeArches(LibraryClasses, GetLibraryClass([NewItem, IncludeFile[4]], Filename, self.WorkspaceDir, -1), Arch)

            # Mappings recorded directly in the DSC table
            for Record in RecordSet:
                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
                    (LibClassName, LibClassIns, SupModelList) = GetLibraryClass([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
                    MergeArches(LibraryClasses, (LibClassName, LibClassIns, SupModelList), Arch)
                    #
                    # Update to Database
                    #
                    if self.IsToDatabase:
                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
                                        where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(LibClassName), ConvertToSqlString2(LibClassIns), ConvertToSqlString2(SupModelList), Record[3])
                        self.TblDsc.Exec(SqlCommand)

        # One PlatformLibraryClass per unique (name, instance, models) tuple
        for Key in LibraryClasses.keys():
            Library = PlatformLibraryClass()
            Library.Name = Key[0]
            Library.FilePath = NormPath(Key[1])
            Library.SupModuleList = GetSplitValueList(Key[2])
            Library.SupArchList = LibraryClasses[Key]
            self.Platform.LibraryClasses.LibraryList.append(Library)
+
+ ## Gen Pcds
+ #
+ # Gen Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ if Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
+ Model = MODEL_PCD_PATCHABLE_IN_MODULE
+ elif Type == DataType.TAB_PCDS_FIXED_AT_BUILD:
+ Model = MODEL_PCD_FIXED_AT_BUILD
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all Pcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type) = GetPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type) = GetPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Key in Pcds:
+ Pcd = PcdClass(Key[0], '', Key[1], Key[3], Key[4], Key[2], Key[5], [], {}, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen FeatureFlagPcds
+ #
+ # Gen FeatureFlagPcds of Dsc file as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenFeatureFlagPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ if Type == DataType.TAB_PCDS_FEATURE_FLAG:
+ Model = MODEL_PCD_FEATURE_FLAG
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all FeatureFlagPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (TokenName, TokenGuidCName, Value, Type) = GetFeatureFlagPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (TokenName, TokenGuidCName, Value, Type) = GetFeatureFlagPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Key in Pcds:
+ Pcd = PcdClass(Key[0], '', Key[1], '', '', Key[2], Key[3], [], {}, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicDefaultPcds
+ #
+ # Gen DynamicDefaultPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicDefaultPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_DEFAULT:
+ Model = MODEL_PCD_DYNAMIC_DEFAULT
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT:
+ Model = MODEL_PCD_DYNAMIC_EX_DEFAULT
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicDefaultPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5, K6) = GetDynamicDefaultPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5, K6) = GetDynamicDefaultPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[6], self.Platform.SkuInfos.SkuInfoList, '', '', '', '', '', Key[2])
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], Key[3], Key[4], Key[2], Key[5], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicHiiPcds
+ #
+ # Gen DynamicHiiPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicHiiPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_HII:
+ Model = MODEL_PCD_DYNAMIC_HII
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_HII:
+ Model = MODEL_PCD_DYNAMIC_EX_HII
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicHiiPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5, K6, K7, K8) = GetDynamicHiiPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, K7, K8, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5, K6, K7, K8) = GetDynamicHiiPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, K7, K8, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[8], self.Platform.SkuInfos.SkuInfoList, Key[2], Key[3], Key[4], Key[5], '', '')
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], '', Key[6], Key[5], Key[7], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicVpdPcds
+ #
+ # Gen DynamicVpdPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicVpdPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_VPD:
+ Model = MODEL_PCD_DYNAMIC_VPD
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_VPD:
+ Model = MODEL_PCD_DYNAMIC_EX_VPD
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicVpdPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5) = GetDynamicVpdPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5) = GetDynamicVpdPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[5], self.Platform.SkuInfos.SkuInfoList, '', '', '', '', Key[2], '')
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], '', Key[3], '', Key[4], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+
+ ## Get Component
+ #
+ # Get Component section defined in Dsc file
+ #
+ # @param ContainerFile: The file which describes the Components, used for error report
+ #
+ # @retval PlatformModuleClass() A instance for PlatformModuleClass
+ #
+ def GenComponents(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_COMPONENTS)
+ Components = sdict()
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_META_DATA_COMPONENT, self.FileID)
+
+ #
+ # Get all Components
+ #
+ RecordSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_COMPONENT, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_COMPONENTS, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ NewItems = []
+ GetComponents(open(Filename, 'r').read(), TAB_COMPONENTS, NewItems, TAB_COMMENT_SPLIT)
+ for NewComponent in NewItems:
+ MergeArches(Components, self.GenComponent(NewComponent, Filename), Arch)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ Lib, Bo, Pcd = [], [], []
+
+ SubLibSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_CLASS, Record[3], self.FileID)
+ for SubLib in SubLibSet:
+ Lib.append(TAB_VALUE_SPLIT.join([SubLib[0],SubLib[4]]))
+
+ SubBoSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_BUILD_OPTION, Record[3], self.FileID)
+ for SubBo in SubBoSet:
+ Bo.append(SubBo[0])
+
+ SubPcdSet1 = QueryDscItem(self.TblDsc, MODEL_PCD_FIXED_AT_BUILD, Record[3], self.FileID)
+ SubPcdSet2 = QueryDscItem(self.TblDsc, MODEL_PCD_PATCHABLE_IN_MODULE, Record[3], self.FileID)
+ SubPcdSet3 = QueryDscItem(self.TblDsc, MODEL_PCD_FEATURE_FLAG, Record[3], self.FileID)
+ SubPcdSet4 = QueryDscItem(self.TblDsc, MODEL_PCD_DYNAMIC_EX_DEFAULT, Record[3], self.FileID)
+ SubPcdSet5 = QueryDscItem(self.TblDsc, MODEL_PCD_DYNAMIC_DEFAULT, Record[3], self.FileID)
+ for SubPcd in SubPcdSet1:
+ Pcd.append([DataType.TAB_PCDS_FIXED_AT_BUILD, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet2:
+ Pcd.append([DataType.TAB_PCDS_PATCHABLE_IN_MODULE, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet3:
+ Pcd.append([DataType.TAB_PCDS_FEATURE_FLAG, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet4:
+ Pcd.append([DataType.TAB_PCDS_DYNAMIC_EX, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet5:
+ Pcd.append([DataType.TAB_PCDS_DYNAMIC, SubPcd[0], SubPcd[3]])
+ Item = [Record[0], Lib, Bo, Pcd]
+ MergeArches(Components, self.GenComponent(Item, ContainerFile), Arch)
+
+ for Key in Components.keys():
+ Key.SupArchList = Components[Key]
+ self.Platform.Modules.ModuleList.append(Key)
+
+ ## Get Component
+ #
+ # Get Component section defined in Dsc file
+ #
+ # @param Item: Contents includes a component block
+ # @param ContainerFile: The file which describes the library class, used for error report
+ #
+ # @retval PlatformModuleClass() A instance for PlatformModuleClass
+ #
    def GenComponent(self, Item, ContainerFile, LineNo = -1):
        # Item is [InfFilename(|ExecFilename), LibraryClassOverrides,
        # BuildOptions, PcdOverrides] as assembled by the caller.
        (InfFilename, ExecFilename) = GetExec(Item[0])
        LibraryClasses = Item[1]    # raw '<ClassName>|<InfFilename>' strings
        BuildOptions = Item[2]      # raw build-option strings
        Pcds = Item[3]              # [Type, '<TSG>.<CName>|...', PcdId] triples
        Component = PlatformModuleClass()
        Component.FilePath = NormPath(InfFilename)
        Component.ExecFilePath = NormPath(ExecFilename)
        # The component INF must use the .inf suffix and exist in the workspace
        CheckFileType(Component.FilePath, '.Inf', ContainerFile, 'component name', Item[0], LineNo)
        CheckFileExist(self.WorkspaceDir, Component.FilePath, ContainerFile, 'component', Item[0], LineNo)
        for Lib in LibraryClasses:
            List = GetSplitValueList(Lib)
            if len(List) != 2:
                RaiseParserError(Lib, 'LibraryClasses', ContainerFile, '<ClassName>|<InfFilename>')
            LibName = List[0]
            LibFile = NormPath(List[1])
            # An empty or NULL class name denotes a NULL library instance;
            # give it a unique synthetic name so several can coexist.
            if LibName == "" or LibName == "NULL":
                LibName = "NULL%d" % self._NullClassIndex
                self._NullClassIndex += 1
            CheckFileType(List[1], '.Inf', ContainerFile, 'library instance of component ', Lib, LineNo)
            CheckFileExist(self.WorkspaceDir, LibFile, ContainerFile, 'library instance of component', Lib, LineNo)
            Component.LibraryClasses.LibraryList.append(PlatformLibraryClass(LibName, LibFile))
        for BuildOption in BuildOptions:
            Key = GetBuildOption(BuildOption, ContainerFile)
            Component.ModuleSaBuildOption.BuildOptionList.append(BuildOptionClass(Key[0], Key[1], Key[2]))
        for Pcd in Pcds:
            Type = Pcd[0]
            List = GetSplitValueList(Pcd[1])
            PcdId = Pcd[2]

            TokenInfo = None
            #
            # For FeatureFlag
            #
            if Type == DataType.TAB_PCDS_FEATURE_FLAG:
                if len(List) != 2:
                    RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>|TRUE/FALSE')

                CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
                TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
                Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', '', List[1], Type, [], {}, []))
            #
            # For FixedAtBuild or PatchableInModule
            #
            if Type == DataType.TAB_PCDS_FIXED_AT_BUILD or Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
                # Pad with an empty MaxDatumSize so both the 2- and 3-field
                # input forms end up with at least 3 entries
                List.append('')
                if len(List) != 3 and len(List) != 4:
                    RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>|<Value>[|<MaxDatumSize>]')

                CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
                TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
                Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', List[2], List[1], Type, [], {}, []))

            #
            # For Dynamic or DynamicEx
            #
            if Type == DataType.TAB_PCDS_DYNAMIC or Type == DataType.TAB_PCDS_DYNAMIC_EX:
                if len(List) != 1:
                    RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>')

                CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
                TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
                Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', '', '', Type, [], {}, []))

            #
            # Add to PcdToken
            #
            # NOTE(review): TokenInfo stays None if Type matched none of the
            # branches above; the caller only passes the five known PCD types,
            # so the subscript below is assumed safe -- confirm if new types
            # are ever added.
            self.PcdToken[PcdId] = (TokenInfo[0], TokenInfo[1])

        return Component
    #End of GenComponent
+
+ ## Gen SkuInfoList
+ #
+ # Gen SkuInfoList section defined in Dsc file
+ #
+ # @param SkuNameList: Input value for SkuNameList
+ # @param SkuInfo: Input value for SkuInfo
+ # @param VariableName: Input value for VariableName
+ # @param VariableGuid: Input value for VariableGuid
+ # @param VariableOffset: Input value for VariableOffset
+ # @param HiiDefaultValue: Input value for HiiDefaultValue
+ # @param VpdOffset: Input value for VpdOffset
+ # @param DefaultValue: Input value for DefaultValue
+ #
+ # @retval (False, SkuName) Not found in section SkuId Dsc file
+ # @retval (True, SkuInfoList) Found in section SkuId of Dsc file
+ #
+ def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName = '', VariableGuid = '', VariableOffset = '', HiiDefaultValue = '', VpdOffset = '', DefaultValue = ''):
+ SkuNameList = GetSplitValueList(SkuNameList)
+ if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:
+ SkuNameList = ['DEFAULT']
+ SkuInfoList = {}
+ for Item in SkuNameList:
+ if Item not in SkuInfo:
+ return False, Item
+ Sku = SkuInfoClass(Item, SkuInfo[Item], VariableName, VariableGuid, VariableOffset, HiiDefaultValue, VpdOffset, DefaultValue)
+ SkuInfoList[Item] = Sku
+
+ return True, SkuInfoList
+
+ ## Parse Include statement
+ #
+ # Get include file path
+ #
+ # 1. Insert a record into TblFile ???
+ # 2. Insert a record into TblDsc
+ # Value1: IncludeFilePath
+ #
    # @param LineValue: The line of the include statement
+ def ParseInclude(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "!include statement '%s' found in section %s" % (LineValue, SectionName))
+ SectionModel = Section[SectionName.upper()]
+ IncludeFile = CleanString(LineValue[LineValue.upper().find(DataType.TAB_INCLUDE.upper() + ' ') + len(DataType.TAB_INCLUDE + ' ') : ])
+ Table.Insert(Model, IncludeFile, '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Parse DEFINE statement
+ #
+ # Get DEFINE macros
+ #
+ # 1. Insert a record into TblDsc
+ # Value1: Macro Name
+ # Value2: Macro Value
+ #
+ def ParseDefine(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
+ SectionModel = Section[SectionName.upper()]
+ Define = GetSplitValueList(CleanString(LineValue[LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ') : ]), TAB_EQUAL_SPLIT, 1)
+ Table.Insert(Model, Define[0], Define[1], '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Parse Defines section
+ #
+ # Get one item in defines section
+ #
+ # Value1: Item Name
+ # Value2: Item Value
+ #
+ def ParseDefinesSection(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "Parse '%s' found in section %s" % (LineValue, SectionName))
+ Defines = GetSplitValueList(LineValue, TAB_EQUAL_SPLIT, 1)
+ if len(Defines) != 2:
+ RaiseParserError(LineValue, SectionName, Filename, '', StartLine)
+ self.TblDsc.Insert(Model, Defines[0], Defines[1], '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Insert conditional statements
+ #
+ # Pop an item from IfDefList
+ # Insert conditional statements to database
+ #
+ # @param Filename: Path of parsing file
+ # @param IfDefList: A list stored current conditional statements
+ # @param EndLine: The end line no
+ # @param ArchList: Support arch list
+ #
    def InsertConditionalStatement(self, Filename, FileID, BelongsToItem, IfDefList, EndLine, ArchList):
        # Pop the innermost open conditional from IfDefList and store it as a
        # record spanning its start line to EndLine.  Errors out when there is
        # no open conditional to close (unbalanced !endif/!else).
        (Value1, Value2, Value3, Model, StartColumn, EndColumn, Enabled) = ('', '', '', -1, -1, -1, 0)
        if IfDefList == []:
            ErrorMsg = 'Not suited conditional statement in file %s' % Filename
            EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, Filename, RaiseError = EdkLogger.IsRaiseError)
        else:
            #
            # Get New Dsc item ID
            #
            # NOTE(review): DscID is computed but never used below.
            DscID = self.TblDsc.GetCount() + 1

            #
            # Pop the conditional statements which is closed
            #
            PreviousIf = IfDefList.pop()
            EdkLogger.debug(EdkLogger.DEBUG_5, 'Previous IfDef: ' + str(PreviousIf))

            #
            # !ifdef and !ifndef
            #
            # PreviousIf is (ConditionText, StartLine, Model) as appended by
            # InsertSectionItemsIntoDatabase.
            if PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF):
                Value1 = PreviousIf[0]
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
            #
            # !if and !elseif
            #
            # Condition text is '<Macro> <Operator> <Value>'; Value3 is the
            # literal with surrounding quotes removed.
            # NOTE(review): the tuple includes Model (-1 at this point), which
            # makes the membership test effectively single-valued -- confirm
            # intent.
            elif PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, Model):
                List = PreviousIf[0].split(' ')
                Value1 = List[0]
                Value2 = List[1]
                Value3 = List[2]
                Value3 = SplitString(Value3)
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
            #
            # !else
            #
            elif PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, Model):
                Value1 = PreviousIf[0].strip()
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
+
+ ## Load Dsc file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Dsc file
+ #
    def LoadDscFile(self, Filename):
        # Read the DSC file line by line, strip R8-style block comments and
        # tail comments, split the content into [Section] groups and insert
        # every item into the DSC table; finally evaluate conditional
        # directives and expand DEFINE macros.
        #
        # Insert a record for file
        #
        Filename = NormPath(Filename)
        self.Identification.FileFullPath = Filename
        (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
        self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)

        #
        # Init DscTable
        #
        #self.TblDsc.Table = "Dsc%s" % FileID
        #self.TblDsc.Create()

        #
        # Init common datas
        #
        # NOTE(review): IncludeFiles is initialized but never used below.
        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
        [], [], TAB_UNKNOWN, [], [], []
        LineNo = 0

        #
        # Parse file content
        #
        IsFindBlockComment = False
        ReservedLine = ''
        for Line in open(Filename, 'r'):
            LineNo = LineNo + 1
            #
            # Remove comment block
            #
            # Text before the block-comment opener is saved and re-joined with
            # the text after the closer, so a comment spanning lines collapses
            # into one logical line.
            if Line.find(TAB_COMMENT_R8_START) > -1:
                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
                IsFindBlockComment = True
            if Line.find(TAB_COMMENT_R8_END) > -1:
                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
                ReservedLine = ''
                IsFindBlockComment = False
            if IsFindBlockComment:
                continue

            #
            # Remove comments at tail and remove spaces again
            #
            Line = CleanString(Line)
            if Line == '':
                continue

            #
            # Find a new section tab
            # First insert previous section items
            # And then parse the content of the new section
            #
            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
                #
                # Insert items data of previous section
                #
                self.InsertSectionItemsIntoDatabase(self.FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
                #
                # Parse the new section
                #
                SectionItemList = []
                ArchList = []
                ThirdList = []

                CurrentSection = ''
                # A header may declare several comma-separated sub-sections,
                # e.g. [LibraryClasses.IA32, LibraryClasses.X64]; each item is
                # Name[.Arch[.Third]].  All items must share the same Name.
                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
                for Item in LineList:
                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
                    if CurrentSection == '':
                        CurrentSection = ItemList[0]
                    else:
                        if CurrentSection != ItemList[0]:
                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                    if CurrentSection.upper() not in self.KeyList:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                        CurrentSection = TAB_UNKNOWN
                        continue
                    # Pad so ItemList always has Arch and Third slots
                    ItemList.append('')
                    ItemList.append('')
                    if len(ItemList) > 5:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    else:
                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                        ArchList.append(ItemList[1].upper())
                        ThirdList.append(ItemList[2])

                continue

            #
            # Not in any defined section
            #
            if CurrentSection == TAB_UNKNOWN:
                ErrorMsg = "%s is not in any defined section" % Line
                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)

            #
            # Add a section item
            #
            SectionItemList.append([Line, LineNo])
            # End of parse
        #End of For

        #
        # Insert items data of last section
        #
        self.InsertSectionItemsIntoDatabase(self.FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)

        #
        # Parse conditional statements
        #
        self.ParseConditionalStatement()

        #
        # Replace all DEFINE macros with its actual values
        #
        #ParseDefineMacro2(self.TblDsc, self.RecordSet, GlobalData.gGlobalDefines)
        ParseDefineMacro(self.TblDsc, GlobalData.gGlobalDefines)
+
+
+ ## ParseConditionalStatement
+ #
+ # Search all conditional statement and disable no match records
+ #
    def ParseConditionalStatement(self):
        # Evaluate the !if/!elif/!ifdef/!ifndef/!else records inserted during
        # parsing against DEFINE records and mark every record inside a
        # not-taken branch with Enabled = -1.
        #
        # Disabled all !if/!elif/!ifdef statements without DEFINE
        #
        SqlCommand = """select A.StartLine, A.EndLine from %s as A
                        where A.Model in (%s, %s, %s)
                        and A.Enabled = 0
                        and A.BelongsToFile = %s
                        and A.Value1 not in (select B.Value1 from %s as B
                                             where B.Model = %s
                                             and B.Enabled = 0
                                             and A.StartLine > B.StartLine
                                             and A.Arch = B.Arch
                                             and A.BelongsToItem = B.BelongsToItem
                                             and A.BelongsToFile = B.BelongsToFile) """ % \
                        (self.TblDsc.Table, \
                         MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF, \
                         self.FileID, \
                         self.TblDsc.Table, \
                         MODEL_META_DATA_DEFINE)
        RecordSet = self.TblDsc.Exec(SqlCommand)
        for Record in RecordSet:
            SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[0], Record[1])
            self.TblDsc.Exec(SqlCommand)

        #
        # Disabled !ifndef with DEFINE
        #
        # A !ifndef whose macro IS defined earlier in the same scope is a
        # not-taken branch.
        SqlCommand = """select A.StartLine, A.EndLine from %s as A
                        where A.Model = %s
                        and A.Enabled = 0
                        and A.BelongsToFile = %s
                        and A.Value1 in (select B.Value1 from %s as B
                                         where B.Model = %s
                                         and B.Enabled = 0
                                         and A.StartLine > B.StartLine
                                         and A.Arch = B.Arch
                                         and A.BelongsToItem = B.BelongsToItem
                                         and A.BelongsToFile = B.BelongsToFile)""" % \
                        (self.TblDsc.Table, \
                         MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF, \
                         self.FileID, \
                         self.TblDsc.Table, \
                         MODEL_META_DATA_DEFINE)
        RecordSet = self.TblDsc.Exec(SqlCommand)
        for Record in RecordSet:
            SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[0], Record[1])
            EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
            # NOTE(review): the sibling loops above use self.TblDsc.Exec;
            # this one calls self.Cur.execute directly -- confirm self.Cur is
            # the same database cursor as the table's.
            self.Cur.execute(SqlCommand)

        #
        # Disabled !if, !elif and !else with un-match value
        #
        # Join each !if/!else record with the DEFINE record of the same macro
        # so the condition can be evaluated (B.Value2 is the macro's value).
        SqlCommand = """select A.Model, A.Value1, A.Value2, A.Value3, A.StartLine, A.EndLine, B.Value2 from %s as A join %s as B
                        where A.Model in (%s, %s)
                        and A.Enabled = 0
                        and A.BelongsToFile = %s
                        and B.Enabled = 0
                        and B.Model = %s
                        and A.Value1 = B.Value1
                        and A.StartLine > B.StartLine
                        and A.BelongsToItem = B.BelongsToItem
                        and A.BelongsToFile = B.BelongsToFile""" % \
                        (self.TblDsc.Table, self.TblDsc.Table, \
                         MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, \
                         self.FileID, MODEL_META_DATA_DEFINE)
        RecordSet = self.TblDsc.Exec(SqlCommand)
        DisabledList = []
        for Record in RecordSet:
            if Record[0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
                if not self.Compare(Record[6], Record[2], Record[3]):
                    SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[4], Record[5])
                    self.TblDsc.Exec(SqlCommand)
                else:
                    # The !if branch was taken, so the matching !else must be
                    # disabled -- remember the macro name.
                    DisabledList.append(Record[1])
                    continue
            if Record[0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE and Record[1] in DisabledList:
                SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[4], Record[5])
                self.TblDsc.Exec(SqlCommand)
+
+ ## Compare
+ #
+ # Compare two values
+ # @param Value1:
+ # @param CompareType:
+ # @param Value2:
+ #
+ def Compare(self, Value1, CompareType, Value2):
+ Command = """Value1 %s Value2""" %CompareType
+ return eval(Command)
+
+ ## First time to insert records to database
+ #
+ # Insert item data of a section to database
+ # @param FileID: The ID of belonging file
+ # @param Filename: The name of belonging file
+ # @param CurrentSection: The name of currect section
+ # @param SectionItemList: A list of items of the section
+ # @param ArchList: A list of arches
+ # @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
+ # @param IfDefList: A list of all conditional statements
+ #
    def InsertSectionItemsIntoDatabase(self, FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList):
        # For every (Arch, Third) pair declared in the section header, walk
        # the collected lines: conditional directives update IfDefList,
        # !include/DEFINE get their own records, and ordinary lines are
        # inserted according to the section type.
        #
        # Insert each item data of a section
        #
        for Index in range(0, len(ArchList)):
            Arch = ArchList[Index]
            Third = ThirdList[Index]
            if Arch == '':
                Arch = TAB_ARCH_COMMON.upper()

            Model = Section[CurrentSection.upper()]
            #Records = self.RecordSet[Model]

            for SectionItem in SectionItemList:
                BelongsToItem, EndLine, EndColumn = -1, -1, -1
                LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]


                EdkLogger.debug(4, "Parsing %s ..." %LineValue)
                # NOTE(review): the directive checks below use substring
                # find(), and '!ifdef'/'!ifndef'/'!elseif' all contain '!if',
                # so the order of these branches (and their 'continue's) is
                # load-bearing.
                #
                # Parse '!ifdef'
                #
                # NOTE(review): this branch slices with len(TAB_IF_N_DEF)
                # ('!ifndef', 7 chars) rather than len(TAB_IF_DEF); for a
                # single space after '!ifdef' the result is the same --
                # confirm for other spacings.
                if LineValue.upper().find(TAB_IF_DEF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF_N_DEF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF))
                    continue

                #
                # Parse '!ifndef'
                #
                if LineValue.upper().find(TAB_IF_N_DEF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF_N_DEF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF))
                    continue

                #
                # Parse '!endif'
                #
                if LineValue.upper().find(TAB_END_IF.upper()) > -1:
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine, Arch)
                    continue
                #
                # Parse '!if'
                #
                if LineValue.upper().find(TAB_IF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IF))
                    continue

                #
                # Parse '!elseif'
                #
                # Close the previous branch at the line above, then open a new
                # !if-style branch for the !elseif condition.
                if LineValue.upper().find(TAB_ELSE_IF.upper()) > -1:
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine - 1, Arch)
                    IfDefList.append((LineValue[len(TAB_ELSE_IF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IF))
                    continue

                #
                # Parse '!else'
                #
                # Reuse the macro name of the innermost open conditional as
                # the !else record's key.
                if LineValue.upper().find(TAB_ELSE.upper()) > -1:
                    Key = IfDefList[-1][0].split(' ' , 1)[0].strip()
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine, Arch)
                    IfDefList.append((Key, StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE))
                    continue

                #
                # Parse !include statement first
                #
                if LineValue.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1:
                    self.ParseInclude(LineValue, StartLine, self.TblDsc, FileID, Filename, CurrentSection, MODEL_META_DATA_INCLUDE, Arch)
                    continue

                #
                # And then parse DEFINE statement
                #
                if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                    self.ParseDefine(LineValue, StartLine, self.TblDsc, FileID, Filename, CurrentSection, MODEL_META_DATA_DEFINE, Arch)
                    continue

                #
                # At last parse other sections
                #
                if CurrentSection == TAB_LIBRARY_CLASSES or CurrentSection in TAB_PCD_DYNAMIC_TYPE_LIST or CurrentSection in TAB_PCD_DYNAMIC_EX_TYPE_LIST:
                    ID = self.TblDsc.Insert(Model, LineValue, Third, '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                    #Records.append([LineValue, Arch, StartLine, ID, Third])
                    continue
                elif CurrentSection != TAB_COMPONENTS:
                    ID = self.TblDsc.Insert(Model, LineValue, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                    #Records.append([LineValue, Arch, StartLine, ID, Third])
                    continue

                #
                # Parse COMPONENT section
                #
                # NOTE(review): this runs inside the per-line loop but parses
                # the whole SectionItemList each time -- confirm components
                # are not inserted once per component line.
                if CurrentSection == TAB_COMPONENTS:
                    Components = []
                    GetComponent(SectionItemList, Components)
                    for Component in Components:
                        EdkLogger.debug(4, "Parsing component %s ..." %Component)
                        DscItmeID = self.TblDsc.Insert(MODEL_META_DATA_COMPONENT, Component[0], '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[1]:
                            List = GetSplitValueList(Item, MaxSplit = 2)
                            LibName, LibIns = '', ''
                            if len(List) == 2:
                                LibName = List[0]
                                LibIns = List[1]
                            else:
                                LibName = List[0]
                            self.TblDsc.Insert(MODEL_EFI_LIBRARY_CLASS, LibName, LibIns, '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[2]:
                            self.TblDsc.Insert(MODEL_META_DATA_BUILD_OPTION, Item, '', '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[3]:
                            Model = Section[Item[0].upper()]
                            self.TblDsc.Insert(Model, Item[1], '', '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Show detailed information of Dsc
+ #
+ # Print all members and their values of Dsc class
+ #
+ def ShowDsc(self):
+ print TAB_SECTION_START + TAB_INF_DEFINES + TAB_SECTION_END
+ printDict(self.Defines.DefinesDictionary)
+
+ for Key in self.KeyList:
+ for Arch in DataType.ARCH_LIST_FULL:
+ Command = "printList(TAB_SECTION_START + '" + \
+ Key + DataType.TAB_SPLIT + Arch + \
+ "' + TAB_SECTION_END, self.Contents[arch]." + Key + ')'
+ eval(Command)
+
+ ## Show detailed information of Platform
+ #
+ # Print all members and their values of Platform class
+ #
    def ShowPlatform(self):
        # Debug dump: print every member of self.Platform (header, build
        # options, libraries, library classes, dynamic PCDs and components)
        # to stdout, one arch at a time.
        M = self.Platform
        for Arch in M.Header.keys():
            print '\nArch =', Arch
            print 'Filename =', M.Header[Arch].FileName
            print 'FullPath =', M.Header[Arch].FullPath
            print 'BaseName =', M.Header[Arch].Name
            print 'Guid =', M.Header[Arch].Guid
            print 'Version =', M.Header[Arch].Version
            print 'DscSpecification =', M.Header[Arch].DscSpecification
            print 'SkuId =', M.Header[Arch].SkuIdName
            print 'SupArchList =', M.Header[Arch].SupArchList
            print 'BuildTargets =', M.Header[Arch].BuildTargets
            print 'OutputDirectory =', M.Header[Arch].OutputDirectory
            print 'BuildNumber =', M.Header[Arch].BuildNumber
            print 'MakefileName =', M.Header[Arch].MakefileName
            print 'BsBaseAddress =', M.Header[Arch].BsBaseAddress
            print 'RtBaseAddress =', M.Header[Arch].RtBaseAddress
            print 'Define =', M.Header[Arch].Define
        print 'Fdf =', M.FlashDefinitionFile.FilePath
        print '\nBuildOptions =', M.BuildOptions, M.BuildOptions.IncludeFiles
        for Item in M.BuildOptions.BuildOptionList:
            print '\t', 'ToolChainFamily =', Item.ToolChainFamily, 'ToolChain =', Item.ToolChain, 'Option =', Item.Option, 'Arch =', Item.SupArchList
        print '\nSkuIds =', M.SkuInfos.SkuInfoList, M.SkuInfos.IncludeFiles
        print '\nLibraries =', M.Libraries, M.Libraries.IncludeFiles
        for Item in M.Libraries.LibraryList:
            print '\t', Item.FilePath, Item.SupArchList, Item.Define
        print '\nLibraryClasses =', M.LibraryClasses, M.LibraryClasses.IncludeFiles
        for Item in M.LibraryClasses.LibraryList:
            print '\t', Item.Name, Item.FilePath, Item.SupModuleList, Item.SupArchList, Item.Define
        print '\nPcds =', M.DynamicPcdBuildDefinitions
        for Item in M.DynamicPcdBuildDefinitions:
            print '\tCname=', Item.CName, 'TSG=', Item.TokenSpaceGuidCName, 'Value=', Item.DefaultValue, 'Token=', Item.Token, 'Type=', Item.ItemType, 'Datum=', Item.DatumType, 'Size=', Item.MaxDatumSize, 'Arch=', Item.SupArchList, Item.SkuInfoList
            for Sku in Item.SkuInfoList.values():
                print '\t\t', str(Sku)
        print '\nComponents =', M.Modules.ModuleList, M.Modules.IncludeFiles
        for Item in M.Modules.ModuleList:
            print '\t', Item.FilePath, Item.ExecFilePath, Item.SupArchList
            for Lib in Item.LibraryClasses.LibraryList:
                print '\t\tLib:', Lib.Name, Lib.FilePath
            for Bo in Item.ModuleSaBuildOption.BuildOptionList:
                print '\t\tBuildOption:', Bo.ToolChainFamily, Bo.ToolChain, Bo.Option
            for Pcd in Item.PcdBuildDefinitions:
                print '\t\tPcd:', Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.MaxDatumSize, Pcd.DefaultValue, Pcd.ItemType
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
##
#
# Self-test entry point: parse Nt32Pkg.dsc from the current workspace into a
# local Dsc.db database and dump the resulting platform.
#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    WorkspaceDir = os.getenv('WORKSPACE')
    DscFilePath = os.path.join(WorkspaceDir, 'Nt32Pkg/Nt32Pkg.dsc')

    Db = Database.Database('Dsc.db')
    Db.InitDatabase()

    Platform = Dsc(os.path.normpath(DscFilePath), True, True, WorkspaceDir, Db)
    Platform.ShowPlatform()

    Db.Close()
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspace.py b/BaseTools/Source/Python/Common/EdkIIWorkspace.py new file mode 100644 index 0000000000..a494e814a6 --- /dev/null +++ b/BaseTools/Source/Python/Common/EdkIIWorkspace.py @@ -0,0 +1,318 @@ +## @file
+# This is the base class for applications that operate on an EDK II Workspace
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, sys, time
+from DataType import *
+
+## EdkIIWorkspace
+#
+# Collect WorkspaceDir from the environment, the Verbose command line flag, and detect an icon bitmap file.
+#
+# @var StartTime: Time of build system starting
+# @var PrintRunTime: Printable time of build system running
+# @var PrintRunStatus: Printable status of build system running
+# @var RunStatus: Status of build system running
+#
+class EdkIIWorkspace:
+ def __init__(self):
+ self.StartTime = time.time()
+ self.PrintRunTime = False
+ self.PrintRunStatus = False
+ self.RunStatus = ''
+
+ #
+ # Check environment valiable 'WORKSPACE'
+ #
+ if os.environ.get('WORKSPACE') == None:
+ print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
+ return False
+
+ self.CurrentWorkingDir = os.getcwd()
+
+ self.WorkspaceDir = os.path.realpath(os.environ.get('WORKSPACE'))
+ (Drive, Path) = os.path.splitdrive(self.WorkspaceDir)
+ if Drive == '':
+ (Drive, CwdPath) = os.path.splitdrive(self.CurrentWorkingDir)
+ if Drive != '':
+ self.WorkspaceDir = Drive + Path
+ else:
+ self.WorkspaceDir = Drive.upper() + Path
+
+ self.WorkspaceRelativeWorkingDir = self.WorkspaceRelativePath (self.CurrentWorkingDir)
+
+ try:
+ #
+ # Load TianoCoreOrgLogo, used for GUI tool
+ #
+ self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)
+ except:
+ self.Icon = None
+
+ self.Verbose = False
+ for Arg in sys.argv:
+ if Arg.lower() == '-v':
+ self.Verbose = True
+
+ ## Close build system
+ #
+ # Close build system and print running time and status
+ #
+ def Close(self):
+ if self.PrintRunTime:
+ Seconds = int(time.time() - self.StartTime)
+ if Seconds < 60:
+ print 'Run Time: %d seconds' % (Seconds)
+ else:
+ Minutes = Seconds / 60
+ Seconds = Seconds % 60
+ if Minutes < 60:
+ print 'Run Time: %d minutes %d seconds' % (Minutes, Seconds)
+ else:
+ Hours = Minutes / 60
+ Minutes = Minutes % 60
+ print 'Run Time: %d hours %d minutes %d seconds' % (Hours, Minutes, Seconds)
+ if self.RunStatus != '':
+ print self.RunStatus
+
+ ## Convert to a workspace relative filename
+ #
+ # Convert a full path filename to a workspace relative filename.
+ #
+ # @param FileName: The filename to be Converted
+ #
+ # @retval None Workspace dir is not found in the full path
+ # @retval string The relative filename
+ #
+ def WorkspaceRelativePath(self, FileName):
+ FileName = os.path.realpath(FileName)
+ if FileName.find(self.WorkspaceDir) != 0:
+ return None
+ return FileName.replace (self.WorkspaceDir, '').strip('\\').strip('/')
+
+ ## Convert to a full path filename
+ #
+ # Convert a workspace relative filename to a full path filename.
+ #
+ # @param FileName: The filename to be Converted
+ #
+ # @retval string The full path filename
+ #
+ def WorkspaceFile(self, FileName):
+ return os.path.realpath(os.path.join(self.WorkspaceDir,FileName))
+
+ ## Convert to a real path filename
+ #
+ # Convert ${WORKSPACE} to real path
+ #
+ # @param FileName: The filename to be Converted
+ #
+ # @retval string The full path filename
+ #
+ def WorkspacePathConvert(self, FileName):
+ return os.path.realpath(FileName.replace(TAB_WORKSPACE, self.WorkspaceDir))
+
+ ## Convert XML into a DOM
+ #
+ # Parse an XML file into a DOM and return the DOM.
+ #
+ # @param FileName: The filename to be parsed
+ #
+ # @retval XmlParseFile (self.WorkspaceFile(FileName))
+ #
+ def XmlParseFile (self, FileName):
+ if self.Verbose:
+ print FileName
+ return XmlParseFile (self.WorkspaceFile(FileName))
+
+ ## Convert a XML section
+ #
+ # Parse a section of an XML file into a DOM(Document Object Model) and return the DOM.
+ #
+ # @param FileName: The filename to be parsed
+ # @param SectionTag: The tag name of the section to be parsed
+ #
+ # @retval XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
+ #
+ def XmlParseFileSection (self, FileName, SectionTag):
+ if self.Verbose:
+ print FileName
+ return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
+
+ ## Save a XML file
+ #
+ # Save a DOM(Document Object Model) into an XML file.
+ #
+ # @param Dom: The Dom to be saved
+ # @param FileName: The filename
+ #
+ # @retval XmlSaveFile (Dom, self.WorkspaceFile(FileName))
+ #
+ def XmlSaveFile (self, Dom, FileName):
+ if self.Verbose:
+ print FileName
+ return XmlSaveFile (Dom, self.WorkspaceFile(FileName))
+
+ ## Convert Text File To Dictionary
+ #
+ # Convert a workspace relative text file to a dictionary of (name:value) pairs.
+ #
+ # @param FileName: Text filename
+ # @param Dictionary: Dictionary to store data
+ # @param CommentCharacter: Comment char, be used to ignore comment content
+ # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+ # @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+ # @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+ #
+ # @retval ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
+ #
+ def ConvertTextFileToDictionary(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
+ if self.Verbose:
+ print FileName
+ return ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
+
+ ## Convert Dictionary To Text File
+ #
+ # Convert a dictionary of (name:value) pairs to a workspace relative text file.
+ #
+ # @param FileName: Text filename
+ # @param Dictionary: Dictionary to store data
+ # @param CommentCharacter: Comment char, be used to ignore comment content
+ # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+ # @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+ # @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+ #
+ # @retval ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
+ #
+ def ConvertDictionaryToTextFile(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
+ if self.Verbose:
+ print FileName
+ return ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
+
+## Convert Text File To Dictionary
+#
+# Convert a text file to a dictionary of (name:value) pairs.
+#
+# @param FileName: Text filename
+# @param Dictionary: Dictionary to store data
+# @param CommentCharacter: Comment char, be used to ignore comment content
+# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+#
+# @retval True Convert successfully
+# @retval False Open file failed
+#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    """Read a 'Name <sep> Value' text file into Dictionary.

    Only lines whose left side is a single token that is not a comment are
    used; the first occurrence of a key wins.  Backslashes in values are
    normalized to forward slashes.  When ValueSplitFlag is true the value is
    stored as a list split on ValueSplitCharacter (unstripped), otherwise as
    a stripped string.

    Returns True on success, False when the file cannot be opened.
    """
    try:
        InputFile = open(FileName, 'r')
    except:
        return False
    SeenKeys = []
    for TextLine in InputFile:
        Pair = TextLine.split(KeySplitCharacter, 1)
        if len(Pair) < 2:
            continue
        KeyTokens = Pair[0].split()
        if len(KeyTokens) != 1:
            continue
        Name = KeyTokens[0]
        if Name[0] == CommentCharacter or Name in SeenKeys:
            continue
        if ValueSplitFlag:
            Dictionary[Name] = Pair[1].replace('\\', '/').split(ValueSplitCharacter)
        else:
            Dictionary[Name] = Pair[1].strip().replace('\\', '/')
        SeenKeys.append(Name)
    InputFile.close()
    return True
+
## Convert Dictionary To Text File
#
# Convert a dictionary of (name : value) pairs to a text file.  Existing
# lines in the file keep their position; keys already present are rewritten
# in place and keys not yet present are appended at the end.
#
# @param FileName:            Text filename
# @param Dictionary:          Dictionary to store data
# @param CommentCharacter:    Comment char, used to ignore comment content
# @param KeySplitCharacter:   Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
# @param ValueSplitFlag:      Value split flag, used to decide if has multiple values
# @param ValueSplitCharacter: Value split char, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
#
# @retval True  Convert successfully
# @retval False Open file for writing failed
#
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    # Read any existing content so unrelated lines (e.g. comments) and the
    # order of existing keys are preserved.  A missing/unreadable file just
    # means we start from scratch.
    try:
        F = open(FileName, 'r')
        try:
            Lines = F.readlines()
        finally:
            F.close()
    except EnvironmentError:
        Lines = []
    # list() so remove() below works under both list (Py2) and view (Py3)
    # semantics of dict.keys().
    Keys = list(Dictionary.keys())
    MaxLength = max(map(len, Keys)) if Keys else 0
    # Rewrite in place every existing line whose key is in Dictionary.
    # NOTE(review): a line whose key repeats an already-written key is popped
    # without reinsertion while Index still advances, so the edit position
    # drifts for later duplicates -- preserved from the original.
    Index = 0
    for Line in Lines:
        LineList = Line.split(KeySplitCharacter, 1)
        if len(LineList) >= 2:
            Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
                if ValueSplitFlag:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, ' '.join(Dictionary[Key[0]]))
                else:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
                Lines.pop(Index)
                if Key[0] in Keys:
                    Lines.insert(Index, Line)
                    Keys.remove(Key[0])
        Index += 1
    # Append keys that were not present in the existing file.
    for RemainingKey in Keys:
        if ValueSplitFlag:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, ' '.join(Dictionary[RemainingKey]))
        else:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
        Lines.append(Line)
    try:
        F = open(FileName, 'w')
    except EnvironmentError:
        return False
    try:
        F.writelines(Lines)
    finally:
        F.close()
    return True
+
## Create a new directory
#
# Create Directory (including any missing intermediate directories) if it
# does not already exist.
#
# @param Directory: Directory to be created
#
def CreateDirectory(Directory):
    # Attempt creation directly instead of pre-checking: the original
    # os.access(F_OK) test raced with concurrent creators (TOCTOU) between
    # the check and makedirs().
    try:
        os.makedirs(Directory)
    except OSError:
        # Path already exists (directory or otherwise): no-op, matching the
        # original behavior.  Any other failure (e.g. permission denied)
        # propagates to the caller, as before.
        if not os.path.exists(Directory):
            raise
+
## Create a new file
#
# Open FileName under Directory with the given mode, creating Directory
# first when it does not exist yet.
#
# @param Directory: Directory the file will live in (created on demand)
# @param FileName:  Name of the file to open
# @param Mode:      Mode passed straight through to open(), default is 'w'
#
def CreateFile(Directory, FileName, Mode='w'):
    CreateDirectory(Directory)
    FullPath = os.path.join(Directory, FileName)
    return open(FullPath, Mode)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Library module: no standalone behavior.  Unit tests could be hooked
    # in here if ever needed.
    pass
\ No newline at end of file diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py new file mode 100644 index 0000000000..82ab1796ad --- /dev/null +++ b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py @@ -0,0 +1,1669 @@ +## @file
+# This file is used to define each component of the build database
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, string, copy, pdb, copy
+import EdkLogger
+import DataType
+from InfClassObject import *
+from DecClassObject import *
+from DscClassObject import *
+from String import *
+from BuildToolError import *
+from Misc import sdict
+import Database as Database
+import time as time
+
## PcdClassObject
#
# This Class is used for PcdObject
#
# @param object:       Inherited from object class
# @param Name:         Input value for Name of Pcd, default is None
# @param Guid:         Input value for Guid of Pcd, default is None
# @param Type:         Input value for Type of Pcd, default is None
# @param DatumType:    Input value for DatumType of Pcd, default is None
# @param Value:        Input value for Value of Pcd, default is None
# @param Token:        Input value for Token of Pcd, default is None
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList:  Input value for SkuInfoList of Pcd, default is None
#                      (treated as an empty dict)
# @param IsOverrided:  Input value for IsOverrided of Pcd, default is False
#
# @var TokenCName:          To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
# @var Type:                To store value for Type
# @var DatumType:           To store value for DatumType
# @var TokenValue:          To store value for TokenValue
# @var MaxDatumSize:        To store value for MaxDatumSize
# @var SkuInfoList:         To store value for SkuInfoList
# @var IsOverrided:         To store value for IsOverrided
# @var Phase:               To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
    def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, IsOverrided = False):
        self.TokenCName = Name
        self.TokenSpaceGuidCName = Guid
        self.Type = Type
        self.DatumType = DatumType
        self.DefaultValue = Value
        self.TokenValue = Token
        self.MaxDatumSize = MaxDatumSize
        # Fresh dict per instance: the original "SkuInfoList = {}" default
        # argument was a single dict shared by every default-constructed Pcd.
        self.SkuInfoList = SkuInfoList if SkuInfoList is not None else {}
        self.IsOverrided = IsOverrided
        self.Phase = "DXE"

    ## Convert the class to a string
    #
    # Convert each member of the class to string
    # Organize to a single line format string
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
              'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
              'Type=' + str(self.Type) + ', ' + \
              'DatumType=' + str(self.DatumType) + ', ' + \
              'DefaultValue=' + str(self.DefaultValue) + ', ' + \
              'TokenValue=' + str(self.TokenValue) + ', ' + \
              'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
        for Item in self.SkuInfoList.values():
            Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
        Rtn = Rtn + str(self.IsOverrided)

        return Rtn

    ## Override __eq__ function
    #
    # Two PCDs are equal when both TokenCName and TokenSpaceGuidCName match.
    #
    # @retval False The two pcds are different
    # @retval True  The two pcds are the same
    #
    def __eq__(self, Other):
        return Other is not None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName

    ## Override __hash__ function
    #
    # Use (TokenCName, TokenSpaceGuidCName) as key in hash table,
    # consistent with __eq__ above.
    #
    # @retval tuple() Key for hash table
    #
    def __hash__(self):
        return hash((self.TokenCName, self.TokenSpaceGuidCName))
+
## LibraryClassObject
#
# This Class defines LibraryClassObject used in BuildDatabase
#
# @param object:     Inherited from object class
# @param Name:       Input value for LibraryClassName, default is None
# @param SupModList: Input value for SupModList, default is None (treated
#                    as an empty list)
# @param Type:       Input value for Type, default is None; when given it
#                    overrides SupModList with the space-separated types
#
# @var LibraryClass: To store value for LibraryClass
# @var SupModList:   To store value for SupModList
#
class LibraryClassObject(object):
    def __init__(self, Name = None, SupModList = None, Type = None):
        self.LibraryClass = Name
        # Fresh list per instance: the original "SupModList = []" default
        # argument was a single list shared by every default-constructed
        # object.
        self.SupModList = SupModList if SupModList is not None else []
        if Type is not None:
            self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
+
## ModuleBuildClassObject
#
# In-memory build description of a single INF module: header/define values,
# source and binary file lists, consumed interfaces (protocols/PPIs/GUIDs),
# library class resolution, PCDs, build options and the dependency
# expression.  Instances are keyed throughout the build database by their
# DescFilePath (the INF file name), which also drives equality and hashing.
#
# @var DescFilePath:         path of the describing INF file (identity key)
# @var BaseName:             module base name from the INF header
# @var ModuleType:           e.g. DXE_DRIVER, PEIM, ...
# @var Guid / Version:       module GUID and version string
# @var PcdIsDriver:          PCD driver marker from the header
# @var BinaryModule:         binary-only module marker
# @var CustomMakefile:       {family : makefile} overrides
# @var Specification:        {spec name : version} values
# @var Shadow:               shadow setting from the header
# @var LibraryClass:         [LibraryClassObject, ...] produced by this module
# @var ModuleEntryPointList / ModuleUnloadImageList: extern image symbols
# @var ConstructorList / DestructorList: extern library symbols
# @var Binaries / Sources:   binary and source file class objects
# @var LibraryClasses:       {(class name, module type) : instance INF}
# @var Libraries:            R8-style library names
# @var Protocols / Ppis / Guids: consumed CNames
# @var Includes / Packages:  include paths and DEC dependencies
# @var Pcds:                 {(PcdCName, PcdGuidCName) : PcdClassObject}
# @var BuildOptions:         {(tool family, tool chain) : option string}
# @var Depex:                dependency expression string
#
class ModuleBuildClassObject(object):
    def __init__(self):
        # --- header / define values ---
        self.AutoGenVersion = 0
        self.DescFilePath = ''
        self.BaseName = ''
        self.ModuleType = ''
        self.Guid = ''
        self.Version = ''
        self.PcdIsDriver = ''
        self.BinaryModule = ''
        self.Shadow = ''
        self.CustomMakefile = {}
        self.Specification = {}
        # --- produced library classes and extern symbols ---
        self.LibraryClass = []
        self.ModuleEntryPointList = []
        self.ModuleUnloadImageList = []
        self.ConstructorList = []
        self.DestructorList = []
        # --- file lists and consumed interfaces ---
        self.Binaries = []
        self.Sources = []
        self.LibraryClasses = sdict()
        self.Libraries = []
        self.Protocols = []
        self.Ppis = []
        self.Guids = []
        self.Includes = []
        self.Packages = []
        self.Pcds = {}
        self.BuildOptions = {}
        self.Depex = ''

    ## String conversion: a module is represented by its INF path.
    #
    # @retval string The describing INF file path
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two modules are the same when their INF paths match.
    #
    # @retval True/False
    #
    def __eq__(self, Other):
        return str(Other) == self.DescFilePath

    ## Hashing: keyed by the INF path, consistent with __eq__.
    #
    # @retval int Hash of the INF path
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## PackageBuildClassObject
#
# In-memory build description of a single DEC package: declared protocols,
# PPIs, GUIDs, include paths, recommended library class instances and PCD
# declarations.  Instances are keyed by DescFilePath (the DEC file name),
# which also drives equality and hashing.
#
# @var DescFilePath:   path of the describing DEC file (identity key)
# @var PackageName:    package base name
# @var Guid / Version: package GUID and version string
# @var Protocols:      {protocol CName : GUID value}
# @var Ppis:           {PPI CName : GUID value}
# @var Guids:          {GUID CName : GUID value}
# @var Includes:       [include path, ...]
# @var LibraryClasses: {library class name : recommended instance INF}
# @var Pcds:           {(PcdCName, PcdGuidCName) : PcdClassObject}
#
class PackageBuildClassObject(object):
    def __init__(self):
        # Header values
        self.DescFilePath = ''
        self.PackageName = ''
        self.Guid = ''
        self.Version = ''
        # Declarations
        self.Protocols = {}
        self.Ppis = {}
        self.Guids = {}
        self.Includes = []
        self.LibraryClasses = {}
        self.Pcds = {}

    ## String conversion: a package is represented by its DEC path.
    #
    # @retval string The describing DEC file path
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two packages are the same when their DEC paths match.
    #
    # @retval True/False
    #
    def __eq__(self, Other):
        return str(Other) == self.DescFilePath

    ## Hashing: keyed by the DEC path, consistent with __eq__.
    #
    # @retval int Hash of the DEC path
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## PlatformBuildClassObject
#
# In-memory build description of a single DSC platform: output settings,
# flash definition, SKUs, the modules and library instances that make up
# the platform, PCD settings and build options.  Instances are keyed by
# DescFilePath (the DSC file name), which also drives equality and hashing.
#
# @var DescFilePath:     path of the describing DSC file (identity key)
# @var PlatformName:     platform base name
# @var Guid / Version:   platform GUID and version string
# @var DscSpecification: DSC spec version
# @var OutputDirectory:  build output directory
# @var FlashDefinition:  FDF file path
# @var BuildNumber:      build number string
# @var MakefileName:     custom makefile name, if any
# @var SkuIds:           {sku name : sku id}
# @var Modules:          [module INF path, ...]
# @var LibraryInstances: [library INF path, ...] actually consumed
# @var LibraryClasses:   {(class name, module type) : instance INF}
# @var Libraries:        {R8 library name : INF path}
# @var Pcds:             {(PcdCName, PcdGuidCName) : PcdClassObject}
# @var BuildOptions:     {(tool family, tool chain) : option string}
#
class PlatformBuildClassObject(object):
    def __init__(self):
        # Header values
        self.DescFilePath = ''
        self.PlatformName = ''
        self.Guid = ''
        self.Version = ''
        self.DscSpecification = ''
        self.OutputDirectory = ''
        self.FlashDefinition = ''
        self.BuildNumber = ''
        self.MakefileName = ''
        # Platform content
        self.SkuIds = {}
        self.Modules = []
        self.LibraryInstances = []
        self.LibraryClasses = {}
        self.Libraries = {}
        self.Pcds = {}
        self.BuildOptions = {}

    ## String conversion: a platform is represented by its DSC path.
    #
    # @retval string The describing DSC file path
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two platforms are the same when their DSC paths match.
    #
    # @retval True/False
    #
    def __eq__(self, Other):
        return str(Other) == self.DescFilePath

    ## Hashing: keyed by the DSC path, consistent with __eq__.
    #
    # @retval int Hash of the DSC path
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## ItemBuild
#
# Per-arch container for the three build databases (platform, package,
# module), each mapping a description file name to its build class object.
#
# @param Arch:     Build arch this container belongs to
# @param Platform: accepted for interface compatibility; not stored
# @param Package:  accepted for interface compatibility; not stored
# @param Module:   accepted for interface compatibility; not stored
#
# @var Arch:             the build arch
# @var PlatformDatabase: {DscFileName : PlatformBuildClassObject, ...}
# @var PackageDatabase:  {DecFileName : PackageBuildClassObject, ...}
# @var ModuleDatabase:   {InfFileName : ModuleBuildClassObject, ...}
#
class ItemBuild(object):
    def __init__(self, Arch, Platform = None, Package = None, Module = None):
        self.Arch = Arch
        self.PlatformDatabase = {}
        self.PackageDatabase = {}
        self.ModuleDatabase = {}
+
+## WorkspaceBuild
+#
+# This class is used to parse active platform to init all inf/dec/dsc files
+# Generate module/package/platform databases for build
+#
+# @param object: Inherited from object class
+# @param ActivePlatform: Input value for current active platform
+# @param WorkspaceDir: Input value for current WorkspaceDir
+#
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var SupArchList: To store value for SupArchList, selection scope is in below list
+# EBC | IA32 | X64 | IPF | ARM | PPC
+# @var BuildTarget: To store value for WorkspaceDir, selection scope is in below list
+# RELEASE | DEBUG
+# @var SkuId: To store value for SkuId
+# @var Fdf: To store value for Fdf
+# @var FdTargetList: To store value for FdTargetList
+# @var FvTargetList: To store value for FvTargetList
+# @var TargetTxt: To store value for TargetTxt, it is a set structure as
+# TargetTxtClassObject
+# @var ToolDef: To store value for ToolDef, it is a set structure as
+# ToolDefClassObject
+# @var InfDatabase: To store value for InfDatabase, it is a set structure as
+# { [InfFileName] : InfClassObject}
+# @var DecDatabase: To store value for DecDatabase, it is a set structure as
+# { [DecFileName] : DecClassObject}
+# @var DscDatabase: To store value for DscDatabase, it is a set structure as
+# { [DscFileName] : DscClassObject}
# @var Build: To store value for Build, it is a set structure as
# { [Arch] : ItemBuild, ...}
+# @var DscFileName: To store value for Active Platform
+# @var UnFoundPcdInDsc: To store values for the pcds defined in INF/DEC but not found in DSC, it is a set structure as
+# { (PcdGuid, PcdCName, Arch) : DecFileName }
+#
+class WorkspaceBuild(object):
    def __init__(self, ActivePlatform, WorkspaceDir):
        """Locate and parse the active platform DSC, then pull every INF and
        DEC file it references into the in-memory databases.

        @param ActivePlatform  workspace-relative path of the platform DSC
        @param WorkspaceDir    root directory of the workspace

        Raises a fatal EdkLogger error (FILE_NOT_FOUND) when the DSC file
        does not exist.
        """
        self.WorkspaceDir = NormPath(WorkspaceDir)
        self.SupArchList = []        # arches the platform supports
        self.BuildTarget = []        # e.g. DEBUG / RELEASE
        self.SkuId = ''
        self.Fdf = ''
        self.FdTargetList = []
        self.FvTargetList = []
        self.TargetTxt = None
        self.ToolDef = None

        self.InfDatabase = {}        # {inf file name : InfClassObject}
        self.DecDatabase = {}        # {dec file name : DecClassObject}
        self.DscDatabase = {}        # {dsc file name : DscClassObject}

        self.UnFoundPcdInDsc = {}    # {(PcdGuid, PcdCName, Arch) : DecFileName}

        #
        # Init build for all arches
        #
        self.Build = {}
        for Arch in DataType.ARCH_LIST:
            self.Build[Arch] = ItemBuild(Arch)

        #
        # Init build database
        #
        self.Db = Database.Database(DATABASE_PATH)
        self.Db.InitDatabase()

        #
        # Get active platform
        #
        self.DscFileName = NormPath(ActivePlatform)
        File = self.WorkspaceFile(self.DscFileName)
        if os.path.exists(File) and os.path.isfile(File):
            self.DscDatabase[self.DscFileName] = Dsc(File, False, True, self.WorkspaceDir, self.Db)
        else:
            EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData = File)

        #
        # Parse platform to get module
        #
        for DscFile in self.DscDatabase.keys():
            Platform = self.DscDatabase[DscFile].Platform

            #
            # Get global information: union the per-arch header lists into
            # flat SupArchList / BuildTarget lists.
            #
            Tmp = set()
            for Arch in DataType.ARCH_LIST:
                for Item in Platform.Header[Arch].SupArchList:
                    Tmp.add(Item)
            self.SupArchList = list(Tmp)
            Tmp = set()
            for Arch in DataType.ARCH_LIST:
                for Item in Platform.Header[Arch].BuildTargets:
                    Tmp.add(Item)
            self.BuildTarget = list(Tmp)
            # NOTE(review): the last supported arch wins for SkuId here --
            # presumably they are identical across arches; confirm.
            for Arch in self.SupArchList:
                self.SkuId = Platform.Header[Arch].SkuIdName
            self.Fdf = Platform.FlashDefinitionFile.FilePath

            #
            # Get all inf files referenced by the platform
            #
            for Item in Platform.LibraryClasses.LibraryList:
                for Arch in Item.SupArchList:
                    self.AddToInfDatabase(Item.FilePath)

            for Item in Platform.Libraries.LibraryList:
                for Arch in Item.SupArchList:
                    self.AddToInfDatabase(Item.FilePath)

            for Item in Platform.Modules.ModuleList:
                for Arch in Item.SupArchList:
                    #
                    # Add modules
                    #
                    Module = Item.FilePath
                    self.AddToInfDatabase(Module)
                    #
                    # Add library used in modules, and record the per-module
                    # library class override
                    #
                    for Lib in Item.LibraryClasses.LibraryList:
                        self.AddToInfDatabase(Lib.FilePath)
                        self.UpdateLibraryClassOfModule(Module, Lib.Name, Arch, Lib.FilePath)

        #
        # Parse module to get package: every DEC any known INF depends on
        #
        for InfFile in self.InfDatabase.keys():
            Module = self.InfDatabase[InfFile].Module
            #
            # Get all dec
            #
            for Item in Module.PackageDependencies:
                for Arch in Item.SupArchList:
                    self.AddToDecDatabase(Item.FilePath)
        # End of self.Init()
+
+ ## Generate PlatformDatabase
+ #
+ # Go through each arch to get all items in DscDatabase to PlatformDatabase
+ #
+ def GenPlatformDatabase(self, PcdsSet={}):
+ for Dsc in self.DscDatabase.keys():
+ Platform = self.DscDatabase[Dsc].Platform
+ for Arch in self.SupArchList:
+ Pb = PlatformBuildClassObject()
+
+ #
+ # Defines
+ #
+ Pb.DescFilePath = Dsc
+ Pb.PlatformName = Platform.Header[Arch].Name
+ if Pb.PlatformName == '':
+ EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of platform %s is not defined for arch %s" % (Dsc, Arch))
+ Pb.Guid = Platform.Header[Arch].Guid
+ Pb.Version = Platform.Header[Arch].Version
+ Pb.DscSpecification = Platform.Header[Arch].DscSpecification
+ Pb.OutputDirectory = Platform.Header[Arch].OutputDirectory
+ Pb.FlashDefinition = Platform.FlashDefinitionFile.FilePath
+ Pb.BuildNumber = Platform.Header[Arch].BuildNumber
+
+ #
+ # SkuId
+ #
+ for Key in Platform.SkuInfos.SkuInfoList.keys():
+ Pb.SkuIds[Key] = Platform.SkuInfos.SkuInfoList[Key]
+
+ #
+ # Module
+ #
+ for Item in Platform.Modules.ModuleList:
+ if Arch in Item.SupArchList:
+ Pb.Modules.append(Item.FilePath)
+
+ #
+ # BuildOptions
+ #
+ for Item in Platform.BuildOptions.BuildOptionList:
+ if Arch in Item.SupArchList:
+ Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = Item.Option
+
+ #
+ # LibraryClass
+ #
+ for Item in Platform.LibraryClasses.LibraryList:
+ SupModuleList = self.FindSupModuleListOfLibraryClass(Item, Platform.LibraryClasses.LibraryList, Arch)
+ if Arch in Item.SupArchList:
+ for ModuleType in SupModuleList:
+ Pb.LibraryClasses[(Item.Name, ModuleType)] = Item.FilePath
+
+ #
+ # Libraries
+ #
+ for Item in Platform.Libraries.LibraryList:
+ for ItemArch in Item.SupArchList:
+ Library = self.InfDatabase[Item.FilePath]
+ if ItemArch not in Library.Module.Header:
+ continue
+ Pb.Libraries[Library.Module.Header[ItemArch].Name] = Item.FilePath
+
+ #
+ # Pcds
+ #
+ for Item in Platform.DynamicPcdBuildDefinitions:
+ if Arch in Item.SupArchList:
+ Name = Item.CName
+ Guid = Item.TokenSpaceGuidCName
+ Type = Item.ItemType
+ DatumType = Item.DatumType
+ Value = Item.DefaultValue
+ Token = Item.Token
+ MaxDatumSize = Item.MaxDatumSize
+ SkuInfoList = Item.SkuInfoList
+ Pb.Pcds[(Name, Guid)] = PcdClassObject(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, False)
+
+ for (Name, Guid) in PcdsSet:
+ Value = PcdsSet[Name, Guid]
+ for PcdType in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
+ for Dec in self.Build[Arch].PackageDatabase:
+ Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
+ if (Name, Guid, PcdType) in Pcds:
+ Pcd = Pcds[(Name, Guid, PcdType)]
+ Type = PcdType
+ DatumType = Pcd.DatumType
+ Token = Pcd.TokenValue
+ MaxDatumSize = Pcd.MaxDatumSize
+ SkuInfoList = Pcd.SkuInfoList
+ Pb.Pcds[(Name, Guid)] = PcdClassObject(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, False)
+ break
+ else:
+ # nothing found
+ continue
+ # found in one package, find next PCD
+ break
+ else:
+ EdkLogger.error("AutoGen", PARSER_ERROR, "PCD is not found in any package", ExtraData="%s.%s" % (Guid, Name))
+ #
+ # Add to database
+ #
+ self.Build[Arch].PlatformDatabase[Dsc] = Pb
+ Pb = None
+
+ ## Generate PackageDatabase
+ #
+ # Go through each arch to get all items in DecDatabase to PackageDatabase
+ #
+ def GenPackageDatabase(self):
+ for Dec in self.DecDatabase.keys():
+ Package = self.DecDatabase[Dec].Package
+
+ for Arch in self.SupArchList:
+ Pb = PackageBuildClassObject()
+
+ #
+ # Defines
+ #
+ Pb.DescFilePath = Dec
+ Pb.PackageName = Package.Header[Arch].Name
+ if Pb.PackageName == '':
+ EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of package %s is not defined for arch %s" % (Dec, Arch))
+
+ Pb.Guid = Package.Header[Arch].Guid
+ Pb.Version = Package.Header[Arch].Version
+
+ #
+ # Protocols
+ #
+ for Item in Package.ProtocolDeclarations:
+ if Arch in Item.SupArchList:
+ Pb.Protocols[Item.CName] = Item.Guid
+
+ #
+ # Ppis
+ #
+ for Item in Package.PpiDeclarations:
+ if Arch in Item.SupArchList:
+ Pb.Ppis[Item.CName] = Item.Guid
+
+ #
+ # Guids
+ #
+ for Item in Package.GuidDeclarations:
+ if Arch in Item.SupArchList:
+ Pb.Guids[Item.CName] = Item.Guid
+
+ #
+ # Includes
+ #
+ for Item in Package.Includes:
+ if Arch in Item.SupArchList:
+ Pb.Includes.append(Item.FilePath)
+
+ #
+ # LibraryClasses
+ #
+ for Item in Package.LibraryClassDeclarations:
+ if Arch in Item.SupArchList:
+ Pb.LibraryClasses[Item.LibraryClass] = Item.RecommendedInstance
+
+ #
+ # Pcds
+ #
+ for Item in Package.PcdDeclarations:
+ if Arch in Item.SupArchList:
+ Name = Item.CName
+ Guid = Item.TokenSpaceGuidCName
+ Type = Item.ItemType
+ DatumType = Item.DatumType
+ Value = Item.DefaultValue
+ Token = Item.Token
+ MaxDatumSize = Item.MaxDatumSize
+ SkuInfoList = Item.SkuInfoList
+ Pb.Pcds[(Name, Guid, Type)] = PcdClassObject(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, False)
+
+ #
+ # Add to database
+ #
+ self.Build[Arch].PackageDatabase[Dec] = Pb
+ Pb = None
+
+ ## Generate ModuleDatabase
+ #
+ # Go through each arch to get all items in InfDatabase to ModuleDatabase
+ #
+ def GenModuleDatabase(self, InfList = []):
+ for Inf in self.InfDatabase.keys():
+ Module = self.InfDatabase[Inf].Module
+
+ for Arch in self.SupArchList:
+ if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList) or Arch not in Module.Header:
+ continue
+
+ ModuleHeader = Module.Header[Arch]
+ Pb = ModuleBuildClassObject()
+
+ #
+ # Defines
+ #
+ Pb.DescFilePath = Inf
+ Pb.BaseName = ModuleHeader.Name
+ if Pb.BaseName == '':
+ EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of module %s is not defined for arch %s" % (Inf, Arch))
+ Pb.Guid = ModuleHeader.Guid
+ Pb.Version = ModuleHeader.Version
+ Pb.ModuleType = ModuleHeader.ModuleType
+ Pb.PcdIsDriver = ModuleHeader.PcdIsDriver
+ Pb.BinaryModule = ModuleHeader.BinaryModule
+ Pb.CustomMakefile = ModuleHeader.CustomMakefile
+ Pb.Shadow = ModuleHeader.Shadow
+
+ #
+ # Specs os Defines
+ #
+ Pb.Specification = ModuleHeader.Specification
+ Pb.Specification[TAB_INF_DEFINES_EDK_RELEASE_VERSION] = ModuleHeader.EdkReleaseVersion
+ Pb.Specification[TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION] = ModuleHeader.EfiSpecificationVersion
+ Pb.AutoGenVersion = int(ModuleHeader.InfVersion, 0)
+
+ #
+ # LibraryClass of Defines
+ #
+ for Item in ModuleHeader.LibraryClass:
+ Pb.LibraryClass.append(LibraryClassObject(Item.LibraryClass, Item.SupModuleList, None))
+
+ #
+ # Module image and library of Defines
+ #
+ for Item in Module.ExternImages:
+ if Item.ModuleEntryPoint != '' and Item.ModuleEntryPoint not in Pb.ModuleEntryPointList:
+ Pb.ModuleEntryPointList.append(Item.ModuleEntryPoint)
+ if Item.ModuleUnloadImage != '' and Item.ModuleUnloadImage not in Pb.ModuleUnloadImageList:
+ Pb.ModuleUnloadImageList.append(Item.ModuleUnloadImage)
+ for Item in Module.ExternLibraries:
+ if Item.Constructor != '' and Item.Constructor not in Pb.ConstructorList:
+ Pb.ConstructorList.append(Item.Constructor)
+ if Item.Destructor != '' and Item.Destructor not in Pb.DestructorList:
+ Pb.DestructorList.append(Item.Destructor)
+
+ #
+ # Binaries
+ #
+ for Item in Module.Binaries:
+ if Arch in Item.SupArchList:
+ FileName = Item.BinaryFile
+ FileType = Item.FileType
+ Target = Item.Target
+ FeatureFlag = Item.FeatureFlag
+ Pb.Binaries.append(ModuleBinaryFileClass(FileName, FileType, Target, FeatureFlag, Arch.split()))
+
+ #
+ # Sources
+ #
+ for Item in Module.Sources:
+ if Arch in Item.SupArchList:
+ SourceFile = Item.SourceFile
+ TagName = Item.TagName
+ ToolCode = Item.ToolCode
+ ToolChainFamily = Item.ToolChainFamily
+ FeatureFlag = Item.FeatureFlag
+ Pb.Sources.append(ModuleSourceFileClass(SourceFile, TagName, ToolCode, ToolChainFamily, FeatureFlag))
+
+ #
+ # Protocols
+ #
+ for Item in Module.Protocols:
+ if Arch in Item.SupArchList:
+ Pb.Protocols.append(Item.CName)
+
+ #
+ # Ppis
+ #
+ for Item in Module.Ppis:
+ if Arch in Item.SupArchList:
+ Pb.Ppis.append(Item.CName)
+
+ #
+ # Guids
+ #
+ for Item in Module.Guids:
+ if Arch in Item.SupArchList:
+ Pb.Ppis.append(Item.CName)
+
+ #
+ # Includes
+ #
+ for Item in Module.Includes:
+ if Arch in Item.SupArchList:
+ Pb.Includes.append(Item.FilePath)
+
+ #
+ # Packages
+ #
+ for Item in Module.PackageDependencies:
+ if Arch in Item.SupArchList:
+ Pb.Packages.append(Item.FilePath)
+
+ #
+ # BuildOptions
+ #
+ for Item in Module.BuildOptions:
+ if Arch in Item.SupArchList:
+ if (Item.ToolChainFamily, Item.ToolChain) not in Pb.BuildOptions:
+ Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = Item.Option
+ else:
+ OptionString = Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)]
+ Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = OptionString + " " + Item.Option
+ self.FindBuildOptions(Arch, Inf, Pb.BuildOptions)
+
+ #
+ # Depex
+ #
+ for Item in Module.Depex:
+ if Arch in Item.SupArchList:
+ Pb.Depex = Pb.Depex + Item.Depex + ' '
+ Pb.Depex = Pb.Depex.strip()
+
+ #
+ # LibraryClasses
+ #
+ for Item in Module.LibraryClasses:
+ if Arch in Item.SupArchList:
+ Lib = Item.LibraryClass
+ RecommendedInstance = Item.RecommendedInstance
+ if Pb.LibraryClass != []:
+ #
+ # For Library
+ #
+ for Libs in Pb.LibraryClass:
+ for Type in Libs.SupModList:
+ Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)
+ if Instance == None:
+ Instance = RecommendedInstance
+ Pb.LibraryClasses[(Lib, Type)] = Instance
+ else:
+ #
+ # For Module
+ #
+ Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)
+ if Instance == None:
+ Instance = RecommendedInstance
+ Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance
+
+ #
+ # Libraries
+ #
+ for Item in Module.Libraries:
+ if Arch in Item.SupArchList:
+ Pb.Libraries.append(Item.Library)
+
+ #
+ # Pcds
+ #
+ for Item in Module.PcdCodes:
+ if Arch in Item.SupArchList:
+ Name = Item.CName
+ Guid = Item.TokenSpaceGuidCName
+ Type = Item.ItemType
+ Pb.Pcds[(Name, Guid)] = self.FindPcd(Arch, Inf, Name, Guid, Type)
+
+ #
+ # Add to database
+ #
+ self.Build[Arch].ModuleDatabase[Inf] = Pb
+ Pb = None
+
+ ## Update Libraries Of Platform Database
+ #
+ # @param InfList: A list for all inf files
+ #
+ def UpdateLibrariesOfPlatform(self, InfList = []):
+ for Arch in self.SupArchList:
+ PlatformDatabase = self.Build[Arch].PlatformDatabase
+ for Dsc in PlatformDatabase:
+ Platform = PlatformDatabase[Dsc]
+ for Inf in Platform.Modules:
+ if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):
+ continue
+ Module = self.Build[Arch].ModuleDatabase[Inf]
+ if Module.LibraryClass == None or Module.LibraryClass == []:
+ self.UpdateLibrariesOfModule(Platform, Module, Arch)
+ for Key in Module.LibraryClasses:
+ Lib = Module.LibraryClasses[Key]
+ if Lib not in Platform.LibraryInstances:
+ Platform.LibraryInstances.append(Lib)
+
+
+ ## Update Libraries Of Module Database
+ #
# @param Platform: The platform the module belongs to
# @param Module: The module whose libraries need to be updated
# @param Arch: The supported arch of the module
+ #
+ def UpdateLibrariesOfModule(self, Platform, Module, Arch):
+ ModuleDatabase = self.Build[Arch].ModuleDatabase
+ ModuleType = Module.ModuleType
+
+ # check R8 module
+ if Module.AutoGenVersion < 0x00010005:
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
+ LibraryConsumerList = [Module]
+
+ # "CompilerStub" is a must for R8 modules
+ Module.Libraries.append("CompilerStub")
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryName in M.Libraries:
+ if LibraryName not in Platform.Libraries:
+ EdkLogger.warn("AutoGen", "Library [%s] is not found" % LibraryName,
+ ExtraData="\t%s [%s]" % (str(Module), Arch))
+ continue
+
+ LibraryFile = Platform.Libraries[LibraryName]
+ if (LibraryName, ModuleType) not in Module.LibraryClasses:
+ Module.LibraryClasses[LibraryName, ModuleType] = LibraryFile
+ LibraryConsumerList.append(ModuleDatabase[LibraryFile])
+ EdkLogger.verbose("\t" + LibraryName + " : " + LibraryFile)
+ return
+
+ # R9 module
+ LibraryConsumerList = [Module]
+ Constructor = []
+ ConsumedByList = sdict()
+ LibraryInstance = sdict()
+
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for Key, LibraryPath in M.LibraryClasses.iteritems():
+ # The "Key" is in format of (library_class_name, supported_module_type)
+ if ModuleType != "USER_DEFINED" and ModuleType not in Key:
+ EdkLogger.debug(EdkLogger.DEBUG_3, "%s for module type %s is not supported (%s)" % (Key + (LibraryPath,)))
+ continue
+
+ LibraryClassName = Key[0]
+ if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:
+ if LibraryPath == None or LibraryPath == "":
+ LibraryInstance[LibraryClassName] = None
+ continue
+ LibraryModule = ModuleDatabase[LibraryPath]
+ LibraryInstance[LibraryClassName] = LibraryModule
+ LibraryConsumerList.append(LibraryModule)
+ EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))
+ elif LibraryPath == None or LibraryPath == "":
+ continue
+ else:
+ LibraryModule = LibraryInstance[LibraryClassName]
+
+ if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
+ Constructor.append(LibraryModule)
+
+ if LibraryModule not in ConsumedByList:
+ ConsumedByList[LibraryModule] = []
+ if M != Module:
+ if M in ConsumedByList[LibraryModule]:
+ continue
+ ConsumedByList[LibraryModule].append(M)
+ #
+ # Initialize the sorted output list to the empty set
+ #
+ SortedLibraryList = []
+ #
+ # Q <- Set of all nodes with no incoming edges
+ #
+ LibraryList = [] #LibraryInstance.values()
+ Q = []
+ for LibraryClassName in LibraryInstance:
+ M = LibraryInstance[LibraryClassName]
+ if M == None:
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "Library instance for library class [%s] is not found" % LibraryClassName,
+ ExtraData="\t%s [%s]" % (str(Module), Arch))
+ LibraryList.append(M)
+ #
+ # check if there're duplicate library classes
+ #
+ for Lc in M.LibraryClass:
+ if Lc.SupModList != None and ModuleType not in Lc.SupModList:
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),
+ ExtraData="\t%s" % str(Module))
+
+ if Lc.LibraryClass in LibraryInstance and str(M) != str(LibraryInstance[Lc.LibraryClass]):
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "More than one library instance found for library class [%s] in module [%s]" % (Lc.LibraryClass, Module),
+ ExtraData="\t%s\n\t%s" % (LibraryInstance[Lc.LibraryClass], str(M))
+ )
+ if ConsumedByList[M] == []:
+ Q.insert(0, M)
+ #
+ # while Q is not empty do
+ #
+ while Q != []:
+ #
+ # remove node from Q
+ #
+ Node = Q.pop()
+ #
+ # output Node
+ #
+ SortedLibraryList.append(Node)
+ #
+ # for each node Item with an edge e from Node to Item do
+ #
+ for Item in LibraryList:
+ if Node not in ConsumedByList[Item]:
+ continue
+ #
+ # remove edge e from the graph
+ #
+ ConsumedByList[Item].remove(Node)
+ #
+ # If Item has no other incoming edges then
+ #
+ if ConsumedByList[Item] == []:
+ #
+ # insert Item into Q
+ #
+ Q.insert(0, Item)
+
+ EdgeRemoved = True
+ while Q == [] and EdgeRemoved:
+ EdgeRemoved = False
+ #
+ # for each node Item with a Constructor
+ #
+ for Item in LibraryList:
+ if Item in Constructor:
+ #
+ # for each Node without a constructor with an edge e from Item to Node
+ #
+ for Node in ConsumedByList[Item]:
+ if Node not in Constructor:
+ #
+ # remove edge e from the graph
+ #
+ ConsumedByList[Item].remove(Node)
+ EdgeRemoved = True
+ if ConsumedByList[Item] == []:
+ #
+ # insert Item into Q
+ #
+ Q.insert(0, Item)
+ break
+ if Q != []:
+ break
+
+ #
+ # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
+ #
+ for Item in LibraryList:
+ if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
+ ErrorMessage = 'Library [%s] with constructors has a cycle' % str(Item)
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR, ErrorMessage,
+ "\tconsumed by " + "\n\tconsumed by ".join([str(L) for L in ConsumedByList[Item]]))
+ if Item not in SortedLibraryList:
+ SortedLibraryList.append(Item)
+
+ #
+ # Build the list of constructor and destructir names
+ # The DAG Topo sort produces the destructor order, so the list of constructors must generated in the reverse order
+ #
+ SortedLibraryList.reverse()
+ Module.LibraryClasses = sdict()
+ for L in SortedLibraryList:
+ for Lc in L.LibraryClass:
+ Module.LibraryClasses[Lc.LibraryClass, ModuleType] = str(L)
+ #
+ # Merge PCDs from library instance
+ #
+ for Key in L.Pcds:
+ if Key not in Module.Pcds:
+ LibPcd = L.Pcds[Key]
+ Module.Pcds[Key] = self.FindPcd(Arch, str(Module), LibPcd.TokenCName, LibPcd.TokenSpaceGuidCName, LibPcd.Type)
+ #
+ # Merge GUIDs from library instance
+ #
+ for CName in L.Guids:
+ if CName not in Module.Guids:
+ Module.Guids.append(CName)
+ #
+ # Merge Protocols from library instance
+ #
+ for CName in L.Protocols:
+ if CName not in Module.Protocols:
+ Module.Protocols.append(CName)
+ #
+ # Merge Ppis from library instance
+ #
+ for CName in L.Ppis:
+ if CName not in Module.Ppis:
+ Module.Ppis.append(CName)
+
## GenBuildDatabase
#
# Generate the build database (packages, platforms, modules) for all arches,
# then resolve platform libraries and report PCDs that were used but never
# defined in the DSC.
#
# @param PcdsSet: Pcd overrides from the Fdf parse result; None behaves
#                 like an empty dict
# @param InfList: Extra inf files from the Fdf parse result; None behaves
#                 like an empty list
#
def GenBuildDatabase(self, PcdsSet = None, InfList = None):
    # (fix) Original used mutable default arguments ({} and []); None
    # sentinels avoid the shared-default-object pitfall with identical
    # call semantics.
    if PcdsSet is None:
        PcdsSet = {}
    if InfList is None:
        InfList = []

    #
    # Add additional inf files defined in the Fdf file
    #
    for InfFile in InfList:
        self.AddToInfDatabase(NormPath(InfFile))

    #
    # Generate PlatformDatabase, PackageDatabase and ModuleDatabase
    #
    self.GenPackageDatabase()
    self.GenPlatformDatabase(PcdsSet)
    self.GenModuleDatabase(InfList)

    self.Db.Close()

    #
    # Update Libraries Of Platform
    #
    self.UpdateLibrariesOfPlatform(InfList)

    #
    # Output used Pcds not found in DSC file
    #
    self.ShowUnFoundPcds()
+
## ShowUnFoundPcds()
#
# If any PCD was used but not defined in the DSC, emit one verbose-level
# warning listing each such PCD together with the DEC file its declaration
# and default value were taken from.
#
def ShowUnFoundPcds(self):
    if not self.UnFoundPcdInDsc:
        return
    Parts = [
        '**** WARNING ****\n',
        'The following Pcds were not defined in the DSC file: %s\n' % self.DscFileName,
        'The default values were obtained from the DEC file that declares the PCD and the PCD default value\n',
    ]
    for (Guid, Name, Type, Arch) in self.UnFoundPcdInDsc:
        Dec = self.UnFoundPcdInDsc[(Guid, Name, Type, Arch)]
        Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
        if (Name, Guid, Type) in Pcds:
            Pcd = Pcds[(Name, Guid, Type)]
            Parts.append('%s.%s: Defined in file %s, PcdItemType is Pcds%s, DefaultValue is %s\n'
                         % (Guid, Name, Dec, Pcd.Type, Pcd.DefaultValue))
    EdkLogger.verbose(''.join(Parts))
+
## Create a full path with workspace dir
#
# Convert a workspace-relative filename into a full path.
#
# @param Filename: The filename to be prefixed with the workspace dir
#
# @retval string Full path
#
def WorkspaceFile(self, Filename):
    # Delegates to the module-level WorkspaceFile() helper, binding in this
    # build's workspace directory.
    FullPath = WorkspaceFile(self.WorkspaceDir, Filename)
    return FullPath
+
## Update LibraryClass of Module
#
# When a platform overrides a module's library class with a specific
# instance, make sure (1) the instance itself declares that library class
# and (2) the consuming module lists the class for this arch.
#
# @param InfFileName:      Inf of the consuming module, as named in the platform
# @param LibraryClass:     Library class name specified in the platform
# @param Arch:             Supported arch
# @param InstanceFilePath: Library instance path specified in the platform
#
def UpdateLibraryClassOfModule(self, InfFileName, LibraryClass, Arch, InstanceFilePath):
    #
    # 1) Ensure the library instance declares this library class.
    #
    LibraryModule = self.InfDatabase[InstanceFilePath].Module
    for Lib in LibraryModule.Header[Arch].LibraryClass:
        if Lib.LibraryClass == LibraryClass:
            break
    else:
        # not declared: add a wildcard declaration for it
        NewLib = LibraryClassClass()
        NewLib.LibraryClass = LibraryClass
        NewLib.SupModuleList = DataType.SUP_MODULE_LIST # LibraryModule.Header[Arch].ModuleType.split()
        LibraryModule.Header[Arch].LibraryClass.append(NewLib)

    #
    # 2) Ensure the consuming module lists this library class for Arch.
    #
    Module = self.InfDatabase[InfFileName].Module
    for Lib in Module.LibraryClasses:
        if Lib.LibraryClass == LibraryClass:
            if Arch not in Lib.SupArchList:
                Lib.SupArchList.append(Arch)
            return
    NewLib = LibraryClassClass()
    NewLib.LibraryClass = LibraryClass
    NewLib.SupArchList = [Arch]
    Module.LibraryClasses.append(NewLib)
+
## Add Inf file to InfDatabase
#
# Create an Inf instance for the given inf file and cache it in InfDatabase;
# a missing file is reported through EdkLogger.error (which raises).
#
# @param InfFileName: The inf file to be added to the database
#
def AddToInfDatabase(self, InfFileName):
    FullPath = self.WorkspaceFile(InfFileName)
    if not (os.path.exists(FullPath) and os.path.isfile(FullPath)):
        EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData=FullPath)
    elif InfFileName not in self.InfDatabase:
        self.InfDatabase[InfFileName] = Inf(FullPath, False, True, self.WorkspaceDir, self.Db, self.SupArchList)
+
## Add Dec file to DecDatabase
#
# Create a Dec instance for the given dec file and cache it in DecDatabase;
# a missing file is reported through EdkLogger.error (which raises).
#
# @param DecFileName: The dec file to be added to the database
#
def AddToDecDatabase(self, DecFileName):
    FullPath = self.WorkspaceFile(DecFileName)
    if not (os.path.exists(FullPath) and os.path.isfile(FullPath)):
        EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData=FullPath)
    elif DecFileName not in self.DecDatabase:
        self.DecDatabase[DecFileName] = Dec(FullPath, False, True, self.WorkspaceDir, self.Db, self.SupArchList)
+
## Search LibraryClass Instance for Module
#
# Look for a per-module <LibraryClasses> override in the <Components>
# section of any DSC first; fall back to the platform-level library class
# resolution if none matches.
#
# @param Lib:        Library class name
# @param Arch:       Supported arch
# @param ModuleType: Supported module type
# @param ModuleName: Module (inf path) being resolved
#
# @retval string Found library class instance file path
#
def FindLibraryClassInstanceOfModule(self, Lib, Arch, ModuleType, ModuleName):
    #
    # First: <LibraryClass> override inside <Components> of a DSC file
    #
    for DscFile in self.DscDatabase.keys():
        Platform = self.DscDatabase[DscFile].Platform
        for Module in Platform.Modules.ModuleList:
            if Arch not in Module.SupArchList or Module.FilePath != ModuleName:
                continue
            for LibraryClass in Module.LibraryClasses.LibraryList:
                if LibraryClass.Name == Lib:
                    return LibraryClass.FilePath
    #
    # Second: <LibraryClass> of the platform-level <LibraryClasses> section
    #
    return self.FindLibraryClassInstanceOfLibrary(Lib, Arch, ModuleType)
+
## Search LibraryClass Instance for Library
#
# Search the per-arch platform database for the instance bound to a library
# class: a (class, module-type) specific binding wins over the generic
# (class, '') binding.
#
# @param Lib:  Library class name
# @param Arch: Supported arch
# @param Type: Supported library usage (module) type
#
# @retval string Found library class instance file path
# @retval None   Not found
#
def FindLibraryClassInstanceOfLibrary(self, Lib, Arch, Type):
    # (fix) Dropped the dead 'Platform' local the original assigned but
    # never used, and hoisted the repeated dict-chain lookup.
    for Dsc in self.DscDatabase.keys():
        # NOTE(review): assumes every DSC key also exists in
        # Build[Arch].PlatformDatabase -- confirm; a missing key would
        # raise KeyError here (same as the original behavior).
        LibraryClasses = self.Build[Arch].PlatformDatabase[Dsc].LibraryClasses
        if (Lib, Type) in LibraryClasses:
            return LibraryClasses[(Lib, Type)]
        elif (Lib, '') in LibraryClasses:
            return LibraryClasses[(Lib, '')]
    return None
+
## Find BuildOptions
#
# Search every DSC's <Components> section for the named module and copy its
# per-component build options into BuildOptions, overriding any entry with
# the same (tool chain family, tool chain) key that came from the inf.
#
# @param Arch:         Supported arch
# @param ModuleName:   The module whose component entry carries the options
# @param BuildOptions: Dict of all build options, updated in place
#
def FindBuildOptions(self, Arch, ModuleName, BuildOptions):
    for DscFile in self.DscDatabase.keys():
        Platform = self.DscDatabase[DscFile].Platform
        for Module in Platform.Modules.ModuleList:
            if Arch not in Module.SupArchList or Module.FilePath != ModuleName:
                continue
            for BuildOption in Module.ModuleSaBuildOption.BuildOptionList:
                # later matches override earlier ones with the same key
                BuildOptions[(BuildOption.ToolChainFamily, BuildOption.ToolChain)] = BuildOption.Option
+
## Find Pcd
#
# Build the effective PcdClassObject for (Name, Guid) as seen by one module:
# DSC platform-level settings and DSC <Components> overrides take precedence
# for value/size, while the declaring DEC supplies the declaration (item
# type, datum type, token) and the default value when the DSC is silent.
#
# @param Arch:       Supported arch
# @param ModuleName: The module which has the pcd definition in a platform component
# @param Name:       CName of the Pcd
# @param Guid:       Token space GUID CName of the Pcd
# @param Type:       Pcd item type as used by the module ('' when unknown)
#
# @retval PcdClassObject An instance for PcdClassObject with all members filled
#
def FindPcd(self, Arch, ModuleName, Name, Guid, Type):
    # Accumulators for the effective PCD attributes
    NewType = ''            # resolved item type (generic Dynamic/DynamicEx form)
    DatumType = ''          # e.g. UINT32, VOID*
    Value = ''              # effective default value
    Token = ''              # token number
    MaxDatumSize = ''
    SkuInfoList = {}
    IsOverrided = False
    IsFoundInDsc = False    # value/size came from the DSC
    IsFoundInDec = False    # declaration found in some DEC
    FoundInDecFile = ''

    #
    # Second get information from platform database
    #
    OwnerPlatform = ''
    for Dsc in self.Build[Arch].PlatformDatabase.keys():
        Pcds = self.Build[Arch].PlatformDatabase[Dsc].Pcds
        if (Name, Guid) in Pcds:
            OwnerPlatform = Dsc
            Pcd = Pcds[(Name, Guid)]
            if Pcd.Type != '' and Pcd.Type != None:
                NewType = Pcd.Type
                # collapse concrete Dynamic*/DynamicEx* sub-types (Hii, Vpd, ...)
                # to the generic category for the compatibility check below
                if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:
                    NewType = DataType.TAB_PCDS_DYNAMIC
                elif NewType in DataType.PCD_DYNAMIC_EX_TYPE_LIST:
                    NewType = DataType.TAB_PCDS_DYNAMIC_EX
            else:
                NewType = Type

            # module and platform must agree on the item type
            if Type != '' and Type != NewType:
                ErrorMsg = "PCD %s.%s is declared as [%s] in module\n\t%s\n\n"\
                           " But it's used as [%s] in platform\n\t%s"\
                           % (Guid, Name, Type, ModuleName, NewType, OwnerPlatform)
                EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)


            if Pcd.DatumType != '' and Pcd.DatumType != None:
                DatumType = Pcd.DatumType
            if Pcd.TokenValue != '' and Pcd.TokenValue != None:
                Token = Pcd.TokenValue
            if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:
                Value = Pcd.DefaultValue
            if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:
                MaxDatumSize = Pcd.MaxDatumSize
            SkuInfoList = Pcd.SkuInfoList

            IsOverrided = True
            IsFoundInDsc = True
            break

    #
    # Third get information from <Pcd> of <Components> from module database
    # (per-component override of value/size wins over the platform section)
    #
    for Dsc in self.DscDatabase.keys():
        for Module in self.DscDatabase[Dsc].Platform.Modules.ModuleList:
            if Arch in Module.SupArchList:
                if Module.FilePath == ModuleName:
                    for Pcd in Module.PcdBuildDefinitions:
                        if (Name, Guid) == (Pcd.CName, Pcd.TokenSpaceGuidCName):
                            if Pcd.DefaultValue != '':
                                Value = Pcd.DefaultValue
                            if Pcd.MaxDatumSize != '':
                                MaxDatumSize = Pcd.MaxDatumSize

                            IsFoundInDsc = True
                            IsOverrided = True
                            break

    #
    # First get information from package database
    # (the DEC that declares the PCD; determines NewType when still unknown)
    #
    Pcd = None
    if NewType == '':
        if Type != '':
            PcdTypeList = [Type]
        else:
            PcdTypeList = ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]

        for Dec in self.Build[Arch].PackageDatabase.keys():
            Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
            for PcdType in PcdTypeList:
                if (Name, Guid, PcdType) in Pcds:
                    Pcd = Pcds[(Name, Guid, PcdType)]
                    NewType = PcdType
                    IsOverrided = True
                    IsFoundInDec = True
                    FoundInDecFile = Dec
                    break
            else:
                # no type matched in this DEC; try the next one
                continue
            break
    else:
        for Dec in self.Build[Arch].PackageDatabase.keys():
            Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
            if (Name, Guid, NewType) in Pcds:
                Pcd = Pcds[(Name, Guid, NewType)]
                IsOverrided = True
                IsFoundInDec = True
                FoundInDecFile = Dec
                break

    # a PCD used anywhere must be declared by some package
    if not IsFoundInDec:
        ErrorMsg = "Pcd '%s.%s [%s]' defined in module '%s' is not found in any package for Arch '%s'" % (Guid, Name, NewType, ModuleName, Arch)
        EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)

    #
    # Not found in any platform and fdf: fall back to the DEC default value
    # and remember the PCD for the ShowUnFoundPcds() warning.
    #
    if not IsFoundInDsc:
        Value = Pcd.DefaultValue
        if NewType.startswith("Dynamic") and SkuInfoList == {}:
            # NOTE(review): .values()[0] is Python-2-only and picks an
            # arbitrary platform's SkuIds -- confirm single-platform
            # assumption before porting.
            SkuIds = self.Build[Arch].PlatformDatabase.values()[0].SkuIds
            SkuInfoList['DEFAULT'] = SkuInfoClass(SkuIdName='DEFAULT', SkuId=SkuIds['DEFAULT'], DefaultValue=Value)
        self.UnFoundPcdInDsc[(Guid, Name, NewType, Arch)] = FoundInDecFile
    #elif Type != '' and NewType.startswith("Dynamic"):
    #    NewType = Pcd.Type
    # The declaring DEC is authoritative for the datum type and token
    DatumType = Pcd.DatumType
    if Token in [None, '']:
        Token = Pcd.TokenValue
    if DatumType == "VOID*" and MaxDatumSize in ['', None]:
        EdkLogger.verbose("No MaxDatumSize specified for PCD %s.%s in module [%s]" % (Guid, Name, ModuleName))
        # NOTE(review): Value[0] raises IndexError when Value is '' --
        # presumably a VOID* PCD always has a non-empty default; confirm.
        if Value[0] == 'L':
            # L"..." unicode string: two bytes per character
            MaxDatumSize = str(len(Value) * 2)
        elif Value[0] == '{':
            # {0x..,0x..} byte array: one byte per comma-separated item
            MaxDatumSize = str(len(Value.split(',')))
        else:
            MaxDatumSize = str(len(Value))

    return PcdClassObject(Name, Guid, NewType, DatumType, Value, Token, MaxDatumSize, SkuInfoList, IsOverrided)
+
## Find Supported Module List Of LibraryClass
#
# Compute the module types a library class declaration effectively supports:
# a wildcard (all-module-types) declaration is narrowed by removing every
# module type that some other, more specific declaration of the same class
# claims for this arch.
#
# @param LibraryClass:             Library class declaration to evaluate
# @param OverridedLibraryClassList: All library class declarations in scope
# @param Arch:                     Supported arch
#
# @retval list SupModuleList
#
def FindSupModuleListOfLibraryClass(self, LibraryClass, OverridedLibraryClassList, Arch):
    # (fix) Dropped the dead 'FilePath' local the original assigned but
    # never used; restructured with an early return.
    Name = LibraryClass.Name
    SupModuleList = copy.copy(LibraryClass.SupModuleList)

    # Only a wildcard declaration can be narrowed by specific overrides
    if SupModuleList != DataType.SUP_MODULE_LIST:
        return SupModuleList

    EdkLogger.debug(EdkLogger.DEBUG_3, "\tLibraryClass %s supports all module types" % Name)
    for Item in OverridedLibraryClassList:
        # Only declarations of the same library class matter
        if Item.Name != Name:
            continue
        # Skip the wildcard entry itself (and any other wildcard twin)
        if Item.SupModuleList == DataType.SUP_MODULE_LIST:
            continue
        # Narrow only when both declarations apply to this arch
        if Arch in LibraryClass.SupArchList and Arch in Item.SupArchList:
            for ModuleType in Item.SupModuleList:
                EdkLogger.debug(EdkLogger.DEBUG_3, "\tLibraryClass %s has specific defined module types" % Name)
                if ModuleType in SupModuleList:
                    SupModuleList.remove(ModuleType)

    return SupModuleList
+
## Find Module in Platform
#
# Check whether an inf file is referenced anywhere in any DSC: as a
# platform library class instance, as a component module, as one of a
# component's library overrides, or as a plain platform library.
#
# @param Inf:     Inf file (module) being searched for
# @param Arch:    Supported arch
# @param InfList: A list of all inf files (NOTE: currently unused; kept
#                 for interface compatibility with callers)
#
# @retval True  Module found
# @retval False Module not found
#
def IsModuleDefinedInPlatform(self, Inf, Arch, InfList):
    for DscEntry in self.DscDatabase.values():
        Platform = DscEntry.Platform
        for LibraryClass in Platform.LibraryClasses.LibraryList:
            if Inf == LibraryClass.FilePath and Arch in LibraryClass.SupArchList:
                return True
        for Module in Platform.Modules.ModuleList:
            if Inf == Module.FilePath and Arch in Module.SupArchList:
                return True
            for Item in Module.LibraryClasses.LibraryList:
                if Inf == Item.FilePath:
                    return True
        for Library in Platform.Libraries.LibraryList:
            if Inf == Library.FilePath and Arch in Library.SupArchList:
                return True
    return False
+
## Show all content of the workspacebuild
#
# Debug aid: dump every record of the build database (per arch: platforms,
# packages, modules) to stdout with "Key = Value" lines, using Python 2
# print statements. The exact output layout is this routine's whole point
# (manual inspection/diffing), so it is deliberately left verbatim.
#
def ShowWorkspaceBuild(self):
    print self.DscDatabase
    print self.InfDatabase
    print self.DecDatabase
    print 'SupArchList', self.SupArchList
    print 'BuildTarget', self.BuildTarget
    print 'SkuId', self.SkuId

    for Arch in self.SupArchList:
        print Arch
        # -- platform records --
        print 'Platform'
        for Platform in self.Build[Arch].PlatformDatabase.keys():
            P = self.Build[Arch].PlatformDatabase[Platform]
            print 'DescFilePath = ', P.DescFilePath
            print 'PlatformName = ', P.PlatformName
            print 'Guid = ', P.Guid
            print 'Version = ', P.Version
            print 'OutputDirectory = ', P.OutputDirectory
            print 'FlashDefinition = ', P.FlashDefinition
            print 'SkuIds = ', P.SkuIds
            print 'Modules = ', P.Modules
            print 'LibraryClasses = ', P.LibraryClasses
            print 'Pcds = ', P.Pcds
            for item in P.Pcds.keys():
                print P.Pcds[item]
            print 'BuildOptions = ', P.BuildOptions
            print ''
        # End of Platform

        # -- package records --
        print 'package'
        for Package in self.Build[Arch].PackageDatabase.keys():
            P = self.Build[Arch].PackageDatabase[Package]
            print 'DescFilePath = ', P.DescFilePath
            print 'PackageName = ', P.PackageName
            print 'Guid = ', P.Guid
            print 'Version = ', P.Version
            print 'Protocols = ', P.Protocols
            print 'Ppis = ', P.Ppis
            print 'Guids = ', P.Guids
            print 'Includes = ', P.Includes
            print 'LibraryClasses = ', P.LibraryClasses
            print 'Pcds = ', P.Pcds
            for item in P.Pcds.keys():
                print P.Pcds[item]
            print ''
        # End of Package

        # -- module records --
        print 'module'
        for Module in self.Build[Arch].ModuleDatabase.keys():
            P = self.Build[Arch].ModuleDatabase[Module]
            print 'DescFilePath = ', P.DescFilePath
            print 'BaseName = ', P.BaseName
            print 'ModuleType = ', P.ModuleType
            print 'Guid = ', P.Guid
            print 'Version = ', P.Version
            print 'CustomMakefile = ', P.CustomMakefile
            print 'Specification = ', P.Specification
            print 'Shadow = ', P.Shadow
            print 'PcdIsDriver = ', P.PcdIsDriver
            for Lib in P.LibraryClass:
                print 'LibraryClassDefinition = ', Lib.LibraryClass, 'SupModList = ', Lib.SupModList
            print 'ModuleEntryPointList = ', P.ModuleEntryPointList
            print 'ModuleUnloadImageList = ', P.ModuleUnloadImageList
            print 'ConstructorList = ', P.ConstructorList
            print 'DestructorList = ', P.DestructorList

            print 'Binaries = '
            for item in P.Binaries:
                print item.BinaryFile, item.FeatureFlag, item.SupArchList
            print 'Sources = '
            for item in P.Sources:
                print item.SourceFile
            print 'LibraryClasses = ', P.LibraryClasses
            print 'Protocols = ', P.Protocols
            print 'Ppis = ', P.Ppis
            print 'Guids = ', P.Guids
            print 'Includes = ', P.Includes
            print 'Packages = ', P.Packages
            print 'Pcds = ', P.Pcds
            for item in P.Pcds.keys():
                print P.Pcds[item]
            print 'BuildOptions = ', P.BuildOptions
            print 'Depex = ', P.Depex
            print ''
        # End of Module
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script: runs a self-test that builds the whole workspace database for
# Nt32Pkg and dumps it.
#
if __name__ == '__main__':
    # NOTE(review): 'time' and 'WorkspaceBuild' are not imported by name in
    # the visible import block; presumably supplied by the wildcard imports
    # or defined earlier in this file -- confirm before running standalone.
    print 'Start!', time.strftime('%H:%M:%S', time.localtime())
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.QUIET)

    W = os.getenv('WORKSPACE')
    # Self-test: one PCD override (dummy value) and one extra inf from a
    # pretend FDF, then dump everything that was built.
    Ewb = WorkspaceBuild('Nt32Pkg/Nt32Pkg.dsc', W)
    Ewb.GenBuildDatabase({('PcdDevicePathSupportDevicePathFromText', 'gEfiMdeModulePkgTokenSpaceGuid') : 'KKKKKKKKKKKKKKKKKKKKK'}, ['Test.Inf'])
    print 'Done!', time.strftime('%H:%M:%S', time.localtime())
    Ewb.ShowWorkspaceBuild()
diff --git a/BaseTools/Source/Python/Common/EdkLogger.py b/BaseTools/Source/Python/Common/EdkLogger.py new file mode 100644 index 0000000000..ce4cfa14bb --- /dev/null +++ b/BaseTools/Source/Python/Common/EdkLogger.py @@ -0,0 +1,269 @@ +## @file +# This file implements the log mechanism for Python tools. +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## Import modules +import sys, os, logging +import traceback +from BuildToolError import * + +## Log level constants +DEBUG_0 = 1 +DEBUG_1 = 2 +DEBUG_2 = 3 +DEBUG_3 = 4 +DEBUG_4 = 5 +DEBUG_5 = 6 +DEBUG_6 = 7 +DEBUG_7 = 8 +DEBUG_8 = 9 +DEBUG_9 = 10 +VERBOSE = 15 +INFO = 20 +WARN = 30 +QUIET = 40 +ERROR = 50 + +IsRaiseError = True + +# Tool name +_ToolName = os.path.basename(sys.argv[0]) + +# For validation purpose +_LogLevels = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5, DEBUG_6, DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO, ERROR, QUIET] + +# For DEBUG level (All DEBUG_0~9 are applicable) +_DebugLogger = logging.getLogger("tool_debug") +_DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S") + +# For VERBOSE, INFO, WARN level +_InfoLogger = logging.getLogger("tool_info") +_InfoFormatter = logging.Formatter("%(message)s") + +# For ERROR level +_ErrorLogger = logging.getLogger("tool_error") +_ErrorFormatter = logging.Formatter("%(message)s") + +# String templates for ERROR/WARN/DEBUG log message +_ErrorMessageTemplate = '\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s' 
+_ErrorMessageTemplateWithoutFile = '\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s' +_WarningMessageTemplate = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s' +_WarningMessageTemplateWithoutFile = '%(tool)s: : warning: %(msg)s' +_DebugMessageTemplate = '%(file)s(%(line)s): debug: \n %(msg)s' + +# +# Flag used to take WARN as ERROR. +# By default, only ERROR message will break the tools execution. +# +_WarningAsError = False + +## Log debug message +# +# @param Level DEBUG level (DEBUG0~9) +# @param Message Debug information +# @param ExtraData More information associated with "Message" +# +def debug(Level, Message, ExtraData=None): + if _DebugLogger.level > Level: + return + if Level > DEBUG_9: + return + + # Find out the caller method information + CallerStack = traceback.extract_stack()[-2] + TemplateDict = { + "file" : CallerStack[0], + "line" : CallerStack[1], + "msg" : Message, + } + + if ExtraData != None: + LogText = _DebugMessageTemplate % TemplateDict + "\n %s" % ExtraData + else: + LogText = _DebugMessageTemplate % TemplateDict + + _DebugLogger.log(Level, LogText) + +## Log verbose message +# +# @param Message Verbose information +# +def verbose(Message): + return _InfoLogger.log(VERBOSE, Message) + +## Log warning message +# +# Warning messages are those which might be wrong but won't fail the tool. +# +# @param ToolName The name of the tool. If not given, the name of caller +# method will be used. +# @param Message Warning information +# @param File The name of file which caused the warning. +# @param Line The line number in the "File" which caused the warning. 
+# @param ExtraData More information associated with "Message" +# +def warn(ToolName, Message, File=None, Line=None, ExtraData=None): + if _InfoLogger.level > WARN: + return + + # if no tool name given, use caller's source file name as tool name + if ToolName == None or ToolName == "": + ToolName = os.path.basename(traceback.extract_stack()[-2][0]) + + if Line == None: + Line = "..." + else: + Line = "%d" % Line + + TemplateDict = { + "tool" : ToolName, + "file" : File, + "line" : Line, + "msg" : Message, + } + + if File != None: + LogText = _WarningMessageTemplate % TemplateDict + else: + LogText = _WarningMessageTemplateWithoutFile % TemplateDict + + if ExtraData != None: + LogText += "\n %s" % ExtraData + + _InfoLogger.log(WARN, LogText) + + # Raise an execption if indicated + if _WarningAsError == True: + raise FatalError(WARNING_AS_ERROR) + +## Log INFO message +info = _InfoLogger.info + +## Log ERROR message +# +# Once an error messages is logged, the tool's execution will be broken by raising +# an execption. If you don't want to break the execution later, you can give +# "RaiseError" with "False" value. +# +# @param ToolName The name of the tool. If not given, the name of caller +# method will be used. +# @param ErrorCode The error code +# @param Message Warning information +# @param File The name of file which caused the error. +# @param Line The line number in the "File" which caused the warning. +# @param ExtraData More information associated with "Message" +# @param RaiseError Raise an exception to break the tool's executuion if +# it's True. This is the default behavior. +# +def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError): + if Line == None: + Line = "..." 
+ else: + Line = "%d" % Line + + if Message == None: + if ErrorCode in gErrorMessage: + Message = gErrorMessage[ErrorCode] + else: + Message = gErrorMessage[UNKNOWN_ERROR] + + if ExtraData == None: + ExtraData = "" + + TemplateDict = { + "tool" : _ToolName, + "file" : File, + "line" : Line, + "errorcode" : ErrorCode, + "msg" : Message, + "extra" : ExtraData + } + + if File != None: + LogText = _ErrorMessageTemplate % TemplateDict + else: + LogText = _ErrorMessageTemplateWithoutFile % TemplateDict + + _ErrorLogger.log(ERROR, LogText) + if RaiseError: + raise FatalError(ErrorCode) + +# Log information which should be always put out +quiet = _ErrorLogger.error + +## Initialize log system +def Initialize(): + # + # Since we use different format to log different levels of message into different + # place (stdout or stderr), we have to use different "Logger" objects to do this. + # + # For DEBUG level (All DEBUG_0~9 are applicable) + _DebugLogger.setLevel(INFO) + _DebugChannel = logging.StreamHandler(sys.stdout) + _DebugChannel.setFormatter(_DebugFormatter) + _DebugLogger.addHandler(_DebugChannel) + + # For VERBOSE, INFO, WARN level + _InfoLogger.setLevel(INFO) + _InfoChannel = logging.StreamHandler(sys.stdout) + _InfoChannel.setFormatter(_InfoFormatter) + _InfoLogger.addHandler(_InfoChannel) + + # For ERROR level + _ErrorLogger.setLevel(INFO) + _ErrorCh = logging.StreamHandler(sys.stderr) + _ErrorCh.setFormatter(_ErrorFormatter) + _ErrorLogger.addHandler(_ErrorCh) + +## Set log level +# +# @param Level One of log level in _LogLevel +def SetLevel(Level): + if Level not in _LogLevels: + info("Not supported log level (%d). Use default level instead." 
% Level) + Level = INFO + _DebugLogger.setLevel(Level) + _InfoLogger.setLevel(Level) + _ErrorLogger.setLevel(Level) + +## Get current log level +def GetLevel(): + return _InfoLogger.getEffectiveLevel() + +## Raise up warning as error +def SetWarningAsError(): + global _WarningAsError + _WarningAsError = True + +## Specify a file to store the log message as well as put on console +# +# @param LogFile The file path used to store the log message +# +def SetLogFile(LogFile): + if os.path.exists(LogFile): + os.remove(LogFile) + + _Ch = logging.FileHandler(LogFile) + _Ch.setFormatter(_DebugFormatter) + _DebugLogger.addHandler(_Ch) + + _Ch= logging.FileHandler(LogFile) + _Ch.setFormatter(_InfoFormatter) + _InfoLogger.addHandler(_Ch) + + _Ch = logging.FileHandler(LogFile) + _Ch.setFormatter(_ErrorFormatter) + _ErrorLogger.addHandler(_Ch) + +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/Common/FdfClassObject.py b/BaseTools/Source/Python/Common/FdfClassObject.py new file mode 100644 index 0000000000..e0df1c20c2 --- /dev/null +++ b/BaseTools/Source/Python/Common/FdfClassObject.py @@ -0,0 +1,116 @@ +## @file
+# This file is used to define each component of FDF file
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from FdfParserLite import FdfParser
+from Table.TableFdf import TableFdf
+from CommonDataClass.DataClass import MODEL_FILE_FDF, MODEL_PCD, MODEL_META_DATA_COMPONENT
+from String import NormPath
+
## FdfObject
#
# This class defines the basic Fdf object, intended to be used via inheritance
#
# @param object: Inherited from object class
#
class FdfObject(object):
    ## Constructor
    def __init__(self):
        # (fix) The original called object.__init__() without passing 'self',
        # which raises TypeError the moment FdfObject (or a subclass chaining
        # up to it) is instantiated; pass the instance through.
        object.__init__(self)
+
## Fdf
#
# This class defines the structure used in the Fdf object
#
# @param FdfObject: Inherited from FdfObject class
# @param Filename: Input value for the filename of the Fdf file, default is None
# @param WorkspaceDir: Input value for the current workspace directory, default is None
#
class Fdf(FdfObject):
    ## In-memory representation of an FDF file, optionally mirrored to a database.
    #
    # @param Filename:     Path of the FDF file to load; nothing is parsed when None
    # @param IsToDatabase: Whether parsed records are inserted into the database
    # @param WorkspaceDir: Current workspace directory
    # @param Database:     Database object providing Cur, TblFile and TblFdf
    #                      (must not be None; it is dereferenced unconditionally)
    #
    def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
        self.WorkspaceDir = WorkspaceDir
        self.IsToDatabase = IsToDatabase

        self.Cur = Database.Cur
        self.TblFile = Database.TblFile
        self.TblFdf = Database.TblFdf
        self.FileID = -1
        # Normalized file path -> file table ID, to avoid duplicate records
        self.FileList = {}

        #
        # Load Fdf file if filename is not None
        #
        if Filename is not None:
            self.LoadFdfFile(Filename)

    #
    # Insert a FDF file record into database (at most once per unique path)
    #
    # @param Filename: Path of the file to record
    # @retval the file table ID of the (possibly pre-existing) record
    #
    def InsertFile(self, Filename):
        Filename = NormPath(Filename)
        if Filename not in self.FileList:
            # Removed the unused local FileID of the original; the dict is
            # the single source of truth.
            self.FileList[Filename] = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)

        return self.FileList[Filename]


    ## Load Fdf file
    #
    # Parse the given FDF file and, when IsToDatabase is set, insert its
    # PCD and INF component records into the database.
    #
    # @param Filename: Input value for filename of Fdf file
    #
    def LoadFdfFile(self, Filename):
        #
        # Parse Fdf file
        #
        Filename = NormPath(Filename)
        # Renamed from 'Fdf' in the original, which shadowed this class name
        Parser = FdfParser(Filename)
        Parser.ParseFile()

        #
        # Insert inf file and pcd information
        #
        if self.IsToDatabase:
            (Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
            (0, '', '', '', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
            # Removed a dead "for Index in range(...): pass" loop over
            # PcdDict that was present in the original; it had no effect.
            for Key in Parser.Profile.PcdDict.keys():
                Model = MODEL_PCD
                Value1 = ''
                # Key is a (PcdCName, TokenSpaceGuidCName) tuple
                Value2 = ".".join((Key[1], Key[0]))
                FileName = Parser.Profile.PcdFileLineDict[Key][0]
                StartLine = Parser.Profile.PcdFileLineDict[Key][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
            for Index in range(0, len(Parser.Profile.InfList)):
                Model = MODEL_META_DATA_COMPONENT
                Value1 = Parser.Profile.InfList[Index]
                Value2 = ''
                FileName = Parser.Profile.InfFileLineList[Index][0]
                StartLine = Parser.Profile.InfFileLineList[Index][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
# Placeholder entry point; this module is intended to be imported, not run.
if __name__ == '__main__':
    pass
diff --git a/BaseTools/Source/Python/Common/FdfParserLite.py b/BaseTools/Source/Python/Common/FdfParserLite.py new file mode 100644 index 0000000000..59006fa5c5 --- /dev/null +++ b/BaseTools/Source/Python/Common/FdfParserLite.py @@ -0,0 +1,3603 @@ +## @file
+# parse FDF file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import re
+import os
+
+import CommonDataClass.FdfClass
+
+##define T_CHAR_SPACE ' '
+##define T_CHAR_NULL '\0'
+##define T_CHAR_CR '\r'
+##define T_CHAR_TAB '\t'
+##define T_CHAR_LF '\n'
+##define T_CHAR_SLASH '/'
+##define T_CHAR_BACKSLASH '\\'
+##define T_CHAR_DOUBLE_QUOTE '\"'
+##define T_CHAR_SINGLE_QUOTE '\''
+##define T_CHAR_STAR '*'
+##define T_CHAR_HASH '#'
+
# Single-character lexer constants (named after the ##define notes above)
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')

# Characters that delimit tokens in addition to whitespace
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')

# Module-global parser state: profiles of files pulled in via !include
IncludeFileList = []
# Macro passed from command line, which has greatest priority and can NOT be overridden by those in FDF
InputMacroDict = {}
# All Macro values when parsing file, not replace existing Macro
AllMacroList = []
+
def GetRealFileLine (File, Line):
    ## Map a line number in the !include-expanded buffer back to (file, line).
    #
    # If Line falls inside an included file's inserted span, report that
    # include file and the 1-based line within it; otherwise subtract the
    # total size of all inserted spans that lie above Line.
    #
    Offset = 0
    for Inc in IncludeFileList:
        Span = Inc.InsertAdjust + len(Inc.FileLinesList)
        Start = Inc.InsertStartLineNumber
        if Start <= Line < Start + Span:
            return (Inc.FileName, Line - Start + 1)
        if Line >= Start + Span:
            Offset += Span

    return (File, Line - Offset)
+
+## The exception class that used to report error messages when parsing FDF
+#
+# Currently the "ToolName" is set to be "FDF Parser".
+#
class Warning (Exception):
    ## Exception reported for FDF parsing errors ("FDF Parser" tool name).
    #
    # Resolves the buffer line number back to the real source file/line via
    # GetRealFileLine before recording it.
    #
    # @param Str   The message to record (line number is appended to it)
    # @param File  The FDF file name
    # @param Line  The buffer line number at which the error occurred
    #
    def __init__(self, Str, File = None, Line = None):
        RealFile, RealLine = GetRealFileLine(File, Line)
        self.FileName = RealFile
        self.LineNumber = RealLine
        # Messages conventionally end with "At Line "; append the number.
        self.message = Str + str(self.LineNumber)
        self.ToolName = 'FDF Parser'
        # NOTE(review): Exception.__init__ is intentionally not called,
        # matching the original behavior (str(exc) stays empty).
+
+## The MACRO class that used to record macro value data when parsing include file
+#
+#
class MacroProfile :
    ## Record of a macro DEFINE encountered while parsing an include file.
    #
    # Stores where the macro was defined so later lookups can honor the
    # definition order.
    #
    # @param FileName The file containing the DEFINE
    # @param Line     The line number of the DEFINE
    #
    def __init__(self, FileName, Line):
        # Location of the definition
        self.FileName = FileName
        self.DefinedAtLine = Line
        # Name/value are filled in by the caller after construction
        self.MacroName = None
        self.MacroValue = None
+
+## The Include file content class that used to record file data when parsing include file
+#
+# May raise Exception when opening file.
+#
class IncludeFileProfile :
    ## Content holder for a file pulled in via !include.
    #
    # Reads the whole file (binary, unbuffered) into FileLinesList.
    # Raises Warning when the file cannot be opened.
    #
    # @param FileName The file to be parsed
    #
    def __init__(self, FileName):
        self.FileName = FileName
        self.FileLinesList = []
        try:
            with open(FileName, "rb", 0) as fsock:
                self.FileLinesList = fsock.readlines()
        except IOError:
            raise Warning("Error when opening file %s" % FileName)

        # Bookkeeping filled in when the file is spliced into the buffer;
        # used to map buffer lines back to this file.
        self.InsertStartLineNumber = None
        self.InsertAdjust = 0
+
+## The FDF content class that used to record file data when parsing FDF
+#
+# May raise Exception when opening file.
+#
class FileProfile :
    ## Content holder for the FDF file being parsed.
    #
    # Reads the whole file (binary, unbuffered) into FileLinesList and
    # prepares the empty containers that the parser fills in.
    # Raises Warning when the file cannot be opened.
    #
    # @param FileName The file to be parsed
    #
    def __init__(self, FileName):
        self.FileLinesList = []
        try:
            with open(FileName, "rb", 0) as fsock:
                self.FileLinesList = fsock.readlines()
        except IOError:
            raise Warning("Error when opening file %s" % FileName)

        # Containers populated during parsing:
        self.PcdDict = {}           # PCD records (keys are tuples; see FdfClassObject usage)
        self.InfList = []           # INF files referenced by the FDF

        self.PcdFileLineDict = {}   # source (file, line) for each PCD record
        self.InfFileLineList = []   # source (file, line) for each INF record

        self.FdDict = {}
        self.FvDict = {}
        self.CapsuleList = []
+
+## The syntax parser for FDF
+#
+# PreprocessFile method should be called prior to ParseFile
+# CycleReferenceCheck method can detect cycles in FDF contents
+#
+# GetNext*** procedures mean these procedures will get next token first, then make judgement.
+# Get*** procedures mean these procedures will make judgement on current token only.
+#
+class FdfParser(object):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file that to be parsed
+ #
+ def __init__(self, FileName):
+ self.Profile = FileProfile(FileName)
+ self.FileName = FileName
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+ self.CurrentFdName = None
+ self.CurrentFvName = None
+ self.__Token = ""
+ self.__SkippedChars = ""
+
+ self.__WipeOffArea = []
+
+ ## __IsWhiteSpace() method
+ #
+ # Whether char at current FileBufferPos is whitespace
+ #
+ # @param self The object pointer
+ # @param Char The char to test
+ # @retval True The char is a kind of white space
+ # @retval False The char is NOT a kind of white space
+ #
+ def __IsWhiteSpace(self, Char):
+ if Char in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_SPACE, T_CHAR_TAB, T_CHAR_LF):
+ return True
+ else:
+ return False
+
+ ## __SkipWhiteSpace() method
+ #
+ # Skip white spaces from current char, return number of chars skipped
+ #
+ # @param self The object pointer
+ # @retval Count The number of chars skipped
+ #
    def __SkipWhiteSpace(self):
        ## Consume whitespace from the cursor onward.
        #
        # @retval Count  number of whitespace chars consumed, when a
        #                non-whitespace char stops the scan
        # NOTE(review): falls off the loop with no explicit return (None)
        # when end of file is reached while skipping.
        #
        Count = 0
        while not self.__EndOfFile():
            Count += 1
            if self.__CurrentChar() in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_LF, T_CHAR_SPACE, T_CHAR_TAB):
                # Record what was skipped in __SkippedChars
                self.__SkippedChars += str(self.__CurrentChar())
                self.__GetOneChar()

            else:
                # Undo the pre-increment done for this non-whitespace char
                Count = Count - 1
                return Count
+
+ ## __EndOfFile() method
+ #
+ # Judge current buffer pos is at file end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def __EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ elif self.CurrentLineNumber > NumberOfLines:
+ return True
+ else:
+ return False
+
+ ## __EndOfLine() method
+ #
+ # Judge current buffer pos is at line end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def __EndOfLine(self):
+ if self.CurrentLineNumber > len(self.Profile.FileLinesList):
+ return True
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine:
+ return True
+ else:
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ #
+ def Rewind(self):
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def __UndoOneChar(self):
+
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## __GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def __GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## __CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def __CurrentChar(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+
+ ## __NextChar() method
+ #
+ # Get the one char pass the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def __NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ else:
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## __SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+ ## __CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
    def __CurrentLine(self):
        ## Return the current line's contents (a string, or a list of chars
        ## after __StringToList has been applied).
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+ def __StringToList(self):
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesList]
+ self.Profile.FileLinesList[-1].append(' ')
+
    def __ReplaceMacros(self, Str, File, Line):
        ## Expand $(NAME) references in Str.
        #
        # Lookup order: command-line macros (InputMacroDict) first, then
        # DEFINEs recorded for this file at or before Line (AllMacroList;
        # the last matching record wins). Unresolved macros are left in
        # place and scanning continues after them. Raises Warning when a
        # "$(" has no closing ")".
        #
        # @param Str   The string to expand
        # @param File  Real source file name (for DEFINE-scope matching)
        # @param Line  Real source line number
        # @retval the expanded string
        #
        MacroEnd = 0
        while Str.find('$(', MacroEnd) >= 0:
            MacroStart = Str.find('$(', MacroEnd)
            if Str.find(')', MacroStart) > 0:
                MacroEnd = Str.find(')', MacroStart)
                Name = Str[MacroStart + 2 : MacroEnd]
                Value = None
                if Name in InputMacroDict:
                    Value = InputMacroDict[Name]

                else:
                    # Latest matching DEFINE (<= Line) wins
                    for Profile in AllMacroList:
                        if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
                            Value = Profile.MacroValue

                if Value != None:
                    Str = Str.replace('$(' + Name + ')', Value)
                    # Resume scanning right after the substituted text
                    MacroEnd = MacroStart + len(Value)

            else:
                raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber)
        return Str
+
+ def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):
+ if StartPos[0] == EndPos[0]:
+ Offset = StartPos[1]
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+ return
+
+ Offset = StartPos[1]
+ while self.Profile.FileLinesList[StartPos[0]][Offset] not in ('\r', '\n'):
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+
+ Line = StartPos[0]
+ while Line < EndPos[0]:
+ Offset = 0
+ while self.Profile.FileLinesList[Line][Offset] not in ('\r', '\n'):
+ self.Profile.FileLinesList[Line][Offset] = Value
+ Offset += 1
+ Line += 1
+
+ Offset = 0
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[EndPos[0]][Offset] = Value
+ Offset += 1
+
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
    def PreprocessFile(self):
        ## Blank out all comments in the (list-of-chars) buffer with spaces,
        ## then restore the buffer to a list of strings and rewind.
        #
        # Handles // and '#' line comments and /* */ block comments; a '#'
        # inside a double-quoted string is not treated as a comment.
        # BUGBUG: No !include statement processing contained in this procedure.
        #
        self.Rewind()
        InComment = False
        DoubleSlashComment = False
        HashComment = False
        # HashComment in quoted string " " is ignored.
        InString = False

        while not self.__EndOfFile():

            # Track whether we are inside a double-quoted string
            if self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE and not InComment:
                InString = not InString
            # meet new line, then no longer in a comment for // and '#'
            if self.__CurrentChar() == T_CHAR_LF:
                self.CurrentLineNumber += 1
                self.CurrentOffsetWithinLine = 0
                if InComment and DoubleSlashComment:
                    InComment = False
                    DoubleSlashComment = False
                if InComment and HashComment:
                    InComment = False
                    HashComment = False
            # check for */ comment end
            elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = False
            # set comments to spaces
            elif InComment:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
            # check for // comment
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH and not self.__EndOfLine():
                InComment = True
                DoubleSlashComment = True
            # check for '#' comment
            elif self.__CurrentChar() == T_CHAR_HASH and not self.__EndOfLine() and not InString:
                InComment = True
                HashComment = True
            # check for /* comment start
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = True
            else:
                self.__GetOneChar()

        # restore from ListOfList to ListOfString
        self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
        self.Rewind()
+
+ ## PreprocessIncludeFile() method
+ #
+ # Preprocess file contents, replace !include statements with file contents.
+ # In the end, rewind the file buffer pointer to the beginning
+ #
+ # @param self The object pointer
+ #
    def PreprocessIncludeFile(self):
        ## Expand !include statements by splicing each included file's lines
        ## into the buffer; the processed !include line itself is commented
        ## out with a '#'.
        #
        # Paths may be absolute, $(WORKSPACE)-relative, or relative to the
        # FDF file's directory. Raises Warning when the file name is
        # missing or the file does not exist.
        #
        while self.__GetNextToken():

            if self.__Token == '!include':
                IncludeLine = self.CurrentLineNumber
                IncludeOffset = self.CurrentOffsetWithinLine - len('!include')
                if not self.__GetNextToken():
                    raise Warning("expected include file name At Line ", self.FileName, self.CurrentLineNumber)
                IncFileName = self.__Token
                if not os.path.isabs(IncFileName):
                    if IncFileName.startswith('$(WORKSPACE)'):
                        Str = IncFileName.replace('$(WORKSPACE)', os.environ.get('WORKSPACE'))
                        if os.path.exists(Str):
                            if not os.path.isabs(Str):
                                Str = os.path.abspath(Str)
                            IncFileName = Str
                    else:
                        # file is in the same dir with FDF file
                        FullFdf = self.FileName
                        if not os.path.isabs(self.FileName):
                            FullFdf = os.path.join(os.environ.get('WORKSPACE'), self.FileName)

                        IncFileName = os.path.join(os.path.dirname(FullFdf), IncFileName)

                if not os.path.exists(os.path.normpath(IncFileName)):
                    raise Warning("Include file not exists At Line ", self.FileName, self.CurrentLineNumber)

                IncFileProfile = IncludeFileProfile(os.path.normpath(IncFileName))

                CurrentLine = self.CurrentLineNumber
                CurrentOffset = self.CurrentOffsetWithinLine
                # list index of the insertion, note that line number is 'CurrentLine + 1'
                InsertAtLine = CurrentLine
                IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
                # deal with remaining portions after "!include filename", if exists.
                if self.__GetNextToken():
                    if self.CurrentLineNumber == CurrentLine:
                        # Split the trailing text onto its own inserted line
                        RemainingLine = self.__CurrentLine()[CurrentOffset:]
                        self.Profile.FileLinesList.insert(self.CurrentLineNumber, RemainingLine)
                        IncFileProfile.InsertAdjust += 1
                        self.CurrentLineNumber += 1
                        self.CurrentOffsetWithinLine = 0

                # Splice the included file's lines into the buffer
                for Line in IncFileProfile.FileLinesList:
                    self.Profile.FileLinesList.insert(InsertAtLine, Line)
                    self.CurrentLineNumber += 1
                    InsertAtLine += 1

                # Record the profile so GetRealFileLine can back-map lines
                IncludeFileList.append(IncFileProfile)

                # comment out the processed include file statement
                TempList = list(self.Profile.FileLinesList[IncludeLine - 1])
                TempList.insert(IncludeOffset, '#')
                self.Profile.FileLinesList[IncludeLine - 1] = ''.join(TempList)

        self.Rewind()
+
    ## PreprocessConditionalStatement() method
    #
    # Preprocess file contents: evaluate DEFINE and !ifdef/!ifndef/!if/
    # !elseif/!else/!endif directives, recording the directive text and the
    # untaken branches in self.__WipeOffArea for later blanking.
    # In the end, rewind the file buffer pointer to the beginning.
    #
    # @param self The object pointer
    #
    def PreprocessConditionalStatement(self):
        ## Evaluate conditional directives, queuing regions to wipe off.
        #
        # Raises Warning on malformed directives or unbalanced !if/!endif.
        #
        # IfList is a stack of if branches with elements of list [Pos, CondSatisfied, BranchDetermined]
        IfList = []
        while self.__GetNextToken():
            if self.__Token == 'DEFINE':
                DefineLine = self.CurrentLineNumber - 1
                DefineOffset = self.CurrentOffsetWithinLine - len('DEFINE')
                if not self.__GetNextToken():
                    raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                Macro = self.__Token
                if not self.__IsToken( "="):
                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

                if not self.__GetNextToken():
                    raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)

                if self.__GetStringData():
                    pass
                Value = self.__Token
                # Command-line macros (InputMacroDict) override file DEFINEs
                if not Macro in InputMacroDict:
                    FileLineTuple = GetRealFileLine(self.FileName, DefineLine + 1)
                    MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
                    MacProfile.MacroName = Macro
                    MacProfile.MacroValue = Value
                    AllMacroList.append(MacProfile)
                # The DEFINE statement itself is wiped from the buffer
                self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!ifdef', '!ifndef', '!if'):
                IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                IfList.append([IfStartPos, None, None])
                CondLabel = self.__Token

                if not self.__GetNextToken():
                    raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                MacroName = self.__Token
                # A leading '!' on the macro name negates the condition
                NotFlag = False
                if MacroName.startswith('!'):
                    NotFlag = True
                    MacroName = MacroName[1:]

                NotDefineFlag = False
                if CondLabel == '!ifndef':
                    NotDefineFlag = True
                if CondLabel == '!ifdef' or CondLabel == '!ifndef':
                    if NotFlag:
                        raise Warning("'NOT' operation not allowed for Macro name At Line ", self.FileName, self.CurrentLineNumber)

                if CondLabel == '!if':

                    if not self.__GetNextOp():
                        raise Warning("expected !endif At Line ", self.FileName, self.CurrentLineNumber)

                    if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                        Op = self.__Token
                        if not self.__GetNextToken():
                            raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
                        if self.__GetStringData():
                            pass
                        MacroValue = self.__Token
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    else:
                        # No operator: push the token back and do a truth test
                        self.CurrentOffsetWithinLine -= len(self.__Token)
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        # Wipe the directive text itself
                        self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                else:
                    # !ifdef / !ifndef: test whether the macro is defined
                    ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1)
                    if NotDefineFlag:
                        ConditionSatisfied = not ConditionSatisfied
                    BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        self.__WipeOffArea.append((IfStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!elseif', '!else'):
                ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                if len(IfList) <= 0:
                    raise Warning("Missing !if statement At Line ", self.FileName, self.CurrentLineNumber)
                if IfList[-1][1]:
                    # Previous branch taken: this branch will be wiped
                    IfList[-1] = [ElseStartPos, False, True]
                    self.__WipeOffArea.append((ElseStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    # Previous branch not taken: wipe it up to here
                    self.__WipeOffArea.append((IfList[-1][0], ElseStartPos))
                    IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
                    if self.__Token == '!elseif':
                        if not self.__GetNextToken():
                            raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                        MacroName = self.__Token
                        NotFlag = False
                        if MacroName.startswith('!'):
                            NotFlag = True
                            MacroName = MacroName[1:]

                        if not self.__GetNextOp():
                            raise Warning("expected !endif At Line ", self.FileName, self.CurrentLineNumber)

                        if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                            Op = self.__Token
                            if not self.__GetNextToken():
                                raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
                            if self.__GetStringData():
                                pass
                            MacroValue = self.__Token
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        else:
                            self.CurrentOffsetWithinLine -= len(self.__Token)
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        IfList[-1] = [IfList[-1][0], ConditionSatisfied, IfList[-1][2]]

                    if IfList[-1][1]:
                        if IfList[-1][2]:
                            # An earlier branch was already taken; suppress this one
                            IfList[-1][1] = False
                        else:
                            IfList[-1][2] = True
                            self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))


            elif self.__Token == '!endif':
                if IfList[-1][1]:
                    # Taken branch: wipe only the !endif text
                    self.__WipeOffArea.append(((self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len('!endif')), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    # Untaken branch: wipe from its start through !endif
                    self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                IfList.pop()


        if len(IfList) > 0:
            raise Warning("Missing !endif At Line ", self.FileName, self.CurrentLineNumber)
        self.Rewind()
+
+ def __EvaluateConditional(self, Name, Line, Op = None, Value = None):
+
+ FileLineTuple = GetRealFileLine(self.FileName, Line)
+ if Name in InputMacroDict:
+ MacroValue = InputMacroDict[Name]
+ if Op == None:
+ if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number At Line ", self.FileName, Line)
+
+ for Profile in AllMacroList:
+ if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
+ if Op == None:
+ if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != Profile.MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == Profile.MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(Profile.MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number At Line ", self.FileName, Line)
+
+ return False
+
+ ## __IsToken() method
+ #
+ # Check whether input string is found from current char position along
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsToken(self, String, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __IsKeyword() method
+ #
+ # Check whether input keyword is found from current char position along, whole word only!
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param Keyword The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsKeyword(self, KeyWord, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(KeyWord.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(KeyWord)
+ if index == 0:
+ followingChar = self.__CurrentLine()[self.CurrentOffsetWithinLine + len(KeyWord)]
+ if not str(followingChar).isspace() and followingChar not in SEPERATOR_TUPLE:
+ return False
+ self.CurrentOffsetWithinLine += len(KeyWord)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __GetNextWord() method
+ #
+ # Get next C name from file lines
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a C name string, file buffer pointer moved forward
+ # @retval False Not able to find a C name string, file buffer pointer not changed
+ #
+ def __GetNextWord(self):
+ self.__SkipWhiteSpace()
+ if self.__EndOfFile():
+ return False
+
+ TempChar = self.__CurrentChar()
+ StartPos = self.CurrentOffsetWithinLine
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') or TempChar == '_':
+ self.__GetOneChar()
+ while not self.__EndOfLine():
+ TempChar = self.__CurrentChar()
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') \
+ or (TempChar >= '0' and TempChar <= '9') or TempChar == '_' or TempChar == '-':
+ self.__GetOneChar()
+
+ else:
+ break
+
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+
+ return False
+
+ ## __GetNextToken() method
+ #
+ # Get next token unit before a seperator
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a token unit, file buffer pointer moved forward
+ # @retval False Not able to find a token unit, file buffer pointer not changed
+ #
    def __GetNextToken(self):
        ## Read the next token into self.__Token: either a run of
        ## non-space, non-separator chars, or a single separator char.
        #
        # @retval True when a token was read, False otherwise
        #
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        StartLine = self.CurrentLineNumber
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and TempChar not in SEPERATOR_TUPLE:
                self.__GetOneChar()
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                self.__GetOneChar()
                break
            else:
                break
#        else:
#            return False

        EndPos = self.CurrentOffsetWithinLine
        if self.CurrentLineNumber != StartLine:
            # Token ran to the end of the start line; clamp to that line
            EndPos = len(self.Profile.FileLinesList[StartLine-1])
        self.__Token = self.Profile.FileLinesList[StartLine-1][StartPos : EndPos]
        # NOTE(review): if the cursor wrapped to a new line, this offset
        # comparison can misreport an empty token — preserved as-is.
        if StartPos != self.CurrentOffsetWithinLine:
            return True
        else:
            return False
+
    def __GetNextOp(self):
        ## Read the next whitespace-delimited run of chars into
        ## self.__Token (unlike __GetNextToken, separator chars such as
        ## '=' do not end the token — used for operators like '==').
        #
        # @retval True when a token was read, False otherwise
        #
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space
            if not str(TempChar).isspace():
                self.__GetOneChar()
            else:
                break
        else:
            # while-else: the loop condition (__EndOfLine) became true
            # before a space terminator was seen; treat as no token
            return False

        if StartPos != self.CurrentOffsetWithinLine:
            self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
            return True
        else:
            return False
+ ## __GetNextGuid() method
+ #
+ # Get next token unit before a seperator
+ # If found, the GUID string is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a registry format GUID, file buffer pointer moved forward
+ # @retval False Not able to find a registry format GUID, file buffer pointer not changed
+ #
+ def __GetNextGuid(self):
+
+ if not self.__GetNextToken():
+ return False
+ p = re.compile('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}')
+ if p.match(self.__Token) != None:
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
    ## __UndoToken() method
    #
    # Go back one token unit in file buffer, so the next __GetNextToken()
    # call will re-read the token that was just consumed.
    #
    # @param  self        The object pointer
    #
    def __UndoToken(self):
        # First step back over the whitespace that followed the token.
        self.__UndoOneChar()
        while self.__CurrentChar().isspace():
            if not self.__UndoOneChar():
                # Reached the start of the buffer: re-consume the char and
                # give up without moving.
                self.__GetOneChar()
                return


        StartPos = self.CurrentOffsetWithinLine
        CurrentLine = self.CurrentLineNumber
        # Walk backwards over the token body, but never across a line
        # boundary.
        while CurrentLine == self.CurrentLineNumber:

            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and not TempChar in SEPERATOR_TUPLE:
                if not self.__UndoOneChar():
                    # Hit the start of the buffer; stop backing up.
                    break
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                return
            else:
                break

        # Went one char too far; advance back onto the token's first char.
        self.__GetOneChar()
+
+ ## __HexDigit() method
+ #
+ # Whether char input is a Hex data bit
+ #
+ # @param self The object pointer
+ # @param TempChar The char to test
+ # @retval True The char is a Hex data bit
+ # @retval False The char is NOT a Hex data bit
+ #
+ def __HexDigit(self, TempChar):
+ if (TempChar >= 'a' and TempChar <= 'f') or (TempChar >= 'A' and TempChar <= 'F') \
+ or (TempChar >= '0' and TempChar <= '9'):
+ return True
+ else:
+ return False
+
+ def __IsHex(self, HexStr):
+ if not HexStr.upper().startswith("0X"):
+ return False
+ if len(self.__Token) <= 2:
+ return False
+ charList = [c for c in HexStr[2 : ] if not self.__HexDigit( c)]
+ if len(charList) == 0:
+ return True
+ else:
+ return False
+ ## __GetNextHexNumber() method
+ #
+ # Get next HEX data before a seperator
+ # If found, the HEX data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a HEX data, file buffer pointer moved forward
+ # @retval False Not able to find a HEX data, file buffer pointer not changed
+ #
+ def __GetNextHexNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__IsHex(self.__Token):
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextDecimalNumber() method
+ #
+ # Get next decimal data before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a decimal data, file buffer pointer moved forward
+ # @retval False Not able to find a decimal data, file buffer pointer not changed
+ #
+ def __GetNextDecimalNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__Token.isdigit():
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextPcdName() method
+ #
+ # Get next PCD token space C name and PCD C name pair before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval Tuple PCD C name and PCD token space C name pair
+ #
+ def __GetNextPcdName(self):
+ if not self.__GetNextWord():
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+ pcdTokenSpaceCName = self.__Token
+
+ if not self.__IsToken( "."):
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextWord():
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+ pcdCName = self.__Token
+
+ return (pcdCName, pcdTokenSpaceCName)
+
+ ## __GetStringData() method
+ #
+ # Get string contents quoted in ""
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a string data, file buffer pointer moved forward
+ # @retval False Not able to find a string data, file buffer pointer not changed
+ #
+ def __GetStringData(self):
+ if self.__Token.startswith("\"") or self.__Token.startswith("L\""):
+ self.__UndoToken()
+ self.__SkipToToken("\"")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\""):
+ raise Warning("Missing Quote \" for String At Line ", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \" for String At Line ", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\"')
+ return True
+
+ elif self.__Token.startswith("\'") or self.__Token.startswith("L\'"):
+ self.__UndoToken()
+ self.__SkipToToken("\'")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\'"):
+ raise Warning("Missing Quote \' for String At Line ", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \' for String At Line ", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\'')
+ return True
+
+ else:
+ return False
+
+ ## __SkipToToken() method
+ #
+ # Search forward in file buffer for the string
+ # The skipped chars are put into self.__SkippedChars
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find the string, file buffer pointer moved forward
+ # @retval False Not able to find the string, file buffer pointer not changed
+ #
+ def __SkipToToken(self, String, IgnoreCase = False):
+ StartPos = self.GetFileBufferPos()
+
+ self.__SkippedChars = ""
+ while not self.__EndOfFile():
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__SkippedChars += String
+ return True
+ self.__SkippedChars += str(self.__CurrentChar())
+ self.__GetOneChar()
+
+ self.SetFileBufferPos( StartPos)
+ self.__SkippedChars = ""
+ return False
+
+ ## GetFileBufferPos() method
+ #
+ # Return the tuple of current line and offset within the line
+ #
+ # @param self The object pointer
+ # @retval Tuple Line number and offset pair
+ #
+ def GetFileBufferPos(self):
+ return (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ ## SetFileBufferPos() method
+ #
+ # Restore the file buffer position
+ #
+ # @param self The object pointer
+ # @param Pos The new file buffer position
+ #
+ def SetFileBufferPos(self, Pos):
+ (self.CurrentLineNumber, self.CurrentOffsetWithinLine) = Pos
+
    ## ParseFile() method
    #
    # Parse the file profile buffer to extract fd, fv ... information
    # Exception will be raised if syntax error found
    #
    # NOTE: the file is preprocessed in several passes -- comments stripped,
    # !include expanded, conditionals evaluated -- and __StringToList() is
    # re-run after each pass because the passes operate on the char-list
    # representation of the buffer.
    #
    # @param  self        The object pointer
    #
    def ParseFile(self):

        try:
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessIncludeFile()
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessConditionalStatement()
            self.__StringToList()
            # Blank out regions disabled by conditionals.
            for Pos in self.__WipeOffArea:
                self.__ReplaceFragment(Pos[0], Pos[1])
            self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]

            # Collect [Defines] macros (this loop body runs once -- see
            # __GetDefines, which always returns False).
            while self.__GetDefines():
                pass

            # Substitute macro references line by line before section parsing.
            Index = 0
            while Index < len(self.Profile.FileLinesList):
                FileLineTuple = GetRealFileLine(self.FileName, Index + 1)
                self.Profile.FileLinesList[Index] = self.__ReplaceMacros(self.Profile.FileLinesList[Index], FileLineTuple[0], FileLineTuple[1])
                Index += 1

            # Sections must appear in this order: [FD.], [FV.], [Capsule.].
            while self.__GetFd():
                pass

            while self.__GetFv():
                pass

            while self.__GetCapsule():
                pass

#            while self.__GetVtf():
#                pass
#
#            while self.__GetRule():
#                pass


        except Warning, X:
            # Rewind one token and attach parse context to the exception
            # before re-raising it to the caller.
            self.__UndoToken()
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            X.message += '\nGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
                'Previous Token: \"%s\" At line: %d, Offset Within Line: %d\n' \
                % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :].rstrip('\n').rstrip('\r'), FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise
+
    ## __GetDefines() method
    #
    # Get Defines section contents and store its data into AllMacrosList
    #
    # NOTE: this method ALWAYS returns False (even after successfully
    # parsing a [Defines] section), so the caller's
    # 'while self.__GetDefines(): pass' loop runs the body exactly once.
    #
    # @param  self        The object pointer
    # @retval True        Successfully find a Defines
    # @retval False       Not able to find a Defines
    #
    def __GetDefines(self):

        if not self.__GetNextToken():
            return False

        S = self.__Token.upper()
        # A section header other than [DEFINES]: validate it is a known
        # section, then push it back for the next parsing stage.
        if S.startswith("[") and not S.startswith("[DEFINES"):
            if not S.startswith("[FD.") and not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [DEFINES], [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[DEFINES", True):
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
            #    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [DEFINES", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "]"):
            raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)

        # Accumulate every 'NAME = value' pair into the global AllMacroList,
        # tagged with its real (pre-include-expansion) file and line.
        while self.__GetNextWord():
            Macro = self.__Token

            if not self.__IsToken("="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken() or self.__Token.startswith('['):
                raise Warning("expected MACRO value", self.FileName, self.CurrentLineNumber)
            Value = self.__Token
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
            MacProfile.MacroName = Macro
            MacProfile.MacroValue = Value
            AllMacroList.append(MacProfile)

        return False
+
    ## __GetFd() method
    #
    # Get FD section contents and store its data into FD dictionary of self.Profile
    #
    # @param  self        The object pointer
    # @retval True        Successfully find a FD
    # @retval False       Not able to find a FD
    #
    def __GetFd(self):

        if not self.__GetNextToken():
            return False

        S = self.__Token.upper()
        # A non-[FD.] section header: validate it is one of the sections
        # allowed to follow [FD.], then push it back.
        if S.startswith("[") and not S.startswith("[FD."):
            if not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE."):
                raise Warning("Unknown section At Line ", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[FD.", True):
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            # Debug aid printed before raising (Python 2 print statement).
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [FD.] At Line ", self.FileName, self.CurrentLineNumber)

        # FD names are stored and looked up upper-cased.
        FdName = self.__GetUiName()
        self.CurrentFdName = FdName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        FdObj = CommonDataClass.FdfClass.FDClassObject()
        FdObj.FdUiName = self.CurrentFdName
        self.Profile.FdDict[self.CurrentFdName] = FdObj
        Status = self.__GetCreateFile(FdObj)
        if not Status:
            raise Warning("FD name error At Line ", self.FileName, self.CurrentLineNumber)

        # Mandatory token statements (BaseAddress/Size/ErasePolarity/BlockSize).
        if not self.__GetTokenStatements(FdObj):
            return False

        self.__GetDefineStatements(FdObj)

        self.__GetSetStatements(FdObj)

        # At least one region layout is required; consume all of them.
        if not self.__GetRegionLayout(FdObj):
            raise Warning("expected region layout At Line ", self.FileName, self.CurrentLineNumber)

        while self.__GetRegionLayout(FdObj):
            pass
        return True
+
+ ## __GetUiName() method
+ #
+ # Return the UI name of a section
+ #
+ # @param self The object pointer
+ # @retval FdName UI name
+ #
+ def __GetUiName(self):
+ FdName = ""
+ if self.__GetNextWord():
+ FdName = self.__Token
+
+ return FdName
+
+ ## __GetCreateFile() method
+ #
+ # Return the output file name of object
+ #
+ # @param self The object pointer
+ # @param Obj object whose data will be stored in file
+ # @retval FdName UI name
+ #
+ def __GetCreateFile(self, Obj):
+
+ if self.__IsKeyword( "CREATE_FILE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected file name At Line ", self.FileName, self.CurrentLineNumber)
+
+ FileName = self.__Token
+ Obj.CreateFileName = FileName
+
+ return True
+
    ## __GetTokenStatements() method
    #
    # Parse the mandatory FD token statements, in this fixed order:
    #   BaseAddress = <hex> [| Pcd], Size = <hex> [| Pcd],
    #   ErasePolarity = 0|1, then one or more BlockSize statements.
    #
    # @param  self        The object pointer
    # @param  Obj         for whom token statement is got
    # @retval True        Successfully find a token statement
    # @retval False       Not able to find a token statement
    #
    def __GetTokenStatements(self, Obj):
        if not self.__IsKeyword( "BaseAddress"):
            raise Warning("BaseAddress missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex base address At Line ", self.FileName, self.CurrentLineNumber)

        # Kept as a string; converted with long(..., 0) where needed.
        Obj.BaseAddress = self.__Token

        # Optional '| TokenSpace.PcdName' records which PCD mirrors the value.
        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.BaseAddressPcd = pcdPair
            self.Profile.PcdDict[pcdPair] = long(Obj.BaseAddress, 0)
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple

        if not self.__IsKeyword( "Size"):
            raise Warning("Size missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex size At Line ", self.FileName, self.CurrentLineNumber)


        # Unlike BaseAddress, Size is stored as an integer.
        Obj.Size = long(self.__Token, 0)

        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.SizePcd = pcdPair
            self.Profile.PcdDict[pcdPair] = Obj.Size
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple

        if not self.__IsKeyword( "ErasePolarity"):
            raise Warning("ErasePolarity missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Erase Polarity At Line ", self.FileName, self.CurrentLineNumber)

        if self.__Token != "1" and self.__Token != "0":
            raise Warning("expected 1 or 0 Erase Polarity At Line ", self.FileName, self.CurrentLineNumber)

        Obj.ErasePolarity = self.__Token

        # Finally, one or more mandatory BlockSize statements.
        Status = self.__GetBlockStatements(Obj)
        return Status
+
+ ## __GetAddressStatements() method
+ #
+ # Get address statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom address statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetAddressStatements(self, Obj):
+
+ if self.__IsKeyword("BsBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address At Line ", self.FileName, self.CurrentLineNumber)
+
+ BsAddress = long(self.__Token, 0)
+ Obj.BsBaseAddress = BsAddress
+
+ if self.__IsKeyword("RtBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address At Line ", self.FileName, self.CurrentLineNumber)
+
+ RtAddress = long(self.__Token, 0)
+ Obj.RtBaseAddress = RtAddress
+
+ ## __GetBlockStatements() method
+ #
+ # Get block statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatements(self, Obj):
+
+ if not self.__GetBlockStatement(Obj):
+ raise Warning("expected block statement At Line ", self.FileName, self.CurrentLineNumber)
+
+ while self.__GetBlockStatement(Obj):
+ pass
+ return True
+
+ ## __GetBlockStatement() method
+ #
+ # Get block statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatement(self, Obj):
+ if not self.__IsKeyword( "BlockSize"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber() and not self.__GetNextDecimalNumber():
+ raise Warning("expected Hex block size At Line ", self.FileName, self.CurrentLineNumber)
+
+ BlockSize = long(self.__Token, 0)
+ BlockSizePcd = None
+ if self.__IsToken( "|"):
+ PcdPair = self.__GetNextPcdName()
+ BlockSizePcd = PcdPair
+ self.Profile.PcdDict[PcdPair] = BlockSize
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple
+
+ BlockNumber = None
+ if self.__IsKeyword( "NumBlocks"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected block numbers At Line ", self.FileName, self.CurrentLineNumber)
+
+ BlockNumber = long(self.__Token, 0)
+
+ Obj.BlockSizeList.append((BlockSize, BlockNumber, BlockSizePcd))
+ return True
+
+ ## __GetDefineStatements() method
+ #
+ # Get define statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatements(self, Obj):
+ while self.__GetDefineStatement( Obj):
+ pass
+
+ ## __GetDefineStatement() method
+ #
+ # Get define statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatement(self, Obj):
+ if self.__IsKeyword("DEFINE"):
+ self.__GetNextToken()
+ Macro = self.__Token
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ Macro = '$(' + Macro + ')'
+ Obj.DefineVarDict[Macro] = Value
+ return True
+
+ return False
+
+ ## __GetSetStatements() method
+ #
+ # Get set statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatements(self, Obj):
+ while self.__GetSetStatement(Obj):
+ pass
+
+ ## __GetSetStatement() method
+ #
+ # Get set statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatement(self, Obj):
+ if self.__IsKeyword("SET"):
+ PcdPair = self.__GetNextPcdName()
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ if Value.startswith("{"):
+ # deal with value with {}
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Value += self.__SkippedChars
+
+ Obj.SetVarDict[PcdPair] = Value
+ self.Profile.PcdDict[PcdPair] = Value
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
+ return True
+
+ return False
+
    ## __GetRegionLayout() method
    #
    # Get region layout for FD.  Syntax:
    #   <Offset> | <Size> [PcdOffset [| PcdSize]] [SET ...]
    #   [FV = ... | FILE = ... | DATA = {...}]
    # Parsing is lookahead-driven: each optional part is probed with a
    # token read and undone when it does not match.
    #
    # @param  self        The object pointer
    # @param  Fd          for whom region is got
    # @retval True        Successfully find
    # @retval False       Not able to find
    #
    def __GetRegionLayout(self, Fd):
        # A region must start with a hex offset; anything else means no
        # (further) region in this FD section.
        if not self.__GetNextHexNumber():
            return False

        RegionObj = CommonDataClass.FdfClass.RegionClassObject()
        RegionObj.Offset = long(self.__Token, 0)
        Fd.RegionList.append(RegionObj)

        if not self.__IsToken( "|"):
            raise Warning("expected '|' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Region Size At Line ", self.FileName, self.CurrentLineNumber)
        RegionObj.Size = long(self.__Token, 0)

        # Everything past offset|size is optional; a bare region is valid.
        if not self.__GetNextWord():
            return True

        # A word that is not one of the region-type keywords must be the
        # offset PCD name: push it back and parse the PCD pair(s).
        if not self.__Token in ("SET", "FV", "FILE", "DATA"):
            self.__UndoToken()
            RegionObj.PcdOffset = self.__GetNextPcdName()
            # The PCD mirrors the region's absolute address (FD base + offset).
            self.Profile.PcdDict[RegionObj.PcdOffset] = RegionObj.Offset + long(Fd.BaseAddress, 0)
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple
            if self.__IsToken( "|"):
                RegionObj.PcdSize = self.__GetNextPcdName()
                self.Profile.PcdDict[RegionObj.PcdSize] = RegionObj.Size
                FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
                self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple

            if not self.__GetNextWord():
                return True

        if self.__Token == "SET":
            self.__UndoToken()
            self.__GetSetStatements( RegionObj)
            if not self.__GetNextWord():
                return True

        # Dispatch on the region content type; the probed keyword is pushed
        # back so each handler re-parses it itself.
        if self.__Token == "FV":
            self.__UndoToken()
            self.__GetRegionFvType( RegionObj)

        elif self.__Token == "FILE":
            self.__UndoToken()
            self.__GetRegionFileType( RegionObj)

        else:
            self.__UndoToken()
            self.__GetRegionDataType( RegionObj)

        return True
+
+ ## __GetRegionFvType() method
+ #
+ # Get region fv data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionFvType(self, RegionObj):
+
+ if not self.__IsKeyword( "FV"):
+ raise Warning("expected Keyword 'FV' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "FV"
+ RegionObj.RegionDataList.append(self.__Token)
+
+ while self.__IsKeyword( "FV"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
+ ## __GetRegionFileType() method
+ #
+ # Get region file data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionFileType(self, RegionObj):
+
+ if not self.__IsKeyword( "FILE"):
+ raise Warning("expected Keyword 'FILE' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected File name At Line ", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "FILE"
+ RegionObj.RegionDataList.append( self.__Token)
+
+ while self.__IsKeyword( "FILE"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FILE name At Line ", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
+ ## __GetRegionDataType() method
+ #
+ # Get region array data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionDataType(self, RegionObj):
+
+ if not self.__IsKeyword( "DATA"):
+ raise Warning("expected Region Data type At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ if not self.__GetNextHexNumber():
+ raise Warning("Invalid Hex number At Line ", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionType = "DATA"
+ RegionObj.RegionDataList.append( DataString)
+
+ while self.__IsKeyword( "DATA"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ self.__GetNextHexNumber()
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionDataList.append( DataString)
+
    ## __GetFv() method
    #
    # Get FV section contents and store its data into FV dictionary of self.Profile
    #
    # @param  self        The object pointer
    # @retval True        Successfully find a FV
    # @retval False       Not able to find a FV
    #
    def __GetFv(self):
        if not self.__GetNextToken():
            return False

        S = self.__Token.upper()
        # A non-[FV.] section header: validate it is one of the sections
        # allowed to follow [FV.], then push it back.
        if S.startswith("[") and not S.startswith("[FV."):
            if not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE."):
                raise Warning("Unknown section or section appear sequence error \n(The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.]) At Line ", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[FV.", True):
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            # Debug aid printed before raising (Python 2 print statement).
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("Unknown Keyword At Line ", self.FileName, self.CurrentLineNumber)

        # FV names are stored and looked up upper-cased.
        FvName = self.__GetUiName()
        self.CurrentFvName = FvName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        FvObj = CommonDataClass.FdfClass.FvClassObject()
        FvObj.UiFvName = self.CurrentFvName
        self.Profile.FvDict[self.CurrentFvName] = FvObj

        Status = self.__GetCreateFile(FvObj)
        if not Status:
            raise Warning("FV name error At Line ", self.FileName, self.CurrentLineNumber)

        # All of the following statements are optional.
        self.__GetDefineStatements(FvObj)

        self.__GetAddressStatements(FvObj)

        self.__GetBlockStatement(FvObj)

        self.__GetSetStatements(FvObj)

        self.__GetFvAlignment(FvObj)

        self.__GetFvAttributes(FvObj)

        self.__GetFvNameGuid(FvObj)

        # Called twice deliberately: an FV may carry up to two APRIORI
        # sections (presumably one PEI and one DXE -- TODO confirm).
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())

        # Consume INF and FILE statements in any order until neither matches.
        while True:
            isInf = self.__GetInfStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            isFile = self.__GetFileStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            if not isInf and not isFile:
                break

        return True
+
+ ## __GetFvAlignment() method
+ #
+ # Get alignment for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom alignment is got
+ # @retval True Successfully find a alignment statement
+ # @retval False Not able to find a alignment statement
+ #
+ def __GetFvAlignment(self, Obj):
+
+ if not self.__IsKeyword( "FvAlignment"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value At Line ", self.FileName, self.CurrentLineNumber)
+
+ if self.__Token.upper() not in ("1", "2", "4", "8", "16", "32", "64", "128", "256", "512", \
+ "1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K", \
+ "1M", "2M", "4M", "8M", "16M", "32M", "64M", "128M", "256M", "512M", \
+ "1G", "2G"):
+ raise Warning("Unknown alignment value At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.FvAlignment = self.__Token
+ return True
+
+ ## __GetFvAttributes() method
+ #
+ # Get attributes for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom attribute is got
+ # @retval None
+ #
+ def __GetFvAttributes(self, FvObj):
+
+ while self.__GetNextWord():
+ name = self.__Token
+ if name not in ("ERASE_POLARITY", "MEMORY_MAPPED", \
+ "STICKY_WRITE", "LOCK_CAP", "LOCK_STATUS", "WRITE_ENABLED_CAP", \
+ "WRITE_DISABLED_CAP", "WRITE_STATUS", "READ_ENABLED_CAP", \
+ "READ_DISABLED_CAP", "READ_STATUS", "READ_LOCK_CAP", \
+ "READ_LOCK_STATUS", "WRITE_LOCK_CAP", "WRITE_LOCK_STATUS", \
+ "WRITE_POLICY_RELIABLE"):
+ self.__UndoToken()
+ return
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvAttributeDict[name] = self.__Token
+
+ return
+
+ ## __GetFvNameGuid() method
+ #
+ # Get FV GUID for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom GUID is got
+ # @retval None
+ #
+ def __GetFvNameGuid(self, FvObj):
+
+ if not self.__IsKeyword( "FvNameGuid"):
+ return
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextGuid():
+ raise Warning("expected FV GUID value", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvNameGuid = self.__Token
+
+ return
+
+ ## __GetAprioriSection() method
+ #
+ # Get token statements
+ #
+ # @param self The object pointer
+ # @param FvObj for whom apriori is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find apriori statement
+ # @retval False Not able to find apriori statement
+ #
+ def __GetAprioriSection(self, FvObj, MacroDict = {}):
+
+ if not self.__IsKeyword( "APRIORI"):
+ return False
+
+ if not self.__IsKeyword("PEI") and not self.__IsKeyword("DXE"):
+ raise Warning("expected Apriori file type At Line ", self.FileName, self.CurrentLineNumber)
+ AprType = self.__Token
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ AprSectionObj = CommonDataClass.FdfClass.AprioriSectionClassObject()
+ AprSectionObj.AprioriType = AprType
+
+ self.__GetDefineStatements(AprSectionObj)
+ MacroDict.update(AprSectionObj.DefineVarDict)
+
+ while True:
+ IsInf = self.__GetInfStatement( AprSectionObj, MacroDict = MacroDict)
+ IsFile = self.__GetFileStatement( AprSectionObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvObj.AprioriSectionList.append(AprSectionObj)
+ return True
+
+ ## __GetInfStatement() method
+ #
+ # Get INF statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom inf statement is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find inf statement
+ # @retval False Not able to find inf statement
+ #
+ def __GetInfStatement(self, Obj, ForCapsule = False, MacroDict = {}):
+
+ if not self.__IsKeyword( "INF"):
+ return False
+
+ ffsInf = CommonDataClass.FdfClass.FfsInfStatementClassObject()
+ self.__GetInfOptions( ffsInf)
+
+ if not self.__GetNextToken():
+ raise Warning("expected INF file path At Line ", self.FileName, self.CurrentLineNumber)
+ ffsInf.InfFileName = self.__Token
+
+# if ffsInf.InfFileName.find('$') >= 0:
+# ffsInf.InfFileName = GenFdsGlobalVariable.GenFdsGlobalVariable.MacroExtend(ffsInf.InfFileName, MacroDict)
+
+ if not ffsInf.InfFileName in self.Profile.InfList:
+ self.Profile.InfList.append(ffsInf.InfFileName)
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.InfFileLineList.append(FileLineTuple)
+
+ if self.__IsToken('|'):
+ if self.__IsKeyword('RELOCS_STRIPPED'):
+ ffsInf.KeepReloc = False
+ elif self.__IsKeyword('RELOCS_RETAINED'):
+ ffsInf.KeepReloc = True
+ else:
+ raise Warning("Unknown reloc strip flag At Line ", self.FileName, self.CurrentLineNumber)
+
+ if ForCapsule:
+ capsuleFfs = CapsuleData.CapsuleFfs()
+ capsuleFfs.Ffs = ffsInf
+ Obj.CapsuleDataList.append(capsuleFfs)
+ else:
+ Obj.FfsList.append(ffsInf)
+ return True
+
    ## __GetInfOptions() method
    #
    # Parse the optional modifiers that may follow the INF keyword:
    # RuleOverride, VERSION, UI, USE and a comma-separated list of
    # "Target_Tag_Arch" key strings. Each modifier is optional; they are
    # expected in exactly this order because each check consumes tokens.
    #
    # @param self The object pointer
    # @param FfsInfObj for whom option is got
    #
    def __GetInfOptions(self, FfsInfObj):

        # RuleOverride = <RuleName>: build this file with a named [Rule.*].
        if self.__IsKeyword( "RuleOverride"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Rule name At Line ", self.FileName, self.CurrentLineNumber)
            FfsInfObj.Rule = self.__Token

        if self.__IsKeyword( "VERSION"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Version At Line ", self.FileName, self.CurrentLineNumber)

            # NOTE(review): value is stored only when __GetStringData succeeds
            # (presumably a quoted string); a bare token is silently dropped —
            # confirm against __GetStringData's definition.
            if self.__GetStringData():
                FfsInfObj.Version = self.__Token

        if self.__IsKeyword( "UI"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected UI name At Line ", self.FileName, self.CurrentLineNumber)

            if self.__GetStringData():
                FfsInfObj.Ui = self.__Token

        if self.__IsKeyword( "USE"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected ARCH name", self.FileName, self.CurrentLineNumber)
            FfsInfObj.UseArch = self.__Token


        # Optional comma-separated KeyString list (e.g. "MYTOOLS_*_IA32").
        if self.__GetNextToken():
            p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if p.match(self.__Token):
                FfsInfObj.KeyStringList.append(self.__Token)
                if not self.__IsToken(","):
                    return
            else:
                # Not a KeyString at all: hand the token back to the caller.
                self.__UndoToken()
                return

            # First KeyString was followed by ','; the rest must all match.
            while self.__GetNextToken():
                if not p.match(self.__Token):
                    raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
                FfsInfObj.KeyStringList.append(self.__Token)

                if not self.__IsToken(","):
                    break
+
+ ## __GetFileStatement() method
+ #
+ # Get FILE statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom FILE statement is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find FILE statement
+ # @retval False Not able to find FILE statement
+ #
+ def __GetFileStatement(self, Obj, ForCapsule = False, MacroDict = {}):
+
+ if not self.__IsKeyword( "FILE"):
+ return False
+
+ FfsFileObj = CommonDataClass.FdfClass.FileStatementClassObject()
+
+ if not self.__GetNextWord():
+ raise Warning("expected FFS type At Line ", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FvFileType = self.__Token
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextGuid():
+ if not self.__GetNextWord():
+ raise Warning("expected File GUID", self.FileName, self.CurrentLineNumber)
+ if self.__Token == 'PCD':
+ if not self.__IsToken( "("):
+ raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
+ PcdPair = self.__GetNextPcdName()
+ if not self.__IsToken( ")"):
+ raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
+ self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
+
+ FfsFileObj.NameGuid = self.__Token
+
+ self.__GetFilePart( FfsFileObj, MacroDict.copy())
+
+ if ForCapsule:
+ capsuleFfs = CapsuleData.CapsuleFfs()
+ capsuleFfs.Ffs = FfsFileObj
+ Obj.CapsuleDataList.append(capsuleFfs)
+ else:
+ Obj.FfsList.append(FfsFileObj)
+
+ return True
+
+ ## __FileCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a file type.
+ #
+ # @param self The object pointer
+ # @param FileType The file type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __FileCouldHaveRelocFlag (self, FileType):
+ if FileType in ('SEC', 'PEI_CORE', 'PEIM', 'PEI_DXE_COMBO'):
+ return True
+ else:
+ return False
+
+ ## __SectionCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a section type.
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __SectionCouldHaveRelocFlag (self, SectionType):
+ if SectionType in ('TE', 'PE32'):
+ return True
+ else:
+ return False
+
    ## __GetFilePart() method
    #
    # Parse the body of a FILE statement: options, then "{ ... }" containing
    # either an FV/FD reference, section data, or a plain file name.
    #
    # @param self The object pointer
    # @param FfsFileObj for whom component is got
    # @param MacroDict dictionary used to replace macro
    #
    def __GetFilePart(self, FfsFileObj, MacroDict = {}):

        self.__GetFileOpts( FfsFileObj)

        if not self.__IsToken("{"):
#            if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
#                if self.__FileCouldHaveRelocFlag(FfsFileObj.FvFileType):
#                    if self.__Token == 'RELOCS_STRIPPED':
#                        FfsFileObj.KeepReloc = False
#                    else:
#                        FfsFileObj.KeepReloc = True
#                else:
#                    raise Warning("File type %s could not have reloc strip flag At Line %d" % (FfsFileObj.FvFileType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
#
#            if not self.__IsToken("{"):
            raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected File name or section data At Line ", self.FileName, self.CurrentLineNumber)

        # The first token inside the braces decides the content kind.
        if self.__Token == "FV":
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FvName = self.__Token

        elif self.__Token == "FD":
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FD name At Line ", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FdName = self.__Token

        elif self.__Token in ("DEFINE", "APRIORI", "SECTION"):
            # Section-style content: push the keyword back and parse sections.
            self.__UndoToken()
            self.__GetSectionData( FfsFileObj, MacroDict)
        else:
            # Anything else is taken verbatim as a file name.
            FfsFileObj.FileName = self.__Token

        if not self.__IsToken( "}"):
            raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
    ## __GetFileOpts() method
    #
    # Parse the optional modifiers of a FILE statement: a comma-separated
    # "Target_Tag_Arch" KeyString list, then FIXED, CHECKSUM and Align.
    #
    # @param self The object pointer
    # @param FfsFileObj for whom options is got
    #
    def __GetFileOpts(self, FfsFileObj):

        # Optional KeyString list; a non-matching token is handed back.
        if self.__GetNextToken():
            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if Pattern.match(self.__Token):
                FfsFileObj.KeyStringList.append(self.__Token)
                if self.__IsToken(","):
                    while self.__GetNextToken():
                        if not Pattern.match(self.__Token):
                            raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
                        FfsFileObj.KeyStringList.append(self.__Token)

                        if not self.__IsToken(","):
                            break

            else:
                self.__UndoToken()

        # NOTE(review): the second argument to __IsKeyword appears to select a
        # relaxed/case-insensitive match — confirm against its definition.
        if self.__IsKeyword( "FIXED", True):
            FfsFileObj.Fixed = True

        if self.__IsKeyword( "CHECKSUM", True):
            FfsFileObj.CheckSum = True

        # Align = <value>; the value token is left in self.__Token.
        if self.__GetAlignment():
            FfsFileObj.Alignment = self.__Token
+
+ ## __GetAlignment() method
+ #
+ # Return the alignment value
+ #
+ # @param self The object pointer
+ # @retval True Successfully find alignment
+ # @retval False Not able to find alignment
+ #
+ def __GetAlignment(self):
+ if self.__IsKeyword( "Align", True):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value At Line ", self.FileName, self.CurrentLineNumber)
+ return True
+
+ return False
+
    ## __GetSectionData() method
    #
    # Parse section data for a FILE statement: DEFINEs, up to two apriori
    # sections, then leaf/encapsulation sections until none matches.
    #
    # @param self The object pointer
    # @param FfsFileObj for whom section is got
    # @param MacroDict dictionary used to replace macro
    #
    def __GetSectionData(self, FfsFileObj, MacroDict = {}):
        # NOTE(review): mutable default argument; safe only while every
        # caller passes an explicit dictionary — consider defaulting to None.
        Dict = {}
        Dict.update(MacroDict)

        self.__GetDefineStatements(FfsFileObj)

        Dict.update(FfsFileObj.DefineVarDict)
        # Called twice deliberately, it seems, so that both a PEI and a DXE
        # apriori section can be present — confirm intent before changing.
        self.__GetAprioriSection(FfsFileObj, Dict.copy())
        self.__GetAprioriSection(FfsFileObj, Dict.copy())

        while True:
            IsLeafSection = self.__GetLeafSection(FfsFileObj, Dict)
            IsEncapSection = self.__GetEncapsulationSec(FfsFileObj)
            if not IsLeafSection and not IsEncapSection:
                break
+
+ ## __GetLeafSection() method
+ #
+ # Get leaf section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetLeafSection(self, Obj, MacroDict = {}):
+
+ OldPos = self.GetFileBufferPos()
+
+ if not self.__IsKeyword( "SECTION"):
+ if len(Obj.SectionList) == 0:
+ raise Warning("expected SECTION At Line ", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ BuildNum = None
+ if self.__IsKeyword( "BUILD_NUM"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Build number value At Line ", self.FileName, self.CurrentLineNumber)
+
+ BuildNum = self.__Token
+
+ if self.__IsKeyword( "VERSION"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected version At Line ", self.FileName, self.CurrentLineNumber)
+ VerSectionObj = CommonDataClass.FdfClass.VerSectionClassObject()
+ VerSectionObj.Alignment = AlignValue
+ VerSectionObj.BuildNum = BuildNum
+ if self.__GetStringData():
+ VerSectionObj.StringData = self.__Token
+ else:
+ VerSectionObj.FileName = self.__Token
+ Obj.SectionList.append(VerSectionObj)
+
+ elif self.__IsKeyword( "UI"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected UI At Line ", self.FileName, self.CurrentLineNumber)
+ UiSectionObj = CommonDataClass.FdfClass.UiSectionClassObject()
+ UiSectionObj.Alignment = AlignValue
+ if self.__GetStringData():
+ UiSectionObj.StringData = self.__Token
+ else:
+ UiSectionObj.FileName = self.__Token
+ Obj.SectionList.append(UiSectionObj)
+
+ elif self.__IsKeyword( "FV_IMAGE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextWord():
+ raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvName = self.__Token.upper()
+ FvObj = None
+
+ if self.__IsToken( "{"):
+ FvObj = Fv.FV()
+ FvObj.UiFvName = FvName
+ self.__GetDefineStatements(FvObj)
+ MacroDict.update(FvObj.DefineVarDict)
+ self.__GetBlockStatement(FvObj)
+ self.__GetSetStatements(FvObj)
+ self.__GetFvAlignment(FvObj)
+ self.__GetFvAttributes(FvObj)
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+
+ while True:
+ IsInf = self.__GetInfStatement(FvObj, MacroDict.copy())
+ IsFile = self.__GetFileStatement(FvObj, MacroDict.copy())
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()
+ FvImageSectionObj.Alignment = AlignValue
+ if FvObj != None:
+ FvImageSectionObj.Fv = FvObj
+ FvImageSectionObj.FvName = None
+ else:
+ FvImageSectionObj.FvName = FvName
+
+ Obj.SectionList.append(FvImageSectionObj)
+
+ elif self.__IsKeyword("PEI_DEPEX_EXP") or self.__IsKeyword("DXE_DEPEX_EXP"):
+ DepexSectionObj = CommonDataClass.FdfClass.DepexSectionClassObject()
+ DepexSectionObj.Alignment = AlignValue
+ DepexSectionObj.DepexType = self.__Token
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected Depex expression ending '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DepexSectionObj.Expression = self.__SkippedChars.rstrip('}')
+ Obj.SectionList.append(DepexSectionObj)
+
+ else:
+
+ if not self.__GetNextWord():
+ raise Warning("expected section type At Line ", self.FileName, self.CurrentLineNumber)
+
+ # Encapsulation section appear, UndoToken and return
+ if self.__Token == "COMPRESS" or self.__Token == "GUIDED":
+ self.SetFileBufferPos(OldPos)
+ return False
+
+ if self.__Token not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+ "UI", "VERSION", "PEI_DEPEX", "SUBTYPE_GUID", "SMM_DEPEX"):
+ raise Warning("Unknown section type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ # DataSection
+ DataSectionObj = CommonDataClass.FdfClass.DataSectionClassObject()
+ DataSectionObj.Alignment = AlignValue
+ DataSectionObj.SecType = self.__Token
+
+ if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
+ if self.__FileCouldHaveRelocFlag(Obj.FvFileType) and self.__SectionCouldHaveRelocFlag(DataSectionObj.SecType):
+ if self.__Token == 'RELOCS_STRIPPED':
+ DataSectionObj.KeepReloc = False
+ else:
+ DataSectionObj.KeepReloc = True
+ else:
+ raise Warning("File type %s, section type %s, could not have reloc strip flag At Line %d" % (Obj.FvFileType, DataSectionObj.SecType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if self.__IsToken("="):
+ if not self.__GetNextToken():
+ raise Warning("expected section file path At Line ", self.FileName, self.CurrentLineNumber)
+ DataSectionObj.SectFileName = self.__Token
+ else:
+ if not self.__GetCglSection(DataSectionObj):
+ return False
+
+ Obj.SectionList.append(DataSectionObj)
+
+ return True
+
+ ## __GetCglSection() method
+ #
+ # Get compressed or GUIDed section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param AlignValue alignment value for complex section
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetCglSection(self, Obj, AlignValue = None):
+
+ if self.__IsKeyword( "COMPRESS"):
+ type = "PI_STD"
+ if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
+ type = self.__Token
+
+ if not self.__IsToken("{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ CompressSectionObj = CommonDataClass.FdfClass.CompressSectionClassObject()
+ CompressSectionObj.Alignment = AlignValue
+ CompressSectionObj.CompType = type
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(CompressSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(CompressSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(CompressSectionObj)
+
+# else:
+# raise Warning("Compress type not known At Line ")
+
+ return True
+
+ elif self.__IsKeyword( "GUIDED"):
+ GuidValue = None
+ if self.__GetNextGuid():
+ GuidValue = self.__Token
+
+ AttribDict = self.__GetGuidAttrib()
+ if not self.__IsToken("{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+ GuidSectionObj = CommonDataClass.FdfClass.GuidSectionClassObject()
+ GuidSectionObj.Alignment = AlignValue
+ GuidSectionObj.NameGuid = GuidValue
+ GuidSectionObj.SectionType = "GUIDED"
+ GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+ GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(GuidSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(GuidSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(GuidSectionObj)
+
+ return True
+
+ return False
+
+ ## __GetGuidAttri() method
+ #
+ # Get attributes for GUID section
+ #
+ # @param self The object pointer
+ # @retval AttribDict Dictionary of key-value pair of section attributes
+ #
+ def __GetGuidAttrib(self):
+
+ AttribDict = {}
+ AttribDict["PROCESSING_REQUIRED"] = False
+ AttribDict["AUTH_STATUS_VALID"] = False
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' At Line ")
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ return AttribDict
+
+ ## __GetEncapsulationSec() method
+ #
+ # Get encapsulation section for FILE
+ #
+ # @param self The object pointer
+ # @param FfsFile for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetEncapsulationSec(self, FfsFileObj):
+
+ OldPos = self.GetFileBufferPos()
+ if not self.__IsKeyword( "SECTION"):
+ if len(FfsFileObj.SectionList) == 0:
+ raise Warning("expected SECTION At Line ", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ if not self.__GetCglSection(FfsFileObj, AlignValue):
+ self.SetFileBufferPos(OldPos)
+ return False
+ else:
+ return True
+
    ## __GetCapsule() method
    #
    # Get capsule section contents and store its data into capsule list of self.Profile
    #
    # @param self The object pointer
    # @retval True Successfully find a capsule
    # @retval False Not able to find a capsule
    #
    def __GetCapsule(self):

        if not self.__GetNextToken():
            return False

        # A later section header means there is no [Capsule.] here; section
        # order is enforced (FD, FV, Capsule, VTF, Rule, OptionRom).
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[CAPSULE."):
            if not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[CAPSULE.", True):
            # Diagnostic dump of the offending source position before raising.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [Capsule.] At Line ", self.FileName, self.CurrentLineNumber)

        CapsuleObj = CommonDataClass.FdfClass.CapsuleClassObject()

        CapsuleName = self.__GetUiName()
        if not CapsuleName:
            raise Warning("expected capsule name At line ", self.FileName, self.CurrentLineNumber)

        CapsuleObj.UiCapsuleName = CapsuleName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        # Optional CREATE_FILE = <name> override for the output file.
        if self.__IsKeyword("CREATE_FILE"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected file name At Line ", self.FileName, self.CurrentLineNumber)

            CapsuleObj.CreateFile = self.__Token

        self.__GetCapsuleStatements(CapsuleObj)
        self.Profile.CapsuleList.append(CapsuleObj)
        return True
+
    ## __GetCapsuleStatements() method
    #
    # Parse the full body of a [Capsule.] section.
    #
    # @param self The object pointer
    # @param Obj for whom statements are got
    #
    def __GetCapsuleStatements(self, Obj):
        # Order matters: token assignments first, then DEFINE and SET lines,
        # then the capsule payload (INF/FILE/FV statements).
        self.__GetCapsuleTokens(Obj)
        self.__GetDefineStatements(Obj)
        self.__GetSetStatements(Obj)

        self.__GetCapsuleData(Obj)
+
    ## __GetCapsuleTokens() method
    #
    # Get token statements for capsule
    #
    # @param self The object pointer
    # @param Obj for whom token statements are got
    #
    def __GetCapsuleTokens(self, Obj):

        if not self.__IsKeyword("CAPSULE_GUID"):
            raise Warning("expected 'CAPSULE_GUID' At Line ", self.FileName, self.CurrentLineNumber)

        # Read raw source lines (bypassing the tokenizer) and split each
        # "name = value" pair into TokensDict, stopping at the first line
        # without an '='.
        while self.__CurrentLine().find("=") != -1:
            NameValue = self.__CurrentLine().split("=")
            Obj.TokensDict[NameValue[0].strip()] = NameValue[1].strip()
            # Advance the scanner to the start of the next physical line.
            self.CurrentLineNumber += 1
            self.CurrentOffsetWithinLine = 0
+
+ ## __GetCapsuleData() method
+ #
+ # Get capsule data for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom capsule data are got
+ #
+ def __GetCapsuleData(self, Obj):
+
+ while True:
+ IsInf = self.__GetInfStatement(Obj, True)
+ IsFile = self.__GetFileStatement(Obj, True)
+ IsFv = self.__GetFvStatement(Obj)
+ if not IsInf and not IsFile and not IsFv:
+ break
+
+ ## __GetFvStatement() method
+ #
+ # Get FV for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom FV is got
+ # @retval True Successfully find a FV statement
+ # @retval False Not able to find a FV statement
+ #
+ def __GetFvStatement(self, CapsuleObj):
+
+ if not self.__IsKeyword("FV"):
+ return False
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+# CapsuleFv = CapsuleData.CapsuleFv()
+# CapsuleFv.FvName = self.__Token
+# CapsuleObj.CapsuleDataList.append(CapsuleFv)
+ return True
+
    ## __GetRule() method
    #
    # Get Rule section contents and store its data into rule list of self.Profile
    #
    # @param self The object pointer
    # @retval True Successfully find a Rule
    # @retval False Not able to find a Rule
    #
    def __GetRule(self):

        if not self.__GetNextToken():
            return False

        # A later section header means there is no [Rule.] here; section
        # order is enforced (FD, FV, Capsule, VTF, Rule, OptionRom).
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[RULE."):
            if not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False
        self.__UndoToken()
        if not self.__IsToken("[Rule.", True):
            # Diagnostic dump of the offending source position before raising.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [Rule.] At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__SkipToToken("."):
            raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)

        # Header form: [Rule.<Arch>.<ModuleType>[.<TemplateName>]]
        Arch = self.__SkippedChars.rstrip(".")
        if Arch.upper() not in ("IA32", "X64", "IPF", "EBC", "ARM", "COMMON"):
            raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)

        ModuleType = self.__GetModuleType()

        TemplateName = ""
        if self.__IsToken("."):
            if not self.__GetNextWord():
                raise Warning("expected template name At Line ", self.FileName, self.CurrentLineNumber)
            TemplateName = self.__Token

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        RuleObj = self.__GetRuleFileStatements()
        RuleObj.Arch = Arch.upper()
        RuleObj.ModuleType = ModuleType
        RuleObj.TemplateName = TemplateName
        # Rules are keyed as "RULE.<ARCH>.<MODULETYPE>[.<TEMPLATE>]".
        if TemplateName == '' :
            self.Profile.RuleDict['RULE'             + \
                              '.'                    + \
                              Arch.upper()           + \
                              '.'                    + \
                              ModuleType.upper()     ] = RuleObj
        else :
            self.Profile.RuleDict['RULE'             + \
                              '.'                    + \
                              Arch.upper()           + \
                              '.'                    + \
                              ModuleType.upper()     + \
                              '.'                    + \
                              TemplateName.upper() ] = RuleObj
#        self.Profile.RuleList.append(rule)
        return True
+
+ ## __GetModuleType() method
+ #
+ # Return the module type
+ #
+ # @param self The object pointer
+ # @retval string module type
+ #
+ def __GetModuleType(self):
+
+ if not self.__GetNextWord():
+ raise Warning("expected Module type At Line ", self.FileName, self.CurrentLineNumber)
+ if self.__Token.upper() not in ("SEC", "PEI_CORE", "PEIM", "DXE_CORE", \
+ "DXE_DRIVER", "DXE_SAL_DRIVER", \
+ "DXE_SMM_DRIVER", "DXE_RUNTIME_DRIVER", \
+ "UEFI_DRIVER", "UEFI_APPLICATION", "USER_DEFINED", "DEFAULT", "BASE", \
+ "SECURITY_CORE", "COMBINED_PEIM_DRIVER", "PIC_PEIM", "RELOCATABLE_PEIM", \
+ "PE32_PEIM", "BS_DRIVER", "RT_DRIVER", "SAL_RT_DRIVER", "APPLICATION"):
+ raise Warning("Unknown Module type At line ", self.FileName, self.CurrentLineNumber)
+ return self.__Token
+
+ ## __GetFileExtension() method
+ #
+ # Return the file extension
+ #
+ # @param self The object pointer
+ # @retval string file name extension
+ #
+ def __GetFileExtension(self):
+ if not self.__IsToken("."):
+ raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)
+
+ Ext = ""
+ if self.__GetNextToken():
+ Pattern = re.compile(r'([a-zA-Z][a-zA-Z0-9]*)')
+ if Pattern.match(self.__Token):
+ Ext = self.__Token
+ return '.' + Ext
+ else:
+ raise Warning("Unknown file extension At Line ", self.FileName, self.CurrentLineNumber)
+
+ else:
+ raise Warning("expected file extension At Line ", self.FileName, self.CurrentLineNumber)
+
    ## __GetRuleFileStatements() method
    #
    # Parse the FILE statement of a [Rule.] section and return one of three
    # rule flavors: complex ("{ ... }" body), extension rule ("| .ext"),
    # or simple (leaf section type plus file name).
    #
    # @param self The object pointer
    # @retval Rule Rule object
    #
    def __GetRuleFileStatements(self):

        if not self.__IsKeyword("FILE"):
            raise Warning("expected FILE At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextWord():
            raise Warning("expected FV type At Line ", self.FileName, self.CurrentLineNumber)

        Type = self.__Token.strip().upper()
        if Type not in ("RAW", "FREEFORM", "SEC", "PEI_CORE", "PEIM",\
                             "PEI_DXE_COMBO", "DRIVER", "DXE_CORE", "APPLICATION", "FV_IMAGE"):
            raise Warning("Unknown FV type At line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        # The name is $(NAMED_GUID), a bare word, or a PCD reference that is
        # re-encoded as "PCD(Guid.Name)".
        if not self.__IsKeyword("$(NAMED_GUID)"):
            if not self.__GetNextWord():
                raise Warning("expected $(NAMED_GUID)", self.FileName, self.CurrentLineNumber)
            if self.__Token == 'PCD':
                if not self.__IsToken( "("):
                    raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                PcdPair = self.__GetNextPcdName()
                if not self.__IsToken( ")"):
                    raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'

        NameGuid = self.__Token

        # Optional reloc strip flag; only executable file types may carry it.
        KeepReloc = None
        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
            if self.__FileCouldHaveRelocFlag(Type):
                if self.__Token == 'RELOCS_STRIPPED':
                    KeepReloc = False
                else:
                    KeepReloc = True
            else:
                raise Warning("File type %s could not have reloc strip flag At Line %d" % (Type, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

        # Optional comma-separated "Target_Tag_Arch" KeyString list.
        KeyStringList = []
        if self.__GetNextToken():
            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if Pattern.match(self.__Token):
                KeyStringList.append(self.__Token)
                if self.__IsToken(","):
                    while self.__GetNextToken():
                        if not Pattern.match(self.__Token):
                            raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
                        KeyStringList.append(self.__Token)

                        if not self.__IsToken(","):
                            break

            else:
                # Not a KeyString: give the token back.
                self.__UndoToken()


        Fixed = False
        if self.__IsKeyword("Fixed", True):
            Fixed = True

        CheckSum = False
        if self.__IsKeyword("CheckSum", True):
            CheckSum = True

        # Rule alignment allows a narrower value set than FV alignment.
        AlignValue = ""
        if self.__GetAlignment():
            if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
            AlignValue = self.__Token

        if self.__IsToken("{"):
            # Complex file rule expected
            Rule = RuleComplexFile.RuleComplexFile()
            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc

            # Consume nested encapsulation/leaf sections until none matches.
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(Rule)
                IsLeaf = self.__GetEfiSection(Rule)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken("}"):
                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)

            return Rule

        elif self.__IsToken("|"):
            # Ext rule expected
            Ext = self.__GetFileExtension()

            Rule = RuleSimpleFile.RuleSimpleFile()

            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.FileExtension = Ext
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc

            return Rule

        else:
            # Simple file rule expected
            if not self.__GetNextWord():
                raise Warning("expected leaf section type At Line ", self.FileName, self.CurrentLineNumber)

            SectionName = self.__Token

            if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
                                    "UI", "PEI_DEPEX", "VERSION", "SUBTYPE_GUID", "SMM_DEPEX"):
                raise Warning("Unknown leaf section name '%s'" % SectionName, self.FileName, self.CurrentLineNumber)


            # Fixed/CheckSum/Align may also follow the section type here,
            # overriding the values parsed above.
            if self.__IsKeyword("Fixed", True):
                Fixed = True

            if self.__IsKeyword("CheckSum", True):
                CheckSum = True

            if self.__GetAlignment():
                if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                    raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
                AlignValue = self.__Token

            if not self.__GetNextToken():
                raise Warning("expected File name At Line ", self.FileName, self.CurrentLineNumber)

            Rule = RuleSimpleFile.RuleSimpleFile()
            Rule.SectionType = SectionName
            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.FileName = self.__Token
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc
            return Rule
+
+ ## __GetEfiSection() method
+ #
+ # Get section list for Rule
+ #
+ # @param self The object pointer
+ # @param Obj for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
    def __GetEfiSection(self, Obj):
        """Parse one leaf (EFI) section statement inside a [Rule] body and
        append the resulting section object to Obj.SectionList.

        @param  Obj     Rule or encapsulation-section object owning the section
        @retval True    a leaf section statement was parsed and recorded
        @retval False   next token does not start a known leaf section
                        (the token is pushed back)
        """

        # NOTE(review): OldPos is captured but never used below; rewinding is
        # done token-by-token via __UndoToken() instead.
        OldPos = self.GetFileBufferPos()
        if not self.__GetNextWord():
            return False
        SectionName = self.__Token

        # Not a recognized leaf-section keyword: push the token back so the
        # caller can try other grammar rules.
        if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX", \
                               "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
            self.__UndoToken()
            return False

        if SectionName == "FV_IMAGE":
            FvImageSectionObj = FvImageSection.FvImageSection()
            # An optional repeated FV_IMAGE keyword may follow; it carries no
            # extra information here.
            if self.__IsKeyword("FV_IMAGE"):
                pass
            if self.__IsToken( "{"):
                # Inline FV definition: parse a complete FV body.
                FvObj = Fv.FV()
                self.__GetDefineStatements(FvObj)
                self.__GetBlockStatement(FvObj)
                self.__GetSetStatements(FvObj)
                self.__GetFvAlignment(FvObj)
                self.__GetFvAttributes(FvObj)
                # Called twice: up to two apriori sections may appear
                # (presumably PEI and DXE -- TODO confirm against grammar).
                self.__GetAprioriSection(FvObj)
                self.__GetAprioriSection(FvObj)

                while True:
                    IsInf = self.__GetInfStatement(FvObj)
                    IsFile = self.__GetFileStatement(FvObj)
                    if not IsInf and not IsFile:
                        break

                if not self.__IsToken( "}"):
                    raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
                FvImageSectionObj.Fv = FvObj
                FvImageSectionObj.FvName = None

            else:
                # Reference to an external FV by file type / name / extension.
                if not self.__IsKeyword("FV"):
                    raise Warning("expected 'FV' At Line ", self.FileName, self.CurrentLineNumber)
                FvImageSectionObj.FvFileType = self.__Token

                if self.__GetAlignment():
                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                        raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
                    FvImageSectionObj.Alignment = self.__Token

                # A second optional FV keyword / alignment pair may appear and
                # overrides the first.
                if self.__IsKeyword("FV"):
                    FvImageSectionObj.FvFileType = self.__Token

                if self.__GetAlignment():
                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                        raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
                    FvImageSectionObj.Alignment = self.__Token

                if self.__IsToken('|'):
                    FvImageSectionObj.FvFileExtension = self.__GetFileExtension()
                elif self.__GetNextToken():
                    # A bare token is the FV file name unless it is really the
                    # start of the next grammar element.
                    if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX", \
                                            "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
                        FvImageSectionObj.FvFileName = self.__Token
                    else:
                        self.__UndoToken()
                else:
                    raise Warning("expected FV file name At Line ", self.FileName, self.CurrentLineNumber)

            Obj.SectionList.append(FvImageSectionObj)
            return True

        EfiSectionObj = EfiSection.EfiSection()
        EfiSectionObj.SectionType = SectionName

        if not self.__GetNextToken():
            raise Warning("expected file type At Line ", self.FileName, self.CurrentLineNumber)

        if self.__Token == "STRING":
            # STRING = "..." form; only allowed for section types reported by
            # __RuleSectionCouldHaveString (UI/VERSION).
            if not self.__RuleSectionCouldHaveString(EfiSectionObj.SectionType):
                raise Warning("%s section could NOT have string data At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

            if not self.__IsToken('='):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected Quoted String At Line ", self.FileName, self.CurrentLineNumber)

            if self.__GetStringData():
                EfiSectionObj.StringData = self.__Token

            if self.__IsKeyword("BUILD_NUM"):
                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
                    raise Warning("%s section could NOT have BUILD_NUM At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

                if not self.__IsToken("="):
                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextToken():
                    raise Warning("expected Build number At Line ", self.FileName, self.CurrentLineNumber)
                EfiSectionObj.BuildNum = self.__Token

        else:
            # Otherwise the token is a file-type keyword; validate it against
            # the section type.
            EfiSectionObj.FileType = self.__Token
            self.__CheckRuleSectionFileType(EfiSectionObj.SectionType, EfiSectionObj.FileType)

        if self.__IsKeyword("Optional"):
            if not self.__RuleSectionCouldBeOptional(EfiSectionObj.SectionType):
                raise Warning("%s section could NOT be optional At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
            EfiSectionObj.Optional = True

            if self.__IsKeyword("BUILD_NUM"):
                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
                    raise Warning("%s section could NOT have BUILD_NUM At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

                if not self.__IsToken("="):
                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextToken():
                    raise Warning("expected Build number At Line ", self.FileName, self.CurrentLineNumber)
                EfiSectionObj.BuildNum = self.__Token

        if self.__GetAlignment():
            EfiSectionObj.Alignment = self.__Token

        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
            if self.__SectionCouldHaveRelocFlag(EfiSectionObj.SectionType):
                if self.__Token == 'RELOCS_STRIPPED':
                    EfiSectionObj.KeepReloc = False
                else:
                    EfiSectionObj.KeepReloc = True
                # A section-level flag must agree with the enclosing Rule's.
                if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
                    raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
            else:
                raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

        if self.__IsToken('|'):
            EfiSectionObj.FileExtension = self.__GetFileExtension()
        elif self.__GetNextToken():
            if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX", \
                                    "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):

                # A PCD(...) reference is normalized to 'PCD(Guid.Name)' form
                # before being stored as the file name.
                if self.__Token.startswith('PCD'):
                    self.__UndoToken()
                    self.__GetNextWord()

                    if self.__Token == 'PCD':
                        if not self.__IsToken( "("):
                            raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                        PcdPair = self.__GetNextPcdName()
                        if not self.__IsToken( ")"):
                            raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                        self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'

                EfiSectionObj.FileName = self.__Token

            else:
                self.__UndoToken()
        else:
            raise Warning("expected section file name At Line ", self.FileName, self.CurrentLineNumber)

        Obj.SectionList.append(EfiSectionObj)
        return True
+
+ ## __RuleSectionCouldBeOptional() method
+ #
+ # Get whether a section could be optional
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could be optional
+ # @retval False section never optional
+ #
+ def __RuleSectionCouldBeOptional(self, SectionType):
+ if SectionType in ("DXE_DEPEX", "UI", "VERSION", "PEI_DEPEX", "RAW", "SMM_DEPEX"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveBuildNum() method
+ #
+ # Get whether a section could have build number information
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have build number information
+ # @retval False section never have build number information
+ #
+ def __RuleSectionCouldHaveBuildNum(self, SectionType):
+ if SectionType in ("VERSION"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveString() method
+ #
+ # Get whether a section could have string
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have string
+ # @retval False section never have string
+ #
+ def __RuleSectionCouldHaveString(self, SectionType):
+ if SectionType in ("UI", "VERSION"):
+ return True
+ else:
+ return False
+
+ ## __CheckRuleSectionFileType() method
+ #
+ # Get whether a section matches a file type
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @param FileType The file type to check
+ #
+ def __CheckRuleSectionFileType(self, SectionType, FileType):
+ if SectionType == "COMPAT16":
+ if FileType not in ("COMPAT16", "SEC_COMPAT16"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PE32":
+ if FileType not in ("PE32", "SEC_PE32"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PIC":
+ if FileType not in ("PIC", "PIC"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "TE":
+ if FileType not in ("TE", "SEC_TE"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "RAW":
+ if FileType not in ("BIN", "SEC_BIN", "RAW", "ASL", "ACPI"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "DXE_DEPEX":
+ if FileType not in ("DXE_DEPEX", "SEC_DXE_DEPEX"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "UI":
+ if FileType not in ("UI", "SEC_UI"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "VERSION":
+ if FileType not in ("VERSION", "SEC_VERSION"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PEI_DEPEX":
+ if FileType not in ("PEI_DEPEX", "SEC_PEI_DEPEX"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "GUID":
+ if FileType not in ("PE32", "SEC_GUID"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+
+ ## __GetRuleEncapsulationSection() method
+ #
+ # Get encapsulation section for Rule
+ #
+ # @param self The object pointer
+ # @param Rule for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
    def __GetRuleEncapsulationSection(self, Rule):
        """Parse one encapsulation section (COMPRESS or GUIDED) inside a
        [Rule] body and append it to Rule.SectionList.

        Encapsulation sections nest: the body of either form may itself
        contain further encapsulation sections and leaf EFI sections.

        @param  Rule    object receiving the parsed section
        @retval True    an encapsulation section was parsed
        @retval False   next token starts neither COMPRESS nor GUIDED
        """

        if self.__IsKeyword( "COMPRESS"):
            # Compression type defaults to PI_STD when not given explicitly.
            Type = "PI_STD"
            if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
                Type = self.__Token

            if not self.__IsToken("{"):
                raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)

            CompressSectionObj = CompressSection.CompressSection()

            CompressSectionObj.CompType = Type
            # Recursive sections...
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(CompressSectionObj)
                IsLeaf = self.__GetEfiSection(CompressSectionObj)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken( "}"):
                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
            Rule.SectionList.append(CompressSectionObj)

            return True

        elif self.__IsKeyword( "GUIDED"):
            # The section GUID is either an explicit GUID token or the
            # $(NAMED_GUID) placeholder.
            GuidValue = None
            if self.__GetNextGuid():
                GuidValue = self.__Token

            if self.__IsKeyword( "$(NAMED_GUID)"):
                GuidValue = self.__Token

            AttribDict = self.__GetGuidAttrib()

            if not self.__IsToken("{"):
                raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
            GuidSectionObj = GuidSection.GuidSection()
            GuidSectionObj.NameGuid = GuidValue
            GuidSectionObj.SectionType = "GUIDED"
            GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
            GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]

            # Efi sections...
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(GuidSectionObj)
                IsLeaf = self.__GetEfiSection(GuidSectionObj)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken( "}"):
                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
            Rule.SectionList.append(GuidSectionObj)

            return True

        return False
+
+ ## __GetVtf() method
+ #
+ # Get VTF section contents and store its data into VTF list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a VTF
+ # @retval False Not able to find a VTF
+ #
    def __GetVtf(self):
        """Parse one [VTF.<arch>.<name>] section and append the resulting
        Vtf object to self.Profile.VtfList.

        @param  self    The object pointer
        @retval True    a VTF section was parsed
        @retval False   next token does not open a [VTF.] section
        """

        if not self.__GetNextToken():
            return False

        S = self.__Token.upper()
        # A different section header here means the FDF section order is
        # wrong, unless it is one that legally follows [VTF.].
        if S.startswith("[") and not S.startswith("[VTF."):
            if not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[VTF.", True):
            # NOTE(review): Python 2 print statement -- debug trace of the
            # unparsable text before raising.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [VTF.] At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__SkipToToken("."):
            raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)

        # Characters between '[VTF.' and '.' form the architecture.
        Arch = self.__SkippedChars.rstrip(".").upper()
        if Arch not in ("IA32", "X64", "IPF", "ARM"):
            raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextWord():
            raise Warning("expected VTF name At Line ", self.FileName, self.CurrentLineNumber)
        Name = self.__Token.upper()

        VtfObj = Vtf.Vtf()
        VtfObj.UiName = Name
        VtfObj.KeyArch = Arch

        # Optional second architecture after a comma.
        if self.__IsToken(","):
            if not self.__GetNextWord():
                raise Warning("expected Arch list At Line ", self.FileName, self.CurrentLineNumber)
            if self.__Token.upper() not in ("IA32", "X64", "IPF", "ARM"):
                raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)
            VtfObj.ArchList = self.__Token.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        # Optional IA32 reset binary declaration.
        if self.__IsKeyword("IA32_RST_BIN"):
            if not self.__IsToken("="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected Reset file At Line ", self.FileName, self.CurrentLineNumber)

            VtfObj.ResetBin = self.__Token

        # Zero or more COMP_NAME component blocks.
        while self.__GetComponentStatement(VtfObj):
            pass

        self.Profile.VtfList.append(VtfObj)
        return True
+
+ ## __GetComponentStatement() method
+ #
+ # Get components in VTF
+ #
+ # @param self The object pointer
+ # @param VtfObj for whom component is got
+ # @retval True Successfully find a component
+ # @retval False Not able to find a component
+ #
    def __GetComponentStatement(self, VtfObj):
        """Parse one component block of a [VTF] section and append it to
        VtfObj.ComponentStatementList.

        The keys must appear in this fixed order: COMP_NAME, COMP_LOC,
        COMP_TYPE, COMP_VER, COMP_CS, COMP_BIN, COMP_SYM, COMP_SIZE.

        @param  VtfObj  VTF object receiving the component
        @retval True    a component statement was parsed
        @retval False   next token is not COMP_NAME
        """

        if not self.__IsKeyword("COMP_NAME"):
            return False

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextWord():
            raise Warning("expected Component Name At Line ", self.FileName, self.CurrentLineNumber)

        CompStatementObj = ComponentStatement.ComponentStatement()
        CompStatementObj.CompName = self.__Token

        if not self.__IsKeyword("COMP_LOC"):
            raise Warning("expected COMP_LOC At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        CompStatementObj.CompLoc = ""
        if self.__GetNextWord():
            CompStatementObj.CompLoc = self.__Token
            # Optional '|<pos>' suffix restricting the file position.
            if self.__IsToken('|'):
                if not self.__GetNextWord():
                    raise Warning("Expected Region Name At Line ", self.FileName, self.CurrentLineNumber)

                if self.__Token not in ("F", "N", "S"): #, "H", "L", "PH", "PL"): not support
                    raise Warning("Unknown location type At line ", self.FileName, self.CurrentLineNumber)

                CompStatementObj.FilePos = self.__Token
        else:
            # Empty COMP_LOC value: advance to the start of the next line.
            self.CurrentLineNumber += 1
            self.CurrentOffsetWithinLine = 0

        if not self.__IsKeyword("COMP_TYPE"):
            raise Warning("expected COMP_TYPE At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Component type At Line ", self.FileName, self.CurrentLineNumber)
        # Component type is either a known keyword or a short hex literal
        # (0xN / 0xNN); only the first and last hex digits are checked.
        if self.__Token not in ("FIT", "PAL_B", "PAL_A", "OEM"):
            if not self.__Token.startswith("0x") or len(self.__Token) < 3 or len(self.__Token) > 4 or \
                not self.__HexDigit(self.__Token[2]) or not self.__HexDigit(self.__Token[-1]):
                raise Warning("Unknown location type At line ", self.FileName, self.CurrentLineNumber)
        CompStatementObj.CompType = self.__Token

        if not self.__IsKeyword("COMP_VER"):
            raise Warning("expected COMP_VER At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)

        # '-' means "use default"; otherwise a dotted numeric pair like 1.0
        # or 12.34 (per the regex below).
        Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
        if Pattern.match(self.__Token) == None:
            raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)
        CompStatementObj.CompVer = self.__Token

        if not self.__IsKeyword("COMP_CS"):
            raise Warning("expected COMP_CS At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Component CS At Line ", self.FileName, self.CurrentLineNumber)
        # Checksum flag: "1" or "0" only.
        if self.__Token not in ("1", "0"):
            raise Warning("Unknown Component CS At line ", self.FileName, self.CurrentLineNumber)
        CompStatementObj.CompCs = self.__Token

        if not self.__IsKeyword("COMP_BIN"):
            raise Warning("expected COMP_BIN At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Component file At Line ", self.FileName, self.CurrentLineNumber)

        CompStatementObj.CompBin = self.__Token

        if not self.__IsKeyword("COMP_SYM"):
            raise Warning("expected COMP_SYM At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Component symbol file At Line ", self.FileName, self.CurrentLineNumber)

        CompStatementObj.CompSym = self.__Token

        if not self.__IsKeyword("COMP_SIZE"):
            raise Warning("expected COMP_SIZE At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        # Size is '-' (default), a decimal number, or a hex number.
        if self.__IsToken("-"):
            CompStatementObj.CompSize = self.__Token
        elif self.__GetNextDecimalNumber():
            CompStatementObj.CompSize = self.__Token
        elif self.__GetNextHexNumber():
            CompStatementObj.CompSize = self.__Token
        else:
            raise Warning("Unknown size At line ", self.FileName, self.CurrentLineNumber)

        VtfObj.ComponentStatementList.append(CompStatementObj)
        return True
+
+ ## __GetFvInFd() method
+ #
+ # Get FV list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval FvList list of FV in FD
+ #
+ def __GetFvInFd (self, FdName):
+
+ FvList = []
+ if FdName.upper() in self.Profile.FdDict.keys():
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == 'FV':
+ for elementRegionData in elementRegion.RegionDataList:
+ if elementRegionData != None and elementRegionData.upper() not in FvList:
+ FvList.append(elementRegionData.upper())
+ return FvList
+
+ ## __GetReferencedFdFvTuple() method
+ #
+ # Get FD and FV list referenced by a FFS file
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+ def __GetReferencedFdFvTuple(self, FvObj, RefFdList = [], RefFvList = []):
+
+ for FfsObj in FvObj.FfsList:
+ if isinstance(FfsObj, FfsFileStatement.FileStatement):
+ if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ RefFvList.append(FfsObj.FvName.upper())
+ elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ RefFdList.append(FfsObj.FdName.upper())
+ else:
+ self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
+
+ ## __GetReferencedFdFvTupleFromSection() method
+ #
+ # Get FD and FV list referenced by a FFS section
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param FdList referenced FD by section
+ # @param FvList referenced FV by section
+ #
+ def __GetReferencedFdFvTupleFromSection(self, FfsFile, FdList = [], FvList = []):
+
+ SectionStack = []
+ SectionStack.extend(FfsFile.SectionList)
+ while SectionStack != []:
+ SectionObj = SectionStack.pop()
+ if isinstance(SectionObj, FvImageSection.FvImageSection):
+ if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ FvList.append(SectionObj.FvName.upper())
+ if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ FvList.append(SectionObj.Fv.UiFvName.upper())
+ self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
+
+ if isinstance(SectionObj, CompressSection.CompressSection) or isinstance(SectionObj, GuidSection.GuidSection):
+ SectionStack.extend(SectionObj.SectionList)
+
+ ## CycleReferenceCheck() method
+ #
+ # Check whether cycle reference exists in FDF
+ #
+ # @param self The object pointer
+ # @retval True cycle reference exists
+ # @retval False Not exists cycle reference
+ #
    def CycleReferenceCheck(self):
        """Check whether any FV in the parsed FDF is (transitively) referenced
        by itself, through FV-in-FD or FV-in-FV references.

        @param  self    The object pointer
        @retval True    a cycle reference exists (details printed)
        @retval False   no cycle reference found

        NOTE(review): the 'return' inside 'finally' swallows ANY exception
        raised in the try body (including unexpected ones), turning it into a
        normal False/True result -- confirm this best-effort behavior is
        intended.
        """

        CycleRefExists = False

        try:
            for FvName in self.Profile.FvDict.keys():
                LogStr = "Cycle Reference Checking for FV: %s\n" % FvName
                # Worklist of FV names still to expand; seeded with the FV
                # under test so re-reaching it means a cycle.
                RefFvStack = []
                RefFvStack.append(FvName)
                FdAnalyzedList = []

                while RefFvStack != []:
                    FvNameFromStack = RefFvStack.pop()
                    if FvNameFromStack.upper() in self.Profile.FvDict.keys():
                        FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
                    else:
                        continue

                    RefFdList = []
                    RefFvList = []
                    self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)

                    for RefFdName in RefFdList:
                        # Each FD only needs to be expanded once.
                        if RefFdName in FdAnalyzedList:
                            continue

                        LogStr += "FD %s is referenced by FV %s\n" % (RefFdName, FvNameFromStack)
                        FvInFdList = self.__GetFvInFd(RefFdName)
                        if FvInFdList != []:
                            LogStr += "FD %s contains FV: " % RefFdName
                            # NOTE(review): this loop rebinds FvObj (the FV
                            # object above) to FV name strings.
                            for FvObj in FvInFdList:
                                LogStr += FvObj
                                LogStr += ' \n'
                                if FvObj not in RefFvStack:
                                    RefFvStack.append(FvObj)

                        if FvName in RefFvStack:
                            CycleRefExists = True
                            raise Warning(LogStr)
                        FdAnalyzedList.append(RefFdName)

                    for RefFvName in RefFvList:
                        LogStr += "FV %s is referenced by FV %s\n" % (RefFvName, FvNameFromStack)
                        if RefFvName not in RefFvStack:
                            RefFvStack.append(RefFvName)

                    if FvName in RefFvStack:
                        CycleRefExists = True
                        raise Warning(LogStr)

        except Warning:
            # Python 2 print statement: dump the accumulated trace.
            print LogStr

        finally:
            return CycleRefExists
+
if __name__ == "__main__":
    # Ad-hoc self test: parse a hard-coded FDF file and run the cycle check.
    # NOTE(review): Python 2 only ('except Warning, X' and print statements)
    # and a Windows-style relative path are assumed here.
    parser = FdfParser("..\LakeportX64Pkg.fdf")
    try:
        parser.ParseFile()
        parser.CycleReferenceCheck()
    except Warning, X:
        print X.message
    else:
        print "Success!"
+
diff --git a/BaseTools/Source/Python/Common/GlobalData.py b/BaseTools/Source/Python/Common/GlobalData.py new file mode 100644 index 0000000000..d56152ec8a --- /dev/null +++ b/BaseTools/Source/Python/Common/GlobalData.py @@ -0,0 +1,37 @@ +## @file +# This file is used to define common static strings used by INF/DEC/DSC files +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. + +import re + +gIsWindows = None + +gEdkCompatibilityPkg = "EdkCompatibilityPkg" +gWorkspace = "." +gEdkSource = "EdkCompatibilityPkg" +gEfiSource = "." +gEcpSource = "EdkCompatibilityPkg" + +gOptions = None +gCaseInsensitive = False +gGlobalDefines = {} +gAllFiles = None + +gEdkGlobal = {} +gOverrideDir = {} + +# for debug trace purpose when problem occurs +gProcessingFile = '' +gBuildingModule = '' + +## Regular expression for matching macro used in DSC/DEC/INF file inclusion +gMacroPattern = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE) + diff --git a/BaseTools/Source/Python/Common/Identification.py b/BaseTools/Source/Python/Common/Identification.py new file mode 100644 index 0000000000..a9b2f33d55 --- /dev/null +++ b/BaseTools/Source/Python/Common/Identification.py @@ -0,0 +1,58 @@ +## @file
+# This file is used to define the identification of INF/DEC/DSC files
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+## Identification
+#
+# This class defined basic Identification information structure which is used by INF/DEC/DSC files
+#
+# @param object: Inherited from object class
+#
+# @var FileName: To store data for Filename
+# @var FileFullPath: To store data for full path of the file
+# @var FileRelativePath: To store data for relative path of the file
+# @var PackagePath: To store data for path of the package containing the file
+#
class Identification(object):
    """Basic identification of an INF/DEC/DSC file: its name, full path,
    relative path and the path of its owning package.  All fields start
    empty and are filled in by users of the class.
    """
    def __init__(self):
        self.FileName = ''          # base file name
        self.FileFullPath = ''      # full path of the file
        self.FileRelativePath = ''  # path relative to the workspace
        self.PackagePath = ''       # path of the package containing the file

    ## GetFileName
    #
    # Reserved placeholder -- not yet implemented.
    #
    def GetFileName(self, FileFullPath, FileRelativePath):
        pass

    ## GetFileFullPath
    #
    # Reserved placeholder -- not yet implemented.
    # (Header corrected: it was copy-pasted as 'GetFileName'.)
    #
    def GetFileFullPath(self, FileName, FileRelativePath):
        pass

    ## GetFileRelativePath
    #
    # Reserved placeholder -- not yet implemented.
    # (Header corrected: it was copy-pasted as 'GetFileName'.)
    #
    def GetFileRelativePath(self, FileName, FileFullPath):
        pass
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Smoke-test instantiation.  Bound to a descriptive name: the original
    # used 'id', shadowing the 'id' builtin.
    identification = Identification()
diff --git a/BaseTools/Source/Python/Common/InfClassObject.py b/BaseTools/Source/Python/Common/InfClassObject.py new file mode 100644 index 0000000000..a772840227 --- /dev/null +++ b/BaseTools/Source/Python/Common/InfClassObject.py @@ -0,0 +1,1116 @@ +## @file
+# This file is used to define each component of INF file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+from CommonDataClass.CommonClass import LibraryClassClass
+from CommonDataClass.ModuleClass import *
+from String import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from BuildToolError import *
+from Misc import sdict
+import GlobalData
+from Table.TableInf import TableInf
+import Database
+from Parsing import *
+
#
# Global variable
#
# Section tab name (upper-cased) -> database MODEL_* identifier, used when
# storing parsed INF records.
Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
           TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
           TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
           TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
           TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
           TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
           TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
           TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
           TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
           TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
           TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
           TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
           TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
           TAB_GUIDS.upper() : MODEL_EFI_GUID,
           TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
           TAB_PPIS.upper() : MODEL_EFI_PPI,
           TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
           TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
           }

# Legacy COMPONENT_TYPE value -> module type string.  Some legacy types
# map ambiguously (see the commented-out BS_DRIVER entries).
gComponentType2ModuleType = {
    "LIBRARY"               :   "BASE",
    "SECURITY_CORE"         :   "SEC",
    "PEI_CORE"              :   "PEI_CORE",
    "COMBINED_PEIM_DRIVER"  :   "PEIM",
    "PIC_PEIM"              :   "PEIM",
    "RELOCATABLE_PEIM"      :   "PEIM",
    "PE32_PEIM"             :   "PEIM",
    "BS_DRIVER"             :   "DXE_DRIVER",
    "RT_DRIVER"             :   "DXE_RUNTIME_DRIVER",
    "SAL_RT_DRIVER"         :   "DXE_SAL_DRIVER",
#    "BS_DRIVER"             :   "DXE_SMM_DRIVER",
#    "BS_DRIVER"             :   "UEFI_DRIVER",
    "APPLICATION"           :   "UEFI_APPLICATION",
    "LOGO"                  :   "BASE",
}

# Matches NMAKE flag macro names such as C_FLAGS, EBC_C_STD_FLAGS or
# LINK_PROJ_FLAGS_EXE; group 1 captures the bare tool name.
gNmakeFlagPattern = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
# NMAKE tool name (from the pattern above) -> build-system tool code.
gNmakeFlagName2ToolCode = {
    "C"         :   "CC",
    "LIB"       :   "SLINK",
    "LINK"      :   "DLINK",
}
+
class InfHeader(ModuleHeaderClass):
    """INF [Defines] header object.

    Maps TAB_INF_DEFINES_* key strings onto attribute names inherited from
    ModuleHeaderClass, so parsed key/value pairs can be stored and read
    dict-style through __setitem__/__getitem__.
    """
    # Key string -> attribute name translation table.
    _Mapping_ = {
        #
        # Required Fields
        #
        TAB_INF_DEFINES_BASE_NAME : "Name",
        TAB_INF_DEFINES_FILE_GUID : "Guid",
        TAB_INF_DEFINES_MODULE_TYPE : "ModuleType",
        TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "EfiSpecificationVersion",
        TAB_INF_DEFINES_EDK_RELEASE_VERSION : "EdkReleaseVersion",
        #
        # Optional Fields
        #
        TAB_INF_DEFINES_INF_VERSION : "InfVersion",
        TAB_INF_DEFINES_BINARY_MODULE : "BinaryModule",
        TAB_INF_DEFINES_COMPONENT_TYPE : "ComponentType",
        TAB_INF_DEFINES_MAKEFILE_NAME : "MakefileName",
        TAB_INF_DEFINES_BUILD_NUMBER : "BuildNumber",
        TAB_INF_DEFINES_BUILD_TYPE : "BuildType",
        TAB_INF_DEFINES_FFS_EXT : "FfsExt",
        TAB_INF_DEFINES_FV_EXT : "FvExt",
        TAB_INF_DEFINES_SOURCE_FV : "SourceFv",
        TAB_INF_DEFINES_VERSION_NUMBER : "VersionNumber",
        TAB_INF_DEFINES_VERSION_STRING : "VersionString",
        TAB_INF_DEFINES_VERSION : "Version",
        TAB_INF_DEFINES_PCD_IS_DRIVER : "PcdIsDriver",
        TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H : "TianoR8FlashMap_h",
        TAB_INF_DEFINES_SHADOW : "Shadow",
#       TAB_INF_DEFINES_LIBRARY_CLASS : "LibraryClass",
#        TAB_INF_DEFINES_ENTRY_POINT : "ExternImages",
#        TAB_INF_DEFINES_UNLOAD_IMAGE : "ExternImages",
#        TAB_INF_DEFINES_CONSTRUCTOR : ,
#        TAB_INF_DEFINES_DESTRUCTOR : ,
#        TAB_INF_DEFINES_DEFINE : "Define",
#        TAB_INF_DEFINES_SPEC : "Specification",
#        TAB_INF_DEFINES_CUSTOM_MAKEFILE : "CustomMakefile",
#        TAB_INF_DEFINES_MACRO :
    }

    def __init__(self):
        ModuleHeaderClass.__init__(self)
        self.VersionNumber = ''
        self.VersionString = ''
        #print self.__dict__
    ## Store a value under its mapped attribute name.
    def __setitem__(self, key, value):
        self.__dict__[self._Mapping_[key]] = value
    ## Read a value by its key string via the mapping table.
    def __getitem__(self, key):
        return self.__dict__[self._Mapping_[key]]
    ## "in" test support
    def __contains__(self, key):
        return key in self._Mapping_
+
## InfObject
#
# Basic INF base object which other INF-related classes inherit from.
#
# @param object: Inherited from object class
#
class InfObject(object):
    def __init__(self):
        # Fixed: the original called object.__init__() without passing 'self',
        # which raises TypeError the moment InfObject() is instantiated.
        object.__init__(self)
+
+## Inf
+#
+# This class defined the structure used in Inf object
+#
+# @param InfObject: Inherited from InfObject class
+# @param Ffilename: Input value for Ffilename of Inf file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# Fales is not to merge all arches
+# default is False
+# @param IsToModule: Input value for IsToModule
+# True is to transfer to ModuleObject automatically
+# False is not to transfer to ModuleObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var UserExtensions: To store value for UserExtensions
+# @var Module: To store value for Module, it is a structure as ModuleClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var KeyList: To store value for KeyList, a list for all Keys used in Inf
+#
+class Inf(InfObject):
+ def __init__(self, Filename = None, IsToDatabase = False, IsToModule = False, WorkspaceDir = None, Database = None, SupArchList = DataType.ARCH_LIST):
+ self.Identification = Identification()
+ self.Module = ModuleClass()
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = SupArchList
+ self.IsToDatabase = IsToDatabase
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblInf = Database.TblInf
+ self.FileID = -1
+ #self.TblInf = TableInf(Database.Cur)
+
+ self.KeyList = [
+ TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS,
+ TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_LIBRARIES,
+ TAB_INF_FIXED_PCD, TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD,
+ TAB_INF_PCD_EX, TAB_DEPEX, TAB_NMAKE, TAB_INF_DEFINES
+ ]
+ #
+ # Upper all KEYs to ignore case sensitive when parsing
+ #
+ self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+ #
+ # Init RecordSet
+ #
+ self.RecordSet = {}
+ for Key in self.KeyList:
+ self.RecordSet[Section[Key]] = []
+
+ #
+ # Load Inf file if filename is not None
+ #
+ if Filename != None:
+ self.LoadInfFile(Filename)
+
+ #
+ # Transfer to Module Object if IsToModule is True
+ #
+ if IsToModule:
+ self.InfToModule()
+
+ ## Transfer to Module Object
+ #
+ # Transfer all contents of an Inf file to a standard Module Object
+ #
+ def InfToModule(self):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = self.Identification.FileFullPath
+
+ #
+ # Generate Package Header
+ #
+ self.GenModuleHeader(ContainerFile)
+
+ #
+ # Generate BuildOptions
+ #
+ self.GenBuildOptions(ContainerFile)
+
+ #
+ # Generate Includes
+ #
+ self.GenIncludes(ContainerFile)
+
+ #
+ # Generate Libraries
+ #
+ self.GenLibraries(ContainerFile)
+
+ #
+ # Generate LibraryClasses
+ #
+ self.GenLibraryClasses(ContainerFile)
+
+ #
+ # Generate Packages
+ #
+ self.GenPackages(ContainerFile)
+
+ #
+ # Generate Nmakes
+ #
+ self.GenNmakes(ContainerFile)
+
+ #
+ # Generate Pcds
+ #
+ self.GenPcds(ContainerFile)
+
+ #
+ # Generate Sources
+ #
+ self.GenSources(ContainerFile)
+
+ #
+ # Generate UserExtensions
+ #
+ self.GenUserExtensions(ContainerFile)
+
+ #
+ # Generate Guids
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+
+ #
+ # Generate Protocols
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+
+ #
+ # Generate Ppis
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+ #
+ # Generate Depexes
+ #
+ self.GenDepexes(ContainerFile)
+
+ #
+ # Generate Binaries
+ #
+ self.GenBinaries(ContainerFile)
+
+ ## Parse [Defines] section
+ #
+ # Parse [Defines] section into InfDefines object
+ #
+ # @param InfFile The path of the INF file
+ # @param Section The title of "Defines" section
+ # @param Lines The content of "Defines" section
+ #
+ def ParseDefines(self, InfFile, Section, Lines):
+ TokenList = Section.split(TAB_SPLIT)
+ if len(TokenList) == 3:
+ RaiseParserError(Section, "Defines", InfFile, "[xx.yy.%s] format (with platform) is not supported")
+ if len(TokenList) == 2:
+ Arch = TokenList[1].upper()
+ else:
+ Arch = TAB_ARCH_COMMON
+
+ if Arch not in self.Defines:
+ self.Defines[Arch] = InfDefines()
+ GetSingleValueOfKeyFromLines(Lines, self.Defines[Arch].DefinesDictionary,
+ TAB_COMMENT_SPLIT, TAB_EQUAL_SPLIT, False, None)
+
+ ## Load Inf file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Inf file
+ #
+ def LoadInfFile(self, Filename):
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(Filename)
+ self.Identification.FileFullPath = Filename
+ (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
+
+ #
+ # Init InfTable
+ #
+ #self.TblInf.Table = "Inf%s" % self.FileID
+ #self.TblInf.Create()
+
+ #
+ # Init common datas
+ #
+ IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+ [], [], TAB_UNKNOWN, [], [], []
+ LineNo = 0
+
+ #
+ # Parse file content
+ #
+ IsFindBlockComment = False
+ ReservedLine = ''
+ for Line in open(Filename, 'r'):
+ LineNo = LineNo + 1
+ #
+ # Remove comment block
+ #
+ if Line.find(TAB_COMMENT_R8_START) > -1:
+ ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+ IsFindBlockComment = True
+ if Line.find(TAB_COMMENT_R8_END) > -1:
+ Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ continue
+
+ #
+ # Remove comments at tail and remove spaces again
+ #
+ Line = CleanString(Line)
+ if Line == '':
+ continue
+
+ #
+ # Find a new section tab
+ # First insert previous section items
+ # And then parse the content of the new section
+ #
+ if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+ if Line[1:3] == "--":
+ continue
+ Model = Section[CurrentSection.upper()]
+ #
+ # Insert items data of previous section
+ #
+ InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+ #
+ # Parse the new section
+ #
+ SectionItemList = []
+ ArchList = []
+ ThirdList = []
+
+ CurrentSection = ''
+ LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+ for Item in LineList:
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ if CurrentSection == '':
+ CurrentSection = ItemList[0]
+ else:
+ if CurrentSection != ItemList[0]:
+ EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ if CurrentSection.upper() not in self.KeyList:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ CurrentSection = TAB_UNKNOWN
+ continue
+ ItemList.append('')
+ ItemList.append('')
+ if len(ItemList) > 5:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ else:
+ if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+ EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ ArchList.append(ItemList[1].upper())
+ ThirdList.append(ItemList[2])
+
+ continue
+
+ #
+ # Not in any defined section
+ #
+ if CurrentSection == TAB_UNKNOWN:
+ ErrorMsg = "%s is not in any defined section" % Line
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+ #
+ # Add a section item
+ #
+ SectionItemList.append([Line, LineNo])
+ # End of parse
+ #End of For
+
+ #
+ # Insert items data of last section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+ #
+ # Replace all DEFINE macros with its actual values
+ #
+ ParseDefineMacro2(self.TblInf, self.RecordSet, GlobalData.gGlobalDefines)
+
+ ## Show detailed information of Module
+ #
+ # Print all members and their values of Module class
+ #
+ def ShowModule(self):
+ M = self.Module
+ for Arch in M.Header.keys():
+ print '\nArch =', Arch
+ print 'Filename =', M.Header[Arch].FileName
+ print 'FullPath =', M.Header[Arch].FullPath
+ print 'BaseName =', M.Header[Arch].Name
+ print 'Guid =', M.Header[Arch].Guid
+ print 'Version =', M.Header[Arch].Version
+ print 'InfVersion =', M.Header[Arch].InfVersion
+ print 'EfiSpecificationVersion =', M.Header[Arch].EfiSpecificationVersion
+ print 'EdkReleaseVersion =', M.Header[Arch].EdkReleaseVersion
+ print 'ModuleType =', M.Header[Arch].ModuleType
+ print 'BinaryModule =', M.Header[Arch].BinaryModule
+ print 'ComponentType =', M.Header[Arch].ComponentType
+ print 'MakefileName =', M.Header[Arch].MakefileName
+ print 'BuildNumber =', M.Header[Arch].BuildNumber
+ print 'BuildType =', M.Header[Arch].BuildType
+ print 'FfsExt =', M.Header[Arch].FfsExt
+ print 'FvExt =', M.Header[Arch].FvExt
+ print 'SourceFv =', M.Header[Arch].SourceFv
+ print 'PcdIsDriver =', M.Header[Arch].PcdIsDriver
+ print 'TianoR8FlashMap_h =', M.Header[Arch].TianoR8FlashMap_h
+ print 'Shadow =', M.Header[Arch].Shadow
+ print 'LibraryClass =', M.Header[Arch].LibraryClass
+ for Item in M.Header[Arch].LibraryClass:
+ print Item.LibraryClass, DataType.TAB_VALUE_SPLIT.join(Item.SupModuleList)
+ print 'CustomMakefile =', M.Header[Arch].CustomMakefile
+ print 'Define =', M.Header[Arch].Define
+ print 'Specification =', M.Header[Arch].Specification
+ for Item in self.Module.ExternImages:
+ print '\nEntry_Point = %s, UnloadImage = %s' % (Item.ModuleEntryPoint, Item.ModuleUnloadImage)
+ for Item in self.Module.ExternLibraries:
+ print 'Constructor = %s, Destructor = %s' % (Item.Constructor, Item.Destructor)
+ print '\nBuildOptions =', M.BuildOptions
+ for Item in M.BuildOptions:
+ print Item.ToolChainFamily, Item.ToolChain, Item.Option, Item.SupArchList
+ print '\nIncludes =', M.Includes
+ for Item in M.Includes:
+ print Item.FilePath, Item.SupArchList
+ print '\nLibraries =', M.Libraries
+ for Item in M.Libraries:
+ print Item.Library, Item.SupArchList
+ print '\nLibraryClasses =', M.LibraryClasses
+ for Item in M.LibraryClasses:
+ print Item.LibraryClass, Item.RecommendedInstance, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define
+ print '\nPackageDependencies =', M.PackageDependencies
+ for Item in M.PackageDependencies:
+ print Item.FilePath, Item.SupArchList, Item.FeatureFlag
+ print '\nNmake =', M.Nmake
+ for Item in M.Nmake:
+ print Item.Name, Item.Value, Item.SupArchList
+ print '\nPcds =', M.PcdCodes
+ for Item in M.PcdCodes:
+ print '\tCName=',Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList
+ print '\nSources =', M.Sources
+ for Source in M.Sources:
+ print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList
+ print '\nUserExtensions =', M.UserExtensions
+ for UserExtension in M.UserExtensions:
+ print UserExtension.UserID, UserExtension.Identifier,UserExtension.Content
+ print '\nGuids =', M.Guids
+ for Item in M.Guids:
+ print Item.CName, Item.SupArchList, Item.FeatureFlag
+ print '\nProtocols =', M.Protocols
+ for Item in M.Protocols:
+ print Item.CName, Item.SupArchList, Item.FeatureFlag
+ print '\nPpis =', M.Ppis
+ for Item in M.Ppis:
+ print Item.CName, Item.SupArchList, Item.FeatureFlag
+ print '\nDepex =', M.Depex
+ for Item in M.Depex:
+ print Item.Depex, Item.SupArchList, Item.Define
+ print '\nBinaries =', M.Binaries
+ for Binary in M.Binaries:
+ print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList
+
+ ## Convert [Defines] section content to ModuleHeaderClass
+ #
+ # Convert [Defines] section content to ModuleHeaderClass
+ #
+ # @param Defines The content under [Defines] section
+ # @param ModuleHeader An object of ModuleHeaderClass
+ # @param Arch The supported ARCH
+ #
+ def GenModuleHeader(self, ContainerFile):
+ EdkLogger.debug(2, "Generate ModuleHeader ...")
+ File = self.Identification.FileFullPath
+ #
+ # Update all defines item in database
+ #
+ RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
+ for Record in RecordSet:
+ ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
+ if len(ValueList) != 2:
+ RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2])
+ ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2]
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
+ self.TblInf.Exec(SqlCommand)
+
+ for Arch in DataType.ARCH_LIST:
+ ModuleHeader = InfHeader()
+ ModuleHeader.FileName = self.Identification.FileName
+ ModuleHeader.FullPath = self.Identification.FileFullPath
+ DefineList = QueryDefinesItem2(self.TblInf, Arch, self.FileID)
+
+ NotProcessedDefineList = []
+ for D in DefineList:
+ if D[0] in ModuleHeader:
+ ModuleHeader[D[0]] = GetSplitValueList(D[1])[0]
+ else:
+ NotProcessedDefineList.append(D)
+
+ if ModuleHeader.ComponentType == "LIBRARY":
+ Lib = LibraryClassClass()
+ Lib.LibraryClass = ModuleHeader.Name
+ Lib.SupModuleList = DataType.SUP_MODULE_LIST
+ ModuleHeader.LibraryClass.append(Lib)
+
+ # we need to make some key defines resolved first
+ for D in NotProcessedDefineList:
+ if D[0] == TAB_INF_DEFINES_LIBRARY_CLASS:
+ List = GetSplitValueList(D[1], DataType.TAB_VALUE_SPLIT, 1)
+ Lib = LibraryClassClass()
+ Lib.LibraryClass = CleanString(List[0])
+ if len(List) == 1:
+ Lib.SupModuleList = DataType.SUP_MODULE_LIST
+ elif len(List) == 2:
+ Lib.SupModuleList = GetSplitValueList(CleanString(List[1]), ' ')
+ ModuleHeader.LibraryClass.append(Lib)
+ elif D[0] == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
+ List = D[1].split(DataType.TAB_VALUE_SPLIT)
+ if len(List) == 2:
+ ModuleHeader.CustomMakefile[CleanString(List[0])] = CleanString(List[1])
+ else:
+ RaiseParserError(D[1], 'CUSTOM_MAKEFILE of Defines', File, 'CUSTOM_MAKEFILE=<Family>|<Filename>', D[2])
+ elif D[0] == TAB_INF_DEFINES_ENTRY_POINT:
+ Image = ModuleExternImageClass()
+ Image.ModuleEntryPoint = CleanString(D[1])
+ self.Module.ExternImages.append(Image)
+ elif D[0] == TAB_INF_DEFINES_UNLOAD_IMAGE:
+ Image = ModuleExternImageClass()
+ Image.ModuleUnloadImage = CleanString(D[1])
+ self.Module.ExternImages.append(Image)
+ elif D[0] == TAB_INF_DEFINES_CONSTRUCTOR:
+ LibraryClass = ModuleExternLibraryClass()
+ LibraryClass.Constructor = CleanString(D[1])
+ self.Module.ExternLibraries.append(LibraryClass)
+ elif D[0] == TAB_INF_DEFINES_DESTRUCTOR:
+ LibraryClass = ModuleExternLibraryClass()
+ LibraryClass.Destructor = CleanString(D[1])
+ self.Module.ExternLibraries.append(LibraryClass)
+ elif D[0] == TAB_INF_DEFINES_DEFINE:
+ List = D[1].split(DataType.TAB_EQUAL_SPLIT)
+ if len(List) != 2:
+ RaiseParserError(Item, 'DEFINE of Defines', File, 'DEFINE <Word> = <Word>', D[2])
+ else:
+ ModuleHeader.Define[CleanString(List[0])] = CleanString(List[1])
+ elif D[0] == TAB_INF_DEFINES_SPEC:
+ List = D[1].split(DataType.TAB_EQUAL_SPLIT)
+ if len(List) != 2:
+ RaiseParserError(Item, 'SPEC of Defines', File, 'SPEC <Word> = <Version>', D[2])
+ else:
+ ModuleHeader.Specification[CleanString(List[0])] = CleanString(List[1])
+
+ #
+ # Get version of INF
+ #
+ if ModuleHeader.InfVersion != "":
+ # R9 inf
+ VersionNumber = ModuleHeader.VersionNumber
+ VersionString = ModuleHeader.VersionString
+ if len(VersionNumber) > 0 and len(VersionString) == 0:
+ EdkLogger.warn(2000, 'VERSION_NUMBER depricated; INF file %s should be modified to use VERSION_STRING instead.' % self.Identification.FileFullPath)
+ ModuleHeader.Version = VersionNumber
+ if len(VersionString) > 0:
+ if len(VersionNumber) > 0:
+ EdkLogger.warn(2001, 'INF file %s defines both VERSION_NUMBER and VERSION_STRING, using VERSION_STRING' % self.Identification.FileFullPath)
+ ModuleHeader.Version = VersionString
+ else:
+ # R8 inf
+ ModuleHeader.InfVersion = "0x00010000"
+ if ModuleHeader.ComponentType in gComponentType2ModuleType:
+ ModuleHeader.ModuleType = gComponentType2ModuleType[ModuleHeader.ComponentType]
+ elif ModuleHeader.ComponentType != '':
+ EdkLogger.error("Parser", PARSER_ERROR, "Unsupported R8 component type [%s]" % ModuleHeader.ComponentType, ExtraData=File, RaiseError = EdkLogger.IsRaiseError)
+
+ self.Module.Header[Arch] = ModuleHeader
+
+
+ ## GenBuildOptions
+ #
+ # Gen BuildOptions of Inf
+ # [<Family>:]<ToolFlag>=Flag
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenBuildOptions(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
+ BuildOptions = {}
+ #
+ # Get all BuildOptions
+ #
+ RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (Family, ToolChain, Flag) = GetBuildOption(Record[0], ContainerFile, Record[2])
+ MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)
+ #
+ # Update to Database
+ #
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Family), ConvertToSqlString2(ToolChain), ConvertToSqlString2(Flag), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+ for Key in BuildOptions.keys():
+ BuildOption = BuildOptionClass(Key[0], Key[1], Key[2])
+ BuildOption.SupArchList = BuildOptions[Key]
+ self.Module.BuildOptions.append(BuildOption)
+
+ ## GenIncludes
+ #
+ # Gen Includes of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenIncludes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+ Includes = sdict()
+ #
+ # Get all Includes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ MergeArches(Includes, Record[0], Arch)
+
+ for Key in Includes.keys():
+ Include = IncludeClass()
+ Include.FilePath = NormPath(Key)
+ Include.SupArchList = Includes[Key]
+ self.Module.Includes.append(Include)
+
+ ## GenLibraries
+ #
+ # Gen Libraries of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenLibraries(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARIES)
+ Libraries = sdict()
+ #
+ # Get all Includes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_INSTANCE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ MergeArches(Libraries, Record[0], Arch)
+
+ for Key in Libraries.keys():
+ Library = ModuleLibraryClass()
+ # replace macro and remove file extension
+ Library.Library = Key.rsplit('.', 1)[0]
+ Library.SupArchList = Libraries[Key]
+ self.Module.Libraries.append(Library)
+
+ ## GenLibraryClasses
+ #
+ # Get LibraryClass of Inf
+ # <LibraryClassKeyWord>|<LibraryInstance>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenLibraryClasses(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+ LibraryClasses = {}
+ #
+ # Get all LibraryClasses
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
+ MergeArches(LibraryClasses, (LibClassName, LibClassIns, Pcd, SupModelList), Arch)
+ #
+ # Update to Database
+ #
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(LibClassName), ConvertToSqlString2(LibClassIns), ConvertToSqlString2(SupModelList), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+ for Key in LibraryClasses.keys():
+ KeyList = Key[0].split(DataType.TAB_VALUE_SPLIT)
+ LibraryClass = LibraryClassClass()
+ LibraryClass.LibraryClass = Key[0]
+ LibraryClass.RecommendedInstance = NormPath(Key[1])
+ LibraryClass.FeatureFlag = Key[2]
+ LibraryClass.SupArchList = LibraryClasses[Key]
+ LibraryClass.SupModuleList = GetSplitValueList(Key[3])
+ self.Module.LibraryClasses.append(LibraryClass)
+
+ ## GenPackages
+ #
+ # Gen Packages of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenPackages(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES)
+ Packages = {}
+ #
+ # Get all Packages
+ #
+ RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (Package, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2])
+ MergeArches(Packages, (Package, Pcd), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Package), ConvertToSqlString2(Pcd), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+
+ for Key in Packages.keys():
+ Package = ModulePackageDependencyClass()
+ Package.FilePath = NormPath(Key[0])
+ Package.SupArchList = Packages[Key]
+ Package.FeatureFlag = Key[1]
+ self.Module.PackageDependencies.append(Package)
+
+ ## GenNmakes
+ #
+ # Gen Nmakes of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenNmakes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_NMAKE)
+ Nmakes = sdict()
+ #
+ # Get all Nmakes
+ #
+ RecordSet = self.RecordSet[MODEL_META_DATA_NMAKE]
+
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ MergeArches(Nmakes, Record[0], Arch)
+
+ for Key in Nmakes.keys():
+ List = GetSplitValueList(Key, DataType.TAB_EQUAL_SPLIT, MaxSplit=1)
+ if len(List) != 2:
+ RaiseParserError(Key, 'Nmake', ContainerFile, '<MacroName> = <Value>')
+ continue
+ Nmake = ModuleNmakeClass()
+ Nmake.Name = List[0]
+ Nmake.Value = List[1]
+ Nmake.SupArchList = Nmakes[Key]
+ self.Module.Nmake.append(Nmake)
+
+ # convert R8 format to R9 format
+ if Nmake.Name == "IMAGE_ENTRY_POINT":
+ Image = ModuleExternImageClass()
+ Image.ModuleEntryPoint = Nmake.Value
+ self.Module.ExternImages.append(Image)
+ elif Nmake.Name == "DPX_SOURCE":
+ Source = ModuleSourceFileClass(NormPath(Nmake.Value), "", "", "", "", Nmake.SupArchList)
+ self.Module.Sources.append(Source)
+ else:
+ ToolList = gNmakeFlagPattern.findall(Nmake.Name)
+ if len(ToolList) == 0 or len(ToolList) != 1:
+ EdkLogger.warn("\nParser", "Don't know how to do with MACRO: %s" % Nmake.Name,
+ ExtraData=ContainerFile)
+ else:
+ if ToolList[0] in gNmakeFlagName2ToolCode:
+ Tool = gNmakeFlagName2ToolCode[ToolList[0]]
+ else:
+ Tool = ToolList[0]
+ BuildOption = BuildOptionClass("MSFT", "*_*_*_%s_FLAGS" % Tool, Nmake.Value)
+ BuildOption.SupArchList = Nmake.SupArchList
+ self.Module.BuildOptions.append(BuildOption)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Inf
+ # <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPcds(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+ Pcds = {}
+ PcdToken = {}
+
+ #
+ # Get all Guids
+ #
+ RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+ RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+ RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+ RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+ RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet1:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ if self.Module.Header[Arch].LibraryClass != {}:
+ pass
+ (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet2:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet3:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet4:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet5:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], "", ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ #
+ # Update to database
+ #
+ if self.IsToDatabase:
+ for Key in PcdToken.keys():
+ SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key)
+ self.TblInf.Exec(SqlCommand)
+
+ for Key in Pcds.keys():
+ Pcd = PcdClass()
+ Pcd.CName = Key[1]
+ Pcd.TokenSpaceGuidCName = Key[0]
+ Pcd.DefaultValue = Key[2]
+ Pcd.ItemType = Key[3]
+ Pcd.SupArchList = Pcds[Key]
+ self.Module.PcdCodes.append(Pcd)
+
+ ## GenSources
+ #
+ # Gen Sources of Inf
+ # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenSources(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES)
+ Sources = {}
+
+ #
+ # Get all Nmakes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2])
+ MergeArches(Sources, (Filename, Family, TagName, ToolCode, Pcd), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s', Value5 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Filename), ConvertToSqlString2(Family), ConvertToSqlString2(TagName), ConvertToSqlString2(ToolCode), ConvertToSqlString2(Pcd), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+ for Key in Sources.keys():
+ Source = ModuleSourceFileClass(Key[0], Key[2], Key[3], Key[1], Key[4], Sources[Key])
+ self.Module.Sources.append(Source)
+
+ ## GenUserExtensions
+ #
+ # Gen UserExtensions of Inf
+ #
+ def GenUserExtensions(self, ContainerFile):
+# #
+# # UserExtensions
+# #
+# if self.UserExtensions != '':
+# UserExtension = UserExtensionsClass()
+# Lines = self.UserExtensions.splitlines()
+# List = GetSplitValueList(Lines[0], DataType.TAB_SPLIT, 2)
+# if len(List) != 3:
+# RaiseParserError(Lines[0], 'UserExtensions', File, "UserExtensions.UserId.'Identifier'")
+# else:
+# UserExtension.UserID = List[1]
+# UserExtension.Identifier = List[2][0:-1].replace("'", '').replace('\"', '')
+# for Line in Lines[1:]:
+# UserExtension.Content = UserExtension.Content + CleanString(Line) + '\n'
+# self.Module.UserExtensions.append(UserExtension)
+ pass
+
+ ## GenDepexes
+ #
+ # Gen Depex of Inf
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenDepexes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX)
+ Depex = {}
+ #
+ # Get all Depexes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_DEPEX]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ Line = ''
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ Line = Line + Record[0] + ' '
+ if Line != '':
+ MergeArches(Depex, Line, Arch)
+
+ for Key in Depex.keys():
+ Dep = ModuleDepexClass()
+ Dep.Depex = Key
+ Dep.SupArchList = Depex[Key]
+ self.Module.Depex.append(Dep)
+
+ ## GenBinaries
+ #
+ # Gen Binary of Inf
+ # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenBinaries(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_BINARIES)
+ Binaries = {}
+
+ #
+ # Get all Guids
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2])
+ MergeArches(Binaries, (FileType, Filename, Target, Pcd), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(FileType), ConvertToSqlString2(Filename), ConvertToSqlString2(Target), ConvertToSqlString2(Pcd), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+ for Key in Binaries.keys():
+ Binary = ModuleBinaryFileClass(NormPath(Key[1]), Key[0], Key[2], Key[3], Binaries[Key])
+ self.Module.Binaries.append(Binary)
+
+ ## GenGuids
+ #
+ # Gen Guids of Inf
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenGuidProtocolPpis(self, Type, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+ Lists = {}
+ #
+ # Get all Items
+ #
+ RecordSet = self.RecordSet[Section[Type.upper()]]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Lists, (Name, Value), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3])
+ self.TblInf.Exec(SqlCommand)
+
+ ListMember = None
+ if Type == TAB_GUIDS:
+ ListMember = self.Module.Guids
+ elif Type == TAB_PROTOCOLS:
+ ListMember = self.Module.Protocols
+ elif Type == TAB_PPIS:
+ ListMember = self.Module.Ppis
+
+ for Key in Lists.keys():
+ ListClass = GuidProtocolPpiCommonClass()
+ ListClass.CName = Key[0]
+ ListClass.SupArchList = Lists[Key]
+ ListClass.FeatureFlag = Key[1]
+ ListMember.append(ListClass)
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    # Parse a well-known sample module from the workspace
    # (assumes the WORKSPACE environment variable is set — TODO confirm)
    WorkspacePath = os.getenv('WORKSPACE')
    InfFilePath = os.path.join(WorkspacePath, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf')

    InfDatabase = Database.Database('Inf.db')
    InfDatabase.InitDatabase()

    InfParser = Inf(os.path.normpath(InfFilePath), True, True, WorkspacePath, InfDatabase)
    InfParser.ShowModule()

    InfDatabase.Close()
diff --git a/BaseTools/Source/Python/Common/InfClassObjectLight.py b/BaseTools/Source/Python/Common/InfClassObjectLight.py new file mode 100644 index 0000000000..a655828e6a --- /dev/null +++ b/BaseTools/Source/Python/Common/InfClassObjectLight.py @@ -0,0 +1,876 @@ +## @file
+# This file is used to define each component of INF file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+
+from CommonDataClass.ModuleClass import *
+from CommonDataClass import CommonClass
+from String import *
+from DataType import *
+from BuildToolError import *
+from Misc import sdict
+from Misc import GetFiles
+from Parsing import *
+
+# Global variable
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+ TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+ TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+ TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+ TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+gComponentType2ModuleType = {
+ "LIBRARY" : "BASE",
+ "SECURITY_CORE" : "SEC",
+ "PEI_CORE" : "PEI_CORE",
+ "COMBINED_PEIM_DRIVER" : "PEIM",
+ "PIC_PEIM" : "PEIM",
+ "RELOCATABLE_PEIM" : "PEIM",
+ "PE32_PEIM" : "PEIM",
+ "BS_DRIVER" : "DXE_DRIVER",
+ "RT_DRIVER" : "DXE_RUNTIME_DRIVER",
+ "SAL_RT_DRIVER" : "DXE_SAL_DRIVER",
+ "APPLICATION" : "UEFI_APPLICATION",
+ "LOGO" : "BASE",
+}
+
## InfHeader
#
# INF [Defines] section header: exposes the mapped ModuleHeaderClass
# attributes through dictionary-style access keyed by INF define names.
#
class InfHeader(ModuleHeaderClass):
    # Maps an INF [Defines] key (TAB_* constant) to the attribute name
    # that stores its value on this object.
    _Mapping_ = {
        # Required Fields
        TAB_INF_DEFINES_BASE_NAME                 : "Name",
        TAB_INF_DEFINES_FILE_GUID                 : "Guid",
        TAB_INF_DEFINES_MODULE_TYPE               : "ModuleType",
        TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION : "EfiSpecificationVersion",
        TAB_INF_DEFINES_EDK_RELEASE_VERSION       : "EdkReleaseVersion",

        # Optional Fields
        TAB_INF_DEFINES_INF_VERSION               : "InfVersion",
        TAB_INF_DEFINES_BINARY_MODULE             : "BinaryModule",
        TAB_INF_DEFINES_COMPONENT_TYPE            : "ComponentType",
        TAB_INF_DEFINES_MAKEFILE_NAME             : "MakefileName",
        TAB_INF_DEFINES_BUILD_NUMBER              : "BuildNumber",
        TAB_INF_DEFINES_BUILD_TYPE                : "BuildType",
        TAB_INF_DEFINES_FFS_EXT                   : "FfsExt",
        TAB_INF_DEFINES_FV_EXT                    : "FvExt",
        TAB_INF_DEFINES_SOURCE_FV                 : "SourceFv",
        TAB_INF_DEFINES_VERSION_NUMBER            : "VersionNumber",
        TAB_INF_DEFINES_VERSION_STRING            : "VersionString",
        TAB_INF_DEFINES_VERSION                   : "Version",
        TAB_INF_DEFINES_PCD_IS_DRIVER             : "PcdIsDriver",
        TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H       : "TianoR8FlashMap_h",
        TAB_INF_DEFINES_SHADOW                    : "Shadow",
    }

    def __init__(self):
        ModuleHeaderClass.__init__(self)
        self.VersionNumber = ''
        self.VersionString = ''

    ## Dictionary-style write: store Value under the mapped attribute name
    def __setitem__(self, Key, Value):
        self.__dict__[self._Mapping_[Key]] = Value

    ## Dictionary-style read of a mapped attribute
    def __getitem__(self, Key):
        return self.__dict__[self._Mapping_[Key]]

    ## "in" test support
    def __contains__(self, Key):
        return Key in self._Mapping_
+
## InfObject
#
# This class defined basic Inf object which is used by inheriting
#
# @param object: Inherited from object class
#
class InfObject(object):
    def __init__(self):
        # Fix: the original called object.__init__() with no instance, which
        # raises TypeError the moment InfObject() is constructed (the base
        # __init__ descriptor needs an argument in both Python 2 and 3).
        object.__init__(self)
+
## Inf
#
# This class defined the structure used in Inf object
#
# @param InfObject:      Inherited from InfObject class
# @param Filename:       Input value for full path of the Inf file, default is None
# @param IsToModule:     Input value for IsToModule
#                        True is to transfer to ModuleObject automatically
#                        False is not to transfer to ModuleObject automatically
#                        default is False
# @param WorkspaceDir:   Input value for current workspace directory, default is None
# @param PackageDir:     Input value for the directory of the package owning this
#                        module, relative to the workspace, default is None
# @param SupArchList:    Input value for the list of supported ARCHs,
#                        default is DataType.ARCH_LIST
#
# @var Identification:   To store value for Identification, it is a structure as IdentificationClass
# @var Module:           To store value for Module, it is a structure as ModuleClass
# @var WorkspaceDir:     To store value for WorkspaceDir
# @var KeyList:          To store value for KeyList, a list for all Keys used in Inf
#
class Inf(InfObject):
    def __init__(self, Filename = None, IsToModule = False, WorkspaceDir = None, PackageDir = None, SupArchList = DataType.ARCH_LIST):
        self.Identification = IdentificationClass()
        self.Module = ModuleClass()
        self.WorkspaceDir = WorkspaceDir
        self.PackageDir = PackageDir
        self.SupArchList = SupArchList

        # All section names this parser recognizes
        self.KeyList = [
            TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS,
            TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_INF_FIXED_PCD,
            TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD, TAB_INF_PCD_EX,
            TAB_DEPEX, TAB_INF_DEFINES
        ]
        # Upper all KEYs to ignore case sensitive when parsing
        # NOTE: Python 2 map() returns a list here.
        self.KeyList = map(lambda c: c.upper(), self.KeyList)

        # Init RecordSet: one record list per section model
        self.RecordSet = {}
        for Key in self.KeyList:
            self.RecordSet[Section[Key]] = []

        # Init Comment
        self.SectionHeaderCommentDict = {}

        # Load Inf file if filename is not None
        if Filename != None:
            self.LoadInfFile(Filename)

        # Transfer to Module Object if IsToModule is True
        if IsToModule:
            self.InfToModule()

    ## Module Object to INF file
    #
    # Serialize a ModuleClass object back into INF file text and return it
    # as a single string.
    #
    def ModuleToInf(self, Module):
        Inf = ''
        InfList = sdict()
        SectionHeaderCommentDict = {}
        if Module == None:
            return Inf

        ModuleHeader = Module.ModuleHeader
        TmpList = []
        # Common define items
        if ModuleHeader.Name:
            TmpList.append(TAB_INF_DEFINES_BASE_NAME + ' = ' + ModuleHeader.Name)
        if ModuleHeader.Guid:
            TmpList.append(TAB_INF_DEFINES_FILE_GUID + ' = ' + ModuleHeader.Guid)
        if ModuleHeader.Version:
            TmpList.append(TAB_INF_DEFINES_VERSION_STRING + ' = ' + ModuleHeader.Version)
        if ModuleHeader.ModuleType:
            TmpList.append(TAB_INF_DEFINES_MODULE_TYPE + ' = ' + ModuleHeader.ModuleType)
        if ModuleHeader.PcdIsDriver:
            TmpList.append(TAB_INF_DEFINES_PCD_IS_DRIVER + ' = ' + ModuleHeader.PcdIsDriver)
        # Externs
        for Item in Module.Externs:
            if Item.EntryPoint:
                TmpList.append(TAB_INF_DEFINES_ENTRY_POINT + ' = ' + Item.EntryPoint)
            if Item.UnloadImage:
                TmpList.append(TAB_INF_DEFINES_UNLOAD_IMAGE + ' = ' + Item.UnloadImage)
            if Item.Constructor:
                TmpList.append(TAB_INF_DEFINES_CONSTRUCTOR + ' = ' + Item.Constructor)
            if Item.Destructor:
                TmpList.append(TAB_INF_DEFINES_DESTRUCTOR + ' = ' + Item.Destructor)
        # Other define items
        if Module.UserExtensions != None:
            for Item in Module.UserExtensions.Defines:
                TmpList.append(Item)
        InfList['Defines'] = TmpList
        if ModuleHeader.Description != '':
            SectionHeaderCommentDict['Defines'] = ModuleHeader.Description

        if Module.UserExtensions != None:
            InfList['BuildOptions'] = Module.UserExtensions.BuildOptions

        for Item in Module.Includes:
            Key = 'Includes.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            Value.append(Item.FilePath)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.LibraryClasses:
            Key = 'LibraryClasses.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            NewValue = Item.LibraryClass
            if Item.RecommendedInstance:
                NewValue = NewValue + '|' + Item.RecommendedInstance
            if Item.FeatureFlag:
                NewValue = NewValue + '|' + Item.FeatureFlag
            Value.append(NewValue)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.PackageDependencies:
            Key = 'Packages.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            Value.append(Item.FilePath)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.PcdCodes:
            Key = 'Pcds' + Item.ItemType + '.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            NewValue = Item.TokenSpaceGuidCName + '.' + Item.CName
            if Item.DefaultValue != '':
                NewValue = NewValue + '|' + Item.DefaultValue
            Value.append(NewValue)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.Sources:
            Key = 'Sources.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            NewValue = Item.SourceFile
            if Item.ToolChainFamily != '':
                NewValue = NewValue + '|' + Item.ToolChainFamily
            if Item.TagName != '':
                NewValue = NewValue + '|' + Item.TagName
            if Item.ToolCode != '':
                NewValue = NewValue + '|' + Item.ToolCode
            if Item.FeatureFlag != '':
                NewValue = NewValue + '|' + Item.FeatureFlag
            Value.append(NewValue)
            if Item.HelpText != '':
                SectionHeaderCommentDict[Key] = Item.HelpText
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.Guids:
            Key = 'Guids.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            Value.append(Item.CName)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.Protocols:
            Key = 'Protocols.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            Value.append(Item.CName)
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.Ppis:
            Key = 'Ppis.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            Value.append(Item.CName)
            GenMetaDatSectionItem(Key, Value, InfList)

        # At most one of the three Depex members is expected to be set;
        # later ones would overwrite the same 'Depex' key — TODO confirm
        if Module.PeiDepex:
            Key = 'Depex'
            Value = Module.PeiDepex.Depex
            GenMetaDatSectionItem(Key, Value, InfList)

        if Module.DxeDepex:
            Key = 'Depex'
            Value = Module.DxeDepex.Depex
            GenMetaDatSectionItem(Key, Value, InfList)

        if Module.SmmDepex:
            Key = 'Depex'
            Value = Module.SmmDepex.Depex
            GenMetaDatSectionItem(Key, Value, InfList)

        for Item in Module.Binaries:
            Key = 'Binaries.' + GetStringOfList(Item.SupArchList)
            Value = GetHelpTextList(Item.HelpTextList)
            NewValue = Item.FileType + '|' + Item.BinaryFile + '|' + Item.Target
            if Item.FeatureFlag != '':
                NewValue = NewValue + '|' + Item.FeatureFlag
            Value.append(NewValue)
            GenMetaDatSectionItem(Key, Value, InfList)

        # Transfer Module to Inf: emit each section header (with any stored
        # comment above it) followed by its items
        for Key in InfList:
            if Key in SectionHeaderCommentDict:
                List = SectionHeaderCommentDict[Key].split('\r')
                for Item in List:
                    Inf = Inf + Item + '\n'
            Inf = Inf + '[' + Key + ']' + '\n'
            for Value in InfList[Key]:
                if type(Value) == type([]):
                    for SubValue in Value:
                        Inf = Inf + '  ' + SubValue + '\n'
                else:
                    Inf = Inf + '  ' + Value + '\n'
            Inf = Inf + '\n'

        return Inf


    ## Transfer to Module Object
    #
    # Transfer all contents of an Inf file to a standard Module Object
    #
    def InfToModule(self):
        # Init global information for the file
        ContainerFile = self.Identification.FullPath

        # Generate Module Header
        self.GenModuleHeader(ContainerFile)

        # Generate BuildOptions
        self.GenBuildOptions(ContainerFile)

        # Generate Includes
        self.GenIncludes(ContainerFile)

        # Generate LibraryClasses
        self.GenLibraryClasses(ContainerFile)

        # Generate Packages
        self.GenPackages(ContainerFile)

        # Generate Pcds
        self.GenPcds(ContainerFile)

        # Generate Sources
        self.GenSources(ContainerFile)

        # Generate Guids
        self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)

        # Generate Protocols
        self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)

        # Generate Ppis
        self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)

        # Generate Depexes
        self.GenDepexes(ContainerFile)

        # Generate Binaries
        self.GenBinaries(ContainerFile)

        # Init MiscFiles
        self.GenMiscFiles(ContainerFile)

    ## GenMiscFiles
    #
    # Collect all files under the module directory (excluding CVS/.svn
    # metadata) into a MiscFileClass attached to the module.
    #
    def GenMiscFiles(self, ContainerFile):
        MiscFiles = MiscFileClass()
        MiscFiles.Name = 'ModuleFiles'
        for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
            File = CommonClass.FileClass()
            File.Filename = Item
            MiscFiles.Files.append(File)
        self.Module.MiscFiles = MiscFiles

    ## Load Inf file
    #
    # Load the file if it exists
    #
    # @param Filename: Input value for filename of Inf file
    #
    def LoadInfFile(self, Filename):
        # Insert a record for file
        Filename = NormPath(Filename)

        self.Identification.FullPath = Filename
        (self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
        if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
            self.Identification.ModulePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])
        if self.PackageDir:
            self.Identification.PackagePath = self.PackageDir
            if self.Identification.ModulePath.find(self.PackageDir) == 0:
                # Make ModulePath relative to the package directory
                self.Identification.ModulePath = self.Identification.ModulePath[len(self.PackageDir) + 1:]

        # Init common datas
        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
            [], [], TAB_UNKNOWN, [], [], []
        LineNo = 0

        # Parse file content
        IsFindBlockComment = False
        ReservedLine = ''
        Comment = ''
        for Line in open(Filename, 'r'):
            LineNo = LineNo + 1
            # Remove comment block (R8-style /* ... */ markers); text before
            # the start marker is carried in ReservedLine until the end marker
            if Line.find(TAB_COMMENT_R8_START) > -1:
                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
                if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
                    Comment = Comment + Line.strip() + '\n'
                    ReservedLine = ''
                else:
                    Comment = Comment + Line[len(ReservedLine):] + '\n'
                IsFindBlockComment = True
                if not ReservedLine:
                    continue
            if Line.find(TAB_COMMENT_R8_END) > -1:
                Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
                ReservedLine = ''
                IsFindBlockComment = False
            if IsFindBlockComment:
                Comment = Comment + Line.strip() + '\n'
                continue

            # Remove comments at tail and remove spaces again
            if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
                Comment = Comment + Line.strip() + '\n'
            Line = CleanString(Line)
            if Line == '':
                continue

            ## Find a new section tab
            # First insert previous section items
            # And then parse the content of the new section
            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
                if Line[1:3] == "--":
                    continue
                Model = Section[CurrentSection.upper()]
                # Insert items data of previous section
                InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)

                # Parse the new section
                SectionItemList = []
                ArchList = []
                ThirdList = []

                CurrentSection = ''
                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
                for Item in LineList:
                    # Each item looks like <SectionName>[.<Arch>[.<Third>]]
                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
                    if CurrentSection == '':
                        CurrentSection = ItemList[0]
                    else:
                        if CurrentSection != ItemList[0]:
                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                    if CurrentSection.upper() not in self.KeyList:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    # Pad so ItemList always has at least 3 entries
                    ItemList.append('')
                    ItemList.append('')
                    if len(ItemList) > 5:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    else:
                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                        ArchList.append(ItemList[1].upper())
                        ThirdList.append(ItemList[2])

                # Any comment accumulated so far becomes the section header comment
                if Comment:
                    if Comment.endswith('\n'):
                        Comment = Comment[:len(Comment) - len('\n')]
                    self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
                    Comment = ''
                continue

            # Not in any defined section
            if CurrentSection == TAB_UNKNOWN:
                ErrorMsg = "%s is not in any defined section" % Line
                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)

            # Add a section item: [content, line number, accumulated comment]
            SectionItemList.append([Line, LineNo, Comment])
            Comment = ''
            # End of parse
        #End of For

        # Insert items data of last section
        Model = Section[CurrentSection.upper()]
        InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
        if Comment != '':
            self.SectionHeaderCommentDict[Model] = Comment
            Comment = ''

    ## Show detailed information of Module
    #
    # Print all members and their values of Module class
    #
    def ShowModule(self):
        M = self.Module
        print 'Filename =', M.ModuleHeader.FileName
        print 'FullPath =', M.ModuleHeader.FullPath
        print 'RelaPath =', M.ModuleHeader.RelaPath
        print 'PackagePath =', M.ModuleHeader.PackagePath
        print 'ModulePath =', M.ModuleHeader.ModulePath
        print 'CombinePath =', M.ModuleHeader.CombinePath

        print 'BaseName =', M.ModuleHeader.Name
        print 'Guid =', M.ModuleHeader.Guid
        print 'Version =', M.ModuleHeader.Version

        print '\nIncludes ='
        for Item in M.Includes:
            print Item.FilePath, Item.SupArchList
        print '\nLibraryClasses ='
        for Item in M.LibraryClasses:
            print Item.LibraryClass, Item.RecommendedInstance, Item.RecommendedInstanceGuid, Item.RecommendedInstanceVersion, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define
        print '\nPackageDependencies ='
        for Item in M.PackageDependencies:
            print Item.FilePath, Item.SupArchList, Item.FeatureFlag
        print '\nPcds ='
        for Item in M.PcdCodes:
            print '\tCName=',Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList
        print '\nSources ='
        for Source in M.Sources:
            print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList
        print '\nGuids ='
        for Item in M.Guids:
            print Item.CName, Item.SupArchList, Item.FeatureFlag
        print '\nProtocols ='
        for Item in M.Protocols:
            print Item.CName, Item.SupArchList, Item.FeatureFlag
        print '\nPpis ='
        for Item in M.Ppis:
            print Item.CName, Item.SupArchList, Item.FeatureFlag
        print '\nDepex ='
        for Item in M.Depex:
            print Item.Depex, Item.SupArchList, Item.Define
        print '\nBinaries ='
        for Binary in M.Binaries:
            print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList
        print '\n*** FileList ***'
        for Item in M.MiscFiles.Files:
            print Item.Filename
        print '****************\n'

    ## Convert [Defines] section content to ModuleHeaderClass
    #
    # Convert [Defines] section content to ModuleHeaderClass
    #
    # @param ContainerFile  The Inf file full path
    #
    def GenModuleHeader(self, ContainerFile):
        EdkLogger.debug(2, "Generate ModuleHeader ...")
        # Update all defines item in database
        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]

        ModuleHeader = ModuleHeaderClass()
        ModuleExtern = ModuleExternClass()
        OtherDefines = []
        for Record in RecordSet:
            # Each record is expected to be a '<Name> = <Value>' line;
            # anything else is preserved as a user-extension define
            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
            if len(ValueList) != 2:
                OtherDefines.append(Record[0])
            else:
                Name = ValueList[0]
                Value = ValueList[1]
                if Name == TAB_INF_DEFINES_BASE_NAME:
                    ModuleHeader.Name = Value
                    ModuleHeader.BaseName = Value
                elif Name == TAB_INF_DEFINES_FILE_GUID:
                    ModuleHeader.Guid = Value
                elif Name == TAB_INF_DEFINES_VERSION_STRING:
                    ModuleHeader.Version = Value
                elif Name == TAB_INF_DEFINES_PCD_IS_DRIVER:
                    ModuleHeader.PcdIsDriver = Value
                elif Name == TAB_INF_DEFINES_MODULE_TYPE:
                    ModuleHeader.ModuleType = Value
                elif Name == TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION:
                    ModuleHeader.UefiSpecificationVersion = Value
                elif Name == TAB_INF_DEFINES_PI_SPECIFICATION_VERSION:
                    ModuleHeader.PiSpecificationVersion = Value
                elif Name == TAB_INF_DEFINES_ENTRY_POINT:
                    ModuleExtern.EntryPoint = Value
                elif Name == TAB_INF_DEFINES_UNLOAD_IMAGE:
                    ModuleExtern.UnloadImage = Value
                elif Name == TAB_INF_DEFINES_CONSTRUCTOR:
                    ModuleExtern.Constructor = Value
                elif Name == TAB_INF_DEFINES_DESTRUCTOR:
                    ModuleExtern.Destructor = Value
                else:
                    OtherDefines.append(Record[0])
        ModuleHeader.FileName = self.Identification.FileName
        ModuleHeader.FullPath = self.Identification.FullPath
        ModuleHeader.RelaPath = self.Identification.RelaPath
        ModuleHeader.PackagePath = self.Identification.PackagePath
        ModuleHeader.ModulePath = self.Identification.ModulePath
        ModuleHeader.CombinePath = os.path.normpath(os.path.join(ModuleHeader.PackagePath, ModuleHeader.ModulePath, ModuleHeader.FileName))

        if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
            ModuleHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]
        self.Module.ModuleHeader = ModuleHeader
        self.Module.Externs.append(ModuleExtern)
        UE = self.Module.UserExtensions
        if UE == None:
            UE = UserExtensionsClass()
        UE.Defines = OtherDefines
        self.Module.UserExtensions = UE

    ## GenBuildOptions
    #
    # Gen BuildOptions of Inf
    # [<Family>:]<ToolFlag>=Flag
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenBuildOptions(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
        BuildOptions = {}  # NOTE(review): never read — kept for byte-compatibility
        # Get all BuildOptions
        RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION]
        UE = self.Module.UserExtensions
        if UE == None:
            UE = UserExtensionsClass()
        for Record in RecordSet:
            UE.BuildOptions.append(Record[0])
        self.Module.UserExtensions = UE

    ## GenIncludes
    #
    # Gen Includes of Inf
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenIncludes(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
        Includes = sdict()  # NOTE(review): never read — kept for byte-compatibility
        # Get all Includes
        RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
        for Record in RecordSet:
            Include = IncludeClass()
            Include.FilePath = Record[0]
            Include.SupArchList = Record[1]
            if GenerateHelpText(Record[5], ''):
                Include.HelpTextList.append(GenerateHelpText(Record[5], ''))
            self.Module.Includes.append(Include)
            #self.Module.FileList.extend(GetFiles(os.path.normpath(os.path.join(self.Identification.FileRelativePath, Include.FilePath)), ['CVS', '.svn']))

    ## GenLibraryClasses
    #
    # Get LibraryClass of Inf
    # <LibraryClassKeyWord>|<LibraryInstance>
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenLibraryClasses(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
        LibraryClasses = {}  # NOTE(review): never read — kept for byte-compatibility
        # Get all LibraryClasses
        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
        for Record in RecordSet:
            (LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
            LibraryClass = CommonClass.LibraryClassClass()
            LibraryClass.LibraryClass = LibClassName
            LibraryClass.RecommendedInstance = LibClassIns
            LibraryClass.FeatureFlag = Pcd
            LibraryClass.SupArchList = Record[1]
            LibraryClass.SupModuleList = Record[4]
            if GenerateHelpText(Record[5], ''):
                LibraryClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
            self.Module.LibraryClasses.append(LibraryClass)

    ## GenPackages
    #
    # Gen Packages of Inf
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenPackages(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES)
        Packages = {}  # NOTE(review): never read — kept for byte-compatibility
        # Get all Packages
        RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE]
        for Record in RecordSet:
            (PackagePath, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2])
            Package = ModulePackageDependencyClass()
            Package.FilePath = NormPath(PackagePath)
            Package.SupArchList = Record[1]
            Package.FeatureFlag = Pcd
            if GenerateHelpText(Record[5], ''):
                Package.HelpTextList.append(GenerateHelpText(Record[5], ''))
            self.Module.PackageDependencies.append(Package)

    ## AddPcd
    #
    # Build one PcdClass from the given fields and append it to the module
    #
    def AddPcd(self, CName, TokenSpaceGuidCName, DefaultValue, ItemType, Arch, HelpTextList):
        Pcd = PcdClass()
        Pcd.CName = CName
        Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
        Pcd.DefaultValue = DefaultValue
        Pcd.ItemType = ItemType
        Pcd.SupArchList = Arch
        if GenerateHelpText(HelpTextList, ''):
            Pcd.HelpTextList.append(GenerateHelpText(HelpTextList, ''))
        self.Module.PcdCodes.append(Pcd)

    ## GenPcds
    #
    # Gen Pcds of Inf
    # <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenPcds(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
        Pcds = {}      # NOTE(review): never read — kept for byte-compatibility
        PcdToken = {}  # NOTE(review): never read — kept for byte-compatibility

        # Get all Pcds, one record set per PCD item type
        RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
        RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
        RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
        RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
        RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]

        # Go through each arch
        for Record in RecordSet1:
            (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
            self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
        for Record in RecordSet2:
            (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
            self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
        for Record in RecordSet3:
            (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
            self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
        for Record in RecordSet4:
            (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
            self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
        for Record in RecordSet5:
            (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], '', ContainerFile, Record[2])
            self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])

    ## GenSources
    #
    # Gen Sources of Inf
    # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenSources(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES)
        Sources = {}  # NOTE(review): never read — kept for byte-compatibility

        # Get all Sources
        RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE]
        for Record in RecordSet:
            (Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
            Source = ModuleSourceFileClass(Filename, TagName, ToolCode, Family, Pcd, Record[1])
            if GenerateHelpText(Record[5], ''):
                Source.HelpTextList.append(GenerateHelpText(Record[5], ''))
            if MODEL_EFI_SOURCE_FILE in self.SectionHeaderCommentDict:
                Source.HelpText = self.SectionHeaderCommentDict[MODEL_EFI_SOURCE_FILE]
            self.Module.Sources.append(Source)
            #self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))

    ## GenDepexes
    #
    # Gen Depex of Inf
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenDepexes(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX)
        Depex = {}  # NOTE(review): never read — kept for byte-compatibility
        # Get all Depexes and join them into one newline-separated expression
        RecordSet = self.RecordSet[MODEL_EFI_DEPEX]
        DepexString = ''
        for Record in RecordSet:
            DepexString = DepexString + Record[0] + '\n'
        Dep = ModuleDepexClass()
        if DepexString.endswith('\n'):
            DepexString = DepexString[:len(DepexString) - len('\n')]
        Dep.Depex = DepexString
        # Route the single depex to the member matching the module type
        if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
            self.Module.SmmDepex = Dep
        elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
            self.Module.PeiDepex = Dep
        else:
            self.Module.DxeDepex = Dep
#        for Record in RecordSet:
#
#            Dep = ModuleDepexClass()
#            Dep.Depex = Record[0]
#            Dep.SupArchList = Record[1]
#            if GenerateHelpText(Record[5], ''):
#                Dep.HelpTextList.append(GenerateHelpText(Record[5], ''))
#            DepexString = DepexString + Dep
#            List.append(Dep)
#            self.Module.Depex = List
#            if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
#                self.Module.SmmDepex = List
#            elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
#                self.Module.PeiDepex = List
#            else:
#                self.Module.DxeDepex = List

    ## GenBinaries
    #
    # Gen Binary of Inf
    # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
    #
    # @param ContainerFile: The Inf file full path
    #
    def GenBinaries(self, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % TAB_BINARIES)
        Binaries = {}  # NOTE(review): never read — kept for byte-compatibility

        # Get all binary records
        RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE]
        for Record in RecordSet:
            (FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
            Binary = ModuleBinaryFileClass(Filename, FileType, Target, Pcd, Record[1])
            if GenerateHelpText(Record[5], ''):
                Binary.HelpTextList.append(GenerateHelpText(Record[5], ''))
            self.Module.Binaries.append(Binary)
            #self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))

    ## GenGuidProtocolPpis
    #
    # Gen Guids, Protocols or Ppis of Inf, selected by Type
    # <CName>=<GuidValue>
    #
    # @param Type:          One of TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS
    # @param ContainerFile: The Inf file full path
    #
    def GenGuidProtocolPpis(self, Type, ContainerFile):
        EdkLogger.debug(2, "Generate %s ..." % Type)
        Lists = {}  # NOTE(review): never read — kept for byte-compatibility
        # Pick the module member list matching the section type
        if Type == TAB_GUIDS:
            ListMember = self.Module.Guids
        elif Type == TAB_PROTOCOLS:
            ListMember = self.Module.Protocols
        elif Type == TAB_PPIS:
            ListMember = self.Module.Ppis

        RecordSet = self.RecordSet[Section[Type.upper()]]
        for Record in RecordSet:
            (Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2])
            ListClass = GuidProtocolPpiCommonClass()
            ListClass.CName = Name
            ListClass.SupArchList = Record[1]
            ListClass.FeatureFlag = Value
            if GenerateHelpText(Record[5], ''):
                ListClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
            ListMember.append(ListClass)
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Standalone mode: parse a sample INF from the workspace, dump the parsed
    # module view, then print the INF text regenerated from the module object.
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.QUIET)

    # Assumes the WORKSPACE environment variable is set — TODO confirm
    W = os.getenv('WORKSPACE')
    F = os.path.join(W, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf')

    # Inf(Filename, IsToModule, WorkspaceDir, PackageDir)
    P = Inf(os.path.normpath(F), True, W, 'MdeModulePkg')
    P.ShowModule()
    print P.ModuleToInf(P.Module)
diff --git a/BaseTools/Source/Python/Common/MigrationUtilities.py b/BaseTools/Source/Python/Common/MigrationUtilities.py new file mode 100644 index 0000000000..8573f0b692 --- /dev/null +++ b/BaseTools/Source/Python/Common/MigrationUtilities.py @@ -0,0 +1,567 @@ +## @file
+# Contains several utilitities shared by migration tools.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+from optparse import OptionParser
+from Common.BuildToolError import *
+from XmlRoutines import *
+from CommonDataClass.CommonClass import *
+
## Set all fields of CommonClass object.
#
# Fill the attributes of a CommonClass object from an XML DOM node:
# Usage and SupArchList are whitespace-separated attribute lists,
# FeatureFlag is a plain attribute, and HelpText is a child element.
#
# @param Common     The destination CommonClass object.
# @param XmlCommon  The source XML DOM object.
#
def SetCommon(Common, XmlCommon):
    Common.Usage = XmlAttribute(XmlCommon, "Usage").split()
    Common.FeatureFlag = XmlAttribute(XmlCommon, "FeatureFlag")
    Common.SupArchList = XmlAttribute(XmlCommon, "SupArchList").split()
    Common.HelpText = XmlElement(XmlCommon, XmlNodeName(XmlCommon) + "/" + "HelpText")
+
+
## Set some fields of CommonHeaderClass object.
#
# Fill Name, Guid, Version, FileName and FullPath of a CommonHeaderClass
# object from an XML DOM node, a name tag and the XML file name.
#
# @param CommonHeader     The destination CommonHeaderClass object.
# @param XmlCommonHeader  The source XML DOM object.
# @param NameTag          The name tag in the XML DOM object.
# @param FileName         The file name of the XML file.
#
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    Parent = XmlNodeName(XmlCommonHeader)

    CommonHeader.Name = XmlElement(XmlCommonHeader, Parent + "/" + NameTag)
    CommonHeader.Guid = XmlElement(XmlCommonHeader, Parent + "/" + "GuidValue")
    CommonHeader.Version = XmlElement(XmlCommonHeader, Parent + "/" + "Version")

    # Record both the bare file name and its absolute path for later lookup
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
+
+
## Regular expression to match specification and value.
mReSpecification = re.compile(r"(?P<Specification>\w+)\s+(?P<Value>\w*)")

## Add specification to specification dictionary.
#
# Extract every (specification name, value) pair found in SpecificationString
# and record each pair in SpecificationDict; later pairs with the same name
# overwrite earlier ones.
#
# @param SpecificationDict    The destination specification dictionary.
# @param SpecificationString  The source string holding name/value pairs.
#
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Extract specification name/value pairs from SpecificationString."""
    Pairs = [(Match.group("Specification"), Match.group("Value"))
             for Match in mReSpecification.finditer(SpecificationString)]
    SpecificationDict.update(Pairs)
+
## Set all fields of CommonHeaderClass object.
#
# Fill all attributes of a CommonHeaderClass object from an XML DOM node:
# the simple text elements are copied verbatim, while the Specification
# element is parsed into the Specification dictionary.
#
# @param CommonHeader     The destination CommonHeaderClass object.
# @param XmlCommonHeader  The source XML DOM object.
#
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    Parent = XmlNodeName(XmlCommonHeader)

    # These child elements map one-to-one onto same-named attributes
    for Field in ("Abstract", "Description", "Copyright", "License", "ModuleType"):
        setattr(CommonHeader, Field, XmlElement(XmlCommonHeader, Parent + "/" + Field))

    Specification = XmlElement(XmlCommonHeader, Parent + "/" + "Specification")
    AddToSpecificationDict(CommonHeader.Specification, Specification)
+
+
## Load a new Cloned Record class object.
#
# Read an input XML ClonedRecord DOM object and return the ClonedRecordClass
# object it describes.
#
# @param XmlCloned  A child XML DOM object in a Common XML DOM.
#
# @retval ClonedRecord  A new ClonedRecordClass object created from XmlCloned.
#
def LoadClonedRecord(XmlCloned):
    ClonedRecord = ClonedRecordClass()

    # Id and FarGuid live in attributes; the rest are child elements of Cloned
    ClonedRecord.Id = int(XmlAttribute(XmlCloned, "Id"))
    ClonedRecord.FarGuid = XmlAttribute(XmlCloned, "FarGuid")

    for Field in ("PackageGuid", "PackageVersion", "ModuleGuid", "ModuleVersion"):
        setattr(ClonedRecord, Field, XmlElement(XmlCloned, "Cloned/" + Field))

    return ClonedRecord
+
+
## Load a new Guid/Protocol/Ppi common class object.
#
# Read an input XML Guid/Protocol/Ppi DOM object and return the
# GuidProtocolPpiCommonClass object it describes.
#
# @param XmlGuidProtocolPpiCommon  A child XML DOM object in a Common XML DOM.
#
# @retval GuidProtocolPpiCommon  A new GuidProtocolPpiCommonClass object
#                                created from XmlGuidProtocolPpiCommon.
#
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
    GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, "Name")

    Parent = XmlNodeName(XmlGuidProtocolPpiCommon)

    # The C-name element is named differently depending on the parent node
    if Parent == "Entry":
        CNameTag = "%s/C_Name" % Parent
    elif Parent == "GuidCNames":
        CNameTag = "%s/GuidCName" % Parent
    else:
        CNameTag = "%s/%sCName" % (Parent, Parent)
    GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, CNameTag)

    GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, Parent + "/" + "GuidValue")

    # A parent node name ending in "Notify" marks a notification registration
    if Parent.endswith("Notify"):
        GuidProtocolPpiCommon.Notify = True

    GuidProtocolPpiCommon.GuidTypeList = XmlAttribute(XmlGuidProtocolPpiCommon, "GuidTypeList").split()
    GuidProtocolPpiCommon.SupModuleList = XmlAttribute(XmlGuidProtocolPpiCommon, "SupModuleList").split()

    SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)

    return GuidProtocolPpiCommon
+
+
## Load a new Pcd class object.
#
# Read an input XML Pcd DOM object and return the PcdClass object it
# describes.
#
# @param XmlPcd  A child XML DOM object in a Common XML DOM.
#
# @retval Pcd  A new PcdClass object created from XmlPcd.
#
def LoadPcd(XmlPcd):
    """Return a new PcdClass object equivalent to XmlPcd"""
    Pcd = PcdClass()

    # Most fields are child elements of PcdEntry
    Pcd.CName = XmlElement(XmlPcd, "PcdEntry/C_Name")
    Pcd.Token = XmlElement(XmlPcd, "PcdEntry/Token")
    Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, "PcdEntry/TokenSpaceGuidCName")
    Pcd.DatumType = XmlElement(XmlPcd, "PcdEntry/DatumType")
    Pcd.MaxDatumSize = XmlElement(XmlPcd, "PcdEntry/MaxDatumSize")
    Pcd.DefaultValue = XmlElement(XmlPcd, "PcdEntry/DefaultValue")
    Pcd.ValidUsage = XmlElement(XmlPcd, "PcdEntry/ValidUsage").split()

    # ItemType and the supported-module list come from attributes
    Pcd.ItemType = XmlAttribute(XmlPcd, "PcdItemType")
    Pcd.SupModuleList = XmlAttribute(XmlPcd, "SupModuleList").split()

    SetCommon(Pcd, XmlPcd)

    return Pcd
+
+
## Load a new LibraryClass class object.
#
# Read an input XML LibraryClass DOM object and return the LibraryClassClass
# object it describes.
#
# @param XmlLibraryClass  A child XML DOM object in a Common XML DOM.
#
# @retval LibraryClass  A new LibraryClassClass object created from
#                       XmlLibraryClass.
#
def LoadLibraryClass(XmlLibraryClass):
    LibraryClass = LibraryClassClass()

    # Prefer the Keyword child element; fall back to the Name attribute
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, "LibraryClass/Keyword")
    if LibraryClass.LibraryClass == "":
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, "Name")

    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, "LibraryClass/IncludeHeader")
    LibraryClass.RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, "RecommendedInstanceVersion")
    LibraryClass.RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, "RecommendedInstanceGuid")
    LibraryClass.SupModuleList = XmlAttribute(XmlLibraryClass, "SupModuleList").split()

    SetCommon(LibraryClass, XmlLibraryClass)

    return LibraryClass
+
+
## Load a new Build Option class object.
#
# Read an input XML BuildOption DOM object and return the BuildOptionClass
# object it describes.
#
# @param XmlBuildOption  A child XML DOM object in a Common XML DOM.
#
# @retval BuildOption  A new BuildOptionClass object created from
#                      XmlBuildOption.
#
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
    BuildOption = BuildOptionClass()

    # The option text itself is the element's data; the rest are attributes
    BuildOption.Option = XmlElementData(XmlBuildOption)
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, "BuildTargets").split()
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, "ToolChainFamily")
    BuildOption.TagName = XmlAttribute(XmlBuildOption, "TagName")
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, "ToolCode")
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, "SupArchList").split()

    return BuildOption
+
+
## Load a new User Extensions class object.
#
# Read an input XML UserExtensions DOM object and return the
# UserExtensionsClass object it describes.
#
# @param XmlUserExtensions  A child XML DOM object in a Common XML DOM.
#
# @retval UserExtensions  A new UserExtensionsClass object created from
#                         XmlUserExtensions.
#
def LoadUserExtensions(XmlUserExtensions):
    UserExtensions = UserExtensionsClass()

    # UserID and Identifier are attributes; the payload is the element's data
    UserExtensions.UserID = XmlAttribute(XmlUserExtensions, "UserID")
    UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, "Identifier")
    UserExtensions.Content = XmlElementData(XmlUserExtensions)

    return UserExtensions
+
+
## Store content to a text file object.
#
# Write the given content into an already-open, writable text file object.
# The content is also echoed through EdkLogger so it shows on screen when
# verbose logging is enabled.
#
# @param TextFile The open, writable text file object.
# @param Content  The string to be written to the text file.
#
def StoreTextFile(TextFile, Content):
    EdkLogger.verbose(Content)
    TextFile.write(Content)
+
+
## Add item to a section.
#
# Append Item to the list kept for CPU architecture Arch in the Section
# dictionary, creating the list on first use and skipping exact duplicates.
#
# @param Section  Section dictionary indexed by CPU architecture.
# @param Arch     CPU architecture: Ia32, X64, Ipf, ARM, Ebc or Common.
# @param Item     The item to be added to the section dictionary.
#
def AddToSection(Section, Arch, Item):
    Items = Section.setdefault(Arch, [])
    if Item not in Items:
        Items.append(Item)
+
+
## Get section contents.
#
# Build the text of the section named SectionName: Method is applied to each
# object in ObjectList to obtain one item line, the items are grouped per
# supported architecture (or "common" when an object lists none), and the
# groups are rendered as "[SectionName.Arch]" blocks.
#
# @param SectionName  The name of the section.
# @param Method       A function returning a string item for an object.
# @param ObjectList   The list of objects.
#
# @retval Section  The string content of the section.
#
def GetSection(SectionName, Method, ObjectList):
    SupportedArches = ["common", "Ia32", "X64", "Ipf", "Ebc", "ARM"]
    SectionDict = {}
    for Object in ObjectList:
        Item = Method(Object)
        if Item == "":
            continue
        Entry = " %s" % Item
        Arches = Object.SupArchList
        if len(Arches) == 0:
            # No architecture restriction: file goes into the common section
            AddToSection(SectionDict, "common", Entry)
        else:
            for Arch in SupportedArches:
                if Arch.upper() in Arches:
                    AddToSection(SectionDict, Arch, Entry)

    # Render each non-empty per-arch group in the fixed SupportedArches order
    Parts = []
    for Arch in SupportedArches:
        Body = "\n".join(SectionDict.get(Arch, []))
        if Body != "":
            Parts.append("[%s.%s]\n%s\n\n" % (SectionName, Arch, Body))
    if not Parts:
        return ""
    return "".join(Parts) + "\n"
+
+
## Store file header to a text file.
#
# Write a standard file header to a text file object. The content includes
# the copyright, abstract, description and license taken from the given
# CommonHeader class object, formatted as a "#/** @file ... #**/" comment
# block with every line prefixed by "# ".
#
# @param TextFile      The text file object.
# @param CommonHeader  The source CommonHeader class object.
#
def StoreHeader(TextFile, CommonHeader):
    CopyRight = CommonHeader.Copyright
    Abstract = CommonHeader.Abstract
    Description = CommonHeader.Description
    License = CommonHeader.License

    Header = "#/** @file\n#\n"
    Header += "# " + Abstract + "\n#\n"
    # Multi-line description: re-prefix every continuation line with "# "
    Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
    Header += "# " + CopyRight + "\n#\n"
    # NOTE(review): replace(" ", " ") is a no-op as written — it was likely
    # intended to collapse runs of spaces (e.g. replace("  ", " ")); verify
    # against the original source before changing.
    Header += "# " + License.replace("\n", "\n# ").replace(" ", " ")
    Header += "\n#\n#**/\n\n"

    StoreTextFile(TextFile, Header)
+
## Store Defines section to a text file.
#
# Write a [Defines] section to a text file object. Each (Tag, Value) tuple
# in DefinesTupleList becomes one "Tag = Value" line.
#
# @param TextFile          The text file object.
# @param DefinesTupleList  The list of (Tag, Value) tuples, one per item.
#
def StoreDefinesSection(TextFile, DefinesTupleList):
    Lines = ["[Defines]"]
    for DefineItem in DefinesTupleList:
        Lines.append(" %-30s = %s" % DefineItem)

    # Trailing blank lines separate this section from the next one
    StoreTextFile(TextFile, "\n".join(Lines) + "\n\n\n")
+
+
## Return one User Extension section.
#
# Render the given UserExtensions class object as one
# "[UserExtensions.UserID.Identifier]" section string.
#
# @param UserExtensions  An input UserExtensions class object.
#
# @retval UserExtensionSection  A section string representing the object.
#
def GetUserExtensions(UserExtensions):
    return "[UserExtensions.%s.%s]\n %s\n\n" % (
        UserExtensions.UserID,
        UserExtensions.Identifier,
        UserExtensions.Content,
    )
+
## Regular expression to match an equation.
mReEquation = re.compile(r"\s*(\S+)\s*=\s*(\S*)\s*")

## Return values matching information in a text file.
#
# Parse the text file and return a list of values corresponding to an input
# tag tuple. Tags are matched case-insensitively (upper-cased); text after a
# '#' on a line is ignored. In case of an I/O error, a list of empty strings
# is returned.
#
# @param FileName  The file name of the text file.
# @param TagTuple  A tuple of tags used as keys to the values.
#
# @retval ValueTuple  A list (same length/order as TagTuple) of the values
#                     found, "" for tags not present.
#
def GetTextFileInfo(FileName, TagTuple):
    ValueTuple = [""] * len(TagTuple)
    try:
        TextFile = open(FileName)
        try:
            for Line in TextFile:
                # Strip any trailing comment before matching the equation
                Line = Line.split("#", 1)[0]
                MatchEquation = mReEquation.match(Line)
                if MatchEquation:
                    Tag = MatchEquation.group(1).upper()
                    Value = MatchEquation.group(2)
                    for Index in range(len(TagTuple)):
                        if TagTuple[Index] == Tag:
                            ValueTuple[Index] = Value
        finally:
            # Close the handle explicitly; the original leaked it
            TextFile.close()
    except (IOError, OSError):
        # Narrowed from a bare "except:" that also swallowed unrelated
        # errors (even KeyboardInterrupt); only I/O failures are expected.
        EdkLogger.info("IO Error in reading file %s" % FileName)

    return ValueTuple
+
+
## Return a value tuple matching information in an XML file.
#
# Parse the XML file and return a tuple of element values corresponding to
# the input tag tuple, one value per tag.
#
# @param FileName  The file name of the XML file.
# @param TagTuple  A tuple of tags used as keys to the values.
#
# @retval ValueTuple  The tuple of values corresponding to the tag tuple.
#
def GetXmlFileInfo(FileName, TagTuple):
    XmlDom = XmlParseFile(FileName)
    return tuple(XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple)
+
+
## Parse migration command line options
#
# Use the standard Python module optparse to parse the command line options
# of a migration tool. Exactly one input file is required; the output file
# is taken from -o, or derived from the input file name when -a is given.
#
# @param Source    The source file type.
# @param Destinate The destination file type.
#
# @retval Options   An optparse object containing the parsed options.
# @retval InputFile Path of the source file to be migrated.
#
def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
    # use clearer usage to override default usage message
    UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
    Version = "%s Version %.2f" % (ToolName, VersionNumber)
    Copyright = "Copyright (c) 2007, Intel Corporation. All rights reserved."

    Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)
    Parser.add_option("-o", "--output", dest="OutputFile", help="The name of the %s file to be created." % Destinate)
    Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False, help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
    # NOTE(review): type=None is redundant with action="store_true" — optparse
    # ignores it; kept as-is to avoid any behavior change.
    Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
    Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")

    Options, Args = Parser.parse_args()

    # Set logging level (default INFO when neither -v nor -q is given)
    if Options.verbose:
        EdkLogger.setLevel(EdkLogger.VERBOSE)
    elif Options.quiet:
        EdkLogger.setLevel(EdkLogger.QUIET)
    else:
        EdkLogger.setLevel(EdkLogger.INFO)

    # error check: exactly one positional input file is required
    if len(Args) == 0:
        raise MigrationError(PARAMETER_MISSING, name="Input file", usage=Parser.get_usage())
    if len(Args) > 1:
        raise MigrationError(PARAMETER_INVALID, name="Too many input files", usage=Parser.get_usage())

    InputFile = Args[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)

    # -o and -a are mutually exclusive; one of them must be supplied
    if Options.OutputFile:
        if Options.AutoWrite:
            raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
    else:
        if Options.AutoWrite:
            # Derive the output name from the input name and the target type
            Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()
        else:
            raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())

    return Options, InputFile
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script. This module only provides shared utilities, so there
# is no standalone behavior.
if __name__ == '__main__':
    pass
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py new file mode 100644 index 0000000000..14f6550f29 --- /dev/null +++ b/BaseTools/Source/Python/Common/Misc.py @@ -0,0 +1,1327 @@ +## @file +# Common routines used by all tools +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import sys +import string +import thread +import threading +import time +import re +import cPickle +from UserDict import IterableUserDict +from UserList import UserList + +from Common import EdkLogger as EdkLogger +from Common import GlobalData as GlobalData + +from BuildToolError import * + +## Regular expression used to find out place holders in string template +gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE) + +## Dictionary used to store file time stamp for quick re-access +gFileTimeStampCache = {} # {file path : file time stamp} + +## Dictionary used to store dependencies of files +gDependencyDatabase = {} # arch : {file path : [dependent files list]} + +## callback routine for processing variable option +# +# This function can be used to process variable number of option values. The +# typical usage of it is specify architecure list on command line. +# (e.g. 
<tool> -a IA32 X64 IPF) +# +# @param Option Standard callback function parameter +# @param OptionString Standard callback function parameter +# @param Value Standard callback function parameter +# @param Parser Standard callback function parameter +# +# @retval +# +def ProcessVariableArgument(Option, OptionString, Value, Parser): + assert Value is None + Value = [] + RawArgs = Parser.rargs + while RawArgs: + Arg = RawArgs[0] + if (Arg[:2] == "--" and len(Arg) > 2) or \ + (Arg[:1] == "-" and len(Arg) > 1 and Arg[1] != "-"): + break + Value.append(Arg) + del RawArgs[0] + setattr(Parser.values, Option.dest, Value) + +## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C structure style +# +# @param Guid The GUID string +# +# @retval string The GUID string in C structure style +# +def GuidStringToGuidStructureString(Guid): + GuidList = Guid.split('-') + Result = '{' + for Index in range(0,3,1): + Result = Result + '0x' + GuidList[Index] + ', ' + Result = Result + '{0x' + GuidList[3][0:2] + ', 0x' + GuidList[3][2:4] + for Index in range(0,12,2): + Result = Result + ', 0x' + GuidList[4][Index:Index+2] + Result += '}}' + return Result + +## Convert GUID structure in byte array to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +# +# @param GuidValue The GUID value in byte array +# +# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format +# +def GuidStructureByteArrayToGuidString(GuidValue): + guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "") + guidValueList = guidValueString.split(",") + if len(guidValueList) != 16: + return '' + #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue) + try: + return "%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x" % ( + int(guidValueList[3], 16), + int(guidValueList[2], 16), + int(guidValueList[1], 16), + int(guidValueList[0], 16), + int(guidValueList[5], 16), + int(guidValueList[4], 16), + int(guidValueList[7], 
16), + int(guidValueList[6], 16), + int(guidValueList[8], 16), + int(guidValueList[9], 16), + int(guidValueList[10], 16), + int(guidValueList[11], 16), + int(guidValueList[12], 16), + int(guidValueList[13], 16), + int(guidValueList[14], 16), + int(guidValueList[15], 16) + ) + except: + return '' + +## Convert GUID string in C structure style to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx +# +# @param GuidValue The GUID value in C structure format +# +# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format +# +def GuidStructureStringToGuidString(GuidValue): + guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "") + guidValueList = guidValueString.split(",") + if len(guidValueList) != 11: + return '' + #EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue) + try: + return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % ( + int(guidValueList[0], 16), + int(guidValueList[1], 16), + int(guidValueList[2], 16), + int(guidValueList[3], 16), + int(guidValueList[4], 16), + int(guidValueList[5], 16), + int(guidValueList[6], 16), + int(guidValueList[7], 16), + int(guidValueList[8], 16), + int(guidValueList[9], 16), + int(guidValueList[10], 16) + ) + except: + return '' + +## Convert GUID string in C structure style to xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx +# +# @param GuidValue The GUID value in C structure format +# +# @retval string The GUID value in xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx format +# +def GuidStructureStringToGuidValueName(GuidValue): + guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "") + guidValueList = guidValueString.split(",") + if len(guidValueList) != 11: + EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue) + return "%08x_%04x_%04x_%02x%02x_%02x%02x%02x%02x%02x%02x" % ( + int(guidValueList[0], 16), + int(guidValueList[1], 16), + int(guidValueList[2], 16), + int(guidValueList[3], 16), + int(guidValueList[4], 16), + 
int(guidValueList[5], 16), + int(guidValueList[6], 16), + int(guidValueList[7], 16), + int(guidValueList[8], 16), + int(guidValueList[9], 16), + int(guidValueList[10], 16) + ) + +## Create directories +# +# @param Directory The directory name +# +def CreateDirectory(Directory): + if Directory == None or Directory.strip() == "": + return True + try: + if not os.access(Directory, os.F_OK): + os.makedirs(Directory) + except: + return False + return True + +## Remove directories, including files and sub-directories in it +# +# @param Directory The directory name +# +def RemoveDirectory(Directory, Recursively=False): + if Directory == None or Directory.strip() == "" or not os.path.exists(Directory): + return + if Recursively: + CurrentDirectory = os.getcwd() + os.chdir(Directory) + for File in os.listdir("."): + if os.path.isdir(File): + RemoveDirectory(File, Recursively) + else: + os.remove(File) + os.chdir(CurrentDirectory) + os.rmdir(Directory) + +## Check if given file is changed or not +# +# This method is used to check if a file is changed or not between two build +# actions. It makes use a cache to store files timestamp. +# +# @param File The path of file +# +# @retval True If the given file is changed, doesn't exist, or can't be +# found in timestamp cache +# @retval False If the given file is changed +# +def IsChanged(File): + if not os.path.exists(File): + return True + + FileState = os.stat(File) + TimeStamp = FileState[-2] + + if File in gFileTimeStampCache and TimeStamp == gFileTimeStampCache[File]: + FileChanged = False + else: + FileChanged = True + gFileTimeStampCache[File] = TimeStamp + + return FileChanged + +## Store content in file +# +# This method is used to save file only when its content is changed. This is +# quite useful for "make" system to decide what will be re-built and what won't. 
+# +# @param File The path of file +# @param Content The new content of the file +# @param IsBinaryFile The flag indicating if the file is binary file or not +# +# @retval True If the file content is changed and the file is renewed +# @retval False If the file content is the same +# +def SaveFileOnChange(File, Content, IsBinaryFile=True): + if not IsBinaryFile: + Content = Content.replace("\n", os.linesep) + + if os.path.exists(File): + try: + if Content == open(File, "rb").read(): + return False + except: + EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File) + + CreateDirectory(os.path.dirname(File)) + try: + if GlobalData.gIsWindows: + try: + from PyUtility import SaveFileToDisk + if not SaveFileToDisk(File, Content): + EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File) + except: + Fd = open(File, "wb") + Fd.write(Content) + Fd.close() + else: + Fd = open(File, "wb") + Fd.write(Content) + Fd.close() + except: + EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File) + + return True + +## Make a Python object persistent on file system +# +# @param Data The object to be stored in file +# @param File The path of file to store the object +# +def DataDump(Data, File): + Fd = None + try: + Fd = open(File, 'wb') + cPickle.dump(Data, Fd, cPickle.HIGHEST_PROTOCOL) + except: + EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False) + finally: + if Fd != None: + Fd.close() + +## Restore a Python object from a file +# +# @param File The path of file stored the object +# +# @retval object A python object +# @retval None If failure in file operation +# +def DataRestore(File): + Data = None + Fd = None + try: + Fd = open(File, 'rb') + Data = cPickle.load(Fd) + except Exception, e: + EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e))) + Data = None + finally: + if Fd != None: + Fd.close() + return Data + +## Retrieve and cache the real path name in file system +# +# @param Root The root directory of path relative to +# +# @retval str 
The path string if the path exists +# @retval None If path doesn't exist +# +class DirCache: + _CACHE_ = {} + + def __init__(self, Root): + self._Root = Root + for F in os.listdir(Root): + self._CACHE_[F.upper()] = F + + # =[] operator + def __getitem__(self, Path): + Path = Path[len(os.path.commonprefix([Path, self._Root])):] + if not Path: + return self._Root + if Path and Path[0] == os.path.sep: + Path = Path[1:] + Path = Path.upper() + if Path in self._CACHE_: + return os.path.join(self._Root, self._CACHE_[Path]) + + IndexList = [] + LastSepIndex = -1 + SepIndex = Path.find(os.path.sep) + while SepIndex > -1: + Parent = Path[:SepIndex] + if Parent not in self._CACHE_: + break + LastSepIndex = SepIndex + SepIndex = Path.find(os.path.sep, LastSepIndex + 1) + + if LastSepIndex == -1: + return None + + Cwd = os.getcwd() + os.chdir(self._Root) + SepIndex = LastSepIndex + while SepIndex > -1: + ParentKey = Path[:SepIndex] + if ParentKey not in self._CACHE_: + os.chdir(Cwd) + return None + + ParentDir = self._CACHE_[ParentKey] + for F in os.listdir(ParentDir): + Dir = os.path.join(ParentDir, F) + self._CACHE_[Dir.upper()] = Dir + + SepIndex = Path.find(os.path.sep, SepIndex + 1) + + os.chdir(Cwd) + if Path not in self._CACHE_: + return None + return os.path.join(self._Root, self._CACHE_[Path]) + +## Get all files of a directory +# +# @param Root: Root dir +# @param SkipList : The files need be skipped +# +# @retval A list of all files +# +def GetFiles(Root, SkipList=None, FullPath = True): + OriPath = Root + FileList = [] + for Root, Dirs, Files in os.walk(Root): + if SkipList: + for Item in SkipList: + if Item in Dirs: + Dirs.remove(Item) + + for File in Files: + File = os.path.normpath(os.path.join(Root, File)) + if not FullPath: + File = File[len(OriPath) + 1:] + FileList.append(File) + + return FileList + +## Check if gvien file exists or not +# +# @param File File name or path to be checked +# @param Dir The directory the file is relative to +# +# @retval True if 
file exists +# @retval False if file doesn't exists +# +def ValidFile(File, Ext=None): + if Ext != None: + Dummy, FileExt = os.path.splitext(File) + if FileExt.lower() != Ext.lower(): + return False + if not os.path.exists(File): + return False + return True + +def RealPath(File, Dir='', OverrideDir=''): + NewFile = os.path.normpath(os.path.join(Dir, File)) + NewFile = GlobalData.gAllFiles[NewFile] + if not NewFile and OverrideDir: + NewFile = os.path.normpath(os.path.join(OverrideDir, File)) + NewFile = GlobalData.gAllFiles[NewFile] + return NewFile + +def RealPath2(File, Dir='', OverrideDir=''): + NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))] + if NewFile: + if Dir: + if Dir[-1] == os.path.sep: + return NewFile[len(Dir):], NewFile[0:len(Dir)] + else: + return NewFile[len(Dir)+1:], NewFile[0:len(Dir)] + else: + return NewFile, '' + + if OverrideDir: + NewFile = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))] + if NewFile: + return NewFile[len(OverrideDir)+1:], NewFile[0:len(OverrideDir)] + return None, None + +## Check if gvien file exists or not +# +# +def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''): + NewFile = File + if Ext != None: + Dummy, FileExt = os.path.splitext(File) + if FileExt.lower() != Ext.lower(): + return False, File + + # Replace the R8 macros + if OverrideDir != '' and OverrideDir != None: + if OverrideDir.find('$(EFI_SOURCE)') > -1: + OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource) + if OverrideDir.find('$(EDK_SOURCE)') > -1: + OverrideDir = OverrideDir.replace('$(EDK_SOURCE)', EdkSource) + + # Replace the default dir to current dir + if Dir == '.': + Dir = os.getcwd() + Dir = Dir[len(Workspace)+1:] + + # First check if File has R8 definition itself + if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1: + NewFile = File.replace('$(EFI_SOURCE)', EfiSource) + NewFile = NewFile.replace('$(EDK_SOURCE)', 
EdkSource) + NewFile = AllFiles[os.path.normpath(NewFile)] + if NewFile != None: + return True, NewFile + + # Second check the path with override value + if OverrideDir != '' and OverrideDir != None: + NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))] + if NewFile != None: + return True, NewFile + + # Last check the path with normal definitions + File = os.path.join(Dir, File) + NewFile = AllFiles[os.path.normpath(File)] + if NewFile != None: + return True, NewFile + + return False, File + +## Check if gvien file exists or not +# +# +def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''): + # Replace the R8 macros + if OverrideDir != '' and OverrideDir != None: + if OverrideDir.find('$(EFI_SOURCE)') > -1: + OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource) + if OverrideDir.find('$(EDK_SOURCE)') > -1: + OverrideDir = OverrideDir.replace('$(EDK_SOURCE)', EdkSource) + + # Replace the default dir to current dir + # Dir is current module dir related to workspace + if Dir == '.': + Dir = os.getcwd() + Dir = Dir[len(Workspace)+1:] + + NewFile = File + RelaPath = AllFiles[os.path.normpath(Dir)] + NewRelaPath = RelaPath + + while(True): + # First check if File has R8 definition itself + if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1: + File = File.replace('$(EFI_SOURCE)', EfiSource) + File = File.replace('$(EDK_SOURCE)', EdkSource) + NewFile = AllFiles[os.path.normpath(File)] + if NewFile != None: + NewRelaPath = os.path.dirname(NewFile) + File = os.path.basename(NewFile) + #NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1] + break + + # Second check the path with override value + if OverrideDir != '' and OverrideDir != None: + NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))] + if NewFile != None: + #NewRelaPath = os.path.dirname(NewFile) + NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", 
'').replace("../", '')) - 1] + break + + # Last check the path with normal definitions + NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))] + if NewFile != None: + break + + # No file found + break + + return NewRelaPath, RelaPath, File + + +def GetRelPath(Path1, Path2): + FileName = os.path.basename(Path2) + L1 = os.path.normpath(Path1).split(os.path.normpath('/')) + L2 = os.path.normpath(Path2).split(os.path.normpath('/')) + for Index in range(0, len(L1)): + if L1[Index] != L2[Index]: + FileName = '../' * (len(L1) - Index) + for Index2 in range(Index, len(L2)): + FileName = os.path.join(FileName, L2[Index2]) + break + return os.path.normpath(FileName) + + +## Get GUID value from given packages +# +# @param CName The CName of the GUID +# @param PackageList List of packages looking-up in +# +# @retval GuidValue if the CName is found in any given package +# @retval None if the CName is not found in all given packages +# +def GuidValue(CName, PackageList): + for P in PackageList: + if CName in P.Guids: + return P.Guids[CName] + return None + +## Get Protocol value from given packages +# +# @param CName The CName of the GUID +# @param PackageList List of packages looking-up in +# +# @retval GuidValue if the CName is found in any given package +# @retval None if the CName is not found in all given packages +# +def ProtocolValue(CName, PackageList): + for P in PackageList: + if CName in P.Protocols: + return P.Protocols[CName] + return None + +## Get PPI value from given packages +# +# @param CName The CName of the GUID +# @param PackageList List of packages looking-up in +# +# @retval GuidValue if the CName is found in any given package +# @retval None if the CName is not found in all given packages +# +def PpiValue(CName, PackageList): + for P in PackageList: + if CName in P.Ppis: + return P.Ppis[CName] + return None + +## A string template class +# +# This class implements a template for string replacement. 
A string template +# looks like following +# +# ${BEGIN} other_string ${placeholder_name} other_string ${END} +# +# The string between ${BEGIN} and ${END} will be repeated as many times as the +# length of "placeholder_name", which is a list passed through a dict. The +# "placeholder_name" is the key name of the dict. The ${BEGIN} and ${END} can +# be not used and, in this case, the "placeholder_name" must not a list and it +# will just be replaced once. +# +class TemplateString(object): + _REPEAT_START_FLAG = "BEGIN" + _REPEAT_END_FLAG = "END" + + class Section(object): + _LIST_TYPES = [type([]), type(set()), type((0,))] + + def __init__(self, TemplateSection, PlaceHolderList): + self._Template = TemplateSection + self._PlaceHolderList = [] + + # Split the section into sub-sections according to the position of placeholders + if PlaceHolderList: + self._SubSectionList = [] + SubSectionStart = 0 + # + # The placeholders passed in must be in the format of + # + # PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint + # + for PlaceHolder,Start,End in PlaceHolderList: + self._SubSectionList.append(TemplateSection[SubSectionStart:Start]) + self._SubSectionList.append(TemplateSection[Start:End]) + self._PlaceHolderList.append(PlaceHolder) + SubSectionStart = End + if SubSectionStart < len(TemplateSection): + self._SubSectionList.append(TemplateSection[SubSectionStart:]) + else: + self._SubSectionList = [TemplateSection] + + def __str__(self): + return self._Template + " : " + str(self._PlaceHolderList) + + def Instantiate(self, PlaceHolderValues): + RepeatTime = -1 + RepeatPlaceHolders = {} + NonRepeatPlaceHolders = {} + + for PlaceHolder in self._PlaceHolderList: + if PlaceHolder not in PlaceHolderValues: + continue + Value = PlaceHolderValues[PlaceHolder] + if type(Value) in self._LIST_TYPES: + if RepeatTime < 0: + RepeatTime = len(Value) + elif RepeatTime != len(Value): + EdkLogger.error( + "TemplateString", + PARAMETER_INVALID, + "${%s} has different repeat 
time from others!" % PlaceHolder, + ExtraData=str(self._Template) + ) + RepeatPlaceHolders["${%s}" % PlaceHolder] = Value + else: + NonRepeatPlaceHolders["${%s}" % PlaceHolder] = Value + + if NonRepeatPlaceHolders: + StringList = [] + for S in self._SubSectionList: + if S not in NonRepeatPlaceHolders: + StringList.append(S) + else: + StringList.append(str(NonRepeatPlaceHolders[S])) + else: + StringList = self._SubSectionList + + if RepeatPlaceHolders: + TempStringList = [] + for Index in range(RepeatTime): + for S in StringList: + if S not in RepeatPlaceHolders: + TempStringList.append(S) + else: + TempStringList.append(str(RepeatPlaceHolders[S][Index])) + StringList = TempStringList + + return "".join(StringList) + + ## Constructor + def __init__(self, Template=None): + self.String = '' + self._Template = Template + self._TemplateSectionList = self._Parse(Template) + + ## str() operator + # + # @retval string The string replaced + # + def __str__(self): + return self.String + + ## Split the template string into fragments per the ${BEGIN} and ${END} flags + # + # @retval list A list of TemplateString.Section objects + # + def _Parse(self, Template): + SectionStart = 0 + SearchFrom = 0 + MatchEnd = 0 + PlaceHolderList = [] + TemplateSectionList = [] + while Template: + MatchObj = gPlaceholderPattern.search(Template, SearchFrom) + if not MatchObj: + if MatchEnd < len(Template): + TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList) + TemplateSectionList.append(TemplateSection) + break + + MatchString = MatchObj.group(1) + MatchStart = MatchObj.start() + MatchEnd = MatchObj.end() + + if MatchString == self._REPEAT_START_FLAG: + if MatchStart > SectionStart: + TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList) + TemplateSectionList.append(TemplateSection) + SectionStart = MatchEnd + PlaceHolderList = [] + elif MatchString == self._REPEAT_END_FLAG: + TemplateSection = 
TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList) + TemplateSectionList.append(TemplateSection) + SectionStart = MatchEnd + PlaceHolderList = [] + else: + PlaceHolderList.append((MatchString, MatchStart - SectionStart, MatchEnd - SectionStart)) + SearchFrom = MatchEnd + return TemplateSectionList + + ## Replace the string template with dictionary of placeholders and append it to previous one + # + # @param AppendString The string template to append + # @param Dictionary The placeholder dictionaries + # + def Append(self, AppendString, Dictionary=None): + if Dictionary: + SectionList = self._Parse(AppendString) + self.String += "".join([S.Instantiate(Dictionary) for S in SectionList]) + else: + self.String += AppendString + + ## Replace the string template with dictionary of placeholders + # + # @param Dictionary The placeholder dictionaries + # + # @retval str The string replaced with placeholder values + # + def Replace(self, Dictionary=None): + return "".join([S.Instantiate(Dictionary) for S in self._TemplateSectionList]) + +## Progress indicator class +# +# This class makes use of thread to print progress on console. 
+# +class Progressor: + # for avoiding deadloop + _StopFlag = None + _ProgressThread = None + _CheckInterval = 0.25 + + ## Constructor + # + # @param OpenMessage The string printed before progress charaters + # @param CloseMessage The string printed after progress charaters + # @param ProgressChar The charater used to indicate the progress + # @param Interval The interval in seconds between two progress charaters + # + def __init__(self, OpenMessage="", CloseMessage="", ProgressChar='.', Interval=1.0): + self.PromptMessage = OpenMessage + self.CodaMessage = CloseMessage + self.ProgressChar = ProgressChar + self.Interval = Interval + if Progressor._StopFlag == None: + Progressor._StopFlag = threading.Event() + + ## Start to print progress charater + # + # @param OpenMessage The string printed before progress charaters + # + def Start(self, OpenMessage=None): + if OpenMessage != None: + self.PromptMessage = OpenMessage + Progressor._StopFlag.clear() + if Progressor._ProgressThread == None: + Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry) + Progressor._ProgressThread.setDaemon(False) + Progressor._ProgressThread.start() + + ## Stop printing progress charater + # + # @param CloseMessage The string printed after progress charaters + # + def Stop(self, CloseMessage=None): + OriginalCodaMessage = self.CodaMessage + if CloseMessage != None: + self.CodaMessage = CloseMessage + self.Abort() + self.CodaMessage = OriginalCodaMessage + + ## Thread entry method + def _ProgressThreadEntry(self): + sys.stdout.write(self.PromptMessage + " ") + sys.stdout.flush() + TimeUp = 0.0 + while not Progressor._StopFlag.isSet(): + if TimeUp <= 0.0: + sys.stdout.write(self.ProgressChar) + sys.stdout.flush() + TimeUp = self.Interval + time.sleep(self._CheckInterval) + TimeUp -= self._CheckInterval + sys.stdout.write(" " + self.CodaMessage + "\n") + sys.stdout.flush() + + ## Abort the progress display + @staticmethod + def Abort(): + if Progressor._StopFlag != 
None: + Progressor._StopFlag.set() + if Progressor._ProgressThread != None: + Progressor._ProgressThread.join() + Progressor._ProgressThread = None + +## A dict which can access its keys and/or values orderly +# +# The class implements a new kind of dict which its keys or values can be +# accessed in the order they are added into the dict. It guarantees the order +# by making use of an internal list to keep a copy of keys. +# +class sdict(IterableUserDict): + ## Constructor + def __init__(self): + IterableUserDict.__init__(self) + self._key_list = [] + + ## [] operator + def __setitem__(self, key, value): + if key not in self._key_list: + self._key_list.append(key) + IterableUserDict.__setitem__(self, key, value) + + ## del operator + def __delitem__(self, key): + self._key_list.remove(key) + IterableUserDict.__delitem__(self, key) + + ## used in "for k in dict" loop to ensure the correct order + def __iter__(self): + return self.iterkeys() + + ## len() support + def __len__(self): + return len(self._key_list) + + ## "in" test support + def __contains__(self, key): + return key in self._key_list + + ## indexof support + def index(self, key): + return self._key_list.index(key) + + ## insert support + def insert(self, key, newkey, newvalue, order): + index = self._key_list.index(key) + if order == 'BEFORE': + self._key_list.insert(index, newkey) + IterableUserDict.__setitem__(self, newkey, newvalue) + elif order == 'AFTER': + self._key_list.insert(index + 1, newkey) + IterableUserDict.__setitem__(self, newkey, newvalue) + + ## append support + def append(self, sdict): + for key in sdict: + if key not in self._key_list: + self._key_list.append(key) + IterableUserDict.__setitem__(self, key, sdict[key]) + + def has_key(self, key): + return key in self._key_list + + ## Empty the dict + def clear(self): + self._key_list = [] + IterableUserDict.clear(self) + + ## Return a copy of keys + def keys(self): + keys = [] + for key in self._key_list: + keys.append(key) + return 
keys + + ## Return a copy of values + def values(self): + values = [] + for key in self._key_list: + values.append(self[key]) + return values + + ## Return a copy of (key, value) list + def items(self): + items = [] + for key in self._key_list: + items.append((key, self[key])) + return items + + ## Iteration support + def iteritems(self): + return iter(self.items()) + + ## Keys interation support + def iterkeys(self): + return iter(self.keys()) + + ## Values interation support + def itervalues(self): + return iter(self.values()) + + ## Return value related to a key, and remove the (key, value) from the dict + def pop(self, key, *dv): + value = None + if key in self._key_list: + value = self[key] + self.__delitem__(key) + elif len(dv) != 0 : + value = kv[0] + return value + + ## Return (key, value) pair, and remove the (key, value) from the dict + def popitem(self): + key = self._key_list[-1] + value = self[key] + self.__delitem__(key) + return key, value + + def update(self, dict=None, **kwargs): + if dict != None: + for k, v in dict.items(): + self[k] = v + if len(kwargs): + for k, v in kwargs.items(): + self[k] = v + +## Dictionary with restricted keys +# +class rdict(dict): + ## Constructor + def __init__(self, KeyList): + for Key in KeyList: + dict.__setitem__(self, Key, "") + + ## []= operator + def __setitem__(self, key, value): + if key not in self: + EdkLogger.error("RestrictedDict", ATTRIBUTE_SET_FAILURE, "Key [%s] is not allowed" % key, + ExtraData=", ".join(dict.keys(self))) + dict.__setitem__(self, key, value) + + ## =[] operator + def __getitem__(self, key): + if key not in self: + return "" + return dict.__getitem__(self, key) + + ## del operator + def __delitem__(self, key): + EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="del") + + ## Empty the dict + def clear(self): + for Key in self: + self.__setitem__(Key, "") + + ## Return value related to a key, and remove the (key, value) from the dict + def pop(self, key, *dv): + 
EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="pop") + + ## Return (key, value) pair, and remove the (key, value) from the dict + def popitem(self): + EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="popitem") + +## Dictionary using prioritized list as key +# +class tdict: + _ListType = type([]) + _TupleType = type(()) + _Wildcard = 'COMMON' + _ValidWildcardList = ['COMMON', 'DEFAULT', 'ALL', '*', 'PLATFORM'] + + def __init__(self, _Single_=False, _Level_=2): + self._Level_ = _Level_ + self.data = {} + self._Single_ = _Single_ + + # =[] operator + def __getitem__(self, key): + KeyType = type(key) + RestKeys = None + if KeyType == self._ListType or KeyType == self._TupleType: + FirstKey = key[0] + if len(key) > 1: + RestKeys = key[1:] + elif self._Level_ > 1: + RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + else: + FirstKey = key + if self._Level_ > 1: + RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + + if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList: + FirstKey = self._Wildcard + + if self._Single_: + return self._GetSingleValue(FirstKey, RestKeys) + else: + return self._GetAllValues(FirstKey, RestKeys) + + def _GetSingleValue(self, FirstKey, RestKeys): + Value = None + #print "%s-%s" % (FirstKey, self._Level_) , + if self._Level_ > 1: + if FirstKey == self._Wildcard: + if FirstKey in self.data: + Value = self.data[FirstKey][RestKeys] + if Value == None: + for Key in self.data: + Value = self.data[Key][RestKeys] + if Value != None: break + else: + if FirstKey in self.data: + Value = self.data[FirstKey][RestKeys] + if Value == None and self._Wildcard in self.data: + #print "Value=None" + Value = self.data[self._Wildcard][RestKeys] + else: + if FirstKey == self._Wildcard: + if FirstKey in self.data: + Value = self.data[FirstKey] + if Value == None: + for Key in self.data: + Value = self.data[Key] + if Value != None: break + else: + if FirstKey in self.data: + Value = 
self.data[FirstKey] + elif self._Wildcard in self.data: + Value = self.data[self._Wildcard] + return Value + + def _GetAllValues(self, FirstKey, RestKeys): + Value = [] + if self._Level_ > 1: + if FirstKey == self._Wildcard: + for Key in self.data: + Value += self.data[Key][RestKeys] + else: + if FirstKey in self.data: + Value += self.data[FirstKey][RestKeys] + if self._Wildcard in self.data: + Value += self.data[self._Wildcard][RestKeys] + else: + if FirstKey == self._Wildcard: + for Key in self.data: + Value.append(self.data[Key]) + else: + if FirstKey in self.data: + Value.append(self.data[FirstKey]) + if self._Wildcard in self.data: + Value.append(self.data[self._Wildcard]) + return Value + + ## []= operator + def __setitem__(self, key, value): + KeyType = type(key) + RestKeys = None + if KeyType == self._ListType or KeyType == self._TupleType: + FirstKey = key[0] + if len(key) > 1: + RestKeys = key[1:] + else: + RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + else: + FirstKey = key + if self._Level_ > 1: + RestKeys = [self._Wildcard for i in range(0, self._Level_-1)] + + if FirstKey in self._ValidWildcardList: + FirstKey = self._Wildcard + + if FirstKey not in self.data and self._Level_ > 0: + self.data[FirstKey] = tdict(self._Single_, self._Level_ - 1) + + if self._Level_ > 1: + self.data[FirstKey][RestKeys] = value + else: + self.data[FirstKey] = value + + def SetGreedyMode(self): + self._Single_ = False + if self._Level_ > 1: + for Key in self.data: + self.data[Key].SetGreedyMode() + + def SetSingleMode(self): + self._Single_ = True + if self._Level_ > 1: + for Key in self.data: + self.data[Key].SetSingleMode() + +## Boolean chain list +# +class Blist(UserList): + def __init__(self, initlist=None): + UserList.__init__(self, initlist) + def __setitem__(self, i, item): + if item not in [True, False]: + if item == 0: + item = False + else: + item = True + self.data[i] = item + def _GetResult(self): + Value = True + for item in self.data: + 
Value &= item + return Value + Result = property(_GetResult) + +def ParseConsoleLog(Filename): + Opr = open(os.path.normpath(Filename), 'r') + Opw = open(os.path.normpath(Filename + '.New'), 'w+') + for Line in Opr.readlines(): + if Line.find('.efi') > -1: + Line = Line[Line.rfind(' ') : Line.rfind('.efi')].strip() + Opw.write('%s\n' % Line) + + Opr.close() + Opw.close() + +## check format of PCD value against its the datum type +# +# For PCD value setting +# +def CheckPcdDatum(Type, Value): + if Type == "VOID*": + if not ((Value.startswith('L"') or Value.startswith('"') and Value.endswith('"')) + or (Value.startswith('{') and Value.endswith('}')) + ): + return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\ + ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type) + elif Type == 'BOOLEAN': + if Value not in ['TRUE', 'FALSE']: + return False, "Invalid value [%s] of type [%s]; must be TRUE or FALSE" % (Value, Type) + elif type(Value) == type(""): + try: + Value = long(Value, 0) + except: + return False, "Invalid value [%s] of type [%s];"\ + " must be a hexadecimal, decimal or octal in C language format."\ + % (Value, Type) + + return True, "" + +## Split command line option string to list +# +# subprocess.Popen needs the args to be a sequence. 
Otherwise there's problem +# in non-windows platform to launch command +# +def SplitOption(OptionString): + OptionList = [] + LastChar = " " + OptionStart = 0 + QuotationMark = "" + for Index in range(0, len(OptionString)): + CurrentChar = OptionString[Index] + if CurrentChar in ['"', "'"]: + if QuotationMark == CurrentChar: + QuotationMark = "" + elif QuotationMark == "": + QuotationMark = CurrentChar + continue + elif QuotationMark: + continue + + if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]: + if Index > OptionStart: + OptionList.append(OptionString[OptionStart:Index-1]) + OptionStart = Index + LastChar = CurrentChar + OptionList.append(OptionString[OptionStart:]) + return OptionList + +def CommonPath(PathList): + P1 = min(PathList).split(os.path.sep) + P2 = max(PathList).split(os.path.sep) + for Index in xrange(min(len(P1), len(P2))): + if P1[Index] != P2[Index]: + return os.path.sep.join(P1[:Index]) + return os.path.sep.join(P1) + +class PathClass(object): + def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False, + Arch='COMMON', ToolChainFamily='', Target='', TagName='', ToolCode=''): + self.Arch = Arch + self.File = str(File) + if os.path.isabs(self.File): + self.Root = '' + self.AlterRoot = '' + else: + self.Root = str(Root) + self.AlterRoot = str(AlterRoot) + + # Remove any '.' and '..' 
in path + if self.Root: + self.Path = os.path.normpath(os.path.join(self.Root, self.File)) + self.Root = os.path.normpath(CommonPath([self.Root, self.Path])) + # eliminate the side-effect of 'C:' + if self.Root[-1] == ':': + self.Root += os.path.sep + # file path should not start with path separator + if self.Root[-1] == os.path.sep: + self.File = self.Path[len(self.Root):] + else: + self.File = self.Path[len(self.Root)+1:] + else: + self.Path = os.path.normpath(self.File) + + self.SubDir, self.Name = os.path.split(self.File) + self.BaseName, self.Ext = os.path.splitext(self.Name) + + if self.Root: + if self.SubDir: + self.Dir = os.path.join(self.Root, self.SubDir) + else: + self.Dir = self.Root + else: + self.Dir = self.SubDir + + if IsBinary: + self.Type = Type + else: + self.Type = self.Ext.lower() + + self.IsBinary = IsBinary + self.Target = Target + self.TagName = TagName + self.ToolCode = ToolCode + self.ToolChainFamily = ToolChainFamily + + self._Key = None + + ## Convert the object of this class to a string + # + # Convert member Path of the class to a string + # + # @retval string Formatted String + # + def __str__(self): + return self.Path + + ## Override __eq__ function + # + # Check whether PathClass are the same + # + # @retval False The two PathClass are different + # @retval True The two PathClass are the same + # + def __eq__(self, Other): + if type(Other) == type(self): + return self.Path == Other.Path + else: + return self.Path == str(Other) + + ## Override __hash__ function + # + # Use Path as key in hash table + # + # @retval string Key for hash table + # + def __hash__(self): + return hash(self.Path) + + def _GetFileKey(self): + if self._Key == None: + self._Key = self.Path.upper() # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target + return self._Key + + def Validate(self, Type='', CaseSensitive=True): + if GlobalData.gCaseInsensitive: + CaseSensitive = False + if Type and Type.lower() != self.Type: + return 
FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % (self.File, Type, self.Type) + + RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot) + if not RealRoot and not RealFile: + return FILE_NOT_FOUND, self.File + + ErrorCode = 0 + ErrorInfo = '' + if RealRoot != self.Root or RealFile != self.File: + if CaseSensitive and (RealFile != self.File or (RealRoot != self.Root and RealRoot != self.AlterRoot)): + ErrorCode = FILE_CASE_MISMATCH + ErrorInfo = self.File + '\n\t' + RealFile + " [in file system]" + + self.SubDir, self.Name = os.path.split(RealFile) + self.BaseName, self.Ext = os.path.splitext(self.Name) + if self.SubDir: + self.Dir = os.path.join(RealRoot, self.SubDir) + else: + self.Dir = RealRoot + self.File = RealFile + self.Root = RealRoot + self.Path = os.path.join(RealRoot, RealFile) + return ErrorCode, ErrorInfo + + Key = property(_GetFileKey) + +## +# +# This acts like the main() function for the script, unless it is 'import'ed into another +# script. +# +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/Common/Parsing.py b/BaseTools/Source/Python/Common/Parsing.py new file mode 100644 index 0000000000..755f7901b5 --- /dev/null +++ b/BaseTools/Source/Python/Common/Parsing.py @@ -0,0 +1,935 @@ +## @file
+# This file is used to define common parsing related functions used in parsing INF/DEC/DSC process
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from String import *
+from CommonDataClass.DataClass import *
+from DataType import *
+
## ParseContent
#
# Parse content of a DSC/INF/DEC file
#
# NOTE(review): this module-level function looks like an orphaned copy of a
# parser-class method: it references 'self' and several names that are never
# defined in this scope (LineNo, FileID, Filename, CurrentSection,
# SectionItemList, ArchList, ThirdList, IfDefList), so any call will raise
# NameError/UnboundLocalError.  Code left byte-identical; flagged for
# completion or removal.
#
def ParseContent(Lines, ):
    for Line in Lines:
        # BUG(review): LineNo is read before any assignment --
        # UnboundLocalError on the very first iteration.
        LineNo = LineNo + 1
        #
        # Remove comments at tail and remove spaces again
        #
        Line = CleanString(Line)
        if Line == '':
            continue

        #
        # Find a new section tab
        # First insert previous section items
        # And then parse the content of the new section
        #
        if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
            #
            # Insert items data of previous section
            #
            # BUG(review): 'self' is not defined in a module-level function;
            # this line can only work as a method of a parser class.
            self.InsertSectionItemsIntoDatabase(FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
            #
            # Parse the new section
            #
            SectionItemList = []
            ArchList = []
            ThirdList = []

            LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
            for Item in LineList:
                # Each section token has the form Name[.Arch[.Third]]
                ItemList = GetSplitValueList(Item, TAB_SPLIT)
                CurrentSection = ItemList[0]
                if CurrentSection.upper() not in self.KeyList:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                # Pad so ItemList[1] / ItemList[2] are always safe to read
                ItemList.append('')
                ItemList.append('')
                if len(ItemList) > 5:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                else:
                    if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                        EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo)
                    ArchList.append(ItemList[1].upper())
                    ThirdList.append(ItemList[2])

            continue

        #
        # Not in any defined section
        #
        if CurrentSection == TAB_UNKNOWN:
            ErrorMsg = "%s is not in any defined section" % Line
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo)

        #
        # Add a section item
        #
        SectionItemList.append([Line, LineNo])
        # End of parse
    #End of For
+
+
## ParseDefineMacro2
#
# Search the whole table for DEFINE macros and substitute their values
# (overridden by any global macros) into the record sets already fetched.
#
# @param Table:       The table holding the parsed file content
# @param RecordSets:  Dict of record lists; each record's Value1 is rewritten in place
# @param GlobalMacro: Dict of command-line/global macros; these win over DEFINEs
#
def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
    #
    # Collect every DEFINE recorded for section [Header] and friends
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
             where Model = %s
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = dict((Record[0], Record[1]) for Record in Table.Exec(SqlCommand))

    #
    # Global macros take precedence over the DEFINEs found above
    #
    Macros.update(GlobalMacro)

    #
    # Substitute the macros into every record already fetched
    #
    for Records in RecordSets.values():
        for Item in Records:
            Item[0] = ReplaceMacro(Item[0], Macros)
+
## ParseDefineMacro
#
# Search the whole table for DEFINE macros and rewrite, in the database,
# every Value1 that still contains a "$(...)" macro reference.
#
# @param Table:       The table holding the parsed file content
# @param GlobalMacro: Dict of command-line/global macros; these win over DEFINEs
#
def ParseDefineMacro(Table, GlobalMacro):
    #
    # Collect every DEFINE macro.
    # NOTE: the substitution cannot be pushed into SQL because the Sqlite
    # 3.3.4 bundled with Python 2.5 lacks the replace() function, so the
    # replacement is done in Python and written back row by row.
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
             where Model = %s
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = dict((Record[0], Record[1]) for Record in Table.Exec(SqlCommand))

    #
    # Global macros take precedence over the DEFINEs found above
    #
    Macros.update(GlobalMacro)

    #
    # Find every record whose Value1 still references a macro and update it
    #
    SqlCommand = """select ID, Value1 from %s
             where Model != %s
             and Value1 like '%%$(%%' and Value1 like '%%)%%'
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    for FoundRecord in Table.Exec(SqlCommand):
        NewValue = ReplaceMacro(FoundRecord[1], Macros)
        # Value is escaped via ConvertToSqlString2 before being embedded
        SqlCommand = """update %s set Value1 = '%s'
                 where ID = %s""" % (Table.Table, ConvertToSqlString2(NewValue), FoundRecord[0])
        Table.Exec(SqlCommand)
+
## QueryDefinesItem
#
# Look up one item of section [Defines] by name and return its value(s),
# falling back to the COMMON arch when nothing arch-specific is recorded.
#
# @param Table:         The Table to be executed
# @param Name:          The Name of the item of section [Defines]
# @param Arch:          The Arch of the item of section [Defines]
# @param BelongsToFile: ID of the file the item must belong to
#
# @retval List of values; [''] when nothing matches
#
def QueryDefinesItem(Table, Name, Arch, BelongsToFile):
    SqlCommand = """select Value2 from %s
             where Model = %s
             and Value1 = '%s'
             and Arch = '%s'
             and BelongsToFile = %s
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(Arch), BelongsToFile)
    RecordSet = Table.Exec(SqlCommand)
    if len(RecordSet) < 1:
        #
        # Nothing arch-specific found: retry against the COMMON arch
        #
        SqlCommand = """select Value2 from %s
             where Model = %s
             and Value1 = '%s'
             and Arch = '%s'
             and BelongsToFile = %s
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(TAB_ARCH_COMMON.upper()), BelongsToFile)
        RecordSet = Table.Exec(SqlCommand)

    if len(RecordSet) < 1:
        return ['']
    if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
        # LIBRARY_CLASS values are kept whole, never split on separators
        return [Record[0] for Record in RecordSet]
    RetVal = []
    for Record in RecordSet:
        RetVal.extend(GetSplitValueList(Record[0]))
    return RetVal
+
## QueryDefinesItem2
#
# Return every (Value1, Value2, StartLine) item of section [Defines] for
# the given arch, falling back to the COMMON arch when nothing
# arch-specific is recorded.
#
# @param Table:         The Table to be executed
# @param Arch:          The Arch of the items of section [Defines]
# @param BelongsToFile: ID of the file the items must belong to
#
# @retval RecordSet: A list of all matched records
#
def QueryDefinesItem2(Table, Arch, BelongsToFile):
    RecordSet = []
    # Try the requested arch first, then COMMON; the first query that
    # yields any rows wins.
    for CandidateArch in (Arch, TAB_ARCH_COMMON):
        SqlCommand = """select Value1, Value2, StartLine from %s
             where Model = %s
             and Arch = '%s'
             and BelongsToFile = %s
             and Enabled > -1""" % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(CandidateArch), BelongsToFile)
        RecordSet = Table.Exec(SqlCommand)
        if RecordSet:
            break
    return RecordSet
+
+##QueryDscItem
+#
+# Search all dsc item for a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+#
+# @retval RecordSet: A list of all matched records
+#
def QueryDscItem(Table, Model, BelongsToItem, BelongsToFile):
    ## Return all enabled DSC records of section type Model that belong to
    #  the given parent item and file.
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                 where Model = %s
                 and BelongsToItem = %s
                 and BelongsToFile = %s
                 and Enabled > -1""" % (Table.Table, Model, BelongsToItem, BelongsToFile)
    return Table.Exec(Query)
+
+##QueryDecItem
+#
+# Search all dec item for a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+#
+# @retval RecordSet: A list of all matched records
+#
def QueryDecItem(Table, Model, BelongsToItem):
    ## Return all enabled DEC records of section type Model under one parent item.
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                 where Model = %s
                 and BelongsToItem = %s
                 and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
    return Table.Exec(Query)
+
+##QueryInfItem
+#
+# Search all inf item for a specific section
+#
+# @param Table: The Table to be executed
+# @param Model: The type of section
+#
+# @retval RecordSet: A list of all matched records
+#
def QueryInfItem(Table, Model, BelongsToItem):
    ## Return all enabled INF records of section type Model under one parent item.
    Query = """select Value1, Arch, StartLine, ID, Value2 from %s
                 where Model = %s
                 and BelongsToItem = %s
                 and Enabled > -1""" % (Table.Table, Model, BelongsToItem)
    return Table.Exec(Query)
+
+## GetBuildOption
+#
+# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
+# Return (Family, ToolFlag, Flag)
+#
+# @param String: String with BuildOption statement
+# @param File: The file which defines build option, used in error report
+#
+# @retval tuple() A tuple structure as (Family, ToolChain, Flag)
+#
def GetBuildOption(String, File, LineNo = -1):
    ## Split "[<Family>:]<ToolFlag>=Flag" into (Family, ToolChain, Flag).
    #  A missing '=' is reported through RaiseParserError.
    if String.find(TAB_EQUAL_SPLIT) < 0:
        RaiseParserError(String, 'BuildOptions', File, '[<Family>:]<ToolFlag>=Flag', LineNo)
    Key, Flag = GetSplitValueList(String, TAB_EQUAL_SPLIT, MaxSplit = 1)
    Flag = Flag.strip()
    Family = ''
    ColonPos = Key.find(':')
    if ColonPos > -1:
        # An optional "<Family>:" prefix precedes the tool flag.
        Family = Key[:ColonPos].strip()
        ToolChain = Key[ColonPos + 1:].strip()
    else:
        ToolChain = Key.strip()

    return (Family, ToolChain, Flag)
+
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
+#
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    ## Parse Item[0] as "<LibraryClassKeyWord>|<LibraryInstance>"; Item[1]
    #  optionally carries the supported-module-type list.
    Parts = GetSplitValueList(Item[0])
    if len(Parts) != 2:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>|<LibraryInstance>')
    else:
        # Validate the instance path: must be a .inf file and must exist.
        CheckFileType(Parts[1], '.Inf', ContainerFile, 'library class instance', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Parts[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
    SupMod = Item[1] if Item[1] != '' else SUP_MODULE_LIST_STRING

    return (Parts[0], Parts[1], SupMod)
+
+## Get Library Class
+#
+# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]
+#
+# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
+#
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    ## Parse Item[0] as
    #  "<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]";
    #  padding with two extra separators guarantees Fields[1] and Fields[2] exist.
    Fields = GetSplitValueList(Item[0] + DataType.TAB_VALUE_SPLIT * 2)
    if len(Fields) > 5:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]')
    else:
        CheckFileType(Fields[1], '.Inf', ContainerFile, 'LibraryClasses', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Fields[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
        if Fields[2] != '':
            CheckPcdTokenInfo(Fields[2], 'LibraryClasses', ContainerFile, LineNo)
    SupMod = Item[1] if Item[1] != '' else SUP_MODULE_LIST_STRING

    return (Fields[0], Fields[1], Fields[2], SupMod)
+
+## CheckPcdTokenInfo
+#
+# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
+#
+# @param TokenInfoString: String to be checked
+# @param Section: Used for error report
+# @param File: Used for error report
+#
+# @retval True PcdTokenInfo is in correct format
+#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
    ## Check that TokenInfoString follows <TokenSpaceGuidCName>.<PcdCName>.
    #
    # @param TokenInfoString: String to be checked
    # @param Section: Section name, used in the error report
    # @param File: File name, used in the error report
    #
    # @retval True when the string is a well-formed two-part token;
    #         otherwise RaiseParserError is invoked (does not return normally).
    #
    Format = '<TokenSpaceGuidCName>.<PcdCName>'
    # Truthiness check replaces the original "!= '' and != None" pair
    # (PEP 8 discourages comparing to None with !=); behavior is unchanged
    # for both the empty string and None.
    if TokenInfoString:
        TokenInfoList = GetSplitValueList(TokenInfoString, TAB_SPLIT)
        if len(TokenInfoList) == 2:
            return True

    RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
+
+## Get Pcd
+#
+# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
+#
def GetPcd(Item, Type, ContainerFile, LineNo = -1):
    ## Parse a PCD entry; two extra separators are appended so the optional
    #  trailing fields always exist in the split result.
    TokenGuid = TokenName = Value = MaximumDatumSize = Token = ''
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)

    if 4 <= len(Fields) <= 6:
        Value, MaximumDatumSize, Token = Fields[1], Fields[2], Fields[3]
    else:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]', LineNo)

    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        (TokenGuid, TokenName) = GetSplitValueList(Fields[0], TAB_SPLIT)

    return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
+
+## Get FeatureFlagPcd
+#
+# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], Type)
+#
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo = -1):
    ## Parse "<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE".
    TokenGuid = TokenName = Value = ''
    Fields = GetSplitValueList(Item)
    if len(Fields) == 2:
        Value = Fields[1]
    else:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', LineNo)
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, Value, Type)
+
+## Get DynamicDefaultPcd
+#
+# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
+#
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo = -1):
    ## Parse "<Guid>.<Name>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]"; padding
    #  with two extra separators keeps the optional fields addressable.
    TokenGuid = TokenName = Value = DatumTyp = MaxDatumSize = ''
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if 4 <= len(Fields) <= 8:
        Value, DatumTyp, MaxDatumSize = Fields[1], Fields[2], Fields[3]
    else:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        (TokenGuid, TokenName) = GetSplitValueList(Fields[0], TAB_SPLIT)

    return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
+
+## Get DynamicHiiPcd
+#
+# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], List[4], List[5], Type)
+#
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo = -1):
    ## Parse a HII dynamic PCD entry:
    #  "<Guid>.<Name>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]".
    TokenGuid = TokenName = ''
    HiiFields = ['', '', '', '', '']
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if 6 <= len(Fields) <= 8:
        HiiFields = Fields[1:6]
    else:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]', LineNo)
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, HiiFields[0], HiiFields[1], HiiFields[2], HiiFields[3], HiiFields[4], Type)
+
+## Get DynamicVpdPcd
+#
+# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
+#
+# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+# @param ContainerFile: The file which describes the pcd, used for error report
+#
+# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], Type)
+#
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo = -1):
    ## Parse "<Guid>.<Name>|<VpdOffset>[|<MaximumDatumSize>]"; one extra
    #  separator is appended so the optional size field always exists.
    TokenGuid = TokenName = VpdOffset = MaxDatumSize = ''
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    if 3 <= len(Fields) <= 4:
        VpdOffset, MaxDatumSize = Fields[1], Fields[2]
    else:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]', LineNo)
    if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
        (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, VpdOffset, MaxDatumSize, Type)
+
+## GetComponent
+#
+# Parse block of the components defined in dsc file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param KeyValues: To store data after parsing
+#
+# @retval True Get component successfully
+#
def GetComponent(Lines, KeyValues):
    # State machine over pre-parsed records: findBlock is True while inside a
    # '{ ... }' component block; the other flags track which sub-section
    # (<LibraryClasses>, <BuildOptions>, <Pcds*>) is currently active.
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None          # component name of the block being parsed
    LibraryClassItem = []    # accumulated <LibraryClasses> lines
    BuildOption = []         # accumulated <BuildOptions> lines
    Pcd = []                 # accumulated (pcd-section-type, line) pairs

    for Line in Lines:
        # Each record's text is in its first field.
        Line = Line[0]

        #
        # Ignore !include statement
        #
        if Line.upper().find(TAB_INCLUDE.upper() + ' ') > -1 or Line.upper().find(TAB_DEFINE + ' ') > -1:
            continue

        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], DataType.TAB_COMMENT_SPLIT)

        #
        # Parse a block content
        #
        if findBlock:
            # Each sub-section tag switches exactly one flag on; the tag line
            # itself is consumed.
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail: flush the finished component and
                # reset all state for the next one.
                #
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue

        if findBlock:
            # Route the line into the bucket of the active sub-section.
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
        else:
            # A component without a '{...}' block: record it with empty lists.
            KeyValues.append([ListItem, [], [], []])

    return True
+
+## GetExec
+#
+# Parse a string with format "InfFilename [EXEC = ExecFilename]"
+# Return (InfFilename, ExecFilename)
+#
+# @param String: String with EXEC statement
+#
+# @retval tuple() A pair as (InfFilename, ExecFilename)
+#
def GetExec(String):
    ## Split "InfFilename [EXEC = ExecFilename]" into its two parts.
    #  Note: everything after the literal 'EXEC' keyword (including any '=')
    #  is returned, stripped, as the exec part.
    Marker = String.find('EXEC')
    if Marker > -1:
        return (String[:Marker].strip(), String[Marker + len('EXEC'):].strip())
    return (String.strip(), '')
+
+## GetComponents
+#
+# Parse block of the components defined in dsc file
+# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get component successfully
+#
def GetComponents(Lines, Key, KeyValues, CommentCharacter):
    # Strip everything up to and including the section header before parsing.
    if Lines.find(DataType.TAB_SECTION_END) > -1:
        Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    # State machine identical in shape to GetComponent, but operating on raw
    # text lines (cleaned of comments) instead of pre-parsed records.
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None          # component name of the block being parsed
    LibraryClassItem = []    # accumulated <LibraryClasses> lines
    BuildOption = []         # accumulated <BuildOptions> lines
    Pcd = []                 # accumulated (pcd-section-type, line) pairs

    LineList = Lines.split('\n')
    for Line in LineList:
        Line = CleanString(Line, CommentCharacter)
        if Line == None or Line == '':
            continue

        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)

        #
        # Parse a block content
        #
        if findBlock:
            # Each sub-section tag switches exactly one flag on; the tag line
            # itself is consumed.
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail: flush the finished component and
                # reset all state for the next one.
                #
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue

        if findBlock:
            # Route the line into the bucket of the active sub-section.
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
        else:
            # A component without a '{...}' block: record it with empty lists.
            KeyValues.append([ListItem, [], [], []])

    return True
+
+## Get Source
+#
+# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3], List[4])
+#
def GetSource(Item, ContainerFile, FileRelativePath, LineNo = -1):
    ## Parse "<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]";
    #  four extra separators keep all optional fields addressable.
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 4)
    if not (5 <= len(Fields) <= 9):
        RaiseParserError(Item, 'Sources', ContainerFile, '<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]', LineNo)
    Fields[0] = NormPath(Fields[0])
    CheckFileExist(FileRelativePath, Fields[0], ContainerFile, 'Sources', Item, LineNo)
    if Fields[4] != '':
        CheckPcdTokenInfo(Fields[4], 'Sources', ContainerFile, LineNo)

    return (Fields[0], Fields[1], Fields[2], Fields[3], Fields[4])
+
+## Get Binary
+#
+# Get Binary of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3])
+#
def GetBinary(Item, ContainerFile, FileRelativePath, LineNo = -1):
    ## Parse "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]";
    #  one extra separator guarantees the optional PCD field exists.
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
    if len(Fields) not in (4, 5):
        RaiseParserError(Item, 'Binaries', ContainerFile, "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]", LineNo)
    elif Fields[3] != '':
        CheckPcdTokenInfo(Fields[3], 'Binaries', ContainerFile, LineNo)

    return (Fields[0], Fields[1], Fields[2], Fields[3])
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetGuidsProtocolsPpisOfInf(Item, Type, ContainerFile, LineNo = -1):
    ## Parse "<GuidCName>[|<PcdFeatureFlag>]"; the padding separator makes
    #  the optional flag field always present.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    if Fields[1] != '':
        CheckPcdTokenInfo(Fields[1], Type, ContainerFile, LineNo)

    return (Fields[0], Fields[1])
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
+#
+# @param Item: String as <GuidCName>=<GuidValue>
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo = -1):
    ## Parse "<CName>=<GuidValue>"; exactly one '='-separated pair is expected.
    Fields = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
    if len(Fields) != 2:
        RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', LineNo)

    return (Fields[0], Fields[1])
+
+## GetPackage
+#
+# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo = -1):
    ## Parse "<PackagePath>[|<PcdFeatureFlag>]"; the path must be an existing
    #  .dec file and the optional flag must be a valid PCD token.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    CheckFileType(Fields[0], '.Dec', ContainerFile, 'package', Fields[0], LineNo)
    CheckFileExist(FileRelativePath, Fields[0], ContainerFile, 'Packages', Fields[0], LineNo)

    if Fields[1] != '':
        CheckPcdTokenInfo(Fields[1], 'Packages', ContainerFile, LineNo)

    return (Fields[0], Fields[1])
+
+## Get Pcd Values of Inf
+#
+# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+#
+# @param Item: The string describes pcd
+# @param Type: The type of Pcd
+# @param File: The file which describes the pcd, used for error report
+#
+# @retval (TokenSpcCName, TokenCName, Value, ItemType) Formatted Pcd Item
+#
def GetPcdOfInf(Item, Type, File, LineNo):
    ## Parse an INF PCD entry "<TokenSpaceGuidCName>.<PcdCName>[|<Value>]".
    #  The PCD Type maps to the INF section name used in error reports.
    Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
    TypeToInfSection = {
        TAB_PCDS_FIXED_AT_BUILD: TAB_INF_FIXED_PCD,
        TAB_PCDS_PATCHABLE_IN_MODULE: TAB_INF_PATCH_PCD,
        TAB_PCDS_FEATURE_FLAG: TAB_INF_FEATURE_PCD,
        TAB_PCDS_DYNAMIC_EX: TAB_INF_PCD_EX,
        TAB_PCDS_DYNAMIC: TAB_INF_PCD,
    }
    InfType = TypeToInfSection.get(Type, '')
    TokenGuid = TokenName = Value = ''

    # One padding separator makes the optional value field always present.
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
    if 2 <= len(Fields) <= 3:
        Value = Fields[1]
    else:
        RaiseParserError(Item, InfType, File, Format, LineNo)
    TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
    if len(TokenInfo) == 2:
        TokenGuid, TokenName = TokenInfo[0], TokenInfo[1]
    else:
        RaiseParserError(Item, InfType, File, Format, LineNo)

    return (TokenGuid, TokenName, Value, Type)
+
+
+## Get Pcd Values of Dec
+#
+# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+# @retval (TokenSpcCName, TokenCName, Value, DatumType, Token, ItemType) Formatted Pcd Item
+#
def GetPcdOfDec(Item, Type, File, LineNo = -1):
    ## Parse a DEC PCD entry
    #  "<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>".
    Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
    TokenGuid = TokenName = Value = DatumType = Token = ''
    Fields = GetSplitValueList(Item)
    if len(Fields) == 4:
        Value, DatumType, Token = Fields[1], Fields[2], Fields[3]
    else:
        RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
    TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
    if len(TokenInfo) == 2:
        TokenGuid, TokenName = TokenInfo[0], TokenInfo[1]
    else:
        RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)

    return (TokenGuid, TokenName, Value, DatumType, Token, Type)
+
+## Parse DEFINE statement
+#
+# Get DEFINE macros
+#
+# 1. Insert a record into TblDec
+# Value1: Macro Name
+# Value2: Macro Value
+#
def ParseDefine(LineValue, StartLine, Table, FileID, Filename, SectionName, SectionModel, Arch):
    # Extract "NAME = VALUE" from the text following the DEFINE keyword
    # (case-insensitive keyword search; a single split on '=' so the value
    # may itself contain '='), then insert it as a MODEL_META_DATA_DEFINE
    # record attributed to this file/section/arch.
    EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
    Define = GetSplitValueList(CleanString(LineValue[LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ') : ]), TAB_EQUAL_SPLIT, 1)
    Table.Insert(MODEL_META_DATA_DEFINE, Define[0], Define[1], '', '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+## InsertSectionItems
+#
+# Insert item data of a section to a dict
+#
def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, RecordSet):
    ## Append parsed section items to RecordSet[Model], once per arch.
    #
    # @param Model: Section type key into RecordSet
    # @param CurrentSection: Section name (kept for interface compatibility;
    #                        not used by this function)
    # @param SectionItemList: Items as [LineValue, StartLine, Comment]
    # @param ArchList: Arches the section applies to ('' means common)
    # @param ThirdList: Per-arch third parameter (e.g. ModuleType or SkuId)
    # @param RecordSet: Dict of parsed records, updated in place
    #
    # Note: the original code also initialized BelongsToItem/EndLine/EndColumn
    # locals that were never used; they have been removed (dead code).
    for Index in range(0, len(ArchList)):
        Arch = ArchList[Index]
        Third = ThirdList[Index]
        if Arch == '':
            # An empty arch on the section header means "common".
            Arch = TAB_ARCH_COMMON

        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            LineValue, StartLine, Comment = SectionItem[0], SectionItem[1], SectionItem[2]

            EdkLogger.debug(4, "Parsing %s ..." % LineValue)
            # DEFINE statements are skipped here (handled elsewhere).
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                continue

            # ID is a placeholder: no database insert is performed here.
            ID = -1
            Records.append([LineValue, Arch, StartLine, ID, Third, Comment])

    if RecordSet:
        RecordSet[Model] = Records
+
+## Insert records to database
+#
+# Insert item data of a section to database
+# @param Table: The Table to be inserted
+# @param FileID: The ID of belonging file
+# @param Filename: The name of belonging file
+# @param CurrentSection: The name of currect section
+# @param SectionItemList: A list of items of the section
+# @param ArchList: A list of arches
+# @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
+# @param IfDefList: A list of all conditional statements
+# @param RecordSet: A dict of all parsed records
+#
def InsertSectionItemsIntoDatabase(Table, FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, RecordSet):
    # Insert each section item into the database table and mirror it (with
    # its new database ID) into RecordSet[Model].
    # Note: IfDefList is accepted but not used by this function.
    #
    # Insert each item data of a section
    #
    for Index in range(0, len(ArchList)):
        Arch = ArchList[Index]
        Third = ThirdList[Index]
        if Arch == '':
            # An empty arch on the section header means "common".
            Arch = TAB_ARCH_COMMON

        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            BelongsToItem, EndLine, EndColumn = -1, -1, -1
            LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]

            EdkLogger.debug(4, "Parsing %s ..." %LineValue)
            #
            # And then parse DEFINE statement
            #
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                ParseDefine(LineValue, StartLine, Table, FileID, Filename, CurrentSection, Model, Arch)
                continue

            #
            # At last parse other sections
            #
            ID = Table.Insert(Model, LineValue, Third, Third, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
            Records.append([LineValue, Arch, StartLine, ID, Third])

    if RecordSet != {}:
        RecordSet[Model] = Records
+
+## GenMetaDatSectionItem
+def GenMetaDatSectionItem(Key, Value, List):
+ if Key not in List:
+ List[Key] = [Value]
+ else:
+ List[Key].append(Value)
\ No newline at end of file diff --git a/BaseTools/Source/Python/Common/PyUtility.pyd b/BaseTools/Source/Python/Common/PyUtility.pyd Binary files differnew file mode 100644 index 0000000000..5bb57d91e0 --- /dev/null +++ b/BaseTools/Source/Python/Common/PyUtility.pyd diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py new file mode 100644 index 0000000000..5da0cacfb0 --- /dev/null +++ b/BaseTools/Source/Python/Common/String.py @@ -0,0 +1,703 @@ +## @file
+# This file is used to define common string related functions used in parsing process
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import re
+import DataType
+import os.path
+import string
+import EdkLogger as EdkLogger
+
+from GlobalData import *
+from BuildToolError import *
+
+## GetSplitValueList
+#
+# Get a value list from a string with multiple values splited with SplitTag
+# The default SplitTag is DataType.TAB_VALUE_SPLIT
+# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
+#
+# @param String: The input string to be splitted
+# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
+# @param MaxSplit: The max number of split values, default is -1
+#
+# @retval list() A list for splitted string
+#
def GetSplitValueList(String, SplitTag = DataType.TAB_VALUE_SPLIT, MaxSplit = -1):
    ## Split String on SplitTag and strip each piece.
    #  'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
    #
    # @param String: The input string to be splitted
    # @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
    # @param MaxSplit: The max number of split values, default is -1 (no limit)
    #
    # @retval list() A list of stripped substrings
    #
    # A list comprehension replaces map(lambda ...): identical result under
    # Python 2, and still a real list (not a lazy map object) under Python 3.
    return [Value.strip() for Value in String.split(SplitTag, MaxSplit)]
+
+## MergeArches
+#
+# Find a key's all arches in dict, add the new arch to the list
+# If not exist any arch, set the arch directly
+#
+# @param Dict: The input value for Dict
+# @param Key: The input value for Key
+# @param Arch: The Arch to be added or merged
+#
def MergeArches(Dict, Key, Arch):
    ## Merge Arch into the arch list kept under Key.
    #
    # @param Dict: Mapping of key -> list of arches, updated in place
    # @param Key: The key whose arch list is extended
    # @param Arch: The arch to add
    #
    # Direct membership test replaces "Key in Dict.keys()" (which builds a
    # full key list on Python 2); behavior is unchanged.
    if Key in Dict:
        Dict[Key].append(Arch)
    else:
        # Arch.split() preserves the original behavior: a non-empty arch
        # yields [Arch], an empty string yields an empty list.
        Dict[Key] = Arch.split()
+
+## GenDefines
+#
+# Parse a string with format "DEFINE <VarName> = <PATH>"
+# Generate a map Defines[VarName] = PATH
+# Return False if invalid format
+#
+# @param String: String with DEFINE statement
+# @param Arch: Supportted Arch
+# @param Defines: DEFINE statement to be parsed
+#
+# @retval 0 DEFINE statement found, and valid
+# @retval -1 DEFINE statement found, but not valid (no "name = value" pair)
+# @retval 1 DEFINE statement not found
+#
def GenDefines(String, Arch, Defines):
    ## Parse "DEFINE <VarName> = <PATH>" into Defines[(VarName, Arch)].
    #  Returns 1 when no DEFINE keyword is present, 0 on success, and -1
    #  when the DEFINE is present but malformed.
    if String.find(DataType.TAB_DEFINE + ' ') < 0:
        # No DEFINE keyword at all.
        return 1
    Pair = String.replace(DataType.TAB_DEFINE + ' ', '').split(DataType.TAB_EQUAL_SPLIT)
    if len(Pair) == 2:
        Defines[(CleanString(Pair[0]), Arch)] = CleanString(Pair[1])
        return 0
    # DEFINE present but not exactly "name = value".
    return -1
+
+## GenInclude
+#
+# Parse a string with format "!include <Filename>"
+# Return the file path
+# Return False if invalid format or NOT FOUND
+#
+# @param String: String with INCLUDE statement
+# @param IncludeFiles: INCLUDE statement to be parsed
+# @param Arch: Supportted Arch
+#
+# @retval True
+# @retval False
+#
def GenInclude(String, IncludeFiles, Arch):
    ## Parse "!include <Filename>" (case-insensitive keyword) and merge the
    #  file path into IncludeFiles for the given Arch.
    Keyword = DataType.TAB_INCLUDE.upper() + ' '
    Pos = String.upper().find(Keyword)
    if Pos < 0:
        return False
    IncludeFile = CleanString(String[Pos + len(Keyword):])
    MergeArches(IncludeFiles, IncludeFile, Arch)
    return True
+
+## GetLibraryClassesWithModuleType
+#
+# Get Library Class definition when no module type defined
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get library classes successfully
+#
def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
    ## Collect library-class lines of a section, pairing each with the
    #  module-type list parsed out of the section header Key.
    ModuleTypes = SplitModuleType(Key)[1]
    Content = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Content.splitlines():
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned and Cleaned[0] != CommentCharacter:
            KeyValues.append([CleanString(Cleaned, CommentCharacter), ModuleTypes])

    return True
+
+## GetDynamics
+#
+# Get Dynamic Pcds
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get Dynamic Pcds successfully
+#
def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
    """Collect dynamic PCD entries of a section into KeyValues.

    Each appended item is [CleanedLine, SkuIdNames]; SplitModuleType is
    reused here to peel the SkuId name list off the section key.
    Always returns True.
    """
    SkuIdNameList = SplitModuleType(Key)
    Section = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Section.splitlines():
        Entry = CleanString(RawLine, CommentCharacter)
        if Entry and Entry[0] != CommentCharacter:
            KeyValues.append([CleanString(Entry, CommentCharacter), SkuIdNameList[1]])
    return True
+
+## SplitModuleType
+#
# Split ModuleType out of a section define to get the key
+# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [ 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
+#
+# @param Key: String to be parsed
+#
+# @retval ReturnValue A list for module types
+#
def SplitModuleType(Key):
    """Split a section key such as 'LibraryClass.Arch.TypeA|TypeB' into
    ['LibraryClass.Arch', ['TypeA', 'TypeB']].
    """
    Parts = Key.split(DataType.TAB_SPLIT)
    # Pad so that the arch (index 1) and module-type (index 2) slots always
    # exist even when the key omits them.
    Parts.extend(['', ''])
    Primary = Parts[0]
    if Parts[1] != '':
        Primary = Primary + DataType.TAB_SPLIT + Parts[1]
    return [Primary, GetSplitValueList(Parts[2])]
+
+## Replace macro in strings list
+#
+# This method replace macros used in a given string list. The macros are
+# given in a dictionary.
+#
+# @param StringList StringList to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement To decide whether replace un-defined macro to ''
+#
+# @retval NewList A new string list whose macros are replaced
+#
def ReplaceMacros(StringList, MacroDefinitions={}, SelfReplacement = False):
    """Apply ReplaceMacro to every plain string in StringList.

    Non-string items are passed through untouched.  MacroDefinitions is
    never mutated here; the mutable default is kept for interface
    compatibility.
    """
    return [ReplaceMacro(Item, MacroDefinitions, SelfReplacement)
            if type(Item) == type('') else Item
            for Item in StringList]
+
+## Replace macro in string
+#
+# This method replace macros used in given string. The macros are given in a
+# dictionary.
+#
+# @param String String to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement To decide whether replace un-defined macro to ''
+#
+# @retval string The string whose macros are replaced
+#
def ReplaceMacro(String, MacroDefinitions={}, SelfReplacement = False):
    """Repeatedly substitute $(MACRO) references in String.

    Iterates to a fixed point so that macros whose values themselves contain
    macro references are fully expanded.  Undefined macros are left in place
    unless SelfReplacement is True, in which case they are replaced by ''.
    MacroDefinitions is read-only here; the mutable default is harmless.
    """
    # Tracks the previous iteration's result to detect when expansion has
    # converged (or stalled on undefined macros).
    LastString = String
    # An empty/absent macro dictionary skips the loop entirely.
    while MacroDefinitions:
        # gMacroPattern is a module-level compiled regex (defined above this
        # chunk) that extracts the names inside $(...) references.
        MacroUsed = gMacroPattern.findall(String)
        # no macro found in String, stop replacing
        if len(MacroUsed) == 0:
            break

        for Macro in MacroUsed:
            if Macro not in MacroDefinitions:
                if SelfReplacement:
                    String = String.replace("$(%s)" % Macro, '')
                continue
            String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
        # in case there's macro not defined
        if String == LastString:
            break
        LastString = String

    return String
+
+## NormPath
+#
+# Create a normal path
# And replace DEFINE macros in the path
+#
+# @param Path: The input value for Path to be converted
+# @param Defines: A set for DEFINE statement
+#
+# @retval Path Formatted path
+#
def NormPath(Path, Defines = {}):
    """Normalize Path for the local OS, expanding DEFINE macros first.

    A path that starts with '.' keeps an explicit './' prefix after
    normalization.  An empty Path is returned unchanged.
    """
    if not Path:
        return Path
    StartsRelative = Path[0] == '.'
    # Expand $(DEFINE) macros before normalizing, when any are supplied.
    if Defines:
        Path = ReplaceMacro(Path, Defines)
    Path = os.path.normpath(Path)
    # os.path.normpath drops a leading './'; restore it for relative paths.
    if StartsRelative and Path[0] != '.':
        Path = os.path.join('.', Path)
    return Path
+
+## CleanString
+#
+# Remove comments in a string
+# Remove spaces
+#
# @param Line: The string to be cleaned
# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
# @param AllowCppStyleComment: If True, also treat R8-style ("//") markers as comments
#
# @retval Line The cleaned line, with comments and surrounding whitespace removed
+#
def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
    """Strip surrounding whitespace and trailing comments from Line.

    When AllowCppStyleComment is True, R8-style comment markers are first
    rewritten to CommentCharacter so both forms are removed.
    """
    Line = Line.strip()
    if AllowCppStyleComment:
        Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
    # Drop everything from the first comment character, then strip the
    # whitespace that preceded it.
    return Line.split(CommentCharacter, 1)[0].strip()
+
+## GetMultipleValuesOfKeyFromLines
+#
+# Parse multiple strings to clean comment and spaces
+# The result is saved to KeyValues
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Successfully executed
+#
def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
    """Append every non-comment, non-empty line of a section to KeyValues.

    The section body is everything after the first section-end marker.
    Always returns True.
    """
    Section = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Section.split('\n'):
        Value = CleanString(RawLine, CommentCharacter)
        if Value and Value[0] != CommentCharacter:
            KeyValues.append(Value)
    return True
+
+## GetDefineValue
+#
+# Parse a DEFINE statement to get defined value
+# DEFINE Key Value
+#
+# @param String: The content to be parsed
+# @param Key: The key of DEFINE statement
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval string The defined value
+#
def GetDefineValue(String, Key, CommentCharacter):
    """Return the value portion of a 'DEFINE Key Value' statement.

    CommentCharacter is accepted for interface compatibility but comments
    are actually removed by CleanString's default behavior.
    """
    Cleaned = CleanString(String)
    Prefix = Key + ' '
    return Cleaned[Cleaned.find(Prefix) + len(Prefix):]
+
+## GetSingleValueOfKeyFromLines
+#
+# Parse multiple strings as below to get value of each definition line
+# Key1 = Value1
+# Key2 = Value2
+# The result is saved to Dictionary
+#
+# @param Lines: The content to be parsed
+# @param Dictionary: To store data after parsing
+# @param CommentCharacter: Comment char, be used to ignore comment content
+# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+#
+# @retval True Successfully executed
+#
def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    """Parse 'Key = Value' lines into Dictionary.

    DEFINE/SPEC lines are accumulated separately and stored under the
    TAB_INF_DEFINES_DEFINE / TAB_INF_DEFINES_SPEC keys.  For ordinary lines,
    the first assignment to a known key replaces its value and later
    assignments extend it; unknown keys are treated as macros.
    Always returns True.
    """
    Lines = Lines.split('\n')
    # Keys already (re)assigned in this call; a second assignment to the
    # same key extends the stored list instead of replacing it.
    Keys = []
    Value = ''
    DefineValues = ['']
    SpecValues = ['']

    for Line in Lines:
        #
        # Handle DEFINE and SPEC
        #
        if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
            # Drop the '' placeholder once a real DEFINE value arrives.
            if '' in DefineValues:
                DefineValues.remove('')
            DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
            continue
        if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
            if '' in SpecValues:
                SpecValues.remove('')
            SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
            continue

        #
        # Handle Others
        #
        LineList = Line.split(KeySplitCharacter, 1)
        if len(LineList) >= 2:
            # The key must be a single word and not a comment line.
            Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter:
                #
                # Remove comments and white spaces
                #
                LineList[1] = CleanString(LineList[1], CommentCharacter)
                if ValueSplitFlag:
                    # NOTE(review): relies on the Python 2 'string' module's
                    # function form of strip (imported at file top, outside
                    # this chunk) -- not Python 3 compatible.
                    Value = map(string.strip, LineList[1].split(ValueSplitCharacter))
                else:
                    Value = CleanString(LineList[1], CommentCharacter).splitlines()

                if Key[0] in Dictionary:
                    if Key[0] not in Keys:
                        Dictionary[Key[0]] = Value
                        Keys.append(Key[0])
                    else:
                        Dictionary[Key[0]].extend(Value)
                else:
                    # NOTE(review): assumes Dictionary already contains a
                    # TAB_INF_DEFINES_MACRO sub-dictionary -- confirm callers
                    # always pre-populate it.
                    Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]

    if DefineValues == []:
        DefineValues = ['']
    if SpecValues == []:
        SpecValues = ['']
    Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
    Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues

    return True
+
+## The content to be parsed
+#
+# Do pre-check for a file before it is parsed
+# Check $()
+# Check []
+#
+# @param FileName: Used for error report
+# @param FileContent: File content to be parsed
+# @param SupSectionTag: Used for error report
+#
def PreCheck(FileName, FileContent, SupSectionTag):
    """Pre-validate file content before real parsing.

    Checks that '$' always appears as a complete '$(...)' reference and that
    '[' / ']' appear in pairs on a line; raises a parser error otherwise.
    Returns the cleaned content with comments stripped, one '\r\n'-terminated
    line per input line.
    """
    LineNo = 0
    # NOTE(review): IsFailed is never set to True anywhere below, so the
    # final error branch is dead code -- confirm before relying on it.
    IsFailed = False
    NewFileContent = ''
    for Line in FileContent.splitlines():
        LineNo = LineNo + 1
        #
        # Clean current line
        #
        Line = CleanString(Line)

        #
        # Remove commented line
        #
        # NOTE(review): this tests TAB_COMMA_SPLIT (','), not
        # TAB_COMMENT_SPLIT ('#'); CleanString above already removes '#'
        # comments, but blanking lines that start with ',' looks like a
        # typo for the comment constant -- confirm intent.
        if Line.find(DataType.TAB_COMMA_SPLIT) == 0:
            Line = ''
        #
        # Check $()
        #
        if Line.find('$') > -1:
            if Line.find('$(') < 0 or Line.find(')') < 0:
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

        #
        # Check []
        #
        if Line.find('[') > -1 or Line.find(']') > -1:
            #
            # Only get one '[' or one ']'
            #
            if not (Line.find('[') > -1 and Line.find(']') > -1):
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

        #
        # Regenerate FileContent
        #
        NewFileContent = NewFileContent + Line + '\r\n'

    if IsFailed:
        EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

    return NewFileContent
+
+## CheckFileType
+#
+# Check if the Filename is including ExtName
+# Return True if it exists
+# Raise a error message if it not exists
+#
+# @param CheckFilename: Name of the file to be checked
+# @param ExtName: Ext name of the file to be checked
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval True The file type is correct
+#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo = -1):
    """Verify that CheckFilename carries the extension ExtName.

    On mismatch, reports a parser error located at Line within
    ContainerFilename (the line number is searched for when not given).
    Empty/None filenames are accepted silently.  Returns True.
    """
    if CheckFilename != '' and CheckFilename != None:
        Ext = os.path.splitext(CheckFilename)[1]
        if Ext.upper() != ExtName.upper():
            # Locate the offending line inside the container file for the report.
            ContainerContent = open(ContainerFilename, 'r').read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerContent, Line)
            EdkLogger.error("Parser", PARSER_ERROR,
                            "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName),
                            Line=LineNo, File=ContainerFilename, RaiseError = EdkLogger.IsRaiseError)
    return True
+
+## CheckFileExist
+#
+# Check if the file exists
+# Return True if it exists
+# Raise a error message if it not exists
+#
+# @param CheckFilename: Name of the file to be checked
+# @param WorkspaceDir: Current workspace dir
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval The file full path if the file exists
+#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo = -1):
    """Verify that CheckFilename exists relative to WorkspaceDir.

    On a missing file, reports a parser error located at Line within
    ContainerFilename.  Returns the resolved full path ('' for an
    empty/None input filename).
    """
    CheckFile = ''
    if CheckFilename != '' and CheckFilename != None:
        CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
        if not os.path.isfile(CheckFile):
            # Locate the offending line inside the container file for the report.
            ContainerContent = open(ContainerFilename, 'r').read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerContent, Line)
            EdkLogger.error("Parser", PARSER_ERROR,
                            "Can't find file '%s' defined in section '%s'" % (CheckFile, SectionName),
                            File=ContainerFilename, Line = LineNo, RaiseError = EdkLogger.IsRaiseError)
    return CheckFile
+
+## GetLineNo
+#
+# Find the index of a line in a file
+#
+# @param FileContent: Search scope
+# @param Line: Search key
+#
+# @retval int Index of the line
+# @retval -1 The line is not found
+#
def GetLineNo(FileContent, Line, IsIgnoreComment = True):
    """Return the 1-based number of the first line containing Line.

    Lines that begin with the comment character are skipped when
    IsIgnoreComment is True.  Returns -1 when Line is not found.
    """
    for Index, Text in enumerate(FileContent.splitlines()):
        if Text.find(Line) > -1:
            # Skip matches that sit inside a comment line.
            if IsIgnoreComment and Text.strip()[0] == DataType.TAB_COMMENT_SPLIT:
                continue
            return Index + 1
    return -1
+
+## RaiseParserError
+#
+# Raise a parser error
+#
+# @param Line: String which has error
+# @param Section: Used for error report
+# @param File: File which has the string
+# @param Format: Correct format
+#
def RaiseParserError(Line, Section, File, Format = '', LineNo = -1):
    """Report an invalid statement found while parsing File.

    When LineNo is not supplied, the statement is searched for in the file
    to recover its line number.  Format, when given, is echoed as the
    expected correct form.
    """
    if LineNo == -1:
        LineNo = GetLineNo(open(os.path.normpath(File), 'r').read(), Line)
    Message = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
    if Format != '':
        Format = "Correct format is " + Format
    EdkLogger.error("Parser", PARSER_ERROR, Message, File=File, Line=LineNo, ExtraData=Format, RaiseError = EdkLogger.IsRaiseError)
+
+## WorkspaceFile
+#
+# Return a full path with workspace dir
+#
+# @param WorkspaceDir: Workspace dir
+# @param Filename: Relative file name
+#
+# @retval string A full path
+#
def WorkspaceFile(WorkspaceDir, Filename):
    """Join a relative Filename onto WorkspaceDir, normalizing both parts."""
    Root = NormPath(WorkspaceDir)
    Leaf = NormPath(Filename)
    return os.path.join(Root, Leaf)
+
+## Split string
+#
# Remove one leading and/or trailing '"' from the string
#
# @param String: The string to be processed
+#
+# @retval String: The string after removed '""'
+#
def SplitString(String):
    """Strip at most one leading and one trailing double quote from String."""
    # Slicing with [:1] / [-1:] is safe on the empty string, matching the
    # original startswith/endswith behavior exactly.
    if String[:1] == '"':
        String = String[1:]
    if String[-1:] == '"':
        String = String[:-1]
    return String
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in each item of StringList
+#
+# @param StringList: A list for strings to be converted
+#
def ConvertToSqlString(StringList):
    """Escape single quotes SQL-style ("'" -> "''") in every item.

    Returns the same map() result the original produced (a list on
    Python 2).
    """
    def _Escape(Text):
        return Text.replace("'", "''")
    return map(_Escape, StringList)
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in the String
+#
+# @param String: A String to be converted
+#
def ConvertToSqlString2(String):
    """Escape single quotes SQL-style ("'" -> "''") in String."""
    return "''".join(String.split("'"))
+
+#
+# Remove comment block
+#
def RemoveBlockComment(Lines):
    """Strip R8-style block comments from a list of lines.

    Lines wholly inside a block comment are replaced by '' so the line
    count (and thus line numbers) is preserved.  Text before the comment
    opener and after the closer is kept and rejoined on the closing line.
    Returns the new list of (stripped) lines.

    Fix: removed the unused local IsFindBlockCode, which was assigned but
    never read.
    """
    IsFindBlockComment = False
    # Text preceding a comment opener, held until the closer is seen.
    ReservedLine = ''
    NewLines = []

    for Line in Lines:
        Line = Line.strip()
        if Line.find(DataType.TAB_COMMENT_R8_START) > -1:
            ReservedLine = GetSplitValueList(Line, DataType.TAB_COMMENT_R8_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(DataType.TAB_COMMENT_R8_END) > -1:
            # Rejoin the text kept from the opening line with what follows
            # the closing marker.
            Line = ReservedLine + GetSplitValueList(Line, DataType.TAB_COMMENT_R8_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            # Keep a placeholder so line numbering is unchanged.
            NewLines.append('')
            continue

        NewLines.append(Line)
    return NewLines
+
+#
+# Get String of a List
+#
def GetStringOfList(List, Split = ' '):
    """Join a list into one string with Split appended after every item,
    then strip surrounding whitespace.  Non-list inputs are returned as-is.

    Note the original quirk is preserved: a non-whitespace Split leaves a
    trailing separator (e.g. ['a','b'] with ',' gives 'a,b,').
    """
    if type(List) != type([]):
        return List
    return ''.join([Item + Split for Item in List]).strip()
+
+#
+# Get HelpTextList from HelpTextClassList
+#
def GetHelpTextList(HelpTextClassList):
    """Flatten the .String fields of help-text objects into a line list.

    A trailing newline on each object's String is removed first; note this
    mutates the objects in place, exactly as the original did.  A falsy
    input yields [].
    """
    Result = []
    if HelpTextClassList:
        for HelpText in HelpTextClassList:
            if HelpText.String.endswith('\n'):
                # In-place trim of the single trailing newline.
                HelpText.String = HelpText.String[:-len('\n')]
            Result.extend(HelpText.String.split('\n'))
    return Result
+
def StringToArray(String):
    """Convert a string literal to a C byte-array initializer.

    unicode objects and L"..." literals become UCS-2 arrays (low byte,
    0x00 pairs) with a double-zero terminator; "..." literals become ASCII
    arrays with a single zero terminator; anything else is treated as a
    whitespace-separated list of values terminated by 0.

    Fix: the empty-unicode check called len(unicode) -- len() of the type
    object itself, a guaranteed TypeError -- instead of len(String).
    """
    if isinstance(String, unicode):
        if len(String) == 0:
            return "{0x00, 0x00}"
        return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String])
    elif String.startswith('L"'):
        if String == "L\"\"":
            return "{0x00, 0x00}"
        else:
            # Strip the L" prefix and trailing quote before encoding.
            return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String[2:-1]])
    elif String.startswith('"'):
        if String == "\"\"":
            return "{0x00}"
        else:
            return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
    else:
        return '{%s, 0}' % ', '.join(String.split())
+
def StringArrayLength(String):
    """Return the element count of the array StringToArray would emit.

    Mirrors StringToArray's four cases: unicode and L"..." literals count
    two bytes per character plus terminators; "..." literals one byte per
    character plus a terminator; otherwise the number of whitespace-
    separated tokens plus one.
    """
    if isinstance(String, unicode):
        return (len(String) + 1) * 2 + 1
    if String.startswith('L"'):
        # len - 3 drops the 'L' and both quotes; +1 is the terminator.
        return (len(String) - 3 + 1) * 2
    if String.startswith('"'):
        return len(String) - 2 + 1
    return len(String.split()) + 1
+
def RemoveDupOption(OptionString, Which="/I", Against=None):
    """Blank out repeated occurrences of a flag (default /I) in an option
    string, keeping the first of each distinct value.

    Values already present in Against are treated as seen, so their first
    occurrence is removed too.  Duplicates are replaced by '' before
    rejoining, so the original's double-space artifacts are preserved.
    """
    Tokens = OptionString.split()
    SeenValues = list(Against) if Against else []
    for Position, Token in enumerate(Tokens):
        if not Token.startswith(Which):
            continue
        # The flag's payload is whatever follows the prefix ('' when bare).
        Payload = Token[len(Which):] if len(Token) > len(Which) else ""
        if Payload in SeenValues:
            Tokens[Position] = ""
        else:
            SeenValues.append(Payload)
    return " ".join(Tokens)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # This module is a library of string helpers; it has no standalone behavior.
    pass
+
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py new file mode 100644 index 0000000000..70178f54ce --- /dev/null +++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py @@ -0,0 +1,174 @@ +## @file
+# This file is used to define each component of Target.txt file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import EdkLogger
+import DataType
+from BuildToolError import *
+import GlobalData
+
+gDefaultTargetTxtFile = "Conf/target.txt"
+
+## TargetTxtClassObject
+#
+# This class defined content used in file target.txt
+#
+# @param object: Inherited from object class
+# @param Filename: Input value for full path of target.txt
+#
+# @var TargetTxtDictionary: To store keys and values defined in target.txt
+#
class TargetTxtClassObject(object):
    """Parsed representation of Conf/target.txt.

    TargetTxtDictionary maps each recognized setting to a string (single-
    valued keys) or a list of whitespace-separated values (TARGET,
    TOOL_CHAIN_TAG, TARGET_ARCH).
    """
    ## Constructor
    #
    # @param Filename: full path of target.txt; parsed immediately when given
    #
    def __init__(self, Filename = None):
        # All recognized keys start empty; LoadTargetTxtFile() fills them in.
        self.TargetTxtDictionary = {
            DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM : '',
            DataType.TAB_TAT_DEFINES_ACTIVE_MODULE : '',
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF : '',
            DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD : '',
            DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : '',
            DataType.TAB_TAT_DEFINES_TARGET : [],
            DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG : [],
            DataType.TAB_TAT_DEFINES_TARGET_ARCH : [],
            DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
        }
        if Filename != None:
            self.LoadTargetTxtFile(Filename)

    ## LoadTargetTxtFile
    #
    # Load target.txt and parse it into self.TargetTxtDictionary.
    #
    # @param Filename: full path of target.txt
    #
    # @retval 0 parsed successfully
    # @retval 1 file not found (only reached if EdkLogger.error does not raise)
    #
    def LoadTargetTxtFile(self, Filename):
        if os.path.exists(Filename) and os.path.isfile(Filename):
            return self.ConvertTextFileToDict(Filename, '#', '=')
        else:
            EdkLogger.error("Target.txt Parser", FILE_NOT_FOUND, ExtraData=Filename)
            return 1

    ## ConvertTextFileToDict
    #
    # Convert a text file of 'name = value' pairs into
    # self.TargetTxtDictionary, validating the thread-related settings.
    #
    # @param FileName: Text filename
    # @param CommentCharacter: Comment char, used to ignore comment content
    # @param KeySplitCharacter: Key split char between key name and key value
    #
    # @retval 0 Convert successfully (EdkLogger.error raises on failure)
    #
    def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
        F = None
        try:
            F = open(FileName,'r')
        except:
            # NOTE(review): bare except; EdkLogger.error normally raises, so
            # the close below is best-effort cleanup on the failure path.
            EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
            if F != None:
                F.close()

        for Line in F:
            Line = Line.strip()
            # Skip blank lines and full-line comments.
            if Line.startswith(CommentCharacter) or Line == '':
                continue

            LineList = Line.split(KeySplitCharacter, 1)
            Key = LineList[0].strip()
            if len(LineList) == 2:
                Value = LineList[1].strip()
            else:
                Value = ""

            if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
                       DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
                # Path-valued settings: normalize to forward slashes.
                self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
            elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
                         DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
                # Multi-valued settings: split on whitespace.
                self.TargetTxtDictionary[Key] = Value.split()
            elif Key == DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD:
                if Value not in ["Enable", "Disable"]:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid setting of [%s]: %s." % (Key, Value),
                                    ExtraData="\tSetting must be one of [Enable, Disable]",
                                    File=FileName)
                self.TargetTxtDictionary[Key] = Value
            elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
                # Validate that the value parses as an integer (base prefix
                # allowed); the original string form is what gets stored.
                try:
                    V = int(Value, 0)
                except:
                    EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
                                    File=FileName)
                self.TargetTxtDictionary[Key] = Value
            #elif Key not in GlobalData.gGlobalDefines:
            #    GlobalData.gGlobalDefines[Key] = Value

        F.close()
        return 0

    ## printDict
    #
    # Debug helper: print all non-empty items of a dictionary.
    # NOTE(review): defined without 'self' and without @staticmethod, so it
    # cannot be called through an instance as written -- confirm it is unused.
    #
    # @param Dict: The dictionary to be printed
    #
    def printDict(Dict):
        if Dict != None:
            KeyList = Dict.keys()
            for Key in KeyList:
                if Dict[Key] != '':
                    print Key + ' = ' + str(Dict[Key])

    ## printList
    #
    # Debug helper: print the items of a list value under a dotted key.
    # NOTE(review): same missing-'self' issue as printDict above.
    #
    # @param Key: The key of the item to be printed
    # @param List: The list to be printed
    #
    def printList(Key, List):
        if type(List) == type([]):
            if len(List) > 0:
                if Key.find(TAB_SPLIT) != -1:
                    print "\n" + Key
                for Item in List:
                    print Item
+## TargetTxtDict
+#
+# Load target.txt in input workspace dir
+#
+# @param WorkSpace: Workspace dir
+#
+# @retval Target An instance of TargetTxtClassObject() with loaded target.txt
+#
def TargetTxtDict(WorkSpace):
    """Load Conf/target.txt under WorkSpace and return the parsed object."""
    TargetFilePath = os.path.normpath(os.path.join(WorkSpace, gDefaultTargetTxtFile))
    Target = TargetTxtClassObject()
    Target.LoadTargetTxtFile(TargetFilePath)
    return Target
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+ Target = TargetTxtDict(os.getenv("WORKSPACE"))
+ print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
+ print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET]
+ print Target.TargetTxtDictionary
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py new file mode 100644 index 0000000000..5a9a3096bb --- /dev/null +++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py @@ -0,0 +1,217 @@ +## @file
+# This file is used to define each component of tools_def.txt file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+
+from Dictionary import *
+from BuildToolError import *
+from TargetTxtClassObject import *
+
+##
# Static variables used for regular expression patterns
+#
+gMacroRefPattern = re.compile('(DEF\([^\(\)]+\))')
+gEnvRefPattern = re.compile('(ENV\([^\(\)]+\))')
+gMacroDefPattern = re.compile("DEFINE\s+([^\s]+)")
+gDefaultToolsDefFile = "Conf/tools_def.txt"
+
+## ToolDefClassObject
+#
+# This class defined content used in file tools_def.txt
+#
+# @param object: Inherited from object class
+# @param Filename: Input value for full path of tools_def.txt
+#
+# @var ToolsDefTxtDictionary: To store keys and values defined in target.txt
+# @var MacroDictionary: To store keys and values defined in DEFINE statement
+#
class ToolDefClassObject(object):
    """Parsed representation of Conf/tools_def.txt.

    ToolsDefTxtDictionary maps fully qualified
    TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE keys to expanded values;
    MacroDictionary holds DEF(name) and ENV(name) macro values.
    """
    ## Constructor
    #
    # Seeds MacroDictionary with every environment variable as "ENV(name)"
    # so ENV() references in tools_def.txt can be expanded.
    #
    # @param FileName: full path of tools_def.txt; parsed immediately when given
    #
    def __init__(self, FileName = None):
        self.ToolsDefTxtDictionary = {}
        self.MacroDictionary = {}
        for Env in os.environ:
            self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]

        if FileName != None:
            self.LoadToolDefFile(FileName)

    ## LoadToolDefFile
    #
    # Parse tools_def.txt: expand DEF()/ENV() macros, record definitions,
    # then expand '*' wildcard fields into all concrete combinations.
    #
    # @param FileName: full path of tools_def.txt
    #
    def LoadToolDefFile(self, FileName):
        FileContent = []
        if os.path.isfile(FileName):
            try:
                F = open(FileName,'r')
                FileContent = F.readlines()
            except:
                EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
        else:
            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

        # NOTE(review): ToolsDefTxtDatabase is created here, not in __init__,
        # so it only exists after LoadToolDefFile() has run.
        self.ToolsDefTxtDatabase = {
            TAB_TOD_DEFINES_TARGET : [],
            TAB_TOD_DEFINES_TOOL_CHAIN_TAG : [],
            TAB_TOD_DEFINES_TARGET_ARCH : [],
            TAB_TOD_DEFINES_COMMAND_TYPE : []
        }

        for Index in range(len(FileContent)):
            Line = FileContent[Index].strip()
            # Skip blank lines and comments.
            if Line == "" or Line[0] == '#':
                continue
            NameValuePair = Line.split("=", 1)
            if len(NameValuePair) != 2:
                EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
                continue

            Name = NameValuePair[0].strip()
            Value = NameValuePair[1].strip()

            if Name == "IDENTIFIER":
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
                continue

            # DEFINE lines register a DEF(name) macro for later expansion.
            MacroDefinition = gMacroDefPattern.findall(Name)
            if MacroDefinition != []:
                Done, Value = self.ExpandMacros(Value)
                if not Done:
                    # On failure, Value is the unresolved DEF(..)/ENV(..)
                    # reference; [4:-1] extracts the bare name for the report.
                    EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                    "Macro or Environment has not been defined",
                                    ExtraData=Value[4:-1], File=FileName, Line=Index+1)

                MacroName = MacroDefinition[0].strip()
                self.MacroDictionary["DEF(%s)" % MacroName] = Value
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
                continue

            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index+1)

            # Key layout: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE;
            # '*' is a wildcard in any field except the last.
            List = Name.split('_')
            if len(List) != 5:
                EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
                continue
            elif List[4] == '*':
                EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
                continue
            else:
                self.ToolsDefTxtDictionary[Name] = Value
                # Record each concrete (non-wildcard) axis value for the
                # wildcard expansion pass below.
                if List[0] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
                if List[1] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
                if List[2] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
                if List[3] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
                # A tool chain's FAMILY may only be set once (with arch and
                # command type wildcarded); it also seeds BUILDRULEFAMILY.
                if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == '*' and List[3] == '*':
                    if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                        EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
                if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == '*' and List[3] == '*':
                    # NOTE(review): if no FAMILY line has been seen yet, the
                    # BUILDRULEFAMILY sub-dict does not exist and the
                    # assignment below raises KeyError -- confirm input files
                    # always define FAMILY before BUILDRULEFAMILY.
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                       or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value

        # De-duplicate and sort the collected axis values.
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))

        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()

        # Wildcard expansion: working from the command-type field back to the
        # target field, replace each '*' key with one concrete key per known
        # axis value (explicit definitions win over expanded ones), then drop
        # the wildcard key.  Keys naming unknown axis values are dropped too.
        KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
        for Index in range(3,-1,-1):
            # Iterate over a snapshot since the dictionary is mutated below.
            for Key in dict(self.ToolsDefTxtDictionary):
                List = Key.split('_')
                if List[Index] == '*':
                    for String in self.ToolsDefTxtDatabase[KeyList[Index]]:
                        List[Index] = String
                        NewKey = '%s_%s_%s_%s_%s' % tuple(List)
                        if NewKey not in self.ToolsDefTxtDictionary:
                            self.ToolsDefTxtDictionary[NewKey] = self.ToolsDefTxtDictionary[Key]
                        # NOTE(review): this 'continue' is the last statement
                        # of the loop body and therefore a no-op.
                        continue
                    del self.ToolsDefTxtDictionary[Key]
                elif List[Index] not in self.ToolsDefTxtDatabase[KeyList[Index]]:
                    del self.ToolsDefTxtDictionary[Key]

    ## ExpandMacros
    #
    # Replace ENV(...) then DEF(...) references in Value with their
    # recorded values.
    #
    # @param Value: The string with unreplaced macros
    #
    # @retval (True, ExpandedValue) on success
    # @retval (False, UnresolvedReference) when a reference is undefined
    #
    def ExpandMacros(self, Value):
        EnvReference = gEnvRefPattern.findall(Value)
        for Ref in EnvReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        MacroReference = gMacroRefPattern.findall(Value)
        for Ref in MacroReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        return True, Value
+
+## ToolDefDict
+#
+# Load tools_def.txt in input workspace dir
+#
+# @param WorkSpace: Workspace dir
+#
+# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
+#
def ToolDefDict(WorkSpace):
    """Load tools_def.txt for WorkSpace and return the parsed object.

    The file location defaults to Conf/tools_def.txt and may be overridden
    by the TOOL_CHAIN_CONF setting in target.txt.

    Fix: the original assigned the override to the name
    gDefaultToolsDefFile, which made that name function-local (shadowing
    the module global) -- an UnboundLocalError whenever the setting was
    absent, and an empty setting made it try to load the workspace
    directory itself.  A separate local with an explicit fallback fixes
    both.
    """
    Target = TargetTxtDict(WorkSpace)
    ToolDef = ToolDefClassObject()
    # Start from the module-level default; override only with a non-empty
    # TOOL_CHAIN_CONF value from target.txt.
    ToolsDefFile = gDefaultToolsDefFile
    if DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
        ConfSetting = Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
        if ConfSetting:
            ToolsDefFile = ConfSetting
    ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(WorkSpace, ToolsDefFile)))
    return ToolDef
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Standalone smoke test: parse tools_def.txt for the current WORKSPACE.
    ToolDef = ToolDefDict(os.getenv("WORKSPACE"))
    pass
diff --git a/BaseTools/Source/Python/Common/XmlParser.py b/BaseTools/Source/Python/Common/XmlParser.py new file mode 100644 index 0000000000..4d60115925 --- /dev/null +++ b/BaseTools/Source/Python/Common/XmlParser.py @@ -0,0 +1,1754 @@ +## @file
+# This file is used to parse a xml file of .PKG file
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from xml.dom import minidom
+from XmlRoutines import *
+from CommonDataClass.DistributionPackageClass import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass.ModuleClass import *
+from Common.String import GetStringOfList
+
+#
+# Get Help Text
+#
def GetHelpTextList(HelpText):
    """Convert parsed help-text nodes into a list of HelpTextClass objects,
    copying over the language tag and text of each entry.
    """
    Result = []
    for Entry in HelpText:
        Converted = HelpTextClass()
        Converted.Lang = Entry.Lang
        Converted.String = Entry.HelpText
        Result.append(Converted)
    return Result
+
+# HeaderXml
# HeaderXml
class HeaderXml(object):
    """XML <-> object mapping for a module/package header section."""

    def __init__(self):
        # All header fields default to empty strings until FromXml runs.
        self.Name = ''
        self.BaseName = ''
        self.GUID = ''
        self.Version = ''
        self.Copyright = ''
        self.License = ''
        self.Abstract = ''
        self.Description = ''

    def FromXml(self, Item, Key):
        """Populate this object from the DOM node Item (rooted at Key) and
        return an equivalent ModuleHeaderClass instance.
        """
        NamePath = '%s/Name' % Key
        GuidPath = '%s/GUID' % Key
        self.Name = XmlElement(Item, NamePath)
        self.BaseName = XmlAttribute(XmlNode(Item, NamePath), 'BaseName')
        self.GUID = XmlElement(Item, GuidPath)
        self.Version = XmlAttribute(XmlNode(Item, GuidPath), 'Version')
        self.Copyright = XmlElement(Item, '%s/Copyright' % Key)
        self.License = XmlElement(Item, '%s/License' % Key)
        self.Abstract = XmlElement(Item, '%s/Abstract' % Key)
        self.Description = XmlElement(Item, '%s/Description' % Key)

        Header = ModuleHeaderClass()
        Header.Name = self.Name
        Header.BaseName = self.BaseName
        Header.Guid = self.GUID
        Header.Version = self.Version
        Header.Copyright = self.Copyright
        Header.License = self.License
        Header.Abstract = self.Abstract
        Header.Description = self.Description
        return Header

    def ToXml(self, Header, Key):
        """Build and return a DOM element named Key from a header object."""
        NameElement = CreateXmlElement('Name', Header.Name, [], [['BaseName', Header.BaseName]])
        GuidElement = CreateXmlElement('GUID', Header.Guid, [], [['Version', Header.Version]])
        NodeList = [NameElement,
                    GuidElement,
                    ['Abstract', Header.Abstract],
                    ['Copyright', Header.Copyright],
                    ['License', Header.License],
                    ['Description', Header.Description],
                    ]
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        return "Name = %s BaseName = %s GUID = %s Version = %s Copyright = %s License = %s Abstract = %s Description = %s" \
               % (self.Name, self.BaseName, self.GUID, self.Version, self.Copyright, self.License, self.Abstract, self.Description)
+
# DistributionPackageHeaderXml
#
# XML wrapper for the distribution-package header section.
class DistributionPackageHeaderXml(object):
    def __init__(self):
        self.Header = HeaderXml()
        self.ReadOnly = False
        self.RePackage = True
        self.Vendor = ''
        self.Date = ''
        self.Signature = ''
        self.XmlSpecification = ''

    def FromXml(self, Item, Key):
        # Parse the header subtree; returns a DistributionPackageHeaderClass.
        # NOTE(review): ReadOnly/RePackage come back as attribute *strings*
        # here, replacing the boolean defaults from __init__ — presumably
        # downstream code expects that; confirm before changing.
        self.ReadOnly = XmlAttribute(XmlNode(Item, '%s' % Key), 'ReadOnly')
        self.RePackage = XmlAttribute(XmlNode(Item, '%s' % Key), 'RePackage')
        self.Vendor = XmlElement(Item, '%s/Vendor' % Key)
        self.Date = XmlElement(Item, '%s/Date' % Key)
        self.Signature = XmlElement(Item, '%s/Signature' % Key)
        self.XmlSpecification = XmlElement(Item, '%s/XmlSpecification' % Key)
        self.Header.FromXml(Item, Key)

        Result = DistributionPackageHeaderClass()
        Result.ReadOnly = self.ReadOnly
        Result.RePackage = self.RePackage
        Result.Name = self.Header.Name
        Result.BaseName = self.Header.BaseName
        Result.Guid = self.Header.GUID
        Result.Version = self.Header.Version
        Result.Vendor = self.Vendor
        Result.Date = self.Date
        Result.Copyright = self.Header.Copyright
        Result.License = self.Header.License
        Result.Abstract = self.Header.Abstract
        Result.Description = self.Header.Description
        Result.Signature = self.Signature
        Result.XmlSpecification = self.XmlSpecification
        return Result

    def ToXml(self, DistributionPackageHeader, Key):
        # Serialize into a <Key> element (inverse of FromXml).
        AttributeList = [['ReadOnly', str(DistributionPackageHeader.ReadOnly)],
                         ['RePackage', str(DistributionPackageHeader.RePackage)]]
        NodeList = [
            CreateXmlElement('Name', DistributionPackageHeader.Name, [], [['BaseName', DistributionPackageHeader.BaseName]]),
            CreateXmlElement('GUID', DistributionPackageHeader.Guid, [], [['Version', DistributionPackageHeader.Version]]),
        ]
        for Tag in ('Vendor', 'Date', 'Copyright', 'License', 'Abstract',
                    'Description', 'Signature', 'XmlSpecification'):
            NodeList.append([Tag, getattr(DistributionPackageHeader, Tag)])
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        return "ReadOnly = %s RePackage = %s Vendor = %s Date = %s Signature = %s XmlSpecification = %s %s" \
               % (self.ReadOnly, self.RePackage, self.Vendor, self.Date, self.Signature, self.XmlSpecification, self.Header)
+
# PackageHeaderXml
#
# XML wrapper for a package header (common header plus PackagePath).
class PackageHeaderXml(object):
    def __init__(self):
        self.Header = HeaderXml()
        self.PackagePath = ''

    def FromXml(self, Item, Key):
        # Parse the package header subtree; returns a PackageHeaderClass.
        self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key)
        self.Header.FromXml(Item, Key)

        Result = PackageHeaderClass()
        Result.Name = self.Header.Name
        Result.BaseName = self.Header.BaseName
        Result.Guid = self.Header.GUID
        Result.Version = self.Header.Version
        Result.Copyright = self.Header.Copyright
        Result.License = self.Header.License
        Result.Abstract = self.Header.Abstract
        Result.Description = self.Header.Description
        # The XML PackagePath maps onto the data class's CombinePath field.
        Result.CombinePath = self.PackagePath
        return Result

    def ToXml(self, PackageHeader, Key):
        # Serialize PackageHeader into a <Key> element (inverse of FromXml).
        NodeList = [
            CreateXmlElement('Name', PackageHeader.Name, [], [['BaseName', PackageHeader.BaseName]]),
            CreateXmlElement('GUID', PackageHeader.Guid, [], [['Version', PackageHeader.Version]]),
            ['Copyright', PackageHeader.Copyright],
            ['License', PackageHeader.License],
            ['Abstract', PackageHeader.Abstract],
            ['Description', PackageHeader.Description],
            ['PackagePath', PackageHeader.CombinePath],
        ]
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        return "PackagePath = %s %s" \
               % (self.PackagePath, self.Header)
+
# ClonedFromXml
#
# XML wrapper for a <ClonedFrom> GUID/Version pair.
class ClonedFromXml(object):
    def __init__(self):
        self.GUID = ''
        self.Version = ''

    def FromXml(self, Item, Key):
        # Parse GUID and its Version attribute; returns a ClonedRecordClass,
        # or None when the element is entirely absent/empty.
        self.GUID = XmlElement(Item, '%s/GUID' % Key)
        self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')

        if self.GUID == '' and self.Version == '':
            return None

        ClonedFrom = ClonedRecordClass()
        ClonedFrom.PackageGuid = self.GUID
        ClonedFrom.PackageVersion = self.Version

        return ClonedFrom

    def ToXml(self, ClonedFrom, Key):
        # Serialize ClonedFrom into a <Key> element (inverse of FromXml).
        # Fix: removed the dead 'Root = minidom.Document()' — it was
        # immediately overwritten by CreateXmlElement and never used.
        Element1 = CreateXmlElement('GUID', ClonedFrom.PackageGuid, [], [['Version', ClonedFrom.PackageVersion]])
        Root = CreateXmlElement('%s' % Key, '', [Element1], [])

        return Root

    def __str__(self):
        return "GUID = %s Version = %s" % (self.GUID, self.Version)
+
# CommonDefinesXml
#
# Shared Usage/SupArchList/SupModList/FeatureFlag attributes that many
# elements in this schema carry.
class CommonDefinesXml(object):
    # The four XML attribute names this wrapper mirrors, in display order.
    _ATTRIBUTES = ('Usage', 'SupArchList', 'SupModList', 'FeatureFlag')

    def __init__(self):
        for Name in self._ATTRIBUTES:
            setattr(self, Name, '')

    def FromXml(self, Item, Key):
        # Read each attribute directly off the given node.
        for Name in self._ATTRIBUTES:
            setattr(self, Name, XmlAttribute(Item, Name))

    def ToXml(self):
        # Serialization is handled by the owning element; intentionally a no-op.
        pass

    def __str__(self):
        return "Usage = %s SupArchList = %s SupModList = %s FeatureFlag = %s" % (self.Usage, self.SupArchList, self.SupModList, self.FeatureFlag)
+
# HelpTextXml
#
# One localized help-text entry: element text plus its Lang attribute.
class HelpTextXml(object):
    def __init__(self):
        self.HelpText = ''
        self.Lang = ''

    def FromXml(self, Item, Key):
        # Read the HelpText element text and the Lang attribute off Item.
        self.HelpText = XmlElement(Item, 'HelpText')
        self.Lang = XmlAttribute(Item, 'Lang')

    def ToXml(self, HelpText, Key = 'HelpText'):
        # Serialize a HelpText data object into a <Key Lang="..."> element.
        return CreateXmlElement('%s' % Key, HelpText.String, [], [['Lang', HelpText.Lang]])

    def __str__(self):
        return "HelpText = {0} Lang = {1}".format(self.HelpText, self.Lang)
+
# LibraryClassXml
#
# XML wrapper for a library-class declaration.
class LibraryClassXml(object):
    def __init__(self):
        self.Keyword = ''
        self.HeaderFile = ''
        self.RecommendedInstanceGuid = ''
        self.RecommendedInstanceVersion = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a LibraryClassClass.
        self.Keyword = XmlAttribute(XmlNode(Item, '%s' % Key), 'Keyword')
        if self.Keyword == '':
            # Older documents carry the keyword as a child element instead
            # of an attribute.
            self.Keyword = XmlElement(Item, '%s/Keyword' % Key)
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        self.RecommendedInstanceGuid = XmlElement(Item, '%s/RecommendedInstance/GUID' % Key)
        self.RecommendedInstanceVersion = XmlAttribute(XmlNode(Item, '%s/RecommendedInstance/GUID' % Key), 'Version')
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        Result = LibraryClassClass()
        Result.LibraryClass = self.Keyword
        Result.IncludeHeader = self.HeaderFile
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.SupModuleList = self.CommonDefines.SupModList
        Result.RecommendedInstanceGuid = self.RecommendedInstanceGuid
        Result.RecommendedInstanceVersion = self.RecommendedInstanceVersion
        Result.HelpTextList = GetHelpTextList(self.HelpText)
        return Result

    def ToXml(self, LibraryClass, Key):
        # Serialize LibraryClass into a <Key> element (inverse of FromXml).
        GuidElement = CreateXmlElement('GUID', LibraryClass.RecommendedInstanceGuid, [],
                                       [['Version', LibraryClass.RecommendedInstanceVersion]])
        Instance = CreateXmlElement('RecommendedInstance', '', [GuidElement], [])
        AttributeList = [['Keyword', LibraryClass.LibraryClass],
                         ['SupArchList', GetStringOfList(LibraryClass.SupArchList)],
                         ['SupModList', GetStringOfList(LibraryClass.SupModuleList)]
                        ]
        NodeList = [['HeaderFile', LibraryClass.IncludeHeader], Instance]
        NodeList += [HelpTextXml().ToXml(Item) for Item in LibraryClass.HelpTextList]
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        Parts = ["Keyword = %s HeaderFile = %s RecommendedInstanceGuid = %s RecommendedInstanceVersion = %s %s"
                 % (self.Keyword, self.HeaderFile, self.RecommendedInstanceGuid, self.RecommendedInstanceVersion,
                    self.CommonDefines)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
# IndustryStandardHeaderXml
#
# XML wrapper for an industry-standard include header declaration.
class IndustryStandardHeaderXml(object):
    def __init__(self):
        self.HeaderFile = ''
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns an IncludeClass.
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        Include = IncludeClass()
        Include.FilePath = self.HeaderFile
        Include.HelpTextList = GetHelpTextList(self.HelpText)
        return Include

    def ToXml(self, IndustryStandardHeader, Key):
        # Serialize into a <Key> element (inverse of FromXml).
        NodeList = [['HeaderFile', IndustryStandardHeader.FilePath]]
        NodeList += [HelpTextXml().ToXml(Item) for Item in IndustryStandardHeader.HelpTextList]
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        Parts = ["HeaderFile = %s" % (self.HeaderFile)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
# PackageIncludeHeaderXml
#
# XML wrapper for a package include header (header file plus arch/module
# applicability).
class PackageIncludeHeaderXml(object):
    def __init__(self):
        self.HeaderFile = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns an IncludeClass.
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        # NOTE(review): unlike most siblings, the common attributes are read
        # from the HeaderFile node here rather than the Key node — preserved
        # as-is; confirm against the schema before changing.
        self.CommonDefines.FromXml(XmlNode(Item, '%s/HeaderFile' % Key), Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        Include = IncludeClass()
        Include.FilePath = self.HeaderFile
        Include.SupArchList = self.CommonDefines.SupArchList
        Include.SupModuleList = self.CommonDefines.SupModList
        Include.HelpTextList = GetHelpTextList(self.HelpText)
        return Include

    def ToXml(self, PackageIncludeHeader, Key):
        # Serialize into a <Key> element (inverse of FromXml).
        AttributeList = [['SupArchList', PackageIncludeHeader.SupArchList],
                         ['SupModList', PackageIncludeHeader.SupModuleList]
                        ]
        NodeList = [['HeaderFile', PackageIncludeHeader.FilePath]]
        NodeList += [HelpTextXml().ToXml(Item) for Item in PackageIncludeHeader.HelpTextList]
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        Parts = ["HeaderFile = %s\n\t%s" % (self.HeaderFile, self.CommonDefines)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
#GUID/Protocol/Ppi
#
# XML wrapper shared by GUID, Protocol and PPI declarations.
class GuidProtocolPpiXml(object):
    def __init__(self):
        self.UiName = ''
        self.GuidTypes = ''
        self.Notify = ''
        self.CName = ''
        self.GuidValue = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a GuidProtocolPpiCommonClass.
        KeyNode = XmlNode(Item, '%s' % Key)
        self.UiName = XmlAttribute(KeyNode, 'UiName')
        self.GuidTypes = XmlAttribute(KeyNode, 'GuidTypes')
        # NOTE: GuidType and VariableName are created here only, not in
        # __init__ — instances that never ran FromXml() lack them.
        self.GuidType = XmlAttribute(KeyNode, 'GuidType')
        self.Notify = XmlAttribute(KeyNode, 'Notify')
        self.CName = XmlElement(Item, '%s/CName' % Key)
        self.GuidValue = XmlElement(Item, '%s/GuidValue' % Key)
        self.VariableName = XmlElement(Item, '%s/VariableName' % Key)
        self.CommonDefines.FromXml(KeyNode, Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        Result = GuidProtocolPpiCommonClass()
        Result.Name = self.UiName
        Result.CName = self.CName
        Result.Guid = self.GuidValue
        Result.VariableName = self.VariableName
        Result.Notify = self.Notify
        Result.Usage = self.CommonDefines.Usage
        Result.FeatureFlag = self.CommonDefines.FeatureFlag
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.SupModuleList = self.CommonDefines.SupModList
        Result.GuidTypeLists = self.GuidTypes
        Result.GuidTypeList = self.GuidType
        Result.HelpTextList = GetHelpTextList(self.HelpText)
        return Result

    def ToXml(self, GuidProtocolPpi, Key):
        # Serialize into a <Key> element (inverse of FromXml).
        AttributeList = [['Usage', GetStringOfList(GuidProtocolPpi.Usage)],
                         ['UiName', GuidProtocolPpi.Name],
                         ['GuidTypes', GetStringOfList(GuidProtocolPpi.GuidTypeLists)],
                         ['GuidType', GetStringOfList(GuidProtocolPpi.GuidTypeList)],
                         ['Notify', str(GuidProtocolPpi.Notify)],
                         ['SupArchList', GetStringOfList(GuidProtocolPpi.SupArchList)],
                         ['SupModList', GetStringOfList(GuidProtocolPpi.SupModuleList)],
                         ['FeatureFlag', GuidProtocolPpi.FeatureFlag]
                        ]
        NodeList = [['CName', GuidProtocolPpi.CName],
                    ['GuidValue', GuidProtocolPpi.Guid],
                    ['VariableName', GuidProtocolPpi.VariableName]
                   ]
        NodeList += [HelpTextXml().ToXml(Item) for Item in GuidProtocolPpi.HelpTextList]
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        Parts = ["UiName = %s Notify = %s GuidTypes = %s CName = %s GuidValue = %s %s"
                 % (self.UiName, self.Notify, self.GuidTypes, self.CName, self.GuidValue, self.CommonDefines)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
# PcdErrorXml
#
# XML wrapper for one PcdError block (valid values/range/expression plus
# localized error messages).
class PcdErrorXml(object):
    def __init__(self):
        self.ValidValueList = ''
        self.ValidValueListLang = ''
        self.ValidValueRange = ''
        self.Expression = ''
        self.ErrorNumber = ''
        self.ErrorMessage = []   # list of (lang, message) tuples

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a PcdErrorClass.
        self.ValidValueList = XmlElement(Item, '%s/ValidValueList' % Key)
        self.ValidValueListLang = XmlAttribute(XmlNode(Item, '%s/ValidValueList' % Key), 'Lang')
        self.ValidValueRange = XmlElement(Item, '%s/ValidValueRange' % Key)
        self.Expression = XmlElement(Item, '%s/Expression' % Key)
        self.ErrorNumber = XmlElement(Item, '%s/ErrorNumber' % Key)
        for Node in XmlList(Item, '%s/ErrorMessage' % Key):
            Message = XmlElement(Node, 'ErrorMessage')
            Lang = XmlAttribute(XmlNode(Node, 'ErrorMessage'), 'Lang')
            self.ErrorMessage.append((Lang, Message))

        Error = PcdErrorClass()
        Error.ValidValueList = self.ValidValueList
        Error.ValidValueListLang = self.ValidValueListLang
        Error.ValidValueRange = self.ValidValueRange
        Error.Expression = self.Expression
        Error.ErrorNumber = self.ErrorNumber
        Error.ErrorMessage = self.ErrorMessage
        return Error

    def ToXml(self, PcdError, Key):
        # Serialize PcdError into a <Key> element (inverse of FromXml).
        NodeList = [
            CreateXmlElement('ValidValueList', PcdError.ValidValueList, [], [['Lang', PcdError.ValidValueListLang]]),
            ['ValidValueRange', PcdError.ValidValueRange],
            ['Expression', PcdError.Expression],
            ['ErrorNumber', PcdError.ErrorNumber],
        ]
        NodeList += [CreateXmlElement('ErrorMessage', Message, [], [['Lang', Lang]])
                     for (Lang, Message) in PcdError.ErrorMessage]
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        return "ValidValueList = %s ValidValueListLang = %s ValidValueRange = %s Expression = %s ErrorNumber = %s %s" \
               % (self.ValidValueList, self.ValidValueListLang, self.ValidValueRange, self.Expression, self.ErrorNumber, self.ErrorMessage)
+
# PcdEntryXml
#
# XML wrapper for a single PCD entry.  FromXml() parses the subtree under
# Key into a CommonDataClass PcdClass; ToXml() serializes a PcdClass back
# into an XML element.  The field set covers several PCD section flavors
# (declaration and usage), so many fields can legitimately be empty for a
# given document — which ones are populated depends on the caller's schema.
class PcdEntryXml(object):
    def __init__(self):
        # All scalar fields default to the empty string until FromXml() runs.
        self.PcdItemType = ''
        self.PcdUsage = ''
        self.TokenSpaceGuidCName = ''
        self.TokenSpaceGuidValue = ''
        self.Token = ''
        self.CName = ''
        self.PcdCName = ''
        self.DatumType = ''
        self.ValidUsage = ''
        self.DefaultValue = ''
        self.MaxDatumSize = ''
        self.Value = ''
        self.Offset = ''
        # Shared Usage/SupArchList/SupModList/FeatureFlag attributes.
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []    # list of HelpTextXml
        self.PcdError = []    # list of PcdErrorXml

    # Parse the <Key> subtree of Item and return a populated PcdClass.
    # PcdItemType/PcdUsage are attributes on the Key node itself; the other
    # scalar fields are child elements.
    def FromXml(self, Item, Key):
        self.PcdItemType = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
        self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
        self.TokenSpaceGuidCName = XmlElement(Item, '%s/TokenSpaceGuidCName' % Key)
        self.TokenSpaceGuidValue = XmlElement(Item, '%s/TokenSpaceGuidValue' % Key)
        self.Token = XmlElement(Item, '%s/Token' % Key)
        self.CName = XmlElement(Item, '%s/CName' % Key)
        self.PcdCName = XmlElement(Item, '%s/PcdCName' % Key)
        self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
        self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
        self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
        self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
        self.Value = XmlElement(Item, '%s/Value' % Key)
        self.Offset = XmlElement(Item, '%s/Offset' % Key)
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)
        for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
            PcdErrorObj = PcdErrorXml()
            # Sub-parse is keyed on 'PcdError' relative to the child node.
            PcdErrorObj.FromXml(PcdErrorItem, 'PcdError')
            self.PcdError.append(PcdErrorObj)

        # Copy every parsed field onto the shared data-class representation.
        PcdEntry = PcdClass()
        PcdEntry.SupArchList = self.CommonDefines.SupArchList
        PcdEntry.SupModuleList = self.CommonDefines.SupModList
        PcdEntry.TokenSpaceGuidCName = self.TokenSpaceGuidCName
        PcdEntry.TokenSpaceGuidValue = self.TokenSpaceGuidValue
        PcdEntry.Token = self.Token
        PcdEntry.CName = self.CName
        PcdEntry.PcdCName = self.PcdCName
        PcdEntry.DatumType = self.DatumType
        PcdEntry.ValidUsage = self.ValidUsage
        PcdEntry.PcdUsage = self.PcdUsage
        PcdEntry.Usage = self.CommonDefines.Usage
        PcdEntry.DefaultValue = self.DefaultValue
        PcdEntry.Value = self.Value
        PcdEntry.Offset = self.Offset
        PcdEntry.MaxDatumSize = self.MaxDatumSize
        PcdEntry.FeatureFlag = self.CommonDefines.FeatureFlag
        PcdEntry.PcdItemType = self.PcdItemType
        PcdEntry.HelpTextList = GetHelpTextList(self.HelpText)
        # PcdErrors keeps the PcdErrorXml wrappers themselves; they expose
        # the same attribute names PcdErrorXml.ToXml() reads back below.
        PcdEntry.PcdErrors = self.PcdError

        return PcdEntry

    # Serialize PcdEntry into a <Key> element (inverse of FromXml).
    def ToXml(self, PcdEntry, Key):
        AttributeList = [['SupArchList', GetStringOfList(PcdEntry.SupArchList)],
                         ['PcdUsage', PcdEntry.PcdUsage],
                         ['PcdItemType', PcdEntry.PcdItemType],
                         ['FeatureFlag', PcdEntry.FeatureFlag],
                         ['SupModList', GetStringOfList(PcdEntry.SupModuleList)]
                        ]
        NodeList = [['TokenSpaceGuidCName', PcdEntry.TokenSpaceGuidCName],
                    ['TokenSpaceGuidValue', PcdEntry.TokenSpaceGuidValue],
                    ['Token', PcdEntry.Token],
                    ['CName', PcdEntry.CName],
                    ['PcdCName', PcdEntry.PcdCName],
                    ['DatumType', PcdEntry.DatumType],
                    ['ValidUsage', GetStringOfList(PcdEntry.ValidUsage)],
                    ['DefaultValue', PcdEntry.DefaultValue],
                    ['Value', PcdEntry.Value],
                    ['Offset', PcdEntry.Offset],
                    ['MaxDatumSize', PcdEntry.MaxDatumSize],
                   ]
        for Item in PcdEntry.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item, 'HelpText'))
        for Item in PcdEntry.PcdErrors:
            Tmp = PcdErrorXml()
            NodeList.append(Tmp.ToXml(Item, 'PcdError'))

        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "PcdItemType = %s PcdUsage = %s TokenSpaceGuidCName = %s TokenSpaceGuidValue = %s Token = %s CName = %s PcdCName = %s DatumType = %s ValidUsage = %s DefaultValue = %s MaxDatumSize = %s Value = %s Offset = %s %s" \
              % (self.PcdItemType, self.PcdUsage, self.TokenSpaceGuidCName, self.TokenSpaceGuidValue, self.Token, self.CName, self.PcdCName, self.DatumType, self.ValidUsage, self.DefaultValue, self.MaxDatumSize, self.Value, self.Offset, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        for Item in self.PcdError:
            Str = Str + "\n\tPcdError:" + str(Item)
        return Str
+
# PcdCheckXml
#
# XML wrapper for a single PcdCheck expression string.
class PcdCheckXml(object):
    def __init__(self):
        self.PcdCheck = ''

    def FromXml(self, Item, Key):
        # Read the PcdCheck element text and return it directly.
        self.PcdCheck = XmlElement(Item, 'PcdCheck')
        return self.PcdCheck

    def ToXml(self, PcdCheck, Key):
        # Wrap the given PcdCheck string in a <Key> element.
        return CreateXmlElement('%s' % Key, PcdCheck, [], [])

    def __str__(self):
        return "PcdCheck = %s" % (self.PcdCheck)
+
# MiscellaneousFileXml
#
# XML wrapper for a MiscellaneousFiles section.  FromXml()/ToXml() handle
# the package-level form; FromXml2()/ToXml2() the distribution-level form,
# which nests the header in an explicit <Header> node and carries a Name.
class MiscellaneousFileXml(object):
    def __init__(self):
        self.Header = HeaderXml()
        self.Files = []   # list of [Filename, Executable] pairs

    def FromXml(self, Item, Key):
        # Parse a package-level section; returns a MiscFileClass.
        # NOTE(review): the header is parsed twice — first against Item/Key,
        # then against the nested Header node, which overwrites the first
        # result.  Kept as-is to preserve behavior; the second parse wins.
        self.Header.FromXml(Item, Key)
        NewItem = XmlNode(Item, '%s/Header' % Key)
        self.Header.FromXml(NewItem, 'Header')
        self._ParseFiles(Item, Key)
        return self._BuildMiscFile(WithName = False)

    def FromXml2(self, Item, Key):
        # Parse a distribution-level section; returns a MiscFileClass
        # (including the header Name).
        NewItem = XmlNode(Item, '%s/Header' % Key)
        self.Header.FromXml(NewItem, 'Header')
        self._ParseFiles(Item, Key)
        return self._BuildMiscFile(WithName = True)

    def _ParseFiles(self, Item, Key):
        # Collect [Filename, Executable] pairs from each <Filename> child.
        for SubItem in XmlList(Item, '%s/Filename' % Key):
            Filename = XmlElement(SubItem, '%s/Filename' % Key)
            Executable = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
            self.Files.append([Filename, Executable])

    def _BuildMiscFile(self, WithName):
        # Copy the parsed header and file list into a MiscFileClass.
        MiscFile = MiscFileClass()
        if WithName:
            MiscFile.Name = self.Header.Name
        MiscFile.Copyright = self.Header.Copyright
        MiscFile.License = self.Header.License
        MiscFile.Abstract = self.Header.Abstract
        MiscFile.Description = self.Header.Description
        for File in self.Files:
            FileObj = FileClass()
            FileObj.Filename = File[0]
            FileObj.Executable = File[1]
            MiscFile.Files.append(FileObj)
        return MiscFile

    def ToXml(self, MiscFile, Key):
        # Serialize the package-level form; returns None when MiscFile is
        # empty.  Bug fix: the original referenced Root (and NodeList)
        # without initialization when MiscFile was None/falsy, raising
        # UnboundLocalError instead of failing cleanly.
        if not MiscFile:
            return None
        NodeList = [['Copyright', MiscFile.Copyright],
                    ['License', MiscFile.License],
                    ['Abstract', MiscFile.Abstract],
                    ['Description', MiscFile.Description],
                   ]
        for File in MiscFile.Files:
            NodeList.append(CreateXmlElement('Filename', File.Filename, [], [['Executable', File.Executable]]))
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def ToXml2(self, MiscFile, Key):
        # Serialize the distribution-level form (nested <Header> node);
        # returns None when MiscFile is empty (same fix as ToXml above).
        if not MiscFile:
            return None
        HeaderNode = CreateXmlElement('Header', '',
                                      [['Name', MiscFile.Name],
                                       ['Copyright', MiscFile.Copyright],
                                       ['License', MiscFile.License],
                                       ['Abstract', MiscFile.Abstract],
                                       ['Description', MiscFile.Description],
                                      ], [])
        NodeList = [HeaderNode]
        for File in MiscFile.Files:
            NodeList.append(CreateXmlElement('Filename', File.Filename, [], [['Executable', File.Executable]]))
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        Str = str(self.Header)
        for Item in self.Files:
            Str = Str + '\n\tFilename:' + str(Item)
        return Str
+
# UserExtensionsXml
#
# XML wrapper for a UserExtensions section (UserId/Identifier attributes
# plus Define and BuildOption children).
class UserExtensionsXml(object):
    def __init__(self):
        self.UserId = ''
        self.Identifier = ''
        self.Defines = []
        self.BuildOptions = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a UserExtensionsClass.
        Node = XmlNode(Item, '%s' % Key)
        self.UserId = XmlAttribute(Node, 'UserId')
        self.Identifier = XmlAttribute(Node, 'Identifier')
        self.Defines += [XmlElement(SubItem, '%s/Define' % Key)
                         for SubItem in XmlList(Item, '%s/Define' % Key)]
        self.BuildOptions += [XmlElement(SubItem, '%s/BuildOption' % Key)
                              for SubItem in XmlList(Item, '%s/BuildOption' % Key)]

        UserExtension = UserExtensionsClass()
        UserExtension.UserID = self.UserId
        UserExtension.Identifier = self.Identifier
        UserExtension.Defines = self.Defines
        UserExtension.BuildOptions = self.BuildOptions
        return UserExtension

    def ToXml(self, UserExtension, Key):
        # Serialize UserExtension into a <Key> element (inverse of FromXml).
        AttributeList = [['UserId', str(UserExtension.UserID)],
                         ['Identifier', str(UserExtension.Identifier)]
                        ]
        NodeList = [['Define', Item] for Item in UserExtension.Defines]
        NodeList += [['BuildOption', Item] for Item in UserExtension.BuildOptions]
        # NOTE(review): UserExtension.Content is assumed to be supplied by
        # UserExtensionsClass; FromXml() in this file never sets it — confirm.
        return CreateXmlElement('%s' % Key, UserExtension.Content, NodeList, AttributeList)

    def __str__(self):
        return ("UserId = %s Identifier = %s" % (self.UserId, self.Identifier)
                + '\n\tDefines:' + str(self.Defines)
                + '\n\tBuildOptions:' + str(self.BuildOptions))
+
# BootModeXml
#
# XML wrapper for a module BootMode declaration.
class BootModeXml(object):
    def __init__(self):
        self.SupportedBootModes = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a ModuleBootModeClass.
        self.SupportedBootModes = XmlElement(Item, '%s/SupportedBootModes' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        BootMode = ModuleBootModeClass()
        BootMode.Name = self.SupportedBootModes
        BootMode.SupArchList = self.CommonDefines.SupArchList
        BootMode.Usage = self.CommonDefines.Usage
        BootMode.FeatureFlag = self.CommonDefines.FeatureFlag
        BootMode.HelpTextList = GetHelpTextList(self.HelpText)
        return BootMode

    def ToXml(self, BootMode, Key):
        # Serialize BootMode into a <Key> element (inverse of FromXml).
        AttributeList = [['Usage', BootMode.Usage],
                         ['SupArchList', GetStringOfList(BootMode.SupArchList)],
                         ['FeatureFlag', BootMode.FeatureFlag],
                        ]
        NodeList = [['SupportedBootModes', BootMode.Name]]
        NodeList += [HelpTextXml().ToXml(Item, 'HelpText') for Item in BootMode.HelpTextList]
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        Parts = ["SupportedBootModes = %s %s" % (self.SupportedBootModes, self.CommonDefines)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
# EventXml
#
# XML wrapper for a module Event declaration.
class EventXml(object):
    def __init__(self):
        self.EventType = ''
        self.Name = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse the subtree under Key; returns a ModuleEventClass.
        self.EventType = XmlAttribute(XmlNode(Item, '%s' % Key), 'EventType')
        self.Name = XmlElement(Item, '%s' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for Node in XmlList(Item, '%s/HelpText' % Key):
            Text = HelpTextXml()
            Text.FromXml(Node, '%s/HelpText' % Key)
            self.HelpText.append(Text)

        Event = ModuleEventClass()
        Event.Type = self.EventType
        Event.GuidCName = self.Name
        Event.SupArchList = self.CommonDefines.SupArchList
        Event.Usage = self.CommonDefines.Usage
        Event.FeatureFlag = self.CommonDefines.FeatureFlag
        Event.HelpTextList = GetHelpTextList(self.HelpText)
        return Event

    def ToXml(self, Event, Key):
        # Serialize Event into a <Key> element (inverse of FromXml).
        AttributeList = [['EventType', Event.Type],
                         ['Usage', Event.Usage],
                         ['SupArchList', GetStringOfList(Event.SupArchList)],
                         ['FeatureFlag', Event.FeatureFlag],
                        ]
        NodeList = [HelpTextXml().ToXml(Item, 'HelpText') for Item in Event.HelpTextList]
        return CreateXmlElement('%s' % Key, Event.GuidCName, NodeList, AttributeList)

    def __str__(self):
        Parts = ["EventType = %s %s" % (self.EventType, self.CommonDefines)]
        Parts += [str(Item) for Item in self.HelpText]
        return '\n\t'.join(Parts)
+
# HobXml
#
# XML wrapper for a module HOB declaration.
class HobXml(object):
    def __init__(self):
        self.HobType = ''
        self.Name = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        # Parse a <HOB> subtree; returns a ModuleHobClass.
        self.HobType = XmlAttribute(XmlNode(Item, '%s' % Key), 'HobType')
        self.Name = XmlElement(Item, '%s' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Hob = ModuleHobClass()
        Hob.Type = self.HobType
        Hob.GuidCName = self.Name
        Hob.SupArchList = self.CommonDefines.SupArchList
        Hob.Usage = self.CommonDefines.Usage
        Hob.FeatureFlag = self.CommonDefines.FeatureFlag
        Hob.HelpTextList = GetHelpTextList(self.HelpText)

        return Hob

    def ToXml(self, Hob, Key):
        # Serialize Hob into a <Key> element (inverse of FromXml).
        # Bug fix: the HOB type was emitted under the attribute name
        # 'EventType' (copy-paste from EventXml), so FromXml() could never
        # read it back; it is now written as 'HobType' to match FromXml().
        AttributeList = [['HobType', Hob.Type],
                         ['Usage', Hob.Usage],
                         ['SupArchList', GetStringOfList(Hob.SupArchList)],
                         ['FeatureFlag', Hob.FeatureFlag],
                        ]
        NodeList = []
        for Item in Hob.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item, 'HelpText'))
        Root = CreateXmlElement('%s' % Key, Hob.GuidCName, NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "HobType = %s %s" % (self.HobType, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# ModulePropertyXml
#
# XML wrapper for the ModuleProperties section.  FromXml() fills (or
# creates) a ModuleHeaderClass and collects BootMode/Event/HOB lists;
# ToXml() serializes them back.
class ModulePropertyXml(object):
    def __init__(self):
        self.CommonDefines = CommonDefinesXml()
        self.ModuleType = ''
        self.Path = ''
        self.PcdIsDriver = ''
        self.UefiSpecificationVersion = ''
        self.PiSpecificationVersion = ''
        self.Specification = ''
        self.SpecificationVersion = ''
        self.BootModes = []
        self.Events = []
        self.HOBs = []

    def FromXml(self, Item, Key, Header = None):
        # Parse the subtree under Key.  If Header is given it is updated in
        # place, otherwise a fresh ModuleHeaderClass is created.
        # Returns (Header, BootModes, Events, HOBs).
        self.CommonDefines.FromXml(Item, Key)
        self.ModuleType = XmlElement(Item, '%s/ModuleType' % Key)
        self.Path = XmlElement(Item, '%s/Path' % Key)
        self.PcdIsDriver = XmlElement(Item, '%s/PcdIsDriver' % Key)
        self.UefiSpecificationVersion = XmlElement(Item, '%s/UefiSpecificationVersion' % Key)
        self.PiSpecificationVersion = XmlElement(Item, '%s/PiSpecificationVersion' % Key)
        self.Specification = XmlElement(Item, '%s/Specification' % Key)
        self.SpecificationVersion = XmlAttribute(XmlNode(Item, '%s/Specification' % Key), 'Version')
        for SubItem in XmlList(Item, '%s/BootMode' % Key):
            A = BootModeXml()
            BootMode = A.FromXml(SubItem, 'BootMode')
            self.BootModes.append(BootMode)
        for SubItem in XmlList(Item, '%s/Event' % Key):
            A = EventXml()
            Event = A.FromXml(SubItem, 'Event')
            self.Events.append(Event)
        for SubItem in XmlList(Item, '%s/HOB' % Key):
            A = HobXml()
            Hob = A.FromXml(SubItem, 'HOB')
            self.HOBs.append(Hob)

        if Header is None:
            Header = ModuleHeaderClass()

        Header.ModuleType = self.ModuleType
        Header.SupArchList = self.CommonDefines.SupArchList
        Header.SupModuleList = self.CommonDefines.SupModList
        Header.CombinePath = self.Path
        Header.PcdIsDriver = self.PcdIsDriver
        Header.UefiSpecificationVersion = self.UefiSpecificationVersion
        Header.PiSpecificationVersion = self.PiSpecificationVersion

        return Header, self.BootModes, self.Events, self.HOBs

    def ToXml(self, Header, BootModes, Events, Hobs, Key):
        # Serialize the header plus BootMode/Event/HOB lists into a <Key>
        # element (inverse of FromXml).
        AttributeList = [['SupArchList', GetStringOfList(Header.SupArchList)],
                         ['SupModList', GetStringOfList(Header.SupModuleList)],
                        ]
        NodeList = [['ModuleType', Header.ModuleType],
                    ['Path', Header.CombinePath],
                    ['PcdIsDriver', Header.PcdIsDriver],
                    ['UefiSpecificationVersion', Header.UefiSpecificationVersion],
                    ['PiSpecificationVersion', Header.PiSpecificationVersion],
                   ]
        for Item in BootModes:
            Tmp = BootModeXml()
            NodeList.append(Tmp.ToXml(Item, 'BootMode'))
        for Item in Events:
            Tmp = EventXml()
            NodeList.append(Tmp.ToXml(Item, 'Event'))
        for Item in Hobs:
            Tmp = HobXml()
            # Bug fix: FromXml() parses '%s/HOB' children, but this emitted
            # the tag as 'Hob'.  XML names are case-sensitive, so written
            # HOBs could never be read back; emit 'HOB' to match FromXml().
            NodeList.append(Tmp.ToXml(Item, 'HOB'))
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "ModuleType = %s Path = %s PcdIsDriver = %s UefiSpecificationVersion = %s PiSpecificationVersion = %s Specification = %s SpecificationVersion = %s %s" \
              % (self.ModuleType, self.Path, self.PcdIsDriver, self.UefiSpecificationVersion, self.PiSpecificationVersion, \
                 self.Specification, self.SpecificationVersion, self.CommonDefines)
        for Item in self.BootModes:
            Str = Str + '\n\t' + str(Item)
        for Item in self.Events:
            Str = Str + '\n\t' + str(Item)
        for Item in self.HOBs:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# SourceFileXml
class SourceFileXml(object):
    """XML mapper for a module source file (<Filename> element)."""

    def __init__(self):
        self.SourceFile = ''
        self.ToolChainFamily = ''
        self.FileType = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse Item and return a populated ModuleSourceFileClass."""
        self.ToolChainFamily = XmlAttribute(Item, 'Family')
        self.FileType = XmlAttribute(Item, 'FileType')
        self.SourceFile = XmlElement(Item, 'Filename')
        self.CommonDefines.FromXml(Item, Key)

        Result = ModuleSourceFileClass()
        Result.SourceFile = self.SourceFile
        Result.FileType = self.FileType
        Result.ToolChainFamily = self.ToolChainFamily
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.FeatureFlag = self.CommonDefines.FeatureFlag
        return Result

    def ToXml(self, SourceFile, Key):
        """Serialize SourceFile into an XML element named Key."""
        Attributes = [
            ['SupArchList', GetStringOfList(SourceFile.SupArchList)],
            ['Family', SourceFile.ToolChainFamily],
            ['FileType', SourceFile.FileType],
            ['FeatureFlag', SourceFile.FeatureFlag],
        ]
        return CreateXmlElement('%s' % Key, SourceFile.SourceFile, [], Attributes)
+
# FilenameXml
class FilenameXml(object):
    """XML mapper for a generic <Filename> element (binary/tool files)."""

    def __init__(self):
        self.OS = ''
        self.Family = ''
        self.FileType = ''
        self.Filename = ''
        self.Executable = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse Item and return a populated FileClass.

        Note: the OS attribute is kept only on this mapper; it is not
        copied onto the returned FileClass (matches existing callers).
        """
        self.OS = XmlAttribute(Item, 'OS')
        self.Family = XmlAttribute(Item, 'Family')
        self.FileType = XmlAttribute(Item, 'FileType')
        self.Filename = XmlElement(Item, 'Filename')
        self.Executable = XmlElement(Item, 'Executable')
        self.CommonDefines.FromXml(Item, Key)

        Result = FileClass()
        Result.Family = self.Family
        Result.FileType = self.FileType
        Result.Filename = self.Filename
        Result.Executable = self.Executable
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.FeatureFlag = self.CommonDefines.FeatureFlag
        return Result

    def ToXml(self, Filename, Key):
        """Serialize Filename (a FileClass) into an XML element named Key."""
        Attributes = [
            ['SupArchList', GetStringOfList(Filename.SupArchList)],
            ['Family', Filename.Family],
            ['FileType', Filename.FileType],
            ['Executable', Filename.Executable],
            ['FeatureFlag', Filename.FeatureFlag],
        ]
        Children = [['Filename', Filename.Filename]]
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        """Readable dump of the file description."""
        return "OS = %s Family = %s FileType = %s Filename = %s Executable = %s %s" \
               % (self.OS, self.Family, self.FileType, self.Filename, self.Executable, self.CommonDefines)
+
class BinaryFileXml(object):
    """XML mapper for a module <BinaryFile> element and its AsBuilt data."""

    def __init__(self):
        self.Filenames = []
        self.PatchPcdValues = []
        self.PcdExValues = []
        self.LibraryInstances = []
        self.BuildFlags = []

    def FromXml(self, Item, Key):
        """Parse Item and return a populated ModuleBinaryFileClass."""
        BinaryFile = ModuleBinaryFileClass()
        for SubItem in XmlList(Item, '%s/Filename' % Key):
            A = FilenameXml()
            B = A.FromXml(SubItem, 'Filename')
            BinaryFile.Filenames.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/PatchPcdValue' % Key):
            A = PcdEntryXml()
            B = A.FromXml(SubItem, 'PatchPcdValue')
            BinaryFile.PatchPcdValues.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/PcdExValue' % Key):
            A = PcdEntryXml()
            B = A.FromXml(SubItem, 'PcdExValue')
            # Fix: these entries were previously appended to PatchPcdValues
            # (copy/paste error), so PcdEx values were misfiled and the
            # PcdExValues list was never populated.
            BinaryFile.PcdExValues.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/LibraryInstances/GUID' % Key):
            GUID = XmlElement(SubItem, 'GUID')
            Version = XmlAttribute(XmlNode(SubItem, 'GUID'), 'Version')
            BinaryFile.LibraryInstances.append([GUID, Version])
        for SubItem in XmlList(Item, '%s/AsBuilt/BuildFlags' % Key):
            BinaryFile.BuildFlags.append(XmlElement(SubItem, 'BuildFlags'))

        return BinaryFile

    def ToXml(self, BinaryFile, Key):
        """Serialize BinaryFile into an XML element named Key."""
        NodeList = []
        for Item in BinaryFile.Filenames:
            Tmp = FilenameXml()
            NodeList.append(Tmp.ToXml(Item, 'Filename'))
        # AsBuilt information: patch/dynamic-ex PCD values, linked library
        # instances and the build flags used to produce the binary.
        AsBuiltNodeList = []
        for Item in BinaryFile.PatchPcdValues:
            Tmp = PcdEntryXml()
            AsBuiltNodeList.append(Tmp.ToXml(Item, 'PatchPcdValue'))
        for Item in BinaryFile.PcdExValues:
            Tmp = PcdEntryXml()
            AsBuiltNodeList.append(Tmp.ToXml(Item, 'PcdExValue'))
        LibNodeList = []
        for Item in BinaryFile.LibraryInstances:
            LibNode = CreateXmlElement('GUID', Item[0], [], [['Version', Item[1]]])
            LibNodeList.append(LibNode)
        if LibNodeList:
            AsBuiltNodeList.append(CreateXmlElement('LibraryInstances', '', LibNodeList, []))
        for Item in BinaryFile.BuildFlags:
            AsBuiltNodeList.append(CreateXmlElement('BuildFlags', Item, [], []))
        Element = CreateXmlElement('AsBuilt', '', AsBuiltNodeList, [])
        NodeList.append(Element)

        Root = CreateXmlElement('%s' % Key, '', NodeList, [])

        return Root

    def __str__(self):
        """Readable dump of the binary file description."""
        Str = "BinaryFiles:"
        for Item in self.Filenames:
            Str = Str + '\n\t' + str(Item)
        for Item in self.PatchPcdValues:
            Str = Str + '\n\t' + str(Item)
        for Item in self.PcdExValues:
            Str = Str + '\n\t' + str(Item)
        for Item in self.LibraryInstances:
            Str = Str + '\n\t' + str(Item)
        for Item in self.BuildFlags:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# PackageXml
class PackageXml(object):
    """XML mapper for a module's <Package> dependency element."""

    def __init__(self):
        self.Description = ''
        self.Guid = ''
        self.Version = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse Item and return a populated ModulePackageDependencyClass."""
        self.Description = XmlElement(Item, '%s/Description' % Key)
        self.Guid = XmlElement(Item, '%s/GUID' % Key)
        self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)

        Dependency = ModulePackageDependencyClass()
        Dependency.FilePath = self.Description
        Dependency.PackageGuid = self.Guid
        Dependency.PackageVersion = self.Version
        Dependency.FeatureFlag = self.CommonDefines.FeatureFlag
        Dependency.SupArchList = self.CommonDefines.SupArchList
        return Dependency

    def ToXml(self, PackageDependency, Key):
        """Serialize PackageDependency into an XML element named Key."""
        Attributes = [
            ['SupArchList', GetStringOfList(PackageDependency.SupArchList)],
            ['FeatureFlag', PackageDependency.FeatureFlag],
        ]
        GuidNode = CreateXmlElement('GUID', PackageDependency.PackageGuid, [],
                                    [['Version', PackageDependency.PackageVersion]])
        Children = [['Description', PackageDependency.FilePath],
                    GuidNode,
                   ]
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        """Readable dump of the package dependency."""
        return "Description = %s Guid = %s Version = %s %s" \
               % (self.Description, self.Guid, self.Version, self.CommonDefines)
+
# ExternXml
class ExternXml(object):
    """XML mapper for a module <Extern> element (entry points, ctors...)."""

    def __init__(self):
        self.CommonDefines = CommonDefinesXml()
        self.EntryPoint = ''
        self.UnloadImage = ''
        self.Constructor = ''
        self.Destructor = ''
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse Item and return a populated ModuleExternClass."""
        self.CommonDefines.FromXml(Item, Key)
        # Same four simple sub-elements; read them in declaration order.
        for Tag in ('EntryPoint', 'UnloadImage', 'Constructor', 'Destructor'):
            setattr(self, Tag, XmlElement(Item, '%s/%s' % (Key, Tag)))
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Extern = ModuleExternClass()
        Extern.EntryPoint = self.EntryPoint
        Extern.UnloadImage = self.UnloadImage
        Extern.Constructor = self.Constructor
        Extern.Destructor = self.Destructor
        Extern.SupArchList = self.CommonDefines.SupArchList
        Extern.FeatureFlag = self.CommonDefines.FeatureFlag
        Extern.HelpTextList = GetHelpTextList(self.HelpText)
        return Extern

    def ToXml(self, Extern, Key):
        """Serialize Extern into an XML element named Key."""
        Attributes = [
            ['SupArchList', GetStringOfList(Extern.SupArchList)],
            ['FeatureFlag', Extern.FeatureFlag],
        ]
        Children = [
            ['EntryPoint', Extern.EntryPoint],
            ['UnloadImage', Extern.UnloadImage],
            ['Constructor', Extern.Constructor],
            ['Destructor', Extern.Destructor],
        ]
        for HelpText in Extern.HelpTextList:
            Children.append(HelpTextXml().ToXml(HelpText, 'HelpText'))
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        """Readable dump of the extern definition and its help texts."""
        Lines = ["EntryPoint = %s UnloadImage = %s Constructor = %s Destructor = %s %s"
                 % (self.EntryPoint, self.UnloadImage, self.Constructor, self.Destructor, self.CommonDefines)]
        Lines.extend('\t' + str(HelpText) for HelpText in self.HelpText)
        return '\n'.join(Lines)
# DepexXml
class DepexXml(object):
    """XML mapper for a dependency expression (PeiDepex/DxeDepex/SmmDepex)."""

    def __init__(self):
        self.Expression = ''
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse Item and return a populated ModuleDepexClass."""
        self.Expression = XmlElement(Item, '%s/Expression' % Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Depex = ModuleDepexClass()
        Depex.Depex = self.Expression
        Depex.HelpTextList = GetHelpTextList(self.HelpText)
        return Depex

    def ToXml(self, Depex, Key):
        """Serialize Depex into an XML element named Key."""
        Children = [['Expression', Depex.Depex]]
        for HelpText in Depex.HelpTextList:
            Children.append(HelpTextXml().ToXml(HelpText, 'HelpText'))
        return CreateXmlElement('%s' % Key, '', Children, [])

    def __str__(self):
        """Readable dump of the expression and its help texts."""
        Lines = ["Expression = %s" % (self.Expression)]
        Lines.extend('\t' + str(HelpText) for HelpText in self.HelpText)
        return '\n'.join(Lines)
+
# PackageSurfaceAreaXml
#
# Converts between the <PackageSurfaceArea> XML section of a distribution
# package and the in-memory PackageClass object.  Each sub-section is
# delegated to its dedicated *Xml helper class; section order in ToXml
# follows the distribution-package layout and should not be reordered.
class PackageSurfaceAreaXml(object):
    def __init__(self):
        # Parsed PackageClass object; populated by FromXml().
        self.Package = None

    def FromXml(self, Item, Key):
        """Parse a <PackageSurfaceArea> DOM node into a PackageClass.

        Returns the populated PackageClass (also kept in self.Package).
        """
        # Create a package object
        Package = PackageClass()

        # Header
        Tmp = PackageHeaderXml()
        PackageHeader = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/Header'), 'Header')
        Package.PackageHeader = PackageHeader

        # ClonedFrom (optional; appended to the header only when present)
        Tmp = ClonedFromXml()
        ClonedFrom = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'), 'ClonedFrom')
        if ClonedFrom:
            Package.PackageHeader.ClonedFrom.append(ClonedFrom)

        # LibraryClass
        for SubItem in XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
            Tmp = LibraryClassXml()
            LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
            Package.LibraryClassDeclarations.append(LibraryClass)

        # IndustryStandardHeader
        for SubItem in XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
            Tmp = IndustryStandardHeaderXml()
            Include = Tmp.FromXml(SubItem, 'IndustryStandardHeader')
            Package.IndustryStdHeaders.append(Include)

        # PackageHeader
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
            Tmp = PackageIncludeHeaderXml()
            Include = Tmp.FromXml(SubItem, 'PackageHeader')
            Package.PackageIncludePkgHeaders.append(Include)

        # Guid
        for SubItem in XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.GuidDeclarations.append(GuidProtocolPpi)

        # Protocol
        for SubItem in XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.ProtocolDeclarations.append(GuidProtocolPpi)

        # Ppi
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.PpiDeclarations.append(GuidProtocolPpi)

        # PcdEntry
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
            Tmp = PcdEntryXml()
            PcdEntry = Tmp.FromXml(SubItem, 'PcdEntry')
            Package.PcdDeclarations.append(PcdEntry)

        # PcdCheck
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdRelationshipChecks/PcdCheck'):
            Tmp = PcdCheckXml()
            PcdCheck = Tmp.FromXml(SubItem, 'PcdCheck')
            Package.PcdChecks.append(PcdCheck)

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        Package.MiscFiles = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')

        # UserExtensions
        Tmp = UserExtensionsXml()
        Package.UserExtensions = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/UserExtensions'), 'UserExtensions')

        # Modules, keyed by (GUID, version, combined path)
        for SubItem in XmlList(Item, '/PackageSurfaceArea/Modules/ModuleSurfaceArea'):
            Tmp = ModuleSurfaceAreaXml()
            Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')
            Package.Modules[(Module.ModuleHeader.Guid, Module.ModuleHeader.Version, Module.ModuleHeader.CombinePath)] = Module

        self.Package = Package
        return self.Package

    def ToXml(self, Package):
        """Serialize Package into a <PackageSurfaceArea> DOM element."""
        # Create PackageSurfaceArea node
        DomPackage = minidom.Document().createElement('PackageSurfaceArea')

        # Header
        Tmp = PackageHeaderXml()
        DomPackage.appendChild(Tmp.ToXml(Package.PackageHeader, 'Header'))

        # ClonedFrom (only the first record is serialized)
        Tmp = ClonedFromXml()
        if Package.PackageHeader.ClonedFrom != []:
            DomPackage.appendChild(Tmp.ToXml(Package.PackageHeader.ClonedFrom[0], 'ClonedFrom'))

        # LibraryClass
        LibraryClassNode = CreateXmlElement('LibraryClassDeclarations', '', [], [])
        for LibraryClass in Package.LibraryClassDeclarations:
            Tmp = LibraryClassXml()
            LibraryClassNode.appendChild(Tmp.ToXml(LibraryClass, 'LibraryClass'))
        DomPackage.appendChild(LibraryClassNode)

        # IndustryStandardHeader
        IndustryStandardHeaderNode = CreateXmlElement('IndustryStandardIncludes', '', [], [])
        for Include in Package.IndustryStdHeaders:
            Tmp = IndustryStandardHeaderXml()
            IndustryStandardHeaderNode.appendChild(Tmp.ToXml(Include, 'IndustryStandardHeader'))
        DomPackage.appendChild(IndustryStandardHeaderNode)

        # PackageHeader
        PackageIncludeHeaderNode = CreateXmlElement('PackageIncludes', '', [], [])
        for Include in Package.PackageIncludePkgHeaders:
            Tmp = PackageIncludeHeaderXml()
            PackageIncludeHeaderNode.appendChild(Tmp.ToXml(Include, 'PackageHeader'))
        DomPackage.appendChild(PackageIncludeHeaderNode)

        # Guid
        GuidProtocolPpiNode = CreateXmlElement('GuidDeclarations', '', [], [])
        for GuidProtocolPpi in Package.GuidDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # Protocol
        GuidProtocolPpiNode = CreateXmlElement('ProtocolDeclarations', '', [], [])
        for GuidProtocolPpi in Package.ProtocolDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # Ppi
        GuidProtocolPpiNode = CreateXmlElement('PpiDeclarations', '', [], [])
        for GuidProtocolPpi in Package.PpiDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # PcdEntry
        PcdEntryNode = CreateXmlElement('PcdDeclarations', '', [], [])
        for PcdEntry in Package.PcdDeclarations:
            Tmp = PcdEntryXml()
            PcdEntryNode.appendChild(Tmp.ToXml(PcdEntry, 'PcdEntry'))
        DomPackage.appendChild(PcdEntryNode)

        # PcdCheck
        PcdCheckNode = CreateXmlElement('PcdRelationshipChecks', '', [], [])
        for PcdCheck in Package.PcdChecks:
            Tmp = PcdCheckXml()
            PcdCheckNode.appendChild(Tmp.ToXml(PcdCheck, 'PcdCheck'))
        DomPackage.appendChild(PcdCheckNode)

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        DomPackage.appendChild(Tmp.ToXml(Package.MiscFiles, 'MiscellaneousFiles'))

        # UserExtensions
        Tmp = UserExtensionsXml()
        DomPackage.appendChild(Tmp.ToXml(Package.UserExtensions, 'UserExtensions'))

        # Modules
        ModuleNode = CreateXmlElement('Modules', '', [], [])
        for Module in Package.Modules.values():
            Tmp = ModuleSurfaceAreaXml()
            ModuleNode.appendChild(Tmp.ToXml(Module))
        DomPackage.appendChild(ModuleNode)

        return DomPackage
+
# ModuleXml
#
# Converts between the <ModuleSurfaceArea> XML section and the in-memory
# ModuleClass object.  Each sub-section is delegated to its dedicated
# *Xml helper class; section order follows the distribution-package layout.
class ModuleSurfaceAreaXml(object):
    def __init__(self):
        # Parsed ModuleClass object; populated by FromXml().
        self.Module = None

    def FromXml(self, Item, Key):
        """Parse a <ModuleSurfaceArea> DOM node into a ModuleClass.

        Returns the populated ModuleClass (also kept in self.Module).
        """
        # Create a module object
        Module = ModuleClass()

        # Header
        Tmp = HeaderXml()
        ModuleHeader = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/Header'), 'Header')
        Module.ModuleHeader = ModuleHeader

        # ModuleProperties (extends the header parsed above)
        Tmp = ModulePropertyXml()
        (Header, BootModes, Events, HOBs) = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ModuleProperties'), 'ModuleProperties', ModuleHeader)
        Module.ModuleHeader = Header
        Module.BootModes = BootModes
        Module.Events = Events
        Module.Hobs = HOBs

        # ClonedFrom (optional)
        Tmp = ClonedFromXml()
        ClonedFrom = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ClonedFrom'), 'ClonedFrom')
        if ClonedFrom:
            Module.ModuleHeader.ClonedFrom.append(ClonedFrom)

        # LibraryClass
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
            Tmp = LibraryClassXml()
            Module.LibraryClasses.append(Tmp.FromXml(SubItem, 'LibraryClass'))

        # SourceFile
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename'):
            Tmp = SourceFileXml()
            Module.Sources.append(Tmp.FromXml(SubItem, 'Filename'))

        # BinaryFile
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile'):
            Tmp = BinaryFileXml()
            Module.Binaries.append(Tmp.FromXml(SubItem, 'BinaryFile'))

        # PackageDependencies
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
            Tmp = PackageXml()
            Module.PackageDependencies.append(Tmp.FromXml(SubItem, 'Package'))

        # Guid
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
            Tmp = GuidProtocolPpiXml()
            Module.Guids.append(Tmp.FromXml(SubItem, 'GuidCName'))

        # Protocol
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
            Tmp = GuidProtocolPpiXml()
            Module.Protocols.append(Tmp.FromXml(SubItem, 'Protocol'))

        # Ppi
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
            Tmp = GuidProtocolPpiXml()
            Module.Ppis.append(Tmp.FromXml(SubItem, 'Ppi'))

        # Extern
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
            Tmp = ExternXml()
            Module.Externs.append(Tmp.FromXml(SubItem, 'Extern'))

        # PcdCoded
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
            Tmp = PcdEntryXml()
            Module.PcdCodes.append(Tmp.FromXml(SubItem, 'PcdEntry'))

        # PeiDepex
        Tmp = DepexXml()
        Module.PeiDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/PeiDepex'), 'PeiDepex')

        # DxeDepex
        Tmp = DepexXml()
        Module.DxeDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/DxeDepex'), 'DxeDepex')

        # SmmDepex
        # Fix: this lookup previously used the '/ModuleSurfaceArea/DxeDepex'
        # node (copy/paste error), so the SMM depex was read from the wrong
        # element.
        Tmp = DepexXml()
        Module.SmmDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/SmmDepex'), 'SmmDepex')

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        Module.MiscFiles = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')

        # UserExtensions
        Tmp = UserExtensionsXml()
        Module.UserExtensions = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/UserExtensions'), 'UserExtensions')

        # return the module object
        self.Module = Module
        return self.Module

    def ToXml(self, Module):
        """Serialize Module into a <ModuleSurfaceArea> DOM element."""
        # Create root node of module surface area
        DomModule = minidom.Document().createElement('ModuleSurfaceArea')

        # Header
        Tmp = HeaderXml()
        DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader, 'Header'))

        # ModuleProperties
        Tmp = ModulePropertyXml()
        DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader, Module.BootModes, Module.Events, Module.Hobs, 'ModuleProperties'))

        # ClonedFrom (only the first record is serialized)
        Tmp = ClonedFromXml()
        if Module.ModuleHeader.ClonedFrom != []:
            DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader.ClonedFrom[0], 'ClonedFrom'))

        # LibraryClass
        LibraryClassNode = CreateXmlElement('LibraryClassDefinitions', '', [], [])
        for LibraryClass in Module.LibraryClasses:
            LibraryClassNode.appendChild(LibraryClassXml().ToXml(LibraryClass, 'LibraryClass'))
        DomModule.appendChild(LibraryClassNode)

        # SourceFile
        SourceFileNode = CreateXmlElement('SourceFiles', '', [], [])
        for SourceFile in Module.Sources:
            SourceFileNode.appendChild(SourceFileXml().ToXml(SourceFile, 'Filename'))
        DomModule.appendChild(SourceFileNode)

        # BinaryFile
        BinaryFileNode = CreateXmlElement('BinaryFiles', '', [], [])
        for BinaryFile in Module.Binaries:
            BinaryFileNode.appendChild(BinaryFileXml().ToXml(BinaryFile, 'BinaryFile'))
        DomModule.appendChild(BinaryFileNode)

        # PackageDependencies
        PackageDependencyNode = CreateXmlElement('PackageDependencies', '', [], [])
        for PackageDependency in Module.PackageDependencies:
            PackageDependencyNode.appendChild(PackageXml().ToXml(PackageDependency, 'Package'))
        DomModule.appendChild(PackageDependencyNode)

        # Guid
        GuidProtocolPpiNode = CreateXmlElement('Guids', '', [], [])
        for GuidProtocolPpi in Module.Guids:
            GuidProtocolPpiNode.appendChild(GuidProtocolPpiXml().ToXml(GuidProtocolPpi, 'GuidCName'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Protocol
        GuidProtocolPpiNode = CreateXmlElement('Protocols', '', [], [])
        for GuidProtocolPpi in Module.Protocols:
            GuidProtocolPpiNode.appendChild(GuidProtocolPpiXml().ToXml(GuidProtocolPpi, 'Protocol'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Ppi
        GuidProtocolPpiNode = CreateXmlElement('PPIs', '', [], [])
        for GuidProtocolPpi in Module.Ppis:
            GuidProtocolPpiNode.appendChild(GuidProtocolPpiXml().ToXml(GuidProtocolPpi, 'Ppi'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Extern
        ExternNode = CreateXmlElement('Externs', '', [], [])
        for Extern in Module.Externs:
            ExternNode.appendChild(ExternXml().ToXml(Extern, 'Extern'))
        DomModule.appendChild(ExternNode)

        # PcdCoded
        PcdEntryNode = CreateXmlElement('PcdCoded', '', [], [])
        for PcdEntry in Module.PcdCodes:
            PcdEntryNode.appendChild(PcdEntryXml().ToXml(PcdEntry, 'PcdEntry'))
        DomModule.appendChild(PcdEntryNode)

        # Depex sections, emitted only when present.
        # (The original created unused 'DepexNode' locals here; removed.)
        if Module.PeiDepex:
            DomModule.appendChild(DepexXml().ToXml(Module.PeiDepex, 'PeiDepex'))
        if Module.DxeDepex:
            DomModule.appendChild(DepexXml().ToXml(Module.DxeDepex, 'DxeDepex'))
        if Module.SmmDepex:
            DomModule.appendChild(DepexXml().ToXml(Module.SmmDepex, 'SmmDepex'))

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        DomModule.appendChild(Tmp.ToXml(Module.MiscFiles, 'MiscellaneousFiles'))

        # UserExtensions
        Tmp = UserExtensionsXml()
        DomModule.appendChild(Tmp.ToXml(Module.UserExtensions, 'UserExtensions'))

        return DomModule
+
# DistributionPackageXml
#
# Top-level XML mapper for a whole distribution package file.
class DistributionPackageXml(object):
    def __init__(self):
        self.Dp = DistributionPackageClass()

    def FromXml(self, Filename = None):
        """Load a distribution package XML file into self.Dp and return it.

        When Filename is None the current (possibly empty) self.Dp is
        returned unchanged.
        """
        if Filename is not None:
            self.Dp = DistributionPackageClass()

            # Load to XML
            self.Pkg = XmlParseFile(Filename)

            # Parse Header information
            Tmp = DistributionPackageHeaderXml()
            DistributionPackageHeader = Tmp.FromXml(XmlNode(self.Pkg, '/DistributionPackage/DistributionHeader'), 'DistributionHeader')
            self.Dp.Header = DistributionPackageHeader

            # Parse each PackageSurfaceArea, keyed by (GUID, version, path)
            for Item in XmlList(self.Pkg, '/DistributionPackage/PackageSurfaceArea'):
                Psa = PackageSurfaceAreaXml()
                Package = Psa.FromXml(Item, 'PackageSurfaceArea')
                self.Dp.PackageSurfaceArea[(Package.PackageHeader.Guid, Package.PackageHeader.Version, Package.PackageHeader.CombinePath)] = Package

            # Parse each ModuleSurfaceArea, keyed by (GUID, version, path)
            for Item in XmlList(self.Pkg, '/DistributionPackage/ModuleSurfaceArea'):
                Msa = ModuleSurfaceAreaXml()
                Module = Msa.FromXml(Item, 'ModuleSurfaceArea')
                self.Dp.ModuleSurfaceArea[(Module.ModuleHeader.Guid, Module.ModuleHeader.Version, Module.ModuleHeader.CombinePath)] = Module

            # Parse Tools
            Tmp = MiscellaneousFileXml()
            self.Dp.Tools = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/Tools'), 'Tools')

            # Parse MiscFiles
            Tmp = MiscellaneousFileXml()
            self.Dp.MiscellaneousFiles = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/MiscellaneousFiles'), 'MiscellaneousFiles')

        return self.Dp

    def ToXml(self, Dp):
        """Serialize Dp to a pretty-printed XML string ('' when Dp is None)."""
        if Dp is not None:
            # Root element with the distribution package namespaces.
            # Fix: the xsi namespace URI previously read 'http:/www.w3.org/...'
            # (missing slash), declaring an invalid namespace.
            Attrs = [['xmlns', 'http://www.uefi.org/2008/2.1'],
                     ['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
                    ]
            Root = CreateXmlElement('DistributionPackage', '', [], Attrs)

            Tmp = DistributionPackageHeaderXml()
            Root.appendChild(Tmp.ToXml(Dp.Header, 'DistributionHeader'))

            # Serialize each PackageSurfaceArea
            for Package in Dp.PackageSurfaceArea.values():
                Psa = PackageSurfaceAreaXml()
                Root.appendChild(Psa.ToXml(Package))

            # Serialize each ModuleSurfaceArea
            for Module in Dp.ModuleSurfaceArea.values():
                Msa = ModuleSurfaceAreaXml()
                Root.appendChild(Msa.ToXml(Module))

            # Serialize Tools
            Tmp = MiscellaneousFileXml()
            Root.appendChild(Tmp.ToXml2(Dp.Tools, 'Tools'))

            # Serialize MiscFiles
            Tmp = MiscellaneousFileXml()
            Root.appendChild(Tmp.ToXml2(Dp.MiscellaneousFiles, 'MiscellaneousFiles'))

            return Root.toprettyxml(indent = ' ')

        return ''
+
if __name__ == '__main__':
    # Manual smoke test: parse a distribution package file and dump it back.
    # Raw string avoids accidental backslash escapes in the Windows path;
    # parenthesized print works under both Python 2 and 3.
    M = DistributionPackageXml()
    M.FromXml(r'C:\Test.xml')
    print(M.ToXml(M.Dp))
\ No newline at end of file diff --git a/BaseTools/Source/Python/Common/XmlRoutines.py b/BaseTools/Source/Python/Common/XmlRoutines.py new file mode 100644 index 0000000000..e5fedae83d --- /dev/null +++ b/BaseTools/Source/Python/Common/XmlRoutines.py @@ -0,0 +1,228 @@ +## @file
+# This is an XML API that uses a syntax similar to XPath, but it is written in
+# standard python so that no extra python packages are required to use it.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import xml.dom.minidom
+
## Create an XML element
#
# Build a DOM element named Name with optional text content, child nodes
# and attributes.
#
# @param Name           Tag name of the new element.
# @param String         Text content; skipped when empty or None.
# @param NodeList       Children: either ready DOM nodes, or [Tag, Text]
#                       pairs (a pair is skipped when Tag or Text is
#                       empty/None).
# @param AttributeList  [Name, Value] pairs; a pair is skipped when either
#                       part is empty/None.
#
# @retval Element       The newly created DOM element.
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
    Doc = xml.dom.minidom.Document()
    Element = Doc.createElement(Name)
    if String != '' and String is not None:
        Element.appendChild(Doc.createTextNode(String))

    for Item in NodeList:
        if isinstance(Item, list):
            Key = Item[0]
            Value = Item[1]
            if Key != '' and Key is not None and Value != '' and Value is not None:
                Node = Doc.createElement(Key)
                Node.appendChild(Doc.createTextNode(Value))
                Element.appendChild(Node)
        else:
            # Already a DOM node; attach it directly.
            Element.appendChild(Item)
    for Item in AttributeList:
        Key = Item[0]
        Value = Item[1]
        if Key != '' and Key is not None and Value != '' and Value is not None:
            Element.setAttribute(Key, Value)

    return Element
+
## Get a list of XML nodes using XPath style syntax.
#
# Return a list of XML DOM nodes from the root Dom specified by XPath String.
# If the input Dom or String is not valid, then an empty list is returned.
#
# @param Dom     The root XML DOM node.
# @param String  An XPath style path, e.g. '/Tag1/Tag2'.
#
# @retval Nodes  A list of XML nodes matching the XPath style String.
#
def XmlList(Dom, String):
    if String is None or String == "" or Dom is None or Dom == "":
        return []
    # For a whole document, start the search at its document element.
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    Nodes = [Dom]
    Index = 0
    End = len(TagList) - 1
    while Index <= End:
        ChildNodes = []
        for Node in Nodes:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
                if Index < End:
                    # Intermediate path level: descend into the children.
                    ChildNodes.extend(Node.childNodes)
                else:
                    # Last path level: this node is a match.
                    ChildNodes.append(Node)
        Nodes = ChildNodes
        ChildNodes = []
        Index += 1

    return Nodes
+
+
## Get a single XML node using XPath style syntax.
#
# Return the first XML DOM node from the root Dom specified by XPath String.
# If the input Dom or String is not valid, or no node matches, then an
# empty string is returned.
#
# @param Dom     The root XML DOM node.
# @param String  An XPath style path, e.g. '/Tag1/Tag2'.
#
# @retval Node   A single XML node matching the XPath style String.
#
def XmlNode(Dom, String):
    if String is None or String == "" or Dom is None or Dom == "":
        return ""
    # For a whole document, start the search at its document element.
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    if String[0] == "/":
        String = String[1:]
    TagList = String.split('/')
    Index = 0
    End = len(TagList) - 1
    ChildNodes = [Dom]
    while Index <= End:
        for Node in ChildNodes:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == TagList[Index]:
                if Index < End:
                    # Follow only the first matching node at each level.
                    ChildNodes = Node.childNodes
                else:
                    return Node
                break
        Index += 1
    return ""
+
+
## Get a single XML element using XPath style syntax.
#
# Return the stripped text content of the XML element from the root Dom
# specified by XPath String.  If the input Dom or String is not valid, or
# no element matches, then an empty string is returned.
#
# @param Dom     The root XML DOM object.
# @param String  An XPath style path.
#
# @retval Element  The element's text, stripped of surrounding whitespace.
#
def XmlElement(Dom, String):
    try:
        return XmlNode(Dom, String).firstChild.data.strip()
    except AttributeError:
        # No matching node (XmlNode returned ""), or the node has no
        # text child; the original bare 'except' hid unrelated errors.
        return ""
+
+
## Get a single XML element of the current node.
#
# Return the stripped text content of the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Element The stripped element text, or '' on failure.
#
def XmlElementData(Dom):
    try:
        return Dom.firstChild.data.strip()
    except AttributeError:
        # Narrowed from a bare "except:" (which also swallowed SystemExit and
        # KeyboardInterrupt).  AttributeError covers every expected failure:
        # Dom is None/"" or it has no text child carrying .data.
        return ""
+
+
## Get a list of XML elements using XPath style syntax.
#
# Return the text of every XML element from the root Dom matched by the
# XPath style String.  If the input Dom or String is not valid, then an
# empty list is returned.
#
# @param Dom The root XML DOM object.
# @param String A XPath style path.
#
# @retval Elements A list of element texts matching the XPath style String.
#
def XmlElementList(Dom, String):
    return [XmlElementData(Node) for Node in XmlList(Dom, String)]
+
+
## Get the XML attribute of the current node.
#
# Return the stripped value of the attribute named Attribute on node Dom.
# If Dom is not an element (e.g. None or ""), an empty string is returned;
# a missing attribute also yields '' because getAttribute() returns ''.
#
# @param Dom The root XML DOM object.
# @param Attribute The name of Attribute.
#
# @retval Value The stripped attribute value, or '' on failure.
#
def XmlAttribute(Dom, Attribute):
    try:
        return Dom.getAttribute(Attribute).strip()
    except AttributeError:
        # Narrowed from a bare "except:" (which also swallowed SystemExit and
        # KeyboardInterrupt); only a Dom without getAttribute() is expected.
        return ''
+
+
## Get the XML node name of the current node.
#
# Return the stripped node name of the current root Dom.
# If the input Dom is not valid, then an empty string is returned.
#
# @param Dom The root XML DOM object.
#
# @retval Name The stripped node name, or '' on failure.
#
def XmlNodeName(Dom):
    try:
        return Dom.nodeName.strip()
    except AttributeError:
        # Narrowed from a bare "except:" (which also swallowed SystemExit and
        # KeyboardInterrupt); only a Dom without nodeName is expected.
        return ''
+
## Parse an XML file.
#
# Parse the input XML file named FileName and return the XML DOM it stands
# for.  If the file cannot be opened or is not valid XML, the error is
# printed and an empty string is returned.
#
# @param FileName The XML file name.
#
# @retval Dom The Dom object achieved from the XML file, or "" on failure.
#
def XmlParseFile(FileName):
    try:
        XmlFile = open(FileName)
        try:
            return xml.dom.minidom.parse(XmlFile)
        finally:
            # Fix: the original skipped close() when parse() raised,
            # leaking the file handle.
            XmlFile.close()
    except Exception as X:
        print(X)
        return ""
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Smoke test only: build two elements, wrap them in a list element and
    # pretty-print the tree.  CreateXmlElement/CreateXmlList are defined
    # earlier in this module.
    A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
    B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
    C = CreateXmlList('DDD', 'EEE', [A, B], ['FFF', 'GGG'])
    print C.toprettyxml(indent = " ")
    pass
diff --git a/BaseTools/Source/Python/Common/__init__.py b/BaseTools/Source/Python/Common/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/Common/__init__.py diff --git a/BaseTools/Source/Python/CommonDataClass/CommonClass.py b/BaseTools/Source/Python/CommonDataClass/CommonClass.py new file mode 100644 index 0000000000..763550fe47 --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/CommonClass.py @@ -0,0 +1,473 @@ +## @file
+# This file is used to define common items of class object
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+
## Generate help text
#
# Build a HelpTextClass object carrying Text in language Lang, or return
# None when Text is empty.
#
# @param Text The help string; falsy values produce no object.
# @param Lang The language tag of the help string.
#
def GenerateHelpText(Text, Lang):
    if not Text:
        return None
    HelpText = HelpTextClass()
    HelpText.Lang = Lang
    HelpText.String = Text
    return HelpText
+
## CommonClass
#
# Common items shared by Module/Platform/Package file entries: usage kind,
# feature flag, supported architectures and help text.
#
# @var Usage        Usage kind, e.g. ALWAYS_CONSUMED / SOMETIMES_PRODUCED / ...
# @var FeatureFlag  Feature flag expression guarding the item
# @var SupArchList  Supported architectures, e.g. IA32 / X64 / IPF / ...
# @var HelpText     Raw help text string
# @var HelpTextList Collected HelpTextClass objects
#
class CommonClass(object):
    def __init__(self, Usage = None, FeatureFlag = '', SupArchList = None, HelpText = ''):
        # Mutable defaults are materialized per instance, never shared.
        self.Usage = [] if Usage is None else Usage
        self.SupArchList = [] if SupArchList is None else SupArchList
        self.FeatureFlag = FeatureFlag
        self.HelpText = HelpText
        self.HelpTextList = []
+
## CommonHeaderClass
#
# Header information shared by Module/Platform/Package files: abstract,
# description, copyright, license and specification versions.
#
# @var Abstract      One-line summary text
# @var Description   Longer description text
# @var Copyright     Copyright statement
# @var License       License statement
# @var Specification Mapping of specification name -> version
#
class CommonHeaderClass(object):
    def __init__(self):
        for Attr in ('Abstract', 'Description', 'Copyright', 'License'):
            setattr(self, Attr, '')
        self.Specification = {}
+
## HelpTextClass
#
# One localized help string as used in PKG files.
#
# @var Lang   Language tag of the help string
# @var String The help string itself
#
class HelpTextClass(object):
    def __init__(self):
        self.Lang, self.String = '', ''
+
## DefineClass
#
# Holds DEFINE statements from Module/Platform/Package files.
#
# @var Define Mapping { (DefineName, Arch) : DefineValue, ... }
#
class DefineClass(object):
    def __init__(self):
        # Keyed by (name, arch) tuple; empty until populated by a parser.
        self.Define = {}
+
## ClonedRecordClass
#
# A record of a clone action: a numeric Id plus the FAR, package and module
# identity (GUID/version pairs) the clone came from.
#
# @var Id             Numeric record id
# @var FarGuid        GUID of the source FAR
# @var PackageGuid    GUID of the source package
# @var PackageVersion Version of the source package
# @var ModuleGuid     GUID of the source module
# @var ModuleVersion  Version of the source module
#
class ClonedRecordClass(object):
    def __init__(self):
        self.Id = 0
        self.FarGuid = self.PackageGuid = self.PackageVersion = ''
        self.ModuleGuid = self.ModuleVersion = ''
+
## IdentificationClass
#
# Identification of a Module (INF) / Package (DEC) / Platform (DSC): name,
# GUID, version and the various path forms under which the file is known.
#
# @var Name        ModuleName(Inf) / PackageName(Dec) / PlatformName(Dsc)
# @var Guid        GUID string
# @var Version     Version string
# @var FileName    Bare file name
# @var FullPath    Absolute path
#
class IdentificationClass(object):
    def __init__(self):
        # All identity fields start as empty strings.
        for Attr in ('Name', 'BaseName', 'Guid', 'Version', 'FileName',
                     'FullPath', 'RelaPath', 'PackagePath', 'ModulePath',
                     'CombinePath'):
            setattr(self, Attr, '')
+
## IncludeStatementClass
#
# Holds the IncludeFiles of a Module/Platform/Package file.
#
# @var IncludeFiles Mapping { IncludeFile : [Arch1, Arch2, ...], ... }
#
class IncludeStatementClass(object):
    def __init__(self):
        # Empty until a parser records include-file -> arch-list entries.
        self.IncludeFiles = {}
+
## GuidProtocolPpiCommonClass
#
# Common base for Guid, Protocol and Ppi entries of Module/Platform/Package
# files: names, GUID value, notify flag, GUID type and supported modules.
#
# @var Name          Friendly name
# @var CName         C symbol name
# @var Guid          GUID string
# @var Notify        Whether this is a notify registration
# @var GuidTypeList  GUID type tags, e.g. EFI_EVENT / HOB / TOKEN_SPACE_GUID
# @var SupModuleList Supported module types, e.g. PEIM / DXE_DRIVER / ...
#
class GuidProtocolPpiCommonClass(CommonClass):
    def __init__(self):
        # Base init first; it touches a disjoint attribute set.
        CommonClass.__init__(self)
        self.Name = self.CName = self.Guid = self.VariableName = ''
        self.Notify = False
        self.GuidTypeList = []
        self.GuidTypeLists = []
        self.SupModuleList = []
+
## LibraryClassClass
#
# A library class entry of Module/Platform/Package files: class name,
# include header and the recommended instance to satisfy it.
#
# @var LibraryClass                Library class name
# @var IncludeHeader               Header file declaring the class API
# @var RecommendedInstanceVersion  Version of the recommended instance
# @var RecommendedInstanceGuid     GUID of the recommended instance
# @var RecommendedInstance         Recommended instance tag
# @var SupModuleList               Supported module types
#
class LibraryClassClass(CommonClass, DefineClass):
    def __init__(self):
        # Both bases initialize disjoint attribute sets.
        CommonClass.__init__(self)
        DefineClass.__init__(self)
        self.LibraryClass = self.IncludeHeader = ''
        self.RecommendedInstanceVersion = self.RecommendedInstanceGuid = ''
        self.RecommendedInstance = ''
        self.SupModuleList = []
+
## GuidClass
#
# A Guid entry of Module/Platform/Package files; all state lives in the
# GuidProtocolPpiCommonClass base.
#
class GuidClass(GuidProtocolPpiCommonClass):
    def __init__(self):
        GuidProtocolPpiCommonClass.__init__(self)
+
## ProtocolClass
#
# A Protocol entry of Module/Platform/Package files; all state lives in the
# GuidProtocolPpiCommonClass base.
#
class ProtocolClass(GuidProtocolPpiCommonClass):
    def __init__(self):
        GuidProtocolPpiCommonClass.__init__(self)
+
## PpiClass
#
# A Ppi entry of Module/Platform/Package files; all state lives in the
# GuidProtocolPpiCommonClass base.
#
class PpiClass(GuidProtocolPpiCommonClass):
    def __init__(self):
        GuidProtocolPpiCommonClass.__init__(self)
+
## SkuInfoClass
#
# One SKU's information for a PCD in Module/Platform/Package files: the SKU
# identity plus the HII variable, VPD offset or plain default value that
# supplies the PCD in that SKU.
#
# @param SkuIdName         SKU id name, default ''
# @param SkuId             SKU id, default ''
# @param VariableName      HII variable name, default ''
# @param VariableGuid      HII variable GUID (C name), default ''
# @param VariableOffset    Offset inside the HII variable, default ''
# @param HiiDefaultValue   Default value for the HII case, default ''
# @param VpdOffset         Offset for the VPD case, default ''
# @param DefaultValue      Plain default value, default ''
# @param VariableGuidValue HII variable GUID value, default ''
#
class SkuInfoClass(object):
    def __init__(self, SkuIdName = '', SkuId = '', VariableName = '', VariableGuid = '', VariableOffset = '',
                 HiiDefaultValue = '', VpdOffset = '', DefaultValue = '', VariableGuidValue = ''):
        self.SkuIdName = SkuIdName
        self.SkuId = SkuId

        # Used by HII-type PCDs
        self.VariableName = VariableName
        self.VariableGuid = VariableGuid
        self.VariableGuidValue = VariableGuidValue
        self.VariableOffset = VariableOffset
        self.HiiDefaultValue = HiiDefaultValue

        # Used by VPD-type PCDs
        self.VpdOffset = VpdOffset

        # Used by default-type PCDs
        self.DefaultValue = DefaultValue

    ## Convert the class to a string
    #
    # Format each member as "Name = value," and concatenate them into a
    # single line.  The output format is identical to the original
    # hand-concatenated version.
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        # Fix: the original contained a duplicated assignment ("Rtn = Rtn = ...").
        Fields = (('SkuId', self.SkuId),
                  ('SkuIdName', self.SkuIdName),
                  ('VariableName', self.VariableName),
                  ('VariableGuid', self.VariableGuid),
                  ('VariableOffset', self.VariableOffset),
                  ('HiiDefaultValue', self.HiiDefaultValue),
                  ('VpdOffset', self.VpdOffset),
                  ('DefaultValue', self.DefaultValue))
        return ''.join('%s = %s,' % (Name, Value) for Name, Value in Fields)
## PcdErrorClass
#
# Validation information attached to a PCD: the valid value list/range or
# expression, plus the error number and messages reported on violation.
#
# @var ValidValueList     Space-separated list of valid values
# @var ValidValueListLang Language of the valid value list
# @var ValidValueRange    Valid value range expression
# @var Expression         Validation expression
# @var ErrorNumber        Error number string
# @var ErrorMessage       List of error message strings
#
class PcdErrorClass(object):
    def __init__(self):
        for Attr in ('ValidValueList', 'ValidValueListLang',
                     'ValidValueRange', 'Expression', 'ErrorNumber'):
            setattr(self, Attr, '')
        self.ErrorMessage = []
+
## PcdClass
#
# One PCD entry of Module/Platform/Package files: C name, token, token-space
# GUID, datum type/size, default value, item type and per-SKU information.
#
# @param CName               C symbol name, default ''
# @param Token               Token number, default ''
# @param TokenSpaceGuidCName Token space GUID C name, default ''
# @param DatumType           UINT8/UINT16/UINT32/UINT64/VOID*/BOOLEAN, default ''
# @param MaxDatumSize        Maximum datum size, default ''
# @param DefaultValue        Default value, default ''
# @param ItemType            FEATURE_FLAG/FIXED_AT_BUILD/.../DYNAMIC_EX, default ''
# @param ValidUsage          Valid usage list, default []
# @param SkuInfoList         Mapping { SkuIdName : SkuInfoClass }, default {}
# @param SupModuleList       Supported module types, default []
#
class PcdClass(CommonClass):
    def __init__(self, CName = '', Token = '', TokenSpaceGuidCName = '', DatumType = '', MaxDatumSize = '', DefaultValue = '', ItemType = '', ValidUsage = None, SkuInfoList = None, SupModuleList = None):
        CommonClass.__init__(self)
        self.CName = CName
        self.Token = Token
        self.TokenSpaceGuidCName = TokenSpaceGuidCName
        self.DatumType = DatumType
        self.MaxDatumSize = MaxDatumSize
        self.DefaultValue = DefaultValue
        self.ItemType = ItemType
        # None defaults become fresh containers so instances never share state.
        self.ValidUsage = [] if ValidUsage is None else ValidUsage
        self.SkuInfoList = {} if SkuInfoList is None else SkuInfoList
        self.SupModuleList = [] if SupModuleList is None else SupModuleList
        # Bookkeeping fields filled in by later processing stages.
        self.PcdItemType = ''
        self.TokenSpaceGuidValue = ''
        self.PcdUsage = ''
        self.PcdCName = ''
        self.Value = ''
        self.Offset = ''
        self.PcdErrors = []
+
## BuildOptionClass
#
# One build option of Module/Platform/Package files, i.e. a statement of the
# form "Family:Target_TagName_Tarch_ToolCode_FLAGS = String".
#
# @param ToolChainFamily Tool chain family (e.g. MSFT/GCC), default ''
# @param ToolChain       Tool chain name, default ''
# @param Option          Option string, default ''
#
# @var Statement   The full option statement text
# @var BuildTarget Build target part of the key
# @var TagName     Tag name part of the key
# @var ToolCode    Tool code part of the key
# @var SupArchList Supported architectures
#
class BuildOptionClass(IncludeStatementClass):
    def __init__(self, ToolChainFamily = '', ToolChain = '', Option = ''):
        IncludeStatementClass.__init__(self)
        self.ToolChainFamily = ToolChainFamily
        self.ToolChain = ToolChain
        self.Option = Option
        self.Statement = self.BuildTarget = self.TagName = self.ToolCode = ''
        self.SupArchList = []
+
## IncludeClass
#
# One include path entry of Module/Platform/Package files.
#
# @var FilePath      Include path
# @var ModuleType    Module type the path applies to
# @var SupModuleList Supported module types
# @var Comment       Attached comment text
#
class IncludeClass(CommonClass):
    def __init__(self):
        CommonClass.__init__(self)
        self.FilePath = self.ModuleType = self.Comment = ''
        self.SupModuleList = []
+
## FileClass
#
# One file entry of Module/Platform/Package files: name plus executable,
# tool family and file type tags.
#
class FileClass(CommonClass):
    def __init__(self):
        CommonClass.__init__(self)
        self.Filename = self.Executable = self.Family = self.FileType = ''
+
+
## MiscFileClass
#
# A named group of miscellaneous files together with the common header
# information inherited from CommonHeaderClass.
#
class MiscFileClass(CommonHeaderClass):
    def __init__(self):
        CommonHeaderClass.__init__(self)
        self.Name = ''
        self.Files = []
+
+
## UserExtensionsClass
#
# One UserExtensions section of Module/Platform/Package files: the user id,
# a numeric identifier, the raw section content, plus collected defines and
# build options.
#
# @var UserID       User id of the section
# @var Identifier   Numeric identifier (0 by default)
# @var Content      Raw section content
# @var Defines      Collected DEFINE entries
# @var BuildOptions Collected build options
#
class UserExtensionsClass(object):
    def __init__(self):
        self.UserID = self.Content = ''
        self.Identifier = 0
        self.Defines = []
        self.BuildOptions = []
diff --git a/BaseTools/Source/Python/CommonDataClass/DataClass.py b/BaseTools/Source/Python/CommonDataClass/DataClass.py new file mode 100644 index 0000000000..00f4be8332 --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/DataClass.py @@ -0,0 +1,351 @@ +## @file
+# This file is used to define class for data sturcture used in ECC
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+
+##
+# Static values for data models
+#
MODEL_UNKNOWN = 0

# File type models (1xxx): 100x = source files, 101x = build meta-data files.
MODEL_FILE_C = 1001
MODEL_FILE_H = 1002
MODEL_FILE_ASM = 1003
MODEL_FILE_INF = 1011
MODEL_FILE_DEC = 1012
MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016

# Identifier models (2xxx): constructs recognized while scanning a source file.
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
MODEL_IDENTIFIER_COMMENT = 2003
MODEL_IDENTIFIER_PARAMETER = 2004
MODEL_IDENTIFIER_STRUCTURE = 2005
MODEL_IDENTIFIER_VARIABLE = 2006
MODEL_IDENTIFIER_INCLUDE = 2007
MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
MODEL_IDENTIFIER_ENUMERATE = 2009
MODEL_IDENTIFIER_PCD = 2010
MODEL_IDENTIFIER_UNION = 2011
MODEL_IDENTIFIER_MACRO_IFDEF = 2012
MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
MODEL_IDENTIFIER_MACRO_DEFINE = 2014
MODEL_IDENTIFIER_MACRO_ENDIF = 2015
MODEL_IDENTIFIER_MACRO_PROGMA = 2016
# 2018 is declared before 2017; the historical declaration order is kept.
MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
MODEL_IDENTIFIER_TYPEDEF = 2017
MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020

# EFI item models (3xxx): entries found in INF/DEC/DSC meta-data files.
MODEL_EFI_PROTOCOL = 3001
MODEL_EFI_PPI = 3002
MODEL_EFI_GUID = 3003
MODEL_EFI_LIBRARY_CLASS = 3004
MODEL_EFI_LIBRARY_INSTANCE = 3005
MODEL_EFI_PCD = 3006
MODEL_EFI_SOURCE_FILE = 3007
MODEL_EFI_BINARY_FILE = 3008
MODEL_EFI_SKU_ID = 3009
MODEL_EFI_INCLUDE = 3010
MODEL_EFI_DEPEX = 3011

# PCD item-type models (4xxx).
MODEL_PCD = 4000
MODEL_PCD_FIXED_AT_BUILD = 4001
MODEL_PCD_PATCHABLE_IN_MODULE = 4002
MODEL_PCD_FEATURE_FLAG = 4003
MODEL_PCD_DYNAMIC_EX = 4004
MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
MODEL_PCD_DYNAMIC_EX_VPD = 4006
MODEL_PCD_DYNAMIC_EX_HII = 4007
MODEL_PCD_DYNAMIC = 4008
MODEL_PCD_DYNAMIC_DEFAULT = 4009
MODEL_PCD_DYNAMIC_VPD = 4010
MODEL_PCD_DYNAMIC_HII = 4011

# Meta-data models (5xxx): sections and statements of meta-data files.
MODEL_META_DATA_HEADER = 5001
MODEL_META_DATA_INCLUDE = 5002
MODEL_META_DATA_DEFINE = 5003
MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
MODEL_META_DATA_BUILD_OPTION = 5008
MODEL_META_DATA_COMPONENT = 5009
MODEL_META_DATA_USER_EXTENSION = 5010
MODEL_META_DATA_PACKAGE = 5011
MODEL_META_DATA_NMAKE = 5012
# NOTE(review): 50013 looks like a typo for 5013 (every sibling is 5xxx), but
# the value may be persisted in existing ECC databases — confirm before fixing.
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015

# (name, value) pairs for all models, used to enumerate them by name.
# NOTE(review): 'MODEL_IDENTIFIER_UNION' appears twice below, and the
# MODEL_FILE_CIF / ELSEIF / ENDIF models are absent — confirm intent.
MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
              ('MODEL_FILE_C', MODEL_FILE_C),
              ('MODEL_FILE_H', MODEL_FILE_H),
              ('MODEL_FILE_ASM', MODEL_FILE_ASM),
              ('MODEL_FILE_INF', MODEL_FILE_INF),
              ('MODEL_FILE_DEC', MODEL_FILE_DEC),
              ('MODEL_FILE_DSC', MODEL_FILE_DSC),
              ('MODEL_FILE_FDF', MODEL_FILE_FDF),
              ('MODEL_FILE_INC', MODEL_FILE_INC),
              ('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
              ('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
              ('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
              ('MODEL_IDENTIFIER_PARAMETER', MODEL_IDENTIFIER_PARAMETER),
              ('MODEL_IDENTIFIER_STRUCTURE', MODEL_IDENTIFIER_STRUCTURE),
              ('MODEL_IDENTIFIER_VARIABLE', MODEL_IDENTIFIER_VARIABLE),
              ('MODEL_IDENTIFIER_INCLUDE', MODEL_IDENTIFIER_INCLUDE),
              ('MODEL_IDENTIFIER_PREDICATE_EXPRESSION', MODEL_IDENTIFIER_PREDICATE_EXPRESSION),
              ('MODEL_IDENTIFIER_ENUMERATE', MODEL_IDENTIFIER_ENUMERATE),
              ('MODEL_IDENTIFIER_PCD', MODEL_IDENTIFIER_PCD),
              ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
              ('MODEL_IDENTIFIER_MACRO_IFDEF', MODEL_IDENTIFIER_MACRO_IFDEF),
              ('MODEL_IDENTIFIER_MACRO_IFNDEF', MODEL_IDENTIFIER_MACRO_IFNDEF),
              ('MODEL_IDENTIFIER_MACRO_DEFINE', MODEL_IDENTIFIER_MACRO_DEFINE),
              ('MODEL_IDENTIFIER_MACRO_ENDIF', MODEL_IDENTIFIER_MACRO_ENDIF),
              ('MODEL_IDENTIFIER_MACRO_PROGMA', MODEL_IDENTIFIER_MACRO_PROGMA),
              ('MODEL_IDENTIFIER_FUNCTION_CALLING', MODEL_IDENTIFIER_FUNCTION_CALLING),
              ('MODEL_IDENTIFIER_TYPEDEF', MODEL_IDENTIFIER_TYPEDEF),
              ('MODEL_IDENTIFIER_FUNCTION_DECLARATION', MODEL_IDENTIFIER_FUNCTION_DECLARATION),
              ('MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION),
              ('MODEL_EFI_PROTOCOL', MODEL_EFI_PROTOCOL),
              ('MODEL_EFI_PPI', MODEL_EFI_PPI),
              ('MODEL_EFI_GUID', MODEL_EFI_GUID),
              ('MODEL_EFI_LIBRARY_CLASS', MODEL_EFI_LIBRARY_CLASS),
              ('MODEL_EFI_LIBRARY_INSTANCE', MODEL_EFI_LIBRARY_INSTANCE),
              ('MODEL_EFI_PCD', MODEL_EFI_PCD),
              ('MODEL_EFI_SKU_ID', MODEL_EFI_SKU_ID),
              ('MODEL_EFI_INCLUDE', MODEL_EFI_INCLUDE),
              ('MODEL_EFI_DEPEX', MODEL_EFI_DEPEX),
              ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
              ('MODEL_EFI_SOURCE_FILE', MODEL_EFI_SOURCE_FILE),
              ('MODEL_EFI_BINARY_FILE', MODEL_EFI_BINARY_FILE),
              ('MODEL_PCD', MODEL_PCD),
              ('MODEL_PCD_FIXED_AT_BUILD', MODEL_PCD_FIXED_AT_BUILD),
              ('MODEL_PCD_PATCHABLE_IN_MODULE', MODEL_PCD_PATCHABLE_IN_MODULE),
              ('MODEL_PCD_FEATURE_FLAG', MODEL_PCD_FEATURE_FLAG),
              ('MODEL_PCD_DYNAMIC_EX', MODEL_PCD_DYNAMIC_EX),
              ('MODEL_PCD_DYNAMIC_EX_DEFAULT', MODEL_PCD_DYNAMIC_EX_DEFAULT),
              ('MODEL_PCD_DYNAMIC_EX_VPD', MODEL_PCD_DYNAMIC_EX_VPD),
              ('MODEL_PCD_DYNAMIC_EX_HII', MODEL_PCD_DYNAMIC_EX_HII),
              ('MODEL_PCD_DYNAMIC', MODEL_PCD_DYNAMIC),
              ('MODEL_PCD_DYNAMIC_DEFAULT', MODEL_PCD_DYNAMIC_DEFAULT),
              ('MODEL_PCD_DYNAMIC_VPD', MODEL_PCD_DYNAMIC_VPD),
              ('MODEL_PCD_DYNAMIC_HII', MODEL_PCD_DYNAMIC_HII),
              ("MODEL_META_DATA_HEADER", MODEL_META_DATA_HEADER),
              ("MODEL_META_DATA_INCLUDE", MODEL_META_DATA_INCLUDE),
              ("MODEL_META_DATA_DEFINE", MODEL_META_DATA_DEFINE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE", MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF),
              ("MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH", MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH),
              ("MODEL_META_DATA_BUILD_OPTION", MODEL_META_DATA_BUILD_OPTION),
              ("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
              ('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
              ('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
              ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)
             ]
+
## FunctionClass
#
# One function found while scanning a source file: its header, modifier,
# name, return statement, source span, owning file, and the identifiers and
# PCDs referenced in its body.
#
# @param ID                 Database id of the function
# @param Header             Function header comment text
# @param Modifier           Declaration modifier (e.g. storage class)
# @param Name               Function name
# @param ReturnStatement    Text of the return statement
# @param StartLine/StartColumn   Start of the whole function
# @param EndLine/EndColumn       End of the whole function
# @param BodyStartLine/BodyStartColumn  Start of the function body
# @param BelongsToFile      Id of the owning file
# @param IdentifierList     Identifiers referenced in the body (fresh list by default)
# @param PcdList            PCDs referenced in the body (fresh list by default)
# @param FunNameStartLine/FunNameStartColumn  Position of the function name
#
class FunctionClass(object):
    def __init__(self, ID = -1, Header = '', Modifier = '', Name = '', ReturnStatement = '', \
                 StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1, \
                 BodyStartLine = -1, BodyStartColumn = -1, BelongsToFile = -1, \
                 IdentifierList = None, PcdList = None, \
                 FunNameStartLine = -1, FunNameStartColumn = -1):
        self.ID = ID
        self.Header = Header
        self.Modifier = Modifier
        self.Name = Name
        self.ReturnStatement = ReturnStatement
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
        self.BodyStartLine = BodyStartLine
        self.BodyStartColumn = BodyStartColumn
        self.BelongsToFile = BelongsToFile
        self.FunNameStartLine = FunNameStartLine
        self.FunNameStartColumn = FunNameStartColumn

        # Fix: the original used mutable default arguments ([]), so every
        # instance created with the defaults shared the same two list objects.
        self.IdentifierList = [] if IdentifierList is None else IdentifierList
        self.PcdList = [] if PcdList is None else PcdList
+
## IdentifierClass
#
# One identifier (variable or similar construct) found while scanning a
# source file: modifier/type/name/value, its model, owning file and
# function, and its source span.
#
# @param ID        Database id of the identifier
# @param Modifier  Declaration modifier
# @param Type      Declared type text
# @param Name      Identifier name
# @param Value     Initializer / value text
# @param Model     One of the MODEL_IDENTIFIER_* constants
# @param BelongsToFile      Id of the owning file
# @param BelongsToFunction  Id of the owning function
# @param StartLine/StartColumn/EndLine/EndColumn  Source span
#
class IdentifierClass(object):
    def __init__(self, ID = -1, Modifier = '', Type = '', Name = '', Value = '', Model = MODEL_UNKNOWN, \
                 BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
        # Identity and typing
        self.ID, self.Modifier, self.Type = ID, Modifier, Type
        self.Name, self.Value, self.Model = Name, Value, Model
        # Ownership
        self.BelongsToFile, self.BelongsToFunction = BelongsToFile, BelongsToFunction
        # Source span
        self.StartLine, self.StartColumn = StartLine, StartColumn
        self.EndLine, self.EndColumn = EndLine, EndColumn
+
## PcdDataClass
#
# One PCD reference found while scanning a source file: C name, token-space
# GUID C name, token, datum type, its model, owning file and function, and
# its source span.
#
# @param ID        Database id of the PCD
# @param CName     PCD C symbol name
# @param TokenSpaceGuidCName  Token space GUID C name
# @param Token     Token number
# @param DatumType Datum type text
# @param Model     One of the MODEL_PCD_* constants
# @param BelongsToFile      Id of the owning file
# @param BelongsToFunction  Id of the owning function
# @param StartLine/StartColumn/EndLine/EndColumn  Source span
#
class PcdDataClass(object):
    def __init__(self, ID = -1, CName = '', TokenSpaceGuidCName = '', Token = '', DatumType = '', Model = MODEL_UNKNOWN, \
                 BelongsToFile = -1, BelongsToFunction = -1, StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1):
        self.ID = ID
        self.CName = CName
        self.TokenSpaceGuidCName = TokenSpaceGuidCName
        self.Token = Token
        self.DatumType = DatumType
        # Fix: the Model parameter was accepted and documented but never
        # stored by the original __init__.
        self.Model = Model
        self.BelongsToFile = BelongsToFile
        self.BelongsToFunction = BelongsToFunction
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
+
+## FileClass
+#
+# This class defines a structure of a file
+#
+# @param ID: ID of a File
+# @param Name: Name of a File
+# @param ExtName: ExtName of a File
+# @param Path: Path of a File
+# @param FullPath: FullPath of a File
+# @param Model: Model of a File
+# @param TimeStamp: TimeStamp of a File
+# @param FunctionList: FunctionList of a File
+# @param IdentifierList: IdentifierList of a File
+# @param PcdList: PcdList of a File
+#
+# @var ID: ID of a File
+# @var Name: Name of a File
+# @var ExtName: ExtName of a File
+# @var Path: Path of a File
+# @var FullPath: FullPath of a File
+# @var Model: Model of a File
+# @var TimeStamp: TimeStamp of a File
+# @var FunctionList: FunctionList of a File
+# @var IdentifierList: IdentifierList of a File
+# @var PcdList: PcdList of a File
+#
class FileClass(object):
    ## Constructor
    #
    # Plain data holder describing a parsed file and its contents.
    #
    # Fix: the list parameters previously defaulted to shared mutable []
    # objects, so every FileClass built with defaults shared the SAME
    # FunctionList/IdentifierList/PcdList. They now default to None and a
    # fresh list is created per instance; explicit arguments behave as before.
    #
    def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
                 FunctionList = None, IdentifierList = None, PcdList = None):
        self.ID = ID
        self.Name = Name
        self.ExtName = ExtName
        self.Path = Path
        self.FullPath = FullPath
        self.Model = Model
        self.TimeStamp = TimeStamp

        self.FunctionList = FunctionList if FunctionList is not None else []
        self.IdentifierList = IdentifierList if IdentifierList is not None else []
        self.PcdList = PcdList if PcdList is not None else []
diff --git a/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py b/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py new file mode 100644 index 0000000000..cd8bd4cb7f --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/DistributionPackageClass.py @@ -0,0 +1,159 @@ +## @file
+# This file is used to define a class object to describe a distribution package
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Import Modules
+#
+import os.path
+from CommonClass import *
+from Common.Misc import sdict
+from Common.Misc import GetFiles
+from Common.DecClassObjectLight import Dec
+from Common.InfClassObjectLight import Inf
+from Common.XmlParser import *
+
+## DistributionPackageHeaderClass
+#
class DistributionPackageHeaderClass(IdentificationClass, CommonHeaderClass):
    ## Constructor
    #
    # A distribution-package header: identification plus common header
    # fields, extended with distribution-specific defaults. The flag
    # fields are string-typed ('True'/'False'), matching the XML schema.
    #
    def __init__(self):
        IdentificationClass.__init__(self)
        CommonHeaderClass.__init__(self)
        self.Vendor = ''
        self.Date = ''
        self.XmlSpecification = ''
        self.ReadOnly = 'False'
        self.RePackage = 'True'
        self.Signature = 'Md5Sum'
+
+## DistributionPackageClass
+#
+#
class DistributionPackageClass(object):
    ## Constructor
    #
    # Container for a distribution package: header, package/module surface
    # areas keyed by (Guid, Version, Path), tools and misc files.
    #
    def __init__(self):
        self.Header = DistributionPackageHeaderClass()
        self.PackageSurfaceArea = sdict() # {(Guid, Version, Path) : PackageObj}
        self.ModuleSurfaceArea = sdict()  # {(Guid, Version, Path) : ModuleObj}
        self.Tools = MiscFileClass()
        self.MiscellaneousFiles = MiscFileClass()
        self.UserExtensions = []

    ## Resolve the Guid/Version of a module's referenced library instances
    #  and package dependencies, updating the items in place.
    #
    #  Previously this logic was duplicated verbatim in both branches of
    #  GetDistributionPackage; it is factored out here.
    #
    #  @param WorkspaceDir:     Workspace root directory
    #  @param ModuleObj:        Parsed module (Inf) object to update
    #  @param GuidVersionDict:  Cache of {normalized full path : [Guid, Version]}
    #                           shared across calls so each DEC/INF is parsed once
    #
    def _ResolveModuleReferences(self, WorkspaceDir, ModuleObj, GuidVersionDict):
        # Library-class recommended instances (INF files)
        for Item in ModuleObj.LibraryClasses:
            if Item.RecommendedInstance:
                LibClassIns = os.path.normpath(os.path.join(WorkspaceDir, Item.RecommendedInstance))
                if LibClassIns not in GuidVersionDict:
                    Lib = Inf(LibClassIns, True, WorkspaceDir)
                    GuidVersionDict[LibClassIns] = [Lib.Module.ModuleHeader.Guid,
                                                    Lib.Module.ModuleHeader.Version]
                Item.RecommendedInstanceGuid = GuidVersionDict[LibClassIns][0]
                Item.RecommendedInstanceVersion = GuidVersionDict[LibClassIns][1]
        # Package dependencies (DEC files)
        for Item in ModuleObj.PackageDependencies:
            if Item.FilePath:
                PackageFilePath = os.path.normpath(os.path.join(WorkspaceDir, Item.FilePath))
                if PackageFilePath not in GuidVersionDict:
                    DepDec = Dec(PackageFilePath, True, WorkspaceDir)
                    GuidVersionDict[PackageFilePath] = [DepDec.Package.PackageHeader.Guid,
                                                        DepDec.Package.PackageHeader.Version]
                Item.PackageGuid = GuidVersionDict[PackageFilePath][0]
                Item.PackageVersion = GuidVersionDict[PackageFilePath][1]

    ## Get all included packages and modules for a distribution package
    #
    # @param WorkspaceDir: Workspace root directory
    # @param PackageList: A list of DEC paths (relative to WorkspaceDir)
    # @param ModuleList: A list of INF paths (relative to WorkspaceDir)
    #
    def GetDistributionPackage(self, WorkspaceDir, PackageList, ModuleList):
        AllGuidVersionDict = {}
        # Get Packages
        if PackageList:
            for PackageFile in PackageList:
                PackageFileFullPath = os.path.normpath(os.path.join(WorkspaceDir, PackageFile))
                DecObj = Dec(PackageFileFullPath, True, WorkspaceDir)
                PackageObj = DecObj.Package
                AllGuidVersionDict[PackageFileFullPath] = [PackageObj.PackageHeader.Guid, PackageObj.PackageHeader.Version]

                # Parse the package's INF files one by one
                for File in PackageObj.MiscFiles.Files:
                    Filename = os.path.normpath(os.path.join(PackageObj.PackageHeader.RelaPath, File.Filename))
                    (Name, ExtName) = os.path.splitext(Filename)
                    if ExtName.upper() == '.INF':
                        InfObj = Inf(Filename, True, WorkspaceDir, DecObj.Identification.PackagePath)
                        ModuleObj = InfObj.Module
                        # NOTE(review): this entry is keyed by the File object while
                        # every other entry uses a normalized full path - confirm intended
                        AllGuidVersionDict[File] = [ModuleObj.ModuleHeader.Guid, ModuleObj.ModuleHeader.Version]
                        self._ResolveModuleReferences(WorkspaceDir, ModuleObj, AllGuidVersionDict)
                        # Add module to package
                        PackageObj.Modules[(ModuleObj.ModuleHeader.Guid, ModuleObj.ModuleHeader.Version, ModuleObj.ModuleHeader.CombinePath)] = ModuleObj
                self.PackageSurfaceArea[(PackageObj.PackageHeader.Guid, PackageObj.PackageHeader.Version, PackageObj.PackageHeader.CombinePath)] = PackageObj

        # Get Modules
        if ModuleList:
            for ModuleFile in ModuleList:
                ModuleFileFullPath = os.path.normpath(os.path.join(WorkspaceDir, ModuleFile))
                InfObj = Inf(ModuleFileFullPath, True, WorkspaceDir)
                ModuleObj = InfObj.Module
                AllGuidVersionDict[ModuleFileFullPath] = [ModuleObj.ModuleHeader.Guid, ModuleObj.ModuleHeader.Version]
                self._ResolveModuleReferences(WorkspaceDir, ModuleObj, AllGuidVersionDict)
                self.ModuleSurfaceArea[(ModuleObj.ModuleHeader.Guid, ModuleObj.ModuleHeader.Version, ModuleObj.ModuleHeader.CombinePath)] = ModuleObj
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+ D = DistributionPackageClass()
+ D.GetDistributionPackage(os.getenv('WORKSPACE'), ['MdePkg/MdePkg.dec', 'TianoModulePkg/TianoModulePkg.dec'], ['MdeModulePkg/Application/HelloWorld/HelloWorld.inf'])
+ Xml = DistributionPackageXml()
+ print Xml.ToXml(D)
+ E = Xml.FromXml('C:\\2.xml')
+ #print Xml.ToXml(E)
diff --git a/BaseTools/Source/Python/CommonDataClass/FdfClass.py b/BaseTools/Source/Python/CommonDataClass/FdfClass.py new file mode 100644 index 0000000000..a9e12ed46d --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/FdfClass.py @@ -0,0 +1,402 @@ +## @file
+# classes represent data in FDF
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+## FD data in FDF
+#
+#
class FDClassObject:
    ## Constructor
    #
    # Initialize an empty FD description; every field starts unset except
    # the erase polarity, which defaults to '1'.
    #
    def __init__(self):
        self.FdUiName = ''
        self.ErasePolarity = '1'
        self.CreateFileName = None
        self.BaseAddress = None
        self.BaseAddressPcd = None
        self.Size = None
        self.SizePcd = None
        self.BlockSizeList = []   # 3-tuples of (blockSize, numBlocks, pcd)
        self.RegionList = []
        self.DefineVarDict = {}   # DEFINE variable name -> value
        self.SetVarDict = {}      # SET variable name -> value
        self.vtfRawDict = {}
+
+## FV data in FDF
+#
+#
class FvClassObject:
    ## Constructor
    #
    # Initialize an empty FV description.
    #
    def __init__(self):
        self.UiFvName = None
        self.CreateFileName = None
        self.FvNameGuid = None
        self.FvAlignment = None
        self.BsBaseAddress = None
        self.RtBaseAddress = None
        self.BlockSizeList = []      # 3-tuples of (blockSize, numBlocks, pcd)
        self.AprioriSectionList = []
        self.FfsList = []
        self.DefineVarDict = {}      # DEFINE variable name -> value
        self.SetVarDict = {}         # SET variable name -> value
        self.FvAttributeDict = {}    # attribute -> TRUE/FALSE (1/0)
+
+## Region data in FDF
+#
+#
class RegionClassObject:
    ## Constructor
    #
    # Initialize an empty FD-region description.
    #
    def __init__(self):
        self.Offset = None       # begin position of the region within the FD
        self.Size = None         # size of the region
        self.PcdOffset = None
        self.PcdSize = None
        self.RegionType = None
        self.SetVarDict = {}
        self.RegionDataList = []
+
+## FFS data in FDF
+#
+#
class FfsClassObject:
    ## Constructor
    #
    # Common fields shared by every FFS file description.
    #
    def __init__(self):
        self.NameGuid = None
        self.Alignment = None
        self.Fixed = False
        self.CheckSum = False
        self.SectionList = []
+
+## FILE statement data in FDF
+#
+#
class FileStatementClassObject (FfsClassObject) :
    ## Constructor
    #
    # A FILE statement: the common FFS fields plus naming/placement data.
    #
    def __init__(self):
        FfsClassObject.__init__(self)
        self.FvFileType = None
        self.FileName = None
        self.FvName = None
        self.FdName = None
        self.AprioriSection = None
        self.KeepReloc = None
        self.KeyStringList = []
        self.DefineVarDict = {}   # DEFINE variable name -> value
+
+## INF statement data in FDF
+#
+#
class FfsInfStatementClassObject(FfsClassObject):
    ## Constructor
    #
    # An INF statement: the common FFS fields plus INF-specific data.
    #
    def __init__(self):
        FfsClassObject.__init__(self)
        self.Rule = None
        self.Version = None
        self.Ui = None
        self.InfFileName = None
        self.KeepReloc = None
        self.UseArch = None
        self.BuildNum = ''
        self.KeyStringList = []
+
+## APRIORI section data in FDF
+#
+#
class AprioriSectionClassObject:
    ## Constructor
    #
    # An APRIORI section: a DEFINE dictionary plus the ordered FFS list.
    #
    def __init__(self):
        self.FfsList = []
        self.DefineVarDict = {}   # DEFINE variable name -> value
+
+## section data in FDF
+#
+#
class SectionClassObject:
    ## Constructor
    #
    # Base class for every section type; it carries only the alignment,
    # which subclasses inherit by calling this constructor.
    #
    def __init__(self):
        self.Alignment = None
+
+## Depex expression section in FDF
+#
+#
class DepexSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # A DEPEX section holding the dependency-expression type and expression.
    #
    def __init__(self):
        # Fix: call the base constructor so self.Alignment is initialized.
        # Every other SectionClassObject subclass in this file does this;
        # without it, reading .Alignment on a depex section raises
        # AttributeError instead of yielding the default None.
        SectionClassObject.__init__(self)
        self.DepexType = None
        self.Expression = None
+
+## Compress section data in FDF
+#
+#
class CompressSectionClassObject (SectionClassObject) :
    ## Constructor
    #
    # A compressed encapsulation section: compression type plus the
    # nested child sections.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.SectionList = []
        self.CompType = None
+
+## Data section data in FDF
+#
+#
class DataSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # A raw data section read from a file.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.SecType = None
        self.SectFileName = None
        self.SectionList = []
        self.KeepReloc = True   # relocations kept by default for data sections
+
+## Rule section data in FDF
+#
+#
class EfiSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # A leaf EFI section produced by a [Rule]; identifies the section type
    # and where its content comes from (string, file name or extension).
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.SectionType = None
        self.FileType = None
        self.FileName = None
        self.FileExtension = None
        self.StringData = None
        self.BuildNum = None
        self.KeepReloc = None
        self.Optional = False
+
+## FV image section data in FDF
+#
+#
class FvImageSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # An FV image section referencing an FV by object, name or file.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.Fv = None
        self.FvName = None
        self.FvFileName = None
        self.FvFileType = None
        self.FvFileExtension = None
+
+## GUIDed section data in FDF
+#
+#
class GuidSectionClassObject (SectionClassObject) :
    ## Constructor
    #
    # A GUID-defined encapsulation section with its processing flags.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.NameGuid = None
        self.SectionType = None
        self.SectionList = []
        self.ProcessRequired = False
        self.AuthStatusValid = False
+
+## UI section data in FDF
+#
+#
class UiSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # A UI section: a display string given inline or read from a file.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.FileName = None
        self.StringData = None
+
+## Version section data in FDF
+#
+#
class VerSectionClassObject (SectionClassObject):
    ## Constructor
    #
    # A version section: build number plus an inline or file-based string.
    #
    def __init__(self):
        SectionClassObject.__init__(self)
        self.FileName = None
        self.StringData = None
        self.BuildNum = None
+
+## Rule data in FDF
+#
+#
class RuleClassObject :
    ## Constructor
    #
    # Base class for [Rule] sections: matching criteria plus the FFS
    # attributes the rule applies to matched modules.
    #
    def __init__(self):
        self.Arch = None
        self.ModuleType = None     # module type the rule applies to
        self.TemplateName = None
        self.NameGuid = None
        self.FvFileType = None     # FFS file type to produce
        self.Alignment = None
        self.KeepReloc = None
        self.Fixed = False
        self.CheckSum = False
        self.KeyStringList = []
+
+## Complex rule data in FDF
+#
+#
class RuleComplexFileClassObject(RuleClassObject) :
    ## Constructor
    #
    # A complex rule: produces a list of leaf sections.
    #
    def __init__(self):
        RuleClassObject.__init__(self)
        self.SectionList = []
+
+## Simple rule data in FDF
+#
+#
class RuleSimpleFileClassObject(RuleClassObject) :
    ## Constructor
    #
    # A simple rule: a single file turned into one section.
    #
    def __init__(self):
        RuleClassObject.__init__(self)
        self.FileName = None
        self.FileExtension = None
        self.SectionType = ''
+
+## File extension rule data in FDF
+#
+#
class RuleFileExtensionClassObject(RuleClassObject):
    ## Constructor
    #
    # A rule matched purely by file extension.
    #
    def __init__(self):
        RuleClassObject.__init__(self)
        self.FileExtension = None
+
+## Capsule data in FDF
+#
+#
class CapsuleClassObject :
    ## Constructor
    #
    # A capsule description with its DEFINE/SET/token dictionaries.
    #
    def __init__(self):
        self.SpecName = None
        self.UiCapsuleName = None
        self.CreateFile = None
        self.GroupIdNumber = None
        self.DefineVarDict = {}   # DEFINE variable name -> value
        self.SetVarDict = {}      # SET variable name -> value
        self.TokensDict = {}      # token name -> value
        self.CapsuleDataList = []
+
+## VTF data in FDF
+#
+#
class VtfClassObject :
    ## Constructor
    #
    # A VTF description: architecture info, reset binary and the list of
    # component statements.
    #
    def __init__(self):
        self.UiName = None
        self.KeyArch = None
        self.ArchList = None
        self.ResetBin = None
        self.ComponentStatementList = []
+
+## VTF component data in FDF
+#
+#
class ComponentStatementClassObject :
    ## Constructor
    #
    # One component entry inside a VTF; every field starts unset.
    #
    def __init__(self):
        for FieldName in ('CompName', 'CompLoc', 'CompType', 'CompVer',
                          'CompCs', 'CompBin', 'CompSym', 'CompSize',
                          'FilePos'):
            setattr(self, FieldName, None)
+
+## OptionROM data in FDF
+#
+#
class OptionRomClassObject:
    ## Constructor
    #
    # An OptionROM description: the driver name and its FFS files.
    #
    def __init__(self):
        self.DriverName = None
        self.FfsList = []
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/CommonDataClass/ModuleClass.py b/BaseTools/Source/Python/CommonDataClass/ModuleClass.py new file mode 100644 index 0000000000..9d780725b9 --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/ModuleClass.py @@ -0,0 +1,486 @@ +## @file
+# This file is used to define a class object to describe a module
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Import Modules
+#
+from CommonClass import *
+
+## ModuleHeaderClass
+#
+# This class defined header items used in Module file
+#
+# @param IdentificationClass: Inherited from IdentificationClass class
+# @param CommonHeaderClass: Inherited from CommonHeaderClass class
+# @param DefineClass: Inherited from DefineClass class
+#
+# @var ModuleType: To store value for ModuleType
+# @var SupArchList: To store value for SupArchList, selection scope is in below list
+# EBC | IA32 | X64 | IPF | ARM | PPC
+# @var BinaryModule: To store value for BinaryModule
+# @var OutputFileBasename: To store value for OutputFileBasename
+# @var ClonedFrom: To store value for ClonedFrom, it is a set structure as
+# [ ClonedRecordClass, ... ]
+# @var PcdIsDriver: To store value for PcdIsDriver, selection scope is in below list
+# PEI_PCD_DRIVER | DXE_PCD_DRIVER
+# @var TianoR8FlashMap_h: To store value for TianoR8FlashMap_h
+# @var InfVersion: To store value for InfVersion
+# @var EfiSpecificationVersion: To store value for EfiSpecificationVersion
+# @var EdkReleaseVersion: To store value for EdkReleaseVersion
+# @var LibraryClass: To store value for LibraryClass, it is a set structure as
+# [ LibraryClassClass, ...]
+# @var ComponentType: To store value for ComponentType, selection scope is in below list
+# LIBRARY | SECURITY_CORE | PEI_CORE | COMBINED_PEIM_DRIVER | PIC_PEIM | RELOCATABLE_PEIM | BS_DRIVER | RT_DRIVER | SAL_RT_DRIVER | APPLICATION
+# @var MakefileName: To store value for MakefileName
+# @var BuildNumber: To store value for BuildNumber
+# @var BuildType: To store value for BuildType
+# @var FfsExt: To store value for FfsExt
+# @var FvExt: To store value for FvExt
+# @var SourceFv: To store value for SourceFv
+# @var CustomMakefile: To store value for CustomMakefile, it is a set structure as
+# { Family : Filename, ... }
+# @var Shadow: To store value for Shadow
+# @var MacroDefines To store the defined macros
+#
class ModuleHeaderClass(IdentificationClass, CommonHeaderClass, DefineClass):
    ## Constructor
    #
    # Module (INF) header: identification, common header and DEFINE data,
    # extended with the module-specific fields documented above.
    #
    def __init__(self):
        IdentificationClass.__init__(self)
        CommonHeaderClass.__init__(self)
        DefineClass.__init__(self)
        # String-valued fields
        self.ModuleType = ''
        self.OutputFileBasename = ''
        self.PcdIsDriver = ''
        self.InfVersion = ''
        self.EfiSpecificationVersion = ''
        self.PiSpecificationVersion = ''
        self.UefiSpecificationVersion = ''
        self.EdkReleaseVersion = ''
        self.ComponentType = ''
        self.MakefileName = ''
        self.BuildNumber = ''
        self.BuildType = ''
        self.FfsExt = ''
        self.FvExt = ''
        self.SourceFv = ''
        self.Shadow = ''
        self.SourceOverridePath = ''
        # Boolean flags
        self.BinaryModule = False
        self.TianoR8FlashMap_h = False
        # Collections
        self.SupModuleList = []
        self.SupArchList = []
        self.ClonedFrom = []
        self.LibraryClass = []
        self.Specification = []
        self.CustomMakefile = {}   # {family : makefile name}
        self.MacroDefines = {}
+
+## ModuleSourceFileClass
+#
+# This class defined source file item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param SourceFile: Input value for SourceFile, default is ''
+# @param TagName: Input value for TagName, default is ''
+# @param ToolCode: Input value for ToolCode, default is ''
+# @param ToolChainFamily: Input value for ToolChainFamily, default is ''
+# @param FeatureFlag: Input value for FeatureFlag, default is ''
+# @param SupArchList: Input value for SupArchList, default is []
+#
+# @var SourceFile: To store value for SourceFile
+# @var TagName: To store value for TagName
+# @var ToolCode: To store value for ToolCode
+# @var ToolChainFamily: To store value for ToolChainFamily
+#
class ModuleSourceFileClass(CommonClass):
    # One [Sources] entry of a module.
    # Note: own attributes are assigned before CommonClass.__init__ runs;
    # preserve this order (the base constructor is defined in CommonClass).
    def __init__(self, SourceFile = '', TagName = '', ToolCode = '', ToolChainFamily = '', FeatureFlag = '', SupArchList = None):
        self.SourceFile = SourceFile             # path of the source file
        self.TagName = TagName
        self.ToolCode = ToolCode
        self.ToolChainFamily = ToolChainFamily
        self.FileType = ''                       # not a parameter; assigned later by callers
        CommonClass.__init__(self, FeatureFlag = FeatureFlag, SupArchList = SupArchList)
+
+## ModuleBinaryFileClass
+#
+# This class defined binary file item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param BinaryFile: Input value for BinaryFile, default is ''
+# @param FileType: Input value for FileType, default is ''
+# @param FeatureFlag: Input value for FeatureFlag, default is ''
+# @param SupArchList: Input value for SupArchList, default is []
+#
+# @var BinaryFile: To store value for BinaryFile
+# @var FileType: To store value for FileType, selection scope is in below list
+# FW | GUID | FREEFORM | UEFI_APP | UNI_UI | UNI_VER | LIB | PE32 | PIC | PEI_DEPEX | DXE_DEPEX | TE | VER | UI | BIN | FV
+# @var Target: To store value for Target
+# @var ToolChainFamily: To store value for ToolChainFamily
+#
class ModuleBinaryFileClass(CommonClass):
    # One [Binaries] entry of a module.
    # Note: some attributes are set before CommonClass.__init__ and the
    # list-valued ones after it; the ordering is preserved deliberately
    # because the base constructor is defined elsewhere (CommonClass).
    def __init__(self, BinaryFile = '', FileType = '', Target = '', FeatureFlag = '', SupArchList = None):
        self.BinaryFile = BinaryFile   # path of the binary file
        self.FileType = FileType       # see the class comment above for the value set
        self.Target = Target
        CommonClass.__init__(self, FeatureFlag = FeatureFlag, SupArchList = SupArchList)
        self.Filenames = []
        self.PatchPcdValues = []
        self.PcdExValues = []
        self.LibraryInstances = []
        self.BuildFlags = []
+
+## ModulePackageDependencyClass
+#
+# This class defined package dependency item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param DefineClass: Input value for DefineClass class
+#
+# @var FilePath: To store value for FilePath
+# @var PackageName: To store value for PackageName
+# @var PackageVersion: To store value for PackageVersion
+# @var PackageGuid: To store value for PackageGuid
+#
class ModulePackageDependencyClass(CommonClass, DefineClass):
    # One [Packages] entry: a DEC file the module depends on.
    # Own attributes are assigned before the base constructors run;
    # keep this order (the bases are defined in CommonClass).
    def __init__(self):
        self.FilePath = ''          # workspace-relative path of the DEC file
        self.PackageName = ''
        self.PackageVersion = ''
        self.PackageGuid = ''
        self.Description = ''
        CommonClass.__init__(self)
        DefineClass.__init__(self)
+
+## ModuleLibraryClass
+#
+# This class defined library item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var Library: To store value for Library
+#
class ModuleLibraryClass(CommonClass):
    # A single library used by the module; Library holds the library name.
    def __init__(self):
        self.Library = ''
        CommonClass.__init__(self)   # base init intentionally after own attribute
+
+## ModuleEventClass
+#
+# This class defined event item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var CName: To store value for CName
+# @var GuidCName: To store value for GuidCName
+# @var Type: To store value for Type, selection scope is in below list
+# CREATE_EVENT | SIGNAL_EVENT
+#
class ModuleEventClass(CommonClass):
    # An event declared by the module; Type is CREATE_EVENT or SIGNAL_EVENT
    # (see the class comment above).
    def __init__(self):
        self.CName = ''
        self.GuidCName = ''
        self.Type = ''
        CommonClass.__init__(self)
+
+## ModuleHobClass
+#
+# This class defined hob item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var GuidCName: To store value for GuidCName
+# @var Type: To store value for Type, selection scope is in below list
+# PHIT | MEMORY_ALLOCATION | RESOURCE_DESCRIPTOR | GUID_EXTENSION | FIRMWARE_VOLUME | CPU | POOL | CAPSULE_VOLUME
+#
class ModuleHobClass(CommonClass):
    # A HOB used by the module; see the class comment above for Type values.
    def __init__(self):
        self.Type = ''
        self.GuidCName = ''
        CommonClass.__init__(self)
+
+## ModuleVariableClass
+#
+# This class defined variable item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var GuidCName: To store value for GuidCName
+# @var Name: To store value for Name
+#
class ModuleVariableClass(CommonClass):
    # A variable used by the module: its name and vendor GUID C name.
    def __init__(self):
        self.Name = ''
        self.GuidCName = ''
        CommonClass.__init__(self)
+
+## ModuleBootModeClass
+#
+# This class defined boot mode item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var Name: To store value for Name, selection scope is in below list
+# FULL | MINIMAL | NO_CHANGE | DIAGNOSTICS | DEFAULT | S2_RESUME | S3_RESUME | S4_RESUME | S5_RESUME | FLASH_UPDATE | RECOVERY_FULL | RECOVERY_MINIMAL | RECOVERY_NO_CHANGE | RECOVERY_DIAGNOSTICS | RECOVERY_DEFAULT | RECOVERY_S2_RESUME | RECOVERY_S3_RESUME | RECOVERY_S4_RESUME | RECOVERY_S5_RESUME | RECOVERY_FLASH_UPDATE
+#
class ModuleBootModeClass(CommonClass):
    # A boot mode supported by the module; see the class comment above for
    # the allowed Name values.
    def __init__(self):
        self.Name = ''
        CommonClass.__init__(self)
+
+## ModuleSystemTableClass
+#
+# This class defined system table item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var CName: To store value for CName
+#
class ModuleSystemTableClass(CommonClass):
    # A system table entry used by the module, identified by its C name.
    def __init__(self):
        self.CName = ''
        CommonClass.__init__(self)
+
+## ModuleDataHubClass
+#
+# This class defined data hub item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var CName: To store value for CName
+#
class ModuleDataHubClass(CommonClass):
    # A data hub record used by the module, identified by its C name.
    def __init__(self):
        self.CName = ''
        CommonClass.__init__(self)
+
+## ModuleHiiPackageClass
+#
+# This class defined Hii package item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var CName: To store value for CName
+#
class ModuleHiiPackageClass(CommonClass):
    # A HII package produced by the module, identified by its C name.
    def __init__(self):
        self.CName = ''
        CommonClass.__init__(self)
+
+## ModuleExternImageClass
+#
+# This class defined Extern Image item used in Module file
+#
+# @param object: Inherited from object class
+#
+# @var ModuleEntryPoint: To store value for ModuleEntryPoint
+# @var ModuleUnloadImage: To store value for ModuleUnloadImage
+#
class ModuleExternImageClass(object):
    ## Constructor
    #
    # Holds a module's entry-point and unload-image symbol names.
    #
    def __init__(self):
        self.ModuleUnloadImage = ''
        self.ModuleEntryPoint = ''
+
+## ModuleExternLibraryClass
+#
+# This class defined Extern Library item used in Module file
+#
+# @param object: Inherited from object class
+#
+# @var Constructor: To store value for Constructor
+# @var Destructor: To store value for Destructor
+#
class ModuleExternLibraryClass(object):
    ## Constructor
    #
    # Holds a library's constructor and destructor symbol names.
    #
    def __init__(self):
        self.Destructor = ''
        self.Constructor = ''
+
+## ModuleExternDriverClass
+#
+# This class defined Extern Driver item used in Module file
+#
+# @param object: Inherited from object class
+#
+# @var DriverBinding: To store value for DriverBinding
+# @var ComponentName: To store value for ComponentName
+# @var DriverConfig: To store value for DriverConfig
+# @var DriverDiag: To store value for DriverDiag
+#
class ModuleExternDriverClass(object):
    ## Constructor
    #
    # Holds a driver's binding / component-name / configuration /
    # diagnostics protocol symbol names; all start empty.
    #
    def __init__(self):
        for FieldName in ('DriverBinding', 'ComponentName',
                          'DriverConfig', 'DriverDiag'):
            setattr(self, FieldName, '')
+
+## ModuleExternCallBackClass
+#
+# This class defined Extern Call Back item used in Module file
+#
+# @param object: Inherited from object class
+#
+# @var SetVirtualAddressMapCallBack: To store value for SetVirtualAddressMapCallBack
+# @var ExitBootServicesCallBack: To store value for ExitBootServicesCallBack
+#
class ModuleExternCallBackClass(object):
    ## Constructor
    #
    # Holds the SetVirtualAddressMap / ExitBootServices callback
    # symbol names of a module.
    #
    def __init__(self):
        self.ExitBootServicesCallBack = ''
        self.SetVirtualAddressMapCallBack = ''
+
+## ModuleExternClass
+#
+# This class defined Extern used in Module file
+#
+# @param object: Inherited from object class
+#
+#
class ModuleExternClass(CommonClass):
    # A combined [Extern] entry: entry point / unload image for drivers,
    # constructor / destructor for libraries. Own attributes are assigned
    # before the base constructor runs; keep this order.
    def __init__(self):
        self.EntryPoint = ''
        self.UnloadImage = ''
        self.Constructor = ''
        self.Destructor = ''
        CommonClass.__init__(self)
+
+## ModuleDepexClass
+#
+# This class defined depex item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param DefineClass: Input value for DefineClass class
+#
+# @var Depex: To store value for Depex
+#
class ModuleDepexClass(CommonClass, DefineClass):
    # A [Depex] entry; the base constructors run first, then the
    # dependency-expression text is initialized empty.
    def __init__(self):
        CommonClass.__init__(self)
        DefineClass.__init__(self)
        self.Depex = ''   # dependency expression text
+
+## ModuleNmakeClass
+#
+# This class defined nmake item used in Module file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var Name: To store value for Name
+# @var Value: To store value for Value
+#
class ModuleNmakeClass(CommonClass):
    ## Constructor
    #
    # One [Nmake] name/value pair of the module.
    #
    def __init__(self):
        CommonClass.__init__(self)
        self.Value = ''
        self.Name = ''
+
+## ModuleClass
+#
+# This class defined a complete module item
+#
+# @param object: Inherited from object class
+#
+# @var Header: To store value for Header, it is a structure as
+# {Arch : ModuleHeaderClass}
+# @var LibraryClasses: To store value for LibraryClasses, it is a list structure as
+# [ LibraryClassClass, ...]
+# @var Libraries: To store value for Libraries, it is a list structure as
+# [ ModuleLibraryClass, ...]
+# @var Sources: To store value for Sources, it is a list structure as
+# [ ModuleSourceFileClass, ...]
+# @var Binaries: To store value for Binaries, it is a list structure as
+# [ ModuleBinaryFileClass, ...]
+# @var NonProcessedFiles: To store value for NonProcessedFiles, it is a list structure as
+# [ '', '', ...]
+# @var PackageDependencies: To store value for PackageDependencies, it is a list structure as
+# [ ModulePackageDependencyClass, ... ]
+# @var Nmake: To store value for Nmake, it is a list structure as
+# [ ModuleNmakeClass, ... ]
+# @var Depex: To store value for Depex, it is a list structure as
+# [ ModuleDepexClass, ... ]
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludeClass, ...]
+# @var Protocols: To store value for Protocols, it is a list structure as
+# [ ProtocolClass, ...]
+# @var Ppis: To store value for Ppis, it is a list structure as
+# [ PpiClass, ...]
+# @var Events: To store value for Events, it is a list structure as
+# [ ModuleEventClass, ...]
+# @var Hobs: To store value for Hobs, it is a list structure as
+# [ ModuleHobClass, ...]
+# @var Variables: To store value for Variables, it is a list structure as
+# [ ModuleVariableClass, ...]
+# @var BootModes: To store value for BootModes, it is a list structure as
+# [ ModuleBootModeClass, ...]
+# @var SystemTables: To store value for SystemTables, it is a list structure as
+# [ ModuleSystemTableClass, ...]
+# @var DataHubs: To store value for DataHubs, it is a list structure as
+# [ ModuleDataHubClass, ...]
+# @var HiiPackages: To store value for HiiPackages, it is a list structure as
+# [ ModuleHiiPackageClass, ...]
+# @var Guids: To store value for Guids, it is a list structure as
+# [ GuidClass, ...]
+# @var PcdCodes: To store value for PcdCodes, it is a list structure as
+# [ PcdClass, ...]
+# @var ExternImages: To store value for ExternImages, it is a list structure as
+# [ ModuleExternImageClass, ...]
+# @var ExternLibraries: To store value for ExternLibraries, it is a list structure as
+# [ ModuleExternLibraryClass, ...]
+# @var ExternDrivers: To store value for ExternDrivers, it is a list structure as
+# [ ModuleExternDriverClass, ...]
+# @var ExternCallBacks: To store value for ExternCallBacks, it is a list structure as
+# [ ModuleExternCallBackClass, ...]
+# @var BuildOptions: To store value for BuildOptions, it is a list structure as
+# [ BuildOptionClass, ...]
+# @var UserExtensions: To store value for UserExtensions, it is a list structure as
+# [ UserExtensionsClass, ...]
+#
class ModuleClass(object):
    """Aggregate, in-memory description of one module (INF file).

    Pure data holder: every attribute starts empty and is filled in by the
    parsing code elsewhere in BaseTools.
    """

    def __init__(self):
        # Per-arch header map and the parsed module header object.
        self.Header = {}
        self.ModuleHeader = ModuleHeaderClass()
        # Scalar members that stay None until a value is parsed
        # (None is immutable, so chained assignment is safe here).
        self.PeiDepex = self.DxeDepex = self.SmmDepex = None
        self.UserExtensions = self.MiscFiles = None
        # List members — each gets its own fresh list.
        self.LibraryClasses = []
        self.Libraries = []
        self.Sources = []
        self.Binaries = []
        self.NonProcessedFiles = []
        self.PackageDependencies = []
        self.Nmake = []
        self.Depex = []
        self.Includes = []
        self.Protocols = []
        self.Ppis = []
        self.Events = []
        self.Hobs = []
        self.Variables = []
        self.BootModes = []
        self.SystemTables = []
        self.DataHubs = []
        self.HiiPackages = []
        self.Guids = []
        self.PcdCodes = []
        self.ExternImages = []
        self.ExternLibraries = []
        self.ExternDrivers = []
        self.ExternCallBacks = []
        self.Externs = []
        self.BuildOptions = []
        self.FileList = []
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Smoke check only: construct an empty ModuleClass; the instance is unused.
    M = ModuleClass()
diff --git a/BaseTools/Source/Python/CommonDataClass/PackageClass.py b/BaseTools/Source/Python/CommonDataClass/PackageClass.py new file mode 100644 index 0000000000..c064f25ddb --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/PackageClass.py @@ -0,0 +1,127 @@ +## @file
+# This file is used to define a class object to describe a package
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Import Modules
+#
+from CommonClass import *
+from Common.Misc import sdict
+
+## PackageHeaderClass
+#
+# This class defined header items used in Package file
+#
+# @param IdentificationClass: Inherited from IdentificationClass class
+# @param CommonHeaderClass: Inherited from CommonHeaderClass class
+#
+# @var DecSpecification: To store value for DecSpecification
+# @var ReadOnly: To store value for ReadOnly
+# @var RePackage: To store value for RePackage
+# @var ClonedFrom: To store value for ClonedFrom, it is a set structure as
+# [ ClonedRecordClass, ...]
+#
class PackageHeaderClass(IdentificationClass, CommonHeaderClass):
    """Header information of a package (DEC) file."""

    def __init__(self):
        # Initialize both declared bases first.
        IdentificationClass.__init__(self)
        CommonHeaderClass.__init__(self)
        # Boolean flags default to off (False is immutable; chaining is safe).
        self.ReadOnly = self.RePackage = False
        # String fields start empty.
        self.DecSpecification = ''
        self.PackagePath = ''
        # Cloning history: [ClonedRecordClass, ...]
        self.ClonedFrom = []
+
+## PackageIndustryStdHeaderClass
+#
+# This class defined industry std header items used in Package file
+#
# @param CommonClass: Inherited from CommonClass class
+#
+# @var Name: To store value for Name
+# @var IncludeHeader: To store value for IncludeHeader
+#
class PackageIndustryStdHeaderClass(CommonClass):
    """An industry-standard include header declared by a package.

    Fix: CommonClass.__init__ is now invoked *before* this class assigns its
    own attributes — matching every sibling class in this module and ensuring
    the base initializer cannot clobber Name/IncludeHeader if it ever defines
    attributes of the same name.
    """

    def __init__(self):
        CommonClass.__init__(self)
        self.Name = ''           # header name
        self.IncludeHeader = ''  # path of the include header file
+
+## PackageIncludePkgHeaderClass
+#
+# This class defined include Pkg header items used in Package file
+#
+# @param object: Inherited from object class
+#
+# @var IncludeHeader: To store value for IncludeHeader
+# @var ModuleType: To store value for ModuleType, it is a set structure as
+# BASE | SEC | PEI_CORE | PEIM | DXE_CORE | DXE_DRIVER | DXE_RUNTIME_DRIVER | DXE_SAL_DRIVER | DXE_SMM_DRIVER | TOOL | UEFI_DRIVER | UEFI_APPLICATION | USER_DEFINED
+#
class PackageIncludePkgHeaderClass(object):
    """One IncludePkgHeader entry: a header file plus the module types it serves."""

    def __init__(self):
        # Module types (BASE | SEC | PEI_CORE | PEIM | DXE_CORE | ...) this
        # header applies to.
        self.ModuleType = []
        # Path of the include header file.
        self.IncludeHeader = ''
+
+## PackageClass
+#
+# This class defined a complete package item
+#
+# @param object: Inherited from object class
+#
+# @var Header: To store value for Header, it is a structure as
+# {Arch : PackageHeaderClass}
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludeClass, ...]
+# @var LibraryClassDeclarations: To store value for LibraryClassDeclarations, it is a list structure as
+# [ LibraryClassClass, ...]
+# @var IndustryStdHeaders: To store value for IndustryStdHeaders, it is a list structure as
+# [ PackageIndustryStdHeader, ...]
+# @var ModuleFiles: To store value for ModuleFiles, it is a list structure as
+# [ '', '', ...]
+# @var PackageIncludePkgHeaders: To store value for PackageIncludePkgHeaders, it is a list structure as
+# [ PackageIncludePkgHeader, ...]
+# @var GuidDeclarations: To store value for GuidDeclarations, it is a list structure as
+# [ GuidClass, ...]
+# @var ProtocolDeclarations: To store value for ProtocolDeclarations, it is a list structure as
+# [ ProtocolClass, ...]
+# @var PpiDeclarations: To store value for PpiDeclarations, it is a list structure as
+# [ PpiClass, ...]
+# @var PcdDeclarations: To store value for PcdDeclarations, it is a list structure as
+# [ PcdClass, ...]
+# @var UserExtensions: To store value for UserExtensions, it is a list structure as
+# [ UserExtensionsClass, ...]
+#
class PackageClass(object):
    """Complete, parsed representation of one package (DEC file)."""

    def __init__(self):
        self.PackageHeader = PackageHeaderClass()
        # Per-arch headers: {Arch : PackageHeaderClass}
        self.Header = {}
        self.Includes = []
        self.IndustryStdHeaders = []
        self.PackageIncludePkgHeaders = []
        # Declarations published by this package.
        self.LibraryClassDeclarations = []
        self.GuidDeclarations = []
        self.ProtocolDeclarations = []
        self.PpiDeclarations = []
        self.PcdDeclarations = []
        self.PcdChecks = []
        # Modules belonging to the package.
        self.ModuleFiles = []
        # Ordered map: {[Guid, Value, Path(relative to WORKSPACE)] : ModuleClassObj}
        self.Modules = sdict()
        self.UserExtensions = UserExtensionsClass()
        self.MiscFiles = MiscFileClass()
        self.FileList = []
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Smoke check only: construct an empty PackageClass; the instance is unused.
    P = PackageClass()
diff --git a/BaseTools/Source/Python/CommonDataClass/PlatformClass.py b/BaseTools/Source/Python/CommonDataClass/PlatformClass.py new file mode 100644 index 0000000000..1966fb9f8c --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/PlatformClass.py @@ -0,0 +1,432 @@ +## @file
+# This file is used to define a class object to describe a platform
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Import Modules
+#
+from CommonClass import *
+
+## SkuInfoListClass
+#
+# This class defined sku info list item used in platform file
+#
+# @param IncludeStatementClass: Inherited from IncludeStatementClass class
+#
+# @var SkuInfoList: To store value for SkuInfoList, it is a set structure as
+# { SkuName : SkuId }
+#
class SkuInfoListClass(IncludeStatementClass):
    """Holds the platform's SKU declarations as a {SkuName: SkuId} mapping."""

    def __init__(self):
        IncludeStatementClass.__init__(self)
        self.SkuInfoList = {}  # {SkuName : SkuId}
+
+## PlatformHeaderClass
+#
+# This class defined header items used in Platform file
+#
+# @param IdentificationClass: Inherited from IdentificationClass class
+# @param CommonHeaderClass: Inherited from CommonHeaderClass class
+# @param DefineClass: Inherited from DefineClass class
+#
+# @var DscSpecification: To store value for DscSpecification
+# @var SupArchList: To store value for SupArchList, selection scope is in below list
+# EBC | IA32 | X64 | IPF | ARM | PPC
+# @var BuildTargets: To store value for BuildTargets, selection scope is in below list
+# RELEASE | DEBUG
+# @var IntermediateDirectories: To store value for IntermediateDirectories, selection scope is in below list
+# MODULE | UNIFIED
+# @var OutputDirectory: To store value for OutputDirectory
+# @var ForceDebugTarget: To store value for ForceDebugTarget
+# @var SkuIdName: To store value for SkuIdName
+# @var BuildNumber: To store value for BuildNumber
+# @var MakefileName: To store value for MakefileName
+# @var ClonedFrom: To store value for ClonedFrom, it is a list structure as
+# [ ClonedRecordClass, ... ]
+#
class PlatformHeaderClass(IdentificationClass, CommonHeaderClass, DefineClass):
    """Header information of a platform (DSC) file."""

    def __init__(self):
        # Initialize all three declared bases.
        IdentificationClass.__init__(self)
        CommonHeaderClass.__init__(self)
        DefineClass.__init__(self)
        # Simple string settings.
        self.DscSpecification = ''
        self.IntermediateDirectories = ''  # MODULE | UNIFIED
        self.OutputDirectory = ''
        self.ForceDebugTarget = ''
        self.BuildNumber = ''
        self.MakefileName = ''
        # List settings.
        self.SupArchList = []              # EBC | IA32 | X64 | IPF | ARM | PPC
        self.BuildTargets = []             # RELEASE | DEBUG
        self.SkuIdName = []
        self.ClonedFrom = []               # [ClonedRecordClass, ...]
+
+## PlatformFlashDefinitionFileClass
+#
+# This class defined FlashDefinitionFile item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var Id: To store value for Id
+# @var UiName: To store value for UiName
+# @var Preferred: To store value for Preferred
+# @var FilePath: To store value for FilePath
+#
class PlatformFlashDefinitionFileClass(object):
    """Reference to the platform's flash-definition (FDF) file."""

    def __init__(self):
        self.Preferred = False
        # Identifier, display name and path all start empty
        # (strings are immutable, so chained assignment is safe).
        self.Id = self.UiName = self.FilePath = ''
+
+## PlatformFvImageOptionClass
+#
+# This class defined FvImageOption item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var FvImageOptionName: To store value for FvImageOptionName
+# @var FvImageOptionValues: To store value for FvImageOptionValues
+#
class PlatformFvImageOptionClass(object):
    """One FV-image option: a name plus its list of values."""

    def __init__(self):
        self.FvImageOptionValues = []
        self.FvImageOptionName = ''
+
+## PlatformFvImageClass
+#
+# This class defined FvImage item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var Name: To store value for Name
+# @var Value: To store value for Value
+# @var Type: To store value for Type, selection scope is in below list
+# Attributes | Options | Components | ImageName
+# @var FvImageNames: To store value for FvImageNames
+# @var FvImageOptions: To store value for FvImageOptions, it is a list structure as
+# [ PlatformFvImageOption, ...]
+#
class PlatformFvImageClass(object):
    """One FvImage element of the platform description."""

    def __init__(self):
        # Type is one of: Attributes | Options | Components | ImageName.
        # Strings are immutable, so chained assignment is safe.
        self.Name = self.Value = self.Type = ''
        self.FvImageNames = []
        self.FvImageOptions = []  # [PlatformFvImageOption, ...]
+
+## PlatformFvImageNameClass
+#
+# This class defined FvImageName item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var Name: To store value for Name
+# @var Type: To store value for Type, selection scope is in below list
+# FV_MAIN | FV_MAIN_COMPACT | NV_STORAGE | FV_RECOVERY | FV_RECOVERY_FLOPPY | FV_FILE | CAPSULE_CARGO | NULL | USER_DEFINED
+# @var FvImageOptions: To store value for FvImageOptions, it is a list structure as
+# [ PlatformFvImageOption, ...]
+#
class PlatformFvImageNameClass(object):
    """A named FV image (FV_MAIN, NV_STORAGE, FV_RECOVERY, ...) with its options."""

    def __init__(self):
        self.FvImageOptions = []  # [PlatformFvImageOption, ...]
        self.Name = ''
        self.Type = ''
+
+## PlatformFvImagesClass
+#
+# This class defined FvImages item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var FvImages: To store value for FvImages
+#
class PlatformFvImagesClass(object):
    """Container for a list of FvImage entries."""

    def __init__(self):
        # Populated with FvImage items by the platform parser.
        self.FvImages = []
+
+## PlatformAntTaskClass
+#
+# This class defined AntTask item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var Id: To store value for Id
+# @var AntCmdOptions: To store value for AntCmdOptions
+# @var FilePath: To store value for FilePath
+#
class PlatformAntTaskClass(object):
    """A user-defined Ant task: id, command-line options and task file path."""

    def __init__(self):
        # All three settings are plain strings, empty until parsed.
        self.Id = self.AntCmdOptions = self.FilePath = ''
+
+## PlatformFfsSectionClass
+#
+# This class defined FfsSection item used in platform file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var BindingOrder: To store value for BindingOrder
+# @var Compressible: To store value for Compressible
+# @var SectionType: To store value for SectionType
+# @var EncapsulationType: To store value for EncapsulationType
+# @var ToolName: To store value for ToolName
+# @var Filenames: To store value for Filenames
+# @var Args: To store value for Args
+# @var OutFile: To store value for OutFile
+# @var OutputFileExtension: To store value for OutputFileExtension
+# @var ToolNameElement: To store value for ToolNameElement
+#
class PlatformFfsSectionClass(CommonClass):
    """One FFS Section element from the platform build options."""

    def __init__(self):
        CommonClass.__init__(self)
        # All simple settings are strings that default to empty
        # (immutable, so chained assignment is safe).
        self.BindingOrder = self.Compressible = ''
        self.SectionType = self.EncapsulationType = ''
        self.ToolName = self.ToolNameElement = ''
        self.Args = self.OutFile = self.OutputFileExtension = ''
        # Input file names for this section.
        self.Filenames = []
+
+## PlatformFfsSectionsClass
+#
+# This class defined FfsSections item used in platform file
+#
+# @param CommonClass: Inherited from CommonClass class
+#
+# @var BindingOrder: To store value for BindingOrder
+# @var Compressible: To store value for Compressible
+# @var SectionType: To store value for SectionType
+# @var EncapsulationType: To store value for EncapsulationType
+# @var ToolName: To store value for ToolName
+# @var Section: To store value for Section, it is a list structure as
+# [ PlatformFfsSectionClass, ... ]
+# @var Sections: To store value for Sections, it is a list structure as
+# [ PlatformFfsSectionsClass, ...]
+#
class PlatformFfsSectionsClass(CommonClass):
    """An FFS Sections element: attributes plus nested Section/Sections lists."""

    def __init__(self):
        CommonClass.__init__(self)
        # Simple string attributes (immutable; chained assignment is safe).
        self.BindingOrder = self.Compressible = ''
        self.SectionType = self.EncapsulationType = self.ToolName = ''
        # Leaf sections and recursively nested section groups.
        self.Section = []   # [PlatformFfsSectionClass, ...]
        self.Sections = []  # [PlatformFfsSectionsClass, ...]
+
+## PlatformFfsClass
+#
+# This class defined Ffs item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var Attribute: To store value for Attribute, it is a set structure as
+# { [(Name, PlatformFfsSectionsClass)] : Value}
+# @var Sections: To store value for Sections, it is a list structure as
+# [ PlatformFfsSectionsClass]
# @var Key: To store value for Key (the FFS format key)
+#
class PlatformFfsClass(object):
    """One FFS layout rule, looked up by its format key."""

    def __init__(self):
        self.Key = ''        # FFS format key
        self.Attribute = {}  # {(Name, PlatformFfsSectionsClass) : Value}
        self.Sections = []   # [PlatformFfsSectionsClass, ...]
+
+## PlatformBuildOptionClass
+#
+# This class defined BuildOption item used in platform file
+#
+# @param object: Inherited from object class
+#
+# @var UserDefinedAntTasks: To store value for UserDefinedAntTasks, it is a set structure as
+# { [Id] : PlatformAntTaskClass, ...}
+# @var Options: To store value for Options, it is a list structure as
+# [ BuildOptionClass, ...]
+# @var UserExtensions: To store value for UserExtensions, it is a set structure as
+# { [(UserID, Identifier)] : UserExtensionsClass, ...}
+# @var FfsKeyList: To store value for FfsKeyList, it is a set structure as
+# { [FfsKey]: PlatformFfsClass, ...}
+#
class PlatformBuildOptionClass(object):
    """Build-option section of a platform: plain options plus user-defined extras."""

    def __init__(self):
        self.Options = []              # [BuildOptionClass, ...]
        self.UserDefinedAntTasks = {}  # {Id : PlatformAntTaskClass}
        self.UserExtensions = {}       # {(UserID, Identifier) : UserExtensionsClass}
        self.FfsKeyList = {}           # {FfsKey : PlatformFfsClass}
+
+## PlatformBuildOptionClasses
+#
+# This class defined BuildOption item list used in platform file
+#
+# @param IncludeStatementClass: Inherited from IncludeStatementClass class
+#
+# @var FvBinding: To store value for FvBinding
+# @var FfsFileNameGuid: To store value for FfsFileNameGuid
+# @var FfsFormatKey: To store value for FfsFormatKey
+# @var BuildOptionList: To store value for BuildOptionList, it is a list structure as
+# [ BuildOptionClass, ... ]
+#
class PlatformBuildOptionClasses(IncludeStatementClass):
    """A list of build options plus FFS binding information."""

    def __init__(self):
        IncludeStatementClass.__init__(self)
        # FFS binding settings, empty strings until parsed (immutable; safe to chain).
        self.FvBinding = self.FfsFileNameGuid = self.FfsFormatKey = ''
        self.BuildOptionList = []  # [BuildOptionClass, ...]
+
+## PlatformLibraryClass
+#
+# This class defined Library item used in platform file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param DefineClass: Inherited from DefineClass class
+# @param Name: Input value for Name, default is ''
+# @param FilePath: Input value for FilePath, default is ''
+#
+# @var Name: To store value for Name
+# @var FilePath: To store value for FilePath
+# @var ModuleType: To store value for ModuleType
+# @var SupModuleList: To store value for SupModuleList
+# @var ModuleGuid: To store value for ModuleGuid
+# @var ModuleVersion: To store value for ModuleVersion
+# @var PackageGuid: To store value for PackageGuid
+# @var PackageVersion: To store value for PackageVersion
+#
class PlatformLibraryClass(CommonClass, DefineClass):
    """A library instance referenced by the platform.

    @param Name:     library (class) name; defaults to ''.
    @param FilePath: path of the library INF; defaults to ''.
    """

    def __init__(self, Name = '', FilePath = ''):
        CommonClass.__init__(self)
        DefineClass.__init__(self)
        self.Name = Name
        self.FilePath = FilePath
        # GUID/version pairs identifying the module and its owning package
        # (immutable empty strings; chaining is safe).
        self.ModuleGuid = self.ModuleVersion = ''
        self.PackageGuid = self.PackageVersion = ''
        # Module-type restrictions.
        self.ModuleType = []
        self.SupModuleList = []
+
+## PlatformLibraryClasses
+#
+# This class defined Library item list used in platform file
+#
+# @param IncludeStatementClass: Inherited from IncludeStatementClass class
+#
+# @var LibraryList: To store value for LibraryList, it is a list structure as
+# [ PlatformLibraryClass, ... ]
+#
class PlatformLibraryClasses(IncludeStatementClass):
    """Container for the platform's library list."""

    def __init__(self):
        IncludeStatementClass.__init__(self)
        self.LibraryList = []  # [PlatformLibraryClass, ...]
+
+## PlatformModuleClass
+#
+# This class defined Module item used in platform file
+#
+# @param CommonClass: Inherited from CommonClass class
+# @param DefineClass: Inherited from DefineClass class
+# @param IncludeStatementClass: Inherited from IncludeStatementClass class
+#
+# @var Name: To store value for Name (Library name or libraryclass name or module name)
+# @var FilePath: To store value for FilePath
+# @var Type: To store value for Type, selection scope is in below list
+# LIBRARY | LIBRARY_CLASS | MODULE
+# @var ModuleType: To store value for ModuleType
+# @var ExecFilePath: To store value for ExecFilePath
+# @var LibraryClasses: To store value for LibraryClasses, it is a structure as
+# PlatformLibraryClasses
+# @var PcdBuildDefinitions: To store value for PcdBuildDefinitions, it is a list structure as
+# [ PcdClass, ...]
+# @var ModuleSaBuildOption: To store value for ModuleSaBuildOption, it is a structure as
+# PlatformBuildOptionClasses
+# @var Specifications: To store value for Specifications, it is a list structure as
+# [ '', '', ...]
+#
class PlatformModuleClass(CommonClass, DefineClass, IncludeStatementClass):
    """A module (or library / library-class) instance built by the platform.

    Fix: IncludeStatementClass is a declared base class but its __init__ was
    never invoked, leaving the include-statement state uninitialized; it is
    now called alongside the other two base initializers.
    """

    def __init__(self):
        CommonClass.__init__(self)
        DefineClass.__init__(self)
        IncludeStatementClass.__init__(self)  # previously missing base init
        self.Name = ''           # library name, library-class name or module name
        self.FilePath = ''
        self.Type = ''           # LIBRARY | LIBRARY_CLASS | MODULE
        self.ModuleType = ''
        self.ExecFilePath = ''
        self.LibraryClasses = PlatformLibraryClasses()
        self.PcdBuildDefinitions = []  # [PcdClass, ...]
        self.ModuleSaBuildOption = PlatformBuildOptionClasses()
        self.Specifications = []
        self.SourceOverridePath = ''
+
+## PlatformModuleClasses
+#
+# This class defined Module item list used in platform file
+#
+# @param IncludeStatementClass: Inherited from IncludeStatementClass class
+#
+# @var ModuleList: To store value for ModuleList, it is a list structure as
+# [ PlatformModuleClass, ... ]
+#
class PlatformModuleClasses(IncludeStatementClass):
    """Container for the platform's module list."""

    def __init__(self):
        IncludeStatementClass.__init__(self)
        self.ModuleList = []  # [PlatformModuleClass, ...]
+
+## PlatformClass
+#
+# This class defined a complete platform item
+#
+# @param object: Inherited from object class
+#
+# @var Header: To store value for Header, it is a structure as
+# {Arch : PlatformHeaderClass()}
+# @var SkuInfos: To store value for SkuInfos, it is a structure as
+# SkuInfoListClass
+# @var Libraries: To store value for Libraries, it is a structure as
+# PlatformLibraryClasses
+# @var LibraryClasses: To store value for LibraryClasses, it is a structure as
+# PlatformLibraryClasses
+# @var Modules: To store value for Modules, it is a structure as
+# PlatformModuleClasses
+# @var FlashDefinitionFile: To store value for FlashDefinitionFile, it is a structure as
+# PlatformFlashDefinitionFileClass
+# @var BuildOptions: To store value for BuildOptions, it is a structure as
+# PlatformBuildOptionClasses
+# @var DynamicPcdBuildDefinitions: To store value for DynamicPcdBuildDefinitions, it is a list structure as
+# [ PcdClass, ...]
+# @var Fdf: To store value for Fdf, it is a list structure as
+# [ FdfClass, ...]
+# @var UserExtensions: To store value for UserExtensions, it is a list structure as
+# [ UserExtensionsClass, ...]
+#
class PlatformClass(object):
    """Complete, parsed representation of one platform (DSC file)."""

    def __init__(self):
        # Per-arch headers: {Arch : PlatformHeaderClass}
        self.Header = {}
        # Composite sub-objects.
        self.SkuInfos = SkuInfoListClass()
        self.Libraries = PlatformLibraryClasses()
        self.LibraryClasses = PlatformLibraryClasses()
        self.Modules = PlatformModuleClasses()
        self.FlashDefinitionFile = PlatformFlashDefinitionFileClass()
        self.BuildOptions = PlatformBuildOptionClasses()
        # Plain lists.
        self.DynamicPcdBuildDefinitions = []  # [PcdClass, ...]
        self.Fdf = []                         # [FdfClass, ...]
        self.UserExtensions = []              # [UserExtensionsClass, ...]
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Smoke check only: construct an empty PlatformClass; the instance is unused.
    P = PlatformClass()
diff --git a/BaseTools/Source/Python/CommonDataClass/__init__.py b/BaseTools/Source/Python/CommonDataClass/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/CommonDataClass/__init__.py diff --git a/BaseTools/Source/Python/Ecc/C.g b/BaseTools/Source/Python/Ecc/C.g new file mode 100644 index 0000000000..6aa50460de --- /dev/null +++ b/BaseTools/Source/Python/Ecc/C.g @@ -0,0 +1,626 @@ +
grammar C;

// Generated-parser configuration: Python target, full backtracking with
// memoization (PEG-like behaviour), default lookahead k=2 (overridden
// locally where a rule needs more or less).
options {
    language=Python;
    backtrack=true;
    memoize=true;
    k=2;
}

// Verbatim imports placed at the top of the generated parser module.
@header {
    import CodeFragment
    import FileProfile
}

// Helper methods copied verbatim into the generated parser class.
// Each Store* callback records one recognized code fragment, together with
// its start/end (line, column) coordinates, into the module-level lists in
// FileProfile; the ECC checks consume those lists after parsing.
@members {

    def printTokenInfo(self, line, offset, tokenText):
        # Debugging aid only: dump a token's position and text.
        print str(line)+ ',' + str(offset) + ':' + str(tokenText)

    def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
        # Record a predicate expression (if/while/do-while/ternary condition).
        PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.PredicateExpressionList.append(PredExp)

    def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
        EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.EnumerationDefinitionList.append(EnumDef)

    def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
        SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.StructUnionDefinitionList.append(SUDef)

    def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
        # FromText is the aliased type, ToText the new typedef name(s).
        Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.TypedefDefinitionList.append(Tdef)

    def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
        FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
        FileProfile.FunctionDefinitionList.append(FuncDef)

    def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
        VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.VariableDeclarationList.append(VarDecl)

    def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
        FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
        FileProfile.FunctionCallingList.append(FuncCall)

}
+
// Entry rule: a C translation unit is any number of external declarations.
translation_unit
    : external_declaration*
    ;


/*function_declaration
@after{
    print $function_declaration.text
}
    : declaration_specifiers IDENTIFIER '(' parameter_list ')' ';'
    ;
*/
// A top-level item.  The syntactic predicate tries
// "specifiers? declarator declaration* '{'" first; if that prefix matches,
// the input is a function definition.  Otherwise fall back to a plain
// declaration, and finally to a macro invocation (with optional ';').
external_declaration
options {k=1;}
/*@after{
    print $external_declaration.text
}*/
    : ( declaration_specifiers? declarator declaration* '{' )=> function_definition
    | declaration
    | macro_statement (';')?
    ;



// A function definition, K&R or ANSI style.  The rule-scope fields collect
// the modifier text, the declarator text/position and the position of the
// body's opening brace; the @after action hands everything to
// StoreFunctionDefinition.
function_definition
scope {
    ModifierText;
    DeclText;
    LBLine;
    LBOffset;
    DeclLine;
    DeclOffset;
}
@init {
    $function_definition::ModifierText = '';
    $function_definition::DeclText = '';
    $function_definition::LBLine = 0;
    $function_definition::LBOffset = 0;
    $function_definition::DeclLine = 0;
    $function_definition::DeclOffset = 0;
}
@after{
    self.StoreFunctionDefinition($function_definition.start.line, $function_definition.start.charPositionInLine, $function_definition.stop.line, $function_definition.stop.charPositionInLine, $function_definition::ModifierText, $function_definition::DeclText, $function_definition::LBLine, $function_definition::LBOffset, $function_definition::DeclLine, $function_definition::DeclOffset)
}
    : d=declaration_specifiers? declarator
      ( declaration+ a=compound_statement    // K&R style
      | b=compound_statement                 // ANSI style
      ) {
            if d != None:
                $function_definition::ModifierText = $declaration_specifiers.text
            else:
                $function_definition::ModifierText = ''
            $function_definition::DeclText = $declarator.text
            $function_definition::DeclLine = $declarator.start.line
            $function_definition::DeclOffset = $declarator.start.charPositionInLine
            if a != None:
                $function_definition::LBLine = $a.start.line
                $function_definition::LBOffset = $a.start.charPositionInLine
            else:
                $function_definition::LBLine = $b.start.line
                $function_definition::LBOffset = $b.start.charPositionInLine
        }
    ;
+
// A declaration: either a typedef (recorded via StoreTypedefDefinition) or a
// variable declaration (recorded via StoreVariableDeclaration when an
// init-declarator list is present).
declaration
    : a='typedef' b=declaration_specifiers?
      c=init_declarator_list d=';'
      {
          if b != None:
              self.StoreTypedefDefinition($a.line, $a.charPositionInLine, $d.line, $d.charPositionInLine, $b.text, $c.text)
          else:
              self.StoreTypedefDefinition($a.line, $a.charPositionInLine, $d.line, $d.charPositionInLine, '', $c.text)
      }
    | s=declaration_specifiers t=init_declarator_list? e=';'
      {
          if t != None:
              self.StoreVariableDeclaration($s.start.line, $s.start.charPositionInLine, $t.start.line, $t.start.charPositionInLine, $s.text, $t.text)
      }
    ;

// One or more storage-class / type specifiers / qualifiers, in any order.
declaration_specifiers
    : ( storage_class_specifier
      | type_specifier
      | type_qualifier
      )+
    ;

init_declarator_list
    : init_declarator (',' init_declarator)*
    ;

init_declarator
    : declarator ('=' initializer)?
    ;

// Standard C storage classes plus the EDK II 'STATIC' macro.
storage_class_specifier
    : 'extern'
    | 'static'
    | 'auto'
    | 'register'
    | 'STATIC'
    ;
+
// A type specifier.  struct/union and enum definitions are recorded through
// the Store* callbacks; a bare identifier is accepted as a user-defined type
// only when followed by qualifiers and a declarator (syntactic predicate).
type_specifier
    : 'void'
    | 'char'
    | 'short'
    | 'int'
    | 'long'
    | 'float'
    | 'double'
    | 'signed'
    | 'unsigned'
    | s=struct_or_union_specifier
      {
          if s.stop != None:
              self.StoreStructUnionDefinition($s.start.line, $s.start.charPositionInLine, $s.stop.line, $s.stop.charPositionInLine, $s.text)
      }
    | e=enum_specifier
      {
          if e.stop != None:
              self.StoreEnumerationDefinition($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)
      }
    | (IDENTIFIER type_qualifier* declarator)=> type_id
    ;

type_id
    : IDENTIFIER
    //{self.printTokenInfo($a.line, $a.pos, $a.text)}
    ;

// k=3 so the parser can see past "struct IDENTIFIER" to decide between a
// full definition (with '{') and a mere reference.
struct_or_union_specifier
options {k=3;}
    : struct_or_union IDENTIFIER? '{' struct_declaration_list '}'
    | struct_or_union IDENTIFIER
    ;

struct_or_union
    : 'struct'
    | 'union'
    ;

struct_declaration_list
    : struct_declaration+
    ;

struct_declaration
    : specifier_qualifier_list struct_declarator_list ';'
    ;

specifier_qualifier_list
    : ( type_qualifier | type_specifier )+
    ;

struct_declarator_list
    : struct_declarator (',' struct_declarator)*
    ;

// Bit-field declarators are allowed with or without a name.
struct_declarator
    : declarator (':' constant_expression)?
    | ':' constant_expression
    ;

// Enum definitions tolerate a trailing comma after the enumerator list.
enum_specifier
options {k=3;}
    : 'enum' '{' enumerator_list ','? '}'
    | 'enum' IDENTIFIER '{' enumerator_list ','? '}'
    | 'enum' IDENTIFIER
    ;

enumerator_list
    : enumerator (',' enumerator)*
    ;

enumerator
    : IDENTIFIER ('=' constant_expression)?
    ;

// Standard C qualifiers plus the EDK II calling-convention / annotation
// macros (IN, OUT, OPTIONAL, EFIAPI, ...), which appear in qualifier position.
type_qualifier
    : 'const'
    | 'volatile'
    | 'IN'
    | 'OUT'
    | 'OPTIONAL'
    | 'CONST'
    | 'UNALIGNED'
    | 'VOLATILE'
    | 'GLOBAL_REMOVE_IF_UNREFERENCED'
    | 'EFIAPI'
    | 'EFI_BOOTSERVICE'
    | 'EFI_RUNTIMESERVICE'
    ;
+
// Declarators, including the EDK II calling-convention macros that may
// precede the direct declarator.
declarator
    : pointer? ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? direct_declarator
//  | ('EFIAPI')? ('EFI_BOOTSERVICE')? ('EFI_RUNTIMESERVICE')? pointer? direct_declarator
    | pointer
    ;

direct_declarator
    : IDENTIFIER declarator_suffix*
    | '(' ('EFIAPI')? declarator ')' declarator_suffix+
    ;

// Array and function suffixes of a declarator.
declarator_suffix
    : '[' constant_expression ']'
    | '[' ']'
    | '(' parameter_type_list ')'
    | '(' identifier_list ')'
    | '(' ')'
    ;

pointer
    : '*' type_qualifier+ pointer?
    | '*' pointer
    | '*'
    ;

// Parameter list, optionally variadic; OPTIONAL may precede the ellipsis.
parameter_type_list
    : parameter_list (',' ('OPTIONAL')? '...')?
    ;

parameter_list
    : parameter_declaration (',' ('OPTIONAL')? parameter_declaration)*
    ;

parameter_declaration
    : declaration_specifiers (declarator|abstract_declarator)* ('OPTIONAL')?
    //accommodate user-defined type only, no declarator follows.
    | pointer* IDENTIFIER
    ;

identifier_list
    : IDENTIFIER
      (',' IDENTIFIER)*
    ;

type_name
    : specifier_qualifier_list abstract_declarator?
    | type_id
    ;

// Abstract declarators (used in casts and parameter types without names).
abstract_declarator
    : pointer direct_abstract_declarator?
    | direct_abstract_declarator
    ;

direct_abstract_declarator
    : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) abstract_declarator_suffix*
    ;

abstract_declarator_suffix
    : '[' ']'
    | '[' constant_expression ']'
    | '(' ')'
    | '(' parameter_type_list ')'
    ;

// An initializer: a single expression or a braced list (trailing comma ok).
initializer

    : assignment_expression
    | '{' initializer_list ','? '}'
    ;

initializer_list
    : initializer (',' initializer )*
    ;
+
// E x p r e s s i o n s

// Function-call arguments; EDK II allows a trailing OPTIONAL per argument.
argument_expression_list
    : assignment_expression ('OPTIONAL')? (',' assignment_expression ('OPTIONAL')?)*
    ;

additive_expression
    : (multiplicative_expression) ('+' multiplicative_expression | '-' multiplicative_expression)*
    ;

multiplicative_expression
    : (cast_expression) ('*' cast_expression | '/' cast_expression | '%' cast_expression)*
    ;

cast_expression
    : '(' type_name ')' cast_expression
    | unary_expression
    ;

unary_expression
    : postfix_expression
    | '++' unary_expression
    | '--' unary_expression
    | unary_operator cast_expression
    | 'sizeof' unary_expression
    | 'sizeof' '(' type_name ')'
    ;

// Postfix expressions.  FuncCallText accumulates the callee name (including
// '.' and '->' member chains); each '(...)' suffix is reported through
// StoreFunctionCalling with the argument text.
postfix_expression
scope {
    FuncCallText;
}
@init {
    $postfix_expression::FuncCallText = '';
}
    : p=primary_expression {$postfix_expression::FuncCallText += $p.text}
      ( '[' expression ']'
      | '(' a=')'{self.StoreFunctionCalling($p.start.line, $p.start.charPositionInLine, $a.line, $a.charPositionInLine, $postfix_expression::FuncCallText, '')}
      | '(' c=argument_expression_list b=')' {self.StoreFunctionCalling($p.start.line, $p.start.charPositionInLine, $b.line, $b.charPositionInLine, $postfix_expression::FuncCallText, $c.text)}
      | '(' macro_parameter_list ')'
      | '.' x=IDENTIFIER {$postfix_expression::FuncCallText += '.' + $x.text}
      | '*' y=IDENTIFIER {$postfix_expression::FuncCallText = $y.text}
      | '->' z=IDENTIFIER {$postfix_expression::FuncCallText += '->' + $z.text}
      | '++'
      | '--'
      )*
    ;

macro_parameter_list
    : parameter_declaration (',' parameter_declaration)*
    ;

unary_operator
    : '&'
    | '*'
    | '+'
    | '-'
    | '~'
    | '!'
    ;

primary_expression
    : IDENTIFIER
    | constant
    | '(' expression ')'
    ;

// Literals; adjacent string literals (possibly interleaved with macro
// identifiers such as L or PREFIX macros) are accepted as one constant.
constant
    : HEX_LITERAL
    | OCTAL_LITERAL
    | DECIMAL_LITERAL
    | CHARACTER_LITERAL
    | (IDENTIFIER* STRING_LITERAL+)+ IDENTIFIER*
    | FLOATING_POINT_LITERAL
    ;

/////

expression
    : assignment_expression (',' assignment_expression)*
    ;

constant_expression
    : conditional_expression
    ;

assignment_expression
    : lvalue assignment_operator assignment_expression
    | conditional_expression
    ;

lvalue
    : unary_expression
    ;

assignment_operator
    : '='
    | '*='
    | '/='
    | '%='
    | '+='
    | '-='
    | '<<='
    | '>>='
    | '&='
    | '^='
    | '|='
    ;

// Ternary operator; the condition of a '?:' is recorded as a predicate.
conditional_expression
    : e=logical_or_expression ('?' expression ':' conditional_expression {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)})?
    ;
+
+logical_or_expression
+ : logical_and_expression ('||' logical_and_expression)*
+ ;
+
+logical_and_expression
+ : inclusive_or_expression ('&&' inclusive_or_expression)*
+ ;
+
+inclusive_or_expression
+ : exclusive_or_expression ('|' exclusive_or_expression)*
+ ;
+
+exclusive_or_expression
+ : and_expression ('^' and_expression)*
+ ;
+
+and_expression
+ : equality_expression ('&' equality_expression)*
+ ;
+equality_expression
+ : relational_expression (('=='|'!=') relational_expression )*
+ ;
+
+relational_expression
+ : shift_expression (('<'|'>'|'<='|'>=') shift_expression)*
+ ;
+
+shift_expression
+ : additive_expression (('<<'|'>>') additive_expression)*
+ ;
+
+// S t a t e m e n t s
+
+// Statement alternatives. Beyond the ISO C statement kinds this grammar also
+// accepts asm blocks, macro-style invocations, and declarations mixed in
+// among statements.
+statement
+ : labeled_statement
+ | compound_statement
+ | expression_statement
+ | selection_statement
+ | iteration_statement
+ | jump_statement
+ | macro_statement
+ | asm2_statement
+ | asm1_statement
+ | asm_statement
+ | declaration
+ ;
+
+// GCC-style asm: optional '__asm__' keyword, then identifier and a
+// parenthesized body consumed as any tokens other than ';'.
+asm2_statement
+ : '__asm__'? IDENTIFIER '(' (~(';'))* ')' ';'
+ ;
+
+// MSVC-style '_asm { ... }' block; body consumed as any tokens other than '}'.
+asm1_statement
+ : '_asm' '{' (~('}'))* '}'
+ ;
+
+// MSVC-style '__asm { ... }' block.
+asm_statement
+ : '__asm' '{' (~('}'))* '}'
+ ;
+
+// Statement-like macro invocation whose body may hold declarations,
+// statements and a trailing expression.
+macro_statement
+ : IDENTIFIER '(' declaration* statement_list? expression? ')'
+ ;
+
+labeled_statement
+ : IDENTIFIER ':' statement
+ | 'case' constant_expression ':' statement
+ | 'default' ':' statement
+ ;
+
+compound_statement
+ : '{' declaration* statement_list? '}'
+ ;
+
+statement_list
+ : statement+
+ ;
+
+expression_statement
+ : ';'
+ | expression ';'
+ ;
+
+// The 'if' condition is recorded via self.StorePredicateExpression. The
+// options on the 'else' subrule commit to consuming an 'else' immediately,
+// i.e. the classic nearest-'if' dangling-else resolution.
+selection_statement
+ : 'if' '(' e=expression ')' {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)} statement (options {k=1; backtrack=false;}:'else' statement)?
+ | 'switch' '(' expression ')' statement
+ ;
+
+// Each loop's controlling expression is recorded via
+// self.StorePredicateExpression; for 'for' loops that is the second
+// expression_statement (the condition slot).
+iteration_statement
+ : 'while' '(' e=expression ')' statement {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ | 'do' statement 'while' '(' e=expression ')' ';' {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ | 'for' '(' expression_statement e=expression_statement expression? ')' statement {self.StorePredicateExpression($e.start.line, $e.start.charPositionInLine, $e.stop.line, $e.stop.charPositionInLine, $e.text)}
+ ;
+
+jump_statement
+ : 'goto' IDENTIFIER ';'
+ | 'continue' ';'
+ | 'break' ';'
+ | 'return' ';'
+ | 'return' expression ';'
+ ;
+
+// Identifier: a LETTER followed by letters or digits. Note LETTER also
+// admits '$', so '$'-containing identifiers are accepted.
+IDENTIFIER
+ : LETTER (LETTER|'0'..'9')*
+ ;
+
+fragment
+LETTER
+ : '$'
+ | 'A'..'Z'
+ | 'a'..'z'
+ | '_'
+ ;
+
+// Character/string literals, with an optional 'L' wide-literal prefix;
+// backslash escapes are handled by the EscapeSequence fragment.
+CHARACTER_LITERAL
+ : ('L')? '\'' ( EscapeSequence | ~('\''|'\\') ) '\''
+ ;
+
+STRING_LITERAL
+ : ('L')? '"' ( EscapeSequence | ~('\\'|'"') )* '"'
+ ;
+
+// Integer literals, each with an optional u/l-style suffix. A bare '0'
+// lexes as DECIMAL_LITERAL; '0' followed by octal digits as OCTAL_LITERAL.
+HEX_LITERAL : '0' ('x'|'X') HexDigit+ IntegerTypeSuffix? ;
+
+DECIMAL_LITERAL : ('0' | '1'..'9' '0'..'9'*) IntegerTypeSuffix? ;
+
+OCTAL_LITERAL : '0' ('0'..'7')+ IntegerTypeSuffix? ;
+
+fragment
+HexDigit : ('0'..'9'|'a'..'f'|'A'..'F') ;
+
+// u, l, ul and ull suffix combinations (any case).
+fragment
+IntegerTypeSuffix
+ : ('u'|'U')
+ | ('l'|'L')
+ | ('u'|'U') ('l'|'L')
+ | ('u'|'U') ('l'|'L') ('l'|'L')
+ ;
+
+// Floating-point literal: every alternative requires at least a '.', a
+// mandatory exponent, or a mandatory float suffix, so plain integers never
+// match this rule.
+FLOATING_POINT_LITERAL
+ : ('0'..'9')+ '.' ('0'..'9')* Exponent? FloatTypeSuffix?
+ | '.' ('0'..'9')+ Exponent? FloatTypeSuffix?
+ | ('0'..'9')+ Exponent FloatTypeSuffix?
+ | ('0'..'9')+ Exponent? FloatTypeSuffix
+ ;
+
+fragment
+Exponent : ('e'|'E') ('+'|'-')? ('0'..'9')+ ;
+
+fragment
+FloatTypeSuffix : ('f'|'F'|'d'|'D') ;
+
+// Simple C escapes plus octal escapes; note UnicodeEscape below is defined
+// but not referenced by EscapeSequence in this part of the grammar.
+fragment
+EscapeSequence
+ : '\\' ('b'|'t'|'n'|'f'|'r'|'\"'|'\''|'\\')
+ | OctalEscape
+ ;
+
+fragment
+OctalEscape
+ : '\\' ('0'..'3') ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7') ('0'..'7')
+ | '\\' ('0'..'7')
+ ;
+
+fragment
+UnicodeEscape
+ : '\\' 'u' HexDigit HexDigit HexDigit HexDigit
+ ;
+
+// Whitespace is routed to the hidden channel (kept in the token stream but
+// invisible to the parser).
+WS : (' '|'\r'|'\t'|'\u000C'|'\n') {$channel=HIDDEN;}
+ ;
+
+// ignore '\' of line concatenation
+BS : ('\\') {$channel=HIDDEN;}
+ ;
+
+// ignore function modifiers
+//FUNC_MODIFIERS : 'EFIAPI' {$channel=HIDDEN;}
+// ;
+
+// Catch-all token over \u0003..\uFFFE — presumably so any otherwise
+// unmatched character still becomes a token instead of a lexer error;
+// TODO confirm how the parser treats it.
+UnicodeVocabulary
+ : '\u0003'..'\uFFFE'
+ ;
+// Block comments, matched non-greedily and hidden.
+COMMENT
+ : '/*' ( options {greedy=false;} : . )* '*/' {$channel=HIDDEN;}
+ ;
+
+
+// Line comments; note the rule requires a terminating newline.
+LINE_COMMENT
+ : '//' ~('\n'|'\r')* '\r'? '\n' {$channel=HIDDEN;}
+ ;
+
+// ignore #line info for now
+LINE_COMMAND
+ : '#' ~('\n'|'\r')* '\r'? '\n' {$channel=HIDDEN;}
+ ;
diff --git a/BaseTools/Source/Python/Ecc/CLexer.py b/BaseTools/Source/Python/Ecc/CLexer.py new file mode 100644 index 0000000000..cc437e0821 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/CLexer.py @@ -0,0 +1,4887 @@ +# $ANTLR 3.0.1 C.g 2009-02-16 16:02:51 + +from antlr3 import * +from antlr3.compat import set, frozenset + + +# for convenience in actions +HIDDEN = BaseRecognizer.HIDDEN + +# token types +T29=29 +HexDigit=13 +T70=70 +T74=74 +T85=85 +T102=102 +T114=114 +T103=103 +STRING_LITERAL=9 +T32=32 +T81=81 +T41=41 +FloatTypeSuffix=16 +T113=113 +T62=62 +T109=109 +DECIMAL_LITERAL=7 +IntegerTypeSuffix=14 +T68=68 +T73=73 +T84=84 +T33=33 +UnicodeVocabulary=21 +T78=78 +T115=115 +WS=19 +LINE_COMMAND=24 +T42=42 +T96=96 +T71=71 +LINE_COMMENT=23 +T72=72 +T94=94 +FLOATING_POINT_LITERAL=10 +T76=76 +UnicodeEscape=18 +T75=75 +T89=89 +T67=67 +T31=31 +T60=60 +T82=82 +T100=100 +T49=49 +IDENTIFIER=4 +T30=30 +CHARACTER_LITERAL=8 +T79=79 +T36=36 +T58=58 +T93=93 +T35=35 +T107=107 +OCTAL_LITERAL=6 +T83=83 +T61=61 +HEX_LITERAL=5 +T45=45 +T34=34 +T101=101 +T64=64 +T25=25 +T91=91 +T105=105 +T37=37 +T86=86 +T116=116 +EscapeSequence=12 +T26=26 +T51=51 +T111=111 +T46=46 +T77=77 +T38=38 +T106=106 +T112=112 +T69=69 +T39=39 +T44=44 +T55=55 +LETTER=11 +Exponent=15 +T95=95 +T50=50 +T110=110 +T108=108 +BS=20 +T92=92 +T43=43 +T28=28 +T40=40 +T66=66 +COMMENT=22 +T88=88 +T63=63 +T57=57 +T65=65 +T98=98 +T56=56 +T87=87 +T80=80 +T59=59 +T97=97 +T48=48 +T54=54 +EOF=-1 +T104=104 +T47=47 +Tokens=117 +T53=53 +OctalEscape=17 +T99=99 +T27=27 +T52=52 +T90=90 + +class CLexer(Lexer): + + grammarFileName = "C.g" + + def __init__(self, input=None): + Lexer.__init__(self, input) + self.dfa25 = self.DFA25( + self, 25, + eot = self.DFA25_eot, + eof = self.DFA25_eof, + min = self.DFA25_min, + max = self.DFA25_max, + accept = self.DFA25_accept, + special = self.DFA25_special, + transition = self.DFA25_transition + ) + self.dfa35 = self.DFA35( + self, 35, + eot = self.DFA35_eot, + eof = self.DFA35_eof, + min = 
self.DFA35_min, + max = self.DFA35_max, + accept = self.DFA35_accept, + special = self.DFA35_special, + transition = self.DFA35_transition + ) + + + + + + + # $ANTLR start T25 + def mT25(self, ): + + try: + self.type = T25 + + # C.g:7:5: ( ';' ) + # C.g:7:7: ';' + self.match(u';') + + + + + + finally: + + pass + + # $ANTLR end T25 + + + + # $ANTLR start T26 + def mT26(self, ): + + try: + self.type = T26 + + # C.g:8:5: ( 'typedef' ) + # C.g:8:7: 'typedef' + self.match("typedef") + + + + + + + finally: + + pass + + # $ANTLR end T26 + + + + # $ANTLR start T27 + def mT27(self, ): + + try: + self.type = T27 + + # C.g:9:5: ( ',' ) + # C.g:9:7: ',' + self.match(u',') + + + + + + finally: + + pass + + # $ANTLR end T27 + + + + # $ANTLR start T28 + def mT28(self, ): + + try: + self.type = T28 + + # C.g:10:5: ( '=' ) + # C.g:10:7: '=' + self.match(u'=') + + + + + + finally: + + pass + + # $ANTLR end T28 + + + + # $ANTLR start T29 + def mT29(self, ): + + try: + self.type = T29 + + # C.g:11:5: ( 'extern' ) + # C.g:11:7: 'extern' + self.match("extern") + + + + + + + finally: + + pass + + # $ANTLR end T29 + + + + # $ANTLR start T30 + def mT30(self, ): + + try: + self.type = T30 + + # C.g:12:5: ( 'static' ) + # C.g:12:7: 'static' + self.match("static") + + + + + + + finally: + + pass + + # $ANTLR end T30 + + + + # $ANTLR start T31 + def mT31(self, ): + + try: + self.type = T31 + + # C.g:13:5: ( 'auto' ) + # C.g:13:7: 'auto' + self.match("auto") + + + + + + + finally: + + pass + + # $ANTLR end T31 + + + + # $ANTLR start T32 + def mT32(self, ): + + try: + self.type = T32 + + # C.g:14:5: ( 'register' ) + # C.g:14:7: 'register' + self.match("register") + + + + + + + finally: + + pass + + # $ANTLR end T32 + + + + # $ANTLR start T33 + def mT33(self, ): + + try: + self.type = T33 + + # C.g:15:5: ( 'STATIC' ) + # C.g:15:7: 'STATIC' + self.match("STATIC") + + + + + + + finally: + + pass + + # $ANTLR end T33 + + + + # $ANTLR start T34 + def mT34(self, ): + + try: + self.type = T34 + + # 
C.g:16:5: ( 'void' ) + # C.g:16:7: 'void' + self.match("void") + + + + + + + finally: + + pass + + # $ANTLR end T34 + + + + # $ANTLR start T35 + def mT35(self, ): + + try: + self.type = T35 + + # C.g:17:5: ( 'char' ) + # C.g:17:7: 'char' + self.match("char") + + + + + + + finally: + + pass + + # $ANTLR end T35 + + + + # $ANTLR start T36 + def mT36(self, ): + + try: + self.type = T36 + + # C.g:18:5: ( 'short' ) + # C.g:18:7: 'short' + self.match("short") + + + + + + + finally: + + pass + + # $ANTLR end T36 + + + + # $ANTLR start T37 + def mT37(self, ): + + try: + self.type = T37 + + # C.g:19:5: ( 'int' ) + # C.g:19:7: 'int' + self.match("int") + + + + + + + finally: + + pass + + # $ANTLR end T37 + + + + # $ANTLR start T38 + def mT38(self, ): + + try: + self.type = T38 + + # C.g:20:5: ( 'long' ) + # C.g:20:7: 'long' + self.match("long") + + + + + + + finally: + + pass + + # $ANTLR end T38 + + + + # $ANTLR start T39 + def mT39(self, ): + + try: + self.type = T39 + + # C.g:21:5: ( 'float' ) + # C.g:21:7: 'float' + self.match("float") + + + + + + + finally: + + pass + + # $ANTLR end T39 + + + + # $ANTLR start T40 + def mT40(self, ): + + try: + self.type = T40 + + # C.g:22:5: ( 'double' ) + # C.g:22:7: 'double' + self.match("double") + + + + + + + finally: + + pass + + # $ANTLR end T40 + + + + # $ANTLR start T41 + def mT41(self, ): + + try: + self.type = T41 + + # C.g:23:5: ( 'signed' ) + # C.g:23:7: 'signed' + self.match("signed") + + + + + + + finally: + + pass + + # $ANTLR end T41 + + + + # $ANTLR start T42 + def mT42(self, ): + + try: + self.type = T42 + + # C.g:24:5: ( 'unsigned' ) + # C.g:24:7: 'unsigned' + self.match("unsigned") + + + + + + + finally: + + pass + + # $ANTLR end T42 + + + + # $ANTLR start T43 + def mT43(self, ): + + try: + self.type = T43 + + # C.g:25:5: ( '{' ) + # C.g:25:7: '{' + self.match(u'{') + + + + + + finally: + + pass + + # $ANTLR end T43 + + + + # $ANTLR start T44 + def mT44(self, ): + + try: + self.type = T44 + + # C.g:26:5: ( '}' ) + # 
C.g:26:7: '}' + self.match(u'}') + + + + + + finally: + + pass + + # $ANTLR end T44 + + + + # $ANTLR start T45 + def mT45(self, ): + + try: + self.type = T45 + + # C.g:27:5: ( 'struct' ) + # C.g:27:7: 'struct' + self.match("struct") + + + + + + + finally: + + pass + + # $ANTLR end T45 + + + + # $ANTLR start T46 + def mT46(self, ): + + try: + self.type = T46 + + # C.g:28:5: ( 'union' ) + # C.g:28:7: 'union' + self.match("union") + + + + + + + finally: + + pass + + # $ANTLR end T46 + + + + # $ANTLR start T47 + def mT47(self, ): + + try: + self.type = T47 + + # C.g:29:5: ( ':' ) + # C.g:29:7: ':' + self.match(u':') + + + + + + finally: + + pass + + # $ANTLR end T47 + + + + # $ANTLR start T48 + def mT48(self, ): + + try: + self.type = T48 + + # C.g:30:5: ( 'enum' ) + # C.g:30:7: 'enum' + self.match("enum") + + + + + + + finally: + + pass + + # $ANTLR end T48 + + + + # $ANTLR start T49 + def mT49(self, ): + + try: + self.type = T49 + + # C.g:31:5: ( 'const' ) + # C.g:31:7: 'const' + self.match("const") + + + + + + + finally: + + pass + + # $ANTLR end T49 + + + + # $ANTLR start T50 + def mT50(self, ): + + try: + self.type = T50 + + # C.g:32:5: ( 'volatile' ) + # C.g:32:7: 'volatile' + self.match("volatile") + + + + + + + finally: + + pass + + # $ANTLR end T50 + + + + # $ANTLR start T51 + def mT51(self, ): + + try: + self.type = T51 + + # C.g:33:5: ( 'IN' ) + # C.g:33:7: 'IN' + self.match("IN") + + + + + + + finally: + + pass + + # $ANTLR end T51 + + + + # $ANTLR start T52 + def mT52(self, ): + + try: + self.type = T52 + + # C.g:34:5: ( 'OUT' ) + # C.g:34:7: 'OUT' + self.match("OUT") + + + + + + + finally: + + pass + + # $ANTLR end T52 + + + + # $ANTLR start T53 + def mT53(self, ): + + try: + self.type = T53 + + # C.g:35:5: ( 'OPTIONAL' ) + # C.g:35:7: 'OPTIONAL' + self.match("OPTIONAL") + + + + + + + finally: + + pass + + # $ANTLR end T53 + + + + # $ANTLR start T54 + def mT54(self, ): + + try: + self.type = T54 + + # C.g:36:5: ( 'CONST' ) + # C.g:36:7: 'CONST' + 
self.match("CONST") + + + + + + + finally: + + pass + + # $ANTLR end T54 + + + + # $ANTLR start T55 + def mT55(self, ): + + try: + self.type = T55 + + # C.g:37:5: ( 'UNALIGNED' ) + # C.g:37:7: 'UNALIGNED' + self.match("UNALIGNED") + + + + + + + finally: + + pass + + # $ANTLR end T55 + + + + # $ANTLR start T56 + def mT56(self, ): + + try: + self.type = T56 + + # C.g:38:5: ( 'VOLATILE' ) + # C.g:38:7: 'VOLATILE' + self.match("VOLATILE") + + + + + + + finally: + + pass + + # $ANTLR end T56 + + + + # $ANTLR start T57 + def mT57(self, ): + + try: + self.type = T57 + + # C.g:39:5: ( 'GLOBAL_REMOVE_IF_UNREFERENCED' ) + # C.g:39:7: 'GLOBAL_REMOVE_IF_UNREFERENCED' + self.match("GLOBAL_REMOVE_IF_UNREFERENCED") + + + + + + + finally: + + pass + + # $ANTLR end T57 + + + + # $ANTLR start T58 + def mT58(self, ): + + try: + self.type = T58 + + # C.g:40:5: ( 'EFIAPI' ) + # C.g:40:7: 'EFIAPI' + self.match("EFIAPI") + + + + + + + finally: + + pass + + # $ANTLR end T58 + + + + # $ANTLR start T59 + def mT59(self, ): + + try: + self.type = T59 + + # C.g:41:5: ( 'EFI_BOOTSERVICE' ) + # C.g:41:7: 'EFI_BOOTSERVICE' + self.match("EFI_BOOTSERVICE") + + + + + + + finally: + + pass + + # $ANTLR end T59 + + + + # $ANTLR start T60 + def mT60(self, ): + + try: + self.type = T60 + + # C.g:42:5: ( 'EFI_RUNTIMESERVICE' ) + # C.g:42:7: 'EFI_RUNTIMESERVICE' + self.match("EFI_RUNTIMESERVICE") + + + + + + + finally: + + pass + + # $ANTLR end T60 + + + + # $ANTLR start T61 + def mT61(self, ): + + try: + self.type = T61 + + # C.g:43:5: ( '(' ) + # C.g:43:7: '(' + self.match(u'(') + + + + + + finally: + + pass + + # $ANTLR end T61 + + + + # $ANTLR start T62 + def mT62(self, ): + + try: + self.type = T62 + + # C.g:44:5: ( ')' ) + # C.g:44:7: ')' + self.match(u')') + + + + + + finally: + + pass + + # $ANTLR end T62 + + + + # $ANTLR start T63 + def mT63(self, ): + + try: + self.type = T63 + + # C.g:45:5: ( '[' ) + # C.g:45:7: '[' + self.match(u'[') + + + + + + finally: + + pass + + # $ANTLR end T63 + + + + # 
$ANTLR start T64 + def mT64(self, ): + + try: + self.type = T64 + + # C.g:46:5: ( ']' ) + # C.g:46:7: ']' + self.match(u']') + + + + + + finally: + + pass + + # $ANTLR end T64 + + + + # $ANTLR start T65 + def mT65(self, ): + + try: + self.type = T65 + + # C.g:47:5: ( '*' ) + # C.g:47:7: '*' + self.match(u'*') + + + + + + finally: + + pass + + # $ANTLR end T65 + + + + # $ANTLR start T66 + def mT66(self, ): + + try: + self.type = T66 + + # C.g:48:5: ( '...' ) + # C.g:48:7: '...' + self.match("...") + + + + + + + finally: + + pass + + # $ANTLR end T66 + + + + # $ANTLR start T67 + def mT67(self, ): + + try: + self.type = T67 + + # C.g:49:5: ( '+' ) + # C.g:49:7: '+' + self.match(u'+') + + + + + + finally: + + pass + + # $ANTLR end T67 + + + + # $ANTLR start T68 + def mT68(self, ): + + try: + self.type = T68 + + # C.g:50:5: ( '-' ) + # C.g:50:7: '-' + self.match(u'-') + + + + + + finally: + + pass + + # $ANTLR end T68 + + + + # $ANTLR start T69 + def mT69(self, ): + + try: + self.type = T69 + + # C.g:51:5: ( '/' ) + # C.g:51:7: '/' + self.match(u'/') + + + + + + finally: + + pass + + # $ANTLR end T69 + + + + # $ANTLR start T70 + def mT70(self, ): + + try: + self.type = T70 + + # C.g:52:5: ( '%' ) + # C.g:52:7: '%' + self.match(u'%') + + + + + + finally: + + pass + + # $ANTLR end T70 + + + + # $ANTLR start T71 + def mT71(self, ): + + try: + self.type = T71 + + # C.g:53:5: ( '++' ) + # C.g:53:7: '++' + self.match("++") + + + + + + + finally: + + pass + + # $ANTLR end T71 + + + + # $ANTLR start T72 + def mT72(self, ): + + try: + self.type = T72 + + # C.g:54:5: ( '--' ) + # C.g:54:7: '--' + self.match("--") + + + + + + + finally: + + pass + + # $ANTLR end T72 + + + + # $ANTLR start T73 + def mT73(self, ): + + try: + self.type = T73 + + # C.g:55:5: ( 'sizeof' ) + # C.g:55:7: 'sizeof' + self.match("sizeof") + + + + + + + finally: + + pass + + # $ANTLR end T73 + + + + # $ANTLR start T74 + def mT74(self, ): + + try: + self.type = T74 + + # C.g:56:5: ( '.' ) + # C.g:56:7: '.' 
+ self.match(u'.') + + + + + + finally: + + pass + + # $ANTLR end T74 + + + + # $ANTLR start T75 + def mT75(self, ): + + try: + self.type = T75 + + # C.g:57:5: ( '->' ) + # C.g:57:7: '->' + self.match("->") + + + + + + + finally: + + pass + + # $ANTLR end T75 + + + + # $ANTLR start T76 + def mT76(self, ): + + try: + self.type = T76 + + # C.g:58:5: ( '&' ) + # C.g:58:7: '&' + self.match(u'&') + + + + + + finally: + + pass + + # $ANTLR end T76 + + + + # $ANTLR start T77 + def mT77(self, ): + + try: + self.type = T77 + + # C.g:59:5: ( '~' ) + # C.g:59:7: '~' + self.match(u'~') + + + + + + finally: + + pass + + # $ANTLR end T77 + + + + # $ANTLR start T78 + def mT78(self, ): + + try: + self.type = T78 + + # C.g:60:5: ( '!' ) + # C.g:60:7: '!' + self.match(u'!') + + + + + + finally: + + pass + + # $ANTLR end T78 + + + + # $ANTLR start T79 + def mT79(self, ): + + try: + self.type = T79 + + # C.g:61:5: ( '*=' ) + # C.g:61:7: '*=' + self.match("*=") + + + + + + + finally: + + pass + + # $ANTLR end T79 + + + + # $ANTLR start T80 + def mT80(self, ): + + try: + self.type = T80 + + # C.g:62:5: ( '/=' ) + # C.g:62:7: '/=' + self.match("/=") + + + + + + + finally: + + pass + + # $ANTLR end T80 + + + + # $ANTLR start T81 + def mT81(self, ): + + try: + self.type = T81 + + # C.g:63:5: ( '%=' ) + # C.g:63:7: '%=' + self.match("%=") + + + + + + + finally: + + pass + + # $ANTLR end T81 + + + + # $ANTLR start T82 + def mT82(self, ): + + try: + self.type = T82 + + # C.g:64:5: ( '+=' ) + # C.g:64:7: '+=' + self.match("+=") + + + + + + + finally: + + pass + + # $ANTLR end T82 + + + + # $ANTLR start T83 + def mT83(self, ): + + try: + self.type = T83 + + # C.g:65:5: ( '-=' ) + # C.g:65:7: '-=' + self.match("-=") + + + + + + + finally: + + pass + + # $ANTLR end T83 + + + + # $ANTLR start T84 + def mT84(self, ): + + try: + self.type = T84 + + # C.g:66:5: ( '<<=' ) + # C.g:66:7: '<<=' + self.match("<<=") + + + + + + + finally: + + pass + + # $ANTLR end T84 + + + + # $ANTLR start T85 + def 
mT85(self, ): + + try: + self.type = T85 + + # C.g:67:5: ( '>>=' ) + # C.g:67:7: '>>=' + self.match(">>=") + + + + + + + finally: + + pass + + # $ANTLR end T85 + + + + # $ANTLR start T86 + def mT86(self, ): + + try: + self.type = T86 + + # C.g:68:5: ( '&=' ) + # C.g:68:7: '&=' + self.match("&=") + + + + + + + finally: + + pass + + # $ANTLR end T86 + + + + # $ANTLR start T87 + def mT87(self, ): + + try: + self.type = T87 + + # C.g:69:5: ( '^=' ) + # C.g:69:7: '^=' + self.match("^=") + + + + + + + finally: + + pass + + # $ANTLR end T87 + + + + # $ANTLR start T88 + def mT88(self, ): + + try: + self.type = T88 + + # C.g:70:5: ( '|=' ) + # C.g:70:7: '|=' + self.match("|=") + + + + + + + finally: + + pass + + # $ANTLR end T88 + + + + # $ANTLR start T89 + def mT89(self, ): + + try: + self.type = T89 + + # C.g:71:5: ( '?' ) + # C.g:71:7: '?' + self.match(u'?') + + + + + + finally: + + pass + + # $ANTLR end T89 + + + + # $ANTLR start T90 + def mT90(self, ): + + try: + self.type = T90 + + # C.g:72:5: ( '||' ) + # C.g:72:7: '||' + self.match("||") + + + + + + + finally: + + pass + + # $ANTLR end T90 + + + + # $ANTLR start T91 + def mT91(self, ): + + try: + self.type = T91 + + # C.g:73:5: ( '&&' ) + # C.g:73:7: '&&' + self.match("&&") + + + + + + + finally: + + pass + + # $ANTLR end T91 + + + + # $ANTLR start T92 + def mT92(self, ): + + try: + self.type = T92 + + # C.g:74:5: ( '|' ) + # C.g:74:7: '|' + self.match(u'|') + + + + + + finally: + + pass + + # $ANTLR end T92 + + + + # $ANTLR start T93 + def mT93(self, ): + + try: + self.type = T93 + + # C.g:75:5: ( '^' ) + # C.g:75:7: '^' + self.match(u'^') + + + + + + finally: + + pass + + # $ANTLR end T93 + + + + # $ANTLR start T94 + def mT94(self, ): + + try: + self.type = T94 + + # C.g:76:5: ( '==' ) + # C.g:76:7: '==' + self.match("==") + + + + + + + finally: + + pass + + # $ANTLR end T94 + + + + # $ANTLR start T95 + def mT95(self, ): + + try: + self.type = T95 + + # C.g:77:5: ( '!=' ) + # C.g:77:7: '!=' + self.match("!=") + + 
+ + + + + finally: + + pass + + # $ANTLR end T95 + + + + # $ANTLR start T96 + def mT96(self, ): + + try: + self.type = T96 + + # C.g:78:5: ( '<' ) + # C.g:78:7: '<' + self.match(u'<') + + + + + + finally: + + pass + + # $ANTLR end T96 + + + + # $ANTLR start T97 + def mT97(self, ): + + try: + self.type = T97 + + # C.g:79:5: ( '>' ) + # C.g:79:7: '>' + self.match(u'>') + + + + + + finally: + + pass + + # $ANTLR end T97 + + + + # $ANTLR start T98 + def mT98(self, ): + + try: + self.type = T98 + + # C.g:80:5: ( '<=' ) + # C.g:80:7: '<=' + self.match("<=") + + + + + + + finally: + + pass + + # $ANTLR end T98 + + + + # $ANTLR start T99 + def mT99(self, ): + + try: + self.type = T99 + + # C.g:81:5: ( '>=' ) + # C.g:81:7: '>=' + self.match(">=") + + + + + + + finally: + + pass + + # $ANTLR end T99 + + + + # $ANTLR start T100 + def mT100(self, ): + + try: + self.type = T100 + + # C.g:82:6: ( '<<' ) + # C.g:82:8: '<<' + self.match("<<") + + + + + + + finally: + + pass + + # $ANTLR end T100 + + + + # $ANTLR start T101 + def mT101(self, ): + + try: + self.type = T101 + + # C.g:83:6: ( '>>' ) + # C.g:83:8: '>>' + self.match(">>") + + + + + + + finally: + + pass + + # $ANTLR end T101 + + + + # $ANTLR start T102 + def mT102(self, ): + + try: + self.type = T102 + + # C.g:84:6: ( '__asm__' ) + # C.g:84:8: '__asm__' + self.match("__asm__") + + + + + + + finally: + + pass + + # $ANTLR end T102 + + + + # $ANTLR start T103 + def mT103(self, ): + + try: + self.type = T103 + + # C.g:85:6: ( '_asm' ) + # C.g:85:8: '_asm' + self.match("_asm") + + + + + + + finally: + + pass + + # $ANTLR end T103 + + + + # $ANTLR start T104 + def mT104(self, ): + + try: + self.type = T104 + + # C.g:86:6: ( '__asm' ) + # C.g:86:8: '__asm' + self.match("__asm") + + + + + + + finally: + + pass + + # $ANTLR end T104 + + + + # $ANTLR start T105 + def mT105(self, ): + + try: + self.type = T105 + + # C.g:87:6: ( 'case' ) + # C.g:87:8: 'case' + self.match("case") + + + + + + + finally: + + pass + + # $ANTLR end 
T105 + + + + # $ANTLR start T106 + def mT106(self, ): + + try: + self.type = T106 + + # C.g:88:6: ( 'default' ) + # C.g:88:8: 'default' + self.match("default") + + + + + + + finally: + + pass + + # $ANTLR end T106 + + + + # $ANTLR start T107 + def mT107(self, ): + + try: + self.type = T107 + + # C.g:89:6: ( 'if' ) + # C.g:89:8: 'if' + self.match("if") + + + + + + + finally: + + pass + + # $ANTLR end T107 + + + + # $ANTLR start T108 + def mT108(self, ): + + try: + self.type = T108 + + # C.g:90:6: ( 'else' ) + # C.g:90:8: 'else' + self.match("else") + + + + + + + finally: + + pass + + # $ANTLR end T108 + + + + # $ANTLR start T109 + def mT109(self, ): + + try: + self.type = T109 + + # C.g:91:6: ( 'switch' ) + # C.g:91:8: 'switch' + self.match("switch") + + + + + + + finally: + + pass + + # $ANTLR end T109 + + + + # $ANTLR start T110 + def mT110(self, ): + + try: + self.type = T110 + + # C.g:92:6: ( 'while' ) + # C.g:92:8: 'while' + self.match("while") + + + + + + + finally: + + pass + + # $ANTLR end T110 + + + + # $ANTLR start T111 + def mT111(self, ): + + try: + self.type = T111 + + # C.g:93:6: ( 'do' ) + # C.g:93:8: 'do' + self.match("do") + + + + + + + finally: + + pass + + # $ANTLR end T111 + + + + # $ANTLR start T112 + def mT112(self, ): + + try: + self.type = T112 + + # C.g:94:6: ( 'for' ) + # C.g:94:8: 'for' + self.match("for") + + + + + + + finally: + + pass + + # $ANTLR end T112 + + + + # $ANTLR start T113 + def mT113(self, ): + + try: + self.type = T113 + + # C.g:95:6: ( 'goto' ) + # C.g:95:8: 'goto' + self.match("goto") + + + + + + + finally: + + pass + + # $ANTLR end T113 + + + + # $ANTLR start T114 + def mT114(self, ): + + try: + self.type = T114 + + # C.g:96:6: ( 'continue' ) + # C.g:96:8: 'continue' + self.match("continue") + + + + + + + finally: + + pass + + # $ANTLR end T114 + + + + # $ANTLR start T115 + def mT115(self, ): + + try: + self.type = T115 + + # C.g:97:6: ( 'break' ) + # C.g:97:8: 'break' + self.match("break") + + + + + + + finally: + + 
pass + + # $ANTLR end T115 + + + + # $ANTLR start T116 + def mT116(self, ): + + try: + self.type = T116 + + # C.g:98:6: ( 'return' ) + # C.g:98:8: 'return' + self.match("return") + + + + + + + finally: + + pass + + # $ANTLR end T116 + + + + # $ANTLR start IDENTIFIER + def mIDENTIFIER(self, ): + + try: + self.type = IDENTIFIER + + # C.g:533:2: ( LETTER ( LETTER | '0' .. '9' )* ) + # C.g:533:4: LETTER ( LETTER | '0' .. '9' )* + self.mLETTER() + + # C.g:533:11: ( LETTER | '0' .. '9' )* + while True: #loop1 + alt1 = 2 + LA1_0 = self.input.LA(1) + + if (LA1_0 == u'$' or (u'0' <= LA1_0 <= u'9') or (u'A' <= LA1_0 <= u'Z') or LA1_0 == u'_' or (u'a' <= LA1_0 <= u'z')) : + alt1 = 1 + + + if alt1 == 1: + # C.g: + if self.input.LA(1) == u'$' or (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + else: + break #loop1 + + + + + + + finally: + + pass + + # $ANTLR end IDENTIFIER + + + + # $ANTLR start LETTER + def mLETTER(self, ): + + try: + # C.g:538:2: ( '$' | 'A' .. 'Z' | 'a' .. 'z' | '_' ) + # C.g: + if self.input.LA(1) == u'$' or (u'A' <= self.input.LA(1) <= u'Z') or self.input.LA(1) == u'_' or (u'a' <= self.input.LA(1) <= u'z'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + + finally: + + pass + + # $ANTLR end LETTER + + + + # $ANTLR start CHARACTER_LITERAL + def mCHARACTER_LITERAL(self, ): + + try: + self.type = CHARACTER_LITERAL + + # C.g:545:5: ( ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' ) + # C.g:545:9: ( 'L' )? '\\'' ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) '\\'' + # C.g:545:9: ( 'L' )? 
+ alt2 = 2 + LA2_0 = self.input.LA(1) + + if (LA2_0 == u'L') : + alt2 = 1 + if alt2 == 1: + # C.g:545:10: 'L' + self.match(u'L') + + + + + self.match(u'\'') + + # C.g:545:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) ) + alt3 = 2 + LA3_0 = self.input.LA(1) + + if (LA3_0 == u'\\') : + alt3 = 1 + elif ((u'\u0000' <= LA3_0 <= u'&') or (u'(' <= LA3_0 <= u'[') or (u']' <= LA3_0 <= u'\uFFFE')) : + alt3 = 2 + else: + nvae = NoViableAltException("545:21: ( EscapeSequence | ~ ( '\\'' | '\\\\' ) )", 3, 0, self.input) + + raise nvae + + if alt3 == 1: + # C.g:545:23: EscapeSequence + self.mEscapeSequence() + + + + elif alt3 == 2: + # C.g:545:40: ~ ( '\\'' | '\\\\' ) + if (u'\u0000' <= self.input.LA(1) <= u'&') or (u'(' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + self.match(u'\'') + + + + + + finally: + + pass + + # $ANTLR end CHARACTER_LITERAL + + + + # $ANTLR start STRING_LITERAL + def mSTRING_LITERAL(self, ): + + try: + self.type = STRING_LITERAL + + # C.g:549:5: ( ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' ) + # C.g:549:8: ( 'L' )? '\"' ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* '\"' + # C.g:549:8: ( 'L' )? 
+ alt4 = 2 + LA4_0 = self.input.LA(1) + + if (LA4_0 == u'L') : + alt4 = 1 + if alt4 == 1: + # C.g:549:9: 'L' + self.match(u'L') + + + + + self.match(u'"') + + # C.g:549:19: ( EscapeSequence | ~ ( '\\\\' | '\"' ) )* + while True: #loop5 + alt5 = 3 + LA5_0 = self.input.LA(1) + + if (LA5_0 == u'\\') : + alt5 = 1 + elif ((u'\u0000' <= LA5_0 <= u'!') or (u'#' <= LA5_0 <= u'[') or (u']' <= LA5_0 <= u'\uFFFE')) : + alt5 = 2 + + + if alt5 == 1: + # C.g:549:21: EscapeSequence + self.mEscapeSequence() + + + + elif alt5 == 2: + # C.g:549:38: ~ ( '\\\\' | '\"' ) + if (u'\u0000' <= self.input.LA(1) <= u'!') or (u'#' <= self.input.LA(1) <= u'[') or (u']' <= self.input.LA(1) <= u'\uFFFE'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + else: + break #loop5 + + + self.match(u'"') + + + + + + finally: + + pass + + # $ANTLR end STRING_LITERAL + + + + # $ANTLR start HEX_LITERAL + def mHEX_LITERAL(self, ): + + try: + self.type = HEX_LITERAL + + # C.g:552:13: ( '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? ) + # C.g:552:15: '0' ( 'x' | 'X' ) ( HexDigit )+ ( IntegerTypeSuffix )? + self.match(u'0') + + if self.input.LA(1) == u'X' or self.input.LA(1) == u'x': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + # C.g:552:29: ( HexDigit )+ + cnt6 = 0 + while True: #loop6 + alt6 = 2 + LA6_0 = self.input.LA(1) + + if ((u'0' <= LA6_0 <= u'9') or (u'A' <= LA6_0 <= u'F') or (u'a' <= LA6_0 <= u'f')) : + alt6 = 1 + + + if alt6 == 1: + # C.g:552:29: HexDigit + self.mHexDigit() + + + + else: + if cnt6 >= 1: + break #loop6 + + eee = EarlyExitException(6, self.input) + raise eee + + cnt6 += 1 + + + # C.g:552:39: ( IntegerTypeSuffix )? 
+ alt7 = 2 + LA7_0 = self.input.LA(1) + + if (LA7_0 == u'L' or LA7_0 == u'U' or LA7_0 == u'l' or LA7_0 == u'u') : + alt7 = 1 + if alt7 == 1: + # C.g:552:39: IntegerTypeSuffix + self.mIntegerTypeSuffix() + + + + + + + + + finally: + + pass + + # $ANTLR end HEX_LITERAL + + + + # $ANTLR start DECIMAL_LITERAL + def mDECIMAL_LITERAL(self, ): + + try: + self.type = DECIMAL_LITERAL + + # C.g:554:17: ( ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? ) + # C.g:554:19: ( '0' | '1' .. '9' ( '0' .. '9' )* ) ( IntegerTypeSuffix )? + # C.g:554:19: ( '0' | '1' .. '9' ( '0' .. '9' )* ) + alt9 = 2 + LA9_0 = self.input.LA(1) + + if (LA9_0 == u'0') : + alt9 = 1 + elif ((u'1' <= LA9_0 <= u'9')) : + alt9 = 2 + else: + nvae = NoViableAltException("554:19: ( '0' | '1' .. '9' ( '0' .. '9' )* )", 9, 0, self.input) + + raise nvae + + if alt9 == 1: + # C.g:554:20: '0' + self.match(u'0') + + + + elif alt9 == 2: + # C.g:554:26: '1' .. '9' ( '0' .. '9' )* + self.matchRange(u'1', u'9') + + # C.g:554:35: ( '0' .. '9' )* + while True: #loop8 + alt8 = 2 + LA8_0 = self.input.LA(1) + + if ((u'0' <= LA8_0 <= u'9')) : + alt8 = 1 + + + if alt8 == 1: + # C.g:554:35: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + break #loop8 + + + + + + # C.g:554:46: ( IntegerTypeSuffix )? + alt10 = 2 + LA10_0 = self.input.LA(1) + + if (LA10_0 == u'L' or LA10_0 == u'U' or LA10_0 == u'l' or LA10_0 == u'u') : + alt10 = 1 + if alt10 == 1: + # C.g:554:46: IntegerTypeSuffix + self.mIntegerTypeSuffix() + + + + + + + + + finally: + + pass + + # $ANTLR end DECIMAL_LITERAL + + + + # $ANTLR start OCTAL_LITERAL + def mOCTAL_LITERAL(self, ): + + try: + self.type = OCTAL_LITERAL + + # C.g:556:15: ( '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? ) + # C.g:556:17: '0' ( '0' .. '7' )+ ( IntegerTypeSuffix )? + self.match(u'0') + + # C.g:556:21: ( '0' .. 
'7' )+ + cnt11 = 0 + while True: #loop11 + alt11 = 2 + LA11_0 = self.input.LA(1) + + if ((u'0' <= LA11_0 <= u'7')) : + alt11 = 1 + + + if alt11 == 1: + # C.g:556:22: '0' .. '7' + self.matchRange(u'0', u'7') + + + + else: + if cnt11 >= 1: + break #loop11 + + eee = EarlyExitException(11, self.input) + raise eee + + cnt11 += 1 + + + # C.g:556:33: ( IntegerTypeSuffix )? + alt12 = 2 + LA12_0 = self.input.LA(1) + + if (LA12_0 == u'L' or LA12_0 == u'U' or LA12_0 == u'l' or LA12_0 == u'u') : + alt12 = 1 + if alt12 == 1: + # C.g:556:33: IntegerTypeSuffix + self.mIntegerTypeSuffix() + + + + + + + + + finally: + + pass + + # $ANTLR end OCTAL_LITERAL + + + + # $ANTLR start HexDigit + def mHexDigit(self, ): + + try: + # C.g:559:10: ( ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) ) + # C.g:559:12: ( '0' .. '9' | 'a' .. 'f' | 'A' .. 'F' ) + if (u'0' <= self.input.LA(1) <= u'9') or (u'A' <= self.input.LA(1) <= u'F') or (u'a' <= self.input.LA(1) <= u'f'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + + finally: + + pass + + # $ANTLR end HexDigit + + + + # $ANTLR start IntegerTypeSuffix + def mIntegerTypeSuffix(self, ): + + try: + # C.g:563:2: ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) ) + alt13 = 4 + LA13_0 = self.input.LA(1) + + if (LA13_0 == u'U' or LA13_0 == u'u') : + LA13_1 = self.input.LA(2) + + if (LA13_1 == u'L' or LA13_1 == u'l') : + LA13_3 = self.input.LA(3) + + if (LA13_3 == u'L' or LA13_3 == u'l') : + alt13 = 4 + else: + alt13 = 3 + else: + alt13 = 1 + elif (LA13_0 == u'L' or LA13_0 == u'l') : + alt13 = 2 + else: + nvae = NoViableAltException("561:1: fragment IntegerTypeSuffix : ( ( 'u' | 'U' ) | ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) | ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) );", 13, 0, self.input) + + raise nvae + + if alt13 == 1: + # C.g:563:4: ( 'u' | 'U' ) + if self.input.LA(1) == u'U' or self.input.LA(1) == u'u': + 
self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + elif alt13 == 2: + # C.g:564:4: ( 'l' | 'L' ) + if self.input.LA(1) == u'L' or self.input.LA(1) == u'l': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + elif alt13 == 3: + # C.g:565:4: ( 'u' | 'U' ) ( 'l' | 'L' ) + if self.input.LA(1) == u'U' or self.input.LA(1) == u'u': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + if self.input.LA(1) == u'L' or self.input.LA(1) == u'l': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + elif alt13 == 4: + # C.g:566:4: ( 'u' | 'U' ) ( 'l' | 'L' ) ( 'l' | 'L' ) + if self.input.LA(1) == u'U' or self.input.LA(1) == u'u': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + if self.input.LA(1) == u'L' or self.input.LA(1) == u'l': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + if self.input.LA(1) == u'L' or self.input.LA(1) == u'l': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + finally: + + pass + + # $ANTLR end IntegerTypeSuffix + + + + # $ANTLR start FLOATING_POINT_LITERAL + def mFLOATING_POINT_LITERAL(self, ): + + try: + self.type = FLOATING_POINT_LITERAL + + # C.g:570:5: ( ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? | '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? | ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? | ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix ) + alt25 = 4 + alt25 = self.dfa25.predict(self.input) + if alt25 == 1: + # C.g:570:9: ( '0' .. '9' )+ '.' ( '0' .. '9' )* ( Exponent )? ( FloatTypeSuffix )? + # C.g:570:9: ( '0' .. 
'9' )+ + cnt14 = 0 + while True: #loop14 + alt14 = 2 + LA14_0 = self.input.LA(1) + + if ((u'0' <= LA14_0 <= u'9')) : + alt14 = 1 + + + if alt14 == 1: + # C.g:570:10: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + if cnt14 >= 1: + break #loop14 + + eee = EarlyExitException(14, self.input) + raise eee + + cnt14 += 1 + + + self.match(u'.') + + # C.g:570:25: ( '0' .. '9' )* + while True: #loop15 + alt15 = 2 + LA15_0 = self.input.LA(1) + + if ((u'0' <= LA15_0 <= u'9')) : + alt15 = 1 + + + if alt15 == 1: + # C.g:570:26: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + break #loop15 + + + # C.g:570:37: ( Exponent )? + alt16 = 2 + LA16_0 = self.input.LA(1) + + if (LA16_0 == u'E' or LA16_0 == u'e') : + alt16 = 1 + if alt16 == 1: + # C.g:570:37: Exponent + self.mExponent() + + + + + # C.g:570:47: ( FloatTypeSuffix )? + alt17 = 2 + LA17_0 = self.input.LA(1) + + if (LA17_0 == u'D' or LA17_0 == u'F' or LA17_0 == u'd' or LA17_0 == u'f') : + alt17 = 1 + if alt17 == 1: + # C.g:570:47: FloatTypeSuffix + self.mFloatTypeSuffix() + + + + + + + elif alt25 == 2: + # C.g:571:9: '.' ( '0' .. '9' )+ ( Exponent )? ( FloatTypeSuffix )? + self.match(u'.') + + # C.g:571:13: ( '0' .. '9' )+ + cnt18 = 0 + while True: #loop18 + alt18 = 2 + LA18_0 = self.input.LA(1) + + if ((u'0' <= LA18_0 <= u'9')) : + alt18 = 1 + + + if alt18 == 1: + # C.g:571:14: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + if cnt18 >= 1: + break #loop18 + + eee = EarlyExitException(18, self.input) + raise eee + + cnt18 += 1 + + + # C.g:571:25: ( Exponent )? + alt19 = 2 + LA19_0 = self.input.LA(1) + + if (LA19_0 == u'E' or LA19_0 == u'e') : + alt19 = 1 + if alt19 == 1: + # C.g:571:25: Exponent + self.mExponent() + + + + + # C.g:571:35: ( FloatTypeSuffix )? 
+ alt20 = 2 + LA20_0 = self.input.LA(1) + + if (LA20_0 == u'D' or LA20_0 == u'F' or LA20_0 == u'd' or LA20_0 == u'f') : + alt20 = 1 + if alt20 == 1: + # C.g:571:35: FloatTypeSuffix + self.mFloatTypeSuffix() + + + + + + + elif alt25 == 3: + # C.g:572:9: ( '0' .. '9' )+ Exponent ( FloatTypeSuffix )? + # C.g:572:9: ( '0' .. '9' )+ + cnt21 = 0 + while True: #loop21 + alt21 = 2 + LA21_0 = self.input.LA(1) + + if ((u'0' <= LA21_0 <= u'9')) : + alt21 = 1 + + + if alt21 == 1: + # C.g:572:10: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + if cnt21 >= 1: + break #loop21 + + eee = EarlyExitException(21, self.input) + raise eee + + cnt21 += 1 + + + self.mExponent() + + # C.g:572:30: ( FloatTypeSuffix )? + alt22 = 2 + LA22_0 = self.input.LA(1) + + if (LA22_0 == u'D' or LA22_0 == u'F' or LA22_0 == u'd' or LA22_0 == u'f') : + alt22 = 1 + if alt22 == 1: + # C.g:572:30: FloatTypeSuffix + self.mFloatTypeSuffix() + + + + + + + elif alt25 == 4: + # C.g:573:9: ( '0' .. '9' )+ ( Exponent )? FloatTypeSuffix + # C.g:573:9: ( '0' .. '9' )+ + cnt23 = 0 + while True: #loop23 + alt23 = 2 + LA23_0 = self.input.LA(1) + + if ((u'0' <= LA23_0 <= u'9')) : + alt23 = 1 + + + if alt23 == 1: + # C.g:573:10: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + if cnt23 >= 1: + break #loop23 + + eee = EarlyExitException(23, self.input) + raise eee + + cnt23 += 1 + + + # C.g:573:21: ( Exponent )? + alt24 = 2 + LA24_0 = self.input.LA(1) + + if (LA24_0 == u'E' or LA24_0 == u'e') : + alt24 = 1 + if alt24 == 1: + # C.g:573:21: Exponent + self.mExponent() + + + + + self.mFloatTypeSuffix() + + + + + finally: + + pass + + # $ANTLR end FLOATING_POINT_LITERAL + + + + # $ANTLR start Exponent + def mExponent(self, ): + + try: + # C.g:577:10: ( ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. '9' )+ ) + # C.g:577:12: ( 'e' | 'E' ) ( '+' | '-' )? ( '0' .. 
'9' )+ + if self.input.LA(1) == u'E' or self.input.LA(1) == u'e': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + # C.g:577:22: ( '+' | '-' )? + alt26 = 2 + LA26_0 = self.input.LA(1) + + if (LA26_0 == u'+' or LA26_0 == u'-') : + alt26 = 1 + if alt26 == 1: + # C.g: + if self.input.LA(1) == u'+' or self.input.LA(1) == u'-': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + # C.g:577:33: ( '0' .. '9' )+ + cnt27 = 0 + while True: #loop27 + alt27 = 2 + LA27_0 = self.input.LA(1) + + if ((u'0' <= LA27_0 <= u'9')) : + alt27 = 1 + + + if alt27 == 1: + # C.g:577:34: '0' .. '9' + self.matchRange(u'0', u'9') + + + + else: + if cnt27 >= 1: + break #loop27 + + eee = EarlyExitException(27, self.input) + raise eee + + cnt27 += 1 + + + + + + + finally: + + pass + + # $ANTLR end Exponent + + + + # $ANTLR start FloatTypeSuffix + def mFloatTypeSuffix(self, ): + + try: + # C.g:580:17: ( ( 'f' | 'F' | 'd' | 'D' ) ) + # C.g:580:19: ( 'f' | 'F' | 'd' | 'D' ) + if self.input.LA(1) == u'D' or self.input.LA(1) == u'F' or self.input.LA(1) == u'd' or self.input.LA(1) == u'f': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + + + finally: + + pass + + # $ANTLR end FloatTypeSuffix + + + + # $ANTLR start EscapeSequence + def mEscapeSequence(self, ): + + try: + # C.g:584:5: ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape ) + alt28 = 2 + LA28_0 = self.input.LA(1) + + if (LA28_0 == u'\\') : + LA28_1 = self.input.LA(2) + + if (LA28_1 == u'"' or LA28_1 == u'\'' or LA28_1 == u'\\' or LA28_1 == u'b' or LA28_1 == u'f' or LA28_1 == u'n' or LA28_1 == u'r' or LA28_1 == u't') : + alt28 = 1 + elif ((u'0' <= LA28_1 <= u'7')) : + alt28 = 2 + else: + nvae = NoViableAltException("582:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' 
| '\\'' | '\\\\' ) | OctalEscape );", 28, 1, self.input) + + raise nvae + + else: + nvae = NoViableAltException("582:1: fragment EscapeSequence : ( '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) | OctalEscape );", 28, 0, self.input) + + raise nvae + + if alt28 == 1: + # C.g:584:8: '\\\\' ( 'b' | 't' | 'n' | 'f' | 'r' | '\\\"' | '\\'' | '\\\\' ) + self.match(u'\\') + + if self.input.LA(1) == u'"' or self.input.LA(1) == u'\'' or self.input.LA(1) == u'\\' or self.input.LA(1) == u'b' or self.input.LA(1) == u'f' or self.input.LA(1) == u'n' or self.input.LA(1) == u'r' or self.input.LA(1) == u't': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + elif alt28 == 2: + # C.g:585:9: OctalEscape + self.mOctalEscape() + + + + + finally: + + pass + + # $ANTLR end EscapeSequence + + + + # $ANTLR start OctalEscape + def mOctalEscape(self, ): + + try: + # C.g:590:5: ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ) + alt29 = 3 + LA29_0 = self.input.LA(1) + + if (LA29_0 == u'\\') : + LA29_1 = self.input.LA(2) + + if ((u'0' <= LA29_1 <= u'3')) : + LA29_2 = self.input.LA(3) + + if ((u'0' <= LA29_2 <= u'7')) : + LA29_4 = self.input.LA(4) + + if ((u'0' <= LA29_4 <= u'7')) : + alt29 = 1 + else: + alt29 = 2 + else: + alt29 = 3 + elif ((u'4' <= LA29_1 <= u'7')) : + LA29_3 = self.input.LA(3) + + if ((u'0' <= LA29_3 <= u'7')) : + alt29 = 2 + else: + alt29 = 3 + else: + nvae = NoViableAltException("588:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) );", 29, 1, self.input) + + raise nvae + + else: + nvae = NoViableAltException("588:1: fragment OctalEscape : ( '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) | '\\\\' ( '0' .. 
'7' ) );", 29, 0, self.input) + + raise nvae + + if alt29 == 1: + # C.g:590:9: '\\\\' ( '0' .. '3' ) ( '0' .. '7' ) ( '0' .. '7' ) + self.match(u'\\') + + # C.g:590:14: ( '0' .. '3' ) + # C.g:590:15: '0' .. '3' + self.matchRange(u'0', u'3') + + + + + # C.g:590:25: ( '0' .. '7' ) + # C.g:590:26: '0' .. '7' + self.matchRange(u'0', u'7') + + + + + # C.g:590:36: ( '0' .. '7' ) + # C.g:590:37: '0' .. '7' + self.matchRange(u'0', u'7') + + + + + + + elif alt29 == 2: + # C.g:591:9: '\\\\' ( '0' .. '7' ) ( '0' .. '7' ) + self.match(u'\\') + + # C.g:591:14: ( '0' .. '7' ) + # C.g:591:15: '0' .. '7' + self.matchRange(u'0', u'7') + + + + + # C.g:591:25: ( '0' .. '7' ) + # C.g:591:26: '0' .. '7' + self.matchRange(u'0', u'7') + + + + + + + elif alt29 == 3: + # C.g:592:9: '\\\\' ( '0' .. '7' ) + self.match(u'\\') + + # C.g:592:14: ( '0' .. '7' ) + # C.g:592:15: '0' .. '7' + self.matchRange(u'0', u'7') + + + + + + + + finally: + + pass + + # $ANTLR end OctalEscape + + + + # $ANTLR start UnicodeEscape + def mUnicodeEscape(self, ): + + try: + # C.g:597:5: ( '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit ) + # C.g:597:9: '\\\\' 'u' HexDigit HexDigit HexDigit HexDigit + self.match(u'\\') + + self.match(u'u') + + self.mHexDigit() + + self.mHexDigit() + + self.mHexDigit() + + self.mHexDigit() + + + + + + finally: + + pass + + # $ANTLR end UnicodeEscape + + + + # $ANTLR start WS + def mWS(self, ): + + try: + self.type = WS + + # C.g:600:5: ( ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) ) + # C.g:600:8: ( ' ' | '\\r' | '\\t' | '\\u000C' | '\\n' ) + if (u'\t' <= self.input.LA(1) <= u'\n') or (u'\f' <= self.input.LA(1) <= u'\r') or self.input.LA(1) == u' ': + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + #action start + self.channel=HIDDEN; + #action end + + + + + finally: + + pass + + # $ANTLR end WS + + + + # $ANTLR start BS + def mBS(self, ): + + try: + self.type = BS + + # C.g:604:5: ( ( '\\\\' ) ) + # C.g:604:7: ( 
'\\\\' ) + # C.g:604:7: ( '\\\\' ) + # C.g:604:8: '\\\\' + self.match(u'\\') + + + + + #action start + self.channel=HIDDEN; + #action end + + + + + finally: + + pass + + # $ANTLR end BS + + + + # $ANTLR start UnicodeVocabulary + def mUnicodeVocabulary(self, ): + + try: + self.type = UnicodeVocabulary + + # C.g:612:5: ( '\\u0003' .. '\\uFFFE' ) + # C.g:612:7: '\\u0003' .. '\\uFFFE' + self.matchRange(u'\u0003', u'\uFFFE') + + + + + + finally: + + pass + + # $ANTLR end UnicodeVocabulary + + + + # $ANTLR start COMMENT + def mCOMMENT(self, ): + + try: + self.type = COMMENT + + # C.g:615:5: ( '/*' ( options {greedy=false; } : . )* '*/' ) + # C.g:615:9: '/*' ( options {greedy=false; } : . )* '*/' + self.match("/*") + + + # C.g:615:14: ( options {greedy=false; } : . )* + while True: #loop30 + alt30 = 2 + LA30_0 = self.input.LA(1) + + if (LA30_0 == u'*') : + LA30_1 = self.input.LA(2) + + if (LA30_1 == u'/') : + alt30 = 2 + elif ((u'\u0000' <= LA30_1 <= u'.') or (u'0' <= LA30_1 <= u'\uFFFE')) : + alt30 = 1 + + + elif ((u'\u0000' <= LA30_0 <= u')') or (u'+' <= LA30_0 <= u'\uFFFE')) : + alt30 = 1 + + + if alt30 == 1: + # C.g:615:42: . + self.matchAny() + + + + else: + break #loop30 + + + self.match("*/") + + + #action start + self.channel=HIDDEN; + #action end + + + + + finally: + + pass + + # $ANTLR end COMMENT + + + + # $ANTLR start LINE_COMMENT + def mLINE_COMMENT(self, ): + + try: + self.type = LINE_COMMENT + + # C.g:620:5: ( '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' ) + # C.g:620:7: '//' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? 
'\\n' + self.match("//") + + + # C.g:620:12: (~ ( '\\n' | '\\r' ) )* + while True: #loop31 + alt31 = 2 + LA31_0 = self.input.LA(1) + + if ((u'\u0000' <= LA31_0 <= u'\t') or (u'\u000B' <= LA31_0 <= u'\f') or (u'\u000E' <= LA31_0 <= u'\uFFFE')) : + alt31 = 1 + + + if alt31 == 1: + # C.g:620:12: ~ ( '\\n' | '\\r' ) + if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + else: + break #loop31 + + + # C.g:620:26: ( '\\r' )? + alt32 = 2 + LA32_0 = self.input.LA(1) + + if (LA32_0 == u'\r') : + alt32 = 1 + if alt32 == 1: + # C.g:620:26: '\\r' + self.match(u'\r') + + + + + self.match(u'\n') + + #action start + self.channel=HIDDEN; + #action end + + + + + finally: + + pass + + # $ANTLR end LINE_COMMENT + + + + # $ANTLR start LINE_COMMAND + def mLINE_COMMAND(self, ): + + try: + self.type = LINE_COMMAND + + # C.g:625:5: ( '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' ) + # C.g:625:7: '#' (~ ( '\\n' | '\\r' ) )* ( '\\r' )? '\\n' + self.match(u'#') + + # C.g:625:11: (~ ( '\\n' | '\\r' ) )* + while True: #loop33 + alt33 = 2 + LA33_0 = self.input.LA(1) + + if ((u'\u0000' <= LA33_0 <= u'\t') or (u'\u000B' <= LA33_0 <= u'\f') or (u'\u000E' <= LA33_0 <= u'\uFFFE')) : + alt33 = 1 + + + if alt33 == 1: + # C.g:625:11: ~ ( '\\n' | '\\r' ) + if (u'\u0000' <= self.input.LA(1) <= u'\t') or (u'\u000B' <= self.input.LA(1) <= u'\f') or (u'\u000E' <= self.input.LA(1) <= u'\uFFFE'): + self.input.consume(); + + else: + mse = MismatchedSetException(None, self.input) + self.recover(mse) + raise mse + + + + + else: + break #loop33 + + + # C.g:625:25: ( '\\r' )? 
+ alt34 = 2 + LA34_0 = self.input.LA(1) + + if (LA34_0 == u'\r') : + alt34 = 1 + if alt34 == 1: + # C.g:625:25: '\\r' + self.match(u'\r') + + + + + self.match(u'\n') + + #action start + self.channel=HIDDEN; + #action end + + + + + finally: + + pass + + # $ANTLR end LINE_COMMAND + + + + def mTokens(self): + # C.g:1:8: ( T25 | T26 | T27 | T28 | T29 | T30 | T31 | T32 | T33 | T34 | T35 | T36 | T37 | T38 | T39 | T40 | T41 | T42 | T43 | T44 | T45 | T46 | T47 | T48 | T49 | T50 | T51 | T52 | T53 | T54 | T55 | T56 | T57 | T58 | T59 | T60 | T61 | T62 | T63 | T64 | T65 | T66 | T67 | T68 | T69 | T70 | T71 | T72 | T73 | T74 | T75 | T76 | T77 | T78 | T79 | T80 | T81 | T82 | T83 | T84 | T85 | T86 | T87 | T88 | T89 | T90 | T91 | T92 | T93 | T94 | T95 | T96 | T97 | T98 | T99 | T100 | T101 | T102 | T103 | T104 | T105 | T106 | T107 | T108 | T109 | T110 | T111 | T112 | T113 | T114 | T115 | T116 | IDENTIFIER | CHARACTER_LITERAL | STRING_LITERAL | HEX_LITERAL | DECIMAL_LITERAL | OCTAL_LITERAL | FLOATING_POINT_LITERAL | WS | BS | UnicodeVocabulary | COMMENT | LINE_COMMENT | LINE_COMMAND ) + alt35 = 105 + alt35 = self.dfa35.predict(self.input) + if alt35 == 1: + # C.g:1:10: T25 + self.mT25() + + + + elif alt35 == 2: + # C.g:1:14: T26 + self.mT26() + + + + elif alt35 == 3: + # C.g:1:18: T27 + self.mT27() + + + + elif alt35 == 4: + # C.g:1:22: T28 + self.mT28() + + + + elif alt35 == 5: + # C.g:1:26: T29 + self.mT29() + + + + elif alt35 == 6: + # C.g:1:30: T30 + self.mT30() + + + + elif alt35 == 7: + # C.g:1:34: T31 + self.mT31() + + + + elif alt35 == 8: + # C.g:1:38: T32 + self.mT32() + + + + elif alt35 == 9: + # C.g:1:42: T33 + self.mT33() + + + + elif alt35 == 10: + # C.g:1:46: T34 + self.mT34() + + + + elif alt35 == 11: + # C.g:1:50: T35 + self.mT35() + + + + elif alt35 == 12: + # C.g:1:54: T36 + self.mT36() + + + + elif alt35 == 13: + # C.g:1:58: T37 + self.mT37() + + + + elif alt35 == 14: + # C.g:1:62: T38 + self.mT38() + + + + elif alt35 == 15: + # C.g:1:66: T39 + self.mT39() + + + + 
elif alt35 == 16: + # C.g:1:70: T40 + self.mT40() + + + + elif alt35 == 17: + # C.g:1:74: T41 + self.mT41() + + + + elif alt35 == 18: + # C.g:1:78: T42 + self.mT42() + + + + elif alt35 == 19: + # C.g:1:82: T43 + self.mT43() + + + + elif alt35 == 20: + # C.g:1:86: T44 + self.mT44() + + + + elif alt35 == 21: + # C.g:1:90: T45 + self.mT45() + + + + elif alt35 == 22: + # C.g:1:94: T46 + self.mT46() + + + + elif alt35 == 23: + # C.g:1:98: T47 + self.mT47() + + + + elif alt35 == 24: + # C.g:1:102: T48 + self.mT48() + + + + elif alt35 == 25: + # C.g:1:106: T49 + self.mT49() + + + + elif alt35 == 26: + # C.g:1:110: T50 + self.mT50() + + + + elif alt35 == 27: + # C.g:1:114: T51 + self.mT51() + + + + elif alt35 == 28: + # C.g:1:118: T52 + self.mT52() + + + + elif alt35 == 29: + # C.g:1:122: T53 + self.mT53() + + + + elif alt35 == 30: + # C.g:1:126: T54 + self.mT54() + + + + elif alt35 == 31: + # C.g:1:130: T55 + self.mT55() + + + + elif alt35 == 32: + # C.g:1:134: T56 + self.mT56() + + + + elif alt35 == 33: + # C.g:1:138: T57 + self.mT57() + + + + elif alt35 == 34: + # C.g:1:142: T58 + self.mT58() + + + + elif alt35 == 35: + # C.g:1:146: T59 + self.mT59() + + + + elif alt35 == 36: + # C.g:1:150: T60 + self.mT60() + + + + elif alt35 == 37: + # C.g:1:154: T61 + self.mT61() + + + + elif alt35 == 38: + # C.g:1:158: T62 + self.mT62() + + + + elif alt35 == 39: + # C.g:1:162: T63 + self.mT63() + + + + elif alt35 == 40: + # C.g:1:166: T64 + self.mT64() + + + + elif alt35 == 41: + # C.g:1:170: T65 + self.mT65() + + + + elif alt35 == 42: + # C.g:1:174: T66 + self.mT66() + + + + elif alt35 == 43: + # C.g:1:178: T67 + self.mT67() + + + + elif alt35 == 44: + # C.g:1:182: T68 + self.mT68() + + + + elif alt35 == 45: + # C.g:1:186: T69 + self.mT69() + + + + elif alt35 == 46: + # C.g:1:190: T70 + self.mT70() + + + + elif alt35 == 47: + # C.g:1:194: T71 + self.mT71() + + + + elif alt35 == 48: + # C.g:1:198: T72 + self.mT72() + + + + elif alt35 == 49: + # C.g:1:202: T73 + self.mT73() + + + + 
elif alt35 == 50: + # C.g:1:206: T74 + self.mT74() + + + + elif alt35 == 51: + # C.g:1:210: T75 + self.mT75() + + + + elif alt35 == 52: + # C.g:1:214: T76 + self.mT76() + + + + elif alt35 == 53: + # C.g:1:218: T77 + self.mT77() + + + + elif alt35 == 54: + # C.g:1:222: T78 + self.mT78() + + + + elif alt35 == 55: + # C.g:1:226: T79 + self.mT79() + + + + elif alt35 == 56: + # C.g:1:230: T80 + self.mT80() + + + + elif alt35 == 57: + # C.g:1:234: T81 + self.mT81() + + + + elif alt35 == 58: + # C.g:1:238: T82 + self.mT82() + + + + elif alt35 == 59: + # C.g:1:242: T83 + self.mT83() + + + + elif alt35 == 60: + # C.g:1:246: T84 + self.mT84() + + + + elif alt35 == 61: + # C.g:1:250: T85 + self.mT85() + + + + elif alt35 == 62: + # C.g:1:254: T86 + self.mT86() + + + + elif alt35 == 63: + # C.g:1:258: T87 + self.mT87() + + + + elif alt35 == 64: + # C.g:1:262: T88 + self.mT88() + + + + elif alt35 == 65: + # C.g:1:266: T89 + self.mT89() + + + + elif alt35 == 66: + # C.g:1:270: T90 + self.mT90() + + + + elif alt35 == 67: + # C.g:1:274: T91 + self.mT91() + + + + elif alt35 == 68: + # C.g:1:278: T92 + self.mT92() + + + + elif alt35 == 69: + # C.g:1:282: T93 + self.mT93() + + + + elif alt35 == 70: + # C.g:1:286: T94 + self.mT94() + + + + elif alt35 == 71: + # C.g:1:290: T95 + self.mT95() + + + + elif alt35 == 72: + # C.g:1:294: T96 + self.mT96() + + + + elif alt35 == 73: + # C.g:1:298: T97 + self.mT97() + + + + elif alt35 == 74: + # C.g:1:302: T98 + self.mT98() + + + + elif alt35 == 75: + # C.g:1:306: T99 + self.mT99() + + + + elif alt35 == 76: + # C.g:1:310: T100 + self.mT100() + + + + elif alt35 == 77: + # C.g:1:315: T101 + self.mT101() + + + + elif alt35 == 78: + # C.g:1:320: T102 + self.mT102() + + + + elif alt35 == 79: + # C.g:1:325: T103 + self.mT103() + + + + elif alt35 == 80: + # C.g:1:330: T104 + self.mT104() + + + + elif alt35 == 81: + # C.g:1:335: T105 + self.mT105() + + + + elif alt35 == 82: + # C.g:1:340: T106 + self.mT106() + + + + elif alt35 == 83: + # C.g:1:345: T107 
+ self.mT107() + + + + elif alt35 == 84: + # C.g:1:350: T108 + self.mT108() + + + + elif alt35 == 85: + # C.g:1:355: T109 + self.mT109() + + + + elif alt35 == 86: + # C.g:1:360: T110 + self.mT110() + + + + elif alt35 == 87: + # C.g:1:365: T111 + self.mT111() + + + + elif alt35 == 88: + # C.g:1:370: T112 + self.mT112() + + + + elif alt35 == 89: + # C.g:1:375: T113 + self.mT113() + + + + elif alt35 == 90: + # C.g:1:380: T114 + self.mT114() + + + + elif alt35 == 91: + # C.g:1:385: T115 + self.mT115() + + + + elif alt35 == 92: + # C.g:1:390: T116 + self.mT116() + + + + elif alt35 == 93: + # C.g:1:395: IDENTIFIER + self.mIDENTIFIER() + + + + elif alt35 == 94: + # C.g:1:406: CHARACTER_LITERAL + self.mCHARACTER_LITERAL() + + + + elif alt35 == 95: + # C.g:1:424: STRING_LITERAL + self.mSTRING_LITERAL() + + + + elif alt35 == 96: + # C.g:1:439: HEX_LITERAL + self.mHEX_LITERAL() + + + + elif alt35 == 97: + # C.g:1:451: DECIMAL_LITERAL + self.mDECIMAL_LITERAL() + + + + elif alt35 == 98: + # C.g:1:467: OCTAL_LITERAL + self.mOCTAL_LITERAL() + + + + elif alt35 == 99: + # C.g:1:481: FLOATING_POINT_LITERAL + self.mFLOATING_POINT_LITERAL() + + + + elif alt35 == 100: + # C.g:1:504: WS + self.mWS() + + + + elif alt35 == 101: + # C.g:1:507: BS + self.mBS() + + + + elif alt35 == 102: + # C.g:1:510: UnicodeVocabulary + self.mUnicodeVocabulary() + + + + elif alt35 == 103: + # C.g:1:528: COMMENT + self.mCOMMENT() + + + + elif alt35 == 104: + # C.g:1:536: LINE_COMMENT + self.mLINE_COMMENT() + + + + elif alt35 == 105: + # C.g:1:549: LINE_COMMAND + self.mLINE_COMMAND() + + + + + + + + + # lookup tables for DFA #25 + + DFA25_eot = DFA.unpack( + u"\7\uffff\1\10\2\uffff" + ) + + DFA25_eof = DFA.unpack( + u"\12\uffff" + ) + + DFA25_min = DFA.unpack( + u"\2\56\1\uffff\1\53\2\uffff\2\60\2\uffff" + ) + + DFA25_max = DFA.unpack( + u"\1\71\1\146\1\uffff\1\71\2\uffff\1\71\1\146\2\uffff" + ) + + DFA25_accept = DFA.unpack( + u"\2\uffff\1\2\1\uffff\1\4\1\1\2\uffff\2\3" + ) + + DFA25_special = DFA.unpack( + 
u"\12\uffff" + ) + + + DFA25_transition = [ + DFA.unpack(u"\1\2\1\uffff\12\1"), + DFA.unpack(u"\1\5\1\uffff\12\1\12\uffff\1\4\1\3\1\4\35\uffff\1\4" + u"\1\3\1\4"), + DFA.unpack(u""), + DFA.unpack(u"\1\6\1\uffff\1\6\2\uffff\12\7"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\12\7"), + DFA.unpack(u"\12\7\12\uffff\1\11\1\uffff\1\11\35\uffff\1\11\1\uffff" + u"\1\11"), + DFA.unpack(u""), + DFA.unpack(u"") + ] + + # class definition for DFA #25 + + DFA25 = DFA + # lookup tables for DFA #35 + + DFA35_eot = DFA.unpack( + u"\2\uffff\1\75\1\uffff\1\100\14\75\3\uffff\7\75\4\uffff\1\147\1" + u"\151\1\155\1\161\1\165\1\167\1\172\1\uffff\1\175\1\u0080\1\u0083" + u"\1\u0085\1\u0088\1\uffff\5\75\1\uffff\2\72\2\u0092\2\uffff\1\72" + u"\2\uffff\1\75\4\uffff\16\75\1\u00ab\4\75\1\u00b1\2\75\3\uffff\1" + u"\u00b5\7\75\35\uffff\1\u00be\1\uffff\1\u00c0\10\uffff\5\75\4\uffff" + u"\1\u00c6\1\u0092\3\uffff\23\75\1\uffff\1\u00db\1\75\1\u00dd\2\75" + u"\1\uffff\3\75\1\uffff\1\u00e3\6\75\4\uffff\5\75\1\uffff\1\75\1" + u"\u00f1\1\u00f2\7\75\1\u00fa\3\75\1\u00fe\3\75\1\u0102\1\u0103\1" + u"\uffff\1\u0104\1\uffff\5\75\1\uffff\10\75\1\u0113\1\75\1\u0115" + u"\2\75\2\uffff\6\75\1\u011e\1\uffff\3\75\1\uffff\2\75\1\u0124\3" + u"\uffff\1\u0125\3\75\1\u0129\1\75\1\u012b\6\75\1\u0133\1\uffff\1" + u"\u0134\1\uffff\1\u0135\1\75\1\u0137\1\u0138\1\u0139\1\u013a\1\u013b" + u"\1\u013c\1\uffff\1\75\1\u013e\1\u013f\2\75\2\uffff\1\u0142\2\75" + u"\1\uffff\1\75\1\uffff\5\75\1\u014b\1\75\3\uffff\1\u014d\6\uffff" + u"\1\75\2\uffff\2\75\1\uffff\1\u0151\7\75\1\uffff\1\u0159\1\uffff" + u"\1\u015a\1\u015b\1\u015c\1\uffff\1\u015d\1\u015e\1\75\1\u0160\3" + u"\75\6\uffff\1\u0164\1\uffff\3\75\1\uffff\20\75\1\u0178\2\75\1\uffff" + u"\4\75\1\u017f\1\75\1\uffff\11\75\1\u018a\1\uffff" + ) + + DFA35_eof = DFA.unpack( + u"\u018b\uffff" + ) + + DFA35_min = DFA.unpack( + u"\1\3\1\uffff\1\171\1\uffff\1\75\1\154\1\150\1\165\1\145\1\124\1" + u"\157\1\141\1\146\1\157\1\154\1\145\1\156\3\uffff\1\116\1\120\1" + 
u"\117\1\116\1\117\1\114\1\106\4\uffff\1\75\1\56\1\53\1\55\1\52\1" + u"\75\1\46\1\uffff\1\75\1\74\3\75\1\uffff\1\137\1\150\1\157\1\162" + u"\1\42\1\uffff\2\0\2\56\2\uffff\1\0\2\uffff\1\160\4\uffff\1\165" + u"\1\163\1\164\1\141\1\151\1\147\1\157\1\164\1\147\1\101\1\151\1" + u"\156\1\163\1\141\1\44\1\164\1\156\1\162\1\157\1\44\1\146\1\151" + u"\3\uffff\1\44\2\124\1\116\1\101\1\114\1\117\1\111\35\uffff\1\75" + u"\1\uffff\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\4\uffff\2" + u"\56\3\uffff\1\145\1\155\2\145\1\165\2\164\1\156\1\145\1\162\1\157" + u"\1\151\1\165\1\124\1\144\1\141\1\163\1\145\1\162\1\uffff\1\44\1" + u"\147\1\44\1\141\1\142\1\uffff\1\141\1\151\1\157\1\uffff\1\44\1" + u"\111\1\123\1\114\1\101\1\102\1\101\4\uffff\1\163\1\155\1\154\1" + u"\157\1\141\1\uffff\1\144\2\44\1\162\1\143\1\151\1\143\1\145\1\157" + u"\1\164\1\44\1\163\1\162\1\111\1\44\1\164\1\151\1\164\2\44\1\uffff" + u"\1\44\1\uffff\1\164\1\154\1\165\1\147\1\156\1\uffff\1\117\1\124" + u"\1\111\1\124\1\101\1\102\1\120\1\155\1\44\1\145\1\44\1\153\1\145" + u"\2\uffff\1\156\1\164\1\143\1\150\1\144\1\146\1\44\1\uffff\1\164" + u"\1\156\1\103\1\uffff\1\151\1\156\1\44\3\uffff\1\44\1\145\1\154" + u"\1\156\1\44\1\116\1\44\1\107\1\111\1\114\1\117\1\125\1\111\1\44" + u"\1\uffff\1\44\1\uffff\1\44\1\146\6\44\1\uffff\1\145\2\44\1\154" + u"\1\165\2\uffff\1\44\1\164\1\145\1\uffff\1\101\1\uffff\1\116\1\114" + u"\1\137\1\117\1\116\1\44\1\137\3\uffff\1\44\6\uffff\1\162\2\uffff" + u"\2\145\1\uffff\1\44\1\144\1\114\2\105\1\122\2\124\1\uffff\1\44" + u"\1\uffff\3\44\1\uffff\2\44\1\104\1\44\1\105\1\123\1\111\6\uffff" + u"\1\44\1\uffff\1\115\1\105\1\115\1\uffff\1\117\1\122\1\105\2\126" + u"\1\123\1\105\1\111\1\105\1\137\1\103\1\122\1\111\1\105\1\126\1" + u"\106\1\44\1\111\1\137\1\uffff\1\103\1\125\1\105\1\116\1\44\1\122" + u"\1\uffff\1\105\1\106\1\105\1\122\1\105\1\116\1\103\1\105\1\104" + u"\1\44\1\uffff" + ) + + DFA35_max = DFA.unpack( + u"\1\ufffe\1\uffff\1\171\1\uffff\1\75\1\170\1\167\1\165\1\145\1\124" + 
u"\2\157\1\156\3\157\1\156\3\uffff\1\116\1\125\1\117\1\116\1\117" + u"\1\114\1\106\4\uffff\1\75\1\71\1\75\1\76\3\75\1\uffff\2\75\1\76" + u"\1\75\1\174\1\uffff\1\141\1\150\1\157\1\162\1\47\1\uffff\2\ufffe" + u"\1\170\1\146\2\uffff\1\ufffe\2\uffff\1\160\4\uffff\1\165\1\163" + u"\1\164\1\162\1\151\1\172\1\157\2\164\1\101\1\154\1\156\1\163\1" + u"\141\1\172\1\164\1\156\1\162\1\157\1\172\1\146\1\163\3\uffff\1" + u"\172\2\124\1\116\1\101\1\114\1\117\1\111\35\uffff\1\75\1\uffff" + u"\1\75\10\uffff\1\141\1\163\1\151\1\164\1\145\4\uffff\2\146\3\uffff" + u"\1\145\1\155\2\145\1\165\2\164\1\156\1\145\1\162\1\157\1\151\1" + u"\165\1\124\1\144\1\141\1\164\1\145\1\162\1\uffff\1\172\1\147\1" + u"\172\1\141\1\142\1\uffff\1\141\1\151\1\157\1\uffff\1\172\1\111" + u"\1\123\1\114\1\101\1\102\1\137\4\uffff\1\163\1\155\1\154\1\157" + u"\1\141\1\uffff\1\144\2\172\1\162\1\143\1\151\1\143\1\145\1\157" + u"\1\164\1\172\1\163\1\162\1\111\1\172\1\164\1\151\1\164\2\172\1" + u"\uffff\1\172\1\uffff\1\164\1\154\1\165\1\147\1\156\1\uffff\1\117" + u"\1\124\1\111\1\124\1\101\1\122\1\120\1\155\1\172\1\145\1\172\1" + u"\153\1\145\2\uffff\1\156\1\164\1\143\1\150\1\144\1\146\1\172\1" + u"\uffff\1\164\1\156\1\103\1\uffff\1\151\1\156\1\172\3\uffff\1\172" + u"\1\145\1\154\1\156\1\172\1\116\1\172\1\107\1\111\1\114\1\117\1" + u"\125\1\111\1\172\1\uffff\1\172\1\uffff\1\172\1\146\6\172\1\uffff" + u"\1\145\2\172\1\154\1\165\2\uffff\1\172\1\164\1\145\1\uffff\1\101" + u"\1\uffff\1\116\1\114\1\137\1\117\1\116\1\172\1\137\3\uffff\1\172" + u"\6\uffff\1\162\2\uffff\2\145\1\uffff\1\172\1\144\1\114\2\105\1" + u"\122\2\124\1\uffff\1\172\1\uffff\3\172\1\uffff\2\172\1\104\1\172" + u"\1\105\1\123\1\111\6\uffff\1\172\1\uffff\1\115\1\105\1\115\1\uffff" + u"\1\117\1\122\1\105\2\126\1\123\1\105\1\111\1\105\1\137\1\103\1" + u"\122\1\111\1\105\1\126\1\106\1\172\1\111\1\137\1\uffff\1\103\1" + u"\125\1\105\1\116\1\172\1\122\1\uffff\1\105\1\106\1\105\1\122\1" + u"\105\1\116\1\103\1\105\1\104\1\172\1\uffff" + ) + + DFA35_accept = 
DFA.unpack( + u"\1\uffff\1\1\1\uffff\1\3\15\uffff\1\23\1\24\1\27\7\uffff\1\45\1" + u"\46\1\47\1\50\7\uffff\1\65\5\uffff\1\101\5\uffff\1\135\4\uffff" + u"\1\144\1\145\1\uffff\1\146\1\1\1\uffff\1\135\1\3\1\106\1\4\26\uffff" + u"\1\23\1\24\1\27\10\uffff\1\45\1\46\1\47\1\50\1\67\1\51\1\52\1\62" + u"\1\143\1\57\1\72\1\53\1\63\1\73\1\60\1\54\1\70\1\150\1\147\1\55" + u"\1\71\1\56\1\76\1\103\1\64\1\65\1\107\1\66\1\112\1\uffff\1\110" + u"\1\uffff\1\113\1\111\1\77\1\105\1\100\1\102\1\104\1\101\5\uffff" + u"\1\136\1\137\1\140\1\141\2\uffff\1\144\1\145\1\151\23\uffff\1\123" + u"\5\uffff\1\127\3\uffff\1\33\7\uffff\1\74\1\114\1\75\1\115\5\uffff" + u"\1\142\24\uffff\1\15\1\uffff\1\130\5\uffff\1\34\15\uffff\1\30\1" + u"\124\7\uffff\1\7\3\uffff\1\12\3\uffff\1\121\1\13\1\16\16\uffff" + u"\1\117\1\uffff\1\131\10\uffff\1\14\5\uffff\1\31\1\17\3\uffff\1" + u"\26\1\uffff\1\36\7\uffff\1\120\1\126\1\133\1\uffff\1\5\1\25\1\6" + u"\1\125\1\21\1\61\1\uffff\1\134\1\11\2\uffff\1\20\10\uffff\1\42" + u"\1\uffff\1\2\3\uffff\1\122\7\uffff\1\116\1\10\1\32\1\132\1\22\1" + u"\35\1\uffff\1\40\3\uffff\1\37\23\uffff\1\43\6\uffff\1\44\12\uffff" + u"\1\41" + ) + + DFA35_special = DFA.unpack( + u"\u018b\uffff" + ) + + + DFA35_transition = [ + DFA.unpack(u"\6\72\2\67\1\72\2\67\22\72\1\67\1\47\1\64\1\71\1\62" + u"\1\44\1\45\1\63\1\33\1\34\1\37\1\41\1\3\1\42\1\40\1\43\1\65\11" + u"\66\1\23\1\1\1\50\1\4\1\51\1\54\1\72\2\62\1\26\1\62\1\32\1\62\1" + u"\31\1\62\1\24\2\62\1\61\2\62\1\25\3\62\1\11\1\62\1\27\1\30\4\62" + u"\1\35\1\70\1\36\1\52\1\55\1\72\1\7\1\60\1\13\1\17\1\5\1\16\1\57" + u"\1\62\1\14\2\62\1\15\5\62\1\10\1\6\1\2\1\20\1\12\1\56\3\62\1\21" + u"\1\53\1\22\1\46\uff80\72"), + DFA.unpack(u""), + DFA.unpack(u"\1\74"), + DFA.unpack(u""), + DFA.unpack(u"\1\77"), + DFA.unpack(u"\1\102\1\uffff\1\101\11\uffff\1\103"), + DFA.unpack(u"\1\107\1\106\12\uffff\1\104\2\uffff\1\105"), + DFA.unpack(u"\1\110"), + DFA.unpack(u"\1\111"), + DFA.unpack(u"\1\112"), + DFA.unpack(u"\1\113"), + 
DFA.unpack(u"\1\115\6\uffff\1\116\6\uffff\1\114"), + DFA.unpack(u"\1\117\7\uffff\1\120"), + DFA.unpack(u"\1\121"), + DFA.unpack(u"\1\123\2\uffff\1\122"), + DFA.unpack(u"\1\125\11\uffff\1\124"), + DFA.unpack(u"\1\126"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\132"), + DFA.unpack(u"\1\134\4\uffff\1\133"), + DFA.unpack(u"\1\135"), + DFA.unpack(u"\1\136"), + DFA.unpack(u"\1\137"), + DFA.unpack(u"\1\140"), + DFA.unpack(u"\1\141"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\146"), + DFA.unpack(u"\1\150\1\uffff\12\152"), + DFA.unpack(u"\1\153\21\uffff\1\154"), + DFA.unpack(u"\1\160\17\uffff\1\157\1\156"), + DFA.unpack(u"\1\164\4\uffff\1\163\15\uffff\1\162"), + DFA.unpack(u"\1\166"), + DFA.unpack(u"\1\171\26\uffff\1\170"), + DFA.unpack(u""), + DFA.unpack(u"\1\174"), + DFA.unpack(u"\1\177\1\176"), + DFA.unpack(u"\1\u0082\1\u0081"), + DFA.unpack(u"\1\u0084"), + DFA.unpack(u"\1\u0086\76\uffff\1\u0087"), + DFA.unpack(u""), + DFA.unpack(u"\1\u008a\1\uffff\1\u008b"), + DFA.unpack(u"\1\u008c"), + DFA.unpack(u"\1\u008d"), + DFA.unpack(u"\1\u008e"), + DFA.unpack(u"\1\u0090\4\uffff\1\u008f"), + DFA.unpack(u""), + DFA.unpack(u"\47\u008f\1\uffff\uffd7\u008f"), + DFA.unpack(u"\uffff\u0090"), + DFA.unpack(u"\1\152\1\uffff\10\u0093\2\152\12\uffff\3\152\21\uffff" + u"\1\u0091\13\uffff\3\152\21\uffff\1\u0091"), + DFA.unpack(u"\1\152\1\uffff\12\u0094\12\uffff\3\152\35\uffff\3\152"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\uffff\u0097"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u0098"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u0099"), + DFA.unpack(u"\1\u009a"), + DFA.unpack(u"\1\u009b"), + DFA.unpack(u"\1\u009d\20\uffff\1\u009c"), + DFA.unpack(u"\1\u009e"), + DFA.unpack(u"\1\u009f\22\uffff\1\u00a0"), + DFA.unpack(u"\1\u00a1"), + DFA.unpack(u"\1\u00a2"), + DFA.unpack(u"\1\u00a3\14\uffff\1\u00a4"), + DFA.unpack(u"\1\u00a5"), + 
DFA.unpack(u"\1\u00a6\2\uffff\1\u00a7"), + DFA.unpack(u"\1\u00a8"), + DFA.unpack(u"\1\u00a9"), + DFA.unpack(u"\1\u00aa"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00ac"), + DFA.unpack(u"\1\u00ad"), + DFA.unpack(u"\1\u00ae"), + DFA.unpack(u"\1\u00af"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\24\75\1\u00b0\5\75"), + DFA.unpack(u"\1\u00b2"), + DFA.unpack(u"\1\u00b4\11\uffff\1\u00b3"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00b6"), + DFA.unpack(u"\1\u00b7"), + DFA.unpack(u"\1\u00b8"), + DFA.unpack(u"\1\u00b9"), + DFA.unpack(u"\1\u00ba"), + DFA.unpack(u"\1\u00bb"), + DFA.unpack(u"\1\u00bc"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u00bd"), + DFA.unpack(u""), + DFA.unpack(u"\1\u00bf"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u00c1"), + DFA.unpack(u"\1\u00c2"), + DFA.unpack(u"\1\u00c3"), + DFA.unpack(u"\1\u00c4"), + DFA.unpack(u"\1\u00c5"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\152\1\uffff\10\u0093\2\152\12\uffff\3\152\35\uffff" + u"\3\152"), + DFA.unpack(u"\1\152\1\uffff\12\u0094\12\uffff\3\152\35\uffff\3\152"), + DFA.unpack(u""), + DFA.unpack(u""), 
+ DFA.unpack(u""), + DFA.unpack(u"\1\u00c7"), + DFA.unpack(u"\1\u00c8"), + DFA.unpack(u"\1\u00c9"), + DFA.unpack(u"\1\u00ca"), + DFA.unpack(u"\1\u00cb"), + DFA.unpack(u"\1\u00cc"), + DFA.unpack(u"\1\u00cd"), + DFA.unpack(u"\1\u00ce"), + DFA.unpack(u"\1\u00cf"), + DFA.unpack(u"\1\u00d0"), + DFA.unpack(u"\1\u00d1"), + DFA.unpack(u"\1\u00d2"), + DFA.unpack(u"\1\u00d3"), + DFA.unpack(u"\1\u00d4"), + DFA.unpack(u"\1\u00d5"), + DFA.unpack(u"\1\u00d6"), + DFA.unpack(u"\1\u00d8\1\u00d7"), + DFA.unpack(u"\1\u00d9"), + DFA.unpack(u"\1\u00da"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00dc"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00de"), + DFA.unpack(u"\1\u00df"), + DFA.unpack(u""), + DFA.unpack(u"\1\u00e0"), + DFA.unpack(u"\1\u00e1"), + DFA.unpack(u"\1\u00e2"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00e4"), + DFA.unpack(u"\1\u00e5"), + DFA.unpack(u"\1\u00e6"), + DFA.unpack(u"\1\u00e7"), + DFA.unpack(u"\1\u00e8"), + DFA.unpack(u"\1\u00ea\35\uffff\1\u00e9"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u00eb"), + DFA.unpack(u"\1\u00ec"), + DFA.unpack(u"\1\u00ed"), + DFA.unpack(u"\1\u00ee"), + DFA.unpack(u"\1\u00ef"), + DFA.unpack(u""), + DFA.unpack(u"\1\u00f0"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00f3"), + DFA.unpack(u"\1\u00f4"), + DFA.unpack(u"\1\u00f5"), + DFA.unpack(u"\1\u00f6"), + DFA.unpack(u"\1\u00f7"), + DFA.unpack(u"\1\u00f8"), + DFA.unpack(u"\1\u00f9"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00fb"), + DFA.unpack(u"\1\u00fc"), + DFA.unpack(u"\1\u00fd"), + 
DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u00ff"), + DFA.unpack(u"\1\u0100"), + DFA.unpack(u"\1\u0101"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0105"), + DFA.unpack(u"\1\u0106"), + DFA.unpack(u"\1\u0107"), + DFA.unpack(u"\1\u0108"), + DFA.unpack(u"\1\u0109"), + DFA.unpack(u""), + DFA.unpack(u"\1\u010a"), + DFA.unpack(u"\1\u010b"), + DFA.unpack(u"\1\u010c"), + DFA.unpack(u"\1\u010d"), + DFA.unpack(u"\1\u010e"), + DFA.unpack(u"\1\u010f\17\uffff\1\u0110"), + DFA.unpack(u"\1\u0111"), + DFA.unpack(u"\1\u0112"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0114"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0116"), + DFA.unpack(u"\1\u0117"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u0118"), + DFA.unpack(u"\1\u0119"), + DFA.unpack(u"\1\u011a"), + DFA.unpack(u"\1\u011b"), + DFA.unpack(u"\1\u011c"), + DFA.unpack(u"\1\u011d"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\u011f"), + DFA.unpack(u"\1\u0120"), + DFA.unpack(u"\1\u0121"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0122"), + DFA.unpack(u"\1\u0123"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0126"), + DFA.unpack(u"\1\u0127"), + DFA.unpack(u"\1\u0128"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u012a"), + 
DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u012c"), + DFA.unpack(u"\1\u012d"), + DFA.unpack(u"\1\u012e"), + DFA.unpack(u"\1\u012f"), + DFA.unpack(u"\1\u0130"), + DFA.unpack(u"\1\u0131"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\u0132\1" + u"\uffff\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0136"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\u013d"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0140"), + DFA.unpack(u"\1\u0141"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0143"), + DFA.unpack(u"\1\u0144"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0145"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0146"), + DFA.unpack(u"\1\u0147"), + DFA.unpack(u"\1\u0148"), + DFA.unpack(u"\1\u0149"), + DFA.unpack(u"\1\u014a"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u014c"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + 
DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u014e"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\u014f"), + DFA.unpack(u"\1\u0150"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0152"), + DFA.unpack(u"\1\u0153"), + DFA.unpack(u"\1\u0154"), + DFA.unpack(u"\1\u0155"), + DFA.unpack(u"\1\u0156"), + DFA.unpack(u"\1\u0157"), + DFA.unpack(u"\1\u0158"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u015f"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0161"), + DFA.unpack(u"\1\u0162"), + DFA.unpack(u"\1\u0163"), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u""), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0165"), + DFA.unpack(u"\1\u0166"), + DFA.unpack(u"\1\u0167"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0168"), + DFA.unpack(u"\1\u0169"), + DFA.unpack(u"\1\u016a"), + DFA.unpack(u"\1\u016b"), + DFA.unpack(u"\1\u016c"), + DFA.unpack(u"\1\u016d"), + DFA.unpack(u"\1\u016e"), + DFA.unpack(u"\1\u016f"), + DFA.unpack(u"\1\u0170"), + DFA.unpack(u"\1\u0171"), + DFA.unpack(u"\1\u0172"), + DFA.unpack(u"\1\u0173"), + DFA.unpack(u"\1\u0174"), + DFA.unpack(u"\1\u0175"), + 
DFA.unpack(u"\1\u0176"), + DFA.unpack(u"\1\u0177"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0179"), + DFA.unpack(u"\1\u017a"), + DFA.unpack(u""), + DFA.unpack(u"\1\u017b"), + DFA.unpack(u"\1\u017c"), + DFA.unpack(u"\1\u017d"), + DFA.unpack(u"\1\u017e"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"\1\u0180"), + DFA.unpack(u""), + DFA.unpack(u"\1\u0181"), + DFA.unpack(u"\1\u0182"), + DFA.unpack(u"\1\u0183"), + DFA.unpack(u"\1\u0184"), + DFA.unpack(u"\1\u0185"), + DFA.unpack(u"\1\u0186"), + DFA.unpack(u"\1\u0187"), + DFA.unpack(u"\1\u0188"), + DFA.unpack(u"\1\u0189"), + DFA.unpack(u"\1\75\13\uffff\12\75\7\uffff\32\75\4\uffff\1\75\1\uffff" + u"\32\75"), + DFA.unpack(u"") + ] + + # class definition for DFA #35 + + DFA35 = DFA + + diff --git a/BaseTools/Source/Python/Ecc/CParser.py b/BaseTools/Source/Python/Ecc/CParser.py new file mode 100644 index 0000000000..194a6aa451 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/CParser.py @@ -0,0 +1,18825 @@ +# $ANTLR 3.0.1 C.g 2009-02-16 16:02:50 + +from antlr3 import * +from antlr3.compat import set, frozenset +
+import CodeFragment
+import FileProfile
+ + + +# for convenience in actions +HIDDEN = BaseRecognizer.HIDDEN + +# token types +CHARACTER_LITERAL=8 +LETTER=11 +Exponent=15 +DECIMAL_LITERAL=7 +IntegerTypeSuffix=14 +UnicodeVocabulary=21 +HexDigit=13 +BS=20 +WS=19 +LINE_COMMAND=24 +COMMENT=22 +LINE_COMMENT=23 +OCTAL_LITERAL=6 +HEX_LITERAL=5 +FLOATING_POINT_LITERAL=10 +UnicodeEscape=18 +EscapeSequence=12 +EOF=-1 +STRING_LITERAL=9 +OctalEscape=17 +IDENTIFIER=4 +FloatTypeSuffix=16 + +# token names +tokenNames = [ + "<invalid>", "<EOR>", "<DOWN>", "<UP>", + "IDENTIFIER", "HEX_LITERAL", "OCTAL_LITERAL", "DECIMAL_LITERAL", "CHARACTER_LITERAL", + "STRING_LITERAL", "FLOATING_POINT_LITERAL", "LETTER", "EscapeSequence", + "HexDigit", "IntegerTypeSuffix", "Exponent", "FloatTypeSuffix", "OctalEscape", + "UnicodeEscape", "WS", "BS", "UnicodeVocabulary", "COMMENT", "LINE_COMMENT", + "LINE_COMMAND", "';'", "'typedef'", "','", "'='", "'extern'", "'static'", + "'auto'", "'register'", "'STATIC'", "'void'", "'char'", "'short'", "'int'", + "'long'", "'float'", "'double'", "'signed'", "'unsigned'", "'{'", "'}'", + "'struct'", "'union'", "':'", "'enum'", "'const'", "'volatile'", "'IN'", + "'OUT'", "'OPTIONAL'", "'CONST'", "'UNALIGNED'", "'VOLATILE'", "'GLOBAL_REMOVE_IF_UNREFERENCED'", + "'EFIAPI'", "'EFI_BOOTSERVICE'", "'EFI_RUNTIMESERVICE'", "'('", "')'", + "'['", "']'", "'*'", "'...'", "'+'", "'-'", "'/'", "'%'", "'++'", "'--'", + "'sizeof'", "'.'", "'->'", "'&'", "'~'", "'!'", "'*='", "'/='", "'%='", + "'+='", "'-='", "'<<='", "'>>='", "'&='", "'^='", "'|='", "'?'", "'||'", + "'&&'", "'|'", "'^'", "'=='", "'!='", "'<'", "'>'", "'<='", "'>='", + "'<<'", "'>>'", "'__asm__'", "'_asm'", "'__asm'", "'case'", "'default'", + "'if'", "'else'", "'switch'", "'while'", "'do'", "'for'", "'goto'", + "'continue'", "'break'", "'return'" +] + + +class function_definition_scope(object): + def __init__(self): + self.ModifierText = None + self.DeclText = None + self.LBLine = None + self.LBOffset = None + self.DeclLine = None + self.DeclOffset = 
None +class postfix_expression_scope(object): + def __init__(self): + self.FuncCallText = None + + +class CParser(Parser): + grammarFileName = "C.g" + tokenNames = tokenNames + + def __init__(self, input): + Parser.__init__(self, input) + self.ruleMemo = {} + + self.function_definition_stack = [] + self.postfix_expression_stack = [] + + + + + + +
+
+    # Debug helper: print a token's location and text as "line,offset:text".
+    def printTokenInfo(self, line, offset, tokenText):
+        print str(line)+ ',' + str(offset) + ':' + str(tokenText)
+
+    # Grammar action: record a predicate expression's text and its
+    # (line, offset) start/end span in FileProfile.PredicateExpressionList.
+    def StorePredicateExpression(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+        PredExp = CodeFragment.PredicateExpression(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.PredicateExpressionList.append(PredExp)
+
+    # Grammar action: record an enum definition's text and its
+    # (line, offset) start/end span in FileProfile.EnumerationDefinitionList.
+    def StoreEnumerationDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+        EnumDef = CodeFragment.EnumerationDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.EnumerationDefinitionList.append(EnumDef)
+
+    # Grammar action: record a struct/union definition's text and its
+    # (line, offset) start/end span in FileProfile.StructUnionDefinitionList.
+    def StoreStructUnionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, Text):
+        SUDef = CodeFragment.StructUnionDefinition(Text, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.StructUnionDefinitionList.append(SUDef)
+
+    # Grammar action: record a typedef (source type text, new name text, and
+    # the (line, offset) start/end span) in FileProfile.TypedefDefinitionList.
+    def StoreTypedefDefinition(self, StartLine, StartOffset, EndLine, EndOffset, FromText, ToText):
+        Tdef = CodeFragment.TypedefDefinition(FromText, ToText, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.TypedefDefinitionList.append(Tdef)
+
+    # Grammar action: record a function definition in
+    # FileProfile.FunctionDefinitionList. Captures the modifier text
+    # (declaration specifiers), the declarator text, the overall start/end
+    # span, the left-brace position, and the declarator position — all
+    # positions as (line, offset) pairs.
+    def StoreFunctionDefinition(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText, LeftBraceLine, LeftBraceOffset, DeclLine, DeclOffset):
+        FuncDef = CodeFragment.FunctionDefinition(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset), (LeftBraceLine, LeftBraceOffset), (DeclLine, DeclOffset))
+        FileProfile.FunctionDefinitionList.append(FuncDef)
+
+    # Grammar action: record a variable declaration (modifier text, declarator
+    # text, (line, offset) start/end span) in FileProfile.VariableDeclarationList.
+    def StoreVariableDeclaration(self, StartLine, StartOffset, EndLine, EndOffset, ModifierText, DeclText):
+        VarDecl = CodeFragment.VariableDeclaration(ModifierText, DeclText, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.VariableDeclarationList.append(VarDecl)
+
+    # Grammar action: record a function call site (callee name, parameter-list
+    # text, (line, offset) start/end span) in FileProfile.FunctionCallingList.
+    def StoreFunctionCalling(self, StartLine, StartOffset, EndLine, EndOffset, FuncName, ParamList):
+        FuncCall = CodeFragment.FunctionCalling(FuncName, ParamList, (StartLine, StartOffset), (EndLine, EndOffset))
+        FileProfile.FunctionCallingList.append(FuncCall)
+
+ + + + # $ANTLR start translation_unit + # C.g:50:1: translation_unit : ( external_declaration )* ; + def translation_unit(self, ): + + translation_unit_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 1): + return + + # C.g:51:2: ( ( external_declaration )* ) + # C.g:51:4: ( external_declaration )* + # C.g:51:4: ( external_declaration )* + while True: #loop1 + alt1 = 2 + LA1_0 = self.input.LA(1) + + if (LA1_0 == IDENTIFIER or LA1_0 == 26 or (29 <= LA1_0 <= 42) or (45 <= LA1_0 <= 46) or (48 <= LA1_0 <= 61) or LA1_0 == 65) : + alt1 = 1 + + + if alt1 == 1: + # C.g:0:0: external_declaration + self.following.append(self.FOLLOW_external_declaration_in_translation_unit64) + self.external_declaration() + self.following.pop() + if self.failed: + return + + + else: + break #loop1 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 1, translation_unit_StartIndex) + + pass + + return + + # $ANTLR end translation_unit + + + # $ANTLR start external_declaration + # C.g:62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? ); + def external_declaration(self, ): + + external_declaration_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 2): + return + + # C.g:67:2: ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? 
) + alt3 = 3 + LA3_0 = self.input.LA(1) + + if ((29 <= LA3_0 <= 33)) : + LA3_1 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 1, self.input) + + raise nvae + + elif (LA3_0 == 34) : + LA3_2 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 2, self.input) + + raise nvae + + elif (LA3_0 == 35) : + LA3_3 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 3, self.input) + + raise nvae + + elif (LA3_0 == 36) : + LA3_4 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? 
);", 3, 4, self.input) + + raise nvae + + elif (LA3_0 == 37) : + LA3_5 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 5, self.input) + + raise nvae + + elif (LA3_0 == 38) : + LA3_6 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 6, self.input) + + raise nvae + + elif (LA3_0 == 39) : + LA3_7 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 7, self.input) + + raise nvae + + elif (LA3_0 == 40) : + LA3_8 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? 
);", 3, 8, self.input) + + raise nvae + + elif (LA3_0 == 41) : + LA3_9 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 9, self.input) + + raise nvae + + elif (LA3_0 == 42) : + LA3_10 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 10, self.input) + + raise nvae + + elif ((45 <= LA3_0 <= 46)) : + LA3_11 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 11, self.input) + + raise nvae + + elif (LA3_0 == 48) : + LA3_12 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? 
);", 3, 12, self.input) + + raise nvae + + elif (LA3_0 == IDENTIFIER) : + LA3_13 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + elif (True) : + alt3 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 13, self.input) + + raise nvae + + elif (LA3_0 == 58) : + LA3_14 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 14, self.input) + + raise nvae + + elif (LA3_0 == 65) and (self.synpred4()): + alt3 = 1 + elif (LA3_0 == 59) : + LA3_16 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 16, self.input) + + raise nvae + + elif (LA3_0 == 60) : + LA3_17 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? 
);", 3, 17, self.input) + + raise nvae + + elif ((49 <= LA3_0 <= 57)) : + LA3_18 = self.input.LA(2) + + if (self.synpred4()) : + alt3 = 1 + elif (self.synpred5()) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 18, self.input) + + raise nvae + + elif (LA3_0 == 61) and (self.synpred4()): + alt3 = 1 + elif (LA3_0 == 26) : + alt3 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("62:1: external_declaration options {k=1; } : ( ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition | declaration | macro_statement ( ';' )? );", 3, 0, self.input) + + raise nvae + + if alt3 == 1: + # C.g:67:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' )=> function_definition + self.following.append(self.FOLLOW_function_definition_in_external_declaration103) + self.function_definition() + self.following.pop() + if self.failed: + return + + + elif alt3 == 2: + # C.g:68:4: declaration + self.following.append(self.FOLLOW_declaration_in_external_declaration108) + self.declaration() + self.following.pop() + if self.failed: + return + + + elif alt3 == 3: + # C.g:69:4: macro_statement ( ';' )? + self.following.append(self.FOLLOW_macro_statement_in_external_declaration113) + self.macro_statement() + self.following.pop() + if self.failed: + return + # C.g:69:20: ( ';' )? 
+ alt2 = 2 + LA2_0 = self.input.LA(1) + + if (LA2_0 == 25) : + alt2 = 1 + if alt2 == 1: + # C.g:69:21: ';' + self.match(self.input, 25, self.FOLLOW_25_in_external_declaration116) + if self.failed: + return + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 2, external_declaration_StartIndex) + + pass + + return + + # $ANTLR end external_declaration + + class function_definition_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start function_definition + # C.g:74:1: function_definition : (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ; + def function_definition(self, ): + self.function_definition_stack.append(function_definition_scope()) + retval = self.function_definition_return() + retval.start = self.input.LT(1) + function_definition_StartIndex = self.input.index() + d = None + + a = None + + b = None + + declarator1 = None + + +
+ self.function_definition_stack[-1].ModifierText = ''
+ self.function_definition_stack[-1].DeclText = ''
+ self.function_definition_stack[-1].LBLine = 0
+ self.function_definition_stack[-1].LBOffset = 0
+ self.function_definition_stack[-1].DeclLine = 0
+ self.function_definition_stack[-1].DeclOffset = 0
+ + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 3): + return retval + + # C.g:94:2: ( (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) ) + # C.g:94:4: (d= declaration_specifiers )? declarator ( ( declaration )+ a= compound_statement | b= compound_statement ) + # C.g:94:5: (d= declaration_specifiers )? + alt4 = 2 + LA4 = self.input.LA(1) + if LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33 or LA4 == 34 or LA4 == 35 or LA4 == 36 or LA4 == 37 or LA4 == 38 or LA4 == 39 or LA4 == 40 or LA4 == 41 or LA4 == 42 or LA4 == 45 or LA4 == 46 or LA4 == 48 or LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57: + alt4 = 1 + elif LA4 == IDENTIFIER: + LA4 = self.input.LA(2) + if LA4 == 65: + alt4 = 1 + elif LA4 == 58: + LA4_21 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 59: + LA4_22 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 60: + LA4_23 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == IDENTIFIER: + LA4_24 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 61: + LA4_25 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 29 or LA4 == 30 or LA4 == 31 or LA4 == 32 or LA4 == 33: + LA4_26 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 34: + LA4_27 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 35: + LA4_28 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 36: + LA4_29 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 37: + LA4_30 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 38: + LA4_31 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 39: + LA4_32 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 40: + LA4_33 = self.input.LA(3) + + if 
(self.synpred7()) : + alt4 = 1 + elif LA4 == 41: + LA4_34 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 42: + LA4_35 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 45 or LA4 == 46: + LA4_36 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 48: + LA4_37 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 49 or LA4 == 50 or LA4 == 51 or LA4 == 52 or LA4 == 53 or LA4 == 54 or LA4 == 55 or LA4 == 56 or LA4 == 57: + LA4_38 = self.input.LA(3) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 58: + LA4_14 = self.input.LA(2) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 59: + LA4_16 = self.input.LA(2) + + if (self.synpred7()) : + alt4 = 1 + elif LA4 == 60: + LA4_17 = self.input.LA(2) + + if (self.synpred7()) : + alt4 = 1 + if alt4 == 1: + # C.g:0:0: d= declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_function_definition147) + d = self.declaration_specifiers() + self.following.pop() + if self.failed: + return retval + + + + self.following.append(self.FOLLOW_declarator_in_function_definition150) + declarator1 = self.declarator() + self.following.pop() + if self.failed: + return retval + # C.g:95:3: ( ( declaration )+ a= compound_statement | b= compound_statement ) + alt6 = 2 + LA6_0 = self.input.LA(1) + + if (LA6_0 == IDENTIFIER or LA6_0 == 26 or (29 <= LA6_0 <= 42) or (45 <= LA6_0 <= 46) or (48 <= LA6_0 <= 60)) : + alt6 = 1 + elif (LA6_0 == 43) : + alt6 = 2 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("95:3: ( ( declaration )+ a= compound_statement | b= compound_statement )", 6, 0, self.input) + + raise nvae + + if alt6 == 1: + # C.g:95:5: ( declaration )+ a= compound_statement + # C.g:95:5: ( declaration )+ + cnt5 = 0 + while True: #loop5 + alt5 = 2 + LA5_0 = self.input.LA(1) + + if (LA5_0 == IDENTIFIER or LA5_0 == 26 or (29 <= LA5_0 <= 42) or (45 <= LA5_0 <= 46) or (48 <= LA5_0 <= 
60)) : + alt5 = 1 + + + if alt5 == 1: + # C.g:0:0: declaration + self.following.append(self.FOLLOW_declaration_in_function_definition156) + self.declaration() + self.following.pop() + if self.failed: + return retval + + + else: + if cnt5 >= 1: + break #loop5 + + if self.backtracking > 0: + self.failed = True + return retval + + eee = EarlyExitException(5, self.input) + raise eee + + cnt5 += 1 + + + self.following.append(self.FOLLOW_compound_statement_in_function_definition161) + a = self.compound_statement() + self.following.pop() + if self.failed: + return retval + + + elif alt6 == 2: + # C.g:96:5: b= compound_statement + self.following.append(self.FOLLOW_compound_statement_in_function_definition170) + b = self.compound_statement() + self.following.pop() + if self.failed: + return retval + + + + if self.backtracking == 0: +
+ # Grammar action: stash the pieces of the just-parsed function definition
+ # in the current function_definition scope record (top of the scope stack)
+ # so the StoreFunctionDefinition call at rule exit can persist them.
+ if d != None:
+ # d holds the optional declaration_specifiers; keep its source text verbatim.
+ self.function_definition_stack[-1].ModifierText = self.input.toString(d.start,d.stop)
+ else:
+ self.function_definition_stack[-1].ModifierText = ''
+ # Declarator source text plus its start position (line / column) in the input.
+ self.function_definition_stack[-1].DeclText = self.input.toString(declarator1.start,declarator1.stop)
+ self.function_definition_stack[-1].DeclLine = declarator1.start.line
+ self.function_definition_stack[-1].DeclOffset = declarator1.start.charPositionInLine
+ # a / b are the two compound_statement alternatives (with / without leading
+ # declarations); exactly one was matched above, so take the body start
+ # position (LB — presumably the left brace '{'; confirm against C.g) from it.
+ if a != None:
+ self.function_definition_stack[-1].LBLine = a.start.line
+ self.function_definition_stack[-1].LBOffset = a.start.charPositionInLine
+ else:
+ self.function_definition_stack[-1].LBLine = b.start.line
+ self.function_definition_stack[-1].LBOffset = b.start.charPositionInLine
+ + + + + + retval.stop = self.input.LT(-1) + + if self.backtracking == 0: +
+ self.StoreFunctionDefinition(retval.start.line, retval.start.charPositionInLine, retval.stop.line, retval.stop.charPositionInLine, self.function_definition_stack[-1].ModifierText, self.function_definition_stack[-1].DeclText, self.function_definition_stack[-1].LBLine, self.function_definition_stack[-1].LBOffset, self.function_definition_stack[-1].DeclLine, self.function_definition_stack[-1].DeclOffset)
+ + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 3, function_definition_StartIndex) + + self.function_definition_stack.pop() + pass + + return retval + + # $ANTLR end function_definition + + + # $ANTLR start declaration + # C.g:114:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' ); + def declaration(self, ): + + declaration_StartIndex = self.input.index() + a = None + d = None + e = None + b = None + + c = None + + s = None + + t = None + + + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 4): + return + + # C.g:115:2: (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' ) + alt9 = 2 + LA9_0 = self.input.LA(1) + + if (LA9_0 == 26) : + alt9 = 1 + elif (LA9_0 == IDENTIFIER or (29 <= LA9_0 <= 42) or (45 <= LA9_0 <= 46) or (48 <= LA9_0 <= 60)) : + alt9 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("114:1: declaration : (a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' | s= declaration_specifiers (t= init_declarator_list )? e= ';' );", 9, 0, self.input) + + raise nvae + + if alt9 == 1: + # C.g:115:4: a= 'typedef' (b= declaration_specifiers )? c= init_declarator_list d= ';' + a = self.input.LT(1) + self.match(self.input, 26, self.FOLLOW_26_in_declaration193) + if self.failed: + return + # C.g:115:17: (b= declaration_specifiers )? 
+ alt7 = 2 + LA7 = self.input.LA(1) + if LA7 == 29 or LA7 == 30 or LA7 == 31 or LA7 == 32 or LA7 == 33 or LA7 == 34 or LA7 == 35 or LA7 == 36 or LA7 == 37 or LA7 == 38 or LA7 == 39 or LA7 == 40 or LA7 == 41 or LA7 == 42 or LA7 == 45 or LA7 == 46 or LA7 == 48 or LA7 == 49 or LA7 == 50 or LA7 == 51 or LA7 == 52 or LA7 == 53 or LA7 == 54 or LA7 == 55 or LA7 == 56 or LA7 == 57: + alt7 = 1 + elif LA7 == IDENTIFIER: + LA7_13 = self.input.LA(2) + + if (LA7_13 == IDENTIFIER or (29 <= LA7_13 <= 42) or (45 <= LA7_13 <= 46) or (48 <= LA7_13 <= 60) or LA7_13 == 65) : + alt7 = 1 + elif (LA7_13 == 61) : + LA7_25 = self.input.LA(3) + + if (self.synpred10()) : + alt7 = 1 + elif LA7 == 58: + LA7_14 = self.input.LA(2) + + if (self.synpred10()) : + alt7 = 1 + elif LA7 == 59: + LA7_16 = self.input.LA(2) + + if (self.synpred10()) : + alt7 = 1 + elif LA7 == 60: + LA7_17 = self.input.LA(2) + + if (self.synpred10()) : + alt7 = 1 + if alt7 == 1: + # C.g:0:0: b= declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_declaration197) + b = self.declaration_specifiers() + self.following.pop() + if self.failed: + return + + + + self.following.append(self.FOLLOW_init_declarator_list_in_declaration206) + c = self.init_declarator_list() + self.following.pop() + if self.failed: + return + d = self.input.LT(1) + self.match(self.input, 25, self.FOLLOW_25_in_declaration210) + if self.failed: + return + if self.backtracking == 0: +
+ # Grammar action for the 'typedef' alternative: a = 'typedef' token,
+ # d = terminating ';' token, b = optional declaration_specifiers,
+ # c = init_declarator_list. Positions come straight from the tokens.
+ if b != None:
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, self.input.toString(b.start,b.stop), self.input.toString(c.start,c.stop))
+ else:
+ # No specifiers were present — store an empty modifier string instead.
+ self.StoreTypedefDefinition(a.line, a.charPositionInLine, d.line, d.charPositionInLine, '', self.input.toString(c.start,c.stop))
+ + + + + elif alt9 == 2: + # C.g:123:4: s= declaration_specifiers (t= init_declarator_list )? e= ';' + self.following.append(self.FOLLOW_declaration_specifiers_in_declaration224) + s = self.declaration_specifiers() + self.following.pop() + if self.failed: + return + # C.g:123:30: (t= init_declarator_list )? + alt8 = 2 + LA8_0 = self.input.LA(1) + + if (LA8_0 == IDENTIFIER or (58 <= LA8_0 <= 61) or LA8_0 == 65) : + alt8 = 1 + if alt8 == 1: + # C.g:0:0: t= init_declarator_list + self.following.append(self.FOLLOW_init_declarator_list_in_declaration228) + t = self.init_declarator_list() + self.following.pop() + if self.failed: + return + + + + e = self.input.LT(1) + self.match(self.input, 25, self.FOLLOW_25_in_declaration233) + if self.failed: + return + if self.backtracking == 0: +
+ # Grammar action for the non-typedef alternative: only record a variable
+ # declaration when an init_declarator_list (t) was actually matched; a
+ # declaration consisting solely of specifiers is deliberately not stored here.
+ if t != None:
+ self.StoreVariableDeclaration(s.start.line, s.start.charPositionInLine, t.start.line, t.start.charPositionInLine, self.input.toString(s.start,s.stop), self.input.toString(t.start,t.stop))
+ + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 4, declaration_StartIndex) + + pass + + return + + # $ANTLR end declaration + + class declaration_specifiers_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start declaration_specifiers + # C.g:130:1: declaration_specifiers : ( storage_class_specifier | type_specifier | type_qualifier )+ ; + def declaration_specifiers(self, ): + + retval = self.declaration_specifiers_return() + retval.start = self.input.LT(1) + declaration_specifiers_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 5): + return retval + + # C.g:131:2: ( ( storage_class_specifier | type_specifier | type_qualifier )+ ) + # C.g:131:6: ( storage_class_specifier | type_specifier | type_qualifier )+ + # C.g:131:6: ( storage_class_specifier | type_specifier | type_qualifier )+ + cnt10 = 0 + while True: #loop10 + alt10 = 4 + LA10 = self.input.LA(1) + if LA10 == 58: + LA10_2 = self.input.LA(2) + + if (self.synpred15()) : + alt10 = 3 + + + elif LA10 == 59: + LA10_3 = self.input.LA(2) + + if (self.synpred15()) : + alt10 = 3 + + + elif LA10 == 60: + LA10_4 = self.input.LA(2) + + if (self.synpred15()) : + alt10 = 3 + + + elif LA10 == IDENTIFIER: + LA10_5 = self.input.LA(2) + + if (self.synpred14()) : + alt10 = 2 + + + elif LA10 == 53: + LA10_9 = self.input.LA(2) + + if (self.synpred15()) : + alt10 = 3 + + + elif LA10 == 29 or LA10 == 30 or LA10 == 31 or LA10 == 32 or LA10 == 33: + alt10 = 1 + elif LA10 == 34 or LA10 == 35 or LA10 == 36 or LA10 == 37 or LA10 == 38 or LA10 == 39 or LA10 == 40 or LA10 == 41 or LA10 == 42 or LA10 == 45 or LA10 == 46 or LA10 == 48: + alt10 = 2 + elif LA10 == 49 or LA10 == 50 or LA10 == 51 or LA10 == 52 or LA10 == 54 or LA10 == 55 or LA10 == 56 or LA10 == 57: + alt10 = 3 + + if alt10 == 1: + # C.g:131:10: 
storage_class_specifier + self.following.append(self.FOLLOW_storage_class_specifier_in_declaration_specifiers254) + self.storage_class_specifier() + self.following.pop() + if self.failed: + return retval + + + elif alt10 == 2: + # C.g:132:7: type_specifier + self.following.append(self.FOLLOW_type_specifier_in_declaration_specifiers262) + self.type_specifier() + self.following.pop() + if self.failed: + return retval + + + elif alt10 == 3: + # C.g:133:13: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_declaration_specifiers276) + self.type_qualifier() + self.following.pop() + if self.failed: + return retval + + + else: + if cnt10 >= 1: + break #loop10 + + if self.backtracking > 0: + self.failed = True + return retval + + eee = EarlyExitException(10, self.input) + raise eee + + cnt10 += 1 + + + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 5, declaration_specifiers_StartIndex) + + pass + + return retval + + # $ANTLR end declaration_specifiers + + class init_declarator_list_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start init_declarator_list + # C.g:137:1: init_declarator_list : init_declarator ( ',' init_declarator )* ; + def init_declarator_list(self, ): + + retval = self.init_declarator_list_return() + retval.start = self.input.LT(1) + init_declarator_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 6): + return retval + + # C.g:138:2: ( init_declarator ( ',' init_declarator )* ) + # C.g:138:4: init_declarator ( ',' init_declarator )* + self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list298) + self.init_declarator() + self.following.pop() + if self.failed: + return retval + # C.g:138:20: ( ',' init_declarator )* + while True: #loop11 + alt11 = 2 + LA11_0 = 
self.input.LA(1) + + if (LA11_0 == 27) : + alt11 = 1 + + + if alt11 == 1: + # C.g:138:21: ',' init_declarator + self.match(self.input, 27, self.FOLLOW_27_in_init_declarator_list301) + if self.failed: + return retval + self.following.append(self.FOLLOW_init_declarator_in_init_declarator_list303) + self.init_declarator() + self.following.pop() + if self.failed: + return retval + + + else: + break #loop11 + + + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 6, init_declarator_list_StartIndex) + + pass + + return retval + + # $ANTLR end init_declarator_list + + + # $ANTLR start init_declarator + # C.g:141:1: init_declarator : declarator ( '=' initializer )? ; + def init_declarator(self, ): + + init_declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 7): + return + + # C.g:142:2: ( declarator ( '=' initializer )? ) + # C.g:142:4: declarator ( '=' initializer )? + self.following.append(self.FOLLOW_declarator_in_init_declarator316) + self.declarator() + self.following.pop() + if self.failed: + return + # C.g:142:15: ( '=' initializer )? 
+ alt12 = 2 + LA12_0 = self.input.LA(1) + + if (LA12_0 == 28) : + alt12 = 1 + if alt12 == 1: + # C.g:142:16: '=' initializer + self.match(self.input, 28, self.FOLLOW_28_in_init_declarator319) + if self.failed: + return + self.following.append(self.FOLLOW_initializer_in_init_declarator321) + self.initializer() + self.following.pop() + if self.failed: + return + + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 7, init_declarator_StartIndex) + + pass + + return + + # $ANTLR end init_declarator + + + # $ANTLR start storage_class_specifier + # C.g:145:1: storage_class_specifier : ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' ); + def storage_class_specifier(self, ): + + storage_class_specifier_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 8): + return + + # C.g:146:2: ( 'extern' | 'static' | 'auto' | 'register' | 'STATIC' ) + # C.g: + if (29 <= self.input.LA(1) <= 33): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_storage_class_specifier0 + ) + raise mse + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 8, storage_class_specifier_StartIndex) + + pass + + return + + # $ANTLR end storage_class_specifier + + + # $ANTLR start type_specifier + # C.g:153:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id ); + def type_specifier(self, ): + + type_specifier_StartIndex = self.input.index() + s = 
None + + e = None + + + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 9): + return + + # C.g:154:2: ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id ) + alt13 = 12 + LA13_0 = self.input.LA(1) + + if (LA13_0 == 34) : + alt13 = 1 + elif (LA13_0 == 35) : + alt13 = 2 + elif (LA13_0 == 36) : + alt13 = 3 + elif (LA13_0 == 37) : + alt13 = 4 + elif (LA13_0 == 38) : + alt13 = 5 + elif (LA13_0 == 39) : + alt13 = 6 + elif (LA13_0 == 40) : + alt13 = 7 + elif (LA13_0 == 41) : + alt13 = 8 + elif (LA13_0 == 42) : + alt13 = 9 + elif ((45 <= LA13_0 <= 46)) : + alt13 = 10 + elif (LA13_0 == 48) : + alt13 = 11 + elif (LA13_0 == IDENTIFIER) and (self.synpred34()): + alt13 = 12 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("153:1: type_specifier : ( 'void' | 'char' | 'short' | 'int' | 'long' | 'float' | 'double' | 'signed' | 'unsigned' | s= struct_or_union_specifier | e= enum_specifier | ( IDENTIFIER ( type_qualifier )* declarator )=> type_id );", 13, 0, self.input) + + raise nvae + + if alt13 == 1: + # C.g:154:4: 'void' + self.match(self.input, 34, self.FOLLOW_34_in_type_specifier366) + if self.failed: + return + + + elif alt13 == 2: + # C.g:155:4: 'char' + self.match(self.input, 35, self.FOLLOW_35_in_type_specifier371) + if self.failed: + return + + + elif alt13 == 3: + # C.g:156:4: 'short' + self.match(self.input, 36, self.FOLLOW_36_in_type_specifier376) + if self.failed: + return + + + elif alt13 == 4: + # C.g:157:4: 'int' + self.match(self.input, 37, self.FOLLOW_37_in_type_specifier381) + if self.failed: + return + + + elif alt13 == 5: + # C.g:158:4: 'long' + self.match(self.input, 38, self.FOLLOW_38_in_type_specifier386) + if self.failed: + return + + + elif alt13 == 6: + # C.g:159:4: 'float' + self.match(self.input, 39, 
self.FOLLOW_39_in_type_specifier391) + if self.failed: + return + + + elif alt13 == 7: + # C.g:160:4: 'double' + self.match(self.input, 40, self.FOLLOW_40_in_type_specifier396) + if self.failed: + return + + + elif alt13 == 8: + # C.g:161:4: 'signed' + self.match(self.input, 41, self.FOLLOW_41_in_type_specifier401) + if self.failed: + return + + + elif alt13 == 9: + # C.g:162:4: 'unsigned' + self.match(self.input, 42, self.FOLLOW_42_in_type_specifier406) + if self.failed: + return + + + elif alt13 == 10: + # C.g:163:4: s= struct_or_union_specifier + self.following.append(self.FOLLOW_struct_or_union_specifier_in_type_specifier413) + s = self.struct_or_union_specifier() + self.following.pop() + if self.failed: + return + if self.backtracking == 0: +
+ # Grammar action: record the full struct/union definition text and extent.
+ # Guarded on s.stop — presumably it can be None on an incomplete match
+ # during backtracking; TODO confirm against the ANTLR runtime semantics.
+ if s.stop != None:
+ self.StoreStructUnionDefinition(s.start.line, s.start.charPositionInLine, s.stop.line, s.stop.charPositionInLine, self.input.toString(s.start,s.stop))
+ + + + + elif alt13 == 11: + # C.g:168:4: e= enum_specifier + self.following.append(self.FOLLOW_enum_specifier_in_type_specifier423) + e = self.enum_specifier() + self.following.pop() + if self.failed: + return + if self.backtracking == 0: +
+ # Grammar action: record the full enum definition text and extent, with the
+ # same stop-token guard as the struct/union case above it in this rule.
+ if e.stop != None:
+ self.StoreEnumerationDefinition(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))
+ + + + + elif alt13 == 12: + # C.g:173:4: ( IDENTIFIER ( type_qualifier )* declarator )=> type_id + self.following.append(self.FOLLOW_type_id_in_type_specifier441) + self.type_id() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 9, type_specifier_StartIndex) + + pass + + return + + # $ANTLR end type_specifier + + + # $ANTLR start type_id + # C.g:176:1: type_id : IDENTIFIER ; + def type_id(self, ): + + type_id_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 10): + return + + # C.g:177:5: ( IDENTIFIER ) + # C.g:177:9: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_type_id457) + if self.failed: + return + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 10, type_id_StartIndex) + + pass + + return + + # $ANTLR end type_id + + class struct_or_union_specifier_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start struct_or_union_specifier + # C.g:181:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER ); + def struct_or_union_specifier(self, ): + + retval = self.struct_or_union_specifier_return() + retval.start = self.input.LT(1) + struct_or_union_specifier_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 11): + return retval + + # C.g:183:2: ( struct_or_union ( IDENTIFIER )? 
'{' struct_declaration_list '}' | struct_or_union IDENTIFIER ) + alt15 = 2 + LA15_0 = self.input.LA(1) + + if ((45 <= LA15_0 <= 46)) : + LA15_1 = self.input.LA(2) + + if (LA15_1 == IDENTIFIER) : + LA15_2 = self.input.LA(3) + + if (LA15_2 == 43) : + alt15 = 1 + elif (LA15_2 == EOF or LA15_2 == IDENTIFIER or LA15_2 == 25 or LA15_2 == 27 or (29 <= LA15_2 <= 42) or (45 <= LA15_2 <= 63) or LA15_2 == 65) : + alt15 = 2 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("181:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 2, self.input) + + raise nvae + + elif (LA15_1 == 43) : + alt15 = 1 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("181:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 1, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("181:1: struct_or_union_specifier options {k=3; } : ( struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' | struct_or_union IDENTIFIER );", 15, 0, self.input) + + raise nvae + + if alt15 == 1: + # C.g:183:4: struct_or_union ( IDENTIFIER )? '{' struct_declaration_list '}' + self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier484) + self.struct_or_union() + self.following.pop() + if self.failed: + return retval + # C.g:183:20: ( IDENTIFIER )? 
+ alt14 = 2 + LA14_0 = self.input.LA(1) + + if (LA14_0 == IDENTIFIER) : + alt14 = 1 + if alt14 == 1: + # C.g:0:0: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier486) + if self.failed: + return retval + + + + self.match(self.input, 43, self.FOLLOW_43_in_struct_or_union_specifier489) + if self.failed: + return retval + self.following.append(self.FOLLOW_struct_declaration_list_in_struct_or_union_specifier491) + self.struct_declaration_list() + self.following.pop() + if self.failed: + return retval + self.match(self.input, 44, self.FOLLOW_44_in_struct_or_union_specifier493) + if self.failed: + return retval + + + elif alt15 == 2: + # C.g:184:4: struct_or_union IDENTIFIER + self.following.append(self.FOLLOW_struct_or_union_in_struct_or_union_specifier498) + self.struct_or_union() + self.following.pop() + if self.failed: + return retval + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_struct_or_union_specifier500) + if self.failed: + return retval + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 11, struct_or_union_specifier_StartIndex) + + pass + + return retval + + # $ANTLR end struct_or_union_specifier + + + # $ANTLR start struct_or_union + # C.g:187:1: struct_or_union : ( 'struct' | 'union' ); + def struct_or_union(self, ): + + struct_or_union_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 12): + return + + # C.g:188:2: ( 'struct' | 'union' ) + # C.g: + if (45 <= self.input.LA(1) <= 46): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_struct_or_union0 + ) + raise mse + + + + + + + 
except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 12, struct_or_union_StartIndex) + + pass + + return + + # $ANTLR end struct_or_union + + + # $ANTLR start struct_declaration_list + # C.g:192:1: struct_declaration_list : ( struct_declaration )+ ; + def struct_declaration_list(self, ): + + struct_declaration_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 13): + return + + # C.g:193:2: ( ( struct_declaration )+ ) + # C.g:193:4: ( struct_declaration )+ + # C.g:193:4: ( struct_declaration )+ + cnt16 = 0 + while True: #loop16 + alt16 = 2 + LA16_0 = self.input.LA(1) + + if (LA16_0 == IDENTIFIER or (34 <= LA16_0 <= 42) or (45 <= LA16_0 <= 46) or (48 <= LA16_0 <= 60)) : + alt16 = 1 + + + if alt16 == 1: + # C.g:0:0: struct_declaration + self.following.append(self.FOLLOW_struct_declaration_in_struct_declaration_list527) + self.struct_declaration() + self.following.pop() + if self.failed: + return + + + else: + if cnt16 >= 1: + break #loop16 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(16, self.input) + raise eee + + cnt16 += 1 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 13, struct_declaration_list_StartIndex) + + pass + + return + + # $ANTLR end struct_declaration_list + + + # $ANTLR start struct_declaration + # C.g:196:1: struct_declaration : specifier_qualifier_list struct_declarator_list ';' ; + def struct_declaration(self, ): + + struct_declaration_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 14): + return + + # C.g:197:2: ( specifier_qualifier_list struct_declarator_list ';' ) + # C.g:197:4: specifier_qualifier_list struct_declarator_list ';' + 
self.following.append(self.FOLLOW_specifier_qualifier_list_in_struct_declaration539) + self.specifier_qualifier_list() + self.following.pop() + if self.failed: + return + self.following.append(self.FOLLOW_struct_declarator_list_in_struct_declaration541) + self.struct_declarator_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_struct_declaration543) + if self.failed: + return + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 14, struct_declaration_StartIndex) + + pass + + return + + # $ANTLR end struct_declaration + + + # $ANTLR start specifier_qualifier_list + # C.g:200:1: specifier_qualifier_list : ( type_qualifier | type_specifier )+ ; + def specifier_qualifier_list(self, ): + + specifier_qualifier_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 15): + return + + # C.g:201:2: ( ( type_qualifier | type_specifier )+ ) + # C.g:201:4: ( type_qualifier | type_specifier )+ + # C.g:201:4: ( type_qualifier | type_specifier )+ + cnt17 = 0 + while True: #loop17 + alt17 = 3 + LA17 = self.input.LA(1) + if LA17 == 58: + LA17_2 = self.input.LA(2) + + if (self.synpred39()) : + alt17 = 1 + + + elif LA17 == 59: + LA17_3 = self.input.LA(2) + + if (self.synpred39()) : + alt17 = 1 + + + elif LA17 == 60: + LA17_4 = self.input.LA(2) + + if (self.synpred39()) : + alt17 = 1 + + + elif LA17 == IDENTIFIER: + LA17 = self.input.LA(2) + if LA17 == EOF or LA17 == IDENTIFIER or LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48 or LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57 or LA17 == 58 or LA17 == 59 or LA17 == 60 or LA17 == 62 or LA17 == 65: + alt17 = 2 + elif LA17 == 
61: + LA17_94 = self.input.LA(3) + + if (self.synpred40()) : + alt17 = 2 + + + elif LA17 == 47: + LA17_95 = self.input.LA(3) + + if (self.synpred40()) : + alt17 = 2 + + + elif LA17 == 63: + LA17_96 = self.input.LA(3) + + if (self.synpred40()) : + alt17 = 2 + + + + elif LA17 == 49 or LA17 == 50 or LA17 == 51 or LA17 == 52 or LA17 == 53 or LA17 == 54 or LA17 == 55 or LA17 == 56 or LA17 == 57: + alt17 = 1 + elif LA17 == 34 or LA17 == 35 or LA17 == 36 or LA17 == 37 or LA17 == 38 or LA17 == 39 or LA17 == 40 or LA17 == 41 or LA17 == 42 or LA17 == 45 or LA17 == 46 or LA17 == 48: + alt17 = 2 + + if alt17 == 1: + # C.g:201:6: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_specifier_qualifier_list556) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + elif alt17 == 2: + # C.g:201:23: type_specifier + self.following.append(self.FOLLOW_type_specifier_in_specifier_qualifier_list560) + self.type_specifier() + self.following.pop() + if self.failed: + return + + + else: + if cnt17 >= 1: + break #loop17 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(17, self.input) + raise eee + + cnt17 += 1 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 15, specifier_qualifier_list_StartIndex) + + pass + + return + + # $ANTLR end specifier_qualifier_list + + + # $ANTLR start struct_declarator_list + # C.g:204:1: struct_declarator_list : struct_declarator ( ',' struct_declarator )* ; + def struct_declarator_list(self, ): + + struct_declarator_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 16): + return + + # C.g:205:2: ( struct_declarator ( ',' struct_declarator )* ) + # C.g:205:4: struct_declarator ( ',' struct_declarator )* + self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list574) + 
self.struct_declarator() + self.following.pop() + if self.failed: + return + # C.g:205:22: ( ',' struct_declarator )* + while True: #loop18 + alt18 = 2 + LA18_0 = self.input.LA(1) + + if (LA18_0 == 27) : + alt18 = 1 + + + if alt18 == 1: + # C.g:205:23: ',' struct_declarator + self.match(self.input, 27, self.FOLLOW_27_in_struct_declarator_list577) + if self.failed: + return + self.following.append(self.FOLLOW_struct_declarator_in_struct_declarator_list579) + self.struct_declarator() + self.following.pop() + if self.failed: + return + + + else: + break #loop18 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 16, struct_declarator_list_StartIndex) + + pass + + return + + # $ANTLR end struct_declarator_list + + + # $ANTLR start struct_declarator + # C.g:208:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression ); + def struct_declarator(self, ): + + struct_declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 17): + return + + # C.g:209:2: ( declarator ( ':' constant_expression )? | ':' constant_expression ) + alt20 = 2 + LA20_0 = self.input.LA(1) + + if (LA20_0 == IDENTIFIER or (58 <= LA20_0 <= 61) or LA20_0 == 65) : + alt20 = 1 + elif (LA20_0 == 47) : + alt20 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("208:1: struct_declarator : ( declarator ( ':' constant_expression )? | ':' constant_expression );", 20, 0, self.input) + + raise nvae + + if alt20 == 1: + # C.g:209:4: declarator ( ':' constant_expression )? + self.following.append(self.FOLLOW_declarator_in_struct_declarator592) + self.declarator() + self.following.pop() + if self.failed: + return + # C.g:209:15: ( ':' constant_expression )? 
+ alt19 = 2 + LA19_0 = self.input.LA(1) + + if (LA19_0 == 47) : + alt19 = 1 + if alt19 == 1: + # C.g:209:16: ':' constant_expression + self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator595) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_struct_declarator597) + self.constant_expression() + self.following.pop() + if self.failed: + return + + + + + + elif alt20 == 2: + # C.g:210:4: ':' constant_expression + self.match(self.input, 47, self.FOLLOW_47_in_struct_declarator604) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_struct_declarator606) + self.constant_expression() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 17, struct_declarator_StartIndex) + + pass + + return + + # $ANTLR end struct_declarator + + class enum_specifier_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start enum_specifier + # C.g:213:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER ); + def enum_specifier(self, ): + + retval = self.enum_specifier_return() + retval.start = self.input.LT(1) + enum_specifier_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 18): + return retval + + # C.g:215:2: ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? 
'}' | 'enum' IDENTIFIER ) + alt23 = 3 + LA23_0 = self.input.LA(1) + + if (LA23_0 == 48) : + LA23_1 = self.input.LA(2) + + if (LA23_1 == IDENTIFIER) : + LA23_2 = self.input.LA(3) + + if (LA23_2 == 43) : + alt23 = 2 + elif (LA23_2 == EOF or LA23_2 == IDENTIFIER or LA23_2 == 25 or LA23_2 == 27 or (29 <= LA23_2 <= 42) or (45 <= LA23_2 <= 63) or LA23_2 == 65) : + alt23 = 3 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("213:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 2, self.input) + + raise nvae + + elif (LA23_1 == 43) : + alt23 = 1 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("213:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 1, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("213:1: enum_specifier options {k=3; } : ( 'enum' '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' | 'enum' IDENTIFIER );", 23, 0, self.input) + + raise nvae + + if alt23 == 1: + # C.g:215:4: 'enum' '{' enumerator_list ( ',' )? '}' + self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier624) + if self.failed: + return retval + self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier626) + if self.failed: + return retval + self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier628) + self.enumerator_list() + self.following.pop() + if self.failed: + return retval + # C.g:215:31: ( ',' )? 
+ alt21 = 2 + LA21_0 = self.input.LA(1) + + if (LA21_0 == 27) : + alt21 = 1 + if alt21 == 1: + # C.g:0:0: ',' + self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier630) + if self.failed: + return retval + + + + self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier633) + if self.failed: + return retval + + + elif alt23 == 2: + # C.g:216:4: 'enum' IDENTIFIER '{' enumerator_list ( ',' )? '}' + self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier638) + if self.failed: + return retval + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier640) + if self.failed: + return retval + self.match(self.input, 43, self.FOLLOW_43_in_enum_specifier642) + if self.failed: + return retval + self.following.append(self.FOLLOW_enumerator_list_in_enum_specifier644) + self.enumerator_list() + self.following.pop() + if self.failed: + return retval + # C.g:216:42: ( ',' )? + alt22 = 2 + LA22_0 = self.input.LA(1) + + if (LA22_0 == 27) : + alt22 = 1 + if alt22 == 1: + # C.g:0:0: ',' + self.match(self.input, 27, self.FOLLOW_27_in_enum_specifier646) + if self.failed: + return retval + + + + self.match(self.input, 44, self.FOLLOW_44_in_enum_specifier649) + if self.failed: + return retval + + + elif alt23 == 3: + # C.g:217:4: 'enum' IDENTIFIER + self.match(self.input, 48, self.FOLLOW_48_in_enum_specifier654) + if self.failed: + return retval + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enum_specifier656) + if self.failed: + return retval + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 18, enum_specifier_StartIndex) + + pass + + return retval + + # $ANTLR end enum_specifier + + + # $ANTLR start enumerator_list + # C.g:220:1: enumerator_list : enumerator ( ',' enumerator )* ; + def enumerator_list(self, ): + + enumerator_list_StartIndex = self.input.index() + try: + try: + if self.backtracking 
> 0 and self.alreadyParsedRule(self.input, 19): + return + + # C.g:221:2: ( enumerator ( ',' enumerator )* ) + # C.g:221:4: enumerator ( ',' enumerator )* + self.following.append(self.FOLLOW_enumerator_in_enumerator_list667) + self.enumerator() + self.following.pop() + if self.failed: + return + # C.g:221:15: ( ',' enumerator )* + while True: #loop24 + alt24 = 2 + LA24_0 = self.input.LA(1) + + if (LA24_0 == 27) : + LA24_1 = self.input.LA(2) + + if (LA24_1 == IDENTIFIER) : + alt24 = 1 + + + + + if alt24 == 1: + # C.g:221:16: ',' enumerator + self.match(self.input, 27, self.FOLLOW_27_in_enumerator_list670) + if self.failed: + return + self.following.append(self.FOLLOW_enumerator_in_enumerator_list672) + self.enumerator() + self.following.pop() + if self.failed: + return + + + else: + break #loop24 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 19, enumerator_list_StartIndex) + + pass + + return + + # $ANTLR end enumerator_list + + + # $ANTLR start enumerator + # C.g:224:1: enumerator : IDENTIFIER ( '=' constant_expression )? ; + def enumerator(self, ): + + enumerator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 20): + return + + # C.g:225:2: ( IDENTIFIER ( '=' constant_expression )? ) + # C.g:225:4: IDENTIFIER ( '=' constant_expression )? + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_enumerator685) + if self.failed: + return + # C.g:225:15: ( '=' constant_expression )? 
+ alt25 = 2 + LA25_0 = self.input.LA(1) + + if (LA25_0 == 28) : + alt25 = 1 + if alt25 == 1: + # C.g:225:16: '=' constant_expression + self.match(self.input, 28, self.FOLLOW_28_in_enumerator688) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_enumerator690) + self.constant_expression() + self.following.pop() + if self.failed: + return + + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 20, enumerator_StartIndex) + + pass + + return + + # $ANTLR end enumerator + + + # $ANTLR start type_qualifier + # C.g:228:1: type_qualifier : ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' ); + def type_qualifier(self, ): + + type_qualifier_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 21): + return + + # C.g:229:2: ( 'const' | 'volatile' | 'IN' | 'OUT' | 'OPTIONAL' | 'CONST' | 'UNALIGNED' | 'VOLATILE' | 'GLOBAL_REMOVE_IF_UNREFERENCED' | 'EFIAPI' | 'EFI_BOOTSERVICE' | 'EFI_RUNTIMESERVICE' ) + # C.g: + if (49 <= self.input.LA(1) <= 60): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_type_qualifier0 + ) + raise mse + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 21, type_qualifier_StartIndex) + + pass + + return + + # $ANTLR end type_qualifier + + class declarator_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start declarator + # C.g:243:1: declarator : ( ( 
pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer ); + def declarator(self, ): + + retval = self.declarator_return() + retval.start = self.input.LT(1) + declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 22): + return retval + + # C.g:244:2: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer ) + alt30 = 2 + LA30_0 = self.input.LA(1) + + if (LA30_0 == 65) : + LA30_1 = self.input.LA(2) + + if (self.synpred65()) : + alt30 = 1 + elif (True) : + alt30 = 2 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("243:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 1, self.input) + + raise nvae + + elif (LA30_0 == IDENTIFIER or (58 <= LA30_0 <= 61)) : + alt30 = 1 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("243:1: declarator : ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator | pointer );", 30, 0, self.input) + + raise nvae + + if alt30 == 1: + # C.g:244:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator + # C.g:244:4: ( pointer )? + alt26 = 2 + LA26_0 = self.input.LA(1) + + if (LA26_0 == 65) : + alt26 = 1 + if alt26 == 1: + # C.g:0:0: pointer + self.following.append(self.FOLLOW_pointer_in_declarator769) + self.pointer() + self.following.pop() + if self.failed: + return retval + + + + # C.g:244:13: ( 'EFIAPI' )? + alt27 = 2 + LA27_0 = self.input.LA(1) + + if (LA27_0 == 58) : + alt27 = 1 + if alt27 == 1: + # C.g:244:14: 'EFIAPI' + self.match(self.input, 58, self.FOLLOW_58_in_declarator773) + if self.failed: + return retval + + + + # C.g:244:25: ( 'EFI_BOOTSERVICE' )? 
+ alt28 = 2 + LA28_0 = self.input.LA(1) + + if (LA28_0 == 59) : + alt28 = 1 + if alt28 == 1: + # C.g:244:26: 'EFI_BOOTSERVICE' + self.match(self.input, 59, self.FOLLOW_59_in_declarator778) + if self.failed: + return retval + + + + # C.g:244:46: ( 'EFI_RUNTIMESERVICE' )? + alt29 = 2 + LA29_0 = self.input.LA(1) + + if (LA29_0 == 60) : + alt29 = 1 + if alt29 == 1: + # C.g:244:47: 'EFI_RUNTIMESERVICE' + self.match(self.input, 60, self.FOLLOW_60_in_declarator783) + if self.failed: + return retval + + + + self.following.append(self.FOLLOW_direct_declarator_in_declarator787) + self.direct_declarator() + self.following.pop() + if self.failed: + return retval + + + elif alt30 == 2: + # C.g:246:4: pointer + self.following.append(self.FOLLOW_pointer_in_declarator793) + self.pointer() + self.following.pop() + if self.failed: + return retval + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 22, declarator_StartIndex) + + pass + + return retval + + # $ANTLR end declarator + + + # $ANTLR start direct_declarator + # C.g:249:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ ); + def direct_declarator(self, ): + + direct_declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 23): + return + + # C.g:250:2: ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ ) + alt34 = 2 + LA34_0 = self.input.LA(1) + + if (LA34_0 == IDENTIFIER) : + alt34 = 1 + elif (LA34_0 == 61) : + alt34 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("249:1: direct_declarator : ( IDENTIFIER ( declarator_suffix )* | '(' ( 'EFIAPI' )? 
declarator ')' ( declarator_suffix )+ );", 34, 0, self.input) + + raise nvae + + if alt34 == 1: + # C.g:250:4: IDENTIFIER ( declarator_suffix )* + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_direct_declarator804) + if self.failed: + return + # C.g:250:15: ( declarator_suffix )* + while True: #loop31 + alt31 = 2 + LA31_0 = self.input.LA(1) + + if (LA31_0 == 61) : + LA31 = self.input.LA(2) + if LA31 == 62: + LA31_30 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 58: + LA31_31 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 65: + LA31_32 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 59: + LA31_33 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 60: + LA31_34 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == IDENTIFIER: + LA31_35 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 29 or LA31 == 30 or LA31 == 31 or LA31 == 32 or LA31 == 33: + LA31_37 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 34: + LA31_38 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 35: + LA31_39 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 36: + LA31_40 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 37: + LA31_41 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 38: + LA31_42 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 39: + LA31_43 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 40: + LA31_44 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 41: + LA31_45 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 42: + LA31_46 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 45 or LA31 == 46: + LA31_47 = 
self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 48: + LA31_48 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 49 or LA31 == 50 or LA31 == 51 or LA31 == 52 or LA31 == 53 or LA31 == 54 or LA31 == 55 or LA31 == 56 or LA31 == 57: + LA31_49 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + + elif (LA31_0 == 63) : + LA31 = self.input.LA(2) + if LA31 == 64: + LA31_51 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 61: + LA31_52 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == IDENTIFIER: + LA31_53 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == HEX_LITERAL: + LA31_54 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == OCTAL_LITERAL: + LA31_55 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == DECIMAL_LITERAL: + LA31_56 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == CHARACTER_LITERAL: + LA31_57 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == STRING_LITERAL: + LA31_58 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == FLOATING_POINT_LITERAL: + LA31_59 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 71: + LA31_60 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 72: + LA31_61 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 65 or LA31 == 67 or LA31 == 68 or LA31 == 76 or LA31 == 77 or LA31 == 78: + LA31_62 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + elif LA31 == 73: + LA31_63 = self.input.LA(3) + + if (self.synpred66()) : + alt31 = 1 + + + + + + if alt31 == 1: + # C.g:0:0: declarator_suffix + self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator806) + self.declarator_suffix() + self.following.pop() + if self.failed: + return + + + else: + break #loop31 
+ + + + + elif alt34 == 2: + # C.g:251:4: '(' ( 'EFIAPI' )? declarator ')' ( declarator_suffix )+ + self.match(self.input, 61, self.FOLLOW_61_in_direct_declarator812) + if self.failed: + return + # C.g:251:8: ( 'EFIAPI' )? + alt32 = 2 + LA32_0 = self.input.LA(1) + + if (LA32_0 == 58) : + LA32_1 = self.input.LA(2) + + if (self.synpred68()) : + alt32 = 1 + if alt32 == 1: + # C.g:251:9: 'EFIAPI' + self.match(self.input, 58, self.FOLLOW_58_in_direct_declarator815) + if self.failed: + return + + + + self.following.append(self.FOLLOW_declarator_in_direct_declarator819) + self.declarator() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_direct_declarator821) + if self.failed: + return + # C.g:251:35: ( declarator_suffix )+ + cnt33 = 0 + while True: #loop33 + alt33 = 2 + LA33_0 = self.input.LA(1) + + if (LA33_0 == 61) : + LA33 = self.input.LA(2) + if LA33 == 62: + LA33_30 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 58: + LA33_31 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 65: + LA33_32 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 59: + LA33_33 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 60: + LA33_34 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == IDENTIFIER: + LA33_35 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 29 or LA33 == 30 or LA33 == 31 or LA33 == 32 or LA33 == 33: + LA33_37 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 34: + LA33_38 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 35: + LA33_39 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 36: + LA33_40 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 37: + LA33_41 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 38: + 
LA33_42 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 39: + LA33_43 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 40: + LA33_44 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 41: + LA33_45 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 42: + LA33_46 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 45 or LA33 == 46: + LA33_47 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 48: + LA33_48 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 49 or LA33 == 50 or LA33 == 51 or LA33 == 52 or LA33 == 53 or LA33 == 54 or LA33 == 55 or LA33 == 56 or LA33 == 57: + LA33_49 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + + elif (LA33_0 == 63) : + LA33 = self.input.LA(2) + if LA33 == 64: + LA33_51 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 61: + LA33_52 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == IDENTIFIER: + LA33_53 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == HEX_LITERAL: + LA33_54 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == OCTAL_LITERAL: + LA33_55 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == DECIMAL_LITERAL: + LA33_56 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == CHARACTER_LITERAL: + LA33_57 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == STRING_LITERAL: + LA33_58 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == FLOATING_POINT_LITERAL: + LA33_59 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 71: + LA33_60 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 72: + LA33_61 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif 
LA33 == 65 or LA33 == 67 or LA33 == 68 or LA33 == 76 or LA33 == 77 or LA33 == 78: + LA33_62 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + elif LA33 == 73: + LA33_63 = self.input.LA(3) + + if (self.synpred69()) : + alt33 = 1 + + + + + + if alt33 == 1: + # C.g:0:0: declarator_suffix + self.following.append(self.FOLLOW_declarator_suffix_in_direct_declarator823) + self.declarator_suffix() + self.following.pop() + if self.failed: + return + + + else: + if cnt33 >= 1: + break #loop33 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(33, self.input) + raise eee + + cnt33 += 1 + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 23, direct_declarator_StartIndex) + + pass + + return + + # $ANTLR end direct_declarator + + + # $ANTLR start declarator_suffix + # C.g:254:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' ); + def declarator_suffix(self, ): + + declarator_suffix_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 24): + return + + # C.g:255:2: ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' ) + alt35 = 5 + LA35_0 = self.input.LA(1) + + if (LA35_0 == 63) : + LA35_1 = self.input.LA(2) + + if (LA35_1 == 64) : + alt35 = 2 + elif ((IDENTIFIER <= LA35_1 <= FLOATING_POINT_LITERAL) or LA35_1 == 61 or LA35_1 == 65 or (67 <= LA35_1 <= 68) or (71 <= LA35_1 <= 73) or (76 <= LA35_1 <= 78)) : + alt35 = 1 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("254:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 1, self.input) + + raise nvae + + elif (LA35_0 == 61) : + LA35 = 
self.input.LA(2) + if LA35 == 62: + alt35 = 5 + elif LA35 == IDENTIFIER: + LA35_17 = self.input.LA(3) + + if (self.synpred72()) : + alt35 = 3 + elif (self.synpred73()) : + alt35 = 4 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("254:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 17, self.input) + + raise nvae + + elif LA35 == 29 or LA35 == 30 or LA35 == 31 or LA35 == 32 or LA35 == 33 or LA35 == 34 or LA35 == 35 or LA35 == 36 or LA35 == 37 or LA35 == 38 or LA35 == 39 or LA35 == 40 or LA35 == 41 or LA35 == 42 or LA35 == 45 or LA35 == 46 or LA35 == 48 or LA35 == 49 or LA35 == 50 or LA35 == 51 or LA35 == 52 or LA35 == 53 or LA35 == 54 or LA35 == 55 or LA35 == 56 or LA35 == 57 or LA35 == 58 or LA35 == 59 or LA35 == 60 or LA35 == 65: + alt35 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("254:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 2, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("254:1: declarator_suffix : ( '[' constant_expression ']' | '[' ']' | '(' parameter_type_list ')' | '(' identifier_list ')' | '(' ')' );", 35, 0, self.input) + + raise nvae + + if alt35 == 1: + # C.g:255:6: '[' constant_expression ']' + self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix837) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_declarator_suffix839) + self.constant_expression() + self.following.pop() + if self.failed: + return + self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix841) + if self.failed: + return + + + elif alt35 == 2: + # C.g:256:9: '[' ']' + self.match(self.input, 63, self.FOLLOW_63_in_declarator_suffix851) + if self.failed: + return + 
self.match(self.input, 64, self.FOLLOW_64_in_declarator_suffix853) + if self.failed: + return + + + elif alt35 == 3: + # C.g:257:9: '(' parameter_type_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_declarator_suffix863) + if self.failed: + return + self.following.append(self.FOLLOW_parameter_type_list_in_declarator_suffix865) + self.parameter_type_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix867) + if self.failed: + return + + + elif alt35 == 4: + # C.g:258:9: '(' identifier_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_declarator_suffix877) + if self.failed: + return + self.following.append(self.FOLLOW_identifier_list_in_declarator_suffix879) + self.identifier_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix881) + if self.failed: + return + + + elif alt35 == 5: + # C.g:259:9: '(' ')' + self.match(self.input, 61, self.FOLLOW_61_in_declarator_suffix891) + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_declarator_suffix893) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 24, declarator_suffix_StartIndex) + + pass + + return + + # $ANTLR end declarator_suffix + + + # $ANTLR start pointer + # C.g:262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' ); + def pointer(self, ): + + pointer_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 25): + return + + # C.g:263:2: ( '*' ( type_qualifier )+ ( pointer )? 
| '*' pointer | '*' ) + alt38 = 3 + LA38_0 = self.input.LA(1) + + if (LA38_0 == 65) : + LA38 = self.input.LA(2) + if LA38 == 58: + LA38_2 = self.input.LA(3) + + if (self.synpred76()) : + alt38 = 1 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 2, self.input) + + raise nvae + + elif LA38 == 59: + LA38_3 = self.input.LA(3) + + if (self.synpred76()) : + alt38 = 1 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 3, self.input) + + raise nvae + + elif LA38 == 60: + LA38_4 = self.input.LA(3) + + if (self.synpred76()) : + alt38 = 1 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 4, self.input) + + raise nvae + + elif LA38 == EOF or LA38 == IDENTIFIER or LA38 == 25 or LA38 == 26 or LA38 == 27 or LA38 == 28 or LA38 == 29 or LA38 == 30 or LA38 == 31 or LA38 == 32 or LA38 == 33 or LA38 == 34 or LA38 == 35 or LA38 == 36 or LA38 == 37 or LA38 == 38 or LA38 == 39 or LA38 == 40 or LA38 == 41 or LA38 == 42 or LA38 == 43 or LA38 == 45 or LA38 == 46 or LA38 == 47 or LA38 == 48 or LA38 == 61 or LA38 == 62 or LA38 == 63: + alt38 = 3 + elif LA38 == 53: + LA38_20 = self.input.LA(3) + + if (self.synpred76()) : + alt38 = 1 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? 
| '*' pointer | '*' );", 38, 20, self.input) + + raise nvae + + elif LA38 == 49 or LA38 == 50 or LA38 == 51 or LA38 == 52 or LA38 == 54 or LA38 == 55 or LA38 == 56 or LA38 == 57: + LA38_28 = self.input.LA(3) + + if (self.synpred76()) : + alt38 = 1 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 28, self.input) + + raise nvae + + elif LA38 == 65: + LA38_29 = self.input.LA(3) + + if (self.synpred77()) : + alt38 = 2 + elif (True) : + alt38 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 29, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 1, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("262:1: pointer : ( '*' ( type_qualifier )+ ( pointer )? | '*' pointer | '*' );", 38, 0, self.input) + + raise nvae + + if alt38 == 1: + # C.g:263:4: '*' ( type_qualifier )+ ( pointer )? 
+ self.match(self.input, 65, self.FOLLOW_65_in_pointer904) + if self.failed: + return + # C.g:263:8: ( type_qualifier )+ + cnt36 = 0 + while True: #loop36 + alt36 = 2 + LA36 = self.input.LA(1) + if LA36 == 58: + LA36_2 = self.input.LA(2) + + if (self.synpred74()) : + alt36 = 1 + + + elif LA36 == 59: + LA36_3 = self.input.LA(2) + + if (self.synpred74()) : + alt36 = 1 + + + elif LA36 == 60: + LA36_4 = self.input.LA(2) + + if (self.synpred74()) : + alt36 = 1 + + + elif LA36 == 53: + LA36_20 = self.input.LA(2) + + if (self.synpred74()) : + alt36 = 1 + + + elif LA36 == 49 or LA36 == 50 or LA36 == 51 or LA36 == 52 or LA36 == 54 or LA36 == 55 or LA36 == 56 or LA36 == 57: + LA36_28 = self.input.LA(2) + + if (self.synpred74()) : + alt36 = 1 + + + + if alt36 == 1: + # C.g:0:0: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_pointer906) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + else: + if cnt36 >= 1: + break #loop36 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(36, self.input) + raise eee + + cnt36 += 1 + + + # C.g:263:24: ( pointer )? 
+ alt37 = 2 + LA37_0 = self.input.LA(1) + + if (LA37_0 == 65) : + LA37_1 = self.input.LA(2) + + if (self.synpred75()) : + alt37 = 1 + if alt37 == 1: + # C.g:0:0: pointer + self.following.append(self.FOLLOW_pointer_in_pointer909) + self.pointer() + self.following.pop() + if self.failed: + return + + + + + + elif alt38 == 2: + # C.g:264:4: '*' pointer + self.match(self.input, 65, self.FOLLOW_65_in_pointer915) + if self.failed: + return + self.following.append(self.FOLLOW_pointer_in_pointer917) + self.pointer() + self.following.pop() + if self.failed: + return + + + elif alt38 == 3: + # C.g:265:4: '*' + self.match(self.input, 65, self.FOLLOW_65_in_pointer922) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 25, pointer_StartIndex) + + pass + + return + + # $ANTLR end pointer + + + # $ANTLR start parameter_type_list + # C.g:268:1: parameter_type_list : parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ; + def parameter_type_list(self, ): + + parameter_type_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 26): + return + + # C.g:269:2: ( parameter_list ( ',' ( 'OPTIONAL' )? '...' )? ) + # C.g:269:4: parameter_list ( ',' ( 'OPTIONAL' )? '...' )? + self.following.append(self.FOLLOW_parameter_list_in_parameter_type_list933) + self.parameter_list() + self.following.pop() + if self.failed: + return + # C.g:269:19: ( ',' ( 'OPTIONAL' )? '...' )? + alt40 = 2 + LA40_0 = self.input.LA(1) + + if (LA40_0 == 27) : + alt40 = 1 + if alt40 == 1: + # C.g:269:20: ',' ( 'OPTIONAL' )? '...' + self.match(self.input, 27, self.FOLLOW_27_in_parameter_type_list936) + if self.failed: + return + # C.g:269:24: ( 'OPTIONAL' )? 
+ alt39 = 2 + LA39_0 = self.input.LA(1) + + if (LA39_0 == 53) : + alt39 = 1 + if alt39 == 1: + # C.g:269:25: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_parameter_type_list939) + if self.failed: + return + + + + self.match(self.input, 66, self.FOLLOW_66_in_parameter_type_list943) + if self.failed: + return + + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 26, parameter_type_list_StartIndex) + + pass + + return + + # $ANTLR end parameter_type_list + + + # $ANTLR start parameter_list + # C.g:272:1: parameter_list : parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ; + def parameter_list(self, ): + + parameter_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 27): + return + + # C.g:273:2: ( parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* ) + # C.g:273:4: parameter_declaration ( ',' ( 'OPTIONAL' )? parameter_declaration )* + self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list956) + self.parameter_declaration() + self.following.pop() + if self.failed: + return + # C.g:273:26: ( ',' ( 'OPTIONAL' )? parameter_declaration )* + while True: #loop42 + alt42 = 2 + LA42_0 = self.input.LA(1) + + if (LA42_0 == 27) : + LA42_1 = self.input.LA(2) + + if (LA42_1 == 53) : + LA42_3 = self.input.LA(3) + + if (self.synpred81()) : + alt42 = 1 + + + elif (LA42_1 == IDENTIFIER or (29 <= LA42_1 <= 42) or (45 <= LA42_1 <= 46) or (48 <= LA42_1 <= 52) or (54 <= LA42_1 <= 60) or LA42_1 == 65) : + alt42 = 1 + + + + + if alt42 == 1: + # C.g:273:27: ',' ( 'OPTIONAL' )? parameter_declaration + self.match(self.input, 27, self.FOLLOW_27_in_parameter_list959) + if self.failed: + return + # C.g:273:31: ( 'OPTIONAL' )? 
+ alt41 = 2 + LA41_0 = self.input.LA(1) + + if (LA41_0 == 53) : + LA41_1 = self.input.LA(2) + + if (self.synpred80()) : + alt41 = 1 + if alt41 == 1: + # C.g:273:32: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_parameter_list962) + if self.failed: + return + + + + self.following.append(self.FOLLOW_parameter_declaration_in_parameter_list966) + self.parameter_declaration() + self.following.pop() + if self.failed: + return + + + else: + break #loop42 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 27, parameter_list_StartIndex) + + pass + + return + + # $ANTLR end parameter_list + + + # $ANTLR start parameter_declaration + # C.g:276:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER ); + def parameter_declaration(self, ): + + parameter_declaration_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 28): + return + + # C.g:277:2: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? 
| ( pointer )* IDENTIFIER ) + alt46 = 2 + LA46 = self.input.LA(1) + if LA46 == 29 or LA46 == 30 or LA46 == 31 or LA46 == 32 or LA46 == 33 or LA46 == 34 or LA46 == 35 or LA46 == 36 or LA46 == 37 or LA46 == 38 or LA46 == 39 or LA46 == 40 or LA46 == 41 or LA46 == 42 or LA46 == 45 or LA46 == 46 or LA46 == 48 or LA46 == 49 or LA46 == 50 or LA46 == 51 or LA46 == 52 or LA46 == 53 or LA46 == 54 or LA46 == 55 or LA46 == 56 or LA46 == 57 or LA46 == 58 or LA46 == 59 or LA46 == 60: + alt46 = 1 + elif LA46 == IDENTIFIER: + LA46_13 = self.input.LA(2) + + if (self.synpred85()) : + alt46 = 1 + elif (True) : + alt46 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("276:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 13, self.input) + + raise nvae + + elif LA46 == 65: + alt46 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("276:1: parameter_declaration : ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? | ( pointer )* IDENTIFIER );", 46, 0, self.input) + + raise nvae + + if alt46 == 1: + # C.g:277:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? 
+ self.following.append(self.FOLLOW_declaration_specifiers_in_parameter_declaration979) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + # C.g:277:27: ( declarator | abstract_declarator )* + while True: #loop43 + alt43 = 3 + LA43 = self.input.LA(1) + if LA43 == 65: + LA43_5 = self.input.LA(2) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == IDENTIFIER or LA43 == 58 or LA43 == 59 or LA43 == 60: + alt43 = 1 + elif LA43 == 61: + LA43 = self.input.LA(2) + if LA43 == 29 or LA43 == 30 or LA43 == 31 or LA43 == 32 or LA43 == 33 or LA43 == 34 or LA43 == 35 or LA43 == 36 or LA43 == 37 or LA43 == 38 or LA43 == 39 or LA43 == 40 or LA43 == 41 or LA43 == 42 or LA43 == 45 or LA43 == 46 or LA43 == 48 or LA43 == 49 or LA43 == 50 or LA43 == 51 or LA43 == 52 or LA43 == 53 or LA43 == 54 or LA43 == 55 or LA43 == 56 or LA43 == 57 or LA43 == 62 or LA43 == 63: + alt43 = 2 + elif LA43 == IDENTIFIER: + LA43_37 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == 58: + LA43_38 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == 65: + LA43_39 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == 59: + LA43_40 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == 60: + LA43_41 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + elif LA43 == 61: + LA43_43 = self.input.LA(3) + + if (self.synpred82()) : + alt43 = 1 + elif (self.synpred83()) : + alt43 = 2 + + + + elif LA43 == 63: + alt43 = 2 + + if alt43 == 1: + # C.g:277:28: declarator + self.following.append(self.FOLLOW_declarator_in_parameter_declaration982) + self.declarator() + self.following.pop() + if self.failed: + return + + + elif alt43 == 2: + # 
C.g:277:39: abstract_declarator + self.following.append(self.FOLLOW_abstract_declarator_in_parameter_declaration984) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + + + else: + break #loop43 + + + # C.g:277:61: ( 'OPTIONAL' )? + alt44 = 2 + LA44_0 = self.input.LA(1) + + if (LA44_0 == 53) : + alt44 = 1 + if alt44 == 1: + # C.g:277:62: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_parameter_declaration989) + if self.failed: + return + + + + + + elif alt46 == 2: + # C.g:279:4: ( pointer )* IDENTIFIER + # C.g:279:4: ( pointer )* + while True: #loop45 + alt45 = 2 + LA45_0 = self.input.LA(1) + + if (LA45_0 == 65) : + alt45 = 1 + + + if alt45 == 1: + # C.g:0:0: pointer + self.following.append(self.FOLLOW_pointer_in_parameter_declaration998) + self.pointer() + self.following.pop() + if self.failed: + return + + + else: + break #loop45 + + + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_parameter_declaration1001) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 28, parameter_declaration_StartIndex) + + pass + + return + + # $ANTLR end parameter_declaration + + + # $ANTLR start identifier_list + # C.g:282:1: identifier_list : IDENTIFIER ( ',' IDENTIFIER )* ; + def identifier_list(self, ): + + identifier_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 29): + return + + # C.g:283:2: ( IDENTIFIER ( ',' IDENTIFIER )* ) + # C.g:283:4: IDENTIFIER ( ',' IDENTIFIER )* + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1012) + if self.failed: + return + # C.g:284:2: ( ',' IDENTIFIER )* + while True: #loop47 + alt47 = 2 + LA47_0 = self.input.LA(1) + + if (LA47_0 == 27) : + alt47 = 1 + + + if alt47 == 1: + # C.g:284:3: ',' IDENTIFIER + self.match(self.input, 27, 
self.FOLLOW_27_in_identifier_list1016) + if self.failed: + return + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_identifier_list1018) + if self.failed: + return + + + else: + break #loop47 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 29, identifier_list_StartIndex) + + pass + + return + + # $ANTLR end identifier_list + + + # $ANTLR start type_name + # C.g:287:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id ); + def type_name(self, ): + + type_name_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 30): + return + + # C.g:288:2: ( specifier_qualifier_list ( abstract_declarator )? | type_id ) + alt49 = 2 + LA49_0 = self.input.LA(1) + + if ((34 <= LA49_0 <= 42) or (45 <= LA49_0 <= 46) or (48 <= LA49_0 <= 60)) : + alt49 = 1 + elif (LA49_0 == IDENTIFIER) : + LA49_13 = self.input.LA(2) + + if (self.synpred89()) : + alt49 = 1 + elif (True) : + alt49 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("287:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 13, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("287:1: type_name : ( specifier_qualifier_list ( abstract_declarator )? | type_id );", 49, 0, self.input) + + raise nvae + + if alt49 == 1: + # C.g:288:4: specifier_qualifier_list ( abstract_declarator )? + self.following.append(self.FOLLOW_specifier_qualifier_list_in_type_name1031) + self.specifier_qualifier_list() + self.following.pop() + if self.failed: + return + # C.g:288:29: ( abstract_declarator )? 
+ alt48 = 2 + LA48_0 = self.input.LA(1) + + if (LA48_0 == 61 or LA48_0 == 63 or LA48_0 == 65) : + alt48 = 1 + if alt48 == 1: + # C.g:0:0: abstract_declarator + self.following.append(self.FOLLOW_abstract_declarator_in_type_name1033) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + + + + + + elif alt49 == 2: + # C.g:289:4: type_id + self.following.append(self.FOLLOW_type_id_in_type_name1039) + self.type_id() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 30, type_name_StartIndex) + + pass + + return + + # $ANTLR end type_name + + + # $ANTLR start abstract_declarator + # C.g:292:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator ); + def abstract_declarator(self, ): + + abstract_declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 31): + return + + # C.g:293:2: ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator ) + alt51 = 2 + LA51_0 = self.input.LA(1) + + if (LA51_0 == 65) : + alt51 = 1 + elif (LA51_0 == 61 or LA51_0 == 63) : + alt51 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("292:1: abstract_declarator : ( pointer ( direct_abstract_declarator )? | direct_abstract_declarator );", 51, 0, self.input) + + raise nvae + + if alt51 == 1: + # C.g:293:4: pointer ( direct_abstract_declarator )? + self.following.append(self.FOLLOW_pointer_in_abstract_declarator1050) + self.pointer() + self.following.pop() + if self.failed: + return + # C.g:293:12: ( direct_abstract_declarator )? 
+ alt50 = 2 + LA50_0 = self.input.LA(1) + + if (LA50_0 == 61) : + LA50 = self.input.LA(2) + if LA50 == 62: + LA50_12 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 58: + LA50_13 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 65: + LA50_14 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 59: + LA50_15 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 60: + LA50_16 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == IDENTIFIER: + LA50_17 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 61: + LA50_18 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 29 or LA50 == 30 or LA50 == 31 or LA50 == 32 or LA50 == 33: + LA50_19 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 34: + LA50_20 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 35: + LA50_21 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 36: + LA50_22 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 37: + LA50_23 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 38: + LA50_24 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 39: + LA50_25 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 40: + LA50_26 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 41: + LA50_27 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 42: + LA50_28 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 45 or LA50 == 46: + LA50_29 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 48: + LA50_30 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 49 or LA50 == 50 or LA50 == 51 or LA50 == 52 or LA50 == 53 or LA50 == 54 or LA50 == 55 or LA50 == 56 or LA50 == 57: + LA50_31 = self.input.LA(3) + 
+ if (self.synpred90()) : + alt50 = 1 + elif LA50 == 63: + LA50_32 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif (LA50_0 == 63) : + LA50 = self.input.LA(2) + if LA50 == 64: + LA50_33 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 61: + LA50_34 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == IDENTIFIER: + LA50_35 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == HEX_LITERAL: + LA50_36 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == OCTAL_LITERAL: + LA50_37 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == DECIMAL_LITERAL: + LA50_38 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == CHARACTER_LITERAL: + LA50_39 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == STRING_LITERAL: + LA50_40 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == FLOATING_POINT_LITERAL: + LA50_41 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 71: + LA50_42 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 72: + LA50_43 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 65 or LA50 == 67 or LA50 == 68 or LA50 == 76 or LA50 == 77 or LA50 == 78: + LA50_44 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + elif LA50 == 73: + LA50_45 = self.input.LA(3) + + if (self.synpred90()) : + alt50 = 1 + if alt50 == 1: + # C.g:0:0: direct_abstract_declarator + self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1052) + self.direct_abstract_declarator() + self.following.pop() + if self.failed: + return + + + + + + elif alt51 == 2: + # C.g:294:4: direct_abstract_declarator + self.following.append(self.FOLLOW_direct_abstract_declarator_in_abstract_declarator1058) + self.direct_abstract_declarator() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: 
+ self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 31, abstract_declarator_StartIndex) + + pass + + return + + # $ANTLR end abstract_declarator + + + # $ANTLR start direct_abstract_declarator + # C.g:297:1: direct_abstract_declarator : ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ; + def direct_abstract_declarator(self, ): + + direct_abstract_declarator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 32): + return + + # C.g:298:2: ( ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* ) + # C.g:298:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) ( abstract_declarator_suffix )* + # C.g:298:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix ) + alt52 = 2 + LA52_0 = self.input.LA(1) + + if (LA52_0 == 61) : + LA52 = self.input.LA(2) + if LA52 == IDENTIFIER or LA52 == 29 or LA52 == 30 or LA52 == 31 or LA52 == 32 or LA52 == 33 or LA52 == 34 or LA52 == 35 or LA52 == 36 or LA52 == 37 or LA52 == 38 or LA52 == 39 or LA52 == 40 or LA52 == 41 or LA52 == 42 or LA52 == 45 or LA52 == 46 or LA52 == 48 or LA52 == 49 or LA52 == 50 or LA52 == 51 or LA52 == 52 or LA52 == 53 or LA52 == 54 or LA52 == 55 or LA52 == 56 or LA52 == 57 or LA52 == 58 or LA52 == 59 or LA52 == 60 or LA52 == 62: + alt52 = 2 + elif LA52 == 65: + LA52_18 = self.input.LA(3) + + if (self.synpred92()) : + alt52 = 1 + elif (True) : + alt52 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("298:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 18, self.input) + + raise nvae + + elif LA52 == 61 or LA52 == 63: + alt52 = 1 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("298:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 1, self.input) 
+ + raise nvae + + elif (LA52_0 == 63) : + alt52 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("298:4: ( '(' abstract_declarator ')' | abstract_declarator_suffix )", 52, 0, self.input) + + raise nvae + + if alt52 == 1: + # C.g:298:6: '(' abstract_declarator ')' + self.match(self.input, 61, self.FOLLOW_61_in_direct_abstract_declarator1071) + if self.failed: + return + self.following.append(self.FOLLOW_abstract_declarator_in_direct_abstract_declarator1073) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_direct_abstract_declarator1075) + if self.failed: + return + + + elif alt52 == 2: + # C.g:298:36: abstract_declarator_suffix + self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1079) + self.abstract_declarator_suffix() + self.following.pop() + if self.failed: + return + + + + # C.g:298:65: ( abstract_declarator_suffix )* + while True: #loop53 + alt53 = 2 + LA53_0 = self.input.LA(1) + + if (LA53_0 == 61) : + LA53 = self.input.LA(2) + if LA53 == 62: + LA53_12 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 58: + LA53_13 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 65: + LA53_14 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 59: + LA53_15 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 60: + LA53_16 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == IDENTIFIER: + LA53_17 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 29 or LA53 == 30 or LA53 == 31 or LA53 == 32 or LA53 == 33: + LA53_19 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 34: + LA53_20 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 35: + LA53_21 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 
1 + + + elif LA53 == 36: + LA53_22 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 37: + LA53_23 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 38: + LA53_24 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 39: + LA53_25 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 40: + LA53_26 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 41: + LA53_27 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 42: + LA53_28 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 45 or LA53 == 46: + LA53_29 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 48: + LA53_30 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 49 or LA53 == 50 or LA53 == 51 or LA53 == 52 or LA53 == 53 or LA53 == 54 or LA53 == 55 or LA53 == 56 or LA53 == 57: + LA53_31 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + + elif (LA53_0 == 63) : + LA53 = self.input.LA(2) + if LA53 == 64: + LA53_33 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 61: + LA53_34 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == IDENTIFIER: + LA53_35 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == HEX_LITERAL: + LA53_36 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == OCTAL_LITERAL: + LA53_37 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == DECIMAL_LITERAL: + LA53_38 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == CHARACTER_LITERAL: + LA53_39 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == STRING_LITERAL: + LA53_40 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == FLOATING_POINT_LITERAL: + LA53_41 = self.input.LA(3) + + if (self.synpred93()) 
: + alt53 = 1 + + + elif LA53 == 71: + LA53_42 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 72: + LA53_43 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 65 or LA53 == 67 or LA53 == 68 or LA53 == 76 or LA53 == 77 or LA53 == 78: + LA53_44 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + elif LA53 == 73: + LA53_45 = self.input.LA(3) + + if (self.synpred93()) : + alt53 = 1 + + + + + + if alt53 == 1: + # C.g:0:0: abstract_declarator_suffix + self.following.append(self.FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1083) + self.abstract_declarator_suffix() + self.following.pop() + if self.failed: + return + + + else: + break #loop53 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 32, direct_abstract_declarator_StartIndex) + + pass + + return + + # $ANTLR end direct_abstract_declarator + + + # $ANTLR start abstract_declarator_suffix + # C.g:301:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' ); + def abstract_declarator_suffix(self, ): + + abstract_declarator_suffix_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 33): + return + + # C.g:302:2: ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' ) + alt54 = 4 + LA54_0 = self.input.LA(1) + + if (LA54_0 == 63) : + LA54_1 = self.input.LA(2) + + if (LA54_1 == 64) : + alt54 = 1 + elif ((IDENTIFIER <= LA54_1 <= FLOATING_POINT_LITERAL) or LA54_1 == 61 or LA54_1 == 65 or (67 <= LA54_1 <= 68) or (71 <= LA54_1 <= 73) or (76 <= LA54_1 <= 78)) : + alt54 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("301:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' 
);", 54, 1, self.input) + + raise nvae + + elif (LA54_0 == 61) : + LA54_2 = self.input.LA(2) + + if (LA54_2 == 62) : + alt54 = 3 + elif (LA54_2 == IDENTIFIER or (29 <= LA54_2 <= 42) or (45 <= LA54_2 <= 46) or (48 <= LA54_2 <= 60) or LA54_2 == 65) : + alt54 = 4 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("301:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 2, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("301:1: abstract_declarator_suffix : ( '[' ']' | '[' constant_expression ']' | '(' ')' | '(' parameter_type_list ')' );", 54, 0, self.input) + + raise nvae + + if alt54 == 1: + # C.g:302:4: '[' ']' + self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1095) + if self.failed: + return + self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1097) + if self.failed: + return + + + elif alt54 == 2: + # C.g:303:4: '[' constant_expression ']' + self.match(self.input, 63, self.FOLLOW_63_in_abstract_declarator_suffix1102) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_abstract_declarator_suffix1104) + self.constant_expression() + self.following.pop() + if self.failed: + return + self.match(self.input, 64, self.FOLLOW_64_in_abstract_declarator_suffix1106) + if self.failed: + return + + + elif alt54 == 3: + # C.g:304:4: '(' ')' + self.match(self.input, 61, self.FOLLOW_61_in_abstract_declarator_suffix1111) + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1113) + if self.failed: + return + + + elif alt54 == 4: + # C.g:305:4: '(' parameter_type_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_abstract_declarator_suffix1118) + if self.failed: + return + self.following.append(self.FOLLOW_parameter_type_list_in_abstract_declarator_suffix1120) + 
self.parameter_type_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_abstract_declarator_suffix1122) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 33, abstract_declarator_suffix_StartIndex) + + pass + + return + + # $ANTLR end abstract_declarator_suffix + + + # $ANTLR start initializer + # C.g:308:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' ); + def initializer(self, ): + + initializer_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 34): + return + + # C.g:310:2: ( assignment_expression | '{' initializer_list ( ',' )? '}' ) + alt56 = 2 + LA56_0 = self.input.LA(1) + + if ((IDENTIFIER <= LA56_0 <= FLOATING_POINT_LITERAL) or LA56_0 == 61 or LA56_0 == 65 or (67 <= LA56_0 <= 68) or (71 <= LA56_0 <= 73) or (76 <= LA56_0 <= 78)) : + alt56 = 1 + elif (LA56_0 == 43) : + alt56 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("308:1: initializer : ( assignment_expression | '{' initializer_list ( ',' )? '}' );", 56, 0, self.input) + + raise nvae + + if alt56 == 1: + # C.g:310:4: assignment_expression + self.following.append(self.FOLLOW_assignment_expression_in_initializer1135) + self.assignment_expression() + self.following.pop() + if self.failed: + return + + + elif alt56 == 2: + # C.g:311:4: '{' initializer_list ( ',' )? '}' + self.match(self.input, 43, self.FOLLOW_43_in_initializer1140) + if self.failed: + return + self.following.append(self.FOLLOW_initializer_list_in_initializer1142) + self.initializer_list() + self.following.pop() + if self.failed: + return + # C.g:311:25: ( ',' )? 
+ alt55 = 2 + LA55_0 = self.input.LA(1) + + if (LA55_0 == 27) : + alt55 = 1 + if alt55 == 1: + # C.g:0:0: ',' + self.match(self.input, 27, self.FOLLOW_27_in_initializer1144) + if self.failed: + return + + + + self.match(self.input, 44, self.FOLLOW_44_in_initializer1147) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 34, initializer_StartIndex) + + pass + + return + + # $ANTLR end initializer + + + # $ANTLR start initializer_list + # C.g:314:1: initializer_list : initializer ( ',' initializer )* ; + def initializer_list(self, ): + + initializer_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 35): + return + + # C.g:315:2: ( initializer ( ',' initializer )* ) + # C.g:315:4: initializer ( ',' initializer )* + self.following.append(self.FOLLOW_initializer_in_initializer_list1158) + self.initializer() + self.following.pop() + if self.failed: + return + # C.g:315:16: ( ',' initializer )* + while True: #loop57 + alt57 = 2 + LA57_0 = self.input.LA(1) + + if (LA57_0 == 27) : + LA57_1 = self.input.LA(2) + + if ((IDENTIFIER <= LA57_1 <= FLOATING_POINT_LITERAL) or LA57_1 == 43 or LA57_1 == 61 or LA57_1 == 65 or (67 <= LA57_1 <= 68) or (71 <= LA57_1 <= 73) or (76 <= LA57_1 <= 78)) : + alt57 = 1 + + + + + if alt57 == 1: + # C.g:315:17: ',' initializer + self.match(self.input, 27, self.FOLLOW_27_in_initializer_list1161) + if self.failed: + return + self.following.append(self.FOLLOW_initializer_in_initializer_list1163) + self.initializer() + self.following.pop() + if self.failed: + return + + + else: + break #loop57 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 35, initializer_list_StartIndex) + + pass + + return + + # $ANTLR end initializer_list + + class 
argument_expression_list_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start argument_expression_list + # C.g:320:1: argument_expression_list : assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ; + def argument_expression_list(self, ): + + retval = self.argument_expression_list_return() + retval.start = self.input.LT(1) + argument_expression_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 36): + return retval + + # C.g:321:2: ( assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* ) + # C.g:321:6: assignment_expression ( 'OPTIONAL' )? ( ',' assignment_expression ( 'OPTIONAL' )? )* + self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1181) + self.assignment_expression() + self.following.pop() + if self.failed: + return retval + # C.g:321:28: ( 'OPTIONAL' )? + alt58 = 2 + LA58_0 = self.input.LA(1) + + if (LA58_0 == 53) : + alt58 = 1 + if alt58 == 1: + # C.g:321:29: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1184) + if self.failed: + return retval + + + + # C.g:321:42: ( ',' assignment_expression ( 'OPTIONAL' )? )* + while True: #loop60 + alt60 = 2 + LA60_0 = self.input.LA(1) + + if (LA60_0 == 27) : + alt60 = 1 + + + if alt60 == 1: + # C.g:321:43: ',' assignment_expression ( 'OPTIONAL' )? + self.match(self.input, 27, self.FOLLOW_27_in_argument_expression_list1189) + if self.failed: + return retval + self.following.append(self.FOLLOW_assignment_expression_in_argument_expression_list1191) + self.assignment_expression() + self.following.pop() + if self.failed: + return retval + # C.g:321:69: ( 'OPTIONAL' )? 
+ alt59 = 2 + LA59_0 = self.input.LA(1) + + if (LA59_0 == 53) : + alt59 = 1 + if alt59 == 1: + # C.g:321:70: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_argument_expression_list1194) + if self.failed: + return retval + + + + + + else: + break #loop60 + + + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 36, argument_expression_list_StartIndex) + + pass + + return retval + + # $ANTLR end argument_expression_list + + + # $ANTLR start additive_expression + # C.g:324:1: additive_expression : ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ; + def additive_expression(self, ): + + additive_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 37): + return + + # C.g:325:2: ( ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* ) + # C.g:325:4: ( multiplicative_expression ) ( '+' multiplicative_expression | '-' multiplicative_expression )* + # C.g:325:4: ( multiplicative_expression ) + # C.g:325:5: multiplicative_expression + self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1210) + self.multiplicative_expression() + self.following.pop() + if self.failed: + return + + + + # C.g:325:32: ( '+' multiplicative_expression | '-' multiplicative_expression )* + while True: #loop61 + alt61 = 3 + LA61_0 = self.input.LA(1) + + if (LA61_0 == 67) : + alt61 = 1 + elif (LA61_0 == 68) : + alt61 = 2 + + + if alt61 == 1: + # C.g:325:33: '+' multiplicative_expression + self.match(self.input, 67, self.FOLLOW_67_in_additive_expression1214) + if self.failed: + return + self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1216) + self.multiplicative_expression() + self.following.pop() + if self.failed: + return + + 
                    elif alt61 == 2:
                        # C.g:325:65: '-' multiplicative_expression
                        self.match(self.input, 68, self.FOLLOW_68_in_additive_expression1220)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_multiplicative_expression_in_additive_expression1222)
                        self.multiplicative_expression()
                        self.following.pop()
                        if self.failed:
                            return


                    else:
                        break #loop61




            # NOTE: "re" is the caught exception here (Python 2 except syntax),
            # not the regex module; it shadows any module-level "re" in scope.
            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 37, additive_expression_StartIndex)

            pass

        return

    # $ANTLR end additive_expression


    # $ANTLR start multiplicative_expression
    # C.g:328:1: multiplicative_expression : ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* ;
    def multiplicative_expression(self, ):
        """Parse rule multiplicative_expression (C.g:328).

        ANTLR-generated: a cast_expression followed by any number of
        '*' (token 65), '/' (token 69) or '%' (token 70) cast_expressions.
        Do not hand-edit; regenerate from C.g.
        """

        multiplicative_expression_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: skip if rule 38 already
                # parsed at this input position.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 38):
                    return

                # C.g:329:2: ( ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )* )
                # C.g:329:4: ( cast_expression ) ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
                # C.g:329:4: ( cast_expression )
                # C.g:329:5: cast_expression
                self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1236)
                self.cast_expression()
                self.following.pop()
                if self.failed:
                    return



                # C.g:329:22: ( '*' cast_expression | '/' cast_expression | '%' cast_expression )*
                while True: #loop62
                    alt62 = 4
                    LA62 = self.input.LA(1)
                    if LA62 == 65:
                        alt62 = 1
                    elif LA62 == 69:
                        alt62 = 2
                    elif LA62 == 70:
                        alt62 = 3

                    if alt62 == 1:
                        # C.g:329:23: '*' cast_expression
                        self.match(self.input, 65, self.FOLLOW_65_in_multiplicative_expression1240)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1242)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return


                    elif alt62 == 2:
                        # C.g:329:45: '/' cast_expression
                        self.match(self.input, 69, self.FOLLOW_69_in_multiplicative_expression1246)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1248)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return


                    elif alt62 == 3:
                        # C.g:329:67: '%' cast_expression
                        self.match(self.input, 70, self.FOLLOW_70_in_multiplicative_expression1252)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_cast_expression_in_multiplicative_expression1254)
                        self.cast_expression()
                        self.following.pop()
                        if self.failed:
                            return


                    else:
                        break #loop62




            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 38, multiplicative_expression_StartIndex)

            pass

        return

    # $ANTLR end multiplicative_expression


    # $ANTLR start cast_expression
    # C.g:332:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );
    def cast_expression(self, ):
        """Parse rule cast_expression (C.g:332).

        ANTLR-generated: disambiguates a "( type_name ) cast" from a
        parenthesized unary_expression using syntactic predicate synpred108
        when '(' (token 61) is followed by an IDENTIFIER.
        """

        cast_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 39):
                    return

                # C.g:333:2: ( '(' type_name ')' cast_expression | unary_expression )
                alt63 = 2
                LA63_0 = self.input.LA(1)

                if (LA63_0 == 61) :
                    LA63 = self.input.LA(2)
                    if LA63 == IDENTIFIER:
                        LA63_13 = self.input.LA(3)

                        if (self.synpred108()) :
                            alt63 = 1
                        elif (True) :
                            alt63 = 2
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return

                            nvae = NoViableAltException("332:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 13, self.input)

                            raise nvae

                    elif LA63 == HEX_LITERAL or LA63 == OCTAL_LITERAL or LA63 == DECIMAL_LITERAL or LA63 == CHARACTER_LITERAL or LA63 == STRING_LITERAL or LA63 == FLOATING_POINT_LITERAL or LA63 == 61 or LA63 == 65 or LA63 == 67 or LA63 == 68 or LA63 == 71 or LA63 == 72 or LA63 == 73 or LA63 == 76 or LA63 == 77 or LA63 == 78:
                        alt63 = 2
                    elif LA63 == 34 or LA63 == 35 or LA63 == 36 or LA63 == 37 or LA63 == 38 or LA63 == 39 or LA63 == 40 or LA63 == 41 or LA63 == 42 or LA63 == 45 or LA63 == 46 or LA63 == 48 or LA63 == 49 or LA63 == 50 or LA63 == 51 or LA63 == 52 or LA63 == 53 or LA63 == 54 or LA63 == 55 or LA63 == 56 or LA63 == 57 or LA63 == 58 or LA63 == 59 or LA63 == 60:
                        alt63 = 1
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return

                        nvae = NoViableAltException("332:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 1, self.input)

                        raise nvae

                elif ((IDENTIFIER <= LA63_0 <= FLOATING_POINT_LITERAL) or LA63_0 == 65 or (67 <= LA63_0 <= 68) or (71 <= LA63_0 <= 73) or (76 <= LA63_0 <= 78)) :
                    alt63 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("332:1: cast_expression : ( '(' type_name ')' cast_expression | unary_expression );", 63, 0, self.input)

                    raise nvae

                if alt63 == 1:
                    # C.g:333:4: '(' type_name ')' cast_expression
                    self.match(self.input, 61, self.FOLLOW_61_in_cast_expression1267)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_type_name_in_cast_expression1269)
                    self.type_name()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_cast_expression1271)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_cast_expression1273)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt63 == 2:
                    # C.g:334:4: unary_expression
                    self.following.append(self.FOLLOW_unary_expression_in_cast_expression1278)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return



            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 39, cast_expression_StartIndex)

            pass

        return

    # $ANTLR end cast_expression


    # $ANTLR start unary_expression
    # C.g:337:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );
    def unary_expression(self, ):
        """Parse rule unary_expression (C.g:337).

        ANTLR-generated.  For 'sizeof' (token 73) followed by '(' (token 61),
        syntactic predicate synpred113 chooses "sizeof unary_expression" over
        "sizeof ( type_name )".  Do not hand-edit; regenerate from C.g.
        """

        unary_expression_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 40):
                    return

                # C.g:338:2: ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' )
                alt64 = 6
                LA64 = self.input.LA(1)
                if LA64 == IDENTIFIER or LA64 == HEX_LITERAL or LA64 == OCTAL_LITERAL or LA64 == DECIMAL_LITERAL or LA64 == CHARACTER_LITERAL or LA64 == STRING_LITERAL or LA64 == FLOATING_POINT_LITERAL or LA64 == 61:
                    alt64 = 1
                elif LA64 == 71:
                    alt64 = 2
                elif LA64 == 72:
                    alt64 = 3
                elif LA64 == 65 or LA64 == 67 or LA64 == 68 or LA64 == 76 or LA64 == 77 or LA64 == 78:
                    alt64 = 4
                elif LA64 == 73:
                    LA64_12 = self.input.LA(2)

                    if (LA64_12 == 61) :
                        LA64_13 = self.input.LA(3)

                        if (self.synpred113()) :
                            alt64 = 5
                        elif (True) :
                            alt64 = 6
                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return

                            nvae = NoViableAltException("337:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 13, self.input)

                            raise nvae

                    elif ((IDENTIFIER <= LA64_12 <= FLOATING_POINT_LITERAL) or LA64_12 == 65 or (67 <= LA64_12 <= 68) or (71 <= LA64_12 <= 73) or (76 <= LA64_12 <= 78)) :
                        alt64 = 5
                    else:
                        if self.backtracking > 0:
                            self.failed = True
                            return

                        nvae = NoViableAltException("337:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 12, self.input)

                        raise nvae

                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("337:1: unary_expression : ( postfix_expression | '++' unary_expression | '--' unary_expression | unary_operator cast_expression | 'sizeof' unary_expression | 'sizeof' '(' type_name ')' );", 64, 0, self.input)

                    raise nvae

                if alt64 == 1:
                    # C.g:338:4: postfix_expression
                    self.following.append(self.FOLLOW_postfix_expression_in_unary_expression1289)
                    self.postfix_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt64 == 2:
                    # C.g:339:4: '++' unary_expression
                    self.match(self.input, 71, self.FOLLOW_71_in_unary_expression1294)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1296)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt64 == 3:
                    # C.g:340:4: '--' unary_expression
                    self.match(self.input, 72, self.FOLLOW_72_in_unary_expression1301)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1303)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt64 == 4:
                    # C.g:341:4: unary_operator cast_expression
                    self.following.append(self.FOLLOW_unary_operator_in_unary_expression1308)
                    self.unary_operator()
                    self.following.pop()
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_cast_expression_in_unary_expression1310)
                    self.cast_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt64 == 5:
                    # C.g:342:4: 'sizeof' unary_expression
                    self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1315)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_unary_expression_in_unary_expression1317)
                    self.unary_expression()
                    self.following.pop()
                    if self.failed:
                        return


                elif alt64 == 6:
                    # C.g:343:4: 'sizeof' '(' type_name ')'
                    self.match(self.input, 73, self.FOLLOW_73_in_unary_expression1322)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_unary_expression1324)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_type_name_in_unary_expression1326)
                    self.type_name()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_unary_expression1328)
                    if self.failed:
                        return



            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 40, unary_expression_StartIndex)

            pass

        return

    # $ANTLR end unary_expression


    # $ANTLR start postfix_expression
    # C.g:346:1: postfix_expression : p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* ;
    def postfix_expression(self, ):
        """Parse rule postfix_expression (C.g:346).

        ANTLR-generated.  Besides parsing, records function-call sites via
        self.StoreFunctionCalling(...) as a side effect when not backtracking.
        """
        self.postfix_expression_stack.append(postfix_expression_scope())
        postfix_expression_StartIndex = self.input.index()
        # Grammar labels: a/b = ')' tokens closing a call, x/y/z = member or
        # pointer identifiers, p = primary_expression result, c = argument list.
        a = None
        b = None
        x = None
        y = None
        z = None
        p = None

        c = None

        # Accumulates the textual name of the (possibly chained) callee,
        # e.g. "obj.field" or "ptr->fn", across the postfix operators below.
        self.postfix_expression_stack[-1].FuncCallText = ''

        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 41):
                    return

                # C.g:353:2: (p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )* )
                # C.g:353:6: p= primary_expression ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
                self.following.append(self.FOLLOW_primary_expression_in_postfix_expression1352)
                p = self.primary_expression()
                self.following.pop()
                if self.failed:
                    return
                if self.backtracking == 0:
                    self.postfix_expression_stack[-1].FuncCallText += self.input.toString(p.start,p.stop)

                # C.g:354:9: ( '[' expression ']' | '(' a= ')' | '(' c= argument_expression_list b= ')' | '(' macro_parameter_list ')' | '.' x= IDENTIFIER | '*' y= IDENTIFIER | '->' z= IDENTIFIER | '++' | '--' )*
                while True: #loop65
                    alt65 = 10
                    LA65 = self.input.LA(1)
                    if LA65 == 65:
                        LA65_1 = self.input.LA(2)

                        if (LA65_1 == IDENTIFIER) :
                            LA65_30 = self.input.LA(3)

                            if (self.synpred119()) :
                                alt65 = 6




                    elif LA65 == 63:
                        alt65 = 1
                    elif LA65 == 61:
                        LA65 = self.input.LA(2)
                        if LA65 == 62:
                            alt65 = 2
                        elif LA65 == IDENTIFIER:
                            LA65_43 = self.input.LA(3)

                            if (self.synpred116()) :
                                alt65 = 3
                            elif (self.synpred117()) :
                                alt65 = 4


                        elif LA65 == HEX_LITERAL or LA65 == OCTAL_LITERAL or LA65 == DECIMAL_LITERAL or LA65 == CHARACTER_LITERAL or LA65 == STRING_LITERAL or LA65 == FLOATING_POINT_LITERAL or LA65 == 61 or LA65 == 67 or LA65 == 68 or LA65 == 71 or LA65 == 72 or LA65 == 73 or LA65 == 76 or LA65 == 77 or LA65 == 78:
                            alt65 = 3
                        elif LA65 == 65:
                            LA65_53 = self.input.LA(3)

                            if (self.synpred116()) :
                                alt65 = 3
                            elif (self.synpred117()) :
                                alt65 = 4


                        elif LA65 == 29 or LA65 == 30 or LA65 == 31 or LA65 == 32 or LA65 == 33 or LA65 == 34 or LA65 == 35 or LA65 == 36 or LA65 == 37 or LA65 == 38 or LA65 == 39 or LA65 == 40 or LA65 == 41 or LA65 == 42 or LA65 == 45 or LA65 == 46 or LA65 == 48 or LA65 == 49 or LA65 == 50 or LA65 == 51 or LA65 == 52 or LA65 == 53 or LA65 == 54 or LA65 == 55 or LA65 == 56 or LA65 == 57 or LA65 == 58 or LA65 == 59 or LA65 == 60:
                            alt65 = 4

                    elif LA65 == 74:
                        alt65 = 5
                    elif LA65 == 75:
                        alt65 = 7
                    elif LA65 == 71:
                        alt65 = 8
                    elif LA65 == 72:
                        alt65 = 9

                    if alt65 == 1:
                        # C.g:354:13: '[' expression ']'
                        self.match(self.input, 63, self.FOLLOW_63_in_postfix_expression1368)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_expression_in_postfix_expression1370)
                        self.expression()
                        self.following.pop()
                        if self.failed:
                            return
                        self.match(self.input, 64, self.FOLLOW_64_in_postfix_expression1372)
                        if self.failed:
                            return


                    elif alt65 == 2:
                        # C.g:355:13: '(' a= ')'
                        self.match(self.input, 61, self.FOLLOW_61_in_postfix_expression1386)
                        if self.failed:
                            return
                        a = self.input.LT(1)
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1390)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            # Zero-argument call: record the call site with an
                            # empty argument string.
                            self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, a.line, a.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, '')



                    elif alt65 == 3:
                        # C.g:356:13: '(' c= argument_expression_list b= ')'
                        self.match(self.input, 61, self.FOLLOW_61_in_postfix_expression1405)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_argument_expression_list_in_postfix_expression1409)
                        c = self.argument_expression_list()
                        self.following.pop()
                        if self.failed:
                            return
                        b = self.input.LT(1)
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1413)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.StoreFunctionCalling(p.start.line, p.start.charPositionInLine, b.line, b.charPositionInLine, self.postfix_expression_stack[-1].FuncCallText, self.input.toString(c.start,c.stop))



                    elif alt65 == 4:
                        # C.g:357:13: '(' macro_parameter_list ')'
                        self.match(self.input, 61, self.FOLLOW_61_in_postfix_expression1429)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_macro_parameter_list_in_postfix_expression1431)
                        self.macro_parameter_list()
                        self.following.pop()
                        if self.failed:
                            return
                        self.match(self.input, 62, self.FOLLOW_62_in_postfix_expression1433)
                        if self.failed:
                            return


                    elif alt65 == 5:
                        # C.g:358:13: '.' x= IDENTIFIER
                        self.match(self.input, 74, self.FOLLOW_74_in_postfix_expression1447)
                        if self.failed:
                            return
                        x = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1451)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.postfix_expression_stack[-1].FuncCallText += '.' + x.text



                    elif alt65 == 6:
                        # C.g:359:13: '*' y= IDENTIFIER
                        self.match(self.input, 65, self.FOLLOW_65_in_postfix_expression1467)
                        if self.failed:
                            return
                        y = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1471)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            # '*' resets the accumulated callee name
                            # (plain assignment, not +=).
                            self.postfix_expression_stack[-1].FuncCallText = y.text



                    elif alt65 == 7:
                        # C.g:360:13: '->' z= IDENTIFIER
                        self.match(self.input, 75, self.FOLLOW_75_in_postfix_expression1487)
                        if self.failed:
                            return
                        z = self.input.LT(1)
                        self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_postfix_expression1491)
                        if self.failed:
                            return
                        if self.backtracking == 0:
                            self.postfix_expression_stack[-1].FuncCallText += '->' + z.text



                    elif alt65 == 8:
                        # C.g:361:13: '++'
                        self.match(self.input, 71, self.FOLLOW_71_in_postfix_expression1507)
                        if self.failed:
                            return


                    elif alt65 == 9:
                        # C.g:362:13: '--'
                        self.match(self.input, 72, self.FOLLOW_72_in_postfix_expression1521)
                        if self.failed:
                            return


                    else:
                        break #loop65




            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 41, postfix_expression_StartIndex)

            # Pop this rule's dynamic scope even on error paths.
            self.postfix_expression_stack.pop()
            pass

        return

    # $ANTLR end postfix_expression


    # $ANTLR start macro_parameter_list
    # C.g:366:1: macro_parameter_list : parameter_declaration ( ',' parameter_declaration )* ;
    def macro_parameter_list(self, ):
        """Parse rule macro_parameter_list (C.g:366).

        ANTLR-generated: one parameter_declaration followed by any number of
        ',' (token 27) parameter_declaration pairs.
        """

        macro_parameter_list_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 42):
                    return

                # C.g:367:2: ( parameter_declaration ( ',' parameter_declaration )* )
                # C.g:367:4: parameter_declaration ( ',' parameter_declaration )*
                self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1544)
                self.parameter_declaration()
                self.following.pop()
                if self.failed:
                    return
                # C.g:367:26: ( ',' parameter_declaration )*
                while True: #loop66
                    alt66 = 2
                    LA66_0 = self.input.LA(1)

                    if (LA66_0 == 27) :
                        alt66 = 1


                    if alt66 == 1:
                        # C.g:367:27: ',' parameter_declaration
                        self.match(self.input, 27, self.FOLLOW_27_in_macro_parameter_list1547)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_parameter_declaration_in_macro_parameter_list1549)
                        self.parameter_declaration()
                        self.following.pop()
                        if self.failed:
                            return


                    else:
                        break #loop66




            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 42, macro_parameter_list_StartIndex)

            pass

        return

    # $ANTLR end macro_parameter_list


    # $ANTLR start unary_operator
    # C.g:370:1: unary_operator : ( '&' | '*' | '+' | '-' | '~' | '!' );
    def unary_operator(self, ):
        """Match a single unary-operator token (C.g:370).  ANTLR-generated."""

        unary_operator_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 43):
                    return

                # C.g:371:2: ( '&' | '*' | '+' | '-' | '~' | '!' )
) + # C.g: + if self.input.LA(1) == 65 or (67 <= self.input.LA(1) <= 68) or (76 <= self.input.LA(1) <= 78): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_unary_operator0 + ) + raise mse + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 43, unary_operator_StartIndex) + + pass + + return + + # $ANTLR end unary_operator + + class primary_expression_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start primary_expression + # C.g:379:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' ); + def primary_expression(self, ): + + retval = self.primary_expression_return() + retval.start = self.input.LT(1) + primary_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 44): + return retval + + # C.g:380:2: ( IDENTIFIER | constant | '(' expression ')' ) + alt67 = 3 + LA67 = self.input.LA(1) + if LA67 == IDENTIFIER: + LA67_1 = self.input.LA(2) + + if (LA67_1 == IDENTIFIER or LA67_1 == STRING_LITERAL) : + alt67 = 2 + elif (LA67_1 == EOF or LA67_1 == 25 or (27 <= LA67_1 <= 28) or LA67_1 == 44 or LA67_1 == 47 or LA67_1 == 53 or (61 <= LA67_1 <= 65) or (67 <= LA67_1 <= 72) or (74 <= LA67_1 <= 76) or (79 <= LA67_1 <= 101)) : + alt67 = 1 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("379:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 1, self.input) + + raise nvae + + elif LA67 == HEX_LITERAL or LA67 == OCTAL_LITERAL or LA67 == DECIMAL_LITERAL or LA67 == CHARACTER_LITERAL or LA67 == STRING_LITERAL or LA67 == FLOATING_POINT_LITERAL: + 
alt67 = 2 + elif LA67 == 61: + alt67 = 3 + else: + if self.backtracking > 0: + self.failed = True + return retval + + nvae = NoViableAltException("379:1: primary_expression : ( IDENTIFIER | constant | '(' expression ')' );", 67, 0, self.input) + + raise nvae + + if alt67 == 1: + # C.g:380:4: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_primary_expression1598) + if self.failed: + return retval + + + elif alt67 == 2: + # C.g:381:4: constant + self.following.append(self.FOLLOW_constant_in_primary_expression1603) + self.constant() + self.following.pop() + if self.failed: + return retval + + + elif alt67 == 3: + # C.g:382:4: '(' expression ')' + self.match(self.input, 61, self.FOLLOW_61_in_primary_expression1608) + if self.failed: + return retval + self.following.append(self.FOLLOW_expression_in_primary_expression1610) + self.expression() + self.following.pop() + if self.failed: + return retval + self.match(self.input, 62, self.FOLLOW_62_in_primary_expression1612) + if self.failed: + return retval + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 44, primary_expression_StartIndex) + + pass + + return retval + + # $ANTLR end primary_expression + + + # $ANTLR start constant + # C.g:385:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL ); + def constant(self, ): + + constant_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 45): + return + + # C.g:386:5: ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL ) + alt72 = 6 + LA72 = self.input.LA(1) + if LA72 == HEX_LITERAL: + alt72 = 1 + elif LA72 == OCTAL_LITERAL: + alt72 = 2 + 
elif LA72 == DECIMAL_LITERAL: + alt72 = 3 + elif LA72 == CHARACTER_LITERAL: + alt72 = 4 + elif LA72 == IDENTIFIER or LA72 == STRING_LITERAL: + alt72 = 5 + elif LA72 == FLOATING_POINT_LITERAL: + alt72 = 6 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("385:1: constant : ( HEX_LITERAL | OCTAL_LITERAL | DECIMAL_LITERAL | CHARACTER_LITERAL | ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* | FLOATING_POINT_LITERAL );", 72, 0, self.input) + + raise nvae + + if alt72 == 1: + # C.g:386:9: HEX_LITERAL + self.match(self.input, HEX_LITERAL, self.FOLLOW_HEX_LITERAL_in_constant1628) + if self.failed: + return + + + elif alt72 == 2: + # C.g:387:9: OCTAL_LITERAL + self.match(self.input, OCTAL_LITERAL, self.FOLLOW_OCTAL_LITERAL_in_constant1638) + if self.failed: + return + + + elif alt72 == 3: + # C.g:388:9: DECIMAL_LITERAL + self.match(self.input, DECIMAL_LITERAL, self.FOLLOW_DECIMAL_LITERAL_in_constant1648) + if self.failed: + return + + + elif alt72 == 4: + # C.g:389:7: CHARACTER_LITERAL + self.match(self.input, CHARACTER_LITERAL, self.FOLLOW_CHARACTER_LITERAL_in_constant1656) + if self.failed: + return + + + elif alt72 == 5: + # C.g:390:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ ( IDENTIFIER )* + # C.g:390:7: ( ( IDENTIFIER )* ( STRING_LITERAL )+ )+ + cnt70 = 0 + while True: #loop70 + alt70 = 2 + LA70_0 = self.input.LA(1) + + if (LA70_0 == IDENTIFIER) : + LA70_1 = self.input.LA(2) + + if (LA70_1 == IDENTIFIER) : + LA70_61 = self.input.LA(3) + + if (self.synpred137()) : + alt70 = 1 + + + elif (LA70_1 == STRING_LITERAL) : + alt70 = 1 + + + elif (LA70_0 == STRING_LITERAL) : + alt70 = 1 + + + if alt70 == 1: + # C.g:390:8: ( IDENTIFIER )* ( STRING_LITERAL )+ + # C.g:390:8: ( IDENTIFIER )* + while True: #loop68 + alt68 = 2 + LA68_0 = self.input.LA(1) + + if (LA68_0 == IDENTIFIER) : + alt68 = 1 + + + if alt68 == 1: + # C.g:0:0: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1665) + if 
self.failed: + return + + + else: + break #loop68 + + + # C.g:390:20: ( STRING_LITERAL )+ + cnt69 = 0 + while True: #loop69 + alt69 = 2 + LA69_0 = self.input.LA(1) + + if (LA69_0 == STRING_LITERAL) : + LA69_31 = self.input.LA(2) + + if (self.synpred136()) : + alt69 = 1 + + + + + if alt69 == 1: + # C.g:0:0: STRING_LITERAL + self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_constant1668) + if self.failed: + return + + + else: + if cnt69 >= 1: + break #loop69 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(69, self.input) + raise eee + + cnt69 += 1 + + + + + else: + if cnt70 >= 1: + break #loop70 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(70, self.input) + raise eee + + cnt70 += 1 + + + # C.g:390:38: ( IDENTIFIER )* + while True: #loop71 + alt71 = 2 + LA71_0 = self.input.LA(1) + + if (LA71_0 == IDENTIFIER) : + alt71 = 1 + + + if alt71 == 1: + # C.g:0:0: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_constant1673) + if self.failed: + return + + + else: + break #loop71 + + + + + elif alt72 == 6: + # C.g:391:9: FLOATING_POINT_LITERAL + self.match(self.input, FLOATING_POINT_LITERAL, self.FOLLOW_FLOATING_POINT_LITERAL_in_constant1684) + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 45, constant_StartIndex) + + pass + + return + + # $ANTLR end constant + + class expression_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start expression + # C.g:396:1: expression : assignment_expression ( ',' assignment_expression )* ; + def expression(self, ): + + retval = self.expression_return() + retval.start = self.input.LT(1) + expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 46): + return retval + + 
# C.g:397:2: ( assignment_expression ( ',' assignment_expression )* ) + # C.g:397:4: assignment_expression ( ',' assignment_expression )* + self.following.append(self.FOLLOW_assignment_expression_in_expression1700) + self.assignment_expression() + self.following.pop() + if self.failed: + return retval + # C.g:397:26: ( ',' assignment_expression )* + while True: #loop73 + alt73 = 2 + LA73_0 = self.input.LA(1) + + if (LA73_0 == 27) : + alt73 = 1 + + + if alt73 == 1: + # C.g:397:27: ',' assignment_expression + self.match(self.input, 27, self.FOLLOW_27_in_expression1703) + if self.failed: + return retval + self.following.append(self.FOLLOW_assignment_expression_in_expression1705) + self.assignment_expression() + self.following.pop() + if self.failed: + return retval + + + else: + break #loop73 + + + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 46, expression_StartIndex) + + pass + + return retval + + # $ANTLR end expression + + + # $ANTLR start constant_expression + # C.g:400:1: constant_expression : conditional_expression ; + def constant_expression(self, ): + + constant_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 47): + return + + # C.g:401:2: ( conditional_expression ) + # C.g:401:4: conditional_expression + self.following.append(self.FOLLOW_conditional_expression_in_constant_expression1718) + self.conditional_expression() + self.following.pop() + if self.failed: + return + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 47, constant_expression_StartIndex) + + pass + + return + + # $ANTLR end constant_expression + + + # $ANTLR start assignment_expression + # C.g:404:1: assignment_expression : ( lvalue assignment_operator 
assignment_expression | conditional_expression ); + def assignment_expression(self, ): + + assignment_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 48): + return + + # C.g:405:2: ( lvalue assignment_operator assignment_expression | conditional_expression ) + alt74 = 2 + LA74 = self.input.LA(1) + if LA74 == IDENTIFIER: + LA74 = self.input.LA(2) + if LA74 == STRING_LITERAL: + LA74_13 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 13, self.input) + + raise nvae + + elif LA74 == IDENTIFIER: + LA74_14 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 14, self.input) + + raise nvae + + elif LA74 == 63: + LA74_15 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 15, self.input) + + raise nvae + + elif LA74 == 61: + LA74_16 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 16, self.input) + + raise nvae + + elif LA74 == 74: + LA74_17 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif 
(True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 17, self.input) + + raise nvae + + elif LA74 == 65: + LA74_18 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 18, self.input) + + raise nvae + + elif LA74 == 75: + LA74_19 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 19, self.input) + + raise nvae + + elif LA74 == 71: + LA74_20 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 20, self.input) + + raise nvae + + elif LA74 == 72: + LA74_21 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 21, self.input) + + raise nvae + + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 
or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 1, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74 = self.input.LA(2) + if LA74 == 63: + LA74_44 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 44, self.input) + + raise nvae + + elif LA74 == 61: + LA74_45 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 45, self.input) + + raise nvae + + elif LA74 == 74: + LA74_46 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 46, self.input) + + raise nvae + + elif LA74 == 65: + LA74_47 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | 
conditional_expression );", 74, 47, self.input) + + raise nvae + + elif LA74 == 75: + LA74_48 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 48, self.input) + + raise nvae + + elif LA74 == 71: + LA74_49 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 49, self.input) + + raise nvae + + elif LA74 == 72: + LA74_50 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 50, self.input) + + raise nvae + + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 2, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74 
= self.input.LA(2) + if LA74 == 63: + LA74_73 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 73, self.input) + + raise nvae + + elif LA74 == 61: + LA74_74 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 74, self.input) + + raise nvae + + elif LA74 == 74: + LA74_75 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 75, self.input) + + raise nvae + + elif LA74 == 65: + LA74_76 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 76, self.input) + + raise nvae + + elif LA74 == 75: + LA74_77 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 77, self.input) + + raise nvae + + elif LA74 == 71: + LA74_78 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + 
self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 78, self.input) + + raise nvae + + elif LA74 == 72: + LA74_79 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 79, self.input) + + raise nvae + + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 3, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74 = self.input.LA(2) + if LA74 == 63: + LA74_102 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 102, self.input) + + raise nvae + + elif LA74 == 61: + LA74_103 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = 
NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 103, self.input) + + raise nvae + + elif LA74 == 74: + LA74_104 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 104, self.input) + + raise nvae + + elif LA74 == 65: + LA74_105 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 105, self.input) + + raise nvae + + elif LA74 == 75: + LA74_106 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 106, self.input) + + raise nvae + + elif LA74 == 71: + LA74_107 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 107, self.input) + + raise nvae + + elif LA74 == 72: + LA74_108 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 108, self.input) + + raise 
nvae + + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 4, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74 = self.input.LA(2) + if LA74 == 63: + LA74_131 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 131, self.input) + + raise nvae + + elif LA74 == 61: + LA74_132 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 132, self.input) + + raise nvae + + elif LA74 == 74: + LA74_133 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 133, self.input) + + raise nvae + + elif LA74 == 65: + LA74_134 = 
self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 134, self.input) + + raise nvae + + elif LA74 == 75: + LA74_135 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 135, self.input) + + raise nvae + + elif LA74 == 71: + LA74_136 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 136, self.input) + + raise nvae + + elif LA74 == 72: + LA74_137 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 137, self.input) + + raise nvae + + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + else: 
+ if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 5, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74 = self.input.LA(2) + if LA74 == IDENTIFIER: + LA74_160 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 160, self.input) + + raise nvae + + elif LA74 == 63: + LA74_161 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 161, self.input) + + raise nvae + + elif LA74 == 61: + LA74_162 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 162, self.input) + + raise nvae + + elif LA74 == 74: + LA74_163 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 163, self.input) + + raise nvae + + elif LA74 == 65: + LA74_164 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: 
assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 164, self.input) + + raise nvae + + elif LA74 == 75: + LA74_165 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 165, self.input) + + raise nvae + + elif LA74 == 71: + LA74_166 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 166, self.input) + + raise nvae + + elif LA74 == 72: + LA74_167 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 167, self.input) + + raise nvae + + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + elif LA74 == STRING_LITERAL: + LA74_169 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 169, self.input) + + raise nvae + + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or 
LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 6, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74 = self.input.LA(2) + if LA74 == 63: + LA74_191 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 191, self.input) + + raise nvae + + elif LA74 == 61: + LA74_192 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 192, self.input) + + raise nvae + + elif LA74 == 74: + LA74_193 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 193, self.input) + + raise nvae + + elif LA74 == 65: + LA74_194 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 194, self.input) + + raise nvae + + elif LA74 == 75: + LA74_195 = 
self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 195, self.input) + + raise nvae + + elif LA74 == 71: + LA74_196 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 196, self.input) + + raise nvae + + elif LA74 == 72: + LA74_197 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 197, self.input) + + raise nvae + + elif LA74 == EOF or LA74 == 25 or LA74 == 27 or LA74 == 44 or LA74 == 47 or LA74 == 53 or LA74 == 62 or LA74 == 64 or LA74 == 67 or LA74 == 68 or LA74 == 69 or LA74 == 70 or LA74 == 76 or LA74 == 89 or LA74 == 90 or LA74 == 91 or LA74 == 92 or LA74 == 93 or LA74 == 94 or LA74 == 95 or LA74 == 96 or LA74 == 97 or LA74 == 98 or LA74 == 99 or LA74 == 100 or LA74 == 101: + alt74 = 2 + elif LA74 == 28 or LA74 == 79 or LA74 == 80 or LA74 == 81 or LA74 == 82 or LA74 == 83 or LA74 == 84 or LA74 == 85 or LA74 == 86 or LA74 == 87 or LA74 == 88: + alt74 = 1 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 7, self.input) + + raise nvae + + elif LA74 == 61: + LA74 = self.input.LA(2) + if LA74 == IDENTIFIER: + LA74_220 = self.input.LA(3) + + if (self.synpred141()) : 
+ alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 220, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74_221 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 221, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74_222 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 222, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74_223 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 223, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74_224 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 224, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74_225 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + 
return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 225, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74_226 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 226, self.input) + + raise nvae + + elif LA74 == 61: + LA74_227 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 227, self.input) + + raise nvae + + elif LA74 == 71: + LA74_228 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 228, self.input) + + raise nvae + + elif LA74 == 72: + LA74_229 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 229, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74_230 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( 
lvalue assignment_operator assignment_expression | conditional_expression );", 74, 230, self.input) + + raise nvae + + elif LA74 == 73: + LA74_231 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 231, self.input) + + raise nvae + + elif LA74 == 34 or LA74 == 35 or LA74 == 36 or LA74 == 37 or LA74 == 38 or LA74 == 39 or LA74 == 40 or LA74 == 41 or LA74 == 42 or LA74 == 45 or LA74 == 46 or LA74 == 48 or LA74 == 49 or LA74 == 50 or LA74 == 51 or LA74 == 52 or LA74 == 53 or LA74 == 54 or LA74 == 55 or LA74 == 56 or LA74 == 57 or LA74 == 58 or LA74 == 59 or LA74 == 60: + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 8, self.input) + + raise nvae + + elif LA74 == 71: + LA74 = self.input.LA(2) + if LA74 == IDENTIFIER: + LA74_244 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 244, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74_245 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 245, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74_246 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + 
alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 246, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74_247 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 247, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74_248 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 248, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74_249 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 249, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74_250 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 250, self.input) + + raise nvae + + elif LA74 == 61: + LA74_251 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = 
NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 251, self.input) + + raise nvae + + elif LA74 == 71: + LA74_252 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 252, self.input) + + raise nvae + + elif LA74 == 72: + LA74_253 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 253, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74_254 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 254, self.input) + + raise nvae + + elif LA74 == 73: + LA74_255 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 255, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 9, self.input) + + raise nvae + + elif LA74 == 72: + LA74 = 
self.input.LA(2) + if LA74 == IDENTIFIER: + LA74_256 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 256, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74_257 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 257, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74_258 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 258, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74_259 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 259, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74_260 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 260, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74_261 = self.input.LA(3) + + if (self.synpred141()) : + 
alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 261, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74_262 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 262, self.input) + + raise nvae + + elif LA74 == 61: + LA74_263 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 263, self.input) + + raise nvae + + elif LA74 == 71: + LA74_264 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 264, self.input) + + raise nvae + + elif LA74 == 72: + LA74_265 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 265, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74_266 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: 
+ self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 266, self.input) + + raise nvae + + elif LA74 == 73: + LA74_267 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 267, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 10, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74 = self.input.LA(2) + if LA74 == 61: + LA74_268 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 268, self.input) + + raise nvae + + elif LA74 == IDENTIFIER: + LA74_269 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 269, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74_270 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | 
conditional_expression );", 74, 270, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74_271 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 271, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74_272 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 272, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74_273 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 273, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74_274 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 274, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74_275 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 275, self.input) + + raise nvae + + elif LA74 == 71: + 
LA74_276 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 276, self.input) + + raise nvae + + elif LA74 == 72: + LA74_277 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 277, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74_278 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 278, self.input) + + raise nvae + + elif LA74 == 73: + LA74_279 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 279, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 11, self.input) + + raise nvae + + elif LA74 == 73: + LA74 = self.input.LA(2) + if LA74 == 61: + LA74_280 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + 
nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 280, self.input) + + raise nvae + + elif LA74 == IDENTIFIER: + LA74_281 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 281, self.input) + + raise nvae + + elif LA74 == HEX_LITERAL: + LA74_282 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 282, self.input) + + raise nvae + + elif LA74 == OCTAL_LITERAL: + LA74_283 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 283, self.input) + + raise nvae + + elif LA74 == DECIMAL_LITERAL: + LA74_284 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 284, self.input) + + raise nvae + + elif LA74 == CHARACTER_LITERAL: + LA74_285 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator 
assignment_expression | conditional_expression );", 74, 285, self.input) + + raise nvae + + elif LA74 == STRING_LITERAL: + LA74_286 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 286, self.input) + + raise nvae + + elif LA74 == FLOATING_POINT_LITERAL: + LA74_287 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 287, self.input) + + raise nvae + + elif LA74 == 71: + LA74_288 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 288, self.input) + + raise nvae + + elif LA74 == 72: + LA74_289 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 289, self.input) + + raise nvae + + elif LA74 == 65 or LA74 == 67 or LA74 == 68 or LA74 == 76 or LA74 == 77 or LA74 == 78: + LA74_290 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 290, 
self.input) + + raise nvae + + elif LA74 == 73: + LA74_291 = self.input.LA(3) + + if (self.synpred141()) : + alt74 = 1 + elif (True) : + alt74 = 2 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 291, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 12, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("404:1: assignment_expression : ( lvalue assignment_operator assignment_expression | conditional_expression );", 74, 0, self.input) + + raise nvae + + if alt74 == 1: + # C.g:405:4: lvalue assignment_operator assignment_expression + self.following.append(self.FOLLOW_lvalue_in_assignment_expression1729) + self.lvalue() + self.following.pop() + if self.failed: + return + self.following.append(self.FOLLOW_assignment_operator_in_assignment_expression1731) + self.assignment_operator() + self.following.pop() + if self.failed: + return + self.following.append(self.FOLLOW_assignment_expression_in_assignment_expression1733) + self.assignment_expression() + self.following.pop() + if self.failed: + return + + + elif alt74 == 2: + # C.g:406:4: conditional_expression + self.following.append(self.FOLLOW_conditional_expression_in_assignment_expression1738) + self.conditional_expression() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 48, assignment_expression_StartIndex) + + pass + + return + + # $ANTLR end assignment_expression + + + # $ANTLR start lvalue + # C.g:409:1: lvalue : 
unary_expression ; + def lvalue(self, ): + + lvalue_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 49): + return + + # C.g:410:2: ( unary_expression ) + # C.g:410:4: unary_expression + self.following.append(self.FOLLOW_unary_expression_in_lvalue1750) + self.unary_expression() + self.following.pop() + if self.failed: + return + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 49, lvalue_StartIndex) + + pass + + return + + # $ANTLR end lvalue + + + # $ANTLR start assignment_operator + # C.g:413:1: assignment_operator : ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' ); + def assignment_operator(self, ): + + assignment_operator_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 50): + return + + # C.g:414:2: ( '=' | '*=' | '/=' | '%=' | '+=' | '-=' | '<<=' | '>>=' | '&=' | '^=' | '|=' ) + # C.g: + if self.input.LA(1) == 28 or (79 <= self.input.LA(1) <= 88): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_assignment_operator0 + ) + raise mse + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 50, assignment_operator_StartIndex) + + pass + + return + + # $ANTLR end assignment_operator + + + # $ANTLR start conditional_expression + # C.g:427:1: conditional_expression : e= logical_or_expression ( '?' expression ':' conditional_expression )? 
; + def conditional_expression(self, ): + + conditional_expression_StartIndex = self.input.index() + e = None + + + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 51): + return + + # C.g:428:2: (e= logical_or_expression ( '?' expression ':' conditional_expression )? ) + # C.g:428:4: e= logical_or_expression ( '?' expression ':' conditional_expression )? + self.following.append(self.FOLLOW_logical_or_expression_in_conditional_expression1824) + e = self.logical_or_expression() + self.following.pop() + if self.failed: + return + # C.g:428:28: ( '?' expression ':' conditional_expression )? + alt75 = 2 + LA75_0 = self.input.LA(1) + + if (LA75_0 == 89) : + alt75 = 1 + if alt75 == 1: + # C.g:428:29: '?' expression ':' conditional_expression + self.match(self.input, 89, self.FOLLOW_89_in_conditional_expression1827) + if self.failed: + return + self.following.append(self.FOLLOW_expression_in_conditional_expression1829) + self.expression() + self.following.pop() + if self.failed: + return + self.match(self.input, 47, self.FOLLOW_47_in_conditional_expression1831) + if self.failed: + return + self.following.append(self.FOLLOW_conditional_expression_in_conditional_expression1833) + self.conditional_expression() + self.following.pop() + if self.failed: + return + if self.backtracking == 0: + self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop)) + + + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 51, conditional_expression_StartIndex) + + pass + + return + + # $ANTLR end conditional_expression + + class logical_or_expression_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start logical_or_expression + # C.g:431:1: logical_or_expression : logical_and_expression ( '||' logical_and_expression )* ; 
+ def logical_or_expression(self, ): + + retval = self.logical_or_expression_return() + retval.start = self.input.LT(1) + logical_or_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 52): + return retval + + # C.g:432:2: ( logical_and_expression ( '||' logical_and_expression )* ) + # C.g:432:4: logical_and_expression ( '||' logical_and_expression )* + self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1848) + self.logical_and_expression() + self.following.pop() + if self.failed: + return retval + # C.g:432:27: ( '||' logical_and_expression )* + while True: #loop76 + alt76 = 2 + LA76_0 = self.input.LA(1) + + if (LA76_0 == 90) : + alt76 = 1 + + + if alt76 == 1: + # C.g:432:28: '||' logical_and_expression + self.match(self.input, 90, self.FOLLOW_90_in_logical_or_expression1851) + if self.failed: + return retval + self.following.append(self.FOLLOW_logical_and_expression_in_logical_or_expression1853) + self.logical_and_expression() + self.following.pop() + if self.failed: + return retval + + + else: + break #loop76 + + + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 52, logical_or_expression_StartIndex) + + pass + + return retval + + # $ANTLR end logical_or_expression + + + # $ANTLR start logical_and_expression + # C.g:435:1: logical_and_expression : inclusive_or_expression ( '&&' inclusive_or_expression )* ; + def logical_and_expression(self, ): + + logical_and_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 53): + return + + # C.g:436:2: ( inclusive_or_expression ( '&&' inclusive_or_expression )* ) + # C.g:436:4: inclusive_or_expression ( '&&' inclusive_or_expression )* + 
self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1866) + self.inclusive_or_expression() + self.following.pop() + if self.failed: + return + # C.g:436:28: ( '&&' inclusive_or_expression )* + while True: #loop77 + alt77 = 2 + LA77_0 = self.input.LA(1) + + if (LA77_0 == 91) : + alt77 = 1 + + + if alt77 == 1: + # C.g:436:29: '&&' inclusive_or_expression + self.match(self.input, 91, self.FOLLOW_91_in_logical_and_expression1869) + if self.failed: + return + self.following.append(self.FOLLOW_inclusive_or_expression_in_logical_and_expression1871) + self.inclusive_or_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop77 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 53, logical_and_expression_StartIndex) + + pass + + return + + # $ANTLR end logical_and_expression + + + # $ANTLR start inclusive_or_expression + # C.g:439:1: inclusive_or_expression : exclusive_or_expression ( '|' exclusive_or_expression )* ; + def inclusive_or_expression(self, ): + + inclusive_or_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 54): + return + + # C.g:440:2: ( exclusive_or_expression ( '|' exclusive_or_expression )* ) + # C.g:440:4: exclusive_or_expression ( '|' exclusive_or_expression )* + self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1884) + self.exclusive_or_expression() + self.following.pop() + if self.failed: + return + # C.g:440:28: ( '|' exclusive_or_expression )* + while True: #loop78 + alt78 = 2 + LA78_0 = self.input.LA(1) + + if (LA78_0 == 92) : + alt78 = 1 + + + if alt78 == 1: + # C.g:440:29: '|' exclusive_or_expression + self.match(self.input, 92, self.FOLLOW_92_in_inclusive_or_expression1887) + if self.failed: + return + 
self.following.append(self.FOLLOW_exclusive_or_expression_in_inclusive_or_expression1889) + self.exclusive_or_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop78 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 54, inclusive_or_expression_StartIndex) + + pass + + return + + # $ANTLR end inclusive_or_expression + + + # $ANTLR start exclusive_or_expression + # C.g:443:1: exclusive_or_expression : and_expression ( '^' and_expression )* ; + def exclusive_or_expression(self, ): + + exclusive_or_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 55): + return + + # C.g:444:2: ( and_expression ( '^' and_expression )* ) + # C.g:444:4: and_expression ( '^' and_expression )* + self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1902) + self.and_expression() + self.following.pop() + if self.failed: + return + # C.g:444:19: ( '^' and_expression )* + while True: #loop79 + alt79 = 2 + LA79_0 = self.input.LA(1) + + if (LA79_0 == 93) : + alt79 = 1 + + + if alt79 == 1: + # C.g:444:20: '^' and_expression + self.match(self.input, 93, self.FOLLOW_93_in_exclusive_or_expression1905) + if self.failed: + return + self.following.append(self.FOLLOW_and_expression_in_exclusive_or_expression1907) + self.and_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop79 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 55, exclusive_or_expression_StartIndex) + + pass + + return + + # $ANTLR end exclusive_or_expression + + + # $ANTLR start and_expression + # C.g:447:1: and_expression : equality_expression ( '&' equality_expression )* ; + def and_expression(self, ): + + and_expression_StartIndex = 
self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 56): + return + + # C.g:448:2: ( equality_expression ( '&' equality_expression )* ) + # C.g:448:4: equality_expression ( '&' equality_expression )* + self.following.append(self.FOLLOW_equality_expression_in_and_expression1920) + self.equality_expression() + self.following.pop() + if self.failed: + return + # C.g:448:24: ( '&' equality_expression )* + while True: #loop80 + alt80 = 2 + LA80_0 = self.input.LA(1) + + if (LA80_0 == 76) : + alt80 = 1 + + + if alt80 == 1: + # C.g:448:25: '&' equality_expression + self.match(self.input, 76, self.FOLLOW_76_in_and_expression1923) + if self.failed: + return + self.following.append(self.FOLLOW_equality_expression_in_and_expression1925) + self.equality_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop80 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 56, and_expression_StartIndex) + + pass + + return + + # $ANTLR end and_expression + + + # $ANTLR start equality_expression + # C.g:450:1: equality_expression : relational_expression ( ( '==' | '!=' ) relational_expression )* ; + def equality_expression(self, ): + + equality_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 57): + return + + # C.g:451:2: ( relational_expression ( ( '==' | '!=' ) relational_expression )* ) + # C.g:451:4: relational_expression ( ( '==' | '!=' ) relational_expression )* + self.following.append(self.FOLLOW_relational_expression_in_equality_expression1937) + self.relational_expression() + self.following.pop() + if self.failed: + return + # C.g:451:26: ( ( '==' | '!=' ) relational_expression )* + while True: #loop81 + alt81 = 2 + LA81_0 = self.input.LA(1) + + if ((94 <= LA81_0 <= 95)) : + alt81 = 1 + + + if alt81 == 1: + # 
C.g:451:27: ( '==' | '!=' ) relational_expression + if (94 <= self.input.LA(1) <= 95): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_equality_expression1940 + ) + raise mse + + + self.following.append(self.FOLLOW_relational_expression_in_equality_expression1946) + self.relational_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop81 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 57, equality_expression_StartIndex) + + pass + + return + + # $ANTLR end equality_expression + + + # $ANTLR start relational_expression + # C.g:454:1: relational_expression : shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ; + def relational_expression(self, ): + + relational_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 58): + return + + # C.g:455:2: ( shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* ) + # C.g:455:4: shift_expression ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* + self.following.append(self.FOLLOW_shift_expression_in_relational_expression1960) + self.shift_expression() + self.following.pop() + if self.failed: + return + # C.g:455:21: ( ( '<' | '>' | '<=' | '>=' ) shift_expression )* + while True: #loop82 + alt82 = 2 + LA82_0 = self.input.LA(1) + + if ((96 <= LA82_0 <= 99)) : + alt82 = 1 + + + if alt82 == 1: + # C.g:455:22: ( '<' | '>' | '<=' | '>=' ) shift_expression + if (96 <= self.input.LA(1) <= 99): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, 
self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_relational_expression1963 + ) + raise mse + + + self.following.append(self.FOLLOW_shift_expression_in_relational_expression1973) + self.shift_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop82 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 58, relational_expression_StartIndex) + + pass + + return + + # $ANTLR end relational_expression + + + # $ANTLR start shift_expression + # C.g:458:1: shift_expression : additive_expression ( ( '<<' | '>>' ) additive_expression )* ; + def shift_expression(self, ): + + shift_expression_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 59): + return + + # C.g:459:2: ( additive_expression ( ( '<<' | '>>' ) additive_expression )* ) + # C.g:459:4: additive_expression ( ( '<<' | '>>' ) additive_expression )* + self.following.append(self.FOLLOW_additive_expression_in_shift_expression1986) + self.additive_expression() + self.following.pop() + if self.failed: + return + # C.g:459:24: ( ( '<<' | '>>' ) additive_expression )* + while True: #loop83 + alt83 = 2 + LA83_0 = self.input.LA(1) + + if ((100 <= LA83_0 <= 101)) : + alt83 = 1 + + + if alt83 == 1: + # C.g:459:25: ( '<<' | '>>' ) additive_expression + if (100 <= self.input.LA(1) <= 101): + self.input.consume(); + self.errorRecovery = False + self.failed = False + + else: + if self.backtracking > 0: + self.failed = True + return + + mse = MismatchedSetException(None, self.input) + self.recoverFromMismatchedSet( + self.input, mse, self.FOLLOW_set_in_shift_expression1989 + ) + raise mse + + + self.following.append(self.FOLLOW_additive_expression_in_shift_expression1995) + self.additive_expression() + self.following.pop() + if self.failed: + return + + + else: + break #loop83 + + + + + + + 
except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 59, shift_expression_StartIndex) + + pass + + return + + # $ANTLR end shift_expression + + + # $ANTLR start statement + # C.g:464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration ); + def statement(self, ): + + statement_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 60): + return + + # C.g:465:2: ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration ) + alt84 = 11 + LA84 = self.input.LA(1) + if LA84 == IDENTIFIER: + LA84 = self.input.LA(2) + if LA84 == 47: + alt84 = 1 + elif LA84 == 61: + LA84_44 = self.input.LA(3) + + if (self.synpred168()) : + alt84 = 3 + elif (self.synpred172()) : + alt84 = 7 + elif (self.synpred173()) : + alt84 = 8 + elif (True) : + alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 44, self.input) + + raise nvae + + elif LA84 == STRING_LITERAL or LA84 == 27 or LA84 == 28 or LA84 == 63 or LA84 == 67 or LA84 == 68 or LA84 == 69 or LA84 == 70 or LA84 == 71 or LA84 == 72 or LA84 == 74 or LA84 == 75 or LA84 == 76 or LA84 == 79 or LA84 == 80 or LA84 == 81 or LA84 == 82 or LA84 == 83 or LA84 == 84 or LA84 == 85 or LA84 == 86 or LA84 == 87 or LA84 == 88 or LA84 == 89 or LA84 == 90 or LA84 == 91 or LA84 == 92 or LA84 == 93 or 
LA84 == 94 or LA84 == 95 or LA84 == 96 or LA84 == 97 or LA84 == 98 or LA84 == 99 or LA84 == 100 or LA84 == 101: + alt84 = 3 + elif LA84 == 65: + LA84_47 = self.input.LA(3) + + if (self.synpred168()) : + alt84 = 3 + elif (True) : + alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 47, self.input) + + raise nvae + + elif LA84 == 25: + LA84_65 = self.input.LA(3) + + if (self.synpred168()) : + alt84 = 3 + elif (True) : + alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 65, self.input) + + raise nvae + + elif LA84 == IDENTIFIER: + LA84_67 = self.input.LA(3) + + if (self.synpred168()) : + alt84 = 3 + elif (True) : + alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 67, self.input) + + raise nvae + + elif LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60: + 
alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 1, self.input) + + raise nvae + + elif LA84 == 105 or LA84 == 106: + alt84 = 1 + elif LA84 == 43: + alt84 = 2 + elif LA84 == HEX_LITERAL or LA84 == OCTAL_LITERAL or LA84 == DECIMAL_LITERAL or LA84 == CHARACTER_LITERAL or LA84 == STRING_LITERAL or LA84 == FLOATING_POINT_LITERAL or LA84 == 25 or LA84 == 61 or LA84 == 65 or LA84 == 67 or LA84 == 68 or LA84 == 71 or LA84 == 72 or LA84 == 73 or LA84 == 76 or LA84 == 77 or LA84 == 78: + alt84 = 3 + elif LA84 == 107 or LA84 == 109: + alt84 = 4 + elif LA84 == 110 or LA84 == 111 or LA84 == 112: + alt84 = 5 + elif LA84 == 113 or LA84 == 114 or LA84 == 115 or LA84 == 116: + alt84 = 6 + elif LA84 == 102: + alt84 = 8 + elif LA84 == 103: + alt84 = 9 + elif LA84 == 104: + alt84 = 10 + elif LA84 == 26 or LA84 == 29 or LA84 == 30 or LA84 == 31 or LA84 == 32 or LA84 == 33 or LA84 == 34 or LA84 == 35 or LA84 == 36 or LA84 == 37 or LA84 == 38 or LA84 == 39 or LA84 == 40 or LA84 == 41 or LA84 == 42 or LA84 == 45 or LA84 == 46 or LA84 == 48 or LA84 == 49 or LA84 == 50 or LA84 == 51 or LA84 == 52 or LA84 == 53 or LA84 == 54 or LA84 == 55 or LA84 == 56 or LA84 == 57 or LA84 == 58 or LA84 == 59 or LA84 == 60: + alt84 = 11 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("464:1: statement : ( labeled_statement | compound_statement | expression_statement | selection_statement | iteration_statement | jump_statement | macro_statement | asm2_statement | asm1_statement | asm_statement | declaration );", 84, 0, self.input) + + raise nvae + + if alt84 == 1: + # C.g:465:4: labeled_statement + self.following.append(self.FOLLOW_labeled_statement_in_statement2010) 
+ self.labeled_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 2: + # C.g:466:4: compound_statement + self.following.append(self.FOLLOW_compound_statement_in_statement2015) + self.compound_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 3: + # C.g:467:4: expression_statement + self.following.append(self.FOLLOW_expression_statement_in_statement2020) + self.expression_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 4: + # C.g:468:4: selection_statement + self.following.append(self.FOLLOW_selection_statement_in_statement2025) + self.selection_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 5: + # C.g:469:4: iteration_statement + self.following.append(self.FOLLOW_iteration_statement_in_statement2030) + self.iteration_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 6: + # C.g:470:4: jump_statement + self.following.append(self.FOLLOW_jump_statement_in_statement2035) + self.jump_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 7: + # C.g:471:4: macro_statement + self.following.append(self.FOLLOW_macro_statement_in_statement2040) + self.macro_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 8: + # C.g:472:4: asm2_statement + self.following.append(self.FOLLOW_asm2_statement_in_statement2045) + self.asm2_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 9: + # C.g:473:4: asm1_statement + self.following.append(self.FOLLOW_asm1_statement_in_statement2050) + self.asm1_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 10: + # C.g:474:4: asm_statement + self.following.append(self.FOLLOW_asm_statement_in_statement2055) + self.asm_statement() + self.following.pop() + if self.failed: + return + + + elif alt84 == 11: + # C.g:475:4: declaration + 
                    self.following.append(self.FOLLOW_declaration_in_statement2060)
                    self.declaration()
                    self.following.pop()
                    if self.failed:
                        return



        except RecognitionException, re:
            # NOTE: generated ANTLR code; the handler name 're' shadows the
            # 're' regex module inside this handler (harmless here).
            self.reportError(re)
            self.recover(self.input, re)
        finally:
            # With backtracking enabled, record the parse result for this rule
            # (rule index 60 = statement) so re-parses can be skipped.
            if self.backtracking > 0:
                self.memoize(self.input, 60, statement_StartIndex)

            pass

        return

    # $ANTLR end statement


    # $ANTLR start asm2_statement
    # C.g:478:1: asm2_statement : ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' ;
    #
    # Parses a GCC-style inline-assembly statement: an optional '__asm__'
    # keyword, an identifier, then a parenthesized blob of arbitrary tokens
    # (everything except ';'), closed by ')' ';'.  Auto-generated by ANTLR
    # from C.g — do not hand-edit; regenerate from the grammar instead.
    def asm2_statement(self, ):

        asm2_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path: skip if this rule (index 61)
                # was already attempted at this input position.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 61):
                    return

                # C.g:479:2: ( ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';' )
                # C.g:479:4: ( '__asm__' )? IDENTIFIER '(' (~ ( ';' ) )* ')' ';'
                # C.g:479:4: ( '__asm__' )?
                # alt85 == 1 means the optional '__asm__' keyword (token 102)
                # is present at the lookahead.
                alt85 = 2
                LA85_0 = self.input.LA(1)

                if (LA85_0 == 102) :
                    alt85 = 1
                if alt85 == 1:
                    # C.g:0:0: '__asm__'
                    self.match(self.input, 102, self.FOLLOW_102_in_asm2_statement2071)
                    if self.failed:
                        return



                # Match IDENTIFIER then '(' (token 61), per the rule above.
                self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_asm2_statement2074)
                if self.failed:
                    return
                self.match(self.input, 61, self.FOLLOW_61_in_asm2_statement2076)
                if self.failed:
                    return
                # C.g:479:30: (~ ( ';' ) )*
                # Consume every token except ';' until the closing ')' ';'
                # pair is reached; a ')' (token 62) only continues the loop
                # when more non-';' input follows it.
                while True: #loop86
                    alt86 = 2
                    LA86_0 = self.input.LA(1)

                    if (LA86_0 == 62) :
                        LA86_1 = self.input.LA(2)

                        if ((IDENTIFIER <= LA86_1 <= LINE_COMMAND) or (26 <= LA86_1 <= 116)) :
                            alt86 = 1


                    elif ((IDENTIFIER <= LA86_0 <= LINE_COMMAND) or (26 <= LA86_0 <= 61) or (63 <= LA86_0 <= 116)) :
                        alt86 = 1


                    if alt86 == 1:
                        # C.g:479:31: ~ ( ';' )
                        if (IDENTIFIER <= self.input.LA(1) <= LINE_COMMAND) or (26 <= self.input.LA(1) <= 116):
                            self.input.consume();
                            self.errorRecovery = False
                            self.failed = False

                        else:
                            # While backtracking, failures are signalled via
                            # self.failed instead of raising.
                            if self.backtracking > 0:
                                self.failed = True
                                return

                            mse = MismatchedSetException(None, self.input)
                            self.recoverFromMismatchedSet(
                                self.input, mse, self.FOLLOW_set_in_asm2_statement2079
                                )
                            raise mse




                    else:
                        break #loop86


                # Closing ')' (token 62) and ';' (token 25).
                self.match(self.input, 62, self.FOLLOW_62_in_asm2_statement2086)
                if self.failed:
                    return
                self.match(self.input, 25, self.FOLLOW_25_in_asm2_statement2088)
                if self.failed:
                    return




        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 61, asm2_statement_StartIndex)

            pass

        return

    # $ANTLR end asm2_statement


    # $ANTLR start asm1_statement
    # C.g:482:1: asm1_statement : '_asm' '{' (~ ( '}' ) )* '}' ;
    #
    # Parses an MSVC-style inline-assembly block: '_asm' '{' ... '}' where the
    # braces enclose arbitrary tokens other than '}'.  Auto-generated by
    # ANTLR from C.g — do not hand-edit.
    def asm1_statement(self, ):

        asm1_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path (rule index 62).
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 62):
                    return

                # C.g:483:2: ( '_asm' '{' (~ ( '}' ) )* '}' )
                # C.g:483:4: '_asm' '{' (~ ( '}' ) )* '}'
                # '_asm' is token 103, '{' is token 43.
                self.match(self.input, 103, self.FOLLOW_103_in_asm1_statement2100)
                if self.failed:
                    return
                self.match(self.input, 43, self.FOLLOW_43_in_asm1_statement2102)
                if self.failed:
                    return
                # C.g:483:15: (~ ( '}' ) )*
                # Swallow everything up to (but not including) the '}' (token 44).
                while True: #loop87
                    alt87 = 2
                    LA87_0 = self.input.LA(1)

                    if ((IDENTIFIER <= LA87_0 <= 43) or (45 <= LA87_0 <= 116)) :
                        alt87 = 1


                    if alt87 == 1:
                        # C.g:483:16: ~ ( '}' )
                        if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 116):
                            self.input.consume();
                            self.errorRecovery = False
                            self.failed = False

                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return

                            mse = MismatchedSetException(None, self.input)
                            self.recoverFromMismatchedSet(
                                self.input, mse, self.FOLLOW_set_in_asm1_statement2105
                                )
                            raise mse




                    else:
                        break #loop87


                # Closing '}' (token 44).
                self.match(self.input, 44, self.FOLLOW_44_in_asm1_statement2112)
                if self.failed:
                    return




        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 62, asm1_statement_StartIndex)

            pass

        return

    # $ANTLR end asm1_statement


    # $ANTLR start asm_statement
    # C.g:486:1: asm_statement : '__asm' '{' (~ ( '}' ) )* '}' ;
    #
    # Parses a '__asm' brace-delimited inline-assembly block; identical in
    # structure to asm1_statement but keyed on the '__asm' keyword
    # (token 104).  Auto-generated by ANTLR from C.g — do not hand-edit.
    def asm_statement(self, ):

        asm_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoized-backtracking fast path (rule index 63).
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 63):
                    return

                # C.g:487:2: ( '__asm' '{' (~ ( '}' ) )* '}' )
                # C.g:487:4: '__asm' '{' (~ ( '}' ) )* '}'
                self.match(self.input, 104, self.FOLLOW_104_in_asm_statement2123)
                if self.failed:
                    return
                self.match(self.input, 43, self.FOLLOW_43_in_asm_statement2125)
                if self.failed:
                    return
                # C.g:487:16: (~ ( '}' ) )*
                # Swallow everything up to (but not including) the '}' (token 44).
                while True: #loop88
                    alt88 = 2
                    LA88_0 = self.input.LA(1)

                    if ((IDENTIFIER <= LA88_0 <= 43) or (45 <= LA88_0 <= 116)) :
                        alt88 = 1


                    if alt88 == 1:
                        # C.g:487:17: ~ ( '}' )
                        if (IDENTIFIER <= self.input.LA(1) <= 43) or (45 <= self.input.LA(1) <= 116):
                            self.input.consume();
                            self.errorRecovery = False
                            self.failed = False

                        else:
                            if self.backtracking > 0:
                                self.failed = True
                                return

                            mse = MismatchedSetException(None, self.input)
                            self.recoverFromMismatchedSet(
                                self.input, mse, self.FOLLOW_set_in_asm_statement2128
                                )
                            raise mse




                    else:
                        break #loop88


                self.match(self.input, 44, self.FOLLOW_44_in_asm_statement2135)
                if self.failed:
                    return




        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                self.memoize(self.input, 63, asm_statement_StartIndex)

            pass

        return

    # $ANTLR end asm_statement


    # $ANTLR start macro_statement
    # C.g:490:1: macro_statement : IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' ;
    def macro_statement(self, ):

        macro_statement_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 64):
                    return

                # C.g:491:2: ( IDENTIFIER '(' ( declaration )* ( statement_list )?
( expression )? ')' ) + # C.g:491:4: IDENTIFIER '(' ( declaration )* ( statement_list )? ( expression )? ')' + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_macro_statement2147) + if self.failed: + return + self.match(self.input, 61, self.FOLLOW_61_in_macro_statement2149) + if self.failed: + return + # C.g:491:19: ( declaration )* + while True: #loop89 + alt89 = 2 + LA89 = self.input.LA(1) + if LA89 == IDENTIFIER: + LA89 = self.input.LA(2) + if LA89 == 61: + LA89_45 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 65: + LA89_48 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_66 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_69 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_70 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_71 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_72 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_73 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_74 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_75 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_76 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_77 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_78 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_79 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_80 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_81 = self.input.LA(3) + + if (self.synpred180()) : + 
alt89 = 1 + + + elif LA89 == 42: + LA89_82 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_83 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_84 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_85 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 26: + LA89 = self.input.LA(2) + if LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_87 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_88 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_89 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_90 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_91 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_92 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_93 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_94 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_95 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_96 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_97 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_98 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_99 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_100 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 65: + LA89_101 = self.input.LA(3) + + if 
(self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_102 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_103 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_104 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_105 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_106 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_107 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_108 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_109 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_110 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_111 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_112 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_113 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_114 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_115 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_116 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_117 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_118 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_119 = self.input.LA(3) + + if (self.synpred180()) : + alt89 
= 1 + + + elif LA89 == 40: + LA89_120 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_121 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_122 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_123 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_124 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_125 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 34: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_126 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_127 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_128 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_129 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_130 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_131 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_132 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_133 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_134 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_135 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_136 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_137 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_138 = self.input.LA(3) + + if 
(self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_139 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_140 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_141 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_142 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_143 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_144 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_145 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 35: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_146 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_147 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_148 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_149 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_150 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_151 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_152 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_153 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_154 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_155 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_156 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_157 = 
self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_158 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_159 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_160 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_161 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_162 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_163 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_164 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_165 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 36: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_166 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_167 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_168 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_169 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_170 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_171 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_172 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_173 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_174 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_175 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif 
LA89 == 36: + LA89_176 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_177 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_178 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_179 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_180 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_181 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_182 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_183 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_184 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_185 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 37: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_186 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_187 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_188 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_189 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_190 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_191 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_192 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_193 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_194 = self.input.LA(3) + + if (self.synpred180()) : + 
alt89 = 1 + + + elif LA89 == 35: + LA89_195 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_196 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_197 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_198 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_199 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_200 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_201 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_202 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_203 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_204 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_205 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 38: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_206 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_207 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_208 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_209 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_210 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_211 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_212 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_213 = self.input.LA(3) + + if 
(self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_214 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_215 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_216 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_217 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_218 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_219 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_220 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_221 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_222 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_223 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_224 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_225 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 39: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_226 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_227 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_228 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_229 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_230 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_231 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_232 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 
+ + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_233 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_234 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_235 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_236 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_237 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_238 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_239 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_240 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_241 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_242 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_243 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_244 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_245 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 40: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_246 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_247 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_248 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_249 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_250 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_251 = self.input.LA(3) + + if 
(self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_252 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_253 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_254 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_255 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_256 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_257 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_258 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_259 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_260 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_261 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_262 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_263 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_264 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_265 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 41: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_266 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_267 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_268 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_269 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_270 = 
self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_271 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_272 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_273 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_274 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_275 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_276 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_277 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_278 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_279 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_280 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_281 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_282 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_283 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_284 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_285 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 42: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_286 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_287 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_288 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: 
+ LA89_289 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_290 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_291 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_292 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_293 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_294 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_295 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_296 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_297 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_298 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_299 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_300 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + LA89_301 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_302 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_303 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_304 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_305 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + elif LA89 == 45 or LA89 == 46: + LA89_40 = self.input.LA(2) + + if (LA89_40 == IDENTIFIER) : + LA89_306 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif (LA89_40 == 43) : + LA89_307 = self.input.LA(3) + + 
if (self.synpred180()) : + alt89 = 1 + + + + + elif LA89 == 48: + LA89_41 = self.input.LA(2) + + if (LA89_41 == IDENTIFIER) : + LA89_308 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif (LA89_41 == 43) : + LA89_309 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57 or LA89 == 58 or LA89 == 59 or LA89 == 60: + LA89 = self.input.LA(2) + if LA89 == 65: + LA89_310 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 58: + LA89_311 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 59: + LA89_312 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 60: + LA89_313 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == IDENTIFIER: + LA89_314 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 61: + LA89_315 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 25: + LA89_316 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 29 or LA89 == 30 or LA89 == 31 or LA89 == 32 or LA89 == 33: + LA89_317 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 34: + LA89_318 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 35: + LA89_319 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 36: + LA89_320 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 37: + LA89_321 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 38: + LA89_322 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 39: + LA89_323 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 40: + LA89_324 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 41: + 
LA89_325 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 42: + LA89_326 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 45 or LA89 == 46: + LA89_327 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 48: + LA89_328 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + elif LA89 == 49 or LA89 == 50 or LA89 == 51 or LA89 == 52 or LA89 == 53 or LA89 == 54 or LA89 == 55 or LA89 == 56 or LA89 == 57: + LA89_329 = self.input.LA(3) + + if (self.synpred180()) : + alt89 = 1 + + + + + if alt89 == 1: + # C.g:0:0: declaration + self.following.append(self.FOLLOW_declaration_in_macro_statement2151) + self.declaration() + self.following.pop() + if self.failed: + return + + + else: + break #loop89 + + + # C.g:491:33: ( statement_list )? + alt90 = 2 + LA90 = self.input.LA(1) + if LA90 == IDENTIFIER: + LA90 = self.input.LA(2) + if LA90 == 61: + LA90_44 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 45 or LA90 == 46 or LA90 == 47 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60: + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_46 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == IDENTIFIER: + LA90_47 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 63: + LA90_48 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_49 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_50 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_51 = self.input.LA(3) + + if (self.synpred181()) 
: + alt90 = 1 + elif LA90 == 71: + LA90_52 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_53 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_54 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_72 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_73 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_74 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_75 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_76 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_77 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_78 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_79 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_80 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_81 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_82 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_83 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_84 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_85 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25 or LA90 == 26 or LA90 == 29 or LA90 == 30 or LA90 == 31 or LA90 == 32 or LA90 == 33 or LA90 == 34 or LA90 == 35 or LA90 == 36 or LA90 == 37 or LA90 == 38 or LA90 == 39 or LA90 == 40 or LA90 == 41 or LA90 == 42 or LA90 == 43 or LA90 == 45 or 
LA90 == 46 or LA90 == 48 or LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60 or LA90 == 102 or LA90 == 103 or LA90 == 104 or LA90 == 105 or LA90 == 106 or LA90 == 107 or LA90 == 109 or LA90 == 110 or LA90 == 111 or LA90 == 112 or LA90 == 113 or LA90 == 114 or LA90 == 115 or LA90 == 116: + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90 = self.input.LA(2) + if LA90 == 63: + LA90_87 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_88 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_89 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_90 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_91 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_92 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_93 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_94 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_95 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_96 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_97 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_98 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_99 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_100 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_101 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_102 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_103 = 
self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_104 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_105 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_106 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_107 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_110 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90 = self.input.LA(2) + if LA90 == 63: + LA90_111 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_112 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_113 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_114 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_115 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_116 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_117 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_118 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_119 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_120 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_121 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_122 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_123 
= self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_124 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_125 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_126 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_127 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_128 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_129 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_130 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_131 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_132 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90 = self.input.LA(2) + if LA90 == 63: + LA90_135 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_136 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_137 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_138 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_139 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_140 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_141 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_142 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_143 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_144 = 
self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_145 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_146 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_147 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_148 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_149 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_150 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_151 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_152 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_153 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_154 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_155 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_156 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90 = self.input.LA(2) + if LA90 == 63: + LA90_159 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_160 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_161 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_162 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_163 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_164 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_165 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 
82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_166 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_167 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_168 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_169 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_170 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_171 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_172 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_173 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_174 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_175 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_176 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_177 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_178 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_179 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_180 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90 = self.input.LA(2) + if LA90 == IDENTIFIER: + LA90_183 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 63: + LA90_184 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_185 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_186 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_187 = self.input.LA(3) + + 
if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_188 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_189 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_190 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_191 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_192 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_193 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_194 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_195 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_196 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_197 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_198 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_199 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_200 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_201 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_202 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_203 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_204 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_206 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_207 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 
1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90 = self.input.LA(2) + if LA90 == 63: + LA90_209 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_210 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 74: + LA90_211 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65: + LA90_212 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 75: + LA90_213 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_214 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_215 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 69: + LA90_216 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 70: + LA90_217 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 67: + LA90_218 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 68: + LA90_219 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 100 or LA90 == 101: + LA90_220 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 96 or LA90 == 97 or LA90 == 98 or LA90 == 99: + LA90_221 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 94 or LA90 == 95: + LA90_222 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 76: + LA90_223 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 93: + LA90_224 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 92: + LA90_225 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 91: + LA90_226 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 90: + LA90_227 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 89: + LA90_228 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 27: + LA90_229 = self.input.LA(3) + + if 
(self.synpred181()) : + alt90 = 1 + elif LA90 == 25: + alt90 = 1 + elif LA90 == 28 or LA90 == 79 or LA90 == 80 or LA90 == 81 or LA90 == 82 or LA90 == 83 or LA90 == 84 or LA90 == 85 or LA90 == 86 or LA90 == 87 or LA90 == 88: + LA90_231 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90 = self.input.LA(2) + if LA90 == IDENTIFIER: + LA90_233 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90_234 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90_235 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90_236 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90_237 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_238 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90_239 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_240 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_241 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_242 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90_243 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90_244 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 49 or LA90 == 50 or LA90 == 51 or LA90 == 52 or LA90 == 53 or LA90 == 54 or LA90 == 55 or LA90 == 56 or LA90 == 57 or LA90 == 58 or LA90 == 59 or LA90 == 60: + LA90_245 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 34: + LA90_246 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 35: + LA90_247 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 
+ elif LA90 == 36: + LA90_248 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 37: + LA90_249 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 38: + LA90_250 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 39: + LA90_251 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 40: + LA90_252 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 41: + LA90_253 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 42: + LA90_254 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 45 or LA90 == 46: + LA90_255 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 48: + LA90_256 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90 = self.input.LA(2) + if LA90 == IDENTIFIER: + LA90_257 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90_258 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90_259 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90_260 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90_261 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_262 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90_263 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_264 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_265 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_266 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90_267 = self.input.LA(3) + + if 
(self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90_268 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90 = self.input.LA(2) + if LA90 == IDENTIFIER: + LA90_269 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90_270 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90_271 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90_272 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90_273 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_274 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90_275 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 61: + LA90_276 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_277 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_278 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90_279 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90_280 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90 = self.input.LA(2) + if LA90 == 61: + LA90_281 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == IDENTIFIER: + LA90_282 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90_283 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90_284 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90_285 = self.input.LA(3) + + if (self.synpred181()) : + 
alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90_286 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_287 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90_288 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_289 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_290 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90_291 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90_292 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90 = self.input.LA(2) + if LA90 == 61: + LA90_293 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == IDENTIFIER: + LA90_294 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == HEX_LITERAL: + LA90_295 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == OCTAL_LITERAL: + LA90_296 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == DECIMAL_LITERAL: + LA90_297 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == CHARACTER_LITERAL: + LA90_298 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == STRING_LITERAL: + LA90_299 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == FLOATING_POINT_LITERAL: + LA90_300 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 71: + LA90_301 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 72: + LA90_302 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 65 or LA90 == 67 or LA90 == 68 or LA90 == 76 or LA90 == 77 or LA90 == 78: + LA90_303 = self.input.LA(3) + + if (self.synpred181()) : + alt90 = 1 + elif LA90 == 73: + LA90_304 = self.input.LA(3) + 
+ if (self.synpred181()) : + alt90 = 1 + if alt90 == 1: + # C.g:0:0: statement_list + self.following.append(self.FOLLOW_statement_list_in_macro_statement2155) + self.statement_list() + self.following.pop() + if self.failed: + return + + + + # C.g:491:49: ( expression )? + alt91 = 2 + LA91_0 = self.input.LA(1) + + if ((IDENTIFIER <= LA91_0 <= FLOATING_POINT_LITERAL) or LA91_0 == 61 or LA91_0 == 65 or (67 <= LA91_0 <= 68) or (71 <= LA91_0 <= 73) or (76 <= LA91_0 <= 78)) : + alt91 = 1 + if alt91 == 1: + # C.g:0:0: expression + self.following.append(self.FOLLOW_expression_in_macro_statement2158) + self.expression() + self.following.pop() + if self.failed: + return + + + + self.match(self.input, 62, self.FOLLOW_62_in_macro_statement2161) + if self.failed: + return + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 64, macro_statement_StartIndex) + + pass + + return + + # $ANTLR end macro_statement + + + # $ANTLR start labeled_statement + # C.g:494:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement ); + def labeled_statement(self, ): + + labeled_statement_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 65): + return + + # C.g:495:2: ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement ) + alt92 = 3 + LA92 = self.input.LA(1) + if LA92 == IDENTIFIER: + alt92 = 1 + elif LA92 == 105: + alt92 = 2 + elif LA92 == 106: + alt92 = 3 + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("494:1: labeled_statement : ( IDENTIFIER ':' statement | 'case' constant_expression ':' statement | 'default' ':' statement );", 92, 0, self.input) + + raise nvae + + if alt92 == 1: + # C.g:495:4: IDENTIFIER ':' statement + self.match(self.input, IDENTIFIER, 
self.FOLLOW_IDENTIFIER_in_labeled_statement2173) + if self.failed: + return + self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2175) + if self.failed: + return + self.following.append(self.FOLLOW_statement_in_labeled_statement2177) + self.statement() + self.following.pop() + if self.failed: + return + + + elif alt92 == 2: + # C.g:496:4: 'case' constant_expression ':' statement + self.match(self.input, 105, self.FOLLOW_105_in_labeled_statement2182) + if self.failed: + return + self.following.append(self.FOLLOW_constant_expression_in_labeled_statement2184) + self.constant_expression() + self.following.pop() + if self.failed: + return + self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2186) + if self.failed: + return + self.following.append(self.FOLLOW_statement_in_labeled_statement2188) + self.statement() + self.following.pop() + if self.failed: + return + + + elif alt92 == 3: + # C.g:497:4: 'default' ':' statement + self.match(self.input, 106, self.FOLLOW_106_in_labeled_statement2193) + if self.failed: + return + self.match(self.input, 47, self.FOLLOW_47_in_labeled_statement2195) + if self.failed: + return + self.following.append(self.FOLLOW_statement_in_labeled_statement2197) + self.statement() + self.following.pop() + if self.failed: + return + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 65, labeled_statement_StartIndex) + + pass + + return + + # $ANTLR end labeled_statement + + class compound_statement_return(object): + def __init__(self): + self.start = None + self.stop = None + + + + # $ANTLR start compound_statement + # C.g:500:1: compound_statement : '{' ( declaration )* ( statement_list )? 
'}' ; + def compound_statement(self, ): + + retval = self.compound_statement_return() + retval.start = self.input.LT(1) + compound_statement_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 66): + return retval + + # C.g:501:2: ( '{' ( declaration )* ( statement_list )? '}' ) + # C.g:501:4: '{' ( declaration )* ( statement_list )? '}' + self.match(self.input, 43, self.FOLLOW_43_in_compound_statement2208) + if self.failed: + return retval + # C.g:501:8: ( declaration )* + while True: #loop93 + alt93 = 2 + LA93 = self.input.LA(1) + if LA93 == IDENTIFIER: + LA93 = self.input.LA(2) + if LA93 == 61: + LA93_44 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 65: + LA93_48 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_67 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_69 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_70 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_71 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_72 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_73 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_74 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_75 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_76 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_77 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_78 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_79 = self.input.LA(3) + + if (self.synpred185()) : + 
alt93 = 1 + + + elif LA93 == 40: + LA93_80 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_81 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_82 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_83 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_84 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_85 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 26: + LA93 = self.input.LA(2) + if LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_86 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_87 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_88 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_89 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_90 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_91 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_92 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_93 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_94 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_95 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_96 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_97 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_98 = self.input.LA(3) + + if 
(self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_99 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 65: + LA93_100 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_101 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_102 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_103 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_104 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_105 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_106 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_107 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_108 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_109 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_110 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_111 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_112 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_113 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_114 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_115 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_116 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 
1 + + + elif LA93 == 38: + LA93_117 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_118 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_119 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_120 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_121 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_122 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_123 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_124 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 34: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_125 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_126 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_127 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_128 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_129 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_130 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_131 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_132 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_133 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_134 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_135 = self.input.LA(3) + + if 
(self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_136 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_137 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_138 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_139 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_140 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_141 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_142 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_143 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_144 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 35: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_145 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_146 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_147 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_148 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_149 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_150 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_151 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_152 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_153 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_154 = 
self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_155 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_156 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_157 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_158 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_159 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_160 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_161 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_162 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_163 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_164 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 36: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_165 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_166 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_167 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_168 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_169 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_170 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_171 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_172 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif 
LA93 == 34: + LA93_173 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_174 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_175 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_176 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_177 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_178 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_179 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_180 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_181 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_182 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_183 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_184 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 37: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_185 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_186 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_187 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_188 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_189 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_190 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_191 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 
== 31 or LA93 == 32 or LA93 == 33: + LA93_192 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_193 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_194 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_195 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_196 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_197 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_198 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_199 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_200 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_201 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_202 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_203 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_204 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 38: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_205 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_206 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_207 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_208 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_209 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_210 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: 
+ LA93_211 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_212 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_213 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_214 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_215 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_216 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_217 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_218 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_219 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_220 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_221 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_222 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_223 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_224 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 39: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_225 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_226 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_227 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_228 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_229 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + 
+ + elif LA93 == 61: + LA93_230 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_231 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_232 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_233 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_234 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_235 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_236 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_237 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_238 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_239 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_240 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_241 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_242 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_243 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_244 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 40: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_245 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_246 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_247 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_248 = self.input.LA(3) + + if (self.synpred185()) : 
+ alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_249 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_250 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_251 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_252 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_253 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_254 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_255 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_256 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_257 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_258 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_259 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_260 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_261 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_262 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_263 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_264 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 41: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_265 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_266 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_267 = self.input.LA(3) + + if 
(self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_268 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_269 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_270 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_271 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_272 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_273 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_274 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_275 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_276 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_277 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_278 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_279 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_280 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_281 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_282 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_283 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_284 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 42: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_285 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_286 = 
self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_287 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_288 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_289 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_290 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_291 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_292 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_293 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_294 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_295 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_296 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_297 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + LA93_298 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_299 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_300 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_301 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_302 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_303 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_304 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + elif LA93 == 45 or LA93 == 46: + LA93_40 = 
self.input.LA(2) + + if (LA93_40 == IDENTIFIER) : + LA93_305 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif (LA93_40 == 43) : + LA93_306 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + + elif LA93 == 48: + LA93_41 = self.input.LA(2) + + if (LA93_41 == IDENTIFIER) : + LA93_307 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif (LA93_41 == 43) : + LA93_308 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57 or LA93 == 58 or LA93 == 59 or LA93 == 60: + LA93 = self.input.LA(2) + if LA93 == 65: + LA93_309 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 58: + LA93_310 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 59: + LA93_311 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 60: + LA93_312 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == IDENTIFIER: + LA93_313 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 61: + LA93_314 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 25: + LA93_315 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 29 or LA93 == 30 or LA93 == 31 or LA93 == 32 or LA93 == 33: + LA93_316 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 34: + LA93_317 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 35: + LA93_318 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 36: + LA93_319 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 37: + LA93_320 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 38: + LA93_321 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 39: + 
LA93_322 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 40: + LA93_323 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 41: + LA93_324 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 42: + LA93_325 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 45 or LA93 == 46: + LA93_326 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 48: + LA93_327 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + elif LA93 == 49 or LA93 == 50 or LA93 == 51 or LA93 == 52 or LA93 == 53 or LA93 == 54 or LA93 == 55 or LA93 == 56 or LA93 == 57: + LA93_328 = self.input.LA(3) + + if (self.synpred185()) : + alt93 = 1 + + + + + if alt93 == 1: + # C.g:0:0: declaration + self.following.append(self.FOLLOW_declaration_in_compound_statement2210) + self.declaration() + self.following.pop() + if self.failed: + return retval + + + else: + break #loop93 + + + # C.g:501:21: ( statement_list )? 
+ alt94 = 2 + LA94_0 = self.input.LA(1) + + if ((IDENTIFIER <= LA94_0 <= FLOATING_POINT_LITERAL) or (25 <= LA94_0 <= 26) or (29 <= LA94_0 <= 43) or (45 <= LA94_0 <= 46) or (48 <= LA94_0 <= 61) or LA94_0 == 65 or (67 <= LA94_0 <= 68) or (71 <= LA94_0 <= 73) or (76 <= LA94_0 <= 78) or (102 <= LA94_0 <= 107) or (109 <= LA94_0 <= 116)) : + alt94 = 1 + if alt94 == 1: + # C.g:0:0: statement_list + self.following.append(self.FOLLOW_statement_list_in_compound_statement2213) + self.statement_list() + self.following.pop() + if self.failed: + return retval + + + + self.match(self.input, 44, self.FOLLOW_44_in_compound_statement2216) + if self.failed: + return retval + + + + retval.stop = self.input.LT(-1) + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 66, compound_statement_StartIndex) + + pass + + return retval + + # $ANTLR end compound_statement + + + # $ANTLR start statement_list + # C.g:504:1: statement_list : ( statement )+ ; + def statement_list(self, ): + + statement_list_StartIndex = self.input.index() + try: + try: + if self.backtracking > 0 and self.alreadyParsedRule(self.input, 67): + return + + # C.g:505:2: ( ( statement )+ ) + # C.g:505:4: ( statement )+ + # C.g:505:4: ( statement )+ + cnt95 = 0 + while True: #loop95 + alt95 = 2 + LA95 = self.input.LA(1) + if LA95 == IDENTIFIER: + LA95 = self.input.LA(2) + if LA95 == 25 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 45 or LA95 == 46 or LA95 == 47 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60: + alt95 = 1 + elif LA95 == 61: + LA95_47 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 63: 
+ LA95_48 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_49 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_50 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_51 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_52 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_53 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_54 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == STRING_LITERAL: + LA95_55 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == IDENTIFIER: + LA95_56 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_57 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_58 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_59 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_60 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_61 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_62 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_63 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_64 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_65 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_66 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: 
+ LA95_67 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_68 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_69 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_70 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == HEX_LITERAL: + LA95 = self.input.LA(2) + if LA95 == 63: + LA95_89 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_90 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_91 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_92 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_93 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_94 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_95 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_96 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_97 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_98 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_99 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_100 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_101 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_102 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_103 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_104 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + 
LA95_105 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_106 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_107 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_108 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_109 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_112 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == OCTAL_LITERAL: + LA95 = self.input.LA(2) + if LA95 == 63: + LA95_113 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_114 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_115 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_116 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_117 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_118 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_119 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_120 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_121 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_122 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_123 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_124 = self.input.LA(3) + + if 
(self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_125 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_126 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_127 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_128 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_129 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_130 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_131 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_132 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_133 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_134 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + + elif LA95 == DECIMAL_LITERAL: + LA95 = self.input.LA(2) + if LA95 == 63: + LA95_137 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_138 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_139 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_140 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_141 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_142 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_143 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_144 = self.input.LA(3) + + if 
(self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_145 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_146 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_147 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_148 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_149 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_150 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_151 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_152 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_153 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_154 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_155 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_156 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_157 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_158 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + + elif LA95 == CHARACTER_LITERAL: + LA95 = self.input.LA(2) + if LA95 == 63: + LA95_161 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_162 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_163 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_164 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_165 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 
== 71: + LA95_166 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_167 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_168 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_169 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_170 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_171 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_172 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_173 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_174 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_175 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_176 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_177 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_178 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_179 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_180 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_181 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_182 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + + elif LA95 == STRING_LITERAL: + LA95 = self.input.LA(2) + if LA95 == IDENTIFIER: + LA95_185 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 
== 63: + LA95_186 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_187 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_188 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_189 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_190 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_191 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_192 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_193 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_194 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_195 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_196 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_197 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_198 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_199 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_200 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_201 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_202 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_203 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_204 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_205 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_206 = self.input.LA(3) + + if 
(self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + elif LA95 == STRING_LITERAL: + LA95_208 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_210 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95 = self.input.LA(2) + if LA95 == 63: + LA95_211 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_212 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 74: + LA95_213 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65: + LA95_214 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 75: + LA95_215 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_216 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_217 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 69: + LA95_218 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 70: + LA95_219 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 67: + LA95_220 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 68: + LA95_221 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 100 or LA95 == 101: + LA95_222 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 96 or LA95 == 97 or LA95 == 98 or LA95 == 99: + LA95_223 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 94 or LA95 == 95: + LA95_224 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 76: + LA95_225 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 93: + LA95_226 = 
self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 92: + LA95_227 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 91: + LA95_228 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 90: + LA95_229 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 89: + LA95_230 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 27: + LA95_231 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 28 or LA95 == 79 or LA95 == 80 or LA95 == 81 or LA95 == 82 or LA95 == 83 or LA95 == 84 or LA95 == 85 or LA95 == 86 or LA95 == 87 or LA95 == 88: + LA95_233 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 25: + alt95 = 1 + + elif LA95 == 61: + LA95 = self.input.LA(2) + if LA95 == IDENTIFIER: + LA95_235 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == HEX_LITERAL: + LA95_236 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == OCTAL_LITERAL: + LA95_237 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == DECIMAL_LITERAL: + LA95_238 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == CHARACTER_LITERAL: + LA95_239 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == STRING_LITERAL: + LA95_240 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95_241 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_242 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_243 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_244 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95_245 = self.input.LA(3) + + if 
(self.synpred187()) : + alt95 = 1 + + + elif LA95 == 73: + LA95_246 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60: + LA95_247 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 34: + LA95_248 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 35: + LA95_249 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 36: + LA95_250 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 37: + LA95_251 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 38: + LA95_252 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 39: + LA95_253 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 40: + LA95_254 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 41: + LA95_255 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 42: + LA95_256 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 45 or LA95 == 46: + LA95_257 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 48: + LA95_258 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == 71: + LA95 = self.input.LA(2) + if LA95 == IDENTIFIER: + LA95_259 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == HEX_LITERAL: + LA95_260 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == OCTAL_LITERAL: + LA95_261 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == DECIMAL_LITERAL: + LA95_262 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == CHARACTER_LITERAL: + LA95_263 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + 
+ elif LA95 == STRING_LITERAL: + LA95_264 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95_265 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_266 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_267 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_268 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95_269 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 73: + LA95_270 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == 72: + LA95 = self.input.LA(2) + if LA95 == IDENTIFIER: + LA95_271 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == HEX_LITERAL: + LA95_272 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == OCTAL_LITERAL: + LA95_273 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == DECIMAL_LITERAL: + LA95_274 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == CHARACTER_LITERAL: + LA95_275 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == STRING_LITERAL: + LA95_276 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95_277 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 61: + LA95_278 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_279 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_280 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95_281 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + 
elif LA95 == 73: + LA95_282 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95 = self.input.LA(2) + if LA95 == 61: + LA95_283 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == IDENTIFIER: + LA95_284 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == HEX_LITERAL: + LA95_285 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == OCTAL_LITERAL: + LA95_286 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == DECIMAL_LITERAL: + LA95_287 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == CHARACTER_LITERAL: + LA95_288 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == STRING_LITERAL: + LA95_289 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95_290 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_291 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_292 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95_293 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 73: + LA95_294 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == 73: + LA95 = self.input.LA(2) + if LA95 == 61: + LA95_295 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == IDENTIFIER: + LA95_296 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == HEX_LITERAL: + LA95_297 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == OCTAL_LITERAL: + LA95_298 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == DECIMAL_LITERAL: + LA95_299 = 
self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == CHARACTER_LITERAL: + LA95_300 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == STRING_LITERAL: + LA95_301 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == FLOATING_POINT_LITERAL: + LA95_302 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 71: + LA95_303 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 72: + LA95_304 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 65 or LA95 == 67 or LA95 == 68 or LA95 == 76 or LA95 == 77 or LA95 == 78: + LA95_305 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + elif LA95 == 73: + LA95_306 = self.input.LA(3) + + if (self.synpred187()) : + alt95 = 1 + + + + elif LA95 == 25 or LA95 == 26 or LA95 == 29 or LA95 == 30 or LA95 == 31 or LA95 == 32 or LA95 == 33 or LA95 == 34 or LA95 == 35 or LA95 == 36 or LA95 == 37 or LA95 == 38 or LA95 == 39 or LA95 == 40 or LA95 == 41 or LA95 == 42 or LA95 == 43 or LA95 == 45 or LA95 == 46 or LA95 == 48 or LA95 == 49 or LA95 == 50 or LA95 == 51 or LA95 == 52 or LA95 == 53 or LA95 == 54 or LA95 == 55 or LA95 == 56 or LA95 == 57 or LA95 == 58 or LA95 == 59 or LA95 == 60 or LA95 == 102 or LA95 == 103 or LA95 == 104 or LA95 == 105 or LA95 == 106 or LA95 == 107 or LA95 == 109 or LA95 == 110 or LA95 == 111 or LA95 == 112 or LA95 == 113 or LA95 == 114 or LA95 == 115 or LA95 == 116: + alt95 = 1 + + if alt95 == 1: + # C.g:0:0: statement + self.following.append(self.FOLLOW_statement_in_statement_list2227) + self.statement() + self.following.pop() + if self.failed: + return + + + else: + if cnt95 >= 1: + break #loop95 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(95, self.input) + raise eee + + cnt95 += 1 + + + + + + + except RecognitionException, re: + self.reportError(re) + self.recover(self.input, re) + finally: + if 
self.backtracking > 0:
                # Memoize the parse result for rule index 67 (statement_list) so
                # backtracking attempts can skip re-parsing this input region.
                self.memoize(self.input, 67, statement_list_StartIndex)

            pass

        return

    # $ANTLR end statement_list

    class expression_statement_return(object):
        """Return scope for the expression_statement rule.

        Holds the first (start) and last (stop) tokens matched by the rule,
        as required by ANTLR's rule-return-scope convention.
        """
        def __init__(self):
            # Filled in by expression_statement(): boundary tokens of the match.
            self.start = None
            self.stop = None



    # $ANTLR start expression_statement
    # C.g:508:1: expression_statement : ( ';' | expression ';' );
    def expression_statement(self, ):
        """Parse an expression-statement: either a bare ';' or 'expression ;'.

        Returns an expression_statement_return whose start/stop tokens delimit
        the matched text. On a prediction failure while backtracking, sets
        self.failed and returns early instead of raising.
        """

        retval = self.expression_statement_return()
        retval.start = self.input.LT(1)
        expression_statement_StartIndex = self.input.index()
        try:
            try:
                # Memoization fast path: if this rule already parsed (or failed)
                # here during backtracking, do not parse it again.
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 68):
                    return retval

                # C.g:509:2: ( ';' | expression ';' )
                # One-token lookahead decides between the two alternatives:
                # token 25 is ';'; the other set is the FIRST set of 'expression'.
                alt96 = 2
                LA96_0 = self.input.LA(1)

                if (LA96_0 == 25) :
                    alt96 = 1
                elif ((IDENTIFIER <= LA96_0 <= FLOATING_POINT_LITERAL) or LA96_0 == 61 or LA96_0 == 65 or (67 <= LA96_0 <= 68) or (71 <= LA96_0 <= 73) or (76 <= LA96_0 <= 78)) :
                    alt96 = 2
                else:
                    # No viable alternative: while backtracking just flag failure;
                    # otherwise raise for normal error reporting/recovery.
                    if self.backtracking > 0:
                        self.failed = True
                        return retval

                    nvae = NoViableAltException("508:1: expression_statement : ( ';' | expression ';' );", 96, 0, self.input)

                    raise nvae

                if alt96 == 1:
                    # C.g:509:4: ';'
                    self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2239)
                    if self.failed:
                        return retval


                elif alt96 == 2:
                    # C.g:510:4: expression ';'
                    self.following.append(self.FOLLOW_expression_in_expression_statement2244)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return retval
                    self.match(self.input, 25, self.FOLLOW_25_in_expression_statement2246)
                    if self.failed:
                        return retval


                retval.stop = self.input.LT(-1)


            # NOTE(review): Python 2 except syntax; the alias 're' shadows any
            # module named re in this scope. Generated code — left as emitted.
            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                # Cache outcome for rule index 68 (expression_statement).
                self.memoize(self.input, 68, expression_statement_StartIndex)

            pass

        return retval

    # $ANTLR end expression_statement


    # $ANTLR start selection_statement
    # C.g:513:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );
    def selection_statement(self, ):
        """Parse a C selection statement: if/else or switch.

        For the 'if' form, records the source span and text of the controlling
        expression via StorePredicateExpression (consumed by the EDK2 ECC
        checker). Returns None; failure is signalled through self.failed.
        """

        selection_statement_StartIndex = self.input.index()
        e = None


        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 69):
                    return

                # C.g:514:2: ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement )
                # Token 107 is 'if', token 109 is 'switch'.
                alt98 = 2
                LA98_0 = self.input.LA(1)

                if (LA98_0 == 107) :
                    alt98 = 1
                elif (LA98_0 == 109) :
                    alt98 = 2
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("513:1: selection_statement : ( 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )? | 'switch' '(' expression ')' statement );", 98, 0, self.input)

                    raise nvae

                if alt98 == 1:
                    # C.g:514:4: 'if' '(' e= expression ')' statement ( options {k=1; backtrack=false; } : 'else' statement )?
                    self.match(self.input, 107, self.FOLLOW_107_in_selection_statement2257)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_selection_statement2259)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_selection_statement2263)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2265)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the if-condition's location and text (only on a
                        # committed parse, not while speculating).
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))

                    self.following.append(self.FOLLOW_statement_in_selection_statement2269)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:514:167: ( options {k=1; backtrack=false; } : 'else' statement )?
                    # Optional 'else' clause; token 108 is 'else' (classic
                    # dangling-else resolution: bind to the nearest 'if').
                    alt97 = 2
                    LA97_0 = self.input.LA(1)

                    if (LA97_0 == 108) :
                        alt97 = 1
                    if alt97 == 1:
                        # C.g:514:200: 'else' statement
                        self.match(self.input, 108, self.FOLLOW_108_in_selection_statement2284)
                        if self.failed:
                            return
                        self.following.append(self.FOLLOW_statement_in_selection_statement2286)
                        self.statement()
                        self.following.pop()
                        if self.failed:
                            return




                elif alt98 == 2:
                    # C.g:515:4: 'switch' '(' expression ')' statement
                    self.match(self.input, 109, self.FOLLOW_109_in_selection_statement2293)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_selection_statement2295)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_selection_statement2297)
                    self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_selection_statement2299)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_selection_statement2301)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return



            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                # Cache outcome for rule index 69 (selection_statement).
                self.memoize(self.input, 69, selection_statement_StartIndex)

            pass

        return

    # $ANTLR end selection_statement


    # $ANTLR start iteration_statement
    # C.g:518:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );
    def iteration_statement(self, ):
        """Parse a C iteration statement: while, do-while, or for.

        In every alternative the controlling condition's span/text is recorded
        with StorePredicateExpression (for 'for' loops this is the second
        expression_statement, i.e. the loop condition). Returns None; failure
        is signalled through self.failed.
        """

        iteration_statement_StartIndex = self.input.index()
        e = None


        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 70):
                    return

                # C.g:519:2: ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement )
                # Token 110 is 'while', 111 is 'do', 112 is 'for'.
                alt100 = 3
                LA100 = self.input.LA(1)
                if LA100 == 110:
                    alt100 = 1
                elif LA100 == 111:
                    alt100 = 2
                elif LA100 == 112:
                    alt100 = 3
                else:
                    if self.backtracking > 0:
                        self.failed = True
                        return

                    nvae = NoViableAltException("518:1: iteration_statement : ( 'while' '(' e= expression ')' statement | 'do' statement 'while' '(' e= expression ')' ';' | 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement );", 100, 0, self.input)

                    raise nvae

                if alt100 == 1:
                    # C.g:519:4: 'while' '(' e= expression ')' statement
                    self.match(self.input, 110, self.FOLLOW_110_in_iteration_statement2312)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_iteration_statement2314)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_iteration_statement2318)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2320)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2322)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the while-condition's source span and text.
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))



                elif alt100 == 2:
                    # C.g:520:4: 'do' statement 'while' '(' e= expression ')' ';'
                    self.match(self.input, 111, self.FOLLOW_111_in_iteration_statement2329)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2331)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 110, self.FOLLOW_110_in_iteration_statement2333)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_iteration_statement2335)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_in_iteration_statement2339)
                    e = self.expression()
                    self.following.pop()
                    if self.failed:
                        return
                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2341)
                    if self.failed:
                        return
                    self.match(self.input, 25, self.FOLLOW_25_in_iteration_statement2343)
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the do-while condition's source span and text.
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))



                elif alt100 == 3:
                    # C.g:521:4: 'for' '(' expression_statement e= expression_statement ( expression )? ')' statement
                    self.match(self.input, 112, self.FOLLOW_112_in_iteration_statement2350)
                    if self.failed:
                        return
                    self.match(self.input, 61, self.FOLLOW_61_in_iteration_statement2352)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2354)
                    self.expression_statement()
                    self.following.pop()
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_expression_statement_in_iteration_statement2358)
                    e = self.expression_statement()
                    self.following.pop()
                    if self.failed:
                        return
                    # C.g:521:58: ( expression )?
                    # Optional third clause of the for-header (the increment
                    # expression); the lookahead set is FIRST(expression).
                    alt99 = 2
                    LA99_0 = self.input.LA(1)

                    if ((IDENTIFIER <= LA99_0 <= FLOATING_POINT_LITERAL) or LA99_0 == 61 or LA99_0 == 65 or (67 <= LA99_0 <= 68) or (71 <= LA99_0 <= 73) or (76 <= LA99_0 <= 78)) :
                        alt99 = 1
                    if alt99 == 1:
                        # C.g:0:0: expression
                        self.following.append(self.FOLLOW_expression_in_iteration_statement2360)
                        self.expression()
                        self.following.pop()
                        if self.failed:
                            return



                    self.match(self.input, 62, self.FOLLOW_62_in_iteration_statement2363)
                    if self.failed:
                        return
                    self.following.append(self.FOLLOW_statement_in_iteration_statement2365)
                    self.statement()
                    self.following.pop()
                    if self.failed:
                        return
                    if self.backtracking == 0:
                        # Record the for-condition (second expression_statement).
                        self.StorePredicateExpression(e.start.line, e.start.charPositionInLine, e.stop.line, e.stop.charPositionInLine, self.input.toString(e.start,e.stop))




            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            if self.backtracking > 0:
                # Cache outcome for rule index 70 (iteration_statement).
                self.memoize(self.input, 70, iteration_statement_StartIndex)

            pass

        return

    # $ANTLR end iteration_statement


    # $ANTLR start jump_statement
    # C.g:524:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );
    def jump_statement(self, ):
        """Parse a C jump statement: goto, continue, break, or return [expr]."""

        jump_statement_StartIndex = self.input.index()
        try:
            try:
                if self.backtracking > 0 and self.alreadyParsedRule(self.input, 71):
                    return

                # C.g:525:2: ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' )
                # Tokens: 113 'goto', 114 'continue', 115 'break', 116 'return'.
                # For 'return', a second lookahead token distinguishes a bare
                # 'return ;' (token 25 = ';') from 'return expression ;'.
                alt101 = 5
                LA101 = self.input.LA(1)
                if LA101 == 113:
                    alt101 = 1
                elif LA101 == 114:
                    alt101 = 2
                elif LA101 == 115:
                    alt101 = 3
                elif LA101 == 116:
                    LA101_4 = self.input.LA(2)

                    if (LA101_4 == 25) :
                        alt101 = 4
                    elif ((IDENTIFIER <= LA101_4 <= FLOATING_POINT_LITERAL) or LA101_4 == 61 or LA101_4 == 65 or (67 <= LA101_4 <= 68) or (71 <= LA101_4 <= 73) or (76 <= LA101_4 <= 78)) :
                        alt101 = 5
                    else:
                        if self.backtracking > 0:
self.failed = True + return + + nvae = NoViableAltException("524:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 4, self.input) + + raise nvae + + else: + if self.backtracking > 0: + self.failed = True + return + + nvae = NoViableAltException("524:1: jump_statement : ( 'goto' IDENTIFIER ';' | 'continue' ';' | 'break' ';' | 'return' ';' | 'return' expression ';' );", 101, 0, self.input) + + raise nvae + + if alt101 == 1: + # C.g:525:4: 'goto' IDENTIFIER ';' + self.match(self.input, 113, self.FOLLOW_113_in_jump_statement2378) + if self.failed: + return + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_jump_statement2380) + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2382) + if self.failed: + return + + + elif alt101 == 2: + # C.g:526:4: 'continue' ';' + self.match(self.input, 114, self.FOLLOW_114_in_jump_statement2387) + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2389) + if self.failed: + return + + + elif alt101 == 3: + # C.g:527:4: 'break' ';' + self.match(self.input, 115, self.FOLLOW_115_in_jump_statement2394) + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2396) + if self.failed: + return + + + elif alt101 == 4: + # C.g:528:4: 'return' ';' + self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2401) + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2403) + if self.failed: + return + + + elif alt101 == 5: + # C.g:529:4: 'return' expression ';' + self.match(self.input, 116, self.FOLLOW_116_in_jump_statement2408) + if self.failed: + return + self.following.append(self.FOLLOW_expression_in_jump_statement2410) + self.expression() + self.following.pop() + if self.failed: + return + self.match(self.input, 25, self.FOLLOW_25_in_jump_statement2412) + if self.failed: + return + + + + except RecognitionException, re: 
+ self.reportError(re) + self.recover(self.input, re) + finally: + if self.backtracking > 0: + self.memoize(self.input, 71, jump_statement_StartIndex) + + pass + + return + + # $ANTLR end jump_statement + + # $ANTLR start synpred2 + def synpred2_fragment(self, ): + # C.g:67:6: ( declaration_specifiers ) + # C.g:67:6: declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_synpred290) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred2 + + + + # $ANTLR start synpred4 + def synpred4_fragment(self, ): + # C.g:67:4: ( ( declaration_specifiers )? declarator ( declaration )* '{' ) + # C.g:67:6: ( declaration_specifiers )? declarator ( declaration )* '{' + # C.g:67:6: ( declaration_specifiers )? + alt102 = 2 + LA102 = self.input.LA(1) + if LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33 or LA102 == 34 or LA102 == 35 or LA102 == 36 or LA102 == 37 or LA102 == 38 or LA102 == 39 or LA102 == 40 or LA102 == 41 or LA102 == 42 or LA102 == 45 or LA102 == 46 or LA102 == 48 or LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57: + alt102 = 1 + elif LA102 == IDENTIFIER: + LA102 = self.input.LA(2) + if LA102 == 65: + alt102 = 1 + elif LA102 == 58: + LA102_21 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 59: + LA102_22 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 60: + LA102_23 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == IDENTIFIER: + LA102_24 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 61: + LA102_25 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 29 or LA102 == 30 or LA102 == 31 or LA102 == 32 or LA102 == 33: + LA102_26 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 34: + LA102_27 = self.input.LA(3) + + if (self.synpred2()) 
: + alt102 = 1 + elif LA102 == 35: + LA102_28 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 36: + LA102_29 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 37: + LA102_30 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 38: + LA102_31 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 39: + LA102_32 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 40: + LA102_33 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 41: + LA102_34 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 42: + LA102_35 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 45 or LA102 == 46: + LA102_36 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 48: + LA102_37 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 49 or LA102 == 50 or LA102 == 51 or LA102 == 52 or LA102 == 53 or LA102 == 54 or LA102 == 55 or LA102 == 56 or LA102 == 57: + LA102_38 = self.input.LA(3) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 58: + LA102_14 = self.input.LA(2) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 59: + LA102_16 = self.input.LA(2) + + if (self.synpred2()) : + alt102 = 1 + elif LA102 == 60: + LA102_17 = self.input.LA(2) + + if (self.synpred2()) : + alt102 = 1 + if alt102 == 1: + # C.g:0:0: declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_synpred490) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + + + + self.following.append(self.FOLLOW_declarator_in_synpred493) + self.declarator() + self.following.pop() + if self.failed: + return + # C.g:67:41: ( declaration )* + while True: #loop103 + alt103 = 2 + LA103_0 = self.input.LA(1) + + if (LA103_0 == IDENTIFIER or LA103_0 == 26 or (29 <= LA103_0 <= 42) or (45 <= LA103_0 <= 46) or (48 <= LA103_0 <= 60)) : + alt103 = 1 + + + 
if alt103 == 1: + # C.g:0:0: declaration + self.following.append(self.FOLLOW_declaration_in_synpred495) + self.declaration() + self.following.pop() + if self.failed: + return + + + else: + break #loop103 + + + self.match(self.input, 43, self.FOLLOW_43_in_synpred498) + if self.failed: + return + + + # $ANTLR end synpred4 + + + + # $ANTLR start synpred5 + def synpred5_fragment(self, ): + # C.g:68:4: ( declaration ) + # C.g:68:4: declaration + self.following.append(self.FOLLOW_declaration_in_synpred5108) + self.declaration() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred5 + + + + # $ANTLR start synpred7 + def synpred7_fragment(self, ): + # C.g:94:6: ( declaration_specifiers ) + # C.g:94:6: declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_synpred7147) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred7 + + + + # $ANTLR start synpred10 + def synpred10_fragment(self, ): + # C.g:115:18: ( declaration_specifiers ) + # C.g:115:18: declaration_specifiers + self.following.append(self.FOLLOW_declaration_specifiers_in_synpred10197) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred10 + + + + # $ANTLR start synpred14 + def synpred14_fragment(self, ): + # C.g:132:7: ( type_specifier ) + # C.g:132:7: type_specifier + self.following.append(self.FOLLOW_type_specifier_in_synpred14262) + self.type_specifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred14 + + + + # $ANTLR start synpred15 + def synpred15_fragment(self, ): + # C.g:133:13: ( type_qualifier ) + # C.g:133:13: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred15276) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred15 + + + + # $ANTLR start synpred33 + def synpred33_fragment(self, ): + # C.g:173:16: ( type_qualifier ) + # C.g:173:16: 
type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred33434) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred33 + + + + # $ANTLR start synpred34 + def synpred34_fragment(self, ): + # C.g:173:4: ( IDENTIFIER ( type_qualifier )* declarator ) + # C.g:173:5: IDENTIFIER ( type_qualifier )* declarator + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred34432) + if self.failed: + return + # C.g:173:16: ( type_qualifier )* + while True: #loop106 + alt106 = 2 + LA106 = self.input.LA(1) + if LA106 == 58: + LA106_2 = self.input.LA(2) + + if (self.synpred33()) : + alt106 = 1 + + + elif LA106 == 59: + LA106_3 = self.input.LA(2) + + if (self.synpred33()) : + alt106 = 1 + + + elif LA106 == 60: + LA106_4 = self.input.LA(2) + + if (self.synpred33()) : + alt106 = 1 + + + elif LA106 == 49 or LA106 == 50 or LA106 == 51 or LA106 == 52 or LA106 == 53 or LA106 == 54 or LA106 == 55 or LA106 == 56 or LA106 == 57: + alt106 = 1 + + if alt106 == 1: + # C.g:0:0: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred34434) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + else: + break #loop106 + + + self.following.append(self.FOLLOW_declarator_in_synpred34437) + self.declarator() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred34 + + + + # $ANTLR start synpred39 + def synpred39_fragment(self, ): + # C.g:201:6: ( type_qualifier ) + # C.g:201:6: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred39556) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred39 + + + + # $ANTLR start synpred40 + def synpred40_fragment(self, ): + # C.g:201:23: ( type_specifier ) + # C.g:201:23: type_specifier + self.following.append(self.FOLLOW_type_specifier_in_synpred40560) + self.type_specifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred40 
+ + + + # $ANTLR start synpred65 + def synpred65_fragment(self, ): + # C.g:244:4: ( ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator ) + # C.g:244:4: ( pointer )? ( 'EFIAPI' )? ( 'EFI_BOOTSERVICE' )? ( 'EFI_RUNTIMESERVICE' )? direct_declarator + # C.g:244:4: ( pointer )? + alt111 = 2 + LA111_0 = self.input.LA(1) + + if (LA111_0 == 65) : + alt111 = 1 + if alt111 == 1: + # C.g:0:0: pointer + self.following.append(self.FOLLOW_pointer_in_synpred65769) + self.pointer() + self.following.pop() + if self.failed: + return + + + + # C.g:244:13: ( 'EFIAPI' )? + alt112 = 2 + LA112_0 = self.input.LA(1) + + if (LA112_0 == 58) : + alt112 = 1 + if alt112 == 1: + # C.g:244:14: 'EFIAPI' + self.match(self.input, 58, self.FOLLOW_58_in_synpred65773) + if self.failed: + return + + + + # C.g:244:25: ( 'EFI_BOOTSERVICE' )? + alt113 = 2 + LA113_0 = self.input.LA(1) + + if (LA113_0 == 59) : + alt113 = 1 + if alt113 == 1: + # C.g:244:26: 'EFI_BOOTSERVICE' + self.match(self.input, 59, self.FOLLOW_59_in_synpred65778) + if self.failed: + return + + + + # C.g:244:46: ( 'EFI_RUNTIMESERVICE' )? 
+ alt114 = 2 + LA114_0 = self.input.LA(1) + + if (LA114_0 == 60) : + alt114 = 1 + if alt114 == 1: + # C.g:244:47: 'EFI_RUNTIMESERVICE' + self.match(self.input, 60, self.FOLLOW_60_in_synpred65783) + if self.failed: + return + + + + self.following.append(self.FOLLOW_direct_declarator_in_synpred65787) + self.direct_declarator() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred65 + + + + # $ANTLR start synpred66 + def synpred66_fragment(self, ): + # C.g:250:15: ( declarator_suffix ) + # C.g:250:15: declarator_suffix + self.following.append(self.FOLLOW_declarator_suffix_in_synpred66806) + self.declarator_suffix() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred66 + + + + # $ANTLR start synpred68 + def synpred68_fragment(self, ): + # C.g:251:9: ( 'EFIAPI' ) + # C.g:251:9: 'EFIAPI' + self.match(self.input, 58, self.FOLLOW_58_in_synpred68815) + if self.failed: + return + + + # $ANTLR end synpred68 + + + + # $ANTLR start synpred69 + def synpred69_fragment(self, ): + # C.g:251:35: ( declarator_suffix ) + # C.g:251:35: declarator_suffix + self.following.append(self.FOLLOW_declarator_suffix_in_synpred69823) + self.declarator_suffix() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred69 + + + + # $ANTLR start synpred72 + def synpred72_fragment(self, ): + # C.g:257:9: ( '(' parameter_type_list ')' ) + # C.g:257:9: '(' parameter_type_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_synpred72863) + if self.failed: + return + self.following.append(self.FOLLOW_parameter_type_list_in_synpred72865) + self.parameter_type_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred72867) + if self.failed: + return + + + # $ANTLR end synpred72 + + + + # $ANTLR start synpred73 + def synpred73_fragment(self, ): + # C.g:258:9: ( '(' identifier_list ')' ) + # C.g:258:9: '(' identifier_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_synpred73877) + 
if self.failed: + return + self.following.append(self.FOLLOW_identifier_list_in_synpred73879) + self.identifier_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred73881) + if self.failed: + return + + + # $ANTLR end synpred73 + + + + # $ANTLR start synpred74 + def synpred74_fragment(self, ): + # C.g:263:8: ( type_qualifier ) + # C.g:263:8: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred74906) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred74 + + + + # $ANTLR start synpred75 + def synpred75_fragment(self, ): + # C.g:263:24: ( pointer ) + # C.g:263:24: pointer + self.following.append(self.FOLLOW_pointer_in_synpred75909) + self.pointer() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred75 + + + + # $ANTLR start synpred76 + def synpred76_fragment(self, ): + # C.g:263:4: ( '*' ( type_qualifier )+ ( pointer )? ) + # C.g:263:4: '*' ( type_qualifier )+ ( pointer )? + self.match(self.input, 65, self.FOLLOW_65_in_synpred76904) + if self.failed: + return + # C.g:263:8: ( type_qualifier )+ + cnt116 = 0 + while True: #loop116 + alt116 = 2 + LA116_0 = self.input.LA(1) + + if ((49 <= LA116_0 <= 60)) : + alt116 = 1 + + + if alt116 == 1: + # C.g:0:0: type_qualifier + self.following.append(self.FOLLOW_type_qualifier_in_synpred76906) + self.type_qualifier() + self.following.pop() + if self.failed: + return + + + else: + if cnt116 >= 1: + break #loop116 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(116, self.input) + raise eee + + cnt116 += 1 + + + # C.g:263:24: ( pointer )? 
+ alt117 = 2 + LA117_0 = self.input.LA(1) + + if (LA117_0 == 65) : + alt117 = 1 + if alt117 == 1: + # C.g:0:0: pointer + self.following.append(self.FOLLOW_pointer_in_synpred76909) + self.pointer() + self.following.pop() + if self.failed: + return + + + + + + # $ANTLR end synpred76 + + + + # $ANTLR start synpred77 + def synpred77_fragment(self, ): + # C.g:264:4: ( '*' pointer ) + # C.g:264:4: '*' pointer + self.match(self.input, 65, self.FOLLOW_65_in_synpred77915) + if self.failed: + return + self.following.append(self.FOLLOW_pointer_in_synpred77917) + self.pointer() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred77 + + + + # $ANTLR start synpred80 + def synpred80_fragment(self, ): + # C.g:273:32: ( 'OPTIONAL' ) + # C.g:273:32: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_synpred80962) + if self.failed: + return + + + # $ANTLR end synpred80 + + + + # $ANTLR start synpred81 + def synpred81_fragment(self, ): + # C.g:273:27: ( ',' ( 'OPTIONAL' )? parameter_declaration ) + # C.g:273:27: ',' ( 'OPTIONAL' )? parameter_declaration + self.match(self.input, 27, self.FOLLOW_27_in_synpred81959) + if self.failed: + return + # C.g:273:31: ( 'OPTIONAL' )? 
+ alt119 = 2 + LA119_0 = self.input.LA(1) + + if (LA119_0 == 53) : + LA119_1 = self.input.LA(2) + + if (self.synpred80()) : + alt119 = 1 + if alt119 == 1: + # C.g:273:32: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_synpred81962) + if self.failed: + return + + + + self.following.append(self.FOLLOW_parameter_declaration_in_synpred81966) + self.parameter_declaration() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred81 + + + + # $ANTLR start synpred82 + def synpred82_fragment(self, ): + # C.g:277:28: ( declarator ) + # C.g:277:28: declarator + self.following.append(self.FOLLOW_declarator_in_synpred82982) + self.declarator() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred82 + + + + # $ANTLR start synpred83 + def synpred83_fragment(self, ): + # C.g:277:39: ( abstract_declarator ) + # C.g:277:39: abstract_declarator + self.following.append(self.FOLLOW_abstract_declarator_in_synpred83984) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred83 + + + + # $ANTLR start synpred85 + def synpred85_fragment(self, ): + # C.g:277:4: ( declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? ) + # C.g:277:4: declaration_specifiers ( declarator | abstract_declarator )* ( 'OPTIONAL' )? 
+ self.following.append(self.FOLLOW_declaration_specifiers_in_synpred85979) + self.declaration_specifiers() + self.following.pop() + if self.failed: + return + # C.g:277:27: ( declarator | abstract_declarator )* + while True: #loop120 + alt120 = 3 + LA120 = self.input.LA(1) + if LA120 == 65: + LA120_3 = self.input.LA(2) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == IDENTIFIER or LA120 == 58 or LA120 == 59 or LA120 == 60: + alt120 = 1 + elif LA120 == 61: + LA120 = self.input.LA(2) + if LA120 == 29 or LA120 == 30 or LA120 == 31 or LA120 == 32 or LA120 == 33 or LA120 == 34 or LA120 == 35 or LA120 == 36 or LA120 == 37 or LA120 == 38 or LA120 == 39 or LA120 == 40 or LA120 == 41 or LA120 == 42 or LA120 == 45 or LA120 == 46 or LA120 == 48 or LA120 == 49 or LA120 == 50 or LA120 == 51 or LA120 == 52 or LA120 == 53 or LA120 == 54 or LA120 == 55 or LA120 == 56 or LA120 == 57 or LA120 == 62 or LA120 == 63: + alt120 = 2 + elif LA120 == 58: + LA120_21 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == 65: + LA120_22 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == 59: + LA120_23 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == 60: + LA120_24 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == IDENTIFIER: + LA120_25 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + elif LA120 == 61: + LA120_26 = self.input.LA(3) + + if (self.synpred82()) : + alt120 = 1 + elif (self.synpred83()) : + alt120 = 2 + + + + elif LA120 == 63: + alt120 = 2 + + if alt120 == 1: + # C.g:277:28: declarator + self.following.append(self.FOLLOW_declarator_in_synpred85982) + self.declarator() + self.following.pop() + if 
self.failed: + return + + + elif alt120 == 2: + # C.g:277:39: abstract_declarator + self.following.append(self.FOLLOW_abstract_declarator_in_synpred85984) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + + + else: + break #loop120 + + + # C.g:277:61: ( 'OPTIONAL' )? + alt121 = 2 + LA121_0 = self.input.LA(1) + + if (LA121_0 == 53) : + alt121 = 1 + if alt121 == 1: + # C.g:277:62: 'OPTIONAL' + self.match(self.input, 53, self.FOLLOW_53_in_synpred85989) + if self.failed: + return + + + + + + # $ANTLR end synpred85 + + + + # $ANTLR start synpred89 + def synpred89_fragment(self, ): + # C.g:288:4: ( specifier_qualifier_list ( abstract_declarator )? ) + # C.g:288:4: specifier_qualifier_list ( abstract_declarator )? + self.following.append(self.FOLLOW_specifier_qualifier_list_in_synpred891031) + self.specifier_qualifier_list() + self.following.pop() + if self.failed: + return + # C.g:288:29: ( abstract_declarator )? + alt122 = 2 + LA122_0 = self.input.LA(1) + + if (LA122_0 == 61 or LA122_0 == 63 or LA122_0 == 65) : + alt122 = 1 + if alt122 == 1: + # C.g:0:0: abstract_declarator + self.following.append(self.FOLLOW_abstract_declarator_in_synpred891033) + self.abstract_declarator() + self.following.pop() + if self.failed: + return + + + + + + # $ANTLR end synpred89 + + + + # $ANTLR start synpred90 + def synpred90_fragment(self, ): + # C.g:293:12: ( direct_abstract_declarator ) + # C.g:293:12: direct_abstract_declarator + self.following.append(self.FOLLOW_direct_abstract_declarator_in_synpred901052) + self.direct_abstract_declarator() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred90 + + + + # $ANTLR start synpred92 + def synpred92_fragment(self, ): + # C.g:298:6: ( '(' abstract_declarator ')' ) + # C.g:298:6: '(' abstract_declarator ')' + self.match(self.input, 61, self.FOLLOW_61_in_synpred921071) + if self.failed: + return + self.following.append(self.FOLLOW_abstract_declarator_in_synpred921073) + 
self.abstract_declarator() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred921075) + if self.failed: + return + + + # $ANTLR end synpred92 + + + + # $ANTLR start synpred93 + def synpred93_fragment(self, ): + # C.g:298:65: ( abstract_declarator_suffix ) + # C.g:298:65: abstract_declarator_suffix + self.following.append(self.FOLLOW_abstract_declarator_suffix_in_synpred931083) + self.abstract_declarator_suffix() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred93 + + + + # $ANTLR start synpred108 + def synpred108_fragment(self, ): + # C.g:333:4: ( '(' type_name ')' cast_expression ) + # C.g:333:4: '(' type_name ')' cast_expression + self.match(self.input, 61, self.FOLLOW_61_in_synpred1081267) + if self.failed: + return + self.following.append(self.FOLLOW_type_name_in_synpred1081269) + self.type_name() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred1081271) + if self.failed: + return + self.following.append(self.FOLLOW_cast_expression_in_synpred1081273) + self.cast_expression() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred108 + + + + # $ANTLR start synpred113 + def synpred113_fragment(self, ): + # C.g:342:4: ( 'sizeof' unary_expression ) + # C.g:342:4: 'sizeof' unary_expression + self.match(self.input, 73, self.FOLLOW_73_in_synpred1131315) + if self.failed: + return + self.following.append(self.FOLLOW_unary_expression_in_synpred1131317) + self.unary_expression() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred113 + + + + # $ANTLR start synpred116 + def synpred116_fragment(self, ): + # C.g:356:13: ( '(' argument_expression_list ')' ) + # C.g:356:13: '(' argument_expression_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_synpred1161405) + if self.failed: + return + self.following.append(self.FOLLOW_argument_expression_list_in_synpred1161409) + 
self.argument_expression_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred1161413) + if self.failed: + return + + + # $ANTLR end synpred116 + + + + # $ANTLR start synpred117 + def synpred117_fragment(self, ): + # C.g:357:13: ( '(' macro_parameter_list ')' ) + # C.g:357:13: '(' macro_parameter_list ')' + self.match(self.input, 61, self.FOLLOW_61_in_synpred1171429) + if self.failed: + return + self.following.append(self.FOLLOW_macro_parameter_list_in_synpred1171431) + self.macro_parameter_list() + self.following.pop() + if self.failed: + return + self.match(self.input, 62, self.FOLLOW_62_in_synpred1171433) + if self.failed: + return + + + # $ANTLR end synpred117 + + + + # $ANTLR start synpred119 + def synpred119_fragment(self, ): + # C.g:359:13: ( '*' IDENTIFIER ) + # C.g:359:13: '*' IDENTIFIER + self.match(self.input, 65, self.FOLLOW_65_in_synpred1191467) + if self.failed: + return + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1191471) + if self.failed: + return + + + # $ANTLR end synpred119 + + + + # $ANTLR start synpred136 + def synpred136_fragment(self, ): + # C.g:390:20: ( STRING_LITERAL ) + # C.g:390:20: STRING_LITERAL + self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1361668) + if self.failed: + return + + + # $ANTLR end synpred136 + + + + # $ANTLR start synpred137 + def synpred137_fragment(self, ): + # C.g:390:8: ( ( IDENTIFIER )* ( STRING_LITERAL )+ ) + # C.g:390:8: ( IDENTIFIER )* ( STRING_LITERAL )+ + # C.g:390:8: ( IDENTIFIER )* + while True: #loop125 + alt125 = 2 + LA125_0 = self.input.LA(1) + + if (LA125_0 == IDENTIFIER) : + alt125 = 1 + + + if alt125 == 1: + # C.g:0:0: IDENTIFIER + self.match(self.input, IDENTIFIER, self.FOLLOW_IDENTIFIER_in_synpred1371665) + if self.failed: + return + + + else: + break #loop125 + + + # C.g:390:20: ( STRING_LITERAL )+ + cnt126 = 0 + while True: #loop126 + alt126 = 2 + LA126_0 = self.input.LA(1) + + if 
(LA126_0 == STRING_LITERAL) : + alt126 = 1 + + + if alt126 == 1: + # C.g:0:0: STRING_LITERAL + self.match(self.input, STRING_LITERAL, self.FOLLOW_STRING_LITERAL_in_synpred1371668) + if self.failed: + return + + + else: + if cnt126 >= 1: + break #loop126 + + if self.backtracking > 0: + self.failed = True + return + + eee = EarlyExitException(126, self.input) + raise eee + + cnt126 += 1 + + + + + # $ANTLR end synpred137 + + + + # $ANTLR start synpred141 + def synpred141_fragment(self, ): + # C.g:405:4: ( lvalue assignment_operator assignment_expression ) + # C.g:405:4: lvalue assignment_operator assignment_expression + self.following.append(self.FOLLOW_lvalue_in_synpred1411729) + self.lvalue() + self.following.pop() + if self.failed: + return + self.following.append(self.FOLLOW_assignment_operator_in_synpred1411731) + self.assignment_operator() + self.following.pop() + if self.failed: + return + self.following.append(self.FOLLOW_assignment_expression_in_synpred1411733) + self.assignment_expression() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred141 + + + + # $ANTLR start synpred168 + def synpred168_fragment(self, ): + # C.g:467:4: ( expression_statement ) + # C.g:467:4: expression_statement + self.following.append(self.FOLLOW_expression_statement_in_synpred1682020) + self.expression_statement() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred168 + + + + # $ANTLR start synpred172 + def synpred172_fragment(self, ): + # C.g:471:4: ( macro_statement ) + # C.g:471:4: macro_statement + self.following.append(self.FOLLOW_macro_statement_in_synpred1722040) + self.macro_statement() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred172 + + + + # $ANTLR start synpred173 + def synpred173_fragment(self, ): + # C.g:472:4: ( asm2_statement ) + # C.g:472:4: asm2_statement + self.following.append(self.FOLLOW_asm2_statement_in_synpred1732045) + self.asm2_statement() + self.following.pop() + if 
self.failed: + return + + + # $ANTLR end synpred173 + + + + # $ANTLR start synpred180 + def synpred180_fragment(self, ): + # C.g:491:19: ( declaration ) + # C.g:491:19: declaration + self.following.append(self.FOLLOW_declaration_in_synpred1802151) + self.declaration() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred180 + + + + # $ANTLR start synpred181 + def synpred181_fragment(self, ): + # C.g:491:33: ( statement_list ) + # C.g:491:33: statement_list + self.following.append(self.FOLLOW_statement_list_in_synpred1812155) + self.statement_list() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred181 + + + + # $ANTLR start synpred185 + def synpred185_fragment(self, ): + # C.g:501:8: ( declaration ) + # C.g:501:8: declaration + self.following.append(self.FOLLOW_declaration_in_synpred1852210) + self.declaration() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred185 + + + + # $ANTLR start synpred187 + def synpred187_fragment(self, ): + # C.g:505:4: ( statement ) + # C.g:505:4: statement + self.following.append(self.FOLLOW_statement_in_synpred1872227) + self.statement() + self.following.pop() + if self.failed: + return + + + # $ANTLR end synpred187 + + + + def synpred185(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred185_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred7(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred7_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred14(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred14_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred65(self): + self.backtracking += 1 + start = self.input.mark() + 
self.synpred65_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred15(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred15_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred117(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred117_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred173(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred173_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred68(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred68_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred40(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred40_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred141(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred141_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred75(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred75_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred92(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred92_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred4(self): + self.backtracking += 1 
+ start = self.input.mark() + self.synpred4_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred85(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred85_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred39(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred39_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred76(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred76_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred119(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred119_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred90(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred90_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred187(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred187_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred33(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred33_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred2(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred2_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def 
synpred83(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred83_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred69(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred69_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred72(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred72_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred168(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred168_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred34(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred34_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred181(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred181_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred116(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred116_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred113(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred113_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred80(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred80_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed 
= False + return success + + def synpred73(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred73_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred89(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred89_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred10(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred10_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred81(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred81_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred180(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred180_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred136(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred136_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred77(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred77_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred172(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred172_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred137(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred137_fragment() + success = not self.failed + self.input.rewind(start) + 
self.backtracking -= 1 + self.failed = False + return success + + def synpred74(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred74_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred5(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred5_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred108(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred108_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred82(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred82_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred93(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred93_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + def synpred66(self): + self.backtracking += 1 + start = self.input.mark() + self.synpred66_fragment() + success = not self.failed + self.input.rewind(start) + self.backtracking -= 1 + self.failed = False + return success + + + + + + FOLLOW_external_declaration_in_translation_unit64 = frozenset([1, 4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65]) + FOLLOW_function_definition_in_external_declaration103 = frozenset([1]) + FOLLOW_declaration_in_external_declaration108 = frozenset([1]) + FOLLOW_macro_statement_in_external_declaration113 = frozenset([1, 25]) + FOLLOW_25_in_external_declaration116 = frozenset([1]) + FOLLOW_declaration_specifiers_in_function_definition147 = frozenset([4, 58, 59, 60, 61, 65]) + 
FOLLOW_declarator_in_function_definition150 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_declaration_in_function_definition156 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_compound_statement_in_function_definition161 = frozenset([1]) + FOLLOW_compound_statement_in_function_definition170 = frozenset([1]) + FOLLOW_26_in_declaration193 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65]) + FOLLOW_declaration_specifiers_in_declaration197 = frozenset([4, 58, 59, 60, 61, 65]) + FOLLOW_init_declarator_list_in_declaration206 = frozenset([25]) + FOLLOW_25_in_declaration210 = frozenset([1]) + FOLLOW_declaration_specifiers_in_declaration224 = frozenset([4, 25, 58, 59, 60, 61, 65]) + FOLLOW_init_declarator_list_in_declaration228 = frozenset([25]) + FOLLOW_25_in_declaration233 = frozenset([1]) + FOLLOW_storage_class_specifier_in_declaration_specifiers254 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_specifier_in_declaration_specifiers262 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_qualifier_in_declaration_specifiers276 = frozenset([1, 4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_init_declarator_in_init_declarator_list298 = frozenset([1, 27]) + FOLLOW_27_in_init_declarator_list301 = frozenset([4, 58, 59, 60, 61, 65]) + FOLLOW_init_declarator_in_init_declarator_list303 = frozenset([1, 27]) + FOLLOW_declarator_in_init_declarator316 = frozenset([1, 28]) + FOLLOW_28_in_init_declarator319 = 
frozenset([4, 5, 6, 7, 8, 9, 10, 43, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_initializer_in_init_declarator321 = frozenset([1]) + FOLLOW_set_in_storage_class_specifier0 = frozenset([1]) + FOLLOW_34_in_type_specifier366 = frozenset([1]) + FOLLOW_35_in_type_specifier371 = frozenset([1]) + FOLLOW_36_in_type_specifier376 = frozenset([1]) + FOLLOW_37_in_type_specifier381 = frozenset([1]) + FOLLOW_38_in_type_specifier386 = frozenset([1]) + FOLLOW_39_in_type_specifier391 = frozenset([1]) + FOLLOW_40_in_type_specifier396 = frozenset([1]) + FOLLOW_41_in_type_specifier401 = frozenset([1]) + FOLLOW_42_in_type_specifier406 = frozenset([1]) + FOLLOW_struct_or_union_specifier_in_type_specifier413 = frozenset([1]) + FOLLOW_enum_specifier_in_type_specifier423 = frozenset([1]) + FOLLOW_type_id_in_type_specifier441 = frozenset([1]) + FOLLOW_IDENTIFIER_in_type_id457 = frozenset([1]) + FOLLOW_struct_or_union_in_struct_or_union_specifier484 = frozenset([4, 43]) + FOLLOW_IDENTIFIER_in_struct_or_union_specifier486 = frozenset([43]) + FOLLOW_43_in_struct_or_union_specifier489 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_struct_declaration_list_in_struct_or_union_specifier491 = frozenset([44]) + FOLLOW_44_in_struct_or_union_specifier493 = frozenset([1]) + FOLLOW_struct_or_union_in_struct_or_union_specifier498 = frozenset([4]) + FOLLOW_IDENTIFIER_in_struct_or_union_specifier500 = frozenset([1]) + FOLLOW_set_in_struct_or_union0 = frozenset([1]) + FOLLOW_struct_declaration_in_struct_declaration_list527 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_specifier_qualifier_list_in_struct_declaration539 = frozenset([4, 47, 58, 59, 60, 61, 65]) + FOLLOW_struct_declarator_list_in_struct_declaration541 = frozenset([25]) + FOLLOW_25_in_struct_declaration543 = frozenset([1]) + FOLLOW_type_qualifier_in_specifier_qualifier_list556 = 
frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_specifier_in_specifier_qualifier_list560 = frozenset([1, 4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_struct_declarator_in_struct_declarator_list574 = frozenset([1, 27]) + FOLLOW_27_in_struct_declarator_list577 = frozenset([4, 47, 58, 59, 60, 61, 65]) + FOLLOW_struct_declarator_in_struct_declarator_list579 = frozenset([1, 27]) + FOLLOW_declarator_in_struct_declarator592 = frozenset([1, 47]) + FOLLOW_47_in_struct_declarator595 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_constant_expression_in_struct_declarator597 = frozenset([1]) + FOLLOW_47_in_struct_declarator604 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_constant_expression_in_struct_declarator606 = frozenset([1]) + FOLLOW_48_in_enum_specifier624 = frozenset([43]) + FOLLOW_43_in_enum_specifier626 = frozenset([4]) + FOLLOW_enumerator_list_in_enum_specifier628 = frozenset([27, 44]) + FOLLOW_27_in_enum_specifier630 = frozenset([44]) + FOLLOW_44_in_enum_specifier633 = frozenset([1]) + FOLLOW_48_in_enum_specifier638 = frozenset([4]) + FOLLOW_IDENTIFIER_in_enum_specifier640 = frozenset([43]) + FOLLOW_43_in_enum_specifier642 = frozenset([4]) + FOLLOW_enumerator_list_in_enum_specifier644 = frozenset([27, 44]) + FOLLOW_27_in_enum_specifier646 = frozenset([44]) + FOLLOW_44_in_enum_specifier649 = frozenset([1]) + FOLLOW_48_in_enum_specifier654 = frozenset([4]) + FOLLOW_IDENTIFIER_in_enum_specifier656 = frozenset([1]) + FOLLOW_enumerator_in_enumerator_list667 = frozenset([1, 27]) + FOLLOW_27_in_enumerator_list670 = frozenset([4]) + FOLLOW_enumerator_in_enumerator_list672 = frozenset([1, 27]) + FOLLOW_IDENTIFIER_in_enumerator685 = frozenset([1, 28]) + FOLLOW_28_in_enumerator688 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + 
FOLLOW_constant_expression_in_enumerator690 = frozenset([1]) + FOLLOW_set_in_type_qualifier0 = frozenset([1]) + FOLLOW_pointer_in_declarator769 = frozenset([4, 58, 59, 60, 61]) + FOLLOW_58_in_declarator773 = frozenset([4, 59, 60, 61]) + FOLLOW_59_in_declarator778 = frozenset([4, 60, 61]) + FOLLOW_60_in_declarator783 = frozenset([4, 61]) + FOLLOW_direct_declarator_in_declarator787 = frozenset([1]) + FOLLOW_pointer_in_declarator793 = frozenset([1]) + FOLLOW_IDENTIFIER_in_direct_declarator804 = frozenset([1, 61, 63]) + FOLLOW_declarator_suffix_in_direct_declarator806 = frozenset([1, 61, 63]) + FOLLOW_61_in_direct_declarator812 = frozenset([4, 58, 59, 60, 61, 65]) + FOLLOW_58_in_direct_declarator815 = frozenset([4, 58, 59, 60, 61, 65]) + FOLLOW_declarator_in_direct_declarator819 = frozenset([62]) + FOLLOW_62_in_direct_declarator821 = frozenset([61, 63]) + FOLLOW_declarator_suffix_in_direct_declarator823 = frozenset([1, 61, 63]) + FOLLOW_63_in_declarator_suffix837 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_constant_expression_in_declarator_suffix839 = frozenset([64]) + FOLLOW_64_in_declarator_suffix841 = frozenset([1]) + FOLLOW_63_in_declarator_suffix851 = frozenset([64]) + FOLLOW_64_in_declarator_suffix853 = frozenset([1]) + FOLLOW_61_in_declarator_suffix863 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_type_list_in_declarator_suffix865 = frozenset([62]) + FOLLOW_62_in_declarator_suffix867 = frozenset([1]) + FOLLOW_61_in_declarator_suffix877 = frozenset([4]) + FOLLOW_identifier_list_in_declarator_suffix879 = frozenset([62]) + FOLLOW_62_in_declarator_suffix881 = frozenset([1]) + FOLLOW_61_in_declarator_suffix891 = frozenset([62]) + FOLLOW_62_in_declarator_suffix893 = frozenset([1]) + FOLLOW_65_in_pointer904 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_qualifier_in_pointer906 = frozenset([1, 
49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_pointer_in_pointer909 = frozenset([1]) + FOLLOW_65_in_pointer915 = frozenset([65]) + FOLLOW_pointer_in_pointer917 = frozenset([1]) + FOLLOW_65_in_pointer922 = frozenset([1]) + FOLLOW_parameter_list_in_parameter_type_list933 = frozenset([1, 27]) + FOLLOW_27_in_parameter_type_list936 = frozenset([53, 66]) + FOLLOW_53_in_parameter_type_list939 = frozenset([66]) + FOLLOW_66_in_parameter_type_list943 = frozenset([1]) + FOLLOW_parameter_declaration_in_parameter_list956 = frozenset([1, 27]) + FOLLOW_27_in_parameter_list959 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_53_in_parameter_list962 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_declaration_in_parameter_list966 = frozenset([1, 27]) + FOLLOW_declaration_specifiers_in_parameter_declaration979 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_declarator_in_parameter_declaration982 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_abstract_declarator_in_parameter_declaration984 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_53_in_parameter_declaration989 = frozenset([1]) + FOLLOW_pointer_in_parameter_declaration998 = frozenset([4, 65]) + FOLLOW_IDENTIFIER_in_parameter_declaration1001 = frozenset([1]) + FOLLOW_IDENTIFIER_in_identifier_list1012 = frozenset([1, 27]) + FOLLOW_27_in_identifier_list1016 = frozenset([4]) + FOLLOW_IDENTIFIER_in_identifier_list1018 = frozenset([1, 27]) + FOLLOW_specifier_qualifier_list_in_type_name1031 = frozenset([1, 61, 63, 65]) + FOLLOW_abstract_declarator_in_type_name1033 = frozenset([1]) + FOLLOW_type_id_in_type_name1039 = frozenset([1]) + FOLLOW_pointer_in_abstract_declarator1050 = frozenset([1, 61, 63]) + FOLLOW_direct_abstract_declarator_in_abstract_declarator1052 = frozenset([1]) + 
FOLLOW_direct_abstract_declarator_in_abstract_declarator1058 = frozenset([1]) + FOLLOW_61_in_direct_abstract_declarator1071 = frozenset([61, 63, 65]) + FOLLOW_abstract_declarator_in_direct_abstract_declarator1073 = frozenset([62]) + FOLLOW_62_in_direct_abstract_declarator1075 = frozenset([1, 61, 63]) + FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1079 = frozenset([1, 61, 63]) + FOLLOW_abstract_declarator_suffix_in_direct_abstract_declarator1083 = frozenset([1, 61, 63]) + FOLLOW_63_in_abstract_declarator_suffix1095 = frozenset([64]) + FOLLOW_64_in_abstract_declarator_suffix1097 = frozenset([1]) + FOLLOW_63_in_abstract_declarator_suffix1102 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_constant_expression_in_abstract_declarator_suffix1104 = frozenset([64]) + FOLLOW_64_in_abstract_declarator_suffix1106 = frozenset([1]) + FOLLOW_61_in_abstract_declarator_suffix1111 = frozenset([62]) + FOLLOW_62_in_abstract_declarator_suffix1113 = frozenset([1]) + FOLLOW_61_in_abstract_declarator_suffix1118 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_type_list_in_abstract_declarator_suffix1120 = frozenset([62]) + FOLLOW_62_in_abstract_declarator_suffix1122 = frozenset([1]) + FOLLOW_assignment_expression_in_initializer1135 = frozenset([1]) + FOLLOW_43_in_initializer1140 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_initializer_list_in_initializer1142 = frozenset([27, 44]) + FOLLOW_27_in_initializer1144 = frozenset([44]) + FOLLOW_44_in_initializer1147 = frozenset([1]) + FOLLOW_initializer_in_initializer_list1158 = frozenset([1, 27]) + FOLLOW_27_in_initializer_list1161 = frozenset([4, 5, 6, 7, 8, 9, 10, 43, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_initializer_in_initializer_list1163 = frozenset([1, 27]) + FOLLOW_assignment_expression_in_argument_expression_list1181 = 
frozenset([1, 27, 53]) + FOLLOW_53_in_argument_expression_list1184 = frozenset([1, 27]) + FOLLOW_27_in_argument_expression_list1189 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_assignment_expression_in_argument_expression_list1191 = frozenset([1, 27, 53]) + FOLLOW_53_in_argument_expression_list1194 = frozenset([1, 27]) + FOLLOW_multiplicative_expression_in_additive_expression1210 = frozenset([1, 67, 68]) + FOLLOW_67_in_additive_expression1214 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_multiplicative_expression_in_additive_expression1216 = frozenset([1, 67, 68]) + FOLLOW_68_in_additive_expression1220 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_multiplicative_expression_in_additive_expression1222 = frozenset([1, 67, 68]) + FOLLOW_cast_expression_in_multiplicative_expression1236 = frozenset([1, 65, 69, 70]) + FOLLOW_65_in_multiplicative_expression1240 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_multiplicative_expression1242 = frozenset([1, 65, 69, 70]) + FOLLOW_69_in_multiplicative_expression1246 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_multiplicative_expression1248 = frozenset([1, 65, 69, 70]) + FOLLOW_70_in_multiplicative_expression1252 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_multiplicative_expression1254 = frozenset([1, 65, 69, 70]) + FOLLOW_61_in_cast_expression1267 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_name_in_cast_expression1269 = frozenset([62]) + FOLLOW_62_in_cast_expression1271 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_cast_expression1273 = frozenset([1]) + FOLLOW_unary_expression_in_cast_expression1278 = 
frozenset([1]) + FOLLOW_postfix_expression_in_unary_expression1289 = frozenset([1]) + FOLLOW_71_in_unary_expression1294 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_unary_expression_in_unary_expression1296 = frozenset([1]) + FOLLOW_72_in_unary_expression1301 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_unary_expression_in_unary_expression1303 = frozenset([1]) + FOLLOW_unary_operator_in_unary_expression1308 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_unary_expression1310 = frozenset([1]) + FOLLOW_73_in_unary_expression1315 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_unary_expression_in_unary_expression1317 = frozenset([1]) + FOLLOW_73_in_unary_expression1322 = frozenset([61]) + FOLLOW_61_in_unary_expression1324 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_name_in_unary_expression1326 = frozenset([62]) + FOLLOW_62_in_unary_expression1328 = frozenset([1]) + FOLLOW_primary_expression_in_postfix_expression1352 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_63_in_postfix_expression1368 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_postfix_expression1370 = frozenset([64]) + FOLLOW_64_in_postfix_expression1372 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_61_in_postfix_expression1386 = frozenset([62]) + FOLLOW_62_in_postfix_expression1390 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_61_in_postfix_expression1405 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_argument_expression_list_in_postfix_expression1409 = frozenset([62]) + FOLLOW_62_in_postfix_expression1413 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_61_in_postfix_expression1429 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 
40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_macro_parameter_list_in_postfix_expression1431 = frozenset([62]) + FOLLOW_62_in_postfix_expression1433 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_74_in_postfix_expression1447 = frozenset([4]) + FOLLOW_IDENTIFIER_in_postfix_expression1451 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_65_in_postfix_expression1467 = frozenset([4]) + FOLLOW_IDENTIFIER_in_postfix_expression1471 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_75_in_postfix_expression1487 = frozenset([4]) + FOLLOW_IDENTIFIER_in_postfix_expression1491 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_71_in_postfix_expression1507 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_72_in_postfix_expression1521 = frozenset([1, 61, 63, 65, 71, 72, 74, 75]) + FOLLOW_parameter_declaration_in_macro_parameter_list1544 = frozenset([1, 27]) + FOLLOW_27_in_macro_parameter_list1547 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_declaration_in_macro_parameter_list1549 = frozenset([1, 27]) + FOLLOW_set_in_unary_operator0 = frozenset([1]) + FOLLOW_IDENTIFIER_in_primary_expression1598 = frozenset([1]) + FOLLOW_constant_in_primary_expression1603 = frozenset([1]) + FOLLOW_61_in_primary_expression1608 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_primary_expression1610 = frozenset([62]) + FOLLOW_62_in_primary_expression1612 = frozenset([1]) + FOLLOW_HEX_LITERAL_in_constant1628 = frozenset([1]) + FOLLOW_OCTAL_LITERAL_in_constant1638 = frozenset([1]) + FOLLOW_DECIMAL_LITERAL_in_constant1648 = frozenset([1]) + FOLLOW_CHARACTER_LITERAL_in_constant1656 = frozenset([1]) + FOLLOW_IDENTIFIER_in_constant1665 = frozenset([4, 9]) + FOLLOW_STRING_LITERAL_in_constant1668 = frozenset([1, 4, 9]) + FOLLOW_IDENTIFIER_in_constant1673 = frozenset([1, 4]) + 
FOLLOW_FLOATING_POINT_LITERAL_in_constant1684 = frozenset([1]) + FOLLOW_assignment_expression_in_expression1700 = frozenset([1, 27]) + FOLLOW_27_in_expression1703 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_assignment_expression_in_expression1705 = frozenset([1, 27]) + FOLLOW_conditional_expression_in_constant_expression1718 = frozenset([1]) + FOLLOW_lvalue_in_assignment_expression1729 = frozenset([28, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88]) + FOLLOW_assignment_operator_in_assignment_expression1731 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_assignment_expression_in_assignment_expression1733 = frozenset([1]) + FOLLOW_conditional_expression_in_assignment_expression1738 = frozenset([1]) + FOLLOW_unary_expression_in_lvalue1750 = frozenset([1]) + FOLLOW_set_in_assignment_operator0 = frozenset([1]) + FOLLOW_logical_or_expression_in_conditional_expression1824 = frozenset([1, 89]) + FOLLOW_89_in_conditional_expression1827 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_conditional_expression1829 = frozenset([47]) + FOLLOW_47_in_conditional_expression1831 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_conditional_expression_in_conditional_expression1833 = frozenset([1]) + FOLLOW_logical_and_expression_in_logical_or_expression1848 = frozenset([1, 90]) + FOLLOW_90_in_logical_or_expression1851 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_logical_and_expression_in_logical_or_expression1853 = frozenset([1, 90]) + FOLLOW_inclusive_or_expression_in_logical_and_expression1866 = frozenset([1, 91]) + FOLLOW_91_in_logical_and_expression1869 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_inclusive_or_expression_in_logical_and_expression1871 = frozenset([1, 91]) + FOLLOW_exclusive_or_expression_in_inclusive_or_expression1884 = frozenset([1, 
92]) + FOLLOW_92_in_inclusive_or_expression1887 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_exclusive_or_expression_in_inclusive_or_expression1889 = frozenset([1, 92]) + FOLLOW_and_expression_in_exclusive_or_expression1902 = frozenset([1, 93]) + FOLLOW_93_in_exclusive_or_expression1905 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_and_expression_in_exclusive_or_expression1907 = frozenset([1, 93]) + FOLLOW_equality_expression_in_and_expression1920 = frozenset([1, 76]) + FOLLOW_76_in_and_expression1923 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_equality_expression_in_and_expression1925 = frozenset([1, 76]) + FOLLOW_relational_expression_in_equality_expression1937 = frozenset([1, 94, 95]) + FOLLOW_set_in_equality_expression1940 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_relational_expression_in_equality_expression1946 = frozenset([1, 94, 95]) + FOLLOW_shift_expression_in_relational_expression1960 = frozenset([1, 96, 97, 98, 99]) + FOLLOW_set_in_relational_expression1963 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_shift_expression_in_relational_expression1973 = frozenset([1, 96, 97, 98, 99]) + FOLLOW_additive_expression_in_shift_expression1986 = frozenset([1, 100, 101]) + FOLLOW_set_in_shift_expression1989 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_additive_expression_in_shift_expression1995 = frozenset([1, 100, 101]) + FOLLOW_labeled_statement_in_statement2010 = frozenset([1]) + FOLLOW_compound_statement_in_statement2015 = frozenset([1]) + FOLLOW_expression_statement_in_statement2020 = frozenset([1]) + FOLLOW_selection_statement_in_statement2025 = frozenset([1]) + FOLLOW_iteration_statement_in_statement2030 = frozenset([1]) + FOLLOW_jump_statement_in_statement2035 = frozenset([1]) + FOLLOW_macro_statement_in_statement2040 = 
frozenset([1]) + FOLLOW_asm2_statement_in_statement2045 = frozenset([1]) + FOLLOW_asm1_statement_in_statement2050 = frozenset([1]) + FOLLOW_asm_statement_in_statement2055 = frozenset([1]) + FOLLOW_declaration_in_statement2060 = frozenset([1]) + FOLLOW_102_in_asm2_statement2071 = frozenset([4]) + FOLLOW_IDENTIFIER_in_asm2_statement2074 = frozenset([61]) + FOLLOW_61_in_asm2_statement2076 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_set_in_asm2_statement2079 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_62_in_asm2_statement2086 = frozenset([25]) + FOLLOW_25_in_asm2_statement2088 = frozenset([1]) + FOLLOW_103_in_asm1_statement2100 = frozenset([43]) + FOLLOW_43_in_asm1_statement2102 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 
109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_set_in_asm1_statement2105 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_44_in_asm1_statement2112 = frozenset([1]) + FOLLOW_104_in_asm_statement2123 = frozenset([43]) + FOLLOW_43_in_asm_statement2125 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_set_in_asm_statement2128 = frozenset([4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_44_in_asm_statement2135 = frozenset([1]) + FOLLOW_IDENTIFIER_in_macro_statement2147 = frozenset([61]) + FOLLOW_61_in_macro_statement2149 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 65, 
67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_declaration_in_macro_statement2151 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_list_in_macro_statement2155 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 62, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_macro_statement2158 = frozenset([62]) + FOLLOW_62_in_macro_statement2161 = frozenset([1]) + FOLLOW_IDENTIFIER_in_labeled_statement2173 = frozenset([47]) + FOLLOW_47_in_labeled_statement2175 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_labeled_statement2177 = frozenset([1]) + FOLLOW_105_in_labeled_statement2182 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_constant_expression_in_labeled_statement2184 = frozenset([47]) + FOLLOW_47_in_labeled_statement2186 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_labeled_statement2188 = frozenset([1]) + FOLLOW_106_in_labeled_statement2193 = frozenset([47]) + FOLLOW_47_in_labeled_statement2195 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 
112, 113, 114, 115, 116]) + FOLLOW_statement_in_labeled_statement2197 = frozenset([1]) + FOLLOW_43_in_compound_statement2208 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_declaration_in_compound_statement2210 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_list_in_compound_statement2213 = frozenset([44]) + FOLLOW_44_in_compound_statement2216 = frozenset([1]) + FOLLOW_statement_in_statement_list2227 = frozenset([1, 4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_25_in_expression_statement2239 = frozenset([1]) + FOLLOW_expression_in_expression_statement2244 = frozenset([25]) + FOLLOW_25_in_expression_statement2246 = frozenset([1]) + FOLLOW_107_in_selection_statement2257 = frozenset([61]) + FOLLOW_61_in_selection_statement2259 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_selection_statement2263 = frozenset([62]) + FOLLOW_62_in_selection_statement2265 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_selection_statement2269 = frozenset([1, 108]) + 
FOLLOW_108_in_selection_statement2284 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_selection_statement2286 = frozenset([1]) + FOLLOW_109_in_selection_statement2293 = frozenset([61]) + FOLLOW_61_in_selection_statement2295 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_selection_statement2297 = frozenset([62]) + FOLLOW_62_in_selection_statement2299 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_selection_statement2301 = frozenset([1]) + FOLLOW_110_in_iteration_statement2312 = frozenset([61]) + FOLLOW_61_in_iteration_statement2314 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_iteration_statement2318 = frozenset([62]) + FOLLOW_62_in_iteration_statement2320 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_iteration_statement2322 = frozenset([1]) + FOLLOW_111_in_iteration_statement2329 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_iteration_statement2331 = frozenset([110]) + 
FOLLOW_110_in_iteration_statement2333 = frozenset([61]) + FOLLOW_61_in_iteration_statement2335 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_iteration_statement2339 = frozenset([62]) + FOLLOW_62_in_iteration_statement2341 = frozenset([25]) + FOLLOW_25_in_iteration_statement2343 = frozenset([1]) + FOLLOW_112_in_iteration_statement2350 = frozenset([61]) + FOLLOW_61_in_iteration_statement2352 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_statement_in_iteration_statement2354 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_statement_in_iteration_statement2358 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 62, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_iteration_statement2360 = frozenset([62]) + FOLLOW_62_in_iteration_statement2363 = frozenset([4, 5, 6, 7, 8, 9, 10, 25, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116]) + FOLLOW_statement_in_iteration_statement2365 = frozenset([1]) + FOLLOW_113_in_jump_statement2378 = frozenset([4]) + FOLLOW_IDENTIFIER_in_jump_statement2380 = frozenset([25]) + FOLLOW_25_in_jump_statement2382 = frozenset([1]) + FOLLOW_114_in_jump_statement2387 = frozenset([25]) + FOLLOW_25_in_jump_statement2389 = frozenset([1]) + FOLLOW_115_in_jump_statement2394 = frozenset([25]) + FOLLOW_25_in_jump_statement2396 = frozenset([1]) + FOLLOW_116_in_jump_statement2401 = frozenset([25]) + FOLLOW_25_in_jump_statement2403 = frozenset([1]) + FOLLOW_116_in_jump_statement2408 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_expression_in_jump_statement2410 = frozenset([25]) + FOLLOW_25_in_jump_statement2412 = frozenset([1]) + FOLLOW_declaration_specifiers_in_synpred290 = 
frozenset([1]) + FOLLOW_declaration_specifiers_in_synpred490 = frozenset([4, 58, 59, 60, 61, 65]) + FOLLOW_declarator_in_synpred493 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_declaration_in_synpred495 = frozenset([4, 26, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_43_in_synpred498 = frozenset([1]) + FOLLOW_declaration_in_synpred5108 = frozenset([1]) + FOLLOW_declaration_specifiers_in_synpred7147 = frozenset([1]) + FOLLOW_declaration_specifiers_in_synpred10197 = frozenset([1]) + FOLLOW_type_specifier_in_synpred14262 = frozenset([1]) + FOLLOW_type_qualifier_in_synpred15276 = frozenset([1]) + FOLLOW_type_qualifier_in_synpred33434 = frozenset([1]) + FOLLOW_IDENTIFIER_in_synpred34432 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65]) + FOLLOW_type_qualifier_in_synpred34434 = frozenset([4, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 65]) + FOLLOW_declarator_in_synpred34437 = frozenset([1]) + FOLLOW_type_qualifier_in_synpred39556 = frozenset([1]) + FOLLOW_type_specifier_in_synpred40560 = frozenset([1]) + FOLLOW_pointer_in_synpred65769 = frozenset([4, 58, 59, 60, 61]) + FOLLOW_58_in_synpred65773 = frozenset([4, 59, 60, 61]) + FOLLOW_59_in_synpred65778 = frozenset([4, 60, 61]) + FOLLOW_60_in_synpred65783 = frozenset([4, 61]) + FOLLOW_direct_declarator_in_synpred65787 = frozenset([1]) + FOLLOW_declarator_suffix_in_synpred66806 = frozenset([1]) + FOLLOW_58_in_synpred68815 = frozenset([1]) + FOLLOW_declarator_suffix_in_synpred69823 = frozenset([1]) + FOLLOW_61_in_synpred72863 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_type_list_in_synpred72865 = frozenset([62]) + FOLLOW_62_in_synpred72867 = frozenset([1]) + FOLLOW_61_in_synpred73877 = 
frozenset([4]) + FOLLOW_identifier_list_in_synpred73879 = frozenset([62]) + FOLLOW_62_in_synpred73881 = frozenset([1]) + FOLLOW_type_qualifier_in_synpred74906 = frozenset([1]) + FOLLOW_pointer_in_synpred75909 = frozenset([1]) + FOLLOW_65_in_synpred76904 = frozenset([49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_qualifier_in_synpred76906 = frozenset([1, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_pointer_in_synpred76909 = frozenset([1]) + FOLLOW_65_in_synpred77915 = frozenset([65]) + FOLLOW_pointer_in_synpred77917 = frozenset([1]) + FOLLOW_53_in_synpred80962 = frozenset([1]) + FOLLOW_27_in_synpred81959 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_53_in_synpred81962 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_parameter_declaration_in_synpred81966 = frozenset([1]) + FOLLOW_declarator_in_synpred82982 = frozenset([1]) + FOLLOW_abstract_declarator_in_synpred83984 = frozenset([1]) + FOLLOW_declaration_specifiers_in_synpred85979 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_declarator_in_synpred85982 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_abstract_declarator_in_synpred85984 = frozenset([1, 4, 53, 58, 59, 60, 61, 63, 65]) + FOLLOW_53_in_synpred85989 = frozenset([1]) + FOLLOW_specifier_qualifier_list_in_synpred891031 = frozenset([1, 61, 63, 65]) + FOLLOW_abstract_declarator_in_synpred891033 = frozenset([1]) + FOLLOW_direct_abstract_declarator_in_synpred901052 = frozenset([1]) + FOLLOW_61_in_synpred921071 = frozenset([61, 63, 65]) + FOLLOW_abstract_declarator_in_synpred921073 = frozenset([62]) + FOLLOW_62_in_synpred921075 = frozenset([1]) + FOLLOW_abstract_declarator_suffix_in_synpred931083 = frozenset([1]) + FOLLOW_61_in_synpred1081267 = frozenset([4, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 
52, 53, 54, 55, 56, 57, 58, 59, 60]) + FOLLOW_type_name_in_synpred1081269 = frozenset([62]) + FOLLOW_62_in_synpred1081271 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_cast_expression_in_synpred1081273 = frozenset([1]) + FOLLOW_73_in_synpred1131315 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_unary_expression_in_synpred1131317 = frozenset([1]) + FOLLOW_61_in_synpred1161405 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_argument_expression_list_in_synpred1161409 = frozenset([62]) + FOLLOW_62_in_synpred1161413 = frozenset([1]) + FOLLOW_61_in_synpred1171429 = frozenset([4, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 45, 46, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 65]) + FOLLOW_macro_parameter_list_in_synpred1171431 = frozenset([62]) + FOLLOW_62_in_synpred1171433 = frozenset([1]) + FOLLOW_65_in_synpred1191467 = frozenset([4]) + FOLLOW_IDENTIFIER_in_synpred1191471 = frozenset([1]) + FOLLOW_STRING_LITERAL_in_synpred1361668 = frozenset([1]) + FOLLOW_IDENTIFIER_in_synpred1371665 = frozenset([4, 9]) + FOLLOW_STRING_LITERAL_in_synpred1371668 = frozenset([1, 9]) + FOLLOW_lvalue_in_synpred1411729 = frozenset([28, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88]) + FOLLOW_assignment_operator_in_synpred1411731 = frozenset([4, 5, 6, 7, 8, 9, 10, 61, 65, 67, 68, 71, 72, 73, 76, 77, 78]) + FOLLOW_assignment_expression_in_synpred1411733 = frozenset([1]) + FOLLOW_expression_statement_in_synpred1682020 = frozenset([1]) + FOLLOW_macro_statement_in_synpred1722040 = frozenset([1]) + FOLLOW_asm2_statement_in_synpred1732045 = frozenset([1]) + FOLLOW_declaration_in_synpred1802151 = frozenset([1]) + FOLLOW_statement_list_in_synpred1812155 = frozenset([1]) + FOLLOW_declaration_in_synpred1852210 = frozenset([1]) + FOLLOW_statement_in_synpred1872227 = frozenset([1]) + diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py new file 
mode 100644 index 0000000000..c8bc54de3e --- /dev/null +++ b/BaseTools/Source/Python/Ecc/Check.py @@ -0,0 +1,865 @@ +## @file
+# This file is used to define checkpoints used by ECC tool
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+import os
+import re
+from CommonDataClass.DataClass import *
+from Common.DataType import SUP_MODULE_LIST_STRING, TAB_VALUE_SPLIT
+from EccToolError import *
+import EccGlobalData
+import c
+
+## Check
+#
+# This class is to define checkpoints used by ECC tool
+#
+# @param object: Inherited from object class
+#
+class Check(object):
+ def __init__(self):
+ pass
+
+ # Check all required checkpoints
+ def Check(self):
+ self.MetaDataFileCheck()
+ self.DoxygenCheck()
+ self.IncludeFileCheck()
+ self.PredicateExpressionCheck()
+ self.DeclAndDataTypeCheck()
+ self.FunctionLayoutCheck()
+ self.NamingConventionCheck()
+
+ # C Function Layout Checking
+ def FunctionLayoutCheck(self):
+ self.FunctionLayoutCheckReturnType()
+ self.FunctionLayoutCheckModifier()
+ self.FunctionLayoutCheckName()
+ self.FunctionLayoutCheckPrototype()
+ self.FunctionLayoutCheckBody()
+ self.FunctionLayoutCheckLocalVariable()
+
+ def WalkTree(self):
+ IgnoredPattern = c.GetIgnoredDirListPattern()
+ for Dirpath, Dirnames, Filenames in os.walk(EccGlobalData.gTarget):
+ for Dir in Dirnames:
+ Dirname = os.path.join(Dirpath, Dir)
+ if os.path.islink(Dirname):
+ Dirname = os.path.realpath(Dirname)
+ if os.path.isdir(Dirname):
+ # symlinks to directories are treated as directories
+ Dirnames.remove(Dir)
+ Dirnames.append(Dirname)
+ if IgnoredPattern.match(Dirpath.upper()):
+ continue
+ yield (Dirpath, Dirnames, Filenames)
+
+ # Check whether return type exists and in the first line
+ def FunctionLayoutCheckReturnType(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout return type ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c', '.h'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckFuncLayoutReturnType(FullName)
+
+ # Check whether any optional functional modifiers exist and next to the return type
+ def FunctionLayoutCheckModifier(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout modifier ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c', '.h'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckFuncLayoutModifier(FullName)
+
+ # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+ # Check whether the closing parenthesis is on its own line and also indented two spaces
+ def FunctionLayoutCheckName(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function name ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c', '.h'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckFuncLayoutName(FullName)
+ # Check whether the function prototypes in include files have the same form as function definitions
+ def FunctionLayoutCheckPrototype(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function prototype ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[PROTOTYPE]" + FullName)
+ c.CheckFuncLayoutPrototype(FullName)
+
+ # Check whether the body of a function is contained by open and close braces that must be in the first column
+ def FunctionLayoutCheckBody(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout function body ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckFuncLayoutBody(FullName)
+
+ # Check whether the data declarations is the first code in a module.
+ # self.CFunctionLayoutCheckDataDeclaration = 1
+ # Check whether no initialization of a variable as part of its declaration
+ def FunctionLayoutCheckLocalVariable(self):
+ if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking function layout local variables ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckFuncLayoutLocalVariable(FullName)
+
+ # Check whether no use of STATIC for functions
+ # self.CFunctionLayoutCheckNoStatic = 1
+
+ # Declarations and Data Types Checking
+ def DeclAndDataTypeCheck(self):
+ self.DeclCheckNoUseCType()
+ self.DeclCheckInOutModifier()
+ self.DeclCheckEFIAPIModifier()
+ self.DeclCheckEnumeratedType()
+ self.DeclCheckStructureDeclaration()
+ self.DeclCheckSameStructure()
+ self.DeclCheckUnionType()
+
+
+ # Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
+ def DeclCheckNoUseCType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration No use C type ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckDeclNoUseCType(FullName)
+
+ # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
+ def DeclCheckInOutModifier(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration argument modifier ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ c.CheckDeclArgModifier(FullName)
+
+ # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
+ def DeclCheckEFIAPIModifier(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckEFIAPIModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ pass
+
+ # Check whether Enumerated Type has a 'typedef' and the name is capital
+ def DeclCheckEnumeratedType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration enum typedef ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[ENUM]" + FullName)
+ c.CheckDeclEnumTypedef(FullName)
+
+ # Check whether Structure Type has a 'typedef' and the name is capital
+ def DeclCheckStructureDeclaration(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration struct typedef ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[STRUCT]" + FullName)
+ c.CheckDeclStructTypedef(FullName)
+
+ # Check whether having same Structure
+ def DeclCheckSameStructure(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckSameStructure == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking same struct ...")
+ AllStructure = {}
+ for IdentifierTable in EccGlobalData.gIdentifierTableList:
+ SqlCommand = """select ID, Name, BelongsToFile from %s where Model = %s""" %(IdentifierTable, MODEL_IDENTIFIER_STRUCTURE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[1] != '':
+ if Record[1] not in AllStructure.keys():
+ AllStructure[Record[1]] = Record[2]
+ else:
+ ID = AllStructure[Record[1]]
+ SqlCommand = """select FullPath from File where ID = %s """ % ID
+ NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ OtherMsg = "The structure name '%s' is duplicate" % Record[1]
+ if NewRecordSet != []:
+ OtherMsg = "The structure name [%s] is duplicate with the one defined in %s, maybe struct NOT typedefed or the typedef new type NOT used to qualify variables" % (Record[1], NewRecordSet[0][0])
+ if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE, OtherMsg = OtherMsg, BelongsToTable = IdentifierTable, BelongsToItem = Record[0])
+
+ # Check whether Union Type has a 'typedef' and the name is capital
+ def DeclCheckUnionType(self):
+ if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Declaration union typedef ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[UNION]" + FullName)
+ c.CheckDeclUnionTypedef(FullName)
+
+ # Predicate Expression Checking
+ def PredicateExpressionCheck(self):
+ self.PredicateExpressionCheckBooleanValue()
+ self.PredicateExpressionCheckNonBooleanOperator()
+ self.PredicateExpressionCheckComparisonNullType()
+
+ # Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
+ def PredicateExpressionCheckBooleanValue(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression Boolean value ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[BOOLEAN]" + FullName)
+ c.CheckBooleanValueComparison(FullName)
+
+ # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+ def PredicateExpressionCheckNonBooleanOperator(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+ c.CheckNonBooleanValueComparison(FullName)
+ # Check whether a comparison of any pointer to zero must be done via the NULL type
+ def PredicateExpressionCheckComparisonNullType(self):
+ if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking predicate expression NULL pointer ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.c'):
+ FullName = os.path.join(Dirpath, F)
+ EdkLogger.quiet("[POINTER]" + FullName)
+ c.CheckPointerNullComparison(FullName)
+ # Include file checking
+ def IncludeFileCheck(self):
+ self.IncludeFileCheckIfndef()
+ self.IncludeFileCheckData()
+ self.IncludeFileCheckSameName()
+
+ # Check whether having include files with same name
+ def IncludeFileCheckSameName(self):
+ if EccGlobalData.gConfig.IncludeFileCheckSameName == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking same header file name ...")
+ SqlCommand = """select ID, FullPath from File
+ where Model = 1002 order by Name """
+ RecordDict = {}
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ List = Record[1].replace('/', '\\').split('\\')
+ if len(List) >= 2:
+ Key = List[-2] + '\\' + List[-1]
+ else:
+ Key = List[0]
+ if Key not in RecordDict:
+ RecordDict[Key] = [Record]
+ else:
+ RecordDict[Key].append(Record)
+
+ for Key in RecordDict:
+ if len(RecordDict[Key]) > 1:
+ for Item in RecordDict[Key]:
+ EccGlobalData.gDb.TblReport.Insert(ERROR_INCLUDE_FILE_CHECK_NAME, OtherMsg = "The file name for '%s' is duplicate" % (Item[1]), BelongsToTable = 'File', BelongsToItem = Item[0])
+
+ # Check whether all include file contents is guarded by a #ifndef statement.
+ def IncludeFileCheckIfndef(self):
+ if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking header file ifndef ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckHeaderFileIfndef(FullName)
+
+ # Check whether include files NOT contain code or define data variables
+ def IncludeFileCheckData(self):
+ if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking header file data ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckHeaderFileData(FullName)
+
+ # Doxygen document checking
+ def DoxygenCheck(self):
+ self.DoxygenCheckFileHeader()
+ self.DoxygenCheckFunctionHeader()
+ self.DoxygenCheckCommentDescription()
+ self.DoxygenCheckCommentFormat()
+ self.DoxygenCheckCommand()
+
+ # Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
+ def DoxygenCheckFileHeader(self):
+ if EccGlobalData.gConfig.DoxygenCheckFileHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen file header ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckFileHeaderDoxygenComments(FullName)
+
+ # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
+ def DoxygenCheckFunctionHeader(self):
+ if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen function header ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # The brief description must end with a period.
+ def DoxygenCheckCommentDescription(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommentDescription == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ pass
+
+ # Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
+ def DoxygenCheckCommentFormat(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen comment ///< ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+
+ # Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
+ def DoxygenCheckCommand(self):
+ if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Doxygen command ...")
+
+ for Dirpath, Dirnames, Filenames in self.WalkTree():
+ for F in Filenames:
+ if os.path.splitext(F)[1] in ('.h', '.c'):
+ FullName = os.path.join(Dirpath, F)
+ MsgList = c.CheckDoxygenCommand(FullName)
+
+ # Meta-Data File Processing Checking
+ def MetaDataFileCheck(self):
+ self.MetaDataFileCheckPathName()
+ self.MetaDataFileCheckGenerateFileList()
+ self.MetaDataFileCheckLibraryInstance()
+ self.MetaDataFileCheckLibraryInstanceDependent()
+ self.MetaDataFileCheckLibraryInstanceOrder()
+ self.MetaDataFileCheckLibraryNoUse()
+ self.MetaDataFileCheckBinaryInfInFdf()
+ self.MetaDataFileCheckPcdDuplicate()
+ self.MetaDataFileCheckPcdFlash()
+ self.MetaDataFileCheckPcdNoUse()
+ self.MetaDataFileCheckGuidDuplicate()
+ self.MetaDataFileCheckModuleFileNoUse()
+ self.MetaDataFileCheckPcdType()
+
+ # Check whether each file defined in meta-data exists
+ def MetaDataFileCheckPathName(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPathName == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This item is covered when parsing Inf/Dec/Dsc files
+ pass
+
+ # Generate a list for all files defined in meta-data files
+ def MetaDataFileCheckGenerateFileList(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckGenerateFileList == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This item is covered when parsing Inf/Dec/Dsc files
+ pass
+
    # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
    # Each Library Instance must specify the Supported Module Types in its Inf file,
    # and any module specifying the library instance must be one of the supported types.
    def MetaDataFileCheckLibraryInstance(self):
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstance == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance type issue ...")
            # Pair every INF's LIBRARY_CLASS define with the MODULE_TYPE define
            # of the same file (one row per file via 'group by').
            SqlCommand = """select A.ID, A.Value2, B.Value2 from Inf as A left join Inf as B
                         where A.Value1 = 'LIBRARY_CLASS' and A.Model = %s
                         and B.Value1 = 'MODULE_TYPE' and B.Model = %s and A.BelongsToFile = B.BelongsToFile
                         group by A.BelongsToFile""" % (MODEL_META_DATA_HEADER, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            # Accumulates: library class name -> list of supported module types.
            LibraryClasses = {}
            for Record in RecordSet:
                # LIBRARY_CLASS value format: 'Name[|Type1 Type2 ...]'.
                List = Record[1].split('|', 1)
                SupModType = []
                if len(List) == 1:
                    # No explicit list: treat the library as supporting every module type.
                    SupModType = SUP_MODULE_LIST_STRING.split(TAB_VALUE_SPLIT)
                elif len(List) == 2:
                    SupModType = List[1].split()

                if List[0] not in LibraryClasses:
                    LibraryClasses[List[0]] = SupModType
                else:
                    # Merge newly seen supported types into the existing entry.
                    for Item in SupModType:
                        if Item not in LibraryClasses[List[0]]:
                            LibraryClasses[List[0]].append(Item)

                # A non-BASE module whose type is absent from the declared list is reported.
                if Record[2] != 'BASE' and Record[2] not in SupModType:
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2, OtherMsg = "The Library Class '%s' does not specify its supported module types" % (List[0]), BelongsToTable = 'Inf', BelongsToItem = Record[0])

            # Second pass: every [LibraryClasses] usage in an INF, joined with
            # that INF's own MODULE_TYPE.
            SqlCommand = """select A.ID, A.Value1, B.Value2 from Inf as A left join Inf as B
                         where A.Model = %s and B.Value1 = '%s' and B.Model = %s
                         and B.BelongsToFile = A.BelongsToFile""" \
                         % (MODEL_EFI_LIBRARY_CLASS, 'MODULE_TYPE', MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            # Merge all LibraryClasses' supmodlist:
            # library class name -> list of module types that actually use it.
            RecordDict = {}
            for Record in RecordSet:
                if Record[1] not in RecordDict:
                    RecordDict[Record[1]] = [str(Record[2])]
                else:
                    if Record[2] not in RecordDict[Record[1]]:
                        RecordDict[Record[1]].append(Record[2])

            for Record in RecordSet:
                if Record[1] in LibraryClasses:
                    # Flag unless the module type is supported, or some user of the
                    # class is a BASE module (BASE users make the class universal).
                    if Record[2] not in LibraryClasses[Record[1]] and 'BASE' not in RecordDict[Record[1]]:
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg = "The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable = 'Inf', BelongsToItem = Record[0])
                else:
                    # The class was never declared with supported types at all.
                    if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, Record[1]):
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1, OtherMsg = "The type of Library Class [%s] defined in Inf file does not match the type of the module" % (Record[1]), BelongsToTable = 'Inf', BelongsToItem = Record[0])
+
    # Check whether a Library Instance has been defined for all dependent library classes
    def MetaDataFileCheckLibraryInstanceDependent(self):
        if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceDependent == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for library instance dependent issue ...")
            # All [LibraryClasses] rows of the DSC: (ID, class name, instance INF path).
            SqlCommand = """select ID, Value1, Value2 from Dsc where Model = %s""" % MODEL_EFI_LIBRARY_CLASS
            LibraryClasses = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for LibraryClass in LibraryClasses:
                # 'NULL' library instances have no class name to resolve.
                if LibraryClass[1].upper() != 'NULL':
                    # Resolve the instance INF path relative to the workspace root.
                    LibraryIns = os.path.normpath(os.path.join(EccGlobalData.gWorkspace, LibraryClass[2]))
                    # LIBRARY_CLASS defines declared by that instance INF.
                    SqlCommand = """select Value2 from Inf where BelongsToFile =
                                 (select ID from File where lower(FullPath) = lower('%s'))
                                 and Value1 = '%s'""" % (LibraryIns, 'LIBRARY_CLASS')
                    RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
                    IsFound = False
                    for Record in RecordSet:
                        # LIBRARY_CLASS value is 'Name[|SupportedTypes]'; compare the name part.
                        LibName = Record[0].split('|', 1)[0]
                        if LibraryClass[1] == LibName:
                            IsFound = True
                    # Instance INF does not actually declare the class the DSC maps to it.
                    if not IsFound:
                        if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, LibraryClass[1]):
                            EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT, OtherMsg = "The Library Class [%s] is not specified in '%s'" % (LibraryClass[1], LibraryClass[2]), BelongsToTable = 'Dsc', BelongsToItem = LibraryClass[0])
+
+ # Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+ def MetaDataFileCheckLibraryInstanceOrder(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryInstanceOrder == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ # This checkpoint is not necessary for Ecc check
+ pass
+
+ # Check whether the unnecessary inclusion of library classes in the Inf file
+ def MetaDataFileCheckLibraryNoUse(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckLibraryNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for library instance not used ...")
+ SqlCommand = """select ID, Value1 from Inf as A where A.Model = %s and A.Value1 not in (select B.Value1 from Dsc as B where Model = %s)""" % (MODEL_EFI_LIBRARY_CLASS, MODEL_EFI_LIBRARY_CLASS)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE, OtherMsg = "The Library Class [%s] is not used in any platform" % (Record[1]), BelongsToTable = 'Inf', BelongsToItem = Record[0])
+
    # Check whether an Inf file is specified in the FDF file, but not in the Dsc file, then the Inf file must be for a Binary module only
    def MetaDataFileCheckBinaryInfInFdf(self):
        if EccGlobalData.gConfig.MetaDataFileCheckBinaryInfInFdf == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for non-binary modules defined in FDF files ...")
            # INF components listed in the FDF but absent from the DSC.
            SqlCommand = """select A.ID, A.Value1 from Fdf as A
                         where A.Model = %s
                         and A.Enabled > -1
                         and A.Value1 not in
                         (select B.Value1 from Dsc as B
                         where B.Model = %s
                         and B.Enabled > -1)""" % (MODEL_META_DATA_COMPONENT, MODEL_META_DATA_COMPONENT)
            RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
            for Record in RecordSet:
                FdfID = Record[0]
                FilePath = Record[1]
                # Resolve the INF path relative to the workspace root.
                FilePath = os.path.normpath(os.path.join(EccGlobalData.gWorkspace, FilePath))
                # If that INF has [Sources] rows it is a source-built module, which
                # must not be referenced by the FDF alone.
                SqlCommand = """select ID from Inf where Model = %s and BelongsToFile = (select ID from File where FullPath like '%s')
                             """ % (MODEL_EFI_SOURCE_FILE, FilePath)
                NewRecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
                if NewRecordSet != []:
                    if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, FilePath):
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF, OtherMsg = "File [%s] defined in FDF file and not in DSC file must be a binary module" % (FilePath), BelongsToTable = 'Fdf', BelongsToItem = FdfID)
+
    # Check whether a PCD is set in a Dsc file or the FDF file, but not in both.
    def MetaDataFileCheckPcdDuplicate(self):
        if EccGlobalData.gConfig.MetaDataFileCheckPcdDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for duplicate PCDs defined in both DSC and FDF files ...")
            # PCD rows (name in Value2) present in both the DSC and the FDF.
            SqlCommand = """
                         select A.ID, A.Value2, B.ID, B.Value2 from Dsc as A, Fdf as B
                         where A.Model >= %s and A.Model < %s
                         and B.Model >= %s and B.Model < %s
                         and A.Value2 = B.Value2
                         and A.Enabled > -1
                         and B.Enabled > -1
                         group by A.ID
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for Record in RecordSet:
                # Report against both the DSC row and the matching FDF row.
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg = "The PCD [%s] is defined in both FDF file and DSC file" % (Record[1]), BelongsToTable = 'Dsc', BelongsToItem = Record[0])
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[3]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg = "The PCD [%s] is defined in both FDF file and DSC file" % (Record[3]), BelongsToTable = 'Fdf', BelongsToItem = Record[2])

            EdkLogger.quiet("Checking for duplicate PCDs defined in DEC files ...")
            # Same PCD declared twice within one DEC file for the same arch; since
            # A.Arch = B.Arch, the 'COMMON' clause excludes duplicates whose shared
            # arch is COMMON.
            SqlCommand = """
                         select A.ID, A.Value2 from Dec as A, Dec as B
                         where A.Model >= %s and A.Model < %s
                         and B.Model >= %s and B.Model < %s
                         and A.Value2 = B.Value2
                         and ((A.Arch = B.Arch) and (A.Arch != 'COMMON' or B.Arch != 'COMMON'))
                         and A.ID != B.ID
                         and A.Enabled > -1
                         and B.Enabled > -1
                         and A.BelongsToFile = B.BelongsToFile
                         group by A.ID
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblDsc.Exec(SqlCommand)
            for Record in RecordSet:
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, OtherMsg = "The PCD [%s] is defined duplicated in DEC file" % (Record[1]), BelongsToTable = 'Dec', BelongsToItem = Record[0])
+
    # Check whether PCD settings in the FDF file can only be related to flash.
    def MetaDataFileCheckPcdFlash(self):
        if EccGlobalData.gConfig.MetaDataFileCheckPcdFlash == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking only Flash related PCDs are used in FDF ...")
            # PCD rows in the FDF whose name (Value2) does not contain 'Flash';
            # '%%' escapes '%' for the Python string formatting below.
            SqlCommand = """
                         select ID, Value2, BelongsToFile from Fdf as A
                         where A.Model >= %s and Model < %s
                         and A.Enabled > -1
                         and A.Value2 not like '%%Flash%%'
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblFdf.Exec(SqlCommand)
            for Record in RecordSet:
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, Record[1]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_FLASH, OtherMsg = "The PCD [%s] defined in FDF file is not related to Flash" % (Record[1]), BelongsToTable = 'Fdf', BelongsToItem = Record[0])
+
    # Check whether PCDs used in Inf files but not specified in Dsc or FDF files
    def MetaDataFileCheckPcdNoUse(self):
        if EccGlobalData.gConfig.MetaDataFileCheckPcdNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for non-specified PCDs ...")
            # PCDs referenced by an INF but present in neither the DSC nor the FDF.
            SqlCommand = """
                         select ID, Value2, BelongsToFile from Inf as A
                         where A.Model >= %s and Model < %s
                         and A.Enabled > -1
                         and A.Value2 not in
                         (select Value2 from Dsc as B
                         where B.Model >= %s and B.Model < %s
                         and B.Enabled > -1)
                         and A.Value2 not in
                         (select Value2 from Fdf as C
                         where C.Model >= %s and C.Model < %s
                         and C.Enabled > -1)
                         """ % (MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER, MODEL_PCD, MODEL_META_DATA_HEADER)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            for Record in RecordSet:
                if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, Record[1]):
                    EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_NO_USE, OtherMsg = "The PCD [%s] defined in INF file is not specified in either DSC or FDF files" % (Record[1]), BelongsToTable = 'Inf', BelongsToItem = Record[0])
+
+ # Check whether having duplicate guids defined for Guid/Protocol/Ppi
+ def MetaDataFileCheckGuidDuplicate(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckGuidDuplicate == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for duplicate GUID/PPI/PROTOCOL ...")
+ # Check Guid
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID, MODEL_EFI_GUID)
+ # Check protocol
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL, MODEL_EFI_PROTOCOL)
+ # Check ppi
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDec)
+ self.CheckGuidProtocolPpi(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI, EccGlobalData.gDb.TblDsc)
+ self.CheckGuidProtocolPpiValue(ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI, MODEL_EFI_PPI)
+
    # Check whether all files under module directory are described in INF files
    def MetaDataFileCheckModuleFileNoUse(self):
        if EccGlobalData.gConfig.MetaDataFileCheckModuleFileNoUse == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
            EdkLogger.quiet("Checking for no used module files ...")
            # Directories that contain at least one INF (upper-cased for comparison).
            SqlCommand = """
                         select upper(Path) from File where ID in (select BelongsToFile from INF where BelongsToFile != -1)
                         """
            InfPathSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            InfPathList = []
            for Item in InfPathSet:
                if Item[0] not in InfPathList:
                    InfPathList.append(Item[0])
            # C/H files whose full path is not produced by any INF [Sources] entry.
            SqlCommand = """
                         select ID, Path, FullPath from File where upper(FullPath) not in
                         (select upper(A.Path) || '\\' || upper(B.Value1) from File as A, INF as B
                         where A.ID in (select BelongsToFile from INF where Model = %s group by BelongsToFile) and
                         B.BelongsToFile = A.ID and B.Model = %s)
                         and (Model = %s or Model = %s)
                         """ % (MODEL_EFI_SOURCE_FILE, MODEL_EFI_SOURCE_FILE, MODEL_FILE_C, MODEL_FILE_H)
            RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
            for Record in RecordSet:
                Path = Record[1]
                # Strip arch sub-directories so arch-specific sources map back to the
                # module directory. NOTE(review): '\X64' etc. rely on Python keeping
                # unknown backslash escapes verbatim (effectively '\\X64') -- confirm.
                Path = Path.upper().replace('\X64', '').replace('\IA32', '').replace('\EBC', '').replace('\IPF', '').replace('\ARM', '')
                # Only report files that sit inside a module (INF-owning) directory.
                if Path in InfPathList:
                    if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, Record[2]):
                        EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, OtherMsg = "The source file [%s] is existing in module directory but it is not described in INF file." % (Record[2]), BelongsToTable = 'File', BelongsToItem = Record[0])
+
+ # Check whether the PCD is correctly used in C function via its type
+ def MetaDataFileCheckPcdType(self):
+ if EccGlobalData.gConfig.MetaDataFileCheckPcdType == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking for pcd type in c code function usage ...")
+ SqlCommand = """
+ select ID, Model, Value1, BelongsToFile from INF where Model > %s and Model < %s
+ """ % (MODEL_PCD, MODEL_META_DATA_HEADER)
+ PcdSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Pcd in PcdSet:
+ Model = Pcd[1]
+ PcdName = Pcd[2]
+ if len(Pcd[2].split(".")) > 1:
+ PcdName = Pcd[2].split(".")[1]
+ BelongsToFile = Pcd[3]
+ SqlCommand = """
+ select ID from File where FullPath in
+ (select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
+ and B.ID = %s)
+ """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile)
+ TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Tbl in TableSet:
+ TblName = 'Identifier' + str(Tbl[0])
+ SqlCommand = """
+ select Name, ID from %s where value like '%%%s%%' and Model = %s
+ """ % (TblName, PcdName, MODEL_IDENTIFIER_FUNCTION_CALLING)
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ TblNumber = TblName.replace('Identifier', '')
+ for Record in RecordSet:
+ FunName = Record[0]
+ if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, FunName):
+ if Model in [MODEL_PCD_FIXED_AT_BUILD] and not FunName.startswith('FixedPcdGet'):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg = "The pcd '%s' is defined as a FixPcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable = TblName, BelongsToItem = Record[1])
+ if Model in [MODEL_PCD_FEATURE_FLAG] and (not FunName.startswith('FeaturePcdGet') and not FunName.startswith('FeaturePcdSet')):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg = "The pcd '%s' is defined as a FeaturePcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable = TblName, BelongsToItem = Record[1])
+ if Model in [MODEL_PCD_PATCHABLE_IN_MODULE] and (not FunName.startswith('PatchablePcdGet') and not FunName.startswith('PatchablePcdSet')):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_PCD_TYPE, OtherMsg = "The pcd '%s' is defined as a PatchablePcd but now it is called by c function [%s]" % (PcdName, FunName), BelongsToTable = TblName, BelongsToItem = Record[1])
+
+ #ERROR_META_DATA_FILE_CHECK_PCD_TYPE
+ pass
+
+ # Check whether these is duplicate Guid/Ppi/Protocol name
+ def CheckGuidProtocolPpi(self, ErrorID, Model, Table):
+ Name = ''
+ if Model == MODEL_EFI_GUID:
+ Name = 'guid'
+ if Model == MODEL_EFI_PROTOCOL:
+ Name = 'protocol'
+ if Model == MODEL_EFI_PPI:
+ Name = 'ppi'
+ SqlCommand = """
+ select A.ID, A.Value1 from %s as A, %s as B
+ where A.Model = %s and B.Model = %s
+ and A.Value1 = B.Value1 and A.ID <> B.ID
+ and A.Enabled > -1
+ and B.Enabled > -1
+ group by A.ID
+ """ % (Table.Table, Table.Table, Model, Model)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ErrorID, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg = "The %s name [%s] is defined more than one time" % (Name.upper(), Record[1]), BelongsToTable = Table.Table, BelongsToItem = Record[0])
+
+ # Check whether these is duplicate Guid/Ppi/Protocol value
+ def CheckGuidProtocolPpiValue(self, ErrorID, Model):
+ Name = ''
+ Table = EccGlobalData.gDb.TblDec
+ if Model == MODEL_EFI_GUID:
+ Name = 'guid'
+ if Model == MODEL_EFI_PROTOCOL:
+ Name = 'protocol'
+ if Model == MODEL_EFI_PPI:
+ Name = 'ppi'
+ SqlCommand = """
+ select A.ID, A.Value2 from %s as A, %s as B
+ where A.Model = %s and B.Model = %s
+ and A.Value2 = B.Value2 and A.ID <> B.ID
+ group by A.ID
+ """ % (Table.Table, Table.Table, Model, Model)
+ RecordSet = Table.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not EccGlobalData.gException.IsException(ErrorID, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ErrorID, OtherMsg = "The %s value [%s] is used more than one time" % (Name.upper(), Record[1]), BelongsToTable = Table.Table, BelongsToItem = Record[0])
+
    # Naming Convention Check
    def NamingConventionCheck(self):
        # Run the identifier-level checks on every .h/.c file found in the
        # workspace tree, then the file-level path and function-name checks.
        for Dirpath, Dirnames, Filenames in self.WalkTree():
            for F in Filenames:
                if os.path.splitext(F)[1] in ('.h', '.c'):
                    FullName = os.path.join(Dirpath, F)
                    # NOTE(review): 'c' is expected to be the Ecc C-checker module
                    # imported elsewhere in this file; it maps a file to its
                    # Identifier table ID, negative when unavailable -- confirm.
                    Id = c.GetTableID(FullName)
                    if Id < 0:
                        continue
                    FileTable = 'Identifier' + str(Id)
                    self.NamingConventionCheckDefineStatement(FileTable)
                    self.NamingConventionCheckTypedefStatement(FileTable)
                    self.NamingConventionCheckIfndefStatement(FileTable)
                    self.NamingConventionCheckVariableName(FileTable)
                    self.NamingConventionCheckSingleCharacterVariable(FileTable)

        self.NamingConventionCheckPathName()
        self.NamingConventionCheckFunctionName()
+
+ # Check whether only capital letters are used for #define declarations
+ def NamingConventionCheckDefineStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckDefineStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of #define statement ...")
+
+ SqlCommand = """select ID, Value from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_MACRO_DEFINE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Name = Record[1].strip().split()[1]
+ if Name.find('(') != -1:
+ Name = Name[0:Name.find('(')]
+ if Name.upper() != Name:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, Name):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT, OtherMsg = "The #define name [%s] does not follow the rules" % (Name), BelongsToTable = FileTable, BelongsToItem = Record[0])
+
+ # Check whether only capital letters are used for typedef declarations
+ def NamingConventionCheckTypedefStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of #typedef statement ...")
+
+ SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_TYPEDEF)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Name = Record[1].strip()
+ if Name != '' and Name != None:
+ if Name[0] == '(':
+ Name = Name[1:Name.find(')')]
+ if Name.find('(') > -1:
+ Name = Name[Name.find('(') + 1 : Name.find(')')]
+ Name = Name.replace('WINAPI', '')
+ Name = Name.replace('*', '').strip()
+ if Name.upper() != Name:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, Name):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT, OtherMsg = "The #typedef name [%s] does not follow the rules" % (Name), BelongsToTable = FileTable, BelongsToItem = Record[0])
+
+ # Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
+ def NamingConventionCheckIfndefStatement(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckTypedefStatement == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of #ifndef statement ...")
+
+ SqlCommand = """select ID, Value from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_MACRO_IFNDEF)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Name = Record[1].replace('#ifndef', '').strip()
+ if Name[0] != '_' or Name[-1] != '_':
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, Name):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT, OtherMsg = "The #ifndef name [%s] does not follow the rules" % (Name), BelongsToTable = FileTable, BelongsToItem = Record[0])
+
+ # Rule for path name, variable name and function name
+ # 1. First character should be upper case
+ # 2. Existing lower case in a word
+ # 3. No space existence
+ # Check whether the path name followed the rule
+ def NamingConventionCheckPathName(self):
+ if EccGlobalData.gConfig.NamingConventionCheckPathName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of file path name ...")
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+ SqlCommand = """select ID, Name from File"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not Pattern.match(Record[1]):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_PATH_NAME, OtherMsg = "The file path [%s] does not follow the rules" % (Record[1]), BelongsToTable = 'File', BelongsToItem = Record[0])
+
+ # Rule for path name, variable name and function name
+ # 1. First character should be upper case
+ # 2. Existing lower case in a word
+ # 3. No space existence
+ # 4. Global variable name must start with a 'g'
+ # Check whether the variable name followed the rule
+ def NamingConventionCheckVariableName(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckVariableName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of variable name ...")
+ Pattern = re.compile(r'^[A-Zgm]+\S*[a-z]\S*$')
+
+ SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_VARIABLE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not Pattern.match(Record[1]):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, OtherMsg = "The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable = FileTable, BelongsToItem = Record[0])
+
+ # Rule for path name, variable name and function name
+ # 1. First character should be upper case
+ # 2. Existing lower case in a word
+ # 3. No space existence
+ # Check whether the function name followed the rule
+ def NamingConventionCheckFunctionName(self):
+ if EccGlobalData.gConfig.NamingConventionCheckFunctionName == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of function name ...")
+ Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
+ SqlCommand = """select ID, Name from Function"""
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ if not Pattern.match(Record[1]):
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME, OtherMsg = "The function name [%s] does not follow the rules" % (Record[1]), BelongsToTable = 'Function', BelongsToItem = Record[0])
+
+ # Check whether NO use short variable name with single character
+ def NamingConventionCheckSingleCharacterVariable(self, FileTable):
+ if EccGlobalData.gConfig.NamingConventionCheckSingleCharacterVariable == '1' or EccGlobalData.gConfig.NamingConventionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking naming covention of single character variable name ...")
+
+ SqlCommand = """select ID, Name from %s where Model = %s""" %(FileTable, MODEL_IDENTIFIER_VARIABLE)
+ RecordSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ Variable = Record[1].replace('*', '')
+ if len(Variable) == 1:
+ if not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, Record[1]):
+ EccGlobalData.gDb.TblReport.Insert(ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE, OtherMsg = "The variable name [%s] does not follow the rules" % (Record[1]), BelongsToTable = FileTable, BelongsToItem = Record[0])
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
#
if __name__ == '__main__':
    # Fix: use a distinct instance name -- 'Check = Check()' shadowed the
    # Check class with its own instance.
    Checker = Check()
    Checker.Check()
diff --git a/BaseTools/Source/Python/Ecc/CodeFragment.py b/BaseTools/Source/Python/Ecc/CodeFragment.py new file mode 100644 index 0000000000..1c5c5e4df2 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/CodeFragment.py @@ -0,0 +1,165 @@ +## @file
+# fragments of source file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+
## A source-file comment fragment: its text, position span, and comment type.
#
class Comment:
    ## Constructor
    #
    # @param self        The object pointer
    # @param Str         The comment text
    # @param Begin       The start position tuple
    # @param End         The end position tuple
    # @param CommentType T_COMMENT_TWO_SLASH or T_COMMENT_SLASH_STAR
    #
    def __init__(self, Str, Begin, End, CommentType):
        self.Content, self.StartPos, self.EndPos, self.Type = Str, Begin, End, CommentType
+
## A preprocessor directive fragment: its text and position span.
#
class PP_Directive:
    ## Constructor
    #
    # @param self  The object pointer
    # @param Str   The directive text
    # @param Begin The start position tuple
    # @param End   The end position tuple
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
+
## A predicate expression fragment: its text and position span.
#
class PredicateExpression:
    ## Constructor
    #
    # @param self  The object pointer
    # @param Str   The expression text
    # @param Begin The start position tuple
    # @param End   The end position tuple
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
+
## A function definition fragment: modifier, declarator, and position data.
#
class FunctionDefinition:
    ## Constructor
    #
    # @param self        The object pointer
    # @param ModifierStr Modifier text (return type / qualifiers)
    # @param DeclStr     Declarator text (name and parameter list)
    # @param Begin       The start position tuple
    # @param End         The end position tuple
    # @param LBPos       The left-brace position tuple
    # @param NamePos     The function-name position tuple
    #
    def __init__(self, ModifierStr, DeclStr, Begin, End, LBPos, NamePos):
        self.Modifier, self.Declarator = ModifierStr, DeclStr
        self.StartPos, self.EndPos = Begin, End
        self.LeftBracePos, self.NamePos = LBPos, NamePos
+
## A variable declaration fragment: modifier, declarator, and position data.
#
class VariableDeclaration:
    ## Constructor
    #
    # @param self        The object pointer
    # @param ModifierStr Modifier text (type / qualifiers)
    # @param DeclStr     Declarator text (the variable name part)
    # @param Begin       The start position tuple
    # @param NamePos     The name position tuple
    #
    def __init__(self, ModifierStr, DeclStr, Begin, NamePos):
        self.Modifier, self.Declarator = ModifierStr, DeclStr
        self.StartPos, self.NameStartPos = Begin, NamePos
+
## An enum definition fragment: its text and position span.
#
class EnumerationDefinition:
    ## Constructor
    #
    # @param self  The object pointer
    # @param Str   The enum body text
    # @param Begin The start position tuple
    # @param End   The end position tuple
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
+
## A struct/union definition fragment: its text and position span.
#
class StructUnionDefinition:
    ## Constructor
    #
    # @param self  The object pointer
    # @param Str   The struct/union body text
    # @param Begin The start position tuple
    # @param End   The end position tuple
    #
    def __init__(self, Str, Begin, End):
        self.Content, self.StartPos, self.EndPos = Str, Begin, End
+
## A 'typedef' definition fragment: source type, new name, and position span.
#
class TypedefDefinition:
    ## Constructor
    #
    # @param self    The object pointer
    # @param FromStr The original type text
    # @param ToStr   The newly defined type name
    # @param Begin   The start position tuple
    # @param End     The end position tuple
    #
    def __init__(self, FromStr, ToStr, Begin, End):
        self.FromType, self.ToType = FromStr, ToStr
        self.StartPos, self.EndPos = Begin, End
+
## A function-call fragment: callee name, parameter list, and position span.
#
class FunctionCalling:
    ## Constructor
    #
    # @param self  The object pointer
    # @param Name  The called function's name
    # @param Param The parameter list
    # @param Begin The start position tuple
    # @param End   The end position tuple
    #
    def __init__(self, Name, Param, Begin, End):
        self.FuncName, self.ParamList = Name, Param
        self.StartPos, self.EndPos = Begin, End
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py new file mode 100644 index 0000000000..d95faeef6a --- /dev/null +++ b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py @@ -0,0 +1,624 @@ +## @file
+# preprocess source file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+
+import re
+import os
+import sys
+
+import antlr3
+from CLexer import CLexer
+from CParser import CParser
+
+import FileProfile
+from CodeFragment import Comment
+from CodeFragment import PP_Directive
+from ParserWarning import Warning
+
+
+##define T_CHAR_SPACE ' '
+##define T_CHAR_NULL '\0'
+##define T_CHAR_CR '\r'
+##define T_CHAR_TAB '\t'
+##define T_CHAR_LF '\n'
+##define T_CHAR_SLASH '/'
+##define T_CHAR_BACKSLASH '\\'
+##define T_CHAR_DOUBLE_QUOTE '\"'
+##define T_CHAR_SINGLE_QUOTE '\''
+##define T_CHAR_STAR '*'
+##define T_CHAR_HASH '#'
+
# Single-character constants used by the preprocessing state machine in
# CodeFragmentCollector below (see the #define-style comment block above).
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')

# Separator characters; NOTE(review): not referenced by the visible code in
# this file — presumably kept for parity with FdfParser, TODO confirm.
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')

# Comment style tags stored in Comment objects: '//' style vs '/* */' style.
(T_COMMENT_TWO_SLASH, T_COMMENT_SLASH_STAR) = (0, 1)

# Preprocessor directive categories; NOTE(review): not referenced by the
# visible code in this file — TODO confirm they are used elsewhere.
(T_PP_INCLUDE, T_PP_DEFINE, T_PP_OTHERS) = (0, 1, 2)
+
+## The collector for source code fragments.
+#
+# PreprocessFile method should be called prior to ParseFile
+#
+# GetNext*** procedures mean these procedures will get next token first, then make judgement.
+# Get*** procedures mean these procedures will make judgement on current token only.
+#
+class CodeFragmentCollector:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file that to be parsed
+ #
+ def __init__(self, FileName):
+ self.Profile = FileProfile.FileProfile(FileName)
+ self.Profile.FileLinesList.append(T_CHAR_LF)
+ self.FileName = FileName
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ self.__Token = ""
+ self.__SkippedChars = ""
+
+ ## __IsWhiteSpace() method
+ #
+ # Whether char at current FileBufferPos is whitespace
+ #
+ # @param self The object pointer
+ # @param Char The char to test
+ # @retval True The char is a kind of white space
+ # @retval False The char is NOT a kind of white space
+ #
+ def __IsWhiteSpace(self, Char):
+ if Char in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_SPACE, T_CHAR_TAB, T_CHAR_LF):
+ return True
+ else:
+ return False
+
+ ## __SkipWhiteSpace() method
+ #
+ # Skip white spaces from current char, return number of chars skipped
+ #
+ # @param self The object pointer
+ # @retval Count The number of chars skipped
+ #
+ def __SkipWhiteSpace(self):
+ Count = 0
+ while not self.__EndOfFile():
+ Count += 1
+ if self.__CurrentChar() in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_LF, T_CHAR_SPACE, T_CHAR_TAB):
+ self.__SkippedChars += str(self.__CurrentChar())
+ self.__GetOneChar()
+
+ else:
+ Count = Count - 1
+ return Count
+
+ ## __EndOfFile() method
+ #
+ # Judge current buffer pos is at file end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def __EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = NumberOfLines
+ if NumberOfLines > 0:
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ elif self.CurrentLineNumber > NumberOfLines:
+ return True
+ else:
+ return False
+
+ ## __EndOfLine() method
+ #
+ # Judge current buffer pos is at line end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def __EndOfLine(self):
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
+ return True
+ else:
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ #
+ def Rewind(self):
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def __UndoOneChar(self):
+
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## __GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def __GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## __CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def __CurrentChar(self):
+ CurrentChar = self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+# if CurrentChar > 255:
+# raise Warning("Non-Ascii char found At Line %d, offset %d" % (self.CurrentLineNumber, self.CurrentOffsetWithinLine), self.FileName, self.CurrentLineNumber)
+ return CurrentChar
+
+ ## __NextChar() method
+ #
+ # Get the one char pass the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def __NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ else:
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## __SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+ ## __SetCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCharValue(self, Line, Offset, Value):
+ self.Profile.FileLinesList[Line - 1][Offset] = Value
+
+ ## __CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
+ def __CurrentLine(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+ ## __InsertComma() method
+ #
+ # Insert ',' to replace PP
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
+ def __InsertComma(self, Line):
+
+
+ if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
+ BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
+ if BeforeHashPart.rstrip().endswith(T_CHAR_COMMA) or BeforeHashPart.rstrip().endswith(';'):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
+ return
+
+ if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
+ return
+
+ if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
+ return
+
+ self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
+ def PreprocessFile(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ PPExtend = False
+ CommentObj = None
+ PPDirectiveObj = None
+ # HashComment in quoted string " " is ignored.
+ InString = False
+ InCharLiteral = False
+
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
+ while not self.__EndOfFile():
+
+ if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
+ InString = not InString
+
+ if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
+ InCharLiteral = not InCharLiteral
+ # meet new line, then no longer in a comment for // and '#'
+ if self.__CurrentChar() == T_CHAR_LF:
+ if HashComment and PPDirectiveObj != None:
+ if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPExtend = True
+ else:
+ PPExtend = False
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ CommentObj.Content += T_CHAR_LF
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ if InComment and HashComment and not PPExtend:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+
+ if InString or InCharLiteral:
+ CurrentLine = "".join(self.__CurrentLine())
+ if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
+ self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
+
+ if InComment and not DoubleSlashComment and not HashComment:
+ CommentObj.Content += T_CHAR_LF
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ if HashComment:
+ # // follows hash PP directive
+ if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+ continue
+ else:
+ PPDirectiveObj.Content += self.__CurrentChar()
+ if PPExtend:
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ else:
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = True
+ DoubleSlashComment = True
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
+ InComment = True
+ HashComment = True
+ PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+# self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ if InComment and HashComment and not PPExtend:
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+
+ self.Rewind()
+
+ def PreprocessFileWithClear(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ PPExtend = False
+ CommentObj = None
+ PPDirectiveObj = None
+ # HashComment in quoted string " " is ignored.
+ InString = False
+ InCharLiteral = False
+
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesListFromFile]
+ while not self.__EndOfFile():
+
+ if not InComment and self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE:
+ InString = not InString
+
+ if not InComment and self.__CurrentChar() == T_CHAR_SINGLE_QUOTE:
+ InCharLiteral = not InCharLiteral
+ # meet new line, then no longer in a comment for // and '#'
+ if self.__CurrentChar() == T_CHAR_LF:
+ if HashComment and PPDirectiveObj != None:
+ if PPDirectiveObj.Content.rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPExtend = True
+ else:
+ PPExtend = False
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ CommentObj.Content += T_CHAR_LF
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ if InComment and HashComment and not PPExtend:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.Content += T_CHAR_LF
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+
+ if InString or InCharLiteral:
+ CurrentLine = "".join(self.__CurrentLine())
+ if CurrentLine.rstrip(T_CHAR_LF).rstrip(T_CHAR_CR).endswith(T_CHAR_BACKSLASH):
+ SlashIndex = CurrentLine.rindex(T_CHAR_BACKSLASH)
+ self.__SetCharValue(self.CurrentLineNumber, SlashIndex, T_CHAR_SPACE)
+
+ if InComment and not DoubleSlashComment and not HashComment:
+ CommentObj.Content += T_CHAR_LF
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ CommentObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+ FileProfile.CommentList.append(CommentObj)
+ CommentObj = None
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ if HashComment:
+ # // follows hash PP directive
+ if self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = False
+ HashComment = False
+ PPDirectiveObj.EndPos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine - 1)
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ PPDirectiveObj = None
+ continue
+ else:
+ PPDirectiveObj.Content += self.__CurrentChar()
+# if PPExtend:
+# self.__SetCurrentCharValue(T_CHAR_SPACE)
+ else:
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH:
+ InComment = True
+ DoubleSlashComment = True
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_TWO_SLASH)
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not InString and not InCharLiteral:
+ InComment = True
+ HashComment = True
+ PPDirectiveObj = PP_Directive('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None)
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+ CommentObj = Comment('', (self.CurrentLineNumber, self.CurrentOffsetWithinLine), None, T_COMMENT_SLASH_STAR)
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ CommentObj.Content += self.__CurrentChar()
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ EndLinePos = (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ if InComment and DoubleSlashComment:
+ CommentObj.EndPos = EndLinePos
+ FileProfile.CommentList.append(CommentObj)
+ if InComment and HashComment and not PPExtend:
+ PPDirectiveObj.EndPos = EndLinePos
+ FileProfile.PPDirectiveList.append(PPDirectiveObj)
+ self.Rewind()
+
+ ## ParseFile() method
+ #
+ # Parse the file profile buffer to extract fd, fv ... information
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ #
+ def ParseFile(self):
+ self.PreprocessFile()
+ # restore from ListOfList to ListOfString
+ self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
+ FileStringContents = ''
+ for fileLine in self.Profile.FileLinesList:
+ FileStringContents += fileLine
+ cStream = antlr3.StringStream(FileStringContents)
+ lexer = CLexer(cStream)
+ tStream = antlr3.CommonTokenStream(lexer)
+ parser = CParser(tStream)
+ parser.translation_unit()
+
+ def ParseFileWithClearedPPDirective(self):
+ self.PreprocessFileWithClear()
+ # restore from ListOfList to ListOfString
+ self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
+ FileStringContents = ''
+ for fileLine in self.Profile.FileLinesList:
+ FileStringContents += fileLine
+ cStream = antlr3.StringStream(FileStringContents)
+ lexer = CLexer(cStream)
+ tStream = antlr3.CommonTokenStream(lexer)
+ parser = CParser(tStream)
+ parser.translation_unit()
+
+ def CleanFileProfileBuffer(self):
+ FileProfile.CommentList = []
+ FileProfile.PPDirectiveList = []
+ FileProfile.PredicateExpressionList = []
+ FileProfile.FunctionDefinitionList = []
+ FileProfile.VariableDeclarationList = []
+ FileProfile.EnumerationDefinitionList = []
+ FileProfile.StructUnionDefinitionList = []
+ FileProfile.TypedefDefinitionList = []
+ FileProfile.FunctionCallingList = []
+
+ def PrintFragments(self):
+
+ print '################# ' + self.FileName + '#####################'
+
+ print '/****************************************/'
+ print '/*************** COMMENTS ***************/'
+ print '/****************************************/'
+ for comment in FileProfile.CommentList:
+ print str(comment.StartPos) + comment.Content
+
+ print '/****************************************/'
+ print '/********* PREPROCESS DIRECTIVES ********/'
+ print '/****************************************/'
+ for pp in FileProfile.PPDirectiveList:
+ print str(pp.StartPos) + pp.Content
+
+ print '/****************************************/'
+ print '/********* VARIABLE DECLARATIONS ********/'
+ print '/****************************************/'
+ for var in FileProfile.VariableDeclarationList:
+ print str(var.StartPos) + var.Modifier + ' '+ var.Declarator
+
+ print '/****************************************/'
+ print '/********* FUNCTION DEFINITIONS *********/'
+ print '/****************************************/'
+ for func in FileProfile.FunctionDefinitionList:
+ print str(func.StartPos) + func.Modifier + ' '+ func.Declarator + ' ' + str(func.NamePos)
+
+ print '/****************************************/'
+ print '/************ ENUMERATIONS **************/'
+ print '/****************************************/'
+ for enum in FileProfile.EnumerationDefinitionList:
+ print str(enum.StartPos) + enum.Content
+
+ print '/****************************************/'
+ print '/*********** STRUCTS/UNIONS *************/'
+ print '/****************************************/'
+ for su in FileProfile.StructUnionDefinitionList:
+ print str(su.StartPos) + su.Content
+
+ print '/****************************************/'
+ print '/********* PREDICATE EXPRESSIONS ********/'
+ print '/****************************************/'
+ for predexp in FileProfile.PredicateExpressionList:
+ print str(predexp.StartPos) + predexp.Content
+
+ print '/****************************************/'
+ print '/************** TYPEDEFS ****************/'
+ print '/****************************************/'
+ for typedef in FileProfile.TypedefDefinitionList:
+ print str(typedef.StartPos) + typedef.ToType
+
# Standalone smoke test: preprocess the source file named on the command line.
if __name__ == "__main__":

    collector = CodeFragmentCollector(sys.argv[1])
    collector.PreprocessFile()
    print "For Test."
diff --git a/BaseTools/Source/Python/Ecc/Configuration.py b/BaseTools/Source/Python/Ecc/Configuration.py new file mode 100644 index 0000000000..bd9313cef4 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/Configuration.py @@ -0,0 +1,264 @@ +## @file
+# This file is used to define class Configuration
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import Common.EdkLogger as EdkLogger
+from Common.DataType import *
+from Common.String import *
+
+## Configuration
+#
+# This class is used to define all items in configuration file
+#
+# @param Filename: The name of configuration file, the default is config.ini
+#
+class Configuration(object):
+ def __init__(self, Filename):
+ self.Filename = Filename
+
+ self.Version = 0.1
+
+ ## Identify to if check all items
+ # 1 - Check all items and ignore all other detailed items
+ # 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
+ #
+ self.CheckAll = 0
+
+ ## Identify to if automatically correct mistakes
+ # 1 - Automatically correct
+ # 0 - Not automatically correct
+ # Only the following check points can be automatically corrected, others not listed below are not supported even it is 1
+ #
+ # GeneralCheckTab
+ # GeneralCheckIndentation
+ # GeneralCheckLine
+ # GeneralCheckCarriageReturn
+ # SpaceCheckAll
+ #
+ self.AutoCorrect = 0
+
+ # List customized Modifer here, split with ','
+ # Defaultly use the definition in class DataType
+ self.ModifierList = MODIFIER_LIST
+
+ ## General Checking
+ self.GeneralCheckAll = 0
+
+ # Check whether NO Tab is used, replaced with spaces
+ self.GeneralCheckNoTab = 1
+ # The width of Tab
+ self.GeneralCheckTabWidth = 2
+ # Check whether the indentation is followed coding style
+ self.GeneralCheckIndentation = 1
+ # The width of indentation
+ self.GeneralCheckIndentationWidth = 2
+ # Check whether no line is exceeding defined widty
+ self.GeneralCheckLine = 1
+ # The width of a line
+ self.GeneralCheckLineWidth = 120
+ # Check whether no use of _asm in the source file
+ self.GeneralCheckNo_Asm = 1
+ # Check whether no use of "#progma" in source file except "#pragma pack(#)".
+ self.GeneralCheckNoProgma = 1
+ # Check whether there is a carriage return at the end of the file
+ self.GeneralCheckCarriageReturn = 1
+ # Check whether the file exists
+ self.GeneralCheckFileExistence = 1
+
+ ## Space Checking
+ self.SpaceCheckAll = 1
+
+ ## Predicate Expression Checking
+ self.PredicateExpressionCheckAll = 0
+
+ # Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
+ self.PredicateExpressionCheckBooleanValue = 1
+ # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+ self.PredicateExpressionCheckNonBooleanOperator = 1
+ # Check whether a comparison of any pointer to zero must be done via the NULL type
+ self.PredicateExpressionCheckComparisonNullType = 1
+
+ ## Headers Checking
+ self.HeaderCheckAll = 0
+
+ # Check whether File header exists
+ self.HeaderCheckFile = 1
+ # Check whether Function header exists
+ self.HeaderCheckFunction = 1
+
+ ## C Function Layout Checking
+ self.CFunctionLayoutCheckAll = 0
+
+ # Check whether return type exists and in the first line
+ self.CFunctionLayoutCheckReturnType = 1
+ # Check whether any optional functional modifiers exist and next to the return type
+ self.CFunctionLayoutCheckOptionalFunctionalModifier = 1
+ # Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+ # Check whether the closing parenthesis is on its own line and also indented two spaces
+ self.CFunctionLayoutCheckFunctionName = 1
+ # Check whether the function prototypes in include files have the same form as function definitions
+ self.CFunctionLayoutCheckFunctionPrototype = 1
+ # Check whether the body of a function is contained by open and close braces that must be in the first column
+ self.CFunctionLayoutCheckFunctionBody = 1
+ # Check whether the data declarations is the first code in a module.
+ self.CFunctionLayoutCheckDataDeclaration = 1
+ # Check whether no initialization of a variable as part of its declaration
+ self.CFunctionLayoutCheckNoInitOfVariable = 1
+ # Check whether no use of STATIC for functions
+ self.CFunctionLayoutCheckNoStatic = 1
+
+ ## Include Files Checking
+ self.IncludeFileCheckAll = 0
+
+ #Check whether having include files with same name
+ self.IncludeFileCheckSameName = 1
+ # Check whether all include file contents is guarded by a #ifndef statement.
+ # the #ifndef must be the first line of code following the file header comment
+ # the #endif must appear on the last line in the file
+ self.IncludeFileCheckIfndefStatement = 1
+ # Check whether include files contain only public or only private data
+ # Check whether include files NOT contain code or define data variables
+ self.IncludeFileCheckData = 1
+
+ ## Declarations and Data Types Checking
+ self.DeclarationDataTypeCheckAll = 0
+
+ # Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
+ self.DeclarationDataTypeCheckNoUseCType = 1
+ # Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
+ self.DeclarationDataTypeCheckInOutModifier = 1
+ # Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
+ self.DeclarationDataTypeCheckEFIAPIModifier = 1
+ # Check whether Enumerated Type has a 'typedef' and the name is capital
+ self.DeclarationDataTypeCheckEnumeratedType = 1
+ # Check whether Structure Type has a 'typedef' and the name is capital
+ self.DeclarationDataTypeCheckStructureDeclaration = 1
+ # Check whether having same Structure
+ self.DeclarationDataTypeCheckSameStructure = 1
+ # Check whether Union Type has a 'typedef' and the name is capital
+ self.DeclarationDataTypeCheckUnionType = 1
+
+ ## Naming Conventions Checking
+ self.NamingConventionCheckAll = 0
+
+ # Check whether only capital letters are used for #define declarations
+ self.NamingConventionCheckDefineStatement = 1
+ # Check whether only capital letters are used for typedef declarations
+ self.NamingConventionCheckTypedefStatement = 1
+ # Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
+ self.NamingConventionCheckIfndefStatement = 1
+ # Rule for path name, variable name and function name
+ # 1. First character should be upper case
+ # 2. Existing lower case in a word
+ # 3. No space existence
+ # Check whether the path name followed the rule
+ self.NamingConventionCheckPathName = 1
+ # Check whether the variable name followed the rule
+ self.NamingConventionCheckVariableName = 1
+ # Check whether the function name followed the rule
+ self.NamingConventionCheckFunctionName = 1
+ # Check whether NO use short variable name with single character
+ self.NamingConventionCheckSingleCharacterVariable = 1
+
+ ## Doxygen Checking
+ self.DoxygenCheckAll = 0
+
+ # Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
+ self.DoxygenCheckFileHeader = 1
+ # Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
+ self.DoxygenCheckFunctionHeader = 1
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # The brief description must end with a period.
+ self.DoxygenCheckCommentDescription = 1
+ # Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
+ self.DoxygenCheckCommentFormat = 1
+ # Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
+ self.DoxygenCheckCommand = 1
+
+ ## Meta-Data File Processing Checking
+ self.MetaDataFileCheckAll = 0
+
+ # Check whether each file defined in meta-data exists
+ self.MetaDataFileCheckPathName = 1
+ # Generate a list for all files defined in meta-data files
+ self.MetaDataFileCheckGenerateFileList = 1
+ # The path of log file
+ self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+ # Each Library Instance must specify the Supported Module Types in its INF file,
+ # and any module specifying the library instance must be one of the supported types.
+ self.MetaDataFileCheckLibraryInstance = 1
+ # Check whether a Library Instance has been defined for all dependent library classes
+ self.MetaDataFileCheckLibraryInstanceDependent = 1
+ # Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+ self.MetaDataFileCheckLibraryInstanceOrder = 1
+ # Check whether the unnecessary inclusion of library classes in the INF file
+ self.MetaDataFileCheckLibraryNoUse = 1
+ # Check whether an INF file is specified in the FDF file, but not in the DSC file, then the INF file must be for a Binary module only
+ self.MetaDataFileCheckBinaryInfInFdf = 1
+ # Not to report error and warning related OS include file such as "windows.h" and "stdio.h"
+ # Check whether a PCD is set in a DSC file or the FDF file, but not in both.
+ self.MetaDataFileCheckPcdDuplicate = 1
+ # Check whether PCD settings in the FDF file can only be related to flash.
+ self.MetaDataFileCheckPcdFlash = 1
+ # Check whether PCDs used in INF files but not specified in DSC or FDF files
+ self.MetaDataFileCheckPcdNoUse = 1
+ # Check whether having duplicate guids defined for Guid/Protocol/Ppi
+ self.MetaDataFileCheckGuidDuplicate = 1
+ # Check whether all files under module directory are described in INF files
+ self.MetaDataFileCheckModuleFileNoUse = 1
+ # Check whether the PCD is correctly used in C function via its type
+ self.MetaDataFileCheckPcdType = 1
+
+ #
+ # The check points in this section are reserved
+ #
+ # GotoStatementCheckAll = 0
+ #
+ self.SpellingCheckAll = 0
+
+ # The directory listed here will not be parsed, split with ','
+ self.SkipDirList = []
+
+ self.ParseConfig()
+
+ def ParseConfig(self):
+ Filepath = os.path.normpath(self.Filename)
+ if not os.path.isfile(Filepath):
+ ErrorMsg = "Can't find configuration file '%s'" % Filepath
+ EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)
+
+ LineNo = 0
+ for Line in open(Filepath, 'r'):
+ LineNo = LineNo + 1
+ Line = CleanString(Line)
+ if Line != '':
+ List = GetSplitValueList(Line, TAB_EQUAL_SPLIT)
+ if List[0] not in self.__dict__:
+ ErrorMsg = "Invalid configuration option '%s' was found" % List[0]
+ EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath, Line = LineNo)
+ if List[0] == 'ModifierList':
+ List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
+ if List[0] == 'MetaDataFileCheckPathOfGenerateFileList' and List[1] == "":
+ continue
+ if List[0] == 'SkipDirList':
+ List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
+ self.__dict__[List[0]] = List[1]
+
+ def ShowMe(self):
+ print self.Filename
+ for Key in self.__dict__.keys():
+ print Key, '=', self.__dict__[Key]
diff --git a/BaseTools/Source/Python/Ecc/Database.py b/BaseTools/Source/Python/Ecc/Database.py new file mode 100644 index 0000000000..c9311f65a5 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/Database.py @@ -0,0 +1,344 @@ +## @file
+# This file is used to create a database used by ECC tool
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os, time
+
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+
+from Table.TableDataModel import TableDataModel
+from Table.TableFile import TableFile
+from Table.TableFunction import TableFunction
+from Table.TablePcd import TablePcd
+from Table.TableIdentifier import TableIdentifier
+from Table.TableReport import TableReport
+from Table.TableInf import TableInf
+from Table.TableDec import TableDec
+from Table.TableDsc import TableDsc
+from Table.TableFdf import TableFdf
+
+##
+# Static definitions
+#
+DATABASE_PATH = "Ecc.db"
+
+## Database
+#
+# This class defines the ECC database
+# During the phase of initialization, the database will create all tables and
+# insert all records of table DataModel
+#
+# @param object: Inherited from object class
+# @param DbPath: A string for the path of the ECC database
+#
+# @var Conn: Connection of the ECC database
+# @var Cur: Cursor of the connection
+# @var TblDataModel: Local instance for TableDataModel
+#
class Database(object):
    ## Constructor
    #
    # @param DbPath: path of the SQLite database file this object manages
    #
    def __init__(self, DbPath):
        self.DbPath = DbPath
        self.Conn = None            # sqlite3 connection, created in InitDatabase()
        self.Cur = None             # single cursor shared by all table wrappers
        self.TblDataModel = None
        self.TblFile = None
        self.TblFunction = None
        self.TblIdentifier = None
        self.TblPcd = None
        self.TblReport = None
        self.TblInf = None
        self.TblDec = None
        self.TblDsc = None
        self.TblFdf = None

    ## Initialize ECC database
    #
    # 1. Delete all old existing tables
    # 2. Create new tables
    # 3. Initialize table DataModel
    #
    # @param NewDatabase: True to remove any existing database file and start
    #                     from scratch; False to reopen the existing file and
    #                     keep its tables (only the connection is re-created)
    #
    def InitDatabase(self, NewDatabase = True):
        EdkLogger.verbose("\nInitialize ECC database started ...")
        #
        # Drop all old existing tables
        #
        if NewDatabase:
            if os.path.exists(self.DbPath):
                os.remove(self.DbPath)
        self.Conn = sqlite3.connect(self.DbPath, isolation_level = 'DEFERRED')
        # Trade durability for speed: this is a throwaway analysis database
        self.Conn.execute("PRAGMA page_size=4096")
        self.Conn.execute("PRAGMA synchronous=OFF")
        # to avoid non-ascii character conversion error
        self.Conn.text_factory = str
        self.Cur = self.Conn.cursor()

        self.TblDataModel = TableDataModel(self.Cur)
        self.TblFile = TableFile(self.Cur)
        self.TblFunction = TableFunction(self.Cur)
        self.TblIdentifier = TableIdentifier(self.Cur)
        self.TblPcd = TablePcd(self.Cur)
        self.TblReport = TableReport(self.Cur)
        self.TblInf = TableInf(self.Cur)
        self.TblDec = TableDec(self.Cur)
        self.TblDsc = TableDsc(self.Cur)
        self.TblFdf = TableFdf(self.Cur)

        #
        # Create new tables
        #
        # NOTE(review): TblIdentifier is deliberately not created or
        # ID-initialized here; one "Identifier<FileID>" table is created per
        # source file in InsertOneFile() instead — confirm.
        #
        if NewDatabase:
            self.TblDataModel.Create()
            self.TblFile.Create()
            self.TblFunction.Create()
            self.TblPcd.Create()
            self.TblReport.Create()
            self.TblInf.Create()
            self.TblDec.Create()
            self.TblDsc.Create()
            self.TblFdf.Create()

        #
        # Init each table's ID
        #
        self.TblDataModel.InitID()
        self.TblFile.InitID()
        self.TblFunction.InitID()
        self.TblPcd.InitID()
        self.TblReport.InitID()
        self.TblInf.InitID()
        self.TblDec.InitID()
        self.TblDsc.InitID()
        self.TblFdf.InitID()

        #
        # Initialize table DataModel
        #
        if NewDatabase:
            self.TblDataModel.InitTable()

        EdkLogger.verbose("Initialize ECC database ... DONE!")

    ## Query a table
    #
    # @param Table: The instance of the table to be queried
    #
    def QueryTable(self, Table):
        Table.Query()

    ## Close entire database
    #
    # Commit all first
    # Close the connection and cursor
    #
    def Close(self):
        #
        # Commit to file
        #
        self.Conn.commit()

        #
        # Close connection and cursor
        #
        self.Cur.close()
        self.Conn.close()

    ## Insert one file information
    #
    # Insert one file's information to the database
    # 1. Create a record in TableFile
    # 2. Create functions one by one
    #    2.1 Create variables of function one by one
    #    2.2 Create pcds of function one by one
    # 3. Create variables one by one
    # 4. Create pcds one by one
    #
    # A dedicated "Identifier<FileID>" table is created for the file so that
    # identifier queries stay per-file.  FunctionID -1 marks identifiers/PCDs
    # that belong to the file scope rather than to a function.
    #
    # @param File: a DataClass.FileClass object describing one parsed file
    #
    def InsertOneFile(self, File):
        #
        # Insert a record for file
        #
        FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
        IdTable = TableIdentifier(self.Cur)
        IdTable.Table = "Identifier%s" % FileID
        IdTable.Create()

        #
        # Insert function of file
        #
        for Function in File.FunctionList:
            FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
                                    Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
                                    Function.BodyStartLine, Function.BodyStartColumn, FileID, \
                                    Function.FunNameStartLine, Function.FunNameStartColumn)
            #
            # Insert Identifier of function
            #
            for Identifier in Function.IdentifierList:
                IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                        FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
            #
            # Insert Pcd of function
            #
            for Pcd in Function.PcdList:
                PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                                   FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
        #
        # Insert Identifier of file (FunctionID -1 == file scope)
        #
        for Identifier in File.IdentifierList:
            IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
                                    FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
        #
        # Insert Pcd of file (FunctionID -1 == file scope)
        #
        for Pcd in File.PcdList:
            PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
                               FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)

        EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)

    ## UpdateIdentifierBelongsToFunction (superseded, row-by-row version)
    #
    # Update the field "BelongsToFunction" for each Identifier
    #
    # Kept for reference only; replaced by the set-based
    # UpdateIdentifierBelongsToFunction() below.
    #
    def UpdateIdentifierBelongsToFunction_disabled(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
        EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
        self.Cur.execute(SqlCommand)
        Records = self.Cur.fetchall()
        for Record in Records:
            IdentifierID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            Model = Record[4]

            #
            # Check whether an identifier belongs to a function
            #
            EdkLogger.debug(4, "For common identifiers ... ")
            SqlCommand = """select ID from Function
                        where StartLine < %s and EndLine > %s
                        and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
            EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
            self.Cur.execute(SqlCommand)
            IDs = self.Cur.fetchall()
            for ID in IDs:
                SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)

            #
            # Check whether the identifier is a function header
            # (a comment whose last line immediately precedes a function start)
            #
            EdkLogger.debug(4, "For function headers ... ")
            if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
                SqlCommand = """select ID from Function
                        where StartLine = %s + 1
                        and BelongsToFile = %s""" % (EndLine, BelongsToFile)
                EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                self.Cur.execute(SqlCommand)
                IDs = self.Cur.fetchall()
                for ID in IDs:
                    SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
                    EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
                    self.Cur.execute(SqlCommand)

        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")


    ## UpdateIdentifierBelongsToFunction
    #
    # Update the field "BelongsToFunction" for each Identifier
    #
    # Set-based version: for every function, one UPDATE tags all identifiers
    # inside the function body, and a second UPDATE reclassifies the comment
    # ending on the line just above the function start as its header.
    #
    def UpdateIdentifierBelongsToFunction(self):
        EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")

        SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
        Records = self.TblFunction.Exec(SqlCommand)
        Data1 = []
        Data2 = []
        for Record in Records:
            FunctionID = Record[0]
            BelongsToFile = Record[1]
            StartLine = Record[2]
            EndLine = Record[3]
            #Data1.append(("'file%s'" % BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine))
            #Data2.append(("'file%s'" % BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1))

            # Tag identifiers strictly inside the function's line range
            SqlCommand = """Update Identifier%s set BelongsToFunction = %s where BelongsToFile = %s and StartLine > %s and EndLine < %s""" % \
                        (BelongsToFile, FunctionID, BelongsToFile, StartLine, EndLine)
            self.TblIdentifier.Exec(SqlCommand)

            # A comment ending on the line just above the function start is its header
            SqlCommand = """Update Identifier%s set BelongsToFunction = %s, Model = %s where BelongsToFile = %s and Model = %s and EndLine = %s""" % \
                        (BelongsToFile, FunctionID, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, BelongsToFile, DataClass.MODEL_IDENTIFIER_COMMENT, StartLine - 1)
            self.TblIdentifier.Exec(SqlCommand)
+# #
+# # Check whether an identifier belongs to a function
+# #
+# print Data1
+# SqlCommand = """Update ? set BelongsToFunction = ? where BelongsToFile = ? and StartLine > ? and EndLine < ?"""
+# print SqlCommand
+# EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+# self.Cur.executemany(SqlCommand, Data1)
+#
+# #
+# # Check whether the identifier is a function header
+# #
+# EdkLogger.debug(4, "For function headers ... ")
+# SqlCommand = """Update ? set BelongsToFunction = ?, Model = ? where BelongsToFile = ? and Model = ? and EndLine = ?"""
+# EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+# self.Cur.executemany(SqlCommand, Data2)
+#
+# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
+
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Self-test: build a fresh ECC database, insert one synthetic file record
    # (one function, four identifiers) and dump every table.
    EdkLogger.Initialize()
    #EdkLogger.SetLevel(EdkLogger.VERBOSE)
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))

    EccDb = Database(DATABASE_PATH)
    EccDb.InitDatabase()
    EccDb.QueryTable(EccDb.TblDataModel)

    # Synthetic records; the embedded "'" characters exercise SQL quoting.
    Id1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
    Id2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
    Id3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
    Id4 = DataClass.IdentifierClass(-1, '', '', "i1'", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 77, 43, 88, 43)
    TestFun = DataClass.FunctionClass(-1, '', '', 'fun1', '', 21, 2, 60, 45, 1, 23, 0, [], [])
    # Renamed from 'file' to avoid shadowing the builtin
    TestFile = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [TestFun], [Id1, Id2, Id3, Id4], [])
    EccDb.InsertOneFile(TestFile)
    EccDb.UpdateIdentifierBelongsToFunction()

    for Tbl in (EccDb.TblFile, EccDb.TblFunction, EccDb.TblPcd, EccDb.TblIdentifier):
        EccDb.QueryTable(Tbl)

    EccDb.Close()
    EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
+
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py new file mode 100644 index 0000000000..4767645d05 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/Ecc.py @@ -0,0 +1,329 @@ +## @file
+# This file is used to be the main entrance of ECC tool
+#
+# Copyright (c) 2009, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, time, glob, sys
+import Common.EdkLogger as EdkLogger
+import Database
+import EccGlobalData
+from MetaDataParser import *
+from optparse import OptionParser
+from Configuration import Configuration
+from Check import Check
+from Common.InfClassObject import Inf
+from Common.DecClassObject import Dec
+from Common.DscClassObject import Dsc
+from Common.FdfClassObject import Fdf
+from Common.String import NormPath
+from Common import BuildToolError
+import c
+from Exception import *
+
+## Ecc
+#
+# This class is used to define Ecc main entrance
+#
+# @param object: Inherited from object class
+#
class Ecc(object):
    ## Constructor
    #
    # Running the whole tool is a side effect of construction: parse options,
    # load configuration and exception list, (re)build the database, run all
    # checks, write the report, and close the database.
    #
    def __init__(self):
        # Version and Copyright
        self.VersionNumber = "0.01"
        self.Version = "%prog Version " + self.VersionNumber
        self.Copyright = "Copyright (c) 2009, Intel Corporation All rights reserved."

        self.InitDefaultConfigIni()
        self.OutputFile = 'output.txt'
        self.ReportFile = 'Report.csv'
        self.ExceptionFile = 'exception.xml'
        self.IsInit = True              # False (-k) keeps the existing database
        self.ScanSourceCode = True      # cleared by -m (meta-data only)
        self.ScanMetaData = True        # cleared by -s (source code only)

        # Parse the options and args
        self.ParseOption()

        # Generate checkpoints list
        EccGlobalData.gConfig = Configuration(self.ConfigFile)

        # Generate exception list
        EccGlobalData.gException = ExceptionCheck(self.ExceptionFile)

        # Init Ecc database
        EccGlobalData.gDb = Database.Database(Database.DATABASE_PATH)
        EccGlobalData.gDb.InitDatabase(self.IsInit)

        # Build ECC database
        self.BuildDatabase()

        # Start to check
        self.Check()

        # Show report
        self.GenReport()

        # Close Database
        EccGlobalData.gDb.Close()

    ## InitDefaultConfigIni
    #
    # Locate the default config.ini: prefer one in the current directory,
    # otherwise the first 'Ecc/config.ini' found under any sys.path entry,
    # falling back to the bare name 'config.ini'.
    #
    def InitDefaultConfigIni(self):
        paths = map(lambda p: os.path.join(p, 'Ecc', 'config.ini'), sys.path)
        paths = (os.path.realpath('config.ini'),) + tuple(paths)
        for path in paths:
            if os.path.exists(path):
                self.ConfigFile = path
                return
        self.ConfigFile = 'config.ini'

    ## BuildDatabase
    #
    # Build the database for target
    #
    def BuildDatabase(self):
        # Clean report table
        EccGlobalData.gDb.TblReport.Drop()
        EccGlobalData.gDb.TblReport.Create()

        # Build database
        if self.IsInit:
            if self.ScanSourceCode:
                EdkLogger.quiet("Building database for source code ...")
                c.CollectSourceCodeDataIntoDB(EccGlobalData.gTarget)
            if self.ScanMetaData:
                # NOTE(review): this "done" message is emitted under the
                # ScanMetaData flag rather than after the source-code scan —
                # confirm whether it belongs in the branch above.
                EdkLogger.quiet("Building database for source code done!")
                self.BuildMetaDataFileDatabase()

        EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)

    ## BuildMetaDataFileDatabase
    #
    # Build the database for meta data files
    #
    # Walks the target tree, parses every DEC/DSC/INF/FDF file into the
    # database, and logs each parsed file name to the path configured by
    # MetaDataFileCheckPathOfGenerateFileList.
    #
    def BuildMetaDataFileDatabase(self):
        EdkLogger.quiet("Building database for meta data files ...")
        Op = open(EccGlobalData.gConfig.MetaDataFileCheckPathOfGenerateFileList, 'w+')
        #SkipDirs = Read from config file
        SkipDirs = EccGlobalData.gConfig.SkipDirList
        for Root, Dirs, Files in os.walk(EccGlobalData.gTarget):
            # NOTE(review): removing entries from Dirs while iterating it can
            # skip the element following each removal — confirm whether
            # adjacent skip-directories are handled correctly.
            for Dir in Dirs:
                if Dir.upper() in SkipDirs:
                    Dirs.remove(Dir)

            # Follow symlinked directories by re-queueing their real path
            for Dir in Dirs:
                Dirname = os.path.join(Root, Dir)
                if os.path.islink(Dirname):
                    Dirname = os.path.realpath(Dirname)
                    if os.path.isdir(Dirname):
                        # symlinks to directories are treated as directories
                        Dirs.remove(Dir)
                        Dirs.append(Dirname)

            for File in Files:
                if len(File) > 4 and File[-4:].upper() == ".DEC":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    Dec(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    continue
                if len(File) > 4 and File[-4:].upper() == ".DSC":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    Dsc(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    continue
                if len(File) > 4 and File[-4:].upper() == ".INF":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    Inf(Filename, True, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    continue
                if len(File) > 4 and File[-4:].upper() == ".FDF":
                    Filename = os.path.normpath(os.path.join(Root, File))
                    EdkLogger.quiet("Parsing %s" % Filename)
                    Op.write("%s\r" % Filename)
                    Fdf(Filename, True, EccGlobalData.gWorkspace, EccGlobalData.gDb)
                    continue
        Op.close()

        # Commit to database
        EccGlobalData.gDb.Conn.commit()

        EdkLogger.quiet("Building database for meta data files done!")

    ##
    #
    # Check each checkpoint
    #
    def Check(self):
        EdkLogger.quiet("Checking ...")
        EccCheck = Check()
        EccCheck.Check()
        EdkLogger.quiet("Checking done!")

    ##
    #
    # Generate the scan report
    #
    def GenReport(self):
        EdkLogger.quiet("Generating report ...")
        EccGlobalData.gDb.TblReport.ToCSV(self.ReportFile)
        EdkLogger.quiet("Generating report done!")

    ## GetRealPathCase
    #
    # Return 'path' rewritten with the on-disk case of each component, by
    # case-insensitively matching every part against os.listdir() of its
    # parent.  Drive letters (components ending in ':') are upper-cased.
    #
    # @param path: the path to normalize
    #
    def GetRealPathCase(self, path):
        TmpPath = path.rstrip(os.sep)
        PathParts = TmpPath.split(os.sep)
        if len(PathParts) == 0:
            return path
        if len(PathParts) == 1:
            if PathParts[0].strip().endswith(':'):
                return PathParts[0].upper()
            # Relative dir, list . current dir
            Dirs = os.listdir('.')
            for Dir in Dirs:
                if Dir.upper() == PathParts[0].upper():
                    return Dir
            # NOTE(review): falls through to the multi-part logic below when
            # no entry in '.' matches — confirm this is intended.

        if PathParts[0].strip().endswith(':'):
            PathParts[0] = PathParts[0].upper()
        ParentDir = PathParts[0]
        RealPath = ParentDir
        if PathParts[0] == '':
            # Path started with os.sep (absolute POSIX-style path)
            RealPath = os.sep
            ParentDir = os.sep

        PathParts.remove(PathParts[0]) # need to remove the parent
        for Part in PathParts:
            Dirs = os.listdir(ParentDir + os.sep)
            for Dir in Dirs:
                if Dir.upper() == Part.upper():
                    RealPath += os.sep
                    RealPath += Dir
                    break
            # NOTE(review): if no entry matched, 'Dir' still holds the last
            # listing entry and is appended to ParentDir anyway — confirm
            # every component is expected to exist.
            ParentDir += os.sep
            ParentDir += Dir

        return RealPath

    ## ParseOption
    #
    # Parse options
    #
    # Validates WORKSPACE, sets the log level, and applies -c/-o/-r/-t/-k/-m/-s.
    # -m and -s are mutually exclusive; with no -t the whole WORKSPACE is scanned.
    #
    def ParseOption(self):
        EdkLogger.quiet("Loading ECC configuration ... done")
        (Options, Target) = self.EccOptionParser()

        # Check workspace envirnoment
        if "WORKSPACE" not in os.environ:
            EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                            ExtraData="WORKSPACE")
        else:
            EccGlobalData.gWorkspace = os.path.normpath(os.getenv("WORKSPACE"))
            if not os.path.exists(EccGlobalData.gWorkspace):
                EdkLogger.error("ECC", BuildToolError.FILE_NOT_FOUND, ExtraData="WORKSPACE = %s" % EccGlobalData.gWorkspace)
            os.environ["WORKSPACE"] = EccGlobalData.gWorkspace
        # Set log level
        self.SetLogLevel(Options)

        # Set other options
        if Options.ConfigFile != None:
            self.ConfigFile = Options.ConfigFile
        if Options.OutputFile != None:
            self.OutputFile = Options.OutputFile
        if Options.ReportFile != None:
            self.ReportFile = Options.ReportFile
        if Options.Target != None:
            if not os.path.isdir(Options.Target):
                EdkLogger.error("ECC", BuildToolError.OPTION_VALUE_INVALID, ExtraData="Target [%s] does NOT exist" % Options.Target)
            else:
                EccGlobalData.gTarget = self.GetRealPathCase(os.path.normpath(Options.Target))
        else:
            EdkLogger.warn("Ecc", EdkLogger.ECC_ERROR, "The target source tree was not specified, using current WORKSPACE instead!")
            EccGlobalData.gTarget = os.path.normpath(os.getenv("WORKSPACE"))
        if Options.keepdatabase != None:
            self.IsInit = False
        if Options.metadata != None and Options.sourcecode != None:
            EdkLogger.error("ECC", BuildToolError.OPTION_CONFLICT, ExtraData="-m and -s can't be specified at one time")
        if Options.metadata != None:
            self.ScanSourceCode = False
        if Options.sourcecode != None:
            self.ScanMetaData = False

    ## SetLogLevel
    #
    # Set current log level of the tool based on args
    #
    # @param Option: The option list including log level setting
    #
    def SetLogLevel(self, Option):
        if Option.verbose != None:
            EdkLogger.SetLevel(EdkLogger.VERBOSE)
        elif Option.quiet != None:
            EdkLogger.SetLevel(EdkLogger.QUIET)
        elif Option.debug != None:
            EdkLogger.SetLevel(Option.debug + 1)
        else:
            EdkLogger.SetLevel(EdkLogger.INFO)

    ## Parse command line options
    #
    # Using standard Python module optparse to parse command line option of this tool.
    #
    # @retval Opt   A optparse.Values object containing the parsed options
    # @retval Args  Target of build command
    #
    def EccOptionParser(self):
        Parser = OptionParser(description = self.Copyright, version = self.Version, prog = "Ecc.exe", usage = "%prog [options]")
        Parser.add_option("-t", "--target sourcepath", action="store", type="string", dest='Target',
            help="Check all files under the target workspace.")
        Parser.add_option("-c", "--config filename", action="store", type="string", dest="ConfigFile",
            help="Specify a configuration file. Defaultly use config.ini under ECC tool directory.")
        Parser.add_option("-o", "--outfile filename", action="store", type="string", dest="OutputFile",
            help="Specify the name of an output file, if and only if one filename was specified.")
        Parser.add_option("-r", "--reportfile filename", action="store", type="string", dest="ReportFile",
            help="Specify the name of an report file, if and only if one filename was specified.")
        Parser.add_option("-m", "--metadata", action="store_true", type=None, help="Only scan meta-data files information if this option is specified.")
        Parser.add_option("-s", "--sourcecode", action="store_true", type=None, help="Only scan source code files information if this option is specified.")
        Parser.add_option("-k", "--keepdatabase", action="store_true", type=None, help="The existing Ecc database will not be cleaned except report information if this option is specified.")
        Parser.add_option("-l", "--log filename", action="store", dest="LogFile", help="""If specified, the tool should emit the changes that
                                                                                          were made by the tool after printing the result message.
                                                                                          If filename, the emit to the file, otherwise emit to
                                                                                          standard output. If no modifications were made, then do not
                                                                                          create a log file, or output a log message.""")
        Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
        Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\
                                                                                  "including library instances selected, final dependency expression, "\
                                                                                  "and warning messages, etc.")
        Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.")

        (Opt, Args)=Parser.parse_args()

        return (Opt, Args)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ # Initialize log system
+ EdkLogger.Initialize()
+ EdkLogger.IsRaiseError = False
+ EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
+
+ StartTime = time.clock()
+ Ecc = Ecc()
+ FinishTime = time.clock()
+
+ BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime))))
+ EdkLogger.quiet("\n%s [%s]" % (time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration))
diff --git a/BaseTools/Source/Python/Ecc/EccGlobalData.py b/BaseTools/Source/Python/Ecc/EccGlobalData.py new file mode 100644 index 0000000000..8e8f24b8c8 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/EccGlobalData.py @@ -0,0 +1,24 @@ +## @file
+# This file is used to save global datas used by ECC tool
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+
# Normalized path taken from the WORKSPACE environment variable (set in Ecc.ParseOption)
gWorkspace = ''
# Root directory of the source tree being scanned (-t option, defaults to gWorkspace)
gTarget = ''
# Configuration object holding all check-point switches (Ecc.Configuration instance)
gConfig = None
# ECC database wrapper (Ecc.Database instance)
gDb = None
# Names of the per-file Identifier tables for C/H files, filled by Ecc.BuildDatabase
gIdentifierTableList = []
# Exception list loaded from exception.xml (ExceptionCheck instance)
gException = None
\ No newline at end of file diff --git a/BaseTools/Source/Python/Ecc/EccToolError.py b/BaseTools/Source/Python/Ecc/EccToolError.py new file mode 100644 index 0000000000..9c4d10d55b --- /dev/null +++ b/BaseTools/Source/Python/Ecc/EccToolError.py @@ -0,0 +1,179 @@ +## @file
+# Standardized Error Handling infrastructures.
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
# ECC error codes.  Each check category owns a range of 1000 codes; the
# xx000 "_ALL" code in each range is the category-wide switch itself.

# General checks (1000-1999)
ERROR_GENERAL_CHECK_ALL = 1000
ERROR_GENERAL_CHECK_NO_TAB = 1001
ERROR_GENERAL_CHECK_INDENTATION = 1002
ERROR_GENERAL_CHECK_LINE = 1003
ERROR_GENERAL_CHECK_NO_ASM = 1004
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007

# Space checks (2000-2999)
ERROR_SPACE_CHECK_ALL = 2000

# Predicate expression checks (3000-3999)
ERROR_PREDICATE_EXPRESSION_CHECK_ALL = 3000
ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE = 3001
ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR = 3002
ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE = 3003

# File/function header checks (4000-4999)
ERROR_HEADER_CHECK_ALL = 4000
ERROR_HEADER_CHECK_FILE = 4001
ERROR_HEADER_CHECK_FUNCTION = 4002

# C function layout checks (5000-5999)
ERROR_C_FUNCTION_LAYOUT_CHECK_ALL = 5000
ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE = 5001
ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER = 5002
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME = 5003
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE = 5004
ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY = 5005
ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION = 5006
ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE = 5007
ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC = 5008

# Include file checks (6000-6999)
ERROR_INCLUDE_FILE_CHECK_ALL = 6000
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 = 6001
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 = 6002
ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 = 6003
ERROR_INCLUDE_FILE_CHECK_DATA = 6004
ERROR_INCLUDE_FILE_CHECK_NAME = 6005

# Declaration and data type checks (7000-7999)
ERROR_DECLARATION_DATA_TYPE_CHECK_ALL = 7000
ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE = 7001
ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER = 7002
ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER = 7003
ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE = 7004
ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION = 7005
# NOTE(review): 7007 is listed before 7006 here; the values themselves look
# deliberate (codes are keys, order is irrelevant) — do not renumber.
ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE = 7007
ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE = 7006
ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE = 7008

# Naming convention checks (8000-8999)
ERROR_NAMING_CONVENTION_CHECK_ALL = 8000
ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT = 8001
ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT = 8002
ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT = 8003
ERROR_NAMING_CONVENTION_CHECK_PATH_NAME = 8004
ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME = 8005
ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME = 8006
ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE = 8007

# Doxygen comment checks (9000-9999)
ERROR_DOXYGEN_CHECK_ALL = 9000
ERROR_DOXYGEN_CHECK_FILE_HEADER = 9001
ERROR_DOXYGEN_CHECK_FUNCTION_HEADER = 9002
ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION = 9003
ERROR_DOXYGEN_CHECK_COMMENT_FORMAT = 9004
ERROR_DOXYGEN_CHECK_COMMAND = 9005

# Meta-data file (INF/DEC/DSC/FDF) checks (10000-10999)
ERROR_META_DATA_FILE_CHECK_ALL = 10000
ERROR_META_DATA_FILE_CHECK_PATH_NAME = 10001
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 = 10002
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 = 10003
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT = 10004
ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER = 10005
ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE = 10006
ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF = 10007
ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE = 10008
ERROR_META_DATA_FILE_CHECK_PCD_FLASH = 10009
ERROR_META_DATA_FILE_CHECK_PCD_NO_USE = 10010
ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID = 10011
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL = 10012
ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI = 10013
ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE = 10014
ERROR_META_DATA_FILE_CHECK_PCD_TYPE = 10015

# Spelling checks (11000-11999)
ERROR_SPELLING_CHECK_ALL = 11000
+
+gEccErrorMessage = {
+ ERROR_GENERAL_CHECK_ALL : "",
+ ERROR_GENERAL_CHECK_NO_TAB : "'TAB' character is not allowed in source code, please replace each 'TAB' with two spaces",
+ ERROR_GENERAL_CHECK_INDENTATION : "Indentation does not follow coding style",
+ ERROR_GENERAL_CHECK_LINE : "The width of each line does not follow coding style",
+ ERROR_GENERAL_CHECK_NO_ASM : "There should be no use of _asm in the source file",
+ ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
+ ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
+ ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
+
+ ERROR_SPACE_CHECK_ALL : "",
+
+ ERROR_PREDICATE_EXPRESSION_CHECK_ALL : "",
+ ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE : "Boolean values and variable type BOOLEAN should not use explicit comparisons to TRUE or FALSE",
+ ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR : "Non-Boolean comparisons should use a compare operator (==, !=, >, < >=, <=)",
+ ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE : "A comparison of any pointer to zero must be done via the NULL type",
+
+ ERROR_HEADER_CHECK_ALL : "",
+ ERROR_HEADER_CHECK_FILE : "File header doesn't exist",
+ ERROR_HEADER_CHECK_FUNCTION : "Function header doesn't exist",
+
+ ERROR_C_FUNCTION_LAYOUT_CHECK_ALL : "",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE : "Return type of a function should exist and in the first line",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER : "Any optional functional modifiers should exist and next to the return type",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME : """Function name should be left justified, followed by the beginning of the parameter list, with the closing parenthesis on its own line, indented two spaces""",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE : "Function prototypes in include files have the same form as function definitions",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY : "The body of a function should be contained by open and close braces that must be in the first column",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_DATA_DECLARATION : "The data declarations should be the first code in a module",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE : "There should be no initialization of a variable as part of its declaration",
+ ERROR_C_FUNCTION_LAYOUT_CHECK_NO_STATIC : "There should be no use of STATIC for functions",
+
+ ERROR_INCLUDE_FILE_CHECK_ALL : "",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1 : "All include file contents should be guarded by a #ifndef statement.",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2 : "The #ifndef must be the first line of code following the file header comment",
+ ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3 : "The #endif must appear on the last line in the file",
+ ERROR_INCLUDE_FILE_CHECK_DATA : "Include files should contain only public or only private data and cannot contain code or define data variables",
+ ERROR_INCLUDE_FILE_CHECK_NAME : "No permission for the inlcude file with same names",
+
+ ERROR_DECLARATION_DATA_TYPE_CHECK_ALL : "",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE : "There should be no use of int, unsigned, char, void, static, long in any .c, .h or .asl files",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER : """The modifiers IN, OUT, OPTIONAL, and UNALIGNED should be used only to qualify arguments to a function and should not appear in a data type declaration""",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_EFI_API_MODIFIER : "The EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE : "Enumerated Type should have a 'typedef' and the name must be in capital letters",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION : "Structure Type should have a 'typedef' and the name must be in capital letters",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_SAME_STRUCTURE : "No permission for the structure with same names",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE : "Union Type should have a 'typedef' and the name must be in capital letters",
+ ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE : "Complex types should be typedef-ed",
+
+ ERROR_NAMING_CONVENTION_CHECK_ALL : "",
+ ERROR_NAMING_CONVENTION_CHECK_DEFINE_STATEMENT : "Only capital letters are allowed to be used for #define declarations",
+ ERROR_NAMING_CONVENTION_CHECK_TYPEDEF_STATEMENT : "Only capital letters are allowed to be used for typedef declarations",
+ ERROR_NAMING_CONVENTION_CHECK_IFNDEF_STATEMENT : "The #ifndef at the start of an include file should use both prefix and postfix underscore characters, '_'",
+ ERROR_NAMING_CONVENTION_CHECK_PATH_NAME : """Path name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
+ ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME : """Variable name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters 4. Global variable name must start with a 'g'""",
+ ERROR_NAMING_CONVENTION_CHECK_FUNCTION_NAME : """Function name does not follow the rules: 1. First character should be upper case 2. Must contain lower case characters 3. No white space characters""",
+ ERROR_NAMING_CONVENTION_CHECK_SINGLE_CHARACTER_VARIABLE : "There should be no use of short (single character) variable names",
+
+ ERROR_DOXYGEN_CHECK_ALL : "",
+ ERROR_DOXYGEN_CHECK_FILE_HEADER : "The file headers should follow Doxygen special documentation blocks in section 2.3.5",
+ ERROR_DOXYGEN_CHECK_FUNCTION_HEADER : "The function headers should follow Doxygen special documentation blocks in section 2.3.5",
+ ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION : """The first line of text in a comment block should be a brief description of the element being documented and the brief description must end with a period.""",
+ ERROR_DOXYGEN_CHECK_COMMENT_FORMAT : "For comment line with '///< ... text ...' format, if it is used, it should be after the code section",
+ ERROR_DOXYGEN_CHECK_COMMAND : "Only Doxygen commands @bug and @todo are allowed to mark the code",
+
+ ERROR_META_DATA_FILE_CHECK_ALL : "",
+ ERROR_META_DATA_FILE_CHECK_PATH_NAME : "The file defined in meta-data does not exist",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_1 : "A library instances defined for a given module (or dependent library instance) doesn't match the module's type.",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_2 : "A library instance must specify the Supported Module Types in its INF file",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_DEPENDENT : "A library instance must be defined for all dependent library classes",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_INSTANCE_ORDER : "The library Instances specified by the LibraryClasses sections should be listed in order of dependencies",
+ ERROR_META_DATA_FILE_CHECK_LIBRARY_NO_USE : "There should be no unnecessary inclusion of library classes in the INF file",
+ ERROR_META_DATA_FILE_CHECK_BINARY_INF_IN_FDF : "An INF file is specified in the FDF file, but not in the DSC file, therefore the INF file must be for a Binary module only",
+ ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE : "Duplicate PCDs found",
+ ERROR_META_DATA_FILE_CHECK_PCD_FLASH : "PCD settings in the FDF file should only be related to flash",
+ ERROR_META_DATA_FILE_CHECK_PCD_NO_USE : "There should be no PCDs declared in INF files that are not specified in in either a DSC or FDF file",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_GUID : "Duplicate GUID found",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_PROTOCOL : "Duplicate PROTOCOL found",
+ ERROR_META_DATA_FILE_CHECK_DUPLICATE_PPI : "Duplicate PPI found",
+ ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE : "No used module files found",
+ ERROR_META_DATA_FILE_CHECK_PCD_TYPE : "Wrong C code function used for this kind of PCD",
+
+ ERROR_SPELLING_CHECK_ALL : "",
+ }
+
diff --git a/BaseTools/Source/Python/Ecc/Exception.py b/BaseTools/Source/Python/Ecc/Exception.py new file mode 100644 index 0000000000..733408551a --- /dev/null +++ b/BaseTools/Source/Python/Ecc/Exception.py @@ -0,0 +1,87 @@ +## @file
+# This file is used to parse exception items found by ECC tool
+#
+# Copyright (c) 2009, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from Common.XmlRoutines import *
+import os.path
+
## ExceptionXml parses one Exception node of the exception XML file
class ExceptionXml(object):
    """One exception entry: an error ID, a keyword and a file path."""

    def __init__(self):
        # Fields start empty; FromXml fills them in from an XML node.
        self.KeyWord = ''
        self.ErrorID = ''
        self.FilePath = ''

    def FromXml(self, Item, Key):
        """Populate the three fields from XML node Item rooted at Key."""
        self.KeyWord = XmlElement(Item, '%s/KeyWord' % Key)
        self.ErrorID = XmlElement(Item, '%s/ErrorID' % Key)
        self.FilePath = os.path.normpath(XmlElement(Item, '%s/FilePath' % Key))

    def __str__(self):
        Fields = (self.ErrorID, self.KeyWord, self.FilePath)
        return 'ErrorID = %s KeyWord = %s FilePath = %s' % Fields
+
## ExceptionListXml parses the Exception node list of the XML file
class ExceptionListXml(object):
    """Container for all ExceptionXml entries found in one XML file."""

    def __init__(self):
        self.List = []

    def FromXmlFile(self, FilePath):
        """Parse FilePath and collect one ExceptionXml per Exception node."""
        XmlContent = XmlParseFile(FilePath)
        for Item in XmlList(XmlContent, '/ExceptionList/Exception'):
            Exp = ExceptionXml()
            Exp.FromXml(Item, 'Exception')
            self.List.append(Exp)

    def ToList(self):
        """Return the entries reduced to (ErrorID, KeyWord) tuples."""
        return [(Item.ErrorID, Item.KeyWord) for Item in self.List]

    def __str__(self):
        # One entry per line; empty list yields the empty string.
        return ''.join(str(Item) + '\n' for Item in self.List)
+
## ExceptionCheck answers whether a reported error is exempted
class ExceptionCheck(object):
    """Loads an exception XML file and answers exemption queries."""

    def __init__(self, FilePath = None):
        self.ExceptionList = []
        self.ExceptionListXml = ExceptionListXml()
        self.LoadExceptionListXml(FilePath)

    def LoadExceptionListXml(self, FilePath):
        """Parse FilePath (when it names an existing file) and cache the
        (ErrorID, KeyWord) pairs for IsException lookups."""
        if FilePath and os.path.isfile(FilePath):
            self.ExceptionListXml.FromXmlFile(FilePath)
            self.ExceptionList = self.ExceptionListXml.ToList()

    def IsException(self, ErrorID, KeyWord, FileID=-1):
        """Return True when (ErrorID, KeyWord) is in the exception list.

        FileID is accepted for interface compatibility but is not used.
        """
        return (str(ErrorID), KeyWord) in self.ExceptionList
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Use the print() call form: the old print statement is a syntax error
    # on Python 3 and prevents the whole module from even being parsed
    # there, while the call form works identically on Python 2.
    El = ExceptionCheck('C:\\Hess\\Project\\BuildTool\\src\\Ecc\\exception.xml')
    print(El.ExceptionList)
diff --git a/BaseTools/Source/Python/Ecc/FileProfile.py b/BaseTools/Source/Python/Ecc/FileProfile.py new file mode 100644 index 0000000000..810087ea07 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/FileProfile.py @@ -0,0 +1,57 @@ +## @file
+# fragments of source file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+
+import re
+import os
+from ParserWarning import Warning
+
# Module-level buffers shared with the code-fragment collector; each list
# holds one category of fragment gathered while a source file is parsed.
CommentList = []
PPDirectiveList = []
PredicateExpressionList = []
FunctionDefinitionList = []
VariableDeclarationList = []
EnumerationDefinitionList = []
StructUnionDefinitionList = []
TypedefDefinitionList = []
FunctionCallingList = []
+
## record file data when parsing source
#
# May raise Exception when opening file.
#
class FileProfile :
    """In-memory image of one source file, kept as raw (binary) lines."""

    ## The constructor
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileLinesList = []
        self.FileLinesListFromFile = []
        try:
            # Unbuffered binary read so line endings are preserved as-is.
            SourceFile = open(FileName, "rb", 0)
            try:
                self.FileLinesListFromFile = SourceFile.readlines()
            finally:
                SourceFile.close()
        except IOError:
            # Surface open/read failures as the tool's own Warning type.
            raise Warning("Error when opening file %s" % FileName)
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/Ecc/MetaDataParser.py b/BaseTools/Source/Python/Ecc/MetaDataParser.py new file mode 100644 index 0000000000..fb4239f474 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/MetaDataParser.py @@ -0,0 +1,65 @@ +## @file
+# This file is used to define common parser functions for meta-data
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+import os
+from CommonDataClass.DataClass import *
+
+
## Get the include path list for a source file
#
# 1. Find the source file belongs to which inf file
# 2. Find the inf's package
# 3. Return the include path list of the package
#
def GetIncludeListOfFile(WorkSpace, Filepath, Db):
    """Return the include directories visible to source file Filepath.

    Looks up which INF the file belongs to and which DEC (package)
    declares it, then returns the INF's own directory plus every
    [Includes] path of the package.  Paths are workspace-absolute and
    normalized.
    """
    IncludeList = []
    Filepath = os.path.normpath(Filepath)
    # Join File and Inf rows to find the package (DEC) record and the INF
    # record of the module that lists Filepath as a source file.
    # NOTE(review): the path is joined with a literal '\\' separator, so
    # this matching presumably assumes Windows-style paths in the DB.
    SqlCommand = """
                 select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
                     select distinct B.BelongsToFile from File as A left join Inf as B
                         where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
                     and Inf.BelongsToFile = File.ID""" \
                % (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
    RecordSet = Db.TblFile.Exec(SqlCommand)
    for Record in RecordSet:
        DecFullPath = os.path.normpath(os.path.join(WorkSpace, Record[0]))
        InfFullPath = os.path.normpath(os.path.join(WorkSpace, Record[1]))
        (DecPath, DecName) = os.path.split(DecFullPath)
        (InfPath, InfName) = os.path.split(InfFullPath)
        # All include paths declared by the owning package (DEC file).
        SqlCommand = """select Value1 from Dec where BelongsToFile =
                        (select ID from File where FullPath = '%s') and Model = %s""" \
                     % (DecFullPath, MODEL_EFI_INCLUDE)
        NewRecordSet = Db.TblDec.Exec(SqlCommand)
        # The module's own directory is always searchable.
        if InfPath not in IncludeList:
            IncludeList.append(InfPath)
        for NewRecord in NewRecordSet:
            IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
            if IncludePath not in IncludeList:
                IncludeList.append(IncludePath)

    return IncludeList
+
## Get the table list
#
# Search table file and find all small tables
#
def GetTableList(FileModelList, Table, Db):
    """Return the per-file table names for every file whose model is in
    FileModelList; each name is Table with the file's DB ID appended.
    """
    SqlCommand = """select ID from File where Model in %s""" % str(FileModelList)
    RecordSet = Db.TblFile.Exec(SqlCommand)
    return [Table + str(Record[0]) for Record in RecordSet]
+
diff --git a/BaseTools/Source/Python/Ecc/ParserWarning.py b/BaseTools/Source/Python/Ecc/ParserWarning.py new file mode 100644 index 0000000000..547360d927 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/ParserWarning.py @@ -0,0 +1,17 @@ +## The exception class that used to report error messages when preprocessing
+#
+# Currently the "ToolName" is set to be "ECC PP".
+#
+class Warning (Exception):
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param Str The message to record
+ # @param File The FDF name
+ # @param Line The Line number that error occurs
+ #
+ def __init__(self, Str, File = None, Line = None):
+ self.message = Str
+ self.FileName = File
+ self.LineNumber = Line
+ self.ToolName = 'ECC PP'
\ No newline at end of file diff --git a/BaseTools/Source/Python/Ecc/__init__.py b/BaseTools/Source/Python/Ecc/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/__init__.py diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py new file mode 100644 index 0000000000..b8b1d2d6f5 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/c.py @@ -0,0 +1,2503 @@ +import sys
+import os
+import re
+import string
+import CodeFragmentCollector
+import FileProfile
+from CommonDataClass import DataClass
+import Database
+from Common import EdkLogger
+from EccToolError import *
+import EccGlobalData
+import MetaDataParser
+
# Per-run caches keyed by file/directory path so repeated database and
# filesystem queries are answered from memory.
IncludeFileListDict = {}
AllIncludeFileListDict = {}
IncludePathListDict = {}
# Bookkeeping for complex (struct/union) type lookups.
ComplexTypeDict = {}
SUDict = {}
# Function-like macro names that must not be treated as function calls.
IgnoredKeywordList = ['EFI_ERROR']
+
def GetIgnoredDirListPattern():
    """Build a compiled regex matching any path that contains one of the
    configured skip directories (plus '.svn') as a component.

    Fix: uses the str.join method instead of the long-deprecated
    string.join() function (removed in Python 3).
    """
    skipList = list(EccGlobalData.gConfig.SkipDirList) + ['.svn']
    DirString = '|'.join(skipList)
    # NOTE(review): directory names are interpolated unescaped, so a regex
    # metacharacter in a name (the '.' in '.svn') matches loosely; confirm
    # whether re.escape should be applied before tightening this.
    p = re.compile(r'.*[\\/](?:%s)[\\/]?.*' % DirString)
    return p
+
def GetFuncDeclPattern():
    """Compiled regex that matches a C function declarator such as
    'EFIAPI FooBar (...)'; DOTALL lets the argument list span lines."""
    return re.compile(r'(?:EFIAPI|EFI_BOOT_SERVICE|EFI_RUNTIME_SERVICE)?\s*[_\w]+\s*\(.*\)$', re.DOTALL)
+
def GetArrayPattern():
    """Compiled regex that matches an array declarator like 'gTable[8]'."""
    return re.compile(r'[_\w]*\s*[\[.*\]]+')
+
def GetTypedefFuncPointerPattern():
    """Compiled regex that matches a function-pointer declarator such as
    'RET_TYPE (EFIAPI *NAME) (args)'; DOTALL spans line breaks."""
    return re.compile('[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
+
def GetDB():
    # Shared ECC database handle, initialized by the Ecc driver before
    # any checker in this module runs.
    return EccGlobalData.gDb
+
def GetConfig():
    # Shared ECC configuration object (parsed config.ini settings).
    return EccGlobalData.gConfig
+
def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
    """Record one ECC error in the report table.

    CR/LF characters are deleted outright and each remaining run of
    whitespace collapses to one space; a trailing space is kept, matching
    the format the original loop produced.
    """
    Flattened = Msg.replace('\n', '').replace('\r', '')
    Normalized = ''.join(Word + ' ' for Word in Flattened.split())
    GetDB().TblReport.Insert(ErrorType, OtherMsg = Normalized, BelongsToTable = TableName, BelongsToItem = ItemId)
+
def GetIdType(Str):
    """Map a preprocessor directive string to its MODEL_IDENTIFIER_* type.

    Fixes: the original indexed List[1] unconditionally and raised
    IndexError on a directive with no keyword (e.g. a bare '#' line);
    such input now yields MODEL_UNKNOWN.  The if/elif ladder is replaced
    by a dispatch table.
    """
    # Insert a space after '#' so '#include' splits into ('#', 'include').
    Str = Str.replace('#', '# ')
    List = Str.split()
    if len(List) < 2:
        # No directive keyword present.
        return DataClass.MODEL_UNKNOWN
    TypeMap = {
        'include': DataClass.MODEL_IDENTIFIER_INCLUDE,
        'define':  DataClass.MODEL_IDENTIFIER_MACRO_DEFINE,
        'ifdef':   DataClass.MODEL_IDENTIFIER_MACRO_IFDEF,
        'ifndef':  DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF,
        'endif':   DataClass.MODEL_IDENTIFIER_MACRO_ENDIF,
        'pragma':  DataClass.MODEL_IDENTIFIER_MACRO_PROGMA,
    }
    return TypeMap.get(List[1], DataClass.MODEL_UNKNOWN)
+
def SuOccurInTypedef (Su, TdList):
    """Return True when struct/union fragment Su starts and ends on the
    same lines as some typedef fragment in TdList (i.e. it is part of a
    typedef and should not be reported separately)."""
    return any(Su.StartPos[0] == Td.StartPos[0] and Su.EndPos[0] == Td.EndPos[0]
               for Td in TdList)
+
def GetIdentifierList():
    """Convert every fragment collected in FileProfile (comments, PP
    directives, predicate expressions, variable/enum/struct/union/typedef
    declarations and function calls) into DataClass.IdentifierClass rows
    ready for database insertion.
    """
    IdList = []

    # Comments map 1:1 onto identifier rows.
    for comment in FileProfile.CommentList:
        IdComment = DataClass.IdentifierClass(-1, '', '', '', comment.Content, DataClass.MODEL_IDENTIFIER_COMMENT, -1, -1, comment.StartPos[0],comment.StartPos[1],comment.EndPos[0],comment.EndPos[1])
        IdList.append(IdComment)

    # Preprocessor directives: the model type depends on the keyword.
    for pp in FileProfile.PPDirectiveList:
        Type = GetIdType(pp.Content)
        IdPP = DataClass.IdentifierClass(-1, '', '', '', pp.Content, Type, -1, -1, pp.StartPos[0],pp.StartPos[1],pp.EndPos[0],pp.EndPos[1])
        IdList.append(IdPP)

    for pe in FileProfile.PredicateExpressionList:
        IdPE = DataClass.IdentifierClass(-1, '', '', '', pe.Content, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION, -1, -1, pe.StartPos[0],pe.StartPos[1],pe.EndPos[0],pe.EndPos[1])
        IdList.append(IdPE)

    FuncDeclPattern = GetFuncDeclPattern()
    ArrayPattern = GetArrayPattern()
    for var in FileProfile.VariableDeclarationList:
        DeclText = var.Declarator.lstrip()
        # Function-pointer declarators are handled as typedefs elsewhere.
        FuncPointerPattern = GetTypedefFuncPointerPattern()
        if FuncPointerPattern.match(DeclText):
            continue
        VarNameStartLine = var.NameStartPos[0]
        VarNameStartColumn = var.NameStartPos[1]
        # Peel leading '*', whitespace and line breaks off the declarator,
        # tracking the (line, column) where the name really starts.  A tab
        # advances the column by 8.
        FirstChar = DeclText[0]
        while not FirstChar.isalpha() and FirstChar != '_':
            if FirstChar == '*':
                var.Modifier += '*'
                VarNameStartColumn += 1
                DeclText = DeclText.lstrip('*')
            elif FirstChar == '\r':
                DeclText = DeclText.lstrip('\r\n').lstrip('\r')
                VarNameStartLine += 1
                VarNameStartColumn = 0
            elif FirstChar == '\n':
                DeclText = DeclText.lstrip('\n')
                VarNameStartLine += 1
                VarNameStartColumn = 0
            elif FirstChar == ' ':
                DeclText = DeclText.lstrip(' ')
                VarNameStartColumn += 1
            elif FirstChar == '\t':
                DeclText = DeclText.lstrip('\t')
                VarNameStartColumn += 8
            else:
                DeclText = DeclText[1:]
                VarNameStartColumn += 1
            FirstChar = DeclText[0]

        var.Declarator = DeclText
        # A declarator shaped like 'Type Name (...)' is really a function
        # declaration, not a variable.
        if FuncDeclPattern.match(var.Declarator):
            DeclSplitList = var.Declarator.split('(')
            FuncName = DeclSplitList[0].strip()
            FuncNamePartList = FuncName.split()
            if len(FuncNamePartList) > 1:
                FuncName = FuncNamePartList[-1].strip()
            NameStart = DeclSplitList[0].rfind(FuncName)
            var.Declarator = var.Declarator[NameStart:]
            if NameStart > 0:
                var.Modifier += ' ' + DeclSplitList[0][0:NameStart]
                # Re-walk the stripped prefix to adjust the name position;
                # 'EFIAPI' is consumed as one 6-column token.
                Index = 0
                PreChar = ''
                while Index < NameStart:
                    FirstChar = DeclSplitList[0][Index]
                    if DeclSplitList[0][Index:].startswith('EFIAPI'):
                        Index += 6
                        VarNameStartColumn += 6
                        PreChar = ''
                        continue
                    elif FirstChar == '\r':
                        Index += 1
                        VarNameStartLine += 1
                        VarNameStartColumn = 0
                    elif FirstChar == '\n':
                        Index += 1
                        # A '\n' right after '\r' is the same CRLF newline.
                        if PreChar != '\r':
                            VarNameStartLine += 1
                            VarNameStartColumn = 0
                    elif FirstChar == ' ':
                        Index += 1
                        VarNameStartColumn += 1
                    elif FirstChar == '\t':
                        Index += 1
                        VarNameStartColumn += 8
                    else:
                        Index += 1
                        VarNameStartColumn += 1
                    PreChar = FirstChar
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', var.Declarator, FuncName, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, -1, -1, var.StartPos[0], var.StartPos[1], VarNameStartLine, VarNameStartColumn)
            IdList.append(IdVar)
            continue

        if var.Declarator.find('{') == -1:
            # Plain declaration list, e.g. 'TYPE a = 1, b;': one row per name.
            for decl in var.Declarator.split(','):
                DeclList = decl.split('=')
                Name = DeclList[0].strip()
                if ArrayPattern.match(Name):
                    # Move the array suffix '[...]' into the modifier.
                    LSBPos = var.Declarator.find('[')
                    var.Modifier += ' ' + Name[LSBPos:]
                    Name = Name[0:LSBPos]

                IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]] or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0],var.StartPos[1], VarNameStartLine, VarNameStartColumn)
                IdList.append(IdVar)
        else:
            # Brace initializer present: treat the declarator as one name.
            DeclList = var.Declarator.split('=')
            Name = DeclList[0].strip()
            if ArrayPattern.match(Name):
                LSBPos = var.Declarator.find('[')
                var.Modifier += ' ' + Name[LSBPos:]
                Name = Name[0:LSBPos]
            IdVar = DataClass.IdentifierClass(-1, var.Modifier, '', Name, (len(DeclList) > 1 and [DeclList[1]] or [''])[0], DataClass.MODEL_IDENTIFIER_VARIABLE, -1, -1, var.StartPos[0],var.StartPos[1], VarNameStartLine, VarNameStartColumn)
            IdList.append(IdVar)

    for enum in FileProfile.EnumerationDefinitionList:
        # 'enum NAME { ... }': the name sits between 'enum' and '{'.
        LBPos = enum.Content.find('{')
        RBPos = enum.Content.find('}')
        Name = enum.Content[4:LBPos].strip()
        Value = enum.Content[LBPos+1:RBPos]
        IdEnum = DataClass.IdentifierClass(-1, '', '', Name, Value, DataClass.MODEL_IDENTIFIER_ENUMERATE, -1, -1, enum.StartPos[0],enum.StartPos[1],enum.EndPos[0],enum.EndPos[1])
        IdList.append(IdEnum)

    for su in FileProfile.StructUnionDefinitionList:
        # Structs/unions that are part of a typedef are emitted as typedefs.
        if SuOccurInTypedef(su, FileProfile.TypedefDefinitionList):
            continue
        Type = DataClass.MODEL_IDENTIFIER_STRUCTURE
        SkipLen = 6
        if su.Content.startswith('union'):
            Type = DataClass.MODEL_IDENTIFIER_UNION
            SkipLen = 5
        LBPos = su.Content.find('{')
        RBPos = su.Content.find('}')
        if LBPos == -1 or RBPos == -1:
            # Forward declaration: no body.
            Name = su.Content[SkipLen:].strip()
            Value = ''
        else:
            Name = su.Content[SkipLen:LBPos].strip()
            Value = su.Content[LBPos:RBPos+1]
        IdPE = DataClass.IdentifierClass(-1, '', '', Name, Value, Type, -1, -1, su.StartPos[0],su.StartPos[1],su.EndPos[0],su.EndPos[1])
        IdList.append(IdPE)

    TdFuncPointerPattern = GetTypedefFuncPointerPattern()
    for td in FileProfile.TypedefDefinitionList:
        Modifier = ''
        Name = td.ToType
        Value = td.FromType
        if TdFuncPointerPattern.match(td.ToType):
            # Function-pointer typedef: the name hides inside '(... *Name)'.
            Modifier = td.FromType
            LBPos = td.ToType.find('(')
            TmpStr = td.ToType[LBPos+1:].strip()
            StarPos = TmpStr.find('*')
            if StarPos != -1:
                Modifier += ' ' + TmpStr[0:StarPos]
                while TmpStr[StarPos] == '*':
#                   Modifier += ' ' + '*'
                    StarPos += 1
                TmpStr = TmpStr[StarPos:].strip()
            RBPos = TmpStr.find(')')
            Name = TmpStr[0:RBPos]
            # 'FP' marks the value as a function-pointer signature.
            Value = 'FP' + TmpStr[RBPos + 1:]
        else:
            # Pointer typedef: fold leading '*'s into the value text.
            while Name.startswith('*'):
                Value += ' ' + '*'
                Name = Name.lstrip('*').strip()

        if Name.find('[') != -1:
            # Move any array suffix from the name into the value.
            LBPos = Name.find('[')
            RBPos = Name.rfind(']')
            Value += Name[LBPos : RBPos + 1]
            Name = Name[0 : LBPos]

        IdTd = DataClass.IdentifierClass(-1, Modifier, '', Name, Value, DataClass.MODEL_IDENTIFIER_TYPEDEF, -1, -1, td.StartPos[0],td.StartPos[1],td.EndPos[0],td.EndPos[1])
        IdList.append(IdTd)

    for funcCall in FileProfile.FunctionCallingList:
        IdFC = DataClass.IdentifierClass(-1, '', '', funcCall.FuncName, funcCall.ParamList, DataClass.MODEL_IDENTIFIER_FUNCTION_CALLING, -1, -1, funcCall.StartPos[0],funcCall.StartPos[1],funcCall.EndPos[0],funcCall.EndPos[1])
        IdList.append(IdFC)
    return IdList
+
def StripNonAlnumChars(Str):
    """Return Str with every non-alphanumeric character removed.

    Fix: builds the result with ''.join over a generator instead of
    repeated string concatenation, which is quadratic in the worst case.
    """
    return ''.join(Char for Char in Str if Char.isalnum())
+
def GetParamList(FuncDeclarator, FuncNameLine = 0, FuncNameOffset = 0):
    """Parse the parameter list out of a function declarator string.

    Returns one DataClass.IdentifierClass per parameter; each parameter's
    begin/end (line, offset) is computed relative to the position of the
    function name, given as (FuncNameLine, FuncNameOffset).  Tabs count
    as 8 columns throughout.
    """
    FuncDeclarator = StripComments(FuncDeclarator)
    ParamIdList = []
    #DeclSplitList = FuncDeclarator.split('(')
    LBPos = FuncDeclarator.find('(')
    #if len(DeclSplitList) < 2:
    if LBPos == -1:
        # No parameter list at all.
        return ParamIdList
    #FuncName = DeclSplitList[0]
    FuncName = FuncDeclarator[0:LBPos]
    #ParamStr = DeclSplitList[1].rstrip(')')
    ParamStr = FuncDeclarator[LBPos + 1:].rstrip(')')
    LineSkipped = 0
    OffsetSkipped = 0
    # Strip trailing non-name characters off FuncName while accumulating
    # how many lines/columns lie between the name and the '('.
    TailChar = FuncName[-1]
    while not TailChar.isalpha() and TailChar != '_':

        if TailChar == '\n':
            FuncName = FuncName.rstrip('\r\n').rstrip('\n')
            LineSkipped += 1
            OffsetSkipped = 0
        elif TailChar == '\r':
            FuncName = FuncName.rstrip('\r')
            LineSkipped += 1
            OffsetSkipped = 0
        elif TailChar == ' ':
            FuncName = FuncName.rstrip(' ')
            OffsetSkipped += 1
        elif TailChar == '\t':
            FuncName = FuncName.rstrip('\t')
            OffsetSkipped += 8
        else:
            FuncName = FuncName[:-1]
        TailChar = FuncName[-1]

    OffsetSkipped += 1 #skip '('

    for p in ParamStr.split(','):
        ListP = p.split()
        if len(ListP) == 0:
            continue
        # The last whitespace-separated token is (normally) the name.
        ParamName = ListP[-1]
        DeclText = ParamName.strip()
        RightSpacePos = p.rfind(ParamName)
        ParamModifier = p[0:RightSpacePos]
        if ParamName == 'OPTIONAL':
            # 'OPTIONAL' is a qualifier, not the parameter name.
            if ParamModifier == '':
                ParamModifier += ' ' + 'OPTIONAL'
                DeclText = ''
            else:
                ParamName = ListP[-2]
                DeclText = ParamName.strip()
                RightSpacePos = p.rfind(ParamName)
                ParamModifier = p[0:RightSpacePos]
                ParamModifier += 'OPTIONAL'
        # Fold leading '*'s into the modifier.
        while DeclText.startswith('*'):
            ParamModifier += ' ' + '*'
            DeclText = DeclText.lstrip('*').strip()
        ParamName = DeclText
        # ignore array length if exists.
        LBIndex = ParamName.find('[')
        if LBIndex != -1:
            ParamName = ParamName[0:LBIndex]

        # Walk the modifier text to find where the name begins.
        Start = RightSpacePos
        Index = 0
        PreChar = ''
        while Index < Start:
            FirstChar = p[Index]

            if FirstChar == '\r':
                Index += 1
                LineSkipped += 1
                OffsetSkipped = 0
            elif FirstChar == '\n':
                Index += 1
                # '\n' directly after '\r' is the same CRLF newline.
                if PreChar != '\r':
                    LineSkipped += 1
                    OffsetSkipped = 0
            elif FirstChar == ' ':
                Index += 1
                OffsetSkipped += 1
            elif FirstChar == '\t':
                Index += 1
                OffsetSkipped += 8
            else:
                Index += 1
                OffsetSkipped += 1
            PreChar = FirstChar

        ParamBeginLine = FuncNameLine + LineSkipped
        ParamBeginOffset = FuncNameOffset + OffsetSkipped

        # Continue the walk past the name to the end of this parameter.
        Index = Start + len(ParamName)
        PreChar = ''
        while Index < len(p):
            FirstChar = p[Index]

            if FirstChar == '\r':
                Index += 1
                LineSkipped += 1
                OffsetSkipped = 0
            elif FirstChar == '\n':
                Index += 1
                if PreChar != '\r':
                    LineSkipped += 1
                    OffsetSkipped = 0
            elif FirstChar == ' ':
                Index += 1
                OffsetSkipped += 1
            elif FirstChar == '\t':
                Index += 1
                OffsetSkipped += 8
            else:
                Index += 1
                OffsetSkipped += 1
            PreChar = FirstChar

        ParamEndLine = FuncNameLine + LineSkipped
        ParamEndOffset = FuncNameOffset + OffsetSkipped
        # A variadic '...' parameter keeps its name verbatim.
        if ParamName != '...':
            ParamName = StripNonAlnumChars(ParamName)
        IdParam = DataClass.IdentifierClass(-1, ParamModifier, '', ParamName, '', DataClass.MODEL_IDENTIFIER_PARAMETER, -1, -1, ParamBeginLine, ParamBeginOffset, ParamEndLine, ParamEndOffset)
        ParamIdList.append(IdParam)

        OffsetSkipped += 1 #skip ','

    return ParamIdList
+
def GetFunctionList():
    """Convert the collected function definitions into
    DataClass.FunctionClass objects, computing the exact (line, column)
    at which each function's name starts.  Tabs count as 8 columns.
    """
    FuncObjList = []
    for FuncDef in FileProfile.FunctionDefinitionList:
        ParamIdList = []
        DeclText = FuncDef.Declarator.lstrip()
        FuncNameStartLine = FuncDef.NamePos[0]
        FuncNameStartColumn = FuncDef.NamePos[1]
        # Peel leading '*', whitespace and line breaks off the declarator,
        # tracking the name's (line, column) as we go.
        FirstChar = DeclText[0]
        while not FirstChar.isalpha() and FirstChar != '_':
            if FirstChar == '*':
                FuncDef.Modifier += '*'
                FuncNameStartColumn += 1
                DeclText = DeclText.lstrip('*')
            elif FirstChar == '\r':
                DeclText = DeclText.lstrip('\r\n').lstrip('\r')
                FuncNameStartLine += 1
                FuncNameStartColumn = 0
            elif FirstChar == '\n':
                DeclText = DeclText.lstrip('\n')
                FuncNameStartLine += 1
                FuncNameStartColumn = 0
            elif FirstChar == ' ':
                DeclText = DeclText.lstrip(' ')
                FuncNameStartColumn += 1
            elif FirstChar == '\t':
                DeclText = DeclText.lstrip('\t')
                FuncNameStartColumn += 8
            else:
                DeclText = DeclText[1:]
                FuncNameStartColumn += 1
            FirstChar = DeclText[0]

        FuncDef.Declarator = DeclText
        DeclSplitList = FuncDef.Declarator.split('(')
        if len(DeclSplitList) < 2:
            # No parameter list: not a usable function definition.
            continue

        FuncName = DeclSplitList[0]
        FuncNamePartList = FuncName.split()
        if len(FuncNamePartList) > 1:
            FuncName = FuncNamePartList[-1]
        NameStart = DeclSplitList[0].rfind(FuncName)
        if NameStart > 0:
            FuncDef.Modifier += ' ' + DeclSplitList[0][0:NameStart]
            # Walk the modifier prefix to adjust the name position;
            # 'EFIAPI' is consumed as one 6-column token.
            Index = 0
            PreChar = ''
            while Index < NameStart:
                FirstChar = DeclSplitList[0][Index]
                if DeclSplitList[0][Index:].startswith('EFIAPI'):
                    Index += 6
                    FuncNameStartColumn += 6
                    PreChar = ''
                    continue
                elif FirstChar == '\r':
                    Index += 1
                    FuncNameStartLine += 1
                    FuncNameStartColumn = 0
                elif FirstChar == '\n':
                    Index += 1
                    # '\n' directly after '\r' is the same CRLF newline.
                    if PreChar != '\r':
                        FuncNameStartLine += 1
                        FuncNameStartColumn = 0
                elif FirstChar == ' ':
                    Index += 1
                    FuncNameStartColumn += 1
                elif FirstChar == '\t':
                    Index += 1
                    FuncNameStartColumn += 8
                else:
                    Index += 1
                    FuncNameStartColumn += 1
                PreChar = FirstChar

        FuncObj = DataClass.FunctionClass(-1, FuncDef.Declarator, FuncDef.Modifier, FuncName.strip(), '', FuncDef.StartPos[0],FuncDef.StartPos[1],FuncDef.EndPos[0],FuncDef.EndPos[1], FuncDef.LeftBracePos[0], FuncDef.LeftBracePos[1], -1, ParamIdList, [], FuncNameStartLine, FuncNameStartColumn)
        FuncObjList.append(FuncObj)

    return FuncObjList
+
def GetFileModificationTimeFromDB(FullFileName):
    """Return the TimeStamp stored in the File table for FullFileName,
    or 0.0 when the file has no row."""
    SqlStatement = """ select TimeStamp
                       from File
                       where FullPath = \'%s\'
                   """ % (FullFileName)
    ResultSet = GetDB().TblFile.Exec(SqlStatement)
    # The original loop kept the value from the last row; indexing does
    # the same without iterating.
    if not ResultSet:
        return 0.0
    return ResultSet[-1][0]
+
def CollectSourceCodeDataIntoDB(RootDir):
    """Walk RootDir, parse every .c/.h file into code fragments, and store
    the resulting file/function/identifier objects in the ECC database.

    Fixes: the original bound os.walk() to a local named 'tuple' and used
    'file' as a loop variable, shadowing the builtins of the same names;
    both are renamed.
    """
    FileObjList = []
    IgnoredPattern = GetIgnoredDirListPattern()
    ParseErrorFileList = []

    for dirpath, dirnames, filenames in os.walk(RootDir):
        if IgnoredPattern.match(dirpath.upper()):
            continue

        for Dir in dirnames:
            Dirname = os.path.join(dirpath, Dir)
            if os.path.islink(Dirname):
                Dirname = os.path.realpath(Dirname)
                if os.path.isdir(Dirname):
                    # symlinks to directories are treated as directories
                    dirnames.remove(Dir)
                    dirnames.append(Dirname)

        for f in filenames:
            FullName = os.path.normpath(os.path.join(dirpath, f))
            if os.path.splitext(f)[1] in ('.h', '.c'):
                EdkLogger.info("Parsing " + FullName)
                model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
                collector = CodeFragmentCollector.CodeFragmentCollector(FullName)
                try:
                    collector.ParseFile()
                except UnicodeError:
                    # Remember the failure, then retry with preprocessor
                    # directives cleared out of the profile buffer.
                    ParseErrorFileList.append(FullName)
                    collector.CleanFileProfileBuffer()
                    collector.ParseFileWithClearedPPDirective()
#               collector.PrintFragments()
                BaseName = os.path.basename(f)
                DirName = os.path.dirname(FullName)
                Ext = os.path.splitext(f)[1].lstrip('.')
                ModifiedTime = os.path.getmtime(FullName)
                FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
                FileObjList.append(FileObj)
                collector.CleanFileProfileBuffer()

    if len(ParseErrorFileList) > 0:
        EdkLogger.info("Found unrecoverable error during parsing:\n\t%s\n" % "\n\t".join(ParseErrorFileList))

    Db = GetDB()
    for FileModelObj in FileObjList:
        Db.InsertOneFile(FileModelObj)

    Db.UpdateIdentifierBelongsToFunction()
+
def GetTableID(FullFileName, ErrorMsgList = None):
    """Look up the File-table ID for FullFileName.

    Returns the ID on success, -1 when no row matches, -2 when more than
    one row matches; failures also append a message to ErrorMsgList.
    """
    if ErrorMsgList is None:
        ErrorMsgList = []

    Db = GetDB()
    SqlStatement = """ select ID
                       from File
                       where FullPath like '%s'
                   """ % FullFileName

    # Materialize all candidate IDs up front instead of detecting the
    # duplicate inside the iteration.
    Matches = [Result[0] for Result in Db.TblFile.Exec(SqlStatement)]
    if len(Matches) > 1:
        ErrorMsgList.append('Duplicate file ID found in DB for file %s' % FullFileName)
        return -2
    if not Matches:
        ErrorMsgList.append('NO file ID found in DB for file %s' % FullFileName)
        return -1
    return Matches[0]
+
def GetIncludeFileList(FullFileName):
    """Return the '#include' identifier records of FullFileName, caching
    the result in IncludeFileListDict for later calls."""
    CachedList = IncludeFileListDict.get(FullFileName)
    if CachedList is not None:
        return CachedList

    FileID = GetTableID(FullFileName)
    if FileID < 0:
        # Unknown or ambiguous file: nothing to report.
        return []

    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_INCLUDE)
    ResultSet = GetDB().TblFile.Exec(SqlStatement)
    IncludeFileListDict[FullFileName] = ResultSet
    return ResultSet
+
def GetFullPathOfIncludeFile(Str, IncludePathList):
    """Resolve include name Str against each directory in IncludePathList
    and return the first normalized path that exists, or None."""
    for SearchDir in IncludePathList:
        Candidate = os.path.normpath(os.path.join(SearchDir, Str))
        if os.path.exists(Candidate):
            return Candidate
    return None
+
def GetAllIncludeFiles(FullFileName):
    """Return the transitive closure of header files included (directly or
    indirectly) by FullFileName, resolved against the file's include path
    list and cached in AllIncludeFileListDict.

    Fixes: the four-line include-name normalization was duplicated
    verbatim in both loops (now a shared nested helper), and the cache
    was looked up twice on a hit.
    """
    Cached = AllIncludeFileListDict.get(FullFileName)
    if Cached is not None:
        return Cached

    def _ExtractIncludeName(IncludeRecord):
        # '#include <Foo/Bar.h>' or '#include "Bar.h"' -> 'Foo/Bar.h'
        Name = IncludeRecord[0].lstrip('#').strip()
        Name = Name.lstrip('include').strip()
        Name = Name.strip('\"')
        return Name.lstrip('<').rstrip('>').strip()

    FileDirName = os.path.dirname(FullFileName)
    IncludePathList = IncludePathListDict.get(FileDirName)
    if IncludePathList is None:
        IncludePathList = MetaDataParser.GetIncludeListOfFile(EccGlobalData.gWorkspace, FullFileName, GetDB())
        # The file's own directory is searched first.
        if FileDirName not in IncludePathList:
            IncludePathList.insert(0, FileDirName)
        IncludePathListDict[FileDirName] = IncludePathList

    # Seed the queue with the file's direct includes.
    IncludeFileQueue = []
    for IncludeFile in GetIncludeFileList(FullFileName):
        FullPath = GetFullPathOfIncludeFile(_ExtractIncludeName(IncludeFile), IncludePathList)
        if FullPath is not None:
            IncludeFileQueue.append(FullPath)

    # Breadth-style expansion: each queued header contributes its own
    # includes right after itself, skipping ones already queued.
    i = 0
    while i < len(IncludeFileQueue):
        for IncludeFile in GetIncludeFileList(IncludeFileQueue[i]):
            FullPath = GetFullPathOfIncludeFile(_ExtractIncludeName(IncludeFile), IncludePathList)
            if FullPath is not None and FullPath not in IncludeFileQueue:
                IncludeFileQueue.insert(i + 1, FullPath)
        i += 1

    AllIncludeFileListDict[FullFileName] = IncludeFileQueue
    return IncludeFileQueue
+
def GetPredicateListFromPredicateExpStr(PES):
    """Split a predicate expression string on '&&'/'||' into sub-predicates.

    Each sub-predicate is stripped of trailing ';' and ')'; a predicate that
    looks like a function call (per GetFuncDeclPattern, after removing
    '.'/'->' member operators) is kept verbatim.
    """

    PredicateList = []
    i = 0
    PredicateBegin = 0
    #PredicateEnd = 0
    LogicOpPos = -1
    p = GetFuncDeclPattern()
    while i < len(PES) - 1:
        # A new predicate starts at the first identifier-like char ('*' for
        # de-reference included) seen after the most recent logic operator.
        if (PES[i].isalnum() or PES[i] == '_' or PES[i] == '*') and LogicOpPos > PredicateBegin:
            PredicateBegin = i
        if (PES[i] == '&' and PES[i+1] == '&') or (PES[i] == '|' and PES[i+1] == '|'):
            LogicOpPos = i
            Exp = PES[PredicateBegin:i].strip()
            # Exp may contain '.' or '->'
            TmpExp = Exp.replace('.', '').replace('->', '')
            if p.match(TmpExp):
                PredicateList.append(Exp)
            else:
                PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
        i += 1

    # Collect the trailing predicate that follows the last logic operator.
    if PredicateBegin > LogicOpPos:
        while PredicateBegin < len(PES):
            if PES[PredicateBegin].isalnum() or PES[PredicateBegin] == '_' or PES[PredicateBegin] == '*':
                break
            PredicateBegin += 1
        Exp = PES[PredicateBegin:len(PES)].strip()
        # Exp may contain '.' or '->'
        TmpExp = Exp.replace('.', '').replace('->', '')
        if p.match(TmpExp):
            PredicateList.append(Exp)
        else:
            PredicateList.append(Exp.rstrip(';').rstrip(')').strip())
    return PredicateList
+
def GetCNameList(Lvalue, StarList = None):
    """Split an l-value such as '*Foo->Bar.Baz' into its identifier list.

    Returns the member names in order (e.g. ['Foo', 'Bar', 'Baz']).  A '*'
    appearing before an identifier is appended to StarList (when the caller
    supplies one) so the de-reference depth can be counted.

    Fix: StarList previously defaulted to a shared mutable list ([]), so
    '*' entries accumulated across calls that omitted the argument.
    """
    if StarList is None:
        StarList = []
    Lvalue += ' '
    i = 0
    SearchBegin = 0
    VarStart = -1
    VarEnd = -1
    VarList = []

    while SearchBegin < len(Lvalue):
        while i < len(Lvalue):
            if Lvalue[i].isalnum() or Lvalue[i] == '_':
                # Accumulate identifier characters.
                if VarStart == -1:
                    VarStart = i
                VarEnd = i
                i += 1
            elif VarEnd != -1:
                # Identifier just ended: record it.
                VarList.append(Lvalue[VarStart:VarEnd+1])
                i += 1
                break
            else:
                # '*' before the identifier marks a pointer de-reference.
                if VarStart == -1 and Lvalue[i] == '*':
                    StarList.append('*')
                i += 1
        if VarEnd == -1:
            break

        # Continue scanning after the next '.' or '->' member operator.
        DotIndex = Lvalue[VarEnd:].find('.')
        ArrowIndex = Lvalue[VarEnd:].find('->')
        if DotIndex == -1 and ArrowIndex == -1:
            break
        elif DotIndex == -1 and ArrowIndex != -1:
            SearchBegin = VarEnd + ArrowIndex
        elif ArrowIndex == -1 and DotIndex != -1:
            SearchBegin = VarEnd + DotIndex
        else:
            # Both present: follow whichever comes first.
            SearchBegin = VarEnd + ((DotIndex < ArrowIndex) and DotIndex or ArrowIndex)

        i = SearchBegin
        VarStart = -1
        VarEnd = -1

    return VarList
+
def SplitPredicateByOp(Str, Op, IsFuncCalling = False):
    """Split predicate Str on comparison operator Op into [Name, Value].

    Returns [Str.strip()] (a single-element list) when Op cannot be found.
    When IsFuncCalling is True the operator is only searched after the
    function call's balanced closing parenthesis; otherwise the right-most
    occurrence of Op preceded by an identifier char, whitespace or ')' wins.
    """

    Name = Str.strip()
    Value = None

    if IsFuncCalling:
        Index = 0
        LBFound = False
        UnmatchedLBCount = 0
        while Index < len(Str):
            # Skip non-identifier chars, then the function name itself,
            # until the argument list's '(' has been seen.
            while not LBFound and Str[Index] != '_' and not Str[Index].isalnum():
                Index += 1

            while not LBFound and (Str[Index].isalnum() or Str[Index] == '_'):
                Index += 1
            # maybe type-cast at the begining, skip it.
            RemainingStr = Str[Index:].lstrip()
            if RemainingStr.startswith(')') and not LBFound:
                Index += 1
                continue

            if RemainingStr.startswith('(') and not LBFound:
                LBFound = True

            if Str[Index] == '(':
                UnmatchedLBCount += 1
                Index += 1
                continue

            if Str[Index] == ')':
                UnmatchedLBCount -= 1
                Index += 1
                if UnmatchedLBCount == 0:
                    # Argument list is balanced; Op may only follow here.
                    break
                continue

            Index += 1

        if UnmatchedLBCount > 0:
            # Unbalanced parentheses: give up and return the whole predicate.
            return [Name]

        IndexInRemainingStr = Str[Index:].find(Op)
        if IndexInRemainingStr == -1:
            return [Name]

        Name = Str[0:Index + IndexInRemainingStr].strip()
        Value = Str[Index+IndexInRemainingStr+len(Op):].strip()
        return [Name, Value]

    # Plain (non-call) predicate: search right-to-left, ignoring trailing
    # ';' and ')' so 'a == b);' still matches.
    TmpStr = Str.rstrip(';').rstrip(')')
    while True:
        Index = TmpStr.rfind(Op)
        if Index == -1:
            return [Name]

        # Accept the operator only when preceded by an identifier char,
        # whitespace or ')'; otherwise keep searching further left.
        if Str[Index - 1].isalnum() or Str[Index - 1].isspace() or Str[Index - 1] == ')':
            Name = Str[0:Index].strip()
            Value = Str[Index + len(Op):].strip()
            return [Name, Value]

        # NOTE(review): this drops the char at Index-1 as well, so the next
        # rfind restarts left of the rejected match — presumably intentional.
        TmpStr = Str[0:Index - 1]
+
def SplitPredicateStr(Str):
    """Split a predicate string into [[Name, Value], Operator].

    Tries each comparison operator in turn; returns [[Str, None], None]
    when no operator splits the predicate.
    """
    # Predicates that look like function calls (member operators removed)
    # get the parenthesis-aware splitting in SplitPredicateByOp.
    Stripped = Str.replace('.', '').replace('->', '')
    IsFuncCalling = GetFuncDeclPattern().match(Stripped) is not None

    # Two-character operators first so '>=' is never mistaken for '>'.
    for Op in ('==', '!=', '>=', '<=', '>', '<'):
        Parts = SplitPredicateByOp(Str, Op, IsFuncCalling)
        if len(Parts) > 1:
            return [Parts, Op]

    return [[Str, None], None]
+
def GetFuncContainsPE(ExpLine, ResultSet):
    """Return the first record whose (start, end) line range strictly
    contains ExpLine, or None when no record does."""
    for Record in ResultSet:
        if Record[0] < ExpLine < Record[1]:
            return Record
    return None
+
def PatternInModifier(Modifier, SubStr):
    """Return True when SubStr occurs as a whole whitespace-separated
    token of Modifier (not merely as a substring)."""
    return SubStr in Modifier.split()
+
def GetDataTypeFromModifier(ModifierStr):
    """Extract the data-type portion of a declaration's modifier string.

    Drops every whitespace token that is a configured modifier keyword
    (EccGlobalData.gConfig.ModifierList) or an array suffix like '[4]',
    and returns the remaining tokens joined by single spaces — or 'VOID'
    when nothing is left.

    Fix: the original removed items from the list while iterating over it,
    which skips the token following each removed one, so e.g. two adjacent
    modifier keywords left the second one in the returned type.
    """
    KeptTokens = []
    for Token in ModifierStr.split():
        if Token in EccGlobalData.gConfig.ModifierList:
            continue
        # remove array sufix
        if Token.startswith('['):
            continue
        KeptTokens.append(Token)

    ReturnType = ' '.join(KeptTokens)
    if len(ReturnType) == 0:
        ReturnType = 'VOID'
    return ReturnType
+
def DiffModifier(Str1, Str2):
    """Return True when the two modifier strings differ as whitespace-
    separated token sequences (whitespace amount is ignored)."""
    return Str1.split() != Str2.split()
+
def GetTypedefDict(FullFileName):
    """Build a {typedef-name: underlying-type} map for FullFileName.

    Typedefs from the file itself and from every (transitively) included
    file are merged; the result is cached per file in ComplexTypeDict.
    """

    Dict = ComplexTypeDict.get(FullFileName)
    if Dict != None:
        return Dict

    FileID = GetTableID(FullFileName)
    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select Modifier, Name, Value, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
    ResultSet = Db.TblFile.Exec(SqlStatement)

    Dict = {}
    for Result in ResultSet:
        # From the file itself only plain typedefs (no modifier text) count.
        if len(Result[0]) == 0:
            Dict[Result[1]] = Result[2]

    IncludeFileList = GetAllIncludeFiles(FullFileName)
    for F in IncludeFileList:
        FileID = GetTableID(F)
        if FileID < 0:
            continue

        FileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, Name, Value, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
        ResultSet = Db.TblFile.Exec(SqlStatement)

        for Result in ResultSet:
            if not Result[2].startswith('FP ('):
                Dict[Result[1]] = Result[2]
            else:
                # Function-pointer typedef ('FP (...'): map to its return
                # type instead of the pointer signature.
                if len(Result[0]) == 0:
                    Dict[Result[1]] = 'VOID'
                else:
                    Dict[Result[1]] = GetDataTypeFromModifier(Result[0])

    ComplexTypeDict[FullFileName] = Dict
    return Dict
+
def GetSUDict(FullFileName):
    """Build a {struct/union-name: body-text} map for FullFileName.

    Struct and union definitions from the file and from every
    (transitively) included file are merged; only entries with a non-empty
    body are kept.  The result is cached per file in the global SUDict.
    """

    Dict = SUDict.get(FullFileName)
    if Dict != None:
        return Dict

    FileID = GetTableID(FullFileName)
    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select Name, Value, ID
                       from %s
                       where Model = %d or Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
    ResultSet = Db.TblFile.Exec(SqlStatement)

    Dict = {}
    for Result in ResultSet:
        # Skip forward declarations (empty body).
        if len(Result[1]) > 0:
            Dict[Result[0]] = Result[1]

    IncludeFileList = GetAllIncludeFiles(FullFileName)
    for F in IncludeFileList:
        FileID = GetTableID(F)
        if FileID < 0:
            continue

        FileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Name, Value, ID
                       from %s
                       where Model = %d or Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_STRUCTURE, DataClass.MODEL_IDENTIFIER_UNION)
        ResultSet = Db.TblFile.Exec(SqlStatement)

        for Result in ResultSet:
            if len(Result[1]) > 0:
                Dict[Result[0]] = Result[1]

    SUDict[FullFileName] = Dict
    return Dict
+
def StripComments(Str):
    """Return Str with C block (/* */) and line (//) comments blanked out.

    Comment characters are overwritten with spaces so the offsets of the
    remaining text are preserved; newlines inside comments are kept.
    Trailing spaces are stripped from the result.
    """
    Chars = list(Str + ' ')
    InBlockComment = False
    InLineComment = False
    Pos = 0
    while Pos < len(Chars):
        Ch = Chars[Pos]
        if Ch == '\n':
            # A newline ends a // comment and is itself never blanked.
            if InLineComment:
                InLineComment = False
            Pos += 1
        elif InBlockComment and Ch == '*' and Chars[Pos + 1] == '/':
            # Close of a /* */ comment: blank the terminator too.
            Chars[Pos] = ' '
            Chars[Pos + 1] = ' '
            Pos += 2
            InBlockComment = False
        elif InBlockComment or InLineComment:
            # Interior of either kind of comment.
            Chars[Pos] = ' '
            Pos += 1
        elif Ch == '/' and Chars[Pos + 1] == '/' and Chars[Pos + 2] != '\n':
            # Open of a // comment; the slashes are blanked on later passes.
            InLineComment = True
        elif Ch == '/' and Chars[Pos + 1] == '*':
            # Open of a /* */ comment.
            Chars[Pos] = ' '
            Chars[Pos + 1] = ' '
            Pos += 2
            InBlockComment = True
        else:
            Pos += 1

    return "".join(Chars).rstrip(' ')
+
def GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict):
    """Return the declared type text of member FieldName inside Type.

    Type is resolved through TypedefDict/SUDict until a definition with a
    body ('{...}') is found, then the member list is scanned for FieldName.
    Returns None when the type chain or the field cannot be resolved.
    """
    Value = TypedefDict.get(Type)
    if Value == None:
        Value = SUDict.get(Type)
    if Value == None:
        return None

    LBPos = Value.find('{')
    while LBPos == -1:
        # No body yet: the value is another type name; follow the chain,
        # skipping the 'struct'/'union' keyword tokens.
        FTList = Value.split()
        for FT in FTList:
            if FT not in ('struct', 'union'):
                Value = TypedefDict.get(FT)
                if Value == None:
                    Value = SUDict.get(FT)
                break

        if Value == None:
            return None

        LBPos = Value.find('{')

#    RBPos = Value.find('}')
    Fields = Value[LBPos + 1:]
    Fields = StripComments(Fields)
    FieldsList = Fields.split(';')
    for Field in FieldsList:
        Field = Field.strip()
        Index = Field.rfind(FieldName)
        # Index < 1 also rejects a match at position 0 (no type text before it).
        if Index < 1:
            continue
        # The char before the match must not be part of a longer identifier.
        if not Field[Index - 1].isalnum():
            if Index + len(FieldName) == len(Field):
                Type = GetDataTypeFromModifier(Field[0:Index])
                return Type.strip()
            else:
                # For the condition that the field in struct is an array with [] sufixes...
                if not Field[Index + len(FieldName)].isalnum():
                    Type = GetDataTypeFromModifier(Field[0:Index])
                    return Type.strip()

    return None
+
def GetRealType(Type, TypedefDict, TargetType = None):
    """Follow typedef aliases in TypedefDict starting from Type.

    Stops as soon as TargetType is reached (when given); otherwise returns
    the last name in the alias chain, i.e. one with no further (truthy)
    typedef entry.
    """
    if TargetType is not None and Type == TargetType:
        return Type
    Alias = TypedefDict.get(Type)
    while Alias:
        Type = Alias
        if TargetType is not None and Type == TargetType:
            return Type
        Alias = TypedefDict.get(Type)
    return Type
+
def GetTypeInfo(RefList, Modifier, FullFileName, TargetType = None):
    """Resolve the type of a member-reference chain (a.b->c ...).

    RefList lists the member names to follow; Modifier is the declared
    modifier/type text of the base variable.  When TargetType is given the
    typedef chain is followed until that exact type is met; otherwise the
    final type text (possibly containing '*') may be returned directly.
    Returns None when any link of the chain cannot be resolved.
    """
    TypedefDict = GetTypedefDict(FullFileName)
    SUDict = GetSUDict(FullFileName)
    # Base type name: modifier text minus pointers, last token only.
    Type = GetDataTypeFromModifier(Modifier).replace('*', '').strip()

    Type = Type.split()[-1]
    Index = 0
    while Index < len(RefList):
        FieldName = RefList[Index]
        FromType = GetFinalTypeValue(Type, FieldName, TypedefDict, SUDict)
        if FromType == None:
            return None
        # we want to determine the exact type.
        if TargetType != None:
            Type = FromType.split()[0]
        # we only want to check if it is a pointer
        else:
            Type = FromType
            # Last link and it is a pointer: return the full pointer text.
            if Type.find('*') != -1 and Index == len(RefList)-1:
                return Type
            Type = FromType.split()[0]

        Index += 1

    Type = GetRealType(Type, TypedefDict, TargetType)

    return Type
+
def GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall = False, TargetType = None, StarList = None):
    """Resolve the type of the variable or function call named in PredVarList.

    PredVarList is the identifier chain (base name plus member names);
    FuncRecord is (StartLine, EndLine, Header) of the enclosing function.
    When IsFuncCall is True the name is looked up as a function and its
    return type is resolved; otherwise locals, then the function's
    parameters, then file-scope globals, then globals from included files
    are searched in that order.  Returns the resolved type text or None.

    Improvement: the identical type-resolution blocks that were duplicated
    four times each are factored into two local helpers; the dead local
    'VarFound' is removed.  Search order and results are unchanged.
    """
    PredVar = PredVarList[0]
    FileID = GetTableID(FullFileName)

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)

    def _FuncReturnType(Modifier):
        # Declared return type of a function, with typedefs resolved.
        Type = GetDataTypeFromModifier(Modifier).split()[-1]
        TypedefDict = GetTypedefDict(FullFileName)
        return GetRealType(Type, TypedefDict, TargetType)

    def _VarType(Modifier):
        # Type of a variable declared with Modifier; follows the member
        # chain in PredVarList and strips one type level per '*' in StarList.
        if len(PredVarList) > 1:
            return GetTypeInfo(PredVarList[1:], Modifier, FullFileName, TargetType)
        TypeList = GetDataTypeFromModifier(Modifier).split()
        Type = TypeList[-1]
        if len(TypeList) > 1 and StarList != None:
            for Star in StarList:
                Type = Type.strip()
                Type = Type.rstrip(Star)
            # Get real type after de-reference pointers.
            if len(Type.strip()) == 0:
                Type = TypeList[-2]
        TypedefDict = GetTypedefDict(FullFileName)
        return GetRealType(Type, TypedefDict, TargetType)

    # It is a function call: search declarations, then definitions, in the
    # file itself and then in every included file.
    if IsFuncCall:
        SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Value = \'%s\'
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
        for Result in Db.TblFile.Exec(SqlStatement):
            return _FuncReturnType(Result[0])

        IncludeFileList = GetAllIncludeFiles(FullFileName)
        for F in IncludeFileList:
            FileID = GetTableID(F)
            if FileID < 0:
                continue

            SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Value = \'%s\'
                   """ % ('Identifier' + str(FileID), DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION, PredVar)
            for Result in Db.TblFile.Exec(SqlStatement):
                return _FuncReturnType(Result[0])

        FileID = GetTableID(FullFileName)
        SqlStatement = """ select Modifier, ID
                       from Function
                       where BelongsToFile = %d and Name = \'%s\'
                   """ % (FileID, PredVar)
        for Result in Db.TblFile.Exec(SqlStatement):
            return _FuncReturnType(Result[0])

        for F in IncludeFileList:
            FileID = GetTableID(F)
            if FileID < 0:
                continue

            SqlStatement = """ select Modifier, ID
                       from Function
                       where BelongsToFile = %d and Name = \'%s\'
                   """ % (FileID, PredVar)
            for Result in Db.TblFile.Exec(SqlStatement):
                return _FuncReturnType(Result[0])

        return None

    # really variable, search local variable first
    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Name = \'%s\' and StartLine >= %d and StartLine <= %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar, FuncRecord[0], FuncRecord[1])
    for Result in Db.TblFile.Exec(SqlStatement):
        return _VarType(Result[0])

    # search function parameters second
    ParamList = GetParamList(FuncRecord[2])
    for Param in ParamList:
        if Param.Name.strip() == PredVar:
            return _VarType(Param.Modifier)

    # search global variable next
    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and Name = \'%s\' and BelongsToFunction = -1
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
    for Result in Db.TblFile.Exec(SqlStatement):
        return _VarType(Result[0])

    # finally, globals declared in included files
    IncludeFileList = GetAllIncludeFiles(FullFileName)
    for F in IncludeFileList:
        FileID = GetTableID(F)
        if FileID < 0:
            continue

        SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d and BelongsToFunction = -1 and Name = \'%s\'
                   """ % ('Identifier' + str(FileID), DataClass.MODEL_IDENTIFIER_VARIABLE, PredVar)
        for Result in Db.TblFile.Exec(SqlStatement):
            return _VarType(Result[0])
+
def CheckFuncLayoutReturnType(FullFileName):
    """ECC check: a function's return type must start its own line at column 0.

    Applies to function declarations (Identifier table) and function
    definitions (Function table).  Names excepted for
    ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE are skipped.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Modifier, ID, StartLine, StartColumn, EndLine, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        ReturnType = GetDataTypeFromModifier(Result[0])
        TypeStart = ReturnType.split()[0]
        FuncName = Result[5]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
            continue
        # The type must open the modifier text and start at column 0.
        Index = Result[0].find(TypeStart)
        if Index != 0 or Result[3] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, FileTable, Result[1])

        # StartLine == EndLine: declaration is all on one line, so the type
        # cannot be on its own line.
        if Result[2] == Result[4]:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear on its own line' % FuncName, FileTable, Result[1])

    SqlStatement = """ select Modifier, ID, StartLine, StartColumn, FunNameStartLine, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        ReturnType = GetDataTypeFromModifier(Result[0])
        TypeStart = ReturnType.split()[0]
        FuncName = Result[5]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, FuncName):
            continue
        # NOTE(review): this loop searches for the whole ReturnType while the
        # declaration loop above searches only its first token — confirm
        # whether the asymmetry is intended.
        Index = Result[0].find(ReturnType)
        if Index != 0 or Result[3] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear at the start of line' % FuncName, 'Function', Result[1])

        # Return type line must differ from the function-name line.
        if Result[2] == Result[4]:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_RETURN_TYPE, '[%s] Return Type should appear on its own line' % FuncName, 'Function', Result[1])
+
def CheckFuncLayoutModifier(FullFileName):
    """ECC check: optional functional modifiers must not precede the return
    type in a function declaration or definition."""
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)

    def _TypeOpensModifier(Modifier):
        # The first token of the computed return type must be the first
        # text of the raw modifier string.
        TypeStart = GetDataTypeFromModifier(Modifier).split()[0]
        return Modifier.find(TypeStart) == 0

    SqlStatement = """ select Modifier, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    for Record in Db.TblFile.Exec(SqlStatement):
        if not _TypeOpensModifier(Record[0]):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER, '', FileTable, Record[1])

    SqlStatement = """ select Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    for Record in Db.TblFile.Exec(SqlStatement):
        if not _TypeOpensModifier(Record[0]):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_OPTIONAL_FUNCTIONAL_MODIFIER, '', 'Function', Record[1])
+
def CheckFuncLayoutName(FullFileName):
    """ECC check of function-name layout and parameter naming.

    For every function declaration and definition: the function name must
    start at column 0; each parameter must sit on its own line with no
    blank line before it; parameter names must match the EFI naming
    pattern; and the closing ')' must be on a new line indented two spaces.

    Fix: in the function-definition loop the parameter-naming error was
    reported against the Identifier table (FileTable) although Result[1]
    is a Function-table ID, so the report pointed at the wrong record; it
    now uses 'Function' like every other report in that loop.
    """
    ErrorMsgList = []
    # Parameter variable format pattern.
    Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')
    ParamIgnoreList = ('VOID', '...')
    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select Name, ID, EndColumn, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[3]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
            continue
        if Result[2] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, FileTable, Result[1])
        ParamList = GetParamList(Result[0])
        if len(ParamList) == 0:
            continue
        StartLine = 0
        for Param in ParamList:
            if Param.StartLine <= StartLine:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be in its own line.' % Param.Name, FileTable, Result[1])
            if Param.StartLine - StartLine > 1:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, FileTable, Result[1])
            if not Pattern.match(Param.Name) and not Param.Name in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
                PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] NOT follow naming convention.' % Param.Name, FileTable, Result[1])
            StartLine = Param.StartLine

        if not Result[0].endswith('\n  )') and not Result[0].endswith('\r  )'):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', FileTable, Result[1])

    SqlStatement = """ select Modifier, ID, FunNameStartColumn, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[3]
        if EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, FuncName):
            continue
        if Result[2] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Function name [%s] should appear at the start of a line' % FuncName, 'Function', Result[1])
        ParamList = GetParamList(Result[0])
        if len(ParamList) == 0:
            continue
        StartLine = 0
        for Param in ParamList:
            if Param.StartLine <= StartLine:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Parameter %s should be in its own line.' % Param.Name, 'Function', Result[1])
            if Param.StartLine - StartLine > 1:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, 'Empty line appears before Parameter %s.' % Param.Name, 'Function', Result[1])
            if not Pattern.match(Param.Name) and not Param.Name in ParamIgnoreList and not EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Param.Name):
                # Fixed: Result[1] is a Function-table ID, so report against
                # the Function table (was FileTable).
                PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Parameter [%s] NOT follow naming convention.' % Param.Name, 'Function', Result[1])
            StartLine = Param.StartLine
        if not Result[0].endswith('\n  )') and not Result[0].endswith('\r  )'):
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_NAME, '\')\' should be on a new line and indented two spaces', 'Function', Result[1])
+
def CheckFuncLayoutPrototype(FullFileName):
    """ECC check that every function definition matches its prototype.

    Definitions are first compared with declarations from the same file;
    any definition without a local prototype is then compared with the
    declarations collected from all included files.  Mismatched modifiers,
    parameter counts and parameter modifiers are reported.

    Improvement: the ~20-line definition-vs-declaration comparison loop was
    duplicated verbatim; it is factored into a single local helper.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select Modifier, Header, Name, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return ErrorMsgList

    FuncDefList = []
    for Result in ResultSet:
        FuncDefList.append(Result)

    def _CompareDefWithDecls(FuncDef, FuncDeclList):
        # Compare one definition against a declaration list, reporting any
        # modifier/parameter mismatches.  Returns True when a declaration
        # with the same name was found (only the first match is checked).
        FuncName = FuncDef[2].strip()
        FuncModifier = FuncDef[0]
        FuncDefHeader = FuncDef[1]
        for FuncDecl in FuncDeclList:
            LBPos = FuncDecl[1].find('(')
            DeclName = FuncDecl[1][0:LBPos].strip()
            DeclModifier = FuncDecl[0]
            if DeclName != FuncName:
                continue
            if DiffModifier(FuncModifier, DeclModifier) and not EccGlobalData.gException.IsException(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, FuncName):
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Function [%s] modifier different with prototype.' % FuncName, 'Function', FuncDef[3])
            ParamListOfDef = GetParamList(FuncDefHeader)
            ParamListOfDecl = GetParamList(FuncDecl[1])
            if len(ParamListOfDef) != len(ParamListOfDecl):
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Parameter number different.', 'Function', FuncDef[3])
                return True

            Index = 0
            while Index < len(ParamListOfDef):
                if DiffModifier(ParamListOfDef[Index].Modifier, ParamListOfDecl[Index].Modifier):
                    PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_PROTO_TYPE, 'Parameter %s has different modifier with prototype.' % ParamListOfDef[Index].Name, 'Function', FuncDef[3])
                Index += 1
            return True
        return False

    SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FuncDeclList = []
    for Result in ResultSet:
        FuncDeclList.append(Result)

    # Definitions with no prototype in this file get a second chance
    # against declarations from all included files.
    UndeclFuncList = []
    for FuncDef in FuncDefList:
        if not _CompareDefWithDecls(FuncDef, FuncDeclList):
            UndeclFuncList.append(FuncDef)

    IncludeFileList = GetAllIncludeFiles(FullFileName)
    FuncDeclList = []
    for F in IncludeFileList:
        FileID = GetTableID(F, ErrorMsgList)
        if FileID < 0:
            continue

        FileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, Name, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
        ResultSet = Db.TblFile.Exec(SqlStatement)

        for Result in ResultSet:
            FuncDeclList.append(Result)

    for FuncDef in UndeclFuncList:
        _CompareDefWithDecls(FuncDef, FuncDeclList)
+
def CheckFuncLayoutBody(FullFileName):
    """ECC check: the '{' opening and '}' closing a function body must both
    start at column 0 of their line."""
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    FileTable = 'Identifier' + str(FileID)
    Db = GetDB()
    SqlStatement = """ select BodyStartColumn, EndColumn, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    FuncSet = Db.TblFile.Exec(SqlStatement)
    if len(FuncSet) == 0:
        return ErrorMsgList

    for Func in FuncSet:
        # Func = (open-brace column, close-brace column, function ID).
        if Func[0] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, 'open brace should be at the very beginning of a line.', 'Function', Func[2])
        if Func[1] != 0:
            PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_FUNCTION_BODY, 'close brace should be at the very beginning of a line.', 'Function', Func[2])
+
def CheckFuncLayoutLocalVariable(FullFileName):
    """ECC check: local variables must not be initialized in their
    declaration (a non-empty Value column means an initializer)."""
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    SqlStatement = """ select ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    FuncIDSet = Db.TblFile.Exec(SqlStatement)
    if len(FuncIDSet) == 0:
        return ErrorMsgList

    # Copy the ID rows first: the result-set name is reused per function.
    for FuncID in list(FuncIDSet):
        SqlStatement = """ select Name, Value, ID
                       from %s
                       where Model = %d and BelongsToFunction = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE, FuncID[0])
        for Var in Db.TblFile.Exec(SqlStatement):
            if len(Var[1]) > 0:
                PrintErrorMsg(ERROR_C_FUNCTION_LAYOUT_CHECK_NO_INIT_OF_VARIABLE, 'Variable Name: %s' % Var[0], FileTable, Var[2])
+
def CheckMemberVariableFormat(Name, Value, FileTable, TdId, ModelId):
    """Validate member names inside a struct/union/enum body string.

    Name is the type's name (used in messages), Value its full declaration
    text including '{...}'.  Nested struct/union/enum definitions are
    reported directly; the function returns the list of member names that
    violate the naming pattern so the caller can report them.
    """
    ErrMsgList = []
    # Member variable format pattern.
    Pattern = re.compile(r'^[A-Z]+\S*[a-z]\S*$')

    LBPos = Value.find('{')
    RBPos = Value.rfind('}')
    if LBPos == -1 or RBPos == -1:
        return ErrMsgList

    Fields = Value[LBPos + 1 : RBPos]
    Fields = StripComments(Fields).strip()
    NestPos = Fields.find ('struct')
    if NestPos != -1 and (NestPos + len('struct') < len(Fields)):
        # Keyword followed by a non-identifier char => nested definition.
        if not Fields[NestPos + len('struct') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested struct in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList
    NestPos = Fields.find ('union')
    if NestPos != -1 and (NestPos + len('union') < len(Fields)):
        if not Fields[NestPos + len('union') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested union in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList
    NestPos = Fields.find ('enum')
    if NestPos != -1 and (NestPos + len('enum') < len(Fields)):
        if not Fields[NestPos + len('enum') + 1].isalnum():
            if not EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, Name):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NESTED_STRUCTURE, 'Nested enum in [%s].' % (Name), FileTable, TdId)
            return ErrMsgList

    if ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
        FieldsList = Fields.split(',')
        # deal with enum is pre-assigned a value by function call ( , , , ...)
        QuoteCount = 0
        Index = 0
        RemoveCurrentElement = False
        while Index < len(FieldsList):
            Field = FieldsList[Index]

            if Field.find('(') != -1:
                QuoteCount += 1
                RemoveCurrentElement = True
                Index += 1
                continue

            if Field.find(')') != -1 and QuoteCount > 0:
                QuoteCount -= 1

            if RemoveCurrentElement:
                # NOTE(review): the element that contained '(' is itself kept;
                # only the comma-split pieces up to the matching ')' are
                # dropped — presumably because the kept piece still carries
                # the enum member's name.  Confirm before changing.
                FieldsList.remove(Field)
                if QuoteCount == 0:
                    RemoveCurrentElement = False
                continue

            if QuoteCount == 0:
                RemoveCurrentElement = False

            Index += 1
    else:
        FieldsList = Fields.split(';')

    for Field in FieldsList:
        Field = Field.strip()
        if Field == '':
            continue
        # For the condition that the field in struct is an array with [] sufixes...
        if Field[-1] == ']':
            LBPos = Field.find('[')
            Field = Field[0:LBPos]
        # For the condition that bit field ": Number"
        if Field.find(':') != -1:
            ColonPos = Field.find(':')
            Field = Field[0:ColonPos]

        Field = Field.strip()
        if Field == '':
            continue
        # Enum could directly assign value to variable
        Field = Field.split('=')[0].strip()
        TokenList = Field.split()
        # Remove pointers before variable
        if not Pattern.match(TokenList[-1].lstrip('*')):
            ErrMsgList.append(TokenList[-1].lstrip('*'))

    return ErrMsgList
+
def CheckDeclTypedefFormat(FullFileName, ModelId):
    """Check declaration/typedef rules for struct, enum or union types in one file.

    For the declaration kind selected by ModelId this routine:
      1. checks member-variable naming for matching typedef-ed declarations
         found in this file (via CheckMemberVariableFormat),
      2. checks member-variable naming for the raw declarations themselves,
      3. verifies each declaration is covered by a typedef (first in this
         file, then in its include files) and that the typedef name is all
         UPPER case; declarations with no typedef anywhere are reported.

    All findings are reported through PrintErrorMsg; ErrorMsgList is only
    returned on early failure of the file-table lookup.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # All declarations of the requested kind (struct/enum/union) in this file.
    SqlStatement = """ select Name, StartLine, EndLine, ID, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, ModelId)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    ResultList = []
    for Result in ResultSet:
        ResultList.append(Result)

    # Pick the error code matching the declaration kind being checked.
    ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ALL
    if ModelId == DataClass.MODEL_IDENTIFIER_STRUCTURE:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_STRUCTURE_DECLARATION
    elif ModelId == DataClass.MODEL_IDENTIFIER_ENUMERATE:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_ENUMERATED_TYPE
    elif ModelId == DataClass.MODEL_IDENTIFIER_UNION:
        ErrorType = ERROR_DECLARATION_DATA_TYPE_CHECK_UNION_TYPE

    # All typedefs recorded for this file.
    SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
    TdSet = Db.TblFile.Exec(SqlStatement)
    TdList = []
    for Td in TdSet:
        TdList.append(Td)
    # Check member variable name format that from typedefs of ONLY this file.
    for Td in TdList:
        Name = Td[1].strip()
        Value = Td[2].strip()
        # Classify the typedef-ed value by its leading keyword.
        if Value.startswith('enum'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
        elif Value.startswith('struct'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
        elif Value.startswith('union'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
        else:
            continue

        if ValueModelId != ModelId:
            continue
        # Check member variable format.
        ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Td[5], ModelId)
        for ErrMsg in ErrMsgList:
            if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Name+'.'+ErrMsg):
                continue
            PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] NOT follow naming convention.' % (Name+'.'+ErrMsg), FileTable, Td[5])

    # First check in current file to see whether struct/union/enum is typedef-ed.
    UntypedefedList = []
    for Result in ResultList:
        # Check member variable format.
        Name = Result[0].strip()
        Value = Result[4].strip()
        if Value.startswith('enum'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_ENUMERATE
        elif Value.startswith('struct'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_STRUCTURE
        elif Value.startswith('union'):
            ValueModelId = DataClass.MODEL_IDENTIFIER_UNION
        else:
            continue

        if ValueModelId != ModelId:
            continue
        ErrMsgList = CheckMemberVariableFormat(Name, Value, FileTable, Result[3], ModelId)
        for ErrMsg in ErrMsgList:
            if EccGlobalData.gException.IsException(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, Result[0]+'.'+ErrMsg):
                continue
            PrintErrorMsg(ERROR_NAMING_CONVENTION_CHECK_VARIABLE_NAME, 'Member variable [%s] NOT follow naming convention.' % (Result[0]+'.'+ErrMsg), FileTable, Result[3])
        # Check whether it is typedefed.
        # A typedef covers a declaration either when its line range encloses
        # the declaration, or when the declaration name appears in its value.
        Found = False
        for Td in TdList:
            # skip function pointer
            if len(Td[0]) > 0:
                continue
            if Result[1] >= Td[3] and Td[4] >= Result[2]:
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Result[0] in Td[2].split():
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Found:
                break

        if not Found:
            UntypedefedList.append(Result)
            continue

    if len(UntypedefedList) == 0:
        return

    # Second pass: look for the missing typedefs in all included files.
    IncludeFileList = GetAllIncludeFiles(FullFileName)
    TdList = []
    for F in IncludeFileList:
        FileID = GetTableID(F, ErrorMsgList)
        if FileID < 0:
            continue

        IncludeFileTable = 'Identifier' + str(FileID)
        SqlStatement = """ select Modifier, Name, Value, StartLine, EndLine, ID
                           from %s
                           where Model = %d
                       """ % (IncludeFileTable, DataClass.MODEL_IDENTIFIER_TYPEDEF)
        ResultSet = Db.TblFile.Exec(SqlStatement)
        TdList.extend(ResultSet)

    for Result in UntypedefedList:

        # Check whether it is typedefed.
        Found = False
        for Td in TdList:
            # skip function pointer (non-empty Modifier)
            if len(Td[0]) > 0:
                continue
            if Result[1] >= Td[3] and Td[4] >= Result[2]:
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Result[0] in Td[2].split():
                Found = True
                if not Td[1].isupper():
                    PrintErrorMsg(ErrorType, 'Typedef should be UPPER case', FileTable, Td[5])
            if Found:
                break

        if not Found:
            PrintErrorMsg(ErrorType, 'No Typedef for %s' % Result[0], FileTable, Result[3])
            continue
+
def CheckDeclStructTypedef(FullFileName):
    # Run the typedef declaration checks for 'struct' declarations only.
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_STRUCTURE)
+
def CheckDeclEnumTypedef(FullFileName):
    # Run the typedef declaration checks for 'enum' declarations only.
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_ENUMERATE)
+
def CheckDeclUnionTypedef(FullFileName):
    # Run the typedef declaration checks for 'union' declarations only.
    CheckDeclTypedefFormat(FullFileName, DataClass.MODEL_IDENTIFIER_UNION)
+
def CheckDeclArgModifier(FullFileName):
    """Flag interface modifiers (IN/OUT/OPTIONAL/UNALIGNED) used outside of
    function parameter lists.

    Three places are scanned: local/global variable declarations, function
    declaration return types, and function definition return types.  Each
    offending record is reported once through PrintErrorMsg.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    ModifierTuple = ('IN', 'OUT', 'OPTIONAL', 'UNALIGNED')
    # Very long modifiers are skipped for variables (likely parser noise).
    MAX_MODIFIER_LENGTH = 100

    # Variable declarations: modifier must not carry the interface keywords.
    VariableSql = """ select Modifier, Name, ID
                      from %s
                      where Model = %d
                  """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    for Record in Db.TblFile.Exec(VariableSql):
        for Keyword in ModifierTuple:
            if PatternInModifier(Record[0], Keyword) and len(Record[0]) < MAX_MODIFIER_LENGTH:
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Variable Modifier %s' % Record[0], FileTable, Record[2])
                break

    # Function declarations: return-type modifier must be keyword-free.
    DeclarationSql = """ select Modifier, Name, ID
                         from %s
                         where Model = %d
                     """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    for Record in Db.TblFile.Exec(DeclarationSql):
        for Keyword in ModifierTuple:
            if PatternInModifier(Record[0], Keyword):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Record[0], FileTable, Record[2])
                break

    # Function definitions: same rule applied to the Function table.
    DefinitionSql = """ select Modifier, Header, ID
                        from Function
                        where BelongsToFile = %d
                    """ % (FileID)
    for Record in Db.TblFile.Exec(DefinitionSql):
        for Keyword in ModifierTuple:
            if PatternInModifier(Record[0], Keyword):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_IN_OUT_MODIFIER, 'Return Type Modifier %s' % Record[0], FileTable, Record[2])
                break
+
def CheckDeclNoUseCType(FullFileName):
    """Flag use of native C types (int/unsigned/char/void/static/long) in
    declarations.

    Scans variable declarations (report first matching type, then stop for
    that record), function declarations and function definitions (report the
    return type and every parameter whose modifier contains a native type).
    Functions listed as exceptions are skipped entirely.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    CTypeTuple = ('int', 'unsigned', 'char', 'void', 'static', 'long')

    # Variable declarations.
    VariableSql = """ select Modifier, Name, ID
                      from %s
                      where Model = %d
                  """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    for Record in Db.TblFile.Exec(VariableSql):
        for CType in CTypeTuple:
            if PatternInModifier(Record[0], CType):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Variable type %s' % CType, FileTable, Record[2])
                break

    # Function declarations: check return type and each parameter.
    DeclarationSql = """ select Modifier, Name, ID, Value
                         from %s
                         where Model = %d
                     """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    for Record in Db.TblFile.Exec(DeclarationSql):
        ParamList = GetParamList(Record[1])
        FuncName = Record[3]
        if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, FuncName):
            continue
        for CType in CTypeTuple:
            if PatternInModifier(Record[0], CType):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '%s Return type %s' % (FuncName, Record[0]), FileTable, Record[2])

            for Param in ParamList:
                if PatternInModifier(Param.Modifier, CType):
                    PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Record[2])

    # Function definitions: same checks against the Function table.
    DefinitionSql = """ select Modifier, Header, ID, Name
                        from Function
                        where BelongsToFile = %d
                    """ % (FileID)
    for Record in Db.TblFile.Exec(DefinitionSql):
        ParamList = GetParamList(Record[1])
        FuncName = Record[3]
        if EccGlobalData.gException.IsException(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, FuncName):
            continue
        for CType in CTypeTuple:
            if PatternInModifier(Record[0], CType):
                PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, '[%s] Return type %s' % (FuncName, Record[0]), FileTable, Record[2])

            for Param in ParamList:
                if PatternInModifier(Param.Modifier, CType):
                    PrintErrorMsg(ERROR_DECLARATION_DATA_TYPE_CHECK_NO_USE_C_TYPE, 'Parameter %s' % Param.Name, FileTable, Record[2])
+
+
def CheckPointerNullComparison(FullFileName):
    """Check predicate expressions that use a pointer without an explicit
    NULL comparison.

    For every predicate expression with no comparison operator, the type of
    the single operand is resolved (function-call return types are cached in
    FuncReturnTypeDict); operands whose type contains '*' are reported via
    PrintErrorMsg with ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # All predicate expressions recorded for this file.
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])

    # Function records, used to find the function enclosing each predicate.
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])

    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord == None:
            continue

        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
            PredInfo = SplitPredicateStr(Exp)
            # Only bare predicates (no comparison operator) are candidates.
            if PredInfo[1] == None:
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    # Looks like a call: keep only the callee name; its return
                    # type is cacheable.
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True

                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue
                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type != None:
                        # Cached hit: report when the return type is a pointer.
                        if Type.find('*') != -1:
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue

                    # A cached None means a previous lookup failed; skip.
                    if PredVarStr in FuncReturnTypeDict:
                        continue

                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, None, StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type == None:
                    continue
                if Type.find('*') != -1:
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_COMPARISON_NULL_TYPE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
def CheckNonBooleanValueComparison(FullFileName):
    """Check predicate expressions that use a non-BOOLEAN value as if it
    were a boolean.

    For every predicate expression with no comparison operator, the operand
    type is resolved (function return types cached in FuncReturnTypeDict);
    operands whose type does not contain 'BOOLEAN' are reported with
    ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # All predicate expressions recorded for this file.
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])

    # Function records, used to find the function enclosing each predicate.
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])

    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord == None:
            continue

        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
#            if p.match(Exp):
#                continue
            PredInfo = SplitPredicateStr(Exp)
            # Only bare predicates (no comparison operator) are candidates.
            if PredInfo[1] == None:
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    # Looks like a call: keep only the callee name; its return
                    # type is cacheable.
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True

                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue

                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type != None:
                        # Cached hit: report when return type is not BOOLEAN.
                        if Type.find('BOOLEAN') == -1:
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue

                    # A cached None means a previous lookup failed; skip.
                    if PredVarStr in FuncReturnTypeDict:
                        continue

                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type == None:
                    continue
                if Type.find('BOOLEAN') == -1:
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_NO_BOOLEAN_OPERATOR, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
+
def CheckBooleanValueComparison(FullFileName):
    """Check predicate expressions that compare a BOOLEAN against TRUE/FALSE.

    For every predicate of the form 'X == TRUE', 'X != TRUE', 'X == FALSE'
    or 'X != FALSE', the type of X is resolved (function return types cached
    in FuncReturnTypeDict); when the type contains 'BOOLEAN' the comparison
    is reported with ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, since a
    BOOLEAN should be used directly in the condition.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    # cache the found function return type to accelerate later checking in this file.
    FuncReturnTypeDict = {}

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # All predicate expressions recorded for this file.
    SqlStatement = """ select Value, StartLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_PREDICATE_EXPRESSION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return
    PSL = []
    for Result in ResultSet:
        PSL.append([Result[0], Result[1], Result[2]])

    # Function records, used to find the function enclosing each predicate.
    SqlStatement = """ select BodyStartLine, EndLine, Header, Modifier, ID
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    FL = []
    for Result in ResultSet:
        FL.append([Result[0], Result[1], Result[2], Result[3], Result[4]])

    p = GetFuncDeclPattern()
    for Str in PSL:
        FuncRecord = GetFuncContainsPE(Str[1], FL)
        if FuncRecord == None:
            continue

        for Exp in GetPredicateListFromPredicateExpStr(Str[0]):
            PredInfo = SplitPredicateStr(Exp)
            # Only ==/!= comparisons against TRUE/FALSE literals qualify.
            if PredInfo[1] in ('==', '!=') and PredInfo[0][1] in ('TRUE', 'FALSE'):
                PredVarStr = PredInfo[0][0].strip()
                IsFuncCall = False
                SearchInCache = False
                # PredVarStr may contain '.' or '->'
                TmpStr = PredVarStr.replace('.', '').replace('->', '')
                if p.match(TmpStr):
                    # Looks like a call: keep only the callee name; its return
                    # type is cacheable.
                    PredVarStr = PredVarStr[0:PredVarStr.find('(')]
                    SearchInCache = True
                    # Only direct function call using IsFuncCall branch. Multi-level ref. function call is considered a variable.
                    if TmpStr.startswith(PredVarStr):
                        IsFuncCall = True

                if PredVarStr.strip() in IgnoredKeywordList:
                    continue
                StarList = []
                PredVarList = GetCNameList(PredVarStr, StarList)
                # No variable found, maybe value first? like (0 == VarName)
                if len(PredVarList) == 0:
                    continue

                if SearchInCache:
                    Type = FuncReturnTypeDict.get(PredVarStr)
                    if Type != None:
                        # Cached hit: report when the type IS BOOLEAN.
                        if Type.find('BOOLEAN') != -1:
                            PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
                        continue

                    # A cached None means a previous lookup failed; skip.
                    if PredVarStr in FuncReturnTypeDict:
                        continue

                Type = GetVarInfo(PredVarList, FuncRecord, FullFileName, IsFuncCall, 'BOOLEAN', StarList)
                if SearchInCache:
                    FuncReturnTypeDict[PredVarStr] = Type
                if Type == None:
                    continue
                if Type.find('BOOLEAN') != -1:
                    PrintErrorMsg(ERROR_PREDICATE_EXPRESSION_CHECK_BOOLEAN_VALUE, 'Predicate Expression: %s' % Exp, FileTable, Str[2])
+
+
def CheckHeaderFileData(FullFileName):
    """Verify that a header file contains no variable or function definitions.

    Every variable whose modifier does not start with 'extern' and every
    function definition recorded for the file is reported through
    PrintErrorMsg with ERROR_INCLUDE_FILE_CHECK_DATA.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)

    # Variables in a header must be 'extern' declarations, not definitions.
    VariableSql = """ select ID, Modifier
                      from %s
                      where Model = %d
                  """ % (FileTable, DataClass.MODEL_IDENTIFIER_VARIABLE)
    for Record in Db.TblFile.Exec(VariableSql):
        if not Record[1].startswith('extern'):
            PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Variable definition appears in header file', FileTable, Record[0])

    # Any function body recorded for a header file is a definition.
    FunctionSql = """ select ID
                      from Function
                      where BelongsToFile = %d
                  """ % FileID
    for Record in Db.TblFile.Exec(FunctionSql):
        PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_DATA, 'Function definition appears in header file', 'Function', Record[0])

    return ErrorMsgList
+
def CheckHeaderFileIfndef(FullFileName):
    """Check the #ifndef include-guard layout of a header file.

    Reports three conditions:
      1. no #ifndef at all (IFNDEF_STATEMENT_1),
      2. non-comment content before the #ifndef (IFNDEF_STATEMENT_2),
      3. non-comment content after the last #endif (IFNDEF_STATEMENT_3).
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # All #ifndef directives in this file, in source order.
    SqlStatement = """ select Value, StartLine
                       from %s
                       where Model = %d order by StartLine
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_MACRO_IFNDEF)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_1, '', 'File', FileID)
        return ErrorMsgList
    for Result in ResultSet:
        # NOTE: the inner query below rebinds ResultSet/Result, shadowing the
        # outer loop's names; the outer 'for' keeps iterating the original
        # result list because its iterator was created before the rebind.
        # Only comments may appear before the #ifndef.
        SqlStatement = """ select Value, EndLine
                           from %s
                           where EndLine < %d
                       """ % (FileTable, Result[1])
        ResultSet = Db.TblFile.Exec(SqlStatement)
        for Result in ResultSet:
            if not Result[0].startswith('/*') and not Result[0].startswith('//'):
                PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_2, '', 'File', FileID)
                break

    # Only comments may appear after the last #endif.
    SqlStatement = """ select Value
                       from %s
                       where StartLine > (select max(EndLine) from %s where Model = %d)
                   """ % (FileTable, FileTable, DataClass.MODEL_IDENTIFIER_MACRO_ENDIF)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        if not Result[0].startswith('/*') and not Result[0].startswith('//'):
            PrintErrorMsg(ERROR_INCLUDE_FILE_CHECK_IFNDEF_STATEMENT_3, '', 'File', FileID)
    return ErrorMsgList
+
def CheckDoxygenCommand(FullFileName):
    """Check that doxygen commands used in comments are from the supported set.

    Scans every comment and function-header comment recorded for the file
    and reports:
      - 'BUGBUG' / 'TODO' markers that should be '@bug' / '@todo' tags,
      - '@word' tokens whose command is not in DoxygenCommandList; a token
        written as '@cmd[...]' is checked against the part before the '['.
    Tokens listed as exceptions are skipped.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # Both plain comments and function-header comments are scanned.
    SqlStatement = """ select Value, ID
                       from %s
                       where Model = %d or Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    DoxygenCommandList = ['bug', 'todo', 'example', 'file', 'attention', 'param', 'post', 'pre', 'retval', 'return', 'sa', 'since', 'test', 'note', 'par']
    for Result in ResultSet:
        CommentStr = Result[0]
        CommentPartList = CommentStr.split()
        for Part in CommentPartList:
            if Part.upper() == 'BUGBUG':
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Bug should be marked with doxygen tag @bug', FileTable, Result[1])
            if Part.upper() == 'TODO':
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'ToDo should be marked with doxygen tag @todo', FileTable, Result[1])
            if Part.startswith('@'):
                if EccGlobalData.gException.IsException(ERROR_DOXYGEN_CHECK_COMMAND, Part):
                    continue
                if Part.lstrip('@').isalpha():
                    # Plain '@command' token.
                    if Part.lstrip('@') not in DoxygenCommandList:
                        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
                else:
                    # Token like '@param[in]': validate the part before '['.
                    Index = Part.find('[')
                    if Index == -1:
                        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
                        # BUG FIX: previously fell through with Index == -1,
                        # slicing Part[1:-1] (dropping the last character) and
                        # sometimes reporting the same token a second time.
                        continue
                    RealCmd = Part[1:Index]
                    if RealCmd not in DoxygenCommandList:
                        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMAND, 'Unknown doxygen command %s' % Part, FileTable, Result[1])
+
+
def CheckDoxygenTripleForwardSlash(FullFileName):
    """Check for '///<' trailing doxygen comments placed inside function
    bodies, where they are not valid; each occurrence is reported with
    ERROR_DOXYGEN_CHECK_COMMENT_FORMAT.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()

    # Function body extents (line/column ranges) for this file.
    SqlStatement = """ select ID, BodyStartLine, BodyStartColumn, EndLine, EndColumn
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    if len(ResultSet) == 0:
        return

    FuncDefSet = []
    for Result in ResultSet:
        FuncDefSet.append(Result)


    FileTable = 'Identifier' + str(FileID)
    # All comments with their position ranges.
    SqlStatement = """ select Value, ID, StartLine, StartColumn, EndLine, EndColumn
                       from %s
                       where Model = %d

                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    CommentSet = []
    try:
        for Result in ResultSet:
            CommentSet.append(Result)
    except:
        print 'Unrecognized chars in comment of file %s', FullFileName


    for Result in CommentSet:
        CommentStr = Result[0]
        StartLine = Result[2]
        StartColumn = Result[3]
        EndLine = Result[4]
        EndColumn = Result[5]
        # Only '///<' trailing comments are subject to this check.
        if not CommentStr.startswith('///<'):
            continue

        # The four cases below cover every way a comment range can lie inside
        # a function body range (same start line, same end line, both, neither).
        Found = False
        for FuncDef in FuncDefSet:
            if StartLine == FuncDef[1] and StartColumn > FuncDef[2] and EndLine == FuncDef[3] and EndColumn < FuncDef[4]:
                Found = True
                break
            if StartLine > FuncDef[1] and EndLine < FuncDef[3]:
                Found = True
                break
            if StartLine == FuncDef[1] and StartColumn > FuncDef[2] and EndLine < FuncDef[3]:
                Found = True
                break
            if StartLine > FuncDef[1] and EndLine == FuncDef[3] and EndColumn < FuncDef[4]:
                Found = True
                break
        if Found:
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_FORMAT, '', FileTable, Result[1])
+
+
def CheckFileHeaderDoxygenComments(FullFileName):
    """Check the doxygen file-header comment of a source file.

    The header must be a comment starting at line 1, column 0, must begin
    with '/** @file', end with '**/', and contain at least one period.
    Each violation is reported through PrintErrorMsg.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # Only a comment anchored at the very first line and column qualifies.
    HeaderSql = """ select Value, ID
                    from %s
                    where Model = %d and StartLine = 1 and StartColumn = 0
                """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
    HeaderSet = Db.TblFile.Exec(HeaderSql)
    if len(HeaderSet) == 0:
        PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)
        return ErrorMsgList

    for Record in HeaderSet:
        CommentStr = Record[0]
        CommentId = Record[1]
        if not CommentStr.startswith('/** @file'):
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, CommentId)
        if not CommentStr.endswith('**/'):
            PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with **/', FileTable, CommentId)
        if CommentStr.find('.') == -1:
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', FileTable, CommentId)
+
def CheckFuncHeaderDoxygenComments(FullFileName):
    """Check that every function declaration and definition has a doxygen
    header comment immediately preceding it, and that the comment is
    consistent with the function signature.

    Declarations are matched against plain comments; definitions against
    function-header comments.  Missing comments are reported (unless the
    function is listed as an exception); present comments are validated via
    CheckFunctionHeaderConsistentWithDoxygenComment.
    """
    ErrorMsgList = []

    FileID = GetTableID(FullFileName, ErrorMsgList)
    if FileID < 0:
        return ErrorMsgList

    Db = GetDB()
    FileTable = 'Identifier' + str(FileID)
    # Plain comments, used for the declaration check below.
    SqlStatement = """ select Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)

    ResultSet = Db.TblFile.Exec(SqlStatement)
    CommentSet = []
    try:
        for Result in ResultSet:
            CommentSet.append(Result)
    except:
        print 'Unrecognized chars in comment of file %s', FullFileName

    # Func Decl check
    SqlStatement = """ select Modifier, Name, StartLine, ID, Value
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_DECLARATION)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[4]
        FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
        if FunctionHeaderComment:
            CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
        else:
            if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
                continue
            ErrorMsgList.append('Line %d :Function %s has NO comment immediately preceding it.' % (Result[2], Result[1]))
            PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), FileTable, Result[3])

    # Func Def check
    # Definitions are matched against function-header comments instead.
    SqlStatement = """ select Value, StartLine, EndLine, ID
                       from %s
                       where Model = %d
                   """ % (FileTable, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)

    ResultSet = Db.TblFile.Exec(SqlStatement)
    CommentSet = []
    try:
        for Result in ResultSet:
            CommentSet.append(Result)
    except:
        print 'Unrecognized chars in comment of file %s', FullFileName

    SqlStatement = """ select Modifier, Header, StartLine, ID, Name
                       from Function
                       where BelongsToFile = %d
                   """ % (FileID)
    ResultSet = Db.TblFile.Exec(SqlStatement)
    for Result in ResultSet:
        FuncName = Result[4]
        FunctionHeaderComment = CheckCommentImmediatelyPrecedeFunctionHeader(Result[1], Result[2], CommentSet)
        if FunctionHeaderComment:
            CheckFunctionHeaderConsistentWithDoxygenComment(Result[0], Result[1], Result[2], FunctionHeaderComment[0], FunctionHeaderComment[1], ErrorMsgList, FunctionHeaderComment[3], FileTable)
        else:
            if EccGlobalData.gException.IsException(ERROR_HEADER_CHECK_FUNCTION, FuncName):
                continue
            ErrorMsgList.append('Line %d :Function [%s] has NO comment immediately preceding it.' % (Result[2], Result[1]))
            PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'Function [%s] has NO comment immediately preceding it.' % (FuncName), 'Function', Result[3])
    return ErrorMsgList
+
def CheckCommentImmediatelyPrecedeFunctionHeader(FuncName, FuncStartLine, CommentSet):
    """Return the first comment record that ends on the line directly above
    FuncStartLine, or None when no comment immediately precedes the function.

    FuncName is accepted for interface compatibility but is not consulted;
    matching is purely positional (element [2] of a comment record holds its
    end line).
    """
    TargetEndLine = FuncStartLine - 1
    return next((Comment for Comment in CommentSet if Comment[2] == TargetEndLine), None)
+
def GetDoxygenStrFromComment(Str):
    """Split a function-header comment into its doxygen tag strings.

    Returns one list entry per @param/@retval/@return tag, with @param
    entries first, then @retval, then @return.  Text before the first tag is
    discarded.  Trailing '-', '*' and '/' characters are stripped from the
    last entry to drop the comment terminator ('**/' or '--*/').
    """
    DoxygenStrList = []

    # Each split segment after the first belongs to one tag; re-prefix it.
    ParamTagList = Str.split('@param')
    if len(ParamTagList) > 1:
        for Segment in ParamTagList[1:]:
            DoxygenStrList.append('@param' + Segment)

    RetvalTagList = ParamTagList[-1].split('@retval')
    if len(RetvalTagList) > 1:
        if len(ParamTagList) > 1:
            # The last @param entry absorbed the @retval text; trim it back.
            DoxygenStrList[-1] = '@param' + RetvalTagList[0]
        for Segment in RetvalTagList[1:]:
            DoxygenStrList.append('@retval' + Segment)

    ReturnTagList = RetvalTagList[-1].split('@return')
    if len(ReturnTagList) > 1:
        if len(RetvalTagList) > 1:
            # The last @retval entry absorbed the @return text; trim it back.
            DoxygenStrList[-1] = '@retval' + ReturnTagList[0]
        elif len(ParamTagList) > 1:
            DoxygenStrList[-1] = '@param' + ReturnTagList[0]
        for Segment in ReturnTagList[1:]:
            DoxygenStrList.append('@return' + Segment)

    if len(DoxygenStrList) > 0:
        # rstrip takes a character set, so this removes any run of trailing
        # '-', '*' and '/' characters from the final tag.
        DoxygenStrList[-1] = DoxygenStrList[-1].rstrip('--*/')

    return DoxygenStrList
+
def CheckGeneralDoxygenCommentLayout(Str, StartLine, ErrorMsgList, CommentId = -1, TableName = ''):
    """Check the overall layout of a doxygen comment: it must be delimited
    by '/**' ... '**/' and no @retval may appear before the last @param.

    Each problem is appended to ErrorMsgList and also reported through
    PrintErrorMsg with ERROR_DOXYGEN_CHECK_FUNCTION_HEADER.
    """
    def Report(LogMsg, DbMsg):
        # Record the problem both in the caller's list and in the report DB.
        ErrorMsgList.append('Line %d : %s' % (StartLine, LogMsg))
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, DbMsg, TableName, CommentId)

    if not Str.startswith('/**'):
        Report('Comment does NOT have prefix /** ', 'Comment does NOT have prefix /** ')
    if not Str.endswith('**/'):
        Report('Comment does NOT have tail **/ ', 'Comment does NOT have tail **/ ')
    # Tag ordering: every @param must come before the first @retval.
    FirstRetvalIndex = Str.find('@retval')
    LastParamIndex = Str.rfind('@param')
    if (FirstRetvalIndex > 0) and (LastParamIndex > 0) and (FirstRetvalIndex < LastParamIndex):
        Report('@retval appear before @param ', 'in Comment, @retval appear before @param ')
+
def CheckFunctionHeaderConsistentWithDoxygenComment(FuncModifier, FuncHeader, FuncStartLine, CommentStr, CommentStartLine, ErrorMsgList, CommentId = -1, TableName = ''):
    """Check that a function's doxygen header comment matches its signature.

    Validates the general comment layout, then walks the @param tags in
    order against the function's parameter list: each tag must end with a
    newline, carry content, use an [in]/[out]/[in, out] marker matching the
    parameter's IN/OUT modifiers, and mention the parameter name.  Finally
    the tag count is reconciled with the parameter count and the (VOID or
    non-VOID) return type.  Problems are appended to ErrorMsgList and
    reported through PrintErrorMsg.
    """
    ParamList = GetParamList(FuncHeader)
    CheckGeneralDoxygenCommentLayout(CommentStr, CommentStartLine, ErrorMsgList, CommentId, TableName)
    DescriptionStr = CommentStr
    DoxygenStrList = GetDoxygenStrFromComment(DescriptionStr)
    if DescriptionStr.find('.') == -1:
        PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', TableName, CommentId)
    DoxygenTagNumber = len(DoxygenStrList)
    ParamNumber = len(ParamList)
    # A single 'VOID' parameter means the function takes no arguments.
    for Param in ParamList:
        if Param.Name.upper() == 'VOID' and ParamNumber == 1:
            ParamNumber -= 1
    Index = 0
    if ParamNumber > 0 and DoxygenTagNumber > 0:
        # Walk parameters and tags in lockstep.
        while Index < ParamNumber and Index < DoxygenTagNumber:
            ParamModifier = ParamList[Index].Modifier
            ParamName = ParamList[Index].Name.strip()
            Tag = DoxygenStrList[Index].strip(' ')
            # Each tag must end with a line break.
            if (not Tag[-1] == ('\n')) and (not Tag[-1] == ('\r')):
                ErrorMsgList.append('Line %d : in Comment, \"%s\" does NOT end with new line ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
                PrintErrorMsg(ERROR_HEADER_CHECK_FUNCTION, 'in Comment, \"%s\" does NOT end with new line ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)
            TagPartList = Tag.split()
            # A tag with only the '@param' word carries no description.
            if len(TagPartList) < 2:
                ErrorMsgList.append('Line %d : in Comment, \"%s\" does NOT contain doxygen contents ' % (CommentStartLine, Tag.replace('\n', '').replace('\r', '')))
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, \"%s\" does NOT contain doxygen contents ' % (Tag.replace('\n', '').replace('\r', '')), TableName, CommentId)
                Index += 1
                continue
            LBPos = Tag.find('[')
            RBPos = Tag.find(']')
            ParamToLBContent = Tag[len('@param'):LBPos].strip()
            # Only check the [...] marker when it directly follows '@param'.
            if LBPos > 0 and len(ParamToLBContent)==0 and RBPos > LBPos:
                # Build the expected marker text from the IN/OUT modifiers.
                InOutStr = ''
                ModifierPartList = ParamModifier.split()
                for Part in ModifierPartList:
                    if Part.strip() == 'IN':
                        InOutStr += 'in'
                    if Part.strip() == 'OUT':
                        if InOutStr != '':
                            InOutStr += ', out'
                        else:
                            InOutStr = 'out'

                if InOutStr != '':
                    if Tag.find('['+InOutStr+']') == -1:
                        ErrorMsgList.append('Line %d : in Comment, \"%s\" does NOT have %s ' % (CommentStartLine, (TagPartList[0] + ' ' +TagPartList[1]).replace('\n', '').replace('\r', ''), '['+InOutStr+']'))
                        PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, \"%s\" does NOT have %s ' % ((TagPartList[0] + ' ' +TagPartList[1]).replace('\n', '').replace('\r', ''), '['+InOutStr+']'), TableName, CommentId)
            # The tag text must mention the parameter name.
            if Tag.find(ParamName) == -1 and ParamName != 'VOID' and ParamName != 'void':
                ErrorMsgList.append('Line %d : in Comment, \"%s\" does NOT consistent with parameter name %s ' % (CommentStartLine, (TagPartList[0] + ' ' +TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName))
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'in Comment, \"%s\" does NOT consistent with parameter name %s ' % ((TagPartList[0] + ' ' +TagPartList[1]).replace('\n', '').replace('\r', ''), ParamName), TableName, CommentId)
            Index += 1

        if Index < ParamNumber:
            ErrorMsgList.append('Line %d : Number of doxygen tags in comment less than number of function parameters' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of doxygen tags in comment less than number of function parameters ', TableName, CommentId)
        # VOID return type, NOT VOID*. VOID* should be matched with a doxygen tag.
        if (FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1:

            # assume we allow a return description tag for void func. return. that's why 'DoxygenTagNumber - 1' is used instead of 'DoxygenTagNumber'
            if Index < DoxygenTagNumber - 1 or (Index < DoxygenTagNumber and DoxygenStrList[Index].startswith('@retval')):
                ErrorMsgList.append('Line %d : VOID return type need NO doxygen tags in comment' % CommentStartLine)
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type need no doxygen tags in comment ', TableName, CommentId)
        else:
            if Index < DoxygenTagNumber and not DoxygenStrList[Index].startswith('@retval') and not DoxygenStrList[Index].startswith('@return'):
                ErrorMsgList.append('Line %d : Number of @param doxygen tags in comment does NOT match number of function parameters' % CommentStartLine)
                PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'Number of @param doxygen tags in comment does NOT match number of function parameters ', TableName, CommentId)
    else:
        # No parameters or no tags at all: only count mismatches can occur.
        if ParamNumber == 0 and DoxygenTagNumber != 0 and ((FuncModifier.find('VOID') != -1 or FuncModifier.find('void') != -1) and FuncModifier.find('*') == -1):
            ErrorMsgList.append('Line %d : VOID return type need NO doxygen tags in comment' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'VOID return type need NO doxygen tags in comment ', TableName, CommentId)
        if ParamNumber != 0 and DoxygenTagNumber == 0:
            ErrorMsgList.append('Line %d : No doxygen tags in comment' % CommentStartLine)
            PrintErrorMsg(ERROR_DOXYGEN_CHECK_FUNCTION_HEADER, 'No doxygen tags in comment ', TableName, CommentId)
+
if __name__ == '__main__':
    # Standalone debug entry point: run the doxygen function-header check
    # over a single C source file and print every resulting message.
    import sys

    # FIX: a developer-machine absolute path was hard-coded here. Accept the
    # file to check as the first command-line argument; fall back to the
    # original path so existing invocation behavior is unchanged.
    if len(sys.argv) > 1:
        FileName = sys.argv[1]
    else:
        FileName = 'C:\\Combo\\R9\\LakeportX64Dev\\FlashDevicePkg\\Library\\SpiFlashChipM25P64\\SpiFlashChipM25P64.c'
    MsgList = CheckFuncHeaderDoxygenComments(FileName)
    for Msg in MsgList:
        print(Msg)
    print('Done!')
diff --git a/BaseTools/Source/Python/Ecc/config.ini b/BaseTools/Source/Python/Ecc/config.ini new file mode 100644 index 0000000000..a3215aedaa --- /dev/null +++ b/BaseTools/Source/Python/Ecc/config.ini @@ -0,0 +1,242 @@ +## @file
+# This file is used to set configuration of ECC tool
+# For the items listed below, 1 means valid, 0 means invalid
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+#
+# Identify the version of current configuration
+#
+Version = 0.1
+
+#
+# Identify whether to check all items
+# 1 - Check all items and ignore all other detailed items
+# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
+#
+CheckAll = 0
+
+#
+# Identify whether to automatically correct mistakes
+# 1 - Automatically correct
+# 0 - Not automatically correct
+# Only the following check points can be automatically corrected, others not listed below are not supported even it is 1
+#
+# GeneralCheckTab
+# GeneralCheckIndentation
+# GeneralCheckLine
+# GeneralCheckCarriageReturn
+# SpaceCheckAll
+#
+AutoCorrect = 1
+
+#
+# List customized Modifiers here, split with ','
+#
+ModifierList = IN, OUT, OPTIONAL, UNALIGNED, EFI_RUNTIMESERVICE, EFI_BOOTSERVICE, EFIAPI, TPMINTERNALAPI
+
+#
+# General Checking
+#
+GeneralCheckAll = 0
+
+# Check whether NO Tab is used, replaced with spaces
+GeneralCheckNoTab = 1
+# The width of Tab
+GeneralCheckTabWidth = 2
+# Check whether the indentation is followed coding style
+GeneralCheckIndentation = 1
+# The width of indentation
+GeneralCheckIndentationWidth = 2
+# Check whether no line is exceeding defined width
+GeneralCheckLine = 1
+# The width of a line
+GeneralCheckLineWidth = 120
+# Check whether no use of _asm in the source file
+GeneralCheckNo_Asm = 1
+# Check whether no use of "#pragma" in source file except "#pragma pack(#)".
+GeneralCheckNoProgma = 1
+# Check whether there is a carriage return at the end of the file
+GeneralCheckCarriageReturn = 1
+# Check whether the file exists
+GeneralCheckFileExistence = 1
+
+#
+# Space Checking
+#
+SpaceCheckAll = 1
+
+#
+# Predicate Expression Checking
+#
+PredicateExpressionCheckAll = 0
+
+# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
+PredicateExpressionCheckBooleanValue = 1
+# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+PredicateExpressionCheckNonBooleanOperator = 1
+# Check whether a comparison of any pointer to zero must be done via the NULL type
+PredicateExpressionCheckComparisonNullType = 1
+
+#
+# Headers Checking
+#
+HeaderCheckAll = 0
+
+# Check whether File header exists
+HeaderCheckFile = 1
+# Check whether Function header exists
+HeaderCheckFunction = 1
+
+#
+# C Function Layout Checking
+#
+CFunctionLayoutCheckAll = 0
+
+# Check whether return type exists and in the first line
+CFunctionLayoutCheckReturnType = 1
+# Check whether any optional functional modifiers exist and next to the return type
+CFunctionLayoutCheckOptionalFunctionalModifier = 1
+# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
+# Check whether the closing parenthesis is on its own line and also indented two spaces
+CFunctionLayoutCheckFunctionName = 1
+# Check whether the function prototypes in include files have the same form as function definitions
+CFunctionLayoutCheckFunctionPrototype = 1
+# Check whether the body of a function is contained by open and close braces that must be in the first column
+CFunctionLayoutCheckFunctionBody = 1
+# Check whether the data declarations is the first code in a module.
+CFunctionLayoutCheckDataDeclaration = 1
+# Check whether no initialization of a variable as part of its declaration
+CFunctionLayoutCheckNoInitOfVariable = 1
+# Check whether no use of STATIC for functions
+CFunctionLayoutCheckNoStatic = 1
+
+#
+# Include Files Checking
+#
+IncludeFileCheckAll = 0
+
+#Check whether having include files with same name
+IncludeFileCheckSameName = 1
+# Check whether all include file contents is guarded by a #ifndef statement.
+# the #ifndef must be the first line of code following the file header comment
+# the #endif must appear on the last line in the file
+IncludeFileCheckIfndefStatement = 1
+# Check whether include files contain only public or only private data
+# Check whether include files NOT contain code or define data variables
+IncludeFileCheckData = 1
+
+#
+# Declarations and Data Types Checking
+#
+DeclarationDataTypeCheckAll = 0
+
+# Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
+DeclarationDataTypeCheckNoUseCType = 1
+# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
+DeclarationDataTypeCheckInOutModifier = 1
+# Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
+DeclarationDataTypeCheckEFIAPIModifier = 1
+# Check whether Enumerated Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckEnumeratedType = 1
+# Check whether Structure Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckStructureDeclaration = 1
+# Check whether having same Structure
+DeclarationDataTypeCheckSameStructure = 1
+# Check whether Union Type has a 'typedef' and the name is capital
+DeclarationDataTypeCheckUnionType = 1
+
+
+#
+# Naming Conventions Checking
+#
+NamingConventionCheckAll = 0
+
+# Check whether only capital letters are used for #define declarations
+NamingConventionCheckDefineStatement = 1
+# Check whether only capital letters are used for typedef declarations
+NamingConventionCheckTypedefStatement = 1
+# Check whether the #ifndef at the start of an include file uses both prefix and postfix underscore characters, '_'.
+NamingConventionCheckIfndefStatement = 1
+# Rule for path name, variable name and function name
+# 1. First character should be upper case
+# 2. Existing lower case in a word
+# 3. No space existence
+# 4. Global variable name must start by a 'g'
+# Check whether the path name followed the rule
+NamingConventionCheckPathName = 1
+# Check whether the variable name followed the rule
+NamingConventionCheckVariableName = 1
+# Check whether the function name followed the rule
+NamingConventionCheckFunctionName = 1
+# Check whether NO use short variable name with single character
+NamingConventionCheckSingleCharacterVariable = 1
+
+#
+# Doxygen Checking
+#
+DoxygenCheckAll = 0
+
+# Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
+DoxygenCheckFileHeader = 1
+# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
+DoxygenCheckFunctionHeader = 1
+# Check whether the first line of text in a comment block is a brief description of the element being documented.
+# The brief description must end with a period.
+DoxygenCheckCommentDescription = 1
+# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
+DoxygenCheckCommentFormat = 1
+# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
+DoxygenCheckCommand = 1
+
+#
+# Meta-Data File Processing Checking
+#
+MetaDataFileCheckAll = 0
+
+# Check whether each file defined in meta-data exists
+MetaDataFileCheckPathName = 1
+# Generate a list for all files defined in meta-data files
+MetaDataFileCheckGenerateFileList = 1
+# The path of log file
+MetaDataFileCheckPathOfGenerateFileList = File.log
+# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+# Each Library Instance must specify the Supported Module Types in its INF file,
+# and any module specifying the library instance must be one of the supported types.
+MetaDataFileCheckLibraryInstance = 1
+# Check whether a Library Instance has been defined for all dependent library classes
+MetaDataFileCheckLibraryInstanceDependent = 1
+# Check whether the Library Instances specified by the LibraryClasses sections are listed in order of dependencies
+MetaDataFileCheckLibraryInstanceOrder = 1
+# Check whether the unnecessary inclusion of library classes in the INF file
+MetaDataFileCheckLibraryNoUse = 1
+# Check whether an INF file is specified in the FDF file, but not in the DSC file, then the INF file must be for a Binary module only
+MetaDataFileCheckBinaryInfInFdf = 1
+# Not to report error and warning related OS include file such as "windows.h" and "stdio.h".
+# Check whether a PCD is set in a DSC file or the FDF file, but not in both.
+MetaDataFileCheckPcdDuplicate = 1
+# Check whether PCD settings in the FDF file can only be related to flash.
+MetaDataFileCheckPcdFlash = 1
+# Check whether PCDs used in INF files but not specified in DSC or FDF files
+MetaDataFileCheckPcdNoUse = 1
+# Check whether having duplicate guids defined for Guid/Protocol/Ppi
+MetaDataFileCheckGuidDuplicate = 1
+# Check whether all files under module directory are described in INF files
+MetaDataFileCheckModuleFileNoUse = 1
+# Check whether the PCD is correctly used in C function via its type
+MetaDataFileCheckPcdType = 1
+
+#
+# The check points in this section are reserved
+#
+# GotoStatementCheckAll = 0
+# SpellingCheckAll = 0
+#
diff --git a/BaseTools/Source/Python/Ecc/exception.xml b/BaseTools/Source/Python/Ecc/exception.xml new file mode 100644 index 0000000000..0dc67527b5 --- /dev/null +++ b/BaseTools/Source/Python/Ecc/exception.xml @@ -0,0 +1,310 @@ +<ExceptionList xmlns="http://www.uefi.org/2008/2.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_break</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalX86DisablePaging32</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalX86EnablePaging32</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>InternalLongJump</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>SetJump</KeyWord>
+ <ErrorID>4002</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead8</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite8</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead16</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite16</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoRead32</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>IoWrite32</KeyWord>
+ <ErrorID>5001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>5003</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__debugbreak</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__readmsr</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>__writemsr</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedCompareExchange64</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedDecrement</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_InterlockedIncrement</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inp</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpw</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_inpd</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outp</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpw</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_outpd</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_ReadWriteBarrier</KeyWord>
+ <ErrorID>7001</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R1</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R2</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@Rx</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>@R2.</KeyWord>
+ <ErrorID>9005</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>_DriverUnloadHandler</KeyWord>
+ <ErrorID>8006</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>ASSERT</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE_WITH_EXTENDED_DATA</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+ <Exception>
+ <KeyWord>REPORT_STATUS_CODE_WITH_DEVICE_PATH</KeyWord>
+ <ErrorID>10015</ErrorID>
+ </Exception>
+</ExceptionList>
\ No newline at end of file diff --git a/BaseTools/Source/Python/Fdb/__init__.py b/BaseTools/Source/Python/Fdb/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/Fdb/__init__.py diff --git a/BaseTools/Source/Python/FixFlash/__init__.py b/BaseTools/Source/Python/FixFlash/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/FixFlash/__init__.py diff --git a/BaseTools/Source/Python/GNUmakefile b/BaseTools/Source/Python/GNUmakefile new file mode 100644 index 0000000000..aa569b3624 --- /dev/null +++ b/BaseTools/Source/Python/GNUmakefile @@ -0,0 +1,6 @@ + +all: + +clean: + find . -name '*.pyc' -exec rm '{}' ';' + diff --git a/BaseTools/Source/Python/GenFds/AprioriSection.py b/BaseTools/Source/Python/GenFds/AprioriSection.py new file mode 100644 index 0000000000..92a9794f51 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/AprioriSection.py @@ -0,0 +1,118 @@ +## @file
+# process APRIORI file data and generate PEI/DXE APRIORI file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from struct import *
+import os
+import StringIO
+import FfsFileStatement
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import AprioriSectionClassObject
+from Common.String import *
+from Common.Misc import SaveFileOnChange,PathClass
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## process APRIORI file data and generate PEI/DXE APRIORI file
+#
+#
class AprioriSection (AprioriSectionClassObject):
    """Process APRIORI file data and generate the PEI/DXE a-priori FFS file."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        AprioriSectionClassObject.__init__(self)
        # "PEI" selects the PEI a-priori file GUID; any other value gets DXE
        self.AprioriType = ""

    ## GenFfs() method
    #
    #   Generate FFS for the APRIORI file
    #
    #   @param  self        The object pointer
    #   @param  FvName      for whom apriori file generated
    #   @param  Dict        dictionary contains macro and its value
    #   @retval string      Generated file name
    #
    def GenFfs (self, FvName, Dict = None):
        # FIX: the former mutable default argument (Dict = {}) was mutated
        # below via Dict.update(), so macro definitions leaked across calls
        # that relied on the default. Create a fresh dict per call instead;
        # callers that pass their own Dict see identical behavior.
        if Dict is None:
            Dict = {}
        DXE_GUID = "FC510EE7-FFDC-11D4-BD41-0080C73C8881"
        PEI_GUID = "1B45CC0A-156A-428A-AF62-49864DA0E6E6"
        Buffer = StringIO.StringIO('')
        AprioriFileGuid = DXE_GUID
        if self.AprioriType == "PEI":
            AprioriFileGuid = PEI_GUID
        OutputAprFilePath = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, \
                                          GenFdsGlobalVariable.FfsDir,\
                                          AprioriFileGuid + FvName)
        if not os.path.exists(OutputAprFilePath) :
            os.makedirs(OutputAprFilePath)

        OutputAprFileName = os.path.join( OutputAprFilePath, \
                                          AprioriFileGuid + FvName + '.Apri' )
        AprFfsFileName = os.path.join (OutputAprFilePath,\
                                       AprioriFileGuid + FvName + '.Ffs')

        Dict.update(self.DefineVarDict)
        for FfsObj in self.FfsList :
            Guid = ""
            if isinstance(FfsObj, FfsFileStatement.FileStatement):
                Guid = FfsObj.NameGuid
            else:
                InfFileName = NormPath(FfsObj.InfFileName)
                Arch = FfsObj.GetCurrentArch()

                if Arch != None:
                    Dict['$(ARCH)'] = Arch
                InfFileName = GenFdsGlobalVariable.MacroExtend(InfFileName, Dict, Arch)

                if Arch != None:
                    Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), Arch]
                    Guid = Inf.Guid
                else:
                    Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClass(InfFileName, GenFdsGlobalVariable.WorkSpaceDir), 'COMMON']
                    Guid = Inf.Guid

                    self.BinFileList = Inf.Module.Binaries
                    if self.BinFileList == []:
                        EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
                                        "INF %s not found in build ARCH %s!" \
                                        % (InfFileName, GenFdsGlobalVariable.ArchList))

            # Serialize the registry-format GUID string in EFI_GUID binary
            # layout: one 32-bit field, two 16-bit fields, then 8 raw bytes.
            GuidPart = Guid.split('-')
            Buffer.write(pack('I', long(GuidPart[0], 16)))
            Buffer.write(pack('H', int(GuidPart[1], 16)))
            Buffer.write(pack('H', int(GuidPart[2], 16)))

            for Num in range(2):
                Char = GuidPart[3][Num*2:Num*2+2]
                Buffer.write(pack('B', int(Char, 16)))

            for Num in range(6):
                Char = GuidPart[4][Num*2:Num*2+2]
                Buffer.write(pack('B', int(Char, 16)))

        SaveFileOnChange(OutputAprFileName, Buffer.getvalue())

        # Wrap the raw GUID list into a RAW section, then into a FREEFORM FFS
        RawSectionFileName = os.path.join( OutputAprFilePath, \
                                           AprioriFileGuid + FvName + '.raw' )
        GenFdsGlobalVariable.GenerateSection(RawSectionFileName, [OutputAprFileName], 'EFI_SECTION_RAW')
        GenFdsGlobalVariable.GenerateFfs(AprFfsFileName, [RawSectionFileName],
                                         'EFI_FV_FILETYPE_FREEFORM', AprioriFileGuid)

        return AprFfsFileName
+
diff --git a/BaseTools/Source/Python/GenFds/Attribute.py b/BaseTools/Source/Python/GenFds/Attribute.py new file mode 100644 index 0000000000..67f9956e1d --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Attribute.py @@ -0,0 +1,28 @@ +## @file
+# name value pair
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+
+## name value pair
+#
+#
class Attribute:
    """Simple name/value pair."""

    ## The constructor
    #
    #   @param  self    The object pointer
    #   @param  Name    Optional initial name (defaults to None, matching the
    #                   previous zero-argument construction behavior)
    #   @param  Value   Optional initial value (defaults to None)
    #
    def __init__(self, Name=None, Value=None):
        self.Name = Name
        self.Value = Value
\ No newline at end of file diff --git a/BaseTools/Source/Python/GenFds/Capsule.py b/BaseTools/Source/Python/GenFds/Capsule.py new file mode 100644 index 0000000000..7f17fcda68 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Capsule.py @@ -0,0 +1,89 @@ +## @file
+# generate capsule
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import CapsuleClassObject
+import os
+import subprocess
+import StringIO
+from Common.Misc import SaveFileOnChange
+
+
+T_CHAR_LF = '\n'
+
+## create inf file describes what goes into capsule and call GenFv to generate capsule
+#
+#
class Capsule (CapsuleClassObject) :
    """Build the inf file that describes capsule contents and drive GenFv."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        CapsuleClassObject.__init__(self)
        # Both values are consumed by the GenFv tool when building the capsule
        self.BlockSize = None
        self.BlockNum = None

    ## Generate capsule
    #
    #   @param  self        The object pointer
    #
    def GenCapsule(self):
        # Start from the [options] section produced by GenCapInf(), then
        # append one EFI_FILE_NAME entry per capsule data item.
        InfBuffer = self.GenCapInf()
        InfBuffer.writelines("[files]" + T_CHAR_LF)

        for DataObj in self.CapsuleDataList :
            InfBuffer.writelines("EFI_FILE_NAME = " + DataObj.GenCapsuleSubItem() + T_CHAR_LF)

        SaveFileOnChange(self.CapInfFileName, InfBuffer.getvalue(), False)
        InfBuffer.close()

        # Hand the finished inf file to GenFv to produce the .Cap image
        CapOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiCapsuleName) + '.Cap'
        GenFdsGlobalVariable.GenerateFirmwareVolume(
                CapOutputFile,
                [self.CapInfFileName],
                Capsule=True
                )
        GenFdsGlobalVariable.SharpCounter = 0

    ## Generate inf file for capsule
    #
    #   @param  self        The object pointer
    #   @retval file        in-memory inf file object
    #
    def GenCapInf(self):
        self.CapInfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
                                           self.UiCapsuleName + "_Cap" + '.inf')
        InfBuffer = StringIO.StringIO()

        InfBuffer.writelines("[options]" + T_CHAR_LF)

        # Emit each token as "EFI_<NAME> = <value>"
        for Key in self.TokensDict.keys():
            InfBuffer.writelines("EFI_" + Key + ' = ' + self.TokensDict.get(Key) + T_CHAR_LF)

        return InfBuffer
diff --git a/BaseTools/Source/Python/GenFds/CapsuleData.py b/BaseTools/Source/Python/GenFds/CapsuleData.py new file mode 100644 index 0000000000..db29737963 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/CapsuleData.py @@ -0,0 +1,84 @@ +## @file
+# generate capsule
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Ffs
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import StringIO
+
+## base class for capsule data
+#
+#
class CapsuleData:
    """Abstract base for items that can be placed into a capsule."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # No state of its own; subclasses define their payload fields.
        pass

    ## generate capsule data
    #
    #   Subclasses override this to produce their file; the base returns None.
    #
    #   @param  self        The object pointer
    #
    def GenCapsuleSubItem(self):
        pass
+
+## FFS class for capsule data
+#
+#
class CapsuleFfs (CapsuleData):
    """FFS file entry for capsule data."""

    ## The constructor
    #
    # FIX: this method was previously misspelled "__init_" (one trailing
    # underscore), so Python never treated it as the constructor and
    # self.Ffs was never initialized on construction.
    #
    #   @param  self        The object pointer
    #
    def __init__(self) :
        # FFS statement object whose GenFfs() builds the actual file
        self.Ffs = None

    ## generate FFS capsule data
    #
    #   @param  self        The object pointer
    #   @retval string      Generated file name
    #
    def GenCapsuleSubItem(self):
        FfsFile = self.Ffs.GenFfs()
        return FfsFile
+
+## FV class for capsule data
+#
+#
class CapsuleFv (CapsuleData):
    """Firmware-volume entry for capsule data."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self) :
        # Either a bare FV name declared in the FDF, or a path ending in .fv
        self.FvName = None

    ## generate FV capsule data
    #
    #   @param  self        The object pointer
    #   @retval string      Generated file name
    #
    def GenCapsuleSubItem(self):
        if self.FvName.find('.fv') != -1:
            # Already a .fv file reference: only expand workspace macros.
            return GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)

        FvDict = GenFdsGlobalVariable.FdfParser.Profile.FvDict
        Key = self.FvName.upper()
        if Key in FvDict.keys():
            # Named FV defined in the FDF: build it into a fresh buffer.
            FvObj = FvDict.get(Key)
            return FvObj.AddToBuffer(StringIO.StringIO(''))
        # Neither a .fv path nor a known FV name: fall through (None),
        # matching the original control flow.
diff --git a/BaseTools/Source/Python/GenFds/ComponentStatement.py b/BaseTools/Source/Python/GenFds/ComponentStatement.py new file mode 100644 index 0000000000..8a7540fe25 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/ComponentStatement.py @@ -0,0 +1,29 @@ +## @file
+# VTF components
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from CommonDataClass.FdfClass import ComponentStatementClassObject
+
+## VTF components
+#
+#
class ComponentStatement (ComponentStatementClassObject) :
    """VTF component statement; all fields come from the shared
    ComponentStatementClassObject base in CommonDataClass.FdfClass."""
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # No fields of its own; defer entirely to the base-class initializer.
        ComponentStatementClassObject.__init__(self)
diff --git a/BaseTools/Source/Python/GenFds/CompressSection.py b/BaseTools/Source/Python/GenFds/CompressSection.py new file mode 100644 index 0000000000..4a32ea4458 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/CompressSection.py @@ -0,0 +1,87 @@ +## @file
+# process compress section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from Ffs import Ffs
+import Section
+import subprocess
+import os
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import CompressSectionClassObject
+
+## generate compress section
+#
+#
class CompressSection (CompressSectionClassObject) :
    """Generate a compressed FFS section from its child sections."""

    ## compress types: PI standard and non PI standard
    CompTypeDict = {
        'PI_STD'     : 'PI_STD',
        'NON_PI_STD' : 'NON_PI_STD'
    }

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        CompressSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate compressed section
    #
    #   @param  self        The object pointer
    #   @param  OutputPath      Where to place output file
    #   @param  ModuleName      Which module this section belongs to
    #   @param  SecNum          Index of section
    #   @param  KeyStringList   Filter for inputs of section generation
    #   @param  FfsInf          FfsInfStatement object that contains this section data
    #   @param  Dict            dictionary contains macro and its value
    #   @retval tuple           (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):

        if FfsInf is not None:
            # Expand any module-scoped macros in our attributes first
            self.CompType  = FfsInf.__ExtendMacro__(self.CompType)
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)

        # Generate every child section, collecting the produced file names
        ChildFiles = []
        for Idx, Sect in enumerate(self.SectionList, 1):
            ChildSecNum = '%s.%d' % (SecNum, Idx)
            SectList, AlignValue = Sect.GenSection(OutputPath, ModuleName, ChildSecNum,
                                                   KeyStringList, FfsInf, Dict)
            ChildFiles.extend(SectList)

        OutputFile = os.path.normpath(OutputPath + os.sep + ModuleName + 'SEC'
                                      + SecNum + Ffs.SectionSuffix['COMPRESS'])

        GenFdsGlobalVariable.GenerateSection(OutputFile, tuple(ChildFiles),
                                             Section.Section.SectionType['COMPRESS'],
                                             CompressionType=self.CompTypeDict[self.CompType])
        return [OutputFile], self.Alignment
+
+
diff --git a/BaseTools/Source/Python/GenFds/DataSection.py b/BaseTools/Source/Python/GenFds/DataSection.py new file mode 100644 index 0000000000..7f24b51fc3 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/DataSection.py @@ -0,0 +1,109 @@ +## @file
+# process data section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import subprocess
+from Ffs import Ffs
+import os
+from CommonDataClass.FdfClass import DataSectionClassObject
+import shutil
+
+## generate data section
+#
+#
class DataSection (DataSectionClassObject):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        DataSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate a data section from a pre-built binary file
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  keyStringList  Filter for inputs of section generation
    #   @param  FfsFile     FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None):
        # Use a fresh dictionary per call instead of a shared mutable default.
        if Dict is None:
            Dict = {}
        #
        # Prepare the parameter of GenSection; both branches start with the
        # same workspace-macro replacement, so it is hoisted out.
        #
        self.SectFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.SectFileName)
        if FfsFile is not None:
            self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict, FfsFile.CurrentArch)
        else:
            self.SectFileName = GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)

        # If the file cannot be found as given, assume it is relative to the
        # workspace directory.
        if not os.path.exists(self.SectFileName):
            self.SectFileName = os.path.join(GenFdsGlobalVariable.WorkSpaceDir,
                                             self.SectFileName)

        # Copy the map file (if any) next to the FFS output, but only when it
        # is missing or older than the source map file.
        Filename = GenFdsGlobalVariable.MacroExtend(self.SectFileName)
        if Filename.endswith('.efi'):
            MapFile = Filename.replace('.efi', '.map')
            if os.path.exists(MapFile):
                CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
                if not os.path.exists(CopyMapFile) or \
                   (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
                    shutil.copyfile(MapFile, CopyMapFile)

        # Relocation info of TE/PE32 images is stripped unless the section
        # explicitly asks to keep it.
        NoStrip = True
        if self.SecType in ('TE', 'PE32'):
            if self.KeepReloc is not None:
                NoStrip = self.KeepReloc

        if not NoStrip:
            FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
            if not os.path.exists(FileBeforeStrip) or \
               (os.path.getmtime(self.SectFileName) > os.path.getmtime(FileBeforeStrip)):
                shutil.copyfile(self.SectFileName, FileBeforeStrip)
            StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
            GenFdsGlobalVariable.GenerateFirmwareImage(
                    StrippedFile,
                    [GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
                    Strip=True
                    )
            self.SectFileName = StrippedFile

        # TE images are produced from the PE32 input by GenFw.
        if self.SecType == 'TE':
            TeFile = os.path.join(OutputPath, ModuleName + 'Te.raw')
            GenFdsGlobalVariable.GenerateFirmwareImage(
                    TeFile,
                    [GenFdsGlobalVariable.MacroExtend(self.SectFileName, Dict)],
                    Type='te'
                    )
            self.SectFileName = TeFile

        OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get(self.SecType))
        OutputFile = os.path.normpath(OutputFile)

        GenFdsGlobalVariable.GenerateSection(OutputFile, [self.SectFileName], Section.Section.SectionType.get(self.SecType))
        FileList = [OutputFile]
        return FileList, self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/DepexSection.py b/BaseTools/Source/Python/GenFds/DepexSection.py new file mode 100644 index 0000000000..1c8c82a72e --- /dev/null +++ b/BaseTools/Source/Python/GenFds/DepexSection.py @@ -0,0 +1,102 @@ +## @file
+# process depex section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import subprocess
+from Ffs import Ffs
+import os
+from CommonDataClass.FdfClass import DepexSectionClassObject
+from AutoGen.GenDepex import DependencyExpression
+import shutil
+from Common import EdkLogger
+from Common.BuildToolError import *
+
## generate depex section
#
#
class DepexSection (DepexSectionClassObject):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        DepexSectionClassObject.__init__(self)

    ## Look up the GUID value registered for a C name.
    #
    #   Searches every package in the workspace database, checking the PPI
    #   namespace first, then protocols, then GUIDs.
    #
    #   @param  self        The object pointer
    #   @param  CName       C name of the PPI/protocol/GUID
    #   @retval string      GUID value, or None when not found
    #
    def __FindGuidValue(self, CName):
        # The package database is not arch-specific here, so a single pass
        # over the package list suffices; the previous per-arch loop repeated
        # the identical scan once per architecture.  An empty arch list still
        # yields None, matching the old behavior.
        if not GenFdsGlobalVariable.ArchList:
            return None
        for PkgDb in GenFdsGlobalVariable.WorkSpace.PackageList:
            if CName in PkgDb.Ppis:
                return PkgDb.Ppis[CName]
            if CName in PkgDb.Protocols:
                return PkgDb.Protocols[CName]
            if CName in PkgDb.Guids:
                return PkgDb.Guids[CName]
        return None

    ## GenSection() method
    #
    #   Generate a depex section from the dependency expression
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  keyStringList  Filter for inputs of section generation
    #   @param  FfsFile     FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, keyStringList, FfsFile = None, Dict = None):
        # Use a fresh dictionary per call instead of a shared mutable default.
        if Dict is None:
            Dict = {}

        self.Expression = self.Expression.replace("\n", " ").replace("\r", " ")
        ExpList = self.Expression.split()
        ExpGuidDict = {}

        for Exp in ExpList:
            # Every token that is not a depex opcode must be a GUID C name.
            if Exp.upper() not in ('AND', 'OR', 'NOT', 'TRUE', 'FALSE', 'SOR', 'BEFORE', 'AFTER', 'END'):
                GuidStr = self.__FindGuidValue(Exp)
                if GuidStr is None:
                    EdkLogger.error("GenFds", RESOURCE_NOT_AVAILABLE,
                                    "Depex GUID %s could not be found in build DB! (ModuleName: %s)" % (Exp, ModuleName))

                ExpGuidDict[Exp] = GuidStr

        # NOTE(review): plain str.replace() could also rewrite a C name that
        # happens to be a substring of another; token-wise replacement would
        # be safer, but behavior is kept as-is.
        for Item in ExpGuidDict:
            self.Expression = self.Expression.replace(Item, ExpGuidDict[Item])

        self.Expression = self.Expression.strip()

        # Choose the module type for GenDepex; PEI-phase depex implies PEIM,
        # SMM overrides everything, everything else is treated as DXE driver.
        if self.DepexType.startswith('PEI'):
            ModuleType = 'PEIM'
        else:
            ModuleType = 'DXE_DRIVER'
        if self.DepexType.startswith('SMM'):
            ModuleType = 'SMM_DRIVER'

        InputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + '.dpx')
        InputFile = os.path.normpath(InputFile)

        Dpx = DependencyExpression(self.Expression, ModuleType)
        Dpx.Generate(InputFile)

        # SMM depex files use the '.smm' extension; all others use '.depex'.
        if self.DepexType.startswith('SMM'):
            OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + '.smm')
        else:
            OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + '.depex')
        OutputFile = os.path.normpath(OutputFile)

        if self.DepexType.startswith('SMM'):
            SecType = 'SMM_DEPEX'
        elif self.DepexType.startswith('PEI'):
            SecType = 'PEI_DEPEX'
        else:
            SecType = 'DXE_DEPEX'

        GenFdsGlobalVariable.GenerateSection(OutputFile, [InputFile], Section.Section.SectionType.get(SecType))
        FileList = [OutputFile]
        return FileList, self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py new file mode 100644 index 0000000000..2c112ed5cb --- /dev/null +++ b/BaseTools/Source/Python/GenFds/EfiSection.py @@ -0,0 +1,262 @@ +## @file
+# process rule section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import subprocess
+from Ffs import Ffs
+import os
+from CommonDataClass.FdfClass import EfiSectionClassObject
+import shutil
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## generate rule section
+#
+#
class EfiSection (EfiSectionClassObject):

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        EfiSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate rule section: converts a module's output file(s) into one or
    #   more section files according to the rule's section type.
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  KeyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}) :

        # A file name of the form PCD(...) is resolved to the PCD's value.
        if self.FileName != None and self.FileName.startswith('PCD('):
            self.FileName = GenFdsGlobalVariable.GetPcdValue(self.FileName)
        """Prepare the parameter of GenSection"""
        if FfsInf != None :
            InfFileName = FfsInf.InfFileName
            # All rule values may contain $(MACRO) references resolved
            # against the owning INF statement.
            SectionType = FfsInf.__ExtendMacro__(self.SectionType)
            Filename = FfsInf.__ExtendMacro__(self.FileName)
            BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
            StringData = FfsInf.__ExtendMacro__(self.StringData)
            # Decide whether relocation info is stripped from TE/PE32 images
            # of pre-memory module types; INF settings win over rule settings.
            NoStrip = True
            if FfsInf.ModuleType in ('SEC', 'PEI_CORE', 'PEIM') and SectionType in ('TE', 'PE32'):
                if FfsInf.KeepReloc != None:
                    NoStrip = FfsInf.KeepReloc
                elif FfsInf.KeepRelocFromRule != None:
                    NoStrip = FfsInf.KeepRelocFromRule
                elif self.KeepReloc != None:
                    NoStrip = self.KeepReloc
                elif FfsInf.ShadowFromInfFile != None:
                    NoStrip = FfsInf.ShadowFromInfFile
        else:
            EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s apply rule for None!" %ModuleName)

        """If the file name was pointed out, add it in FileList"""
        FileList = []
        if Filename != None:
            Filename = GenFdsGlobalVariable.MacroExtend(Filename, Dict)
            # A non-optional file is always listed; an optional one only
            # when it actually exists on disk.
            if not self.Optional:
                FileList.append(Filename)
            elif os.path.exists(Filename):
                FileList.append(Filename)
        else:
            # No explicit file: collect the module's outputs by file type or
            # extension.  When they are already sections, return them as-is.
            FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FileType, self.FileExtension, Dict)
            if IsSect :
                return FileList, self.Alignment

        Index = 0

        """ If Section type is 'VERSION'"""
        OutputFileList = []
        if SectionType == 'VERSION':

            # A VERSION value in the INF overrides the rule's value.
            InfOverrideVerString = False
            if FfsInf.Version != None:
                #StringData = FfsInf.Version
                BuildNum = FfsInf.Version
                InfOverrideVerString = True

            if InfOverrideVerString:
                #VerTuple = ('-n', '"' + StringData + '"')
                if BuildNum != None and BuildNum != '':
                    BuildNumTuple = ('-j', BuildNum)
                else:
                    BuildNumTuple = tuple()

                Num = SecNum
                OutputFile = os.path.join( OutputPath, ModuleName + 'SEC' + str(Num) + Ffs.SectionSuffix.get(SectionType))
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                     #Ui=StringData,
                                                     Ver=BuildNum)
                OutputFileList.append(OutputFile)

            elif FileList != []:
                # Each listed file contains the version string to embed.
                for File in FileList:
                    Index = Index + 1
                    Num = '%s.%d' %(SecNum , Index)
                    OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + Num + Ffs.SectionSuffix.get(SectionType))
                    f = open(File, 'r')
                    VerString = f.read()
                    f.close()
#                   VerTuple = ('-n', '"' + VerString + '"')
                    BuildNum = VerString
                    # NOTE(review): BuildNumTuple computed here is never used.
                    if BuildNum != None and BuildNum != '':
                        BuildNumTuple = ('-j', BuildNum)
                    GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                         #Ui=VerString,
                                                         Ver=BuildNum)
                    OutputFileList.append(OutputFile)

            else:
#               if StringData != None and len(StringData) > 0:
#                   VerTuple = ('-n', '"' + StringData + '"')
#               else:
#                   VerTuple = tuple()
#               VerString = ' ' + ' '.join(VerTuple)
                # Fall back to the rule's literal string value.
                BuildNum = StringData
                if BuildNum != None and BuildNum != '':
                    BuildNumTuple = ('-j', BuildNum)
                else:
                    BuildNumTuple = tuple()
                BuildNumString = ' ' + ' '.join(BuildNumTuple)

                #if VerString == '' and
                if BuildNumString == '':
                    # No version value anywhere: optional sections are simply
                    # skipped, otherwise this is a build error.
                    if self.Optional == True :
                        GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
                        return [], None
                    else:
                        EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss Version Section value" %InfFileName)
                Num = SecNum
                OutputFile = os.path.join( OutputPath, ModuleName + 'SEC' + str(Num) + Ffs.SectionSuffix.get(SectionType))
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_VERSION',
                                                     #Ui=VerString,
                                                     Ver=BuildNum)
                OutputFileList.append(OutputFile)

        #
        # If Section Type is 'UI'
        #
        elif SectionType == 'UI':

            # A UI value in the INF overrides the rule's value.
            InfOverrideUiString = False
            if FfsInf.Ui != None:
                StringData = FfsInf.Ui
                InfOverrideUiString = True

            if InfOverrideUiString:
                Num = SecNum
                OutputFile = os.path.join( OutputPath, ModuleName + 'SEC' + str(Num) + Ffs.SectionSuffix.get(SectionType))
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                     Ui=StringData)
                OutputFileList.append(OutputFile)

            elif FileList != []:
                # Each listed file contains the UI string to embed.
                for File in FileList:
                    Index = Index + 1
                    Num = '%s.%d' %(SecNum , Index)
                    OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + Num + Ffs.SectionSuffix.get(SectionType))
                    f = open(File, 'r')
                    UiString = f.read()
                    f.close()
                    GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                         Ui=UiString)
                    OutputFileList.append(OutputFile)
            else:
                if StringData != None and len(StringData) > 0:
                    UiTuple = ('-n', '"' + StringData + '"')
                else:
                    UiTuple = tuple()

                    # NOTE(review): this missing-value check only runs when
                    # StringData is empty, and it returns ('', None) while the
                    # sibling paths return ([], None) — likely an
                    # inconsistency callers iterating the result should know.
                    if self.Optional == True :
                        GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
                        return '', None
                    else:
                        EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss UI Section value" %InfFileName)

                Num = SecNum
                OutputFile = os.path.join( OutputPath, ModuleName + 'SEC' + str(Num) + Ffs.SectionSuffix.get(SectionType))
                GenFdsGlobalVariable.GenerateSection(OutputFile, [], 'EFI_SECTION_USER_INTERFACE',
                                                     Ui=StringData)
                OutputFileList.append(OutputFile)


        else:
            """If File List is empty"""
            if FileList == [] :
                if self.Optional == True:
                    GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
                    return [], None
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "Output file for %s section could not be found for %s" % (SectionType, InfFileName))

            else:
                """Convert the File to Section file one by one """
                for File in FileList:
                    """ Copy Map file to FFS output path """
                    Index = Index + 1
                    Num = '%s.%d' %(SecNum , Index)
                    OutputFile = os.path.join( OutputPath, ModuleName + 'SEC' + Num + Ffs.SectionSuffix.get(SectionType))
                    File = GenFdsGlobalVariable.MacroExtend(File, Dict)
                    # Keep the .map file next to the FFS output when the
                    # input is an .efi image.
                    if File[(len(File)-4):] == '.efi':
                        MapFile = File.replace('.efi', '.map')
                        if os.path.exists(MapFile):
                            CopyMapFile = os.path.join(OutputPath, ModuleName + '.map')
                            if not os.path.exists(CopyMapFile) or \
                               (os.path.getmtime(MapFile) > os.path.getmtime(CopyMapFile)):
                                shutil.copyfile(MapFile, CopyMapFile)

                    if not NoStrip:
                        # Preserve the unstripped image, then strip the
                        # relocation information with GenFw.
                        FileBeforeStrip = os.path.join(OutputPath, ModuleName + '.efi')
                        if not os.path.exists(FileBeforeStrip) or \
                           (os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
                            shutil.copyfile(File, FileBeforeStrip)
                        StrippedFile = os.path.join(OutputPath, ModuleName + '.stripped')
                        GenFdsGlobalVariable.GenerateFirmwareImage(
                                StrippedFile,
                                [GenFdsGlobalVariable.MacroExtend(File, Dict)],
                                Strip=True
                                )
                        File = StrippedFile
                    """For TE Section call GenFw to generate TE image"""

                    if SectionType == 'TE':
                        TeFile = os.path.join( OutputPath, ModuleName + 'Te.raw')
                        GenFdsGlobalVariable.GenerateFirmwareImage(
                                TeFile,
                                [GenFdsGlobalVariable.MacroExtend(File, Dict)],
                                Type='te'
                                )
                        File = TeFile

                    """Call GenSection"""
                    GenFdsGlobalVariable.GenerateSection(OutputFile,
                                                         [GenFdsGlobalVariable.MacroExtend(File)],
                                                         Section.Section.SectionType.get (SectionType)
                                                         )
                    OutputFileList.append(OutputFile)

        return OutputFileList, self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/Fd.py b/BaseTools/Source/Python/GenFds/Fd.py new file mode 100644 index 0000000000..99baa6abe5 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Fd.py @@ -0,0 +1,169 @@ +## @file
+# process FD generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Region
+import Fv
+import os
+import StringIO
+import sys
+from struct import *
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import FDClassObject
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import SaveFileOnChange
+
+## generate FD
+#
+#
class FD(FDClassObject):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        FDClassObject.__init__(self)

    ## GenFd() method
    #
    #   Generate FD
    #
    #   @param  self        The object pointer
    #   @param  FvBinDict   dictionary contains generated FV name and its file name
    #   @retval string      Generated FD file name
    #
    def GenFd (self, FvBinDict):
        #
        # Print Information
        #
        GenFdsGlobalVariable.InfLogger("Fd File Name:%s" %self.FdUiName)
        # The declared FD size must equal the sum over all block entries of
        # (block size * block count).
        Offset = 0x00
        for item in self.BlockSizeList:
            Offset = Offset + item[0] * item[1]
        if Offset != self.Size:
            EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s Size not consistent with block array' % self.FdUiName)
        GenFdsGlobalVariable.VerboseLogger('Following Fv will be add to Fd !!!')
        for FvObj in GenFdsGlobalVariable.FdfParser.Profile.FvDict:
            GenFdsGlobalVariable.VerboseLogger(FvObj)

        GenFdsGlobalVariable.VerboseLogger('################### Gen VTF ####################')
        self.GenVtfFile()

        # Regions are laid into an in-memory buffer first; the file is only
        # written at the end (and only when its content changed).
        FdBuffer = StringIO.StringIO('')
        # Sentinel "previous region" (start -1, size 1) so the first real
        # region is validated against offset 0 by the checks below.
        PreviousRegionStart = -1
        PreviousRegionSize = 1
        for RegionObj in self.RegionList :
            if RegionObj.Offset + RegionObj.Size <= PreviousRegionStart:
                # Region ends before the previous one starts: offsets are out
                # of ascending order.
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                'Region offset 0x%X in wrong order with Region starting from 0x%X, size 0x%X\nRegions in FDF must have offsets appear in ascending order.'\
                                % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
            elif RegionObj.Offset <= PreviousRegionStart or (RegionObj.Offset >=PreviousRegionStart and RegionObj.Offset < PreviousRegionStart + PreviousRegionSize):
                # Region begins inside the previous region: overlap.
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                'Region offset 0x%X overlaps with Region starting from 0x%X, size 0x%X' \
                                % (RegionObj.Offset, PreviousRegionStart, PreviousRegionSize))
            elif RegionObj.Offset > PreviousRegionStart + PreviousRegionSize:
                # Gap between consecutive regions: synthesize a padding
                # region to fill the hole before emitting this one.
                GenFdsGlobalVariable.InfLogger('Padding region starting from offset 0x%X, with size 0x%X' %(PreviousRegionStart + PreviousRegionSize, RegionObj.Offset - (PreviousRegionStart + PreviousRegionSize)))
                PadRegion = Region.Region()
                PadRegion.Offset = PreviousRegionStart + PreviousRegionSize
                PadRegion.Size = RegionObj.Offset - PadRegion.Offset
                PadRegion.AddToBuffer(FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, FvBinDict, self.vtfRawDict, self.DefineVarDict)
            PreviousRegionStart = RegionObj.Offset
            PreviousRegionSize = RegionObj.Size
            #
            # Call each region's AddToBuffer function
            #
            if PreviousRegionSize > self.Size:
                EdkLogger.error("GenFds", GENFDS_ERROR, 'FD %s size too small' % self.FdUiName)
            GenFdsGlobalVariable.VerboseLogger('Call each region\'s AddToBuffer function')
            RegionObj.AddToBuffer (FdBuffer, self.BaseAddress, self.BlockSizeList, self.ErasePolarity, FvBinDict, self.vtfRawDict, self.DefineVarDict)
        #
        # Create a empty Fd file
        #
        GenFdsGlobalVariable.VerboseLogger ('Create an empty Fd file')
        FdFileName = os.path.join(GenFdsGlobalVariable.FvDir,
                                  self.FdUiName + '.fd')
        #FdFile = open(FdFileName, 'wb')

        #
        # Write the buffer contents to Fd file
        #
        GenFdsGlobalVariable.VerboseLogger('Write the buffer contents to Fd file')
        # SaveFileOnChange leaves the timestamp untouched when the content is
        # identical, avoiding needless downstream rebuilds.
        SaveFileOnChange(FdFileName, FdBuffer.getvalue())
        #FdFile.write(FdBuffer.getvalue());
        #FdFile.close();
        FdBuffer.close();
        return FdFileName

    ## generate VTF
    #
    #   Generates the VTF raw data for this FD when a [VTF] definition is
    #   fully contained in this FD's firmware volumes.
    #
    #   @param  self        The object pointer
    #
    def GenVtfFile (self) :
        #
        # Get this Fd's all Fv name
        #
        FvAddDict ={}
        FvList = []
        for RegionObj in self.RegionList:
            if RegionObj.RegionType == 'FV':
                if len(RegionObj.RegionDataList) == 1:
                    RegionData = RegionObj.RegionDataList[0]
                    FvList.append(RegionData.upper())
                    FvAddDict[RegionData.upper()] = (int(self.BaseAddress,16) + \
                                                RegionObj.Offset, RegionObj.Size)
                else:
                    # Several FVs share one region: lay their base addresses
                    # back to back, each sized by its FV block statements.
                    Offset = RegionObj.Offset
                    for RegionData in RegionObj.RegionDataList:
                        FvList.append(RegionData.upper())
                        FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())
                        if len(FvObj.BlockSizeList) < 1:
                            EdkLogger.error("GenFds", GENFDS_ERROR,
                                            'FV.%s must point out FVs blocksize and Fv BlockNum' \
                                            % FvObj.UiFvName)
                        else:
                            Size = 0
                            for blockStatement in FvObj.BlockSizeList:
                                Size = Size + blockStatement[0] * blockStatement[1]
                            FvAddDict[RegionData.upper()] = (int(self.BaseAddress,16) + \
                                                             Offset, Size)
                            Offset = Offset + Size
        #
        # Check whether this Fd need VTF
        #
        Flag = False
        for VtfObj in GenFdsGlobalVariable.FdfParser.Profile.VtfList:
            compLocList = VtfObj.GetFvList()
            if set(compLocList).issubset(FvList):
                Flag = True
                break
        # VtfObj is only read when Flag is True, i.e. when the break above
        # fired, so the loop variable is safely bound here.
        if Flag == True:
            self.vtfRawDict = VtfObj.GenVtf(FvAddDict)

    ## generate flash map file
    #
    #   Placeholder: flash map generation is not implemented.
    #
    #   @param  self        The object pointer
    #
    def GenFlashMap (self):
        pass
+
+
+
+
+
+
+
+
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py new file mode 100644 index 0000000000..0bf8f5514b --- /dev/null +++ b/BaseTools/Source/Python/GenFds/FdfParser.py @@ -0,0 +1,3778 @@ +## @file
+# parse FDF file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Fd
+import Region
+import Fv
+import AprioriSection
+import FfsInfStatement
+import FfsFileStatement
+import VerSection
+import UiSection
+import FvImageSection
+import DataSection
+import DepexSection
+import CompressSection
+import GuidSection
+import Capsule
+import CapsuleData
+import Rule
+import RuleComplexFile
+import RuleSimpleFile
+import EfiSection
+import Vtf
+import ComponentStatement
+import OptionRom
+import OptRomInfStatement
+import OptRomFileStatement
+
+from Common.BuildToolError import *
+from Common import EdkLogger
+
+import re
+import os
+
+##define T_CHAR_SPACE ' '
+##define T_CHAR_NULL '\0'
+##define T_CHAR_CR '\r'
+##define T_CHAR_TAB '\t'
+##define T_CHAR_LF '\n'
+##define T_CHAR_SLASH '/'
+##define T_CHAR_BACKSLASH '\\'
+##define T_CHAR_DOUBLE_QUOTE '\"'
+##define T_CHAR_SINGLE_QUOTE '\''
+##define T_CHAR_STAR '*'
+##define T_CHAR_HASH '#'
+
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')

SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')

IncludeFileList = []
# Macro passed from command line, which has greatest priority and can NOT be overridden by those in FDF
InputMacroDict = {}
# All Macro values when parsing file, not replace existing Macro
AllMacroList = []

## Map a line number in the preprocessed buffer back to its real location.
#
#   Include directives splice the included file's lines into the parse
#   buffer; this walks the recorded include profiles to translate a buffer
#   line number into the (file name, line number) pair the user wrote.
#
#   @param  File    file name reported for lines owned by the top-level file
#   @param  Line    line number within the preprocessed buffer
#   @retval tuple   (real file name, real line number)
#
def GetRealFileLine (File, Line):

    SkippedLines = 0
    for IncProfile in IncludeFileList:
        SpliceStart = IncProfile.InsertStartLineNumber
        SpliceLength = IncProfile.InsertAdjust + len(IncProfile.FileLinesList)
        # The line falls inside this include's spliced region, so it belongs
        # to the included file itself.
        if SpliceStart <= Line < SpliceStart + SpliceLength:
            return (IncProfile.FileName, Line - SpliceStart + 1)
        # The line sits past this include: its splice pushed our line down.
        if Line >= SpliceStart + SpliceLength:
            SkippedLines += SpliceLength

    return (File, Line - SkippedLines)
+
+## The exception class that used to report error messages when parsing FDF
+#
+# Currently the "ToolName" is set to be "FDF Parser".
+#
class Warning (Exception):
    ## The constructor
    #
    #   @param  self    The object pointer
    #   @param  Str     The message to record
    #   @param  File    The FDF name
    #   @param  Line    The Line number that error occurs
    #
    def __init__(self, Str, File = None, Line = None):
        # Translate the buffer position back to the user-visible file and
        # line before recording it.
        RealFile, RealLine = GetRealFileLine(File, Line)
        self.FileName = RealFile
        self.LineNumber = RealLine
        self.Message = Str
        self.ToolName = 'FdfParser'

    ## Printable form of the exception is just the recorded message.
    def __str__(self):
        return self.Message
+
+## The MACRO class that used to record macro value data when parsing include file
+#
+#
class MacroProfile :
    ## The constructor
    #
    #   Records where a macro definition was seen; the parser fills in the
    #   name/value pair after construction.
    #
    #   @param  self        The object pointer
    #   @param  FileName    The file that to be parsed
    #   @param  Line        The line number the macro is defined at
    #
    def __init__(self, FileName, Line):
        self.FileName = FileName
        self.DefinedAtLine = Line
        # Filled in later by the parser.
        self.MacroName = None
        self.MacroValue = None
+
+## The Include file content class that used to record file data when parsing include file
+#
+# May raise Exception when opening file.
+#
class IncludeFileProfile :
    ## The constructor
    #
    #   Reads the whole include file into FileLinesList.
    #
    #   @param  self        The object pointer
    #   @param  FileName    The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileName = FileName
        self.FileLinesList = []
        try:
            # Unbuffered binary read keeps line endings exactly as on disk.
            fsock = open(FileName, "rb", 0)
            try:
                self.FileLinesList = fsock.readlines()
            finally:
                fsock.close()

        # Narrowed from a bare "except:" so tool-stopping exceptions
        # (KeyboardInterrupt/SystemExit) are no longer swallowed.
        except (IOError, OSError):
            EdkLogger.error("FdfParser", FILE_OPEN_FAILURE, ExtraData=FileName)

        # Where this file's lines were spliced into the main parse buffer,
        # and how many extra lines the splice added; set by the caller.
        self.InsertStartLineNumber = None
        self.InsertAdjust = 0
+
+## The FDF content class that used to record file data when parsing FDF
+#
+# May raise Exception when opening file.
+#
class FileProfile :
    ## The constructor
    #
    #   Reads the whole FDF file into FileLinesList and initializes the
    #   containers the parser fills while processing the file.
    #
    #   @param  self        The object pointer
    #   @param  FileName    The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileLinesList = []
        try:
            # Unbuffered binary read keeps line endings exactly as on disk.
            fsock = open(FileName, "rb", 0)
            try:
                self.FileLinesList = fsock.readlines()
            finally:
                fsock.close()

        # Narrowed from a bare "except:" so tool-stopping exceptions
        # (KeyboardInterrupt/SystemExit) are no longer swallowed.
        except (IOError, OSError):
            EdkLogger.error("FdfParser", FILE_OPEN_FAILURE, ExtraData=FileName)

        # Parse results, populated by FdfParser during parsing.
        self.PcdDict = {}
        self.InfList = []

        self.FdDict = {}
        self.FvDict = {}
        self.CapsuleList = []
        self.VtfList = []
        self.RuleDict = {}
        self.OptRomDict = {}
+
+## The syntax parser for FDF
+#
+# PreprocessFile method should be called prior to ParseFile
+# CycleReferenceCheck method can detect cycles in FDF contents
+#
+# GetNext*** procedures mean these procedures will get next token first, then make judgement.
+# Get*** procedures mean these procedures will make judgement on current token only.
+#
+class FdfParser:
+ ## The constructor
+ #
+ # @param self The object pointer
+ # @param FileName The file that to be parsed
+ #
    def __init__(self, FileName):
        # Whole-file contents, plus the containers filled during parsing.
        self.Profile = FileProfile(FileName)
        self.FileName = FileName
        # Cursor into the file buffer: 1-based line, 0-based column.
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0
        # Names of the FD/FV section currently being parsed, if any.
        self.CurrentFdName = None
        self.CurrentFvName = None
        # Most recently recognized token and the characters skipped before it.
        self.__Token = ""
        self.__SkippedChars = ""

        # NOTE(review): appears to collect buffer regions to be blanked out
        # later (see __ReplaceFragment); confirm against the full class.
        self.__WipeOffArea = []
+
+ ## __IsWhiteSpace() method
+ #
+ # Whether char at current FileBufferPos is whitespace
+ #
+ # @param self The object pointer
+ # @param Char The char to test
+ # @retval True The char is a kind of white space
+ # @retval False The char is NOT a kind of white space
+ #
+ def __IsWhiteSpace(self, Char):
+ if Char in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_SPACE, T_CHAR_TAB, T_CHAR_LF):
+ return True
+ else:
+ return False
+
+ ## __SkipWhiteSpace() method
+ #
+ # Skip white spaces from current char, return number of chars skipped
+ #
+ # @param self The object pointer
+ # @retval Count The number of chars skipped
+ #
    def __SkipWhiteSpace(self):
        Count = 0
        while not self.__EndOfFile():
            # Count optimistically; undone below if the char is not skipped.
            Count += 1
            if self.__CurrentChar() in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_LF, T_CHAR_SPACE, T_CHAR_TAB):
                # Remember what was skipped so callers can inspect it.
                self.__SkippedChars += str(self.__CurrentChar())
                self.__GetOneChar()

            else:
                # First non-whitespace character: stop here and correct the
                # optimistic increment above.
                Count = Count - 1
                return Count
        return Count
+
+ ## __EndOfFile() method
+ #
+ # Judge current buffer pos is at file end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at file end
+ # @retval False Current File buffer position is NOT at file end
+ #
+ def __EndOfFile(self):
+ NumberOfLines = len(self.Profile.FileLinesList)
+ SizeOfLastLine = len(self.Profile.FileLinesList[-1])
+ if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
+ return True
+ elif self.CurrentLineNumber > NumberOfLines:
+ return True
+ else:
+ return False
+
+ ## __EndOfLine() method
+ #
+ # Judge current buffer pos is at line end
+ #
+ # @param self The object pointer
+ # @retval True Current File buffer position is at line end
+ # @retval False Current File buffer position is NOT at line end
+ #
+ def __EndOfLine(self):
+ if self.CurrentLineNumber > len(self.Profile.FileLinesList):
+ return True
+ SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
+ if self.CurrentOffsetWithinLine >= SizeOfCurrentLine:
+ return True
+ else:
+ return False
+
+ ## Rewind() method
+ #
+ # Reset file data buffer to the initial state
+ #
+ # @param self The object pointer
+ #
+ def Rewind(self):
+ self.CurrentLineNumber = 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __UndoOneChar() method
+ #
+ # Go back one char in the file buffer
+ #
+ # @param self The object pointer
+ # @retval True Successfully go back one char
+ # @retval False Not able to go back one char as file beginning reached
+ #
+ def __UndoOneChar(self):
+
+ if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
+ return False
+ elif self.CurrentOffsetWithinLine == 0:
+ self.CurrentLineNumber -= 1
+ self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
+ else:
+ self.CurrentOffsetWithinLine -= 1
+ return True
+
+ ## __GetOneChar() method
+ #
+ # Move forward one char in the file buffer
+ #
+ # @param self The object pointer
+ #
+ def __GetOneChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ else:
+ self.CurrentOffsetWithinLine += 1
+
+ ## __CurrentChar() method
+ #
+ # Get the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Current char
+ #
+ def __CurrentChar(self):
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]
+
+ ## __NextChar() method
+ #
+ # Get the one char pass the char pointed to by the file buffer pointer
+ #
+ # @param self The object pointer
+ # @retval Char Next char
+ #
+ def __NextChar(self):
+ if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
+ return self.Profile.FileLinesList[self.CurrentLineNumber][0]
+ else:
+ return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]
+
+ ## __SetCurrentCharValue() method
+ #
+ # Modify the value of current char
+ #
+ # @param self The object pointer
+ # @param Value The new value of current char
+ #
+ def __SetCurrentCharValue(self, Value):
+ self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value
+
+ ## __CurrentLine() method
+ #
+ # Get the list that contains current line contents
+ #
+ # @param self The object pointer
+ # @retval List current line contents
+ #
    def __CurrentLine(self):
        """Return the current line's contents (a list of chars, or a string
        depending on the preprocessing phase)."""
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
+
+ def __StringToList(self):
+ self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesList]
+ self.Profile.FileLinesList[-1].append(' ')
+
    def __ReplaceMacros(self, Str, File, Line):
        """Expand every $(MACRO) reference in Str and return the result.

        Lookup order: the global InputMacroDict (command-line macros) wins;
        otherwise the AllMacroList entries DEFINE'd in File at or before
        Line are scanned, the LAST match taking effect.  Raises Warning
        when a '$(' has no closing ')'.
        """
        MacroEnd = 0
        while Str.find('$(', MacroEnd) >= 0:
            MacroStart = Str.find('$(', MacroEnd)
            if Str.find(')', MacroStart) > 0:
                MacroEnd = Str.find(')', MacroStart)
                Name = Str[MacroStart + 2 : MacroEnd]
                Value = None
                if Name in InputMacroDict:
                    Value = InputMacroDict[Name]

                else:
                    # No break: a later definition overwrites an earlier one.
                    for Profile in AllMacroList:
                        if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
                            Value = Profile.MacroValue

                if Value != None:
                    # NOTE(review): str.replace substitutes EVERY occurrence
                    # of $(Name) in Str, not just the one located above, and
                    # MacroEnd is then repositioned after the first one only.
                    Str = Str.replace('$(' + Name + ')', Value)
                    MacroEnd = MacroStart + len(Value)
                # An undefined macro is left as-is; the scan continues after
                # its closing ')'.

            else:
                raise Warning("Macro not complete", self.FileName, self.CurrentLineNumber)
        return Str
+
+ def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):
+ if StartPos[0] == EndPos[0]:
+ Offset = StartPos[1]
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+ return
+
+ Offset = StartPos[1]
+ while self.Profile.FileLinesList[StartPos[0]][Offset] not in ('\r', '\n'):
+ self.Profile.FileLinesList[StartPos[0]][Offset] = Value
+ Offset += 1
+
+ Line = StartPos[0]
+ while Line < EndPos[0]:
+ Offset = 0
+ while self.Profile.FileLinesList[Line][Offset] not in ('\r', '\n'):
+ self.Profile.FileLinesList[Line][Offset] = Value
+ Offset += 1
+ Line += 1
+
+ Offset = 0
+ while Offset <= EndPos[1]:
+ self.Profile.FileLinesList[EndPos[0]][Offset] = Value
+ Offset += 1
+
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
    def PreprocessFile(self):
        """Blank out comments in the file buffer, then rewind.

        Handles // line comments, '#' line comments and /* */ block
        comments; a '#' inside a double-quoted string is NOT a comment.
        Comment chars are replaced by spaces (newlines kept) so line/column
        numbers stay valid.  Must run while lines are in list-of-chars form
        (after __StringToList); lines are re-joined to strings at the end.
        """
        self.Rewind()
        InComment = False
        DoubleSlashComment = False
        HashComment = False
        # HashComment in quoted string " " is ignored.
        InString = False

        while not self.__EndOfFile():

            if self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE and not InComment:
                InString = not InString
                # NOTE(review): InString is not reset at end of line, so an
                # unterminated quote would leak into following lines -- confirm.
            # meet new line, then no longer in a comment for // and '#'
            if self.__CurrentChar() == T_CHAR_LF:
                self.CurrentLineNumber += 1
                self.CurrentOffsetWithinLine = 0
                if InComment and DoubleSlashComment:
                    InComment = False
                    DoubleSlashComment = False
                if InComment and HashComment:
                    InComment = False
                    HashComment = False
            # check for */ comment end
            elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = False
            # set comments to spaces
            elif InComment:
                self.__SetCurrentCharValue(T_CHAR_SPACE)
                self.__GetOneChar()
            # check for // comment
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH and not self.__EndOfLine():
                InComment = True
                DoubleSlashComment = True
            # check for '#' comment
            elif self.__CurrentChar() == T_CHAR_HASH and not self.__EndOfLine() and not InString:
                InComment = True
                HashComment = True
            # check for /* comment start
            elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                self.__SetCurrentCharValue( T_CHAR_SPACE)
                self.__GetOneChar()
                InComment = True
            else:
                self.__GetOneChar()

        # restore from ListOfList to ListOfString
        self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
        self.Rewind()
+
+ ## PreprocessIncludeFile() method
+ #
+ # Preprocess file contents, replace !include statements with file contents.
+ # In the end, rewind the file buffer pointer to the beginning
+ #
+ # @param self The object pointer
+ #
    def PreprocessIncludeFile(self):
        """Expand every '!include <file>' statement in place.

        The included file's lines are spliced into FileLinesList right after
        the !include line, an IncludeFileProfile records the mapping back to
        real file/line numbers, and the original !include statement is
        commented out with a leading '#'.  Rewinds when done.
        """

        while self.__GetNextToken():

            if self.__Token == '!include':
                IncludeLine = self.CurrentLineNumber
                IncludeOffset = self.CurrentOffsetWithinLine - len('!include')
                if not self.__GetNextToken():
                    raise Warning("expected include file name", self.FileName, self.CurrentLineNumber)
                IncFileName = self.__Token
                if not os.path.isabs(IncFileName):
                    if IncFileName.startswith('$(WORKSPACE)'):
                        # NOTE(review): assumes the WORKSPACE env var is set;
                        # os.environ.get returns None otherwise -- confirm
                        # callers guarantee it.
                        Str = IncFileName.replace('$(WORKSPACE)', os.environ.get('WORKSPACE'))
                        if os.path.exists(Str):
                            if not os.path.isabs(Str):
                                Str = os.path.abspath(Str)
                        IncFileName = Str
                    else:
                        # file is in the same dir with FDF file
                        FullFdf = self.FileName
                        if not os.path.isabs(self.FileName):
                            FullFdf = os.path.join(os.environ.get('WORKSPACE'), self.FileName)

                        IncFileName = os.path.join(os.path.dirname(FullFdf), IncFileName)

                if not os.path.exists(os.path.normpath(IncFileName)):
                    raise Warning("Include file not exists", self.FileName, self.CurrentLineNumber)

                IncFileProfile = IncludeFileProfile(os.path.normpath(IncFileName))

                CurrentLine = self.CurrentLineNumber
                CurrentOffset = self.CurrentOffsetWithinLine
                # list index of the insertion, note that line number is 'CurrentLine + 1'
                InsertAtLine = CurrentLine
                IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
                # deal with remaining portions after "!include filename", if exists.
                if self.__GetNextToken():
                    if self.CurrentLineNumber == CurrentLine:
                        # Split the tail of the !include line onto its own
                        # line so the included text starts on a fresh line.
                        RemainingLine = self.__CurrentLine()[CurrentOffset:]
                        self.Profile.FileLinesList.insert(self.CurrentLineNumber, RemainingLine)
                        IncFileProfile.InsertAdjust += 1
                        self.CurrentLineNumber += 1
                        self.CurrentOffsetWithinLine = 0

                for Line in IncFileProfile.FileLinesList:
                    self.Profile.FileLinesList.insert(InsertAtLine, Line)
                    self.CurrentLineNumber += 1
                    InsertAtLine += 1

                IncludeFileList.append(IncFileProfile)

                # comment out the processed include file statement
                TempList = list(self.Profile.FileLinesList[IncludeLine - 1])
                TempList.insert(IncludeOffset, '#')
                self.Profile.FileLinesList[IncludeLine - 1] = ''.join(TempList)

        self.Rewind()
+
+ ## PreprocessIncludeFile() method
+ #
+ # Preprocess file contents, replace !include statements with file contents.
+ # In the end, rewind the file buffer pointer to the beginning
+ #
+ # @param self The object pointer
+ #
    def PreprocessConditionalStatement(self):
        """Process DEFINE and !if/!ifdef/!ifndef/!elseif/!else/!endif.

        Nothing is deleted here; instead the (start, end) positions of every
        directive -- and of every branch body that must NOT survive -- are
        pushed onto self.__WipeOffArea, to be blanked later by
        __ReplaceFragment.  Rewinds when done.
        """
        # IfList is a stack of if branches with elements of list [Pos, CondSatisfied, BranchDetermined]
        IfList = []
        while self.__GetNextToken():
            if self.__Token == 'DEFINE':
                DefineLine = self.CurrentLineNumber - 1
                DefineOffset = self.CurrentOffsetWithinLine - len('DEFINE')
                if not self.__GetNextToken():
                    raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
                Macro = self.__Token
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

                if not self.__GetNextToken():
                    raise Warning("expected value", self.FileName, self.CurrentLineNumber)

                if self.__GetStringData():
                    pass
                Value = self.__Token
                # A command-line macro (InputMacroDict) overrides any DEFINE
                # of the same name, so the DEFINE is only recorded otherwise.
                if not Macro in InputMacroDict:
                    FileLineTuple = GetRealFileLine(self.FileName, DefineLine + 1)
                    MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
                    MacProfile.MacroName = Macro
                    MacProfile.MacroValue = Value
                    AllMacroList.append(MacProfile)
                # The DEFINE statement itself is always wiped from the text.
                self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!ifdef', '!ifndef', '!if'):
                IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                IfList.append([IfStartPos, None, None])
                CondLabel = self.__Token

                if not self.__GetNextToken():
                    raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
                MacroName = self.__Token
                NotFlag = False
                if MacroName.startswith('!'):
                    NotFlag = True
                    MacroName = MacroName[1:]

                NotDefineFlag = False
                if CondLabel == '!ifndef':
                    NotDefineFlag = True
                if CondLabel == '!ifdef' or CondLabel == '!ifndef':
                    if NotFlag:
                        raise Warning("'NOT' operation not allowed for Macro name", self.FileName, self.CurrentLineNumber)

                if CondLabel == '!if':

                    if not self.__GetNextOp():
                        raise Warning("expected !endif", self.FileName, self.CurrentLineNumber)

                    if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                        Op = self.__Token
                        if not self.__GetNextToken():
                            raise Warning("expected value", self.FileName, self.CurrentLineNumber)
                        if self.__GetStringData():
                            pass
                        MacroValue = self.__Token
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    else:
                        # No comparison operator: push the word back and
                        # evaluate the macro as a boolean.
                        self.CurrentOffsetWithinLine -= len(self.__Token)
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        # Branch taken: wipe just the directive text, the
                        # branch body survives.
                        self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                else:
                    ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1)
                    if NotDefineFlag:
                        ConditionSatisfied = not ConditionSatisfied
                    BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        self.__WipeOffArea.append((IfStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!elseif', '!else'):
                ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                if len(IfList) <= 0:
                    raise Warning("Missing !if statement", self.FileName, self.CurrentLineNumber)
                if IfList[-1][1]:
                    # Previous branch was taken: this whole else-part is dead.
                    IfList[-1] = [ElseStartPos, False, True]
                    self.__WipeOffArea.append((ElseStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    # Previous branch not taken: wipe it and consider this one.
                    self.__WipeOffArea.append((IfList[-1][0], ElseStartPos))
                    IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
                    if self.__Token == '!elseif':
                        if not self.__GetNextToken():
                            raise Warning("expected Macro name", self.FileName, self.CurrentLineNumber)
                        MacroName = self.__Token
                        NotFlag = False
                        if MacroName.startswith('!'):
                            NotFlag = True
                            MacroName = MacroName[1:]

                        if not self.__GetNextOp():
                            raise Warning("expected !endif", self.FileName, self.CurrentLineNumber)

                        if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                            Op = self.__Token
                            if not self.__GetNextToken():
                                raise Warning("expected value", self.FileName, self.CurrentLineNumber)
                            if self.__GetStringData():
                                pass
                            MacroValue = self.__Token
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        else:
                            self.CurrentOffsetWithinLine -= len(self.__Token)
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        IfList[-1] = [IfList[-1][0], ConditionSatisfied, IfList[-1][2]]

                    if IfList[-1][1]:
                        if IfList[-1][2]:
                            # An earlier branch already won: suppress this one.
                            IfList[-1][1] = False
                        else:
                            IfList[-1][2] = True
                            self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))


            elif self.__Token == '!endif':
                # NOTE(review): a stray !endif with an empty IfList raises
                # IndexError here rather than a Warning -- confirm intended.
                if IfList[-1][1]:
                    self.__WipeOffArea.append(((self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len('!endif')), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                IfList.pop()


        if len(IfList) > 0:
            raise Warning("Missing !endif", self.FileName, self.CurrentLineNumber)
        self.Rewind()
+
+ def __EvaluateConditional(self, Name, Line, Op = None, Value = None):
+
+ FileLineTuple = GetRealFileLine(self.FileName, Line)
+ if Name in InputMacroDict:
+ MacroValue = InputMacroDict[Name]
+ if Op == None:
+ if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number", self.FileName, Line)
+
+ for Profile in AllMacroList:
+ if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
+ if Op == None:
+ if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != Profile.MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == Profile.MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(Profile.MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number", self.FileName, Line)
+
+ return False
+
+ ## __IsToken() method
+ #
+ # Check whether input string is found from current char position along
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsToken(self, String, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __IsKeyword() method
+ #
+ # Check whether input keyword is found from current char position along, whole word only!
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param Keyword The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsKeyword(self, KeyWord, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(KeyWord.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(KeyWord)
+ if index == 0:
+ followingChar = self.__CurrentLine()[self.CurrentOffsetWithinLine + len(KeyWord)]
+ if not str(followingChar).isspace() and followingChar not in SEPERATOR_TUPLE:
+ return False
+ self.CurrentOffsetWithinLine += len(KeyWord)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __GetNextWord() method
+ #
+ # Get next C name from file lines
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a C name string, file buffer pointer moved forward
+ # @retval False Not able to find a C name string, file buffer pointer not changed
+ #
+ def __GetNextWord(self):
+ self.__SkipWhiteSpace()
+ if self.__EndOfFile():
+ return False
+
+ TempChar = self.__CurrentChar()
+ StartPos = self.CurrentOffsetWithinLine
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') or TempChar == '_':
+ self.__GetOneChar()
+ while not self.__EndOfLine():
+ TempChar = self.__CurrentChar()
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') \
+ or (TempChar >= '0' and TempChar <= '9') or TempChar == '_' or TempChar == '-':
+ self.__GetOneChar()
+
+ else:
+ break
+
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+
+ return False
+
+ ## __GetNextToken() method
+ #
+ # Get next token unit before a seperator
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a token unit, file buffer pointer moved forward
+ # @retval False Not able to find a token unit, file buffer pointer not changed
+ #
    def __GetNextToken(self):
        """Scan the next token (a run of non-space, non-separator chars, or a
        single separator char) into self.__Token.

        @retval True  a token was scanned, pointer moved past it
        @retval False nothing scanned (end of file / empty line remainder)
        """
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        StartLine = self.CurrentLineNumber
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and TempChar not in SEPERATOR_TUPLE:
                self.__GetOneChar()
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                self.__GetOneChar()
                break
            else:
                break
#        else:
#            return False

        # __GetOneChar may have wrapped to the next line when the token
        # consumed the last char of its line; clamp the slice to StartLine.
        EndPos = self.CurrentOffsetWithinLine
        if self.CurrentLineNumber != StartLine:
            EndPos = len(self.Profile.FileLinesList[StartLine-1])
        self.__Token = self.Profile.FileLinesList[StartLine-1][StartPos : EndPos]
        if StartPos != self.CurrentOffsetWithinLine:
            return True
        else:
            return False
+
    def __GetNextOp(self):
        """Scan the next whitespace-delimited word (no separator handling)
        into self.__Token; used for !if operator tokens.

        @retval True  a word was scanned, pointer moved past it
        @retval False end of file, end of line reached (see NOTE), or
                      nothing scanned
        """
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space
            if not str(TempChar).isspace():
                self.__GetOneChar()
            else:
                break
        # NOTE(review): this 'else' belongs to the 'while' -- it runs when
        # the loop ends by reaching end-of-line without a break, so a word
        # extending to the line end is discarded and False returned.
        # __GetNextToken has the equivalent branch commented out; confirm
        # this asymmetry is intentional before changing it.
        else:
            return False

        if StartPos != self.CurrentOffsetWithinLine:
            self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
            return True
        else:
            return False
+ ## __GetNextGuid() method
+ #
+ # Get next token unit before a seperator
+ # If found, the GUID string is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a registry format GUID, file buffer pointer moved forward
+ # @retval False Not able to find a registry format GUID, file buffer pointer not changed
+ #
+ def __GetNextGuid(self):
+
+ if not self.__GetNextToken():
+ return False
+ p = re.compile('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}')
+ if p.match(self.__Token) != None:
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __UndoToken() method
+ #
+ # Go back one token unit in file buffer
+ #
+ # @param self The object pointer
+ #
    def __UndoToken(self):
        """Move the buffer pointer back to the start of the most recently
        scanned token (skipping trailing whitespace backwards first)."""
        self.__UndoOneChar()
        # Walk back over whitespace; if we hit the start of the file while
        # still on whitespace, step forward once and give up.
        while self.__CurrentChar().isspace():
            if not self.__UndoOneChar():
                self.__GetOneChar()
                return


        StartPos = self.CurrentOffsetWithinLine
        CurrentLine = self.CurrentLineNumber
        # Tokens never span lines, so stop as soon as we leave this line.
        while CurrentLine == self.CurrentLineNumber:

            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and not TempChar in SEPERATOR_TUPLE:
                if not self.__UndoOneChar():
                    # Reached the start of the file: the token begins here.
                    break
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                return
            else:
                break

        # We overshot by one char (onto whitespace/separator/previous line);
        # step forward to the token's first char.
        self.__GetOneChar()
+
+ ## __HexDigit() method
+ #
+ # Whether char input is a Hex data bit
+ #
+ # @param self The object pointer
+ # @param TempChar The char to test
+ # @retval True The char is a Hex data bit
+ # @retval False The char is NOT a Hex data bit
+ #
+ def __HexDigit(self, TempChar):
+ if (TempChar >= 'a' and TempChar <= 'f') or (TempChar >= 'A' and TempChar <= 'F') \
+ or (TempChar >= '0' and TempChar <= '9'):
+ return True
+ else:
+ return False
+
+ def __IsHex(self, HexStr):
+ if not HexStr.upper().startswith("0X"):
+ return False
+ if len(self.__Token) <= 2:
+ return False
+ charList = [c for c in HexStr[2 : ] if not self.__HexDigit( c)]
+ if len(charList) == 0:
+ return True
+ else:
+ return False
+ ## __GetNextHexNumber() method
+ #
+ # Get next HEX data before a seperator
+ # If found, the HEX data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a HEX data, file buffer pointer moved forward
+ # @retval False Not able to find a HEX data, file buffer pointer not changed
+ #
+ def __GetNextHexNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__IsHex(self.__Token):
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextDecimalNumber() method
+ #
+ # Get next decimal data before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a decimal data, file buffer pointer moved forward
+ # @retval False Not able to find a decimal data, file buffer pointer not changed
+ #
+ def __GetNextDecimalNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__Token.isdigit():
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextPcdName() method
+ #
+ # Get next PCD token space C name and PCD C name pair before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval Tuple PCD C name and PCD token space C name pair
+ #
+ def __GetNextPcdName(self):
+ if not self.__GetNextWord():
+ raise Warning("expected format of <PcdTokenSpaceCName>.<PcdCName>", self.FileName, self.CurrentLineNumber)
+ pcdTokenSpaceCName = self.__Token
+
+ if not self.__IsToken( "."):
+ raise Warning("expected format of <PcdTokenSpaceCName>.<PcdCName>", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextWord():
+ raise Warning("expected format of <PcdTokenSpaceCName>.<PcdCName>", self.FileName, self.CurrentLineNumber)
+ pcdCName = self.__Token
+
+ return (pcdCName, pcdTokenSpaceCName)
+
+ ## __GetStringData() method
+ #
+ # Get string contents quoted in ""
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a string data, file buffer pointer moved forward
+ # @retval False Not able to find a string data, file buffer pointer not changed
+ #
+ def __GetStringData(self):
+ if self.__Token.startswith("\"") or self.__Token.startswith("L\""):
+ self.__UndoToken()
+ self.__SkipToToken("\"")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\""):
+ raise Warning("Missing Quote \" for String", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \" for String", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\"')
+ return True
+
+ elif self.__Token.startswith("\'") or self.__Token.startswith("L\'"):
+ self.__UndoToken()
+ self.__SkipToToken("\'")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\'"):
+ raise Warning("Missing Quote \' for String", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \' for String", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\'')
+ return True
+
+ else:
+ return False
+
+ ## __SkipToToken() method
+ #
+ # Search forward in file buffer for the string
+ # The skipped chars are put into self.__SkippedChars
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find the string, file buffer pointer moved forward
+ # @retval False Not able to find the string, file buffer pointer not changed
+ #
+ def __SkipToToken(self, String, IgnoreCase = False):
+ StartPos = self.GetFileBufferPos()
+
+ self.__SkippedChars = ""
+ while not self.__EndOfFile():
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__SkippedChars += String
+ return True
+ self.__SkippedChars += str(self.__CurrentChar())
+ self.__GetOneChar()
+
+ self.SetFileBufferPos( StartPos)
+ self.__SkippedChars = ""
+ return False
+
+ ## GetFileBufferPos() method
+ #
+ # Return the tuple of current line and offset within the line
+ #
+ # @param self The object pointer
+ # @retval Tuple Line number and offset pair
+ #
+ def GetFileBufferPos(self):
+ return (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ ## SetFileBufferPos() method
+ #
+ # Restore the file buffer position
+ #
+ # @param self The object pointer
+ # @param Pos The new file buffer position
+ #
+ def SetFileBufferPos(self, Pos):
+ (self.CurrentLineNumber, self.CurrentOffsetWithinLine) = Pos
+
+ ## ParseFile() method
+ #
+ # Parse the file profile buffer to extract fd, fv ... information
+ # Exception will be raised if syntax error found
+ #
+ # @param self The object pointer
+ #
    def ParseFile(self):
        """Drive the full parse: preprocess the buffer (comments, !include,
        conditionals, macros), then repeatedly consume each section kind in
        its required order ([Defines], [FD.], [FV.], [Capsule.], [VTF.],
        [Rule.], [OptionRom.]).

        On any Warning the file/line/column context is appended to the
        message before re-raising.
        """

        try:
            # Each preprocessing pass needs lines as char lists
            # (__StringToList) and re-joins them to strings when done.
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessIncludeFile()
            # Second pass: text pulled in by !include must itself be
            # comment-stripped before conditionals are evaluated.
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessConditionalStatement()
            self.__StringToList()
            # Blank out the regions recorded by conditional processing.
            for Pos in self.__WipeOffArea:
                self.__ReplaceFragment(Pos[0], Pos[1])
            self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]

            while self.__GetDefines():
                pass

            # Expand $(MACRO) references line by line, mapping each buffer
            # line back to its real file/line for DEFINE scoping.
            Index = 0
            while Index < len(self.Profile.FileLinesList):
                FileLineTuple = GetRealFileLine(self.FileName, Index + 1)
                self.Profile.FileLinesList[Index] = self.__ReplaceMacros(self.Profile.FileLinesList[Index], FileLineTuple[0], FileLineTuple[1])
                Index += 1

            while self.__GetFd():
                pass

            while self.__GetFv():
                pass

            while self.__GetCapsule():
                pass

            while self.__GetVtf():
                pass

            while self.__GetRule():
                pass

            while self.__GetOptionRom():
                pass

        except Warning, X:
            self.__UndoToken()
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            #'\n\tGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
            X.Message += ' near line %d, column %d: %s' \
                % (FileLineTuple[1], self.CurrentOffsetWithinLine + 1, self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :].rstrip('\n').rstrip('\r'))
            raise
+
+ ## __GetDefines() method
+ #
+ # Get Defines section contents and store its data into AllMacrosList
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a Defines
+ # @retval False Not able to find a Defines
+ #
+ def __GetDefines(self):
+
+ if not self.__GetNextToken():
+ return False
+
+ S = self.__Token.upper()
+ if S.startswith("[") and not S.startswith("[DEFINES"):
+ if not S.startswith("[FD.") and not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
+ and not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section or section appear sequence error (The correct sequence should be [DEFINES], [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+ self.__UndoToken()
+ return False
+
+ self.__UndoToken()
+ if not self.__IsToken("[DEFINES", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning("expected [DEFINES", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "]"):
+ raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
+
+ while self.__GetNextWord():
+ Macro = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken() or self.__Token.startswith('['):
+ raise Warning("expected MACRO value", self.FileName, self.CurrentLineNumber)
+ Value = self.__Token
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
+ MacProfile.MacroName = Macro
+ MacProfile.MacroValue = Value
+ AllMacroList.append(MacProfile)
+
+ return False
+
    ## __GetFd() method
    #
    # Get FD section contents and store its data into FD dictionary of self.Profile
    #
    # @param self The object pointer
    # @retval True Successfully find a FD
    # @retval False Not able to find a FD
    #
    def __GetFd(self):

        if not self.__GetNextToken():
            return False

        # Lookahead: a section header other than [FD.] means there are no
        # more FD sections; push the token back and stop.  Any header that
        # is not one of the known later section types is a parse error.
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[FD."):
            if not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        # Re-read the header strictly as "[FD." (case-insensitive match).
        self.__UndoToken()
        if not self.__IsToken("[FD.", True):
            # FileLineTuple is only used by the commented-out debug print below.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
            #        % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [FD.]", self.FileName, self.CurrentLineNumber)

        # FD names are stored and looked up upper-cased.
        FdName = self.__GetUiName()
        self.CurrentFdName = FdName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)

        FdObj = Fd.FD()
        FdObj.FdUiName = self.CurrentFdName
        self.Profile.FdDict[self.CurrentFdName] = FdObj
        Status = self.__GetCreateFile(FdObj)
        if not Status:
            raise Warning("FD name error", self.FileName, self.CurrentLineNumber)

        # Mandatory BaseAddress/Size/ErasePolarity/BlockSize statements.
        if not self.__GetTokenStatements(FdObj):
            return False

        self.__GetDefineStatements(FdObj)

        self.__GetSetStatements(FdObj)

        # At least one region layout is required; consume all that follow.
        if not self.__GetRegionLayout(FdObj):
            raise Warning("expected region layout", self.FileName, self.CurrentLineNumber)

        while self.__GetRegionLayout(FdObj):
            pass
        return True
+
+ ## __GetUiName() method
+ #
+ # Return the UI name of a section
+ #
+ # @param self The object pointer
+ # @retval FdName UI name
+ #
+ def __GetUiName(self):
+ Name = ""
+ if self.__GetNextWord():
+ Name = self.__Token
+
+ return Name
+
+ ## __GetCreateFile() method
+ #
+ # Return the output file name of object
+ #
+ # @param self The object pointer
+ # @param Obj object whose data will be stored in file
+ # @retval FdName UI name
+ #
+ def __GetCreateFile(self, Obj):
+
+ if self.__IsKeyword( "CREATE_FILE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected file name", self.FileName, self.CurrentLineNumber)
+
+ FileName = self.__Token
+ Obj.CreateFileName = FileName
+
+ return True
+
    ## __GetTokenStatements() method
    #
    # Parse the mandatory FD header statements, in this fixed order:
    #   BaseAddress = <hex> [| <PcdName>]
    #   Size        = <hex> [| <PcdName>]
    #   ErasePolarity = 1|0
    # followed by the block statements (BlockSize/NumBlocks).
    #
    # @param self The object pointer
    # @param Obj for whom token statement is got
    # @retval True Successfully find a token statement
    # @retval False Not able to find a token statement
    #
    def __GetTokenStatements(self, Obj):
        if not self.__IsKeyword( "BaseAddress"):
            raise Warning("BaseAddress missing", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex base address", self.FileName, self.CurrentLineNumber)

        # BaseAddress is intentionally kept as its textual hex form.
        Obj.BaseAddress = self.__Token

        # Optional "| PcdName": the base address also feeds that PCD.
        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.BaseAddressPcd = pcdPair
            self.Profile.PcdDict[pcdPair] = Obj.BaseAddress

        if not self.__IsKeyword( "Size"):
            raise Warning("Size missing", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex size", self.FileName, self.CurrentLineNumber)


        # Save the textual form for the PCD dictionary, then convert the
        # same string for Obj.Size (self.__Token may change in between).
        Size = self.__Token
        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.SizePcd = pcdPair
            self.Profile.PcdDict[pcdPair] = Size
        Obj.Size = long(Size, 0)

        if not self.__IsKeyword( "ErasePolarity"):
            raise Warning("ErasePolarity missing", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Erase Polarity", self.FileName, self.CurrentLineNumber)

        # Only literal "1" or "0" is accepted (stored as a string).
        if self.__Token != "1" and self.__Token != "0":
            raise Warning("expected 1 or 0 Erase Polarity", self.FileName, self.CurrentLineNumber)

        Obj.ErasePolarity = self.__Token

        Status = self.__GetBlockStatements(Obj)
        return Status
+
+ ## __GetAddressStatements() method
+ #
+ # Get address statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom address statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetAddressStatements(self, Obj):
+
+ if self.__IsKeyword("BsBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address", self.FileName, self.CurrentLineNumber)
+
+ BsAddress = long(self.__Token, 0)
+ Obj.BsBaseAddress = BsAddress
+
+ if self.__IsKeyword("RtBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address", self.FileName, self.CurrentLineNumber)
+
+ RtAddress = long(self.__Token, 0)
+ Obj.RtBaseAddress = RtAddress
+
+ ## __GetBlockStatements() method
+ #
+ # Get block statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatements(self, Obj):
+
+ if not self.__GetBlockStatement(Obj):
+ raise Warning("expected block statement", self.FileName, self.CurrentLineNumber)
+
+ while self.__GetBlockStatement(Obj):
+ pass
+ return True
+
+ ## __GetBlockStatement() method
+ #
+ # Get block statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatement(self, Obj):
+ if not self.__IsKeyword( "BlockSize"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber() and not self.__GetNextDecimalNumber():
+ raise Warning("expected Hex block size", self.FileName, self.CurrentLineNumber)
+
+ BlockSize = self.__Token
+ BlockSizePcd = None
+ if self.__IsToken( "|"):
+ PcdPair = self.__GetNextPcdName()
+ BlockSizePcd = PcdPair
+ self.Profile.PcdDict[PcdPair] = BlockSize
+ BlockSize = long(self.__Token, 0)
+
+ BlockNumber = None
+ if self.__IsKeyword( "NumBlocks"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected block numbers", self.FileName, self.CurrentLineNumber)
+
+ BlockNumber = long(self.__Token, 0)
+
+ Obj.BlockSizeList.append((BlockSize, BlockNumber, BlockSizePcd))
+ return True
+
+ ## __GetDefineStatements() method
+ #
+ # Get define statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatements(self, Obj):
+ while self.__GetDefineStatement( Obj):
+ pass
+
+ ## __GetDefineStatement() method
+ #
+ # Get define statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatement(self, Obj):
+ if self.__IsKeyword("DEFINE"):
+ self.__GetNextToken()
+ Macro = self.__Token
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ Macro = '$(' + Macro + ')'
+ Obj.DefineVarDict[Macro] = Value
+ return True
+
+ return False
+
+ ## __GetSetStatements() method
+ #
+ # Get set statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatements(self, Obj):
+ while self.__GetSetStatement(Obj):
+ pass
+
+ ## __GetSetStatement() method
+ #
+ # Get set statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatement(self, Obj):
+ if self.__IsKeyword("SET"):
+ PcdPair = self.__GetNextPcdName()
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ if Value.startswith("{"):
+ # deal with value with {}
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+ Value += self.__SkippedChars
+
+ Obj.SetVarDict[PcdPair] = Value
+ self.Profile.PcdDict[PcdPair] = Value
+ return True
+
+ return False
+
    ## __GetRegionLayout() method
    #
    # Get region layout for FD.  Grammar (after the leading hex offset):
    #   <Offset> | <Size> [<PcdOffset> [| <PcdSize>]] [SET ...]
    #   [FV = ... | FILE = ... | DATA = {...}]
    #
    # @param self The object pointer
    # @param Fd for whom region is got
    # @retval True Successfully find
    # @retval False Not able to find
    #
    def __GetRegionLayout(self, Fd):
        # A region always starts with a hex offset; anything else means
        # there are no more regions in this FD.
        if not self.__GetNextHexNumber():
            return False

        RegionObj = Region.Region()
        RegionObj.Offset = long(self.__Token, 0)
        Fd.RegionList.append(RegionObj)

        if not self.__IsToken( "|"):
            raise Warning("expected '|'", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Region Size", self.FileName, self.CurrentLineNumber)
        RegionObj.Size = long(self.__Token, 0)

        # A bare "<offset> | <size>" region with no body is legal.
        if not self.__GetNextWord():
            return True

        # Lookahead: a word that is none of the body keywords must be a
        # PCD name recording the region's absolute offset (and optionally
        # its size).  Fd.BaseAddress was set by __GetTokenStatements.
        if not self.__Token in ("SET", "FV", "FILE", "DATA"):
            self.__UndoToken()
            RegionObj.PcdOffset = self.__GetNextPcdName()
            self.Profile.PcdDict[RegionObj.PcdOffset] = "0x%08X" % (RegionObj.Offset + long(Fd.BaseAddress, 0))
            if self.__IsToken( "|"):
                RegionObj.PcdSize = self.__GetNextPcdName()
                self.Profile.PcdDict[RegionObj.PcdSize] = "0x%08X" % RegionObj.Size

            if not self.__GetNextWord():
                return True

        # Optional SET statements precede the region data.
        if self.__Token == "SET":
            self.__UndoToken()
            self.__GetSetStatements( RegionObj)
            if not self.__GetNextWord():
                return True

        # Dispatch on the region data keyword; anything else is parsed
        # as DATA (which raises its own error if the keyword is wrong).
        if self.__Token == "FV":
            self.__UndoToken()
            self.__GetRegionFvType( RegionObj)

        elif self.__Token == "FILE":
            self.__UndoToken()
            self.__GetRegionFileType( RegionObj)

        else:
            self.__UndoToken()
            self.__GetRegionDataType( RegionObj)

        return True
+
+ ## __GetRegionFvType() method
+ #
+ # Get region fv data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionFvType(self, RegionObj):
+
+ if not self.__IsKeyword( "FV"):
+ raise Warning("expected Keyword 'FV'", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "FV"
+ RegionObj.RegionDataList.append(self.__Token)
+
+ while self.__IsKeyword( "FV"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
+ ## __GetRegionFileType() method
+ #
+ # Get region file data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionFileType(self, RegionObj):
+
+ if not self.__IsKeyword( "FILE"):
+ raise Warning("expected Keyword 'FILE'", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected File name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionType = "FILE"
+ RegionObj.RegionDataList.append( self.__Token)
+
+ while self.__IsKeyword( "FILE"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FILE name", self.FileName, self.CurrentLineNumber)
+
+ RegionObj.RegionDataList.append(self.__Token)
+
+ ## __GetRegionDataType() method
+ #
+ # Get region array data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionDataType(self, RegionObj):
+
+ if not self.__IsKeyword( "DATA"):
+ raise Warning("expected Region Data type", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ if not self.__GetNextHexNumber():
+ raise Warning("Invalid Hex number", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionType = "DATA"
+ RegionObj.RegionDataList.append( DataString)
+
+ while self.__IsKeyword( "DATA"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ self.__GetNextHexNumber()
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionDataList.append( DataString)
+
    ## __GetFv() method
    #
    # Get FV section contents and store its data into FV dictionary of self.Profile
    #
    # @param self The object pointer
    # @retval True Successfully find a FV
    # @retval False Not able to find a FV
    #
    def __GetFv(self):
        if not self.__GetNextToken():
            return False

        # Lookahead: a section header other than [FV.] ends the FV list;
        # only the section types that legally follow [FV.] are accepted.
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[FV."):
            if not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        # Re-read the header strictly as "[FV." (case-insensitive match).
        self.__UndoToken()
        if not self.__IsToken("[FV.", True):
            # FileLineTuple is only used by the commented-out debug print below.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
            #        % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("Unknown Keyword '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)

        # FV names are stored and looked up upper-cased.
        FvName = self.__GetUiName()
        self.CurrentFvName = FvName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)

        FvObj = Fv.FV()
        FvObj.UiFvName = self.CurrentFvName
        self.Profile.FvDict[self.CurrentFvName] = FvObj

        Status = self.__GetCreateFile(FvObj)
        if not Status:
            raise Warning("FV name error", self.FileName, self.CurrentLineNumber)

        # Optional header clauses, each parser consuming what it recognizes.
        self.__GetDefineStatements(FvObj)

        self.__GetAddressStatements(FvObj)

        self.__GetBlockStatement(FvObj)

        self.__GetSetStatements(FvObj)

        self.__GetFvAlignment(FvObj)

        self.__GetFvAttributes(FvObj)

        self.__GetFvNameGuid(FvObj)

        # Called twice: __GetAprioriSection parses one APRIORI (PEI or DXE)
        # section per call, and an FV may contain up to two of them.
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())

        # The FV body is any mix of INF and FILE statements.
        while True:
            isInf = self.__GetInfStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            isFile = self.__GetFileStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            if not isInf and not isFile:
                break

        return True
+
+ ## __GetFvAlignment() method
+ #
+ # Get alignment for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom alignment is got
+ # @retval True Successfully find a alignment statement
+ # @retval False Not able to find a alignment statement
+ #
+ def __GetFvAlignment(self, Obj):
+
+ if not self.__IsKeyword( "FvAlignment"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value", self.FileName, self.CurrentLineNumber)
+
+ if self.__Token.upper() not in ("1", "2", "4", "8", "16", "32", "64", "128", "256", "512", \
+ "1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K", \
+ "1M", "2M", "4M", "8M", "16M", "32M", "64M", "128M", "256M", "512M", \
+ "1G", "2G"):
+ raise Warning("Unknown alignment value '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ Obj.FvAlignment = self.__Token
+ return True
+
+ ## __GetFvAttributes() method
+ #
+ # Get attributes for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom attribute is got
+ # @retval None
+ #
+ def __GetFvAttributes(self, FvObj):
+
+ while self.__GetNextWord():
+ name = self.__Token
+ if name not in ("ERASE_POLARITY", "MEMORY_MAPPED", \
+ "STICKY_WRITE", "LOCK_CAP", "LOCK_STATUS", "WRITE_ENABLED_CAP", \
+ "WRITE_DISABLED_CAP", "WRITE_STATUS", "READ_ENABLED_CAP", \
+ "READ_DISABLED_CAP", "READ_STATUS", "READ_LOCK_CAP", \
+ "READ_LOCK_STATUS", "WRITE_LOCK_CAP", "WRITE_LOCK_STATUS", \
+ "WRITE_POLICY_RELIABLE"):
+ self.__UndoToken()
+ return
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0)", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvAttributeDict[name] = self.__Token
+
+ return
+
+ ## __GetFvNameGuid() method
+ #
+ # Get FV GUID for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom GUID is got
+ # @retval None
+ #
+ def __GetFvNameGuid(self, FvObj):
+
+ if not self.__IsKeyword( "FvNameGuid"):
+ return
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextGuid():
+ raise Warning("expected FV GUID value", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvNameGuid = self.__Token
+
+ return
+
+ ## __GetAprioriSection() method
+ #
+ # Get token statements
+ #
+ # @param self The object pointer
+ # @param FvObj for whom apriori is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find apriori statement
+ # @retval False Not able to find apriori statement
+ #
+ def __GetAprioriSection(self, FvObj, MacroDict = {}):
+
+ if not self.__IsKeyword( "APRIORI"):
+ return False
+
+ if not self.__IsKeyword("PEI") and not self.__IsKeyword("DXE"):
+ raise Warning("expected Apriori file type", self.FileName, self.CurrentLineNumber)
+ AprType = self.__Token
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+
+ AprSectionObj = AprioriSection.AprioriSection()
+ AprSectionObj.AprioriType = AprType
+
+ self.__GetDefineStatements(AprSectionObj)
+ MacroDict.update(AprSectionObj.DefineVarDict)
+
+ while True:
+ IsInf = self.__GetInfStatement( AprSectionObj, MacroDict = MacroDict)
+ IsFile = self.__GetFileStatement( AprSectionObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+
+ FvObj.AprioriSectionList.append(AprSectionObj)
+ return True
+
+ ## __GetInfStatement() method
+ #
+ # Get INF statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom inf statement is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find inf statement
+ # @retval False Not able to find inf statement
+ #
+ def __GetInfStatement(self, Obj, ForCapsule = False, MacroDict = {}):
+
+ if not self.__IsKeyword( "INF"):
+ return False
+
+ ffsInf = FfsInfStatement.FfsInfStatement()
+ self.__GetInfOptions( ffsInf)
+
+ if not self.__GetNextToken():
+ raise Warning("expected INF file path", self.FileName, self.CurrentLineNumber)
+ ffsInf.InfFileName = self.__Token
+
+# if ffsInf.InfFileName.find('$') >= 0:
+# ffsInf.InfFileName = GenFdsGlobalVariable.GenFdsGlobalVariable.MacroExtend(ffsInf.InfFileName, MacroDict)
+
+ if not ffsInf.InfFileName in self.Profile.InfList:
+ self.Profile.InfList.append(ffsInf.InfFileName)
+
+ if self.__IsToken('|'):
+ if self.__IsKeyword('RELOCS_STRIPPED'):
+ ffsInf.KeepReloc = False
+ elif self.__IsKeyword('RELOCS_RETAINED'):
+ ffsInf.KeepReloc = True
+ else:
+ raise Warning("Unknown reloc strip flag '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+
+ if ForCapsule:
+ capsuleFfs = CapsuleData.CapsuleFfs()
+ capsuleFfs.Ffs = ffsInf
+ Obj.CapsuleDataList.append(capsuleFfs)
+ else:
+ Obj.FfsList.append(ffsInf)
+ return True
+
    ## __GetInfOptions() method
    #
    # Get the optional clauses that may precede an INF path:
    # RuleOverride, VERSION, UI, USE, and a comma-separated list of
    # Target_Tag_Arch key strings.
    #
    # @param self The object pointer
    # @param FfsInfObj for whom option is got
    #
    def __GetInfOptions(self, FfsInfObj):

        if self.__IsKeyword( "RuleOverride"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Rule name", self.FileName, self.CurrentLineNumber)
            FfsInfObj.Rule = self.__Token

        if self.__IsKeyword( "VERSION"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Version", self.FileName, self.CurrentLineNumber)

            # A quoted string is preferred; otherwise the raw token is used.
            if self.__GetStringData():
                FfsInfObj.Version = self.__Token

        if self.__IsKeyword( "UI"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected UI name", self.FileName, self.CurrentLineNumber)

            # A quoted string is preferred; otherwise the raw token is used.
            if self.__GetStringData():
                FfsInfObj.Ui = self.__Token

        if self.__IsKeyword( "USE"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected ARCH name", self.FileName, self.CurrentLineNumber)
            FfsInfObj.UseArch = self.__Token


        # Lookahead for an optional comma-separated KeyString list of the
        # form Target_Tag_Arch; a non-matching token is pushed back so the
        # caller can read it as the INF path.
        if self.__GetNextToken():
            p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if p.match(self.__Token):
                FfsInfObj.KeyStringList.append(self.__Token)
                if not self.__IsToken(","):
                    return
            else:
                self.__UndoToken()
                return

            # Only reached when the first KeyString was followed by a comma;
            # p is still in scope from the branch above.
            while self.__GetNextToken():
                if not p.match(self.__Token):
                    raise Warning("expected KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
                FfsInfObj.KeyStringList.append(self.__Token)

                if not self.__IsToken(","):
                    break
+
    ## __GetFileStatement() method
    #
    # Get FILE statements
    #
    # @param self The object pointer
    # @param Obj for whom FILE statement is got
    # @param ForCapsule True when Obj is a capsule (wrap in CapsuleFfs)
    # @param MacroDict dictionary used to replace macro
    # @retval True Successfully find FILE statement
    # @retval False Not able to find FILE statement
    #
    def __GetFileStatement(self, Obj, ForCapsule = False, MacroDict = {}):

        if not self.__IsKeyword( "FILE"):
            return False

        FfsFileObj = FfsFileStatement.FileStatement()

        if not self.__GetNextWord():
            raise Warning("expected FFS type", self.FileName, self.CurrentLineNumber)
        FfsFileObj.FvFileType = self.__Token

        if not self.__IsToken( "="):
            raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

        # The file is named either by a literal GUID or by a
        # "PCD(TokenSpace.TokenName)" reference; the latter is normalized
        # into the token string before being stored as the name GUID.
        if not self.__GetNextGuid():
            if not self.__GetNextWord():
                raise Warning("expected File GUID", self.FileName, self.CurrentLineNumber)
            if self.__Token == 'PCD':
                if not self.__IsToken( "("):
                    raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                PcdPair = self.__GetNextPcdName()
                if not self.__IsToken( ")"):
                    raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                # PcdPair is (TokenCName, TokenSpaceCName); rebuild as
                # PCD(Space.Token) in self.__Token for the assignment below.
                self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'

        FfsFileObj.NameGuid = self.__Token

        # Parse the "{ ... }" body (or FV=/FD=/file-name forms).
        self.__GetFilePart( FfsFileObj, MacroDict.copy())

        # A capsule wraps the file in a CapsuleFfs; an FV takes it directly.
        if ForCapsule:
            capsuleFfs = CapsuleData.CapsuleFfs()
            capsuleFfs.Ffs = FfsFileObj
            Obj.CapsuleDataList.append(capsuleFfs)
        else:
            Obj.FfsList.append(FfsFileObj)

        return True
+
+ ## __FileCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a file type.
+ #
+ # @param self The object pointer
+ # @param FileType The file type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __FileCouldHaveRelocFlag (self, FileType):
+ if FileType in ('SEC', 'PEI_CORE', 'PEIM', 'PEI_DXE_COMBO'):
+ return True
+ else:
+ return False
+
+ ## __SectionCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a section type.
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __SectionCouldHaveRelocFlag (self, SectionType):
+ if SectionType in ('TE', 'PE32'):
+ return True
+ else:
+ return False
+
    ## __GetFilePart() method
    #
    # Get components for FILE statement: options, then a brace-enclosed
    # body that is either "FV = name", "FD = name", section data
    # (DEFINE/APRIORI/SECTION), or a plain file name.
    #
    # @param self The object pointer
    # @param FfsFileObj for whom component is got
    # @param MacroDict dictionary used to replace macro
    #
    def __GetFilePart(self, FfsFileObj, MacroDict = {}):

        self.__GetFileOpts( FfsFileObj)

        if not self.__IsToken("{"):
#            if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
#                if self.__FileCouldHaveRelocFlag(FfsFileObj.FvFileType):
#                    if self.__Token == 'RELOCS_STRIPPED':
#                        FfsFileObj.KeepReloc = False
#                    else:
#                        FfsFileObj.KeepReloc = True
#                else:
#                    raise Warning("File type %s could not have reloc strip flag%d" % (FfsFileObj.FvFileType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
#
#            if not self.__IsToken("{"):
            raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected File name or section data", self.FileName, self.CurrentLineNumber)

        # Dispatch on the first token inside the braces.
        if self.__Token == "FV":
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FV name", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FvName = self.__Token

        elif self.__Token == "FD":
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FD name", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FdName = self.__Token

        elif self.__Token in ("DEFINE", "APRIORI", "SECTION"):
            # Section data: push the keyword back and let the section
            # parser consume the whole body.
            self.__UndoToken()
            self.__GetSectionData( FfsFileObj, MacroDict)
        else:
            # Anything else is taken as a literal file name.
            FfsFileObj.FileName = self.__Token

        if not self.__IsToken( "}"):
            raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+
    ## __GetFileOpts() method
    #
    # Get options for FILE statement: an optional comma-separated list of
    # Target_Tag_Arch key strings, then optional FIXED, CHECKSUM and
    # Align clauses.
    #
    # @param self The object pointer
    # @param FfsFileObj for whom options is got
    #
    def __GetFileOpts(self, FfsFileObj):

        # Lookahead for key strings; a non-matching token is pushed back.
        if self.__GetNextToken():
            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if Pattern.match(self.__Token):
                FfsFileObj.KeyStringList.append(self.__Token)
                # Additional key strings only follow when separated by commas.
                if self.__IsToken(","):
                    while self.__GetNextToken():
                        if not Pattern.match(self.__Token):
                            raise Warning("expected KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
                        FfsFileObj.KeyStringList.append(self.__Token)

                        if not self.__IsToken(","):
                            break

            else:
                self.__UndoToken()

        if self.__IsKeyword( "FIXED", True):
            FfsFileObj.Fixed = True

        if self.__IsKeyword( "CHECKSUM", True):
            FfsFileObj.CheckSum = True

        # __GetAlignment leaves the alignment value in self.__Token.
        if self.__GetAlignment():
            FfsFileObj.Alignment = self.__Token
+
+ ## __GetAlignment() method
+ #
+ # Return the alignment value
+ #
+ # @param self The object pointer
+ # @retval True Successfully find alignment
+ # @retval False Not able to find alignment
+ #
+ def __GetAlignment(self):
+ if self.__IsKeyword( "Align", True):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value", self.FileName, self.CurrentLineNumber)
+ return True
+
+ return False
+
    ## __GetSectionData() method
    #
    # Get section data for FILE statement.
    # (Header previously mislabeled this method as __GetFilePart().)
    #
    # @param self The object pointer
    # @param FfsFileObj for whom section is got
    # @param MacroDict dictionary used to replace macro; copied into a
    #                  local dict, so the caller's mapping (and the {}
    #                  default) is never mutated here
    #
    def __GetSectionData(self, FfsFileObj, MacroDict = {}):
        Dict = {}
        Dict.update(MacroDict)

        self.__GetDefineStatements(FfsFileObj)

        # File-local DEFINEs extend the macro dictionary.
        Dict.update(FfsFileObj.DefineVarDict)
        # Called twice: one APRIORI (PEI or DXE) section per call, and up
        # to two may be present.
        self.__GetAprioriSection(FfsFileObj, Dict.copy())
        self.__GetAprioriSection(FfsFileObj, Dict.copy())

        # The body is any mix of leaf and encapsulation sections.
        while True:
            IsLeafSection = self.__GetLeafSection(FfsFileObj, Dict)
            IsEncapSection = self.__GetEncapsulationSec(FfsFileObj)
            if not IsLeafSection and not IsEncapSection:
                break
+
+ ## __GetLeafSection() method
+ #
+ # Get leaf section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetLeafSection(self, Obj, MacroDict = {}):
+
+ OldPos = self.GetFileBufferPos()
+
+ if not self.__IsKeyword( "SECTION"):
+ if len(Obj.SectionList) == 0:
+ raise Warning("expected SECTION", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ BuildNum = None
+ if self.__IsKeyword( "BUILD_NUM"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Build number value", self.FileName, self.CurrentLineNumber)
+
+ BuildNum = self.__Token
+
+ if self.__IsKeyword( "VERSION"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected version", self.FileName, self.CurrentLineNumber)
+ VerSectionObj = VerSection.VerSection()
+ VerSectionObj.Alignment = AlignValue
+ VerSectionObj.BuildNum = BuildNum
+ if self.__GetStringData():
+ VerSectionObj.StringData = self.__Token
+ else:
+ VerSectionObj.FileName = self.__Token
+ Obj.SectionList.append(VerSectionObj)
+
+ elif self.__IsKeyword( "UI"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected UI", self.FileName, self.CurrentLineNumber)
+ UiSectionObj = UiSection.UiSection()
+ UiSectionObj.Alignment = AlignValue
+ if self.__GetStringData():
+ UiSectionObj.StringData = self.__Token
+ else:
+ UiSectionObj.FileName = self.__Token
+ Obj.SectionList.append(UiSectionObj)
+
+ elif self.__IsKeyword( "FV_IMAGE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected FV name or FV file path", self.FileName, self.CurrentLineNumber)
+
+ FvName = self.__Token
+ FvObj = None
+
+ if self.__IsToken( "{"):
+ FvObj = Fv.FV()
+ FvObj.UiFvName = FvName.upper()
+ self.__GetDefineStatements(FvObj)
+ MacroDict.update(FvObj.DefineVarDict)
+ self.__GetBlockStatement(FvObj)
+ self.__GetSetStatements(FvObj)
+ self.__GetFvAlignment(FvObj)
+ self.__GetFvAttributes(FvObj)
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+
+ while True:
+ IsInf = self.__GetInfStatement(FvObj, MacroDict.copy())
+ IsFile = self.__GetFileStatement(FvObj, MacroDict.copy())
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+
+ FvImageSectionObj = FvImageSection.FvImageSection()
+ FvImageSectionObj.Alignment = AlignValue
+ if FvObj != None:
+ FvImageSectionObj.Fv = FvObj
+ FvImageSectionObj.FvName = None
+ else:
+ FvImageSectionObj.FvName = FvName.upper()
+ FvImageSectionObj.FvFileName = FvName
+
+ Obj.SectionList.append(FvImageSectionObj)
+
+ elif self.__IsKeyword("PEI_DEPEX_EXP") or self.__IsKeyword("DXE_DEPEX_EXP") or self.__IsKeyword("SMM_DEPEX_EXP"):
+ DepexSectionObj = DepexSection.DepexSection()
+ DepexSectionObj.Alignment = AlignValue
+ DepexSectionObj.DepexType = self.__Token
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected Depex expression ending '}'", self.FileName, self.CurrentLineNumber)
+
+ DepexSectionObj.Expression = self.__SkippedChars.rstrip('}')
+ Obj.SectionList.append(DepexSectionObj)
+
+ else:
+
+ if not self.__GetNextWord():
+ raise Warning("expected section type", self.FileName, self.CurrentLineNumber)
+
+ # Encapsulation section appear, UndoToken and return
+ if self.__Token == "COMPRESS" or self.__Token == "GUIDED":
+ self.SetFileBufferPos(OldPos)
+ return False
+
+ if self.__Token not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+ "UI", "VERSION", "PEI_DEPEX", "SUBTYPE_GUID", "SMM_DEPEX"):
+ raise Warning("Unknown section type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ # DataSection
+ DataSectionObj = DataSection.DataSection()
+ DataSectionObj.Alignment = AlignValue
+ DataSectionObj.SecType = self.__Token
+
+ if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
+ if self.__FileCouldHaveRelocFlag(Obj.FvFileType) and self.__SectionCouldHaveRelocFlag(DataSectionObj.SecType):
+ if self.__Token == 'RELOCS_STRIPPED':
+ DataSectionObj.KeepReloc = False
+ else:
+ DataSectionObj.KeepReloc = True
+ else:
+ raise Warning("File type %s, section type %s, could not have reloc strip flag%d" % (Obj.FvFileType, DataSectionObj.SecType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if self.__IsToken("="):
+ if not self.__GetNextToken():
+ raise Warning("expected section file path", self.FileName, self.CurrentLineNumber)
+ DataSectionObj.SectFileName = self.__Token
+ else:
+ if not self.__GetCglSection(DataSectionObj):
+ return False
+
+ Obj.SectionList.append(DataSectionObj)
+
+ return True
+
+ ## __GetCglSection() method
+ #
+ # Get compressed or GUIDed section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param AlignValue alignment value for complex section
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetCglSection(self, Obj, AlignValue = None):
+
+ if self.__IsKeyword( "COMPRESS"):
+ type = "PI_STD"
+ if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
+ type = self.__Token
+
+ if not self.__IsToken("{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+
+ CompressSectionObj = CompressSection.CompressSection()
+ CompressSectionObj.Alignment = AlignValue
+ CompressSectionObj.CompType = type
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(CompressSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(CompressSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(CompressSectionObj)
+
+# else:
+# raise Warning("Compress type not known")
+
+ return True
+
+ elif self.__IsKeyword( "GUIDED"):
+ GuidValue = None
+ if self.__GetNextGuid():
+ GuidValue = self.__Token
+
+ AttribDict = self.__GetGuidAttrib()
+ if not self.__IsToken("{"):
+ raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
+ GuidSectionObj = GuidSection.GuidSection()
+ GuidSectionObj.Alignment = AlignValue
+ GuidSectionObj.NameGuid = GuidValue
+ GuidSectionObj.SectionType = "GUIDED"
+ GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+ GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(GuidSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(GuidSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(GuidSectionObj)
+
+ return True
+
+ return False
+
    ## __GetGuidAttrib() method
+ #
+ # Get attributes for GUID section
+ #
+ # @param self The object pointer
+ # @retval AttribDict Dictionary of key-value pair of section attributes
+ #
+ def __GetGuidAttrib(self):
+
+ AttribDict = {}
+ AttribDict["PROCESSING_REQUIRED"] = False
+ AttribDict["AUTH_STATUS_VALID"] = False
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0)", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='")
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0)", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ return AttribDict
+
+ ## __GetEncapsulationSec() method
+ #
+ # Get encapsulation section for FILE
+ #
+ # @param self The object pointer
+ # @param FfsFile for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetEncapsulationSec(self, FfsFileObj):
+
+ OldPos = self.GetFileBufferPos()
+ if not self.__IsKeyword( "SECTION"):
+ if len(FfsFileObj.SectionList) == 0:
+ raise Warning("expected SECTION", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ if not self.__GetCglSection(FfsFileObj, AlignValue):
+ self.SetFileBufferPos(OldPos)
+ return False
+ else:
+ return True
+
+ ## __GetCapsule() method
+ #
+ # Get capsule section contents and store its data into capsule list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a capsule
+ # @retval False Not able to find a capsule
+ #
+ def __GetCapsule(self):
+
+ if not self.__GetNextToken():
+ return False
+
+ S = self.__Token.upper()
+ if S.startswith("[") and not S.startswith("[CAPSULE."):
+ if not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+ self.__UndoToken()
+ return False
+
+ self.__UndoToken()
+ if not self.__IsToken("[CAPSULE.", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning("expected [Capsule.]", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj = Capsule.Capsule()
+
+ CapsuleName = self.__GetUiName()
+ if not CapsuleName:
+ raise Warning("expected capsule name", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj.UiCapsuleName = CapsuleName.upper()
+
+ if not self.__IsToken( "]"):
+ raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
+
+ if self.__IsKeyword("CREATE_FILE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected file name", self.FileName, self.CurrentLineNumber)
+
+ CapsuleObj.CreateFile = self.__Token
+
+ self.__GetCapsuleStatements(CapsuleObj)
+ self.Profile.CapsuleList.append(CapsuleObj)
+ return True
+
+ ## __GetCapsuleStatements() method
+ #
+ # Get statements for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom statements are got
+ #
+ def __GetCapsuleStatements(self, Obj):
+ self.__GetCapsuleTokens(Obj)
+ self.__GetDefineStatements(Obj)
+ self.__GetSetStatements(Obj)
+
+ self.__GetCapsuleData(Obj)
+
+ ## __GetCapsuleStatements() method
+ #
+ # Get token statements for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom token statements are got
+ #
+ def __GetCapsuleTokens(self, Obj):
+
+ if not self.__IsKeyword("CAPSULE_GUID"):
+ raise Warning("expected 'CAPSULE_GUID'", self.FileName, self.CurrentLineNumber)
+
+ while self.__CurrentLine().find("=") != -1:
+ NameValue = self.__CurrentLine().split("=")
+ Obj.TokensDict[NameValue[0].strip()] = NameValue[1].strip()
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+
+ ## __GetCapsuleData() method
+ #
+ # Get capsule data for capsule
+ #
+ # @param self The object pointer
+ # @param Obj for whom capsule data are got
+ #
+ def __GetCapsuleData(self, Obj):
+
+ while True:
+ IsInf = self.__GetInfStatement(Obj, True)
+ IsFile = self.__GetFileStatement(Obj, True)
+ IsFv = self.__GetFvStatement(Obj)
+ if not IsInf and not IsFile and not IsFv:
+ break
+
+ ## __GetFvStatement() method
+ #
+ # Get FV for capsule
+ #
+ # @param self The object pointer
+ # @param CapsuleObj for whom FV is got
+ # @retval True Successfully find a FV statement
+ # @retval False Not able to find a FV statement
+ #
+ def __GetFvStatement(self, CapsuleObj):
+
+ if not self.__IsKeyword("FV"):
+ return False
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected FV name", self.FileName, self.CurrentLineNumber)
+
+ CapsuleFv = CapsuleData.CapsuleFv()
+ CapsuleFv.FvName = self.__Token
+ CapsuleObj.CapsuleDataList.append(CapsuleFv)
+ return True
+
+ ## __GetRule() method
+ #
+ # Get Rule section contents and store its data into rule list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a Rule
+ # @retval False Not able to find a Rule
+ #
+ def __GetRule(self):
+
+ if not self.__GetNextToken():
+ return False
+
+ S = self.__Token.upper()
+ if S.startswith("[") and not S.startswith("[RULE."):
+ if not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+ self.__UndoToken()
+ return False
+ self.__UndoToken()
+ if not self.__IsToken("[Rule.", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning("expected [Rule.]", self.FileName, self.CurrentLineNumber)
+
+ if not self.__SkipToToken("."):
+ raise Warning("expected '.'", self.FileName, self.CurrentLineNumber)
+
+ Arch = self.__SkippedChars.rstrip(".")
+ if Arch.upper() not in ("IA32", "X64", "IPF", "EBC", "ARM", "COMMON"):
+ raise Warning("Unknown Arch '%s'" % Arch, self.FileName, self.CurrentLineNumber)
+
+ ModuleType = self.__GetModuleType()
+
+ TemplateName = ""
+ if self.__IsToken("."):
+ if not self.__GetNextWord():
+ raise Warning("expected template name", self.FileName, self.CurrentLineNumber)
+ TemplateName = self.__Token
+
+ if not self.__IsToken( "]"):
+ raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
+
+ RuleObj = self.__GetRuleFileStatements()
+ RuleObj.Arch = Arch.upper()
+ RuleObj.ModuleType = ModuleType
+ RuleObj.TemplateName = TemplateName
+ if TemplateName == '' :
+ self.Profile.RuleDict['RULE' + \
+ '.' + \
+ Arch.upper() + \
+ '.' + \
+ ModuleType.upper() ] = RuleObj
+ else :
+ self.Profile.RuleDict['RULE' + \
+ '.' + \
+ Arch.upper() + \
+ '.' + \
+ ModuleType.upper() + \
+ '.' + \
+ TemplateName.upper() ] = RuleObj
+# self.Profile.RuleList.append(rule)
+ return True
+
+ ## __GetModuleType() method
+ #
+ # Return the module type
+ #
+ # @param self The object pointer
+ # @retval string module type
+ #
+ def __GetModuleType(self):
+
+ if not self.__GetNextWord():
+ raise Warning("expected Module type", self.FileName, self.CurrentLineNumber)
+ if self.__Token.upper() not in ("SEC", "PEI_CORE", "PEIM", "DXE_CORE", \
+ "DXE_DRIVER", "DXE_SAL_DRIVER", \
+ "DXE_SMM_DRIVER", "DXE_RUNTIME_DRIVER", \
+ "UEFI_DRIVER", "UEFI_APPLICATION", "USER_DEFINED", "DEFAULT", "BASE", \
+ "SECURITY_CORE", "COMBINED_PEIM_DRIVER", "PIC_PEIM", "RELOCATABLE_PEIM", \
+ "PE32_PEIM", "BS_DRIVER", "RT_DRIVER", "SAL_RT_DRIVER", "APPLICATION", "ACPITABLE", "SMM_DRIVER", "SMM_CORE"):
+ raise Warning("Unknown Module type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ return self.__Token
+
+ ## __GetFileExtension() method
+ #
+ # Return the file extension
+ #
+ # @param self The object pointer
+ # @retval string file name extension
+ #
+ def __GetFileExtension(self):
+ if not self.__IsToken("."):
+ raise Warning("expected '.'", self.FileName, self.CurrentLineNumber)
+
+ Ext = ""
+ if self.__GetNextToken():
+ Pattern = re.compile(r'([a-zA-Z][a-zA-Z0-9]*)')
+ if Pattern.match(self.__Token):
+ Ext = self.__Token
+ return '.' + Ext
+ else:
+ raise Warning("Unknown file extension '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+
+ else:
+ raise Warning("expected file extension", self.FileName, self.CurrentLineNumber)
+
+ ## __GetRuleFileStatement() method
+ #
+ # Get rule contents
+ #
+ # @param self The object pointer
+ # @retval Rule Rule object
+ #
    def __GetRuleFileStatements(self):
        """Parse the FILE statement body of a [Rule.*] section.

        Returns a RuleComplexFile ('{...}' body), or a RuleSimpleFile
        (either '| <ext>' form or an explicit leaf-section + file name form).
        Raises Warning on any grammar violation.
        """
        if not self.__IsKeyword("FILE"):
            raise Warning("expected FILE", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextWord():
            raise Warning("expected FFS type", self.FileName, self.CurrentLineNumber)

        Type = self.__Token.strip().upper()
        if Type not in ("RAW", "FREEFORM", "SEC", "PEI_CORE", "PEIM",\
                        "PEI_DXE_COMBO", "DRIVER", "DXE_CORE", "APPLICATION", "FV_IMAGE", "SMM_DXE_COMBO", "SMM", "SMM_CORE"):
            raise Warning("Unknown FV type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)

        if not self.__IsToken("="):
            raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

        # The file GUID is either the literal $(NAMED_GUID), a word, or a
        # PCD(<TokenSpace>.<TokenName>) reference that is re-serialized below.
        if not self.__IsKeyword("$(NAMED_GUID)"):
            if not self.__GetNextWord():
                raise Warning("expected $(NAMED_GUID)", self.FileName, self.CurrentLineNumber)
            if self.__Token == 'PCD':
                if not self.__IsToken( "("):
                    raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                PcdPair = self.__GetNextPcdName()
                if not self.__IsToken( ")"):
                    raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'

        NameGuid = self.__Token

        # KeepReloc stays None when no RELOCS_* flag is given, so the Rule
        # object's own default applies.
        KeepReloc = None
        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
            if self.__FileCouldHaveRelocFlag(Type):
                if self.__Token == 'RELOCS_STRIPPED':
                    KeepReloc = False
                else:
                    KeepReloc = True
            else:
                raise Warning("File type %s could not have reloc strip flag%d" % (Type, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

        # Optional comma-separated list of Target_Tag_Arch key strings.
        KeyStringList = []
        if self.__GetNextToken():
            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if Pattern.match(self.__Token):
                KeyStringList.append(self.__Token)
                if self.__IsToken(","):
                    while self.__GetNextToken():
                        if not Pattern.match(self.__Token):
                            raise Warning("expected KeyString \"Target_Tag_Arch\"", self.FileName, self.CurrentLineNumber)
                        KeyStringList.append(self.__Token)

                        if not self.__IsToken(","):
                            break

            else:
                # Token was not a key string; push it back for later parsing.
                self.__UndoToken()


        Fixed = False
        if self.__IsKeyword("Fixed", True):
            Fixed = True

        CheckSum = False
        if self.__IsKeyword("CheckSum", True):
            CheckSum = True

        AlignValue = ""
        if self.__GetAlignment():
            if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
            AlignValue = self.__Token

        if self.__IsToken("{"):
            # Complex file rule expected: a '{...}' body of nested sections.
            Rule = RuleComplexFile.RuleComplexFile()
            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc

            # Collect nested sections until neither parser matches.
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(Rule)
                IsLeaf = self.__GetEfiSection(Rule)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken("}"):
                raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)

            return Rule

        elif self.__IsToken("|"):
            # Ext rule expected: file selected purely by extension.
            Ext = self.__GetFileExtension()

            Rule = RuleSimpleFile.RuleSimpleFile()

            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.FileExtension = Ext
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc

            return Rule

        else:
            # Simple file rule expected: explicit leaf section type + file name.
            if not self.__GetNextWord():
                raise Warning("expected leaf section type", self.FileName, self.CurrentLineNumber)

            SectionName = self.__Token

            if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
                                   "UI", "PEI_DEPEX", "VERSION", "SUBTYPE_GUID", "SMM_DEPEX"):
                raise Warning("Unknown leaf section name '%s'" % SectionName, self.FileName, self.CurrentLineNumber)


            # NOTE(review): these re-parse Fixed/CheckSum/alignment after the
            # section name, overwriting the values read earlier if repeated.
            if self.__IsKeyword("Fixed", True):
                Fixed = True

            if self.__IsKeyword("CheckSum", True):
                CheckSum = True

            if self.__GetAlignment():
                if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                    raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
                AlignValue = self.__Token

            if not self.__GetNextToken():
                raise Warning("expected File name", self.FileName, self.CurrentLineNumber)

            Rule = RuleSimpleFile.RuleSimpleFile()
            Rule.SectionType = SectionName
            Rule.FvFileType = Type
            Rule.NameGuid = NameGuid
            Rule.Alignment = AlignValue
            Rule.CheckSum = CheckSum
            Rule.Fixed = Fixed
            Rule.FileName = self.__Token
            Rule.KeyStringList = KeyStringList
            if KeepReloc != None:
                Rule.KeepReloc = KeepReloc
            return Rule
+
+ ## __GetEfiSection() method
+ #
+ # Get section list for Rule
+ #
+ # @param self The object pointer
+ # @param Obj for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
    def __GetEfiSection(self, Obj):
        """Parse one leaf section of a rule body and append it to Obj.SectionList.

        Returns True when a section was parsed; False (with the token stream
        rewound) when the next word is not a known leaf-section name.
        """
        OldPos = self.GetFileBufferPos()
        if not self.__GetNextWord():
            return False
        SectionName = self.__Token

        if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
                               "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
            self.__UndoToken()
            return False

        if SectionName == "FV_IMAGE":
            FvImageSectionObj = FvImageSection.FvImageSection()
            # Optional second 'FV_IMAGE' keyword is tolerated and ignored.
            if self.__IsKeyword("FV_IMAGE"):
                pass
            if self.__IsToken( "{"):
                # Inline FV definition.
                FvObj = Fv.FV()
                self.__GetDefineStatements(FvObj)
                self.__GetBlockStatement(FvObj)
                self.__GetSetStatements(FvObj)
                self.__GetFvAlignment(FvObj)
                self.__GetFvAttributes(FvObj)
                # Called twice: an FV may carry up to two apriori sections.
                self.__GetAprioriSection(FvObj)
                self.__GetAprioriSection(FvObj)

                while True:
                    IsInf = self.__GetInfStatement(FvObj)
                    IsFile = self.__GetFileStatement(FvObj)
                    if not IsInf and not IsFile:
                        break

                if not self.__IsToken( "}"):
                    raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
                FvImageSectionObj.Fv = FvObj
                FvImageSectionObj.FvName = None

            else:
                # Reference form: 'FV_IMAGE FV [alignment] [FV ...] <file>'.
                if not self.__IsKeyword("FV"):
                    raise Warning("expected 'FV'", self.FileName, self.CurrentLineNumber)
                FvImageSectionObj.FvFileType = self.__Token

                if self.__GetAlignment():
                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                        raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
                    FvImageSectionObj.Alignment = self.__Token

                # NOTE(review): a second 'FV [alignment]' pair is accepted and
                # overwrites the values read just above.
                if self.__IsKeyword("FV"):
                    FvImageSectionObj.FvFileType = self.__Token

                if self.__GetAlignment():
                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
                        raise Warning("Incorrect alignment '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
                    FvImageSectionObj.Alignment = self.__Token

                if self.__IsToken('|'):
                    FvImageSectionObj.FvFileExtension = self.__GetFileExtension()
                elif self.__GetNextToken():
                    # Only treat the token as a file name if it does not start
                    # the next section or close the body.
                    if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
                                            "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
                        FvImageSectionObj.FvFileName = self.__Token
                    else:
                        self.__UndoToken()
                else:
                    raise Warning("expected FV file name", self.FileName, self.CurrentLineNumber)

            Obj.SectionList.append(FvImageSectionObj)
            return True

        EfiSectionObj = EfiSection.EfiSection()
        EfiSectionObj.SectionType = SectionName

        if not self.__GetNextToken():
            raise Warning("expected file type", self.FileName, self.CurrentLineNumber)

        if self.__Token == "STRING":
            # Literal string payload (UI/VERSION only).
            if not self.__RuleSectionCouldHaveString(EfiSectionObj.SectionType):
                raise Warning("%s section could NOT have string data%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

            if not self.__IsToken('='):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected Quoted String", self.FileName, self.CurrentLineNumber)

            if self.__GetStringData():
                EfiSectionObj.StringData = self.__Token

            if self.__IsKeyword("BUILD_NUM"):
                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
                    raise Warning("%s section could NOT have BUILD_NUM%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

                if not self.__IsToken("="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextToken():
                    raise Warning("expected Build number", self.FileName, self.CurrentLineNumber)
                EfiSectionObj.BuildNum = self.__Token

        else:
            # File-type payload; validity checked against the section type.
            EfiSectionObj.FileType = self.__Token
            self.__CheckRuleSectionFileType(EfiSectionObj.SectionType, EfiSectionObj.FileType)

        if self.__IsKeyword("Optional"):
            if not self.__RuleSectionCouldBeOptional(EfiSectionObj.SectionType):
                raise Warning("%s section could NOT be optional%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
            EfiSectionObj.Optional = True

            if self.__IsKeyword("BUILD_NUM"):
                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
                    raise Warning("%s section could NOT have BUILD_NUM%d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)

                if not self.__IsToken("="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextToken():
                    raise Warning("expected Build number", self.FileName, self.CurrentLineNumber)
                EfiSectionObj.BuildNum = self.__Token

        if self.__GetAlignment():
            EfiSectionObj.Alignment = self.__Token

        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
            if self.__SectionCouldHaveRelocFlag(EfiSectionObj.SectionType):
                if self.__Token == 'RELOCS_STRIPPED':
                    EfiSectionObj.KeepReloc = False
                else:
                    EfiSectionObj.KeepReloc = True
                # The section-level flag must agree with the rule-level flag.
                if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
                    raise Warning("Section type %s has reloc strip flag conflict with Rule" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)
            else:
                raise Warning("Section type %s could not have reloc strip flag" % EfiSectionObj.SectionType, self.FileName, self.CurrentLineNumber)


        if self.__IsToken('|'):
            EfiSectionObj.FileExtension = self.__GetFileExtension()
        elif self.__GetNextToken():
            if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
                                    "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):

                # A file name of the form PCD(<space>.<name>) is re-serialized
                # into the canonical 'PCD(<space>.<name>)' token.
                if self.__Token.startswith('PCD'):
                    self.__UndoToken()
                    self.__GetNextWord()

                    if self.__Token == 'PCD':
                        if not self.__IsToken( "("):
                            raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                        PcdPair = self.__GetNextPcdName()
                        if not self.__IsToken( ")"):
                            raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                        self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'

                EfiSectionObj.FileName = self.__Token

            else:
                self.__UndoToken()
        else:
            raise Warning("expected section file name", self.FileName, self.CurrentLineNumber)

        Obj.SectionList.append(EfiSectionObj)
        return True
+
+ ## __RuleSectionCouldBeOptional() method
+ #
+ # Get whether a section could be optional
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could be optional
+ # @retval False section never optional
+ #
+ def __RuleSectionCouldBeOptional(self, SectionType):
+ if SectionType in ("DXE_DEPEX", "UI", "VERSION", "PEI_DEPEX", "RAW", "SMM_DEPEX"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveBuildNum() method
+ #
+ # Get whether a section could have build number information
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have build number information
+ # @retval False section never have build number information
+ #
+ def __RuleSectionCouldHaveBuildNum(self, SectionType):
+ if SectionType in ("VERSION"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveString() method
+ #
+ # Get whether a section could have string
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have string
+ # @retval False section never have string
+ #
+ def __RuleSectionCouldHaveString(self, SectionType):
+ if SectionType in ("UI", "VERSION"):
+ return True
+ else:
+ return False
+
+ ## __CheckRuleSectionFileType() method
+ #
+ # Get whether a section matches a file type
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @param FileType The file type to check
+ #
+ def __CheckRuleSectionFileType(self, SectionType, FileType):
+ if SectionType == "COMPAT16":
+ if FileType not in ("COMPAT16", "SEC_COMPAT16"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PE32":
+ if FileType not in ("PE32", "SEC_PE32"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PIC":
+ if FileType not in ("PIC", "PIC"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "TE":
+ if FileType not in ("TE", "SEC_TE"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "RAW":
+ if FileType not in ("BIN", "SEC_BIN", "RAW", "ASL", "ACPI"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "DXE_DEPEX":
+ if FileType not in ("DXE_DEPEX", "SEC_DXE_DEPEX"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "UI":
+ if FileType not in ("UI", "SEC_UI"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "VERSION":
+ if FileType not in ("VERSION", "SEC_VERSION"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PEI_DEPEX":
+ if FileType not in ("PEI_DEPEX", "SEC_PEI_DEPEX"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+ elif SectionType == "GUID":
+ if FileType not in ("PE32", "SEC_GUID"):
+ raise Warning("Incorrect section file type '%s'" % FileType, self.FileName, self.CurrentLineNumber)
+
+ ## __GetRuleEncapsulationSection() method
+ #
+ # Get encapsulation section for Rule
+ #
+ # @param self The object pointer
+ # @param Rule for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
    def __GetRuleEncapsulationSection(self, Rule):
        """Parse a COMPRESS or GUIDED encapsulation section of a rule body.

        Appends the parsed section to Rule.SectionList and returns True;
        returns False when the next keyword is neither COMPRESS nor GUIDED.
        """
        if self.__IsKeyword( "COMPRESS"):
            # Compression type defaults to PI_STD when not given explicitly.
            Type = "PI_STD"
            if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
                Type = self.__Token

            if not self.__IsToken("{"):
                raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)

            CompressSectionObj = CompressSection.CompressSection()

            CompressSectionObj.CompType = Type
            # Recursive sections: nested encapsulations and leaves until
            # neither parser matches.
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(CompressSectionObj)
                IsLeaf = self.__GetEfiSection(CompressSectionObj)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken( "}"):
                raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
            Rule.SectionList.append(CompressSectionObj)

            return True

        elif self.__IsKeyword( "GUIDED"):
            # Guid may be a literal GUID or the $(NAMED_GUID) placeholder.
            GuidValue = None
            if self.__GetNextGuid():
                GuidValue = self.__Token

            if self.__IsKeyword( "$(NAMED_GUID)"):
                GuidValue = self.__Token

            AttribDict = self.__GetGuidAttrib()

            if not self.__IsToken("{"):
                raise Warning("expected '{'", self.FileName, self.CurrentLineNumber)
            GuidSectionObj = GuidSection.GuidSection()
            GuidSectionObj.NameGuid = GuidValue
            GuidSectionObj.SectionType = "GUIDED"
            GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
            GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]

            # Efi sections...
            while True:
                IsEncapsulate = self.__GetRuleEncapsulationSection(GuidSectionObj)
                IsLeaf = self.__GetEfiSection(GuidSectionObj)
                if not IsEncapsulate and not IsLeaf:
                    break

            if not self.__IsToken( "}"):
                raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)
            Rule.SectionList.append(GuidSectionObj)

            return True

        return False
+
+ ## __GetVtf() method
+ #
+ # Get VTF section contents and store its data into VTF list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a VTF
+ # @retval False Not able to find a VTF
+ #
    def __GetVtf(self):
        """Parse one [VTF.<arch>[,<arch>]] section into self.Profile.VtfList.

        Returns True when a VTF section was consumed, False when the next
        token does not start a [VTF.] section (the token position is restored).
        Raises Warning on any malformed content inside the section.
        """

        if not self.__GetNextToken():
            return False

        S = self.__Token.upper()
        # Another known section header ends the VTF region; any other '['
        # header is an unknown section or an out-of-order one.
        if S.startswith("[") and not S.startswith("[VTF."):
            if not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[VTF.", True):
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            raise Warning("expected [VTF.]", self.FileName, self.CurrentLineNumber)

        # The characters skipped up to '.' form the primary arch of the section.
        if not self.__SkipToToken("."):
            raise Warning("expected '.'", self.FileName, self.CurrentLineNumber)

        Arch = self.__SkippedChars.rstrip(".").upper()
        if Arch not in ("IA32", "X64", "IPF", "ARM"):
            raise Warning("Unknown Arch '%s'" % Arch, self.FileName, self.CurrentLineNumber)

        if not self.__GetNextWord():
            raise Warning("expected VTF name", self.FileName, self.CurrentLineNumber)
        Name = self.__Token.upper()

        VtfObj = Vtf.Vtf()
        VtfObj.UiName = Name
        VtfObj.KeyArch = Arch

        # Optional second arch after a comma, e.g. [VTF.IA32,X64.MyVtf].
        if self.__IsToken(","):
            if not self.__GetNextWord():
                raise Warning("expected Arch list", self.FileName, self.CurrentLineNumber)
            if self.__Token.upper() not in ("IA32", "X64", "IPF", "ARM"):
                raise Warning("Unknown Arch '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
            VtfObj.ArchList = self.__Token.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)

        # Optional IA32 reset binary for the VTF.
        if self.__IsKeyword("IA32_RST_BIN"):
            if not self.__IsToken("="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected Reset file", self.FileName, self.CurrentLineNumber)

            VtfObj.ResetBin = self.__Token

        # Consume every COMP_NAME component statement that follows.
        while self.__GetComponentStatement(VtfObj):
            pass

        self.Profile.VtfList.append(VtfObj)
        return True
+
+ ## __GetComponentStatement() method
+ #
+ # Get components in VTF
+ #
+ # @param self The object pointer
+ # @param VtfObj for whom component is got
+ # @retval True Successfully find a component
+ # @retval False Not able to find a component
+ #
+ def __GetComponentStatement(self, VtfObj):
+
+ if not self.__IsKeyword("COMP_NAME"):
+ return False
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextWord():
+ raise Warning("expected Component Name", self.FileName, self.CurrentLineNumber)
+
+ CompStatementObj = ComponentStatement.ComponentStatement()
+ CompStatementObj.CompName = self.__Token
+
+ if not self.__IsKeyword("COMP_LOC"):
+ raise Warning("expected COMP_LOC", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ CompStatementObj.CompLoc = ""
+ if self.__GetNextWord():
+ CompStatementObj.CompLoc = self.__Token
+ if self.__IsToken('|'):
+ if not self.__GetNextWord():
+ raise Warning("Expected Region Name", self.FileName, self.CurrentLineNumber)
+
+ if self.__Token not in ("F", "N", "S"): #, "H", "L", "PH", "PL"): not support
+ raise Warning("Unknown location type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+
+ CompStatementObj.FilePos = self.__Token
+ else:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+
+ if not self.__IsKeyword("COMP_TYPE"):
+ raise Warning("expected COMP_TYPE", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Component type", self.FileName, self.CurrentLineNumber)
+ if self.__Token not in ("FIT", "PAL_B", "PAL_A", "OEM"):
+ if not self.__Token.startswith("0x") or len(self.__Token) < 3 or len(self.__Token) > 4 or \
+ not self.__HexDigit(self.__Token[2]) or not self.__HexDigit(self.__Token[-1]):
+ raise Warning("Unknown location type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ CompStatementObj.CompType = self.__Token
+
+ if not self.__IsKeyword("COMP_VER"):
+ raise Warning("expected COMP_VER", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Component version", self.FileName, self.CurrentLineNumber)
+
+ Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
+ if Pattern.match(self.__Token) == None:
+ raise Warning("Unknown version format '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ CompStatementObj.CompVer = self.__Token
+
+ if not self.__IsKeyword("COMP_CS"):
+ raise Warning("expected COMP_CS", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Component CS", self.FileName, self.CurrentLineNumber)
+ if self.__Token not in ("1", "0"):
+ raise Warning("Unknown Component CS '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ CompStatementObj.CompCs = self.__Token
+
+
+ if not self.__IsKeyword("COMP_BIN"):
+ raise Warning("expected COMP_BIN", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Component file", self.FileName, self.CurrentLineNumber)
+
+ CompStatementObj.CompBin = self.__Token
+
+ if not self.__IsKeyword("COMP_SYM"):
+ raise Warning("expected COMP_SYM", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Component symbol file", self.FileName, self.CurrentLineNumber)
+
+ CompStatementObj.CompSym = self.__Token
+
+ if not self.__IsKeyword("COMP_SIZE"):
+ raise Warning("expected COMP_SIZE", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if self.__IsToken("-"):
+ CompStatementObj.CompSize = self.__Token
+ elif self.__GetNextDecimalNumber():
+ CompStatementObj.CompSize = self.__Token
+ elif self.__GetNextHexNumber():
+ CompStatementObj.CompSize = self.__Token
+ else:
+ raise Warning("Unknown size '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+
+ VtfObj.ComponentStatementList.append(CompStatementObj)
+ return True
+
+ ## __GetOptionRom() method
+ #
+ # Get OptionROM section contents and store its data into OptionROM list of self.Profile
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a OptionROM
+ # @retval False Not able to find a OptionROM
+ #
+ def __GetOptionRom(self):
+
+ if not self.__GetNextToken():
+ return False
+
+ S = self.__Token.upper()
+ if S.startswith("[") and not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+
+ self.__UndoToken()
+ if not self.__IsToken("[OptionRom.", True):
+ raise Warning("Unknown Keyword '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+
+ OptRomName = self.__GetUiName()
+
+ if not self.__IsToken( "]"):
+ raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
+
+ OptRomObj = OptionRom.OPTIONROM()
+ OptRomObj.DriverName = OptRomName
+ self.Profile.OptRomDict[OptRomName] = OptRomObj
+
+ while True:
+ isInf = self.__GetOptRomInfStatement(OptRomObj)
+ isFile = self.__GetOptRomFileStatement(OptRomObj)
+ if not isInf and not isFile:
+ break
+
+ return True
+
+ ## __GetOptRomInfStatement() method
+ #
+ # Get INF statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom inf statement is got
+ # @retval True Successfully find inf statement
+ # @retval False Not able to find inf statement
+ #
+ def __GetOptRomInfStatement(self, Obj):
+
+ if not self.__IsKeyword( "INF"):
+ return False
+
+ ffsInf = OptRomInfStatement.OptRomInfStatement()
+ self.__GetInfOptions( ffsInf)
+
+ if not self.__GetNextToken():
+ raise Warning("expected INF file path", self.FileName, self.CurrentLineNumber)
+ ffsInf.InfFileName = self.__Token
+
+ if not ffsInf.InfFileName in self.Profile.InfList:
+ self.Profile.InfList.append(ffsInf.InfFileName)
+
+
+ self.__GetOptRomOverrides (ffsInf)
+
+ Obj.FfsList.append(ffsInf)
+ return True
+
+ ## __GetOptRomOverrides() method
+ #
+ # Get overrides for OptROM INF & FILE
+ #
+ # @param self The object pointer
+ # @param FfsInfObj for whom overrides is got
+ #
    def __GetOptRomOverrides(self, Obj):
        """Parse an optional '{ ... }' override block for an OptionRom INF/FILE.

        The block may contain, in this fixed order and each optional:
        PCI_VENDOR_ID, PCI_CLASS_CODE, PCI_DEVICE_ID, PCI_REVISION, COMPRESS.
        On success stores an OverrideAttribs object on Obj.OverrideAttribs;
        does nothing when no '{' follows. Raises Warning on malformed content.
        """
        if self.__IsToken('{'):
            Overrides = OptionRom.OverrideAttribs()
            if self.__IsKeyword( "PCI_VENDOR_ID"):
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextHexNumber():
                    raise Warning("expected Hex vendor id", self.FileName, self.CurrentLineNumber)
                Overrides.PciVendorId = self.__Token

            if self.__IsKeyword( "PCI_CLASS_CODE"):
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextHexNumber():
                    raise Warning("expected Hex class code", self.FileName, self.CurrentLineNumber)
                Overrides.PciClassCode = self.__Token

            if self.__IsKeyword( "PCI_DEVICE_ID"):
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextHexNumber():
                    raise Warning("expected Hex device id", self.FileName, self.CurrentLineNumber)

                Overrides.PciDeviceId = self.__Token

            if self.__IsKeyword( "PCI_REVISION"):
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextHexNumber():
                    raise Warning("expected Hex revision", self.FileName, self.CurrentLineNumber)
                Overrides.PciRevision = self.__Token

            if self.__IsKeyword( "COMPRESS"):
                if not self.__IsToken( "="):
                    raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
                if not self.__GetNextToken():
                    raise Warning("expected TRUE/FALSE for compress", self.FileName, self.CurrentLineNumber)

                # Anything other than TRUE (case-insensitive) leaves
                # NeedCompress at its default.
                if self.__Token.upper() == 'TRUE':
                    Overrides.NeedCompress = True

            if not self.__IsToken( "}"):

                # Distinguish a known-but-out-of-order attribute from a
                # completely unknown one before reporting the missing brace.
                if self.__Token not in ("PCI_CLASS_CODE", "PCI_VENDOR_ID", "PCI_DEVICE_ID", "PCI_REVISION", "COMPRESS"):
                    raise Warning("unknown attribute %s" % self.__Token, self.FileName, self.CurrentLineNumber)

                raise Warning("expected '}'", self.FileName, self.CurrentLineNumber)

            Obj.OverrideAttribs = Overrides
+
+ ## __GetOptRomFileStatement() method
+ #
+ # Get FILE statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom FILE statement is got
+ # @retval True Successfully find FILE statement
+ # @retval False Not able to find FILE statement
+ #
+ def __GetOptRomFileStatement(self, Obj):
+
+ if not self.__IsKeyword( "FILE"):
+ return False
+
+ FfsFileObj = OptRomFileStatement.OptRomFileStatement()
+
+ if not self.__IsKeyword("EFI") and not self.__IsKeyword("BIN"):
+ raise Warning("expected Binary type (EFI/BIN)", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FileType = self.__Token
+
+ if not self.__GetNextToken():
+ raise Warning("expected File path", self.FileName, self.CurrentLineNumber)
+ FfsFileObj.FileName = self.__Token
+
+ if FfsFileObj.FileType == 'EFI':
+ self.__GetOptRomOverrides(FfsFileObj)
+
+ Obj.FfsList.append(FfsFileObj)
+
+ return True
+
+
+ ## __GetFvInFd() method
+ #
+ # Get FV list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval FvList list of FV in FD
+ #
+ def __GetFvInFd (self, FdName):
+
+ FvList = []
+ if FdName.upper() in self.Profile.FdDict.keys():
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == 'FV':
+ for elementRegionData in elementRegion.RegionDataList:
+ if elementRegionData != None and elementRegionData.upper() not in FvList:
+ FvList.append(elementRegionData.upper())
+ return FvList
+
+ ## __GetReferencedFdFvTuple() method
+ #
+ # Get FD and FV list referenced by a FFS file
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+ def __GetReferencedFdFvTuple(self, FvObj, RefFdList = [], RefFvList = []):
+
+ for FfsObj in FvObj.FfsList:
+ if isinstance(FfsObj, FfsFileStatement.FileStatement):
+ if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ RefFvList.append(FfsObj.FvName.upper())
+ elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ RefFdList.append(FfsObj.FdName.upper())
+ else:
+ self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
+
+ ## __GetReferencedFdFvTupleFromSection() method
+ #
+ # Get FD and FV list referenced by a FFS section
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param FdList referenced FD by section
+ # @param FvList referenced FV by section
+ #
+ def __GetReferencedFdFvTupleFromSection(self, FfsFile, FdList = [], FvList = []):
+
+ SectionStack = []
+ SectionStack.extend(FfsFile.SectionList)
+ while SectionStack != []:
+ SectionObj = SectionStack.pop()
+ if isinstance(SectionObj, FvImageSection.FvImageSection):
+ if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ FvList.append(SectionObj.FvName.upper())
+ if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ FvList.append(SectionObj.Fv.UiFvName.upper())
+ self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
+
+ if isinstance(SectionObj, CompressSection.CompressSection) or isinstance(SectionObj, GuidSection.GuidSection):
+ SectionStack.extend(SectionObj.SectionList)
+
+ ## CycleReferenceCheck() method
+ #
+ # Check whether cycle reference exists in FDF
+ #
+ # @param self The object pointer
+ # @retval True cycle reference exists
+ # @retval False Not exists cycle reference
+ #
    def CycleReferenceCheck(self):
        """Check whether any FV in the FDF (transitively) references itself.

        For every FV, walks the FVs/FDs it references; if the starting FV
        shows up again on the work stack a cycle exists. A Warning carrying
        the reference chain is raised internally and printed.

        @retval True   a cycle reference exists
        @retval False  no cycle reference found

        NOTE(review): the 'return' inside 'finally' swallows ANY exception
        raised in the try block (not just the Warning used for control flow),
        so unexpected errors are silently converted into the current result.
        NOTE(review): 'for FvObj in FvInFdList' rebinds FvObj (previously the
        FV object) to an FV *name* string; the code relies on this afterwards.
        """

        CycleRefExists = False

        try:
            for FvName in self.Profile.FvDict.keys():
                LogStr = "Cycle Reference Checking for FV: %s\n" % FvName
                RefFvStack = []
                RefFvStack.append(FvName)
                FdAnalyzedList = []

                while RefFvStack != []:
                    FvNameFromStack = RefFvStack.pop()
                    if FvNameFromStack.upper() in self.Profile.FvDict.keys():
                        FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
                    else:
                        continue

                    RefFdList = []
                    RefFvList = []
                    self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)

                    for RefFdName in RefFdList:
                        # Each FD only needs to be expanded once.
                        if RefFdName in FdAnalyzedList:
                            continue

                        LogStr += "FD %s is referenced by FV %s\n" % (RefFdName, FvNameFromStack)
                        FvInFdList = self.__GetFvInFd(RefFdName)
                        if FvInFdList != []:
                            LogStr += "FD %s contains FV: " % RefFdName
                            for FvObj in FvInFdList:
                                LogStr += FvObj
                                LogStr += ' \n'
                                if FvObj not in RefFvStack:
                                    RefFvStack.append(FvObj)

                        # Seeing the starting FV again means a cycle.
                        if FvName in RefFvStack:
                            CycleRefExists = True
                            raise Warning(LogStr)
                        FdAnalyzedList.append(RefFdName)

                    for RefFvName in RefFvList:
                        LogStr += "FV %s is referenced by FV %s\n" % (RefFvName, FvNameFromStack)
                        if RefFvName not in RefFvStack:
                            RefFvStack.append(RefFvName)

                    if FvName in RefFvStack:
                        CycleRefExists = True
                        raise Warning(LogStr)

        except Warning:
            # Python 2 print statement; the accumulated chain is the report.
            print LogStr

        finally:
            return CycleRefExists
+
if __name__ == "__main__":
    # Ad-hoc self-test: parse a known FDF file and run the cycle check.
    # NOTE(review): Python 2 only syntax (print statement, 'except Warning, X')
    # and a hard-coded Windows-style relative path.
    parser = FdfParser("..\LakeportX64Pkg.fdf")
    try:
        parser.ParseFile()
        parser.CycleReferenceCheck()
    except Warning, X:
        print str(X)
    else:
        print "Success!"
+
diff --git a/BaseTools/Source/Python/GenFds/Ffs.py b/BaseTools/Source/Python/GenFds/Ffs.py new file mode 100644 index 0000000000..aaa791763b --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Ffs.py @@ -0,0 +1,81 @@ +## @file
+# process FFS generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from CommonDataClass.FdfClass import FDClassObject
+
+## generate FFS
+#
+#
class Ffs(FDClassObject):
    """Lookup tables for FFS generation (module/file type and section suffix).

    This class is used primarily as a namespace for the three class-level
    dictionaries below; they map FDF/INF identifiers to the strings GenFfs
    expects and to the conventional output-file suffixes.
    """

    # mapping between MODULE type in FDF (from INF) and file type for GenFfs
    ModuleTypeToFileType = {
        'SEC'               : 'EFI_FV_FILETYPE_SECURITY_CORE',
        'PEI_CORE'          : 'EFI_FV_FILETYPE_PEI_CORE',
        'PEIM'              : 'EFI_FV_FILETYPE_PEIM',
        'DXE_CORE'          : 'EFI_FV_FILETYPE_DXE_CORE',
        'DXE_DRIVER'        : 'EFI_FV_FILETYPE_DRIVER',
        'DXE_SAL_DRIVER'    : 'EFI_FV_FILETYPE_DRIVER',
        'DXE_SMM_DRIVER'    : 'EFI_FV_FILETYPE_DRIVER',
        'DXE_RUNTIME_DRIVER': 'EFI_FV_FILETYPE_DRIVER',
        'UEFI_DRIVER'       : 'EFI_FV_FILETYPE_DRIVER',
        'UEFI_APPLICATION'  : 'EFI_FV_FILETYPE_APPLICATION',
        'SMM_DRIVER'        : 'EFI_FV_FILETYPE_SMM',
        'SMM_CORE'          : 'EFI_FV_FILETYPE_SMM_CORE'
    }

    # mapping between FILE type in FDF and file type for GenFfs
    FdfFvFileTypeToFileType = {
        'SEC'               : 'EFI_FV_FILETYPE_SECURITY_CORE',
        'PEI_CORE'          : 'EFI_FV_FILETYPE_PEI_CORE',
        'PEIM'              : 'EFI_FV_FILETYPE_PEIM',
        'DXE_CORE'          : 'EFI_FV_FILETYPE_DXE_CORE',
        'FREEFORM'          : 'EFI_FV_FILETYPE_FREEFORM',
        'DRIVER'            : 'EFI_FV_FILETYPE_DRIVER',
        'APPLICATION'       : 'EFI_FV_FILETYPE_APPLICATION',
        'FV_IMAGE'          : 'EFI_FV_FILETYPE_FIRMWARE_VOLUME_IMAGE',
        'RAW'               : 'EFI_FV_FILETYPE_RAW',
        'PEI_DXE_COMBO'     : 'EFI_FV_FILETYPE_COMBINED_PEIM_DRIVER',
        'SMM_DXE_COMBO'     : 'EFI_FV_FILETYPE_COMBINED_SMM_DXE',
        'SMM'               : 'EFI_FV_FILETYPE_SMM',
        'SMM_CORE'          : 'EFI_FV_FILETYPE_SMM_CORE'
    }

    # mapping between section type in FDF and file suffix
    SectionSuffix = {
        'PE32'                 : '.pe32',
        'PIC'                  : '.pic',
        'TE'                   : '.te',
        'DXE_DEPEX'            : '.dpx',
        'VERSION'              : '.ver',
        'UI'                   : '.ui',
        'COMPAT16'             : '.com16',
        'RAW'                  : '.raw',
        'FREEFORM_SUBTYPE_GUID': '.guid',
        'FV_IMAGE'             : 'fv.sec',
        'COMPRESS'             : '.com',
        'GUIDED'               : '.guided',
        'PEI_DEPEX'            : '.dpx',
        'SMM_DEPEX'            : '.smm'
    }

    ## The constructor
    #
    # @param self The object pointer
    #
    def __init__(self):
        # Bug fix: the original called FfsClassObject.__init__, a name that is
        # neither imported nor defined anywhere in this module, so
        # instantiating Ffs raised NameError. The base class is FDClassObject.
        FDClassObject.__init__(self)
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py new file mode 100644 index 0000000000..ed778f3d44 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py @@ -0,0 +1,118 @@ +## @file
+# process FFS generation from FILE statement
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Ffs
+import Rule
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import os
+import StringIO
+import subprocess
+from CommonDataClass.FdfClass import FileStatementClassObject
+from Common import EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import GuidStructureByteArrayToGuidString
+
+## generate FFS from FILE
+#
+#
class FileStatement (FileStatementClassObject) :
    """Generate an FFS file from a FILE statement in the FDF file."""

    ## The constructor
    #
    # @param self The object pointer
    #
    def __init__(self):
        FileStatementClassObject.__init__(self)

    ## GenFfs() method
    #
    # Generate FFS
    #
    # @param self The object pointer
    # @param Dict dictionary contains macro and value pair; a fresh dict is
    #             created when omitted
    # @retval string Generated FFS file name
    #
    def GenFfs(self, Dict = None):
        # Bug fix: the default used to be a shared mutable dict ({});
        # Dict.update() below then accumulated DefineVarDict entries across
        # every call that omitted the argument. A None sentinel keeps the
        # signature backward-compatible for callers that pass their own dict.
        if Dict is None:
            Dict = {}

        # Resolve a PCD(...) name GUID to its registry-format GUID string.
        if self.NameGuid != None and self.NameGuid.startswith('PCD('):
            PcdValue = GenFdsGlobalVariable.GetPcdValue(self.NameGuid)
            if len(PcdValue) == 0:
                EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
                            % (self.NameGuid))
            if PcdValue.startswith('{'):
                # Byte-array form, e.g. {0x12, 0x34, ...} -> registry format.
                PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
            RegistryGuidStr = PcdValue
            if len(RegistryGuidStr) == 0:
                EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
                            % (self.NameGuid))
            self.NameGuid = RegistryGuidStr

        OutputDir = os.path.join(GenFdsGlobalVariable.FfsDir, self.NameGuid)
        if not os.path.exists(OutputDir):
            os.makedirs(OutputDir)

        Dict.update(self.DefineVarDict)
        SectionAlignments = None
        # The FILE statement payload is exactly one of: an FV by name,
        # an FD by name, a single binary file, or an explicit section list.
        if self.FvName != None :
            Buffer = StringIO.StringIO('')
            if self.FvName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
                EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (self.FvName))
            Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())
            FileName = Fv.AddToBuffer(Buffer)
            SectionFiles = [FileName]

        elif self.FdName != None:
            if self.FdName.upper() not in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():
                EdkLogger.error("GenFds", GENFDS_ERROR, "FD (%s) is NOT described in FDF file!" % (self.FdName))
            Fd = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())
            FvBin = {}
            FileName = Fd.GenFd(FvBin)
            SectionFiles = [FileName]

        elif self.FileName != None:
            self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
            SectionFiles = [GenFdsGlobalVariable.MacroExtend(self.FileName, Dict)]

        else:
            SectionFiles = []
            Index = 0
            SectionAlignments = []
            for section in self.SectionList :
                Index = Index + 1
                SecIndex = '%d' %Index
                sectList, align = section.GenSection(OutputDir, self.NameGuid, SecIndex, self.KeyStringList, None, Dict)
                if sectList != []:
                    for sect in sectList:
                        SectionFiles.append(sect)
                        SectionAlignments.append(align)

        #
        # Prepare the parameter
        #
        FfsFileOutput = os.path.join(OutputDir, self.NameGuid + '.ffs')
        GenFdsGlobalVariable.GenerateFfs(FfsFileOutput, SectionFiles,
                                         Ffs.Ffs.FdfFvFileTypeToFileType.get(self.FvFileType),
                                         self.NameGuid,
                                         Fixed=self.Fixed,
                                         CheckSum=self.CheckSum,
                                         Align=self.Alignment,
                                         SectionAlign=SectionAlignments
                                        )

        return FfsFileOutput
+
+
+
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py new file mode 100644 index 0000000000..0dcd96da2e --- /dev/null +++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py @@ -0,0 +1,582 @@ +## @file
+# process FFS generation from INF statement
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Rule
+import os
+import shutil
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import Ffs
+import subprocess
+import sys
+import Section
+import RuleSimpleFile
+import RuleComplexFile
+from CommonDataClass.FdfClass import FfsInfStatementClassObject
+from Common.String import *
+from Common.Misc import PathClass
+from Common.Misc import GuidStructureByteArrayToGuidString
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## generate FFS from INF
+#
+#
+class FfsInfStatement(FfsInfStatementClassObject):
+ ## The constructor
+ #
+ # @param self The object pointer
+ #
+ def __init__(self):
+ FfsInfStatementClassObject.__init__(self)
+ self.TargetOverrideList = []
+ self.ShadowFromInfFile = None
+ self.KeepRelocFromRule = None
+ self.InDsc = True
+ self.OptRomDefs = {}
+
+ ## __InfParse() method
+ #
+ # Parse inf file to get module information
+ #
+ # @param self The object pointer
+ # @param Dict dictionary contains macro and value pair
+ #
    def __InfParse__(self, Dict = {}):
        """Parse the module's INF file and cache its build metadata on self.

        Populates BaseName, ModuleGuid, ModuleType, VersionString,
        BinFileList, SourceFileList, OutputPath and EfiOutputPath from the
        workspace build database. Raises a GenFds error when the INF cannot
        be located or when a binary-only lookup finds no binaries.

        NOTE(review): the 'Dict' parameter is accepted but never used here;
        it also carries a mutable default ({}) — harmless only because it is
        not mutated in this method.
        """

        GenFdsGlobalVariable.VerboseLogger( " Begine parsing INf file : %s" %self.InfFileName)

        # Normalize the INF path: strip $(WORKSPACE) and any leading separator.
        self.InfFileName = self.InfFileName.replace('$(WORKSPACE)', '')
        if self.InfFileName[0] == '\\' or self.InfFileName[0] == '/' :
            self.InfFileName = self.InfFileName[1:]

        # Only validate existence when the path has no unresolved macros left.
        if self.InfFileName.find('$') == -1:
            InfPath = NormPath(self.InfFileName)
            if not os.path.exists(InfPath):
                InfPath = GenFdsGlobalVariable.ReplaceWorkspaceMacro(InfPath)
                if not os.path.exists(InfPath):
                    EdkLogger.error("GenFds", GENFDS_ERROR, "Non-existant Module %s !" % (self.InfFileName))

        self.CurrentArch = self.GetCurrentArch()
        #
        # Get the InfClass object
        #

        PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
        ErrorCode, ErrorInfo = PathClassObj.Validate()
        if ErrorCode != 0:
            EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)

        if self.CurrentArch != None:

            Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, self.CurrentArch]
            #
            # Set Ffs BaseName, MdouleGuid, ModuleType, Version, OutputPath
            #
            self.BaseName = Inf.BaseName
            self.ModuleGuid = Inf.Guid
            self.ModuleType = Inf.ModuleType
            # Pre-UEFI-2.0 INFs (AutoGen version < 1.5) keep the module type
            # in COMPONENT_TYPE instead of MODULE_TYPE.
            if Inf.AutoGenVersion < 0x00010005:
                self.ModuleType = Inf.ComponentType
            self.VersionString = Inf.Version
            self.BinFileList = Inf.Binaries
            self.SourceFileList = Inf.Sources
            if self.KeepReloc == None and Inf.Shadow:
                self.ShadowFromInfFile = Inf.Shadow

        else:
            # No deducible arch: fall back to the COMMON view, which must be
            # a binary-only module to be usable.
            Inf = GenFdsGlobalVariable.WorkSpace.BuildObject[PathClassObj, 'COMMON']
            self.BaseName = Inf.BaseName
            self.ModuleGuid = Inf.Guid
            self.ModuleType = Inf.ModuleType
            self.VersionString = Inf.Version
            self.BinFileList = Inf.Binaries
            self.SourceFileList = Inf.Sources
            if self.BinFileList == []:
                EdkLogger.error("GenFds", GENFDS_ERROR,
                                "INF %s specified in FDF could not be found in build ARCH %s!" \
                                % (self.InfFileName, GenFdsGlobalVariable.ArchList))

        # A module with sources that is not in the DSC is suspicious.
        if len(self.SourceFileList) != 0 and not self.InDsc:
            EdkLogger.warn("GenFds", GENFDS_ERROR, "Module %s NOT found in DSC file; Is it really a binary module?" % (self.InfFileName))

        if Inf._Defs != None and len(Inf._Defs) > 0:
            self.OptRomDefs.update(Inf._Defs)

        GenFdsGlobalVariable.VerboseLogger( "BaseName : %s" %self.BaseName)
        GenFdsGlobalVariable.VerboseLogger("ModuleGuid : %s" %self.ModuleGuid)
        GenFdsGlobalVariable.VerboseLogger("ModuleType : %s" %self.ModuleType)
        GenFdsGlobalVariable.VerboseLogger("VersionString : %s" %self.VersionString)
        GenFdsGlobalVariable.VerboseLogger("InfFileName :%s"  %self.InfFileName)

        #
        # Set OutputPath = ${WorkSpace}\Build\Fv\Ffs\${ModuleGuid}+ ${MdouleName}\
        #

        self.OutputPath = os.path.join(GenFdsGlobalVariable.FfsDir, \
                                       self.ModuleGuid + self.BaseName)
        if not os.path.exists(self.OutputPath) :
            os.makedirs(self.OutputPath)

        self.EfiOutputPath = self.__GetEFIOutPutPath__()
        GenFdsGlobalVariable.VerboseLogger( "ModuelEFIPath: " + self.EfiOutputPath)
+
+ ## GenFfs() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Dict dictionary contains macro and value pair
+ # @retval string Generated FFS file name
+ #
+ def GenFfs(self, Dict = {}):
+ #
+ # Parse Inf file get Module related information
+ #
+
+ self.__InfParse__(Dict)
+ #
+ # Get the rule of how to generate Ffs file
+ #
+ Rule = self.__GetRule__()
+ GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
+ #FileType = Ffs.Ffs.ModuleTypeToFileType[Rule.ModuleType]
+ #
+ # For the rule only has simpleFile
+ #
+ if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
+ SectionOutputList = self.__GenSimpleFileSection__(Rule)
+ FfsOutput = self.__GenSimpleFileFfs__(Rule, SectionOutputList)
+ return FfsOutput
+ #
+ # For Rule has ComplexFile
+ #
+ elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
+ InputSectList, InputSectAlignments = self.__GenComplexFileSection__(Rule)
+ FfsOutput = self.__GenComplexFileFfs__(Rule, InputSectList, InputSectAlignments)
+
+ return FfsOutput
+
+ ## __ExtendMacro__() method
+ #
+ # Replace macro with its value
+ #
+ # @param self The object pointer
+ # @param String The string to be replaced
+ # @retval string Macro replaced string
+ #
+ def __ExtendMacro__ (self, String):
+ MacroDict = {
+ '$(INF_OUTPUT)' : self.EfiOutputPath,
+ '$(MODULE_NAME)' : self.BaseName,
+ '$(BUILD_NUMBER)': self.BuildNum,
+ '$(INF_VERSION)' : self.VersionString,
+ '$(NAMED_GUID)' : self.ModuleGuid
+ }
+ String = GenFdsGlobalVariable.MacroExtend(String, MacroDict)
+ return String
+
+ ## __GetRule__() method
+ #
+ # Get correct rule for generating FFS for this INF
+ #
+ # @param self The object pointer
+ # @retval Rule Rule object
+ #
+ def __GetRule__ (self) :
+ CurrentArchList = []
+ if self.CurrentArch == None:
+ CurrentArchList = ['common']
+ else:
+ CurrentArchList.append(self.CurrentArch)
+
+ for CurrentArch in CurrentArchList:
+ RuleName = 'RULE' + \
+ '.' + \
+ CurrentArch.upper() + \
+ '.' + \
+ self.ModuleType.upper()
+ if self.Rule != None:
+ RuleName = RuleName + \
+ '.' + \
+ self.Rule.upper()
+
+ Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
+ if Rule != None:
+ GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
+ return Rule
+
+ RuleName = 'RULE' + \
+ '.' + \
+ 'COMMON' + \
+ '.' + \
+ self.ModuleType.upper()
+
+ if self.Rule != None:
+ RuleName = RuleName + \
+ '.' + \
+ self.Rule.upper()
+
+ GenFdsGlobalVariable.VerboseLogger ('Trying to apply common rule %s for INF %s' % (RuleName, self.InfFileName))
+
+ Rule = GenFdsGlobalVariable.FdfParser.Profile.RuleDict.get(RuleName)
+ if Rule != None:
+ GenFdsGlobalVariable.VerboseLogger ("Want To Find Rule Name is : " + RuleName)
+ return Rule
+
+ if Rule == None :
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'Don\'t Find common rule %s for INF %s' \
+ % (RuleName, self.InfFileName))
+
+ ## __GetPlatformArchList__() method
+ #
+ # Get Arch list this INF built under
+ #
+ # @param self The object pointer
+ # @retval list Arch list
+ #
+ def __GetPlatformArchList__(self):
+
+ InfFileKey = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, self.InfFileName))
+ DscArchList = []
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'IA32']
+ if PlatformDataBase != None:
+ if InfFileKey in PlatformDataBase.Modules:
+ DscArchList.append ('IA32')
+
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'X64']
+ if PlatformDataBase != None:
+ if InfFileKey in PlatformDataBase.Modules:
+ DscArchList.append ('X64')
+
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'IPF']
+ if PlatformDataBase != None:
+ if InfFileKey in (PlatformDataBase.Modules):
+ DscArchList.append ('IPF')
+
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'ARM']
+ if PlatformDataBase != None:
+ if InfFileKey in (PlatformDataBase.Modules):
+ DscArchList.append ('ARM')
+
+ PlatformDataBase = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'EBC']
+ if PlatformDataBase != None:
+ if InfFileKey in (PlatformDataBase.Modules):
+ DscArchList.append ('EBC')
+
+ return DscArchList
+
+ ## GetCurrentArch() method
+ #
+ # Get Arch list of the module from this INF is to be placed into flash
+ #
+ # @param self The object pointer
+ # @retval list Arch list
+ #
    def GetCurrentArch(self) :
        """Deduce the single ARCH whose build output goes into flash for this INF.

        Intersects the command-line ARCH list with the ARCHs the platform DSC
        builds this module for, then narrows further by KeyStringList
        (TARGET_TAG_ARCH overrides) and a USE=<arch> override. Returns the
        one remaining ARCH, or None (implicitly) after reporting an error
        when zero or more than one remain.
        """

        TargetArchList = GenFdsGlobalVariable.ArchList

        PlatformArchList = self.__GetPlatformArchList__()

        CurArchList = TargetArchList
        if PlatformArchList != []:
            # NOTE(review): set intersection + list() loses ordering; callers
            # only ever use this when exactly one ARCH survives.
            CurArchList = list(set (TargetArchList) & set (PlatformArchList))
        GenFdsGlobalVariable.VerboseLogger ("Valid target architecture(s) is : " + " ".join(CurArchList))

        ArchList = []
        if self.KeyStringList != []:
            # TOOLCHAIN key strings restrict both ARCH and build TARGET.
            for Key in self.KeyStringList:
                Key = GenFdsGlobalVariable.MacroExtend(Key)
                Target, Tag, Arch = Key.split('_')
                if Arch in CurArchList:
                    ArchList.append(Arch)
                if Target not in self.TargetOverrideList:
                    self.TargetOverrideList.append(Target)
        else:
            ArchList = CurArchList

        # An explicit USE=<arch> on the INF statement narrows further.
        UseArchList = TargetArchList
        if self.UseArch != None:
            UseArchList = []
            UseArchList.append(self.UseArch)
            ArchList = list(set (UseArchList) & set (ArchList))

        self.InfFileName = NormPath(self.InfFileName)
        if len(PlatformArchList) == 0:
            # Not in the DSC at all: must be a standalone (binary) module.
            self.InDsc = False
            PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
            ErrorCode, ErrorInfo = PathClassObj.Validate()
            if ErrorCode != 0:
                EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
        if len(ArchList) == 1:
            Arch = ArchList[0]
            return Arch
        elif len(ArchList) > 1:
            if len(PlatformArchList) == 0:
                EdkLogger.error("GenFds", GENFDS_ERROR, "GenFds command line option has multiple ARCHs %s. Not able to determine which ARCH is valid for Module %s !" % (str(ArchList), self.InfFileName))
            else:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Module built under multiple ARCHs %s. Not able to determine which output to put into flash for Module %s !" % (str(ArchList), self.InfFileName))
        else:
            EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s appears under ARCH %s in platform %s, but current deduced ARCH is %s, so NO build output could be put into flash." \
                            % (self.InfFileName, str(PlatformArchList), GenFdsGlobalVariable.ActivePlatform, str(set (UseArchList) & set (TargetArchList))))
+
+ ## __GetEFIOutPutPath__() method
+ #
+ # Get the output path for generated files
+ #
+ # @param self The object pointer
+ # @retval string Path that output files from this INF go to
+ #
+ def __GetEFIOutPutPath__(self):
+ Arch = ''
+ OutputPath = ''
+ (ModulePath, FileName) = os.path.split(self.InfFileName)
+ Index = FileName.find('.')
+ FileName = FileName[0:Index]
+ Arch = "NoneArch"
+ if self.CurrentArch != None:
+ Arch = self.CurrentArch
+
+ OutputPath = os.path.join(GenFdsGlobalVariable.OutputDirDict[Arch],
+ Arch ,
+ ModulePath,
+ FileName,
+ 'OUTPUT'
+ )
+ OutputPath = os.path.realpath(OutputPath)
+ return OutputPath
+
+ ## __GenSimpleFileSection__() method
+ #
+ # Generate section by specified file name or a list of files with file extension
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval string File name of the generated section file
+ #
+ def __GenSimpleFileSection__(self, Rule):
+ #
+ # Prepare the parameter of GenSection
+ #
+ FileList = []
+ OutputFileList = []
+ if Rule.FileName != None:
+ GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
+ else:
+ FileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
+
+ Index = 1
+ SectionType = Rule.SectionType
+ NoStrip = True
+ if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
+ if self.KeepReloc != None:
+ NoStrip = self.KeepReloc
+ elif Rule.KeepReloc != None:
+ NoStrip = Rule.KeepReloc
+ elif self.ShadowFromInfFile != None:
+ NoStrip = self.ShadowFromInfFile
+
+ if FileList != [] :
+ for File in FileList:
+
+ SecNum = '%d' %Index
+ GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
+ Ffs.Ffs.SectionSuffix[SectionType] + 'SEC' + SecNum
+ Index = Index + 1
+ OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(File) > os.path.getmtime(FileBeforeStrip)):
+ shutil.copyfile(File, FileBeforeStrip)
+ StrippedFile = os.path.join(self.OutputPath, ModuleName + '.stipped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [GenFdsGlobalVariable.MacroExtend(File, Dict, self.CurrentArch)],
+ Strip=True
+ )
+ File = StrippedFile
+
+ if SectionType == 'TE':
+ TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [GenFdsGlobalVariable.MacroExtend(File, Dict, self.CurrentArch)],
+ Type='te'
+ )
+ File = TeFile
+
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [File], Section.Section.SectionType[SectionType])
+ OutputFileList.append(OutputFile)
+ else:
+ SecNum = '%d' %Index
+ GenSecOutputFile= self.__ExtendMacro__(Rule.NameGuid) + \
+ Ffs.Ffs.SectionSuffix[SectionType] + 'SEC' + SecNum
+ OutputFile = os.path.join(self.OutputPath, GenSecOutputFile)
+
+ if not NoStrip:
+ FileBeforeStrip = os.path.join(self.OutputPath, ModuleName + '.reloc')
+ if not os.path.exists(FileBeforeStrip) or \
+ (os.path.getmtime(GenSecInputFile) > os.path.getmtime(FileBeforeStrip)):
+ shutil.copyfile(GenSecInputFile, FileBeforeStrip)
+ StrippedFile = os.path.join(self.OutputPath, ModuleName + '.stipped')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ StrippedFile,
+ [GenFdsGlobalVariable.MacroExtend(GenSecInputFile, Dict, self.CurrentArch)],
+ Strip=True
+ )
+ GenSecInputFile = StrippedFile
+
+ if SectionType == 'TE':
+ TeFile = os.path.join( self.OutputPath, self.ModuleGuid + 'Te.raw')
+ GenFdsGlobalVariable.GenerateFirmwareImage(
+ TeFile,
+ [GenFdsGlobalVariable.MacroExtend(File, Dict, self.CurrentArch)],
+ Type='te'
+ )
+ GenSecInputFile = TeFile
+
+ GenFdsGlobalVariable.GenerateSection(OutputFile, [GenSecInputFile], Section.Section.SectionType[SectionType])
+ OutputFileList.append(OutputFile)
+
+ return OutputFileList
+
+ ## __GenSimpleFileFfs__() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @param InputFileList The output file list from GenSection
+ # @retval string Generated FFS file name
+ #
+ def __GenSimpleFileFfs__(self, Rule, InputFileList):
+ FfsOutput = self.OutputPath + \
+ os.sep + \
+ self.__ExtendMacro__(Rule.NameGuid) + \
+ '.ffs'
+
+ GenFdsGlobalVariable.VerboseLogger(self.__ExtendMacro__(Rule.NameGuid))
+ InputSection = []
+ SectionAlignments = []
+ for InputFile in InputFileList:
+ InputSection.append(InputFile)
+ SectionAlignments.append(Rule.Alignment)
+
+ if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
+ if len(PcdValue) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
+ % (Rule.NameGuid))
+ if PcdValue.startswith('{'):
+ PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
+ RegistryGuidStr = PcdValue
+ if len(RegistryGuidStr) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
+ % (Rule.NameGuid))
+ self.ModuleGuid = RegistryGuidStr
+
+ GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputSection,
+ Ffs.Ffs.FdfFvFileTypeToFileType[Rule.FvFileType],
+ self.ModuleGuid, Fixed=Rule.Fixed,
+ CheckSum=Rule.CheckSum, Align=Rule.Alignment,
+ SectionAlign=SectionAlignments
+ )
+ return FfsOutput
+
+ ## __GenComplexFileSection__() method
+ #
+ # Generate section by sections in Rule
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval string File name of the generated section file
+ #
+ def __GenComplexFileSection__(self, Rule):
+ if self.ModuleType in ('SEC', 'PEI_CORE', 'PEIM'):
+ if Rule.KeepReloc != None:
+ self.KeepRelocFromRule = Rule.KeepReloc
+ SectFiles = []
+ SectAlignments = []
+ Index = 1
+ for Sect in Rule.SectionList:
+ SecIndex = '%d' %Index
+ SectList = []
+ if Rule.KeyStringList != []:
+ SectList, Align = Sect.GenSection(self.OutputPath , self.ModuleGuid, SecIndex, Rule.KeyStringList, self)
+ else :
+ SectList, Align = Sect.GenSection(self.OutputPath , self.ModuleGuid, SecIndex, self.KeyStringList, self)
+ for SecName in SectList :
+ SectFiles.append(SecName)
+ SectAlignments.append(Align)
+ Index = Index + 1
+ return SectFiles, SectAlignments
+
+ ## __GenComplexFileFfs__() method
+ #
+ # Generate FFS
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @param InputFileList The output file list from GenSection
+ # @retval string Generated FFS file name
+ #
+ def __GenComplexFileFfs__(self, Rule, InputFile, Alignments):
+
+ if Rule.NameGuid != None and Rule.NameGuid.startswith('PCD('):
+ PcdValue = GenFdsGlobalVariable.GetPcdValue(Rule.NameGuid)
+ if len(PcdValue) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, '%s NOT defined.' \
+ % (Rule.NameGuid))
+ if PcdValue.startswith('{'):
+ PcdValue = GuidStructureByteArrayToGuidString(PcdValue)
+ RegistryGuidStr = PcdValue
+ if len(RegistryGuidStr) == 0:
+ EdkLogger.error("GenFds", GENFDS_ERROR, 'GUID value for %s in wrong format.' \
+ % (Rule.NameGuid))
+ self.ModuleGuid = RegistryGuidStr
+
+ FfsOutput = os.path.join( self.OutputPath, self.ModuleGuid + '.ffs')
+ GenFdsGlobalVariable.GenerateFfs(FfsOutput, InputFile,
+ Ffs.Ffs.FdfFvFileTypeToFileType[Rule.FvFileType],
+ self.ModuleGuid, Fixed=Rule.Fixed,
+ CheckSum=Rule.CheckSum, Align=Rule.Alignment,
+ SectionAlign=Alignments
+ )
+ return FfsOutput
+
+ ## __GetGenFfsCmdParameter__() method
+ #
+ # Create parameter string for GenFfs
+ #
+ # @param self The object pointer
+ # @param Rule The rule object used to generate section
+ # @retval tuple (FileType, Fixed, CheckSum, Alignment)
+ #
+ def __GetGenFfsCmdParameter__(self, Rule):
+ result = tuple()
+ result += ('-t', Ffs.Ffs.FdfFvFileTypeToFileType[Rule.FvFileType])
+ if Rule.Fixed != False:
+ result += ('-x',)
+ if Rule.CheckSum != False:
+ result += ('-s',)
+
+ if Rule.Alignment != None and Rule.Alignment != '':
+ result += ('-a', Rule.Alignment)
+
+ return result
diff --git a/BaseTools/Source/Python/GenFds/Fv.py b/BaseTools/Source/Python/GenFds/Fv.py new file mode 100644 index 0000000000..74248f71c3 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Fv.py @@ -0,0 +1,215 @@ +## @file
+# process FV generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import shutil
+import subprocess
+import StringIO
+
+import Ffs
+import AprioriSection
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from GenFds import GenFds
+from CommonDataClass.FdfClass import FvClassObject
+from Common.Misc import SaveFileOnChange
+
+T_CHAR_LF = '\n'
+
+## generate FV
+#
+#
class FV (FvClassObject):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        FvClassObject.__init__(self)
        # StringIO buffer accumulating the GenFv .inf description text
        self.FvInfFile = None
        self.FvAddressFile = None
        self.BaseAddress = None
        # On-disk path of the .inf description handed to the GenFv tool
        self.InfFileName = None
        self.FvAddressFileName = None

    ## AddToBuffer()
    #
    #   Generate Fv and add it to the Buffer
    #
    #   @param  self        The object pointer
    #   @param  Buffer      The buffer generated FV data will be put
    #   @param  BaseAddress base address of FV
    #   @param  BlockSize   block size of FV
    #   @param  BlockNum    How many blocks in FV
    #   @param  ErasePloarity   Flash erase polarity
    #   @param  VtfDict     VTF objects
    #   @param  MacroDict   macro value pair
    #   @retval string      Generated FV file path
    #
    def AddToBuffer (self, Buffer, BaseAddress=None, BlockSize= None, BlockNum=None, ErasePloarity='1', VtfDict=None, MacroDict = None) :
        # BUGFIX: MacroDict previously defaulted to a shared mutable {} that
        # the update() call below mutated, so macros from one AddToBuffer()
        # call leaked into every later call. Default to a fresh dict instead;
        # an explicitly passed dict is still updated in place as before.
        if MacroDict == None:
            MacroDict = {}

        # Each FV is generated only once; later requests return the cached file.
        if self.UiFvName.upper() in GenFds.FvBinDict.keys():
            return GenFds.FvBinDict[self.UiFvName.upper()]

        GenFdsGlobalVariable.InfLogger( "\nGenerating %s FV ..." %self.UiFvName)

        self.__InitializeInf__(BaseAddress, BlockSize, BlockNum, ErasePloarity, VtfDict)
        #
        # First Process the Apriori section
        #
        MacroDict.update(self.DefineVarDict)

        GenFdsGlobalVariable.VerboseLogger('First generate Apriori file !')
        FfsFileList = []
        for AprSection in self.AprioriSectionList:
            FileName = AprSection.GenFfs (self.UiFvName, MacroDict)
            FfsFileList.append(FileName)
            # Add Apriori file name to Inf file
            self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                       FileName          + \
                                       T_CHAR_LF)

        # Process Modules in FfsList
        for FfsFile in self.FfsList :
            FileName = FfsFile.GenFfs(MacroDict)
            FfsFileList.append(FileName)
            self.FvInfFile.writelines("EFI_FILE_NAME = " + \
                                       FileName          + \
                                       T_CHAR_LF)

        # Only rewrite the .inf on disk when its content changed, so GenFv
        # is not re-triggered needlessly.
        SaveFileOnChange(self.InfFileName, self.FvInfFile.getvalue(), False)
        self.FvInfFile.close()
        #
        # Call GenFv tool
        #
        FvOutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiFvName)
        FvOutputFile = FvOutputFile + '.Fv'
        # BUGBUG: FvOutputFile could be specified from FDF file (FV section, CreateFile statement)
        if self.CreateFileName != None:
            FvOutputFile = self.CreateFileName

        FvInfoFileName = os.path.join(GenFdsGlobalVariable.FfsDir, self.UiFvName + '.inf')
        shutil.copy(GenFdsGlobalVariable.FvAddressFileName, FvInfoFileName)
        GenFdsGlobalVariable.GenerateFirmwareVolume(
                                FvOutputFile,
                                [self.InfFileName],
                                AddressFile=FvInfoFileName,
                                FfsList=FfsFileList
                                )

        #
        # Write the Fv contents to Buffer
        #
        # BUGFIX: the generated FV is only read back here, so open it
        # read-only ('rb') instead of read-write ('r+b').
        FvFileObj = open ( FvOutputFile,'rb')

        GenFdsGlobalVariable.InfLogger( "\nGenerate %s FV Successfully" %self.UiFvName)
        GenFdsGlobalVariable.SharpCounter = 0

        Buffer.write(FvFileObj.read())
        FvFileObj.close()
        GenFds.FvBinDict[self.UiFvName.upper()] = FvOutputFile
        return FvOutputFile

    ## __InitializeInf__()
    #
    #   Initilize the inf file to create FV
    #
    #   @param  self        The object pointer
    #   @param  BaseAddress base address of FV
    #   @param  BlockSize   block size of FV
    #   @param  BlockNum    How many blocks in FV
    #   @param  ErasePloarity   Flash erase polarity
    #   @param  VtfDict     VTF objects
    #
    def __InitializeInf__ (self, BaseAddress = None, BlockSize= None, BlockNum = None, ErasePloarity='1', VtfDict=None) :
        #
        # Create FV inf file
        #
        self.InfFileName = os.path.join(GenFdsGlobalVariable.FvDir,
                                   self.UiFvName + '.inf')
        self.FvInfFile = StringIO.StringIO()

        #
        # Add [Options]
        #
        self.FvInfFile.writelines("[options]" + T_CHAR_LF)
        if BaseAddress != None :
            self.FvInfFile.writelines("EFI_BASE_ADDRESS = " + \
                                       BaseAddress          + \
                                       T_CHAR_LF)

        # Explicit block geometry wins; otherwise fall back to the FV's own
        # BlockSizeList from the FDF.
        if BlockSize != None:
            self.FvInfFile.writelines("EFI_BLOCK_SIZE = " + \
                                      '0x%X' %BlockSize    + \
                                      T_CHAR_LF)
            if BlockNum != None:
                self.FvInfFile.writelines("EFI_NUM_BLOCKS   = "  + \
                                          ' 0x%X' %BlockNum      + \
                                          T_CHAR_LF)
        else:
            for BlockSize in self.BlockSizeList :
                if BlockSize[0] != None:
                    self.FvInfFile.writelines("EFI_BLOCK_SIZE  = "  + \
                                              '0x%X' %BlockSize[0]    + \
                                              T_CHAR_LF)

                if BlockSize[1] != None:
                    self.FvInfFile.writelines("EFI_NUM_BLOCKS   = "  + \
                                              ' 0x%X' %BlockSize[1]    + \
                                              T_CHAR_LF)

        if self.BsBaseAddress != None:
            self.FvInfFile.writelines('EFI_BOOT_DRIVER_BASE_ADDRESS = ' + \
                                       '0x%X' %self.BsBaseAddress)
        if self.RtBaseAddress != None:
            self.FvInfFile.writelines('EFI_RUNTIME_DRIVER_BASE_ADDRESS = ' + \
                                      '0x%X' %self.RtBaseAddress)
        #
        # Add attribute
        #
        self.FvInfFile.writelines("[attributes]" + T_CHAR_LF)

        self.FvInfFile.writelines("EFI_ERASE_POLARITY   = "       + \
                                          ' %s' %ErasePloarity    + \
                                          T_CHAR_LF)
        if not (self.FvAttributeDict == None):
            for FvAttribute in self.FvAttributeDict.keys() :
                self.FvInfFile.writelines("EFI_"            + \
                                          FvAttribute       + \
                                          ' = '             + \
                                          self.FvAttributeDict[FvAttribute] + \
                                          T_CHAR_LF )
        if self.FvAlignment != None:
            self.FvInfFile.writelines("EFI_FVB2_ALIGNMENT_"     + \
                                       self.FvAlignment.strip() + \
                                       " = TRUE"                + \
                                       T_CHAR_LF)

        if self.FvNameGuid != None:
            self.FvInfFile.writelines("EFI_FVNAME_GUID"         + \
                                       " = %s" % self.FvNameGuid + \
                                       T_CHAR_LF)
        #
        # Add [Files]
        #
        self.FvInfFile.writelines("[files]" + T_CHAR_LF)
        # A VTF (boot) file assigned to this FV goes in first
        if VtfDict != None and self.UiFvName in VtfDict.keys():
            self.FvInfFile.writelines("EFI_FILE_NAME = "          + \
                                       VtfDict.get(self.UiFvName) + \
                                       T_CHAR_LF)
+
diff --git a/BaseTools/Source/Python/GenFds/FvImageSection.py b/BaseTools/Source/Python/GenFds/FvImageSection.py new file mode 100644 index 0000000000..3a3e714228 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/FvImageSection.py @@ -0,0 +1,90 @@ +## @file
+# process FV image section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+import StringIO
+from Ffs import Ffs
+import subprocess
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import os
+from CommonDataClass.FdfClass import FvImageSectionClassObject
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## generate FV image section
+#
+#
class FvImageSection(FvImageSectionClassObject):

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        FvImageSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate FV image section
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  KeyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = None):
        # BUGFIX: 'Dict' previously defaulted to a shared mutable {}; it is
        # handed to Fv.AddToBuffer() which updates it in place, so macros
        # leaked between calls. Default to a fresh dictionary instead.
        if Dict == None:
            Dict = {}

        OutputFileList = []
        if self.FvFileType != None:
            FileList, IsSect = Section.Section.GetFileList(FfsInf, self.FvFileType, self.FvFileExtension)
            if IsSect :
                return FileList, self.Alignment

            Num = SecNum

            for FileName in FileList:
                OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + Num + Ffs.SectionSuffix.get("FV_IMAGE"))
                # BUGFIX: the original passed the undefined name 'FvFileName'
                # here (NameError); the section input is the current FileName.
                GenFdsGlobalVariable.GenerateSection(OutputFile, [FileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE')
                OutputFileList.append(OutputFile)
            return OutputFileList, self.Alignment
        #
        # Generate Fv
        #
        if self.FvName != None:
            Buffer = StringIO.StringIO('')
            Fv = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName)
            if Fv != None:
                self.Fv = Fv
                # The referenced FV is generated (or its cached file reused)
                FvFileName = self.Fv.AddToBuffer(Buffer, MacroDict = Dict)
            else:
                if self.FvFileName != None:
                    FvFileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvFileName)
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "FvImageSection Failed! %s NOT found in FDF" % self.FvName)

            #
            # Prepare the parameter of GenSection
            #
            OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get("FV_IMAGE"))
            GenFdsGlobalVariable.GenerateSection(OutputFile, [FvFileName], 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE')
            OutputFileList.append(OutputFile)

            return OutputFileList, self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py new file mode 100644 index 0000000000..2bc416f828 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/GenFds.py @@ -0,0 +1,486 @@ +## @file +# generate flash image +# +# Copyright (c) 2007, Intel Corporation +# +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +from optparse import OptionParser +import sys +import os +import linecache +import FdfParser +from Common.BuildToolError import * +from GenFdsGlobalVariable import GenFdsGlobalVariable +from Workspace.WorkspaceDatabase import WorkspaceDatabase +from Workspace.BuildClassObject import PcdClassObject +from Workspace.BuildClassObject import ModuleBuildClassObject +import RuleComplexFile +from EfiSection import EfiSection +import StringIO +import Common.TargetTxtClassObject as TargetTxtClassObject +import Common.ToolDefClassObject as ToolDefClassObject +import Common.DataType +import Common.GlobalData as GlobalData +from Common import EdkLogger +from Common.String import * +from Common.Misc import DirCache,PathClass + +## Version and Copyright +versionNumber = "1.0" +__version__ = "%prog Version " + versionNumber +__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved." + +## Tool entrance method +# +# This method mainly dispatch specific methods per the command line options. +# If no error found, return zero value so the caller of this tool can know +# if it's executed successfully or not. 
+# +# @retval 0 Tool was successful +# @retval 1 Tool failed +# +def main(): + global Options + Options = myOptionParser() + + global Workspace + Workspace = "" + ArchList = None + ReturnCode = 0 + + EdkLogger.Initialize() + try: + if Options.verbose != None: + EdkLogger.SetLevel(EdkLogger.VERBOSE) + GenFdsGlobalVariable.VerboseMode = True + + if Options.FixedAddress != None: + GenFdsGlobalVariable.FixedLoadAddress = True + + if Options.quiet != None: + EdkLogger.SetLevel(EdkLogger.QUIET) + if Options.debug != None: + EdkLogger.SetLevel(Options.debug + 1) + GenFdsGlobalVariable.DebugLevel = Options.debug + else: + EdkLogger.SetLevel(EdkLogger.INFO) + + if (Options.Workspace == None): + EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "WORKSPACE not defined", + ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.") + elif not os.path.exists(Options.Workspace): + EdkLogger.error("GenFds", BuildToolError.PARAMETER_INVALID, "WORKSPACE is invalid", + ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.") + else: + Workspace = os.path.normcase(Options.Workspace) + GenFdsGlobalVariable.WorkSpaceDir = Workspace + if 'EDK_SOURCE' in os.environ.keys(): + GenFdsGlobalVariable.EdkSourceDir = os.path.normcase(os.environ['EDK_SOURCE']) + if (Options.debug): + GenFdsGlobalVariable.VerboseLogger( "Using Workspace:" + Workspace) + os.chdir(GenFdsGlobalVariable.WorkSpaceDir) + + if (Options.filename): + FdfFilename = Options.filename + FdfFilename = GenFdsGlobalVariable.ReplaceWorkspaceMacro(FdfFilename) + else: + EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing FDF filename") + + if (Options.BuildTarget): + GenFdsGlobalVariable.TargetName = Options.BuildTarget + else: + EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing build target") + + if (Options.ToolChain): + GenFdsGlobalVariable.ToolChainTag = Options.ToolChain + else: + EdkLogger.error("GenFds", 
BuildToolError.OPTION_MISSING, "Missing tool chain tag") + + if FdfFilename[0:2] == '..': + FdfFilename = os.path.realpath(FdfFilename) + if FdfFilename[1] != ':': + FdfFilename = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, FdfFilename) + + if not os.path.exists(FdfFilename): + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=FdfFilename) + GenFdsGlobalVariable.FdfFile = FdfFilename + GenFdsGlobalVariable.FdfFileTimeStamp = os.path.getmtime(FdfFilename) + + if (Options.activePlatform): + ActivePlatform = Options.activePlatform + ActivePlatform = GenFdsGlobalVariable.ReplaceWorkspaceMacro(ActivePlatform) + + if ActivePlatform[0:2] == '..': + ActivePlatform = os.path.realpath(ActivePlatform) + + if ActivePlatform[1] != ':': + ActivePlatform = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ActivePlatform) + + if not os.path.exists(ActivePlatform) : + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist!") + + if ActivePlatform.find(Workspace) == -1: + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist in Workspace!") + + ActivePlatform = ActivePlatform.replace(Workspace, '') + if len(ActivePlatform) > 0 : + if ActivePlatform[0] == '\\' or ActivePlatform[0] == '/': + ActivePlatform = ActivePlatform[1:] + else: + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, "ActivePlatform doesn't exist!") + else : + EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing active platform") + + GenFdsGlobalVariable.ActivePlatform = PathClass(NormPath(ActivePlatform), Workspace) + + BuildConfigurationFile = os.path.normpath(os.path.join(GenFdsGlobalVariable.WorkSpaceDir, "Conf/target.txt")) + if os.path.isfile(BuildConfigurationFile) == True: + TargetTxtClassObject.TargetTxtClassObject(BuildConfigurationFile) + else: + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=BuildConfigurationFile) + + if Options.Macros: + for Pair in Options.Macros: + 
Pair.strip('"') + List = Pair.split('=') + if len(List) == 2: + FdfParser.InputMacroDict[List[0].strip()] = List[1].strip() + if List[0].strip() == "EFI_SOURCE": + GlobalData.gEfiSource = List[1].strip() + elif List[0].strip() == "EDK_SOURCE": + GlobalData.gEdkSource = List[1].strip() + else: + GlobalData.gEdkGlobal[List[0].strip()] = List[1].strip() + else: + FdfParser.InputMacroDict[List[0].strip()] = None + + """call Workspace build create database""" + os.environ["WORKSPACE"] = Workspace + BuildWorkSpace = WorkspaceDatabase(':memory:', GlobalData.gGlobalDefines) + BuildWorkSpace.InitDatabase() + + # + # Get files real name in workspace dir + # + GlobalData.gAllFiles = DirCache(Workspace) + GlobalData.gWorkspace = Workspace + + if (Options.archList) : + ArchList = Options.archList.split(',') + else: +# EdkLogger.error("GenFds", BuildToolError.OPTION_MISSING, "Missing build ARCH") + ArchList = BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'COMMON'].SupArchList + + TargetArchList = set(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'COMMON'].SupArchList) & set(ArchList) + if len(TargetArchList) == 0: + EdkLogger.error("GenFds", GENFDS_ERROR, "Target ARCH %s not in platform supported ARCH %s" % (str(ArchList), str(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, 'COMMON'].SupArchList))) + + for Arch in ArchList: + GenFdsGlobalVariable.OutputDirFromDscDict[Arch] = NormPath(BuildWorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch].OutputDirectory) + + if (Options.outputDir): + OutputDirFromCommandLine = GenFdsGlobalVariable.ReplaceWorkspaceMacro(Options.outputDir) + for Arch in ArchList: + GenFdsGlobalVariable.OutputDirDict[Arch] = OutputDirFromCommandLine + else: + for Arch in ArchList: + GenFdsGlobalVariable.OutputDirDict[Arch] = os.path.join(GenFdsGlobalVariable.OutputDirFromDscDict[Arch], GenFdsGlobalVariable.TargetName + '_' + GenFdsGlobalVariable.ToolChainTag) + + for Key in 
GenFdsGlobalVariable.OutputDirDict: + OutputDir = GenFdsGlobalVariable.OutputDirDict[Key] + if OutputDir[0:2] == '..': + OutputDir = os.path.realpath(OutputDir) + + if OutputDir[1] != ':': + OutputDir = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, OutputDir) + + if not os.path.exists(OutputDir): + EdkLogger.error("GenFds", BuildToolError.FILE_NOT_FOUND, ExtraData=OutputDir) + GenFdsGlobalVariable.OutputDirDict[Key] = OutputDir + + """ Parse Fdf file, has to place after build Workspace as FDF may contain macros from DSC file """ + FdfParserObj = FdfParser.FdfParser(FdfFilename) + FdfParserObj.ParseFile() + + if FdfParserObj.CycleReferenceCheck(): + EdkLogger.error("GenFds", BuildToolError.FORMAT_NOT_SUPPORTED, "Cycle Reference Detected in FDF file") + + if (Options.uiFdName) : + if Options.uiFdName.upper() in FdfParserObj.Profile.FdDict.keys(): + GenFds.OnlyGenerateThisFd = Options.uiFdName + else: + EdkLogger.error("GenFds", BuildToolError.OPTION_VALUE_INVALID, + "No such an FD in FDF file: %s" % Options.uiFdName) + + if (Options.uiFvName) : + if Options.uiFvName.upper() in FdfParserObj.Profile.FvDict.keys(): + GenFds.OnlyGenerateThisFv = Options.uiFvName + else: + EdkLogger.error("GenFds", BuildToolError.OPTION_VALUE_INVALID, + "No such an FV in FDF file: %s" % Options.uiFvName) + + """Modify images from build output if the feature of loading driver at fixed address is on.""" + if GenFdsGlobalVariable.FixedLoadAddress: + GenFds.PreprocessImage(BuildWorkSpace, GenFdsGlobalVariable.ActivePlatform) + """Call GenFds""" + GenFds.GenFd('', FdfParserObj, BuildWorkSpace, ArchList) + + """Display FV space info.""" + GenFds.DisplayFvSpaceInfo(FdfParserObj) + + except FdfParser.Warning, X: + EdkLogger.error(X.ToolName, BuildToolError.FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) + ReturnCode = BuildToolError.FORMAT_INVALID + except FatalError, X: + if Options.debug != None: + import traceback + 
EdkLogger.quiet(traceback.format_exc()) + ReturnCode = X.args[0] + except: + import traceback + EdkLogger.error( + "\nPython", + CODE_ERROR, + "Tools code failure", + ExtraData="Please submit bug report in www.TianoCore.org, attaching following call stack trace!\n", + RaiseError=False + ) + EdkLogger.quiet(traceback.format_exc()) + ReturnCode = CODE_ERROR + return ReturnCode + +gParamCheck = [] +def SingleCheckCallback(option, opt_str, value, parser): + if option not in gParamCheck: + setattr(parser.values, option.dest, value) + gParamCheck.append(option) + else: + parser.error("Option %s only allows one instance in command line!" % option) + +## Parse command line options +# +# Using standard Python module optparse to parse command line option of this tool. +# +# @retval Opt A optparse.Values object containing the parsed options +# @retval Args Target of build command +# +def myOptionParser(): + usage = "%prog [options] -f input_file -a arch_list -b build_target -p active_platform -t tool_chain_tag -D \"MacroName [= MacroValue]\"" + Parser = OptionParser(usage=usage,description=__copyright__,version="%prog " + str(versionNumber)) + Parser.add_option("-f", "--file", dest="filename", type="string", help="Name of FDF file to convert", action="callback", callback=SingleCheckCallback) + Parser.add_option("-a", "--arch", dest="archList", help="comma separated list containing one or more of: IA32, X64, IPF, ARM or EBC which should be built, overrides target.txt?s TARGET_ARCH") + Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") + Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.") + Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.") + Parser.add_option("-p", "--platform", type="string", dest="activePlatform", help="Set the ACTIVE_PLATFORM, overrides target.txt 
ACTIVE_PLATFORM setting.", + action="callback", callback=SingleCheckCallback) + Parser.add_option("-w", "--workspace", type="string", dest="Workspace", default=os.environ.get('WORKSPACE'), help="Set the WORKSPACE", + action="callback", callback=SingleCheckCallback) + Parser.add_option("-o", "--outputDir", type="string", dest="outputDir", help="Name of Build Output directory", + action="callback", callback=SingleCheckCallback) + Parser.add_option("-r", "--rom_image", dest="uiFdName", help="Build the image using the [FD] section named by FdUiName.") + Parser.add_option("-i", "--FvImage", dest="uiFvName", help="Buld the FV image using the [FV] section named by UiFvName") + Parser.add_option("-b", "--buildtarget", type="choice", choices=['DEBUG','RELEASE'], dest="BuildTarget", help="Build TARGET is one of list: DEBUG, RELEASE.", + action="callback", callback=SingleCheckCallback) + Parser.add_option("-t", "--tagname", type="string", dest="ToolChain", help="Using the tools: TOOL_CHAIN_TAG name to build the platform.", + action="callback", callback=SingleCheckCallback) + Parser.add_option("-D", "--define", action="append", type="string", dest="Macros", help="Macro: \"Name [= Value]\".") + Parser.add_option("-s", "--specifyaddress", dest="FixedAddress", action="store_true", type=None, help="Specify driver load address.") + (Options, args) = Parser.parse_args() + return Options + +## The class implementing the EDK2 flash image generation process +# +# This process includes: +# 1. Collect workspace information, includes platform and module information +# 2. Call methods of Fd class to generate FD +# 3. 
Call methods of Fv class to generate FV that not belong to FD +# +class GenFds : + FdfParsef = None + # FvName in FDF, FvBinFile name + FvBinDict = {} + OnlyGenerateThisFd = None + OnlyGenerateThisFv = None + + ## GenFd() + # + # @param OutputDir Output directory + # @param FdfParser FDF contents parser + # @param Workspace The directory of workspace + # @param ArchList The Arch list of platform + # + def GenFd (OutputDir, FdfParser, WorkSpace, ArchList): + GenFdsGlobalVariable.SetDir ('', FdfParser, WorkSpace, ArchList) + + GenFdsGlobalVariable.VerboseLogger(" Gen Fd !") + if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): + FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(GenFds.OnlyGenerateThisFd.upper()) + if FdObj != None: + FdObj.GenFd(GenFds.FvBinDict) + elif GenFds.OnlyGenerateThisFv == None: + for FdName in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): + FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[FdName] + FdObj.GenFd(GenFds.FvBinDict) + + GenFdsGlobalVariable.VerboseLogger(" Gen FV ! 
") + if GenFds.OnlyGenerateThisFv != None and GenFds.OnlyGenerateThisFv.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): + FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(GenFds.OnlyGenerateThisFv.upper()) + if FvObj != None: + Buffer = StringIO.StringIO() + # Get FV base Address + FvObj.AddToBuffer(Buffer, None, GenFds.GetFvBlockSize(FvObj)) + Buffer.close() + return + elif GenFds.OnlyGenerateThisFd == None: + for FvName in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): + Buffer = StringIO.StringIO('') + FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict[FvName] + # Get FV base Address + FvObj.AddToBuffer(Buffer, None, GenFds.GetFvBlockSize(FvObj)) + Buffer.close() + + if GenFds.OnlyGenerateThisFv == None and GenFds.OnlyGenerateThisFd == None: + GenFdsGlobalVariable.VerboseLogger(" Gen Capsule !") + for CapsuleObj in GenFdsGlobalVariable.FdfParser.Profile.CapsuleList: + CapsuleObj.GenCapsule() + + if GenFdsGlobalVariable.FdfParser.Profile.OptRomDict != {}: + GenFdsGlobalVariable.VerboseLogger(" Gen Option ROM !") + for DriverName in GenFdsGlobalVariable.FdfParser.Profile.OptRomDict.keys(): + OptRomObj = GenFdsGlobalVariable.FdfParser.Profile.OptRomDict[DriverName] + OptRomObj.AddToBuffer(None) + + ## GetFvBlockSize() + # + # @param FvObj Whose block size to get + # @retval int Block size value + # + def GetFvBlockSize(FvObj): + DefaultBlockSize = 0x10000 + FdObj = None + if GenFds.OnlyGenerateThisFd != None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): + FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()] + if FdObj == None: + for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values(): + for ElementRegion in ElementFd.RegionList: + if ElementRegion.RegionType == 'FV': + for ElementRegionData in ElementRegion.RegionDataList: + if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName: + if FvObj.BlockSizeList != 
[]: + return FvObj.BlockSizeList[0][0] + else: + return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList) + if FvObj.BlockSizeList != []: + return FvObj.BlockSizeList[0][0] + return DefaultBlockSize + else: + for ElementRegion in FdObj.RegionList: + if ElementRegion.RegionType == 'FV': + for ElementRegionData in ElementRegion.RegionDataList: + if ElementRegionData != None and ElementRegionData.upper() == FvObj.UiFvName: + if FvObj.BlockSizeList != []: + return FvObj.BlockSizeList[0][0] + else: + return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList) + return DefaultBlockSize + + ## DisplayFvSpaceInfo() + # + # @param FvObj Whose block size to get + # @retval None + # + def DisplayFvSpaceInfo(FdfParser): + + FvSpaceInfoList = [] + MaxFvNameLength = 0 + for FvName in FdfParser.Profile.FvDict: + if len(FvName) > MaxFvNameLength: + MaxFvNameLength = len(FvName) + FvSpaceInfoFileName = os.path.join(GenFdsGlobalVariable.FvDir, FvName.upper() + '.Fv.map') + if os.path.exists(FvSpaceInfoFileName): + FileLinesList = linecache.getlines(FvSpaceInfoFileName) + TotalFound = False + Total = '' + UsedFound = False + Used = '' + FreeFound = False + Free = '' + for Line in FileLinesList: + NameValue = Line.split('=') + if len(NameValue) == 2: + if NameValue[0].strip() == 'EFI_FV_TOTAL_SIZE': + TotalFound = True + Total = NameValue[1].strip() + if NameValue[0].strip() == 'EFI_FV_TAKEN_SIZE': + UsedFound = True + Used = NameValue[1].strip() + if NameValue[0].strip() == 'EFI_FV_SPACE_SIZE': + FreeFound = True + Free = NameValue[1].strip() + + if TotalFound and UsedFound and FreeFound: + FvSpaceInfoList.append((FvName, Total, Used, Free)) + + GenFdsGlobalVariable.InfLogger('\nFV Space Information') + for FvSpaceInfo in FvSpaceInfoList: + Name = FvSpaceInfo[0] + TotalSizeValue = long(FvSpaceInfo[1], 0) + UsedSizeValue = long(FvSpaceInfo[2], 0) + FreeSizeValue = long(FvSpaceInfo[3], 0) + GenFdsGlobalVariable.InfLogger(Name + ' ' + '[' + 
str((UsedSizeValue+0.0)/TotalSizeValue)[0:4].lstrip('0.') + '%Full] ' + str(TotalSizeValue) + ' total, ' + str(UsedSizeValue) + ' used, ' + str(FreeSizeValue) + ' free') + + ## PreprocessImage() + # + # @param BuildDb Database from build meta data files + # @param DscFile modules from dsc file will be preprocessed + # @retval None + # + def PreprocessImage(BuildDb, DscFile): + PcdDict = BuildDb.BuildObject[DscFile, 'COMMON'].Pcds + PcdValue = '' + for Key in PcdDict: + PcdObj = PcdDict[Key] + if PcdObj.TokenCName == 'PcdBsBaseAddress': + PcdValue = PcdObj.DefaultValue + break + + if PcdValue == '': + return + + Int64PcdValue = long(PcdValue, 0) + if Int64PcdValue == 0 or Int64PcdValue < -1: + return + + TopAddress = 0 + if Int64PcdValue > 0: + TopAddress = Int64PcdValue + + ModuleDict = BuildDb.BuildObject[DscFile, 'COMMON'].Modules + for Key in ModuleDict: + ModuleObj = BuildDb.BuildObject[Key, 'COMMON'] + print ModuleObj.BaseName + ' ' + ModuleObj.ModuleType + + ##Define GenFd as static function + GenFd = staticmethod(GenFd) + GetFvBlockSize = staticmethod(GetFvBlockSize) + DisplayFvSpaceInfo = staticmethod(DisplayFvSpaceInfo) + PreprocessImage = staticmethod(PreprocessImage) + +if __name__ == '__main__': + r = main() + ## 0-127 is a safe return range, and 1 is a standard default error + if r < 0 or r > 127: r = 1 + sys.exit(r) + diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py new file mode 100644 index 0000000000..d556ce7ade --- /dev/null +++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py @@ -0,0 +1,472 @@ +## @file +# Global variables for GenFds +# +# Copyright (c) 2007, Intel Corporation +# +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. 
The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import sys +import subprocess +import struct +import array + +from Common.BuildToolError import * +from Common import EdkLogger +from Common.Misc import SaveFileOnChange + +## Global variables +# +# +class GenFdsGlobalVariable: + FvDir = '' + OutputDirDict = {} + BinDir = '' + # will be FvDir + os.sep + 'Ffs' + FfsDir = '' + FdfParser = None + LibDir = '' + WorkSpace = None + WorkSpaceDir = '' + EdkSourceDir = '' + OutputDirFromDscDict = {} + TargetName = '' + ToolChainTag = '' + RuleDict = {} + ArchList = None + VtfDict = {} + ActivePlatform = None + FvAddressFileName = '' + VerboseMode = False + DebugLevel = -1 + SharpCounter = 0 + SharpNumberPerLine = 40 + FdfFile = '' + FdfFileTimeStamp = 0 + FixedLoadAddress = False + + SectionHeader = struct.Struct("3B 1B") + + ## SetDir() + # + # @param OutputDir Output directory + # @param FdfParser FDF contents parser + # @param Workspace The directory of workspace + # @param ArchList The Arch list of platform + # + def SetDir (OutputDir, FdfParser, WorkSpace, ArchList): + GenFdsGlobalVariable.VerboseLogger( "GenFdsGlobalVariable.OutputDir :%s" %OutputDir) +# GenFdsGlobalVariable.OutputDirDict = OutputDir + GenFdsGlobalVariable.FdfParser = FdfParser + GenFdsGlobalVariable.WorkSpace = WorkSpace + GenFdsGlobalVariable.FvDir = os.path.join(GenFdsGlobalVariable.OutputDirDict[ArchList[0]], 'FV') + if not os.path.exists(GenFdsGlobalVariable.FvDir) : + os.makedirs(GenFdsGlobalVariable.FvDir) + GenFdsGlobalVariable.FfsDir = os.path.join(GenFdsGlobalVariable.FvDir, 'Ffs') + if not os.path.exists(GenFdsGlobalVariable.FfsDir) : + os.makedirs(GenFdsGlobalVariable.FfsDir) + if ArchList != None: + GenFdsGlobalVariable.ArchList = ArchList + + 
T_CHAR_LF = '\n' + # + # Create FV Address inf file + # + GenFdsGlobalVariable.FvAddressFileName = os.path.join(GenFdsGlobalVariable.FfsDir, 'FvAddress.inf') + FvAddressFile = open (GenFdsGlobalVariable.FvAddressFileName, 'w') + # + # Add [Options] + # + FvAddressFile.writelines("[options]" + T_CHAR_LF) + BsAddress = '0' + for Arch in ArchList: + if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch].BsBaseAddress: + BsAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch].BsBaseAddress + break + + FvAddressFile.writelines("EFI_BOOT_DRIVER_BASE_ADDRESS = " + \ + BsAddress + \ + T_CHAR_LF) + + RtAddress = '0' + for Arch in ArchList: + if GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch].RtBaseAddress: + RtAddress = GenFdsGlobalVariable.WorkSpace.BuildObject[GenFdsGlobalVariable.ActivePlatform, Arch].RtBaseAddress + + FvAddressFile.writelines("EFI_RUNTIME_DRIVER_BASE_ADDRESS = " + \ + RtAddress + \ + T_CHAR_LF) + + FvAddressFile.close() + + ## ReplaceWorkspaceMacro() + # + # @param String String that may contain macro + # + def ReplaceWorkspaceMacro(String): + Str = String.replace('$(WORKSPACE)', GenFdsGlobalVariable.WorkSpaceDir) + if os.path.exists(Str): + if not os.path.isabs(Str): + Str = os.path.abspath(Str) + else: + Str = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, String) + return os.path.normpath(Str) + + ## Check if the input files are newer than output files + # + # @param Output Path of output file + # @param Input Path list of input files + # + # @retval True if Output doesn't exist, or any Input is newer + # @retval False if all Input is older than Output + # + @staticmethod + def NeedsUpdate(Output, Input): + if not os.path.exists(Output): + return True + # always update "Output" if no "Input" given + if Input == None or len(Input) == 0: + return True + + # if fdf file is changed after the 'Output" is generated, update the 'Output' + 
OutputTime = os.path.getmtime(Output) + if GenFdsGlobalVariable.FdfFileTimeStamp > OutputTime: + return True + + for F in Input: + # always update "Output" if any "Input" doesn't exist + if not os.path.exists(F): + return True + # always update "Output" if any "Input" is newer than "Output" + if os.path.getmtime(F) > OutputTime: + return True + return False + + @staticmethod + def GenerateSection(Output, Input, Type=None, CompressionType=None, Guid=None, + GuidHdrLen=None, GuidAttr=None, Ui=None, Ver=None): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): + return + GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["GenSec"] + if Type not in [None, '']: + Cmd += ["-s", Type] + if CompressionType not in [None, '']: + Cmd += ["-c", CompressionType] + if Guid != None: + Cmd += ["-g", Guid] + if GuidHdrLen not in [None, '']: + Cmd += ["-l", GuidHdrLen] + if GuidAttr not in [None, '']: + Cmd += ["-r", GuidAttr] + + if Ui not in [None, '']: + #Cmd += ["-n", '"' + Ui + '"'] + SectionData = array.array('B', [0,0,0,0]) + SectionData.fromstring(Ui.encode("utf_16_le")) + SectionData.append(0) + SectionData.append(0) + Len = len(SectionData) + GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x15) + SaveFileOnChange(Output, SectionData.tostring()) + elif Ver not in [None, '']: + #Cmd += ["-j", Ver] + SectionData = array.array('B', [0,0,0,0]) + SectionData.fromstring(Ver.encode("utf_16_le")) + SectionData.append(0) + SectionData.append(0) + Len = len(SectionData) + GenFdsGlobalVariable.SectionHeader.pack_into(SectionData, 0, Len & 0xff, (Len >> 8) & 0xff, (Len >> 16) & 0xff, 0x14) + SaveFileOnChange(Output, SectionData.tostring()) + else: + Cmd += ["-o", Output] + Cmd += Input + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate section") + + @staticmethod + def GenerateFfs(Output, Input, Type, Guid, Fixed=False, 
CheckSum=False, Align=None, + SectionAlign=None): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): + return + GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["GenFfs", "-t", Type, "-g", Guid] + if Fixed == True: + Cmd += ["-x"] + if CheckSum: + Cmd += ["-s"] + if Align not in [None, '']: + Cmd += ["-a", Align] + + Cmd += ["-o", Output] + for I in range(0, len(Input)): + Cmd += ("-i", Input[I]) + if SectionAlign not in [None, '', []] and SectionAlign[I] not in [None, '']: + Cmd += ("-n", SectionAlign[I]) + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FFS") + + @staticmethod + def GenerateFirmwareVolume(Output, Input, BaseAddress=None, Capsule=False, Dump=False, + AddressFile=None, MapFile=None, FfsList=[]): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input+FfsList): + return + GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["GenFv"] + if BaseAddress not in [None, '']: + Cmd += ["-r", BaseAddress] + if Capsule: + Cmd += ["-c"] + if Dump: + Cmd += ["-p"] + if AddressFile not in [None, '']: + Cmd += ["-a", AddressFile] + if MapFile not in [None, '']: + Cmd += ["-m", MapFile] + Cmd += ["-o", Output] + for I in Input: + Cmd += ["-i", I] + + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate FV") + + @staticmethod + def GenerateVtf(Output, Input, BaseAddress=None, FvSize=None): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): + return + GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["GenVtf"] + if BaseAddress not in [None, ''] and FvSize not in [None, ''] \ + and len(BaseAddress) == len(FvSize): + for I in range(0, len(BaseAddress)): + Cmd += ["-r", BaseAddress[I], "-s", FvSize[I]] + Cmd += ["-o", Output] + for F in Input: + Cmd += ["-f", F] + + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to 
generate VTF") + + @staticmethod + def GenerateFirmwareImage(Output, Input, Type="efi", SubType=None, Zero=False, + Strip=False, Replace=False, TimeStamp=None, Join=False, + Align=None, Padding=None, Convert=False): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): + return + GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["GenFw"] + if Type.lower() == "te": + Cmd += ["-t"] + if SubType not in [None, '']: + Cmd += ["-e", SubType] + if TimeStamp not in [None, '']: + Cmd += ["-s", TimeStamp] + if Align not in [None, '']: + Cmd += ["-a", Align] + if Padding not in [None, '']: + Cmd += ["-p", Padding] + if Zero: + Cmd += ["-z"] + if Strip: + Cmd += ["-l"] + if Replace: + Cmd += ["-r"] + if Join: + Cmd += ["-j"] + if Convert: + Cmd += ["-m"] + Cmd += ["-o", Output] + Cmd += Input + + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate firmware image") + + @staticmethod + def GenerateOptionRom(Output, EfiInput, BinaryInput, Compress=False, ClassCode=None, + Revision=None, DeviceId=None, VendorId=None): +# if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): +# return +# GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = ["EfiRom"] + if len(EfiInput) > 0: + + if Compress: + Cmd += ["-ec"] + else: + Cmd += ["-e"] + + for EfiFile in EfiInput: + Cmd += [EfiFile] + + if len(BinaryInput) > 0: + Cmd += ["-b"] + for BinFile in BinaryInput: + Cmd += [BinFile] + + if ClassCode != None: + Cmd += ["-l", ClassCode] + if Revision != None: + Cmd += ["-r", Revision] + if DeviceId != None: + Cmd += ["-i", DeviceId] + if VendorId != None: + Cmd += ["-f", VendorId] + + Cmd += ["-o", Output] + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to generate option rom") + + @staticmethod + def GuidTool(Output, Input, ToolPath, Options=''): + if not GenFdsGlobalVariable.NeedsUpdate(Output, Input): + return + 
GenFdsGlobalVariable.DebugLogger(EdkLogger.DEBUG_5, "%s needs update because of newer %s" % (Output, Input)) + + Cmd = [ToolPath, Options] + Cmd += ["-o", Output] + Cmd += Input + + GenFdsGlobalVariable.CallExternalTool(Cmd, "Failed to call " + ToolPath) + + def CallExternalTool (cmd, errorMess): + + if type(cmd) not in (tuple, list): + GenFdsGlobalVariable.ErrorLogger("ToolError! Invalid parameter type in call to CallExternalTool") + + if GenFdsGlobalVariable.DebugLevel != -1: + cmd += ('--debug', str(GenFdsGlobalVariable.DebugLevel)) + GenFdsGlobalVariable.InfLogger (cmd) + + if GenFdsGlobalVariable.VerboseMode: + cmd += ('-v',) + GenFdsGlobalVariable.InfLogger (cmd) + else: + sys.stdout.write ('#') + sys.stdout.flush() + GenFdsGlobalVariable.SharpCounter = GenFdsGlobalVariable.SharpCounter + 1 + if GenFdsGlobalVariable.SharpCounter % GenFdsGlobalVariable.SharpNumberPerLine == 0: + sys.stdout.write('\n') + + try: + PopenObject = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr= subprocess.PIPE) + except Exception, X: + EdkLogger.error("GenFds", BuildToolError.COMMAND_FAILURE, ExtraData="%s: %s" % (str(X), cmd[0])) + (out, error) = PopenObject.communicate() + + while PopenObject.returncode == None : + PopenObject.wait() + if PopenObject.returncode != 0 or GenFdsGlobalVariable.VerboseMode or GenFdsGlobalVariable.DebugLevel != -1: + GenFdsGlobalVariable.InfLogger ("Return Value = %d" %PopenObject.returncode) + GenFdsGlobalVariable.InfLogger (out) + GenFdsGlobalVariable.InfLogger (error) + if PopenObject.returncode != 0: + print "###", cmd + EdkLogger.error("GenFds", BuildToolError.COMMAND_FAILURE, errorMess) + + def VerboseLogger (msg): + EdkLogger.verbose(msg) + + def InfLogger (msg): + EdkLogger.info(msg) + + def ErrorLogger (msg, File = None, Line = None, ExtraData = None): + EdkLogger.error('GenFds', BuildToolError.GENFDS_ERROR, msg, File, Line, ExtraData) + + def DebugLogger (Level, msg): + EdkLogger.debug(Level, msg) + + ## ReplaceWorkspaceMacro() + # + # 
@param Str String that may contain macro + # @param MacroDict Dictionary that contains macro value pair + # + def MacroExtend (Str, MacroDict = {}, Arch = 'COMMON'): + if Str == None : + return None + + Dict = {'$(WORKSPACE)' : GenFdsGlobalVariable.WorkSpaceDir, + '$(EDK_SOURCE)' : GenFdsGlobalVariable.EdkSourceDir, +# '$(OUTPUT_DIRECTORY)': GenFdsGlobalVariable.OutputDirFromDsc, + '$(TARGET)' : GenFdsGlobalVariable.TargetName, + '$(TOOL_CHAIN_TAG)' : GenFdsGlobalVariable.ToolChainTag + } + OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[GenFdsGlobalVariable.ArchList[0]] + if Arch != 'COMMON' and Arch in GenFdsGlobalVariable.ArchList: + OutputDir = GenFdsGlobalVariable.OutputDirFromDscDict[Arch] + + Dict['$(OUTPUT_DIRECTORY)'] = OutputDir + + if MacroDict != None and len (MacroDict) != 0: + Dict.update(MacroDict) + + for key in Dict.keys(): + if Str.find(key) >= 0 : + Str = Str.replace (key, Dict[key]) + + if Str.find('$(ARCH)') >= 0: + if len(GenFdsGlobalVariable.ArchList) == 1: + Str = Str.replace('$(ARCH)', GenFdsGlobalVariable.ArchList[0]) + else: + EdkLogger.error("GenFds", GENFDS_ERROR, "No way to determine $(ARCH) for %s" % Str) + + return Str + + ## GetPcdValue() + # + # @param PcdPattern pattern that labels a PCD. + # + def GetPcdValue (PcdPattern): + if PcdPattern == None : + return None + PcdPair = PcdPattern.lstrip('PCD(').rstrip(')').strip().split('.') + TokenSpace = PcdPair[0] + TokenCName = PcdPair[1] + + PcdValue = '' + for Platform in GenFdsGlobalVariable.WorkSpace.PlatformList: + PcdDict = Platform.Pcds + for Key in PcdDict: + PcdObj = PcdDict[Key] + if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace): + if PcdObj.Type != 'FixedAtBuild': + EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern) + if PcdObj.DatumType != 'VOID*': + EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." 
% PcdPattern) + + PcdValue = PcdObj.DefaultValue + return PcdValue + + for Package in GenFdsGlobalVariable.WorkSpace.PackageList: + PcdDict = Package.Pcds + for Key in PcdDict: + PcdObj = PcdDict[Key] + if (PcdObj.TokenCName == TokenCName) and (PcdObj.TokenSpaceGuidCName == TokenSpace): + if PcdObj.Type != 'FixedAtBuild': + EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not FixedAtBuild type." % PcdPattern) + if PcdObj.DatumType != 'VOID*': + EdkLogger.error("GenFds", GENFDS_ERROR, "%s is not VOID* datum type." % PcdPattern) + + PcdValue = PcdObj.DefaultValue + return PcdValue + + return PcdValue + + SetDir = staticmethod(SetDir) + ReplaceWorkspaceMacro = staticmethod(ReplaceWorkspaceMacro) + CallExternalTool = staticmethod(CallExternalTool) + VerboseLogger = staticmethod(VerboseLogger) + InfLogger = staticmethod(InfLogger) + ErrorLogger = staticmethod(ErrorLogger) + DebugLogger = staticmethod(DebugLogger) + MacroExtend = staticmethod (MacroExtend) + GetPcdValue = staticmethod(GetPcdValue) diff --git a/BaseTools/Source/Python/GenFds/GuidSection.py b/BaseTools/Source/Python/GenFds/GuidSection.py new file mode 100644 index 0000000000..e111e0fe50 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/GuidSection.py @@ -0,0 +1,190 @@ +## @file
+# process GUIDed section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+import subprocess
+from Ffs import Ffs
+import os
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import GuidSectionClassObject
+from Common import ToolDefClassObject
+import sys
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## generate GUIDed section
+#
+#
## generate GUIDed section
#
# Builds an EFI_SECTION_GUID_DEFINED section, either through the default
# CRC32 path (no GUID given) or by running the external GUIDed-section tool
# registered for self.NameGuid in tools_def.txt.
#
class GuidSection(GuidSectionClassObject) :

    ## The constructor
    #
    # @param self The object pointer
    #
    def __init__(self):
        GuidSectionClassObject.__init__(self)

    ## GenSection() method
    #
    # Generate GUIDed section
    #
    # @param self The object pointer
    # @param OutputPath Where to place output file
    # @param ModuleName Which module this section belongs to
    # @param SecNum Index of section
    # @param KeyStringList Filter for inputs of section generation
    # @param FfsInf FfsInfStatement object that contains this section data
    # @param Dict dictionary contains macro and its value (passed through to sub-sections)
    # @retval tuple (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):
        #
        # Generate all sub-sections first; their output files are the input
        # of this GUIDed section.
        #
        self.KeyStringList = KeyStringList
        self.CurrentArchList = GenFdsGlobalVariable.ArchList
        if FfsInf is not None:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.NameGuid = FfsInf.__ExtendMacro__(self.NameGuid)
            self.SectionType = FfsInf.__ExtendMacro__(self.SectionType)
            self.CurrentArchList = [FfsInf.CurrentArch]

        SectFile = tuple()
        Index = 0
        for Sect in self.SectionList:
            Index = Index + 1
            SecIndex = '%s.%d' %(SecNum,Index)
            ReturnSectList, align = Sect.GenSection(OutputPath, ModuleName, SecIndex, KeyStringList,FfsInf, Dict)
            for file in ReturnSectList:
                SectFile += (file,)

        OutputFile = OutputPath + \
                     os.sep     + \
                     ModuleName + \
                     'SEC'      + \
                     SecNum     + \
                     Ffs.SectionSuffix['GUIDED']
        OutputFile = os.path.normpath(OutputFile)

        ExternalTool = None
        if self.NameGuid is not None:
            ExternalTool = self.__FindExtendTool__()

        if self.NameGuid is None:
            #
            # If no GUID was given, call the default GenSection CRC32 path.
            #
            GenFdsGlobalVariable.VerboseLogger( "Use GenSection function Generate CRC32 Section")
            GenFdsGlobalVariable.GenerateSection(OutputFile, SectFile, Section.Section.SectionType[self.SectionType])
            OutputFileList = []
            OutputFileList.append(OutputFile)
            return OutputFileList, self.Alignment
        elif ExternalTool is None:
            # A GUID was given but no tool for it is registered in tools_def.txt.
            EdkLogger.error("GenFds", GENFDS_ERROR, "No tool found with GUID %s" % self.NameGuid)
        else:
            #
            # Call GenSection with DUMMY section type to wrap the sub-section
            # data, then let the external tool process that output.
            #
            InputFile = OutputFile + ".dummy"
            GenFdsGlobalVariable.GenerateSection(InputFile, SectFile)

            TempFile = OutputPath + \
                       os.sep     + \
                       ModuleName + \
                       'SEC'      + \
                       SecNum     + \
                       '.tmp'
            TempFile = os.path.normpath(TempFile)

            #
            # Call the external tool.
            #
            GenFdsGlobalVariable.GuidTool(TempFile, [InputFile], ExternalTool, '-e')

            #
            # Call GenSection again to add the GUIDed section header.
            # Fixed typo: was 'PROCSSING_REQUIRED', a spelling GenSec does not
            # recognize, so the attribute was silently dropped.
            # NOTE(review): when both flags are set only AUTH_STATUS_VALID is
            # passed, since GenerateSection accepts one attribute string --
            # confirm this precedence is intended.
            #
            Attribute = None
            if self.ProcessRequired == True:
                Attribute = 'PROCESSING_REQUIRED'
            if self.AuthStatusValid == True:
                Attribute = 'AUTH_STATUS_VALID'
            GenFdsGlobalVariable.GenerateSection(OutputFile, [TempFile], Section.Section.SectionType['GUIDED'],
                                                 Guid=self.NameGuid, GuidAttr=Attribute)
            OutputFileList = []
            OutputFileList.append(OutputFile)
            return OutputFileList, self.Alignment

    ## __FindExtendTool__() method
    #
    # Find the location of the tool that processes this section's data. The
    # tool is identified by a tools_def.txt entry of the form
    # TARGET_TOOLCHAIN_ARCH_TOOLNAME_GUID whose value equals self.NameGuid;
    # the matching *_PATH entry holds the executable path.
    #
    # @param self The object pointer
    # @retval string Path of the external tool, or None when none matches
    #
    def __FindExtendTool__(self):
        # If the user did not specify a filter, deduce one from the current
        # target, toolchain tag and architecture list.
        if self.KeyStringList is None or self.KeyStringList == []:
            Target = GenFdsGlobalVariable.TargetName
            ToolChain = GenFdsGlobalVariable.ToolChainTag
            ToolDb = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.WorkSpaceDir).ToolsDefTxtDatabase
            if ToolChain not in ToolDb['TOOL_CHAIN_TAG']:
                EdkLogger.error("GenFds", GENFDS_ERROR, "Can not find external tool because tool tag %s is not defined in tools_def.txt!" % ToolChain)
            self.KeyStringList = [Target + '_' + ToolChain + '_' + self.CurrentArchList[0]]
            for Arch in self.CurrentArchList:
                if Target + '_' + ToolChain + '_' + Arch not in self.KeyStringList:
                    self.KeyStringList.append(Target + '_' + ToolChain + '_' + Arch)

        ToolDefinition = ToolDefClassObject.ToolDefDict(GenFdsGlobalVariable.WorkSpaceDir).ToolsDefTxtDictionary
        ToolPathTmp = None
        for ToolDef in ToolDefinition.items():
            if self.NameGuid == ToolDef[1]:
                KeyList = ToolDef[0].split('_')
                Key = KeyList[0] + '_' + KeyList[1] + '_' + KeyList[2]
                if Key in self.KeyStringList and KeyList[4] == 'GUID':
                    ToolPath = ToolDefinition.get(Key + '_' + KeyList[3] + '_' + 'PATH')
                    if ToolPathTmp is None:
                        ToolPathTmp = ToolPath
                    elif ToolPathTmp != ToolPath:
                        # Two different registered tools claim the same GUID.
                        EdkLogger.error("GenFds", GENFDS_ERROR, "Don't know which tool to use, %s or %s ?" % (ToolPathTmp, ToolPath))
        return ToolPathTmp
+
+
+
diff --git a/BaseTools/Source/Python/GenFds/OptRomFileStatement.py b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py new file mode 100644 index 0000000000..c360c6d9ad --- /dev/null +++ b/BaseTools/Source/Python/GenFds/OptRomFileStatement.py @@ -0,0 +1,50 @@ +## @file
+# process OptionROM generation from FILE statement
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+##
+#
+#
##
# Represents one FILE statement inside an OptionROM section of the FDF.
#
class OptRomFileStatement:
    ## The constructor
    #
    # @param self The object pointer
    #
    def __init__(self):
        # Path of the binary named by the FILE statement (filled by the parser).
        self.FileName = None
        # File type keyword from the FDF statement (filled by the parser).
        self.FileType = None
        # Optional PCI header overrides (filled by the parser).
        self.OverrideAttribs = None

    ## GenFfs() method
    #
    # Generate FFS: resolve workspace macros in the file path and return it.
    #
    # @param self The object pointer
    # @param Dict dictionary contains macro and value pair (unused here,
    #             kept for interface compatibility with sibling statements)
    # @retval string Generated FFS file name
    #
    def GenFfs(self, Dict = {}):
        if self.FileName is None:
            return None
        self.FileName = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName)
        return self.FileName
+
+
+
diff --git a/BaseTools/Source/Python/GenFds/OptRomInfStatement.py b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py new file mode 100644 index 0000000000..b9f0af54c9 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/OptRomInfStatement.py @@ -0,0 +1,147 @@ +## @file
+# process OptionROM generation from INF statement
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import RuleSimpleFile
+import RuleComplexFile
+import Section
+import OptionRom
+import Common.GlobalData as GlobalData
+
+from Common.DataType import *
+from Common.String import *
+from FfsInfStatement import FfsInfStatement
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+
+##
+#
+#
##
# Processes an INF statement inside an OptionROM section: parses the module
# INF, collects PCI override parameters, and gathers the .efi files that go
# into the option ROM image.
#
class OptRomInfStatement (FfsInfStatement):
    ## The constructor
    #
    # @param self The object pointer
    #
    def __init__(self):
        FfsInfStatement.__init__(self)
        # PCI header overrides, lazily populated from the INF defines.
        self.OverrideAttribs = None

    ## __GetOptRomParams() method
    #
    # Parse inf file to get option ROM related parameters
    #
    # @param self The object pointer
    #
    def __GetOptRomParams(self):
        if self.OverrideAttribs is None:
            self.OverrideAttribs = OptionRom.OverrideAttribs()

        # For each PCI attribute not already overridden on the FDF statement,
        # fall back to the value defined in the module's INF.
        Attribs = self.OverrideAttribs
        if Attribs.PciVendorId is None:
            Attribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')
        if Attribs.PciClassCode is None:
            Attribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')
        if Attribs.PciDeviceId is None:
            Attribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')
        if Attribs.PciRevision is None:
            Attribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')

    ## GenFfs() method
    #
    # Generate FFS
    #
    # @param self The object pointer
    # @retval list Generated .efi file names
    #
    def GenFfs(self):
        # Parse the INF to get module related information, then collect the
        # option ROM parameters it defines.
        self.__InfParse__()
        self.__GetOptRomParams()

        # Get the rule of how to generate the Ffs file and dispatch on it.
        Rule = self.__GetRule__()
        GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
        if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
            # The rule only has a simple file.
            return self.__GenSimpleFileSection__(Rule)
        if isinstance(Rule, RuleComplexFile.RuleComplexFile):
            # The rule has a complex file.
            return self.__GenComplexFileSection__(Rule)

    ## __GenSimpleFileSection__() method
    #
    # Get .efi files according to simple rule.
    #
    # @param self The object pointer
    # @param Rule The rule object used to generate section
    # @retval list File names of the collected .efi files
    #
    def __GenSimpleFileSection__(self, Rule):
        if Rule.FileName is not None:
            return [self.__ExtendMacro__(Rule.FileName)]
        FileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)
        return FileList

    ## __GenComplexFileSection__() method
    #
    # Get .efi by sections in complex Rule
    #
    # @param self The object pointer
    # @param Rule The rule object used to generate section
    # @retval list File names of the collected .efi files
    #
    def __GenComplexFileSection__(self, Rule):
        EfiFileList = []
        for Sect in Rule.SectionList:
            # Only PE32 sections contribute to the option ROM image.
            if Sect.SectionType != 'PE32':
                continue
            if Sect.FileName is not None:
                EfiFileList.append(self.__ExtendMacro__(Sect.FileName))
            else:
                FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
                EfiFileList.extend(FileList)
        return EfiFileList
+
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/GenFds/OptionRom.py b/BaseTools/Source/Python/GenFds/OptionRom.py new file mode 100644 index 0000000000..e102e65f1c --- /dev/null +++ b/BaseTools/Source/Python/GenFds/OptionRom.py @@ -0,0 +1,140 @@ +## @file
+# process OptionROM generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import shutil
+import subprocess
+import StringIO
+
+import OptRomInfStatement
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from GenFds import GenFds
+from CommonDataClass.FdfClass import OptionRomClassObject
+from Common.Misc import SaveFileOnChange
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+T_CHAR_LF = '\n'
+
+##
+#
+#
class OPTIONROM (OptionRomClassObject):
    """Generates a PCI option ROM image (via the EfiRom tool) from the
    INF/FILE statements collected in an [OptionRom.*] FDF section."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        OptionRomClassObject.__init__(self)

    ## Generate an intermediate ROM for files carrying override attributes.
    #
    #   Files with PCI override attributes must be wrapped into a temporary
    #   .rom first so the overridden PCI header values take effect; the final
    #   EfiRom invocation then treats the result as an opaque binary.
    #   (Extracted: this logic was previously duplicated in both branches of
    #   AddToBuffer.)
    #
    #   @param  self            The object pointer
    #   @param  FilePathList    List of input file paths (first one names the output)
    #   @param  Attribs         OverrideAttribs with the PCI values to apply
    #   @retval string          Path of the generated temporary file
    #
    def _GenTempRomImage(self, FilePathList, Attribs):
        FileName = os.path.basename(FilePathList[0])
        TmpOutputDir = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName)
        if not os.path.exists(TmpOutputDir) :
            os.makedirs(TmpOutputDir)
        TmpOutputFile = os.path.join(TmpOutputDir, FileName+'.tmp')

        GenFdsGlobalVariable.GenerateOptionRom(TmpOutputFile,
                                               FilePathList,
                                               [],
                                               Attribs.NeedCompress,
                                               Attribs.PciClassCode,
                                               Attribs.PciRevision,
                                               Attribs.PciDeviceId,
                                               Attribs.PciVendorId)
        return TmpOutputFile

    ## AddToBuffer()
    #
    #   Generate Option ROM
    #
    #   @param  self        The object pointer
    #   @param  Buffer      The buffer generated OptROM data will be put
    #                       (currently unused; kept for interface compatibility)
    #   @retval string      Generated OptROM file path
    #
    def AddToBuffer (self, Buffer) :

        GenFdsGlobalVariable.InfLogger( "\nGenerating %s Option ROM ..." %self.DriverName)

        EfiFileList = []
        BinFileList = []

        # Sort the output of every FFS statement into plain EFI images and
        # pre-built binaries for the final EfiRom invocation.
        for FfsFile in self.FfsList :
            if isinstance(FfsFile, OptRomInfStatement.OptRomInfStatement):
                FilePathNameList = FfsFile.GenFfs()
                if len(FilePathNameList) == 0:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "Module %s not produce .efi files, so NO file could be put into option ROM." % (FfsFile.InfFileName))
                if FfsFile.OverrideAttribs == None:
                    EfiFileList.extend(FilePathNameList)
                else:
                    BinFileList.append(
                        self._GenTempRomImage(FilePathNameList, FfsFile.OverrideAttribs))
            else:
                FilePathName = FfsFile.GenFfs()
                if FfsFile.OverrideAttribs != None:
                    BinFileList.append(
                        self._GenTempRomImage([FilePathName], FfsFile.OverrideAttribs))
                elif FfsFile.FileType == 'EFI':
                    EfiFileList.append(FilePathName)
                else:
                    BinFileList.append(FilePathName)

        #
        # Call EfiRom tool to build the final option ROM image.
        #
        OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.DriverName)
        OutputFile = OutputFile + '.rom'

        GenFdsGlobalVariable.GenerateOptionRom(
                                OutputFile,
                                EfiFileList,
                                BinFileList
                                )

        GenFdsGlobalVariable.InfLogger( "\nGenerate %s Option ROM Successfully" %self.DriverName)
        GenFdsGlobalVariable.SharpCounter = 0

        return OutputFile
+
class OverrideAttribs:
    """Holds the PCI header values an [OptionRom] INF may override.

    Each Pci* field is None when no override was given; NeedCompress tells
    EfiRom whether to compress the EFI image.
    """

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # All PCI identification overrides start out unset.
        for AttribName in ('PciVendorId', 'PciClassCode',
                           'PciDeviceId', 'PciRevision'):
            setattr(self, AttribName, None)
        self.NeedCompress = False
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/GenFds/Region.py b/BaseTools/Source/Python/GenFds/Region.py new file mode 100644 index 0000000000..ed16c6fa98 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Region.py @@ -0,0 +1,240 @@ +## @file
+# process FD Region generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from struct import *
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import StringIO
+from CommonDataClass.FdfClass import RegionClassObject
+import os
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+
+## generate Region
+#
+#
## generate Region
#
#
class Region(RegionClassObject):

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        RegionClassObject.__init__(self)


    ## AddToBuffer()
    #
    #   Add region data to the Buffer
    #
    #   @param  self            The object pointer
    #   @param  Buffer          The buffer generated region data will be put
    #   @param  BaseAddress     base address of region
    #   @param  BlockSizeList   block size information of the parent FD
    #   @param  ErasePolarity   Flash erase polarity ('1' pads with 0xFF, otherwise 0x00)
    #   @param  FvBinDict       dictionary of FV name -> already generated FV binary
    #   @param  vtfDict         VTF objects
    #   @param  MacroDict       macro value pair
    #
    def AddToBuffer(self, Buffer, BaseAddress, BlockSizeList, ErasePolarity, FvBinDict, vtfDict = None, MacroDict = {}):
        Size = self.Size
        GenFdsGlobalVariable.InfLogger('Generate Region at Offset 0x%X' % self.Offset)
        GenFdsGlobalVariable.InfLogger(" Region Size = 0x%X" %Size)
        GenFdsGlobalVariable.SharpCounter = 0

        if self.RegionType == 'FV':
            #
            # Get Fv from FvDict
            #
            FvBuffer = StringIO.StringIO('')
            RegionBlockSize = self.BlockSizeOfRegion(BlockSizeList)
            RegionBlockNum = self.BlockNumOfRegion(RegionBlockSize)

            # FvAddress tracks where the next FV will land inside the FD.
            self.FvAddress = int(BaseAddress, 16) + self.Offset
            FvBaseAddress = '0x%X' %self.FvAddress

            for RegionData in self.RegionDataList:

                if RegionData.endswith(".fv"):
                    # Pre-built FV binary: copy the file contents verbatim and
                    # stop processing further region data (note the break).
                    RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                    GenFdsGlobalVariable.InfLogger(' Region FV File Name = .fv : %s'%RegionData)
                    if RegionData[1] != ':' :
                        # Not an absolute (drive-letter) path: treat as workspace-relative.
                        RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                    if not os.path.exists(RegionData):
                        EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)

                    BinFile = open (RegionData, 'r+b')
                    FvBuffer.write(BinFile.read())
                    if FvBuffer.len > Size:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV File (%s) is larger than Region Size 0x%X specified." \
                                        % (RegionData, Size))
                    break

                # FV already generated earlier in this run: nothing to do here.
                if RegionData.upper() in FvBinDict.keys():
                    continue

                # Look up the FV object described in the FDF file.
                FvObj = None
                if RegionData.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():
                    FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(RegionData.upper())

                if FvObj != None :
                    GenFdsGlobalVariable.InfLogger(' Region Name = FV')
                    #
                    # Call GenFv tool
                    #
                    # The FV's own block size/number (if given) override the
                    # values derived from the FD's block list.
                    BlockSize = RegionBlockSize
                    BlockNum = RegionBlockNum
                    if FvObj.BlockSizeList != []:
                        if FvObj.BlockSizeList[0][0] != None:
                            BlockSize = FvObj.BlockSizeList[0][0]
                        if FvObj.BlockSizeList[0][1] != None:
                            BlockNum = FvObj.BlockSizeList[0][1]
                    # Advance past the data already accumulated in FvBuffer.
                    self.FvAddress = self.FvAddress + FvBuffer.len
                    FvAlignValue = self.GetFvAlignValue(FvObj.FvAlignment)
                    if self.FvAddress % FvAlignValue != 0:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "FV (%s) is NOT %s Aligned!" % (FvObj.UiFvName, FvObj.FvAlignment))
                    FvBaseAddress = '0x%X' %self.FvAddress
                    FileName = FvObj.AddToBuffer(FvBuffer, FvBaseAddress, BlockSize, BlockNum, ErasePolarity, vtfDict)

                    if FvBuffer.len > Size:
                        EdkLogger.error("GenFds", GENFDS_ERROR,
                                        "Size of FV (%s) is larger than Region Size 0x%X specified." % (RegionData, Size))
                else:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "FV (%s) is NOT described in FDF file!" % (RegionData))


            if FvBuffer.len > 0:
                Buffer.write(FvBuffer.getvalue())
            else:
                # NOTE(review): FileName is only bound inside the FvObj branch
                # above; this fallback assumes at least one FV was generated.
                BinFile = open (FileName, 'rb')
                Buffer.write(BinFile.read())

            FvBuffer.close()

        if self.RegionType == 'FILE':
            # Raw file region: concatenate the listed files, then pad to Size.
            FvBuffer = StringIO.StringIO('')
            for RegionData in self.RegionDataList:
                RegionData = GenFdsGlobalVariable.MacroExtend(RegionData, MacroDict)
                GenFdsGlobalVariable.InfLogger(' Region File Name = FILE: %s'%RegionData)
                if RegionData[1] != ':' :
                    RegionData = os.path.join (GenFdsGlobalVariable.WorkSpaceDir, RegionData)
                if not os.path.exists(RegionData):
                    EdkLogger.error("GenFds", FILE_NOT_FOUND, ExtraData=RegionData)

                BinFile = open (RegionData, 'r+b')
                FvBuffer.write(BinFile.read())
                if FvBuffer.len > Size :
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "Size of File (%s) large than Region Size " % RegionData)

            #
            # If File contents less than region size, append "0xff" after it
            #
            if FvBuffer.len < Size:
                for index in range(0, (Size-FvBuffer.len)):
                    if (ErasePolarity == '1'):
                        FvBuffer.write(pack('B', int('0xFF', 16)))
                    else:
                        FvBuffer.write(pack('B', int('0x00', 16)))
            Buffer.write(FvBuffer.getvalue())
            FvBuffer.close()

        if self.RegionType == 'DATA' :
            # Literal byte values: each region data entry is a comma-separated
            # list of hex bytes written directly to the buffer.
            GenFdsGlobalVariable.InfLogger(' Region Name = DATA')
            DataSize = 0
            for RegionData in self.RegionDataList:
                Data = RegionData.split(',')
                DataSize = DataSize + len(Data)
                if DataSize > Size:
                    EdkLogger.error("GenFds", GENFDS_ERROR, "Size of DATA is larger than Region Size ")
                else:
                    for item in Data :
                        Buffer.write(pack('B', int(item, 16)))
            # Pad the remainder according to the erase polarity.
            if DataSize < Size:
                if (ErasePolarity == '1'):
                    PadData = 0xFF
                else:
                    PadData = 0
                for i in range(Size - DataSize):
                    Buffer.write(pack('B', PadData))

        if self.RegionType == None:
            # Unassigned region: fill entirely with erased-flash padding.
            GenFdsGlobalVariable.InfLogger(' Region Name = None')
            if (ErasePolarity == '1') :
                PadData = 0xFF
            else :
                PadData = 0
            for i in range(0, Size):
                Buffer.write(pack('B', PadData))

    ## GetFvAlignValue()
    #
    #   Convert an FV alignment string (e.g. "64K", "1M") to a byte count.
    #
    #   @param  self        The object pointer
    #   @param  Str         Alignment string, optionally suffixed with K/M/G
    #   @retval int         Alignment value in bytes
    #
    def GetFvAlignValue(self, Str):
        AlignValue = 1
        Granu = 1
        Str = Str.strip().upper()
        if Str.endswith('K'):
            Granu = 1024
            Str = Str[:-1]
        elif Str.endswith('M'):
            Granu = 1024*1024
            Str = Str[:-1]
        elif Str.endswith('G'):
            Granu = 1024*1024*1024
            Str = Str[:-1]
        else:
            pass

        AlignValue = int(Str)*Granu
        return AlignValue
    ## BlockSizeOfRegion()
    #
    #   Find the block size of the FD block entry this region falls into.
    #
    #   @param  BlockSizeList   List of (BlockSize, BlockNum) block information
    #   @retval int             Block size of region (0 if not covered by any entry)
    #
    def BlockSizeOfRegion(self, BlockSizeList):
        Offset = 0x00
        BlockSize = 0
        for item in BlockSizeList:
            # Running end offset of the blocks seen so far.
            Offset = Offset + item[0] * item[1]
            GenFdsGlobalVariable.VerboseLogger ("Offset = 0x%X" %Offset)
            GenFdsGlobalVariable.VerboseLogger ("self.Offset 0x%X" %self.Offset)

            if self.Offset < Offset :
                # The region starts within this block entry; it must also fit.
                if Offset - self.Offset < self.Size:
                    EdkLogger.error("GenFds", GENFDS_ERROR,
                                    "Region at Offset 0x%X can NOT fit into Block array with BlockSize %X" \
                                    % (self.Offset, item[0]))
                BlockSize = item[0]
                GenFdsGlobalVariable.VerboseLogger ("BlockSize = %X" %BlockSize)
                return BlockSize
        return BlockSize

    ## BlockNumOfRegion()
    #
    #   Compute how many blocks of the given size this region spans.
    #
    #   @param  BlockSize   block size of region
    #   @retval int         Block number of region
    #
    def BlockNumOfRegion (self, BlockSize):
        if BlockSize == 0 :
            EdkLogger.error("GenFds", GENFDS_ERROR, "Region: %s is not in the FD address scope!" % self.Offset)
        BlockNum = self.Size / BlockSize
        GenFdsGlobalVariable.VerboseLogger ("BlockNum = 0x%X" %BlockNum)
        return BlockNum
+
diff --git a/BaseTools/Source/Python/GenFds/Rule.py b/BaseTools/Source/Python/GenFds/Rule.py new file mode 100644 index 0000000000..40a5f88bab --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Rule.py @@ -0,0 +1,29 @@ +## @file
+# Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from CommonDataClass.FdfClass import RuleClassObject
+
+## Rule base class
+#
+#
class Rule(RuleClassObject):
    """Base class for FFS-generation rules parsed from an FDF [Rule] section."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # All rule fields are initialized by the shared class object.
        RuleClassObject.__init__(self)
diff --git a/BaseTools/Source/Python/GenFds/RuleComplexFile.py b/BaseTools/Source/Python/GenFds/RuleComplexFile.py new file mode 100644 index 0000000000..63e65c5970 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/RuleComplexFile.py @@ -0,0 +1,30 @@ +## @file
+# Complex Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Rule
+from CommonDataClass.FdfClass import RuleComplexFileClassObject
+
+## complex rule
+#
+#
class RuleComplexFile(RuleComplexFileClassObject) :
    """Rule that builds an FFS file from multiple explicit sections."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # All rule fields are initialized by the shared class object.
        RuleComplexFileClassObject.__init__(self)
diff --git a/BaseTools/Source/Python/GenFds/RuleSimpleFile.py b/BaseTools/Source/Python/GenFds/RuleSimpleFile.py new file mode 100644 index 0000000000..c6fdbd88dc --- /dev/null +++ b/BaseTools/Source/Python/GenFds/RuleSimpleFile.py @@ -0,0 +1,30 @@ +## @file
+# Simple Rule object for generating FFS
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Rule
+from CommonDataClass.FdfClass import RuleSimpleFileClassObject
+
+## simple rule
+#
+#
class RuleSimpleFile (RuleSimpleFileClassObject) :
    """Rule that builds an FFS file from a single leaf file."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        # All rule fields are initialized by the shared class object.
        RuleSimpleFileClassObject.__init__(self)
diff --git a/BaseTools/Source/Python/GenFds/Section.py b/BaseTools/Source/Python/GenFds/Section.py new file mode 100644 index 0000000000..ffca3a11fe --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Section.py @@ -0,0 +1,153 @@ +## @file
+# section base class
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from CommonDataClass.FdfClass import SectionClassObject
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import os, glob
+from Common import EdkLogger
+from Common.BuildToolError import *
+
+## section base class
+#
+#
## section base class
#
#
class Section (SectionClassObject):
    # Map FDF section-type keyword -> EFI section type name passed to GenSec.
    SectionType = {
        'RAW'       : 'EFI_SECTION_RAW',
        'FREEFORM'  : 'EFI_SECTION_FREEFORM_SUBTYPE_GUID',
        'PE32'      : 'EFI_SECTION_PE32',
        'PIC'       : 'EFI_SECTION_PIC',
        'TE'        : 'EFI_SECTION_TE',
        'FV_IMAGE'  : 'EFI_SECTION_FIRMWARE_VOLUME_IMAGE',
        'DXE_DEPEX' : 'EFI_SECTION_DXE_DEPEX',
        'PEI_DEPEX' : 'EFI_SECTION_PEI_DEPEX',
        'GUIDED'    : 'EFI_SECTION_GUID_DEFINED',
        'COMPRESS'  : 'EFI_SECTION_COMPRESSION',
        'UI'        : 'EFI_SECTION_USER_INTERFACE',
        'SMM_DEPEX' : 'EFI_SECTION_SMM_DEPEX'
    }

    # Map file-type keyword -> suffix of the binary the build produces for it.
    BinFileType = {
        'GUID'          : '.guid',
        'ACPI'          : '.acpi',
        'ASL'           : '.asl' ,
        'UEFI_APP'      : '.app',
        'LIB'           : '.lib',
        'PE32'          : '.pe32',
        'PIC'           : '.pic',
        'PEI_DEPEX'     : '.depex',
        'SEC_PEI_DEPEX' : '.depex',
        'TE'            : '.te',
        'UNI_VER'       : '.ver',
        'VER'           : '.ver',
        'UNI_UI'        : '.ui',
        'UI'            : '.ui',
        'BIN'           : '.bin',
        'RAW'           : '.raw',
        'COMPAT16'      : '.comp16',
        'FV'            : '.fv'
    }

    # File types whose build output is already a section (".sec") file.
    SectFileType = {
        'SEC_GUID'      : '.sec' ,
        'SEC_PE32'      : '.sec' ,
        'SEC_PIC'       : '.sec',
        'SEC_TE'        : '.sec',
        'SEC_VER'       : '.sec',
        'SEC_UI'        : '.sec',
        'SEC_COMPAT16'  : '.sec',
        'SEC_BIN'       : '.sec'
    }

    # GUIDed-section tool GUID -> external tool name.
    ToolGuid = {
        '0xa31280ad-0x481e-0x41b6-0x95e8-0x127f-0x4c984779' : 'TianoCompress',
        '0xee4e5898-0x3914-0x4259-0x9d6e-0xdc7b-0xd79403cf' : 'LzmaCompress'
    }

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        SectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   virtual function: concrete section classes override this.
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  GuidName    Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  keyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #
    def GenSection(self, OutputPath, GuidName, SecNum, keyStringList, FfsInf = None, Dict = {}):
        pass

    ## GetFileList() method
    #
    #   Collect the module's output files matching a file type or extension.
    #
    #   @param  FfsInf          FfsInfStatement object that contains file list
    #   @param  FileType        File type to get
    #   @param  FileExtension   File extension to get
    #   @param  Dict            dictionary contains macro and its value (unused)
    #   @retval tuple           (File list, whether files are already sections)
    #
    def GetFileList(FfsInf, FileType, FileExtension, Dict = {}):
        if FileType in Section.SectFileType.keys() :
            IsSect = True
        else :
            IsSect = False

        # An explicit extension wins; otherwise derive it from the file type.
        if FileExtension != None:
            Suffix = FileExtension
        elif IsSect :
            Suffix = Section.SectionType.get(FileType)
        else:
            Suffix = Section.BinFileType.get(FileType)
        if FfsInf == None:
            EdkLogger.error("GenFds", GENFDS_ERROR, 'Inf File does not exist!')

        FileList = []
        if FileType != None:
            for File in FfsInf.BinFileList:
                if File.Arch == "COMMON" or FfsInf.CurrentArch == File.Arch:
                    if File.Type == FileType:
                        if '*' in FfsInf.TargetOverrideList or File.Target == '*' or File.Target in FfsInf.TargetOverrideList or FfsInf.TargetOverrideList == []:
                            FileList.append(File.Path)
                        else:
                            GenFdsGlobalVariable.InfLogger ("\nBuild Target \'%s\' of File %s is not in the Scope of %s specified by INF %s in FDF" %(File.Target, File.File, FfsInf.TargetOverrideList, FfsInf.InfFileName))
                    else:
                        GenFdsGlobalVariable.VerboseLogger ("\nFile Type \'%s\' of File %s in %s is not same with file type \'%s\' from Rule in FDF" %(File.Type, File.File, FfsInf.InfFileName, FileType))
                else:
                    GenFdsGlobalVariable.InfLogger ("\nCurrent ARCH \'%s\' of File %s is not in the Support Arch Scope of %s specified by INF %s in FDF" %(FfsInf.CurrentArch, File.File, File.Arch, FfsInf.InfFileName))

        if Suffix != None and os.path.exists(FfsInf.EfiOutputPath):
            # Search files with the suffix in all sub-dirs of the output path.
            for Dirpath, Dirnames, Filenames in os.walk(FfsInf.EfiOutputPath):
                for F in Filenames:
                    # BUGFIX: this test was "os.path.splitext(F)[1] in (Suffix)".
                    # "(Suffix)" is just the string itself (not a tuple), so the
                    # "in" performed a substring match and could wrongly pick up
                    # a file whose extension is a substring of the wanted suffix
                    # (e.g. ".e" matches ".efi").  Use an exact comparison.
                    if os.path.splitext(F)[1] == Suffix:
                        FullName = os.path.join(Dirpath, F)
                        FileList.append(FullName)

        return FileList, IsSect
    GetFileList = staticmethod(GetFileList)
diff --git a/BaseTools/Source/Python/GenFds/UiSection.py b/BaseTools/Source/Python/GenFds/UiSection.py new file mode 100644 index 0000000000..e660055f9a --- /dev/null +++ b/BaseTools/Source/Python/GenFds/UiSection.py @@ -0,0 +1,77 @@ +## @file
+# process UI section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Section
+from Ffs import Ffs
+import subprocess
+import os
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import UiSectionClassObject
+
+## generate UI section
+#
+#
class UiSection (UiSectionClassObject):
    """Generates an EFI_SECTION_USER_INTERFACE section."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        UiSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate a UI section file.
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  KeyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name list, section alignment)
    #
    def GenSection(self, OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):
        # Let the owning INF statement expand $(...) macros in our attributes.
        if FfsInf != None:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.StringData = FfsInf.__ExtendMacro__(self.StringData)
            self.FileName = FfsInf.__ExtendMacro__(self.FileName)

        OutputFile = os.path.join(OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('UI'))

        # The UI string comes from explicit data, a file's contents (quoted),
        # or is empty.
        if self.StringData != None :
            NameString = self.StringData
        elif self.FileName != None:
            UiFilePath = GenFdsGlobalVariable.MacroExtend(
                GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName), Dict)
            UiFile = open(UiFilePath, 'r')
            NameString = '"' + UiFile.read() + '"'
            UiFile.close()
        else:
            NameString = ''

        GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_USER_INTERFACE', Ui=NameString)

        return [OutputFile], self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/VerSection.py b/BaseTools/Source/Python/GenFds/VerSection.py new file mode 100644 index 0000000000..e27d0a20f9 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/VerSection.py @@ -0,0 +1,82 @@ +## @file
+# process Version section generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from Ffs import Ffs
+import Section
+import os
+import subprocess
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+from CommonDataClass.FdfClass import VerSectionClassObject
+
+## generate version section
+#
+#
class VerSection (VerSectionClassObject):
    """Generates an EFI_SECTION_VERSION section."""

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        VerSectionClassObject.__init__(self)

    ## GenSection() method
    #
    #   Generate a version section file.
    #
    #   @param  self        The object pointer
    #   @param  OutputPath  Where to place output file
    #   @param  ModuleName  Which module this section belongs to
    #   @param  SecNum      Index of section
    #   @param  KeyStringList  Filter for inputs of section generation
    #   @param  FfsInf      FfsInfStatement object that contains this section data
    #   @param  Dict        dictionary contains macro and its value
    #   @retval tuple       (Generated file name list, section alignment)
    #
    def GenSection(self,OutputPath, ModuleName, SecNum, KeyStringList, FfsInf = None, Dict = {}):
        # Let the owning INF statement expand $(...) macros in our attributes.
        if FfsInf != None:
            self.Alignment = FfsInf.__ExtendMacro__(self.Alignment)
            self.BuildNum = FfsInf.__ExtendMacro__(self.BuildNum)
            self.StringData = FfsInf.__ExtendMacro__(self.StringData)
            self.FileName = FfsInf.__ExtendMacro__(self.FileName)

        OutputFile = os.path.normpath(os.path.join(
            OutputPath, ModuleName + 'SEC' + SecNum + Ffs.SectionSuffix.get('VERSION')))

        # The version string comes from explicit data, a file's contents
        # (quoted), or is empty.
        if self.StringData != None:
            StringData = self.StringData
        elif self.FileName != None:
            VerFilePath = GenFdsGlobalVariable.MacroExtend(
                GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FileName), Dict)
            VerFile = open(VerFilePath, 'r')
            StringData = '"' + VerFile.read() + '"'
            VerFile.close()
        else:
            StringData = ''

        GenFdsGlobalVariable.GenerateSection(OutputFile, None, 'EFI_SECTION_VERSION',
                                            Ui=StringData, Ver=self.BuildNum)

        return [OutputFile], self.Alignment
diff --git a/BaseTools/Source/Python/GenFds/Vtf.py b/BaseTools/Source/Python/GenFds/Vtf.py new file mode 100644 index 0000000000..eebc7b1dab --- /dev/null +++ b/BaseTools/Source/Python/GenFds/Vtf.py @@ -0,0 +1,188 @@ +## @file
+# process VTF generation
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from GenFdsGlobalVariable import GenFdsGlobalVariable
+import os
+from CommonDataClass.FdfClass import VtfClassObject
+T_CHAR_LF = '\n'
+
+## generate VTF
+#
+#
## generate VTF
#
#
class Vtf (VtfClassObject):

    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        VtfClassObject.__init__(self)

    ## GenVtf() method
    #
    #   Generate VTF by writing a component .inf and invoking the external
    #   GenVtf tool with it.
    #
    #   @param  self            The object pointer
    #   @param  FdAddressDict   dictionary contains FV name and its base address
    #   @retval Dict            FV and corresponding VTF file name
    #
    def GenVtf(self, FdAddressDict) :
        self.GenBsfInf()
        # NOTE(review): OutputFile is computed but never used; the actual
        # output file names are passed to the tool through OutputArg.
        OutputFile = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.Vtf')
        BaseAddArg  = self.GetBaseAddressArg(FdAddressDict)
        OutputArg, VtfRawDict = self.GenOutputArg()

        # Command shape: GenVtf -o <raw>... -f <inf> -r <base> -s <size>...
        Cmd = (
            'GenVtf',
            ) + OutputArg + (
            '-f', self.BsfInfName,
            ) + BaseAddArg

        GenFdsGlobalVariable.CallExternalTool(Cmd, "GenFv -Vtf Failed!")
        GenFdsGlobalVariable.SharpCounter = 0

        return VtfRawDict

    ## GenBsfInf() method
    #
    #   Generate the .inf consumed by GenVtf: a [COMPONENTS] section with one
    #   COMP_* entry group per VTF component statement.  The exact key/value
    #   text format is what the GenVtf tool parses.
    #
    #   @param  self        The object pointer
    #
    def GenBsfInf (self):
        FvList = self.GetFvList()
        self.BsfInfName = os.path.join(GenFdsGlobalVariable.FvDir, self.UiName + '.inf')
        BsfInf = open (self.BsfInfName, 'w+')
        BsfInf.writelines ("[COMPONENTS]" + T_CHAR_LF)

        for ComponentObj in self.ComponentStatementList :
            BsfInf.writelines ("COMP_NAME" + \
                               " = " + \
                               ComponentObj.CompName + \
                               T_CHAR_LF )
            # COMP_LOC: 'N' = not placed in an FV; an explicit file position;
            # or 'F'/'S' for the first/second FV referenced by the components.
            if ComponentObj.CompLoc.upper() == 'NONE':
                BsfInf.writelines ("COMP_LOC" + \
                                   " = " + \
                                   'N' + \
                                   T_CHAR_LF )

            elif ComponentObj.FilePos != None:
                BsfInf.writelines ("COMP_LOC" + \
                                   " = " + \
                                   ComponentObj.FilePos + \
                                   T_CHAR_LF )
            else:
                Index = FvList.index(ComponentObj.CompLoc.upper())
                if Index == 0:
                    BsfInf.writelines ("COMP_LOC" + \
                                       " = " + \
                                       'F' + \
                                       T_CHAR_LF )
                elif Index == 1:
                    BsfInf.writelines ("COMP_LOC" + \
                                       " = " + \
                                       'S' + \
                                       T_CHAR_LF )

            BsfInf.writelines ("COMP_TYPE" + \
                               " = " + \
                               ComponentObj.CompType + \
                               T_CHAR_LF )
            BsfInf.writelines ("COMP_VER" + \
                               " = " + \
                               ComponentObj.CompVer + \
                               T_CHAR_LF )
            BsfInf.writelines ("COMP_CS" + \
                               " = " + \
                               ComponentObj.CompCs + \
                               T_CHAR_LF )

            # '-' means "no binary/symbol file"; otherwise expand macros.
            BinPath = ComponentObj.CompBin
            if BinPath != '-':
                BinPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(BinPath))
            BsfInf.writelines ("COMP_BIN" + \
                               " = " + \
                               BinPath + \
                               T_CHAR_LF )

            SymPath = ComponentObj.CompSym
            if SymPath != '-':
                SymPath = GenFdsGlobalVariable.MacroExtend(GenFdsGlobalVariable.ReplaceWorkspaceMacro(SymPath))
            BsfInf.writelines ("COMP_SYM" + \
                               " = " + \
                               SymPath + \
                               T_CHAR_LF )
            BsfInf.writelines ("COMP_SIZE" + \
                               " = " + \
                               ComponentObj.CompSize + \
                               T_CHAR_LF )
            # Blank line terminates this component's entry group.
            BsfInf.writelines (T_CHAR_LF )

        BsfInf.close()

    ## GetFvList() method
    #
    #   Get FV names referenced by VTF components, in first-seen order with
    #   duplicates removed (the index in this list maps to COMP_LOC F/S).
    #
    #   @param  self        The object pointer
    #
    def GetFvList(self):
        FvList = []
        for component in self.ComponentStatementList :
            if component.CompLoc.upper() != 'NONE' and not (component.CompLoc.upper() in FvList):
                FvList.append(component.CompLoc.upper())

        return FvList

    ## GetBaseAddressArg() method
    #
    #   Build the ('-r', <base>, '-s', <size>) argument pairs for GenVtf,
    #   one pair per referenced FV.
    #
    #   @param  self            The object pointer
    #   @param  FdAddressDict   dictionary of FV name -> (base address, size)
    #
    def GetBaseAddressArg(self, FdAddressDict):
        FvList = self.GetFvList()
        CmdStr = tuple()
        for i in FvList:
            (BaseAddress, Size) = FdAddressDict.get(i)
            CmdStr += (
                '-r', '0x%x' % BaseAddress,
                '-s', '0x%x' %Size,
                )
        return CmdStr

    ## GenOutputArg() method
    #
    #   Build the ('-o', <file>) output arguments for GenVtf and the mapping
    #   of FV name -> generated raw VTF file name.
    #
    #   @param  self        The object pointer
    #
    def GenOutputArg(self):
        FvVtfDict = {}
        OutputFileName = ''
        FvList = self.GetFvList()
        Index = 0
        Arg = tuple()
        for FvObj in FvList:
            Index = Index + 1
            OutputFileName = 'Vtf%d.raw' % Index
            OutputFileName = os.path.join(GenFdsGlobalVariable.FvDir, OutputFileName)
            Arg += ('-o', OutputFileName)
            FvVtfDict[FvObj.upper()] = OutputFileName

        return Arg, FvVtfDict
+
diff --git a/BaseTools/Source/Python/GenFds/__init__.py b/BaseTools/Source/Python/GenFds/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/GenFds/__init__.py diff --git a/BaseTools/Source/Python/Makefile b/BaseTools/Source/Python/Makefile new file mode 100644 index 0000000000..a4e76592ab --- /dev/null +++ b/BaseTools/Source/Python/Makefile @@ -0,0 +1,89 @@ + +!IFNDEF PYTHON_FREEZER_PATH +!ERROR PYTHON_FREEZER_PATH must be defined! +!ENDIF + +FREEZE=$(PYTHON_FREEZER_PATH)\FreezePython.exe + +MODULES=encodings.cp437,encodings.gbk,encodings.utf_16,encodings.utf_8,encodings.utf_16_le,encodings.latin_1 + +BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32 + + +APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe +
# Python sources shared by every frozen tool below; each tool is re-frozen
# whenever one of these prerequisites changes.
# FIX: the last eight entries previously said "Autogen"; the directory is
# "AutoGen" (as used by the GenDepex rule below).  NMAKE on Windows resolves
# either spelling, but a case-sensitive host would miss the dependency.
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\DataType.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\DecClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\DecClassObjectLight.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\Dictionary.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\DscClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\EdkIIWorkspace.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\EdkIIWorkspaceBuild.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\EdkLogger.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\FdfClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\FdfParserLite.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\GlobalData.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\Identification.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\InfClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\InfClassObjectLight.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\MigrationUtilities.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\Misc.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\Parsing.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\String.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \
              $(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\BuildClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\MetaDataTable.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\MetaFileParser.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\MetaFileTable.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\WorkspaceDatabase.py \
              $(BASE_TOOLS_PATH)\Source\Python\Workspace\__init__.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\AutoGen.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\BuildEngine.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\GenC.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\GenDepex.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\GenMake.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\StrGather.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\UniClassObject.py \
              $(BASE_TOOLS_PATH)\Source\Python\AutoGen\__init__.py
+
+
+all: SetPythonPath $(APPLICATIONS) + +SetPythonPath: + set PYTHONPATH=$(BASE_TOOLS_PATH)\Source\Python + +$(BIN_DIR)\build.exe: $(BASE_TOOLS_PATH)\Source\Python\build\build.py $(COMMON_PYTHON) + @pushd . & @cd build & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) build.py & @popd + +$(BIN_DIR)\GenFds.exe: $(BASE_TOOLS_PATH)\Source\Python\GenFds\GenFds.py $(COMMON_PYTHON) + @pushd . & @cd GenFds & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) GenFds.py & @popd + +$(BIN_DIR)\Trim.exe: $(BASE_TOOLS_PATH)\Source\Python\Trim\Trim.py $(COMMON_PYTHON) + @pushd . & @cd Trim & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) Trim.py & @popd + +$(BIN_DIR)\MigrationMsa2Inf.exe: $(BASE_TOOLS_PATH)\Source\Python\MigrationMsa2Inf\MigrationMsa2Inf.py + @pushd . & @cd MigrationMsa2Inf & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) MigrationMsa2Inf.py & @popd + +$(BIN_DIR)\Fpd2Dsc.exe: $(BASE_TOOLS_PATH)\Source\Python\Fpd2Dsc\Fpd2Dsc.py $(COMMON_PYTHON) + @pushd . & @cd Fpd2Dsc & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) Fpd2Dsc.py & @popd +
# Freeze Spd2Dec into a standalone executable.
# NOTE(review): the prerequisite path uses "spd2dec" while the recipe cd's
# into "Spd2Dec"; both resolve on case-insensitive Windows -- confirm before
# porting this Makefile to a case-sensitive file system.
$(BIN_DIR)\spd2dec.exe: $(BASE_TOOLS_PATH)\Source\Python\spd2dec\Spd2Dec.py $(COMMON_PYTHON)
  @pushd . & @cd Spd2Dec & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) Spd2Dec.py & @popd

# Freeze GenDepex (lives in the AutoGen directory, not its own folder).
$(BIN_DIR)\GenDepex.exe: $(BASE_TOOLS_PATH)\Source\Python\AutoGen\GenDepex.py $(COMMON_PYTHON)
  @pushd . & @cd AutoGen & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) GenDepex.py & @popd

# Freeze the TargetTool target-configuration editor.
$(BIN_DIR)\TargetTool.exe: $(BASE_TOOLS_PATH)\Source\Python\TargetTool\TargetTool.py $(COMMON_PYTHON)
  @pushd . & @cd TargetTool & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) TargetTool.py & @popd
+clean:
+cleanall: + @del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll + @for %%i in ($(APPLICATIONS)) do @del /f /q %%i + diff --git a/BaseTools/Source/Python/MigrationMsa2Inf/AutoGenExterns.py b/BaseTools/Source/Python/MigrationMsa2Inf/AutoGenExterns.py new file mode 100644 index 0000000000..16f7a6bb2e --- /dev/null +++ b/BaseTools/Source/Python/MigrationMsa2Inf/AutoGenExterns.py @@ -0,0 +1,369 @@ +#!/usr/bin/env python
+#
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+
+#
+# Import Modules
+#
+import re, os, glob
+from Common.XmlRoutines import *
+
# "ModuleType" => list of (PackageGuid, headerFileName) tuples, filled by
# collectPackageInfo().
HeaderFiles = {}
# All GUID/Protocol/Ppi C names declared by the scanned packages.
GuidList = []
# GUID C name => "GuidType/Header.h" include path, filled by InitializeAutoGen().
GuidMap = {}
# (guidType, headerFileName, pkgName) => header file text cache.
HeaderFileContents = {}
# Appears unused in this module; kept as-is.
gTest = {}
# FOO_GUID macro name => gFooGuid C name, filled by AddGuidMacro2GuidCName().
GuidMacro2CName = {}
# (aliasMacro, originalMacro) pairs from "#define X_GUID Y_GUID" lines.
GuidAliasList = []
+
def collectIncludeFolder(pkgDirName, guidType, pkgName):
    """Cache the text of every .h file under <pkgDir>/Include/<guidType>.

    Results are stored in the module-level HeaderFileContents dict, keyed by
    (guidType, headerFileName, pkgName).  Missing folders are ignored.
    """
    includeFolder = os.path.join(pkgDirName, "Include", guidType)
    if not os.path.isdir(includeFolder):
        return
    for headerFileName in os.listdir(includeFolder):
        if not headerFileName.endswith(".h"):
            continue
        headerFile = open(os.path.join(includeFolder, headerFileName))
        HeaderFileContents[(guidType, headerFileName, pkgName)] = headerFile.read()
        headerFile.close()
+
# Matches GUID-style macro names like FOO_BAR_GUID, excluding the plain
# EFI_GUID type name itself.
GuidMacroReg = re.compile(r"\b(?!EFI_GUID\b)[A-Z0-9_]+_GUID\b")
# Matches GUID C names such as gEfiSomeProtocolGuid.
GuidCNameReg = re.compile(r"\bg\w+Guid\b")
# Matches "#define NEW_GUID OLD_GUID" alias lines, capturing both macro names.
GuidAliasReg = re.compile(r"#define\s+([A-Z0-9_]+_GUID)\s+([A-Z0-9_]+_GUID)\b")
+
def collectPackageInfo(spdFileName):
    """Scan one SPD file and record its package headers and GUID C names.

    Side effects: extends the module-level HeaderFiles, GuidList and (via
    collectIncludeFolder) HeaderFileContents structures.
    """
    pkgDirName = os.path.dirname(spdFileName)

    spd = XmlParseFile(spdFileName)

    pkgName = XmlElement(spd, "/PackageSurfaceArea/SpdHeader/PackageName")
    pkgGuid = XmlElement(spd, "/PackageSurfaceArea/SpdHeader/GuidValue")

    # Record which module types each package-level header serves.
    for IncludePkgHeader in XmlList(spd, "/PackageSurfaceArea/PackageHeaders/IncludePkgHeader"):
        moduleType = XmlAttribute(IncludePkgHeader, "ModuleType")
        headerFilePath = re.sub("Include/", "", XmlElementData(IncludePkgHeader), 1)
        HeaderFiles.setdefault(moduleType, []).append((pkgGuid, headerFilePath))

    # Gather declared GUID C names and cache the matching include folders.
    for guidType in ("Guid", "Protocol", "Ppi"):
        for guidEntry in XmlList(spd, "/PackageSurfaceArea/" + guidType + "Declarations/Entry"):
            GuidList.append(XmlElement(guidEntry, "Entry/C_Name"))
        collectIncludeFolder(pkgDirName, guidType, pkgName)

    # DEC files may declare additional GUID C names not present in the SPD.
    for DecFile in glob.glob(os.path.join(pkgDirName, "*.dec")):
        for GuidCNameMatch in GuidCNameReg.finditer(open(DecFile).read()):
            GuidCName = GuidCNameMatch.group(0)
            if GuidCName not in GuidList:
                GuidList.append(GuidCName)
+
def AddGuidMacro2GuidCName(GuidMacros, GuidCNames):
    """Pair each FOO_GUID macro with a gFooGuid C name from the same header.

    Matching order: exact camel-cased guess, then sole candidate, then a
    case-insensitive comparison.  Macros with no plausible match are
    silently skipped.  Results go into the module-level GuidMacro2CName.
    """
    for GuidMacro in GuidMacros:
        GuessGuidCName = "g" + GuidMacro.lower().title().replace("_", "")
        if GuessGuidCName in GuidCNames:
            GuidMacro2CName[GuidMacro] = GuessGuidCName
        elif len(GuidCNames) == 1:
            GuidMacro2CName[GuidMacro] = GuidCNames[0]
        else:
            loweredGuess = GuessGuidCName.lower()
            for GuidCName in GuidCNames:
                if GuidCName.lower() == loweredGuess:
                    GuidMacro2CName[GuidMacro] = GuidCName
                    break
+
+
def TranslateGuid(GuidMacroMatch):
    """re.sub callback: replace a GUID macro with its C name when known,
    otherwise return the macro unchanged."""
    macro = GuidMacroMatch.group(0)
    if macro in GuidMacro2CName:
        return GuidMacro2CName[macro]
    return macro
+
## Matches the body of a DEPENDENCY_START ... DEPENDENCY_END section.
DepexReg = re.compile(r"DEPENDENCY_START(.*?)DEPENDENCY_END", re.DOTALL)

def TranslateDpxSection(fileContents):
    """Extract a DXS dependency expression and rewrite GUID macros to C names.

    Returns (expression, macroList); both empty when no section is present.
    """
    match = DepexReg.search(fileContents)
    if match is None:
        return "", []

    # Collapse all whitespace runs, then substitute known GUID macros.
    expression = re.sub(r"\s+", " ", match.group(1)).strip()
    expression = GuidMacroReg.sub(TranslateGuid, expression)
    return expression, GuidMacroReg.findall(expression)
+
def InitializeAutoGen(workspace, db):
    """Populate the module-level GUID lookup tables from the workspace.

    Phase 1 scans every package listed in the FrameworkDatabase (db) via
    collectPackageInfo; phase 2 parses each cached public header to map GUID
    C names to their declaring header (GuidMap), collect GUID macro aliases
    (GuidAliasList), and pair GUID macros with C names
    (AddGuidMacro2GuidCName).
    """

    # Phase 1: fill HeaderFiles / GuidList / HeaderFileContents.
    for spdFile in XmlList(db, "/FrameworkDatabase/PackageList/Filename"):
        spdFileName = XmlElementData(spdFile)
        collectPackageInfo(os.path.join(workspace, spdFileName))

    # Phase 2: scan the cached headers with C comments stripped out.
    BlockCommentReg = re.compile(r"/\*.*?\*/", re.DOTALL)
    LineCommentReg = re.compile(r"//.*")
    # NOTE(review): if GuidList is empty this builds r"\b()\b", which matches
    # empty strings at word boundaries -- assumed non-empty in practice.
    GuidReg = re.compile(r"\b(" + '|'.join(GuidList) + r")\b")

    for headerFile in HeaderFileContents:
        # headerFile is the (guidType, fileName, pkgName) cache key.
        Contents = HeaderFileContents[headerFile]
        Contents = BlockCommentReg.sub("", Contents)
        Contents = LineCommentReg.sub("", Contents)

        # Record which header declares each known GUID C name.
        FoundGuids = GuidReg.findall(Contents)
        for FoundGuid in FoundGuids:
            GuidMap[FoundGuid] = "%s/%s" % (headerFile[0], headerFile[1])
            #print "%-40s %s/%s" % (FoundGuid, headerFile[0], headerFile[1])

        GuidMacros = GuidMacroReg.findall(Contents)
        GuidCNames = GuidCNameReg.findall(Contents)

        # Collect "#define X_GUID Y_GUID" alias pairs.
        for GuidAliasMatch in GuidAliasReg.finditer(Contents):
            Name1, Name2 = GuidAliasMatch.group(1), GuidAliasMatch.group(2)
            GuidAliasList.append((Name1, Name2))

        AddGuidMacro2GuidCName(GuidMacros, GuidCNames)
+
def AddSystemIncludeStatement(moduleType, PackageList):
    """Return '#include <...>' lines for every header registered for this
    module type and owned by one of the packages in PackageList (by GUID)."""
    headerList = HeaderFiles.get(moduleType, [])
    statement = "\n"
    for pkgGuid in PackageList:
        for entryGuid, headerPath in headerList:
            if entryGuid == pkgGuid:
                statement += "#include <%s>\n" % headerPath
    return statement
+
+
def AddLibraryClassStatement(LibraryClassList):
    """Return a block of '#include <Library/Xxx.h>' lines, one per library
    class name, preceded by a blank line."""
    lines = ["#include <Library/%s.h>\n" % libClass for libClass in LibraryClassList]
    return "\n" + "".join(lines)
+
def AddGuidStatement(GuidList):
    """Return '#include' lines for the public headers declaring each GUID.

    The parameter intentionally shadows the module-level GuidList: callers
    pass only the GUID C names used by one module.  GUIDs with no entry in
    GuidMap are reported on stdout and skipped.
    """
    IncludeStatement = "\n"
    GuidIncludeSet = {}
    for Guid in GuidList:
        if Guid in GuidMap:
            # Dict used as a set to drop duplicate header paths.
            GuidIncludeSet[GuidMap[Guid]] = 1
        else:
            # Parenthesized so this also parses under Python 3; under
            # Python 2 a parenthesized single argument prints identically.
            print("GUID CName: %s cannot be found in any public header file" % Guid)

    for GuidInclude in GuidIncludeSet:
        IncludeStatement += "#include <%s>\n" % GuidInclude

    return IncludeStatement
+
## Map from driver-model protocol GUID C name to the protocol's C type name,
# used when emitting extern declarations for driver-model modules.
# FIX: the diagnostics GUID previously mapped to
# EFI_DRIVER_CONFIGURATION_PROTOCOL (copy-paste from the line above); per the
# EFI driver model the type is EFI_DRIVER_DIAGNOSTICS_PROTOCOL.
DriverBindingMap = {
    "gEfiDriverBindingProtocolGuid"       : "EFI_DRIVER_BINDING_PROTOCOL",
    "gEfiComponentNameProtocolGuid"       : "EFI_COMPONENT_NAME_PROTOCOL",
    "gEfiDriverConfigurationProtocolGuid" : "EFI_DRIVER_CONFIGURATION_PROTOCOL",
    "gEfiDriverDiagnosticProtocolGuid"    : "EFI_DRIVER_DIAGNOSTICS_PROTOCOL"
    }
+
def AddDriverBindingProtocolStatement(AutoGenDriverModel):
    """Generate driver-model protocol installation code.

    AutoGenDriverModel is a list of 4-item sequences of C symbol references
    (each "&Symbol" or the string "NULL") in the order:
    (DriverBinding, ComponentName, DriverConfig, DriverDiag).

    Returns (InstallStatement, "", "", GlobalDeclaration) to match the
    (create, destroy, definition, declaration) layout of the other
    statement builders in this module.
    """
    InstallStatement = "\n"
    DBindingHandle = "ImageHandle"
    GlobalDeclaration = "\n"


    for DriverModelItem in AutoGenDriverModel:

        if DriverModelItem[1] == "NULL" and DriverModelItem[2] == "NULL" and DriverModelItem[3] == "NULL":
            # Only the driver binding protocol is present.
            InstallStatement += "  Status = EfiLibInstallDriverBinding (\n"
            InstallStatement += "             ImageHandle,\n"
            InstallStatement += "             SystemTable,\n"
            InstallStatement += "             %s,\n" % DriverModelItem[0]
            InstallStatement += "             %s\n" % DBindingHandle
            InstallStatement += "             );\n"
        else:
            InstallStatement += "  Status = EfiLibInstallAllDriverProtocols (\n"
            InstallStatement += "             ImageHandle,\n"
            InstallStatement += "             SystemTable,\n"
            InstallStatement += "             %s,\n" % DriverModelItem[0]
            InstallStatement += "             %s,\n" % DBindingHandle
            InstallStatement += "             %s,\n" % DriverModelItem[1]
            InstallStatement += "             %s,\n" % DriverModelItem[2]
            InstallStatement += "             %s\n" % DriverModelItem[3]
            InstallStatement += "             );\n"

        InstallStatement += "  ASSERT_EFI_ERROR (Status);\n\n"

        # [1:] strips the leading '&' to get the bare symbol name.
        GlobalDeclaration += "extern EFI_DRIVER_BINDING_PROTOCOL %s;\n" % DriverModelItem[0][1:]
        if (DriverModelItem[1] != "NULL"):
            GlobalDeclaration += "extern EFI_COMPONENT_NAME_PROTOCOL %s;\n" % DriverModelItem[1][1:]
        if (DriverModelItem[2] != "NULL"):
            GlobalDeclaration += "extern EFI_DRIVER_CONFIGURATION_PROTOCOL %s;\n" % DriverModelItem[2][1:]
        if (DriverModelItem[3] != "NULL"):
            # FIX: element 3 is the DriverDiag symbol; it was previously
            # declared with the configuration protocol type (copy-paste from
            # the line above).
            GlobalDeclaration += "extern EFI_DRIVER_DIAGNOSTICS_PROTOCOL %s;\n" % DriverModelItem[3][1:]

        # Only the first driver binding is installed onto the image handle.
        DBindingHandle = "NULL"

    return (InstallStatement, "", "", GlobalDeclaration)
+
## C source template declaring an event-callback function; the single '%s'
# placeholder receives the callback's function name.
EventDeclarationTemplate = """
//
// Declaration for callback Event.
//
VOID
EFIAPI
%s (
  IN EFI_EVENT        Event,
  IN VOID             *Context
  );
"""
+
def AddBootServiceEventStatement(EventList):
    """Generate code creating an exit-boot-services event for the callback.

    Returns (CreateStatement, "", GlobalDefinition, GlobalDeclaration).
    NOTE(review): with more than one callback only a warning is printed and
    the generated code keeps an empty handler name; an empty list raises
    IndexError.  Both match the original behavior and are left unchanged.
    """
    FinalEvent = ""
    if len(EventList) > 1:
        # Parenthesized so this also parses under Python 3; prints the same
        # single string under Python 2.
        print("Current prototype does not support multi boot service event")
    else:
        FinalEvent = EventList[0]

    CreateStatement = "\n"
    CreateStatement += "  Status = gBS->CreateEvent (\n"
    CreateStatement += "                  EVT_SIGNAL_EXIT_BOOT_SERVICES,\n"
    # NOTE(review): the virtual-address variant below uses "TPL_NOTIFY";
    # "EFI_TPL_NOTIFY" is kept here to preserve the original output.
    CreateStatement += "                  EFI_TPL_NOTIFY,\n"
    CreateStatement += "                  " + FinalEvent + ",\n"
    CreateStatement += "                  NULL,\n"
    CreateStatement += "                  &mExitBootServicesEvent\n"
    CreateStatement += "                  );\n"
    CreateStatement += "  ASSERT_EFI_ERROR (Status);\n"

    GlobalDefinition = "\n"
    GlobalDefinition += "STATIC EFI_EVENT mExitBootServicesEvent = NULL;\n"

    GlobalDeclaration = EventDeclarationTemplate % FinalEvent

    # NOTE(review): DestroyStatement is built but never returned; kept for
    # parity with the original implementation.
    DestroyStatement = "\n"
    DestroyStatement += "  Status = gBS->CloseEvent (mExitBootServicesEvent);\n"
    DestroyStatement += "  ASSERT_EFI_ERROR (Status);\n"
    return (CreateStatement, "", GlobalDefinition, GlobalDeclaration)
+
def AddVirtualAddressEventStatement(EventList):
    """Generate code creating a virtual-address-change event for the callback.

    Returns (CreateStatement, "", GlobalDefinition, GlobalDeclaration).
    NOTE(review): same caveats as AddBootServiceEventStatement -- multiple
    callbacks only print a warning, an empty list raises IndexError.
    """
    FinalEvent = ""
    if len(EventList) > 1:
        # Parenthesized so this also parses under Python 3; prints the same
        # single string under Python 2.
        print("Current prototype does not support multi virtual address change event")
    else:
        FinalEvent = EventList[0]

    CreateStatement = "\n"

    CreateStatement += "  Status = gBS->CreateEvent (\n"
    CreateStatement += "                  EVT_SIGNAL_VIRTUAL_ADDRESS_CHANGE,\n"
    CreateStatement += "                  TPL_NOTIFY,\n"
    CreateStatement += "                  " + FinalEvent + ",\n"
    CreateStatement += "                  NULL,\n"
    CreateStatement += "                  &mVirtualAddressChangedEvent\n"
    CreateStatement += "                  );\n"
    CreateStatement += "  ASSERT_EFI_ERROR (Status);\n"

    GlobalDefinition = "\n"
    GlobalDefinition += "STATIC EFI_EVENT mVirtualAddressChangedEvent = NULL;\n"

    GlobalDeclaration = EventDeclarationTemplate % FinalEvent

    # NOTE(review): DestroyStatement is built but never returned; kept for
    # parity with the original implementation.
    DestroyStatement = "\n"
    DestroyStatement += "  Status = gBS->CloseEvent (mVirtualAddressChangedEvent);\n"
    DestroyStatement += "  ASSERT_EFI_ERROR (Status);\n"

    return (CreateStatement, "", GlobalDefinition, GlobalDeclaration)
+
+
## C prototype template declaring the module's original entry point; the
# '%s' placeholder receives the entry point function name.
EntryPointDeclarationTemplate = """
//
// Declaration for original Entry Point.
//
EFI_STATUS
EFIAPI
%s (
  IN EFI_HANDLE        ImageHandle,
  IN EFI_SYSTEM_TABLE  *SystemTable
  );
"""

## Doxygen-style header emitted above the generated wrapper entry point; the
# '%s' placeholder receives the module name.  Raw string: '\n' etc. must not
# be interpreted by Python.
EntryPointHeader = r"""
/**
  The user Entry Point for module %s. The user code starts with this function.

  @param[in] ImageHandle    The firmware allocated handle for the EFI image.
  @param[in] SystemTable    A pointer to the EFI System Table.

  @retval EFI_SUCCESS       The entry point is executed successfully.
  @retval other             Some error occurs when executing this entry point.

**/
"""
def AddNewEntryPointContentsStatement (moduleName, EntryPoint, InstallStatement = ""):
    """Create a wrapper entry point for the module.

    The wrapper runs InstallStatement, then calls the original EntryPoint
    (when one is named) and returns its status.
    Returns (newEntryPointName, cSourceText, externDeclarationText).
    """
    # Pick a wrapper name that cannot collide with the original entry point.
    if EntryPoint == "Initialize%s" % moduleName:
        NewEntryPoint = "NewInitialize%s" % moduleName
    else:
        NewEntryPoint = "Initialize%s" % moduleName

    parts = [EntryPointHeader % moduleName]
    parts.append("EFI_STATUS\n")
    parts.append("EFIAPI\n")
    parts.append(NewEntryPoint + "(\n")
    parts.append("  IN EFI_HANDLE        ImageHandle,\n")
    parts.append("  IN EFI_SYSTEM_TABLE  *SystemTable\n")
    parts.append("  )\n")
    parts.append("{\n")
    parts.append("  EFI_STATUS  Status;\n")
    parts.append(InstallStatement + "\n")

    GlobalDeclaration = ""
    if EntryPoint != "":
        parts.append("  //\n  // Call the original Entry Point\n  //\n")
        parts.append("  Status = %s (ImageHandle, SystemTable);\n\n" % EntryPoint)
        GlobalDeclaration = EntryPointDeclarationTemplate % EntryPoint

    parts.append("  return Status;\n")
    parts.append("}\n")

    return (NewEntryPoint, "".join(parts), GlobalDeclaration)
+
## Leading C block comment (file header) at the very top of a source file.
reFileHeader = re.compile(r"^\s*/\*.*?\*/\s*", re.DOTALL)
## Include-guard pair: "#ifndef NAME" immediately followed by "#define NAME".
reNext = re.compile(r"#ifndef\s*(\w+)\s*#define\s*\1\s*")

def AddCommonInclusionStatement(fileContents, includeStatement):
    """Insert includeStatement after the file-header comment and any include
    guard, unless it already appears anywhere in fileContents."""
    if includeStatement in fileContents:
        return fileContents

    pos = 0
    headerMatch = reFileHeader.search(fileContents)
    if headerMatch:
        pos = headerMatch.end()

    guardMatch = reNext.search(fileContents, pos)
    if guardMatch:
        pos = guardMatch.end()

    return fileContents[:pos] + ("\n%s\n\n" % includeStatement) + fileContents[pos:]
+
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
if __name__ == '__main__':

    # Intentionally a no-op: this module only provides helpers consumed by
    # MigrationMsa2Inf and is not meant to be executed directly.
    pass
+
diff --git a/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py b/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py new file mode 100644 index 0000000000..01de21239c --- /dev/null +++ b/BaseTools/Source/Python/MigrationMsa2Inf/MigrationMsa2Inf.py @@ -0,0 +1,2477 @@ +#!/usr/bin/env python +# +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. + + +""" This program converts EDK II MSA files into EDK II Extended INF format files """ + +import os, re, sys, fnmatch, xml.dom.minidom +from optparse import OptionParser +from AutoGenExterns import * +from Common.XmlRoutines import * # XmlParseFile, XmlElement, XmlAttribute, XmlList, XmlElementData, XmlNode +from Common.EdkIIWorkspace import * + +versionNumber = "0.9" +__version__ = "%prog Version " + versionNumber +__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved." 
+ +commonHeaderFilename = "CommonHeader.h" +entryPointFilename = "EntryPoint.c" + +AutoGenLibraryMapping = { + "HiiLib":"HiiLibFramework", + "EdkIfrSupportLib":"IfrSupportLibFramework", + "EdkScsiLib":"ScsiLib", + "EdkUsbLib":"UsbLib", + "EdkFvbServiceLib":"FvbServiceLib", + "EdkGraphicsLib":"GraphicsLib" + } + +def myOptionParser(): + """ Argument Parser """ + usage = "%prog [options] -f input_filename" + parser = OptionParser(usage=usage,description=__copyright__,version="%prog " + str(versionNumber)) + parser.add_option("-f", "--file", dest="filename", help="Name of MSA file to convert") + parser.add_option("-o", "--output", dest="outfile", help="Specific Name of the INF file to create, otherwise it is the MSA filename with the extension repalced.") + parser.add_option("-a", "--auto", action="store_true", dest="autowrite", default=False, help="Automatically create output files and write the INF file") + parser.add_option("-i", "--interactive", action="store_true", dest="interactive", default=False, help="Set Interactive mode, user must approve each change.") + parser.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", help="Do not print any messages, just return either 0 for succes or 1 for failure") + parser.add_option("-v", "--verbose", action="count", dest="verbose", help="Do not print any messages, just return either 0 for succes or 1 for failure") + parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Enable printing of debug messages.") + parser.add_option("-c", "--convert", action="store_true", dest="convert", default=False, help="Convert package: OldMdePkg->MdePkg EdkModulePkg->MdeModulePkg.") + parser.add_option("-e", "--event", action="store_true", dest="event", default=False, help="Enable handling of Exit Boot Services & Virtual Address Changed Event") + parser.add_option("-m", "--manual", action="store_true", dest="manual", default=False, help="Generate CommonHeader.txt, user picks up & 
copy it to a module common header") + parser.add_option("-w", "--workspace", dest="workspace", default=str(os.environ.get('WORKSPACE')), help="Specify workspace directory.") + (options, args) = parser.parse_args(sys.argv[1:]) + + return options,args + + +def openDatabase(f): + """ Parse XML in the FrameworkDatabase.db file pointed to by f """ + if (options.debug and options.verbose > 1): + print "Opening the database file:", f + if os.path.exists(f): + fdb = XmlParseFile(f) + else: + return "None" + return fdb + +def openSpd(s): + """ Parse XML in the SPD file pointed to by s """ + if (options.debug and options.verbose > 1): + print "Opening the SPD file:", s + if os.path.exists(s): + spd = XmlParseFile(s) + else: + return "None" + return spd + +def openMsa(m): + """ Parse XML in the MSA file pointed to by m """ + if (options.debug and options.verbose > 1): + print "Opening the MSA file:", m + if os.path.exists(m): + msa = XmlParseFile(m) + else: + return "None" + return msa + +def AddGuid(ArchList, CName, Usage): + """ Add a GUID to the Architecture array that the GUID is valid for. """ + if "IA32" in ArchList: + GuidCNameIa32.insert(0, str(" %-45s # %s" % (CName, Usage))) + if "X64" in ArchList: + GuidCNameX64.insert(0, str(" %-45s # %s" % (CName, Usage))) + if "IPF" in ArchList: + GuidCNameIPF.insert(0, str(" %-45s # %s" % (CName, Usage))) + if "EBC" in ArchList: + GuidCNameEBC.insert(0, str(" %-45s # %s" % (CName, Usage))) + if "ALL" in ArchList: + GuidCName.insert(0, str(" %-45s # %s" % (CName, Usage))) + + +def removeDups(CN, ListName): + """ Remove Duplicate Entries from the Guid List passed in """ + for Entry in ListName[:]: + if " " + CN + " " in Entry: + if (options.verbose > 1): + print "Removing C Name %s Entry from Guids List." 
% (CN) + ListName.remove(Entry) + +def chkArch(Archs): + """ Process the supported architectures passed in to combine if possible """ + Archs = Archs.upper() + if (("IA32" in Archs) & ("X64" in Archs) & ("IPF" in Archs) & ("EBC" in Archs)): + Archs = "ALL" + if (len(Archs) == 0): + Archs = "ALL" + return Archs + +def saveSourceFile(moduleDir, sourceFilename, sourceFileContents): + newFilename = os.path.join(moduleDir, sourceFilename) + + try: + f = open(newFilename, "w+") + f.write(sourceFileContents) + f.close() + except: + print "IO error in saving %s" % sourceFilename + + return sourceFilename + +def openSourceFile(moduleDir, sourceFilename): + newFilename = os.path.join(moduleDir, sourceFilename) + sourceFileContents = "" + try: + f = open(newFilename, "r") + sourceFileContents = f.read() + f.close() + except: + print "IO error in opening %s" % sourceFilename + + return sourceFileContents + +def MatchOption(eline, ToolChainFamily, Targets, Archs, ToolCode, Value): + IDs = eline.split("_") + + if len(IDs) < 5: + return [] + + MatchedTargets = [] + if (Targets[0] == "*") or IDs[0] in Targets: + MatchedTargets.append(IDs[0]) + elif IDs[0] == "*": + MatchedTargets = Targets + + MatchedArchs = [] + if Archs[0] == "*" or IDs[2] in Archs: + MatchedArchs.append(IDs[2]) + elif IDs[2] == "*": + MatchedArchs = Archs + + if IDs[3] != ToolCode and IDs[3] != "*": + return [] + + result = [] + for arch in MatchedArchs: + for target in MatchedTargets: + line = "%s:%s_%s_%s_%s_FLAGS = %s" % (ToolChainFamily, target, IDs[1], arch, ToolCode, Value) + result.append(line) + + return result + +def main(): + + AutoGenSource = "" + AutoGenHeader = "" + AutoGenDeclaration = "" + AutoGenModuleFolder = None + + workspace = "" + + if (options.workspace == None): + print "ERROR: E0000: WORKSPACE not defined.\n Please set the WORKSPACE environment variable to the location of the EDK II install directory." 
+ sys.exit(1) + else: + workspace = options.workspace + if (options.debug): + print "Using Workspace:", workspace + + try: + options.verbose +=1 + except: + options.verbose = 1 + pass + + + FdbPath = os.path.join(workspace, "Conf") + FdbPath = os.path.join(FdbPath, "FrameworkDatabase.db") + if os.path.exists(FdbPath): + FdbFile = FdbPath + else: + print "ERROR: E0001: WORKSPACE does not contain the FrameworkDatabase File.\n Please run EdkSetup from the EDK II install directory.\n" + sys.exit(1) + + Fdb = openDatabase(FdbFile) + if (Fdb == 'None'): + print "ERROR: E0002 Could not open the Framework Database file:", FdbFile + sys.exit(1) + + if (options.debug): + print "FrameworkDataBase.db file:", FdbFile + + # + InitializeAutoGen(workspace, Fdb) + + if (options.filename): + filename = options.filename + if ((options.verbose > 1) | (options.autowrite)): + print "Filename:", filename + else: + print "ERROR: E0001 - You must specify an input filename" + sys.exit(1) + + if (options.outfile): + outputFile = options.outfile + else: + outputFile = filename.replace('.msa', '.inf') + + if ((options.verbose > 2) or (options.debug)): + print "Output Filename:", outputFile + + Msa = openMsa(filename) + if (Msa == 'None'): + ## Maybe developer think WORKSPACE macro is the root directory of file name + ## So we will try to add WORKSPACE path into filename + MsaFileName = "" + MsaFileName = os.path.join(workspace, filename) + Msa = openMsa(MsaFileName) + if (Msa == 'None'): + print "ERROR: E0002: Could not open the file:", filename + sys.exit(1) + + AutoGenModuleFolder = os.path.dirname(filename) + + MsaHeader = "/ModuleSurfaceArea/MsaHeader/" + MsaDefs = "/ModuleSurfaceArea/ModuleDefinitions/" + BaseName = str(XmlElement(Msa, MsaDefs + "OutputFileBasename")).strip() + + if (len(BaseName) < 1): + BaseName = str(XmlElement(Msa, MsaHeader + "BaseName")).strip() + BaseName = re.sub(' ', '_', BaseName) + + GuidValue = str(XmlElement(Msa, MsaHeader + "GuidValue")).strip() + VerString 
= str(XmlElement(Msa, MsaHeader + "Version")).strip() + ModType = str(XmlElement(Msa, MsaHeader + "ModuleType")).strip() + CopyRight = str(XmlElement(Msa, MsaHeader + "Copyright")).strip() + Abstract = str(XmlElement(Msa, MsaHeader + "Abstract")).strip() + Description = str(XmlElement(Msa, MsaHeader + "Description")).strip().replace(" ", " ").replace(" ", " ").replace(" ", " ") + if not CopyRight.find("2007"): + CopyRight = CopyRight.replace("2006", "2007") + License = str(XmlElement(Msa, MsaHeader + "License")).strip().replace(" ", " ") + MsaDefs = "/ModuleSurfaceArea/ModuleDefinitions/" + BinModule = "" + try: + BinModule = str(XmlElement(Msa, MsaDefs + "BinaryModule")).strip() + except: + pass + + SupportedArchitectures = "" + try: + SupportedArchitectures = str(XmlElement(Msa, MsaDefs + "SupportedArchitectures")).strip() + except: + pass + + DefinesComments = [] + if (len(SupportedArchitectures) > 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + DefinesComments.append("# VALID_ARCHITECTURES = " + SupportedArchitectures + "\n") + DefinesComments.append("#\n") + + MsaExtern = "/ModuleSurfaceArea/Externs/" + PcdIsDriver = "" + try: + PcdIsDriver = str(XmlElement(Msa, MsaExtern + "PcdIsDriver")).strip() + except: + pass + + SpecList = [] + List = [] + try: + List = XmlList(Msa, MsaExtern + "Specification") + except: + pass + + if (len(List) > 0): + for spec in List[:]: + SpecList.insert(0, str(XmlElementData(spec)).strip()) + + DriverModules = [] + LibraryModules = [] + Externlist = [] + Flag = (DefinesComments == []) + + # Data structure to insert autogen code + AutoGenDriverModel = [] + AutoGenExitBootServices = [] + AutoGenVirtualAddressChanged = [] + AutoGenEntryPoint = "" + AutoGenUnload = "" + AutoGenGuid = [] + AutoGenLibClass = [] + AutoGenPackage = [] + AutoGenSourceFiles = [] + OldEntryPoint = "" + OldUnload = "" + + try: + Externlist = XmlList(Msa, MsaExtern + "Extern") 
+ except: + pass + + if (len(Externlist) > 0): + if (options.debug and options.verbose > 2): + print "In Extern Parsing Routine" + for extern in Externlist: + EntryPoint = "" + Unload = "" + DBinding = "" + CompName = "" + Diag = "" + Config = "" + Constr = "" + Destr = "" + CallBack = "" + lFlag = False + AutoGenDriverModelItem = [] + try: + EntryPoint = str(XmlElementData(extern.getElementsByTagName("ModuleEntryPoint")[0])).strip() + AutoGenEntryPoint = EntryPoint + #DriverModules.append(" %-30s = %s\n" % ("ENTRY_POINT" , EntryPoint)) + except: + pass + + try: + Unload = str(XmlElementData(extern.getElementsByTagName("ModuleUnloadImage")[0])).strip() + AutoGenUnload = Unload + DriverModules.append(" %-30s = %s\n" % ("UNLOAD_IMAGE", Unload)) + except: + pass + + try: + DBinding = str(XmlElementData(extern.getElementsByTagName("DriverBinding")[0])).strip() + AutoGenDriverModelItem.append("&" + DBinding) + DefinesComments.append("# %-29s = %-45s\n" % ("DRIVER_BINDING", DBinding)) + lFlag = True + except: + pass + + try: + CompName = str(XmlElementData(extern.getElementsByTagName("ComponentName")[0])).strip() + AutoGenDriverModelItem.append("&" + CompName) + DefinesComments.append("# %-29s = %-45s\n" % ("COMPONENT_NAME", CompName)) + lFlag = True + except: + if lFlag: + AutoGenDriverModelItem.append("NULL") + pass + + try: + Config = str(XmlElementData(extern.getElementsByTagName("DriverConfig")[0])).strip() + AutoGenDriverModelItem.append("&" + Config) + DefinesComments.append("# %-29s = %-45s\n" % ("DRIVER_CONFIG", Config)) + lFlag = True + except: + if lFlag: + AutoGenDriverModelItem.append("NULL") + pass + + try: + Diag = str(XmlElementData(extern.getElementsByTagName("DriverDiag")[0])).strip() + AutoGenDriverModelItem.append("&" + Diag) + DefinesComments.append("# %-29s = %-45s\n" % ("DRIVER_DIAG", Diag)) + lFlag = True + except: + if lFlag: + AutoGenDriverModelItem.append("NULL") + pass + + if len(AutoGenDriverModelItem) > 0: + 
AutoGenDriverModel.append(AutoGenDriverModelItem) + + try: + Constr = str(XmlElementData(extern.getElementsByTagName("Constructor")[0])).strip() + LibraryModules.append(" %-30s = %s\n" % ("CONSTRUCTOR", Constr)) + except: + pass + + try: + Destr = str(XmlElementData(extern.getElementsByTagName("Destructor")[0])).strip() + LibraryModules.append(" %-30s = %s\n" % ("DESTRUCTOR", Destr)) + except: + pass + + try: + CallBack = str(XmlElement(extern, "Extern/SetVirtualAddressMapCallBack")).strip() + if CallBack != "": + AutoGenVirtualAddressChanged.append(CallBack) + DefinesComments.append("# %-29s = %-45s\n" % ("VIRTUAL_ADDRESS_MAP_CALLBACK", CallBack)) + lFlag = True + except: + + pass + + try: + CallBack = str(XmlElement(extern, "Extern/ExitBootServicesCallBack")).strip() + if CallBack != "": + AutoGenExitBootServices.append(CallBack) + DefinesComments.append("# %-29s = %-45s\n" % ("EXIT_BOOT_SERVICES_CALLBACK", CallBack)) + lFlag = True + except: + pass + + + Flag = False + + """ Get the Module's custom build options """ + MBOlines = [] + MBO = "/ModuleSurfaceArea/ModuleBuildOptions/Options/Option" + mboList = [] + try: + mboList = XmlList(Msa, MBO) + except: + pass + + if (len(mboList) > 0): + for Option in mboList: + Targets = [] + Archs = [] + + bt = "" + try: + bt = str(Option.getAttribute("BuildTargets")) + except: + pass + + if (len(bt) > 0): + if (re.findall(" ", bt) > 0): + Targets = bt.split() + else: + Targets.insert(0, bt) + else: + Targets.insert(0, "*") + + if (options.debug and options.verbose > 2): + print "Targets", len(Targets), Targets + + pro = "" + try: + pro = Option.getAttribute("SupArchList") + if (re.findall(" ", pro) > 0): + Archs = pro.split() + elif (re.findall(",", pro) > 0): + Archs = pro.split(",") + except: + pass + + if (len(pro) == 0): + Archs.insert(0, "*") + + if (options.debug and options.verbose > 2): + print "Archs", len(Archs), Archs + + ToolCode = "" + try: + ToolCode = str(Option.getAttribute("ToolCode")) + except: + pass + + 
if (len(ToolCode) == 0): + ToolCode="*" + + value = "" + try: + value = str(XmlElementData(Option)) + except: + pass + Tags = [] + TagName = "" + try: + TagName = str(Option.getAttribute("TagName")) + except: + pass + + if (len(TagName) > 0) : + if (options.debug and options.verbose > 2): + print "TagName was defined:", TagName + Tags.insert(0, TagName) + else: + if (options.debug and options.verbose > 2): + print "TagName was NOT defined!" + TagName = "*" + Tags.insert(0, "*") + + Family = "" + try: + Family = str(Option.getAttribute("ToolChainFamily")).strip() + except: + pass + + if (len(Family) > 0): + if (options.debug): + print "Searching tools_def.txt for Tool Tags that belong to:", Family, "family" + TCF = [] + tdFile = "" + tdPath = os.path.join(workspace, "Tools") + tdPath = os.path.join(tdPath, "Conf") + tdPath = os.path.join(tdPath, "tools_def.txt") + tdPath = tdPath.replace("\\", "/") + if os.path.exists(tdPath): + tdFile = tdPath + else: + tdPath = os.path.join(workspace, "Conf") + tdPath = os.path.join(tdPath, "tools_def.txt") + if os.path.exists(tdPath): + tdFile = tdPath + else: + print "ERROR: E0001: WORKSPACE does not contain the tools_def.txt File.\n Please run EdkSetup from the EDK II install directory.\n" + sys.exit(1) + + if (options.debug and options.verbose > 2): + print "Opening:", tdFile + + TagNameList = [] + tools_def = open(tdFile, "r") + for tdline in tools_def: + if "# " in tdline: + continue + if "FAMILY" in tdline: + if (options.debug and options.verbose > 2): + print "Testing for FAMILY:", Family, "in the line:", tdline.strip() + if Family in tdline: + enter = tdline.split("=")[0] + if (options.debug and options.verbose > 2): + print "Adding TNL:", tdline + TagNameList.insert(0, enter) + tools_def.close() + + if (options.debug and options.verbose > 2): + print "TagNameList:", TagNameList + + olinesSet = {} + for eline in TagNameList: + if "# " in eline: + continue + if (options.debug and options.verbose > 2): + print "ToolsDef 
entry:", eline + + olines = MatchOption(eline, Family, Targets, Archs, ToolCode, value) + for oline in olines: + olinesSet[oline] = 1 + + for oline in olinesSet: + if (options.debug and options.verbose > 2): + print "Adding:", str(oline) + MBOlines.insert(0, oline) + else: + for targ in Targets: + for arch in Archs: + oline = " %s_%s_%s_%s_FLAGS = %s" % (targ, Tags[0], arch, str(ToolCode), str(Value)) + if (options.debug and options.verbose > 2): + print "Adding:", str(oline) + MBOlines.insert(0, oline) + + + + + for tag in Tags: + for targ in Targets: + for arch in Archs: + oline = " " + str(targ) + "_" + str(tag) + "_" + str(arch) + "_" + str(ToolCode) + "_FLAGS = " + str(value) + if (options.debug and options.verbose > 2): + print "Adding:", str(oline) + #MBOlines.insert(0, oline) + + + """ Get the Library Class information """ + MsaLcDefs = "/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass" + LcDefList = [] + try: + LcDefList = XmlList(Msa, MsaLcDefs) + except: + pass + + IamLibrary = [] + LibClassList = [] + LibClassListIa32 = [] + LibClassListX64 = [] + LibClassListIpf = [] + LibClassListEbc = [] + + + if (len(LcDefList) > 0): + for Lc in LcDefList: + lcKeyword = "" + try: + lcKeyword = str(XmlElementData(Lc.getElementsByTagName("Keyword")[0])) + except: + raise SyntaxError, "The MSA is not correctly formed, a Library Class Keyword Element is required" + + lcUsage = "" + try: + lcUsage = str(XmlAttribute(Lc, "Usage")) + except: + raise SyntaxError, "The MSA is not correctly formed, a Usage Attribute is required for all Library Class Elements" + + Archs = "" + try: + Archs = str(XmlAttribute(Lc, "SupArchList")) + except: + pass + + Archs = chkArch(Archs) + + if (options.debug and options.verbose > 2): + print "Attr: ", lcUsage, lcKeyword, Archs + + if (options.convert): + lcKeyword = AutoGenLibraryMapping.get(lcKeyword, lcKeyword) + + if re.findall("PRODUCED", lcUsage, re.IGNORECASE): + try: + lcSupModList = "" + + try: + lcSupModList = 
str(XmlAttribute(Lc, "SupModuleList")) + except: + lcSupModList = "" + pass + + lcLine = lcKeyword + AutoGenLibClass.append(lcKeyword) + if len(lcSupModList) > 0: + lcLine = lcLine + "|" + lcSupModList + IamLibrary.insert(0, lcLine) + except: + pass + elif lcKeyword != "UefiDriverModelLib": + AutoGenLibClass.append(lcKeyword) + # This section handles the library classes that are CONSUMED + if "IA32" in Archs: + LibClassListIa32.insert(0, lcKeyword) + if "X64" in Archs: + LibClassListX64.insert(0, lcKeyword) + if "IPF" in Archs: + LibClassListIpf.insert(0, lcKeyword) + if "EBC" in Archs: + LibClassListEbc.insert(0, lcKeyword) + if "ALL" in Archs: + LibClassList.insert(0, lcKeyword) + if len(AutoGenDriverModel) > 0 and "UefiLib" not in LibClassList: + AutoGenLibClass.append("UefiLib") + LibClassList.insert(0, "UefiLib") + + AutoGenDxsFiles = [] + """ Get the Source File list """ + SrcFilenames = [] + SrcFilenamesIa32 = [] + SrcFilenamesX64 = [] + SrcFilenamesIpf = [] + SrcFilenamesEbc = [] + SrcFiles = "/ModuleSurfaceArea/SourceFiles/Filename" + SrcList = [] + try: + SrcList = XmlList(Msa, SrcFiles) + except: + pass + + if (len(SrcList) > 0): + for fn in SrcList: + file = "" + Archs = "" + + try: + Archs = fn.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + file = str(XmlElementData(fn)) + except: + pass + + if file.endswith(".dxs"): + AutoGenDxsFiles.append((file, Archs)) + else: + AutoGenSourceFiles.append(file) + if "IA32" in Archs: + SrcFilenamesIa32.insert(0, file) + if "X64" in Archs: + SrcFilenamesX64.insert(0, file) + if "IPF" in Archs: + SrcFilenamesIpf.insert(0, file) + if "EBC" in Archs: + SrcFilenamesEbc.insert(0, file) + if "ALL" in Archs: + SrcFilenames.insert(0, file) + + """ Package Dependency section """ + DbPkgList = "/FrameworkDatabase/PackageList/Filename" + WorkspacePkgs = [] + try: + WorkspacePkgs = XmlList(Fdb, DbPkgList) + except: + print "Could not tet the package data from the database" + sys.exit(1) + + 
PkgDb = [] + HeaderLocations = [] + + if (options.debug and options.verbose > 1): + print "Found %s packages in the WORKSPACE" % (len(WorkspacePkgs)) + + Dirs = [] + GuidDecls = [] + if (len(WorkspacePkgs) > 0): + SpdHeader = "/PackageSurfaceArea/SpdHeader/" + for Pkg in WorkspacePkgs[:]: + PackageGuid = "" + PackageVersion = "" + file = "" + try: + file = str(XmlElementData(Pkg)) + except: + pass + + if (options.debug and options.verbose > 2): + print "PKG:", file + + if fnmatch.fnmatch(file, "*.dec"): + print "parsing " + os.path.join(workspace, file) + PackageGuid = "" + PackageVersion = "" + try: + Lines = open(os.path.join(workspace, file)).readlines() + except: + print "Could not parse the Package file:", file + sys.exit(1) + + for Line in Lines: + Line = Line.split("#")[0] + Items = Line.split("=") + if len(Items) != 2: + continue + + Key = Items[0].strip().upper() + if Key == "PACKAGE_GUID": + PackageGuid = Items[1].strip() + if Key == "PACKAGE_VERSION": + PackageVersion = Items[1].strip() + + else: + Spd = openSpd(os.path.join(workspace, file)) + if (Spd == 'None'): + print "Could not parse the Package file:", file + sys.exit(1) + + path = os.path.split(file)[0] + file = file.replace(".nspd", ".dec") + file = file.replace(".spd", ".dec") + + try: + PackageGuid = str(XmlElement(Spd, SpdHeader + "GuidValue")) + except: + pass + + try: + PackageVersion = str(XmlElement(Spd, SpdHeader + "Version")) + except: + pass + + file = file + "|" + PackageGuid + "|" + PackageVersion + PkgDb.insert(0, file) + + GuidEntries = [] + try: + GuidEntries = XmlList(Spd, "/PackageSurfaceArea/GuidDeclarations/Entry") + except: + pass + + if (len(GuidEntries) > 0): + for Entry in GuidEntries[:]: + try: + GuidDecls.append(str(XmlElementData(Entry.getElementsByTagName("C_Name")[0])).strip()) + except: + pass + + + pHdrs = [] + try: + pHdrs = XmlList(Spd, "/PackageSurfaceArea/PackageHeaders/IncludePkgHeader") + except: + pass + + if (len(pHdrs) > 0): + for Hdr in pHdrs[:]: + try: + 
ModTypeList = str(Hdr.getAttribute("ModuleType")) + if (ModType in ModTypeList): + HeaderName= str(XmlElementData(Hdr))[0] + Dirs.insert(0, os.path.join(packagepath,str(os.path.split(HeaderName)))) + except: + pass + + # Get the Guid:Header from the Packages + SpdLcDec = "/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass" + lcList = [] + try: + lcList = XmlList(Spd, SpdLcDec) + except: + pass + + if (len(lcList) > 0): + for Lc in lcList[:]: + Name = "" + try: + Name = Lc.getAttribute("Name") + except: + pass + + Header = "" + try: + Header = XmlElementData(Lc.getElementsByTagName("IncludeHeader")[0]) + except: + pass + + if ((len(Name) > 0) and (len(Header) > 0)): + line = Name + "|" + os.path.join(path, Header) + if (options.debug and options.verbose > 2): + print "Adding:", line + HeaderLocations.insert(0, line) + + ishList = [] + try: + IndStdHeaders = "/PackageSurfaceArea/IndustryStdIncludes/IndustryStdHeader" + ishList = XmlList(Spd, IndStdHeaders) + except: + pass + + if (len(ishList) > 0): + for Lc in ishList[:]: + Name = "" + try: + Name = str(Lc.getAttribute("Name")).strip() + except: + pass + + Header = "" + try: + Header = str(XmlElementData(Lc.getElementsByTagName("IncludeHeader")[0])).strip() + except: + pass + + if ((len(Name) > 0) and (len(Header) > 0)): + line = Name + "|" + os.path.join(path, Header) + HeaderLocations.insert(0, str(line)) + + PkgList = [] + PkgListIa32 = [] + PkgListX64 = [] + PkgListIpf = [] + PkgListEbc = [] + Pkgs = "/ModuleSurfaceArea/PackageDependencies/Package" + pkgL = [] + try: + pkgL = XmlList(Msa, Pkgs) + except: + pass + + + gUnknownPkgGuid = {} + if (len(pkgL) > 0): + if (options.debug and options.verbose > 1): + print "Found %s packages in the module" % (len(pkgL)) + for pkg in pkgL[:]: + Archs = "" + pkgGuid = "" + pkgVer = "" + + FindPkgGuid = False + try: + Archs = pkg.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + pkgGuid = pkg.getAttribute("PackageGuid") + except: + pass 
+ + if options.convert: + if pkgGuid.lower() == "5e0e9358-46b6-4ae2-8218-4ab8b9bbdcec": + pkgGuid = "1E73767F-8F52-4603-AEB4-F29B510B6766" + if pkgGuid.lower() == "68169ab0-d41b-4009-9060-292c253ac43d": + pkgGuid = "BA0D78D6-2CAF-414b-BD4D-B6762A894288" + AutoGenPackage.append(pkgGuid) + try: + pkgVer = pkg.getAttribute("PackageVersion") + except: + pass + + for PkgEntry in PkgDb[:]: + if pkgGuid in PkgEntry: + if len(pkgVer) > 0: + if pkgVer in PkgEntry: + FindPkgGuid = True + if "IA32" in Archs: + PkgListIa32.insert(0, PkgEntry.split("|")[0]) + if "X64" in Archs: + PkgListX64.insert(0, PkgEntry.split("|")[0]) + if "IPF" in Archs: + PkgListIpf.insert(0, PkgEntry.split("|")[0]) + if "EBC" in Archs: + PkgListEbc.insert(0, PkgEntry.split("|")[0]) + if "ALL" in Archs: + PkgList.insert(0, PkgEntry.split("|")[0]) + else: + FindPkgGuid = True + if "IA32" in Archs: + PkgListIa32.insert(0, PkgEntry.split("|")[0]) + if "X64" in Archs: + PkgListX64.insert(0, PkgEntry.split("|")[0]) + if "IPF" in Archs: + PkgListIpf.insert(0, PkgEntry.split("|")[0]) + if "EBC" in Archs: + PkgListEbc.insert(0, PkgEntry.split("|")[0]) + if "ALL" in Archs: + PkgList.insert(0, PkgEntry.split("|")[0]) + + if not FindPkgGuid: + gUnknownPkgGuid[str(pkgGuid)] = 1 + + for UnknownPkgGuid in gUnknownPkgGuid: + print "Cannot resolve package dependency Guid:", UnknownPkgGuid + + PkgList.reverse() + PkgListIa32.reverse() + PkgListX64.reverse() + PkgListIpf.reverse() + PkgListEbc.reverse() + if (options.debug): + print "Package List:", PkgList + + + + """ Setup the Global GuidCName arrays that will hold data from various MSA locations """ + global GuidCName + global GuidCNameIa32 + global GuidCNameX64 + global GuidCNameIPF + global GuidCNameEBC + GuidCName = [] + GuidCNameIa32 = [] + GuidCNameX64 = [] + GuidCNameIPF = [] + GuidCNameEBC = [] + + """ Check for the GUIDs Element """ + Guids = "/ModuleSurfaceArea/Guids/GuidCNames" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if 
(len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + except: + pass + + try: + CName = str(XmlElementData(Guid.getElementsByTagName("GuidCName")[0])) + if CName in GuidDecls: + if (options.debug and options.verbose > 1): + print "Guids Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + else: + raise AssertionError, "Guid %s defined in %s is not declared in any package (.dec) file!" % (CName, filename) + except: + pass + + if (options.debug and options.verbose > 2): + print "Guid C Name List:", GuidCName + + """ Check for Events """ + Guids = "/ModuleSurfaceArea/Events/CreateEvents/EventTypes" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Type = str(XmlElementData(Guid.getElementsByTagName("EventType")[0])) + Usage += " Create Event: " + Type + except: + pass + + try: + CName = str(Guid.getAttribute("EventGuidCName")) + if CName in GuidDecls: + if (options.debug and options.verbose > 1): + print "CreateEvent Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + else: + if (len(DefinesComments) == 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + DefinesComments.append("# Create Event Guid C Name: " + CName + " Event Type: " + Type + "\n") + Flag = True + except: + pass + + if (Flag): + DefinesComments.append("#\n") + Flag = False + + Guids = "/ModuleSurfaceArea/Events/SignalEvents/EventTypes" + GuidList = [] + 
try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Type = str(XmlElementData(Guid.getElementsByTagName("EventType")[0])) + Usage += " Signal Event: " + Type + except: + pass + + try: + CName = str(Guid.getAttribute("EventGuidCName")) + if CName in GuidDecls: + if (options.debug and options.verbose > 1): + print "SignalEvent Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + else: + if (len(DefinesComments) == 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + DefinesComments.append("# Signal Event Guid C Name: " + CName + " Event Type: " + Type + "\n") + Flag = True + except: + pass + + if (Flag): + DefinesComments.append("#\n") + Flag = False + + """ Check the HOB guids """ + Guids = "/ModuleSurfaceArea/Hobs/HobTypes" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Type = str(XmlElementData(Guid.getElementsByTagName("HobType")[0])) + Usage += " Hob: " + Type + except: + pass + + try: + CName = str(Guid.getAttribute("HobGuidCName")) + if CName in GuidDecls: + if (options.debug and options.verbose > 1): + print "Hob Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + else: + if (len(DefinesComments) == 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + 
DefinesComments.append("# HOB Guid C Name: " + CName + " Hob Type: " + Type + "\n") + Flag = True + except: + if (len(DefinesComments) == 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + DefinesComments.append("# HOB: " + Type + "\n") + Flag = True + pass + + if (Flag): + DefinesComments.append("#\n") + Flag = False + + """ Check for the SystemTables Element """ + Guids = "/ModuleSurfaceArea/SystemTables/SystemTableCNames" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Usage += " System Table" + except: + pass + + try: + CName = str(XmlElementData(Guid.getElementsByTagName("SystemTableCName")[0])) + if (options.debug and options.verbose > 1): + print "System Table Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + except: + pass + + """ Check for the DataHubs Element """ + Guids = "/ModuleSurfaceArea/DataHubs/DataHubRecord" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Usage += " Data Hub" + except: + pass + + try: + CName = str(XmlElementData(Guid.getElementsByTagName("DataHubCName")[0])) + if (options.debug and options.verbose > 1): + print "Data Hub Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + except: + pass + + """ Check for the HiiPackages Element """ + Guids = "/ModuleSurfaceArea/HiiPackages/HiiPackage" + 
GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + Usage += " HII Formset" + except: + pass + + try: + CName = str(XmlElementData(Guid.getElementsByTagName("HiiCName")[0])) + if (options.debug and options.verbose > 1): + print "Hii Formset Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + except: + pass + + """ Check for the Variables Element """ + Guids = "/ModuleSurfaceArea/Variables/Variable" + GuidList = [] + try: + GuidList = XmlList(Msa, Guids) + except: + pass + + if (len(GuidList) > 0): + for Guid in GuidList: + Archs = "" + Usage = "" + CName = "" + VariableName = "" + + try: + Archs = Guid.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Guid.getAttribute("Usage") + except: + pass + + try: + VariableName = str(XmlElementData(Guid.getElementsByTagName("VariableName")[0])) + CName = str(XmlElementData(Guid.getElementsByTagName("GuidC_Name")[0])) + + HexData = VariableName.strip().split() + UniString = " L\"" + for dig in HexData[:]: + UniString += str(unichr(eval(dig))) + UniString += "\"" + + Usage += UniString + + if CName in set(GuidDecls): + removeDups(CName, GuidCName) + removeDups(CName, GuidCNameIa32) + removeDups(CName, GuidCNameX64) + removeDups(CName, GuidCNameIPF) + removeDups(CName, GuidCNameEBC) + + if (options.debug): + print "Variable Adding Guid CName: %-45s # %s Archs: %s" % (CName, Usage, Archs) + AddGuid(Archs, CName, Usage) + AutoGenGuid.append(CName) + else: + if (len(DefinesComments) == 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + DefinesComments.append("# Variable Guid C Name: " + 
CName + " Variable Name:" + UniString + "\n") + Flag = True + except: + pass + + if (Flag): + DefinesComments.append("#\n") + Flag = False + + """ Check for the Protocol Element """ + Protocols = "/ModuleSurfaceArea/Protocols/Protocol" + ProtocolList = [] + ProtocolCName = [] + ProtocolCNameIa32 = [] + ProtocolCNameX64 = [] + ProtocolCNameIPF = [] + ProtocolCNameEBC = [] + + try: + ProtocolList = XmlList(Msa, Protocols) + except: + pass + + if (len(ProtocolList) > 0): + for Protocol in ProtocolList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Protocol.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Protocol.getAttribute("Usage") + except: + pass + + try: + CName = str(XmlElementData(Protocol.getElementsByTagName("ProtocolCName")[0])) + AutoGenGuid.append(CName) + removeDups(CName, GuidCName) + removeDups(CName, GuidCNameIa32) + removeDups(CName, GuidCNameX64) + removeDups(CName, GuidCNameIPF) + removeDups(CName, GuidCNameEBC) + + if (options.debug and options.verbose > 1): + print "Found %s - %s - %s " % (CName, Usage, str(len(Archs))) + + if "IA32" in Archs: + ProtocolCNameIa32.insert(0, str(" %-45s # PROTOCOL %s" % (CName, Usage))) + if "X64" in Archs: + ProtocolCNameX64.insert(0, str(" %-45s # PROTOCOL %s" % (CName, Usage))) + if "IPF" in Archs: + ProtocolCNameIPF.insert(0, str(" %-45s # PROTOCOL %s" % (CName, Usage))) + if "EBC" in Archs: + ProtocolCNameEBC.insert(0, str(" %-45s # PROTOCOL %s" % (CName, Usage))) + if "ALL" in Archs: + ProtocolCName.insert(0, str(" %-45s # PROTOCOL %s" % (CName, Usage))) + except: + pass + + + Protocols = "/ModuleSurfaceArea/Protocols/ProtocolNotify" + try: + ProtocolList = XmlList(Msa, Protocols) + except: + pass + + if (len(ProtocolList) > 0): + for Protocol in ProtocolList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Protocol.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Protocol.getAttribute("Usage") + except: 
+ pass + + try: + CName = str(XmlElementData(Protocol.getElementsByTagName("ProtocolNotifyCName")[0])) + AutoGenGuid.append(CName) + removeDups(CName, GuidCName) + removeDups(CName, GuidCNameIa32) + removeDups(CName, GuidCNameX64) + removeDups(CName, GuidCNameIPF) + removeDups(CName, GuidCNameEBC) + + if "IA32" in Archs: + ProtocolCNameIa32.insert(0, " %-45s # PROTOCOL_NOTIFY %s" % (CName, Usage)) + if "X64" in Archs: + ProtocolCNameX64.insert(0, " %-45s # PROTOCOL_NOTIFY %s" % (CName, Usage)) + if "IPF" in Archs: + ProtocolCNameIPF.insert(0, " %-45s # PROTOCOL_NOTIFY %s" % (CName, Usage)) + if "EBC" in Archs: + ProtocolCNameEBC.insert(0, " %-45s # PROTOCOL_NOTIFY %s" % (CName, Usage)) + if "ALL" in Archs: + ProtocolCName.insert(0, " %-45s # PROTOCOL_NOTIFY %s" % (CName, Usage)) + except: + pass + + """ Check for the PPIs Element """ + PPIs = "/ModuleSurfaceArea/PPIs/Ppi" + PPIsList = [] + PpiCName = [] + PpiCNameIa32 = [] + PpiCNameX64 = [] + PpiCNameIPF = [] + PpiCNameEBC = [] + + try: + PPIsList = XmlList(Msa, PPIs) + except: + pass + + if (len(PPIsList) > 0): + for Ppi in PPIsList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = str(Ppi.getAttribute("SupArchList")) + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = str(Ppi.getAttribute("Usage")) + except: + pass + + try: + CName = str(XmlElementData(Ppi.getElementsByTagName("PpiCName")[0])).strip() + AutoGenGuid.append(CName) + removeDups(CName, GuidCName) + removeDups(CName, GuidCNameIa32) + removeDups(CName, GuidCNameX64) + removeDups(CName, GuidCNameIPF) + removeDups(CName, GuidCNameEBC) + + if "IA32" in Archs: + PpiCNameIa32.insert(0, " %-45s # PPI %s" % (CName, Usage)) + if "X64" in Archs: + PpiCNameX64.insert(0, " %-45s # PPI %s" % (CName, Usage)) + if "IPF" in Archs: + PpiCNameIPF.insert(0, " %-45s # PPI %s" % (CName, Usage)) + if "EBC" in Archs: + PpiCNameEBC.insert(0, " %-45s # PPI %s" % (CName, Usage)) + if "ALL" in Archs: + PpiCName.insert(0, " %-45s # PPI %s" % (CName, Usage)) 
+ except: + pass + + + PPIs = "/ModuleSurfaceArea/PPIs/PpiNotify" + try: + PPIsList = XmlList(Msa, PPIs) + except: + pass + + if (len(PPIsList) > 0): + for Ppi in PPIsList: + Archs = "" + Usage = "" + CName = "" + + try: + Archs = Ppi.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + Usage = Ppi.getAttribute("Usage") + except: + pass + + try: + CName = str(XmlElementData(Ppi.getElementsByTagName("PpiNotifyCName")[0])) + AutoGenGuid.append(CName) + removeDups(CName, GuidCName) + removeDups(CName, GuidCNameIa32) + removeDups(CName, GuidCNameX64) + removeDups(CName, GuidCNameIPF) + removeDups(CName, GuidCNameEBC) + + if "IA32" in Archs: + PpiCNameIa32.insert(0, " %-45s # PPI_NOTIFY %s" % (CName, Usage)) + if "X64" in Archs: + PpiCNameX64.insert(0, " %-45s # PPI_NOTIFY %s" % (CName, Usage)) + if "IPF" in Archs: + PpiCNameIPF.insert(0, " %-45s # PPI_NOTIFY %s" % (CName, Usage)) + if "EBC" in Archs: + PpiCNameEBC.insert(0, " %-45s # PPI_NOTIFY %s" % (CName, Usage)) + if "ALL" in Archs: + PpiCName.insert(0, " %-45s # PPI_NOTIFY %s" % (CName, Usage)) + except: + pass + + + """ Get the PCD entries now """ + PcdCoded = "/ModuleSurfaceArea/PcdCoded/PcdEntry" + PcdList = [] + try: + PcdList = XmlList(Msa, PcdCoded) + except: + pass + + (PcdFF, PcdFFIa32, PcdFFX64, PcdFFIpf, PcdFFEbc) = ([],[],[],[],[]) + (PcdFAB, PcdFABIa32, PcdFABX64, PcdFABIpf, PcdFABEbc) = ([],[],[],[],[]) + (PcdPIM, PcdPIMIa32, PcdPIMX64, PcdPIMIpf, PcdPIMEbc) = ([],[],[],[],[]) + (PcdDY, PcdDYIa32, PcdDYX64, PcdDYIpf, PcdDYEbc) = ([],[],[],[],[]) + (PcdDYE, PcdDYEIa32, PcdDYEX64, PcdDYEIpf, PcdDYEEbc) = ([],[],[],[],[]) + + if (len(PcdList) > 0): + for Pcd in PcdList: + Archs = "" + Usage = "" + CName = "" + DefVal = "" + + try: + Archs = Pcd.getAttribute("SupArchList") + except: + pass + + Archs = chkArch(Archs) + + try: + ItemType = Pcd.getAttribute("PcdItemType") + except: + pass + + try: + CName = str(XmlElementData(Pcd.getElementsByTagName("C_Name")[0])) + except: + 
raise SyntaxError, "ERROR: MSA has a PCD with no Pcd C_Name defined" + + try: + TSGC = str(XmlElementData(Pcd.getElementsByTagName("TokenSpaceGuidCName")[0])) + except: + pass + + try: + DefVal = str(XmlElementData(Pcd.getElementsByTagName("DefaultValue"))) + except: + pass + + if (len(DefVal) > 0): + line = TSGC + "." + CName + "|" + DefVal + else: + line = TSGC + "." + CName + + if (ItemType == "FEATURE_FLAG"): + if ("IA32" in Archs): + PcdFFIa32.insert(0, line) + if ("IPF" in Archs): + PcdFFIpf.insert(0, line) + if ("X64" in Archs): + PcdFFX64.insert(0, line) + if ("EBC" in Archs): + PcdFFEbc.insert(0, line) + if ("ALL" in Archs): + PcdFF.insert(0, line) + elif (ItemType == "FIXED_AT_BUILD"): + if ("IA32" in Archs): + PcdFABIa32.insert(0, line) + if ("IPF" in Archs): + PcdFABIpf.insert(0, line) + if ("X64" in Archs): + PcdFABX64.insert(0, line) + if ("EBC" in Archs): + PcdFABEbc.insert(0, line) + if ("ALL" in Archs): + PcdFAB.insert(0, line) + elif (ItemType == "PATCHABLE_IN_MODULE"): + if ("IA32" in Archs): + PcdPIMIa32.insert(0, line) + if ("IPF" in Archs): + PcdPIMIpf.insert(0, line) + if ("X64" in Archs): + PcdPIMX64.insert(0, line) + if ("EBC" in Archs): + PcdPIMEbc.insert(0, line) + if ("ALL" in Archs): + PcdFAB.insert(0, line) + elif (ItemType == "DYNAMIC_EX"): + if ("IA32" in Archs): + PcdDYEIa32.insert(0, line) + if ("IPF" in Archs): + PcdDYEIpf.insert(0, line) + if ("X64" in Archs): + PcdDYEX64.insert(0, line) + if ("EBC" in Archs): + PcdDYEEbc.insert(0, line) + if ("ALL" in Archs): + PcdDYE.insert(0, line) + else: + if ("IA32" in Archs): + PcdDYIa32.insert(0, line) + if ("IPF" in Archs): + PcdDYIpf.insert(0, line) + if ("X64" in Archs): + PcdDYX64.insert(0, line) + if ("EBC" in Archs): + PcdDYEbc.insert(0, line) + if ("ALL" in Archs): + PcdDY.insert(0, line) + + """ User Extensions Section """ + UEList = [] + UESectionList = [] + try: + UESectionList = XmlList(Msa, "/ModuleSurfaceArea/UserExtensions") + except: + pass + + if (len(UESectionList) > 0): 
+ for UE in UESectionList[:]: + UserId = "" + Identifier = "" + Value = "" + + try: + UserId = str(UE.getAttribute("UserID")) + except: + raise SyntaxError, "ERROR: Malformed MSA, No UserID Specified in UserExtensions element" + + try: + Identifier = str(UE.getAttribute("Identifier")) + except: + raise SyntaxError, "ERROR: Malformed MSA, No Identifier Specified in UserExtensions element" + + if (options.debug): + print "FOUND A UE Element", UserId, Identifier + + try: + Value = str(XmlElementData(UE)) + except: + pass + + Entry = [UserId, Identifier, Value] + UEList.insert(0, Entry) + + + + if (len(Externlist) > 0): + AutoGenSource = "" + AutoGenDefinitionSource = "" + AutoGenEntryPointSource = "" + AutoGenUnloadSource = "" + if (len(AutoGenDriverModel) > 0): + AutoGenCode = AddDriverBindingProtocolStatement(AutoGenDriverModel) + AutoGenEntryPointSource += AutoGenCode[0] + AutoGenUnloadSource += AutoGenCode[1] + AutoGenDeclaration += AutoGenCode[3] + + + if (len(AutoGenExitBootServices) > 0): + print "[Warning] Please manually add Create Event statement for Exit Boot Service Event!" + if options.event: + AutoGenCode = AddBootServiceEventStatement(AutoGenExitBootServices) + AutoGenEntryPointSource += AutoGenCode[0] + AutoGenUnloadSource += AutoGenCode[1] + AutoGenDefinitionSource += AutoGenCode[2] + AutoGenDeclaration += AutoGenCode[3] + + if (len(AutoGenVirtualAddressChanged) > 0): + print "[Warning] Please manually add Create Event statement for Virtual Address Change Event!" 
+ if options.event: + AutoGenCode = AddVirtualAddressEventStatement(AutoGenVirtualAddressChanged) + AutoGenEntryPointSource += AutoGenCode[0] + AutoGenUnloadSource += AutoGenCode[1] + AutoGenDefinitionSource += AutoGenCode[2] + AutoGenDeclaration += AutoGenCode[3] + + if AutoGenEntryPointSource != "": + OldEntryPoint = AutoGenEntryPoint + AutoGenCode = AddNewEntryPointContentsStatement(BaseName, AutoGenEntryPoint, AutoGenEntryPointSource) + AutoGenEntryPoint = AutoGenCode[0] + AutoGenEntryPointSource = AutoGenCode[1] + AutoGenDeclaration += AutoGenCode[2] + + + if AutoGenEntryPoint != "": + DriverModules.insert(0, " %-30s = %s\n" % ("ENTRY_POINT" , AutoGenEntryPoint)) + + AutoGenSource = AutoGenDefinitionSource + AutoGenEntryPointSource + AutoGenUnloadSource + + if (lFlag): + DefinesComments.append("#\n") + + if (Flag and len(DefinesComments) > 0): + DefinesComments.insert(0, "\n#\n# The following information is for reference only and not required by the build tools.\n#\n") + + if (options.debug and options.verbose > 2): + if (len(DriverModules) > 0): + print DriverModules + if (len(LibraryModules) > 0): + print LibraryModules + if (len(DefinesComments) > 0): + print DefinesComments + + Depex = [] + DepexIa32 = [] + DepexX64 = [] + DepexIpf = [] + DepexEbc = [] + + for DxsFile, Archs in AutoGenDxsFiles: + fileContents = openSourceFile(AutoGenModuleFolder, DxsFile) + Contents, Unresolved = TranslateDpxSection(fileContents) + if Contents == "": + print "[warning] Cannot read dxs expression" + else: + if (len(Unresolved) > 0): + print "[warning] Guid Macro(s): %s cannot find corresponding cNames. 
Please resolve it in [depex] section in extened inf" % ",".join(Unresolved) + + if ("IA32" in Archs): + DepexIa32.insert(0, Contents) + if ("IPF" in Archs): + DepexIpf.insert(0, Contents) + if ("X64" in Archs): + DepexX64.insert(0, Contents) + if ("EBC" in Archs): + DepexEbc.insert(0, Contents) + if ("ALL" in Archs): + Depex.insert(0, Contents) + + AutoGenSourceHeaderFormat = "/**@file\n %s\n\n %s\n %s\n %s\n**/\n\n%s" + includeCommonHeaderFileStatement = "#include \"%s\"" % commonHeaderFilename + + AutoGenHeader += AddSystemIncludeStatement(ModType, AutoGenPackage) + AutoGenHeader += AddGuidStatement(AutoGenGuid) + AutoGenHeader += AddLibraryClassStatement(AutoGenLibClass) + + if options.manual: + saveSourceFile(AutoGenModuleFolder, "CommonHeader.txt", AutoGenHeader) + else: + + commonHeaderFilename2 = re.sub("(?=[^a-z])", "_", commonHeaderFilename) + commonHeaderFilename2 = "_" + commonHeaderFilename2.replace(".", "").upper() + "_" + briefDiscription = "Common header file shared by all source files." + detailedDiscription = "This file includes package header files, library classes and protocol, PPI & GUID definitions.\n" + AutoGenHeader += AutoGenDeclaration + AutoGenHeader = "#ifndef %s\n#define %s\n\n\n%s\n#endif\n" % (commonHeaderFilename2, commonHeaderFilename2, AutoGenHeader) + AutoGenHeader = AutoGenSourceHeaderFormat % (briefDiscription, detailedDiscription, CopyRight, License, AutoGenHeader) + saveSourceFile(AutoGenModuleFolder, commonHeaderFilename, AutoGenHeader) + SrcFilenames.append(commonHeaderFilename) + + for source in AutoGenSourceFiles: + extension = os.path.splitext(source)[1] + if extension == ".c": + sourceContents = openSourceFile(AutoGenModuleFolder, source) + sourceContents = AddCommonInclusionStatement(sourceContents, includeCommonHeaderFileStatement) + saveSourceFile(AutoGenModuleFolder, source, sourceContents) + + + if AutoGenSource != "": + briefDiscription = "Entry Point Source file." 
+ detailedDiscription = "This file contains the user entry point \n" + AutoGenSource = AutoGenSourceHeaderFormat % (briefDiscription, detailedDiscription, CopyRight, License, AutoGenSource) + AutoGenSource = AddCommonInclusionStatement(AutoGenSource, includeCommonHeaderFileStatement) + + saveSourceFile(AutoGenModuleFolder, entryPointFilename, AutoGenSource) + SrcFilenames.append(entryPointFilename) + + + + + # DONE Getting data, now output it in INF format. + Msa.unlink() + Fdb.unlink() + Output = [] + + """ Print the converted data format """ + head = "#/** @file\n" + head += "# " + str(Abstract) + "\n#\n" + head += "# " + str(Description).strip().replace("\n", "\n# ") + "\n" + head += "# " + str(CopyRight) + "\n#\n" + head += "# " + str(License).replace("\n", "\n# ").replace(" ", " ").strip() + "\n#\n" + head += "#\n#**/\n" + + Output.append(head) + if (options.debug): + print head + +## Defines = "\n" + "#"*80+ "\n#\n" +## if (BinModule != "false"): +## Defines += "# Defines Section - statements that will be processed to generate a binary image.\n" +## else: +## Defines += "# Defines Section - statements that will be processed to create a Makefile.\n" +## Defines += "#\n" + "#"*80 + "\n" + + Defines = "\n" + Defines += "[Defines]\n" + Defines += " %-30s = %s\n" % ("INF_VERSION", "0x00010005") + Defines += " %-30s = %s\n" % ("BASE_NAME", BaseName) + Defines += " %-30s = %s\n" % ("FILE_GUID", GuidValue) + Defines += " %-30s = %s\n" % ("MODULE_TYPE", ModType) + Defines += " %-30s = %s\n" % ("VERSION_STRING", VerString) + + if (len(PcdIsDriver) > 0): + Defines += " %-30s = %s\n" % ("PCD_DRIVER", PcdIsDriver) + + if (len(IamLibrary) > 0): + lcstr = "" + for lc in IamLibrary[:]: + lcstr += lc + " " + Defines += " %-30s = %s" % ("LIBRARY_CLASS", lcstr) + Defines += "\n" + + if (len(SpecList) > 0): + for spec in SpecList[:]: + (specname, specval) = spec.split() + Defines += " %-30s = %s\n" % (specname, specval) + Defines += "\n" + + if (len(DriverModules) > 0): + for 
line in DriverModules[:]: + Defines += line + + if (len(LibraryModules) > 0): + for line in LibraryModules[:]: + Defines += line + + if (len(DefinesComments) > 0): + for line in DefinesComments[:]: + Defines += line + + Output.append(Defines) + + if (options.debug): + print Defines + + if (BinModule != "false"): + """ Binary Module, so sources are really binaries. """ +## Sources = "\n" + "#"*80 + "\n#\n" +## Sources += "# Binaries Section - list of binary files that are required for the build\n# to succeed.\n" +## Sources += "#\n" + "#"*80 + "\n\n" + Sources = "\n" + if ModType == "UEFI_APPLICATION": + FileType = "UEFI_APP" + if options.verbose > 0: + print "WARNING: Binary Module: %s is assuming UEFI_APPLICATION file type." % (options.filename) + else: + FileType = "FV" + if options.verbose > 0: + print "WARNING: Binary Module: %s is assuming FV file type." % (options.filename) + + if (len(SrcFilenames) > 0): + Sources += "[Binaries.common]\n" + for file in SrcFilenames[:]: + file = file.replace("\\", "/") + Sources += " " + FileType + "|" + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesIa32) > 0): + Sources += "[Binaries.Ia32]\n" + for file in SrcFilenamesIa32[:]: + file = file.replace("\\", "/") + Sources += " " + FileType + "|" + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesX64) > 0): + Sources += "[Binaries.X64]\n" + for file in SrcFilenamesX64[:]: + file = file.replace("\\", "/") + Sources += " " + FileType + "|" + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesIpf) > 0): + Sources += "[Binaries.IPF]\n" + for file in SrcFilenamesIpf[:]: + file = file.replace("\\", "/") + Sources += " " + FileType + "|" + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesEbc) > 0): + Sources += "[Binaries.EBC]\n" + for file in SrcFilenamesEbc[:]: + file = file.replace("\\", "/") + Sources += " " + FileType + "|" + file + "\n" + Sources += "\n" + + Output.append(Sources) + if (options.debug): + print Sources + else: +## Sources = "\n" + "#"*80 + 
"\n#\n" +## Sources += "# Sources Section - list of files that are required for the build to succeed.\n" +## Sources += "#\n" + "#"*80 + "\n\n" + Sources = "\n" + if (len(SrcFilenames) > 0): + Sources += "[Sources.common]\n" + for file in SrcFilenames[:]: + Sources += " " + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesIa32) > 0): + Sources += "[Sources.Ia32]\n" + for file in SrcFilenamesIa32[:]: + Sources += " " + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesX64) > 0): + Sources += "[Sources.X64]\n" + for file in SrcFilenamesX64[:]: + Sources += " " + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesIpf) > 0): + Sources += "[Sources.IPF]\n" + for file in SrcFilenamesIpf[:]: + Sources += " " + file + "\n" + Sources += "\n" + + if (len(SrcFilenamesEbc) > 0): + Sources += "[Sources.EBC]\n" + for file in SrcFilenamesEbc[:]: + Sources += " " + file + "\n" + Sources += "\n" + + Output.append(Sources) + if (options.debug): + print Sources + + + includeLine = "" + if ((len(HeaderLocations) > 0) or (len(Dirs) > 0)): + allLcs = set(LibClassList + LibClassListIa32 + LibClassListX64 + LibClassListIpf + LibClassListEbc + Dirs) + Lines = [] + for line in HeaderLocations[:]: + for Lc in allLcs: + (keyword, header) = line.split("|") + if Lc in keyword: + if (options.debug): + print "FOUND", Lc, "in", keyword, "header", header + path = "$(WORKSPACE)/" + os.path.split(header)[0] + Lines.insert(0, path.strip()) + Includes = "" +## Includes = "\n" + "#"*80 + "\n#\n" +## Includes += "# Includes Section - list of Include locations that are required for\n" +## Includes += "# this module.\n" +## Includes += "#\n" + "#"*80 + "\n\n" +## Includes += "[Includes]\n" +## includeLines = [] +## includeLines = set(Lines) +## if (options.debug): +## print "There are", len(includeLines), "entries" +## for Line in includeLines: +## Includes += " " + str(Line).strip().replace("\\", "/") + "\n" + + Output.append(Includes) + if (options.debug): + print Includes + + + + if 
((len(PkgList) + len(PkgListIa32) + len(PkgListX64) + len(PkgListIpf) + len(PkgListEbc)) > 0): + """ We do this if and only if we have Package Dependencies """ +## PackageDepends = "\n" + "#"*80 + "\n#\n" +## PackageDepends += "# Package Dependency Section - list of Package files that are required for\n" +## PackageDepends += "# this module.\n" +## PackageDepends += "#\n" + "#"*80 + "\n\n" + PackageDepends = "\n" + if (len(PkgList) > 0): + PackageDepends += "[Packages]\n" + for lc in PkgList[:]: + lc = lc.replace("\\", "/") + PackageDepends += " " + lc + "\n" + PackageDepends += "\n" + + if (len(PkgListIa32) > 0): + PackageDepends += "[Packages.IA32]\n" + for lc in PkgListIa32[:]: + lc = lc.replace("\\", "/") + PackageDepends += " " + lc + "\n" + PackageDepends += "\n" + + if (len(PkgListX64) > 0): + PackageDepends += "[Packages.X64]\n" + for lc in PkgListX64[:]: + lc = lc.replace("\\", "/") + PackageDepends += " " + lc + "\n" + PackageDepends += "\n" + + if (len(PkgListIpf) > 0): + PackageDepends += "[Packages.IPF]\n" + for lc in PkgListIpf[:]: + lc = lc.replace("\\", "/") + PackageDepends += " " + lc + "\n" + PackageDepends += "\n" + + if (len(PkgListEbc) > 0): + PackageDepends += "[Packages.EBC]\n" + for lc in PkgListEbc[:]: + lc = lc.replace("\\", "/") + PackageDepends += " " + lc + "\n" + PackageDepends += "\n" + + Output.append(PackageDepends) + if (options.debug): + print PackageDepends + + if ((len(LibClassList) + len(LibClassListIa32) + len(LibClassListX64) + len(LibClassListIpf) + len(LibClassListEbc)) > 0): +## LibraryClasses = "\n" + "#"*80 + "\n#\n" +## LibraryClasses += "# Library Class Section - list of Library Classes that are required for\n" +## LibraryClasses += "# this module.\n" +## LibraryClasses += "#\n" + "#"*80 + "\n\n" + + LibraryClasses = "\n" + if (len(LibClassList) > 0): + LibraryClasses += "[LibraryClasses]\n" + for lc in LibClassList[:]: + LibraryClasses += " " + lc + "\n" + LibraryClasses += "\n" + + if (len(LibClassListIa32) > 0): + 
LibraryClasses += "[LibraryClasses.IA32]\n" + for lc in LibClassListIa32[:]: + LibraryClasses += " " + lc + "\n" + LibraryClasses += "\n" + + if (len(LibClassListX64) > 0): + LibraryClasses += "[LibraryClasses.X64]\n" + for lc in LibClassListX64[:]: + LibraryClasses += " " + lc + "\n" + LibraryClasses += "\n" + + if (len(LibClassListIpf) > 0): + LibraryClasses += "[LibraryClasses.IPF]\n" + for lc in LibClassListIpf[:]: + LibraryClasses += " " + lc + "\n" + LibraryClasses += "\n" + + if (len(LibClassListEbc) > 0): + LibraryClasses += "[LibraryClasses.EBC]\n" + for lc in LibClassListEbc[:]: + LibraryClasses += " " + lc + "\n" + LibraryClasses += "\n" + + Output.append(LibraryClasses) + if (options.debug): + print LibraryClasses + + # Print the Guids sections + if (len(GuidCName) + len(GuidCNameIa32) + len(GuidCNameIPF) + len(GuidCNameX64) + len(GuidCNameEBC)) > 0: +## GuidSection = "\n" + "#"*80 + "\n#\n" +## GuidSection += "# Guid C Name Section - list of Guids that this module uses or produces.\n" +## GuidSection += "#\n" + "#"*80 + "\n\n" + GuidSection = "\n" + if (len(GuidCName) > 0): + GuidSection += "[Guids]\n" + for Guid in GuidCName[:]: + GuidSection += Guid + "\n" + GuidSection += "\n" + + if (len(GuidCNameIa32) > 0): + GuidSection += "[Guids.IA32]\n" + for Guid in GuidCNameIa32[:]: + GuidSection += Guid + "\n" + GuidSection += "\n" + + if (len(GuidCNameX64) > 0): + GuidSection += "[Guids.X64]\n" + for Guid in GuidCNameX64[:]: + GuidSection += Guid + "\n" + GuidSection += "\n" + + if (len(GuidCNameIPF) > 0): + GuidSection += "[Guids.IPF]\n" + for Guid in GuidCNameIPF[:]: + GuidSection += Guid + "\n" + GuidSection += "\n" + + if (len(GuidCNameEBC) > 0): + GuidSection += "[Guids.EBC]\n" + for Guid in GuidCNameEBC[:]: + GuidSection += Guid + "\n" + GuidSection += "\n" + + Output.append(GuidSection) + if (options.debug and options.verbose > 1): + print GuidSection + + # Print the Protocol sections + if (len(ProtocolCName) + len(ProtocolCNameIa32) + 
len(ProtocolCNameIPF) + len(ProtocolCNameX64) + len(ProtocolCNameEBC)) > 0: +## ProtocolsSection = "\n" + "#"*80 + "\n#\n" +## ProtocolsSection += "# Protocol C Name Section - list of Protocol and Protocol Notify C Names\n" +## ProtocolsSection += "# that this module uses or produces.\n" +## ProtocolsSection += "#\n" + "#"*80 + "\n\n" + + ProtocolsSection = "\n" + if (len(ProtocolCName) > 0): + ProtocolsSection += "[Protocols]\n" + for Guid in ProtocolCName[:]: + ProtocolsSection += Guid + "\n" + ProtocolsSection += "\n" + + if (len(ProtocolCNameIa32) > 0): + ProtocolsSection += "[Protocols.IA32]\n" + for Guid in ProtocolCNameIa32[:]: + ProtocolsSection += Guid + "\n" + ProtocolsSection += "\n" + + if (len(ProtocolCNameX64) > 0): + ProtocolsSection += "[Protocols.X64]\n" + for Guid in ProtocolCNameX64[:]: + ProtocolsSection += Guid + "\n" + ProtocolsSection += "\n" + + if (len(ProtocolCNameIPF) > 0): + ProtocolsSection += "[Protocols.IPF]\n" + for Guid in ProtocolCNameIPF[:]: + ProtocolsSection += Guid + "\n" + ProtocolsSection += "\n" + + if (len(ProtocolCNameEBC) > 0): + ProtocolsSection += "[Protocols.EBC]\n" + for Guid in ProtocolCNameEBC[:]: + ProtocolsSection += Guid + "\n" + ProtocolsSection += "\n" + + Output.append(ProtocolsSection) + if (options.debug): + print ProtocolsSection + + # Print the PPI sections + if (len(PpiCName) + len(PpiCNameIa32) + len(PpiCNameIPF) + len(PpiCNameX64) + len(PpiCNameEBC)) > 0: +## PpiSection = "\n" + "#"*80 + "\n#\n" +## PpiSection += "# PPI C Name Section - list of PPI and PPI Notify C Names that this module\n" +## PpiSection += "# uses or produces.\n" +## PpiSection += "#\n" + "#"*80 + "\n\n" + + PpiSection = "\n" + if (len(PpiCName) > 0): + PpiSection += "[Ppis]\n" + for Guid in PpiCName[:]: + PpiSection += Guid + "\n" + PpiSection += "\n" + + if (len(PpiCNameIa32) > 0): + PpiSection += "[Ppis.IA32]\n" + for Guid in PpiCNameIa32[:]: + PpiSection += Guid + "\n" + PpiSection += "\n" + + if (len(PpiCNameX64) > 0): + 
PpiSection += "[Ppis.X64]\n" + for Guid in PpiCNameX64[:]: + PpiSection += Guid + "\n" + PpiSection += "\n" + + if (len(PpiCNameIPF) > 0): + PpiSection += "[Ppis.IPF]\n" + for Guid in PpiCNameIPF[:]: + PpiSection += Guid + "\n" + PpiSection += "\n" + + if (len(PpiCNameEBC) > 0): + PpiSection += "[Ppis.EBC]\n" + for Guid in PpiCNameEBC[:]: + PpiSection += Guid + "\n" + PpiSection += "\n" + + Output.append(PpiSection) + if (options.debug): + print PpiSection + + # Print the PCD sections + if ((len(PcdFF)+len(PcdFFIa32)+len(PcdFFX64)+len(PcdFFIpf)+len(PcdFFEbc)) > 0): +## FeatureFlagSection = "\n" + "#"*80 + "\n#\n" +## FeatureFlagSection += "# Pcd FEATURE_FLAG - list of PCDs that this module is coded for.\n" +## FeatureFlagSection += "#\n" + "#"*80 + "\n\n" + + FeatureFlagSection = "\n" + if (len(PcdFF) > 0): + FeatureFlagSection += "[FeaturePcd.common]\n" + for Entry in PcdFF[:]: + FeatureFlagSection += " " + Entry + "\n" + FeatureFlagSection += "\n" + if (len(PcdFFIa32) > 0): + FeatureFlagSection += "[FeaturePcd.IA32]\n" + for Entry in PcdFFIa32[:]: + FeatureFlagSection += " " + Entry + "\n" + FeatureFlagSection += "\n" + if (len(PcdFFX64) > 0): + FeatureFlagSection += "[FeaturePcd.X64]\n" + for Entry in PcdFFX64[:]: + FeatureFlagSection += " " + Entry + "\n" + FeatureFlagSection += "\n" + if (len(PcdFFIpf) > 0): + FeatureFlagSection += "[PcdsFeatureFlag.IPF]\n" + for Entry in PcdFFIpf[:]: + FeatureFlagSection += " " + Entry + "\n" + FeatureFlagSection += "\n" + if (len(PcdFFEbc) > 0): + FeatureFlagSection += "[FeaturePcd.EBC]\n" + for Entry in PcdFFEbc[:]: + FeatureFlagSection += " " + Entry + "\n" + FeatureFlagSection += "\n" + + Output.append(FeatureFlagSection) + if (options.debug): + print FeatureFlagSection + + if ((len(PcdFAB)+len(PcdFABIa32)+len(PcdFABX64)+len(PcdFABIpf)+len(PcdFABEbc)) > 0): +## FixedAtBuildSection = "\n" + "#"*80 + "\n#\n" +## FixedAtBuildSection += "# Pcd FIXED_AT_BUILD - list of PCDs that this module is coded for.\n" +## 
FixedAtBuildSection += "#\n" + "#"*80 + "\n\n" + + FixedAtBuildSection = "\n" + if (len(PcdFAB) > 0): + FixedAtBuildSection += "[FixedPcd.common]\n" + for Entry in PcdFAB[:]: + FixedAtBuildSection += " " + Entry + "\n" + FixedAtBuildSection += "\n" + if (len(PcdFABIa32) > 0): + FixedAtBuildSection += "[FixedPcd.IA32]\n" + for Entry in PcdFABIa32[:]: + FixedAtBuildSection += " " + Entry + "\n" + FixedAtBuildSection += "\n" + if (len(PcdFABX64) > 0): + FixedAtBuildSection += "[FixedPcd.X64]\n" + for Entry in PcdFABX64[:]: + FixedAtBuildSection += " " + Entry + "\n" + FixedAtBuildSection += "\n" + if (len(PcdFABIpf) > 0): + FixedAtBuildSection += "[FixedPcd.IPF]\n" + for Entry in PcdFABIpf[:]: + FixedAtBuildSection += " " + Entry + "\n" + FixedAtBuildSection += "\n" + if (len(PcdFABEbc) > 0): + FixedAtBuildSection += "[FixedPcd.EBC]\n" + for Entry in PcdFABEbc[:]: + FixedAtBuildSection += " " + Entry + "\n" + FixedAtBuildSection += "\n" + + Output.append(FixedAtBuildSection) + if (options.debug): + print FixedAtBuildSection + + if ((len(PcdPIM)+len(PcdPIMIa32)+len(PcdPIMX64)+len(PcdPIMIpf)+len(PcdPIMEbc)) > 0): +## PatchableInModuleSection = "\n" + "#"*80 + "\n#\n" +## PatchableInModuleSection += "# Pcd PATCHABLE_IN_MODULE - list of PCDs that this module is coded for.\n" +## PatchableInModuleSection += "#\n" + "#"*80 + "\n\n" + + PatchableInModuleSection = "\n" + if (len(PcdPIM) > 0): + PatchableInModuleSection += "[PatchPcd.common]\n" + for Entry in PcdPIM[:]: + PatchableInModuleSection += " " + Entry + "\n" + PatchableInModuleSection += "\n" + if (len(PcdPIMIa32) > 0): + PatchableInModuleSection += "[PatchPcd.IA32]\n" + for Entry in PcdPIMIa32[:]: + PatchableInModuleSection += " " + Entry + "\n" + PatchableInModuleSection += "\n" + if (len(PcdPIMX64) > 0): + PatchableInModuleSection += "[PatchPcd.X64]\n" + for Entry in PcdPIMX64[:]: + PatchableInModuleSection += " " + Entry + "\n" + PatchableInModuleSection += "\n" + if (len(PcdPIMIpf) > 0): + 
PatchableInModuleSection += "[PatchPcd.IPF]\n" + for Entry in PcdPIMIpf[:]: + PatchableInModuleSection += " " + Entry + "\n" + PatchableInModuleSection += "\n" + if (len(PcdPIMEbc) > 0): + PatchableInModuleSection += "[PatchPcd.EBC]\n" + for Entry in PcdPIMEbc[:]: + PatchableInModuleSection += " " + Entry + "\n" + PatchableInModuleSection += "\n" + + Output.append(PatchableInModuleSection) + if (options.debug): + print PatchableInModuleSection + + if ((len(PcdDYE)+len(PcdDYEIa32)+len(PcdDYEX64)+len(PcdDYEIpf)+len(PcdDYEEbc)) > 0): +## DynamicExSection = "\n" + "#"*80 + "\n#\n" +## DynamicExSection += "# Pcd DYNAMIC_EX - list of PCDs that this module is coded for.\n" +## DynamicExSection += "#\n" + "#"*80 + "\n\n" + + DynamicExSection = "\n" + if (len(PcdDYE) > 0): + DynamicExSection += "[PcdEx.common]\n" + for Entry in PcdDYE[:]: + DynamicExSection += " " + Entry + "\n" + DynamicExSection += "\n" + if (len(PcdDYEIa32) > 0): + DynamicExSection += "[PcdEx.IA32]\n" + for Entry in PcdDYEIa32[:]: + DynamicExSection += " " + Entry + "\n" + DynamicExSection += "\n" + if (len(PcdDYEX64) > 0): + DynamicExSection += "[PcdEx.X64]\n" + for Entry in PcdDYEX64[:]: + DynamicExSection += " " + Entry + "\n" + DynamicExSection += "\n" + if (len(PcdDYEIpf) > 0): + DynamicExSection += "[PcdEx.IPF]\n" + for Entry in PcdDYEIpf[:]: + DynamicExSection += " " + Entry + "\n" + DynamicExSection += "\n" + if (len(PcdDYEEbc) > 0): + DynamicExSection += "[PcdEx.EBC]\n" + for Entry in PcdDYEEbc[:]: + DynamicExSection += " " + Entry + "\n" + DynamicExSection += "\n" + + Output.append(DynamicExSection) + if (options.debug): + print DynamicExSection + + if ((len(PcdDY)+len(PcdDYIa32)+len(PcdDYX64)+len(PcdDYIpf)+len(PcdDYEbc)) > 0): +## DynamicSection = "\n" + "#"*80 + "\n#\n" +## DynamicSection += "# Pcd DYNAMIC - list of PCDs that this module is coded for.\n" +## DynamicSection += "#\n" + "#"*80 + "\n\n" + + DynamicSection = "\n" + if (len(PcdDY) > 0): + DynamicSection += "[Pcd.common]\n" + for 
Entry in PcdDY[:]: + DynamicSection += " " + Entry + "\n" + DynamicSection += "\n" + if (len(PcdDYIa32) > 0): + DynamicSection += "[Pcd.IA32]\n" + for Entry in PcdDYIa32[:]: + DynamicSection += " " + Entry + "\n" + DynamicSection += "\n" + if (len(PcdDYX64) > 0): + DynamicSection += "[Pcd.X64]\n" + for Entry in PcdDYX64[:]: + DynamicSection += " " + Entry + "\n" + DynamicSection += "\n" + if (len(PcdDYIpf) > 0): + DynamicSection += "[Pcd.IPF]\n" + for Entry in PcdDYIpf[:]: + DynamicSection += " " + Entry + "\n" + DynamicSection += "\n" + if (len(PcdDYEbc) > 0): + DynamicSection += "[Pcd.EBC]\n" + for Entry in PcdDYEbc[:]: + DynamicSection += " " + Entry + "\n" + DynamicSection += "\n" + + Output.append(DynamicSection) + if (options.debug): + print DynamicSection + + if ((len(Depex) + len(DepexIa32) + len(DepexX64) + len(DepexIpf) + len(DepexEbc)) > 0): + """ We do this if and only if we have Package Dependencies """ +## Dpx = "\n" + "#"*80 + "\n#\n" +## Dpx += "# Dependency Expression Section - list of Dependency expressions that are required for\n" +## Dpx += "# this module.\n" +## Dpx += "#\n" + "#"*80 + "\n\n" + Dpx = "\n" + if (len(Depex) > 0): + Dpx += "[Depex]\n" + for lc in Depex[:]: + Dpx += " " + lc + "\n" + Dpx += "\n" + + if (len(DepexIa32) > 0): + Dpx += "[Depex.IA32]\n" + for lc in DepexIa32[:]: + Dpx += " " + lc + "\n" + Dpx += "\n" + + if (len(DepexX64) > 0): + Dpx += "[Depex.X64]\n" + for lc in DepexX64[:]: + Dpx += " " + lc + "\n" + Dpx += "\n" + + if (len(DepexIpf) > 0): + Dpx += "[Depex.IPF]\n" + for lc in DepexIpf[:]: + Dpx += " " + lc + "\n" + Dpx += "\n" + + if (len(DepexEbc) > 0): + Dpx += "[Depex.EBC]\n" + for lc in DepexEbc[:]: + Dpx += " " + lc + "\n" + Dpx += "\n" + + Output.append(Dpx) + if (options.debug): + print Dpx + + if (len(MBOlines) > 0): + BuildSection = "" +## BuildSection = "\n" + "#"*80 + "\n#\n" +## BuildSection += "# Build Options - list of custom build options for this module.\n" +## BuildSection += "#\n" + "#"*80 + "\n\n" 
+ BuildSection += "\n[BuildOptions]\n" + for mbo in MBOlines: + tool, targs = mbo.split("=",2) + BuildSection += " %-40s = %s\n" % (tool.strip(), targs.strip()) + + Output.append(BuildSection) + if (options.debug): + print BuildSection + + + if (len(UEList) > 0): + UserExtensionSection = "" + for UE in UEList[:]: + UserExtensionSection += "[UserExtensions." + UE[0] + '."' + UE[1] + '"]\n' + if (len(UE[2]) > 0): + UserExtensionSection += '"' + UE[2] + '"\n' + else: + UserExtensionSection += "\n" + + Output.append(UserExtensionSection) + if (options.debug): + print UserExtensionSection + + print "write file", outputFile + if (options.autowrite): + fo = open(outputFile, "w") + for Section in Output[:]: + fo.writelines(Section) + if (options.verbose > 1): + print Section + fo.close() + elif (options.outfile): + fo = open(outputFile, "w") + for Section in Output[:]: + fo.writelines(Section) + fo.close() + else: + for Section in Output[:]: + print Section + + +if __name__ == '__main__': + + global options + global args + options,args = myOptionParser() + + main() + sys.exit(0) + diff --git a/BaseTools/Source/Python/MigrationMsa2Inf/__init__.py b/BaseTools/Source/Python/MigrationMsa2Inf/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/MigrationMsa2Inf/__init__.py diff --git a/BaseTools/Source/Python/MkBOM/__init__.py b/BaseTools/Source/Python/MkBOM/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/MkBOM/__init__.py diff --git a/BaseTools/Source/Python/PackagingTool/DependencyRules.py b/BaseTools/Source/Python/PackagingTool/DependencyRules.py new file mode 100644 index 0000000000..402694054e --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/DependencyRules.py @@ -0,0 +1,185 @@ +## @file
+# This file is for installed package information database operations
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os
+
+import Common.EdkLogger as EdkLogger
+import IpiDb
+
## Status codes used by the dependency checks.  NOTE(review): the Check*
# methods rebind their ReturnCode parameter to one of these on failure, but
# rebinding an int argument is local to the callee in Python and never
# reaches the caller — only the boolean return values are reliable.
(DEPEX_CHECK_SUCCESS, DEPEX_CHECK_MODULE_NOT_FOUND, \
DEPEX_CHECK_PACKAGE_NOT_FOUND, DEPEX_CHECK_DP_NOT_FOUND) = (0, 1, 2, 3)

## DependencyRules
#
# This class implements the dependency checks that are run against the
# installed package information database before a distribution package
# is installed into, or removed from, the workspace.
#
# @param object: Inherited from object class
# @param Db: The installed package information database (IpiDatabase) queried
#
# @var IpiDb: The database instance all checks are answered from
#
class DependencyRules(object):
    def __init__(self, Db):
        # All queries are delegated to this installed-package-info database.
        self.IpiDb = Db

    ## Check whether a module exists in the current workspace.
    #
    # A module is known either as part of an installed package or as a
    # standalone module.
    #
    # @param Guid:    GUID of the module
    # @param Version: version of the module
    #
    def CheckModuleExists(self, Guid, Version, ReturnCode = DEPEX_CHECK_SUCCESS):
        EdkLogger.verbose("\nCheck module exists in workspace started ...")
        ModuleList = self.IpiDb.GetModInPackage(Guid, Version)
        ModuleList.extend(self.IpiDb.GetStandaloneModule(Guid, Version))
        EdkLogger.verbose("Check module exists in workspace ... DONE!")
        if len(ModuleList) > 0:
            return True
        ReturnCode = DEPEX_CHECK_MODULE_NOT_FOUND  # informational only, see note above
        return False

    ## Check whether a module's package dependencies are satisfied.
    #
    # Each dependency must exist in the workspace or, failing that, be
    # provided by the distribution package being installed (DpObj).
    #
    # @param ModuleObj: module object whose PackageDependencies are checked
    # @param DpObj:     optional distribution package object
    #
    def CheckModuleDepexSatisfied(self, ModuleObj, DpObj = None, ReturnCode = DEPEX_CHECK_SUCCESS):
        EdkLogger.verbose("\nCheck module depex met by workspace started ...")
        for Dep in ModuleObj.PackageDependencies:
            Exist = self.CheckPackageExists(Dep.PackageGuid, Dep.PackageVersion, ReturnCode)
            if not Exist:
                if DpObj == None:
                    ReturnCode = DEPEX_CHECK_PACKAGE_NOT_FOUND
                    return False
                # The workspace lacks the package; see whether the incoming
                # distribution package provides it.
                # NOTE(review): indentation reconstructed from a mangled diff;
                # a GUID match with a version mismatch fails immediately
                # (later GUID/version pairs are not considered), matching the
                # original textual nesting — confirm against upstream.
                for GuidVerPair in DpObj.PackageSurfaceArea.keys():
                    if Dep.PackageGuid == GuidVerPair[0]:
                        # An empty/None version requirement matches any version.
                        if Dep.PackageVersion == None or len(Dep.PackageVersion) == 0:
                            break
                        if Dep.PackageVersion == GuidVerPair[1]:
                            break
                        else:
                            ReturnCode = DEPEX_CHECK_PACKAGE_NOT_FOUND
                            return False
                else:
                    # for-else: the distribution package does not carry the
                    # needed package either.
                    ReturnCode = DEPEX_CHECK_PACKAGE_NOT_FOUND
                    return False
        # BUGFIX: the original logged completion after 'return True', which
        # made the log call unreachable; log before returning.
        EdkLogger.verbose("Check module depex met by workspace ... DONE!")
        return True

    ## Check whether a package exists in the current workspace.
    #
    # @param Guid:    GUID of the package
    # @param Version: version of the package
    #
    def CheckPackageExists(self, Guid, Version, ReturnCode = DEPEX_CHECK_SUCCESS):
        EdkLogger.verbose("\nCheck package exists in workspace started ...")
        PkgList = self.IpiDb.GetPackage(Guid, Version)
        # BUGFIX: completion message was placed after both return statements
        # in the original and never executed.
        EdkLogger.verbose("Check package exists in workspace ... DONE!")
        if len(PkgList) > 0:
            return True
        ReturnCode = DEPEX_CHECK_PACKAGE_NOT_FOUND  # informational only, see note above
        return False

    ## Check whether a package's dependencies are satisfied.
    #
    # True when every module contained in the package has its dependency
    # expression satisfied.
    #
    # @param PkgObj: package object whose Modules are checked
    # @param DpObj:  optional distribution package object
    #
    def CheckPackageDepexSatisfied(self, PkgObj, DpObj = None, ReturnCode = DEPEX_CHECK_SUCCESS):
        for ModKey in PkgObj.Modules.keys():
            if not self.CheckModuleDepexSatisfied(PkgObj.Modules[ModKey], DpObj, ReturnCode):
                return False
        return True

    ## Check whether a distribution package exists in the current workspace.
    #
    # @param Guid:    GUID of the distribution package
    # @param Version: version of the distribution package
    #
    def CheckDpExists(self, Guid, Version, ReturnCode = DEPEX_CHECK_SUCCESS):
        EdkLogger.verbose("\nCheck DP exists in workspace started ...")
        DpList = self.IpiDb.GetDp(Guid, Version)
        # BUGFIX: completion message was unreachable in the original.
        EdkLogger.verbose("Check DP exists in workspace ... DONE!")
        if len(DpList) > 0:
            return True
        ReturnCode = DEPEX_CHECK_DP_NOT_FOUND  # informational only, see note above
        return False

    ## Check whether a distribution package's dependencies are satisfied.
    #
    # True when every package and every standalone module carried by the
    # distribution package has its dependencies satisfied.
    #
    # @param DpObj: distribution package object
    #
    def CheckDpDepexSatisfied(self, DpObj, ReturnCode = DEPEX_CHECK_SUCCESS):
        for PkgKey in DpObj.PackageSurfaceArea.keys():
            if not self.CheckPackageDepexSatisfied(DpObj.PackageSurfaceArea[PkgKey], DpObj, ReturnCode):
                return False
        for ModKey in DpObj.ModuleSurfaceArea.keys():
            # BUGFIX: the original read PkgObj.ModuleSurfaceArea[ModKey],
            # indexing the loop variable left over from the package loop
            # above (a NameError when PackageSurfaceArea is empty); the
            # standalone modules live on DpObj.
            if not self.CheckModuleDepexSatisfied(DpObj.ModuleSurfaceArea[ModKey], DpObj, ReturnCode):
                return False
        return True

    ## Check whether a distribution package can be safely removed.
    #
    # Removal is allowed only when no remaining module in the workspace
    # depends on a package installed from this distribution package.
    #
    # @param DpGuid:    GUID of the distribution package
    # @param DpVersion: version of the distribution package
    #
    def CheckDpDepexForRemove(self, DpGuid, DpVersion, ReturnCode = DEPEX_CHECK_SUCCESS):
        ModList = self.IpiDb.GetDpDependentModuleList(DpGuid, DpVersion)
        return len(ModList) == 0
##
# Entry point used when this module is executed directly as a script rather
# than being imported by one of the packaging tools.
#
# Standalone execution only initializes logging at the most verbose debug
# level; the checks in DependencyRules are exercised by the packaging tools
# that import this module.
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+
+
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/PackagingTool/InstallPkg.py b/BaseTools/Source/Python/PackagingTool/InstallPkg.py new file mode 100644 index 0000000000..963a654ea1 --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/InstallPkg.py @@ -0,0 +1,309 @@ +## @file
+# Install distribution package.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import sys
+import glob
+import shutil
+import traceback
+import platform
+from optparse import OptionParser
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.XmlParser import *
+from Common.InfClassObjectLight import Inf
+from Common.DecClassObjectLight import Dec
+
+from PackageFile import *
+from IpiDb import *
+from DependencyRules import *
+import md5
+
+# Version and Copyright
+VersionNumber = "0.1"
+__version__ = "%prog Version " + VersionNumber
+__copyright__ = "Copyright (c) 2008, Intel Corporation All rights reserved."
+
+## Check environment variables
+#
+# Check environment variables that must be set for build. Currently they are
+#
+# WORKSPACE The directory all packages/platforms start from
+# EDK_TOOLS_PATH The directory contains all tools needed by the build
+# PATH $(EDK_TOOLS_PATH)/Bin/<sys> must be set in PATH
+#
+# If any of above environment variable is not set or has error, the build
+# will be broken.
+#
def CheckEnvVariable():
    """Validate the WORKSPACE environment variable required by the build.

    Reports a fatal tool error (via EdkLogger.error) when WORKSPACE is
    unset, names a non-existent directory, or contains a space character;
    otherwise writes the normalized path back into os.environ.
    """
    # WORKSPACE must be defined before anything else can be checked.
    if "WORKSPACE" not in os.environ:
        EdkLogger.error("InstallPkg", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="WORKSPACE")

    Workspace = os.path.normpath(os.environ["WORKSPACE"])
    if not os.path.exists(Workspace):
        EdkLogger.error("InstallPkg", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData="%s" % Workspace)
    elif ' ' in Workspace:
        # The build tools cannot cope with spaces in the workspace path.
        EdkLogger.error("InstallPkg", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
                        ExtraData=Workspace)
    # Store the normalized form back so later consumers see a clean path.
    os.environ["WORKSPACE"] = Workspace
+
## Parse command line options
#
# Uses the standard Python module optparse to parse the command line options
# of this tool.
#
# @retval Opt   An optparse.Values object containing the parsed options
#               (positional arguments are parsed but deliberately ignored)
#
def MyOptionParser():
    """Parse the command line options of this tool.

    Uses the standard optparse module; positional arguments are parsed but
    deliberately ignored — only the options matter to InstallPkg.

    @retval Opt   An optparse.Values object containing the parsed options
    """
    UsageString = "%prog -i <distribution_package> [-t] [-f] [-q | -v] [-h]"

    Parser = OptionParser(description=__copyright__, version=__version__, prog="InstallPkg", usage=UsageString)

    Parser.add_option("-?", action="help", help="show this help message and exit")

    Parser.add_option("-i", "--distribution-package", action="store", type="string", dest="PackageFile",
                      help="The distribution package to be installed")

    # store_true options need no 'type'; the original passed a redundant type=None.
    Parser.add_option("-t", "--install-tools", action="store_true", dest="Tools",
                      help="Specify it to install tools or ignore the tools of the distribution package.")

    Parser.add_option("-f", "--misc-files", action="store_true", dest="MiscFiles",
                      help="Specify it to install misc file or ignore the misc files of the distribution package.")

    # -q / -v / -d all write the same dest; the last one given on the
    # command line wins.
    Parser.add_option("-q", "--quiet", action="store_const", dest="LogLevel", const=EdkLogger.QUIET,
                      help="Disable all messages except FATAL ERRORS.")

    Parser.add_option("-v", "--verbose", action="store_const", dest="LogLevel", const=EdkLogger.VERBOSE,
                      help="Turn on verbose output")

    Parser.add_option("-d", "--debug", action="store", type="int", dest="LogLevel",
                      help="Enable debug messages at specified level.")

    Parser.set_defaults(LogLevel=EdkLogger.INFO)

    # parse_args returns (options, args); the positional args are unused
    # (the original bound them to a never-read local).
    Opt = Parser.parse_args()[0]

    return Opt
+
def InstallNewPackage(WorkspaceDir, Path):
    """Return a package directory that does not yet exist in the workspace.

    If WorkspaceDir/Path already exists, the user is prompted for another
    location until an unused one is given; an empty answer aborts the
    installation through EdkLogger.error.

    @param WorkspaceDir: workspace root directory
    @param Path:         proposed directory, relative to the workspace
    @retval the first proposed or user-entered path that is still unused
    """
    # Iterate instead of the original tail recursion so an arbitrary number
    # of re-prompts cannot exhaust the recursion limit.  The original also
    # stripped '\r'/'\n' from the answer twice; once is enough.
    while os.path.exists(os.path.normpath(os.path.join(WorkspaceDir, Path))):
        sys.stdout.write(
            "Directory [%s] already exists, please select another location, press [Enter] with no input to quit:\n" % Path)
        Path = sys.stdin.readline().replace('\r', '').replace('\n', '')
        if Path == '':
            # EdkLogger.error raises, terminating the prompt loop.
            EdkLogger.error("InstallPkg", UNKNOWN_ERROR, "User interrupt")
    return Path
+
def InstallNewFile(WorkspaceDir, File):
    """Return a file path that does not yet exist in the workspace.

    If WorkspaceDir/File already exists, the user is prompted for another
    path until an unused one is given; an empty answer aborts the
    installation through EdkLogger.error.

    @param WorkspaceDir: workspace root directory
    @param File:         proposed file path, relative to the workspace
    @retval the first proposed or user-entered path that is still unused
    """
    # Iterate instead of the original tail recursion so an arbitrary number
    # of re-prompts cannot exhaust the recursion limit.  The original also
    # stripped '\r'/'\n' from the answer twice; once is enough.
    while os.path.exists(os.path.normpath(os.path.join(WorkspaceDir, File))):
        sys.stdout.write(
            "File [%s] already exists, please select another path, press [Enter] with no input to quit:\n" % File)
        File = sys.stdin.readline().replace('\r', '').replace('\n', '')
        if File == '':
            # EdkLogger.error raises, terminating the prompt loop.
            EdkLogger.error("InstallPkg", UNKNOWN_ERROR, "User interrupt")
    return File
+
+## Tool entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Options = None
+ DistFileName = 'dist.pkg'
+ ContentFileName = 'content.zip'
+ DistFile, ContentZipFile, UnpackDir = None, None, None
+
+ Options = MyOptionParser()
+ try:
+ if Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.SetLevel(Options.LogLevel + 1)
+ else:
+ EdkLogger.SetLevel(Options.LogLevel)
+
+ CheckEnvVariable()
+ WorkspaceDir = os.environ["WORKSPACE"]
+ if not Options.PackageFile:
+ EdkLogger.error("InstallPkg", OPTION_NOT_SUPPORTED, ExtraData="Must specify one distribution package")
+
+ # unzip dist.pkg file
+ EdkLogger.quiet("Unzipping and parsing distribution package XML file ... ")
+ DistFile = PackageFile(Options.PackageFile)
+ UnpackDir = os.path.normpath(os.path.join(WorkspaceDir, ".tmp"))
+ DistPkgFile = DistFile.UnpackFile(DistFileName, os.path.normpath(os.path.join(UnpackDir, DistFileName)))
+ if not DistPkgFile:
+ EdkLogger.error("InstallPkg", FILE_NOT_FOUND, "File [%s] is broken in distribution package" %DistFileName)
+
+ # Generate distpkg
+ DistPkgObj = DistributionPackageXml()
+ DistPkg = DistPkgObj.FromXml(DistPkgFile)
+
+ # prepare check dependency
+ Db = IpiDatabase(os.path.normpath(os.path.join(WorkspaceDir, "Conf/DistributionPackageDatabase.db")))
+ Db.InitDatabase()
+ Dep = DependencyRules(Db)
+
+ # Check distribution package exist
+ if Dep.CheckDpExists(DistPkg.Header.Guid, DistPkg.Header.Version):
+ EdkLogger.error("InstallPkg", UNKNOWN_ERROR, "This distribution package has been installed", ExtraData=DistPkg.Header.Name)
+
+ # unzip contents.zip file
+ ContentFile = DistFile.UnpackFile(ContentFileName, os.path.normpath(os.path.join(UnpackDir, ContentFileName)))
+ ContentZipFile = PackageFile(ContentFile)
+ if not ContentFile:
+ EdkLogger.error("InstallPkg", FILE_NOT_FOUND, "File [%s] is broken in distribution package" %ContentFileName)
+
+ # verify MD5 signature
+ Md5Sigature = md5.new(open(ContentFile).read())
+ if DistPkg.Header.Signature != Md5Sigature.hexdigest():
+ EdkLogger.error("InstallPkg", FILE_CHECKSUM_FAILURE, ExtraData=ContentFile)
+
+ # Check package exist and install
+ for Guid,Version,Path in DistPkg.PackageSurfaceArea:
+ PackagePath = os.path.dirname(Path)
+ NewPackagePath = PackagePath
+ Package = DistPkg.PackageSurfaceArea[Guid,Version,Path]
+ EdkLogger.info("Installing package ... %s" % Package.PackageHeader.Name)
+ if Dep.CheckPackageExists(Guid, Version):
+ EdkLogger.quiet("Package [%s] has been installed" %Path)
+ NewPackagePath = InstallNewPackage(WorkspaceDir, PackagePath)
+ Package.FileList = []
+ for Item in Package.MiscFiles.Files:
+ FromFile = os.path.join(PackagePath, Item.Filename)
+ ToFile = os.path.normpath(os.path.join(WorkspaceDir, NewPackagePath, Item.Filename))
+ ContentZipFile.UnpackFile(FromFile, ToFile)
+ Package.FileList.append(ToFile)
+
+ # Update package
+ Package.PackageHeader.CombinePath = Package.PackageHeader.CombinePath.replace(PackagePath, NewPackagePath, 1)
+ # Update modules of package
+ Module = None
+ for ModuleGuid, ModuleVersion, ModulePath in Package.Modules:
+ Module = Package.Modules[ModuleGuid, ModuleVersion, ModulePath]
+ NewModulePath = ModulePath.replace(PackagePath, NewPackagePath, 1)
+ del Package.Modules[ModuleGuid, ModuleVersion, ModulePath]
+ Package.Modules[ModuleGuid, ModuleVersion, NewModulePath] = Module
+ del DistPkg.PackageSurfaceArea[Guid,Version,Path]
+ DistPkg.PackageSurfaceArea[Guid,Version,Package.PackageHeader.CombinePath] = Package
+
+# SaveFileOnChange(os.path.join(Options.InstallDir, ModulePath, Module.Header.Name, ".inf"), Inf.ModuleToInf(Module), False)
+# EdkLogger.info("Installing package ... %s" % Package.Header.Name)
+# shutil.copytree(os.path.join(ContentFileDir, Path), Options.InstallDir)
+# SaveFileOnChange(os.path.join(Options.InstallDir, Path, Package.Header.Name, ".dec"), Dec.PackageToDec(Package), False)
+
+ # Check module exist and install
+ Module = None
+ for Guid,Version,Path in DistPkg.ModuleSurfaceArea:
+ ModulePath = os.path.dirname(Path)
+ NewModulePath = ModulePath
+ Module = DistPkg.ModuleSurfaceArea[Guid,Version,Path]
+ EdkLogger.info("Installing module ... %s" % Module.ModuleHeader.Name)
+ if Dep.CheckModuleExists(Guid, Version):
+ EdkLogger.quiet("Module [%s] has been installed" %Path)
+ NewModulePath = InstallNewPackage(WorkspaceDir, ModulePath)
+ Module.FileList = []
+ for Item in Module.MiscFiles.Files:
+ ModulePath = ModulePath[os.path.normpath(ModulePath).rfind(os.path.normpath('/'))+1:]
+ FromFile = os.path.join(ModulePath, Item.Filename)
+ ToFile = os.path.normpath(os.path.join(WorkspaceDir, NewModulePath, Item.Filename))
+ ContentZipFile.UnpackFile(FromFile, ToFile)
+ Module.FileList.append(ToFile)
+
+# EdkLogger.info("Installing module ... %s" % Module.Header.Name)
+# shutil.copytree(os.path.join(ContentFileDir, Path), Options.InstallDir)
+# SaveFileOnChange(os.path.join(Options.InstallDir, Path, Module.Header.Name, ".inf"), Inf.ModuleToInf(Module), False)
+
+ # Update module
+ Module.ModuleHeader.CombinePath = Module.ModuleHeader.CombinePath.replace(os.path.dirname(Path), NewModulePath, 1)
+ del DistPkg.ModuleSurfaceArea[Guid,Version,Path]
+ DistPkg.ModuleSurfaceArea[Guid,Version,Module.ModuleHeader.CombinePath] = Module
+#
+#
+# for Guid,Version,Path in DistPkg.PackageSurfaceArea:
+# print Guid,Version,Path
+# for item in DistPkg.PackageSurfaceArea[Guid,Version,Path].FileList:
+# print item
+# for Guid,Version,Path in DistPkg.ModuleSurfaceArea:
+# print Guid,Version,Path
+# for item in DistPkg.ModuleSurfaceArea[Guid,Version,Path].FileList:
+# print item
+
+ if Options.Tools:
+ EdkLogger.info("Installing tools ... ")
+ for File in DistPkg.Tools.Files:
+ FromFile = File.Filename
+ ToFile = InstallNewFile(WorkspaceDir, FromFile)
+ ContentZipFile.UnpackFile(FromFile, ToFile)
+ if Options.MiscFiles:
+ EdkLogger.info("Installing misc files ... ")
+ for File in DistPkg.MiscellaneousFiles.Files:
+ FromFile = File.Filename
+ ToFile = InstallNewFile(WorkspaceDir, FromFile)
+ ContentZipFile.UnpackFile(FromFile, ToFile)
+
+ # update database
+ EdkLogger.quiet("Update Distribution Package Database ...")
+ Db.AddDPObject(DistPkg)
+
+ except FatalError, X:
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = X.args[0]
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ except:
+ EdkLogger.error(
+ "\nInstallPkg",
+ CODE_ERROR,
+ "Unknown fatal error when installing [%s]" % Options.PackageFile,
+ ExtraData="\n(Please send email to dev@buildtools.tianocore.org for help, attaching following call stack trace!)\n",
+ RaiseError=False
+ )
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ EdkLogger.quiet("Removing temp files ... ")
+ if DistFile:
+ DistFile.Close()
+ if ContentZipFile:
+ ContentZipFile.Close()
+ if UnpackDir:
+ shutil.rmtree(UnpackDir)
+
+ EdkLogger.quiet("DONE")
+ Progressor.Abort()
+
# Script entry point when run directly (not imported); the process exit
# status is whatever Main() returns.
if __name__ == '__main__':
    sys.exit(Main())
diff --git a/BaseTools/Source/Python/PackagingTool/IpiDb.py b/BaseTools/Source/Python/PackagingTool/IpiDb.py new file mode 100644 index 0000000000..8661edbca4 --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/IpiDb.py @@ -0,0 +1,629 @@ +## @file
+# This file is for installed package information database operations
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os
+import time
+import Common.EdkLogger as EdkLogger
+
+from CommonDataClass import DistributionPackageClass
+
+## IpiDb
+#
+# This class represents the installed package information database
+# Add/Remove/Get installed distribution package information here.
+#
+#
+# @param object: Inherited from object class
+# @param DbPath: A string for the path of the database
+#
+# @var Conn: Connection of the database
+# @var Cur: Cursor of the connection
+#
class IpiDatabase(object):
    ## Constructor
    #
    # Open (creating on first use) the sqlite3 database file, ensuring its
    # parent directory exists, and remember the table names used throughout.
    #
    # @param DbPath: A string for the path of the database file
    #
    def __init__(self, DbPath):
        Dir = os.path.dirname(DbPath)
        if not os.path.isdir(Dir):
            os.mkdir(Dir)
        self.Conn = sqlite3.connect(DbPath, isolation_level = 'DEFERRED')
        self.Conn.execute("PRAGMA page_size=4096")
        self.Conn.execute("PRAGMA synchronous=OFF")
        self.Cur = self.Conn.cursor()
        self.DpTable = 'DpInfo'
        self.PkgTable = 'PkgInfo'
        self.ModInPkgTable = 'ModInPkgInfo'
        self.StandaloneModTable = 'StandaloneModInfo'
        self.ModDepexTable = 'ModDepexInfo'
        self.DpFileListTable = 'DpFileListInfo'

    ## Initialize the database: create all tables if they do not yet exist.
    #
    def InitDatabase(self):
        EdkLogger.verbose("\nInitialize IPI database started ...")

        # One distribution package (DP) per (Guid, Version)
        SqlCommand = """create table IF NOT EXISTS %s (DpGuid TEXT NOT NULL,
                       DpVersion TEXT NOT NULL,
                       InstallTime REAL NOT NULL,
                       PkgFileName TEXT,
                       PRIMARY KEY (DpGuid, DpVersion)
                      )""" % self.DpTable
        self.Cur.execute(SqlCommand)

        # Every file installed by a DP, keyed by its path
        SqlCommand = """create table IF NOT EXISTS %s (FilePath TEXT NOT NULL,
                       DpGuid TEXT,
                       DpVersion TEXT,
                       PRIMARY KEY (FilePath)
                      )""" % self.DpFileListTable
        self.Cur.execute(SqlCommand)

        # Packages installed from a DP
        SqlCommand = """create table IF NOT EXISTS %s (PackageGuid TEXT NOT NULL,
                       PackageVersion TEXT NOT NULL,
                       InstallTime REAL NOT NULL,
                       DpGuid TEXT,
                       DpVersion TEXT,
                       InstallPath TEXT NOT NULL,
                       PRIMARY KEY (PackageGuid, PackageVersion, InstallPath)
                      )""" % self.PkgTable
        self.Cur.execute(SqlCommand)

        # Modules that belong to an installed package
        SqlCommand = """create table IF NOT EXISTS %s (ModuleGuid TEXT NOT NULL,
                       ModuleVersion TEXT NOT NULL,
                       InstallTime REAL NOT NULL,
                       PackageGuid TEXT,
                       PackageVersion TEXT,
                       InstallPath TEXT NOT NULL,
                       PRIMARY KEY (ModuleGuid, ModuleVersion, InstallPath)
                      )""" % self.ModInPkgTable
        self.Cur.execute(SqlCommand)

        # Modules installed standalone (directly from a DP)
        SqlCommand = """create table IF NOT EXISTS %s (ModuleGuid TEXT NOT NULL,
                       ModuleVersion TEXT NOT NULL,
                       InstallTime REAL NOT NULL,
                       DpGuid TEXT,
                       DpVersion TEXT,
                       InstallPath TEXT NOT NULL,
                       PRIMARY KEY (ModuleGuid, ModuleVersion, InstallPath)
                      )""" % self.StandaloneModTable
        self.Cur.execute(SqlCommand)

        # Package dependencies of installed modules
        SqlCommand = """create table IF NOT EXISTS %s (ModuleGuid TEXT NOT NULL,
                       ModuleVersion TEXT NOT NULL,
                       InstallPath TEXT NOT NULL,
                       DepexGuid TEXT,
                       DepexVersion TEXT
                      )""" % self.ModDepexTable
        self.Cur.execute(SqlCommand)

        self.Conn.commit()

        EdkLogger.verbose("Initialize IPI database ... DONE!")

    ## Record a whole distribution package object in the database.
    #
    # Inserts every package (with its modules, their dependencies and file
    # list), every standalone module, and finally the DP row itself.
    #
    # @param DpObj: A DistributionPackageClass object
    #
    def AddDPObject(self, DpObj):
        for PkgKey in DpObj.PackageSurfaceArea.keys():
            PkgGuid = PkgKey[0]
            PkgVersion = PkgKey[1]
            PkgInstallPath = PkgKey[2]
            self.AddPackage(PkgGuid, PkgVersion, DpObj.Header.Guid, DpObj.Header.Version, PkgInstallPath)
            PkgObj = DpObj.PackageSurfaceArea[PkgKey]
            for ModKey in PkgObj.Modules.keys():
                ModGuid = ModKey[0]
                ModVersion = ModKey[1]
                ModInstallPath = ModKey[2]
                self.AddModuleInPackage(ModGuid, ModVersion, PkgGuid, PkgVersion, ModInstallPath)
                ModObj = PkgObj.Modules[ModKey]
                for Dep in ModObj.PackageDependencies:
                    DepexGuid = Dep.PackageGuid
                    DepexVersion = Dep.PackageVersion
                    self.AddModuleDepex(ModGuid, ModVersion, ModInstallPath, DepexGuid, DepexVersion)
            for FilePath in PkgObj.FileList:
                self.AddDpFilePathList(DpObj.Header.Guid, DpObj.Header.Version, FilePath)

        for ModKey in DpObj.ModuleSurfaceArea.keys():
            ModGuid = ModKey[0]
            ModVersion = ModKey[1]
            ModInstallPath = ModKey[2]
            self.AddStandaloneModule(ModGuid, ModVersion, DpObj.Header.Guid, DpObj.Header.Version, ModInstallPath)
            ModObj = DpObj.ModuleSurfaceArea[ModKey]
            for Dep in ModObj.PackageDependencies:
                DepexGuid = Dep.PackageGuid
                DepexVersion = Dep.PackageVersion
                self.AddModuleDepex(ModGuid, ModVersion, ModInstallPath, DepexGuid, DepexVersion)
            for FilePath in ModObj.FileList:
                self.AddDpFilePathList(DpObj.Header.Guid, DpObj.Header.Version, FilePath)

        self.AddDp(DpObj.Header.Guid, DpObj.Header.Version, DpObj.Header.FileName)

    ## Add a distribution install information row.
    #
    # NOTE(review): values are interpolated directly into the SQL text here
    # and in the other Add*/Get* methods; GUIDs/versions are tool-generated,
    # but parameterized queries (sqlite3 '?' placeholders) would be safer.
    #
    # @param Guid:        DP GUID
    # @param Version:     DP version; empty/None is stored as 'N/A'
    # @param PkgFileName: DP file name; empty/None is stored as 'N/A'
    #
    def AddDp(self, Guid, Version, PkgFileName = None):
        if Version is None or len(Version.strip()) == 0:
            Version = 'N/A'

        if PkgFileName is None or len(PkgFileName.strip()) == 0:
            PkgFileName = 'N/A'

        #
        # Add newly installed DP information to DB.
        #
        CurrentTime = time.time()
        SqlCommand = """insert into %s values('%s', '%s', %s, '%s')""" % (self.DpTable, Guid, Version, CurrentTime, PkgFileName)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Record one file installed by a DP.
    #
    # @param DpGuid:    DP GUID
    # @param DpVersion: DP version
    # @param Path:      Installed file path
    #
    def AddDpFilePathList(self, DpGuid, DpVersion, Path):
        SqlCommand = """insert into %s values('%s', '%s', '%s')""" % (self.DpFileListTable, Path, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Add a package install information row.
    #
    # @param Guid:      Package GUID
    # @param Version:   Package version; empty/None is stored as 'N/A'
    # @param DpGuid:    Owning DP GUID; empty/None is stored as 'N/A'
    # @param DpVersion: Owning DP version; empty/None is stored as 'N/A'
    # @param Path:      Package install path
    #
    def AddPackage(self, Guid, Version, DpGuid = None, DpVersion = None, Path = ''):
        if Version is None or len(Version.strip()) == 0:
            Version = 'N/A'

        if DpGuid is None or len(DpGuid.strip()) == 0:
            DpGuid = 'N/A'

        if DpVersion is None or len(DpVersion.strip()) == 0:
            DpVersion = 'N/A'

        #
        # Add newly installed package information to DB.
        #
        CurrentTime = time.time()
        SqlCommand = """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % (self.PkgTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Add an install information row for a module that came with a package.
    #
    # @param Guid:       Module GUID
    # @param Version:    Module version; empty/None is stored as 'N/A'
    # @param PkgGuid:    Owning package GUID; empty/None is stored as 'N/A'
    # @param PkgVersion: Owning package version; empty/None is stored as 'N/A'
    # @param Path:       Module install path
    #
    def AddModuleInPackage(self, Guid, Version, PkgGuid = None, PkgVersion = None, Path = ''):
        if Version is None or len(Version.strip()) == 0:
            Version = 'N/A'

        if PkgGuid is None or len(PkgGuid.strip()) == 0:
            PkgGuid = 'N/A'

        if PkgVersion is None or len(PkgVersion.strip()) == 0:
            PkgVersion = 'N/A'

        #
        # Add module from package information to DB.
        #
        CurrentTime = time.time()
        SqlCommand = """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % (self.ModInPkgTable, Guid, Version, CurrentTime, PkgGuid, PkgVersion, Path)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Add an install information row for a standalone module.
    #
    # @param Guid:      Module GUID
    # @param Version:   Module version; empty/None is stored as 'N/A'
    # @param DpGuid:    Owning DP GUID; empty/None is stored as 'N/A'
    # @param DpVersion: Owning DP version; empty/None is stored as 'N/A'
    # @param Path:      Module install path
    #
    def AddStandaloneModule(self, Guid, Version, DpGuid = None, DpVersion = None, Path = ''):
        if Version is None or len(Version.strip()) == 0:
            Version = 'N/A'

        if DpGuid is None or len(DpGuid.strip()) == 0:
            DpGuid = 'N/A'

        if DpVersion is None or len(DpVersion.strip()) == 0:
            DpVersion = 'N/A'

        #
        # Add module standalone information to DB.
        #
        CurrentTime = time.time()
        SqlCommand = """insert into %s values('%s', '%s', %s, '%s', '%s', '%s')""" % (self.StandaloneModTable, Guid, Version, CurrentTime, DpGuid, DpVersion, Path)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Add a package-dependency row for a module.
    #
    # @param Guid:         Module GUID
    # @param Version:      Module version
    # @param Path:         Module install path
    # @param DepexGuid:    Depended-on package GUID; empty/None stored as 'N/A'
    # @param DepexVersion: Depended-on package version; empty/None stored as 'N/A'
    #
    def AddModuleDepex(self, Guid, Version, Path, DepexGuid = None, DepexVersion = None):
        if DepexGuid is None or len(DepexGuid.strip()) == 0:
            DepexGuid = 'N/A'

        if DepexVersion is None or len(DepexVersion.strip()) == 0:
            DepexVersion = 'N/A'

        #
        # Add module depex information to DB.
        #
        SqlCommand = """insert into %s values('%s', '%s', '%s', '%s', '%s')""" % (self.ModDepexTable, Guid, Version, Path, DepexGuid, DepexVersion)
        self.Cur.execute(SqlCommand)
        self.Conn.commit()

    ## Remove every row recorded for a distribution package.
    #
    # Deletion order honors the logical references: module dependencies
    # first, then modules, then packages, then the DP's file list and the
    # DP row itself; everything is committed in one transaction at the end.
    #
    # @param DpGuid:    DP GUID
    # @param DpVersion: DP version
    #
    def RemoveDpObj(self, DpGuid, DpVersion):
        PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)

        # delete from ModDepex the standalone modules' dependency rows
        SqlCommand = """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
            (select ModuleGuid from StandaloneModInfo as B where B.DpGuid = '%s' and B.DpVersion = '%s')
            and ModDepexInfo.ModuleVersion in
            (select ModuleVersion from StandaloneModInfo as B where B.DpGuid = '%s' and B.DpVersion = '%s')
            and ModDepexInfo.InstallPath in
            (select InstallPath from StandaloneModInfo as B where B.DpGuid = '%s' and B.DpVersion = '%s') """ \
            %(DpGuid, DpVersion, DpGuid, DpVersion, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        # delete from ModDepex the dependency rows of modules owned by each
        # package of this DP
        for Pkg in PkgList:
            SqlCommand = """delete from ModDepexInfo where ModDepexInfo.ModuleGuid in
                (select ModuleGuid from ModInPkgInfo where ModInPkgInfo.PackageGuid ='%s' and ModInPkgInfo.PackageVersion = '%s')
                and ModDepexInfo.ModuleVersion in
                (select ModuleVersion from ModInPkgInfo where ModInPkgInfo.PackageGuid ='%s' and ModInPkgInfo.PackageVersion = '%s')
                and ModDepexInfo.InstallPath in
                (select InstallPath from ModInPkgInfo where ModInPkgInfo.PackageGuid ='%s' and ModInPkgInfo.PackageVersion = '%s')""" \
                % (Pkg[0], Pkg[1],Pkg[0], Pkg[1],Pkg[0], Pkg[1])

            self.Cur.execute(SqlCommand)

        # delete the standalone modules
        SqlCommand = """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.StandaloneModTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        # delete the modules owned by each package
        for Pkg in PkgList:
            SqlCommand = """delete from %s where %s.PackageGuid ='%s' and %s.PackageVersion = '%s'""" \
                % (self.ModInPkgTable, self.ModInPkgTable, Pkg[0], self.ModInPkgTable, Pkg[1])
            self.Cur.execute(SqlCommand)

        # delete packages
        SqlCommand = """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.PkgTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        # delete file list from DP
        SqlCommand = """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.DpFileListTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        # delete DP
        SqlCommand = """delete from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.DpTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        self.Conn.commit()

    ## Get a list of distribution install information.
    #
    # With no version given, all DPs with the GUID are returned.
    #
    # @param Guid:    DP GUID
    # @param Version: DP version, or None/empty for "any version"
    #
    # @retval list of (DpGuid, DpVersion, InstallTime, PkgFileName) tuples
    #
    def GetDp(self, Guid, Version):
        if Version is None or len(Version.strip()) == 0:
            Version = 'N/A'
            EdkLogger.verbose("\nGetting list of DP install information started ...")
            (DpGuid, DpVersion) = (Guid, Version)
            SqlCommand = """select * from %s where DpGuid ='%s'""" % (self.DpTable, DpGuid)
            self.Cur.execute(SqlCommand)

        else:
            EdkLogger.verbose("\nGetting DP install information started ...")
            (DpGuid, DpVersion) = (Guid, Version)
            SqlCommand = """select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.DpTable, DpGuid, DpVersion)
            self.Cur.execute(SqlCommand)

        DpList = []
        for DpInfo in self.Cur:
            DpGuid = DpInfo[0]
            DpVersion = DpInfo[1]
            InstallTime = DpInfo[2]
            PkgFileName = DpInfo[3]
            DpList.append((DpGuid, DpVersion, InstallTime, PkgFileName))

        EdkLogger.verbose("Getting DP install information ... DONE!")
        return DpList

    ## Get the list of file paths installed by a DP.
    #
    # @param Guid:    DP GUID
    # @param Version: DP version
    #
    # @retval list of file path strings
    #
    def GetDpFileList(self, Guid, Version):
        (DpGuid, DpVersion) = (Guid, Version)
        SqlCommand = """select * from %s where DpGuid ='%s' and DpVersion = '%s'""" % (self.DpFileListTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        PathList = []
        for Result in self.Cur:
            Path = Result[0]
            PathList.append(Path)

        return PathList

    ## Get a list of package install information.
    #
    # @param Guid:      Package GUID
    # @param Version:   Package version; None/empty matches any version when
    #                   a DP is given
    # @param DpGuid:    Optional owning DP GUID filter
    # @param DpVersion: Optional owning DP version filter
    #
    # @retval list of (Guid, Version, InstallTime, DpGuid, DpVersion, InstallPath)
    #
    def GetPackage(self, Guid, Version, DpGuid = '', DpVersion = ''):
        if DpVersion == '' or DpGuid == '':
            (PackageGuid, PackageVersion) = (Guid, Version)
            SqlCommand = """select * from %s where PackageGuid ='%s' and PackageVersion = '%s'""" % (self.PkgTable, PackageGuid, PackageVersion)
            self.Cur.execute(SqlCommand)

        elif Version is None or len(Version.strip()) == 0:
            SqlCommand = """select * from %s where PackageGuid ='%s'""" % (self.PkgTable, Guid)
            self.Cur.execute(SqlCommand)
        else:
            (PackageGuid, PackageVersion) = (Guid, Version)
            SqlCommand = """select * from %s where PackageGuid ='%s' and PackageVersion = '%s'
                            and DpGuid = '%s' and DpVersion = '%s'""" % (self.PkgTable, PackageGuid, PackageVersion, DpGuid, DpVersion)
            self.Cur.execute(SqlCommand)

        PkgList = []
        for PkgInfo in self.Cur:
            PkgGuid = PkgInfo[0]
            PkgVersion = PkgInfo[1]
            InstallTime = PkgInfo[2]
            InstallPath = PkgInfo[5]
            PkgList.append((PkgGuid, PkgVersion, InstallTime, DpGuid, DpVersion, InstallPath))

        return PkgList

    ## Get a list of modules installed as part of a package.
    #
    # @param Guid:       Module GUID
    # @param Version:    Module version
    # @param PkgGuid:    Optional owning package GUID filter
    # @param PkgVersion: Optional owning package version filter
    #
    # @retval list of (Guid, Version, InstallTime, PkgGuid, PkgVersion, InstallPath)
    #
    def GetModInPackage(self, Guid, Version, PkgGuid = '', PkgVersion = ''):
        if PkgVersion == '' or PkgGuid == '':
            (ModuleGuid, ModuleVersion) = (Guid, Version)
            SqlCommand = """select * from %s where ModuleGuid ='%s' and ModuleVersion = '%s'""" % (self.ModInPkgTable, ModuleGuid, ModuleVersion)
            self.Cur.execute(SqlCommand)

        else:
            (ModuleGuid, ModuleVersion) = (Guid, Version)
            SqlCommand = """select * from %s where ModuleGuid ='%s' and ModuleVersion = '%s' and PackageGuid ='%s' and PackageVersion = '%s'
                            """ % (self.ModInPkgTable, ModuleGuid, ModuleVersion, PkgGuid, PkgVersion)
            self.Cur.execute(SqlCommand)

        ModList = []
        for ModInfo in self.Cur:
            ModGuid = ModInfo[0]
            ModVersion = ModInfo[1]
            InstallTime = ModInfo[2]
            InstallPath = ModInfo[5]
            ModList.append((ModGuid, ModVersion, InstallTime, PkgGuid, PkgVersion, InstallPath))

        return ModList

    ## Get a list of standalone modules.
    #
    # @param Guid:      Module GUID
    # @param Version:   Module version
    # @param DpGuid:    Optional owning DP GUID filter
    # @param DpVersion: Optional owning DP version filter
    #
    # @retval list of (Guid, Version, InstallTime, DpGuid, DpVersion, InstallPath)
    #
    def GetStandaloneModule(self, Guid, Version, DpGuid = '', DpVersion = ''):
        if DpGuid == '':
            (ModuleGuid, ModuleVersion) = (Guid, Version)
            SqlCommand = """select * from %s where ModuleGuid ='%s' and ModuleVersion = '%s'""" % (self.StandaloneModTable, ModuleGuid, ModuleVersion)
            self.Cur.execute(SqlCommand)

        else:
            (ModuleGuid, ModuleVersion) = (Guid, Version)
            SqlCommand = """select * from %s where ModuleGuid ='%s' and ModuleVersion = '%s' and DpGuid ='%s' and DpVersion = '%s'
                            """ % (self.StandaloneModTable, ModuleGuid, ModuleVersion, DpGuid, DpVersion)
            self.Cur.execute(SqlCommand)

        ModList = []
        for ModInfo in self.Cur:
            ModGuid = ModInfo[0]
            ModVersion = ModInfo[1]
            InstallTime = ModInfo[2]
            InstallPath = ModInfo[5]
            ModList.append((ModGuid, ModVersion, InstallTime, DpGuid, DpVersion, InstallPath))

        return ModList

    ## Get the install paths of the standalone modules that came from a DP.
    #
    # @param DpGuid:    DP GUID
    # @param DpVersion: DP version
    #
    # @retval list of install path strings
    #
    def GetStandaloneModuleInstallPathListFromDp(self, DpGuid, DpVersion):
        PathList = []
        SqlCommand = """select t1.InstallPath from %s t1 where t1.DpGuid ='%s' and t1.DpVersion = '%s'
                        """ % (self.StandaloneModTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        for Result in self.Cur:
            InstallPath = Result[0]
            PathList.append(InstallPath)

        return PathList

    ## Get the packages that came from a DP.
    #
    # @param DpGuid:    DP GUID
    # @param DpVersion: DP version
    #
    # @retval list of (PkgGuid, PkgVersion, InstallPath) tuples
    #
    def GetPackageListFromDp(self, DpGuid, DpVersion):
        SqlCommand = """select * from %s where DpGuid ='%s' and DpVersion = '%s'
                        """ % (self.PkgTable, DpGuid, DpVersion)
        self.Cur.execute(SqlCommand)

        PkgList = []
        for PkgInfo in self.Cur:
            PkgGuid = PkgInfo[0]
            PkgVersion = PkgInfo[1]
            InstallPath = PkgInfo[5]
            PkgList.append((PkgGuid, PkgVersion, InstallPath))

        return PkgList

    ## Get modules (outside of this DP) that depend on its packages.
    #
    # @param DpGuid:    DP GUID
    # @param DpVersion: DP version
    #
    # @retval list of (ModGuid, ModVersion, InstallPath) tuples
    #
    def GetDpDependentModuleList(self, DpGuid, DpVersion):
        ModList = []
        PkgList = self.GetPackageListFromDp(DpGuid, DpVersion)
        # No packages in this DP means nothing can depend on them.
        # (The previous revision returned early when the list was NON-empty,
        # which made the queries below unreachable.)
        if len(PkgList) == 0:
            return ModList

        for Pkg in PkgList:
            # modules from other packages depending on this package
            # (the stray ',' before 'where' in the previous revision made
            # these statements invalid SQL)
            SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
                            from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
                            t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s' and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
                            t1.PackageGuid != '%s' and t1.PackageVersion != '%s'
                        """ % (self.ModInPkgTable, self.ModDepexTable, Pkg[0], Pkg[1], Pkg[0], Pkg[1])
            self.Cur.execute(SqlCommand)
            for ModInfo in self.Cur:
                ModGuid = ModInfo[0]
                ModVersion = ModInfo[1]
                InstallPath = ModInfo[2]
                ModList.append((ModGuid, ModVersion, InstallPath))

            # standalone modules from other DPs depending on this package
            SqlCommand = """select t1.ModuleGuid, t1.ModuleVersion, t1.InstallPath
                            from %s as t1, %s as t2 where t1.ModuleGuid = t2.ModuleGuid and
                            t1.ModuleVersion = t2.ModuleVersion and t2.DepexGuid ='%s' and (t2.DepexVersion = '%s' or t2.DepexVersion = 'N/A') and
                            t1.DpGuid != '%s' and t1.DpVersion != '%s'
                        """ % (self.StandaloneModTable, self.ModDepexTable, Pkg[0], Pkg[1], DpGuid, DpVersion)
            self.Cur.execute(SqlCommand)
            for ModInfo in self.Cur:
                ModGuid = ModInfo[0]
                ModVersion = ModInfo[1]
                InstallPath = ModInfo[2]
                ModList.append((ModGuid, ModVersion, InstallPath))

        return ModList

    ## Get the package dependencies of a module.
    #
    # @param Guid:    Module GUID
    # @param Version: Module version
    # @param Path:    Module install path
    #
    # @retval list of (DepexGuid, DepexVersion) tuples
    #
    def GetModuleDepex(self, Guid, Version, Path):
        #
        # Read module depex information from DB (read-only; no commit needed).
        #
        SqlCommand = """select * from %s where ModuleGuid ='%s' and ModuleVersion = '%s' and InstallPath ='%s'
                        """ % (self.ModDepexTable, Guid, Version, Path)
        self.Cur.execute(SqlCommand)

        DepexList = []
        for DepInfo in self.Cur:
            DepexGuid = DepInfo[3]
            DepexVersion = DepInfo[4]
            DepexList.append((DepexGuid, DepexVersion))

        return DepexList

    ## Close the cursor and the database connection.
    #
    def CloseDb(self):
        self.Cur.close()
        self.Conn.close()

    ## Convert To Sql String
    #
    # 1. Replace "'" with "''" in each item of StringList
    #
    # @param StringList: A list for strings to be converted
    #
    def __ConvertToSqlString(self, StringList):
        return map(lambda s: s.replace("'", "''") , StringList)
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Developer smoke test: open/create a database at a hardcoded path and
    # create the tables. Not used when this module is imported by the tools.
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
    DATABASE_PATH = "C://MyWork//Conf//.cache//XML.db"
    Db = IpiDatabase(DATABASE_PATH)
    Db.InitDatabase()
+
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/PackagingTool/MkPkg.py b/BaseTools/Source/Python/PackagingTool/MkPkg.py new file mode 100644 index 0000000000..660f48f05f --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/MkPkg.py @@ -0,0 +1,294 @@ +## @file
+# Create a distribution package file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import os.path
+import sys
+import glob
+import shutil
+import traceback
+import platform
+from optparse import OptionParser
+import md5
+import time
+import uuid
+
+from PackageFile import *
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.XmlParser import *
+from CommonDataClass.DistributionPackageClass import *
+from Common.DecClassObjectLight import Dec
+from Common.InfClassObjectLight import Inf
+
+from PackageFile import *
+
# Version and Copyright strings shown by the option parser's --version
# output and usage banner.
VersionNumber = "0.1"
__version__ = "%prog Version " + VersionNumber
__copyright__ = "Copyright (c) 2008, Intel Corporation All rights reserved."
+
+## Check environment variables
+#
+# Check environment variables that must be set for build. Currently they are
+#
+# WORKSPACE The directory all packages/platforms start from
+# EDK_TOOLS_PATH The directory contains all tools needed by the build
+# PATH $(EDK_TOOLS_PATH)/Bin/<sys> must be set in PATH
+#
+# If any of above environment variable is not set or has error, the build
+# will be broken.
+#
## Validate the WORKSPACE environment variable and normalize it in place.
#
# Reports a fatal error through EdkLogger when WORKSPACE is unset, points
# to a non-existent directory, or contains a space; otherwise rewrites
# os.environ["WORKSPACE"] with the normalized path.
#
def CheckEnvVariable():
    Workspace = os.environ.get("WORKSPACE")
    if Workspace is None:
        EdkLogger.error("MkPkg", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="WORKSPACE")

    Workspace = os.path.normpath(Workspace)
    if not os.path.exists(Workspace):
        EdkLogger.error("MkPkg", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData="%s" % Workspace)
    elif ' ' in Workspace:
        EdkLogger.error("MkPkg", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
                        ExtraData=Workspace)
    os.environ["WORKSPACE"] = Workspace
+
+## Parse command line options
+#
+# Using standard Python module optparse to parse command line option of this tool.
+#
+# @retval Opt A optparse.Values object containing the parsed options
+# @retval Args Target of build command
+#
## Build and run the command-line parser for MkPkg.
#
# Registers all options from a declarative table, parses sys.argv, and
# performs the basic sanity checks (at least one -m/-p given; template
# file, when given, exists). Fatal problems are reported via EdkLogger.
#
# @retval Opt  An optparse.Values object containing the parsed options
#
def MyOptionParser():
    UsageString = "%prog -m <module_file> -p <package_file> [-o distribution_file] " + \
                  "[-x xml-file-header] [-t tools-directory] [-f misc-files] [-q | -v] [-h]"

    Parser = OptionParser(description=__copyright__,version=__version__,prog="MkPkg",usage=UsageString)

    # (short flag, long flag or None, add_option keyword arguments)
    OptionTable = [
        ("-?", None,
         {"action": "help", "help": "show this help message and exit"}),
        ("-o", "--output-file",
         {"action": "store", "type": "string", "dest": "DistributionFile",
          "help": "Specify the distribution file to be created."}),
        ("-f", "--misc-files",
         {"action": "append", "type": "string", "dest": "MiscFiles",
          "help": "Specify any misc files."}),
        ("-x", "--xml-file-header",
         {"action": "store", "type": None, "dest": "TemplateFile",
          "help": "Specify the xml file which includes header information for creating the distribution file."}),
        ("-t", "--tools-directory",
         {"action": "store", "type": None, "dest": "ToolsDir",
          "help": "Specify the directory name of tools."}),
        ("-m", "--module",
         {"action": "append", "type": "string", "dest": "ModuleFileList",
          "help": "The inf file of module to be distributed standalone."}),
        ("-p", "--package",
         {"action": "append", "type": "string", "dest": "PackageFileList",
          "help": "The dec file of package to be distributed."}),
        ("-q", "--quiet",
         {"action": "store_const", "dest": "LogLevel", "const": EdkLogger.QUIET,
          "help": "Disable all messages except FATAL ERRORS."}),
        ("-v", "--verbose",
         {"action": "store_const", "dest": "LogLevel", "const": EdkLogger.VERBOSE,
          "help": "Turn on verbose output"}),
        ("-d", "--debug",
         {"action": "store", "type": "int", "dest": "LogLevel",
          "help": "Enable debug messages at specified level."}),
    ]
    for ShortFlag, LongFlag, Keywords in OptionTable:
        if LongFlag:
            Parser.add_option(ShortFlag, LongFlag, **Keywords)
        else:
            Parser.add_option(ShortFlag, **Keywords)

    Parser.set_defaults(LogLevel=EdkLogger.INFO)

    (Opt, Args)=Parser.parse_args()

    # error check
    if not Opt.ModuleFileList and not Opt.PackageFileList:
        EdkLogger.error("MkPkg", OPTION_NOT_SUPPORTED, ExtraData="At least one package file or module file must be specified")
    if Opt.TemplateFile and not os.path.exists(Opt.TemplateFile):
        EdkLogger.error(
                        "\nMkPkg",
                        FILE_NOT_FOUND,
                        "Template file [%s] not found" % Opt.TemplateFile
                        )
    return Opt
+
+## Tool entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Options = MyOptionParser()
+ try:
+ if Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.SetLevel(Options.LogLevel + 1)
+ else:
+ EdkLogger.SetLevel(Options.LogLevel)
+
+ CheckEnvVariable()
+ WorkspaceDir = os.environ["WORKSPACE"]
+
+ # Init DistributionFile
+ if not Options.DistributionFile:
+ Options.DistributionFile = "DistributionPackage.zip"
+
+ # Check Tools Dir
+ if Options.ToolsDir:
+ if not os.path.isdir(os.path.normpath(os.path.join(WorkspaceDir, Options.ToolsDir))):
+ EdkLogger.error(
+ "\nMkPkg",
+ FILE_NOT_FOUND,
+ "Tools directory [%s] not found" % Options.ToolsDir
+ )
+
+ # Check misc files
+ if Options.MiscFiles:
+ for Item in Options.MiscFiles:
+ FullPath = os.path.normpath(os.path.join(WorkspaceDir, Item))
+ if not os.path.isfile(FullPath):
+ EdkLogger.error(
+ "\nMkPkg",
+ FILE_NOT_FOUND,
+ "Misc file [%s] not found" % Item
+ )
+
+ #Check package file existing and valid
+ if Options.PackageFileList:
+ for Item in Options.PackageFileList:
+ (Name, Ext) = os.path.splitext(Item)
+ if Ext.upper() != '.DEC':
+ EdkLogger.error(
+ "\nMkPkg",
+ OPTION_VALUE_INVALID,
+ "[%s] is not a valid package name" % Item
+ )
+ Path = os.path.normpath(os.path.join(WorkspaceDir, Item))
+ if not os.path.exists(Path):
+ EdkLogger.error(
+ "\nMkPkg",
+ FILE_NOT_FOUND,
+ "[%s] not found" % Item
+ )
+ #Check module file existing and valid
+ if Options.ModuleFileList:
+ for Item in Options.ModuleFileList:
+ (Name, Ext) = os.path.splitext(Item)
+ if Ext.upper() != '.INF':
+ EdkLogger.error(
+ "\nMkPkg",
+ OPTION_VALUE_INVALID,
+ "[%s] is not a valid module name" % Item
+ )
+ Path = os.path.normpath(os.path.join(WorkspaceDir, Item))
+ if not os.path.exists(Path):
+ EdkLogger.error(
+ "\nMkPkg",
+ FILE_NOT_FOUND,
+ "[%s] not found" % Item
+ )
+
+ ContentFile = PackageFile("content.zip", "w")
+ DistPkg = DistributionPackageClass()
+ DistPkg.GetDistributionPackage(WorkspaceDir, Options.PackageFileList, Options.ModuleFileList)
+ DistPkgXml = DistributionPackageXml()
+ for Item in DistPkg.PackageSurfaceArea:
+ ContentFile.Pack(os.path.dirname(os.path.normpath(os.path.join(WorkspaceDir,Item[2]))))
+ for Item in DistPkg.ModuleSurfaceArea:
+ ContentFile.Pack(os.path.dirname(os.path.normpath(os.path.join(WorkspaceDir,Item[2]))))
+
+ # Add tools files and information
+ if Options.ToolsDir:
+ ToolsFiles = MiscFileClass()
+ ToolsRoot = os.path.normpath(os.path.join(WorkspaceDir, Options.ToolsDir))
+ ContentFile.Pack(ToolsRoot)
+ ToolsFileList = GetFiles(ToolsRoot, ['CVS', '.svn'])
+ for Item in ToolsFileList:
+ OriPath = Item[len(WorkspaceDir)+1:]
+ FileObj = FileClass()
+ FileObj.Filename = OriPath
+ (Name, Ext) = os.path.splitext(OriPath)
+ if Ext.upper() in ['EXE', 'COM', 'EFI']:
+ FileObj.Executable = 'True'
+ ToolsFiles.Files.append(FileObj)
+ DistPkg.Tools = ToolsFiles
+
+ # Add misc files and information
+ if Options.MiscFiles:
+ MiscFiles = MiscFileClass()
+ for Item in Options.MiscFiles:
+ ContentFile.PackFile(Item)
+ FileObj = FileClass()
+ FileObj.Filename = Item
+ (Name, Ext) = os.path.splitext(Item)
+ if Ext.upper() in ['EXE', 'COM', 'EFI']:
+ FileObj.Executable = 'True'
+ MiscFiles.Files.append(FileObj)
+ DistPkg.MiscellaneousFiles = MiscFiles
+
+ print "Compressing Distribution Package File ..."
+ ContentFile.Close()
+
+ # Add temp distribution header
+ if Options.TemplateFile:
+ TempXML = DistributionPackageXml()
+ DistPkg.Header = TempXML.FromXml(Options.TemplateFile).Header
+ # Add init dp information
+ else:
+ DistPkg.Header.Name = 'Distribution Package'
+ DistPkg.Header.Guid = str(uuid.uuid4())
+ DistPkg.Header.Version = '1.0'
+
+ # Add Md5Sigature
+ Md5Sigature = md5.new(open(str(ContentFile)).read())
+ DistPkg.Header.Signature = Md5Sigature.hexdigest()
+ # Add current Date
+ DistPkg.Header.Date = str(time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime()))
+
+ # Finish final dp file
+ DistPkgFile = PackageFile(Options.DistributionFile, "w")
+ DistPkgFile.PackFile(str(ContentFile))
+ DistPkgFile.PackData(DistPkgXml.ToXml(DistPkg), "dist.pkg")
+ DistPkgFile.Close()
+ print "DONE"
+
+ except FatalError, X:
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = X.args[0]
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ except:
+ EdkLogger.error(
+ "\nMkPkg",
+ CODE_ERROR,
+ "Unknown fatal error when creating [%s]" % Options.DistributionFile,
+ ExtraData="\n(Please send email to dev@buildtools.tianocore.org for help, attaching following call stack trace!)\n",
+ RaiseError=False
+ )
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ Progressor.Abort()
+
+if __name__ == '__main__':
+ sys.exit(Main())
+
diff --git a/BaseTools/Source/Python/PackagingTool/PackageFile.py b/BaseTools/Source/Python/PackagingTool/PackageFile.py new file mode 100644 index 0000000000..12544927c9 --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/PackageFile.py @@ -0,0 +1,160 @@ +## @file
+#
+# PackageFile class represents the zip file of a distribution package.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import sys
+import zipfile
+import tempfile
+
+from Common import EdkLogger
+from Common.Misc import *
+from Common.BuildToolError import *
+
+class PackageFile:
+ def __init__(self, FileName, Mode="r"):
+ self._FileName = FileName
+ if Mode not in ["r", "w", "a"]:
+ Mode = "r"
+ try:
+ self._ZipFile = zipfile.ZipFile(FileName, Mode, zipfile.ZIP_DEFLATED)
+ self._Files = {}
+ for F in self._ZipFile.namelist():
+ self._Files[os.path.normpath(F)] = F
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_OPEN_FAILURE,
+ ExtraData="%s (%s)" % (FileName, str(X)))
+
+ BadFile = self._ZipFile.testzip()
+ if BadFile != None:
+ EdkLogger.error("PackagingTool", FILE_CHECKSUM_FAILURE,
+ ExtraData="[%s] in %s" % (BadFile, FileName))
+
+ def __str__(self):
+ return self._FileName
+
+ def Unpack(self, To):
+ for F in self._ZipFile.namelist():
+ ToFile = os.path.normpath(os.path.join(To, F))
+ print F, "->", ToFile
+ self.Extract(F, ToFile)
+
+ def UnpackFile(self, File, ToFile):
+ File = File.replace('\\', '/')
+ if File in self._ZipFile.namelist():
+ print File, "->", ToFile
+ self.Extract(File, ToFile)
+
+ return ToFile
+
+ return ''
+
+ def Extract(self, Which, To):
+ Which = os.path.normpath(Which)
+ if Which not in self._Files:
+ EdkLogger.error("PackagingTool", FILE_NOT_FOUND,
+ ExtraData="[%s] in %s" % (Which, self._FileName))
+ try:
+ FileContent = self._ZipFile.read(self._Files[Which])
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_DECOMPRESS_FAILURE,
+ ExtraData="[%s] in %s (%s)" % (Which, self._FileName, str(X)))
+ try:
+ CreateDirectory(os.path.dirname(To))
+ ToFile = open(To, "wb")
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_OPEN_FAILURE,
+ ExtraData="%s (%s)" % (To, str(X)))
+
+ try:
+ ToFile.write(FileContent)
+ ToFile.close()
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_WRITE_FAILURE,
+ ExtraData="%s (%s)" % (To, str(X)))
+
+ def Remove(self, Files):
+ TmpDir = os.path.join(tempfile.gettempdir(), ".packaging")
+ if os.path.exists(TmpDir):
+ RemoveDirectory(TmpDir, True)
+
+ os.mkdir(TmpDir)
+ self.Unpack(TmpDir)
+ for F in Files:
+ F = os.path.normpath(F)
+ if F not in self._Files:
+ EdkLogger.error("PackagingTool", FILE_NOT_FOUND,
+ ExtraData="%s is not in %s!" % (F, self._FileName))
+ #os.remove(os.path.join(TmpDir, F)) # no need to really remove file
+ self._Files.pop(F)
+ self._ZipFile.close()
+
+ self._ZipFile = zipfile.ZipFile(self._FileName, "w", zipfile.ZIP_DEFLATED)
+ Cwd = os.getcwd()
+ os.chdir(TmpDir)
+ self.PackFiles(self._Files)
+ os.chdir(Cwd)
+ RemoveDirectory(TmpDir, True)
+
+ def Pack(self, Top):
+ if not os.path.isdir(Top):
+ EdkLogger.error("PackagingTool", FILE_UNKNOWN_ERROR, "%s is not a directory!" %Top)
+
+ FilesToPack = []
+ ParentDir = os.path.dirname(Top)
+ BaseDir = os.path.basename(Top)
+ Cwd = os.getcwd()
+ os.chdir(ParentDir)
+ for Root, Dirs, Files in os.walk(BaseDir):
+ if 'CVS' in Dirs:
+ Dirs.remove('CVS')
+ if '.svn' in Dirs:
+ Dirs.remove('.svn')
+ for F in Files:
+ FilesToPack.append(os.path.join(Root, F))
+ self.PackFiles(FilesToPack)
+ os.chdir(Cwd)
+
+ def PackFiles(self, Files):
+ for F in Files:
+ try:
+ print "packing ...", F
+ self._ZipFile.write(F)
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_COMPRESS_FAILURE,
+ ExtraData="%s (%s)" % (F, str(X)))
+
+ def PackFile(self, File, ArcName=None):
+ try:
+ print "packing ...", File
+ self._ZipFile.write(File, ArcName)
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_COMPRESS_FAILURE,
+ ExtraData="%s (%s)" % (File, str(X)))
+
+ def PackData(self, Data, ArcName):
+ try:
+ self._ZipFile.writestr(ArcName, Data)
+ except BaseException, X:
+ EdkLogger.error("PackagingTool", FILE_COMPRESS_FAILURE,
+ ExtraData="%s (%s)" % (ArcName, str(X)))
+
+ def Close(self):
+ self._ZipFile.close()
+
if __name__ == '__main__':
    # PackageFile is a library module; nothing to do when run standalone.
    pass
+
diff --git a/BaseTools/Source/Python/PackagingTool/RmPkg.py b/BaseTools/Source/Python/PackagingTool/RmPkg.py new file mode 100644 index 0000000000..e7eedd0776 --- /dev/null +++ b/BaseTools/Source/Python/PackagingTool/RmPkg.py @@ -0,0 +1,218 @@ +## @file
+# Remove an installed distribution package.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import sys
+import traceback
+import platform
+from optparse import OptionParser
+
+import Common.EdkLogger as EdkLogger
+from Common.BuildToolError import *
+from Common.Misc import *
+from Common.XmlParser import *
+
+from IpiDb import *
+from DependencyRules import *
+
+# Version and Copyright
+VersionNumber = "0.1"
+__version__ = "%prog Version " + VersionNumber
+__copyright__ = "Copyright (c) 2008, Intel Corporation All rights reserved."
+
## Check environment variables
#
# WORKSPACE must be set, must point to an existing directory and must not
# contain any space character.  On success the normalized path is written
# back into os.environ["WORKSPACE"]; any violation is reported as a fatal
# error through EdkLogger.
#
def CheckEnvVariable():
    if "WORKSPACE" not in os.environ:
        EdkLogger.error("RmPkg", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
                        ExtraData="WORKSPACE")

    WorkspacePath = os.path.normpath(os.environ["WORKSPACE"])
    if not os.path.exists(WorkspacePath):
        EdkLogger.error("RmPkg", FILE_NOT_FOUND, "WORKSPACE doesn't exist",
                        ExtraData="%s" % WorkspacePath)
    elif ' ' in WorkspacePath:
        EdkLogger.error("RmPkg", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path",
                        ExtraData=WorkspacePath)
    # Store the normalized form so every later consumer sees the same path
    os.environ["WORKSPACE"] = WorkspacePath
+
## Parse command line options
#
# Use the standard Python optparse module to process the command line.
# Positional arguments are accepted by the parser but ignored by this tool.
#
# @retval Opt   An optparse.Values object containing the parsed options
#
def MyOptionParser():
    UsageString = "%prog -g <guid> -n <version> [-y] [-q | -v] [-h]"

    Parser = OptionParser(description=__copyright__, version=__version__, prog="RmPkg", usage=UsageString)
    Parser.add_option("-?", action="help", help="show this help message and exit")
    # NOTE(review): a "-f/--force" option was sketched here historically but
    # never enabled; kept as a note only.
    Parser.add_option("-y", "--yes", action="store_true", dest="Yes",
                      help="Not asking for confirmation when deleting files.")
    Parser.add_option("-n", "--package-version", action="store", type="string", dest="PackageVersion",
                      help="The version of distribution package to be removed.")
    Parser.add_option("-g", "--package-guid", action="store", type="string", dest="PackageGuid",
                      help="The GUID of distribution package to be removed.")
    Parser.add_option("-q", "--quiet", action="store_const", dest="LogLevel", const=EdkLogger.QUIET,
                      help="Disable all messages except FATAL ERRORS.")
    Parser.add_option("-v", "--verbose", action="store_const", dest="LogLevel", const=EdkLogger.VERBOSE,
                      help="Turn on verbose output")
    Parser.add_option("-d", "--debug", action="store", type="int", dest="LogLevel",
                      help="Enable debug messages at specified level.")
    Parser.set_defaults(LogLevel=EdkLogger.INFO)

    Opt = Parser.parse_args()[0]
    return Opt
+
## Remove all empty dirs under the path, including the path itself
#
# Walks the tree bottom-up in a single pass, so a directory that becomes
# empty once its empty children are removed is itself removed.  (The old
# implementation walked top-down and recursively re-walked subtrees, doing
# redundant work and relying on os.walk silently ignoring directories it
# had already deleted.)
#
# @param Path   Root directory to prune; a non-directory Path is a no-op
#               and a non-empty directory is left in place
#
def RemoveEmptyDirs(Path):
    if not os.path.isdir(Path):
        return
    # topdown=False yields children before their parents
    for Root, Dirs, Files in os.walk(Path, topdown=False):
        # Dirs/Files were captured before children were deleted, so
        # re-check the live directory contents
        if not os.listdir(Root):
            os.rmdir(Root)
+
+
+## Tool entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
+def Main():
+ EdkLogger.Initialize()
+ Options = MyOptionParser()
+ try:
+ if not Options.PackageGuid and not Options.PackageVersion:
+ EdkLogger.error("RmPkg", OPTION_MISSING, ExtraData="The GUID and Version of distribution package must be specified")
+
+ if Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.SetLevel(Options.LogLevel + 1)
+ else:
+ EdkLogger.SetLevel(Options.LogLevel)
+
+ CheckEnvVariable()
+ WorkspaceDir = os.environ["WORKSPACE"]
+
+ # Prepare check dependency
+ Db = IpiDatabase(os.path.normpath(os.path.join(WorkspaceDir, "Conf/DistributionPackageDatabase.db")))
+ Db.InitDatabase()
+ Dep = DependencyRules(Db)
+
+ Guid = Options.PackageGuid
+ Version = Options.PackageVersion
+
+ # Check Dp existing
+ if not Dep.CheckDpExists(Guid, Version):
+ EdkLogger.error("RmPkg", UNKNOWN_ERROR, "This distribution package are not installed!")
+
+ # Check Dp depex
+ if not Dep.CheckDpDepexForRemove(Guid, Version):
+ print "Some packages/modules are depending on this distribution package, do you really want to remove it?"
+ print "Press Y to delete all files or press other keys to quit:"
+ Input = Input = sys.stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != 'Y':
+ EdkLogger.error("RmPkg", UNKNOWN_ERROR, "User interrupt")
+
+ # Remove all files
+ if not Options.Yes:
+ print "All files of the distribution package will be removed, do you want to continue?"
+ print "Press Y to remove all files or press other keys to quit:"
+ Input = Input = sys.stdin.readline()
+ Input = Input.replace('\r', '').replace('\n', '')
+ if Input.upper() != 'Y':
+ EdkLogger.error("RmPkg", UNKNOWN_ERROR, "User interrupt")
+
+ # Remove all files
+ MissingFileList = []
+ for Item in Db.GetDpFileList(Guid, Version):
+ if os.path.isfile(Item):
+ print "Removing file [%s] ..." % Item
+ os.remove(Item)
+ else:
+ MissingFileList.append(Item)
+
+ # Remove all empty dirs of package
+ for Item in Db.GetPackageListFromDp(Guid, Version):
+ Dir = os.path.dirname(Item[2])
+ RemoveEmptyDirs(Dir)
+
+ # Remove all empty dirs of module
+ for Item in Db.GetStandaloneModuleInstallPathListFromDp(Guid, Version):
+ Dir = os.path.dirname(Item)
+ RemoveEmptyDirs(Dir)
+
+ # update database
+ EdkLogger.quiet("Update Distribution Package Database ...")
+ Db.RemoveDpObj(Guid, Version)
+ EdkLogger.quiet("DONE")
+
+ except FatalError, X:
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = X.args[0]
+ except KeyboardInterrupt:
+ ReturnCode = ABORT_ERROR
+ if Options and Options.LogLevel < EdkLogger.DEBUG_9:
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ except:
+ EdkLogger.error(
+ "\nRmPkg",
+ CODE_ERROR,
+ "Unknown fatal error when removing package",
+ ExtraData="\n(Please send email to dev@buildtools.tianocore.org for help, attaching following call stack trace!)\n",
+ RaiseError=False
+ )
+ EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc())
+ ReturnCode = CODE_ERROR
+ finally:
+ Progressor.Abort()
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/BaseTools/Source/Python/Table/Table.py b/BaseTools/Source/Python/Table/Table.py new file mode 100644 index 0000000000..9a9657a4b3 --- /dev/null +++ b/BaseTools/Source/Python/Table/Table.py @@ -0,0 +1,120 @@ +## @file
+# This file is used to create/update/query/erase a common table
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+
## Table
#
# Base class wrapping one database table accessed through an SQLite-style
# cursor.  Subclasses assign self.Table and provide their concrete schema
# in Create()/Insert().
#
# @param Cursor: Cursor of the database connection
#
class Table(object):
    def __init__(self, Cursor):
        self.Cur = Cursor   # cursor used by every operation below
        self.Table = ''     # table name, filled in by subclasses
        self.ID = 0         # last ID handed out by GenerateID()

    ## Execute the given CREATE statement and reset the ID counter
    def Create(self, SqlCommand):
        self.Cur.execute(SqlCommand)
        self.ID = 0
        EdkLogger.verbose(SqlCommand + " ... DONE!")

    ## Insert a record (simply delegates to Exec)
    def Insert(self, SqlCommand):
        self.Exec(SqlCommand)

    ## Dump every record of the table to the verbose log
    def Query(self):
        EdkLogger.verbose("\nQuery tabel %s started ..." % self.Table)
        self.Cur.execute("""select * from %s""" % self.Table)
        for Record in self.Cur:
            EdkLogger.verbose(str(Record))
        EdkLogger.verbose("*** Total %s records in table %s ***" % (self.GetCount(), self.Table))
        EdkLogger.verbose("Query tabel %s DONE!" % self.Table)

    ## Drop the table if it exists
    def Drop(self):
        self.Cur.execute("""drop table IF EXISTS %s""" % self.Table)
        EdkLogger.verbose("Drop tabel %s ... DONE!" % self.Table)

    ## Count all records of the table
    #
    # @retval Count: Total count of all records
    #
    def GetCount(self):
        self.Cur.execute("""select count(ID) from %s""" % self.Table)
        for Row in self.Cur:
            return Row[0]

    ## Generate an ID if the input ID is -1
    #
    # NOTE(review): the current counter is returned in *both* cases; a
    # non -1 input ID is never echoed back -- confirm callers rely on this.
    #
    # @param ID: Input ID (-1 requests a fresh one)
    # @retval ID: The current (possibly just incremented) counter value
    #
    def GenerateID(self, ID):
        if ID == -1:
            self.ID += 1
        return self.ID

    ## Seed the ID counter from the number of existing records
    def InitID(self):
        self.ID = self.GetCount()

    ## Execute an SQL command and return the fetched result set
    #
    # @param SqlCommand: The SqlCommand to be executed
    # @retval RecordSet: The result after execution
    #
    def Exec(self, SqlCommand):
        EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
        self.Cur.execute(SqlCommand)
        RecordSet = self.Cur.fetchall()
        EdkLogger.debug(4, "RecordSet: %s" % RecordSet)
        return RecordSet
diff --git a/BaseTools/Source/Python/Table/TableDataModel.py b/BaseTools/Source/Python/Table/TableDataModel.py new file mode 100644 index 0000000000..1e5fe47af1 --- /dev/null +++ b/BaseTools/Source/Python/Table/TableDataModel.py @@ -0,0 +1,95 @@ +## @file
+# This file is used to create/update/query/erase table for data models
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table import Table
+from Common.String import ConvertToSqlString
+
## TableDataModel
#
# Table holding one row per predefined data-model type:
# (ID, CrossIndex, Name, Description).
#
class TableDataModel(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'DataModel'

    ## Create table DataModel if it does not already exist
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       CrossIndex INTEGER NOT NULL,
                                                       Name VARCHAR NOT NULL,
                                                       Description VARCHAR
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert one model type
    #
    # @param CrossIndex:   CrossIndex of the ModelType
    # @param Name:         Name of the ModelType
    # @param Description:  Description of the ModelType
    # @retval ID:          ID of the newly inserted row
    #
    def Insert(self, CrossIndex, Name, Description):
        self.ID += 1
        (Name, Description) = ConvertToSqlString((Name, Description))
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s')""" % (self.Table, self.ID, CrossIndex, Name, Description)
        Table.Insert(self, SqlCommand)
        return self.ID

    ## Populate the table with all predefined models from DataClass.MODEL_LIST
    def InitTable(self):
        EdkLogger.verbose("\nInitialize table DataModel started ...")
        for Item in DataClass.MODEL_LIST:
            # Item[0] is the model name (it doubles as the description),
            # Item[1] its cross index
            self.Insert(Item[1], Item[0], Item[0])
        EdkLogger.verbose("Initialize table DataModel ... DONE!")

    ## Look up a model's cross index by its name
    #
    # @param ModelName:   Name of the model
    # @retval CrossIndex: CrossIndex of the model, or -1 when not found
    #
    def GetCrossIndex(self, ModelName):
        CrossIndex = -1
        self.Cur.execute("""select CrossIndex from DataModel where name = '""" + ModelName + """'""")
        for Row in self.Cur:
            CrossIndex = Row[0]
        return CrossIndex
diff --git a/BaseTools/Source/Python/Table/TableDec.py b/BaseTools/Source/Python/Table/TableDec.py new file mode 100644 index 0000000000..f570fd1d10 --- /dev/null +++ b/BaseTools/Source/Python/Table/TableDec.py @@ -0,0 +1,108 @@ +## @file
+# This file is used to create/update/query/erase a table for DEC data
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table import Table
+from Common.String import ConvertToSqlString
+
## TableDec
#
# Table storing the parsed items of a DEC file: model type, up to three
# values, architecture, owning item/file, source location and an Enabled
# flag.
#
class TableDec(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Dec'

    ## Create table Dec if it does not already exist
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       Model INTEGER NOT NULL,
                                                       Value1 VARCHAR NOT NULL,
                                                       Value2 VARCHAR,
                                                       Value3 VARCHAR,
                                                       Arch VarCHAR,
                                                       BelongsToItem SINGLE NOT NULL,
                                                       BelongsToFile SINGLE NOT NULL,
                                                       StartLine INTEGER NOT NULL,
                                                       StartColumn INTEGER NOT NULL,
                                                       EndLine INTEGER NOT NULL,
                                                       EndColumn INTEGER NOT NULL,
                                                       Enabled INTEGER DEFAULT 0
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert a record into table Dec
    #
    # NOTE(review): Value4 and Value5 are accepted but never stored -- the
    # INSERT below only covers the 13 declared columns.  The signature is
    # kept as-is for compatibility with existing callers; confirm whether
    # the extra values were meant to be persisted.
    #
    # @retval ID: ID of the newly inserted row
    #
    def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        (Value1, Value2, Value3, Arch) = ConvertToSqlString((Value1, Value2, Value3, Arch))
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
                     % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, SqlCommand)
        return self.ID

    ## Query all non-disabled records of the given model type
    #
    # @param Model: The Model of Record
    # @retval: A record set of all found records
    #
    def Query(self, Model):
        SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                        where Model = %s
                        and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
        self.Cur.execute(SqlCommand)
        return self.Cur.fetchall()
diff --git a/BaseTools/Source/Python/Table/TableDsc.py b/BaseTools/Source/Python/Table/TableDsc.py new file mode 100644 index 0000000000..62608a061a --- /dev/null +++ b/BaseTools/Source/Python/Table/TableDsc.py @@ -0,0 +1,108 @@ +## @file
+# This file is used to create/update/query/erase a table for DSC data
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table import Table
+from Common.String import ConvertToSqlString
+
## TableDsc
#
# Table storing the parsed items of a DSC file: model type, up to three
# values, architecture, owning item/file, source location and an Enabled
# flag.
#
class TableDsc(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Dsc'

    ## Create table Dsc if it does not already exist
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       Model INTEGER NOT NULL,
                                                       Value1 VARCHAR NOT NULL,
                                                       Value2 VARCHAR,
                                                       Value3 VARCHAR,
                                                       Arch VarCHAR,
                                                       BelongsToItem SINGLE NOT NULL,
                                                       BelongsToFile SINGLE NOT NULL,
                                                       StartLine INTEGER NOT NULL,
                                                       StartColumn INTEGER NOT NULL,
                                                       EndLine INTEGER NOT NULL,
                                                       EndColumn INTEGER NOT NULL,
                                                       Enabled INTEGER DEFAULT 0
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert a record into table Dsc
    #
    # @param Model:          Model of the item
    # @param Value1-Value3:  Values of the item
    # @param Arch:           Arch of the item
    # @param BelongsToItem:  The item this one belongs to
    # @param BelongsToFile:  The dsc file this item belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source location
    # @param Enabled:        Whether this item is enabled
    # @retval ID:            ID of the newly inserted row
    #
    def Insert(self, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        (Value1, Value2, Value3, Arch) = ConvertToSqlString((Value1, Value2, Value3, Arch))
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" \
                     % (self.Table, self.ID, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
        Table.Insert(self, SqlCommand)
        return self.ID

    ## Query all non-disabled records of the given model type
    #
    # @param Model: The Model of Record
    # @retval: A record set of all found records
    #
    def Query(self, Model):
        SqlCommand = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                        where Model = %s
                        and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % SqlCommand)
        self.Cur.execute(SqlCommand)
        return self.Cur.fetchall()
diff --git a/BaseTools/Source/Python/Table/TableEotReport.py b/BaseTools/Source/Python/Table/TableEotReport.py new file mode 100644 index 0000000000..cdae3b2e39 --- /dev/null +++ b/BaseTools/Source/Python/Table/TableEotReport.py @@ -0,0 +1,76 @@ +## @file
+# This file is used to create/update/query/erase table for ECC reports
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import os, time
+from Table import Table
+from Common.String import ConvertToSqlString2
+import EotToolError as EotToolError
+import EotGlobalData as EotGlobalData
+
+## TableReport
+#
+# This class defined a table used for data model
+#
+# @param object: Inherited from object class
+#
+#
## TableEotReport
#
# Table holding the EOT (Execution Order Tool) report entries.
#
# @param Table: Inherited from Table class
#
class TableEotReport(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Report'

    ## Create table
    #
    # Create the 'Report' table: one row per reported item, linking a module,
    # a source file, the item itself, the related GUID and the function the
    # item belongs to.
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       ModuleID INTEGER DEFAULT -1,
                                                       ModuleName TEXT DEFAULT '',
                                                       ModuleGuid TEXT DEFAULT '',
                                                       SourceFileID INTEGER DEFAULT -1,
                                                       SourceFileFullPath TEXT DEFAULT '',
                                                       ItemName TEXT DEFAULT '',
                                                       ItemType TEXT DEFAULT '',
                                                       ItemMode TEXT DEFAULT '',
                                                       GuidName TEXT DEFAULT '',
                                                       GuidMacro TEXT DEFAULT '',
                                                       GuidValue TEXT DEFAULT '',
                                                       BelongsToFunction TEXT DEFAULT '',
                                                       Enabled INTEGER DEFAULT 0
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert one report record.
    #
    # NOTE(review): the string arguments are interpolated without escaping
    # (ConvertToSqlString2 is imported but not used here) — a single quote in
    # e.g. ModuleName would break the statement; confirm inputs are trusted.
    #
    # @retval None (the base Table.Insert executes the statement)
    #
    def Insert(self, ModuleID = -1, ModuleName = '', ModuleGuid = '', SourceFileID = -1, SourceFileFullPath = '', \
               ItemName = '', ItemType = '', ItemMode = '', GuidName = '', GuidMacro = '', GuidValue = '', BelongsToFunction = '', Enabled = 0):
        self.ID = self.ID + 1
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', %s)""" \
                     % (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
                        ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
        Table.Insert(self, SqlCommand)

    ## Get the largest ID currently in the table
    #
    # @retval The maximum ID, or None when the table is empty
    #
    def GetMaxID(self):
        SqlCommand = """select max(ID) from %s""" % self.Table
        self.Cur.execute(SqlCommand)
        # 'select max(ID)' always yields exactly one row (value NULL/None when
        # the table is empty), so fetchone() replaces the original
        # loop-with-return idiom with identical behavior.
        return self.Cur.fetchone()[0]
\ No newline at end of file diff --git a/BaseTools/Source/Python/Table/TableFdf.py b/BaseTools/Source/Python/Table/TableFdf.py new file mode 100644 index 0000000000..317bd4149b --- /dev/null +++ b/BaseTools/Source/Python/Table/TableFdf.py @@ -0,0 +1,108 @@ +## @file
+# This file is used to create/update/query/erase table for fdf datas
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table import Table
+from Common.String import ConvertToSqlString
+
+## TableFdf
+#
+# This class defined a table used for data model
+#
+# @param object: Inherited from object class
+#
+#
## TableFdf
#
# Table holding the parsed items of an FDF file in the data model.
#
# @param Table: Inherited from Table class
#
class TableFdf(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Fdf'

    ## Create table
    #
    # Build the 'Fdf' table: one row per FDF item with its model, up to three
    # values, the arch, ownership links (item and file), the source-location
    # span, and an enable flag.
    #
    def Create(self):
        Sql = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Arch VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Sql)

    ## Insert table
    #
    # Insert one FDF item record; string columns are escaped first.
    #
    # @param Model:          Model of the Fdf item
    # @param Value1/2/3:     Values of the Fdf item
    # @param Arch:           Arch of the Fdf item
    # @param BelongsToItem:  The item this one belongs to
    # @param BelongsToFile:  The fdf file this item belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source span
    # @param Enabled:        Whether this item is enabled
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Arch = ConvertToSqlString((Value1, Value2, Value3, Arch))
        Sql = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" % (
            self.Table, self.ID, Model, Value1, Value2, Value3, Arch,
            BelongsToItem, BelongsToFile, StartLine, StartColumn,
            EndLine, EndColumn, Enabled)
        Table.Insert(self, Sql)
        return self.ID

    ## Query table
    #
    # Fetch every enabled record of the given model.
    #
    # @param Model: The Model of Record
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model):
        Sql = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Sql)
        self.Cur.execute(Sql)
        return self.Cur.fetchall()
diff --git a/BaseTools/Source/Python/Table/TableFile.py b/BaseTools/Source/Python/Table/TableFile.py new file mode 100644 index 0000000000..9be64942ec --- /dev/null +++ b/BaseTools/Source/Python/Table/TableFile.py @@ -0,0 +1,91 @@ +## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from Table import Table
+from Common.String import ConvertToSqlString
+import os
+from CommonDataClass.DataClass import FileClass
+
+## TableFile
+#
+# This class defined a table used for file
+#
+# @param object: Inherited from object class
+#
## TableFile
#
# Table holding one row per source file known to the database.
#
# @param Table: Inherited from Table class
#
class TableFile(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'File'

    ## Create table
    #
    # Create the 'File' table: name, extension, directory, full path, file
    # model and modification timestamp.
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       Name VARCHAR NOT NULL,
                                                       ExtName VARCHAR,
                                                       Path VARCHAR,
                                                       FullPath VARCHAR NOT NULL,
                                                       Model INTEGER DEFAULT 0,
                                                       TimeStamp VARCHAR NOT NULL
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table File; string columns are escaped first.
    #
    # @param Name:      Name of the File
    # @param ExtName:   Extension of the File
    # @param Path:      Directory part of the File
    # @param FullPath:  Full path of the File
    # @param Model:     Model of the File
    # @param TimeStamp: Modification timestamp of the File
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
        self.ID = self.ID + 1
        (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
        SqlCommand = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, '%s')""" \
                     % (self.Table, self.ID, Name, ExtName, Path, FullPath, Model, TimeStamp)
        Table.Insert(self, SqlCommand)

        return self.ID

    ## InsertFile
    #
    # Split a file path into its components and insert it as one File record.
    #
    # @param FileFullPath: The full path of the file
    # @param Model:        The model of the file
    #
    # @retval FileID: The ID after the record is inserted
    #
    def InsertFile(self, FileFullPath, Model):
        (Filepath, Name) = os.path.split(FileFullPath)
        # Only the extension is needed; the original also bound the unused root.
        Ext = os.path.splitext(FileFullPath)[1]
        # Index 8 of the stat tuple is st_mtime (kept as tuple indexing: in
        # Python 2 the indexed form returns an int, the attribute a float).
        TimeStamp = os.stat(FileFullPath)[8]
        File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])
        return self.Insert(File.Name, File.ExtName, File.Path, File.FullPath, File.Model, TimeStamp)
diff --git a/BaseTools/Source/Python/Table/TableFunction.py b/BaseTools/Source/Python/Table/TableFunction.py new file mode 100644 index 0000000000..c013d0d2fe --- /dev/null +++ b/BaseTools/Source/Python/Table/TableFunction.py @@ -0,0 +1,95 @@ +## @file
+# This file is used to create/update/query/erase table for functions
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from Table import Table
+from Common.String import ConvertToSqlString
+
+## TableFunction
+#
+# This class defined a table used for function
+#
+# @param Table: Inherited from Table class
+#
## TableFunction
#
# Table holding one row per parsed C function.
#
# @param Table: Inherited from Table class
#
class TableFunction(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Function'

    ## Create table
    #
    # Create the 'Function' table: header comment, modifier, name, return
    # statement, the full source span, the body start position, the owning
    # file and the position of the function name itself.
    #
    def Create(self):
        Sql = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Header TEXT,
                                                Modifier VARCHAR,
                                                Name VARCHAR NOT NULL,
                                                ReturnStatement VARCHAR,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                BodyStartLine INTEGER NOT NULL,
                                                BodyStartColumn INTEGER NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                FunNameStartLine INTEGER NOT NULL,
                                                FunNameStartColumn INTEGER NOT NULL
                                               )""" % self.Table
        Table.Create(self, Sql)

    ## Insert table
    #
    # Insert one function record; string columns are escaped first.
    #
    # @param Header:             Header of the Function
    # @param Modifier:           Modifier of the Function
    # @param Name:               Name of the Function
    # @param ReturnStatement:    Return statement of the Function
    # @param StartLine/StartColumn/EndLine/EndColumn: full source span
    # @param BodyStartLine/BodyStartColumn: start of the Function body
    # @param BelongsToFile:      The file the Function belongs to
    # @param FunNameStartLine/FunNameStartColumn: position of the name
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Header, Modifier, Name, ReturnStatement, StartLine, StartColumn, EndLine, EndColumn, BodyStartLine, BodyStartColumn, BelongsToFile, FunNameStartLine, FunNameStartColumn):
        self.ID += 1
        Header, Modifier, Name, ReturnStatement = ConvertToSqlString((Header, Modifier, Name, ReturnStatement))
        Sql = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s, %s, %s)""" % (
            self.Table, self.ID, Header, Modifier, Name, ReturnStatement,
            StartLine, StartColumn, EndLine, EndColumn,
            BodyStartLine, BodyStartColumn, BelongsToFile,
            FunNameStartLine, FunNameStartColumn)
        Table.Insert(self, Sql)
        return self.ID
diff --git a/BaseTools/Source/Python/Table/TableIdentifier.py b/BaseTools/Source/Python/Table/TableIdentifier.py new file mode 100644 index 0000000000..3cf33f20e2 --- /dev/null +++ b/BaseTools/Source/Python/Table/TableIdentifier.py @@ -0,0 +1,90 @@ +## @file
+# This file is used to create/update/query/erase table for Identifiers
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from Common.String import ConvertToSqlString
+from Table import Table
+
+## TableIdentifier
+#
+# This class defined a table used for Identifier
+#
+# @param object: Inherited from object class
+#
+#
## TableIdentifier
#
# Table holding one row per parsed identifier.
#
# @param Table: Inherited from Table class
#
class TableIdentifier(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Identifier'

    ## Create table
    #
    # Create the 'Identifier' table: modifier, type, name, value, model,
    # owning file/function and the source-location span.
    #
    def Create(self):
        Sql = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
                                               Modifier VARCHAR,
                                               Type VARCHAR,
                                               Name VARCHAR NOT NULL,
                                               Value VARCHAR NOT NULL,
                                               Model INTEGER NOT NULL,
                                               BelongsToFile SINGLE NOT NULL,
                                               BelongsToFunction SINGLE DEFAULT -1,
                                               StartLine INTEGER NOT NULL,
                                               StartColumn INTEGER NOT NULL,
                                               EndLine INTEGER NOT NULL,
                                               EndColumn INTEGER NOT NULL
                                              )""" % self.Table
        Table.Create(self, Sql)

    ## Insert table
    #
    # Insert one identifier record; string columns are escaped first.
    #
    # @param Modifier:          Modifier of the Identifier
    # @param Type:              Type of the Identifier
    # @param Name:              Name of the Identifier
    # @param Value:             Value of the Identifier
    # @param Model:             Model of the Identifier
    # @param BelongsToFile:     The file the Identifier belongs to
    # @param BelongsToFunction: The function the Identifier belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source span
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Modifier, Type, Name, Value, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
        self.ID += 1
        Modifier, Type, Name, Value = ConvertToSqlString((Modifier, Type, Name, Value))
        Sql = """insert into %s values(%s, '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" % (
            self.Table, self.ID, Modifier, Type, Name, Value, Model,
            BelongsToFile, BelongsToFunction,
            StartLine, StartColumn, EndLine, EndColumn)
        Table.Insert(self, Sql)
        return self.ID
\ No newline at end of file diff --git a/BaseTools/Source/Python/Table/TableInf.py b/BaseTools/Source/Python/Table/TableInf.py new file mode 100644 index 0000000000..65ca1ce25c --- /dev/null +++ b/BaseTools/Source/Python/Table/TableInf.py @@ -0,0 +1,114 @@ +## @file
+# This file is used to create/update/query/erase table for inf datas
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import CommonDataClass.DataClass as DataClass
+from Table import Table
+from Common.String import ConvertToSqlString
+
+## TableInf
+#
+# This class defined a table used for data model
+#
+# @param object: Inherited from object class
+#
+#
## TableInf
#
# Table holding the parsed items of an INF file in the data model.
#
# @param Table: Inherited from Table class
#
class TableInf(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Inf'

    ## Create table
    #
    # Build the 'Inf' table: one row per INF item with its model, up to five
    # values, the arch, ownership links (item and file), the source-location
    # span, and an enable flag.
    #
    def Create(self):
        Sql = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                Model INTEGER NOT NULL,
                                                Value1 VARCHAR NOT NULL,
                                                Value2 VARCHAR,
                                                Value3 VARCHAR,
                                                Value4 VARCHAR,
                                                Value5 VARCHAR,
                                                Arch VarCHAR,
                                                BelongsToItem SINGLE NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL,
                                                Enabled INTEGER DEFAULT 0
                                               )""" % self.Table
        Table.Create(self, Sql)

    ## Insert table
    #
    # Insert one INF item record; string columns are escaped first.
    #
    # @param Model:          Model of the Inf item
    # @param Value1..5:      Values of the Inf item
    # @param Arch:           Arch of the Inf item
    # @param BelongsToItem:  The item this one belongs to
    # @param BelongsToFile:  The inf file this item belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source span
    # @param Enabled:        Whether this item is enabled
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Model, Value1, Value2, Value3, Value4, Value5, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled):
        self.ID += 1
        Value1, Value2, Value3, Value4, Value5, Arch = ConvertToSqlString((Value1, Value2, Value3, Value4, Value5, Arch))
        Sql = """insert into %s values(%s, %s, '%s', '%s', '%s', '%s', '%s', '%s', %s, %s, %s, %s, %s, %s, %s)""" % (
            self.Table, self.ID, Model, Value1, Value2, Value3, Value4, Value5,
            Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn,
            EndLine, EndColumn, Enabled)
        Table.Insert(self, Sql)
        return self.ID

    ## Query table
    #
    # Fetch every enabled record of the given model.
    #
    # NOTE(review): this select omits Value4/Value5 although the table defines
    # them — it mirrors the Dsc/Fdf query verbatim; confirm callers never need
    # the extra value columns.
    #
    # @param Model: The Model of Record
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model):
        Sql = """select ID, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine from %s
                 where Model = %s
                 and Enabled > -1""" % (self.Table, Model)
        EdkLogger.debug(4, "SqlCommand: %s" % Sql)
        self.Cur.execute(Sql)
        return self.Cur.fetchall()
diff --git a/BaseTools/Source/Python/Table/TablePcd.py b/BaseTools/Source/Python/Table/TablePcd.py new file mode 100644 index 0000000000..ba91d175f0 --- /dev/null +++ b/BaseTools/Source/Python/Table/TablePcd.py @@ -0,0 +1,90 @@ +## @file
+# This file is used to create/update/query/erase table for pcds
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from Table import Table
+from Common.String import ConvertToSqlString
+
+## TablePcd
+#
+# This class defined a table used for pcds
+#
+# @param object: Inherited from object class
+#
+#
## TablePcd
#
# Table holding one row per parsed PCD.
#
# @param Table: Inherited from Table class
#
class TablePcd(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Pcd'

    ## Create table
    #
    # Create the 'Pcd' table: C name, token-space GUID C name, token number,
    # datum type, model, owning file/function and the source-location span.
    #
    def Create(self):
        Sql = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                CName VARCHAR NOT NULL,
                                                TokenSpaceGuidCName VARCHAR NOT NULL,
                                                Token INTEGER,
                                                DatumType VARCHAR,
                                                Model INTEGER NOT NULL,
                                                BelongsToFile SINGLE NOT NULL,
                                                BelongsToFunction SINGLE DEFAULT -1,
                                                StartLine INTEGER NOT NULL,
                                                StartColumn INTEGER NOT NULL,
                                                EndLine INTEGER NOT NULL,
                                                EndColumn INTEGER NOT NULL
                                               )""" % self.Table
        Table.Create(self, Sql)

    ## Insert table
    #
    # Insert one PCD record; string columns are escaped first.
    #
    # @param CName:               C name of the Pcd
    # @param TokenSpaceGuidCName: Token-space GUID C name of the Pcd
    # @param Token:               Token of the Pcd
    # @param DatumType:           Datum type of the Pcd
    # @param Model:               Model of the Pcd
    # @param BelongsToFile:       The file the Pcd belongs to
    # @param BelongsToFunction:   The function the Pcd belongs to
    # @param StartLine/StartColumn/EndLine/EndColumn: source span
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, CName, TokenSpaceGuidCName, Token, DatumType, Model, BelongsToFile, BelongsToFunction, StartLine, StartColumn, EndLine, EndColumn):
        self.ID += 1
        CName, TokenSpaceGuidCName, DatumType = ConvertToSqlString((CName, TokenSpaceGuidCName, DatumType))
        Sql = """insert into %s values(%s, '%s', '%s', %s, '%s', %s, %s, %s, %s, %s, %s, %s)""" % (
            self.Table, self.ID, CName, TokenSpaceGuidCName, Token, DatumType,
            Model, BelongsToFile, BelongsToFunction,
            StartLine, StartColumn, EndLine, EndColumn)
        Table.Insert(self, Sql)
        return self.ID
\ No newline at end of file diff --git a/BaseTools/Source/Python/Table/TableQuery.py b/BaseTools/Source/Python/Table/TableQuery.py new file mode 100644 index 0000000000..9a9a66ccb6 --- /dev/null +++ b/BaseTools/Source/Python/Table/TableQuery.py @@ -0,0 +1,66 @@ +## @file
+# This file is used to create/update/query/erase table for Queries
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from Common.String import ConvertToSqlString
+from Table import Table
+
+## TableQuery
+#
+# This class defined a table used for Query
+#
+# @param object: Inherited from object class
+#
+#
## TableQuery
#
# Table holding one row per stored query (name, value, model).
#
# @param Table: Inherited from Table class
#
class TableQuery(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Query'

    ## Create table
    #
    # Create the 'Query' table.
    #
    # @param ID:    ID of a Query
    # @param Name:  Name of a Query
    # @param Value: Value of a Query
    # @param Model: Model of a Query
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s(ID INTEGER PRIMARY KEY,
                                                      Name TEXT DEFAULT '',
                                                      Value TEXT DEFAULT '',
                                                      Model INTEGER DEFAULT 0
                                                     )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table Query.
    #
    # @param Name:  Name of a Query
    # @param Value: Value of a Query
    # @param Model: Model of a Query
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, Name, Value, Model):
        self.ID = self.ID + 1
        # Escape embedded quotes before interpolation. ConvertToSqlString is
        # imported by this module but was never applied here, unlike every
        # sibling table class — an unescaped quote in Name/Value would break
        # the generated SQL statement.
        (Name, Value) = ConvertToSqlString((Name, Value))
        SqlCommand = """insert into %s values(%s, '%s', '%s', %s)""" \
                     % (self.Table, self.ID, Name, Value, Model)
        Table.Insert(self, SqlCommand)

        return self.ID
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/Table/TableReport.py b/BaseTools/Source/Python/Table/TableReport.py new file mode 100644 index 0000000000..042c1b7e9e --- /dev/null +++ b/BaseTools/Source/Python/Table/TableReport.py @@ -0,0 +1,123 @@ +## @file
+# This file is used to create/update/query/erase table for ECC reports
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+import os, time
+from Table import Table
+from Common.String import ConvertToSqlString2
+import EccToolError as EccToolError
+import EccGlobalData as EccGlobalData
+
+## TableReport
+#
+# This class defined a table used for data model
+#
+# @param object: Inherited from object class
+#
+#
## TableReport
#
# Table holding ECC error-report entries and exporting them to CSV.
#
# @param Table: Inherited from Table class
#
class TableReport(Table):
    def __init__(self, Cursor):
        Table.__init__(self, Cursor)
        self.Table = 'Report'

    ## Create table
    #
    # Create the 'Report' table.
    #
    # @param ID:             ID of an Error
    # @param ErrorID:        Error-type ID of the report item
    # @param OtherMsg:       Extra message besides the standard error message
    # @param BelongsToTable: The table the offending item lives in
    # @param BelongsToItem:  The item the error belongs to
    # @param Enabled:        If this error is enabled
    # @param Corrected:      If this error has been corrected
    #
    def Create(self):
        SqlCommand = """create table IF NOT EXISTS %s (ID INTEGER PRIMARY KEY,
                                                       ErrorID INTEGER NOT NULL,
                                                       OtherMsg TEXT,
                                                       BelongsToTable TEXT NOT NULL,
                                                       BelongsToItem SINGLE NOT NULL,
                                                       Enabled INTEGER DEFAULT 0,
                                                       Corrected INTEGER DEFAULT -1
                                                      )""" % self.Table
        Table.Create(self, SqlCommand)

    ## Insert table
    #
    # Insert a record into table report. Only OtherMsg is escaped; the other
    # string column (BelongsToTable) is a table name produced internally.
    #
    # @retval ID of the newly inserted record
    #
    def Insert(self, ErrorID, OtherMsg = '', BelongsToTable = '', BelongsToItem = -1, Enabled = 0, Corrected = -1):
        self.ID = self.ID + 1
        SqlCommand = """insert into %s values(%s, %s, '%s', '%s', %s, %s, %s)""" \
                     % (self.Table, self.ID, ErrorID, ConvertToSqlString2(OtherMsg), BelongsToTable, BelongsToItem, Enabled, Corrected)
        Table.Insert(self, SqlCommand)

        return self.ID

    ## Query table
    #
    # Fetch all enabled errors, ordered for a stable report.
    #
    # @retval: A recordSet of all found records
    #
    def Query(self):
        SqlCommand = """select ID, ErrorID, OtherMsg, BelongsToTable, BelongsToItem, Corrected from %s
                        where Enabled > -1 order by ErrorID, BelongsToItem""" % (self.Table)
        return self.Exec(SqlCommand)

    ## Convert to CSV
    #
    # Get all enabled records from table report and save them to a .csv file.
    # If the target file is locked (IOError), retry once with a timestamped
    # file name.
    #
    # @param Filename: The file name to save the report content to
    #
    def ToCSV(self, Filename = 'Report.csv'):
        try:
            File = open(Filename, 'w+')
            try:
                File.write("""No, Error Code, Error Message, File, LineNo, Other Error Message\n""")
                RecordSet = self.Query()
                Index = 0
                for Record in RecordSet:
                    Index = Index + 1
                    ErrorID = Record[1]
                    OtherMsg = Record[2]
                    BelongsToTable = Record[3]
                    BelongsToItem = Record[4]
                    IsCorrected = Record[5]
                    SqlCommand = ''
                    if BelongsToTable == 'File':
                        # Error on the file itself: no source line, report 0.
                        SqlCommand = """select 0, FullPath from %s where ID = %s
                                     """ % (BelongsToTable, BelongsToItem)
                    else:
                        # Join back to File to recover the path and start line.
                        SqlCommand = """select A.StartLine, B.FullPath from %s as A, File as B
                                        where A.ID = %s and B.ID = A.BelongsToFile
                                     """ % (BelongsToTable, BelongsToItem)
                    NewRecord = self.Exec(SqlCommand)
                    if NewRecord != []:
                        File.write("""%s,%s,"%s",%s,%s,"%s"\n""" % (Index, ErrorID, EccToolError.gEccErrorMessage[ErrorID], NewRecord[0][1], NewRecord[0][0], OtherMsg))
            finally:
                # Close even when a query or write raises part-way through;
                # the original leaked the handle on any failure after open().
                File.close()
        except IOError:
            NewFilename = 'Report_' + time.strftime("%Y%m%d_%H%M%S.csv", time.localtime())
            EdkLogger.warn("ECC", "The report file %s is locked by other progress, use %s instead!" % (Filename, NewFilename))
            self.ToCSV(NewFilename)
+
diff --git a/BaseTools/Source/Python/Table/__init__.py b/BaseTools/Source/Python/Table/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/Table/__init__.py diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py new file mode 100644 index 0000000000..69cac95d4f --- /dev/null +++ b/BaseTools/Source/Python/TargetTool/TargetTool.py @@ -0,0 +1,261 @@ +#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+import os
+import sys
+import traceback
+from optparse import OptionParser
+
+import Common.EdkLogger as EdkLogger
+import Common.BuildToolError as BuildToolError
+from Common.DataType import *
+
+# TODO: 1. implement 'set clean'; 2. add the item even if the corresponding line is disabled.
+
+class TargetTool():
+ def __init__(self, opt, args):
+ self.WorkSpace = os.path.normpath(os.getenv('WORKSPACE'))
+ self.Opt = opt
+ self.Arg = args[0]
+ self.FileName = os.path.normpath(os.path.join(self.WorkSpace, 'Conf', 'target.txt'))
+ if os.path.isfile(self.FileName) == False:
+ print "%s does not exist." % self.FileName
+ sys.exit(1)
+ self.TargetTxtDictionary = {
+ TAB_TAT_DEFINES_ACTIVE_PLATFORM : None,
+ TAB_TAT_DEFINES_TOOL_CHAIN_CONF : None,
+ TAB_TAT_DEFINES_MULTIPLE_THREAD : None,
+ TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : None,
+ TAB_TAT_DEFINES_TARGET : None,
+ TAB_TAT_DEFINES_TOOL_CHAIN_TAG : None,
+ TAB_TAT_DEFINES_TARGET_ARCH : None,
+ TAB_TAT_DEFINES_BUILD_RULE_CONF : None,
+ }
+ self.LoadTargetTxtFile(self.FileName)
+
+ def LoadTargetTxtFile(self, filename):
+ if os.path.exists(filename) and os.path.isfile(filename):
+ return self.ConvertTextFileToDict(filename, '#', '=')
+ else:
+ raise ParseError('LoadTargetTxtFile() : No Target.txt file exists.')
+ return 1
+
+#
+# Convert a text file to a dictionary
+#
+ def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
+ """Convert a text file to a dictionary of (name:value) pairs."""
+ try:
+ f = open(FileName,'r')
+ for Line in f:
+ if Line.startswith(CommentCharacter) or Line.strip() == '':
+ continue
+ LineList = Line.split(KeySplitCharacter,1)
+ if len(LineList) >= 2:
+ Key = LineList[0].strip()
+ if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary.keys():
+ if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM or Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF \
+ or Key == TAB_TAT_DEFINES_MULTIPLE_THREAD or Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER \
+ or Key == TAB_TAT_DEFINES_ACTIVE_MODULE:
+ self.TargetTxtDictionary[Key] = LineList[1].replace('\\', '/').strip()
+ elif Key == TAB_TAT_DEFINES_TARGET or Key == TAB_TAT_DEFINES_TARGET_ARCH \
+ or Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG or Key == TAB_TAT_DEFINES_BUILD_RULE_CONF:
+ self.TargetTxtDictionary[Key] = LineList[1].split()
+ f.close()
+ return 0
+ except:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
+ def Print(self):
+ KeyList = self.TargetTxtDictionary.keys()
+ errMsg = ''
+ for Key in KeyList:
+ if type(self.TargetTxtDictionary[Key]) == type([]):
+ print "%-30s = %s" % (Key, ''.join(elem + ' ' for elem in self.TargetTxtDictionary[Key]))
+ elif self.TargetTxtDictionary[Key] == None:
+ errMsg += " Missing %s configuration information, please use TargetTool to set value!" % Key + os.linesep
+ else:
+ print "%-30s = %s" % (Key, self.TargetTxtDictionary[Key])
+
+ if errMsg != '':
+ print os.linesep + 'Warning:' + os.linesep + errMsg
+
+ def RWFile(self, CommentCharacter, KeySplitCharacter, Num):
+ try:
+ fr = open(self.FileName, 'r')
+ fw = open(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), 'w')
+
+ existKeys = []
+ for Line in fr:
+ if Line.startswith(CommentCharacter) or Line.strip() == '':
+ fw.write(Line)
+ else:
+ LineList = Line.split(KeySplitCharacter,1)
+ if len(LineList) >= 2:
+ Key = LineList[0].strip()
+ if Key.startswith(CommentCharacter) == False and Key in self.TargetTxtDictionary.keys():
+ if Key not in existKeys:
+ existKeys.append(Key)
+ else:
+ print "Warning: Found duplicate key item in original configuration files!"
+
+ if Num == 0:
+ Line = "%-30s = \n" % Key
+ else:
+ ret = GetConfigureKeyValue(self, Key)
+ if ret != None:
+ Line = ret
+ fw.write(Line)
+ for key in self.TargetTxtDictionary.keys():
+ if key not in existKeys:
+ print "Warning: %s does not exist in original configuration file" % key
+ Line = GetConfigureKeyValue(self, key)
+ if Line == None:
+ Line = "%-30s = " % key
+ fw.write(Line)
+
+ fr.close()
+ fw.close()
+ os.remove(self.FileName)
+ os.rename(os.path.normpath(os.path.join(self.WorkSpace, 'Conf\\targetnew.txt')), self.FileName)
+
+ except:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
def GetConfigureKeyValue(self, Key):
    """Compose the replacement "KEY = value" line for one target.txt key.

    Reads the matching command-line option from self.Opt and validates any
    referenced file against the workspace.

    @param self: The TargetTool instance carrying Opt and WorkSpace
    @param Key:  The target.txt key to generate a line for
    @retval str  Formatted, newline-terminated line, or None when the option
                 was not supplied or the referenced file does not exist
    """
    # Bug fix: the EdkLogger.error source tag read "TagetTool" (typo) in all
    # three error calls below; corrected to "TargetTool".
    Line = None
    if Key == TAB_TAT_DEFINES_ACTIVE_PLATFORM and self.Opt.DSCFILE != None:
        dscFullPath = os.path.join(self.WorkSpace, self.Opt.DSCFILE)
        if os.path.exists(dscFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.DSCFILE)
        else:
            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
                            "DSC file %s does not exist!" % self.Opt.DSCFILE, RaiseError=False)
    elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_CONF and self.Opt.TOOL_DEFINITION_FILE != None:
        tooldefFullPath = os.path.join(self.WorkSpace, self.Opt.TOOL_DEFINITION_FILE)
        if os.path.exists(tooldefFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_DEFINITION_FILE)
        else:
            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
                            "Tooldef file %s does not exist!" % self.Opt.TOOL_DEFINITION_FILE, RaiseError=False)
    elif Key == TAB_TAT_DEFINES_MULTIPLE_THREAD and self.Opt.NUM != None:
        # -m takes precedence for MULTIPLE_THREAD: two or more threads enable it
        if self.Opt.NUM >= 2:
            Line = "%-30s = %s\n" % (Key, 'Enable')
        else:
            Line = "%-30s = %s\n" % (Key, 'Disable')
    elif Key == TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER and self.Opt.NUM != None:
        Line = "%-30s = %s\n" % (Key, str(self.Opt.NUM))
    elif Key == TAB_TAT_DEFINES_MULTIPLE_THREAD and self.Opt.ENABLE_MULTI_THREAD != None:
        # Reached only when -m was not given (the NUM branch above matched first)
        Line = "%-30s = %s\n" % (Key, self.Opt.ENABLE_MULTI_THREAD)
    elif Key == TAB_TAT_DEFINES_TARGET and self.Opt.TARGET != None:
        Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET))
    elif Key == TAB_TAT_DEFINES_TARGET_ARCH and self.Opt.TARGET_ARCH != None:
        Line = "%-30s = %s\n" % (Key, ''.join(elem + ' ' for elem in self.Opt.TARGET_ARCH))
    elif Key == TAB_TAT_DEFINES_TOOL_CHAIN_TAG and self.Opt.TOOL_CHAIN_TAG != None:
        Line = "%-30s = %s\n" % (Key, self.Opt.TOOL_CHAIN_TAG)
    elif Key == TAB_TAT_DEFINES_BUILD_RULE_CONF and self.Opt.BUILD_RULE_FILE != None:
        buildruleFullPath = os.path.join(self.WorkSpace, self.Opt.BUILD_RULE_FILE)
        if os.path.exists(buildruleFullPath):
            Line = "%-30s = %s\n" % (Key, self.Opt.BUILD_RULE_FILE)
        else:
            EdkLogger.error("TargetTool", BuildToolError.FILE_NOT_FOUND,
                            "Build rule file %s does not exist!" % self.Opt.BUILD_RULE_FILE, RaiseError=False)
    return Line
+
+VersionNumber = "0.01"
+__version__ = "%prog Version " + VersionNumber
+__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved."
+__usage__ = "%prog [options] {args} \
+\nArgs: \
+\n Clean clean the all default configuration of target.txt. \
+\n Print print the all default configuration of target.txt. \
+\n Set replace the default configuration with expected value specified by option."
+
## Options already seen on the command line (each may appear at most once)
gParamCheck = []

def SingleCheckCallback(option, opt_str, value, parser):
    """optparse callback that stores a value and rejects repeated options."""
    if option in gParamCheck:
        parser.error("Option %s only allows one instance in command line!" % option)
    setattr(parser.values, option.dest, value)
    gParamCheck.append(option)
+
def RangeCheckCallback(option, opt_str, value, parser):
    """optparse callback for -m: allow one occurrence, enforce the 1..8 range."""
    if option in gParamCheck:
        parser.error("Option %s only allows one instance in command line!" % option)
    gParamCheck.append(option)
    if 1 <= value <= 8:
        setattr(parser.values, option.dest, value)
    else:
        parser.error("The count of multi-thread is not in valid range of 1 ~ 8.")
+
def MyOptionParser():
    """Build the command-line option parser and parse sys.argv.

    @retval (opt, args): The parsed option values and the positional arguments
    """
    parser = OptionParser(version=__version__,prog="TargetTool.exe",usage=__usage__,description=__copyright__)
    # Multi-value settings use action="append" so the option can be repeated
    parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC', 'ARM','0'], dest="TARGET_ARCH",
        help="ARCHS is one of list: IA32, X64, IPF, ARM or EBC, which replaces target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
    # Single-value settings go through SingleCheckCallback to reject repeats
    parser.add_option("-p", "--platform", action="callback", type="string", dest="DSCFILE", callback=SingleCheckCallback,
        help="Specify a DSC file, which replace target.txt's ACTIVE_PLATFORM definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-c", "--tooldef", action="callback", type="string", dest="TOOL_DEFINITION_FILE", callback=SingleCheckCallback,
        help="Specify the WORKSPACE relative path of tool_def.txt file, which replace target.txt's TOOL_CHAIN_CONF definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-t", "--target", action="append", type="choice", choices=['DEBUG','RELEASE','0'], dest="TARGET",
        help="TARGET is one of list: DEBUG, RELEASE, which replaces target.txt's TARGET definition. To specify more TARGET, please repeat this option. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-n", "--tagname", action="callback", type="string", dest="TOOL_CHAIN_TAG", callback=SingleCheckCallback,
        help="Specify the Tool Chain Tagname, which replaces target.txt's TOOL_CHAIN_TAG definition. 0 will clear this setting in target.txt and can't combine with other value.")
    parser.add_option("-r", "--buildrule", action="callback", type="string", dest="BUILD_RULE_FILE", callback=SingleCheckCallback,
        help="Specify the build rule configure file, which replaces target.txt's BUILD_RULE_CONF definition. If not specified, the default value Conf/build_rule.txt will be set.")
    # RangeCheckCallback additionally validates the 1..8 thread-count range
    parser.add_option("-m", "--multithreadnum", action="callback", type="int", dest="NUM", callback=RangeCheckCallback,
        help="Specify the multi-thread number which replace target.txt's MAX_CONCURRENT_THREAD_NUMBER. If the value is less than 2, MULTIPLE_THREAD will be disabled. If the value is larger than 1, MULTIPLE_THREAD will be enabled.")
    parser.add_option("-e", "--enablemultithread", action="store", type="choice", choices=['Enable', 'Disable'], dest="ENABLE_MULTI_THREAD",
        help="Specify whether enable multi-thread! If Enable, multi-thread is enabled; If Disable, mutli-thread is disable")
    (opt, args)=parser.parse_args()
    return (opt, args)
+
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+ if os.getenv('WORKSPACE') == None:
+ print "ERROR: WORKSPACE should be specified or edksetup script should be executed before run TargetTool"
+ sys.exit(1)
+
+ (opt, args) = MyOptionParser()
+ if len(args) != 1 or (args[0].lower() != 'print' and args[0].lower() != 'clean' and args[0].lower() != 'set'):
+ print "The number of args isn't 1 or the value of args is invalid."
+ sys.exit(1)
+ if opt.NUM != None and opt.NUM < 1:
+ print "The MAX_CONCURRENT_THREAD_NUMBER must be larger than 0."
+ sys.exit(1)
+ if opt.TARGET != None and len(opt.TARGET) > 1:
+ for elem in opt.TARGET:
+ if elem == '0':
+ print "0 will clear the TARGET setting in target.txt and can't combine with other value."
+ sys.exit(1)
+ if opt.TARGET_ARCH != None and len(opt.TARGET_ARCH) > 1:
+ for elem in opt.TARGET_ARCH:
+ if elem == '0':
+ print "0 will clear the TARGET_ARCH setting in target.txt and can't combine with other value."
+ sys.exit(1)
+
+ try:
+ FileHandle = TargetTool(opt, args)
+ if FileHandle.Arg.lower() == 'print':
+ FileHandle.Print()
+ sys.exit(0)
+ elif FileHandle.Arg.lower() == 'clean':
+ FileHandle.RWFile('#', '=', 0)
+ else:
+ FileHandle.RWFile('#', '=', 1)
+ except Exception, e:
+ last_type, last_value, last_tb = sys.exc_info()
+ traceback.print_exception(last_type, last_value, last_tb)
+
diff --git a/BaseTools/Source/Python/TargetTool/__init__.py b/BaseTools/Source/Python/TargetTool/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/TargetTool/__init__.py diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py new file mode 100644 index 0000000000..a55c136edb --- /dev/null +++ b/BaseTools/Source/Python/Trim/Trim.py @@ -0,0 +1,520 @@ +## @file +# Trim files preprocessed by compiler +# +# Copyright (c) 2007, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import sys +import re + +from optparse import OptionParser +from optparse import make_option +from Common.BuildToolError import * +from Common.Misc import * + +import Common.EdkLogger as EdkLogger + +# Version and Copyright +__version_number__ = "0.10" +__version__ = "%prog Version " + __version_number__ +__copyright__ = "Copyright (c) 2007-2008, Intel Corporation. All rights reserved." 
+ +## Regular expression for matching Line Control directive like "#line xxx" +gLineControlDirective = re.compile('^\s*#(?:line)?\s+([0-9]+)\s+"*([^"]*)"') +## Regular expression for matching "typedef struct" +gTypedefPattern = re.compile("^\s*typedef\s+struct\s*[{]*$", re.MULTILINE) +## Regular expression for matching "#pragma pack" +gPragmaPattern = re.compile("^\s*#pragma\s+pack", re.MULTILINE) +## Regular expression for matching HEX number +gHexNumberPattern = re.compile("0[xX]([0-9a-fA-F]+)") +## Regular expression for matching "Include ()" in asl file +gAslIncludePattern = re.compile("^(\s*)[iI]nclude\s*\(\"?([^\"\(\)]+)\"\)", re.MULTILINE) +## Patterns used to convert EDK conventions to EDK2 ECP conventions +gImportCodePatterns = [ + [ + re.compile('^(\s*)\(\*\*PeiServices\)\.PciCfg\s*=\s*([^;\s]+);', re.MULTILINE), + '''\\1{ +\\1 STATIC EFI_PEI_PPI_DESCRIPTOR gEcpPeiPciCfgPpiList = { +\\1 (EFI_PEI_PPI_DESCRIPTOR_PPI | EFI_PEI_PPI_DESCRIPTOR_TERMINATE_LIST), +\\1 &gEcpPeiPciCfgPpiGuid, +\\1 \\2 +\\1 }; +\\1 (**PeiServices).InstallPpi (PeiServices, &gEcpPeiPciCfgPpiList); +\\1}''' + ], + + [ + re.compile('^(\s*)\(\*PeiServices\)->PciCfg\s*=\s*([^;\s]+);', re.MULTILINE), + '''\\1{ +\\1 STATIC EFI_PEI_PPI_DESCRIPTOR gEcpPeiPciCfgPpiList = { +\\1 (EFI_PEI_PPI_DESCRIPTOR_PPI | EFI_PEI_PPI_DESCRIPTOR_TERMINATE_LIST), +\\1 &gEcpPeiPciCfgPpiGuid, +\\1 \\2 +\\1 }; +\\1 (**PeiServices).InstallPpi (PeiServices, &gEcpPeiPciCfgPpiList); +\\1}''' + ], + + [ + re.compile("(\s*).+->Modify[\s\n]*\(", re.MULTILINE), + '\\1PeiLibPciCfgModify (' + ], + + [ + re.compile("(\W*)gRT->ReportStatusCode[\s\n]*\(", re.MULTILINE), + '\\1EfiLibReportStatusCode (' + ], + + [ + re.compile('#include\s+["<]LoadFile\.h[">]', re.MULTILINE), + '#include <FvLoadFile.h>' + ], + + [ + re.compile("(\s*)\S*CreateEvent\s*\([\s\n]*EFI_EVENT_SIGNAL_READY_TO_BOOT[^,]*,((?:[^;]+\n)+)(\s*\));", re.MULTILINE), + '\\1EfiCreateEventReadyToBoot (\\2\\3;' + ], + + [ + 
re.compile("(\s*)\S*CreateEvent\s*\([\s\n]*EFI_EVENT_SIGNAL_LEGACY_BOOT[^,]*,((?:[^;]+\n)+)(\s*\));", re.MULTILINE), + '\\1EfiCreateEventLegacyBoot (\\2\\3;' + ], +# [ +# re.compile("(\W)(PEI_PCI_CFG_PPI)(\W)", re.MULTILINE), +# '\\1ECP_\\2\\3' +# ] +] + +## file cache to avoid circular include in ASL file +gIncludedAslFile = [] + +## Trim preprocessed source code +# +# Remove extra content made by preprocessor. The preprocessor must enable the +# line number generation option when preprocessing. +# +# @param Source File to be trimmed +# @param Target File to store the trimmed content +# @param Convert If True, convert standard HEX format to MASM format +# +def TrimPreprocessedFile(Source, Target, Convert): + CreateDirectory(os.path.dirname(Target)) + try: + f = open (Source, 'r') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) + + # read whole file + Lines = f.readlines() + f.close() + + PreprocessedFile = "" + InjectedFile = "" + LineIndexOfOriginalFile = None + NewLines = [] + LineControlDirectiveFound = False + for Index in range(len(Lines)): + Line = Lines[Index] + # + # Find out the name of files injected by preprocessor from the lines + # with Line Control directive + # + MatchList = gLineControlDirective.findall(Line) + if MatchList != []: + MatchList = MatchList[0] + if len(MatchList) == 2: + LineNumber = int(MatchList[0], 0) + InjectedFile = MatchList[1] + # The first injetcted file must be the preprocessed file itself + if PreprocessedFile == "": + PreprocessedFile = InjectedFile + LineControlDirectiveFound = True + continue + elif PreprocessedFile == "" or InjectedFile != PreprocessedFile: + continue + + if LineIndexOfOriginalFile == None: + # + # Any non-empty lines must be from original preprocessed file. + # And this must be the first one. 
+ # + LineIndexOfOriginalFile = Index + EdkLogger.verbose("Found original file content starting from line %d" + % (LineIndexOfOriginalFile + 1)) + + # convert HEX number format if indicated + if Convert: + Line = gHexNumberPattern.sub(r"0\1h", Line) + + if LineNumber != None: + EdkLogger.verbose("Got line directive: line=%d" % LineNumber) + # in case preprocessor removed some lines, like blank or comment lines + if LineNumber <= len(NewLines): + # possible? + NewLines[LineNumber - 1] = Line + else: + if LineNumber > (len(NewLines) + 1): + for LineIndex in range(len(NewLines), LineNumber-1): + NewLines.append(os.linesep) + NewLines.append(Line) + LineNumber = None + EdkLogger.verbose("Now we have lines: %d" % len(NewLines)) + else: + NewLines.append(Line) + + # in case there's no line directive or linemarker found + if (not LineControlDirectiveFound) and NewLines == []: + NewLines = Lines + + # save to file + try: + f = open (Target, 'wb') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) + f.writelines(NewLines) + f.close() + +## Trim preprocessed VFR file +# +# Remove extra content made by preprocessor. The preprocessor doesn't need to +# enable line number generation option when preprocessing. 
+# +# @param Source File to be trimmed +# @param Target File to store the trimmed content +# +def TrimPreprocessedVfr(Source, Target): + CreateDirectory(os.path.dirname(Target)) + + try: + f = open (Source,'r') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) + # read whole file + Lines = f.readlines() + f.close() + + FoundTypedef = False + Brace = 0 + TypedefStart = 0 + TypedefEnd = 0 + for Index in range(len(Lines)): + Line = Lines[Index] + # don't trim the lines from "formset" definition to the end of file + if Line.strip() == 'formset': + break + + if FoundTypedef == False and (Line.find('#line') == 0 or Line.find('# ') == 0): + # empty the line number directive if it's not aomong "typedef struct" + Lines[Index] = "\n" + continue + + if FoundTypedef == False and gTypedefPattern.search(Line) == None: + # keep "#pragram pack" directive + if gPragmaPattern.search(Line) == None: + Lines[Index] = "\n" + continue + elif FoundTypedef == False: + # found "typedef struct", keept its position and set a flag + FoundTypedef = True + TypedefStart = Index + + # match { and } to find the end of typedef definition + if Line.find("{") >= 0: + Brace += 1 + elif Line.find("}") >= 0: + Brace -= 1 + + # "typedef struct" must end with a ";" + if Brace == 0 and Line.find(";") >= 0: + FoundTypedef = False + TypedefEnd = Index + # keep all "typedef struct" except to GUID, EFI_PLABEL and PAL_CALL_RETURN + if Line.strip("} ;\r\n") in ["GUID", "EFI_PLABEL", "PAL_CALL_RETURN"]: + for i in range(TypedefStart, TypedefEnd+1): + Lines[i] = "\n" + + # save all lines trimmed + try: + f = open (Target,'w') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) + f.writelines(Lines) + f.close() + +## Read the content ASL file, including ASL included, recursively +# +# @param Source File to be read +# @param Indent Spaces before the Include() statement +# +def DoInclude(Source, Indent=''): + NewFileContent = [] + # avoid A "include" B and B "include" A + if 
Source in gIncludedAslFile: + EdkLogger.warn("Trim", "Circular include", + ExtraData= "%s -> %s" % (" -> ".join(gIncludedAslFile), Source)) + return [] + gIncludedAslFile.append(Source) + + try: + F = open(Source,'r') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) + + for Line in F: + Result = gAslIncludePattern.findall(Line) + if len(Result) == 0: + NewFileContent.append("%s%s" % (Indent, Line)) + continue + CurrentIndent = Indent + Result[0][0] + IncludedFile = Result[0][1] + NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent)) + + gIncludedAslFile.pop() + F.close() + + return NewFileContent + + +## Trim ASL file +# +# Replace ASL include statement with the content the included file +# +# @param Source File to be trimmed +# @param Target File to store the trimmed content +# +def TrimAslFile(Source, Target): + CreateDirectory(os.path.dirname(Target)) + + Cwd = os.getcwd() + SourceDir = os.path.dirname(Source) + if SourceDir == '': + SourceDir = '.' + os.chdir(SourceDir) + Lines = DoInclude(Source) + os.chdir(Cwd) + + # save all lines trimmed + try: + f = open (Target,'w') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) + + f.writelines(Lines) + f.close() + +## Trim EDK source code file(s) +# +# +# @param Source File or directory to be trimmed +# @param Target File or directory to store the trimmed content +# +def TrimR8Sources(Source, Target): + if os.path.isdir(Source): + for CurrentDir, Dirs, Files in os.walk(Source): + if '.svn' in Dirs: + Dirs.remove('.svn') + elif "CVS" in Dirs: + Dirs.remove("CVS") + + for FileName in Files: + Dummy, Ext = os.path.splitext(FileName) + if Ext.upper() not in ['.C', '.H']: continue + if Target == None or Target == '': + TrimR8SourceCode( + os.path.join(CurrentDir, FileName), + os.path.join(CurrentDir, FileName) + ) + else: + TrimR8SourceCode( + os.path.join(CurrentDir, FileName), + os.path.join(Target, CurrentDir[len(Source)+1:], FileName) + ) + else: + 
TrimR8SourceCode(Source, Target) + +## Trim one EDK source code file +# +# Do following replacement: +# +# (**PeiServices\).PciCfg = <*>; +# => { +# STATIC EFI_PEI_PPI_DESCRIPTOR gEcpPeiPciCfgPpiList = { +# (EFI_PEI_PPI_DESCRIPTOR_PPI | EFI_PEI_PPI_DESCRIPTOR_TERMINATE_LIST), +# &gEcpPeiPciCfgPpiGuid, +# <*> +# }; +# (**PeiServices).InstallPpi (PeiServices, &gEcpPeiPciCfgPpiList); +# +# <*>Modify(<*>) +# => PeiLibPciCfgModify (<*>) +# +# gRT->ReportStatusCode (<*>) +# => EfiLibReportStatusCode (<*>) +# +# #include <LoadFile\.h> +# => #include <FvLoadFile.h> +# +# CreateEvent (EFI_EVENT_SIGNAL_READY_TO_BOOT, <*>) +# => EfiCreateEventReadyToBoot (<*>) +# +# CreateEvent (EFI_EVENT_SIGNAL_LEGACY_BOOT, <*>) +# => EfiCreateEventLegacyBoot (<*>) +# +# @param Source File to be trimmed +# @param Target File to store the trimmed content +# +def TrimR8SourceCode(Source, Target): + EdkLogger.verbose("\t%s -> %s" % (Source, Target)) + CreateDirectory(os.path.dirname(Target)) + + try: + f = open (Source,'rb') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source) + # read whole file + Lines = f.read() + f.close() + + NewLines = None + for Re,Repl in gImportCodePatterns: + if NewLines == None: + NewLines = Re.sub(Repl, Lines) + else: + NewLines = Re.sub(Repl, NewLines) + + # save all lines if trimmed + if Source == Target and NewLines == Lines: + return + + try: + f = open (Target,'wb') + except: + EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target) + f.write(NewLines) + f.close() + + +## Parse command line options +# +# Using standard Python module optparse to parse command line option of this tool. 
+# +# @retval Options A optparse.Values object containing the parsed options +# @retval InputFile Path of file to be trimmed +# +def Options(): + OptionList = [ + make_option("-s", "--source-code", dest="FileType", const="SourceCode", action="store_const", + help="The input file is preprocessed source code, including C or assembly code"), + make_option("-r", "--vfr-file", dest="FileType", const="Vfr", action="store_const", + help="The input file is preprocessed VFR file"), + make_option("-a", "--asl-file", dest="FileType", const="Asl", action="store_const", + help="The input file is ASL file"), + make_option("-8", "--r8-source-code", dest="FileType", const="R8SourceCode", action="store_const", + help="The input file is source code for R8 to be trimmed for ECP"), + + make_option("-c", "--convert-hex", dest="ConvertHex", action="store_true", + help="Convert standard hex format (0xabcd) to MASM format (abcdh)"), + + make_option("-o", "--output", dest="OutputFile", + help="File to store the trimmed content"), + make_option("-v", "--verbose", dest="LogLevel", action="store_const", const=EdkLogger.VERBOSE, + help="Run verbosely"), + make_option("-d", "--debug", dest="LogLevel", type="int", + help="Run with debug information"), + make_option("-q", "--quiet", dest="LogLevel", action="store_const", const=EdkLogger.QUIET, + help="Run quietly"), + make_option("-?", action="help", help="show this help message and exit"), + ] + + # use clearer usage to override default usage message + UsageString = "%prog [-s|-r|-a] [-c] [-v|-d <debug_level>|-q] [-o <output_file>] <input_file>" + + Parser = OptionParser(description=__copyright__, version=__version__, option_list=OptionList, usage=UsageString) + Parser.set_defaults(FileType="Vfr") + Parser.set_defaults(ConvertHex=False) + Parser.set_defaults(LogLevel=EdkLogger.INFO) + + Options, Args = Parser.parse_args() + + # error check + if len(Args) == 0: + EdkLogger.error("Trim", OPTION_MISSING, ExtraData=Parser.get_usage()) + if len(Args) 
> 1: + EdkLogger.error("Trim", OPTION_NOT_SUPPORTED, ExtraData=Parser.get_usage()) + + InputFile = Args[0] + return Options, InputFile + +## Entrance method +# +# This method mainly dispatch specific methods per the command line options. +# If no error found, return zero value so the caller of this tool can know +# if it's executed successfully or not. +# +# @retval 0 Tool was successful +# @retval 1 Tool failed +# +def Main(): + try: + EdkLogger.Initialize() + CommandOptions, InputFile = Options() + if CommandOptions.LogLevel < EdkLogger.DEBUG_9: + EdkLogger.SetLevel(CommandOptions.LogLevel + 1) + else: + EdkLogger.SetLevel(CommandOptions.LogLevel) + except FatalError, X: + return 1 + + try: + if CommandOptions.FileType == "Vfr": + if CommandOptions.OutputFile == None: + CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii' + TrimPreprocessedVfr(InputFile, CommandOptions.OutputFile) + elif CommandOptions.FileType == "Asl": + if CommandOptions.OutputFile == None: + CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii' + TrimAslFile(InputFile, CommandOptions.OutputFile) + elif CommandOptions.FileType == "R8SourceCode": + TrimR8Sources(InputFile, CommandOptions.OutputFile) + else : + if CommandOptions.OutputFile == None: + CommandOptions.OutputFile = os.path.splitext(InputFile)[0] + '.iii' + TrimPreprocessedFile(InputFile, CommandOptions.OutputFile, CommandOptions.ConvertHex) + except FatalError, X: + import platform + import traceback + if CommandOptions != None and CommandOptions.LogLevel <= EdkLogger.DEBUG_9: + EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + return 1 + except: + import traceback + import platform + EdkLogger.error( + "\nTrim", + CODE_ERROR, + "Unknown fatal error when trimming [%s]" % InputFile, + ExtraData="\n(Please send email to dev@buildtools.tianocore.org for help, attaching following call stack trace!)\n", + RaiseError=False + ) + 
EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + return 1 + + return 0 + +if __name__ == '__main__': + r = Main() + ## 0-127 is a safe return range, and 1 is a standard default error + if r < 0 or r > 127: r = 1 + sys.exit(r) + diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py new file mode 100644 index 0000000000..36c2ebf491 --- /dev/null +++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py @@ -0,0 +1,364 @@ +## @file
+# This file is used to define each component of the build database
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+import os
+
+from Common.Misc import sdict
+from Common.Misc import RealPath2
+from Common.BuildToolError import *
+
+## PcdClassObject
+#
+# This Class is used for PcdObject
+#
+# @param object: Inherited from object class
+# @param Name: Input value for Name of Pcd, default is None
+# @param Guid: Input value for Guid of Pcd, default is None
+# @param Type: Input value for Type of Pcd, default is None
+# @param DatumType: Input value for DatumType of Pcd, default is None
+# @param Value: Input value for Value of Pcd, default is None
+# @param Token: Input value for Token of Pcd, default is None
+# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
+# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
+# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
+#
+# @var TokenCName: To store value for TokenCName
+# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
+# @var Type: To store value for Type
+# @var DatumType: To store value for DatumType
+# @var TokenValue: To store value for TokenValue
+# @var MaxDatumSize: To store value for MaxDatumSize
+# @var SkuInfoList: To store value for SkuInfoList
+# @var IsOverrided: To store value for IsOverrided
+# @var Phase: To store value for Phase, default is "DXE"
+#
class PcdClassObject(object):
    """Build-database record for one PCD entry.

    @param Name:         TokenCName of the Pcd
    @param Guid:         TokenSpaceGuidCName of the Pcd
    @param Type:         Type of the Pcd
    @param DatumType:    DatumType of the Pcd
    @param Value:        Default value of the Pcd
    @param Token:        Token value of the Pcd
    @param MaxDatumSize: Maximum datum size of the Pcd
    @param SkuInfoList:  Mapping of SKU name to SkuInfo; a fresh empty dict
                         is created when not supplied
    @param GuidValue:    Token space GUID value of the Pcd
    """

    def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, GuidValue = None):
        self.TokenCName = Name
        self.TokenSpaceGuidCName = Guid
        self.TokenSpaceGuidValue = GuidValue
        self.Type = Type
        self.DatumType = DatumType
        self.DefaultValue = Value
        self.TokenValue = Token
        self.MaxDatumSize = MaxDatumSize
        # Bug fix: the default used to be the mutable literal {}, which Python
        # evaluates once, so every Pcd created without an explicit SkuInfoList
        # shared one dictionary. Create a fresh dict per instance instead.
        if SkuInfoList is None:
            SkuInfoList = {}
        self.SkuInfoList = SkuInfoList
        self.Phase = "DXE"
        self.Pending = False
        # Bug fix: __str__ reads IsOverrided but it was never initialized here,
        # so str(pcd) raised AttributeError unless a caller set it externally.
        # The class comment block documents its default as False.
        self.IsOverrided = False

    ## Convert the class to a string
    #
    # Convert each member of the class to string
    # Organize to a single line format string
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
              'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
              'Type=' + str(self.Type) + ', ' + \
              'DatumType=' + str(self.DatumType) + ', ' + \
              'DefaultValue=' + str(self.DefaultValue) + ', ' + \
              'TokenValue=' + str(self.TokenValue) + ', ' + \
              'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
        for Item in self.SkuInfoList.values():
            Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
        Rtn = Rtn + str(self.IsOverrided)

        return Rtn

    ## Override __eq__ function
    #
    # Two Pcds are equal when both TokenCName and TokenSpaceGuidCName match
    #
    # @retval False The two pcds are different
    # @retval True  The two pcds are the same
    #
    def __eq__(self, Other):
        return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName

    ## Override __hash__ function
    #
    # Use (TokenCName, TokenSpaceGuidCName) as key in hash table
    #
    # @retval tuple Key for hash table
    #
    def __hash__(self):
        return hash((self.TokenCName, self.TokenSpaceGuidCName))
+
+## LibraryClassObject
+#
+# This Class defines LibraryClassObject used in BuildDatabase
+#
+# @param object: Inherited from object class
+# @param Name: Input value for LibraryClassName, default is None
+# @param SupModList: Input value for SupModList, default is []
+# @param Type: Input value for Type, default is None
+#
+# @var LibraryClass: To store value for LibraryClass
+# @var SupModList: To store value for SupModList
+# @var Type: To store value for Type
+#
class LibraryClassObject(object):
    ## Constructor
    #
    # @param Name       Library class name
    # @param SupModList List of supported module types, default is a new list
    # @param Type       Space-separated module-type string; when given it
    #                   replaces SupModList
    #
    def __init__(self, Name = None, SupModList = None, Type = None):
        self.LibraryClass = Name
        # Create a fresh list per instance.  The original signature used a
        # mutable default argument ([]), shared by every instance constructed
        # without an explicit SupModList.
        self.SupModList = [] if SupModList is None else SupModList
        if Type != None:
            # NOTE(review): neither CleanString nor DataType is imported by
            # this module, so taking this branch raises NameError -- confirm
            # whether any caller actually passes Type before relying on it.
            self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
+
+## ModuleBuildClassObject
+#
+# This Class defines ModuleBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for module meta file path
+# @var BaseName: To store value for BaseName
+# @var ModuleType: To store value for ModuleType
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var PcdIsDriver: To store value for PcdIsDriver
+# @var BinaryModule: To store value for BinaryModule
+# @var CustomMakefile: To store value for CustomMakefile
+# @var Specification: To store value for Specification
+# @var Shadow To store value for Shadow
+# @var LibraryClass: To store value for LibraryClass, it is a list structure as
+# [ LibraryClassObject, ...]
+# @var ModuleEntryPointList: To store value for ModuleEntryPointList
+# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
+# @var ConstructorList: To store value for ConstructorList
+# @var DestructorList: To store value for DestructorList
+# @var Binaries: To store value for Binaries, it is a list structure as
+# [ ModuleBinaryClassObject, ...]
+# @var Sources: To store value for Sources, it is a list structure as
+# [ ModuleSourceFilesClassObject, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
+# @var Protocols: To store value for Protocols, it is a list structure as
+# [ ProtocolName, ... ]
+# @var Ppis: To store value for Ppis, it is a list structure as
+# [ PpiName, ... ]
+# @var Guids: To store value for Guids, it is a list structure as
+# [ GuidName, ... ]
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var Packages: To store value for Packages, it is a list structure as
+# [ DecFileName, ... ]
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue}
+# @var Depex: To store value for Depex
+#
class ModuleBuildClassObject(object):
    ## Constructor
    #
    # Every member starts out empty; the INF meta-file parser fills them in
    # after construction.
    #
    def __init__(self):
        # header ([Defines]) information
        self.AutoGenVersion        = 0
        self.MetaFile              = ''
        self.BaseName              = ''
        self.ModuleType            = ''
        self.Guid                  = ''
        self.Version               = ''
        self.PcdIsDriver           = ''
        self.BinaryModule          = ''
        self.Shadow                = ''
        self.SourceOverridePath    = ''
        self.CustomMakefile        = {}
        self.Specification         = {}
        self.LibraryClass          = []
        self.ModuleEntryPointList  = []
        self.ModuleUnloadImageList = []
        self.ConstructorList       = []
        self.DestructorList        = []

        # module content: files, interfaces, PCDs, build options
        self.Binaries       = []
        self.Sources        = []
        self.LibraryClasses = sdict()
        self.Libraries      = []
        self.Protocols      = []
        self.Ppis           = []
        self.Guids          = []
        self.Includes       = []
        self.Packages       = []
        self.Pcds           = {}
        self.BuildOptions   = {}
        self.Depex          = {}

    ## String form: the module meta-file path
    #
    # @retval string Formatted String
    #
    def __str__(self):
        return str(self.MetaFile)

    ## Equality is decided by the meta file alone
    #
    # @retval False The two ModuleBuildClassObjects are different
    # @retval True  The two ModuleBuildClassObjects are the same
    #
    def __eq__(self, Other):
        return self.MetaFile == Other

    ## Hash on the meta file so instances can be used as dictionary keys
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.MetaFile)
+
+## PackageBuildClassObject
+#
+# This Class defines PackageBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for package meta file path
+# @var PackageName: To store value for PackageName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var Protocols: To store value for Protocols, it is a set structure as
+# { [ProtocolName] : Protocol Guid, ... }
+# @var Ppis: To store value for Ppis, it is a set structure as
+# { [PpiName] : Ppi Guid, ... }
+# @var Guids: To store value for Guids, it is a set structure as
+# { [GuidName] : Guid, ... }
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName] : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+#
class PackageBuildClassObject(object):
    ## Constructor
    #
    # Start with an empty package description; the DEC meta-file parser
    # populates the members after construction.
    #
    def __init__(self):
        # header ([Defines]) information
        self.MetaFile       = ''
        self.PackageName    = ''
        self.Guid           = ''
        self.Version        = ''

        # declarations made by the package
        self.Protocols      = {}
        self.Ppis           = {}
        self.Guids          = {}
        self.Includes       = []
        self.LibraryClasses = {}
        self.Pcds           = {}

    ## String form: the package meta-file path
    #
    # @retval string Formatted String
    #
    def __str__(self):
        return str(self.MetaFile)

    ## Equality is decided by the meta file alone
    #
    # @retval False The two PackageBuildClassObjects are different
    # @retval True  The two PackageBuildClassObjects are the same
    #
    def __eq__(self, Other):
        return self.MetaFile == Other

    ## Hash on the meta file so instances can be used as dictionary keys
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.MetaFile)
+
+## PlatformBuildClassObject
+#
+# This Class defines PlatformBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for platform meta-file path
+# @var PlatformName: To store value for PlatformName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var DscSpecification: To store value for DscSpecification
+# @var OutputDirectory: To store value for OutputDirectory
+# @var FlashDefinition: To store value for FlashDefinition
+# @var BuildNumber: To store value for BuildNumber
+# @var MakefileName: To store value for MakefileName
+# @var SkuIds: To store value for SkuIds, it is a set structure as
+# { 'SkuName' : SkuId, '!include' : includefilename, ...}
+# @var Modules: To store value for Modules, it is a list structure as
+# [ InfFileName, ... ]
+# @var Libraries: To store value for Libraries, it is a list structure as
+# [ InfFileName, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue }
+#
class PlatformBuildClassObject(object):
    ## Constructor
    #
    # Start with an empty platform description; the DSC meta-file parser
    # populates the members after construction.
    #
    def __init__(self):
        # header ([Defines]) information
        self.MetaFile         = ''
        self.PlatformName     = ''
        self.Guid             = ''
        self.Version          = ''
        self.DscSpecification = ''
        self.OutputDirectory  = ''
        self.FlashDefinition  = ''
        self.BuildNumber      = ''
        self.MakefileName     = ''

        # build content described by the platform
        self.SkuIds           = {}
        self.Modules          = []
        self.LibraryInstances = []
        self.LibraryClasses   = {}
        self.Libraries        = {}
        self.Pcds             = {}
        self.BuildOptions     = {}

    ## String form: the platform meta-file path
    #
    # @retval string Formatted String
    #
    def __str__(self):
        return str(self.MetaFile)

    ## Equality is decided by the meta file alone
    #
    # @retval False The two PlatformBuildClassObjects are different
    # @retval True  The two PlatformBuildClassObjects are the same
    #
    def __eq__(self, Other):
        return self.MetaFile == Other

    ## Hash on the meta file so instances can be used as dictionary keys
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.MetaFile)
+
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py new file mode 100644 index 0000000000..c8166bfa90 --- /dev/null +++ b/BaseTools/Source/Python/Workspace/MetaDataTable.py @@ -0,0 +1,335 @@ +## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+
+import Common.EdkLogger as EdkLogger
+from CommonDataClass import DataClass
+from CommonDataClass.DataClass import FileClass
+
+## Convert to SQL required string format
def ConvertToSqlString(StringList):
    # Double any embedded single quotes and wrap each string in single
    # quotes so the result can be spliced into an SQL statement.  A list
    # comprehension (not map+lambda) guarantees a real list on Python 3 as
    # well, matching the Python-2 map() behavior callers rely on when they
    # index or unpack the result.
    return ["'" + S.replace("'", "''") + "'" for S in StringList]
+
## Table
#
# This class defines a common database table
#
# @param object: Inherited from object class
#
# @param Cursor: Cursor of the database
# @param Name: Name of the table
#
class Table(object):
    # Column definition of the table; overridden by subclasses
    _COLUMN_ = ''
    # Step used when allocating a new record ID
    _ID_STEP_ = 1
    # Size of the ID window reserved for one table instance
    _ID_MAX_ = 0x80000000
    # Dummy value inserted to mark the end of the table
    _DUMMY_ = 0

    ## Constructor
    #
    # @param Cursor     Cursor of the database
    # @param Name       Name of the table
    # @param IdBase     Base value for record IDs of this table
    # @param Temporary  Create the table as a temporary table if True
    #
    def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):
        self.Cur = Cursor
        self.Table = Name
        self.IdBase = int(IdBase)
        self.ID = int(IdBase)
        self.Temporary = Temporary

    ## The table name is the string form of the object
    def __str__(self):
        return self.Table

    ## Create the table
    #
    # @param NewTable  Drop any existing table with the same name first
    #
    def Create(self, NewTable=True):
        if NewTable:
            self.Drop()

        if self.Temporary:
            SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
        else:
            SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
        EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)
        self.Cur.execute(SqlCommand)
        self.ID = self.GetId()

    ## Insert a record into the table
    #
    # @param Args  Column values, excluding ID (generated here)
    #
    # @retval ID   The ID assigned to the new record
    #
    def Insert(self, *Args):
        self.ID = self.ID + self._ID_STEP_
        # Wrap around inside this table's ID window so IDs never collide
        # with those of other tables sharing the same database.
        if self.ID >= (self.IdBase + self._ID_MAX_):
            self.ID = self.IdBase + self._ID_STEP_
        Values = ", ".join([str(Arg) for Arg in Args])
        SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)
        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
        self.Cur.execute(SqlCommand)
        return self.ID

    ## Query the table
    #
    # Write every record of the table to the verbose log.
    #
    def Query(self):
        SqlCommand = """select * from %s""" % self.Table
        self.Cur.execute(SqlCommand)
        for Rs in self.Cur:
            EdkLogger.verbose(str(Rs))
        # (the original additionally fetched max(ID) into an unused local
        #  'TotalCount' here; removed as dead code)

    ## Drop the table if it exists
    def Drop(self):
        SqlCommand = """drop table IF EXISTS %s""" % self.Table
        self.Cur.execute(SqlCommand)

    ## Get a count of all records of the table
    #
    # @retval Count  Total count of all records
    #
    def GetCount(self):
        SqlCommand = """select count(ID) from %s""" % self.Table
        Record = self.Cur.execute(SqlCommand).fetchall()
        return Record[0][0]

    ## Get the highest record ID currently stored in the table
    #
    # @retval Id  max(ID), or IdBase when the table is empty
    #
    def GetId(self):
        SqlCommand = """select max(ID) from %s""" % self.Table
        Record = self.Cur.execute(SqlCommand).fetchall()
        Id = Record[0][0]
        # 'is None' (identity test) instead of the original '== None'
        if Id is None:
            Id = self.IdBase
        return Id

    ## Re-initialize the running ID from the current table content
    def InitID(self):
        self.ID = self.GetId()

    ## Execute an SQL command and return its result set
    #
    # @param SqlCommand  The SQL command to be executed
    #
    # @retval RecordSet  All rows produced by the command
    #
    def Exec(self, SqlCommand):
        EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
        self.Cur.execute(SqlCommand)
        RecordSet = self.Cur.fetchall()
        return RecordSet

    ## Mark the end of the table by inserting a dummy record
    def SetEndFlag(self):
        self.Exec("insert into %s values(%s)" % (self.Table, self._DUMMY_))

    ## Check whether the table carries the end-flag record
    #
    # NOTE(review): this tests min(ID) == -1, which only holds when the end
    # flag was stored with a negative ID by a subclass -- confirm against
    # callers before relying on it with this base class's SetEndFlag.
    #
    def IsIntegral(self):
        Result = self.Exec("select min(ID) from %s" % (self.Table))
        if Result[0][0] != -1:
            return False
        return True
+
+## TableFile
+#
+# This class defined a table used for file
+#
+# @param object: Inherited from object class
+#
class TableFile(Table):
    # Schema of the 'File' table
    _COLUMN_ = '''
        ID INTEGER PRIMARY KEY,
        Name VARCHAR NOT NULL,
        ExtName VARCHAR,
        Path VARCHAR,
        FullPath VARCHAR NOT NULL,
        Model INTEGER DEFAULT 0,
        TimeStamp SINGLE NOT NULL
        '''
    def __init__(self, Cursor):
        Table.__init__(self, Cursor, 'File')

    ## Insert a record into table File
    #
    # @param Name       Name of a File
    # @param ExtName    ExtName of a File
    # @param Path       Path of a File
    # @param FullPath   FullPath of a File
    # @param Model      Model of a File
    # @param TimeStamp  TimeStamp of a File
    #
    # @retval ID        ID of the inserted record
    #
    def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
        (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
        return Table.Insert(
                        self,
                        Name,
                        ExtName,
                        Path,
                        FullPath,
                        Model,
                        TimeStamp
                        )

    ## Insert one file into the table
    #
    # @param FileFullPath  The full path of the file
    # @param Model         The model of the file
    #
    # @retval FileID       The ID after the record is inserted
    #
    def InsertFile(self, FileFullPath, Model):
        (Filepath, Name) = os.path.split(FileFullPath)
        Ext = os.path.splitext(FileFullPath)[1]
        # Index 8 of the stat tuple is ST_MTIME (integer seconds).  Kept as
        # an index rather than .st_mtime, because the attribute form returns
        # a float and would change the stored value.
        TimeStamp = os.stat(FileFullPath)[8]
        # (the original also constructed an unused FileClass object here;
        #  removed as dead code)
        return self.Insert(
                        Name,
                        Ext,
                        Filepath,
                        FileFullPath,
                        Model,
                        TimeStamp
                        )

    ## Get ID of a given file
    #
    # @param FilePath  Path of file
    #
    # @retval ID       ID value of the file, or None when not found
    #
    def GetFileId(self, FilePath):
        QueryScript = "select ID from %s where FullPath = '%s'" % (self.Table, FilePath)
        RecordList = self.Exec(QueryScript)
        if len(RecordList) == 0:
            return None
        return RecordList[0][0]

    ## Get type of a given file
    #
    # @param FileId  ID of a file
    #
    # @retval Model  Model value of the file, or None when not found
    #
    def GetFileType(self, FileId):
        QueryScript = "select Model from %s where ID = '%s'" % (self.Table, FileId)
        RecordList = self.Exec(QueryScript)
        if len(RecordList) == 0:
            return None
        return RecordList[0][0]

    ## Get file timestamp of a given file
    #
    # @param FileId      ID of file
    #
    # @retval TimeStamp  TimeStamp value of the file, or None when not found
    #
    def GetFileTimeStamp(self, FileId):
        QueryScript = "select TimeStamp from %s where ID = '%s'" % (self.Table, FileId)
        RecordList = self.Exec(QueryScript)
        if len(RecordList) == 0:
            return None
        return RecordList[0][0]

    ## Update the timestamp of a given file
    #
    # @param FileId     ID of file
    # @param TimeStamp  New time stamp of the file
    #
    def SetFileTimeStamp(self, FileId, TimeStamp):
        self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))

    ## Get list of files with a given type
    #
    # @param FileType   Model value to filter on
    #
    # @retval FileList  List of full paths of files with the given type
    #
    def GetFileList(self, FileType):
        RecordList = self.Exec("select FullPath from %s where Model=%s" % (self.Table, FileType))
        if len(RecordList) == 0:
            return []
        return [R[0] for R in RecordList]
+
+## TableDataModel
+#
+# This class defined a table used for data model
+#
+# @param object: Inherited from object class
+#
+#
class TableDataModel(Table):
    # Schema of the 'DataModel' table
    _COLUMN_ = """
        ID INTEGER PRIMARY KEY,
        CrossIndex INTEGER NOT NULL,
        Name VARCHAR NOT NULL,
        Description VARCHAR
        """
    def __init__(self, Cursor):
        Table.__init__(self, Cursor, 'DataModel')

    ## Insert a record into table DataModel
    #
    # @param CrossIndex   CrossIndex of a ModelType
    # @param Name         Name of a ModelType
    # @param Description  Description of a ModelType
    #
    # @retval ID          ID of the inserted record
    #
    def Insert(self, CrossIndex, Name, Description):
        (Name, Description) = ConvertToSqlString((Name, Description))
        return Table.Insert(self, CrossIndex, Name, Description)

    ## Create all default records of table DataModel
    #
    # Does nothing when the table is already populated.
    #
    def InitTable(self):
        EdkLogger.verbose("\nInitialize table DataModel started ...")
        Count = self.GetCount()
        # 'is not None' (identity test) instead of the original '!= None'
        if Count is not None and Count != 0:
            return
        for Item in DataClass.MODEL_LIST:
            CrossIndex = Item[1]
            Name = Item[0]
            # the model name doubles as its description
            Description = Item[0]
            self.Insert(CrossIndex, Name, Description)
        EdkLogger.verbose("Initialize table DataModel ... DONE!")

    ## Get CrossIndex
    #
    # Get a model's cross index from its name.
    #
    # @param ModelName    Name of the model
    #
    # @retval CrossIndex  CrossIndex of the model, or -1 when not found
    #
    def GetCrossIndex(self, ModelName):
        CrossIndex = -1
        SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
        self.Cur.execute(SqlCommand)
        for Item in self.Cur:
            CrossIndex = Item[0]

        return CrossIndex
+
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py new file mode 100644 index 0000000000..294237daee --- /dev/null +++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py @@ -0,0 +1,1131 @@ +## @file +# This file is used to parse meta files +# +# Copyright (c) 2008, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import time + +import Common.EdkLogger as EdkLogger +from CommonDataClass.DataClass import * +from Common.DataType import * +from Common.String import * +from Common.Misc import Blist, GuidStructureStringToGuidString, CheckPcdDatum + +## Base class of parser +# +# This class is used for derivation purpose. The specific parser for one kind +# type file must derive this class and implement some public interfaces. 
+# +# @param FilePath The path of platform description file +# @param FileType The raw data of DSC file +# @param Table Database used to retrieve module/package information +# @param Macros Macros used for replacement in file +# @param Owner Owner ID (for sub-section parsing) +# @param From ID from which the data comes (for !INCLUDE directive) +# +class MetaFileParser(object): + # data type (file content) for specific file type + DataType = {} + + ## Constructor of MetaFileParser + # + # Initialize object of MetaFileParser + # + # @param FilePath The path of platform description file + # @param FileType The raw data of DSC file + # @param Table Database used to retrieve module/package information + # @param Macros Macros used for replacement in file + # @param Owner Owner ID (for sub-section parsing) + # @param From ID from which the data comes (for !INCLUDE directive) + # + def __init__(self, FilePath, FileType, Table, Macros=None, Owner=-1, From=-1): + self._Table = Table + self._FileType = FileType + self.MetaFile = FilePath + self._FileDir = os.path.dirname(self.MetaFile) + self._Macros = {} + + # for recursive parsing + self._Owner = Owner + self._From = From + + # parsr status for parsing + self._Content = None + self._ValueList = ['', '', '', '', ''] + self._Scope = [] + self._LineIndex = 0 + self._CurrentLine = '' + self._SectionType = MODEL_UNKNOWN + self._SectionName = '' + self._InSubsection = False + self._SubsectionType = MODEL_UNKNOWN + self._SubsectionName = '' + self._LastItem = -1 + self._Enabled = 0 + self._Finished = False + + ## Store the parsed data in table + def _Store(self, *Args): + return self._Table.Insert(*Args) + + ## Virtual method for starting parse + def Start(self): + raise NotImplementedError + + ## Set parsing complete flag in both class and table + def _Done(self): + self._Finished = True + self._Table.SetEndFlag() + + ## Return the table containg parsed data + # + # If the parse complete flag is not set, this method will try to 
parse the + # file before return the table + # + def _GetTable(self): + if not self._Finished: + self.Start() + return self._Table + + ## Get the parse complete flag + def _GetFinished(self): + return self._Finished + + ## Set the complete flag + def _SetFinished(self, Value): + self._Finished = Value + + ## Use [] style to query data in table, just for readability + # + # DataInfo = [data_type, scope1(arch), scope2(platform,moduletype)] + # + def __getitem__(self, DataInfo): + if type(DataInfo) != type(()): + DataInfo = (DataInfo,) + return self.Table.Query(*DataInfo) + + ## Data parser for the common format in different type of file + # + # The common format in the meatfile is like + # + # xxx1 | xxx2 | xxx3 + # + def _CommonParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + self._ValueList[0:len(TokenList)] = TokenList + + ## Data parser for the format in which there's path + # + # Only path can have macro used. So we need to replace them before use. + # + def _PathParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + self._ValueList[0:len(TokenList)] = TokenList + if len(self._Macros) > 0: + for Index in range(0, len(self._ValueList)): + Value = self._ValueList[Index] + if Value == None or Value == '': + continue + self._ValueList[Index] = NormPath(Value, self._Macros) + + ## Skip unsupported data + def _Skip(self): + EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=self._CurrentLine); + self._ValueList[0:1] = [self._CurrentLine] + + ## Section header parser + # + # The section header is always in following format: + # + # [section_name.arch<.platform|module_type>] + # + def _SectionHeaderParser(self): + self._Scope = [] + self._SectionName = '' + ArchList = set() + for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT): + if Item == '': + continue + ItemList = GetSplitValueList(Item, TAB_SPLIT) + # different section should not 
mix in one section + if self._SectionName != '' and self._SectionName != ItemList[0].upper(): + EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section", + File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine) + self._SectionName = ItemList[0].upper() + if self._SectionName in self.DataType: + self._SectionType = self.DataType[self._SectionName] + else: + self._SectionType = MODEL_UNKNOWN + EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=self._CurrentLine) + # S1 is always Arch + if len(ItemList) > 1: + S1 = ItemList[1].upper() + else: + S1 = 'COMMON' + ArchList.add(S1) + # S2 may be Platform or ModuleType + if len(ItemList) > 2: + S2 = ItemList[2].upper() + else: + S2 = 'COMMON' + self._Scope.append([S1, S2]) + + # 'COMMON' must not be used with specific ARCHs at the same section + if 'COMMON' in ArchList and len(ArchList) > 1: + EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", + File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine) + + ## [defines] section parser + def _DefineParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + self._ValueList[0:len(TokenList)] = TokenList + if self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No value specified", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1) + + ## DEFINE name=value parser + def _MacroParser(self): + TokenList = GetSplitValueList(self._CurrentLine, ' ', 1) + MacroType = TokenList[0] + if len(TokenList) < 2 or TokenList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No macro name/value given", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1) + TokenList = GetSplitValueList(TokenList[1], TAB_EQUAL_SPLIT, 1) + if TokenList[0] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given", + ExtraData=self._CurrentLine, 
File=self.MetaFile, Line=self._LineIndex+1) + if len(TokenList) == 1: + self._Macros[TokenList[0]] = '' + else: + # keep the macro definition for later use + self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False) + + return TokenList[0], self._Macros[TokenList[0]] + + ## [BuildOptions] section parser + def _BuildOptionParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + TokenList2 = GetSplitValueList(TokenList[0], ':', 1) + if len(TokenList2) == 2: + self._ValueList[0] = TokenList2[0] # toolchain family + self._ValueList[1] = TokenList2[1] # keys + else: + self._ValueList[1] = TokenList[0] + if len(TokenList) == 2: # value + self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros) + + if self._ValueList[1].count('_') != 4: + EdkLogger.error( + 'Parser', + FORMAT_INVALID, + "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1], + ExtraData=self._CurrentLine, + File=self.MetaFile, + Line=self._LineIndex+1 + ) + + _SectionParser = {} + Table = property(_GetTable) + Finished = property(_GetFinished, _SetFinished) + + +## INF file parser class +# +# @param FilePath The path of platform description file +# @param FileType The raw data of DSC file +# @param Table Database used to retrieve module/package information +# @param Macros Macros used for replacement in file +# +class InfParser(MetaFileParser): + # INF file supported data types (one type per section) + DataType = { + TAB_UNKNOWN.upper() : MODEL_UNKNOWN, + TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER, + TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION, + TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE, + TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE, + TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS, + TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE, + TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE, + TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD, + TAB_INF_PATCH_PCD.upper() : 
MODEL_PCD_PATCHABLE_IN_MODULE, + TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG, + TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX, + TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC, + TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE, + TAB_GUIDS.upper() : MODEL_EFI_GUID, + TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL, + TAB_PPIS.upper() : MODEL_EFI_PPI, + TAB_DEPEX.upper() : MODEL_EFI_DEPEX, + TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE, + TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION + } + + ## Constructor of InfParser + # + # Initialize object of InfParser + # + # @param FilePath The path of module description file + # @param FileType The raw data of DSC file + # @param Table Database used to retrieve module/package information + # @param Macros Macros used for replacement in file + # + def __init__(self, FilePath, FileType, Table, Macros=None): + MetaFileParser.__init__(self, FilePath, FileType, Table, Macros) + + ## Parser starter + def Start(self): + NmakeLine = '' + try: + self._Content = open(self.MetaFile, 'r').readlines() + except: + EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile) + + # parse the file line by line + IsFindBlockComment = False + + for Index in range(0, len(self._Content)): + # skip empty, commented, block commented lines + Line = CleanString(self._Content[Index], AllowCppStyleComment=True) + NextLine = '' + if Index + 1 < len(self._Content): + NextLine = CleanString(self._Content[Index + 1]) + if Line == '': + continue + if Line.find(DataType.TAB_COMMENT_R8_START) > -1: + IsFindBlockComment = True + continue + if Line.find(DataType.TAB_COMMENT_R8_END) > -1: + IsFindBlockComment = False + continue + if IsFindBlockComment: + continue + + self._LineIndex = Index + self._CurrentLine = Line + + # section header + if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: + self._SectionHeaderParser() + continue + # merge two lines specified by '\' in section NMAKE + elif self._SectionType == 
MODEL_META_DATA_NMAKE: + if Line[-1] == '\\': + if NextLine == '': + self._CurrentLine = NmakeLine + Line[0:-1] + NmakeLine = '' + else: + if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END: + self._CurrentLine = NmakeLine + Line[0:-1] + NmakeLine = '' + else: + NmakeLine = NmakeLine + ' ' + Line[0:-1] + continue + else: + self._CurrentLine = NmakeLine + Line + NmakeLine = '' + elif Line.upper().startswith('DEFINE '): + # file private macros + self._MacroParser() + continue + + # section content + self._ValueList = ['','',''] + # parse current line, result will be put in self._ValueList + self._SectionParser[self._SectionType](self) + if self._ValueList == None: + continue + # + # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1, + # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 + # + for Arch, Platform in self._Scope: + self._Store(self._SectionType, + self._ValueList[0], + self._ValueList[1], + self._ValueList[2], + Arch, + Platform, + self._Owner, + self._LineIndex+1, + -1, + self._LineIndex+1, + -1, + 0 + ) + self._Done() + + ## Data parser for the format in which there's path + # + # Only path can have macro used. So we need to replace them before use. 
+ # + def _IncludeParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + self._ValueList[0:len(TokenList)] = TokenList + if len(self._Macros) > 0: + for Index in range(0, len(self._ValueList)): + Value = self._ValueList[Index] + if Value.upper().find('$(EFI_SOURCE)\Edk'.upper()) > -1 or Value.upper().find('$(EFI_SOURCE)/Edk'.upper()) > -1: + Value = '$(EDK_SOURCE)' + Value[17:] + if Value.find('$(EFI_SOURCE)') > -1 or Value.find('$(EDK_SOURCE)') > -1: + pass + elif Value.startswith('.'): + pass + elif Value.startswith('$('): + pass + else: + Value = '$(EFI_SOURCE)/' + Value + + if Value == None or Value == '': + continue + self._ValueList[Index] = NormPath(Value, self._Macros) + + ## Parse [Sources] section + # + # Only path can have macro used. So we need to replace them before use. + # + def _SourceFileParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + self._ValueList[0:len(TokenList)] = TokenList + # For Acpi tables, remove macro like ' TABLE_NAME=Sata1' + if 'COMPONENT_TYPE' in self._Macros: + if self._Macros['COMPONENT_TYPE'].upper() == 'ACPITABLE': + self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0] + if self._Macros['BASE_NAME'] == 'Microcode': + pass + if len(self._Macros) > 0: + for Index in range(0, len(self._ValueList)): + Value = self._ValueList[Index] + if Value == None or Value == '': + continue + self._ValueList[Index] = NormPath(Value, self._Macros) + + ## Parse [Binaries] section + # + # Only path can have macro used. So we need to replace them before use. 
+ # + def _BinaryFileParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2) + if len(TokenList) < 2: + EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified", + ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])", + File=self.MetaFile, Line=self._LineIndex+1) + if not TokenList[0]: + EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified", + ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])", + File=self.MetaFile, Line=self._LineIndex+1) + if not TokenList[1]: + EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified", + ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])", + File=self.MetaFile, Line=self._LineIndex+1) + self._ValueList[0:len(TokenList)] = TokenList + self._ValueList[1] = NormPath(self._ValueList[1], self._Macros) + + ## [defines] section parser + def _DefineParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + self._ValueList[0:len(TokenList)] = TokenList + self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False) + if self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No value specified", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1) + + ## [nmake] section parser (R8.x style only) + def _NmakeParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + self._ValueList[0:len(TokenList)] = TokenList + # remove macros + self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, False) + # remove self-reference in macro setting + #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''}) + + ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser + def _PcdParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) + self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) + if len(TokenList) > 1: + 
self._ValueList[2] = TokenList[1] + if self._ValueList[0] == '' or self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified", + ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)", + File=self.MetaFile, Line=self._LineIndex+1) + + ## [depex] section parser + def _DepexParser(self): + self._ValueList[0:1] = [self._CurrentLine] + + _SectionParser = { + MODEL_UNKNOWN : MetaFileParser._Skip, + MODEL_META_DATA_HEADER : _DefineParser, + MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser, + MODEL_EFI_INCLUDE : _IncludeParser, # for R8.x modules + MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for R8.x modules + MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser, + MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser, + MODEL_META_DATA_NMAKE : _NmakeParser, # for R8.x modules + MODEL_PCD_FIXED_AT_BUILD : _PcdParser, + MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser, + MODEL_PCD_FEATURE_FLAG : _PcdParser, + MODEL_PCD_DYNAMIC_EX : _PcdParser, + MODEL_PCD_DYNAMIC : _PcdParser, + MODEL_EFI_SOURCE_FILE : _SourceFileParser, + MODEL_EFI_GUID : MetaFileParser._CommonParser, + MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser, + MODEL_EFI_PPI : MetaFileParser._CommonParser, + MODEL_EFI_DEPEX : _DepexParser, + MODEL_EFI_BINARY_FILE : _BinaryFileParser, + MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip, + } + +## DSC file parser class +# +# @param FilePath The path of platform description file +# @param FileType The raw data of DSC file +# @param Table Database used to retrieve module/package information +# @param Macros Macros used for replacement in file +# @param Owner Owner ID (for sub-section parsing) +# @param From ID from which the data comes (for !INCLUDE directive) +# +class DscParser(MetaFileParser): + # DSC file supported data types (one type per section) + DataType = { + TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID, + TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE, + 
TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS, + TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION, + TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD, + TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE, + TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG, + TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT, + TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII, + TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD, + TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT, + TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII, + TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD, + TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT, + TAB_COMPONENTS_SOURCE_OVERRIDE_PATH.upper() : MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, + TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER, + TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE, + TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, + TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF, + TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF, + TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF, + TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, + TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF, + } + + # sections which allow "!include" directive + _IncludeAllowedSection = [ + TAB_LIBRARIES.upper(), + TAB_LIBRARY_CLASSES.upper(), + TAB_SKUIDS.upper(), + TAB_COMPONENTS.upper(), + TAB_BUILD_OPTIONS.upper(), + TAB_PCDS_FIXED_AT_BUILD_NULL.upper(), + TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper(), + TAB_PCDS_FEATURE_FLAG_NULL.upper(), + TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper(), + TAB_PCDS_DYNAMIC_HII_NULL.upper(), + TAB_PCDS_DYNAMIC_VPD_NULL.upper(), + TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper(), + TAB_PCDS_DYNAMIC_EX_HII_NULL.upper(), + TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper(), + ] + + # operators which can be used in 
"!if/!ifdef/!ifndef" directives + _OP_ = { + "!" : lambda a: not a, + "!=" : lambda a,b: a!=b, + "==" : lambda a,b: a==b, + ">" : lambda a,b: a>b, + "<" : lambda a,b: a<b, + "=>" : lambda a,b: a>=b, + ">=" : lambda a,b: a>=b, + "<=" : lambda a,b: a<=b, + "=<" : lambda a,b: a<=b, + } + + ## Constructor of DscParser + # + # Initialize object of DscParser + # + # @param FilePath The path of platform description file + # @param FileType The raw data of DSC file + # @param Table Database used to retrieve module/package information + # @param Macros Macros used for replacement in file + # @param Owner Owner ID (for sub-section parsing) + # @param From ID from which the data comes (for !INCLUDE directive) + # + def __init__(self, FilePath, FileType, Table, Macros=None, Owner=-1, From=-1): + MetaFileParser.__init__(self, FilePath, FileType, Table, Macros, Owner, From) + # to store conditional directive evaluation result + self._Eval = Blist() + + ## Parser starter + def Start(self): + try: + if self._Content == None: + self._Content = open(self.MetaFile, 'r').readlines() + except: + EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile) + + for Index in range(0, len(self._Content)): + Line = CleanString(self._Content[Index]) + # skip empty line + if Line == '': + continue + self._CurrentLine = Line + self._LineIndex = Index + + # section header + if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: + self._SectionHeaderParser() + continue + # subsection ending + elif Line[0] == '}': + self._InSubsection = False + self._SubsectionType = MODEL_UNKNOWN + self._SubsectionName = '' + self._Owner = -1 + continue + # subsection header + elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END: + self._SubsectionHeaderParser() + continue + # directive line + elif Line[0] == '!': + self._DirectiveParser() + continue + # file private macros + elif Line.upper().startswith('DEFINE '): + self._MacroParser() + continue + elif 
Line.upper().startswith('EDK_GLOBAL '): + (Name, Value) = self._MacroParser() + for Arch, ModuleType in self._Scope: + self._LastItem = self._Store( + MODEL_META_DATA_DEFINE, + Name, + Value, + '', + Arch, + 'COMMON', + self._Owner, + self._From, + self._LineIndex+1, + -1, + self._LineIndex+1, + -1, + self._Enabled + ) + continue + + # section content + if self._InSubsection: + SectionType = self._SubsectionType + SectionName = self._SubsectionName + if self._Owner == -1: + self._Owner = self._LastItem + else: + SectionType = self._SectionType + SectionName = self._SectionName + + self._ValueList = ['', '', ''] + self._SectionParser[SectionType](self) + if self._ValueList == None: + continue + + # + # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1, + # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1 + # + for Arch, ModuleType in self._Scope: + self._LastItem = self._Store( + SectionType, + self._ValueList[0], + self._ValueList[1], + self._ValueList[2], + Arch, + ModuleType, + self._Owner, + self._From, + self._LineIndex+1, + -1, + self._LineIndex+1, + -1, + self._Enabled + ) + self._Done() + + ## [defines] section parser + def _DefineParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + if len(TokenList) < 2: + EdkLogger.error('Parser', FORMAT_INVALID, "No value specified", + ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1) + # 'FLASH_DEFINITION', 'OUTPUT_DIRECTORY' need special processing + if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']: + TokenList[1] = NormPath(TokenList[1], self._Macros) + self._ValueList[0:len(TokenList)] = TokenList + + ## <subsection_header> parser + def _SubsectionHeaderParser(self): + self._SubsectionName = self._CurrentLine[1:-1].upper() + if self._SubsectionName in self.DataType: + self._SubsectionType = self.DataType[self._SubsectionName] + else: + self._SubsectionType = MODEL_UNKNOWN + EdkLogger.warn("Parser", 
"Unrecognized sub-section", File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=self._CurrentLine) + + ## Directive statement parser + def _DirectiveParser(self): + self._ValueList = ['','',''] + TokenList = GetSplitValueList(self._CurrentLine, ' ', 1) + self._ValueList[0:len(TokenList)] = TokenList + DirectiveName = self._ValueList[0].upper() + if DirectiveName not in self.DataType: + EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName, + File=self.MetaFile, Line=self._LineIndex+1) + if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '': + EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression", + File=self.MetaFile, Line=self._LineIndex+1, + ExtraData=self._CurrentLine) + # keep the directive in database first + self._LastItem = self._Store( + self.DataType[DirectiveName], + self._ValueList[0], + self._ValueList[1], + self._ValueList[2], + 'COMMON', + 'COMMON', + self._Owner, + self._From, + self._LineIndex + 1, + -1, + self._LineIndex + 1, + -1, + 0 + ) + + # process the directive + if DirectiveName == "!INCLUDE": + if not self._SectionName in self._IncludeAllowedSection: + EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1, + ExtraData="'!include' is not allowed under section [%s]" % self._SectionName) + # the included file must be relative to the parsing file + IncludedFile = os.path.join(self._FileDir, self._ValueList[1]) + Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem) + # set the parser status with current status + Parser._SectionName = self._SectionName + Parser._SectionType = self._SectionType + Parser._Scope = self._Scope + Parser._Enabled = self._Enabled + try: + Parser.Start() + except: + EdkLogger.error("Parser", PARSER_ERROR, File=self.MetaFile, Line=self._LineIndex+1, + ExtraData="Failed to parse content in file %s" % IncludedFile) + # update current status with sub-parser's 
status + self._SectionName = Parser._SectionName + self._SectionType = Parser._SectionType + self._Scope = Parser._Scope + self._Enabled = Parser._Enabled + else: + if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]: + # evaluate the expression + Result = self._Evaluate(self._ValueList[1]) + if DirectiveName == "!IFNDEF": + Result = not Result + self._Eval.append(Result) + elif DirectiveName in ["!ELSEIF"]: + # evaluate the expression + self._Eval[-1] = (not self._Eval[-1]) & self._Evaluate(self._ValueList[1]) + elif DirectiveName in ["!ELSE"]: + self._Eval[-1] = not self._Eval[-1] + elif DirectiveName in ["!ENDIF"]: + if len(self._Eval) > 0: + self._Eval.pop() + else: + EdkLogger.error("Parser", FORMAT_INVALID, "!IF..[!ELSE]..!ENDIF doesn't match", + File=self.MetaFile, Line=self._LineIndex+1) + if self._Eval.Result == False: + self._Enabled = 0 - len(self._Eval) + else: + self._Enabled = len(self._Eval) + + ## Evaludate the value of expression in "if/ifdef/ifndef" directives + def _Evaluate(self, Expression): + TokenList = Expression.split() + TokenNumber = len(TokenList) + # one operand, guess it's just a macro name + if TokenNumber == 1: + return TokenList[0] in self._Macros + # two operands, suppose it's "!xxx" format + elif TokenNumber == 2: + Op = TokenList[0] + if Op not in self._OP_: + EdkLogger.error('Parser', FORMAT_INVALID, "Unsupported operator [%s]" % Op, File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=Expression) + if TokenList[1].upper() == 'TRUE': + Value = True + else: + Value = False + return self._OP_[Op](Value) + # three operands + elif TokenNumber == 3: + Name = TokenList[0] + if Name not in self._Macros: + return False + Value = TokenList[2] + if Value[0] in ["'", '"'] and Value[-1] in ["'", '"']: + Value = Value[1:-1] + Op = TokenList[1] + if Op not in self._OP_: + EdkLogger.error('Parser', FORMAT_INVALID, "Unsupported operator [%s]" % Op, File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=Expression) + return 
self._OP_[Op](self._Macros[Name], Value) + else: + EdkLogger.error('Parser', FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1, + ExtraData=Expression) + + ## PCD sections parser + # + # [PcdsFixedAtBuild] + # [PcdsPatchableInModule] + # [PcdsFeatureFlag] + # [PcdsDynamicEx + # [PcdsDynamicExDefault] + # [PcdsDynamicExVpd] + # [PcdsDynamicExHii] + # [PcdsDynamic] + # [PcdsDynamicDefault] + # [PcdsDynamicVpd] + # [PcdsDynamicHii] + # + def _PcdParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) + self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) + if len(TokenList) == 2: + self._ValueList[2] = TokenList[1] + if self._ValueList[0] == '' or self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified", + ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)", + File=self.MetaFile, Line=self._LineIndex+1) + if self._ValueList[2] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given", + ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)", + File=self.MetaFile, Line=self._LineIndex+1) + + ## [components] section parser + def _ComponentParser(self): + if self._CurrentLine[-1] == '{': + self._ValueList[0] = self._CurrentLine[0:-1].strip() + self._InSubsection = True + else: + self._ValueList[0] = self._CurrentLine + if len(self._Macros) > 0: + self._ValueList[0] = NormPath(self._ValueList[0], self._Macros) + + def _LibraryClassParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT) + if len(TokenList) < 2: + EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified", + ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)", + File=self.MetaFile, Line=self._LineIndex+1) + if TokenList[0] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified", + ExtraData=self._CurrentLine + " 
(<LibraryClassName>|<LibraryInstancePath>)", + File=self.MetaFile, Line=self._LineIndex+1) + if TokenList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified", + ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)", + File=self.MetaFile, Line=self._LineIndex+1) + self._ValueList[0:len(TokenList)] = TokenList + if len(self._Macros) > 0: + self._ValueList[1] = NormPath(self._ValueList[1], self._Macros) + + def _CompponentSourceOverridePathParser(self): + if len(self._Macros) > 0: + self._ValueList[0] = NormPath(self._CurrentLine, self._Macros) + + _SectionParser = { + MODEL_META_DATA_HEADER : _DefineParser, + MODEL_EFI_SKU_ID : MetaFileParser._CommonParser, + MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._PathParser, + MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser, + MODEL_PCD_FIXED_AT_BUILD : _PcdParser, + MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser, + MODEL_PCD_FEATURE_FLAG : _PcdParser, + MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser, + MODEL_PCD_DYNAMIC_HII : _PcdParser, + MODEL_PCD_DYNAMIC_VPD : _PcdParser, + MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser, + MODEL_PCD_DYNAMIC_EX_HII : _PcdParser, + MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser, + MODEL_META_DATA_COMPONENT : _ComponentParser, + MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH : _CompponentSourceOverridePathParser, + MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser, + MODEL_UNKNOWN : MetaFileParser._Skip, + MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip, + } + +## DEC file parser class +# +# @param FilePath The path of platform description file +# @param FileType The raw data of DSC file +# @param Table Database used to retrieve module/package information +# @param Macros Macros used for replacement in file +# +class DecParser(MetaFileParser): + # DEC file supported data types (one type per section) + DataType = { + TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER, + TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE, + TAB_LIBRARY_CLASSES.upper() : 
MODEL_EFI_LIBRARY_CLASS, + TAB_GUIDS.upper() : MODEL_EFI_GUID, + TAB_PPIS.upper() : MODEL_EFI_PPI, + TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL, + TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD, + TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE, + TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG, + TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC, + TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX, + } + + ## Constructor of DecParser + # + # Initialize object of DecParser + # + # @param FilePath The path of platform description file + # @param FileType The raw data of DSC file + # @param Table Database used to retrieve module/package information + # @param Macros Macros used for replacement in file + # + def __init__(self, FilePath, FileType, Table, Macro=None): + MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1) + + ## Parser starter + def Start(self): + try: + if self._Content == None: + self._Content = open(self.MetaFile, 'r').readlines() + except: + EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile) + + for Index in range(0, len(self._Content)): + Line = CleanString(self._Content[Index]) + # skip empty line + if Line == '': + continue + self._CurrentLine = Line + self._LineIndex = Index + + # section header + if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END: + self._SectionHeaderParser() + continue + elif Line.startswith('DEFINE '): + self._MacroParser() + continue + elif len(self._SectionType) == 0: + continue + + # section content + self._ValueList = ['','',''] + self._SectionParser[self._SectionType[0]](self) + if self._ValueList == None: + continue + + # + # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1, + # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1 + # + for Arch, ModuleType, Type in self._Scope: + self._LastItem = self._Store( + Type, + self._ValueList[0], + self._ValueList[1], + 
self._ValueList[2], + Arch, + ModuleType, + self._Owner, + self._LineIndex+1, + -1, + self._LineIndex+1, + -1, + 0 + ) + self._Done() + + ## Section header parser + # + # The section header is always in following format: + # + # [section_name.arch<.platform|module_type>] + # + def _SectionHeaderParser(self): + self._Scope = [] + self._SectionName = '' + self._SectionType = [] + ArchList = set() + for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT): + if Item == '': + continue + ItemList = GetSplitValueList(Item, TAB_SPLIT) + + # different types of PCD are permissible in one section + self._SectionName = ItemList[0].upper() + if self._SectionName in self.DataType: + if self.DataType[self._SectionName] not in self._SectionType: + self._SectionType.append(self.DataType[self._SectionName]) + else: + EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile, + Line=self._LineIndex+1, ExtraData=self._CurrentLine) + continue + + if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1: + EdkLogger.error( + 'Parser', + FORMAT_INVALID, + "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL, + File=self.MetaFile, + Line=self._LineIndex+1, + ExtraData=self._CurrentLine + ) + # S1 is always Arch + if len(ItemList) > 1: + S1 = ItemList[1].upper() + else: + S1 = 'COMMON' + ArchList.add(S1) + # S2 may be Platform or ModuleType + if len(ItemList) > 2: + S2 = ItemList[2].upper() + else: + S2 = 'COMMON' + if [S1, S2, self.DataType[self._SectionName]] not in self._Scope: + self._Scope.append([S1, S2, self.DataType[self._SectionName]]) + + # 'COMMON' must not be used with specific ARCHs at the same section + if 'COMMON' in ArchList and len(ArchList) > 1: + EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs", + File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine) + + ## [guids], [ppis] and [protocols] section parser + def 
_GuidParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1) + if len(TokenList) < 2: + EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified", + ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)", + File=self.MetaFile, Line=self._LineIndex+1) + if TokenList[0] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified", + ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)", + File=self.MetaFile, Line=self._LineIndex+1) + if TokenList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified", + ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)", + File=self.MetaFile, Line=self._LineIndex+1) + if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format", + ExtraData=self._CurrentLine + \ + " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)", + File=self.MetaFile, Line=self._LineIndex+1) + self._ValueList[0] = TokenList[0] + self._ValueList[1] = TokenList[1] + + ## PCD sections parser + # + # [PcdsFixedAtBuild] + # [PcdsPatchableInModule] + # [PcdsFeatureFlag] + # [PcdsDynamicEx + # [PcdsDynamic] + # + def _PcdParser(self): + TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1) + self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT) + # check PCD information + if self._ValueList[0] == '' or self._ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified", + ExtraData=self._CurrentLine + \ + " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + # check PCD datum information + if len(TokenList) < 2 or TokenList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given", + ExtraData=self._CurrentLine + \ + " 
(<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + + ValueList = GetSplitValueList(TokenList[1]) + # check if there's enough datum information given + if len(ValueList) != 3: + EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given", + ExtraData=self._CurrentLine + \ + " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + # check default value + if ValueList[0] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + # check datum type + if ValueList[1] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + # check token of the PCD + if ValueList[2] == '': + EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information", + ExtraData=self._CurrentLine + \ + " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)", + File=self.MetaFile, Line=self._LineIndex+1) + # check format of default value against the datum type + IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0]) + if not IsValid: + EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine, + File=self.MetaFile, Line=self._LineIndex+1) + self._ValueList[2] = TokenList[1] + + _SectionParser = { + MODEL_META_DATA_HEADER : MetaFileParser._DefineParser, + MODEL_EFI_INCLUDE : MetaFileParser._PathParser, + MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser, + MODEL_EFI_GUID : _GuidParser, + MODEL_EFI_PPI : _GuidParser, + MODEL_EFI_PROTOCOL : _GuidParser, + MODEL_PCD_FIXED_AT_BUILD : _PcdParser, + 
MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser, + MODEL_PCD_FEATURE_FLAG : _PcdParser, + MODEL_PCD_DYNAMIC : _PcdParser, + MODEL_PCD_DYNAMIC_EX : _PcdParser, + MODEL_UNKNOWN : MetaFileParser._Skip, + MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip, + } + +## +# +# This acts like the main() function for the script, unless it is 'import'ed into another +# script. +# +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py new file mode 100644 index 0000000000..22e2afa4c9 --- /dev/null +++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py @@ -0,0 +1,275 @@ +## @file
+# This file is used to create/update/query/erase a meta file table
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from MetaDataTable import Table
+from MetaDataTable import ConvertToSqlString
+
## Python class representation of table storing module (INF) data
class ModuleTable(Table):
    # TRICK: use file ID as the part before '.', so a record ID encodes both
    # the owning file and the record's ordinal inside it
    _ID_STEP_ = 0.00000001
    _ID_MAX_ = 0.99999999
    _COLUMN_ = '''
        ID REAL PRIMARY KEY,
        Model INTEGER NOT NULL,
        Value1 TEXT NOT NULL,
        Value2 TEXT,
        Value3 TEXT,
        Scope1 TEXT,
        Scope2 TEXT,
        BelongsToItem REAL NOT NULL,
        StartLine INTEGER NOT NULL,
        StartColumn INTEGER NOT NULL,
        EndLine INTEGER NOT NULL,
        EndColumn INTEGER NOT NULL,
        Enabled INTEGER DEFAULT 0
        '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"

    ## Constructor
    #
    # @param Cursor     Database cursor forwarded to the Table base class
    # @param Name       Table name in the database (defaults to 'Inf')
    # @param IdBase     Base value used when generating record IDs
    # @param Temporary  Whether the table is session-local only
    #
    def __init__(self, Cursor, Name='Inf', IdBase=0, Temporary=False):
        Table.__init__(self, Cursor, Name, IdBase, Temporary)

    ## Insert a record into table Inf
    #
    # @param Model:         Model of an Inf item
    # @param Value1:        Value1 of an Inf item
    # @param Value2:        Value2 of an Inf item
    # @param Value3:        Value3 of an Inf item
    # @param Scope1:        Arch of an Inf item
    # @param Scope2:        Platform of an Inf item
    # @param BelongsToItem: The item this one belongs to
    # @param StartLine:     StartLine of an Inf item
    # @param StartColumn:   StartColumn of an Inf item
    # @param EndLine:       EndLine of an Inf item
    # @param EndColumn:     EndColumn of an Inf item
    # @param Enabled:       Whether this item is enabled
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
               BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
        # sanitize the string fields before they are embedded in SQL text
        Value1, Value2, Value3, Scope1, Scope2 = ConvertToSqlString(
            (Value1, Value2, Value3, Scope1, Scope2))
        return Table.Insert(self, Model, Value1, Value2, Value3, Scope1, Scope2,
                            BelongsToItem, StartLine, StartColumn,
                            EndLine, EndColumn, Enabled)

    ## Query table
    #
    # @param Model:    The Model of the records to fetch
    # @param Arch:     Optional Arch filter ('COMMON' records always match)
    # @param Platform: Optional Platform filter ('COMMON'/'DEFAULT' always match)
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model, Arch=None, Platform=None):
        ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
        ConditionString = "Model=%s AND Enabled>=0" % Model
        # None or 'COMMON' means "do not restrict on this scope"
        if Arch not in (None, 'COMMON'):
            ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
        if Platform not in (None, 'COMMON'):
            ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
        SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
        return self.Exec(SqlCommand)
+
## Python class representation of table storing package (DEC) data
#
# Same record layout as ModuleTable; Scope2 carries the module type of a DEC
# item instead of a platform, and Query filters on Arch only.
class PackageTable(Table):
    # TRICK: use file ID as the part before '.', so a record ID encodes both
    # the owning file and the record's ordinal inside it
    _ID_STEP_ = 0.00000001
    _ID_MAX_ = 0.99999999
    _COLUMN_ = '''
        ID REAL PRIMARY KEY,
        Model INTEGER NOT NULL,
        Value1 TEXT NOT NULL,
        Value2 TEXT,
        Value3 TEXT,
        Scope1 TEXT,
        Scope2 TEXT,
        BelongsToItem REAL NOT NULL,
        StartLine INTEGER NOT NULL,
        StartColumn INTEGER NOT NULL,
        EndLine INTEGER NOT NULL,
        EndColumn INTEGER NOT NULL,
        Enabled INTEGER DEFAULT 0
        '''
    # used as table end flag, in case the changes to database is not committed to db file
    _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"

    ## Constructor
    #
    # @param Cursor     Database cursor forwarded to the Table base class
    # @param Name       Table name in the database (defaults to 'Dec')
    # @param IdBase     Base value used when generating record IDs
    # @param Temporary  Whether the table is session-local only
    #
    def __init__(self, Cursor, Name='Dec', IdBase=0, Temporary=False):
        Table.__init__(self, Cursor, Name, IdBase, Temporary)

    ## Insert table
    #
    # Insert a record into table Dec. String fields are passed through
    # ConvertToSqlString before being stored.
    #
    # @param Model:         Model of a Dec item
    # @param Value1:        Value1 of a Dec item
    # @param Value2:        Value2 of a Dec item
    # @param Value3:        Value3 of a Dec item
    # @param Scope1:        Arch of a Dec item
    # @param Scope2:        Module type of a Dec item
    # @param BelongsToItem: The item this one belongs to
    # @param StartLine:     StartLine of a Dec item
    # @param StartColumn:   StartColumn of a Dec item
    # @param EndLine:       EndLine of a Dec item
    # @param EndColumn:     EndColumn of a Dec item
    # @param Enabled:       Whether this item is enabled
    #
    def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
               BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
        (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
        return Table.Insert(
                        self,
                        Model,
                        Value1,
                        Value2,
                        Value3,
                        Scope1,
                        Scope2,
                        BelongsToItem,
                        StartLine,
                        StartColumn,
                        EndLine,
                        EndColumn,
                        Enabled
                        )

    ## Query table
    #
    # Fetch enabled records of the given Model; an Arch of None or 'COMMON'
    # means no Arch restriction, otherwise 'COMMON'-scoped records still match.
    #
    # @param Model:  The Model of Record
    # @param Arch:   The Arch attribute of Record
    #
    # @retval: A recordSet of all found records
    #
    def Query(self, Model, Arch=None):
        ConditionString = "Model=%s AND Enabled>=0" % Model
        ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"

        if Arch != None and Arch != 'COMMON':
            ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch

        SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
        return self.Exec(SqlCommand)
+
+## Python class representation of table storing platform data
+class PlatformTable(Table):
+ _ID_STEP_ = 0.00000001
+ _ID_MAX_ = 0.99999999
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ FromItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to database is not committed to db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor, Name='Dsc', IdBase=0, Temporary=False):
+ Table.__init__(self, Cursor, Name, IdBase, Temporary)
+
+ ## Insert table
+ #
+ # Insert a record into table Dsc
+ #
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param FromItem: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: If this item enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1,
+ FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ FromItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param FromItem: The item belongs to which dsc file
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
+
+ if Scope1 != None and Scope1 != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
+ if Scope2 != None and Scope2 != 'COMMON':
+ ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
+
+ if BelongsToItem != None:
+ ConditionString += " AND BelongsToItem=%s" % BelongsToItem
+ else:
+ ConditionString += " AND BelongsToItem<0"
+
+ if FromItem != None:
+ ConditionString += " AND FromItem=%s" % FromItem
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py new file mode 100644 index 0000000000..8f0056e197 --- /dev/null +++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py @@ -0,0 +1,2274 @@ +## @file +# This file is used to create a database used by build tool +# +# Copyright (c) 2008, Intel Corporation +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import sqlite3 +import os +import os.path + +import Common.EdkLogger as EdkLogger +import Common.GlobalData as GlobalData + +from Common.String import * +from Common.DataType import * +from Common.Misc import * + +from CommonDataClass.CommonClass import SkuInfoClass + +from MetaDataTable import * +from MetaFileTable import * +from MetaFileParser import * +from BuildClassObject import * + +## Platform build information from DSC file +# +# This class is used to retrieve information stored in database and convert them +# into PlatformBuildClassObject form for easier use for AutoGen. 
+# +class DscBuildData(PlatformBuildClassObject): + # dict used to convert PCD type in database to string used by build tool + _PCD_TYPE_STRING_ = { + MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild", + MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule", + MODEL_PCD_FEATURE_FLAG : "FeatureFlag", + MODEL_PCD_DYNAMIC : "Dynamic", + MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic", + MODEL_PCD_DYNAMIC_HII : "DynamicHii", + MODEL_PCD_DYNAMIC_VPD : "DynamicVpd", + MODEL_PCD_DYNAMIC_EX : "DynamicEx", + MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx", + MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii", + MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd", + } + + # dict used to convert part of [Defines] to members of DscBuildData directly + _PROPERTY_ = { + # + # Required Fields + # + TAB_DSC_DEFINES_PLATFORM_NAME : "_PlatformName", + TAB_DSC_DEFINES_PLATFORM_GUID : "_Guid", + TAB_DSC_DEFINES_PLATFORM_VERSION : "_Version", + TAB_DSC_DEFINES_DSC_SPECIFICATION : "_DscSpecification", + #TAB_DSC_DEFINES_OUTPUT_DIRECTORY : "_OutputDirectory", + #TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES : "_SupArchList", + #TAB_DSC_DEFINES_BUILD_TARGETS : "_BuildTargets", + #TAB_DSC_DEFINES_SKUID_IDENTIFIER : "_SkuName", + #TAB_DSC_DEFINES_FLASH_DEFINITION : "_FlashDefinition", + TAB_DSC_DEFINES_BUILD_NUMBER : "_BuildNumber", + TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName", + TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress", + TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress", + } + + # used to compose dummy library class name for those forced library instances + _NullLibraryNumber = 0 + + ## Constructor of DscBuildData + # + # Initialize object of DscBuildData + # + # @param FilePath The path of platform description file + # @param RawData The raw data of DSC file + # @param BuildDataBase Database used to retrieve module/package information + # @param Arch The target architecture + # @param Platform (not used for DscBuildData) + # @param Macros Macros used for replacement in DSC file + # + def __init__(self, FilePath, RawData, 
BuildDataBase, Arch='COMMON', Platform='DUMMY', Macros={}): + self.MetaFile = FilePath + self._RawData = RawData + self._Bdb = BuildDataBase + self._Arch = Arch + self._Macros = Macros + self._Clear() + RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch] + for Record in RecordList: + GlobalData.gEdkGlobal[Record[0]] = Record[1] + + ## XXX[key] = value + def __setitem__(self, key, value): + self.__dict__[self._PROPERTY_[key]] = value + + ## value = XXX[key] + def __getitem__(self, key): + return self.__dict__[self._PROPERTY_[key]] + + ## "in" test support + def __contains__(self, key): + return key in self._PROPERTY_ + + ## Set all internal used members of DscBuildData to None + def _Clear(self): + self._Header = None + self._PlatformName = None + self._Guid = None + self._Version = None + self._DscSpecification = None + self._OutputDirectory = None + self._SupArchList = None + self._BuildTargets = None + self._SkuName = None + self._FlashDefinition = None + self._BuildNumber = None + self._MakefileName = None + self._BsBaseAddress = None + self._RtBaseAddress = None + self._SkuIds = None + self._Modules = None + self._LibraryInstances = None + self._LibraryClasses = None + self._Pcds = None + self._BuildOptions = None + + ## Get architecture + def _GetArch(self): + return self._Arch + + ## Set architecture + # + # Changing the default ARCH to another may affect all other information + # because all information in a platform may be ARCH-related. That's + # why we need to clear all internal used members, in order to cause all + # information to be re-retrieved. + # + # @param Value The value of ARCH + # + def _SetArch(self, Value): + if self._Arch == Value: + return + self._Arch = Value + self._Clear() + + ## Retrieve all information in [Defines] section + # + # (Retriving all [Defines] information in one-shot is just to save time.) 
+ # + def _GetHeaderInfo(self): + RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch] + for Record in RecordList: + Name = Record[0] + # items defined _PROPERTY_ don't need additional processing + if Name in self: + self[Name] = Record[1] + # some special items in [Defines] section need special treatment + elif Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY: + self._OutputDirectory = NormPath(Record[1], self._Macros) + if ' ' in self._OutputDirectory: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY", + File=self.MetaFile, Line=Record[-1], + ExtraData=self._OutputDirectory) + elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION: + self._FlashDefinition = PathClass(NormPath(Record[1], self._Macros), GlobalData.gWorkspace) + ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1], + ExtraData=ErrorInfo) + elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES: + self._SupArchList = GetSplitValueList(Record[1], TAB_VALUE_SPLIT) + elif Name == TAB_DSC_DEFINES_BUILD_TARGETS: + self._BuildTargets = GetSplitValueList(Record[1]) + elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER: + if self._SkuName == None: + self._SkuName = Record[1] + # set _Header to non-None in order to avoid database re-querying + self._Header = 'DUMMY' + + ## Retrieve platform name + def _GetPlatformName(self): + if self._PlatformName == None: + if self._Header == None: + self._GetHeaderInfo() + if self._PlatformName == None: + EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile) + return self._PlatformName + + ## Retrieve file guid + def _GetFileGuid(self): + if self._Guid == None: + if self._Header == None: + self._GetHeaderInfo() + if self._Guid == None: + EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No FILE_GUID", File=self.MetaFile) + return self._Guid + + ## Retrieve platform version + def _GetVersion(self): + if 
self._Version == None: + if self._Header == None: + self._GetHeaderInfo() + if self._Version == None: + self._Version = '' + return self._Version + + ## Retrieve platform description file version + def _GetDscSpec(self): + if self._DscSpecification == None: + if self._Header == None: + self._GetHeaderInfo() + if self._DscSpecification == None: + self._DscSpecification = '' + return self._DscSpecification + + ## Retrieve OUTPUT_DIRECTORY + def _GetOutpuDir(self): + if self._OutputDirectory == None: + if self._Header == None: + self._GetHeaderInfo() + if self._OutputDirectory == None: + self._OutputDirectory = os.path.join("Build", self._PlatformName) + return self._OutputDirectory + + ## Retrieve SUPPORTED_ARCHITECTURES + def _GetSupArch(self): + if self._SupArchList == None: + if self._Header == None: + self._GetHeaderInfo() + if self._SupArchList == None: + self._SupArchList = ARCH_LIST + return self._SupArchList + + ## Retrieve BUILD_TARGETS + def _GetBuildTarget(self): + if self._BuildTargets == None: + if self._Header == None: + self._GetHeaderInfo() + if self._BuildTargets == None: + self._BuildTargets = ['DEBUG', 'RELEASE'] + return self._BuildTargets + + ## Retrieve SKUID_IDENTIFIER + def _GetSkuName(self): + if self._SkuName == None: + if self._Header == None: + self._GetHeaderInfo() + if self._SkuName == None or self._SkuName not in self.SkuIds: + self._SkuName = 'DEFAULT' + return self._SkuName + + ## Override SKUID_IDENTIFIER + def _SetSkuName(self, Value): + if Value in self.SkuIds: + self._SkuName = Value + + def _GetFdfFile(self): + if self._FlashDefinition == None: + if self._Header == None: + self._GetHeaderInfo() + if self._FlashDefinition == None: + self._FlashDefinition = '' + return self._FlashDefinition + + ## Retrieve FLASH_DEFINITION + def _GetBuildNumber(self): + if self._BuildNumber == None: + if self._Header == None: + self._GetHeaderInfo() + if self._BuildNumber == None: + self._BuildNumber = '' + return self._BuildNumber + + ## Retrieve 
MAKEFILE_NAME + def _GetMakefileName(self): + if self._MakefileName == None: + if self._Header == None: + self._GetHeaderInfo() + if self._MakefileName == None: + self._MakefileName = '' + return self._MakefileName + + ## Retrieve BsBaseAddress + def _GetBsBaseAddress(self): + if self._BsBaseAddress == None: + if self._Header == None: + self._GetHeaderInfo() + if self._BsBaseAddress == None: + self._BsBaseAddress = '' + return self._BsBaseAddress + + ## Retrieve RtBaseAddress + def _GetRtBaseAddress(self): + if self._RtBaseAddress == None: + if self._Header == None: + self._GetHeaderInfo() + if self._RtBaseAddress == None: + self._RtBaseAddress = '' + return self._RtBaseAddress + + ## Retrieve [SkuIds] section information + def _GetSkuIds(self): + if self._SkuIds == None: + self._SkuIds = {} + RecordList = self._RawData[MODEL_EFI_SKU_ID] + for Record in RecordList: + if Record[0] in [None, '']: + EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number', + File=self.MetaFile, Line=Record[-1]) + if Record[1] in [None, '']: + EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name', + File=self.MetaFile, Line=Record[-1]) + self._SkuIds[Record[1]] = Record[0] + if 'DEFAULT' not in self._SkuIds: + self._SkuIds['DEFAULT'] = 0 + return self._SkuIds + + ## Retrieve [Components] section information + def _GetModules(self): + if self._Modules != None: + return self._Modules + + self._Modules = sdict() + RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch] + Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource} + Macros.update(self._Macros) + for Record in RecordList: + ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch) + ModuleId = Record[5] + LineNo = Record[6] + + # check the file validation + ErrorCode, ErrorInfo = ModuleFile.Validate('.inf') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo, + ExtraData=ErrorInfo) + # Check duplication + if 
ModuleFile in self._Modules: + EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo) + + Module = ModuleBuildClassObject() + Module.MetaFile = ModuleFile + + # get module override path + RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId] + if RecordList != []: + Module.SourceOverridePath = os.path.join(GlobalData.gWorkspace, NormPath(RecordList[0][0], Macros)) + + # Check if the source override path exists + if not os.path.isdir(Module.SourceOverridePath): + EdkLogger.error('build', FILE_NOT_FOUND, Message = 'Source override path does not exist:', File=self.MetaFile, ExtraData=Module.SourceOverridePath, Line=LineNo) + + #Add to GlobalData Variables + GlobalData.gOverrideDir[ModuleFile.Key] = Module.SourceOverridePath + + # get module private library instance + RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId] + for Record in RecordList: + LibraryClass = Record[0] + LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch) + LineNo = Record[-1] + + # check the file validation + ErrorCode, ErrorInfo = LibraryPath.Validate('.inf') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo, + ExtraData=ErrorInfo) + + if LibraryClass == '' or LibraryClass == 'NULL': + self._NullLibraryNumber += 1 + LibraryClass = 'NULL%d' % self._NullLibraryNumber + EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass)) + Module.LibraryClasses[LibraryClass] = LibraryPath + if LibraryPath not in self.LibraryInstances: + self.LibraryInstances.append(LibraryPath) + + # get module private PCD setting + for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \ + MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]: + RecordList = self._RawData[Type, self._Arch, None, ModuleId] + for TokenSpaceGuid, PcdCName, Setting, Dummy1, 
Dummy2, Dummy3, Dummy4 in RecordList: + TokenList = GetSplitValueList(Setting) + DefaultValue = TokenList[0] + if len(TokenList) > 1: + MaxDatumSize = TokenList[1] + else: + MaxDatumSize = '' + TypeString = self._PCD_TYPE_STRING_[Type] + Pcd = PcdClassObject( + PcdCName, + TokenSpaceGuid, + TypeString, + '', + DefaultValue, + '', + MaxDatumSize, + {}, + None + ) + Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd + + # get module private build options + RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId] + for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList: + if (ToolChainFamily, ToolChain) not in Module.BuildOptions: + Module.BuildOptions[ToolChainFamily, ToolChain] = Option + else: + OptionString = Module.BuildOptions[ToolChainFamily, ToolChain] + Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option + + self._Modules[ModuleFile] = Module + return self._Modules + + ## Retrieve all possible library instances used in this platform + def _GetLibraryInstances(self): + if self._LibraryInstances == None: + self._GetLibraryClasses() + return self._LibraryInstances + + ## Retrieve [LibraryClasses] information + def _GetLibraryClasses(self): + if self._LibraryClasses == None: + self._LibraryInstances = [] + # + # tdict is a special dict kind of type, used for selecting correct + # library instance for given library class and module type + # + LibraryClassDict = tdict(True, 3) + # track all library class names + LibraryClassSet = set() + RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch] + Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource} + Macros.update(self._Macros) + for Record in RecordList: + LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, LineNo = Record + LibraryClassSet.add(LibraryClass) + LibraryInstance = PathClass(NormPath(LibraryInstance, Macros), GlobalData.gWorkspace, Arch=self._Arch) + # check the file validation + 
ErrorCode, ErrorInfo = LibraryInstance.Validate('.inf') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo, + ExtraData=ErrorInfo) + + if ModuleType != 'COMMON' and ModuleType not in SUP_MODULE_LIST: + EdkLogger.error('build', OPTION_UNKNOWN, "Unknown module type [%s]" % ModuleType, + File=self.MetaFile, ExtraData=LibraryInstance, Line=LineNo) + LibraryClassDict[Arch, ModuleType, LibraryClass] = LibraryInstance + if LibraryInstance not in self._LibraryInstances: + self._LibraryInstances.append(LibraryInstance) + + # resolve the specific library instance for each class and each module type + self._LibraryClasses = tdict(True) + for LibraryClass in LibraryClassSet: + # try all possible module types + for ModuleType in SUP_MODULE_LIST: + LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass] + if LibraryInstance == None: + continue + self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance + + # for R8 style library instances, which are listed in different section + RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch] + for Record in RecordList: + File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch) + LineNo = Record[-1] + # check the file validation + ErrorCode, ErrorInfo = File.Validate('.inf') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo, + ExtraData=ErrorInfo) + if File not in self._LibraryInstances: + self._LibraryInstances.append(File) + # + # we need the module name as the library class name, so we have + # to parse it here. 
(self._Bdb[] will trigger a file parse if it + # hasn't been parsed) + # + Library = self._Bdb[File, self._Arch] + self._LibraryClasses[Library.BaseName, ':dummy:'] = Library + return self._LibraryClasses + + ## Retrieve all PCD settings in platform + def _GetPcds(self): + if self._Pcds == None: + self._Pcds = {} + self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD)) + self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE)) + self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG)) + self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_DEFAULT)) + self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_HII)) + self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_VPD)) + self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_EX_DEFAULT)) + self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_EX_HII)) + self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_EX_VPD)) + return self._Pcds + + ## Retrieve [BuildOptions] + def _GetBuildOptions(self): + if self._BuildOptions == None: + self._BuildOptions = {} + RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION] + for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList: + self._BuildOptions[ToolChainFamily, ToolChain] = Option + return self._BuildOptions + + ## Retrieve non-dynamic PCD settings + # + # @param Type PCD type + # + # @retval a dict object contains settings of given PCD type + # + def _GetPcd(self, Type): + Pcds = {} + # + # tdict is a special dict kind of type, used for selecting correct + # PCD settings for certain ARCH + # + PcdDict = tdict(True, 3) + PcdSet = set() + # Find out all possible PCD candidates for self._Arch + RecordList = self._RawData[Type, self._Arch] + for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList: + PcdSet.add((PcdCName, TokenSpaceGuid)) + PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting + # Remove redundant PCD candidates + for PcdCName, TokenSpaceGuid in PcdSet: + ValueList = ['', 
'', ''] + Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid] + if Setting == None: + continue + TokenList = Setting.split(TAB_VALUE_SPLIT) + ValueList[0:len(TokenList)] = TokenList + PcdValue, DatumType, MaxDatumSize = ValueList + Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject( + PcdCName, + TokenSpaceGuid, + self._PCD_TYPE_STRING_[Type], + DatumType, + PcdValue, + '', + MaxDatumSize, + {}, + None + ) + return Pcds + + ## Retrieve dynamic PCD settings + # + # @param Type PCD type + # + # @retval a dict object contains settings of given PCD type + # + def _GetDynamicPcd(self, Type): + Pcds = {} + # + # tdict is a special dict kind of type, used for selecting correct + # PCD settings for certain ARCH and SKU + # + PcdDict = tdict(True, 4) + PcdSet = set() + # Find out all possible PCD candidates for self._Arch + RecordList = self._RawData[Type, self._Arch] + for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList: + PcdSet.add((PcdCName, TokenSpaceGuid)) + PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting + # Remove redundant PCD candidates, per the ARCH and SKU + for PcdCName, TokenSpaceGuid in PcdSet: + ValueList = ['', '', ''] + Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid] + if Setting == None: + continue + TokenList = Setting.split(TAB_VALUE_SPLIT) + ValueList[0:len(TokenList)] = TokenList + PcdValue, DatumType, MaxDatumSize = ValueList + + SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', '', PcdValue) + Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject( + PcdCName, + TokenSpaceGuid, + self._PCD_TYPE_STRING_[Type], + DatumType, + PcdValue, + '', + MaxDatumSize, + {self.SkuName : SkuInfo}, + None + ) + return Pcds + + ## Retrieve dynamic HII PCD settings + # + # @param Type PCD type + # + # @retval a dict object contains settings of given PCD type + # + def _GetDynamicHiiPcd(self, Type): + Pcds = {} + # + # tdict is a special dict kind of type, used for selecting 
correct + # PCD settings for certain ARCH and SKU + # + PcdDict = tdict(True, 4) + PcdSet = set() + RecordList = self._RawData[Type, self._Arch] + # Find out all possible PCD candidates for self._Arch + for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList: + PcdSet.add((PcdCName, TokenSpaceGuid)) + PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting + # Remove redundant PCD candidates, per the ARCH and SKU + for PcdCName, TokenSpaceGuid in PcdSet: + ValueList = ['', '', '', ''] + Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid] + if Setting == None: + continue + TokenList = Setting.split(TAB_VALUE_SPLIT) + ValueList[0:len(TokenList)] = TokenList + VariableName, VariableGuid, VariableOffset, DefaultValue = ValueList + SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue) + Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject( + PcdCName, + TokenSpaceGuid, + self._PCD_TYPE_STRING_[Type], + '', + DefaultValue, + '', + '', + {self.SkuName : SkuInfo}, + None + ) + return Pcds + + ## Retrieve dynamic VPD PCD settings + # + # @param Type PCD type + # + # @retval a dict object contains settings of given PCD type + # + def _GetDynamicVpdPcd(self, Type): + Pcds = {} + # + # tdict is a special dict kind of type, used for selecting correct + # PCD settings for certain ARCH and SKU + # + PcdDict = tdict(True, 4) + PcdSet = set() + # Find out all possible PCD candidates for self._Arch + RecordList = self._RawData[Type, self._Arch] + for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList: + PcdSet.add((PcdCName, TokenSpaceGuid)) + PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting + # Remove redundant PCD candidates, per the ARCH and SKU + for PcdCName, TokenSpaceGuid in PcdSet: + ValueList = ['', ''] + Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid] + if Setting == None: + continue + TokenList = 
Setting.split(TAB_VALUE_SPLIT) + ValueList[0:len(TokenList)] = TokenList + VpdOffset, MaxDatumSize = ValueList + + SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset) + Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject( + PcdCName, + TokenSpaceGuid, + self._PCD_TYPE_STRING_[Type], + '', + '', + '', + MaxDatumSize, + {self.SkuName : SkuInfo}, + None + ) + return Pcds + + ## Add external modules + # + # The external modules are mostly those listed in FDF file, which don't + # need "build". + # + # @param FilePath The path of module description file + # + def AddModule(self, FilePath): + FilePath = NormPath(FilePath) + if FilePath not in self.Modules: + Module = ModuleBuildClassObject() + Module.MetaFile = FilePath + self.Modules.append(Module) + + ## Add external PCDs + # + # The external PCDs are mostly those listed in FDF file to specify address + # or offset information. + # + # @param Name Name of the PCD + # @param Guid Token space guid of the PCD + # @param Value Value of the PCD + # + def AddPcd(self, Name, Guid, Value): + if (Name, Guid) not in self.Pcds: + self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None) + self.Pcds[Name, Guid].DefaultValue = Value + + Arch = property(_GetArch, _SetArch) + Platform = property(_GetPlatformName) + PlatformName = property(_GetPlatformName) + Guid = property(_GetFileGuid) + Version = property(_GetVersion) + DscSpecification = property(_GetDscSpec) + OutputDirectory = property(_GetOutpuDir) + SupArchList = property(_GetSupArch) + BuildTargets = property(_GetBuildTarget) + SkuName = property(_GetSkuName, _SetSkuName) + FlashDefinition = property(_GetFdfFile) + BuildNumber = property(_GetBuildNumber) + MakefileName = property(_GetMakefileName) + BsBaseAddress = property(_GetBsBaseAddress) + RtBaseAddress = property(_GetRtBaseAddress) + + SkuIds = property(_GetSkuIds) + Modules = property(_GetModules) + LibraryInstances = property(_GetLibraryInstances) + 
LibraryClasses = property(_GetLibraryClasses) + Pcds = property(_GetPcds) + BuildOptions = property(_GetBuildOptions) + +## Platform build information from DSC file +# +# This class is used to retrieve information stored in database and convert them +# into PackageBuildClassObject form for easier use for AutoGen. +# +class DecBuildData(PackageBuildClassObject): + # dict used to convert PCD type in database to string used by build tool + _PCD_TYPE_STRING_ = { + MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild", + MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule", + MODEL_PCD_FEATURE_FLAG : "FeatureFlag", + MODEL_PCD_DYNAMIC : "Dynamic", + MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic", + MODEL_PCD_DYNAMIC_HII : "DynamicHii", + MODEL_PCD_DYNAMIC_VPD : "DynamicVpd", + MODEL_PCD_DYNAMIC_EX : "DynamicEx", + MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx", + MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii", + MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd", + } + + # dict used to convert part of [Defines] to members of DecBuildData directly + _PROPERTY_ = { + # + # Required Fields + # + TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName", + TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid", + TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version", + } + + + ## Constructor of DecBuildData + # + # Initialize object of DecBuildData + # + # @param FilePath The path of package description file + # @param RawData The raw data of DEC file + # @param BuildDataBase Database used to retrieve module information + # @param Arch The target architecture + # @param Platform (not used for DecBuildData) + # @param Macros Macros used for replacement in DSC file + # + def __init__(self, File, RawData, BuildDataBase, Arch='COMMON', Platform='DUMMY', Macros={}): + self.MetaFile = File + self._PackageDir = File.Dir + self._RawData = RawData + self._Bdb = BuildDataBase + self._Arch = Arch + self._Macros = Macros + self._Clear() + + ## XXX[key] = value + def __setitem__(self, key, value): + self.__dict__[self._PROPERTY_[key]] = value + + ## value = 
XXX[key] + def __getitem__(self, key): + return self.__dict__[self._PROPERTY_[key]] + + ## "in" test support + def __contains__(self, key): + return key in self._PROPERTY_ + + ## Set all internal used members of DecBuildData to None + def _Clear(self): + self._Header = None + self._PackageName = None + self._Guid = None + self._Version = None + self._Protocols = None + self._Ppis = None + self._Guids = None + self._Includes = None + self._LibraryClasses = None + self._Pcds = None + + ## Get architecture + def _GetArch(self): + return self._Arch + + ## Set architecture + # + # Changing the default ARCH to another may affect all other information + # because all information in a platform may be ARCH-related. That's + # why we need to clear all internal used members, in order to cause all + # information to be re-retrieved. + # + # @param Value The value of ARCH + # + def _SetArch(self, Value): + if self._Arch == Value: + return + self._Arch = Value + self._Clear() + + ## Retrieve all information in [Defines] section + # + # (Retriving all [Defines] information in one-shot is just to save time.) 
    ## Retrieve all information in the [Defines] section in one shot.
    #
    #   Copies every recognized name/value pair into the matching member via
    #   __setitem__ (names are mapped by the class's _PROPERTY_ table), then
    #   marks the header as parsed so the database is not queried again.
    #
    def _GetHeaderInfo(self):
        RecordList = self._RawData[MODEL_META_DATA_HEADER]
        for Record in RecordList:
            Name = Record[0]
            if Name in self:
                self[Name] = Record[1]
        # sentinel: any non-None value means "[Defines] already processed"
        self._Header = 'DUMMY'

    ## Retrieve package name (PACKAGE_NAME is mandatory in a DEC file)
    def _GetPackageName(self):
        if self._PackageName == None:
            if self._Header == None:
                self._GetHeaderInfo()
            if self._PackageName == None:
                EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
        return self._PackageName

    ## Retrieve file guid (PACKAGE_GUID is mandatory in a DEC file)
    def _GetFileGuid(self):
        if self._Guid == None:
            if self._Header == None:
                self._GetHeaderInfo()
            if self._Guid == None:
                EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
        return self._Guid

    ## Retrieve package version (optional; defaults to empty string)
    def _GetVersion(self):
        if self._Version == None:
            if self._Header == None:
                self._GetHeaderInfo()
            if self._Version == None:
                self._Version = ''
        return self._Version

    ## Retrieve protocol definitions (name/value pairs)
    def _GetProtocol(self):
        if self._Protocols == None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # protocol definition for given ARCH
            #
            ProtocolDict = tdict(True)
            NameList = []
            # find out all protocol definitions for specific and 'common' arch
            RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
            for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
                if Name not in NameList:
                    NameList.append(Name)
                ProtocolDict[Arch, Name] = Guid
            # use sdict to keep the order
            self._Protocols = sdict()
            for Name in NameList:
                #
                # limit the ARCH to self._Arch; if self._Arch has no entry,
                # tdict automatically falls back to the 'common' ARCH
                #
                self._Protocols[Name] = ProtocolDict[self._Arch, Name]
        return self._Protocols

    ## Retrieve PPI definitions (name/value pairs)
    def _GetPpi(self):
        if self._Ppis == None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # PPI definition for given ARCH
            #
            PpiDict = tdict(True)
            NameList = []
            # find out all PPI definitions for specific arch and 'common' arch
            RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
            for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
                if Name not in NameList:
                    NameList.append(Name)
                PpiDict[Arch, Name] = Guid
            # use sdict to keep the order
            self._Ppis = sdict()
            for Name in NameList:
                # self._Arch first, automatic fallback to 'common' (see tdict)
                self._Ppis[Name] = PpiDict[self._Arch, Name]
        return self._Ppis

    ## Retrieve GUID definitions (name/value pairs)
    def _GetGuid(self):
        if self._Guids == None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # GUID definition for given ARCH
            #
            GuidDict = tdict(True)
            NameList = []
            # find out all GUID definitions for specific and 'common' arch
            RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
            for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
                if Name not in NameList:
                    NameList.append(Name)
                GuidDict[Arch, Name] = Guid
            # use sdict to keep the order
            self._Guids = sdict()
            for Name in NameList:
                # self._Arch first, automatic fallback to 'common' (see tdict)
                self._Guids[Name] = GuidDict[self._Arch, Name]
        return self._Guids

    ## Retrieve public include paths declared in this package
    def _GetInclude(self):
        if self._Includes == None:
            self._Includes = []
            RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
            Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
            Macros.update(self._Macros)
            for Record in RecordList:
                File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
                LineNo = Record[-1]
                # validate the path
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)

                # avoid duplicate include path
                if File not in self._Includes:
                    self._Includes.append(File)
        return self._Includes

    ## Retrieve library class declarations (not used in build at present)
    def _GetLibraryClass(self):
        if self._LibraryClasses == None:
            #
            # tdict is a special kind of dict, used for selecting correct
            # library class declaration for given ARCH
            #
            LibraryClassDict = tdict(True)
            LibraryClassSet = set()
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
            Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
            Macros.update(self._Macros)
            for LibraryClass, File, Dummy, Arch, ID, LineNo in RecordList:
                File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
                # check the file validation
                ErrorCode, ErrorInfo = File.Validate()
                if ErrorCode != 0:
                    EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
                LibraryClassSet.add(LibraryClass)
                LibraryClassDict[Arch, LibraryClass] = File
            self._LibraryClasses = sdict()
            for LibraryClass in LibraryClassSet:
                self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
        return self._LibraryClasses

    ## Retrieve PCD declarations of every supported type, merged into one dict
    def _GetPcds(self):
        if self._Pcds == None:
            self._Pcds = {}
            self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
            self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
            self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
            self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
            self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
        return self._Pcds

    ## Retrieve PCD declarations for given type
    #
    #   @param  Type    one of the MODEL_PCD_* constants
    #   @retval dict    {(PcdCName, TokenSpaceGuid, TypeString) : PcdClassObject}
    #
    def _GetPcd(self, Type):
        Pcds = {}
        #
        # tdict is a special kind of dict, used for selecting correct
        # PCD declaration for given ARCH
        #
        PcdDict = tdict(True, 3)
        # for summarizing PCD
        PcdSet = set()
        # find out all PCDs of the 'type'
        RecordList = self._RawData[Type, self._Arch]
        for TokenSpaceGuid, PcdCName, Setting, Arch, Dummy1, Dummy2 in RecordList:
            PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
            PcdSet.add((PcdCName, TokenSpaceGuid))

        for PcdCName, TokenSpaceGuid in PcdSet:
            # Setting is "DefaultValue|DatumType|TokenNumber"; missing fields
            # default to empty string via the pre-sized ValueList
            ValueList = ['', '', '']
            #
            # limit the ARCH to self._Arch, if no self._Arch found, tdict
            # will automatically turn to 'common' ARCH and try again
            #
            Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
            if Setting == None:
                continue
            TokenList = Setting.split(TAB_VALUE_SPLIT)
            ValueList[0:len(TokenList)] = TokenList
            DefaultValue, DatumType, TokenNumber = ValueList
            Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdClassObject(
                                                                            PcdCName,
                                                                            TokenSpaceGuid,
                                                                            self._PCD_TYPE_STRING_[Type],
                                                                            DatumType,
                                                                            DefaultValue,
                                                                            TokenNumber,
                                                                            '',
                                                                            {},
                                                                            None
                                                                            )
        return Pcds

    Arch            = property(_GetArch, _SetArch)
    PackageName     = property(_GetPackageName)
    Guid            = property(_GetFileGuid)
    Version         = property(_GetVersion)

    Protocols       = property(_GetProtocol)
    Ppis            = property(_GetPpi)
    Guids           = property(_GetGuid)
    Includes        = property(_GetInclude)
    LibraryClasses  = property(_GetLibraryClass)
    Pcds            = property(_GetPcds)

## Module build information from INF file
#
# This class is used to retrieve information stored in database and convert them
# into ModuleBuildClassObject form for easier use for AutoGen.
#
class InfBuildData(ModuleBuildClassObject):
    # dict used to convert PCD type in database to string used by build tool
    _PCD_TYPE_STRING_ = {
        MODEL_PCD_FIXED_AT_BUILD        :   "FixedAtBuild",
        MODEL_PCD_PATCHABLE_IN_MODULE   :   "PatchableInModule",
        MODEL_PCD_FEATURE_FLAG          :   "FeatureFlag",
        MODEL_PCD_DYNAMIC               :   "Dynamic",
        MODEL_PCD_DYNAMIC_DEFAULT       :   "Dynamic",
        MODEL_PCD_DYNAMIC_HII           :   "DynamicHii",
        MODEL_PCD_DYNAMIC_VPD           :   "DynamicVpd",
        MODEL_PCD_DYNAMIC_EX            :   "DynamicEx",
        MODEL_PCD_DYNAMIC_EX_DEFAULT    :   "DynamicEx",
        MODEL_PCD_DYNAMIC_EX_HII        :   "DynamicExHii",
        MODEL_PCD_DYNAMIC_EX_VPD        :   "DynamicExVpd",
    }

    # dict used to convert part of [Defines] to members of InfBuildData directly
    _PROPERTY_ = {
        #
        # Required Fields
        #
        TAB_INF_DEFINES_BASE_NAME                   : "_BaseName",
        TAB_INF_DEFINES_FILE_GUID                   : "_Guid",
        TAB_INF_DEFINES_MODULE_TYPE                 : "_ModuleType",
        #
        # Optional Fields
        #
        TAB_INF_DEFINES_INF_VERSION                 : "_AutoGenVersion",
        TAB_INF_DEFINES_COMPONENT_TYPE              : "_ComponentType",
        TAB_INF_DEFINES_MAKEFILE_NAME               : "_MakefileName",
        #TAB_INF_DEFINES_CUSTOM_MAKEFILE            : "_CustomMakefile",
        TAB_INF_DEFINES_VERSION_NUMBER              : "_Version",
        TAB_INF_DEFINES_VERSION_STRING              : "_Version",
        TAB_INF_DEFINES_VERSION                     : "_Version",
        TAB_INF_DEFINES_PCD_IS_DRIVER               : "_PcdIsDriver",
        TAB_INF_DEFINES_SHADOW                      : "_Shadow",

        TAB_COMPONENTS_SOURCE_OVERRIDE_PATH         : "_SourceOverridePath",
    }

    # dict used to convert Component type to Module type
    _MODULE_TYPE_ = {
        "LIBRARY"               :   "BASE",
        "SECURITY_CORE"         :   "SEC",
        "PEI_CORE"              :   "PEI_CORE",
        "COMBINED_PEIM_DRIVER"  :   "PEIM",
        "PIC_PEIM"              :   "PEIM",
        "RELOCATABLE_PEIM"      :   "PEIM",
        "PE32_PEIM"             :   "PEIM",
        "BS_DRIVER"             :   "DXE_DRIVER",
        "RT_DRIVER"             :   "DXE_RUNTIME_DRIVER",
        "SAL_RT_DRIVER"         :   "DXE_SAL_DRIVER",
        #  "BS_DRIVER"          :   "DXE_SMM_DRIVER",
        #  "BS_DRIVER"          :   "UEFI_DRIVER",
        "APPLICATION"           :   "UEFI_APPLICATION",
        "LOGO"                  :   "BASE",
    }

    # regular expression for converting XXX_FLAGS in [nmake] section to new type
    _NMAKE_FLAG_PATTERN_ = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
    # dict used to convert old tool name used in [nmake] section to new ones
    _TOOL_CODE_ = {
        "C"         :   "CC",
        "LIB"       :   "SLINK",
        "LINK"      :   "DLINK",
    }


    ## Constructor of InfBuildData
    #
    #  Initialize object of InfBuildData
    #
    #   @param      FilePath        The path of module description file (INF)
    #   @param      RawData         The raw data of INF file
    #   @param      BuildDatabase   Database used to retrieve module information
    #   @param      Arch            The target architecture
    #   @param      Platform        The name of platform employing this module
    #   @param      Macros          Macros used for replacement in INF file
    #
    def __init__(self, FilePath, RawData, BuildDatabase, Arch='COMMON', Platform='COMMON', Macros=None):
        self.MetaFile = FilePath
        self._ModuleDir = FilePath.Dir
        self._RawData = RawData
        self._Bdb = BuildDatabase
        self._Arch = Arch
        # NOTE: the Platform argument is accepted for interface compatibility,
        # but a module always starts out associated with 'COMMON'; use the
        # Platform property (which clears cached data) to change it afterwards.
        self._Platform = 'COMMON'
        # BUGFIX: a literal {} default would be shared across every instance
        # (mutable default argument); use a None sentinel instead
        self._Macros = {} if Macros is None else Macros
        self._SourceOverridePath = None
        if FilePath.Key in GlobalData.gOverrideDir:
            self._SourceOverridePath = GlobalData.gOverrideDir[FilePath.Key]
        self._Clear()

    ## XXX[key] = value
    def __setitem__(self, key, value):
        self.__dict__[self._PROPERTY_[key]] = value

    ## value = XXX[key]
    def __getitem__(self, key):
        return self.__dict__[self._PROPERTY_[key]]

    ## "in" test support
    def __contains__(self, key):
        return key in self._PROPERTY_

    ## Set all internal used members of InfBuildData to None
    def _Clear(self):
        self._Header_               = None
        self._AutoGenVersion        = None
        self._BaseName              = None
        self._ModuleType            = None
        self._ComponentType         = None
        self._BuildType             = None
        self._Guid                  = None
        self._Version               = None
        self._PcdIsDriver           = None
        self._BinaryModule          = None
        self._Shadow                = None
        self._MakefileName          = None
        self._CustomMakefile        = None
        self._Specification         = None
        self._LibraryClass          = None
        self._ModuleEntryPointList  = None
        self._ModuleUnloadImageList = None
        self._ConstructorList       = None
        self._DestructorList        = None
        self._Defs                  = None
        self._Binaries              = None
        self._Sources               = None
        self._LibraryClasses        = None
        self._Libraries             = None
        self._Protocols             = None
        self._Ppis                  = None
        self._Guids                 = None
        self._Includes              = None
        self._Packages              = None
        self._Pcds                  = None
        self._BuildOptions          = None
        self._Depex                 = None
        #self._SourceOverridePath   = None

    ## Get architecture
    def _GetArch(self):
        return self._Arch

    ## Set architecture
    #
    #   Changing the default ARCH to another may affect all other information
    # because all information in a platform may be ARCH-related. That's
    # why we need to clear all internal used members, in order to cause all
    # information to be re-retrieved.
    #
    #   @param  Value   The value of ARCH
    #
    def _SetArch(self, Value):
        if self._Arch == Value:
            return
        self._Arch = Value
        self._Clear()

    ## Return the name of platform employing this module
    def _GetPlatform(self):
        return self._Platform

    ## Change the name of platform employing this module
    #
    #   Changing the default name of platform to another may affect some
    # information because they may be PLATFORM-related. That's why we need to
    # clear all internal used members, in order to cause all information to be
    # re-retrieved.
    #
    def _SetPlatform(self, Value):
        if self._Platform == Value:
            return
        self._Platform = Value
        self._Clear()
+ # + def _GetHeaderInfo(self): + RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform] + for Record in RecordList: + Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False) + Name = Record[0] + # items defined _PROPERTY_ don't need additional processing + if Name in self: + self[Name] = Record[1] + # some special items in [Defines] section need special treatment + elif Name == 'EFI_SPECIFICATION_VERSION': + if self._Specification == None: + self._Specification = sdict() + self._Specification[Name] = Record[1] + elif Name == 'EDK_RELEASE_VERSION': + if self._Specification == None: + self._Specification = sdict() + self._Specification[Name] = Record[1] + elif Name == 'PI_SPECIFICATION_VERSION': + if self._Specification == None: + self._Specification = sdict() + self._Specification[Name] = Record[1] + elif Name == 'LIBRARY_CLASS': + if self._LibraryClass == None: + self._LibraryClass = [] + ValueList = GetSplitValueList(Record[1]) + LibraryClass = ValueList[0] + if len(ValueList) > 1: + SupModuleList = GetSplitValueList(ValueList[1], ' ') + else: + SupModuleList = SUP_MODULE_LIST + self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList)) + elif Name == 'ENTRY_POINT': + if self._ModuleEntryPointList == None: + self._ModuleEntryPointList = [] + self._ModuleEntryPointList.append(Record[1]) + elif Name == 'UNLOAD_IMAGE': + if self._ModuleUnloadImageList == None: + self._ModuleUnloadImageList = [] + if Record[1] == '': + continue + self._ModuleUnloadImageList.append(Record[1]) + elif Name == 'CONSTRUCTOR': + if self._ConstructorList == None: + self._ConstructorList = [] + if Record[1] == '': + continue + self._ConstructorList.append(Record[1]) + elif Name == 'DESTRUCTOR': + if self._DestructorList == None: + self._DestructorList = [] + if Record[1] == '': + continue + self._DestructorList.append(Record[1]) + elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE: + TokenList = GetSplitValueList(Record[1]) + if self._CustomMakefile == 
None: + self._CustomMakefile = {} + if len(TokenList) < 2: + self._CustomMakefile['MSFT'] = TokenList[0] + self._CustomMakefile['GCC'] = TokenList[0] + else: + if TokenList[0] not in ['MSFT', 'GCC']: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, + "No supported family [%s]" % TokenList[0], + File=self.MetaFile, Line=Record[-1]) + self._CustomMakefile[TokenList[0]] = TokenList[1] + else: + if self._Defs == None: + self._Defs = sdict() + self._Defs[Name] = Record[1] + + # + # Retrieve information in sections specific to R8.x modules + # + if self._AutoGenVersion >= 0x00010005: # _AutoGenVersion may be None, which is less than anything + if not self._ModuleType: + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, + "MODULE_TYPE is not given", File=self.MetaFile) + if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \ + and 'PCI_CLASS_CODE' in self._Defs: + self._BuildType = 'UEFI_OPTIONROM' + else: + self._BuildType = self._ModuleType.upper() + else: + self._BuildType = self._ComponentType.upper() + if not self._ComponentType: + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, + "COMPONENT_TYPE is not given", File=self.MetaFile) + if self._ComponentType in self._MODULE_TYPE_: + self._ModuleType = self._MODULE_TYPE_[self._ComponentType] + if self._ComponentType == 'LIBRARY': + self._LibraryClass = [LibraryClassObject(self._BaseName, SUP_MODULE_LIST)] + # make use some [nmake] section macros + RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform] + for Name,Value,Dummy,Arch,Platform,ID,LineNo in RecordList: + Value = Value.replace('$(PROCESSOR)', self._Arch) + Name = Name.replace('$(PROCESSOR)', self._Arch) + Name, Value = ReplaceMacros((Name, Value), GlobalData.gEdkGlobal, True) + if Name == "IMAGE_ENTRY_POINT": + if self._ModuleEntryPointList == None: + self._ModuleEntryPointList = [] + self._ModuleEntryPointList.append(Value) + elif Name == "DPX_SOURCE": + Macros = {"EDK_SOURCE":GlobalData.gEcpSource, 
"EFI_SOURCE":GlobalData.gEfiSource} + Macros.update(self._Macros) + File = PathClass(NormPath(Value, Macros), self._ModuleDir, Arch=self._Arch) + # check the file validation + ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False) + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, + File=self.MetaFile, Line=LineNo) + if self.Sources == None: + self._Sources = [] + self._Sources.append(File) + else: + ToolList = self._NMAKE_FLAG_PATTERN_.findall(Name) + if len(ToolList) == 0 or len(ToolList) != 1: + pass +# EdkLogger.warn("build", "Don't know how to do with macro [%s]" % Name, +# File=self.MetaFile, Line=LineNo) + else: + if self._BuildOptions == None: + self._BuildOptions = sdict() + + if ToolList[0] in self._TOOL_CODE_: + Tool = self._TOOL_CODE_[ToolList[0]] + else: + Tool = ToolList[0] + ToolChain = "*_*_*_%s_FLAGS" % Tool + ToolChainFamily = 'MSFT' # R8.x only support MSFT tool chain + #ignore not replaced macros in value + ValueList = GetSplitValueList(' ' + Value, '/D') + Dummy = ValueList[0] + for Index in range(1, len(ValueList)): + if ValueList[Index][-1] == '=' or ValueList[Index] == '': + continue + Dummy = Dummy + ' /D ' + ValueList[Index] + Value = Dummy.strip() + if (ToolChainFamily, ToolChain) not in self._BuildOptions: + self._BuildOptions[ToolChainFamily, ToolChain] = Value + else: + OptionString = self._BuildOptions[ToolChainFamily, ToolChain] + self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Value + # set _Header to non-None in order to avoid database re-querying + self._Header_ = 'DUMMY' + + ## Retrieve file version + def _GetInfVersion(self): + if self._AutoGenVersion == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._AutoGenVersion == None: + self._AutoGenVersion = 0x00010000 + return self._AutoGenVersion + + ## Retrieve BASE_NAME + def _GetBaseName(self): + if self._BaseName == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._BaseName == 
None: + EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile) + return self._BaseName + + ## Retrieve MODULE_TYPE + def _GetModuleType(self): + if self._ModuleType == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._ModuleType == None: + self._ModuleType = 'BASE' + if self._ModuleType not in SUP_MODULE_LIST: + self._ModuleType = "USER_DEFINED" + return self._ModuleType + + ## Retrieve COMPONENT_TYPE + def _GetComponentType(self): + if self._ComponentType == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._ComponentType == None: + self._ComponentType = 'USER_DEFINED' + return self._ComponentType + + ## Retrieve "BUILD_TYPE" + def _GetBuildType(self): + if self._BuildType == None: + if self._Header_ == None: + self._GetHeaderInfo() + if not self._BuildType: + self._BuildType = "BASE" + return self._BuildType + + ## Retrieve file guid + def _GetFileGuid(self): + if self._Guid == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._Guid == None: + self._Guid = '00000000-0000-0000-000000000000' + return self._Guid + + ## Retrieve module version + def _GetVersion(self): + if self._Version == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._Version == None: + self._Version = '0.0' + return self._Version + + ## Retrieve PCD_IS_DRIVER + def _GetPcdIsDriver(self): + if self._PcdIsDriver == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._PcdIsDriver == None: + self._PcdIsDriver = '' + return self._PcdIsDriver + + ## Retrieve SHADOW + def _GetShadow(self): + if self._Shadow == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._Shadow != None and self._Shadow.upper() == 'TRUE': + self._Shadow = True + else: + self._Shadow = False + return self._Shadow + + ## Retrieve CUSTOM_MAKEFILE + def _GetMakefile(self): + if self._CustomMakefile == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._CustomMakefile == None: 
+ self._CustomMakefile = {} + return self._CustomMakefile + + ## Retrieve EFI_SPECIFICATION_VERSION + def _GetSpec(self): + if self._Specification == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._Specification == None: + self._Specification = {} + return self._Specification + + ## Retrieve LIBRARY_CLASS + def _GetLibraryClass(self): + if self._LibraryClass == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._LibraryClass == None: + self._LibraryClass = [] + return self._LibraryClass + + ## Retrieve ENTRY_POINT + def _GetEntryPoint(self): + if self._ModuleEntryPointList == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._ModuleEntryPointList == None: + self._ModuleEntryPointList = [] + return self._ModuleEntryPointList + + ## Retrieve UNLOAD_IMAGE + def _GetUnloadImage(self): + if self._ModuleUnloadImageList == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._ModuleUnloadImageList == None: + self._ModuleUnloadImageList = [] + return self._ModuleUnloadImageList + + ## Retrieve CONSTRUCTOR + def _GetConstructor(self): + if self._ConstructorList == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._ConstructorList == None: + self._ConstructorList = [] + return self._ConstructorList + + ## Retrieve DESTRUCTOR + def _GetDestructor(self): + if self._DestructorList == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._DestructorList == None: + self._DestructorList = [] + return self._DestructorList + + ## Retrieve definies other than above ones + def _GetDefines(self): + if self._Defs == None: + if self._Header_ == None: + self._GetHeaderInfo() + if self._Defs == None: + self._Defs = sdict() + return self._Defs + + ## Retrieve binary files + def _GetBinaryFiles(self): + if self._Binaries == None: + self._Binaries = [] + RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform] + Macros = {"EDK_SOURCE":GlobalData.gEcpSource, 
    ## Retrieve source files listed in the [sources] section
    def _GetSourceFiles(self):
        if self._Sources == None:
            self._Sources = []
            RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
            Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource, 'PROCESSOR':self._Arch}
            Macros.update(self._Macros)
            for Record in RecordList:
                Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
                LineNo = Record[-1]
                ToolChainFamily = Record[1]
                TagName = Record[2]
                ToolCode = Record[3]
                FeatureFlag = Record[4]
                if self._AutoGenVersion < 0x00010005:
                    # old module source files (R8); paths are case-insensitive
                    File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, self._SourceOverridePath,
                                     '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
                    # check the file validation
                    ErrorCode, ErrorInfo = File.Validate(CaseSensitive=False)
                    if ErrorCode != 0:
                        # a missing header only gets a warning; anything else is fatal
                        if File.Ext.lower() == '.h':
                            EdkLogger.warn('build', 'Include file not found', ExtraData=ErrorInfo,
                                           File=self.MetaFile, Line=LineNo)
                            continue
                        else:
                            EdkLogger.error('build', ErrorCode, ExtraData=File, File=self.MetaFile, Line=LineNo)
                else:
                    File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, '',
                                     '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
                    # check the file validation
                    ErrorCode, ErrorInfo = File.Validate()
                    if ErrorCode != 0:
                        EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)

                self._Sources.append(File)
        return self._Sources

    ## Retrieve library classes employed by this module
    #   @retval sdict   {library class name : instance path or None}
    def _GetLibraryClassUses(self):
        if self._LibraryClasses == None:
            self._LibraryClasses = sdict()
            RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
            for Record in RecordList:
                Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
                Lib = Record[0]
                Instance = Record[1]
                if Instance != None and Instance != '':
                    Instance = NormPath(Instance, self._Macros)
                self._LibraryClasses[Lib] = Instance
        return self._LibraryClasses

    ## Retrieve library names (for R8.x style of modules)
    def _GetLibraryNames(self):
        if self._Libraries == None:
            self._Libraries = []
            RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
            for Record in RecordList:
                # in case of name with '.lib' extension, which is unusual in R8.x inf
                Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
                LibraryName = os.path.splitext(Record[0])[0]
                if LibraryName not in self._Libraries:
                    self._Libraries.append(LibraryName)
        return self._Libraries

    ## Retrieve protocols consumed/produced by this module
    #   Resolves each C name to its GUID value via the dependent packages.
    def _GetProtocols(self):
        if self._Protocols == None:
            self._Protocols = sdict()
            RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
            for Record in RecordList:
                CName = Record[0]
                Value = ProtocolValue(CName, self.Packages)
                if Value == None:
                    PackageList = "\n\t".join([str(P) for P in self.Packages])
                    EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                    "Value of Protocol [%s] is not found under [Protocols] section in" % CName,
                                    ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
                self._Protocols[CName] = Value
        return self._Protocols

    ## Retrieve PPIs consumed/produced by this module
    def _GetPpis(self):
        if self._Ppis == None:
            self._Ppis = sdict()
            RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
            for Record in RecordList:
                CName = Record[0]
                Value = PpiValue(CName, self.Packages)
                if Value == None:
                    PackageList = "\n\t".join([str(P) for P in self.Packages])
                    EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                    "Value of PPI [%s] is not found under [Ppis] section in " % CName,
                                    ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
                self._Ppis[CName] = Value
        return self._Ppis

    ## Retrieve GUIDs consumed/produced by this module
    def _GetGuids(self):
        if self._Guids == None:
            self._Guids = sdict()
            RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
            for Record in RecordList:
                CName = Record[0]
                Value = GuidValue(CName, self.Packages)
                if Value == None:
                    PackageList = "\n\t".join([str(P) for P in self.Packages])
                    EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
                                    "Value of Guid [%s] is not found under [Guids] section in" % CName,
                                    ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
                self._Guids[CName] = Value
        return self._Guids

    ## Retrieve include paths necessary for this module (for R8.x style of modules)
    def _GetIncludes(self):
        if self._Includes == None:
            self._Includes = []
            if self._SourceOverridePath:
                self._Includes.append(self._SourceOverridePath)
            RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch, self._Platform]
            # [includes] section must be used only in old (R8.x) inf file
            if self.AutoGenVersion >= 0x00010005 and len(RecordList) > 0:
                # NOTE(review): the -1 line adjustment looks suspicious — confirm
                # whether record line numbers are 1-based here
                EdkLogger.error('build', FORMAT_NOT_SUPPORTED, "No [include] section allowed",
                                File=self.MetaFile, Line=RecordList[0][-1]-1)
            for Record in RecordList:
                Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
                Record[0] = Record[0].replace('$(PROCESSOR)', self._Arch)
                Record[0] = ReplaceMacro(Record[0], {'EFI_SOURCE' : GlobalData.gEfiSource}, False)
                if Record[0].find('EDK_SOURCE') > -1:
                    # paths under EDK_SOURCE are added twice: once resolved
                    # against the ECP tree and once against the EDK tree
                    File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEcpSource}, False), self._Macros)
                    if File[0] == '.':
                        File = os.path.join(self._ModuleDir, File)
                    else:
                        File = os.path.join(GlobalData.gWorkspace, File)
                    File = RealPath(os.path.normpath(File))
                    if File:
                        self._Includes.append(File)

                    #TRICK: let compiler to choose correct header file
                    File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEdkSource}, False), self._Macros)
                    if File[0] == '.':
                        File = os.path.join(self._ModuleDir, File)
                    else:
                        File = os.path.join(GlobalData.gWorkspace, File)
                    File = RealPath(os.path.normpath(File))
                    if File:
                        self._Includes.append(File)
                else:
                    # relative paths are resolved against the module directory,
                    # everything else against the workspace root
                    File = NormPath(Record[0], self._Macros)
                    if File[0] == '.':
                        File = os.path.join(self._ModuleDir, File)
                    else:
                        File = os.path.join(GlobalData.gWorkspace, File)
                    File = RealPath(os.path.normpath(File))
                    if File:
                        self._Includes.append(File)
        return self._Includes
GlobalData.gEfiSource}, False) + if Record[0].find('EDK_SOURCE') > -1: + File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEcpSource}, False), self._Macros) + if File[0] == '.': + File = os.path.join(self._ModuleDir, File) + else: + File = os.path.join(GlobalData.gWorkspace, File) + File = RealPath(os.path.normpath(File)) + if File: + self._Includes.append(File) + + #TRICK: let compiler to choose correct header file + File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEdkSource}, False), self._Macros) + if File[0] == '.': + File = os.path.join(self._ModuleDir, File) + else: + File = os.path.join(GlobalData.gWorkspace, File) + File = RealPath(os.path.normpath(File)) + if File: + self._Includes.append(File) + else: + File = NormPath(Record[0], self._Macros) + if File[0] == '.': + File = os.path.join(self._ModuleDir, File) + else: + File = os.path.join(GlobalData.gWorkspace, File) + File = RealPath(os.path.normpath(File)) + if File: + self._Includes.append(File) + return self._Includes + + ## Retrieve packages this module depends on + def _GetPackages(self): + if self._Packages == None: + self._Packages = [] + RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform] + Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource} + Macros.update(self._Macros) + for Record in RecordList: + File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch) + LineNo = Record[-1] + # check the file validation + ErrorCode, ErrorInfo = File.Validate('.dec') + if ErrorCode != 0: + EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo) + # parse this package now. 
we need it to get protocol/ppi/guid value + Package = self._Bdb[File, self._Arch] + self._Packages.append(Package) + return self._Packages + + ## Retrieve PCDs used in this module + def _GetPcds(self): + if self._Pcds == None: + self._Pcds = {} + self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD)) + self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE)) + self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG)) + self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC)) + self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX)) + return self._Pcds + + ## Retrieve build options specific to this module + def _GetBuildOptions(self): + if self._BuildOptions == None: + self._BuildOptions = sdict() + RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform] + for Record in RecordList: + ToolChainFamily = Record[0] + ToolChain = Record[1] + Option = Record[2] + if (ToolChainFamily, ToolChain) not in self._BuildOptions: + self._BuildOptions[ToolChainFamily, ToolChain] = Option + else: + # concatenate the option string if they're for the same tool + OptionString = self._BuildOptions[ToolChainFamily, ToolChain] + self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option + return self._BuildOptions + + ## Retrieve depedency expression + def _GetDepex(self): + if self._Depex == None: + self._Depex = tdict(False, 2) + RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch] + Depex = {} + for Record in RecordList: + Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False) + Arch = Record[3] + ModuleType = Record[4] + TokenList = Record[0].split() + if (Arch, ModuleType) not in Depex: + Depex[Arch, ModuleType] = [] + DepexList = Depex[Arch, ModuleType] + for Token in TokenList: + if Token in DEPEX_SUPPORTED_OPCODE: + DepexList.append(Token) + elif Token.endswith(".inf"): # module file name + ModuleFile = os.path.normpath(Token) + Module = self.BuildDatabase[ModuleFile] + if Module == None: + EdkLogger.error('build', 
RESOURCE_NOT_AVAILABLE, "Module is not found in active platform", + ExtraData=Token, File=self.MetaFile, Line=Record[-1]) + DepexList.append(Module.Guid) + else: + # get the GUID value now + Value = ProtocolValue(Token, self.Packages) + if Value == None: + Value = PpiValue(Token, self.Packages) + if Value == None: + Value = GuidValue(Token, self.Packages) + if Value == None: + PackageList = "\n\t".join([str(P) for P in self.Packages]) + EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, + "Value of [%s] is not found in" % Token, + ExtraData=PackageList, File=self.MetaFile, Line=Record[-1]) + DepexList.append(Value) + for Arch, ModuleType in Depex: + self._Depex[Arch, ModuleType] = Depex[Arch, ModuleType] + return self._Depex + + ## Retrieve PCD for given type + def _GetPcd(self, Type): + Pcds = {} + PcdDict = tdict(True, 4) + PcdSet = set() + RecordList = self._RawData[Type, self._Arch, self._Platform] + for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Dummy1, LineNo in RecordList: + PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo) + PcdSet.add((PcdCName, TokenSpaceGuid)) + # get the guid value + if TokenSpaceGuid not in self.Guids: + Value = GuidValue(TokenSpaceGuid, self.Packages) + if Value == None: + PackageList = "\n\t".join([str(P) for P in self.Packages]) + EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, + "Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid, + ExtraData=PackageList, File=self.MetaFile, Line=LineNo) + self.Guids[TokenSpaceGuid] = Value + + # resolve PCD type, value, datum info, etc. 
by getting its definition from package + for PcdCName, TokenSpaceGuid in PcdSet: + ValueList = ['', ''] + Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid] + if Setting == None: + continue + TokenList = Setting.split(TAB_VALUE_SPLIT) + ValueList[0:len(TokenList)] = TokenList + DefaultValue = ValueList[0] + Pcd = PcdClassObject( + PcdCName, + TokenSpaceGuid, + '', + '', + DefaultValue, + '', + '', + {}, + self.Guids[TokenSpaceGuid] + ) + + # get necessary info from package declaring this PCD + for Package in self.Packages: + # + # 'dynamic' in INF means its type is determined by platform; + # if platform doesn't give its type, use 'lowest' one in the + # following order, if any + # + # "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx" + # + PcdType = self._PCD_TYPE_STRING_[Type] + if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]: + Pcd.Pending = True + for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]: + if (PcdCName, TokenSpaceGuid, T) in Package.Pcds: + PcdType = T + break + else: + Pcd.Pending = False + + if (PcdCName, TokenSpaceGuid, PcdType) in Package.Pcds: + PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType] + Pcd.Type = PcdType + Pcd.TokenValue = PcdInPackage.TokenValue + Pcd.DatumType = PcdInPackage.DatumType + Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize + if Pcd.DefaultValue in [None, '']: + Pcd.DefaultValue = PcdInPackage.DefaultValue + break + else: + EdkLogger.error( + 'build', + PARSER_ERROR, + "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile), + File =self.MetaFile, Line=LineNo, + ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages]) + ) + Pcds[PcdCName, TokenSpaceGuid] = Pcd + return Pcds + + Arch = property(_GetArch, _SetArch) + Platform = property(_GetPlatform, _SetPlatform) + + AutoGenVersion = property(_GetInfVersion) + BaseName = property(_GetBaseName) + ModuleType = 
property(_GetModuleType) + ComponentType = property(_GetComponentType) + BuildType = property(_GetBuildType) + Guid = property(_GetFileGuid) + Version = property(_GetVersion) + PcdIsDriver = property(_GetPcdIsDriver) + Shadow = property(_GetShadow) + CustomMakefile = property(_GetMakefile) + Specification = property(_GetSpec) + LibraryClass = property(_GetLibraryClass) + ModuleEntryPointList = property(_GetEntryPoint) + ModuleUnloadImageList = property(_GetUnloadImage) + ConstructorList = property(_GetConstructor) + DestructorList = property(_GetDestructor) + Defines = property(_GetDefines) + + Binaries = property(_GetBinaryFiles) + Sources = property(_GetSourceFiles) + LibraryClasses = property(_GetLibraryClassUses) + Libraries = property(_GetLibraryNames) + Protocols = property(_GetProtocols) + Ppis = property(_GetPpis) + Guids = property(_GetGuids) + Includes = property(_GetIncludes) + Packages = property(_GetPackages) + Pcds = property(_GetPcds) + BuildOptions = property(_GetBuildOptions) + Depex = property(_GetDepex) + +## Database +# +# This class defined the build databse for all modules, packages and platform. +# It will call corresponding parser for the given file if it cannot find it in +# the database. 
+# +# @param DbPath Path of database file +# @param GlobalMacros Global macros used for replacement during file parsing +# @prarm RenewDb=False Create new database file if it's already there +# +class WorkspaceDatabase(object): + # file parser + _FILE_PARSER_ = { + MODEL_FILE_INF : InfParser, + MODEL_FILE_DEC : DecParser, + MODEL_FILE_DSC : DscParser, + MODEL_FILE_FDF : None, #FdfParser, + MODEL_FILE_CIF : None + } + + # file table + _FILE_TABLE_ = { + MODEL_FILE_INF : ModuleTable, + MODEL_FILE_DEC : PackageTable, + MODEL_FILE_DSC : PlatformTable, + } + + # default database file path + _DB_PATH_ = "Conf/.cache/build.db" + + # + # internal class used for call corresponding file parser and caching the result + # to avoid unnecessary re-parsing + # + class BuildObjectFactory(object): + _FILE_TYPE_ = { + ".inf" : MODEL_FILE_INF, + ".dec" : MODEL_FILE_DEC, + ".dsc" : MODEL_FILE_DSC, + ".fdf" : MODEL_FILE_FDF, + } + + # convert to xxxBuildData object + _GENERATOR_ = { + MODEL_FILE_INF : InfBuildData, + MODEL_FILE_DEC : DecBuildData, + MODEL_FILE_DSC : DscBuildData, + MODEL_FILE_FDF : None #FlashDefTable, + } + + _CACHE_ = {} # (FilePath, Arch) : <object> + + # constructor + def __init__(self, WorkspaceDb): + self.WorkspaceDb = WorkspaceDb + + # key = (FilePath, Arch='COMMON') + def __contains__(self, Key): + FilePath = Key[0] + Arch = 'COMMON' + if len(Key) > 1: + Arch = Key[1] + return (FilePath, Arch) in self._CACHE_ + + # key = (FilePath, Arch='COMMON') + def __getitem__(self, Key): + FilePath = Key[0] + Arch = 'COMMON' + Platform = 'COMMON' + if len(Key) > 1: + Arch = Key[1] + if len(Key) > 2: + Platform = Key[2] + + # if it's generated before, just return the cached one + Key = (FilePath, Arch) + if Key in self._CACHE_: + return self._CACHE_[Key] + + # check file type + Ext = FilePath.Ext.lower() + if Ext not in self._FILE_TYPE_: + return None + FileType = self._FILE_TYPE_[Ext] + if FileType not in self._GENERATOR_: + return None + + # get table for current file + 
MetaFile = self.WorkspaceDb[FilePath, FileType, self.WorkspaceDb._GlobalMacros] + BuildObject = self._GENERATOR_[FileType]( + FilePath, + MetaFile, + self, + Arch, + Platform, + self.WorkspaceDb._GlobalMacros, + ) + self._CACHE_[Key] = BuildObject + return BuildObject + + # placeholder for file format conversion + class TransformObjectFactory: + def __init__(self, WorkspaceDb): + self.WorkspaceDb = WorkspaceDb + + # key = FilePath, Arch + def __getitem__(self, Key): + pass + + ## Constructor of WorkspaceDatabase + # + # @param DbPath Path of database file + # @param GlobalMacros Global macros used for replacement during file parsing + # @prarm RenewDb=False Create new database file if it's already there + # + def __init__(self, DbPath, GlobalMacros={}, RenewDb=False): + self._GlobalMacros = GlobalMacros + + if DbPath == None or DbPath == '': + DbPath = os.path.normpath(os.path.join(GlobalData.gWorkspace, self._DB_PATH_)) + + # don't create necessary path for db in memory + if DbPath != ':memory:': + DbDir = os.path.split(DbPath)[0] + if not os.path.exists(DbDir): + os.makedirs(DbDir)
+ + # remove db file in case inconsistency between db and file in file system + if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
+ os.remove(DbPath)
+ + # create db with optimized parameters + self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED') + self.Conn.execute("PRAGMA synchronous=OFF") + self.Conn.execute("PRAGMA temp_store=MEMORY") + self.Conn.execute("PRAGMA count_changes=OFF") + self.Conn.execute("PRAGMA cache_size=8192") + #self.Conn.execute("PRAGMA page_size=8192") + + # to avoid non-ascii character conversion issue + self.Conn.text_factory = str + self.Cur = self.Conn.cursor() + + # create table for internal uses + self.TblDataModel = TableDataModel(self.Cur) + self.TblFile = TableFile(self.Cur) + + # conversion object for build or file format conversion purpose + self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self) + self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self) +
+ ## Check whether workspace database need to be renew.
+ # The renew reason maybe:
+ # 1) If user force to renew;
+ # 2) If user do not force renew, and
+ # a) If the time of last modified python source is newer than database file;
+ # b) If the time of last modified frozen executable file is newer than database file;
+ #
+ # @param force User force renew database
+ # @param DbPath The absolute path of workspace database file
+ #
+ # @return Bool value for whether need renew workspace databse
+ #
+ def _CheckWhetherDbNeedRenew (self, force, DbPath):
+ # if database does not exist, we need do nothing
+ if not os.path.exists(DbPath): return False
+
+ # if user force to renew database, then not check whether database is out of date
+ if force: return True
+
+ #
+ # Check the time of last modified source file or build.exe
+ # if is newer than time of database, then database need to be re-created.
+ #
+ timeOfToolModified = 0
+ if hasattr(sys, "frozen"):
+ exePath = os.path.abspath(sys.executable)
+ timeOfToolModified = os.stat(exePath).st_mtime
+ else:
+ curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
+ rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
+ if rootPath == "" or rootPath == None:
+ EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
+determine whether database file is out of date!\n")
+
+ # walk the root path of source or build's binary to get the time last modified.
+
+ for root, dirs, files in os.walk (rootPath):
+ for dir in dirs:
+ # bypass source control folder
+ if dir.lower() in [".svn", "_svn", "cvs"]:
+ dirs.remove(dir)
+
+ for file in files:
+ ext = os.path.splitext(file)[1]
+ if ext.lower() == ".py": # only check .py files
+ fd = os.stat(os.path.join(root, file))
+ if timeOfToolModified < fd.st_mtime:
+ timeOfToolModified = fd.st_mtime
+ if timeOfToolModified > os.stat(DbPath).st_mtime:
+ EdkLogger.verbose("\nWorkspace database is out of data!")
+ return True
+
+ return False
+ + ## Initialize build database + def InitDatabase(self): + EdkLogger.verbose("\nInitialize build database started ...") + + # + # Create new tables + # + self.TblDataModel.Create(False) + self.TblFile.Create(False) + + # + # Initialize table DataModel + # + self.TblDataModel.InitTable() + EdkLogger.verbose("Initialize build database ... DONE!") + + ## Query a table + # + # @param Table: The instance of the table to be queried + # + def QueryTable(self, Table): + Table.Query() + + ## Close entire database + # + # Commit all first + # Close the connection and cursor + # + def Close(self): + self.Conn.commit() + self.Cur.close() + self.Conn.close() + + ## Get unique file ID for the gvien file + def GetFileId(self, FilePath): + return self.TblFile.GetFileId(FilePath) + + ## Get file type value for the gvien file ID + def GetFileType(self, FileId): + return self.TblFile.GetFileType(FileId) + + ## Get time stamp stored in file table + def GetTimeStamp(self, FileId): + return self.TblFile.GetFileTimeStamp(FileId) + + ## Update time stamp in file table + def SetTimeStamp(self, FileId, TimeStamp): + return self.TblFile.SetFileTimeStamp(FileId, TimeStamp) + + ## Check if a table integrity flag exists or not + def CheckIntegrity(self, TableName): + try: + Result = self.Cur.execute("select min(ID) from %s" % (TableName)).fetchall() + if Result[0][0] != -1: + return False + except: + return False + return True + + ## Compose table name for given file type and file ID + def GetTableName(self, FileType, FileId): + return "_%s_%s" % (FileType, FileId) + + ## Return a temp table containing all content of the given file + # + # @param FileInfo The tuple containing path and type of a file + # + def __getitem__(self, FileInfo): + FilePath, FileType, Macros = FileInfo + if FileType not in self._FILE_TABLE_: + return None + + # flag used to indicate if it's parsed or not + FilePath = str(FilePath) + Parsed = False + FileId = self.GetFileId(FilePath) + if FileId != None: + TimeStamp = 
os.stat(FilePath)[8] + TableName = self.GetTableName(FileType, FileId) + if TimeStamp != self.GetTimeStamp(FileId): + # update the timestamp in database + self.SetTimeStamp(FileId, TimeStamp) + else: + # if the table exists and is integrity, don't parse it + Parsed = self.CheckIntegrity(TableName) + else: + FileId = self.TblFile.InsertFile(FilePath, FileType) + TableName = self.GetTableName(FileType, FileId) + + FileTable = self._FILE_TABLE_[FileType](self.Cur, TableName, FileId) + FileTable.Create(not Parsed) + Parser = self._FILE_PARSER_[FileType](FilePath, FileType, FileTable, Macros) + # set the "Finished" flag in parser in order to avoid re-parsing (if parsed) + Parser.Finished = Parsed + return Parser + + ## Summarize all packages in the database + def _GetPackageList(self): + PackageList = [] + for Module in self.ModuleList: + for Package in Module.Packages: + if Package not in PackageList: + PackageList.append(Package) + return PackageList + + ## Summarize all platforms in the database + def _GetPlatformList(self): + PlatformList = [] + for PlatformFile in self.TblFile.GetFileList(MODEL_FILE_DSC): + try: + Platform = self.BuildObject[PathClass(PlatformFile), 'COMMON'] + except: + Platform = None + if Platform != None: + PlatformList.append(Platform) + return PlatformList + + ## Summarize all modules in the database + def _GetModuleList(self): + ModuleList = [] + for ModuleFile in self.TblFile.GetFileList(MODEL_FILE_INF): + try: + Module = self.BuildObject[PathClass(ModuleFile), 'COMMON'] + except: + Module = None + if Module != None: + ModuleList.append(Module) + return ModuleList + + PlatformList = property(_GetPlatformList) + PackageList = property(_GetPackageList) + ModuleList = property(_GetModuleList) + +## +# +# This acts like the main() function for the script, unless it is 'import'ed into another +# script. 
+# +if __name__ == '__main__': + pass + diff --git a/BaseTools/Source/Python/Workspace/__init__.py b/BaseTools/Source/Python/Workspace/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/Workspace/__init__.py diff --git a/BaseTools/Source/Python/build/__init__.py b/BaseTools/Source/Python/build/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/build/__init__.py diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py new file mode 100644 index 0000000000..c92b442615 --- /dev/null +++ b/BaseTools/Source/Python/build/build.py @@ -0,0 +1,1436 @@ +## @file +# build a platform or a module +# +# Copyright (c) 2007, Intel Corporation +# +# All rights reserved. This program and the accompanying materials +# are licensed and made available under the terms and conditions of the BSD License +# which accompanies this distribution. The full text of the license may be found at +# http://opensource.org/licenses/bsd-license.php +# +# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, +# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. +# + +## +# Import Modules +# +import os +import re +import sys +import glob +import time +import platform +import traceback + +from threading import * +from optparse import OptionParser +from subprocess import * +from Common import Misc as Utils + +from Common.TargetTxtClassObject import * +from Common.ToolDefClassObject import * +from Common.DataType import * +from AutoGen.AutoGen import * +from Common.BuildToolError import * +from Workspace.WorkspaceDatabase import * + +import Common.EdkLogger +import Common.GlobalData as GlobalData + +# Version and Copyright +VersionNumber = "0.5" +__version__ = "%prog Version " + VersionNumber +__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved." 
+ +## standard targets of build command +gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'clean', 'cleanall', 'cleanlib', 'run'] + +## build configuration file +gBuildConfiguration = "Conf/target.txt" +gBuildCacheDir = "Conf/.cache" +gToolsDefinition = "Conf/tools_def.txt" + +## Check environment PATH variable to make sure the specified tool is found +# +# If the tool is found in the PATH, then True is returned +# Otherwise, False is returned +# +def IsToolInPath(tool): + if os.environ.has_key('PATHEXT'): + extns = os.environ['PATHEXT'].split(os.path.pathsep) + else: + extns = ('',) + for pathDir in os.environ['PATH'].split(os.path.pathsep): + for ext in extns: + if os.path.exists(os.path.join(pathDir, tool + ext)): + return True + return False + +## Check environment variables +# +# Check environment variables that must be set for build. Currently they are +# +# WORKSPACE The directory all packages/platforms start from +# EDK_TOOLS_PATH The directory contains all tools needed by the build +# PATH $(EDK_TOOLS_PATH)/Bin/<sys> must be set in PATH +# +# If any of above environment variable is not set or has error, the build +# will be broken. +# +def CheckEnvVariable(): + # check WORKSPACE + if "WORKSPACE" not in os.environ: + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", + ExtraData="WORKSPACE") + + WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"])) + if not os.path.exists(WorkspaceDir): + EdkLogger.error("build", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData="%s" % WorkspaceDir) + elif ' ' in WorkspaceDir: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in WORKSPACE path", + ExtraData=WorkspaceDir) + os.environ["WORKSPACE"] = WorkspaceDir + + # + # Check EFI_SOURCE (R8 build convention). 
EDK_SOURCE will always point to ECP + # + os.environ["ECP_SOURCE"] = os.path.join(WorkspaceDir, GlobalData.gEdkCompatibilityPkg) + if "EFI_SOURCE" not in os.environ: + os.environ["EFI_SOURCE"] = os.environ["ECP_SOURCE"] + if "EDK_SOURCE" not in os.environ: + os.environ["EDK_SOURCE"] = os.environ["ECP_SOURCE"] + + # + # Unify case of characters on case-insensitive systems + # + EfiSourceDir = os.path.normcase(os.path.normpath(os.environ["EFI_SOURCE"])) + EdkSourceDir = os.path.normcase(os.path.normpath(os.environ["EDK_SOURCE"])) + EcpSourceDir = os.path.normcase(os.path.normpath(os.environ["ECP_SOURCE"])) + + os.environ["EFI_SOURCE"] = EfiSourceDir + os.environ["EDK_SOURCE"] = EdkSourceDir + os.environ["ECP_SOURCE"] = EcpSourceDir + os.environ["EDK_TOOLS_PATH"] = os.path.normcase(os.environ["EDK_TOOLS_PATH"]) + + if not os.path.exists(EcpSourceDir): + EdkLogger.verbose("ECP_SOURCE = %s doesn't exist. R8 modules could not be built." % EcpSourceDir) + elif ' ' in EcpSourceDir: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in ECP_SOURCE path", + ExtraData=EcpSourceDir) + if not os.path.exists(EdkSourceDir): + if EdkSourceDir == EcpSourceDir: + EdkLogger.verbose("EDK_SOURCE = %s doesn't exist. R8 modules could not be built." % EdkSourceDir) + else: + EdkLogger.error("build", PARAMETER_INVALID, "EDK_SOURCE does not exist", + ExtraData=EdkSourceDir) + elif ' ' in EdkSourceDir: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in EDK_SOURCE path", + ExtraData=EdkSourceDir) + if not os.path.exists(EfiSourceDir): + if EfiSourceDir == EcpSourceDir: + EdkLogger.verbose("EFI_SOURCE = %s doesn't exist. R8 modules could not be built." 
% EfiSourceDir) + else: + EdkLogger.error("build", PARAMETER_INVALID, "EFI_SOURCE does not exist", + ExtraData=EfiSourceDir) + elif ' ' in EfiSourceDir: + EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in EFI_SOURCE path", + ExtraData=EfiSourceDir) + + # change absolute path to relative path to WORKSPACE + if EfiSourceDir.upper().find(WorkspaceDir.upper()) != 0: + EdkLogger.error("build", PARAMETER_INVALID, "EFI_SOURCE is not under WORKSPACE", + ExtraData="WORKSPACE = %s\n EFI_SOURCE = %s" % (WorkspaceDir, EfiSourceDir)) + if EdkSourceDir.upper().find(WorkspaceDir.upper()) != 0: + EdkLogger.error("build", PARAMETER_INVALID, "EDK_SOURCE is not under WORKSPACE", + ExtraData="WORKSPACE = %s\n EDK_SOURCE = %s" % (WorkspaceDir, EdkSourceDir)) + if EcpSourceDir.upper().find(WorkspaceDir.upper()) != 0: + EdkLogger.error("build", PARAMETER_INVALID, "ECP_SOURCE is not under WORKSPACE", + ExtraData="WORKSPACE = %s\n ECP_SOURCE = %s" % (WorkspaceDir, EcpSourceDir)) + + # check EDK_TOOLS_PATH + if "EDK_TOOLS_PATH" not in os.environ: + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", + ExtraData="EDK_TOOLS_PATH") + + # check PATH + if "PATH" not in os.environ: + EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found", + ExtraData="PATH") + + # for macro replacement in R9 DSC/DEC/INF file + GlobalData.gGlobalDefines["WORKSPACE"] = "" + + # for macro replacement in R8 INF file + GlobalData.gGlobalDefines["EFI_SOURCE"] = EfiSourceDir + GlobalData.gGlobalDefines["EDK_SOURCE"] = EdkSourceDir + + GlobalData.gWorkspace = WorkspaceDir + GlobalData.gEfiSource = EfiSourceDir + GlobalData.gEdkSource = EdkSourceDir + GlobalData.gEcpSource = EcpSourceDir + +## Get normalized file path +# +# Convert the path to be local format, and remove the WORKSPACE path at the +# beginning if the file path is given in full path. 
+# +# @param FilePath File path to be normalized +# @param Workspace Workspace path which the FilePath will be checked against +# +# @retval string The normalized file path +# +def NormFile(FilePath, Workspace): + # check if the path is absolute or relative + if os.path.isabs(FilePath): + FileFullPath = os.path.normpath(FilePath) + else: + FileFullPath = os.path.normpath(os.path.join(Workspace, FilePath)) + + # check if the file path exists or not + if not os.path.isfile(FileFullPath): + EdkLogger.error("build", FILE_NOT_FOUND, ExtraData="\t%s (Please give file in absolute path or relative to WORKSPACE)" % FileFullPath) + + # remove workspace directory from the beginning part of the file path + if Workspace[-1] in ["\\", "/"]: + return FileFullPath[len(Workspace):] + else: + return FileFullPath[(len(Workspace) + 1):] + +## Get the output of an external program +# +# This is the entrance method of thread reading output of an external program and +# putting them in STDOUT/STDERR of current program. +# +# @param From The stream message read from +# @param To The stream message put on +# @param ExitFlag The flag used to indicate stopping reading +# +def ReadMessage(From, To, ExitFlag): + while True: + # read one line a time + Line = From.readline() + # empty string means "end" + if Line != None and Line != "": + To(Line.rstrip()) + else: + break + if ExitFlag.isSet(): + break + +## Launch an external program +# +# This method will call subprocess.Popen to execute an external program with +# given options in specified directory. Because of the dead-lock issue during +# redirecting output of the external program, threads are used to to do the +# redirection work. 
+# +# @param Command A list or string containing the call of the program +# @param WorkingDir The directory in which the program will be running +# +def LaunchCommand(Command, WorkingDir): + # if working directory doesn't exist, Popen() will raise an exception + if not os.path.isdir(WorkingDir): + EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=WorkingDir) + + Proc = None + EndOfProcedure = None + try: + # launch the command + Proc = Popen(Command, stdout=PIPE, stderr=PIPE, env=os.environ, cwd=WorkingDir, bufsize=-1) + + # launch two threads to read the STDOUT and STDERR + EndOfProcedure = Event() + EndOfProcedure.clear() + if Proc.stdout: + StdOutThread = Thread(target=ReadMessage, args=(Proc.stdout, EdkLogger.info, EndOfProcedure)) + StdOutThread.setName("STDOUT-Redirector") + StdOutThread.setDaemon(False) + StdOutThread.start() + + if Proc.stderr: + StdErrThread = Thread(target=ReadMessage, args=(Proc.stderr, EdkLogger.quiet, EndOfProcedure)) + StdErrThread.setName("STDERR-Redirector") + StdErrThread.setDaemon(False) + StdErrThread.start() + + # waiting for program exit + Proc.wait() + except: # in case of aborting + # terminate the threads redirecting the program output + if EndOfProcedure != None: + EndOfProcedure.set() + if Proc == None: + if type(Command) != type(""): + Command = " ".join(Command) + EdkLogger.error("build", COMMAND_FAILURE, "Failed to start command", ExtraData="%s [%s]" % (Command, WorkingDir)) + + if Proc.stdout: + StdOutThread.join() + if Proc.stderr: + StdErrThread.join() + + # check the return code of the program + if Proc.returncode != 0: + if type(Command) != type(""): + Command = " ".join(Command) + EdkLogger.error("build", COMMAND_FAILURE, ExtraData="%s [%s]" % (Command, WorkingDir)) + +## The smallest unit that can be built in multi-thread build mode +# +# This is the base class of build unit. The "Obj" parameter must provide +# __str__(), __eq__() and __hash__() methods. Otherwise there could be build units +# missing build. 
+# +# Currently the "Obj" should be only ModuleAutoGen or PlatformAutoGen objects. +# +class BuildUnit: + ## The constructor + # + # @param self The object pointer + # @param Obj The object the build is working on + # @param Target The build target name, one of gSupportedTarget + # @param Dependency The BuildUnit(s) which must be completed in advance + # @param WorkingDir The directory build command starts in + # + def __init__(self, Obj, BuildCommand, Target, Dependency, WorkingDir="."): + self.BuildObject = Obj + self.Dependency = Dependency + self.WorkingDir = WorkingDir + self.Target = Target + self.BuildCommand = BuildCommand + if BuildCommand == None or len(BuildCommand) == 0: + EdkLogger.error("build", OPTION_MISSING, "No build command found for", + ExtraData=str(Obj)) + + ## str() method + # + # It just returns the string representaion of self.BuildObject + # + # @param self The object pointer + # + def __str__(self): + return str(self.BuildObject) + + ## "==" operator method + # + # It just compares self.BuildObject with "Other". So self.BuildObject must + # provide its own __eq__() method. + # + # @param self The object pointer + # @param Other The other BuildUnit object compared to + # + def __eq__(self, Other): + return Other != None and self.BuildObject == Other.BuildObject \ + and self.BuildObject.Arch == Other.BuildObject.Arch + + ## hash() method + # + # It just returns the hash value of self.BuildObject which must be hashable. + # + # @param self The object pointer + # + def __hash__(self): + return hash(self.BuildObject) + hash(self.BuildObject.Arch) + + def __repr__(self): + return repr(self.BuildObject) + +## The smallest module unit that can be built by nmake/make command in multi-thread build mode +# +# This class is for module build by nmake/make build system. The "Obj" parameter +# must provide __str__(), __eq__() and __hash__() methods. Otherwise there could +# be make units missing build. 
+# +# Currently the "Obj" should be only ModuleAutoGen object. +# +class ModuleMakeUnit(BuildUnit): + ## The constructor + # + # @param self The object pointer + # @param Obj The ModuleAutoGen object the build is working on + # @param Target The build target name, one of gSupportedTarget + # + def __init__(self, Obj, Target): + Dependency = [ModuleMakeUnit(La, Target) for La in Obj.LibraryAutoGenList] + BuildUnit.__init__(self, Obj, Obj.BuildCommand, Target, Dependency, Obj.MakeFileDir) + if Target in [None, "", "all"]: + self.Target = "tbuild" + +## The smallest platform unit that can be built by nmake/make command in multi-thread build mode +# +# This class is for platform build by nmake/make build system. The "Obj" parameter +# must provide __str__(), __eq__() and __hash__() methods. Otherwise there could +# be make units missing build. +# +# Currently the "Obj" should be only PlatformAutoGen object. +# +class PlatformMakeUnit(BuildUnit): + ## The constructor + # + # @param self The object pointer + # @param Obj The PlatformAutoGen object the build is working on + # @param Target The build target name, one of gSupportedTarget + # + def __init__(self, Obj, Target): + Dependency = [ModuleMakeUnit(Lib, Target) for Lib in self.BuildObject.LibraryAutoGenList] + Dependency.extend([ModuleMakeUnit(Mod, Target) for Mod in self.BuildObject.ModuleAutoGenList]) + BuildUnit.__init__(self, Obj, Obj.BuildCommand, Target, Dependency, Obj.MakeFileDir) + +## The class representing the task of a module build or platform build +# +# This class manages the build tasks in multi-thread build mode. Its jobs include +# scheduling thread running, catching thread error, monitor the thread status, etc. 
+# +class BuildTask: + # queue for tasks waiting for schedule + _PendingQueue = sdict() + _PendingQueueLock = threading.Lock() + + # queue for tasks ready for running + _ReadyQueue = sdict() + _ReadyQueueLock = threading.Lock() + + # queue for run tasks + _RunningQueue = sdict() + _RunningQueueLock = threading.Lock() + + # queue containing all build tasks, in case duplicate build + _TaskQueue = sdict() + + # flag indicating error occurs in a running thread + _ErrorFlag = threading.Event() + _ErrorFlag.clear() + _ErrorMessage = "" + + # BoundedSemaphore object used to control the number of running threads + _Thread = None + + # flag indicating if the scheduler is started or not + _SchedulerStopped = threading.Event() + _SchedulerStopped.set() + + ## Start the task scheduler thread + # + # @param MaxThreadNumber The maximum thread number + # @param ExitFlag Flag used to end the scheduler + # + @staticmethod + def StartScheduler(MaxThreadNumber, ExitFlag): + SchedulerThread = Thread(target=BuildTask.Scheduler, args=(MaxThreadNumber, ExitFlag)) + SchedulerThread.setName("Build-Task-Scheduler") + SchedulerThread.setDaemon(False) + SchedulerThread.start() + # wait for the scheduler to be started, especially useful in Linux + while not BuildTask.IsOnGoing(): + time.sleep(0.01) + + ## Scheduler method + # + # @param MaxThreadNumber The maximum thread number + # @param ExitFlag Flag used to end the scheduler + # + @staticmethod + def Scheduler(MaxThreadNumber, ExitFlag): + BuildTask._SchedulerStopped.clear() + try: + # use BoundedSemaphore to control the maximum running threads + BuildTask._Thread = BoundedSemaphore(MaxThreadNumber) + # + # scheduling loop, which will exits when no pending/ready task and + # indicated to do so, or there's error in running thread + # + while (len(BuildTask._PendingQueue) > 0 or len(BuildTask._ReadyQueue) > 0 \ + or not ExitFlag.isSet()) and not BuildTask._ErrorFlag.isSet(): + EdkLogger.debug(EdkLogger.DEBUG_8, "Pending Queue (%d), Ready 
Queue (%d)" + % (len(BuildTask._PendingQueue), len(BuildTask._ReadyQueue))) + + # get all pending tasks + BuildTask._PendingQueueLock.acquire() + BuildObjectList = BuildTask._PendingQueue.keys() + # + # check if their dependency is resolved, and if true, move them + # into ready queue + # + for BuildObject in BuildObjectList: + Bt = BuildTask._PendingQueue[BuildObject] + if Bt.IsReady(): + BuildTask._ReadyQueue[BuildObject] = BuildTask._PendingQueue.pop(BuildObject) + BuildTask._PendingQueueLock.release() + + # launch build thread until the maximum number of threads is reached + while not BuildTask._ErrorFlag.isSet(): + # empty ready queue, do nothing further + if len(BuildTask._ReadyQueue) == 0: + break + + # wait for active thread(s) exit + BuildTask._Thread.acquire(True) + + # start a new build thread + Bo = BuildTask._ReadyQueue.keys()[0] + Bt = BuildTask._ReadyQueue.pop(Bo) + + # move into running queue + BuildTask._RunningQueueLock.acquire() + BuildTask._RunningQueue[Bo] = Bt + BuildTask._RunningQueueLock.release() + + Bt.Start() + # avoid tense loop + time.sleep(0.01) + + # avoid tense loop + time.sleep(0.01) + + # wait for all running threads exit + if BuildTask._ErrorFlag.isSet(): + EdkLogger.quiet("\nWaiting for all build threads exit...") + # while not BuildTask._ErrorFlag.isSet() and \ + while len(BuildTask._RunningQueue) > 0: + EdkLogger.verbose("Waiting for thread ending...(%d)" % len(BuildTask._RunningQueue)) + EdkLogger.debug(EdkLogger.DEBUG_8, "Threads [%s]" % ", ".join([Th.getName() for Th in threading.enumerate()])) + # avoid tense loop + time.sleep(0.1) + except BaseException, X: + # + # TRICK: hide the output of threads left runing, so that the user can + # catch the error message easily + # + EdkLogger.SetLevel(EdkLogger.ERROR) + BuildTask._ErrorFlag.set() + BuildTask._ErrorMessage = "build thread scheduler error\n\t%s" % str(X) + + BuildTask._PendingQueue.clear() + BuildTask._ReadyQueue.clear() + BuildTask._RunningQueue.clear() + 
BuildTask._TaskQueue.clear() + BuildTask._SchedulerStopped.set() + + ## Wait for all running method exit + # + @staticmethod + def WaitForComplete(): + BuildTask._SchedulerStopped.wait() + + ## Check if the scheduler is running or not + # + @staticmethod + def IsOnGoing(): + return not BuildTask._SchedulerStopped.isSet() + + ## Abort the build + @staticmethod + def Abort(): + if BuildTask.IsOnGoing(): + BuildTask._ErrorFlag.set() + BuildTask.WaitForComplete() + + ## Check if there's error in running thread + # + # Since the main thread cannot catch exceptions in other thread, we have to + # use threading.Event to communicate this formation to main thread. + # + @staticmethod + def HasError(): + return BuildTask._ErrorFlag.isSet() + + ## Get error message in running thread + # + # Since the main thread cannot catch exceptions in other thread, we have to + # use a static variable to communicate this message to main thread. + # + @staticmethod + def GetErrorMessage(): + return BuildTask._ErrorMessage + + ## Factory method to create a BuildTask object + # + # This method will check if a module is building or has been built. And if + # true, just return the associated BuildTask object in the _TaskQueue. If + # not, create and return a new BuildTask object. The new BuildTask object + # will be appended to the _PendingQueue for scheduling later. 
+ # + # @param BuildItem A BuildUnit object representing a build object + # @param Dependency The dependent build object of BuildItem + # + @staticmethod + def New(BuildItem, Dependency=None): + if BuildItem in BuildTask._TaskQueue: + Bt = BuildTask._TaskQueue[BuildItem] + return Bt + + Bt = BuildTask() + Bt._Init(BuildItem, Dependency) + BuildTask._TaskQueue[BuildItem] = Bt + + BuildTask._PendingQueueLock.acquire() + BuildTask._PendingQueue[BuildItem] = Bt + BuildTask._PendingQueueLock.release() + + return Bt + + ## The real constructor of BuildTask + # + # @param BuildItem A BuildUnit object representing a build object + # @param Dependency The dependent build object of BuildItem + # + def _Init(self, BuildItem, Dependency=None): + self.BuildItem = BuildItem + + self.DependencyList = [] + if Dependency == None: + Dependency = BuildItem.Dependency + else: + Dependency.extend(BuildItem.Dependency) + self.AddDependency(Dependency) + # flag indicating build completes, used to avoid unnecessary re-build + self.CompleteFlag = False + + ## Check if all dependent build tasks are completed or not + # + def IsReady(self): + ReadyFlag = True + for Dep in self.DependencyList: + if Dep.CompleteFlag == True: + continue + ReadyFlag = False + break + + return ReadyFlag + + ## Add dependent build task + # + # @param Dependency The list of dependent build objects + # + def AddDependency(self, Dependency): + for Dep in Dependency: + self.DependencyList.append(BuildTask.New(Dep)) # BuildTask list + + ## The thread wrapper of LaunchCommand function + # + # @param Command A list or string contains the call of the command + # @param WorkingDir The directory in which the program will be running + # + def _CommandThread(self, Command, WorkingDir): + try: + LaunchCommand(Command, WorkingDir) + self.CompleteFlag = True + except: + # + # TRICK: hide the output of threads left runing, so that the user can + # catch the error message easily + # + if not BuildTask._ErrorFlag.isSet(): + 
GlobalData.gBuildingModule = "%s [%s, %s, %s]" % (str(self.BuildItem.BuildObject), + self.BuildItem.BuildObject.Arch, + self.BuildItem.BuildObject.ToolChain, + self.BuildItem.BuildObject.BuildTarget + ) + EdkLogger.SetLevel(EdkLogger.ERROR) + BuildTask._ErrorFlag.set() + BuildTask._ErrorMessage = "%s broken\n %s [%s]" % \ + (threading.currentThread().getName(), Command, WorkingDir) + # indicate there's a thread is available for another build task + BuildTask._RunningQueueLock.acquire() + BuildTask._RunningQueue.pop(self.BuildItem) + BuildTask._RunningQueueLock.release() + BuildTask._Thread.release() + + ## Start build task thread + # + def Start(self): + EdkLogger.quiet("Building ... %s" % repr(self.BuildItem)) + Command = self.BuildItem.BuildCommand + [self.BuildItem.Target] + self.BuildTread = Thread(target=self._CommandThread, args=(Command, self.BuildItem.WorkingDir)) + self.BuildTread.setName("build thread") + self.BuildTread.setDaemon(False) + self.BuildTread.start() + +## The class implementing the EDK2 build process +# +# The build process includes: +# 1. Load configuration from target.txt and tools_def.txt in $(WORKSPACE)/Conf +# 2. Parse DSC file of active platform +# 3. Parse FDF file if any +# 4. Establish build database, including parse all other files (module, package) +# 5. Create AutoGen files (C code file, depex file, makefile) if necessary +# 6. Call build command +# +class Build(): + ## Constructor + # + # Constructor will load all necessary configurations, parse platform, modules + # and packages and the establish a database for AutoGen. 
+ # + # @param Target The build command target, one of gSupportedTarget + # @param WorkspaceDir The directory of workspace + # @param Platform The DSC file of active platform + # @param Module The INF file of active module, if any + # @param Arch The Arch list of platform or module + # @param ToolChain The name list of toolchain + # @param BuildTarget The "DEBUG" or "RELEASE" build + # @param FlashDefinition The FDF file of active platform + # @param FdList=[] The FD names to be individually built + # @param FvList=[] The FV names to be individually built + # @param MakefileType The type of makefile (for MSFT make or GNU make) + # @param SilentMode Indicate multi-thread build mode + # @param ThreadNumber The maximum number of thread if in multi-thread build mode + # @param SkipAutoGen Skip AutoGen step + # @param Reparse Re-parse all meta files + # @param SkuId SKU id from command line + # + def __init__(self, Target, WorkspaceDir, Platform, Module, Arch, ToolChain, + BuildTarget, FlashDefinition, FdList=[], FvList=[], + MakefileType="nmake", SilentMode=False, ThreadNumber=2, + SkipAutoGen=False, Reparse=False, SkuId=None): + + self.WorkspaceDir = WorkspaceDir + self.Target = Target + self.PlatformFile = Platform + self.ModuleFile = Module + self.ArchList = Arch + self.ToolChainList = ToolChain + self.BuildTargetList= BuildTarget + self.Fdf = FlashDefinition + self.FdList = FdList + self.FvList = FvList + self.MakefileType = MakefileType + self.SilentMode = SilentMode + self.ThreadNumber = ThreadNumber + self.SkipAutoGen = SkipAutoGen + self.Reparse = Reparse + self.SkuId = SkuId + self.SpawnMode = True + + self.TargetTxt = TargetTxtClassObject() + self.ToolDef = ToolDefClassObject() + #self.Db = WorkspaceDatabase(None, GlobalData.gGlobalDefines, self.Reparse) + self.Db = WorkspaceDatabase(None, {}, self.Reparse) + self.BuildDatabase = self.Db.BuildObject + self.Platform = None + + # print dot charater during doing some time-consuming work + self.Progress = 
Utils.Progressor() + + # parse target.txt, tools_def.txt, and platform file + #self.RestoreBuildData() + self.LoadConfiguration() + self.InitBuild() + + # print current build environment and configuration + EdkLogger.quiet("%-24s = %s" % ("WORKSPACE", os.environ["WORKSPACE"])) + EdkLogger.quiet("%-24s = %s" % ("ECP_SOURCE", os.environ["ECP_SOURCE"])) + EdkLogger.quiet("%-24s = %s" % ("EDK_SOURCE", os.environ["EDK_SOURCE"])) + EdkLogger.quiet("%-24s = %s" % ("EFI_SOURCE", os.environ["EFI_SOURCE"])) + EdkLogger.quiet("%-24s = %s" % ("EDK_TOOLS_PATH", os.environ["EDK_TOOLS_PATH"])) + + EdkLogger.info('\n%-24s = %s' % ("TARGET_ARCH", ' '.join(self.ArchList))) + EdkLogger.info('%-24s = %s' % ("TARGET", ' '.join(self.BuildTargetList))) + EdkLogger.info('%-24s = %s' % ("TOOL_CHAIN_TAG", ' '.join(self.ToolChainList))) + + EdkLogger.info('\n%-24s = %s' % ("Active Platform", self.PlatformFile)) + + if self.Fdf != None and self.Fdf != "": + EdkLogger.info('%-24s = %s' % ("Flash Image Definition", self.Fdf)) + + if self.ModuleFile != None and self.ModuleFile != "": + EdkLogger.info('%-24s = %s' % ("Active Module", self.ModuleFile)) + + os.chdir(self.WorkspaceDir) + self.Progress.Start("\nProcessing meta-data") + + ## Load configuration + # + # This method will parse target.txt and get the build configurations. 
+ # + def LoadConfiguration(self): + # + # Check target.txt and tools_def.txt and Init them + # + BuildConfigurationFile = os.path.normpath(os.path.join(self.WorkspaceDir, gBuildConfiguration)) + if os.path.isfile(BuildConfigurationFile) == True: + StatusCode = self.TargetTxt.LoadTargetTxtFile(BuildConfigurationFile) + + ToolDefinitionFile = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF] + if ToolDefinitionFile == '': + ToolDefinitionFile = gToolsDefinition + ToolDefinitionFile = os.path.normpath(os.path.join(self.WorkspaceDir, ToolDefinitionFile)) + if os.path.isfile(ToolDefinitionFile) == True: + StatusCode = self.ToolDef.LoadToolDefFile(ToolDefinitionFile) + else: + EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=ToolDefinitionFile) + else: + EdkLogger.error("build", FILE_NOT_FOUND, ExtraData=BuildConfigurationFile) + + # if no ARCH given in command line, get it from target.txt + if self.ArchList == None or len(self.ArchList) == 0: + self.ArchList = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET_ARCH] + + # if no build target given in command line, get it from target.txt + if self.BuildTargetList == None or len(self.BuildTargetList) == 0: + self.BuildTargetList = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET] + + # if no tool chain given in command line, get it from target.txt + if self.ToolChainList == None or len(self.ToolChainList) == 0: + self.ToolChainList = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG] + if self.ToolChainList == None or len(self.ToolChainList) == 0: + EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, ExtraData="No toolchain given. 
Don't know how to build.\n") + + # check if the tool chains are defined or not + NewToolChainList = [] + for ToolChain in self.ToolChainList: + if ToolChain not in self.ToolDef.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]: + EdkLogger.warn("build", "Tool chain [%s] is not defined" % ToolChain) + else: + NewToolChainList.append(ToolChain) + # if no tool chain available, break the build + if len(NewToolChainList) == 0: + EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, + ExtraData="[%s] not defined. No toolchain available for build!\n" % ", ".join(self.ToolChainList)) + else: + self.ToolChainList = NewToolChainList + + if self.ThreadNumber == None: + self.ThreadNumber = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER] + if self.ThreadNumber == '': + self.ThreadNumber = 0 + else: + self.ThreadNumber = int(self.ThreadNumber, 0) + + if self.ThreadNumber == 0: + self.ThreadNumber = 1 + + if not self.PlatformFile: + PlatformFile = self.TargetTxt.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM] + if not PlatformFile: + # Try to find one in current directory + WorkingDirectory = os.getcwd() + FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.dsc'))) + FileNum = len(FileList) + if FileNum >= 2: + EdkLogger.error("build", OPTION_MISSING, + ExtraData="There are %d DSC files in %s. Use '-p' to specify one.\n" % (FileNum, WorkingDirectory)) + elif FileNum == 1: + PlatformFile = FileList[0] + else: + EdkLogger.error("build", RESOURCE_NOT_AVAILABLE, + ExtraData="No active platform specified in target.txt or command line! 
Nothing can be built.\n") + + self.PlatformFile = PathClass(NormFile(PlatformFile, self.WorkspaceDir), self.WorkspaceDir) + ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc", False) + if ErrorCode != 0: + EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo) + + ## Initialize build configuration + # + # This method will parse DSC file and merge the configurations from + # command line and target.txt, then get the final build configurations. + # + def InitBuild(self): + ErrorCode, ErrorInfo = self.PlatformFile.Validate(".dsc") + if ErrorCode != 0: + EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo) + + # create metafile database + self.Db.InitDatabase() + + # we need information in platform description file to determine how to build + self.Platform = self.BuildDatabase[self.PlatformFile, 'COMMON'] + if not self.Fdf: + self.Fdf = self.Platform.FlashDefinition + + if self.SkuId == None or self.SkuId == '': + self.SkuId = self.Platform.SkuName + + # check FD/FV build target + if self.Fdf == None or self.Fdf == "": + if self.FdList != []: + EdkLogger.info("No flash definition file found. FD [%s] will be ignored." % " ".join(self.FdList)) + self.FdList = [] + if self.FvList != []: + EdkLogger.info("No flash definition file found. FV [%s] will be ignored." % " ".join(self.FvList)) + self.FvList = [] + else: + FdfParserObj = FdfParser(str(self.Fdf)) + FdfParserObj.ParseFile() + for fvname in self.FvList: + if fvname.upper() not in FdfParserObj.Profile.FvDict.keys(): + EdkLogger.error("build", OPTION_VALUE_INVALID, + "No such an FV in FDF file: %s" % fvname) + + # + # Merge Arch + # + if self.ArchList == None or len(self.ArchList) == 0: + ArchList = set(self.Platform.SupArchList) + else: + ArchList = set(self.ArchList) & set(self.Platform.SupArchList) + if len(ArchList) == 0: + EdkLogger.error("build", PARAMETER_INVALID, + ExtraData = "Active platform supports [%s] only, but [%s] is given." 
+ % (" ".join(self.Platform.SupArchList), " ".join(self.ArchList))) + elif len(ArchList) != len(self.ArchList): + SkippedArchList = set(self.ArchList).symmetric_difference(set(self.Platform.SupArchList)) + EdkLogger.verbose("\nArch [%s] is ignored because active platform supports [%s] but [%s] is specified !" + % (" ".join(SkippedArchList), " ".join(self.Platform.SupArchList), " ".join(self.ArchList))) + self.ArchList = tuple(ArchList) + + # Merge build target + if self.BuildTargetList == None or len(self.BuildTargetList) == 0: + BuildTargetList = self.Platform.BuildTargets + else: + BuildTargetList = list(set(self.BuildTargetList) & set(self.Platform.BuildTargets)) + if BuildTargetList == []: + EdkLogger.error("build", PARAMETER_INVALID, "Active platform only supports [%s], but [%s] is given" + % (" ".join(self.Platform.BuildTargets), " ".join(self.BuildTargetList))) + self.BuildTargetList = BuildTargetList + + ## Build a module or platform + # + # Create autogen code and makfile for a module or platform, and the launch + # "make" command to build it + # + # @param Target The target of build command + # @param Platform The platform file + # @param Module The module file + # @param BuildTarget The name of build target, one of "DEBUG", "RELEASE" + # @param ToolChain The name of toolchain to build + # @param Arch The arch of the module/platform + # @param CreateDepModuleCodeFile Flag used to indicate creating code + # for dependent modules/Libraries + # @param CreateDepModuleMakeFile Flag used to indicate creating makefile + # for dependent modules/Libraries + # + def _Build(self, Target, AutoGenObject, CreateDepsCodeFile=True, CreateDepsMakeFile=True): + if AutoGenObject == None: + return False + + # skip file generation for cleanxxx targets, run and fds target + if Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']: + # for target which must generate AutoGen code and makefile + if not self.SkipAutoGen or Target == 'genc': + 
self.Progress.Start("Generating code") + AutoGenObject.CreateCodeFile(CreateDepsCodeFile) + self.Progress.Stop("done!") + if Target == "genc": + return True + + if not self.SkipAutoGen or Target == 'genmake': + self.Progress.Start("Generating makefile") + AutoGenObject.CreateMakeFile(CreateDepsMakeFile) + self.Progress.Stop("done!") + if Target == "genmake": + return True + else: + # always recreate top/platform makefile when clean, just in case of inconsistency + AutoGenObject.CreateCodeFile(False) + AutoGenObject.CreateMakeFile(False) + + if EdkLogger.GetLevel() == EdkLogger.QUIET: + EdkLogger.quiet("Building ... %s" % repr(AutoGenObject)) + + BuildCommand = AutoGenObject.BuildCommand + if BuildCommand == None or len(BuildCommand) == 0: + EdkLogger.error("build", OPTION_MISSING, ExtraData="No MAKE command found for [%s, %s, %s]" % Key) + + BuildCommand = BuildCommand + [Target] + LaunchCommand(BuildCommand, AutoGenObject.MakeFileDir) + if Target == 'cleanall': + try: + #os.rmdir(AutoGenObject.BuildDir) + RemoveDirectory(AutoGenObject.BuildDir, True) + except WindowsError, X: + EdkLogger.error("build", FILE_DELETE_FAILURE, ExtraData=str(X)) + return True + + ## Build active platform for different build targets and different tool chains + # + def _BuildPlatform(self): + for BuildTarget in self.BuildTargetList: + for ToolChain in self.ToolChainList: + Wa = WorkspaceAutoGen( + self.WorkspaceDir, + self.Platform, + BuildTarget, + ToolChain, + self.ArchList, + self.BuildDatabase, + self.TargetTxt, + self.ToolDef, + self.Fdf, + self.FdList, + self.FvList, + self.SkuId + ) + self.Progress.Stop("done!") + self._Build(self.Target, Wa) + + ## Build active module for different build targets, different tool chains and different archs + # + def _BuildModule(self): + for BuildTarget in self.BuildTargetList: + for ToolChain in self.ToolChainList: + # + # module build needs platform build information, so get platform + # AutoGen first + # + Wa = WorkspaceAutoGen( + 
self.WorkspaceDir, + self.Platform, + BuildTarget, + ToolChain, + self.ArchList, + self.BuildDatabase, + self.TargetTxt, + self.ToolDef, + self.Fdf, + self.FdList, + self.FvList, + self.SkuId + ) + Wa.CreateMakeFile(False) + self.Progress.Stop("done!") + MaList = [] + for Arch in self.ArchList: + Ma = ModuleAutoGen(Wa, self.ModuleFile, BuildTarget, ToolChain, Arch, self.PlatformFile) + if Ma == None: continue + MaList.append(Ma) + self._Build(self.Target, Ma) + if MaList == []: + EdkLogger.error( + 'build', + BUILD_ERROR, + "Module for [%s] is not a component of active platform."\ + " Please make sure that the ARCH and inf file path are"\ + " given in the same as in [%s]" %\ + (', '.join(self.ArchList), self.Platform), + ExtraData=self.ModuleFile + ) + + ## Build a platform in multi-thread mode + # + def _MultiThreadBuildPlatform(self): + for BuildTarget in self.BuildTargetList: + for ToolChain in self.ToolChainList: + Wa = WorkspaceAutoGen( + self.WorkspaceDir, + self.Platform, + BuildTarget, + ToolChain, + self.ArchList, + self.BuildDatabase, + self.TargetTxt, + self.ToolDef, + self.Fdf, + self.FdList, + self.FvList, + self.SkuId + ) + Wa.CreateMakeFile(False) + + # multi-thread exit flag + ExitFlag = threading.Event() + ExitFlag.clear() + for Arch in self.ArchList: + Pa = PlatformAutoGen(Wa, self.PlatformFile, BuildTarget, ToolChain, Arch) + if Pa == None: + continue + for Module in Pa.Platform.Modules: + # Get ModuleAutoGen object to generate C code file and makefile + Ma = ModuleAutoGen(Wa, Module, BuildTarget, ToolChain, Arch, self.PlatformFile) + if Ma == None: + continue + # Not to auto-gen for targets 'clean', 'cleanlib', 'cleanall', 'run', 'fds' + if self.Target not in ['clean', 'cleanlib', 'cleanall', 'run', 'fds']: + # for target which must generate AutoGen code and makefile + if not self.SkipAutoGen or self.Target == 'genc': + Ma.CreateCodeFile(True) + if self.Target == "genc": + continue + + if not self.SkipAutoGen or self.Target == 'genmake': + 
Ma.CreateMakeFile(True) + if self.Target == "genmake": + continue + self.Progress.Stop("done!") + # Generate build task for the module + Bt = BuildTask.New(ModuleMakeUnit(Ma, self.Target)) + # Break build if any build thread has error + if BuildTask.HasError(): + # we need a full version of makefile for platform + ExitFlag.set() + BuildTask.WaitForComplete() + Pa.CreateMakeFile(False) + EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule) + # Start task scheduler + if not BuildTask.IsOnGoing(): + BuildTask.StartScheduler(self.ThreadNumber, ExitFlag) + + # in case there's an interruption. we need a full version of makefile for platform + Pa.CreateMakeFile(False) + if BuildTask.HasError(): + EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule) + + # + # All modules have been put in build tasks queue. Tell task scheduler + # to exit if all tasks are completed + # + ExitFlag.set() + BuildTask.WaitForComplete() + + # + # Check for build error, and raise exception if one + # has been signaled. + # + if BuildTask.HasError(): + EdkLogger.error("build", BUILD_ERROR, "Failed to build module", ExtraData=GlobalData.gBuildingModule) + + # Generate FD image if there's a FDF file found + if self.Fdf != '' and self.Target in ["", "all", "fds"]: + LaunchCommand(Wa.BuildCommand + ["fds"], Wa.MakeFileDir) + + ## Generate GuidedSectionTools.txt in the FV directories. 
+ # + def CreateGuidedSectionToolsFile(self): + for Arch in self.ArchList: + for BuildTarget in self.BuildTargetList: + for ToolChain in self.ToolChainList: + FvDir = os.path.join( + self.WorkspaceDir, + self.Platform.OutputDirectory, + '_'.join((BuildTarget, ToolChain)), + 'FV' + ) + if not os.path.exists(FvDir): + continue + # Build up the list of supported architectures for this build + prefix = '%s_%s_%s_' % (BuildTarget, ToolChain, Arch) + + # Look through the tool definitions for GUIDed tools + guidAttribs = [] + for (attrib, value) in self.ToolDef.ToolsDefTxtDictionary.iteritems(): + if attrib.upper().endswith('_GUID'): + split = attrib.split('_') + thisPrefix = '_'.join(split[0:3]) + '_' + if thisPrefix == prefix: + guid = self.ToolDef.ToolsDefTxtDictionary[attrib] + guid = guid.lower() + toolName = split[3] + path = '_'.join(split[0:4]) + '_PATH' + path = self.ToolDef.ToolsDefTxtDictionary[path] + path = self.GetFullPathOfTool(path) + guidAttribs.append((guid, toolName, path)) + + # Write out GuidedSecTools.txt + toolsFile = os.path.join(FvDir, 'GuidedSectionTools.txt') + toolsFile = open(toolsFile, 'wt') + for guidedSectionTool in guidAttribs: + print >> toolsFile, ' '.join(guidedSectionTool) + toolsFile.close() + + ## Returns the full path of the tool. + # + def GetFullPathOfTool (self, tool): + if os.path.exists(tool): + return os.path.realpath(tool) + else: + # We need to search for the tool using the + # PATH environment variable. + for dirInPath in os.environ['PATH'].split(os.pathsep): + foundPath = os.path.join(dirInPath, tool) + if os.path.exists(foundPath): + return os.path.realpath(foundPath) + + # If the tool was not found in the path then we just return + # the input tool. 
+ return tool + + ## Launch the module or platform build + # + def Launch(self): + if self.ModuleFile == None or self.ModuleFile == "": + if not self.SpawnMode or self.Target not in ["", "all"]: + self.SpawnMode = False + self._BuildPlatform() + else: + self._MultiThreadBuildPlatform() + self.CreateGuidedSectionToolsFile() + else: + self.SpawnMode = False + self._BuildModule() + + ## Do some clean-up works when error occurred + def Relinquish(self): + OldLogLevel = EdkLogger.GetLevel() + EdkLogger.SetLevel(EdkLogger.ERROR) + #self.DumpBuildData() + Utils.Progressor.Abort() + if self.SpawnMode == True: + BuildTask.Abort() + EdkLogger.SetLevel(OldLogLevel) + + def DumpBuildData(self): + CacheDirectory = os.path.join(self.WorkspaceDir, gBuildCacheDir) + Utils.CreateDirectory(CacheDirectory) + Utils.DataDump(Utils.gFileTimeStampCache, os.path.join(CacheDirectory, "gFileTimeStampCache")) + Utils.DataDump(Utils.gDependencyDatabase, os.path.join(CacheDirectory, "gDependencyDatabase")) + + def RestoreBuildData(self): + FilePath = os.path.join(self.WorkspaceDir, gBuildCacheDir, "gFileTimeStampCache") + if Utils.gFileTimeStampCache == {} and os.path.isfile(FilePath): + Utils.gFileTimeStampCache = Utils.DataRestore(FilePath) + if Utils.gFileTimeStampCache == None: + Utils.gFileTimeStampCache = {} + + FilePath = os.path.join(self.WorkspaceDir, gBuildCacheDir, "gDependencyDatabase") + if Utils.gDependencyDatabase == {} and os.path.isfile(FilePath): + Utils.gDependencyDatabase = Utils.DataRestore(FilePath) + if Utils.gDependencyDatabase == None: + Utils.gDependencyDatabase = {} + +def ParseDefines(DefineList=[]): + DefineDict = {} + if DefineList != None: + for Define in DefineList: + DefineTokenList = Define.split("=", 1) + if len(DefineTokenList) == 1: + DefineDict[DefineTokenList[0]] = "" + else: + DefineDict[DefineTokenList[0]] = DefineTokenList[1].strip() + return DefineDict + +gParamCheck = [] +def SingleCheckCallback(option, opt_str, value, parser): + if option not in 
gParamCheck: + setattr(parser.values, option.dest, value) + gParamCheck.append(option) + else: + parser.error("Option %s only allows one instance in command line!" % option) + +## Parse command line options +# +# Using standard Python module optparse to parse command line option of this tool. +# +# @retval Opt A optparse.Values object containing the parsed options +# @retval Args Target of build command +# +def MyOptionParser(): + Parser = OptionParser(description=__copyright__,version=__version__,prog="build.exe",usage="%prog [options] [all|fds|genc|genmake|clean|cleanall|cleanlib|modules|libraries|run]") + Parser.add_option("-a", "--arch", action="append", type="choice", choices=['IA32','X64','IPF','EBC','ARM'], dest="TargetArch", + help="ARCHS is one of list: IA32, X64, IPF, ARM or EBC, which overrides target.txt's TARGET_ARCH definition. To specify more archs, please repeat this option.") + Parser.add_option("-p", "--platform", action="callback", type="string", dest="PlatformFile", callback=SingleCheckCallback, + help="Build the platform specified by the DSC file name argument, overriding target.txt's ACTIVE_PLATFORM definition.") + Parser.add_option("-m", "--module", action="callback", type="string", dest="ModuleFile", callback=SingleCheckCallback, + help="Build the module specified by the INF file name argument.") + Parser.add_option("-b", "--buildtarget", action="append", type="choice", choices=['DEBUG','RELEASE'], dest="BuildTarget", + help="BuildTarget is one of list: DEBUG, RELEASE, which overrides target.txt's TARGET definition. 
To specify more TARGET, please repeat this option.") + Parser.add_option("-t", "--tagname", action="append", type="string", dest="ToolChain", + help="Using the Tool Chain Tagname to build the platform, overriding target.txt's TOOL_CHAIN_TAG definition.") + Parser.add_option("-x", "--sku-id", action="callback", type="string", dest="SkuId", callback=SingleCheckCallback, + help="Using this name of SKU ID to build the platform, overriding SKUID_IDENTIFIER in DSC file.") + + Parser.add_option("-n", action="callback", type="int", dest="ThreadNumber", callback=SingleCheckCallback, + help="Build the platform using multi-threaded compiler. The value overrides target.txt's MAX_CONCURRENT_THREAD_NUMBER. Less than 2 will disable multi-thread builds.") + + Parser.add_option("-f", "--fdf", action="callback", type="string", dest="FdfFile", callback=SingleCheckCallback, + help="The name of the FDF file to use, which overrides the setting in the DSC file.") + Parser.add_option("-r", "--rom-image", action="append", type="string", dest="RomImage", default=[], + help="The name of FD to be generated. The name must be from [FD] section in FDF file.") + Parser.add_option("-i", "--fv-image", action="append", type="string", dest="FvImage", default=[], + help="The name of FV to be generated. 
The name must be from [FV] section in FDF file.") + + Parser.add_option("-u", "--skip-autogen", action="store_true", dest="SkipAutoGen", help="Skip AutoGen step.") + Parser.add_option("-e", "--re-parse", action="store_true", dest="Reparse", help="Re-parse all meta-data files.") + + Parser.add_option("-c", "--case-insensitive", action="store_true", dest="CaseInsensitive", help="Don't check case of file name.") + + # Parser.add_option("-D", "--define", action="append", dest="Defines", metavar="NAME[=[VALUE]]", + # help="Define global macro which can be used in DSC/DEC/INF files.") + + Parser.add_option("-w", "--warning-as-error", action="store_true", dest="WarningAsError", help="Treat warning in tools as error.") + Parser.add_option("-j", "--log", action="store", dest="LogFile", help="Put log in specified file as well as on console.") + + Parser.add_option("-s", "--silent", action="store_true", type=None, dest="SilentMode", + help="Make use of silent mode of (n)make.") + Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.") + Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed, "\ + "including library instances selected, final dependency expression, "\ + "and warning messages, etc.") + Parser.add_option("-d", "--debug", action="store", type="int", help="Enable debug messages at specified level.") + + (Opt, Args)=Parser.parse_args() + return (Opt, Args) + +## Tool entrance method +# +# This method mainly dispatch specific methods per the command line options. +# If no error found, return zero value so the caller of this tool can know +# if it's executed successfully or not. 
+# +# @retval 0 Tool was successful +# @retval 1 Tool failed +# +def Main(): + StartTime = time.time() + + # Initialize log system + EdkLogger.Initialize() + + # + # Parse the options and args + # + (Option, Target) = MyOptionParser() + GlobalData.gOptions = Option + GlobalData.gCaseInsensitive = Option.CaseInsensitive + + # Set log level + if Option.verbose != None: + EdkLogger.SetLevel(EdkLogger.VERBOSE) + elif Option.quiet != None: + EdkLogger.SetLevel(EdkLogger.QUIET) + elif Option.debug != None: + EdkLogger.SetLevel(Option.debug + 1) + else: + EdkLogger.SetLevel(EdkLogger.INFO) + + if Option.LogFile != None: + EdkLogger.SetLogFile(Option.LogFile) + + if Option.WarningAsError == True: + EdkLogger.SetWarningAsError() + + if platform.platform().find("Windows") >= 0: + GlobalData.gIsWindows = True + else: + GlobalData.gIsWindows = False + + EdkLogger.quiet(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[%s]\n" % platform.platform()) + ReturnCode = 0 + MyBuild = None + try: + if len(Target) == 0: + Target = "all" + elif len(Target) >= 2: + EdkLogger.error("build", OPTION_NOT_SUPPORTED, "More than one targets are not supported.", + ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget))) + else: + Target = Target[0].lower() + + if Target not in gSupportedTarget: + EdkLogger.error("build", OPTION_NOT_SUPPORTED, "Not supported target [%s]." 
% Target, + ExtraData="Please select one of: %s" %(' '.join(gSupportedTarget))) + + # GlobalData.gGlobalDefines = ParseDefines(Option.Defines) + # + # Check environment variable: EDK_TOOLS_PATH, WORKSPACE, PATH + # + CheckEnvVariable() + Workspace = os.getenv("WORKSPACE") + # + # Get files real name in workspace dir + # + GlobalData.gAllFiles = Utils.DirCache(Workspace) + + WorkingDirectory = os.getcwd() + if not Option.ModuleFile: + FileList = glob.glob(os.path.normpath(os.path.join(WorkingDirectory, '*.inf'))) + FileNum = len(FileList) + if FileNum >= 2: + EdkLogger.error("build", OPTION_NOT_SUPPORTED, "There are %d INF files in %s." % (FileNum, WorkingDirectory), + ExtraData="Please use '-m <INF_FILE_PATH>' switch to choose one.") + elif FileNum == 1: + Option.ModuleFile = NormFile(FileList[0], Workspace) + + if Option.ModuleFile: + Option.ModuleFile = PathClass(Option.ModuleFile, Workspace) + ErrorCode, ErrorInfo = Option.ModuleFile.Validate(".inf", False) + if ErrorCode != 0: + EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo) + + if Option.PlatformFile != None: + Option.PlatformFile = PathClass(Option.PlatformFile, Workspace) + ErrorCode, ErrorInfo = Option.PlatformFile.Validate(".dsc", False) + if ErrorCode != 0: + EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo) + + if Option.FdfFile != None: + Option.FdfFile = PathClass(Option.FdfFile, Workspace) + ErrorCode, ErrorInfo = Option.FdfFile.Validate(".fdf", False) + if ErrorCode != 0: + EdkLogger.error("build", ErrorCode, ExtraData=ErrorInfo) + + MyBuild = Build(Target, Workspace, Option.PlatformFile, Option.ModuleFile, + Option.TargetArch, Option.ToolChain, Option.BuildTarget, + Option.FdfFile, Option.RomImage, Option.FvImage, + None, Option.SilentMode, Option.ThreadNumber, + Option.SkipAutoGen, Option.Reparse, Option.SkuId) + MyBuild.Launch() + #MyBuild.DumpBuildData() + except FatalError, X: + if MyBuild != None: + # for multi-thread build exits safely + MyBuild.Relinquish() + if Option != 
None and Option.debug != None: + EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + ReturnCode = X.args[0] + except Warning, X: + # error from Fdf parser + if MyBuild != None: + # for multi-thread build exits safely + MyBuild.Relinquish() + if Option != None and Option.debug != None: + EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + else: + EdkLogger.error(X.ToolName, FORMAT_INVALID, File=X.FileName, Line=X.LineNumber, ExtraData=X.Message, RaiseError = False) + ReturnCode = FORMAT_INVALID + except KeyboardInterrupt: + ReturnCode = ABORT_ERROR + if Option != None and Option.debug != None: + EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + except: + if MyBuild != None: + # for multi-thread build exits safely + MyBuild.Relinquish() + + # try to get the meta-file from the object causing exception + Tb = sys.exc_info()[-1] + MetaFile = GlobalData.gProcessingFile + while Tb != None: + if 'self' in Tb.tb_frame.f_locals and hasattr(Tb.tb_frame.f_locals['self'], 'MetaFile'): + MetaFile = Tb.tb_frame.f_locals['self'].MetaFile + Tb = Tb.tb_next + EdkLogger.error( + "\nbuild", + CODE_ERROR, + "Unknown fatal error when processing [%s]" % MetaFile, + ExtraData="\n(Please send email to dev@buildtools.tianocore.org for help, attaching following call stack trace!)\n", + RaiseError=False + ) + EdkLogger.quiet("(Python %s on %s) " % (platform.python_version(), sys.platform) + traceback.format_exc()) + ReturnCode = CODE_ERROR + finally: + Utils.Progressor.Abort() + + if MyBuild != None: + MyBuild.Db.Close() + + if ReturnCode == 0: + Conclusion = "Done" + elif ReturnCode == ABORT_ERROR: + Conclusion = "Aborted" + else: + Conclusion = "Failed" + FinishTime = time.time() + BuildDuration = time.strftime("%M:%S", time.gmtime(int(round(FinishTime - StartTime)))) + EdkLogger.SetLevel(EdkLogger.QUIET) + 
EdkLogger.quiet("\n- %s -\n%s [%s]" % (Conclusion, time.strftime("%H:%M:%S, %b.%d %Y", time.localtime()), BuildDuration)) + + return ReturnCode + +if __name__ == '__main__': + r = Main() + ## 0-127 is a safe return range, and 1 is a standard default error + if r < 0 or r > 127: r = 1 + sys.exit(r) + diff --git a/BaseTools/Source/Python/fpd2dsc/EdkIIWorkspaceGuidsInfo.py b/BaseTools/Source/Python/fpd2dsc/EdkIIWorkspaceGuidsInfo.py new file mode 100644 index 0000000000..528dbf3ddb --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/EdkIIWorkspaceGuidsInfo.py @@ -0,0 +1,327 @@ +## @file
+# Collects the Guid Information in current workspace.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import fnmatch
+from Common.EdkIIWorkspace import EdkIIWorkspace
+from Common.MigrationUtilities import *
+
+## A class for EdkII work space to resolve Guids
+#
+# This class inherits from EdkIIWorkspace and collects the Guids information
+# in current workspace. The Guids information is important to translate the
+# package Guids and recommended library instances Guids to relative file path
+# (to workspace directory) in MSA files.
+#
class EdkIIWorkspaceGuidsInfo(EdkIIWorkspace):

    ## The class constructor
    #
    # The constructor initializes the workspace directory. It does not collect
    # package and module Guids info at initialization; instead, it collects
    # them lazily, on the first Resolve*FilePath call.
    #
    # @param self            The object pointer
    #
    def __init__(self):
        # Initialize parent class.
        EdkIIWorkspace.__init__(self)
        # Map from lowercased Guid string to the description file path that
        # declares it (case-insensitive Guid lookup).
        self.__GuidToFilePath = {}
        # Directories that contain a package description file; the module
        # scan is restricted to these directories.
        self.__PackageDirList = []
        # Lazy-initialization flag to avoid re-scanning packages.
        self.__PackageGuidInitialized = False
        # Lazy-initialization flag to avoid re-scanning modules.
        self.__ModuleGuidInitialized = False

    ## Add Guid, Version and FilePath to Guids database
    #
    # Constructs a map from Guid to FilePath internally, detecting possible
    # Guid collisions. For now the Version information is simply ignored and
    # the Guid value itself acts as the master key.
    #
    # @param self            The object pointer
    # @param Guid            The Guid value
    # @param Version         The version information (currently unused)
    # @param FilePath        The Guid related file path
    #
    # @retval True           The Guid value was successfully added to the map
    # @retval False          The Guid is an empty string, or the map already
    #                        contains the same Guid from a different file
    #
    def __AddGuidToFilePath(self, Guid, Version, FilePath):
        if Guid == "":
            EdkLogger.info("Cannot find Guid in file %s" % FilePath)
            return False
        # Lowercase the Guid to ensure case-insensitive comparison; setdefault
        # returns FilePath itself when the Guid was not yet registered.
        OldFilePath = self.__GuidToFilePath.setdefault(Guid.lower(), FilePath)
        if OldFilePath == FilePath:
            EdkLogger.verbose("File %s has new Guid '%s'" % (FilePath, Guid))
            return True
        else:
            EdkLogger.info("File %s has duplicate Guid with & %s" % (FilePath, OldFilePath))
            return False


    ## Gets file information from a module description file
    #
    # Extracts Module Name, File Guid and Version number from an INF, MSA or
    # NMSA file: text-based parsing for INF, XML parsing for (N)MSA.
    #
    # @param self            The object pointer
    # @param FileName        The input module file name
    #
    # @retval True           This module file represents a new module
    #                        discovered in the current workspace
    # @retval False          This module file is not regarded as a valid
    #                        module: the File Guid cannot be extracted, or
    #                        another file with the same Guid already exists
    #
    def __GetModuleFileInfo(self, FileName):
        if fnmatch.fnmatch(FileName, "*.inf"):
            TagTuple = ("BASE_NAME", "FILE_GUID", "VERSION_STRING")
            (Name, Guid, Version) = GetTextFileInfo(FileName, TagTuple)
        else:
            XmlTag1 = "ModuleSurfaceArea/MsaHeader/ModuleName"
            XmlTag2 = "ModuleSurfaceArea/MsaHeader/GuidValue"
            XmlTag3 = "ModuleSurfaceArea/MsaHeader/Version"
            TagTuple = (XmlTag1, XmlTag2, XmlTag3)
            (Name, Guid, Version) = GetXmlFileInfo(FileName, TagTuple)

        return self.__AddGuidToFilePath(Guid, Version, FileName)


    ## Gets file information from a package description file
    #
    # Extracts Package Name, File Guid and Version number from a DEC, SPD or
    # NSPD file: text-based parsing for DEC, XML parsing for (N)SPD. The EDK
    # Compatibility Package is hardcoded to be ignored since no EDKII INF
    # file depends on that package.
    #
    # @param self            The object pointer
    # @param FileName        The input package file name
    #
    # @retval True           This package file represents a new package
    #                        discovered in the current workspace
    # @retval False          This package is not regarded as a valid package:
    #                        the File Guid cannot be extracted, or another
    #                        file with the same Guid already exists
    #
    def __GetPackageFileInfo(self, FileName):
        if fnmatch.fnmatch(FileName, "*.dec"):
            TagTuple = ("PACKAGE_NAME", "PACKAGE_GUID", "PACKAGE_VERSION")
            (Name, Guid, Version) = GetTextFileInfo(FileName, TagTuple)
        else:
            XmlTag1 = "PackageSurfaceArea/SpdHeader/PackageName"
            XmlTag2 = "PackageSurfaceArea/SpdHeader/GuidValue"
            XmlTag3 = "PackageSurfaceArea/SpdHeader/Version"
            TagTuple = (XmlTag1, XmlTag2, XmlTag3)
            (Name, Guid, Version) = GetXmlFileInfo(FileName, TagTuple)

        if Name == "EdkCompatibilityPkg":
            # Do not scan the EDK Compatibility Package to avoid Guid
            # collision with those in the EDK Glue Library.
            EdkLogger.verbose("Bypass EDK Compatibility Pkg")
            return False

        return self.__AddGuidToFilePath(Guid, Version, FileName)

    ## Iterate on all package files listed in framework database file
    #
    # Yields all package description files listed in the framework database
    # file, which describes the packages the current workspace includes.
    #
    # @param self            The object pointer
    #
    def __FrameworkDatabasePackageFiles(self):
        XmlFrameworkDb = XmlParseFile(self.WorkspaceFile)
        XmlTag = "FrameworkDatabase/PackageList/Filename"
        for PackageFile in XmlElementList(XmlFrameworkDb, XmlTag):
            yield os.path.join(self.WorkspaceDir, PackageFile)


    ## Iterate on all package files in current workspace directory
    #
    # Yields all package description files found under the current workspace
    # directory. Used when no framework database file exists.
    #
    # @param self            The object pointer
    #
    def __TraverseAllPackageFiles(self):
        for Path, Dirs, Files in os.walk(self.WorkspaceDir):
            # Ignore svn version control directory.
            if ".svn" in Dirs:
                Dirs.remove(".svn")
            if "Build" in Dirs:
                Dirs.remove("Build")
            # Assume priority from high to low: DEC, NSPD, SPD.
            PackageFiles = fnmatch.filter(Files, "*.dec")
            if len(PackageFiles) == 0:
                PackageFiles = fnmatch.filter(Files, "*.nspd")
                if len(PackageFiles) == 0:
                    PackageFiles = fnmatch.filter(Files, "*.spd")

            for File in PackageFiles:
                # Assume no more package description files in sub-directories;
                # clearing Dirs in place stops os.walk from descending further.
                del Dirs[:]
                yield os.path.join(Path, File)

    ## Iterate on all module files in current package directory
    #
    # Yields all module description files found under the already-discovered
    # package directories.
    #
    # @param self            The object pointer
    #
    def __TraverseAllModuleFiles(self):
        for PackageDir in self.__PackageDirList:
            for Path, Dirs, Files in os.walk(PackageDir):
                # Ignore svn version control directory.
                if ".svn" in Dirs:
                    Dirs.remove(".svn")
                # Assume priority from high to low: INF, NMSA, MSA.
                ModuleFiles = fnmatch.filter(Files, "*.inf")
                if len(ModuleFiles) == 0:
                    ModuleFiles = fnmatch.filter(Files, "*.nmsa")
                    if len(ModuleFiles) == 0:
                        ModuleFiles = fnmatch.filter(Files, "*.msa")

                for File in ModuleFiles:
                    yield os.path.join(Path, File)

    ## Initialize package Guids info mapping table
    #
    # Collects all package Guids mapped to package description file paths.
    # Invoked on demand to avoid unnecessary directory scans.
    #
    # @param self            The object pointer
    #
    def __InitializePackageGuidInfo(self):
        if self.__PackageGuidInitialized:
            return

        EdkLogger.verbose("Start to collect Package Guids Info.")

        WorkspaceFile = os.path.join("Conf", "FrameworkDatabase.db")
        self.WorkspaceFile = os.path.join(self.WorkspaceDir, WorkspaceFile)

        # Try to find the framework database file to discover package lists;
        # fall back to a full workspace directory scan.
        if os.path.exists(self.WorkspaceFile):
            TraversePackage = self.__FrameworkDatabasePackageFiles
            EdkLogger.verbose("Package list bases on: %s" % self.WorkspaceFile)
        else:
            TraversePackage = self.__TraverseAllPackageFiles
            EdkLogger.verbose("Package list in: %s" % self.WorkspaceDir)

        for FileName in TraversePackage():
            if self.__GetPackageFileInfo(FileName):
                PackageDir = os.path.dirname(FileName)
                EdkLogger.verbose("Find new package directory %s" % PackageDir)
                self.__PackageDirList.append(PackageDir)

        self.__PackageGuidInitialized = True

    ## Initialize module Guids info mapping table
    #
    # Collects all module Guids mapped to module description file paths.
    # Invoked on demand to avoid unnecessary directory scans. Requires the
    # package scan first, since modules are only searched under package
    # directories.
    #
    # @param self            The object pointer
    #
    def __InitializeModuleGuidInfo(self):
        if self.__ModuleGuidInitialized:
            return
        EdkLogger.verbose("Start to collect Module Guids Info")

        self.__InitializePackageGuidInfo()
        for FileName in self.__TraverseAllModuleFiles():
            if self.__GetModuleFileInfo(FileName):
                EdkLogger.verbose("Find new module %s" % FileName)

        self.__ModuleGuidInitialized = True

    ## Get Package file path by Package Guid and Version
    #
    # Translates the Package Guid and Version to a DEC file path relative to
    # the workspace directory (forward-slash separated). If no package in the
    # current workspace matches the input Guid, an empty string is returned.
    # For now, the Version value is simply ignored.
    #
    # @param self            The object pointer
    # @param Guid            The Package Guid value to look for
    # @param Version         The Package Version value to look for (unused)
    #
    def ResolvePackageFilePath(self, Guid, Version=""):
        self.__InitializePackageGuidInfo()

        EdkLogger.verbose("Resolve Package Guid '%s'" % Guid)
        FileName = self.__GuidToFilePath.get(Guid.lower(), "")
        if FileName == "":
            EdkLogger.info("Cannot resolve Package Guid '%s'" % Guid)
        else:
            FileName = self.WorkspaceRelativePath(FileName)
            # Normalize to a DEC path even if the Guid came from an (N)SPD.
            FileName = os.path.splitext(FileName)[0] + ".dec"
            FileName = FileName.replace("\\", "/")
        return FileName

    ## Get Module file path by Module Guid and Version
    #
    # Translates the Module Guid and Version to an INF file path relative to
    # the workspace directory (forward-slash separated). If no module in the
    # current workspace matches the input Guid, an empty string is returned.
    # For now, the Version value is simply ignored.
    #
    # @param self            The object pointer
    # @param Guid            The Module Guid value to look for
    # @param Version         The Module Version value to look for (unused)
    #
    def ResolveModuleFilePath(self, Guid, Version=""):
        self.__InitializeModuleGuidInfo()

        EdkLogger.verbose("Resolve Module Guid '%s'" % Guid)
        FileName = self.__GuidToFilePath.get(Guid.lower(), "")
        if FileName == "":
            EdkLogger.info("Cannot resolve Module Guid '%s'" % Guid)
        else:
            FileName = self.WorkspaceRelativePath(FileName)
            # Normalize to an INF path even if the Guid came from an (N)MSA.
            FileName = os.path.splitext(FileName)[0] + ".inf"
            FileName = FileName.replace("\\", "/")
        return FileName
+
# A module-level singleton of EdkIIWorkspaceGuidsInfo for external reference.
# NOTE: it is constructed at import time, so importing this module requires a
# valid workspace environment (see EdkIIWorkspace).
gEdkIIWorkspaceGuidsInfo = EdkIIWorkspaceGuidsInfo()

# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Manual smoke tests, intentionally left commented out.
    # NOTE(review): the calls below look mismatched -- package Guids should be
    # resolved with ResolvePackageFilePath and module Guids with
    # ResolveModuleFilePath; ResolvePlatformFilePath does not exist.
    # Test the translation of package Guid.
#    MdePkgGuid = "1E73767F-8F52-4603-AEB4-F29B510B6766"
#    OldMdePkgGuid = "5e0e9358-46b6-4ae2-8218-4ab8b9bbdcec"
#    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(MdePkgGuid)
#    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(OldMdePkgGuid)

    # Test the translation of module Guid.
#    UefiLibGuid = "3a004ba5-efe0-4a61-9f1a-267a46ae5ba9"
#    UefiDriverModelLibGuid = "52af22ae-9901-4484-8cdc-622dd5838b09"
#    print gEdkIIWorkspaceGuidsInfo.ResolvePlatformFilePath(UefiLibGuid)
#    print gEdkIIWorkspaceGuidsInfo.ResolvePlatformFilePath(UefiDriverModelLibGuid)
    pass
\ No newline at end of file diff --git a/BaseTools/Source/Python/fpd2dsc/LoadFpd.py b/BaseTools/Source/Python/fpd2dsc/LoadFpd.py new file mode 100644 index 0000000000..cc97ec5521 --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/LoadFpd.py @@ -0,0 +1,1039 @@ +## @file
+# Open an FPD file and load all its contents to a PlatformClass object.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from CommonDataClass.PlatformClass import *
+from CommonDataClass.FdfClass import *
+from Common.XmlRoutines import *
+from Common.MigrationUtilities import *
+from EdkIIWorkspaceGuidsInfo import gEdkIIWorkspaceGuidsInfo
+
## Load Platform Header
#
# Read an input Platform XML DOM object and return the Platform Header class
# object contained in the DOM object.
#
# @param XmlFpd          An XML DOM object read from FPD file
# @param FpdFileName     The file path of FPD File
#
# @retval PlatformHeader A new Platform Header object loaded from XmlFpd
#
def LoadPlatformHeader(XmlFpd, FpdFileName):
    PlatformHeader = PlatformHeaderClass()

    XmlTag = "PlatformSurfaceArea/PlatformHeader"
    FpdHeader = XmlNode(XmlFpd, XmlTag)

    SetIdentification(PlatformHeader, FpdHeader, "PlatformName", FpdFileName)
    SetCommonHeader(PlatformHeader, FpdHeader)

    # The Specification element holds "<Name> <Version>". Guard against a
    # missing or malformed element instead of raising IndexError on List[1].
    XmlTag = "PlatformSurfaceArea/PlatformHeader/Specification"
    List = XmlElement(XmlFpd, XmlTag).split()
    if len(List) >= 2:
        SpecificationName = List[0]
        SpecificationValue = List[1]
        PlatformHeader.Specification = {SpecificationName: SpecificationValue}

    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/SupportedArchitectures"
    PlatformHeader.SupArchList = XmlElement(XmlFpd, XmlTag).split()

    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/BuildTargets"
    PlatformHeader.BuildTargets = XmlElement(XmlFpd, XmlTag).split()

    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/IntermediateDirectories"
    PlatformHeader.IntermediateDirectories = XmlElement(XmlFpd, XmlTag)

    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/OutputDirectory"
    PlatformHeader.OutputDirectory = XmlElement(XmlFpd, XmlTag)

    # Use the first SkuInfo entry ([SkuID, UiSkuName]) as the SkuIdName.
    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/SkuInfo"
    List = map(LoadSkuId, XmlList(XmlFpd, XmlTag))
    if List != []:
        PlatformHeader.SkuIdName = List[0]

    return PlatformHeader
+
## Load a Platform SkuInfo
#
# Read a per-PCD SkuInfo XML DOM object and return its [SkuId, Value] pair.
#
# @param XmlPlatformSkuInfo An XML DOM object read from FPD file
#
# @retval SkuInfo          A [SkuId, Value] list loaded from the DOM node
#
def LoadPlatformSkuInfo(XmlPlatformSkuInfo):
    SkuId = XmlElement(XmlPlatformSkuInfo, "SkuInfo/SkuId")
    Value = XmlElement(XmlPlatformSkuInfo, "SkuInfo/Value")
    return [SkuId, Value]
+
## Load a Platform SkuId
#
# Read an input SkuInfo XML DOM object and return its [SkuID, SkuValue] pair.
#
# @param XmlSkuInfo      An XML DOM object read from FPD file
#
# @retval List           A [SkuID, UiSkuName-text] list loaded from the node
#
def LoadSkuId(XmlSkuInfo):
    XmlTag = "SkuInfo/UiSkuName"
    SkuValue = XmlElement(XmlSkuInfo, XmlTag)

    # Default SkuID to empty: the original code left SkuID unbound (NameError)
    # when no UiSkuName element carried a SkuID attribute.
    SkuID = ""
    List = map(LoadSkuID, XmlList(XmlSkuInfo, XmlTag))
    if List != []:
        SkuID = List[0]

    return [SkuID, SkuValue]
+
## Extract the SkuID attribute from a UiSkuName XML DOM node.
def LoadSkuID(XmlUiSkuName):
    return XmlAttribute(XmlUiSkuName, "SkuID")
+
## Load a list of Platform SkuIds
#
# Read an input Platform XML DOM object and return a SkuInfoListClass holding
# the platform-level SkuInfo entries followed by the per-PCD SkuInfo entries
# from the dynamic PCD build definitions.
#
# @param XmlFpd          An XML DOM object read from FPD file
#
# @retval PlatformSkuIds A platform SkuIds object loaded from XmlFpd
#
def LoadPlatformSkuInfos(XmlFpd):
    PlatformSkuIds = SkuInfoListClass()

    # Platform-level SkuInfo entries ([SkuID, UiSkuName] pairs).
    XmlTag = "PlatformSurfaceArea/PlatformDefinitions/SkuInfo"
    SkuInfoList = map(LoadSkuId, XmlList(XmlFpd, XmlTag))

    # Append per-PCD SkuInfo entries ([SkuId, Value] pairs). The original code
    # also read unused SkuId/Value locals here; they are dropped.
    XmlTag = "PlatformSurfaceArea/DynamicPcdBuildDefinitions/PcdBuildData/SkuInfo"
    for SkuInfo in map(LoadPlatformSkuInfo, XmlList(XmlFpd, XmlTag)):
        SkuInfoList.append(SkuInfo)

    PlatformSkuIds.SkuInfoList = SkuInfoList

    return PlatformSkuIds
+
## Load Platform Module Build Option
#
# Read a module BuildOptions XML DOM object and return a
# PlatformBuildOptionClass object with its Options populated; the ant-task,
# user-extension and Ffs-key fields are left as empty dictionaries.
#
# @param XmlModuleBuildOption An XML DOM object read from FPD file
#
# @retval PlatformBuildOption A Platform Build Option object loaded from the node
#
def LoadModuleBuildOption(XmlModuleBuildOption):
    BuildOption = PlatformBuildOptionClass()

    BuildOption.UserDefinedAntTasks = {}
    BuildOption.Options = [
        LoadBuildOption(Node)
        for Node in XmlList(XmlModuleBuildOption, "BuildOptions/Options/Option")
    ]
    BuildOption.UserExtensions = {}
    BuildOption.FfsKeyList = {}

    return BuildOption
+
## Load Platform Module Extern
#
# Read a module Externs XML DOM object and return a list holding the
# PcdIsDriver and Specification element texts, in that order.
#
# @param XmlModuleExtern An XML DOM object read from FPD file
#
# @retval PlatformModuleExtern A [PcdIsDriver, Specification] list
#
def LoadModuleExtern(XmlModuleExtern):
    PlatformModuleExtern = []

    XmlTag = "Externs/PcdIsDriver"
    PcdIsDriver = XmlElement(XmlModuleExtern, XmlTag)
    PlatformModuleExtern.append(PcdIsDriver)

    XmlTag = "Externs/Specification"
    Specification = XmlElement(XmlModuleExtern, XmlTag)
    PlatformModuleExtern.append(Specification)

    # NOTE: individual "Externs/Extern" entries are not collected here; the
    # original code assigned the tag to an unused local and did nothing with
    # it, so that dead assignment has been removed.

    return PlatformModuleExtern
+
## Load Platform ModuleSaBuildOptions
#
# Read a ModuleSA XML DOM object and return its ModuleSaBuildOptions as a
# PlatformBuildOptionClasses object.
#
# @param XmlModuleSA     An XML DOM object of one ModuleSA read from FPD file
#
# @retval PlatformModuleSaBuildOption The build options loaded from XmlModuleSA
#
def LoadPlatformModuleSaBuildOption(XmlModuleSA):
    SaBuildOption = PlatformBuildOptionClasses()

    SaBuildOption.FvBinding = XmlElement(
        XmlModuleSA, "ModuleSA/ModuleSaBuildOptions/FvBinding")
    SaBuildOption.FfsFormatKey = XmlElement(
        XmlModuleSA, "ModuleSA/ModuleSaBuildOptions/FfsFormatKey")
    SaBuildOption.FfsFileNameGuid = XmlElement(
        XmlModuleSA, "ModuleSA/ModuleSaBuildOptions/FfsFileNameGuid")
    SaBuildOption.BuildOptionList = [
        LoadBuildOption(Option)
        for Option in XmlList(XmlModuleSA, "ModuleSA/ModuleSaBuildOptions/Options/Option")
    ]

    return SaBuildOption
+
## Load a Platform Module Library Instance
#
# Read a Libraries/Instance XML DOM object, resolve its ModuleGuid to a
# workspace-relative INF path, and read the library class keyword from the
# corresponding MSA file.
#
# @param XmlLibraryInstance An XML DOM object read from FPD file
#
# @retval LibraryInstance   A [Keyword, ModulePath] list loaded from the node
#
def LoadPlatformModuleLibraryInstance(XmlLibraryInstance):
    ModuleGuid = XmlAttribute(XmlLibraryInstance, "ModuleGuid")

    # Resolve the Guid to a workspace-relative INF path, then map it back to
    # the corresponding MSA file (same base name, different extension) and
    # anchor it at $(WORKSPACE) before parsing.
    ModulePath = gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(ModuleGuid)
    MsaFileName = os.path.join(os.getenv('WORKSPACE'),
                               ModulePath.replace('.inf', '.msa'))
    XmlMsa = XmlParseFile(MsaFileName)

    Keyword = XmlElement(
        XmlMsa, "ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass/Keyword")

    return [Keyword, ModulePath]
+
## Load a Library Class supported-module list
#
# Read a LibraryClass XML DOM object and, when its Usage attribute is
# "ALWAYS_PRODUCED", return its SupModuleList attribute split into a list.
# For any other Usage the function returns None (preserved behavior).
#
# @param XmlLibraryClass An XML DOM object read from an MSA file
#
# @retval SupModuleList  The supported-module list, or None
#
def LoadLibraryClassSupModuleList(XmlLibraryClass):
    if XmlAttribute(XmlLibraryClass, "Usage") != "ALWAYS_PRODUCED":
        return None
    return XmlAttribute(XmlLibraryClass, "SupModuleList").split()
+
## Load Platform Library Class
#
# Read a Libraries/Instance XML DOM object and return a PlatformLibraryClass
# object populated from the referenced module's MSA file. If the ModuleGuid
# cannot be resolved in the current workspace, a mostly-empty object is
# returned.
#
# @param XmlPlatformLibraryClass An XML DOM object read from FPD file
#
# @retval PlatformLibraryInstance A Platform Library Class object
#
def LoadPlatformLibraryClass(XmlPlatformLibraryClass):
    PlatformLibraryInstance = PlatformLibraryClass()

    XmlTag = "ModuleGuid"
    LibraryInstanceModuleGuid = XmlAttribute(XmlPlatformLibraryClass, XmlTag)

    XmlTag = "PackageGuid"
    LibraryInstancePackageGuid = XmlAttribute(XmlPlatformLibraryClass, XmlTag)

    LibraryInstancePath = gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(LibraryInstanceModuleGuid)

    # An empty path means the module Guid could not be resolved in the
    # current workspace; skip the MSA lookup entirely in that case.
    if LibraryInstancePath != "":
        # Fix: the original assigned FilePath twice; keep a single assignment.
        PlatformLibraryInstance.FilePath = LibraryInstancePath

        # The MSA file shares the INF's base name; swap the extension and
        # anchor the path at $(WORKSPACE) before parsing.
        LibraryInstanceMSAName = LibraryInstancePath.replace('.inf', '.msa')
        WorkSpace = os.getenv('WORKSPACE')
        LibraryInstanceMSAPath = os.path.join(WorkSpace, LibraryInstanceMSAName)

        XmlMsa = XmlParseFile(LibraryInstanceMSAPath)

        XmlTag = "ModuleSurfaceArea/MsaHeader/ModuleName"
        PlatformLibraryInstance.Name = XmlElement(XmlMsa, XmlTag)

        XmlTag = "ModuleSurfaceArea/MsaHeader/ModuleType"
        PlatformLibraryInstance.ModuleType = XmlElement(XmlMsa, XmlTag)

        # BASE-type libraries carry no supported-module restriction.
        if PlatformLibraryInstance.ModuleType != "BASE":
            XmlTag = "ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass"
            List = map(LoadLibraryClassSupModuleList, XmlList(XmlMsa, XmlTag))
            if List != []:
                PlatformLibraryInstance.SupModuleList = List[0]
        XmlTag = "ModuleSurfaceArea/ModuleDefinitions/SupportedArchitectures"
        PlatformLibraryInstance.SupArchList = XmlElement(XmlMsa, XmlTag).split()

        PlatformLibraryInstance.ModuleGuid = LibraryInstanceModuleGuid

        XmlTag = "ModuleSurfaceArea/MsaHeader/Version"
        PlatformLibraryInstance.ModuleVersion = XmlElement(XmlMsa, XmlTag)

        PlatformLibraryInstance.PackageGuid = LibraryInstancePackageGuid
        PlatformLibraryInstance.PackageVersion = ''

    return PlatformLibraryInstance
+
+## Load Platform Library Classes
+#
+# Read an input Platform XML DOM object and return Platform module class object
+# contained in the DOM object.
+#
+# @param XmlLibraries An XML DOM object read from FPD file
+#
+# @retvel PlatformLibraryClasses A list of Platform Library Class object loaded from XmlFpd
+#
+def LoadPlatformLibraryClasses(XmlFpd):
+ PlatformLibraryInstances = PlatformLibraryClasses()
+ PlatformLibraryInstances.LibraryList = []
+
+ List = []
+ XmlTag = "PlatformSurfaceArea/FrameworkModules/ModuleSA/Libraries/Instance"
+ List = map(LoadPlatformLibraryClass, XmlList(XmlFpd, XmlTag))
+ #List.sort()
+ if List == []:
+ print "Error"
+ else:
+ PlatformLibraryInstances.LibraryList = List
+
+ return PlatformLibraryInstances
+
## Load Platform module
#
# Read a ModuleSA XML DOM object and return a PlatformModuleClass object,
# combining data from the FPD node itself and from the referenced module's
# MSA file (located via the ModuleGuid).
#
# @param XmlModuleSA     An XML DOM object of one ModuleSA read from FPD file
#
# @retval PlatformModule A Platform module object loaded from XmlModuleSA
#
def LoadModuleSA(XmlModuleSA):
    PlatformModule = PlatformModuleClass()

    # A ModuleSA has three major parts: Libraries instances,
    # PcdBuildDefinition and ModuleSaBuildOptions.
    XmlTag = "ModuleSA/Libraries/Instance"
    PlatformModule.LibraryClasses = map(LoadPlatformModuleLibraryInstance, XmlList(XmlModuleSA, XmlTag))

    XmlTag = "ModuleSA/PcdBuildDefinition/PcdData"
    PlatformModule.PcdBuildDefinitions = map(LoadPlatformPcdData, XmlList(XmlModuleSA, XmlTag))

    XmlTag = "ModuleSA/ModuleSaBuildOptions"
    PlatformModule.ModuleSaBuildOption = LoadPlatformModuleSaBuildOption(XmlModuleSA)

    XmlTag = "ModuleSA/BuildOptions"
    PlatformModule.BuildOptions = map(LoadModuleBuildOption, XmlList(XmlModuleSA, XmlTag)) #bugbug fix me

    XmlTag = "ModuleSA/Externs"
    PlatformModule.Externs = map(LoadModuleExtern, XmlList(XmlModuleSA, XmlTag)) #bugbug fix me

    XmlTag = "SupArchList"
    PlatformModule.SupArchList = XmlAttribute(XmlModuleSA, XmlTag).split()

    # The package Guid which the module depends on; not otherwise used here.
    XmlTag = "PackageGuid"
    PlatformModule.PackageGuid = XmlAttribute(XmlModuleSA, XmlTag)

    # The module Guid: used to locate the module's *.msa file and convert it
    # to a workspace-relative *.inf file path.
    XmlTag = "ModuleGuid"
    PlatformModule.ModuleGuid = XmlAttribute(XmlModuleSA, XmlTag)
    # e.g. resolves to $(WORKSPACE)/EdkModulePkg/Core/Dxe/DxeMain.inf, which
    # corresponds to .../DxeMain.msa on disk.
    PlatformModulePath = gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(PlatformModule.ModuleGuid)

    PlatformModule.FilePath = PlatformModulePath # *.inf file path
    # Map the resolved *.inf back to the on-disk *.msa and anchor it at
    # $(WORKSPACE) so it can be parsed.
    ModuleMSAFileName = PlatformModulePath.replace('.inf', '.msa')
    WorkSpace = os.getenv('WORKSPACE')
    ModuleMSAFileName = os.path.join(WorkSpace, ModuleMSAFileName)
    # Open this module
    #ModuleMSA = open(ModuleMSAFileName, "r")
    XmlMsa = XmlParseFile(ModuleMSAFileName)

    XmlTag = "ModuleSurfaceArea/MsaHeader/ModuleName"
    PlatformModule.Name = XmlElement(XmlMsa, XmlTag) # ModuleName

    XmlTag = "ModuleSurfaceArea/MsaHeader/ModuleType"
    PlatformModule.ModuleType = XmlElement(XmlMsa, XmlTag)

    # IA32, X64, IPF and EBC which the module support arch
    #XmlTag = "ModuleSurfaceArea/ModuleDefinitions/SupportedArchitectures"
    #PlatformModule.SupArchList = XmlElement(XmlMsa, XmlTag).split()

    #XmlTag = "ModuleSurfaceArea/MsaHeader/"
    PlatformModule.Type = '' #LIBRARY | LIBRARY_CLASS | MODULE, used by dsc. New in DSC spec

    PlatformModule.ExecFilePath = '' # New in DSC spec

    XmlTag = "ModuleSurfaceArea/MsaHeader/Specification"
    PlatformModule.Specifications = XmlElement(XmlMsa, XmlTag).split()

    return PlatformModule
+
## Load Platform modules
#
# Read an input Platform XML DOM object and return a PlatformModuleClasses
# object holding one PlatformModuleClass per ModuleSA element.
#
# @param XmlFpd          An XML DOM object read from FPD file
#
# @retval PlatformModules A list of Platform module objects loaded from XmlFpd
#
def LoadPlatformModules(XmlFpd):
    Modules = PlatformModuleClasses()

    ModuleSAList = XmlList(XmlFpd, "PlatformSurfaceArea/FrameworkModules/ModuleSA")
    Modules.ModuleList = [LoadModuleSA(ModuleSA) for ModuleSA in ModuleSAList]

    return Modules
+
## Load Platform Flash Definition File
#
# Read an input Platform XML DOM object and return the Platform Flash
# Definition File class object contained in the DOM object.
#
# @param XmlFpd          An XML DOM object read from FPD file
# @param FpdFileName     The file path of FPD File (unused here)
#
# @retval PlatformFlashDefinitionFile A new Platform Flash Definition File
#                                     object loaded from XmlFpd
#
def LoadPlatformFlashDefinitionFile(XmlFpd, FpdFileName):
    FlashDefFile = PlatformFlashDefinitionFileClass()

    BaseTag = "PlatformSurfaceArea/Flash/FlashDefinitionFile"
    FlashDefFile.FilePath = XmlElement(XmlFpd, BaseTag)
    FlashDefFile.Id = XmlAttribute(XmlFpd, BaseTag + "/Id")
    FlashDefFile.UiName = XmlAttribute(XmlFpd, BaseTag + "/UiName")
    FlashDefFile.Preferred = XmlAttribute(XmlFpd, BaseTag + "/Preferred")

    return FlashDefFile
+
## Load Platform User Defined Ant Tasks
#
# Read an input Platform XML DOM object and return its User Defined Ant Task
# as a one-entry dictionary keyed by the task Id.
#
# @param XmlFpd          An XML DOM object read from FPD file
#
# @retval Dict           A {Id: PlatformAntTaskClass} dictionary
#
def LoadUserDefinedAntTasks(XmlFpd):
    AntTask = PlatformAntTaskClass()

    BaseTag = "PlatformSurfaceArea/BuildOptions/UserDefinedAntTasks/AntTask"
    AntTask.Id = XmlAttribute(XmlFpd, BaseTag + "/Id")
    AntTask.AntCmdOptions = XmlElement(XmlFpd, BaseTag + "/AntCmdOptions")
    AntTask.FilePath = XmlElement(XmlFpd, BaseTag + "/Filename")

    return {AntTask.Id: AntTask}
+
## Load Platform Build Options
#
# Read a BuildOptions XML DOM object and return the list of Build Options it
# contains, delegating each Option element to LoadBuildOption (defined in
# MigrationUtilities).
#
# @param XmlBuildOptions An XML DOM object read from FPD file
#
# @retval PlatformBuildOptions A list of platform Build Options
#
def LoadBuildOptions(XmlBuildOptions):
    return [LoadBuildOption(Node) for Node in XmlList(XmlBuildOptions, "Option")]
+
## Load Platform Build Option
#
# Read a BuildOptions XML DOM object and return a PlatformBuildOptionClass
# object with ant tasks, options, user extensions and Ffs keys populated.
#
# @param XmlBuildOptions An XML DOM object read from FPD file
#
# @retval PlatformBuildOption A Build Options object loaded from the node
#
def LoadPlatformBuildOption(XmlBuildOptions):
    PlatformBuildOption = PlatformBuildOptionClass()

    # handle UserDefinedAntTasks
    # NOTE(review): XmlTag (a plain string) is passed where
    # LoadUserDefinedAntTasks expects an XML DOM node; as written this cannot
    # return real tasks -- confirm the intended argument before fixing.
    XmlTag = "BuildOptions/UserDefinedAntTasks/AntTask"
    PlatformBuildOption.UserDefinedAntTasks = LoadUserDefinedAntTasks(XmlTag)

    # handle Options
    XmlTag = "BuildOptions/Options/Option"
    PlatformBuildOption.Options = map(LoadBuildOption, XmlList(XmlBuildOptions, XmlTag))

    # handle UserExtensions
    # NOTE(review): same issue -- the tag string, not a DOM node, is passed
    # to LoadUserExtensions (from MigrationUtilities).
    XmlTag = "BuildOptions/UserExtensions"
    PlatformBuildOption.UserExtensions = LoadUserExtensions(XmlTag) # from MigrationUtilities.py LoadUserExtensions

    # handle Ffs
    XmlTag = "BuildOptions/Ffs/FfsKey"
    PlatformBuildOption.FfsKeyList = map(LoadPlatformFfs, XmlList(XmlBuildOptions, XmlTag))

    return PlatformBuildOption
+
+## Load Platform Ffs Dictionary
+#
+# Read an input Platform XML DOM object and return a platform Ffs Dictionary
+# contained in the DOM object.
+#
+# @param XmlFpd An XML DOM object read from FPD file
+#
+# @retvel Dict A platform Ffs Dict loaded from XmlFpd
+#
def LoadPlatformFfsDict(XmlFpd):
    """Return a dict of PlatformFfs objects keyed by FfsKey, one per
    PlatformSurfaceArea/BuildOptions/Ffs element of the FPD DOM."""
    FfsDict = {}
    for FfsNode in XmlList(XmlFpd, "PlatformSurfaceArea/BuildOptions/Ffs"):
        FfsObject = LoadPlatformFfs(FfsNode)
        FfsDict[FfsObject.Key] = FfsObject
    return FfsDict
+
+## Load Platform Ffs Section
+#
+# Read an input Platform XML DOM object and return a platform Ffs Section
+# contained in the DOM object.
+#
+# @param XmlFfs An XML DOM object read from FPD file
+#
+# @retvel PlatformFfsSection A platform Ffs Section loaded from XmlFpd
+#
def LoadPlatformFfsSection(XmlFfsSection):
    """Build a PlatformFfsSectionClass object from a <Section> XML node.

    Only the SectionType attribute is read from the XML; every other field is
    reset to an empty default, matching the original migration logic.
    """
    PlatformFfsSection = PlatformFfsSectionClass()

    PlatformFfsSection.SectionType = XmlAttribute(XmlFfsSection, "SectionType")

    # Fields with no value in the source document: empty defaults.
    PlatformFfsSection.BindingOrder = ''
    PlatformFfsSection.Compressible = ''
    PlatformFfsSection.EncapsulationType = ''
    PlatformFfsSection.ToolName = ''
    PlatformFfsSection.Filenames = []
    PlatformFfsSection.Args = ''
    PlatformFfsSection.OutFile = ''
    PlatformFfsSection.OutputFileExtension = ''
    PlatformFfsSection.ToolNameElement = ''

    return PlatformFfsSection
+
+## Load Platform Ffs Sections
+#
+# Read an input Platform XML DOM object and return a platform Ffs Sections
+# contained in the DOM object.
+#
+# @param XmlFfs An XML DOM object read from FPD file
+#
+# @retvel PlatformFfsSections A platform Ffs Sections loaded from XmlFpd
+#
def LoadFfsSections(XmlFfsSections=None):
    """Return a PlatformFfsSectionsClass object with empty default fields.

    The optional XmlFfsSections parameter is accepted (and currently ignored,
    matching the original behavior) because this loader is invoked as
    map(LoadFfsSections, XmlList(...)) in LoadPlatformFfsSections; the
    original zero-argument signature made that call raise TypeError for any
    non-empty node list.

    @param XmlFfsSections Optional XML DOM node (ignored)

    @retval PlatformFfsSections A PlatformFfsSectionsClass with empty fields
    """
    PlatformFfsSections = PlatformFfsSectionsClass()
    PlatformFfsSections.BindingOrder = ''
    PlatformFfsSections.Compressible = ''
    PlatformFfsSections.SectionType = ''
    PlatformFfsSections.EncapsulationType = ''
    PlatformFfsSections.ToolName = ''
    PlatformFfsSections.Section = []
    PlatformFfsSections.Sections = []

    return PlatformFfsSections
+
+## Load Platform Ffs Sections
+#
+# Read an input Platform XML DOM object and return a platform Ffs Sections
+# contained in the DOM object.
+#
+# @param XmlFfs An XML DOM object read from FPD file
+#
+# @retvel PlatformFfsSections A platform Ffs Sections loaded from XmlFpd
+#
def LoadPlatformFfsSections(XmlFfsSections):
    """Build a PlatformFfsSectionsClass object from an <Sections> XML node.

    NOTE(review): except for BindingOrder, every value parsed below is bound
    to a local and never stored on the returned object, so the result is
    effectively empty. This looks unfinished but is preserved as-is to keep
    behavior identical.
    """
    PlatformFfsSections = PlatformFfsSectionsClass()
    PlatformFfsSections.BindingOrder = ''

    # Parsed but discarded -- see NOTE above.
    EncapsulationType = XmlAttribute(XmlFfsSections, "EncapsulationType")
    SectionList = [LoadPlatformFfsSection(Node)
                   for Node in XmlList(XmlFfsSections, "Sections/Section")]
    SectionsList = [LoadFfsSections(Node)
                    for Node in XmlList(XmlFfsSections, "Sections/Sections")]

    return PlatformFfsSections
+
+## Load Platform Ffs Attribute
+#
+# Read an input Platform XML DOM object and return a platform Ffs Attribute
+# contained in the DOM object.
+#
+# @param XmlFfs An XML DOM object read from FPD file
+#
+# @retvel List A platform Ffs Attribute loaded from XmlFpd
+#
def LoadFfsAttribute(XmlFfs):
    """Return a list of [Name, Value] pairs, one per Ffs/Attribute element."""
    AttributeList = []
    for XmlAttrNode in XmlList(XmlFfs, "Ffs/Attribute"):
        Name = XmlAttribute(XmlAttrNode, "Name")
        Value = XmlAttribute(XmlAttrNode, "Value")
        AttributeList.append([Name, Value])
    return AttributeList
+
+## Load a list of Platform Build Options
+#
+# Read an input Platform XML DOM object and return a list of Build Options
+# contained in the DOM object.
+#
+# @param XmlFfs An XML DOM object read from FPD file
+#
+# @retvel PlatformFfsKey A platform Ffs key loaded from XmlFpd
+#
def LoadPlatformFfs(XmlFfs):
    """Build a PlatformFfsClass object (Sections, Attribute dict, Key) from
    an <Ffs> XML node."""
    PlatformFfs = PlatformFfsClass()

    PlatformFfs.Sections = [LoadPlatformFfsSections(Node)
                            for Node in XmlList(XmlFfs, "Ffs/Sections/Sections")]

    # Cross every attribute with every sections object:
    # key (Name, SectionsObject) -> Value.  (The original shadowed the outer
    # loop variable here; renamed for clarity, behavior unchanged.)
    AttributeDict = {}
    for Name, Value in LoadFfsAttribute(XmlFfs):
        for SectionsObject in PlatformFfs.Sections:
            AttributeDict[(Name, SectionsObject)] = Value
    PlatformFfs.Attribute = AttributeDict

    PlatformFfs.Key = XmlAttribute(XmlFfs, "Ffs/FfsKey")

    return PlatformFfs
+
+## Load a list of Platform Build Options
+#
+# Read an input Platform XML DOM object and return a list of Build Options
+# contained in the DOM object.
+#
+# @param XmlFpd An XML DOM object read from FPD file
+#
+# @retvel PlatformBuildOptions A list of Build Options loaded from XmlFpd
+#
def LoadPlatformBuildOptions(XmlFpd):
    """Assemble the platform-wide build options (ant tasks, options, user
    extensions, Ffs dictionary) from the FPD DOM."""
    PlatformBuildOptions = PlatformBuildOptionClass()

    PlatformBuildOptions.UserDefinedAntTasks = LoadUserDefinedAntTasks(XmlFpd)

    OptionNodes = XmlList(XmlFpd, "PlatformSurfaceArea/BuildOptions/Options/Option")
    PlatformBuildOptions.Options = [LoadBuildOption(Node) for Node in OptionNodes]

    PlatformBuildOptions.UserExtensions = LoadPlatformUserExtension(XmlFpd)
    PlatformBuildOptions.FfsKeyList = LoadPlatformFfsDict(XmlFpd)

    return PlatformBuildOptions
+
+## Load Platform Pcd Data
+#
+# Read an input Platform XML DOM object and return Platform module class object
+# contained in the DOM object.
+#
+# @param XmlPcd An XML DOM object read from FPD file
+#
+# @retvel PlatformPcdData A Platform Pcd object loaded from XmlFpd
+#
def LoadPlatformPcdData(XmlPcdData):
    """Build a PcdClass object from a <PcdData> XML node."""
    PcdData = PcdClass()  # defined in CommonDataClass.CommonClass

    # The item type (e.g. DYNAMIC) is an attribute of the node itself.
    PcdData.ItemType = XmlAttribute(XmlPcdData, "ItemType")

    # Everything else is a child element of <PcdData>.
    PcdData.C_NAME = XmlElement(XmlPcdData, "PcdData/C_Name")
    PcdData.Token = XmlElement(XmlPcdData, "PcdData/Token")
    PcdData.TokenSpaceGuidCName = XmlElement(XmlPcdData, "PcdData/TokenSpaceGuidCName")
    PcdData.DatumType = XmlElement(XmlPcdData, "PcdData/DatumType")
    PcdData.MaxDatumSize = XmlElement(XmlPcdData, "PcdData/MaxDatumSize")
    PcdData.Value = XmlElement(XmlPcdData, "PcdData/Value")

    return PcdData
+
+## Load a Platform Pcd Build Data
+#
+# Read an input Platform XML DOM object and return a list of Pcd Dynamic
+# contained in the DOM object.
+#
+# @param XmlPcdBuildData An XML DOM object read from FPD file
+#
+# @retvel PcdBuildData A Platform Pcd Build Data loaded from XmlFpd
+#
def LoadPlatformPcdBuildData(XmlPcdBuildData):
    """Build a PcdClass object from a <PcdBuildData> XML node (dynamic PCDs)."""
    PcdBuildData = PcdClass()  # defined in CommonDataClass.CommonClass

    # The item type (e.g. DYNAMIC) is an attribute of the node itself.
    PcdBuildData.ItemType = XmlAttribute(XmlPcdBuildData, "ItemType")

    PcdBuildData.C_NAME = XmlElement(XmlPcdBuildData, "PcdBuildData/C_Name")
    PcdBuildData.Token = XmlElement(XmlPcdBuildData, "PcdBuildData/Token")
    PcdBuildData.TokenSpaceGuidCName = XmlElement(XmlPcdBuildData, "PcdBuildData/TokenSpaceGuidCName")
    PcdBuildData.DatumType = XmlElement(XmlPcdBuildData, "PcdBuildData/DatumType")
    PcdBuildData.MaxDatumSize = XmlElement(XmlPcdBuildData, "PcdBuildData/MaxDatumSize")

    # The default Value is deliberately not read here (matches the original,
    # which carried it as commented-out code).

    return PcdBuildData
+
+## Load a list of Platform Pcd Dynamic
+#
+# Read an input Platform XML DOM object and return a list of Pcd Dynamic
+# contained in the DOM object.
+#
+# @param XmlFpd An XML DOM object read from FPD file
+#
+# @retvel PcdDynamic A list of Pcd Dynamic loaded from XmlFpd
+#
def LoadDynamicPcdBuildDefinitions(XmlFpd):
    """Return the list of dynamic PCD build definitions from the FPD DOM.

    @param XmlFpd An XML DOM object read from an FPD file

    @retval list PcdClass objects, one per PcdBuildData element

    The original assigned an unused empty list to a local before returning;
    that dead statement is removed.
    """
    XmlTag = "PlatformSurfaceArea/DynamicPcdBuildDefinitions/PcdBuildData"
    return map(LoadPlatformPcdBuildData, XmlList(XmlFpd, XmlTag))
+
+## Load a Platform NameValue object
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlNameValue An XML DOM object read from FPD file
+#
+# @retvel NameValue A Platform NameValue object
+#
def LoadNameValue(XmlNameValue):
    """Return [Name, Value] read from the attributes of a <NameValue> node."""
    return [XmlAttribute(XmlNameValue, "Name"),
            XmlAttribute(XmlNameValue, "Value")]
+
+## Load a Platform Fv Image Name object
+#
+# Read an input Platform XML DOM object and return a platform Fv Image
+# Name contained in the DOM object.
+#
+# @param XmlFvImageNames An XML DOM object read from FPD file
+#
+# @retvel FvImageNames A Platform Fv Image Name object
+#
def LoadFvImageNames(XmlFvImageNames):
    """Return the text content of the <FvImageNames> element."""
    return XmlElement(XmlFvImageNames, "FvImageNames")
+
+## Load a Platform Fv Image option object
+#
+# Read an input Platform XML DOM object and return a platform Fv Image
+# Option contained in the DOM object.
+#
+# @param XmlFvImageOptions An XML DOM object read from FPD file
+#
+# @retvel PlatformFvImageOption A Platform Fv Image Option object
+#
def LoadFvImageOptions(XmlFvImageOptions):
    """Build a PlatformFvImageOptionClass object from an <FvImageOptions> node.

    NOTE(review): the NameValue pairs are parsed into a local list but never
    stored on the returned object, so FvImageOptionValues stays empty; this
    mirrors the original code and may be an unfinished feature.
    """
    PlatformFvImageOption = PlatformFvImageOptionClass()
    PlatformFvImageOption.FvImageOptionName = ''
    PlatformFvImageOption.FvImageOptionValues = []

    # Parsed but discarded -- see NOTE above.
    NameValuePairs = [LoadNameValue(Node)
                      for Node in XmlList(XmlFvImageOptions, "FvImageOptions/NameValue")]

    return PlatformFvImageOption
+
+## Load a Platform Fv Image object
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlFvImage An XML DOM object read from Fpd file
+#
+# @retvel PlatformFvImage A Platform Fv Image object
+#
def LoadPlatformFvImage(XmlFvImage):
    """Build a PlatformFvImageClass object from an <FvImage> XML node."""
    PlatformFvImage = PlatformFvImageClass()

    # Simple attributes of the node.
    PlatformFvImage.Name = XmlAttribute(XmlFvImage, "Name")
    PlatformFvImage.Value = XmlAttribute(XmlFvImage, "Value")
    PlatformFvImage.Type = XmlAttribute(XmlFvImage, "Type")

    # Child elements.
    PlatformFvImage.FvImageNames = [LoadFvImageNames(Node)
                                    for Node in XmlList(XmlFvImage, "FvImage/FvImageNames")]
    PlatformFvImage.FvImageOptions = [LoadFvImageOptions(Node)
                                      for Node in XmlList(XmlFvImage, "FvImage/FvImageOptions")]

    return PlatformFvImage
+
+## Load a Platform fdf object
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlFvImages An XML DOM object read from FPD file
+#
+# @retvel PlatformFdf A Platform fdf object
+#
def LoadPlatformFvImages(XmlFvImages):
    """Return [NameValues, FvImages, FvImageNames] parsed from an <FvImages> node."""
    NameValues = [LoadNameValue(Node)
                  for Node in XmlList(XmlFvImages, "FvImages/NameValue")]
    FvImages = [LoadPlatformFvImage(Node)
                for Node in XmlList(XmlFvImages, "FvImages/FvImage")]
    FvImageNames = [LoadPlatformFvImageName(Node)
                    for Node in XmlList(XmlFvImages, "FvImages/FvImageName")]
    return [NameValues, FvImages, FvImageNames]
+
+## Load a Platform Fv Image Name object
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlFvImageName An XML DOM object read from FPD file
+#
+# @retvel PlatformFvImageName A Platform Fv Image Name object
+#
def LoadPlatformFvImageName(XmlFvImageName):
    """Build a PlatformFvImageNameClass object from an <FvImageName> node."""
    PlatformFvImageName = PlatformFvImageNameClass()

    PlatformFvImageName.Name = XmlAttribute(XmlFvImageName, "Name")
    PlatformFvImageName.Type = XmlAttribute(XmlFvImageName, "Type")
    PlatformFvImageName.FvImageOptions = [LoadFvImageOptions(Node)
                                          for Node in XmlList(XmlFvImageName, "FvImageOptions")]

    return PlatformFvImageName
+
+## Load a list of Platform fdf objects
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlFpd An XML DOM object read from FPD file
+#
+# @retvel PlatformFdfs A list of Platform fdf object
+#
def LoadPlatformFdfs(XmlFpd):
    """Build a PlatformFvImagesClass object from the Flash/FvImages section
    of the FPD DOM."""
    PlatformFvImages = PlatformFvImagesClass()

    FvImagesNodes = XmlList(XmlFpd, "PlatformSurfaceArea/Flash/FvImages")
    PlatformFvImages.FvImages = [LoadPlatformFvImages(Node) for Node in FvImagesNodes]

    return PlatformFvImages
+
+## Load a Platform User Extensions
+#
+# Read an input Platform XML DOM object and return an User Extension
+# contained in the DOM object.
+#
+# @param XmlUserExtension An XML DOM object read from FPD file
+#
+# @retvel PlatformUserExtensions A platform User Extension loaded from XmlFpd
+#
def LoadPlatformUserExtension(XmlFpd):
    """Return a dict of BuildOptions user extensions keyed by
    (UserID, Identifier), read from the FPD DOM."""
    ExtensionDict = {}

    # Kept from the original code although it is never used afterwards.
    PlatformUserExtensions = UserExtensionsClass()

    ExtensionNodes = XmlList(XmlFpd, "PlatformSurfaceArea/BuildOptions/UserExtensions")
    for Extension in [LoadUserExtensions(Node) for Node in ExtensionNodes]:
        ExtensionDict[(Extension.UserID, Extension.Identifier)] = Extension

    return ExtensionDict
+
+## Load a list of Platform User Extensions
+#
+# Read an input Platform XML DOM object and return a list of User Extensions
+# contained in the DOM object.
+#
+# @param XmlFpd An XML DOM object read from FPD file
+#
+# @retvel UserExtensions A list of platform User Extensions loaded from XmlFpd
+#
def LoadPlatformUserExtensions(XmlFpd):
    """Return all top-level <UserExtensions> of the FPD as UserExtensionsClass
    objects (LoadUserExtensions comes from MigrationUtilities.py)."""
    return [LoadUserExtensions(Node)
            for Node in XmlList(XmlFpd, "PlatformSurfaceArea/UserExtensions")]
+
+## Load a new Platform class object
+#
+# Read an input FPD File and return a new Platform class Object.
+#
+# @param FpdFileName An XML DOM object read from FPD file
+#
+# @retvel Platform A new Platform class object loaded from FPD File
+#
def LoadFpd(FpdFileName):
    """Parse FpdFileName into a fully populated PlatformClass object.

    The FPD file is parsed into a DOM once; each Platform field is then
    filled by its dedicated loader defined above in this module.
    """
    XmlFpd = XmlParseFile(FpdFileName)
    EdkLogger.verbose("Load FPD File: %s" % FpdFileName)

    Platform = PlatformClass()
    Platform.Header = LoadPlatformHeader(XmlFpd, FpdFileName)
    Platform.SkuInfos = LoadPlatformSkuInfos(XmlFpd)
    Platform.Libraries = [] #New in dsc spec, do not handle for now
    Platform.LibraryClasses = LoadPlatformLibraryClasses(XmlFpd)
    Platform.Modules = LoadPlatformModules(XmlFpd)
    Platform.FlashDefinitionFile = LoadPlatformFlashDefinitionFile(XmlFpd, FpdFileName)
    Platform.BuildOptions = LoadPlatformBuildOptions(XmlFpd)
    Platform.DynamicPcdBuildDefinitions = LoadDynamicPcdBuildDefinitions(XmlFpd)
    Platform.Fdf = LoadPlatformFdfs(XmlFpd)
    Platform.UserExtensions = LoadPlatformUserExtensions(XmlFpd)

    return Platform
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  No standalone behavior is provided here; the loaders
# above are driven by the fpd2dsc tool.
if __name__ == '__main__':
    pass
\ No newline at end of file diff --git a/BaseTools/Source/Python/fpd2dsc/MigrationUtilities.py b/BaseTools/Source/Python/fpd2dsc/MigrationUtilities.py new file mode 100644 index 0000000000..d3c724832c --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/MigrationUtilities.py @@ -0,0 +1,563 @@ +## @file
+# Contains several utilitities shared by migration tools.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+from optparse import OptionParser
+from Common.BuildToolError import *
+from XmlRoutines import *
+from CommonDataClass.CommonClass import *
+
+## Set all fields of CommonClass object.
+#
+# Set all attributes of CommonClass object from XML Dom object of XmlCommon.
+#
+# @param Common The destine CommonClass object.
+# @param XmlCommon The source XML Dom object.
+#
def SetCommon(Common, XmlCommon):
    """Fill the generic CommonClass fields (Usage, FeatureFlag, SupArchList,
    HelpText) of Common from the XML node XmlCommon."""
    # List-valued attributes are whitespace-separated.
    Common.Usage = XmlAttribute(XmlCommon, "Usage").split()
    Common.SupArchList = XmlAttribute(XmlCommon, "SupArchList").split()

    Common.FeatureFlag = XmlAttribute(XmlCommon, "FeatureFlag")

    # HelpText is a child element whose path starts with the node's own tag.
    Common.HelpText = XmlElement(XmlCommon, XmlNodeName(XmlCommon) + "/" + "HelpText")
+
+
+## Set some fields of CommonHeaderClass object.
+#
+# Set Name, Guid, FileName and FullPath fields of CommonHeaderClass object from
+# XML Dom object of XmlCommonHeader, NameTag and FileName.
+#
+# @param CommonHeader The destine CommonClass object.
+# @param XmlCommonHeader The source XML Dom object.
+# @param NameTag The name tag in XML Dom object.
+# @param FileName The file name of the XML file.
+#
def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
    """Set Name, Guid, Version, FileName and FullPath of CommonHeader from the
    XML node XmlCommonHeader and the file path FileName."""
    XmlParentTag = XmlNodeName(XmlCommonHeader)

    CommonHeader.Name = XmlElement(XmlCommonHeader, XmlParentTag + "/" + NameTag)
    CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlParentTag + "/" + "GuidValue")
    CommonHeader.Version = XmlElement(XmlCommonHeader, XmlParentTag + "/" + "Version")

    # Identification also records where the XML came from.
    CommonHeader.FileName = os.path.basename(FileName)
    CommonHeader.FullPath = os.path.abspath(FileName)
+
+
## Regular expression to match specification and value.
mReSpecification = re.compile(r"(?P<Specification>\w+)\s+(?P<Value>\w*)")

## Add specification to specification dictionary.
#
# Scan SpecificationString for "<name> <value>" pairs and record each one in
# SpecificationDict (a later occurrence of the same name overwrites).
#
# @param SpecificationDict The destine Specification dictionary.
# @param SpecificationString The source Specification String from which the
# specification name and value pair is abstracted.
#
def AddToSpecificationDict(SpecificationDict, SpecificationString):
    """Abstract specification name, value pair from Specification String"""
    for Match in mReSpecification.finditer(SpecificationString):
        SpecificationDict[Match.group("Specification")] = Match.group("Value")
+
+## Set all fields of CommonHeaderClass object.
+#
+# Set all attributes of CommonHeaderClass object from XML Dom object of
+# XmlCommonHeader, NameTag and FileName.
+#
+# @param CommonHeader The destine CommonClass object.
+# @param XmlCommonHeader The source XML Dom object.
+# @param NameTag The name tag in XML Dom object.
+# @param FileName The file name of the XML file.
+#
def SetCommonHeader(CommonHeader, XmlCommonHeader):
    """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
    Parent = XmlNodeName(XmlCommonHeader)

    # Plain text children copied verbatim onto like-named attributes.
    for Field in ("Abstract", "Description", "Copyright", "License", "ModuleType"):
        setattr(CommonHeader, Field, XmlElement(XmlCommonHeader, Parent + "/" + Field))

    # The Specification element holds "<name> <value>" pairs.
    Specification = XmlElement(XmlCommonHeader, Parent + "/" + "Specification")
    AddToSpecificationDict(CommonHeader.Specification, Specification)
+
+
+## Load a new Cloned Record class object.
+#
+# Read an input XML ClonedRecord DOM object and return an object of Cloned Record
+# contained in the DOM object.
+#
+# @param XmlCloned A child XML DOM object in a Common XML DOM.
+#
+# @retvel ClonedRecord A new Cloned Record object created by XmlCloned.
+#
def LoadClonedRecord(XmlCloned):
    """Return a new ClonedRecordClass object equivalent to XmlCloned."""
    Record = ClonedRecordClass()

    # Attributes of the node itself; Id is numeric.
    Record.Id = int(XmlAttribute(XmlCloned, "Id"))
    Record.FarGuid = XmlAttribute(XmlCloned, "FarGuid")

    # Child elements of <Cloned>.
    for Field in ("PackageGuid", "PackageVersion", "ModuleGuid", "ModuleVersion"):
        setattr(Record, Field, XmlElement(XmlCloned, "Cloned/" + Field))

    return Record
+
+
+## Load a new Guid/Protocol/Ppi common class object.
+#
+# Read an input XML Guid/Protocol/Ppi DOM object and return an object of
+# Guid/Protocol/Ppi contained in the DOM object.
+#
+# @param XmlGuidProtocolPpiCommon A child XML DOM object in a Common XML DOM.
+#
+# @retvel GuidProtocolPpiCommon A new GuidProtocolPpiCommon class object
+# created by XmlGuidProtocolPpiCommon.
+#
def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
    """Return a GuidProtocolPpiCommonClass object built from a
    Guid/Protocol/Ppi XML node."""
    Common = GuidProtocolPpiCommonClass()

    Common.Name = XmlAttribute(XmlGuidProtocolPpiCommon, "Name")

    # The element that holds the C name depends on the node's own tag.
    XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
    if XmlParent == "Entry":
        CNameTag = "%s/C_Name" % XmlParent
    elif XmlParent == "GuidCNames":
        CNameTag = "%s/GuidCName" % XmlParent
    else:
        CNameTag = "%s/%sCName" % (XmlParent, XmlParent)
    Common.CName = XmlElement(XmlGuidProtocolPpiCommon, CNameTag)

    Common.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlParent + "/" + "GuidValue")

    # A trailing "Notify" in the tag marks notification-style usage.
    if XmlParent.endswith("Notify"):
        Common.Notify = True

    Common.GuidTypeList = XmlAttribute(XmlGuidProtocolPpiCommon, "GuidTypeList").split()
    Common.SupModuleList = XmlAttribute(XmlGuidProtocolPpiCommon, "SupModuleList").split()

    SetCommon(Common, XmlGuidProtocolPpiCommon)

    return Common
+
+
+## Load a new Pcd class object.
+#
+# Read an input XML Pcd DOM object and return an object of Pcd
+# contained in the DOM object.
+#
+# @param XmlPcd A child XML DOM object in a Common XML DOM.
+#
+# @retvel Pcd A new Pcd object created by XmlPcd.
+#
def LoadPcd(XmlPcd):
    """Return a new PcdClass object equivalent to XmlPcd"""
    Pcd = PcdClass()

    # Child elements of <PcdEntry>.
    Pcd.CName = XmlElement(XmlPcd, "PcdEntry/C_Name")
    Pcd.Token = XmlElement(XmlPcd, "PcdEntry/Token")
    Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, "PcdEntry/TokenSpaceGuidCName")
    Pcd.DatumType = XmlElement(XmlPcd, "PcdEntry/DatumType")
    Pcd.MaxDatumSize = XmlElement(XmlPcd, "PcdEntry/MaxDatumSize")
    Pcd.DefaultValue = XmlElement(XmlPcd, "PcdEntry/DefaultValue")
    Pcd.ValidUsage = XmlElement(XmlPcd, "PcdEntry/ValidUsage").split()

    # Attributes of the node itself.
    Pcd.ItemType = XmlAttribute(XmlPcd, "PcdItemType")
    Pcd.SupModuleList = XmlAttribute(XmlPcd, "SupModuleList").split()

    SetCommon(Pcd, XmlPcd)

    return Pcd
+
+
+## Load a new LibraryClass class object.
+#
+# Read an input XML LibraryClass DOM object and return an object of LibraryClass
+# contained in the DOM object.
+#
+# @param XmlLibraryClass A child XML DOM object in a Common XML DOM.
+#
+# @retvel LibraryClass A new LibraryClass object created by XmlLibraryClass.
+#
def LoadLibraryClass(XmlLibraryClass):
    """Return a LibraryClassClass object built from a <LibraryClass> XML node."""
    LibraryClass = LibraryClassClass()

    # Prefer the Keyword child element; fall back to the Name attribute.
    LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, "LibraryClass/Keyword")
    if LibraryClass.LibraryClass == "":
        LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, "Name")

    LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, "LibraryClass/IncludeHeader")

    # Recommended-instance hints and supported-module list from attributes.
    LibraryClass.RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, "RecommendedInstanceVersion")
    LibraryClass.RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, "RecommendedInstanceGuid")
    LibraryClass.SupModuleList = XmlAttribute(XmlLibraryClass, "SupModuleList").split()

    SetCommon(LibraryClass, XmlLibraryClass)

    return LibraryClass
+
+
+## Load a new Build Option class object.
+#
+# Read an input XML BuildOption DOM object and return an object of Build Option
+# contained in the DOM object.
+#
+# @param XmlBuildOption A child XML DOM object in a Common XML DOM.
+#
+# @retvel BuildOption A new Build Option object created by XmlBuildOption.
+#
def LoadBuildOption(XmlBuildOption):
    """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
    BuildOption = BuildOptionClass()

    # The option string itself is the element's text content.
    BuildOption.Option = XmlElementData(XmlBuildOption)

    # List-valued attributes are whitespace-separated.
    BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, "BuildTargets").split()
    BuildOption.SupArchList = XmlAttribute(XmlBuildOption, "SupArchList").split()

    # Scalar attributes.
    BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, "ToolChainFamily")
    BuildOption.TagName = XmlAttribute(XmlBuildOption, "TagName")
    BuildOption.ToolCode = XmlAttribute(XmlBuildOption, "ToolCode")

    return BuildOption
+
+
+## Load a new User Extensions class object.
+#
+# Read an input XML UserExtensions DOM object and return an object of User
+# Extensions contained in the DOM object.
+#
+# @param XmlUserExtensions A child XML DOM object in a Common XML DOM.
+#
+# @retvel UserExtensions A new User Extensions object created by
+# XmlUserExtensions.
+#
def LoadUserExtensions(XmlUserExtensions):
    """Return a UserExtensionsClass object (UserID, Identifier, Content)
    read from a <UserExtensions> XML node."""
    UserExtensions = UserExtensionsClass()

    UserExtensions.UserID = XmlAttribute(XmlUserExtensions, "UserId")
    UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, "Identifier")

    # The extension payload is the raw element content.
    UserExtensions.Content = XmlElementData(XmlUserExtensions)

    return UserExtensions
+
+
+## Store content to a text file object.
+#
+# Write some text file content to a text file object. The contents may echo
+# in screen in a verbose way.
+#
+# @param TextFile The text file object.
+# @param Content The string object to be written to a text file.
+#
def StoreTextFile(TextFile, Content):
    """Write Content to TextFile, echoing it to the verbose log first."""
    EdkLogger.verbose(Content)
    TextFile.write(Content)
+
+
+## Add item to a section.
+#
+# Add an Item with specific CPU architecture to section dictionary.
+# The possible duplication is ensured to be removed.
+#
+# @param Section Section dictionary indexed by CPU architecture.
+# @param Arch CPU architecture: Ia32, X64, Ipf, Ebc or Common.
+# @param Item The Item to be added to section dictionary.
+#
def AddToSection(Section, Arch, Item):
    """Append Item to the Arch bucket of the Section dictionary, skipping
    duplicates (each bucket keeps insertion order, without repeats)."""
    Bucket = Section.setdefault(Arch, [])
    if Item not in Bucket:
        Bucket.append(Item)
+
+
+## Get section contents.
+#
+# Return the content of section named SectionName.
+# the contents is based on Methods and ObjectLists.
+#
+# @param SectionName The name of the section.
+# @param Method A function returning a string item of an object.
+# @param ObjectList The list of object.
+#
+# @retval Section The string content of a section.
+#
def GetSection(SectionName, Method, ObjectList):
    """Render an INF/DSC-style section: one "[SectionName.Arch]" group per
    architecture, one indented line per object produced by Method.

    Objects whose SupArchList is empty go to the "common" group; otherwise
    they appear under every supported architecture they list. Objects for
    which Method returns "" are skipped. (The former AddToSection helper is
    inlined here; behavior is unchanged.)
    """
    SupportedArches = ["common", "Ia32", "X64", "Ipf", "Ebc"]
    SectionDict = {}
    for Obj in ObjectList:
        Line = Method(Obj)
        if Line == "":
            continue
        Line = " %s" % Line
        Arches = Obj.SupArchList
        if len(Arches) == 0:
            Buckets = ["common"]
        else:
            Buckets = [Arch for Arch in SupportedArches if Arch.upper() in Arches]
        for Arch in Buckets:
            Bucket = SectionDict.setdefault(Arch, [])
            if Line not in Bucket:
                Bucket.append(Line)

    Section = ""
    for Arch in SupportedArches:
        Body = "\n".join(SectionDict.get(Arch, []))
        if Body != "":
            Section += "[%s.%s]\n%s\n\n" % (SectionName, Arch, Body)
    if Section != "":
        Section += "\n"
    return Section
+
+
+## Store file header to a text file.
+#
+# Write standard file header to a text file. The content includes copyright,
+# abstract, description and license extracted from CommonHeader class object.
+#
+# @param TextFile The text file object.
+# @param CommonHeader The source CommonHeader class object.
+#
def StoreHeader(TextFile, CommonHeader):
    """Write the standard commented file header (abstract, description,
    copyright, license) extracted from CommonHeader to TextFile."""
    # Multi-line fields get a leading "# " on every continuation line.
    Description = CommonHeader.Description.strip().replace("\n", "\n# ")
    License = CommonHeader.License.replace("\n", "\n# ").replace(" ", " ")

    Header = "#/** @file\n#\n"
    Header += "# " + CommonHeader.Abstract + "\n#\n"
    Header += "# " + Description + "\n"
    Header += "# " + CommonHeader.Copyright + "\n#\n"
    Header += "# " + License
    Header += "\n#\n#**/\n\n"

    StoreTextFile(TextFile, Header)
+
+## Store file header to a text file.
+#
+# Write Defines section to a text file. DefinesTupleList determines the content.
+#
+# @param TextFile The text file object.
+# @param DefinesTupleList The list of (Tag, Value) to be added as one item.
+#
def StoreDefinesSection(TextFile, DefinesTupleList):
    """Write a [Defines] section to TextFile, one aligned "Tag = Value" line
    per (Tag, Value) tuple in DefinesTupleList."""
    Lines = ["[Defines]\n"]
    for DefineItem in DefinesTupleList:
        Lines.append(" %-30s = %s\n" % DefineItem)
    Lines.append("\n\n")
    StoreTextFile(TextFile, "".join(Lines))
+
+
+## Add item to PCD dictionary.
+#
+# Add an PcdClass object to PCD dictionary. The key is generated from
+# PcdItemType.
+#
+# @param PcdDict PCD dictionary indexed by Pcd Item Type.
+# @param Arch CPU architecture: Ia32, X64, Ipf, Ebc or Common.
+# @param Item The Item to be added to section dictionary.
+#
def AddToPcdsDict(PcdDict, PcdItemType, PcdCode):
    """Append PcdCode to the PCD-section list keyed by the normalized item
    type, e.g. FEATURE_FLAG -> "PcdsFeatureFlag"."""
    SectionName = "Pcds" + PcdItemType.title().replace("_", "")
    PcdDict.setdefault(SectionName, []).append(PcdCode)
+
## Regular expression to match an equation.
mReEquation = re.compile(r"\s*(\S+)\s*=\s*(\S*)\s*")

## Return a value list matching information in a text file.
#
# Parse the text file and return values corresponding to an input tag tuple.
# Tags are matched case-insensitively against "<TAG> = <value>" lines; '#'
# starts a comment. On I/O error the list of empty strings is returned after
# logging a message.
#
# @param FileName The file name of the text file.
# @param TagTuple A tuple of tags as the key to the value.
#
# @param ValueTuple The returned list corresponding to the tag tuple.
#
def GetTextFileInfo(FileName, TagTuple):
    ValueTuple = [""] * len(TagTuple)
    try:
        TextFile = open(FileName)
        # try/finally guarantees the handle is closed; the original leaked it.
        try:
            for Line in TextFile:
                Line = Line.split("#", 1)[0]
                MatchEquation = mReEquation.match(Line)
                if MatchEquation:
                    Tag = MatchEquation.group(1).upper()
                    Value = MatchEquation.group(2)
                    for Index in range(len(TagTuple)):
                        if TagTuple[Index] == Tag:
                            ValueTuple[Index] = Value
        finally:
            TextFile.close()
    except EnvironmentError:
        # Narrowed from a bare "except:" so that programming errors and
        # KeyboardInterrupt are no longer silently swallowed.
        EdkLogger.info("IO Error in reading file %s" % FileName)

    return ValueTuple
+
+## Return a value tuple matching information in an XML fle.
+#
+# Parse the XML file and return a value tuple corresponding to an input tag
+# tuple. In case of any error, an tuple of empty strings is returned.
+#
+# @param FileName The file name of the XML file.
+# @param TagTuple A tuple of tags as the key to the value.
+#
+# @param ValueTupe The returned tuple corresponding to the tag tuple.
+#
def GetXmlFileInfo(FileName, TagTuple):
    """Parse the XML file FileName and return a tuple with one XmlElement
    lookup result per tag in TagTuple."""
    XmlDom = XmlParseFile(FileName)
    Values = []
    for XmlTag in TagTuple:
        Values.append(XmlElement(XmlDom, XmlTag))
    return tuple(Values)
+
+# Version and Copyright
+__version_number__ = "1.0"
+__version__ = "%prog Version " + __version_number__
+__copyright__ = "Copyright (c) 2007, Intel Corporation. All rights reserved."
+
## Parse migration command line options
#
# Use standard Python module optparse to parse command line option of this tool.
#
# @param Source The source file type.
# @param Destinate The destinate file type.
#
# @retval Options A optparse object containing the parsed options.
# @retval InputFile Path of an source file to be migrated.
#
def MigrationOptionParser(Source, Destinate):
    # use clearer usage to override default usage message
    UsageString = "%prog [-a] [-o <output_file>] <input_file>"
    Parser = OptionParser(description=__copyright__, version=__version__, usage=UsageString)
    Parser.add_option("-o", "--output", dest="OutputFile",
                      help="The name of the %s file to be created." % Destinate)
    Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False,
                      help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))

    Options, Arguments = Parser.parse_args()

    # Argument sanity checks: exactly one existing input file is required.
    if len(Arguments) == 0:
        raise MigrationError(OPTION_MISSING, name="Input file", usage=Parser.get_usage())
    if len(Arguments) > 1:
        raise MigrationError(OPTION_NOT_SUPPORTED, name="Too many input files", usage=Parser.get_usage())

    InputFile = Arguments[0]
    if not os.path.exists(InputFile):
        raise MigrationError(FILE_NOT_FOUND, name=InputFile)

    # -o and -a are mutually exclusive; with neither, -o is mandatory.
    if Options.OutputFile and Options.AutoWrite:
        raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
    if not Options.OutputFile:
        if not Options.AutoWrite:
            raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
        # Derive the output name from the input name and the target extension.
        Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()

    return Options, InputFile
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Utility module only: no standalone behavior.
    pass
diff --git a/BaseTools/Source/Python/fpd2dsc/StoreDsc.py b/BaseTools/Source/Python/fpd2dsc/StoreDsc.py new file mode 100644 index 0000000000..8d07ab9c5b --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/StoreDsc.py @@ -0,0 +1,765 @@ +## @file
+# Store a Platform class object to an INF file.
+#
+# Copyright (c) 2007 - 2009, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from LoadFpd import LoadFpd
+from CommonDataClass.PlatformClass import *
+from CommonDataClass.FdfClass import *
+from Common.MigrationUtilities import *
+from Common.ToolDefClassObject import *
+from Common.TargetTxtClassObject import *
+
## Store Defines section
#
# Write [Defines] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Defines section
# @param Platform An input Platform class object
#
def StorePlatformDefinesSection(DscFile, Platform):
    PlatformHeader = Platform.Header

    DefinesTupleList = []

    if PlatformHeader.Name != "":
        DefinesTupleList.append(("PLATFORM_NAME", PlatformHeader.Name))

    if PlatformHeader.Guid != "":
        DefinesTupleList.append(("PLATFORM_GUID", PlatformHeader.Guid))

    if PlatformHeader.Version != "":
        DefinesTupleList.append(("PLATFORM_VERSION", PlatformHeader.Version))

    for key in PlatformHeader.Specification.keys():
        SpecificationValue = PlatformHeader.Specification.get(key)
        # Bug fix: the original emitted "DSC_ SPECIFICATION" (embedded space),
        # which is not a valid [Defines] name.
        DefinesTupleList.append(("DSC_SPECIFICATION", SpecificationValue))

    if PlatformHeader.OutputDirectory != "":
        DefinesTupleList.append(("OUTPUT_DIRECTORY", PlatformHeader.OutputDirectory))

    # NOTE(review): SupArchList/BuildTargets are lists, so comparing them to ""
    # is always True; kept as-is to preserve the original output.
    if PlatformHeader.SupArchList != "":
        String = "|".join(PlatformHeader.SupArchList)
        DefinesTupleList.append(("SUPPORTED_ARCHITECTURES", String))

    if PlatformHeader.BuildTargets != "":
        String = "|".join(PlatformHeader.BuildTargets)
        DefinesTupleList.append(("BUILD_TARGETS", String))

    if PlatformHeader.SkuIdName != "":
        String = "|".join(PlatformHeader.SkuIdName)
        if String != "":
            DefinesTupleList.append(("SKUID_IDENTIFIER", String))

    String = Platform.FlashDefinitionFile.FilePath
    if String != "":
        DefinesTupleList.append(("FLASH_DEFINITION", String))

    Banner = []
    Banner.append("################################################################################")
    Banner.append("#")
    Banner.append("# Defines Section - statements that will be processed to create a Makefile.")
    Banner.append("#")
    Banner.append("################################################################################")
    Section = "\n".join(Banner)
    Section += "\n"
    StoreTextFile(DscFile, Section)

    StoreDefinesSection(DscFile, DefinesTupleList)
+
## Store SkuIds section
#
# Write [SkuIds] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Library Classes section
# @param Platform An input Platform class object
#
def StorePlatformSkuIdsSection(DscFile, Platform):
    Banner = [
        "################################################################################",
        "#",
        "# SKU Identification section - list of all SKU IDs supported by this Platform.",
        "#",
        "################################################################################",
    ]
    Content = "\n".join(Banner) + "\n"
    Content += "[SkuIds]" + '\n'

    # Each entry is written as "<id>|<name>".
    for Item in Platform.SkuInfos.SkuInfoList:
        Content += "%s|%s\n" % (Item[0], Item[1])
    Content += '\n'

    StoreTextFile(DscFile, Content)
+
## Store Build Options section
#
# Write [BuildOptions] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Build Options section
# @param Platform An input Platform class object
#
def StorePlatformBuildOptionsSection(DscFile, Platform):
    # which is from tools_def.txt
    StandardBuildTargets = ["DEBUG", "RELEASE"]
    SupportedArches = ["COMMON", "IA32", "X64", "IPF", "EBC", "ARM"]
    Target = TargetTxtClassObject()
    WorkSpace = os.getenv('WORKSPACE')
    # NOTE(review): backslash separators assume a Windows host -- confirm this
    # tool is not expected to run elsewhere.
    Target.LoadTargetTxtFile(WorkSpace + '\\Conf\\target.txt')
    ToolDef = ToolDefClassObject()
    ToolDef.LoadToolDefFile(WorkSpace + '\\' + Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF])
    # Now we have got ToolDef object
    #ToolDef.ToolsDefTxtDictionary
    Dict = ToolDef.ToolsDefTxtDatabase

    Dict1 = ToolDef.ToolsDefTxtDictionary # we care the info in this Dict
    #
    # We only support *(DEBUG/RELEASE) and *(All Arch: IA32, X64, IPF and EBC) for now
    #
    # One accumulator per known toolchain; each holds " = <flags>\n" once its
    # *_CC_FLAGS key is seen in tools_def.txt.
    SectionWINDDK = ''
    SectionVS2003 = ''
    SectionVS2005EXP = ''
    SectionVS2005STD = ''
    SectionVS2005PRO = ''
    SectionVS2005TEAMSUITE = ''
    SectionUNIXGCC = ''
    SectionCYGWINGCC = ''
    SectionELFGCC = ''
    SectionICC = ''
    SectionMYTOOLS = ''
    for key in Dict1.keys():
        if key.find("_CC_FLAGS") != -1:
            if key.find('WINDDK3790x1830') != -1:
                SectionWINDDK = " = " + Dict1.get(key) + "\n"
            elif key.find('VS2003') != -1:
                SectionVS2003 = " = " + Dict1.get(key)+ "\n"
            elif key.find('VS2005EXP') != -1:
                SectionVS2005EXP = " = " + Dict1.get(key) + "\n"
            elif key.find('VS2005STD') != -1:
                SectionVS2005STD = " = " + Dict1.get(key) + "\n"
            elif key.find('VS2005PRO') != -1:
                SectionVS2005PRO = " = " + Dict1.get(key) + "\n"
            elif key.find('VS2005TEAMSUITE') != -1:
                SectionVS2005TEAMSUITE = " = " + Dict1.get(key) + "\n"
            elif key.find('UNIXGCC') != -1:
                SectionUNIXGCC = " = " + Dict1.get(key) + "\n"
            elif key.find('CYGWINGCC') != -1:
                SectionCYGWINGCC = " = " + Dict1.get(key) + "\n"
            elif key.find('ELFGCC') != -1:
                SectionELFGCC = " = " + Dict1.get(key) + "\n"
            elif key.find('ICC') != -1:
                SectionICC = " = " + Dict1.get(key) + "\n"
            elif key.find('MYTOOLS') != -1:
                SectionMYTOOLS = " = " + Dict1.get(key) + "\n"
            else:
                # Unrecognized toolchain tag in tools_def.txt.
                print "Error!"

    #
    # First need to check which arch
    #
    Archs = Platform.Header.SupArchList
    BuildTargets = Platform.Header.BuildTargets
    #if BuildTargets == StandardBuildTargets:
    #print "Debug and Release both support" # skip debug/release string search
    #else:
    #print "need to search debug/release string"

    # Four supported arches means "all" -> common subsection with wildcard arch.
    if len(Archs) == 4:
        Arch = "*"
        SectionName = "[BuildOptions.Common]\n"
    else:
        # NOTE(review): with several (but not all) arches listed, only the last
        # iteration's Arch/SectionName survive below, and SectionName is unbound
        # if Archs is empty -- confirm intended behavior.
        for Arch in Archs:
            if Arch == 'IA32':
                SectionName = "[BuildOptions.IA32]\n"
            elif Arch == 'X64':
                SectionName = "[BuildOptions.X64]\n"
            elif Arch == 'IPF':
                SectionName = "[BuildOptions.IPF]\n"
            elif Arch == 'EBC':
                SectionName = "[BuildOptions.EBC]\n"
            else:
                print 'Error!'

    # Prefix each collected flag value with its "*_<TOOLCHAIN>_<ARCH>_CC_FLAGS" key.
    Section = ""
    if SectionWINDDK != "":
        SectionWINDDK = "*_WINDDK3790x1830_" + Arch + "_CC_FLAGS" + SectionWINDDK
        Section += SectionWINDDK
    if SectionVS2003 != "":
        SectionVS2003 = "*_VS2003_" + Arch + "_CC_FLAGS" + SectionVS2003
        Section += SectionVS2003
    if SectionVS2005EXP != "":
        SectionVS2005EXP = "*_VS2005EXP_" + Arch + "_CC_FLAGS" + SectionVS2005EXP
        Section += SectionVS2005EXP
    if SectionVS2005STD != "":
        SectionVS2005STD = "*_VS2005STD_" + Arch + "_CC_FLAGS" + SectionVS2005STD
        Section += SectionVS2005STD
    if SectionVS2005PRO != "":
        SectionVS2005PRO = "*_VS2005PRO_" + Arch + "_CC_FLAGS" + SectionVS2005PRO
        Section += SectionVS2005PRO
    if SectionVS2005TEAMSUITE != "":
        SectionVS2005TEAMSUITE = "*_VS2005TEAMSUITE_" + Arch + "_CC_FLAGS" + SectionVS2005TEAMSUITE
        Section += SectionVS2005TEAMSUITE
    if SectionUNIXGCC != "":
        SectionUNIXGCC = "*_UNIXGCC_" + Arch + "_CC_FLAGS" + SectionUNIXGCC
        Section += SectionUNIXGCC
    if SectionCYGWINGCC != "":
        SectionCYGWINGCC = "*_CYGWINGCC_" + Arch + "_CC_FLAGS" + SectionCYGWINGCC
        Section += SectionCYGWINGCC
    if SectionELFGCC != "":
        SectionELFGCC = "*_ELFGCC_" + Arch + "_CC_FLAGS" + SectionELFGCC
        Section += SectionELFGCC
    if SectionICC != "":
        SectionICC = "*_ICC_" + Arch + "_CC_FLAGS" + SectionICC
        Section += SectionICC
    if SectionMYTOOLS != "":
        SectionMYTOOLS = "*_MYTOOLS_" + Arch + "_CC_FLAGS" + SectionMYTOOLS
        Section += SectionMYTOOLS

    List = []
    List.append("################################################################################")
    List.append("#")
    List.append("# Build Options section - list of all Build Options supported by this Platform.")
    List.append("#")
    List.append("################################################################################")
    SectionHeader = "\n".join(List)
    SectionHeader += "\n"

    Section = SectionHeader + SectionName + Section
    Section += "\n"
    StoreTextFile(DscFile, Section)
+
## Store Libraries section
#
# Write [Libraries] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Library Classes section
# @param Platform An input Platform class object
#
def StorePlatformLibrariesSection(DscFile, Platform):
    Banner = [
        "################################################################################",
        "#",
        "# Libraries section - list of all Libraries needed by this Platform.",
        "#",
        "################################################################################",
    ]
    # The section itself is always emitted empty (EDK I style components).
    StoreTextFile(DscFile, "\n".join(Banner) + "\n" + '[Libraries]\n\n')
+
## Return a Platform Library Class Item
#
# Read the input LibraryClass class object and return one line of Library Class Item.
#
# @param LibraryClass An input LibraryClass class object
#
# @retval LibraryClassItem A Module Library Class Item
#
def GetPlatformLibraryClassItem(LibraryClass):
    # Produces "Name|$(WORKSPACE)/FilePath"; the rstrip drops a dangling "|"
    # that could only appear with empty fields.
    Parts = [LibraryClass.Name, LibraryClass.FilePath]
    return "|$(WORKSPACE)/".join(Parts).rstrip("|")
+
## Add item to a LibraryClass section
#
# Add an Item with specific Module Type to section dictionary.
# The possible duplication is ensured to be removed.
#
# @param Section Section dictionary indexed by CPU architecture
# @param SupModuleList LibraryClass SupModuleList: BASE, SEC, PEI_CORE, PEIM, etc
# @param Item The Item to be added to section dictionary
#
def AddToLibraryClassSection(Section, SupModuleList, Item):
    for ModuleType in SupModuleList:
        # setdefault both creates and registers the per-type list in one step.
        Entries = Section.setdefault(ModuleType, [])
        if Item not in Entries:
            Entries.append(Item)
+
## Get Library Classes section contents
#
# Return the content of section named SectionName.
# The contents are built by applying Method to each object in ObjectList and
# grouping the results by CPU architecture and module type.
#
# @param SectionName The name of the section
# @param Method A function returning a string item of an object
# @param ObjectList The list of object
#
# @retval Section The string content of a section
#
def GetLibraryClassesSection(SectionName, Method, ObjectList):
    SupportedArches = ["COMMON", "IA32", "X64", "IPF", "EBC"]
    ModuleTypes = ["BASE","SEC","PEI_CORE","PEIM","DXE_CORE","DXE_DRIVER","DXE_SMM_DRIVER","DXE_SAL_DRIVER","DXE_RUNTIME_DRIVER","UEFI_DRIVER","UEFI_APPLICATION"]
    # One {ModuleType: [items]} dictionary per output subsection; this replaces
    # the five hand-unrolled copies of identical bookkeeping in the original.
    SectionDicts = {}
    for Arch in SupportedArches:
        SectionDicts[Arch] = {}

    for Object in ObjectList:
        if Object is None:
            continue
        Item = Method(Object)
        if Item == "":
            continue
        Item = " %s" % Item
        # BASE libraries are registered under the single BASE module type;
        # everything else under each module type it supports.
        if Object.ModuleType == "BASE":
            SupModuleList = ["BASE"]
        else:
            SupModuleList = Object.SupModuleList
        Arches = Object.SupArchList
        if len(Arches) == 4:
            # All four CPU architectures -> the common subsection.
            AddToLibraryClassSection(SectionDicts["COMMON"], SupModuleList, Item)
        else:
            for Arch in SupportedArches[1:]:
                if Arch.upper() in Arches:
                    AddToLibraryClassSection(SectionDicts[Arch], SupModuleList, Item)

    # Emit [Section.Common.ModuleType] subsections first, then the per-arch
    # subsections, preserving the original output ordering.
    Section = ""
    for ModuleType in ModuleTypes:
        CommonLines = "\n".join(SectionDicts["COMMON"].get(ModuleType, []))
        if CommonLines != "":
            Section += "[%s.Common.%s]\n%s\n" % (SectionName, ModuleType, CommonLines)
            Section += "\n"
    for ModuleType in ModuleTypes:
        for Arch in SupportedArches[1:]:
            ArchLines = "\n".join(SectionDicts[Arch].get(ModuleType, []))
            if ArchLines != "":
                Section += "[%s.%s.%s]\n%s\n" % (SectionName, Arch, ModuleType, ArchLines)
                Section += "\n"

    if Section != "":
        Section += "\n"
    return Section
+
## Store Library Classes section
#
# Write [LibraryClasses] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Library Classes section
# @param Platform An input Platform class object
#
def StorePlatformLibraryClassesSection(DscFile, Platform):
    Body = GetLibraryClassesSection("LibraryClasses", GetPlatformLibraryClassItem, Platform.LibraryClasses.LibraryList)
    Banner = [
        "################################################################################",
        "#",
        "# Library Class section - list of all Library Classes needed by this Platform.",
        "#",
        "################################################################################",
    ]
    StoreTextFile(DscFile, "\n".join(Banner) + "\n" + Body)
+
## Store Pcd section
#
# Write [PcdsFixedAtBuild], [PcdsFeatureFlag], [PcdsPatchableInModule] and
# [PcdsDynamic] sections to the DscFile based on Platform class object.
#
# @param DscFile The output DSC file to store the Build Options section
# @param Platform An input Platform class object
#
def StorePlatformPcdSection(DscFile, Platform):
    # Maps the PCD item type ("FIXED_AT_BUILD", ...) to its list of entry strings.
    SectionDict = {}
    #
    # [PcdsFixedAtBuild], [PcdsPatchableInModule] and [PcdsFeatureFlag] are from platform.modules
    # [PcdsDynamic] is from platform.DynamicPcdBuildDefinitions
    #
    Modules = Platform.Modules.ModuleList # it's a list of modules
    for Module in Modules:
        PcdBuildDefinitions = Module.PcdBuildDefinitions # it's a list of PcdData
        for PcdData in PcdBuildDefinitions:
            if PcdData.ItemType == "FEATURE_FLAG":
                # Feature flag PCDs carry only "Guid.CName|Value".
                List = []
                List.append(PcdData.TokenSpaceGuidCName + "." + PcdData.C_NAME)
                List.append(PcdData.Value)
                String = "|".join(List)
                ItemType = PcdData.ItemType
                SectionPcds = SectionDict.get(ItemType, [])
                if String not in SectionPcds:
                    SectionPcds.append(String)
                SectionDict[ItemType] = SectionPcds
            else:
                # All other item types carry "Guid.CName|Value|Token|Type|Size".
                List = []
                List.append(PcdData.TokenSpaceGuidCName + "." + PcdData.C_NAME)
                List.append(PcdData.Value)
                List.append(PcdData.Token)
                List.append(PcdData.DatumType)
                List.append(PcdData.MaxDatumSize)
                String = "|".join(List)
                ItemType = PcdData.ItemType
                # Collapses the three identical elif branches of the original.
                if ItemType in ("FIXED_AT_BUILD", "PATCHABLE_IN_MODULE", "DYNAMIC"):
                    SectionPcds = SectionDict.get(ItemType, [])
                    if String not in SectionPcds:
                        SectionPcds.append(String)
                    SectionDict[ItemType] = SectionPcds

    DynamicPcdBuildDefinitions = Platform.DynamicPcdBuildDefinitions # It's a list
    for PcdBuildData in DynamicPcdBuildDefinitions:
        # Bug fix: this loop previously read the stale "PcdData" variable left
        # over from the module loop above instead of its own loop variable
        # "PcdBuildData", emitting wrong data (or raising NameError when no
        # module defined any PCD).
        List = []
        List.append(PcdBuildData.TokenSpaceGuidCName + "." + PcdBuildData.C_NAME)
        List.append(PcdBuildData.Token)
        List.append(PcdBuildData.DatumType)
        List.append(PcdBuildData.MaxDatumSize)
        String = "|".join(List)
        if PcdBuildData.ItemType == "DYNAMIC":
            ItemType = PcdBuildData.ItemType
            SectionPcdsDynamic = SectionDict.get(ItemType, [])
            if String not in SectionPcdsDynamic:
                SectionPcdsDynamic.append(String)
            SectionDict[ItemType] = SectionPcdsDynamic

    ItemType = "FIXED_AT_BUILD"
    Section = "[PcdsFixedAtBuild]\n " + "\n ".join(SectionDict.get(ItemType, []))
    ItemType = "FEATURE_FLAG"
    Section += "\n\n[PcdsFeatureFlag]\n " + "\n ".join(SectionDict.get(ItemType, []))
    ItemType = "PATCHABLE_IN_MODULE"
    Section += "\n\n[PcdsPatchableInModule]\n " + "\n ".join(SectionDict.get(ItemType, []))
    Section += "\n\n"

    List = []
    List.append("################################################################################")
    List.append("#")
    List.append("# Pcd Dynamic Section - list of all EDK II PCD Entries defined by this Platform.")
    List.append("#")
    List.append("################################################################################")
    Section += "\n".join(List)
    ItemType = "DYNAMIC"
    Section += "\n\n[PcdsDynamic]\n " + "\n ".join(SectionDict.get(ItemType, []))
    Section += "\n\n"

    List = []
    List.append("################################################################################")
    List.append("#")
    List.append("# Pcd Section - list of all EDK II PCD Entries defined by this Platform.")
    List.append("#")
    List.append("################################################################################")
    SectionHeader = "\n".join(List)
    SectionHeader += "\n"
    Section = SectionHeader + Section
    StoreTextFile(DscFile, Section)
+
## Add item to a section
#
# Add an Item with specific CPU architecture to section dictionary.
# The possible duplication is ensured to be removed.
#
# @param Section Section dictionary indexed by CPU architecture
# @param Arch CPU architecture: Ia32, X64, Ipf, Ebc or Common
# @param Item The Item to be added to section dictionary
#
def AddToSection(Section, Arch, Item):
    # setdefault creates and registers the per-arch list in a single step.
    Entries = Section.setdefault(Arch, [])
    if Item not in Entries:
        Entries.append(Item)
+
## Get section contents
#
# Return the content of section named SectionName.
# The contents are built by applying Method to each object in ObjectList and
# grouping the results by CPU architecture.
#
# @param SectionName The name of the section
# @param Method A function returning a string item of an object
# @param ObjectList The list of object
#
# @retval Section The string content of a section
#
def GetSection(SectionName, Method, ObjectList):
    SupportedArches = ["COMMON", "IA32", "X64", "IPF", "EBC"]
    SectionDict = {}
    for Object in ObjectList:
        if Object.FilePath == "":
            continue
        Item = Method(Object)
        if Item == "":
            continue
        Item = " %s" % Item
        Arches = Object.SupArchList
        if len(Arches) == 4:
            # Bug fix: the original stored these entries under the lowercase
            # key "common", but the emission loop below looks keys up with the
            # uppercase names from SupportedArches -- so items supported on all
            # architectures were silently dropped from the output.
            AddToSection(SectionDict, "COMMON", Item)
        else:
            for Arch in SupportedArches:
                if Arch.upper() in Arches:
                    AddToSection(SectionDict, Arch, Item)

    Section = ""
    for Arch in SupportedArches:
        SectionArch = "\n".join(SectionDict.get(Arch, []))
        if SectionArch != "":
            Section += "[%s.%s]\n%s\n" % (SectionName, Arch, SectionArch)
            Section += "\n"
    if Section != "":
        Section += "\n"
    return Section
+
## Return a Platform Component Item
#
# Read the input Platform Component object and return one line of Platform
# Component Item, including its <LibraryClasses>, <Pcds*> and <BuildOptions>
# sub-sections when present.
#
# @param Component An input Platform Component class object
#
# @retval ComponentItem A Platform Component Item
#
def GetPlatformComponentItem(Component):
    List = []
    Section = {}

    List.append("$(WORKSPACE)/" + Component.FilePath)

    LibraryClasses = Component.LibraryClasses
    if LibraryClasses != []:
        # Restart the list with an opening brace for the sub-section body.
        List = []
        List.append("$(WORKSPACE)/" + Component.FilePath + " {")
        List.append("<LibraryClasses>")
        for LibraryClass in LibraryClasses:
            if LibraryClass == ["", ""]:
                continue
            List.append(" " + LibraryClass[0] + "|$(WORKSPACE)/" + LibraryClass[1])

    # NOTE(review): PCD and BuildOptions content is appended even when no
    # opening brace was emitted above (empty LibraryClasses) -- confirm whether
    # such components can actually carry PCDs/options.
    PcdBuildDefinitions = Component.PcdBuildDefinitions
    for PcdData in PcdBuildDefinitions:
        if PcdData.ItemType == "FEATURE_FLAG":
            # Feature flag PCDs carry only "Guid.CName|Value".
            List1 = []
            List1.append(PcdData.TokenSpaceGuidCName + "." + PcdData.C_NAME)
            List1.append(PcdData.Value)
            String = "|".join(List1)
            ItemType = PcdData.ItemType
            SectionPcd = Section.get(ItemType, [])
            if String not in SectionPcd:
                SectionPcd.append(String)
            Section[ItemType] = SectionPcd
        else:
            # All other item types carry "Guid.CName|Value|Token|Type|Size".
            List1 = []
            List1.append(PcdData.TokenSpaceGuidCName + "." + PcdData.C_NAME)
            List1.append(PcdData.Value)
            List1.append(PcdData.Token)
            List1.append(PcdData.DatumType)
            List1.append(PcdData.MaxDatumSize)
            String = "|".join(List1)
            ItemType = PcdData.ItemType
            if ItemType in ("FIXED_AT_BUILD", "PATCHABLE_IN_MODULE", "DYNAMIC"):
                SectionPcd = Section.get(ItemType, [])
                if String not in SectionPcd:
                    SectionPcd.append(String)
                Section[ItemType] = SectionPcd

    ItemType = "FIXED_AT_BUILD"
    if Section.get(ItemType, []) != []:
        List.append("<PcdsFixedAtBuild>")
        List.append(" " + "\n ".join(Section.get(ItemType, [])))
    ItemType = "FEATURE_FLAG"
    if Section.get(ItemType, []) != []:
        List.append("<PcdsFeatureFlag>")
        List.append(" " + "\n ".join(Section.get(ItemType, [])))
    ItemType = "PATCHABLE_IN_MODULE"
    if Section.get(ItemType, []) != []:
        List.append("<PcdsPatchableInModule>")
        List.append(" " + "\n ".join(Section.get(ItemType, [])))
    ItemType = "DYNAMIC"
    if Section.get(ItemType, []) != []:
        List.append("<PcdsDynamic>")
        List.append(" " + "\n ".join(Section.get(ItemType, [])))

    ListOption = []
    SectionOption = ""
    ListBuildOptions = Component.BuildOptions # a list
    if ListBuildOptions != []:
        SectionOption += "\n <BuildOptions>\n"
        for BuildOptions in ListBuildOptions:
            Options = BuildOptions.Options
            for Option in Options:
                # Assemble "<target>_<family>_<arch>_<toolcode>_FLAGS = <value>".
                for Item in Option.BuildTargetList:
                    ListOption.append(Item)
                # Bug fix: the original appended ToolChainFamily to the outer
                # "List" (the component body) instead of "ListOption" ...
                ListOption.append(Option.ToolChainFamily)
                for Item in Option.SupArchList:
                    ListOption.append(Item)
                ListOption.append(Option.ToolCode)
                ListOption.append("FLAGS")
                # ... and joined the outer "List" here, corrupting both the
                # option key and the component body.
                SectionOption += " " + "_".join(ListOption) + " = " + Option.Option + "\n"
                ListOption = []
    if SectionOption != "":
        List.append(SectionOption)
    if List != ["$(WORKSPACE)/" + Component.FilePath]:
        List.append("}\n")

    return "\n ".join(List)
+
## Store Components section.
#
# Write [Components] section to the DscFile based on Platform class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the Components section
# @param Platform An input Platform class object
#
def StorePlatformComponentsSection(DscFile, Platform):
    Body = GetSection("Components", GetPlatformComponentItem, Platform.Modules.ModuleList)
    Banner = [
        "################################################################################",
        "#",
        "# Components Section - list of all EDK II Modules needed by this Platform.",
        "#",
        "################################################################################",
    ]
    StoreTextFile(DscFile, "\n".join(Banner) + "\n" + Body)
+
## Store User Extensions section.
#
# Write [UserExtensions] section to the InfFile based on Module class object.
# Different CPU architectures are specified in the subsection if possible.
#
# @param DscFile The output DSC file to store the User Extensions section
# @param Platform An input Platform class object
#
def StorePlatformUserExtensionsSection(DscFile, Platform):
    Body = ""
    for UserExtension in Platform.UserExtensions:
        Body += GetUserExtensions(UserExtension)
    Banner = [
        "################################################################################",
        "#",
        "# User Extensions Section - list of all User Extensions specified by user.",
        "#",
        "################################################################################",
    ]
    StoreTextFile(DscFile, "\n".join(Banner) + "\n" + Body)
+
## Store a Platform class object to a new DSC file.
#
# Read an input Platform class object and save the contents to a new DSC file.
#
# @param DscFileName The output DSC file
# @param Platform An input Platform class object
#
def StoreDsc(DscFileName, Platform):
    DscFile = open(DscFileName, "w+")
    EdkLogger.info("Save file to %s" % DscFileName)

    try:
        # Sections are written in the order expected in a DSC file.
        StoreHeader(DscFile, Platform.Header)
        StorePlatformDefinesSection(DscFile, Platform)
        StorePlatformBuildOptionsSection(DscFile, Platform)
        StorePlatformSkuIdsSection(DscFile, Platform)
        StorePlatformLibrariesSection(DscFile, Platform) # new in dsc, Edk I components, list of INF files
        StorePlatformLibraryClassesSection(DscFile, Platform) # LibraryClasses are from Modules
        StorePlatformPcdSection(DscFile, Platform)
        #StorePlatformPcdDynamicSection(DscFile, Platform)
        StorePlatformComponentsSection(DscFile, Platform)
        StorePlatformUserExtensionsSection(DscFile, Platform)
    finally:
        # Close the handle even if a section writer raises, so a partially
        # written file is not leaked.
        DscFile.close()
+
if __name__ == '__main__':
    # Library module: importable only, no standalone behavior.
    pass
diff --git a/BaseTools/Source/Python/fpd2dsc/__init__.py b/BaseTools/Source/Python/fpd2dsc/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/__init__.py diff --git a/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py b/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py new file mode 100644 index 0000000000..a22ff5a685 --- /dev/null +++ b/BaseTools/Source/Python/fpd2dsc/fpd2dsc.py @@ -0,0 +1,116 @@ +## @file
+# Convert an XML-based FPD file to a text-based DSC file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, re, sys, xml.dom.minidom #XmlRoutines, EdkIIWorkspace
+from LoadFpd import LoadFpd
+from StoreDsc import StoreDsc
+from optparse import OptionParser
+
# Version and Copyright
# Version/copyright strings consumed by optparse's --version/description
# handling in MyOptionParser below.
__version_number__ = "1.0"
__version__ = "%prog Version " + __version_number__
__copyright__ = "Copyright (c) 2007, Intel Corporation All rights reserved."
+
+## Parse command line options
+#
+# Using standard Python module optparse to parse command line option of this tool.
+#
+# @retval Options A optparse.Values object containing the parsed options
+# @retval Args All the arguments got from the command line
+#
def MyOptionParser():
    """Parse the command line options of this tool using optparse.

    Fixes over the original: the -v/--verbose help text was an accidental
    copy of the -q/--quiet help text, and the help strings contained the
    typos "repalced" and "succes".

    @retval Options  An optparse.Values object containing the parsed options
    @retval Args     The positional arguments from the command line
    """
    usage = "%prog [options] input_filename"
    parser = OptionParser(usage=usage, description=__copyright__, version="%prog " + str(__version_number__))
    parser.add_option("-o", "--output", dest="outfile", help="Specific Name of the DSC file to create, otherwise it is the FPD filename with the extension replaced.")
    parser.add_option("-a", "--auto", action="store_true", dest="autowrite", default=False, help="Automatically create output files and write the DSC file")
    parser.add_option("-q", "--quiet", action="store_const", const=0, dest="verbose", help="Do not print any messages, just return either 0 for success or 1 for failure")
    parser.add_option("-v", "--verbose", action="count", dest="verbose", help="Print informational messages; repeat the option to increase verbosity")
    parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Enable printing of debug messages.")
    parser.add_option("-w", "--workspace", dest="workspace", default=str(os.environ.get('WORKSPACE')), help="Specify workspace directory.")
    (options, args) = parser.parse_args(sys.argv[1:])

    return options, args
+
+## Entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful
+# @retval 1 Tool failed
+#
def Main():
    """Convert the FPD file named on the command line into a DSC file.

    Returns 0 on success and 1 on conversion failure; exits directly with
    status 1 on usage errors (missing WORKSPACE or missing input file).
    """
    global Options
    global Args
    global WorkSpace
    Options,Args = MyOptionParser()

    WorkSpace = ""
    #print Options.workspace
    if (Options.workspace == None):
        # WORKSPACE was neither passed via -w nor set in the environment
        # (MyOptionParser defaults -w to os.environ.get('WORKSPACE')).
        print "ERROR: E0000: WORKSPACE not defined.\n Please set the WORKSPACE environment variable to the location of the EDK II install directory."
        sys.exit(1)
    else:
        WorkSpace = Options.workspace
        if (Options.debug):
            print "Using Workspace:", WorkSpace
    try:
        # -q stores 0, -v counts occurrences, and the default is None;
        # bump the level so a plain run behaves like a single -v.
        Options.verbose +=1
    except:
        # Adding 1 to None raises TypeError, which lands here.
        # NOTE(review): bare "except:" also swallows unrelated errors.
        Options.verbose = 1
        pass

    InputFile = ""
    if Args == []:
        # Adjacent string literals concatenate at compile time, so this
        # prints "usage:%prog [options] input_filename" (literal %prog).
        print "usage:" "%prog [options] input_filename"
    else:
        InputFile = Args[0]
        #print InputFile
    if InputFile != "":
        FileName = InputFile
        # NOTE(review): "|" is bitwise-or; it only works here because the
        # operands are ints/bools -- logical "or" was presumably intended.
        if ((Options.verbose > 1) | (Options.autowrite)):
            print "FileName:",InputFile
    else:
        print "ERROR: E0001 - You must specify an input filename"
        sys.exit(1)

    if (Options.outfile):
        OutputFile = Options.outfile
    else:
        # Default output name: the input path with .fpd replaced by .dsc.
        OutputFile = FileName.replace('.fpd', '.dsc')

    if ((Options.verbose > 2) or (Options.debug)):
        print "Output Filename:", OutputFile

    try:
        # Parse the XML FPD into a Platform object, then emit DSC text.
        Platform = LoadFpd(FileName)
        StoreDsc(OutputFile, Platform)
        return 0
    except Exception, e:
        # Report any conversion failure and signal error to the caller.
        print e
        return 1
+
# Script entry point: run the conversion and propagate its status code
# (0 = success, 1 = failure) to the shell.
if __name__ == '__main__':
    sys.exit(Main())
    #pass
    #global Options
    #global Args
    #Options,Args = MyOptionParser()

    #Main()
    #sys.exit(0)
\ No newline at end of file diff --git a/BaseTools/Source/Python/msa2inf/ConvertModule.py b/BaseTools/Source/Python/msa2inf/ConvertModule.py new file mode 100644 index 0000000000..e0d7b88695 --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/ConvertModule.py @@ -0,0 +1,112 @@ +## @file
+# Convert an MSA Module class object ot an INF Module class object by filling
+# several info required by INF file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from LoadMsa import LoadMsa
+from StoreInf import StoreInf
+from Common.MigrationUtilities import *
+from EdkIIWorkspaceGuidsInfo import gEdkIIWorkspaceGuidsInfo
+
+#The default INF version number tool generates.
+gInfVersion = "0x00010005"
+
+## Add required version information.
+#
+# Add the default INF version, EFI specificiation version and EDK release
+# version to Module class object.
+#
+# @param Module An input Module class object.
+#
def AddModuleMiscVersion(Module):
    """Fill the default INF version, EFI specification version and EDK
    release version into Module.Header.

    @param Module  An input Module class object.
    """
    Header = Module.Header
    Header.InfVersion = gInfVersion
    Header.EfiSpecificationVersion = Header.Specification.get("EFI_SPECIFICATION_VERSION", "")
    Header.EdkReleaseVersion = Header.Specification.get("EDK_RELEASE_VERSION", "")
+
+
+## Add Module produced library class.
+#
+# Add the produced library class from library class list whose usage type is
+# always produced.
+#
+# @param Module An input Module class object.
+#
def AddModuleProducedLibraryClass(Module):
    """Append every library class whose usage contains ALWAYS_PRODUCED to
    Module.Header.LibraryClass, preserving declaration order.

    @param Module  An input Module class object.
    """
    ProducedClasses = [Lib for Lib in Module.LibraryClasses
                       if "ALWAYS_PRODUCED" in Lib.Usage]
    Module.Header.LibraryClass.extend(ProducedClasses)
+
+
+## Add Module Package Dependency path.
+#
+# Translate Package Dependency Guid to a file path relative to workspace.
+#
+# @param Module An input Module class object.
+#
def AddModulePackageDependencyPath(Module):
    """Resolve each package dependency's Guid/Version into a file path
    relative to the workspace and store it on the dependency object.

    @param Module  An input Module class object.
    """
    Resolve = gEdkIIWorkspaceGuidsInfo.ResolvePackageFilePath
    for Dependency in Module.PackageDependencies:
        Dependency.FilePath = Resolve(Dependency.PackageGuid,
                                      Dependency.PackageVersion)
+
+
+## Add Module Recommended Library Instance path.
+#
+# Translate Module Recommened Library Instance Guid to a file path relative to
+# workspace.
+#
+# @param Module An input Module class object.
+#
def AddModuleRecommonedLibraryInstancePath(Module):
    """Resolve each consumed library class's recommended instance Guid to a
    workspace-relative module file path.

    Classes this module produces, and classes without a recommended
    instance Guid, are skipped.

    @param Module  An input Module class object.
    """
    Resolve = gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath
    for Lib in Module.LibraryClasses:
        if "ALWAYS_PRODUCED" in Lib.Usage:
            continue
        if Lib.RecommendedInstanceGuid == "":
            continue
        # NOTE: the "RecommendedIntance*" spellings (missing 's') match the
        # attribute names of the project's library class data object.
        Lib.RecommendedIntance = Resolve(Lib.RecommendedInstanceGuid,
                                         Lib.RecommendedIntanceVersion)
+
+
+## Convert MSA Module class object to INF Module class object.
+#
+# Convert MSA module class ojbect to INF Module class object by filling in
+# several information required by INF file.
+#
+# @param Module An input Module class object.
+#
def ConvertMsaModuleToInfModule(Module):
    """Fill in the extra fields an INF file requires on an MSA-loaded
    Module object: versions, produced library classes, and resolved
    package/library-instance paths.

    @param Module  An input Module class object (modified in place).
    """
    for FillStep in (AddModuleMiscVersion,
                     AddModuleProducedLibraryClass,
                     AddModulePackageDependencyPath,
                     AddModuleRecommonedLibraryInstancePath):
        FillStep(Module)
+
+
# This module is a library for the msa2inf tool; it has no standalone
# behavior when executed directly.
if __name__ == '__main__':
    pass
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/msa2inf/EdkIIWorkspaceGuidsInfo.py b/BaseTools/Source/Python/msa2inf/EdkIIWorkspaceGuidsInfo.py new file mode 100644 index 0000000000..f1c8d60d75 --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/EdkIIWorkspaceGuidsInfo.py @@ -0,0 +1,325 @@ +## @file
+# Collects the Guid Information in current workspace.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import fnmatch
+from Common.EdkIIWorkspace import EdkIIWorkspace
+from Common.MigrationUtilities import *
+
+## A class for EdkII work space to resolve Guids.
+#
+# This class inherits from EdkIIWorkspace and collects the Guids information
+# in current workspace. The Guids information is important to translate the
+# package Guids and recommended library instances Guids to relative file path
+# (to workspace directory) in MSA files.
+#
class EdkIIWorkspaceGuidsInfo(EdkIIWorkspace):

    ## The class constructor.
    #
    # The constructor initializes the workspace directory. It does not
    # collect package and module Guids info at initialization; instead, it
    # collects them lazily, on first lookup.
    #
    # @param self The object pointer.
    #
    def __init__(self):
        # Initialize parent class.
        EdkIIWorkspace.__init__(self)
        # Map from lower-cased Guid string to the description file path
        # that declares it.
        self.__GuidToFilePath = {}
        # Directories known to contain a package description file.
        self.__PackageDirList = []
        # Flag indicating whether package Guids info has already been
        # collected, to avoid re-collection.
        self.__PackageGuidInitialized = False
        # Flag indicating whether module Guids info has already been
        # collected, to avoid re-collection.
        self.__ModuleGuidInitialized = False

    ## Add Guid, Version and FilePath to Guids database.
    #
    # Adds Guid, Version and FilePath to the Guids database. It constructs
    # a map from Guid (and nominally Version) to FilePath internally and
    # detects possible Guid collisions. For now, the version information is
    # simply ignored and the Guid value itself acts as the master key.
    #
    # @param self The object pointer.
    # @param Guid The Guid value.
    # @param Version The version information (currently unused).
    # @param FilePath The description file that declares the Guid.
    #
    # @retval True The Guid value is successfully added to map table.
    # @retval False The Guid is an empty string or the map table
    # already contains a same Guid.
    #
    def __AddGuidToFilePath(self, Guid, Version, FilePath):
        if Guid == "":
            EdkLogger.info("Cannot find Guid in file %s" % FilePath)
            return False
        # Lower-case the Guid key so comparison is case insensitive;
        # setdefault returns the previously stored path on a duplicate.
        OldFilePath = self.__GuidToFilePath.setdefault(Guid.lower(), FilePath)
        if OldFilePath == FilePath:
            EdkLogger.verbose("File %s has new Guid '%s'" % (FilePath, Guid))
            return True
        else:
            EdkLogger.info("File %s has duplicate Guid with & %s" % (FilePath, OldFilePath))
            return False


    ## Gets file information from a module description file.
    #
    # Extracts Module Name, File Guid and Version number from an INF, MSA or
    # NMSA file. It supports extracting such information from a text based
    # INF file or an XML based (N)MSA file.
    #
    # @param self The object pointer.
    # @param FileName The input module file name.
    #
    # @retval True This module file represents a new module discovered
    # in current workspace.
    # @retval False This module file is not regarded as a valid module.
    # The File Guid cannot be extracted or another
    # file with the same Guid already exists.
    #
    def __GetModuleFileInfo(self, FileName):
        if fnmatch.fnmatch(FileName, "*.inf"):
            TagTuple = ("BASE_NAME", "FILE_GUID", "VERSION_STRING")
            (Name, Guid, Version) = GetTextFileInfo(FileName, TagTuple)
        else :
            XmlTag1 = "ModuleSurfaceArea/MsaHeader/ModuleName"
            XmlTag2 = "ModuleSurfaceArea/MsaHeader/GuidValue"
            XmlTag3 = "ModuleSurfaceArea/MsaHeader/Version"
            TagTuple = (XmlTag1, XmlTag2, XmlTag3)
            (Name, Guid, Version) = GetXmlFileInfo(FileName, TagTuple)

        return self.__AddGuidToFilePath(Guid, Version, FileName)


    ## Gets file information from a package description file.
    #
    # Extracts Package Name, File Guid and Version number from a DEC, SPD or
    # NSPD file. It supports extracting such information from a text based
    # DEC file or an XML based (N)SPD file. EDK Compatibility Package is
    # hardcoded to be ignored since no EDKII INF file depends on that
    # package.
    #
    # @param self The object pointer.
    # @param FileName The input package file name.
    #
    # @retval True This package file represents a new package
    # discovered in current workspace.
    # @retval False This package is not regarded as a valid package.
    # The File Guid cannot be extracted or another
    # file with the same Guid already exists.
    #
    def __GetPackageFileInfo(self, FileName):
        if fnmatch.fnmatch(FileName, "*.dec"):
            TagTuple = ("PACKAGE_NAME", "PACKAGE_GUID", "PACKAGE_VERSION")
            (Name, Guid, Version) = GetTextFileInfo(FileName, TagTuple)
        else:
            XmlTag1 = "PackageSurfaceArea/SpdHeader/PackageName"
            XmlTag2 = "PackageSurfaceArea/SpdHeader/GuidValue"
            XmlTag3 = "PackageSurfaceArea/SpdHeader/Version"
            TagTuple = (XmlTag1, XmlTag2, XmlTag3)
            (Name, Guid, Version) = GetXmlFileInfo(FileName, TagTuple)

        if Name == "EdkCompatibilityPkg":
            # Do not scan EDK Compatibility Package to avoid Guid collisions
            # with those in EDK Glue Library.
            EdkLogger.verbose("Bypass EDK Compatibility Pkg")
            return False

        return self.__AddGuidToFilePath(Guid, Version, FileName)

    ## Iterate on all package files listed in framework database file.
    #
    # Yields all package description files listed in the framework database
    # file. The framework database file describes the packages the current
    # workspace includes.
    #
    # @param self The object pointer.
    #
    def __FrameworkDatabasePackageFiles(self):
        XmlFrameworkDb = XmlParseFile(self.WorkspaceFile)
        XmlTag = "FrameworkDatabase/PackageList/Filename"
        for PackageFile in XmlElementList(XmlFrameworkDb, XmlTag):
            yield os.path.join(self.WorkspaceDir, PackageFile)


    ## Iterate on all package files in current workspace directory.
    #
    # Yields all package description files found under the current workspace
    # directory. This fallback is used when no framework database file
    # exists.
    #
    # @param self The object pointer.
    #
    def __TraverseAllPackageFiles(self):
        for Path, Dirs, Files in os.walk(self.WorkspaceDir):
            # Ignore svn version control directory.
            if ".svn" in Dirs:
                Dirs.remove(".svn")
            if "Build" in Dirs:
                Dirs.remove("Build")
            # Assume priority from high to low: DEC, NSPD, SPD.
            PackageFiles = fnmatch.filter(Files, "*.dec")
            if len(PackageFiles) == 0:
                PackageFiles = fnmatch.filter(Files, "*.nspd")
            if len(PackageFiles) == 0:
                PackageFiles = fnmatch.filter(Files, "*.spd")

            for File in PackageFiles:
                # Assume no more package description files in
                # sub-directories: clearing Dirs in place stops os.walk
                # from descending below this directory.
                del Dirs[:]
                yield os.path.join(Path, File)

    ## Iterate on all module files in current package directory.
    #
    # Yields all module description files found under the package
    # directories collected by __InitializePackageGuidInfo.
    #
    # @param self The object pointer.
    #
    def __TraverseAllModuleFiles(self):
        for PackageDir in self.__PackageDirList:
            for Path, Dirs, Files in os.walk(PackageDir):
                # Ignore svn version control directory.
                if ".svn" in Dirs:
                    Dirs.remove(".svn")
                # Assume priority from high to low: INF, NMSA, MSA.
                ModuleFiles = fnmatch.filter(Files, "*.inf")
                if len(ModuleFiles) == 0:
                    ModuleFiles = fnmatch.filter(Files, "*.nmsa")
                if len(ModuleFiles) == 0:
                    ModuleFiles = fnmatch.filter(Files, "*.msa")

                for File in ModuleFiles:
                    yield os.path.join(Path, File)

    ## Initialize package Guids info mapping table.
    #
    # Collects all package Guids mapped to package description file paths.
    # This function is invoked on demand to avoid unnecessary directory
    # scans.
    #
    # @param self The object pointer.
    #
    def __InitializePackageGuidInfo(self):
        if self.__PackageGuidInitialized:
            return

        EdkLogger.verbose("Start to collect Package Guids Info.")

        WorkspaceFile = os.path.join("Conf", "FrameworkDatabase.db")
        self.WorkspaceFile = os.path.join(self.WorkspaceDir, WorkspaceFile)

        # Try to find the framework database file to discover package lists;
        # fall back to a full workspace scan when it does not exist.
        if os.path.exists(self.WorkspaceFile):
            TraversePackage = self.__FrameworkDatabasePackageFiles
            EdkLogger.verbose("Package list bases on: %s" % self.WorkspaceFile)
        else:
            TraversePackage = self.__TraverseAllPackageFiles
            EdkLogger.verbose("Package list in: %s" % self.WorkspaceDir)

        for FileName in TraversePackage():
            if self.__GetPackageFileInfo(FileName):
                PackageDir = os.path.dirname(FileName)
                EdkLogger.verbose("Find new package directory %s" % PackageDir)
                self.__PackageDirList.append(PackageDir)

        self.__PackageGuidInitialized = True

    ## Initialize module Guids info mapping table.
    #
    # Collects all module Guids mapped to module description file paths.
    # This function is invoked on demand to avoid unnecessary directory
    # scans. Package info is collected first, since module discovery walks
    # the collected package directories.
    #
    # @param self The object pointer.
    #
    def __InitializeModuleGuidInfo(self):
        if self.__ModuleGuidInitialized:
            return
        EdkLogger.verbose("Start to collect Module Guids Info")

        self.__InitializePackageGuidInfo()
        for FileName in self.__TraverseAllModuleFiles():
            if self.__GetModuleFileInfo(FileName):
                EdkLogger.verbose("Find new module %s" % FileName)

        self.__ModuleGuidInitialized = True

    ## Get Package file path by Package guid and Version.
    #
    # Translates the Package Guid and Version to a file path relative to the
    # workspace directory. If no package in the current workspace matches
    # the input Guid, an empty file path is returned. For now, the version
    # value is simply ignored. The returned path always carries a .dec
    # extension, regardless of which description file declared the Guid.
    #
    # @param self The object pointer.
    # @param Guid The Package Guid value to look for.
    # @param Version The Package Version value to look for.
    #
    def ResolvePackageFilePath(self, Guid, Version = ""):
        self.__InitializePackageGuidInfo()

        EdkLogger.verbose("Resolve Package Guid '%s'" % Guid)
        FileName = self.__GuidToFilePath.get(Guid.lower(), "")
        if FileName == "":
            EdkLogger.info("Cannot resolve Package Guid '%s'" % Guid)
        else:
            FileName = self.WorkspaceRelativePath(FileName)
            FileName = os.path.splitext(FileName)[0] + ".dec"
            FileName = FileName.replace("\\", "/")
        return FileName

    ## Get Module file path by Module guid and Version.
    #
    # Translates the Module Guid and Version to a file path relative to the
    # workspace directory. If no module in the current workspace matches
    # the input Guid, an empty file path is returned. For now, the version
    # value is simply ignored. The returned path always carries a .inf
    # extension, regardless of which description file declared the Guid.
    #
    # @param self The object pointer.
    # @param Guid The Module Guid value to look for.
    # @param Version The Module Version value to look for.
    #
    def ResolveModuleFilePath(self, Guid, Version = ""):
        self.__InitializeModuleGuidInfo()

        EdkLogger.verbose("Resolve Module Guid '%s'" % Guid)
        FileName = self.__GuidToFilePath.get(Guid.lower(), "")
        if FileName == "":
            EdkLogger.info("Cannot resolve Module Guid '%s'" % Guid)
        else:
            FileName = self.WorkspaceRelativePath(FileName)
            FileName = os.path.splitext(FileName)[0] + ".inf"
            FileName = FileName.replace("\\", "/")
        return FileName
+
# A global class object of EdkIIWorkspaceGuidsInfo for external reference.
gEdkIIWorkspaceGuidsInfo = EdkIIWorkspaceGuidsInfo()

# This acts like the main() function for the script, unless it is 'import'ed
# into another script. It is a manual self-test that resolves a few
# well-known Guids and prints the results (Python 2 print statements).
if __name__ == '__main__':
    # Test the translation of package Guid.
    MdePkgGuid = "1E73767F-8F52-4603-AEB4-F29B510B6766"
    OldMdePkgGuid = "5e0e9358-46b6-4ae2-8218-4ab8b9bbdcec"
    # NOTE(review): despite the "package Guid" comment above, these two
    # calls use ResolveModuleFilePath -- presumably ResolvePackageFilePath
    # was intended; confirm before relying on this self-test.
    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(MdePkgGuid)
    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(OldMdePkgGuid)

    # Test the translation of module Guid.
    UefiLibGuid = "3a004ba5-efe0-4a61-9f1a-267a46ae5ba9"
    UefiDriverModelLibGuid = "52af22ae-9901-4484-8cdc-622dd5838b09"
    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(UefiLibGuid)
    print gEdkIIWorkspaceGuidsInfo.ResolveModuleFilePath(UefiDriverModelLibGuid)
diff --git a/BaseTools/Source/Python/msa2inf/LoadMsa.py b/BaseTools/Source/Python/msa2inf/LoadMsa.py new file mode 100644 index 0000000000..4995f2cd0c --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/LoadMsa.py @@ -0,0 +1,747 @@ +## @file
+# Open an MSA file and load all its contents to a ModuleClass object.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from CommonDataClass.ModuleClass import *
+from Common.XmlRoutines import *
+from Common.MigrationUtilities import *
+
+
+## Load a list of Module Cloned Records.
+#
+# Read an input Module XML DOM object and return a list of Cloned Records
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel ClonedRecords A list of Cloned Records loaded from XmlMsa.
+#
def LoadModuleClonedRecords(XmlMsa):
    """Return the list of Cloned records under ModuleDefinitions/ClonedFrom.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    ClonedNodes = XmlList(XmlMsa, "ModuleSurfaceArea/ModuleDefinitions/ClonedFrom/Cloned")
    return [LoadClonedRecord(Node) for Node in ClonedNodes]
+
+## Load Module Header.
+#
+# Read an input Module XML DOM object and return Module Header class object
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+# @param MsaFileName The file path of MSA File.
+#
+# @retvel ModuleHeader A new Module Header object loaded from XmlMsa.
+#
def LoadModuleHeader(XmlMsa, MsaFileName):
    """Build a ModuleHeaderClass object from the MsaHeader and
    ModuleDefinitions/Externs sections of an MSA DOM.

    @param XmlMsa       An XML DOM object read from an MSA file.
    @param MsaFileName  The file path of the MSA file.

    @retval ModuleHeader  A new Module Header object loaded from XmlMsa.
    """
    ModuleHeader = ModuleHeaderClass()

    XmlTag = "ModuleSurfaceArea/MsaHeader"
    MsaHeader = XmlNode(XmlMsa, XmlTag)

    # Fill name/guid/version identification and the common header fields
    # shared by all surface-area headers.
    SetIdentification(ModuleHeader, MsaHeader, "ModuleName", MsaFileName)
    SetCommonHeader(ModuleHeader, MsaHeader)

    XmlTag = "ModuleSurfaceArea/ModuleDefinitions/SupportedArchitectures"
    ModuleHeader.SupArchList = XmlElement(XmlMsa, XmlTag).split()

    # Boolean elements are stored as the text "true"/"false".
    XmlTag = "ModuleSurfaceArea/ModuleDefinitions/BinaryModule"
    if XmlElement(XmlMsa, XmlTag).lower() == "true":
        ModuleHeader.BinaryModule = True

    XmlTag = "ModuleSurfaceArea/ModuleDefinitions/OutputFileBasename"
    ModuleHeader.OutputFileBasename = XmlElement(XmlMsa, XmlTag)

    # NOTE(review): this XmlTag assignment is dead code ("ClonedForm" is a
    # typo of "ClonedFrom" and the variable is not used) --
    # LoadModuleClonedRecords queries its own tag internally.
    XmlTag = "ModuleSurfaceArea/ModuleDefinitions/ClonedForm"
    ModuleHeader.ClonedFrom = LoadModuleClonedRecords(XmlMsa)

    XmlTag = "ModuleSurfaceArea/Externs/PcdDriverTypes"
    ModuleHeader.PcdIsDriver = XmlElement(XmlMsa, XmlTag)

    XmlTag = "ModuleSurfaceArea/Externs/TianoR8FlashMap_h"
    if XmlElement(XmlMsa, XmlTag).lower() == "true":
        ModuleHeader.TianoR8FlashMap_h = True

    # Accumulate "NAME VALUE" specification strings into a dictionary.
    XmlTag = "ModuleSurfaceArea/Externs/Specification"
    for Specification in XmlElementList(XmlMsa, XmlTag):
        AddToSpecificationDict(ModuleHeader.Specification, Specification)

    return ModuleHeader
+
+
+## Load a list of Module Library Classes.
+#
+# Read an input Module XML DOM object and return a list of Library Classes
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel LibraryClasses A list of Library Classes loaded from XmlMsa.
+#
def LoadModuleLibraryClasses(XmlMsa):
    """Return the LibraryClass records declared under
    LibraryClassDefinitions.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    ClassNodes = XmlList(XmlMsa, "ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass")
    return [LoadLibraryClass(Node) for Node in ClassNodes]
+
+
+## Load a new Module Source class object.
+#
+# Read an input XML Source DOM object and return an object of Source
+# contained in the DOM object.
+#
+# @param XmlFilename A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleSource A new Source object created by XmlFilename.
+#
def LoadModuleSource(XmlFilename):
    """Create a ModuleSourceFileClass object from a SourceFiles/Filename
    node.

    @param XmlFilename  A child XML DOM object in the Module XML DOM.

    @retval ModuleSource  A new Source object created from XmlFilename.
    """
    Source = ModuleSourceFileClass()

    # The element text is the source file path; the build-related hints are
    # carried as attributes on the same element.
    Source.SourceFile = XmlElementData(XmlFilename)
    Source.TagName = XmlAttribute(XmlFilename, "TagName")
    Source.ToolCode = XmlAttribute(XmlFilename, "ToolCode")
    Source.ToolChainFamily = XmlAttribute(XmlFilename, "ToolChainFamily")

    SetCommon(Source, XmlFilename)

    return Source
+
+
+## Load a list of Module Sources.
+#
+# Read an input Module XML DOM object and return a list of Sources
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Sources A list of Sources loaded from XmlMsa.
+#
def LoadModuleSources(XmlMsa):
    """Return the Source records declared under SourceFiles.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    SourceNodes = XmlList(XmlMsa, "ModuleSurfaceArea/SourceFiles/Filename")
    return [LoadModuleSource(Node) for Node in SourceNodes]
+
+
+## Load a new Module Binary class object.
+#
+# Read an input XML Binary DOM object and return an object of Binary
+# contained in the DOM object.
+#
+# @param XmlFilename A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleBinary A new Binary object created by XmlFilename.
+#
def LoadModuleBinary(XmlFilename):
    """Create a ModuleBinaryFileClass object from a BinaryFiles/Filename
    node.

    Bug fix: the original function built the object but never returned it,
    so LoadModuleBinaries produced a list of None values.

    @param XmlFilename  A child XML DOM object in the Module XML DOM.

    @retval ModuleBinary  A new Binary object created from XmlFilename.
    """
    ModuleBinary = ModuleBinaryFileClass()

    ModuleBinary.BinaryFile = XmlElementData(XmlFilename)

    # NOTE(review): sibling loaders read attributes with XmlAttribute;
    # confirm XmlElementAttribute is the intended helper here.
    XmlTag = "FileType"
    ModuleBinary.FileType = XmlElementAttribute(XmlFilename, XmlTag)

    SetCommon(ModuleBinary, XmlFilename)

    return ModuleBinary
+
+
+## Load a list of Module Binaries.
+#
+# Read an input Module XML DOM object and return a list of Binaries
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Binaries A list of Binaries loaded from XmlMsa.
+#
def LoadModuleBinaries(XmlMsa):
    """Return the Binary records declared under BinaryFiles.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    BinaryNodes = XmlList(XmlMsa, "ModuleSurfaceArea/BinaryFiles/Filename")
    return [LoadModuleBinary(Node) for Node in BinaryNodes]
+
+
+## Load a list of Module Non Processed Files.
+#
+# Read an input Module XML DOM object and return a list of Non Processed Files
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel NonProcessedFiles A list of Non Processed Files loaded from XmlMsa.
+#
def LoadModuleNonProcessedFiles(XmlMsa):
    """Return the element values listed under NonProcessedFiles/Filename.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    return XmlElementList(XmlMsa, "ModuleSurfaceArea/NonProcessedFiles/Filename")
+
+
+## Load a new Module Package Dependency class object.
+#
+# Read an input XML PackageDependency DOM object and return an object of Package Dependency
+# contained in the DOM object.
+#
+# @param XmlPackage A child XML DOM object in Module XML DOM.
+#
+# @retvel ModulePackageDependency A new Package Dependency object created by XmlPackage.
+#
def LoadModulePackageDependency(XmlPackage):
    """Create a ModulePackageDependencyClass object from a
    PackageDependencies/Package node.

    Only the PackageGuid attribute is recorded here; resolution of the
    package name and version is deferred to a later step (original TODO).

    @param XmlPackage  A child XML DOM object in the Module XML DOM.

    @retval ModulePackageDependency  A new Package Dependency object.
    """
    Dependency = ModulePackageDependencyClass()

    Dependency.PackageGuid = XmlAttribute(XmlPackage, "PackageGuid")
    SetCommon(Dependency, XmlPackage)

    return Dependency
+
+
+## Load a list of Module Package Dependencies.
+#
+# Read an input Module XML DOM object and return a list of Package Dependencies
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel PackageDependencies A list of Package Dependencies loaded from XmlMsa.
+#
def LoadModulePackageDependencies(XmlMsa):
    """Return the Package Dependency records declared under
    PackageDependencies.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    PackageNodes = XmlList(XmlMsa, "ModuleSurfaceArea/PackageDependencies/Package")
    return [LoadModulePackageDependency(Node) for Node in PackageNodes]
+
+
+## Load a list of Module Protocols.
+#
+# Read an input Module XML DOM object and return a list of Protocols
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Protocols A list of Protocols loaded from XmlMsa.
+#
def LoadModuleProtocols(XmlMsa):
    """Return Protocol and ProtocolNotify entries as one combined list,
    Protocol entries first.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    ProtocolNodes = (XmlList(XmlMsa, "ModuleSurfaceArea/Protocols/Protocol")
                     + XmlList(XmlMsa, "ModuleSurfaceArea/Protocols/ProtocolNotify"))
    return [LoadGuidProtocolPpiCommon(Node) for Node in ProtocolNodes]
+
+
+## Load a list of Module Ppis.
+#
+# Read an input Module XML DOM object and return a list of Ppis
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Ppis A list of Ppis loaded from XmlMsa.
+#
def LoadModulePpis(XmlMsa):
    """Return Ppi and PpiNotify entries as one combined list, Ppi entries
    first.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    PpiNodes = (XmlList(XmlMsa, "ModuleSurfaceArea/PPIs/Ppi")
                + XmlList(XmlMsa, "ModuleSurfaceArea/PPIs/PpiNotify"))
    return [LoadGuidProtocolPpiCommon(Node) for Node in PpiNodes]
+
+
+## Load a new Module Event class object.
+#
+# Read an input XML Event DOM object and return an object of Event
+# contained in the DOM object.
+#
+# @param XmlEvent A child XML DOM object in Module XML DOM.
+# @param Type Specify the event type: SIGNAL_EVENT or CREATE_EVENT.
+#
+# @retvel ModuleEvent A new Event object created by XmlEvent.
+#
def LoadModuleEvent(XmlEvent, Type):
    """Create a ModuleEventClass object of the given Type from an
    EventTypes node.

    @param XmlEvent  A child XML DOM object in the Module XML DOM.
    @param Type      The event type: "SIGNAL_EVENT" or "CREATE_EVENT".

    @retval ModuleEvent  A new Event object created from XmlEvent.
    """
    Event = ModuleEventClass()

    Event.CName = XmlElement(XmlEvent, "EventTypes/EventType")
    Event.GuidCName = XmlAttribute(XmlEvent, "EventGuidCName")
    Event.Type = Type

    SetCommon(Event, XmlEvent)

    return Event
+
+
+## Load a list of Module Events.
+#
+# Read an input Module XML DOM object and return a list of Events
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Events A list of Events loaded from XmlMsa.
+#
def LoadModuleEvents(XmlMsa):
    """Load CREATE_EVENT then SIGNAL_EVENT records from the Events section.

    @param XmlMsa  An XML DOM object read from an MSA file.

    @retval Events  A list of Event objects loaded from XmlMsa.
    """
    Events = []
    Sections = [("ModuleSurfaceArea/Events/CreateEvents/EventTypes", "CREATE_EVENT"),
                ("ModuleSurfaceArea/Events/SignalEvents/EventTypes", "SIGNAL_EVENT")]
    for SectionTag, EventType in Sections:
        for XmlEvent in XmlList(XmlMsa, SectionTag):
            Events.append(LoadModuleEvent(XmlEvent, EventType))

    return Events
+
+
+## Load a new Module Hob class object.
+#
+# Read an input XML Hob DOM object and return an object of Hob
+# contained in the DOM object.
+#
+# @param XmlHob A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleHob A new Hob object created by XmlHob.
+#
def LoadModuleHob(XmlHob):
    """Create a ModuleHobClass object from a Hobs/HobTypes node.

    @param XmlHob  A child XML DOM object in the Module XML DOM.

    @retval ModuleHob  A new Hob object created from XmlHob.
    """
    Hob = ModuleHobClass()

    Hob.Type = XmlElement(XmlHob, "HobTypes/HobType")
    Hob.GuidCName = XmlAttribute(XmlHob, "HobGuidCName")

    SetCommon(Hob, XmlHob)

    return Hob
+
+
+## Load a list of Module Hobs.
+#
+# Read an input Module XML DOM object and return a list of Hobs
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Hobs A list of Hobs loaded from XmlMsa.
+#
def LoadModuleHobs(XmlMsa):
    """Return the Hob records declared under Hobs.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    HobNodes = XmlList(XmlMsa, "ModuleSurfaceArea/Hobs/HobTypes")
    return [LoadModuleHob(Node) for Node in HobNodes]
+
+
+## Load a new Module Variable class object.
+#
+# Read an input XML Variable DOM object and return an object of Variable
+# contained in the DOM object.
+#
+# @param XmlVariable A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleVariable A new Variable object created by XmlVariable.
+#
def LoadModuleVariable(XmlVariable):
    """Create a ModuleVariableClass object from a Variables/Variable node.

    The VariableName element holds space-separated hexadecimal code points
    which are decoded into a unicode name string.

    Fix: the bare "except:" around the decode (which also swallowed
    KeyboardInterrupt/SystemExit) is narrowed to the exceptions the decode
    can actually raise.

    @param XmlVariable  A child XML DOM object in the Module XML DOM.

    @retval ModuleVariable  A new Variable object created from XmlVariable.
    """
    ModuleVariable = ModuleVariableClass()

    XmlTag = "Variable/VariableName"
    HexWordArray = XmlElement(XmlVariable, XmlTag).split()
    try:
        # unichr: this tool targets Python 2 (see the print statements in
        # sibling scripts of this package).
        ModuleVariable.Name = "".join([unichr(int(Word, 16)) for Word in HexWordArray])
    except (ValueError, OverflowError):
        # Malformed hex word or out-of-range code point: fall back to an
        # empty name instead of aborting the whole conversion.
        ModuleVariable.Name = ""

    XmlTag = "Variable/GuidC_Name"
    ModuleVariable.GuidCName = XmlElement(XmlVariable, XmlTag)

    SetCommon(ModuleVariable, XmlVariable)

    return ModuleVariable
+
+
+## Load a list of Module Variables.
+#
+# Read an input Module XML DOM object and return a list of Variables
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel Variables A list of Variables loaded from XmlMsa.
+#
def LoadModuleVariables(XmlMsa):
    """Return the Variable records declared under Variables.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    VariableNodes = XmlList(XmlMsa, "ModuleSurfaceArea/Variables/Variable")
    return [LoadModuleVariable(Node) for Node in VariableNodes]
+
+
+## Load a new Module Boot Mode class object.
+#
+# Read an input XML BootMode DOM object and return an object of Boot Mode
+# contained in the DOM object.
+#
+# @param XmlBootMode A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleBootMode A new Boot Mode object created by XmlBootMode.
+#
def LoadModuleBootMode(XmlBootMode):
    """Create a ModuleBootModeClass object from a BootModes/BootMode node.

    @param XmlBootMode  A child XML DOM object in the Module XML DOM.

    @retval ModuleBootMode  A new Boot Mode object created from XmlBootMode.
    """
    BootMode = ModuleBootModeClass()

    BootMode.Name = XmlAttribute(XmlBootMode, "BootModeName")

    SetCommon(BootMode, XmlBootMode)

    return BootMode
+
+
+## Load a list of Module Boot Modes.
+#
+# Read an input Module XML DOM object and return a list of Boot Modes
+# contained in the DOM object.
+#
+# @param XmlMsa An XML DOM object read from MSA file.
+#
+# @retvel BootModes A list of Boot Modes loaded from XmlMsa.
+#
def LoadModuleBootModes(XmlMsa):
    """Return the Boot Mode records declared under BootModes.

    @param XmlMsa  An XML DOM object read from an MSA file.
    """
    BootModeNodes = XmlList(XmlMsa, "ModuleSurfaceArea/BootModes/BootMode")
    return [LoadModuleBootMode(Node) for Node in BootModeNodes]
+
+
+## Load a new Module System Table class object.
+#
+# Read an input XML SystemTable DOM object and return an object of System Table
+# contained in the DOM object.
+#
+# @param XmlSystemTable A child XML DOM object in Module XML DOM.
+#
+# @retvel ModuleSystemTable A new System Table object created by XmlSystemTable.
+#
def LoadModuleSystemTable(XmlSystemTable):
    """Create a ModuleSystemTableClass object from a
    SystemTables/SystemTableCNames node.

    @param XmlSystemTable  A child XML DOM object in the Module XML DOM.

    @retval ModuleSystemTable  A new System Table object.
    """
    SystemTable = ModuleSystemTableClass()

    SystemTable.CName = XmlElement(XmlSystemTable, "SystemTable/SystemTableCName")

    SetCommon(SystemTable, XmlSystemTable)

    return SystemTable
+
+
## Collect all System Table declarations from a Module Surface Area.
#
# Walk the MSA DOM and convert every SystemTables/SystemTableCNames node
# into a System Table object via LoadModuleSystemTable.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval SystemTables A list of System Table objects loaded from XmlMsa.
#
def LoadModuleSystemTables(XmlMsa):
    TableNodes = XmlList(XmlMsa, "ModuleSurfaceArea/SystemTables/SystemTableCNames")
    return [LoadModuleSystemTable(Node) for Node in TableNodes]
+
+
## Load a new Module Data Hub class object.
#
# Read an input XML DataHub DOM object and return an object of Data Hub
# contained in the DOM object.
#
# @param XmlDataHub A child XML DOM object in Module XML DOM.
#
# @retval ModuleDataHub A new Data Hub object created by XmlDataHub.
#
def LoadModuleDataHub(XmlDataHub):
    ModuleDataHub = ModuleDataHubClass()

    # The original assigned XmlTag = "DataHub/DataHubCName" but then queried
    # the unrelated literal "DataHubCName", leaving the assignment dead.  Keep
    # the tag that was actually queried so behavior is unchanged.
    # NOTE(review): sibling loaders pass "<NodeName>/<Child>" style paths;
    # confirm whether "DataHubCName" or "DataHub/DataHubCName" is intended.
    XmlTag = "DataHubCName"
    ModuleDataHub.CName = XmlElement(XmlDataHub, XmlTag)

    SetCommon(ModuleDataHub, XmlDataHub)

    return ModuleDataHub
+
+
## Collect all Data Hub declarations from a Module Surface Area.
#
# Walk the MSA DOM and convert every DataHubs/DataHubRecord node into a
# Data Hub object via LoadModuleDataHub.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval DataHubs A list of Data Hub objects loaded from XmlMsa.
#
def LoadModuleDataHubs(XmlMsa):
    DataHubNodes = XmlList(XmlMsa, "ModuleSurfaceArea/DataHubs/DataHubRecord")
    return [LoadModuleDataHub(Node) for Node in DataHubNodes]
+
+
## Load a new Module Hii Package class object.
#
# Read an input XML HiiPackage DOM object and return an object of Hii Package
# contained in the DOM object.
#
# @param XmlHiiPackage A child XML DOM object in Module XML DOM.
#
# @retval ModuleHiiPackage A new Hii Package object created by XmlHiiPackage.
#
def LoadModuleHiiPackage(XmlHiiPackage):
    ModuleHiiPackage = ModuleHiiPackageClass()

    # The original assigned XmlTag = "HiiPackage/HiiPackageCName" but then
    # queried the unrelated literal "HiiCName", leaving the assignment dead.
    # Keep the tag that was actually queried so behavior is unchanged.
    # NOTE(review): confirm whether "HiiCName" or "HiiPackage/HiiPackageCName"
    # is the intended element path.
    XmlTag = "HiiCName"
    ModuleHiiPackage.CName = XmlElement(XmlHiiPackage, XmlTag)

    SetCommon(ModuleHiiPackage, XmlHiiPackage)

    return ModuleHiiPackage
+
+
## Collect all Hii Package declarations from a Module Surface Area.
#
# Walk the MSA DOM and convert every HiiPackages/HiiPackage node into a
# Hii Package object via LoadModuleHiiPackage.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval HiiPackages A list of Hii Package objects loaded from XmlMsa.
#
def LoadModuleHiiPackages(XmlMsa):
    HiiNodes = XmlList(XmlMsa, "ModuleSurfaceArea/HiiPackages/HiiPackage")
    return [LoadModuleHiiPackage(Node) for Node in HiiNodes]
+
+
## Collect all GUID declarations from a Module Surface Area.
#
# Walk the MSA DOM and convert every Guids/GuidCNames node into a Guid
# object via the shared LoadGuidProtocolPpiCommon loader.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval Guids A list of Guid objects loaded from XmlMsa.
#
def LoadModuleGuids(XmlMsa):
    GuidNodes = XmlList(XmlMsa, "ModuleSurfaceArea/Guids/GuidCNames")
    return [LoadGuidProtocolPpiCommon(Node) for Node in GuidNodes]
+
+
## Collect all coded PCD entries from a Module Surface Area.
#
# Walk the MSA DOM and convert every PcdCoded/PcdEntry node into a Pcd
# object via the shared LoadPcd loader.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval PcdCodes A list of Pcd objects loaded from XmlMsa.
#
def LoadModulePcdCodes(XmlMsa):
    PcdNodes = XmlList(XmlMsa, "ModuleSurfaceArea/PcdCoded/PcdEntry")
    return [LoadPcd(Node) for Node in PcdNodes]
+
+
## Collect the module image externs (entry point / unload image).
#
# Walk every Externs/Extern node in the MSA DOM and keep only those that
# declare a ModuleEntryPoint and/or a ModuleUnloadImage; nodes declaring
# neither are skipped.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval ExternImages A list of Extern Image objects loaded from XmlMsa.
#
def LoadModuleExternImages(XmlMsa):
    ExternImages = []

    for XmlExtern in XmlList(XmlMsa, "ModuleSurfaceArea/Externs/Extern"):
        EntryPoint = XmlElement(XmlExtern, "Extern/ModuleEntryPoint")
        UnloadImage = XmlElement(XmlExtern, "Extern/ModuleUnloadImage")
        # An Extern node may describe a different kind of extern entirely;
        # only keep nodes that carry image-related symbols.
        if EntryPoint == "" and UnloadImage == "":
            continue

        Image = ModuleExternImageClass()
        Image.ModuleEntryPoint = EntryPoint
        Image.ModuleUnloadImage = UnloadImage
        ExternImages.append(Image)

    return ExternImages
+
+
## Collect the library externs (constructor / destructor).
#
# Walk every Externs/Extern node in the MSA DOM and keep only those that
# declare a Constructor and/or a Destructor; nodes declaring neither are
# skipped.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval ExternLibraries A list of Extern Library objects loaded from XmlMsa.
#
def LoadModuleExternLibraries(XmlMsa):
    ExternLibraries = []

    for XmlExtern in XmlList(XmlMsa, "ModuleSurfaceArea/Externs/Extern"):
        CtorName = XmlElement(XmlExtern, "Extern/Constructor")
        DtorName = XmlElement(XmlExtern, "Extern/Destructor")
        # Only keep nodes that carry library lifecycle symbols.
        if CtorName == "" and DtorName == "":
            continue

        Library = ModuleExternLibraryClass()
        Library.Constructor = CtorName
        Library.Destructor = DtorName
        ExternLibraries.append(Library)

    return ExternLibraries
+
+
## Collect the driver-model externs (driver binding et al.).
#
# Walk every Externs/Extern node in the MSA DOM and keep only those that
# declare a DriverBinding; the optional ComponentName, DriverConfig and
# DriverDiag symbols are carried along with it.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval ExternDrivers A list of Extern Driver objects loaded from XmlMsa.
#
def LoadModuleExternDrivers(XmlMsa):
    ExternDrivers = []

    for XmlExtern in XmlList(XmlMsa, "ModuleSurfaceArea/Externs/Extern"):
        Binding = XmlElement(XmlExtern, "Extern/DriverBinding")
        CompName = XmlElement(XmlExtern, "Extern/ComponentName")
        Config = XmlElement(XmlExtern, "Extern/DriverConfig")
        Diag = XmlElement(XmlExtern, "Extern/DriverDiag")
        # DriverBinding is the anchor protocol; without it the other three
        # driver-model symbols are meaningless, so skip the node.
        if Binding == "":
            continue

        Driver = ModuleExternDriverClass()
        Driver.DriverBinding = Binding
        Driver.ComponentName = CompName
        Driver.DriverConfig = Config
        Driver.DriverDiag = Diag
        ExternDrivers.append(Driver)

    return ExternDrivers
+
+
## Collect the callback externs (SetVirtualAddressMap / ExitBootServices).
#
# Walk every Externs/Extern node in the MSA DOM and keep only those that
# declare a SetVirtualAddressMapCallBack and/or ExitBootServicesCallBack;
# nodes declaring neither are skipped.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval ExternCallBacks A list of Extern Call Back objects loaded from XmlMsa.
#
def LoadModuleExternCallBacks(XmlMsa):
    ExternCallBacks = []

    for XmlExtern in XmlList(XmlMsa, "ModuleSurfaceArea/Externs/Extern"):
        SvamCallBack = XmlElement(XmlExtern, "Extern/SetVirtualAddressMapCallBack")
        EbsCallBack = XmlElement(XmlExtern, "Extern/ExitBootServicesCallBack")
        # Only keep nodes that carry callback symbols.
        if SvamCallBack == "" and EbsCallBack == "":
            continue

        CallBack = ModuleExternCallBackClass()
        CallBack.ExitBootServicesCallBack = EbsCallBack
        CallBack.SetVirtualAddressMapCallBack = SvamCallBack
        ExternCallBacks.append(CallBack)

    return ExternCallBacks
+
+
## Collect all build options from a Module Surface Area.
#
# Walk the MSA DOM and convert every ModuleBuildOptions/Options/Option
# node into a Build Option object via the shared LoadBuildOption loader.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval BuildOptions A list of Build Option objects loaded from XmlMsa.
#
def LoadModuleBuildOptions(XmlMsa):
    OptionNodes = XmlList(XmlMsa, "ModuleSurfaceArea/ModuleBuildOptions/Options/Option")
    return [LoadBuildOption(Node) for Node in OptionNodes]
+
+
## Collect all user extensions from a Module Surface Area.
#
# Walk the MSA DOM and convert every UserExtensions node into a User
# Extension object via the shared LoadUserExtensions loader.
#
# @param XmlMsa An XML DOM object read from an MSA file.
#
# @retval UserExtensions A list of User Extension objects loaded from XmlMsa.
#
def LoadModuleUserExtensions(XmlMsa):
    ExtensionNodes = XmlList(XmlMsa, "ModuleSurfaceArea/UserExtensions")
    return [LoadUserExtensions(Node) for Node in ExtensionNodes]
+
## Load a new Module class object.
#
# Parse an MSA (Module Surface Area) file and populate every section of a
# new ModuleClass object.  Each Load* helper extracts one MSA section from
# the parsed DOM; this function is only the aggregation point.
#
# @param MsaFileName Path of the MSA file to parse (a file name, not a DOM).
#
# @retval Module A new Module class object loaded from the MSA file.
#
def LoadMsa(MsaFileName):
    XmlMsa = XmlParseFile(MsaFileName)
    EdkLogger.verbose("Load MSA File: %s" % MsaFileName)

    Module = ModuleClass()
    Module.Header = LoadModuleHeader(XmlMsa, MsaFileName)
    Module.LibraryClasses = LoadModuleLibraryClasses(XmlMsa)
    Module.Sources = LoadModuleSources(XmlMsa)
    # NOTE(review): StoreInf reads Module.Binaries, but this loader fills
    # Module.BinaryFiles -- confirm which attribute ModuleClass declares.
    Module.BinaryFiles = LoadModuleBinaries(XmlMsa)
    Module.NonProcessedFiles = LoadModuleNonProcessedFiles(XmlMsa)
    Module.PackageDependencies = LoadModulePackageDependencies(XmlMsa)
    Module.Protocols = LoadModuleProtocols(XmlMsa)
    Module.Ppis = LoadModulePpis(XmlMsa)
    Module.Events = LoadModuleEvents(XmlMsa)
    Module.Hobs = LoadModuleHobs(XmlMsa)
    Module.Variables = LoadModuleVariables(XmlMsa)
    Module.BootModes = LoadModuleBootModes(XmlMsa)
    Module.SystemTables = LoadModuleSystemTables(XmlMsa)
    Module.DataHubs = LoadModuleDataHubs(XmlMsa)
    Module.HiiPackages = LoadModuleHiiPackages(XmlMsa)
    Module.Guids = LoadModuleGuids(XmlMsa)
    Module.PcdCodes = LoadModulePcdCodes(XmlMsa)
    Module.ExternImages = LoadModuleExternImages(XmlMsa)
    Module.ExternLibraries = LoadModuleExternLibraries(XmlMsa)
    Module.ExternDrivers = LoadModuleExternDrivers(XmlMsa)
    Module.ExternCallBacks = LoadModuleExternCallBacks(XmlMsa)
    Module.BuildOptions = LoadModuleBuildOptions(XmlMsa)
    Module.UserExtensions = LoadModuleUserExtensions(XmlMsa)

    return Module
+
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  This module only provides loader helpers, so there is
# nothing to execute when run directly.
if __name__ == '__main__':
    pass
\ No newline at end of file diff --git a/BaseTools/Source/Python/msa2inf/Msa2Inf.py b/BaseTools/Source/Python/msa2inf/Msa2Inf.py new file mode 100644 index 0000000000..5873b1d198 --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/Msa2Inf.py @@ -0,0 +1,44 @@ +## @file
+# Convert an XML-based MSA file to a text-based INF file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sys
+from Common.MigrationUtilities import *
+from LoadMsa import LoadMsa
+from StoreInf import StoreInf
+from ConvertModule import ConvertMsaModuleToInfModule
+
+## Entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful.
+# @retval 1 Tool failed.
+#
+def Main():
+ try:
+ Options, InputFile = MigrationOptionParser("MSA", "INF", "%prog")
+ Module = LoadMsa(InputFile)
+ ConvertMsaModuleToInfModule(Module)
+ StoreInf(Options.OutputFile, Module)
+ return 0
+ except Exception, e:
+ print e
+ return 1
+
# Standard script entry: run Main() and propagate its status code to the shell.
if __name__ == '__main__':
    sys.exit(Main())
diff --git a/BaseTools/Source/Python/msa2inf/StoreInf.py b/BaseTools/Source/Python/msa2inf/StoreInf.py new file mode 100644 index 0000000000..bb58dc2f2f --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/StoreInf.py @@ -0,0 +1,442 @@ +## @file
+# Store a Module class object to an INF file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from LoadMsa import LoadMsa
+from CommonDataClass.ModuleClass import *
+from Common.MigrationUtilities import *
+
## Get the produced library class.
#
# Build the LIBRARY_CLASS define value from the list of library classes the
# module produces.  Each class becomes "Name" or "Name|Mod1 Mod2" when it is
# restricted to particular module types; multiple classes are joined by "|".
#
# @param LibraryClasses A list of library classes the module produces.
#
# @retval LibraryClassItem A text format library class item.
#
def GetModuleLibraryClass(LibraryClasses):
    Produced = []
    for Lib in LibraryClasses:
        Item = Lib.LibraryClass
        SupModules = " ".join(Lib.SupModuleList)
        if SupModules != "":
            Item = Item + "|" + SupModules
        Produced.append(Item)

    return "|".join(Produced)
+
+
## Store Defines section.
#
# Write [Defines] section to the InfFile based on Module class object.
# Only non-empty header fields are emitted; entry point / unload image and
# constructor / destructor come from the first extern record of each kind.
#
# @param InfFile The output INF file to store the Defines section.
# @param Module An input Module class object.
#
def StoreModuleDefinesSection(InfFile, Module):
    ModuleHeader = Module.Header

    DefinesTupleList = []
    DefinesTupleList.append(("INF_VERSION", ModuleHeader.InfVersion))

    if ModuleHeader.Name != "":
        DefinesTupleList.append(("BASE_NAME", ModuleHeader.Name))

    if ModuleHeader.Guid != "":
        DefinesTupleList.append(("FILE_GUID", ModuleHeader.Guid))

    if ModuleHeader.Version != "":
        DefinesTupleList.append(("VERSION_STRING", ModuleHeader.Version))

    if ModuleHeader.ModuleType != "":
        DefinesTupleList.append(("MODULE_TYPE", ModuleHeader.ModuleType))

    if ModuleHeader.EfiSpecificationVersion != "":
        DefinesTupleList.append(("EFI_SPECIFICATION_VERSION", ModuleHeader.EfiSpecificationVersion))

    if ModuleHeader.EdkReleaseVersion != "":
        DefinesTupleList.append(("EDK_RELEASE_VERSION", ModuleHeader.EdkReleaseVersion))

    ProducedLibraryClass = GetModuleLibraryClass(ModuleHeader.LibraryClass)
    if ProducedLibraryClass != "":
        DefinesTupleList.append(("LIBRARY_CLASS", ProducedLibraryClass))

    if ModuleHeader.MakefileName != "":
        # Fix: the guard tests MakefileName, so append the same attribute.
        # The original read "MakeFileName" (different capitalization) and
        # raised AttributeError whenever a makefile name was present.
        DefinesTupleList.append(("MAKEFILE_NAME", ModuleHeader.MakefileName))

    if ModuleHeader.PcdIsDriver != "":
        # NOTE(review): emits key "PCD_DRIVER" with fixed value "TRUE";
        # confirm against the INF specification's PCD driver define.
        DefinesTupleList.append(("PCD_DRIVER", "TRUE"))

    if len(Module.ExternImages) > 0:
        ModuleEntryPoint = Module.ExternImages[0].ModuleEntryPoint
        ModuleUnloadImage = Module.ExternImages[0].ModuleUnloadImage
        if ModuleEntryPoint != "":
            DefinesTupleList.append(("ENTRY_POINT", ModuleEntryPoint))
        if ModuleUnloadImage != "":
            DefinesTupleList.append(("UNLOAD_IMAGE", ModuleUnloadImage))

    if len(Module.ExternLibraries) > 0:
        Constructor = Module.ExternLibraries[0].Constructor
        Destructor = Module.ExternLibraries[0].Destructor
        if Constructor != "":
            DefinesTupleList.append(("CONSTRUCTOR", Constructor))
        if Destructor != "":
            DefinesTupleList.append(("DESTRUCTOR", Destructor))

    StoreDefinesSection(InfFile, DefinesTupleList)
+
+
## Return a Module Source Item.
#
# Format one ModuleSourceFile object as a "|"-separated INF source entry;
# empty trailing fields are trimmed off.
#
# @param ModuleSourceFile An input ModuleSourceFile class object.
#
# @retval SourceItem A Module Source Item.
#
def GetModuleSourceItem(ModuleSourceFile):
    Fields = [
        ModuleSourceFile.SourceFile,
        ModuleSourceFile.ToolChainFamily,
        ModuleSourceFile.TagName,
        ModuleSourceFile.ToolCode,
        ModuleSourceFile.FeatureFlag,
        ]
    return "|".join(Fields).rstrip("|")
+
+
## Store Sources section.
#
# Write the [Sources] section to the InfFile based on the Module class
# object, one formatted line per source file.
#
# @param InfFile The output INF file to store the Sources section.
# @param Module An input Module class object.
#
def StoreModuleSourcesSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Sources", GetModuleSourceItem, Module.Sources))
+
+
## Return a Module Binary Item.
#
# Format one ModuleBinaryFile object as a "|"-separated INF binary entry;
# empty trailing fields are trimmed off.
#
# @param ModuleBinaryFile An input ModuleBinaryFile class object.
#
# @retval BinaryItem A Module Binary Item.
#
def GetModuleBinaryItem(ModuleBinaryFile):
    Fields = [
        ModuleBinaryFile.FileType,
        ModuleBinaryFile.BinaryFile,
        ModuleBinaryFile.Target,
        ModuleBinaryFile.FeatureFlag,
        ]
    return "|".join(Fields).rstrip("|")
+
+
## Store Binaries section.
#
# Write the [Binaries] section to the InfFile based on the Module class
# object, one formatted line per binary file.
#
# NOTE(review): this reads Module.Binaries while LoadMsa populates
# Module.BinaryFiles -- confirm which attribute ModuleClass declares.
#
# @param InfFile The output INF file to store the Binaries section.
# @param Module An input Module class object.
#
def StoreModuleBinariesSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Binaries", GetModuleBinaryItem, Module.Binaries))
+
+
## Return a Module Library Class Item.
#
# Format one consumed LibraryClass object as a "|"-separated INF entry;
# classes the module itself produces ("ALWAYS_PRODUCED" usage) are not
# consumed and yield an empty string.
#
# @param LibraryClass An input LibraryClass class object.
#
# @retval LibraryClassItem A Module Library Class Item.
#
def GetModuleLibraryClassItem(LibraryClass):
    # Produced classes belong in the Defines LIBRARY_CLASS entry, not here.
    if "ALWAYS_PRODUCED" in LibraryClass.Usage:
        return ""

    Fields = (
        LibraryClass.LibraryClass,
        LibraryClass.RecommendedInstance,
        LibraryClass.FeatureFlag,
        )
    return "|".join(Fields).rstrip("|")
+
+
## Store Library Classes section.
#
# Write the [LibraryClasses] section to the InfFile based on the Module
# class object, one formatted line per consumed library class.
#
# @param InfFile The output INF file to store the Library Classes section.
# @param Module An input Module class object.
#
def StoreModuleLibraryClassesSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("LibraryClasses", GetModuleLibraryClassItem, Module.LibraryClasses))
+
+
## Return a Module Package Item.
#
# A package dependency is represented in the INF simply by the path of its
# declaration file.
#
# @param PackageDependency An input PackageDependency class object.
#
# @retval PackageItem A Module Package Item.
#
def GetModulePackageItem(PackageDependency):
    PackagePath = PackageDependency.FilePath
    return PackagePath
+
+
## Store Packages section.
#
# Write the [Packages] section to the InfFile based on the Module class
# object, one declaration-file path per dependency.
#
# @param InfFile The output INF file to store the Packages section.
# @param Module An input Module class object.
#
def StoreModulePackagesSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Packages", GetModulePackageItem, Module.PackageDependencies))
+
+
## Return a Module Guid C Name Item.
#
# GUID-like records are heterogeneous: Guid/Protocol/Ppi objects expose
# GuidCName while Event/Hob/SystemTable-style objects expose CName.  Try
# the former and fall back to the latter.
#
# @param Guid An input Guid-like class object.
#
# @retval GuidCNameItem A Module Guid C Name Item.
#
def GetModuleGuidCNameItem(Guid):
    try:
        return Guid.GuidCName
    except AttributeError:
        # Fix: the original bare "except:" would also mask unrelated errors
        # (e.g. KeyboardInterrupt); only the missing attribute is expected.
        return Guid.CName
+
+
## Store Protocols section.
#
# Write the [Protocols] section to the InfFile based on the Module class
# object, one GUID C name per protocol.
#
# @param InfFile The output INF file to store the Protocols section.
# @param Module An input Module class object.
#
def StoreModuleProtocolsSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Protocols", GetModuleGuidCNameItem, Module.Protocols))
+
+
## Store Ppis section.
#
# Write the [Ppis] section to the InfFile based on the Module class
# object, one GUID C name per PPI.
#
# @param InfFile The output INF file to store the Ppis section.
# @param Module An input Module class object.
#
def StoreModulePpisSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Ppis", GetModuleGuidCNameItem, Module.Ppis))
+
+
## Store Guids section.
#
# Write the [Guids] section to the InfFile based on the Module class
# object.  Events, HOBs, variables, system tables, data hubs and HII
# packages all surface as GUID C names, so they are merged with the
# plain GUID list before formatting.
#
# @param InfFile The output INF file to store the Guids section.
# @param Module An input Module class object.
#
def StoreModuleGuidsSection(InfFile, Module):
    GuidSources = (Module.Guids + Module.Events + Module.Hobs +
                   Module.Variables + Module.SystemTables +
                   Module.DataHubs + Module.HiiPackages)
    Section = GetSection("Guids", GetModuleGuidCNameItem, GuidSources)
    StoreTextFile(InfFile, Section)
+
+
## Return a Module Pcd Item.
#
# Format one PCD as "TokenSpace.CName", appending "|DefaultValue" when a
# default is present.
#
# @param Pcd An input Pcd class object.
#
# @retval PcdItem A Module Pcd Item.
#
def GetModulePcdItem(Pcd):
    Item = Pcd.TokenSpaceGuidCName + "." + Pcd.CName
    if Pcd.DefaultValue != "":
        Item = Item + "|" + Pcd.DefaultValue
    return Item
+
+
## INF Pcd section name dictionary indexed by PCD Item Type.
#
# Maps an MSA PcdItemType value to the INF section name the entry belongs
# in ([FeaturePcd], [FixedPcd], ...); unknown item types are reported by
# StoreModulePcdsSection.  (The original comment said "DEC", but this map
# produces INF section names.)
mInfPcdSectionNameDict = {
    "FEATURE_FLAG" : "FeaturePcd",
    "FIXED_AT_BUILD" : "FixedPcd",
    "PATCHABLE_IN_MODULE" : "PatchPcd",
    "DYNAMIC" : "Pcd",
    "DYNAMIC_EX" : "PcdEx"
    }
+
## Store Pcds section.
#
# Group the module's PCDs by item type and write one [<PcdType>] section
# per group to the InfFile.  PCDs with an unrecognized item type are
# logged and skipped.
#
# @param InfFile The output INF file to store the Pcds section.
# @param Module An input Module class object.
#
def StoreModulePcdsSection(InfFile, Module):
    PcdsBySection = {}
    for Pcd in Module.PcdCodes:
        SectionName = mInfPcdSectionNameDict.get(Pcd.ItemType)
        if SectionName is None:
            EdkLogger.info("Unknown Pcd Item Type: %s" % Pcd.ItemType)
            continue
        PcdsBySection.setdefault(SectionName, []).append(Pcd)

    Section = ""
    for SectionName in PcdsBySection:
        Section += GetSection(SectionName, GetModulePcdItem, PcdsBySection[SectionName])
        Section += "\n"

    StoreTextFile(InfFile, Section)
+
+
## Return a Module Depex Item.
#
# A dependency expression record is represented in the INF by its raw
# expression string.
#
# @param Depex An input Depex class object.
#
# @retval DepexItem A Module Depex Item.
#
def GetModuleDepexItem(Depex):
    Expression = Depex.Depex
    return Expression
+
+
## Store Depex section.
#
# Write the [Depex] section to the InfFile based on the Module class
# object, one line per dependency expression.
#
# @param InfFile The output INF file to store the Depex section.
# @param Module An input Module class object.
#
def StoreModuleDepexSection(InfFile, Module):
    StoreTextFile(InfFile, GetSection("Depex", GetModuleDepexItem, Module.Depex))
+
+
## Return a Module Build Option Item.
#
# Format one BuildOption object as an INF build option assignment of the
# form "[FAMILY:]TARGET_TAG_*_TOOL_Flag = option"; empty target, tag and
# tool fields become the "*" wildcard.  The key is left-padded to 30
# columns so the "=" signs line up.
#
# @param BuildOption An input BuildOption class object.
#
# @retval BuildOptionItem A Module Build Option Item.
#
def GetModuleBuildOptionItem(BuildOption):
    Target = BuildOption.BuildTarget
    if Target == "":
        Target = "*"

    Tag = BuildOption.TagName
    if Tag == "":
        Tag = "*"

    Tool = BuildOption.ToolCode
    if Tool == "":
        Tool = "*"

    # The third field (architecture) is always the "*" wildcard here.
    Item = "%s_%s_*_%s_Flag" % (Target, Tag, Tool)

    Family = BuildOption.ToolChainFamily
    if Family != "":
        Item = Family + ":" + Item

    return "%-30s = %s" % (Item, BuildOption.Option)
+
+
## Store Build Options section.
#
# Write the [BuildOptions] section to the InfFile based on the Module
# class object, one formatted assignment per option.
#
# @param InfFile The output INF file to store the Build Options section.
# @param Module An input Module class object.
#
def StoreModuleBuildOptionsSection(InfFile, Module):
    # Fix: the section name must be the plural "BuildOptions" -- the INF
    # section header is [BuildOptions]; the original singular "BuildOption"
    # produced a section name no INF consumer recognizes.
    Section = GetSection("BuildOptions", GetModuleBuildOptionItem, Module.BuildOptions)
    StoreTextFile(InfFile, Section)
+
+
## Store User Extensions section.
#
# Write the [UserExtensions] section to the InfFile by concatenating the
# formatted text of every user extension record.
#
# @param InfFile The output INF file to store the User Extensions section.
# @param Module An input Module class object.
#
def StoreModuleUserExtensionsSection(InfFile, Module):
    Chunks = [GetUserExtensions(Extension) for Extension in Module.UserExtensions]
    StoreTextFile(InfFile, "".join(Chunks))
+
+
## Store a Module class object to a new INF file.
#
# Serialize an in-memory Module class object section by section into a new
# text INF file, in conventional INF order (header, Defines, Sources, ...).
# Any existing file at InfFileName is overwritten.
#
# @param InfFileName Path of the INF file to create.
# @param Module An input Module class object.
#
def StoreInf(InfFileName, Module):
    InfFile = open(InfFileName, "w+")
    EdkLogger.info("Save file to %s" % InfFileName)

    StoreHeader(InfFile, Module.Header)
    StoreModuleDefinesSection(InfFile, Module)
    StoreModuleSourcesSection(InfFile, Module)
    StoreModuleBinariesSection(InfFile, Module)
    StoreModulePackagesSection(InfFile, Module)
    StoreModuleLibraryClassesSection(InfFile, Module)
    StoreModuleProtocolsSection(InfFile, Module)
    StoreModulePpisSection(InfFile, Module)
    StoreModuleGuidsSection(InfFile, Module)
    StoreModulePcdsSection(InfFile, Module)
    StoreModuleDepexSection(InfFile, Module)
    StoreModuleBuildOptionsSection(InfFile, Module)
    StoreModuleUserExtensionsSection(InfFile, Module)

    InfFile.close()
+
# This module only provides store helpers; nothing to execute when run directly.
if __name__ == '__main__':
    pass
diff --git a/BaseTools/Source/Python/msa2inf/__init__.py b/BaseTools/Source/Python/msa2inf/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/msa2inf/__init__.py diff --git a/BaseTools/Source/Python/spd2dec/ConvertPackage.py b/BaseTools/Source/Python/spd2dec/ConvertPackage.py new file mode 100644 index 0000000000..57bc098bfa --- /dev/null +++ b/BaseTools/Source/Python/spd2dec/ConvertPackage.py @@ -0,0 +1,66 @@ +## @file
+# Convert an SPD Package class object ot a DEC Package class object by filling
+# some fields required by DEC file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from Common.MigrationUtilities import *
+from LoadSpd import LoadSpd
+from StoreDec import StoreDec
+
# The default DEC specification version number this tool generates.
gDecVersion = "0x00010005"
+
+
## Add required version information.
#
# Stamp the package header with the default DEC specification version,
# which the SPD format does not carry but the DEC format requires.
#
# @param Package An input Package class object.
#
def AddPackageMiscVersion(Package):
    Package.Header.DecSpecification = gDecVersion
+
## Add package include information.
#
# If the package directory contains an "Include" folder, prepend it to
# the package's include path list; otherwise leave the list untouched.
#
# @param Package An input Package class object.
#
def AddPackageInclude(Package):
    PackageDir = os.path.dirname(Package.Header.FullPath)
    if os.path.exists(os.path.join(PackageDir, "Include")):
        Include = IncludeClass()
        Include.FilePath = "Include"
        Package.Includes.insert(0, Include)
+
## Convert SPD Package class object to DEC Package class object.
#
# Convert SPD Package class object to DEC Package class object by filling in
# the two pieces of information a DEC file requires but an SPD file lacks:
# the DEC specification version and the default "Include" directory.
#
# @param Package An input Package class object (modified in place).
#
def ConvertSpdPackageToDecPackage(Package):
    AddPackageMiscVersion(Package)
    AddPackageInclude(Package)
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  This module only provides conversion helpers, so
# there is nothing to execute when run directly.
if __name__ == '__main__':
    pass
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/spd2dec/LoadSpd.py b/BaseTools/Source/Python/spd2dec/LoadSpd.py new file mode 100644 index 0000000000..e94e7fc317 --- /dev/null +++ b/BaseTools/Source/Python/spd2dec/LoadSpd.py @@ -0,0 +1,273 @@ +## @file
+# Open an SPD file and load all its contents to a PackageClass object.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from Common.XmlRoutines import *
+from Common.MigrationUtilities import *
+from CommonDataClass.PackageClass import *
+
+
## Collect all Cloned Records from a Package Surface Area.
#
# Walk the SPD DOM and convert every PackageDefinitions/ClonedFrom/Cloned
# node into a Cloned Record object via the shared LoadClonedRecord loader.
#
# @param XmlSpd An XML DOM object read from an SPD file.
#
# @retval ClonedRecords A list of Cloned Record objects loaded from XmlSpd.
#
def LoadPackageClonedRecords(XmlSpd):
    ClonedNodes = XmlList(XmlSpd, "PackageSurfaceArea/PackageDefinitions/ClonedFrom/Cloned")
    return [LoadClonedRecord(Node) for Node in ClonedNodes]
+
+
## Load Package Header.
#
# Build a Package Header object from the SpdHeader node of an SPD DOM:
# identification and common header fields come from SpdHeader, while the
# ReadOnly / RePackage flags and clone history come from the
# PackageDefinitions node.
#
# @param XmlSpd An XML DOM object read from an SPD file.
# @param SpdFileName The file path of the SPD file.
#
# @retval PackageHeader A new Package Header object loaded from XmlSpd.
#
def LoadPackageHeader(XmlSpd, SpdFileName):
    Header = PackageHeaderClass()

    SpdHeaderNode = XmlNode(XmlSpd, "PackageSurfaceArea/SpdHeader")
    SetIdentification(Header, SpdHeaderNode, "PackageName", SpdFileName)
    SetCommonHeader(Header, SpdHeaderNode)

    # Both flags default to False; only an explicit "true" switches them on.
    ReadOnly = XmlElement(XmlSpd, "PackageSurfaceArea/PackageDefinitions/ReadOnly")
    if ReadOnly.lower() == "true":
        Header.ReadOnly = True

    RePackage = XmlElement(XmlSpd, "PackageSurfaceArea/PackageDefinitions/RePackage")
    if RePackage.lower() == "true":
        Header.RePackage = True

    Header.ClonedFrom = LoadPackageClonedRecords(XmlSpd)

    return Header
+
+
## Collect all Library Class declarations from a Package Surface Area.
#
# Walk the SPD DOM and convert every LibraryClassDeclarations/LibraryClass
# node into a Library Class object via the shared LoadLibraryClass loader.
#
# @param XmlSpd An XML DOM object read from an SPD file.
#
# @retval LibraryClasses A list of Library Class objects loaded from XmlSpd.
#
def LoadPackageLibraryClasses(XmlSpd):
    LibraryClassNodes = XmlList(XmlSpd, "PackageSurfaceArea/LibraryClassDeclarations/LibraryClass")
    return [LoadLibraryClass(Node) for Node in LibraryClassNodes]
+
+
## Build an Industry Std Header object from one IndustryStdHeader XML node.
#
# Read the "Name" attribute and the IncludeHeader element plus the common
# fields shared by all SPD child elements, and return the populated object.
#
# @param XmlIndustryStdHeader An IndustryStdHeader child XML DOM node.
#
# @retval PackageIndustryStdHeader A new Industry Std Header object created
#         from XmlIndustryStdHeader.
#
def LoadPackageIndustryStdHeader(XmlIndustryStdHeader):
    StdHeader = PackageIndustryStdHeaderClass()
    StdHeader.Name = XmlAttribute(XmlIndustryStdHeader, "Name")
    StdHeader.IncludeHeader = XmlElement(XmlIndustryStdHeader, "IndustryStdHeader/IncludeHeader")
    SetCommon(StdHeader, XmlIndustryStdHeader)
    return StdHeader
+
+
## Load a list of Package Industry Std Headers.
#
# Read an input Package XML DOM object and return the list of Industry Std
# Headers found under IndustryStdIncludes.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval IndustryStdHeaders A list of Industry Std Headers loaded from XmlSpd.
#
def LoadPackageIndustryStdHeaders(XmlSpd):
    XmlTag = "PackageSurfaceArea/IndustryStdIncludes/IndustryStdHeader"
    return [LoadPackageIndustryStdHeader(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package Module Files.
#
# Read an input Package XML DOM object and return the list of MSA file names
# declared in the package.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval ModuleFiles A list of Module Files loaded from XmlSpd.
#
def LoadPackageModuleFiles(XmlSpd):
    return XmlElementList(XmlSpd, "PackageSurfaceArea/MsaFiles/Filename")
+
+
## Load a new Package Include Pkg Header class object.
#
# Read an input XML IncludePkgHeader DOM object and return an object of Include
# Package Header contained in the DOM object.
#
# @param XmlPackageIncludeHeader A child XML DOM object in Package XML DOM.
#
# @retval PackageIncludePkgHeader A new Include Pkg Header object created by
#         XmlPackageIncludeHeader.
#
def LoadPackageIncludePkgHeader(XmlPackageIncludeHeader):
    IncludePkgHeader = PackageIncludePkgHeaderClass()

    # Element text holds the header path; the "ModuleType" attribute holds a
    # whitespace separated list of module types the header applies to.
    IncludePkgHeader.IncludeHeader = XmlElementData(XmlPackageIncludeHeader)
    ModuleTypes = XmlAttribute(XmlPackageIncludeHeader, "ModuleType")
    IncludePkgHeader.ModuleType = ModuleTypes.split()

    return IncludePkgHeader
+
+
## Load a list of Package Include Pkg Headers.
#
# Read an input Package XML DOM object and return the list of Include Pkg
# Headers found under PackageHeaders.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval IncludePkgHeaders A list of Include Pkg Headers loaded from XmlSpd.
#
def LoadPackageIncludePkgHeaders(XmlSpd):
    XmlTag = "PackageSurfaceArea/PackageHeaders/IncludePkgHeader"
    return [LoadPackageIncludePkgHeader(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package Guid Declarations.
#
# Read an input Package XML DOM object and return the list of Guid
# Declarations found under GuidDeclarations.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval GuidDeclarations A list of Guid Declarations loaded from XmlSpd.
#
def LoadPackageGuidDeclarations(XmlSpd):
    XmlTag = "PackageSurfaceArea/GuidDeclarations/Entry"
    return [LoadGuidProtocolPpiCommon(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package Protocol Declarations.
#
# Read an input Package XML DOM object and return the list of Protocol
# Declarations found under ProtocolDeclarations.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval ProtocolDeclarations A list of Protocol Declarations loaded from XmlSpd.
#
def LoadPackageProtocolDeclarations(XmlSpd):
    XmlTag = "PackageSurfaceArea/ProtocolDeclarations/Entry"
    return [LoadGuidProtocolPpiCommon(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package Ppi Declarations.
#
# Read an input Package XML DOM object and return the list of Ppi
# Declarations found under PpiDeclarations.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval PpiDeclarations A list of Ppi Declarations loaded from XmlSpd.
#
def LoadPackagePpiDeclarations(XmlSpd):
    XmlTag = "PackageSurfaceArea/PpiDeclarations/Entry"
    return [LoadGuidProtocolPpiCommon(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package Pcd Declarations.
#
# Read an input Package XML DOM object and return the list of Pcd
# Declarations found under PcdDeclarations.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval PcdDeclarations A list of Pcd Declarations loaded from XmlSpd.
#
def LoadPackagePcdDeclarations(XmlSpd):
    XmlTag = "PackageSurfaceArea/PcdDeclarations/PcdEntry"
    return [LoadPcd(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a list of Package User Extensions.
#
# Read an input Package XML DOM object and return the list of User
# Extensions declared at the package surface area top level.
#
# @param XmlSpd An XML DOM object read from SPD file.
#
# @retval UserExtensions A list of User Extensions loaded from XmlSpd.
#
def LoadPackageUserExtensions(XmlSpd):
    XmlTag = "PackageSurfaceArea/UserExtensions"
    return [LoadUserExtensions(Node) for Node in XmlList(XmlSpd, XmlTag)]
+
+
## Load a new Package class object.
#
# Parse an input SPD file and return a Package class object fully populated
# from the XML surface area it contains.
#
# @param SpdFileName The file path of the SPD file to load.
#
# @retval Package A new Package class object loaded from SPD File.
#
def LoadSpd(SpdFileName):
    XmlSpd = XmlParseFile(SpdFileName)
    EdkLogger.verbose("Xml Object loaded for file %s" % SpdFileName)

    Package = PackageClass()
    Package.Header = LoadPackageHeader(XmlSpd, SpdFileName)

    # Translate each remaining top level section of the SPD surface area,
    # in the same order as the original one-assignment-per-line version.
    for AttributeName, SectionLoader in [
            ("LibraryClassDeclarations", LoadPackageLibraryClasses),
            ("IndustryStdHeaders", LoadPackageIndustryStdHeaders),
            ("ModuleFiles", LoadPackageModuleFiles),
            ("PackageIncludePkgHeaders", LoadPackageIncludePkgHeaders),
            ("GuidDeclarations", LoadPackageGuidDeclarations),
            ("ProtocolDeclarations", LoadPackageProtocolDeclarations),
            ("PpiDeclarations", LoadPackagePpiDeclarations),
            ("PcdDeclarations", LoadPackagePcdDeclarations),
            ("UserExtensions", LoadPackageUserExtensions),
            ]:
        setattr(Package, AttributeName, SectionLoader(XmlSpd))

    return Package
+
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  This module is a library of loaders, so there is
# nothing to do when run directly.
if __name__ == '__main__':
    pass
diff --git a/BaseTools/Source/Python/spd2dec/Spd2Dec.py b/BaseTools/Source/Python/spd2dec/Spd2Dec.py new file mode 100644 index 0000000000..46d058344b --- /dev/null +++ b/BaseTools/Source/Python/spd2dec/Spd2Dec.py @@ -0,0 +1,46 @@ +## @file
+# Convert an XML-based SPD file to a text-based DEC file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sys
+from Common.MigrationUtilities import *
+from LoadSpd import LoadSpd
+from StoreDec import StoreDec
+from ConvertPackage import ConvertSpdPackageToDecPackage
+
+## Entrance method
+#
+# This method mainly dispatch specific methods per the command line options.
+# If no error found, return zero value so the caller of this tool can know
+# if it's executed successfully or not.
+#
+# @retval 0 Tool was successful.
+# @retval 1 Tool failed.
+#
+def Main():
+ try:
+ Options, InputFile = MigrationOptionParser("SPD", "DEC", "%prog")
+ Package = LoadSpd(InputFile)
+ ConvertSpdPackageToDecPackage(Package)
+ StoreDec(Options.OutputFile, Package)
+ return 0
+ except Exception, e:
+ print e
+ return 1
+
# Run the conversion when invoked directly from the command line.
if __name__ == '__main__':
    sys.exit(Main())
+
+
diff --git a/BaseTools/Source/Python/spd2dec/StoreDec.py b/BaseTools/Source/Python/spd2dec/StoreDec.py new file mode 100644 index 0000000000..67cbd11e9b --- /dev/null +++ b/BaseTools/Source/Python/spd2dec/StoreDec.py @@ -0,0 +1,247 @@ +## @file
+# Store a Package class object to a DEC file.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from Common.MigrationUtilities import *
+from LoadSpd import LoadSpd
+from CommonDataClass.PackageClass import *
+
+
## Store Defines section.
#
# Write [Defines] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Defines section.
# @param Package An input Package class object.
#
def StorePackageDefinesSection(DecFile, Package):
    Header = Package.Header
    # NOTE(review): the key "DEC_VERSION" is what this tool has always
    # emitted; confirm against the target DEC specification if consumers
    # expect "DEC_SPECIFICATION" instead.
    DefinesTupleList = [
        ("DEC_VERSION", Header.DecSpecification),
        ("PACKAGE_NAME", Header.Name),
        ("PACKAGE_GUID", Header.Guid),
        ]

    StoreDefinesSection(DecFile, DefinesTupleList)
+
+
## Return a Package Include Class Item.
#
# Read the input Include class object and return its file path as the one
# line [Includes] section item.
#
# @param Include An input Include class object.
#
# @retval IncludeClassItem A Package Include Class Item.
#
def GetPackageIncludeClassItem(Include):
    IncludePath = Include.FilePath
    return IncludePath
+
+
## Store Includes section.
#
# Write [Includes] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Includes section.
# @param Package An input Package class object.
#
def StorePackageIncludesSection(DecFile, Package):
    Section = GetSection("Includes", GetPackageIncludeClassItem,
                         Package.Includes)
    StoreTextFile(DecFile, Section)
+
+
## Return a Package Library Class Item.
#
# Read the input LibraryClass class object and return one Library Class Item
# in the "Name|Header" form used by DEC [LibraryClasses] sections.
#
# @param LibraryClass An input LibraryClass class object.
#
# @retval LibraryClassItem A Package Library Class Item.
#
def GetPackageLibraryClassItem(LibraryClass):
    return LibraryClass.LibraryClass + "|" + LibraryClass.IncludeHeader
+
+
## Store Library Classes section.
#
# Write [LibraryClasses] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Library Classes
#                section.
# @param Package An input Package class object.
#
def StorePackageLibraryClassesSection(DecFile, Package):
    Section = GetSection("LibraryClasses", GetPackageLibraryClassItem,
                         Package.LibraryClassDeclarations)
    StoreTextFile(DecFile, Section)
+
+
## Return a Package Guid Declaration Item.
#
# Read the input Guid class object and return one line of Guid Declaration
# Item, mapping the registry format GUID string to its C structure form.
#
# @param Guid An input Guid class object.
#
# @retval GuidDeclarationItem A Package Guid Declaration Item.
#
def GetPackageGuidDeclarationItem(Guid):
    # Strip dashes: "aabbccdd-eeff-0011-2233-445566778899" -> 32 hex digits.
    HexDigits = Guid.Guid.replace("-", "")

    # The C form is one 32 bit value, two 16 bit values, then eight bytes.
    Fields = [HexDigits[0:8], HexDigits[8:12], HexDigits[12:16]]
    Fields += [HexDigits[Index : Index + 2] for Index in range(16, 32, 2)]

    GuidCValue = ("{0x%s, 0x%s, 0x%s, "
                  "{0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s, 0x%s}}"
                  % tuple(Fields))
    return "%-30s = %s" % (Guid.CName, GuidCValue)
+
+
## Store Protocols section.
#
# Write [Protocols] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Protocols section.
# @param Package An input Package class object.
#
def StorePackageProtocolsSection(DecFile, Package):
    Section = GetSection("Protocols", GetPackageGuidDeclarationItem,
                         Package.ProtocolDeclarations)
    StoreTextFile(DecFile, Section)
+
+
## Store Ppis section.
#
# Write [Ppis] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Ppis section.
# @param Package An input Package class object.
#
def StorePackagePpisSection(DecFile, Package):
    Section = GetSection("Ppis", GetPackageGuidDeclarationItem,
                         Package.PpiDeclarations)
    StoreTextFile(DecFile, Section)
+
+
## Store Guids section.
#
# Write [Guids] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the Guids section.
# @param Package An input Package class object.
#
def StorePackageGuidsSection(DecFile, Package):
    Section = GetSection("Guids", GetPackageGuidDeclarationItem,
                         Package.GuidDeclarations)
    StoreTextFile(DecFile, Section)
+
+
## Return a Package Pcd Item.
#
# Read the input Pcd class object and return one line of Pcd Item in the
# "TokenSpace.CName|DefaultValue|DatumType|Token" form.
#
# @param Pcd An input Pcd class object.
#
# @retval PcdItem A Package Pcd Item.
#
def GetPackagePcdItem(Pcd):
    PcdPair = "%s.%s" % (Pcd.TokenSpaceGuidCName, Pcd.CName)
    return "|".join([PcdPair, Pcd.DefaultValue, Pcd.DatumType, Pcd.Token])
+
+
## DEC Pcd Section Name dictionary indexed by PCD Item Type.
#
# Maps an SPD PCD item type string (from a Pcd's ValidUsage list) to the
# corresponding [Pcds*] section name used in DEC files.
mDecPcdSectionNameDict = {
    "FEATURE_FLAG" : "PcdsFeatureFlag",
    "FIXED_AT_BUILD" : "PcdsFixedAtBuild",
    "PATCHABLE_IN_MODULE" : "PcdsPatchableInModule",
    "DYNAMIC" : "PcdsDynamic",
    "DYNAMIC_EX" : "PcdsDynamicEx"
    }
+
## Store Pcds section.
#
# Write [Pcds*] sections to the DecFile based on Package class object, one
# section per PCD item type that appears in the declarations.
#
# @param DecFile The output DEC file to store the Pcds section.
# @param Package An input Package class object.
#
def StorePackagePcdsSection(DecFile, Package):
    # Bucket each PCD under every DEC section its usage types map to; a PCD
    # with several ValidUsage entries appears in several sections.
    SectionToPcds = {}
    for Pcd in Package.PcdDeclarations:
        for ItemType in Pcd.ValidUsage:
            SectionName = mDecPcdSectionNameDict.get(ItemType)
            if SectionName is None:
                # Unknown types are reported but do not abort the conversion.
                EdkLogger.info("Unknown Pcd Item Type: %s" % ItemType)
                continue
            SectionToPcds.setdefault(SectionName, []).append(Pcd)

    Section = ""
    for SectionName in SectionToPcds:
        Section += GetSection(SectionName, GetPackagePcdItem,
                              SectionToPcds[SectionName])

    StoreTextFile(DecFile, Section)
+
+
## Store User Extensions section.
#
# Write [UserExtensions] section to the DecFile based on Package class object.
#
# @param DecFile The output DEC file to store the User Extensions section.
# @param Package An input Package class object.
#
def StorePackageUserExtensionsSection(DecFile, Package):
    Section = ""
    for UserExtension in Package.UserExtensions:
        Section += GetUserExtensions(UserExtension)
    StoreTextFile(DecFile, Section)
+
+
## Store a Package class object to a new DEC file.
#
# Read an input Package class object and save the contents to a new DEC file.
# The output file is always closed, even if one of the section writers raises.
#
# @param DecFileName The output DEC file.
# @param Package An input Package class object.
#
def StoreDec(DecFileName, Package):
    DecFile = open(DecFileName, "w+")
    EdkLogger.info("Save file to %s" % DecFileName)

    # Ensure the file handle is released on any failure in a section writer;
    # the original code leaked the handle when an exception propagated.
    try:
        StoreHeader(DecFile, Package.Header)
        StorePackageDefinesSection(DecFile, Package)
        StorePackageIncludesSection(DecFile, Package)
        StorePackageLibraryClassesSection(DecFile, Package)
        StorePackageProtocolsSection(DecFile, Package)
        StorePackagePpisSection(DecFile, Package)
        StorePackageGuidsSection(DecFile, Package)
        StorePackagePcdsSection(DecFile, Package)
        StorePackageUserExtensionsSection(DecFile, Package)
    finally:
        DecFile.close()
+
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.  This module only provides Store* helpers, so there is
# nothing to do when run directly.
if __name__ == '__main__':
    pass
+
\ No newline at end of file diff --git a/BaseTools/Source/Python/spd2dec/__init__.py b/BaseTools/Source/Python/spd2dec/__init__.py new file mode 100644 index 0000000000..e69de29bb2 --- /dev/null +++ b/BaseTools/Source/Python/spd2dec/__init__.py |