author     lgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>  2009-07-17 09:10:31 +0000
committer  lgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>  2009-07-17 09:10:31 +0000
commit     30fdf1140b8d1ce93f3821d986fa165552023440 (patch)
tree       c45c336a8955b1d03ea56d6c915a0e68a43b4ee9 /BaseTools/Source/Python/Workspace
parent     577e30cdb473e4af8e65fd6f75236691d0c8dfb3 (diff)
download   edk2-platforms-30fdf1140b8d1ce93f3821d986fa165552023440.tar.xz
Check In tool source code based on Build tool project revision r1655.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@8964 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'BaseTools/Source/Python/Workspace')
-rw-r--r--  BaseTools/Source/Python/Workspace/BuildClassObject.py     364
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaDataTable.py        335
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileParser.py      1131
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileTable.py        275
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceDatabase.py   2274
-rw-r--r--  BaseTools/Source/Python/Workspace/__init__.py               0
6 files changed, 4379 insertions, 0 deletions
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
new file mode 100644
index 0000000000..36c2ebf491
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -0,0 +1,364 @@
+## @file
+# This file is used to define each component of the build database
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+import os
+import Common.DataType as DataType
+from Common.Misc import sdict, RealPath2
+from Common.String import CleanString
+from Common.BuildToolError import *
+
+## PcdClassObject
+#
+# This Class is used for PcdObject
+#
+# @param object: Inherited from object class
+# @param Name: Input value for Name of Pcd, default is None
+# @param Guid: Input value for Guid of Pcd, default is None
+# @param Type: Input value for Type of Pcd, default is None
+# @param DatumType: Input value for DatumType of Pcd, default is None
+# @param Value: Input value for Value of Pcd, default is None
+# @param Token: Input value for Token of Pcd, default is None
+# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
+# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
+# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
+#
+# @var TokenCName: To store value for TokenCName
+# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
+# @var Type: To store value for Type
+# @var DatumType: To store value for DatumType
+# @var TokenValue: To store value for TokenValue
+# @var MaxDatumSize: To store value for MaxDatumSize
+# @var SkuInfoList: To store value for SkuInfoList
+# @var DefaultValue: To store value for DefaultValue
+# @var Phase: To store value for Phase, default is "DXE"
+#
+class PcdClassObject(object):
+ def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, GuidValue = None):
+ self.TokenCName = Name
+ self.TokenSpaceGuidCName = Guid
+ self.TokenSpaceGuidValue = GuidValue
+ self.Type = Type
+ self.DatumType = DatumType
+ self.DefaultValue = Value
+ self.TokenValue = Token
+ self.MaxDatumSize = MaxDatumSize
+ self.SkuInfoList = SkuInfoList
+ self.Phase = "DXE"
+ self.Pending = False
+
+ ## Convert the class to a string
+ #
+ # Convert each member of the class to string
+ # Organize into a single-line format string
+ #
+ # @retval Rtn Formatted String
+ #
+ def __str__(self):
+ Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
+ 'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
+ 'Type=' + str(self.Type) + ', ' + \
+ 'DatumType=' + str(self.DatumType) + ', ' + \
+ 'DefaultValue=' + str(self.DefaultValue) + ', ' + \
+ 'TokenValue=' + str(self.TokenValue) + ', ' + \
+ 'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
+ for Item in self.SkuInfoList.values():
+ Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
+ Rtn = Rtn + 'Phase=' + str(self.Phase)
+
+ return Rtn
+
+ ## Override __eq__ function
+ #
+ # Check whether pcds are the same
+ #
+ # @retval False The two pcds are different
+ # @retval True The two pcds are the same
+ #
+ def __eq__(self, Other):
+ return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
+
+ ## Override __hash__ function
+ #
+ # Use (TokenCName, TokenSpaceGuidCName) as key in hash table
+ #
+ # @retval tuple() Key for hash table
+ #
+ def __hash__(self):
+ return hash((self.TokenCName, self.TokenSpaceGuidCName))
+
+## LibraryClassObject
+#
+# This Class defines LibraryClassObject used in BuildDatabase
+#
+# @param object: Inherited from object class
+# @param Name: Input value for LibraryClassName, default is None
+# @param SupModList: Input value for SupModList, default is []
+# @param Type: Input value for Type, default is None
+#
+# @var LibraryClass: To store value for LibraryClass
+# @var SupModList: To store value for SupModList
+# @var Type: To store value for Type
+#
+class LibraryClassObject(object):
+ def __init__(self, Name = None, SupModList = [], Type = None):
+ self.LibraryClass = Name
+ self.SupModList = SupModList
+ if Type != None:
+ self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
+
+## ModuleBuildClassObject
+#
+# This Class defines ModuleBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for module meta file path
+# @var BaseName: To store value for BaseName
+# @var ModuleType: To store value for ModuleType
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var PcdIsDriver: To store value for PcdIsDriver
+# @var BinaryModule: To store value for BinaryModule
+# @var CustomMakefile: To store value for CustomMakefile
+# @var Specification: To store value for Specification
+# @var Shadow: To store value for Shadow
+# @var LibraryClass: To store value for LibraryClass, it is a list structure as
+# [ LibraryClassObject, ...]
+# @var ModuleEntryPointList: To store value for ModuleEntryPointList
+# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
+# @var ConstructorList: To store value for ConstructorList
+# @var DestructorList: To store value for DestructorList
+# @var Binaries: To store value for Binaries, it is a list structure as
+# [ ModuleBinaryClassObject, ...]
+# @var Sources: To store value for Sources, it is a list structure as
+# [ ModuleSourceFilesClassObject, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
+# @var Protocols: To store value for Protocols, it is a list structure as
+# [ ProtocolName, ... ]
+# @var Ppis: To store value for Ppis, it is a list structure as
+# [ PpiName, ... ]
+# @var Guids: To store value for Guids, it is a list structure as
+# [ GuidName, ... ]
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var Packages: To store value for Packages, it is a list structure as
+# [ DecFileName, ... ]
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue}
+# @var Depex: To store value for Depex
+#
+class ModuleBuildClassObject(object):
+ def __init__(self):
+ self.AutoGenVersion = 0
+ self.MetaFile = ''
+ self.BaseName = ''
+ self.ModuleType = ''
+ self.Guid = ''
+ self.Version = ''
+ self.PcdIsDriver = ''
+ self.BinaryModule = ''
+ self.Shadow = ''
+ self.SourceOverridePath = ''
+ self.CustomMakefile = {}
+ self.Specification = {}
+ self.LibraryClass = []
+ self.ModuleEntryPointList = []
+ self.ModuleUnloadImageList = []
+ self.ConstructorList = []
+ self.DestructorList = []
+
+ self.Binaries = []
+ self.Sources = []
+ self.LibraryClasses = sdict()
+ self.Libraries = []
+ self.Protocols = []
+ self.Ppis = []
+ self.Guids = []
+ self.Includes = []
+ self.Packages = []
+ self.Pcds = {}
+ self.BuildOptions = {}
+ self.Depex = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether ModuleBuildClassObjects are the same
+ #
+ # @retval False The two ModuleBuildClassObjects are different
+ # @retval True The two ModuleBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+## PackageBuildClassObject
+#
+# This Class defines PackageBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for package meta file path
+# @var PackageName: To store value for PackageName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var Protocols: To store value for Protocols, it is a set structure as
+# { [ProtocolName] : Protocol Guid, ... }
+# @var Ppis: To store value for Ppis, it is a set structure as
+# { [PpiName] : Ppi Guid, ... }
+# @var Guids: To store value for Guids, it is a set structure as
+# { [GuidName] : Guid, ... }
+# @var Includes: To store value for Includes, it is a list structure as
+# [ IncludePath, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { [LibraryClassName] : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
+#
+class PackageBuildClassObject(object):
+ def __init__(self):
+ self.MetaFile = ''
+ self.PackageName = ''
+ self.Guid = ''
+ self.Version = ''
+
+ self.Protocols = {}
+ self.Ppis = {}
+ self.Guids = {}
+ self.Includes = []
+ self.LibraryClasses = {}
+ self.Pcds = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether PackageBuildClassObjects are the same
+ #
+ # @retval False The two PackageBuildClassObjects are different
+ # @retval True The two PackageBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
+## PlatformBuildClassObject
+#
+# This Class defines PlatformBuildClass
+#
+# @param object: Inherited from object class
+#
+# @var MetaFile: To store value for platform meta-file path
+# @var PlatformName: To store value for PlatformName
+# @var Guid: To store value for Guid
+# @var Version: To store value for Version
+# @var DscSpecification: To store value for DscSpecification
+# @var OutputDirectory: To store value for OutputDirectory
+# @var FlashDefinition: To store value for FlashDefinition
+# @var BuildNumber: To store value for BuildNumber
+# @var MakefileName: To store value for MakefileName
+# @var SkuIds: To store value for SkuIds, it is a set structure as
+# { 'SkuName' : SkuId, '!include' : includefilename, ...}
+# @var Modules: To store value for Modules, it is a list structure as
+# [ InfFileName, ... ]
+# @var Libraries: To store value for Libraries, it is a list structure as
+# [ InfFileName, ... ]
+# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
+# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
+# @var Pcds: To store value for Pcds, it is a set structure as
+# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
+# @var BuildOptions: To store value for BuildOptions, it is a set structure as
+# { [BuildOptionKey] : BuildOptionValue }
+#
+class PlatformBuildClassObject(object):
+ def __init__(self):
+ self.MetaFile = ''
+ self.PlatformName = ''
+ self.Guid = ''
+ self.Version = ''
+ self.DscSpecification = ''
+ self.OutputDirectory = ''
+ self.FlashDefinition = ''
+ self.BuildNumber = ''
+ self.MakefileName = ''
+
+ self.SkuIds = {}
+ self.Modules = []
+ self.LibraryInstances = []
+ self.LibraryClasses = {}
+ self.Libraries = {}
+ self.Pcds = {}
+ self.BuildOptions = {}
+
+ ## Convert the class to a string
+ #
+ # Convert member MetaFile of the class to a string
+ #
+ # @retval string Formatted String
+ #
+ def __str__(self):
+ return str(self.MetaFile)
+
+ ## Override __eq__ function
+ #
+ # Check whether PlatformBuildClassObjects are the same
+ #
+ # @retval False The two PlatformBuildClassObjects are different
+ # @retval True The two PlatformBuildClassObjects are the same
+ #
+ def __eq__(self, Other):
+ return self.MetaFile == Other
+
+ ## Override __hash__ function
+ #
+ # Use MetaFile as key in hash table
+ #
+ # @retval string Key for hash table
+ #
+ def __hash__(self):
+ return hash(self.MetaFile)
+
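In BuildClassObject.py above, PcdClassObject's __eq__/__hash__ overrides make the (TokenCName, TokenSpaceGuidCName) pair the identity of a PCD in the build database: two instances with the same names compare equal and hash to the same key no matter how their type or value differ. A minimal sketch of that behaviour (editorial example, not part of the commit; the PCD and GUID names and values are made up, and it assumes BaseTools/Source/Python is on PYTHONPATH):

# Editorial sketch, not part of the commit: how PcdClassObject keys behave.
# Assumes BaseTools/Source/Python is on PYTHONPATH; names/values are made up.
from Workspace.BuildClassObject import PcdClassObject

PcdA = PcdClassObject('PcdExample', 'gExampleTokenSpaceGuid',
                      'FixedAtBuild', 'UINT32', '0', '0x00000001', '4', {}, None)
PcdB = PcdClassObject('PcdExample', 'gExampleTokenSpaceGuid',
                      'DynamicEx', 'UINT32', '1', '0x00000001', '4', {}, None)

# __eq__/__hash__ use only (TokenCName, TokenSpaceGuidCName), so a later
# declaration of the same PCD replaces the earlier one in a dictionary.
PcdStore = {PcdA: 'declared in DEC'}
PcdStore[PcdB] = 'overridden in DSC'
assert PcdA == PcdB and len(PcdStore) == 1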
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py
new file mode 100644
index 0000000000..c8166bfa90
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/MetaDataTable.py
@@ -0,0 +1,335 @@
+## @file
+# This file is used to create/update/query/erase table for files
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+
+import Common.EdkLogger as EdkLogger
+from CommonDataClass import DataClass
+from CommonDataClass.DataClass import FileClass
+
+## Convert to SQL required string format
+def ConvertToSqlString(StringList):
+ return map(lambda s: "'" + s.replace("'", "''") + "'", StringList)
+
+## Table
+#
+# This class defines a common table
+#
+# @param object: Inherited from object class
+#
+# @param Cursor: Cursor of the database
+# @param Name: Name of the table
+#
+class Table(object):
+ _COLUMN_ = ''
+ _ID_STEP_ = 1
+ _ID_MAX_ = 0x80000000
+ _DUMMY_ = 0
+
+ def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):
+ self.Cur = Cursor
+ self.Table = Name
+ self.IdBase = int(IdBase)
+ self.ID = int(IdBase)
+ self.Temporary = Temporary
+
+ def __str__(self):
+ return self.Table
+
+ ## Create table
+ #
+ # Create a table
+ #
+ def Create(self, NewTable=True):
+ if NewTable:
+ self.Drop()
+
+ if self.Temporary:
+ SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
+ else:
+ SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
+ EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ self.ID = self.GetId()
+
+ ## Insert table
+ #
+ # Insert a record into a table
+ #
+ def Insert(self, *Args):
+ self.ID = self.ID + self._ID_STEP_
+ if self.ID >= (self.IdBase + self._ID_MAX_):
+ self.ID = self.IdBase + self._ID_STEP_
+ Values = ", ".join([str(Arg) for Arg in Args])
+ SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)
+ EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ return self.ID
+
+ ## Query table
+ #
+ # Query all records of the table
+ #
+ def Query(self):
+ SqlCommand = """select * from %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+ for Rs in self.Cur:
+ EdkLogger.verbose(str(Rs))
+ TotalCount = self.GetId()
+
+ ## Drop a table
+ #
+ # Drop the table
+ #
+ def Drop(self):
+ SqlCommand = """drop table IF EXISTS %s""" % self.Table
+ self.Cur.execute(SqlCommand)
+
+ ## Get count
+ #
+ # Get a count of all records of the table
+ #
+ # @retval Count: Total count of all records
+ #
+ def GetCount(self):
+ SqlCommand = """select count(ID) from %s""" % self.Table
+ Record = self.Cur.execute(SqlCommand).fetchall()
+ return Record[0][0]
+
+ def GetId(self):
+ SqlCommand = """select max(ID) from %s""" % self.Table
+ Record = self.Cur.execute(SqlCommand).fetchall()
+ Id = Record[0][0]
+ if Id == None:
+ Id = self.IdBase
+ return Id
+
+ ## Init the ID of the table
+ #
+ # Init the ID of the table
+ #
+ def InitID(self):
+ self.ID = self.GetId()
+
+ ## Exec
+ #
+ # Exec Sql Command, return result
+ #
+ # @param SqlCommand: The SqlCommand to be executed
+ #
+ # @retval RecordSet: The result after executed
+ #
+ def Exec(self, SqlCommand):
+ EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
+ self.Cur.execute(SqlCommand)
+ RecordSet = self.Cur.fetchall()
+ return RecordSet
+
+ def SetEndFlag(self):
+ self.Exec("insert into %s values(%s)" % (self.Table, self._DUMMY_))
+
+ def IsIntegral(self):
+ Result = self.Exec("select min(ID) from %s" % (self.Table))
+ if Result[0][0] != -1:
+ return False
+ return True
+
+## TableFile
+#
+# This class defines a table used for files
+#
+# @param object: Inherited from object class
+#
+class TableFile(Table):
+ _COLUMN_ = '''
+ ID INTEGER PRIMARY KEY,
+ Name VARCHAR NOT NULL,
+ ExtName VARCHAR,
+ Path VARCHAR,
+ FullPath VARCHAR NOT NULL,
+ Model INTEGER DEFAULT 0,
+ TimeStamp SINGLE NOT NULL
+ '''
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor, 'File')
+
+ ## Insert table
+ #
+ # Insert a record into table File
+ #
+ # @param Name: Name of a File
+ # @param ExtName: ExtName of a File
+ # @param Path: Path of a File
+ # @param FullPath: FullPath of a File
+ # @param Model: Model of a File
+ # @param TimeStamp: TimeStamp of a File
+ #
+ def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
+ (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
+ return Table.Insert(
+ self,
+ Name,
+ ExtName,
+ Path,
+ FullPath,
+ Model,
+ TimeStamp
+ )
+
+ ## InsertFile
+ #
+ # Insert one file to table
+ #
+ # @param FileFullPath: The full path of the file
+ # @param Model: The model of the file
+ #
+ # @retval FileID: The ID after record is inserted
+ #
+ def InsertFile(self, FileFullPath, Model):
+ (Filepath, Name) = os.path.split(FileFullPath)
+ (Root, Ext) = os.path.splitext(FileFullPath)
+ TimeStamp = os.stat(FileFullPath)[8]
+ File = FileClass(-1, Name, Ext, Filepath, FileFullPath, Model, '', [], [], [])
+ return self.Insert(
+ Name,
+ Ext,
+ Filepath,
+ FileFullPath,
+ Model,
+ TimeStamp
+ )
+
+ ## Get ID of a given file
+ #
+ # @param FilePath Path of file
+ #
+ # @retval ID ID value of given file in the table
+ #
+ def GetFileId(self, FilePath):
+ QueryScript = "select ID from %s where FullPath = '%s'" % (self.Table, FilePath)
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
+
+ ## Get type of a given file
+ #
+ # @param FileId ID of a file
+ #
+ # @retval file_type Model value of given file in the table
+ #
+ def GetFileType(self, FileId):
+ QueryScript = "select Model from %s where ID = '%s'" % (self.Table, FileId)
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
+
+ ## Get file timestamp of a given file
+ #
+ # @param FileId ID of file
+ #
+ # @retval timestamp TimeStamp value of given file in the table
+ #
+ def GetFileTimeStamp(self, FileId):
+ QueryScript = "select TimeStamp from %s where ID = '%s'" % (self.Table, FileId)
+ RecordList = self.Exec(QueryScript)
+ if len(RecordList) == 0:
+ return None
+ return RecordList[0][0]
+
+ ## Update the timestamp of a given file
+ #
+ # @param FileId ID of file
+ # @param TimeStamp Time stamp of file
+ #
+ def SetFileTimeStamp(self, FileId, TimeStamp):
+ self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))
+
+ ## Get list of file with given type
+ #
+ # @param FileType Type value of file
+ #
+ # @retval file_list List of files with the given type
+ #
+ def GetFileList(self, FileType):
+ RecordList = self.Exec("select FullPath from %s where Model=%s" % (self.Table, FileType))
+ if len(RecordList) == 0:
+ return []
+ return [R[0] for R in RecordList]
+
+## TableDataModel
+#
+# This class defines a table used for the data model
+#
+# @param object: Inherited from object class
+#
+#
+class TableDataModel(Table):
+ _COLUMN_ = """
+ ID INTEGER PRIMARY KEY,
+ CrossIndex INTEGER NOT NULL,
+ Name VARCHAR NOT NULL,
+ Description VARCHAR
+ """
+ def __init__(self, Cursor):
+ Table.__init__(self, Cursor, 'DataModel')
+
+ ## Insert table
+ #
+ # Insert a record into table DataModel
+ #
+ # @param ID: ID of a ModelType
+ # @param CrossIndex: CrossIndex of a ModelType
+ # @param Name: Name of a ModelType
+ # @param Description: Description of a ModelType
+ #
+ def Insert(self, CrossIndex, Name, Description):
+ (Name, Description) = ConvertToSqlString((Name, Description))
+ return Table.Insert(self, CrossIndex, Name, Description)
+
+ ## Init table
+ #
+ # Create all default records of table DataModel
+ #
+ def InitTable(self):
+ EdkLogger.verbose("\nInitialize table DataModel started ...")
+ Count = self.GetCount()
+ if Count != None and Count != 0:
+ return
+ for Item in DataClass.MODEL_LIST:
+ CrossIndex = Item[1]
+ Name = Item[0]
+ Description = Item[0]
+ self.Insert(CrossIndex, Name, Description)
+ EdkLogger.verbose("Initialize table DataModel ... DONE!")
+
+ ## Get CrossIndex
+ #
+ # Get a model's cross index from its name
+ #
+ # @param ModelName: Name of the model
+ # @retval CrossIndex: CrossIndex of the model
+ #
+ def GetCrossIndex(self, ModelName):
+ CrossIndex = -1
+ SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
+ self.Cur.execute(SqlCommand)
+ for Item in self.Cur:
+ CrossIndex = Item[0]
+
+ return CrossIndex
+
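MetaDataTable.py builds everything on a bare SQLite cursor, so the Table classes can be exercised on their own. A minimal sketch (editorial example, not part of the commit; it assumes BaseTools/Source/Python is on PYTHONPATH, and the model name passed to GetCrossIndex is only an example):

# Editorial sketch, not part of the commit: driving TableDataModel with an
# in-memory SQLite database. Assumes BaseTools/Source/Python is on PYTHONPATH.
import sqlite3
from Workspace.MetaDataTable import TableDataModel

Conn = sqlite3.connect(':memory:')
Cursor = Conn.cursor()

DataModelTable = TableDataModel(Cursor)
DataModelTable.Create()        # drop (if any) and re-create the DataModel table
DataModelTable.InitTable()     # populate it from DataClass.MODEL_LIST
Total = DataModelTable.GetCount()
# Look up one model's cross index by name; returns -1 if the name is unknown.
Index = DataModelTable.GetCrossIndex('MODEL_META_DATA_HEADER')
Conn.commit()
Conn.close()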
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
new file mode 100644
index 0000000000..294237daee
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -0,0 +1,1131 @@
+## @file
+# This file is used to parse meta files
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import time
+
+import Common.EdkLogger as EdkLogger
+from CommonDataClass.DataClass import *
+from Common.DataType import *
+from Common.String import *
+from Common.Misc import Blist, GuidStructureStringToGuidString, CheckPcdDatum
+
+## Base class of parser
+#
+# This class is used for derivation purpose. The specific parser for one kind
+# type file must derive this class and implement some public interfaces.
+#
+# @param FilePath The path of the meta file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class MetaFileParser(object):
+ # data type (file content) for specific file type
+ DataType = {}
+
+ ## Constructor of MetaFileParser
+ #
+ # Initialize object of MetaFileParser
+ #
+ # @param FilePath The path of the meta file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Table, Macros=None, Owner=-1, From=-1):
+ self._Table = Table
+ self._FileType = FileType
+ self.MetaFile = FilePath
+ self._FileDir = os.path.dirname(self.MetaFile)
+ self._Macros = {}
+
+ # for recursive parsing
+ self._Owner = Owner
+ self._From = From
+
+ # parser status for parsing
+ self._Content = None
+ self._ValueList = ['', '', '', '', '']
+ self._Scope = []
+ self._LineIndex = 0
+ self._CurrentLine = ''
+ self._SectionType = MODEL_UNKNOWN
+ self._SectionName = ''
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._LastItem = -1
+ self._Enabled = 0
+ self._Finished = False
+
+ ## Store the parsed data in table
+ def _Store(self, *Args):
+ return self._Table.Insert(*Args)
+
+ ## Virtual method for starting parse
+ def Start(self):
+ raise NotImplementedError
+
+ ## Set parsing complete flag in both class and table
+ def _Done(self):
+ self._Finished = True
+ self._Table.SetEndFlag()
+
+ ## Return the table containing parsed data
+ #
+ # If the parse complete flag is not set, this method will try to parse the
+ # file before returning the table
+ #
+ def _GetTable(self):
+ if not self._Finished:
+ self.Start()
+ return self._Table
+
+ ## Get the parse complete flag
+ def _GetFinished(self):
+ return self._Finished
+
+ ## Set the complete flag
+ def _SetFinished(self, Value):
+ self._Finished = Value
+
+ ## Use [] style to query data in table, just for readability
+ #
+ # DataInfo = [data_type, scope1(arch), scope2(platform,moduletype)]
+ #
+ def __getitem__(self, DataInfo):
+ if type(DataInfo) != type(()):
+ DataInfo = (DataInfo,)
+ return self.Table.Query(*DataInfo)
+
+ ## Data parser for the common format in different type of file
+ #
+ # The common format in the meta file is like
+ #
+ # xxx1 | xxx2 | xxx3
+ #
+ def _CommonParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## Data parser for the format in which there's path
+ #
+ # Only paths can contain macros, so they must be replaced before the paths are used.
+ #
+ def _PathParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ if len(self._Macros) > 0:
+ for Index in range(0, len(self._ValueList)):
+ Value = self._ValueList[Index]
+ if Value == None or Value == '':
+ continue
+ self._ValueList[Index] = NormPath(Value, self._Macros)
+
+ ## Skip unsupported data
+ def _Skip(self):
+ EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ ## Section header parser
+ #
+ # The section header is always in the following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ ArchList = set()
+ for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+ if Item == '':
+ continue
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ # different section names must not be mixed in one section header
+ if self._SectionName != '' and self._SectionName != ItemList[0].upper():
+ EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName in self.DataType:
+ self._SectionType = self.DataType[self._SectionName]
+ else:
+ self._SectionType = MODEL_UNKNOWN
+ EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = 'COMMON'
+ ArchList.add(S1)
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ S2 = ItemList[2].upper()
+ else:
+ S2 = 'COMMON'
+ self._Scope.append([S1, S2])
+
+ # 'COMMON' must not be used together with specific ARCHs in the same section
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+
+ ## [defines] section parser
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ if self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+ ## DEFINE name=value parser
+ def _MacroParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
+ MacroType = TokenList[0]
+ if len(TokenList) < 2 or TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No macro name/value given",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ TokenList = GetSplitValueList(TokenList[1], TAB_EQUAL_SPLIT, 1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ if len(TokenList) == 1:
+ self._Macros[TokenList[0]] = ''
+ else:
+ # keep the macro definition for later use
+ self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False)
+
+ return TokenList[0], self._Macros[TokenList[0]]
+
+ ## [BuildOptions] section parser
+ def _BuildOptionParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
+ if len(TokenList2) == 2:
+ self._ValueList[0] = TokenList2[0] # toolchain family
+ self._ValueList[1] = TokenList2[1] # keys
+ else:
+ self._ValueList[1] = TokenList[0]
+ if len(TokenList) == 2: # value
+ self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
+
+ if self._ValueList[1].count('_') != 4:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
+ ExtraData=self._CurrentLine,
+ File=self.MetaFile,
+ Line=self._LineIndex+1
+ )
+
+ _SectionParser = {}
+ Table = property(_GetTable)
+ Finished = property(_GetFinished, _SetFinished)
+
+
+## INF file parser class
+#
+# @param FilePath The path of module description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class InfParser(MetaFileParser):
+ # INF file supported data types (one type per section)
+ DataType = {
+ TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+ TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+ TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+ TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+ TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+ ## Constructor of InfParser
+ #
+ # Initialize object of InfParser
+ #
+ # @param FilePath The path of module description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ #
+ def __init__(self, FilePath, FileType, Table, Macros=None):
+ MetaFileParser.__init__(self, FilePath, FileType, Table, Macros)
+
+ ## Parser starter
+ def Start(self):
+ NmakeLine = ''
+ try:
+ self._Content = open(self.MetaFile, 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ # parse the file line by line
+ IsFindBlockComment = False
+
+ for Index in range(0, len(self._Content)):
+ # skip empty, commented, block commented lines
+ Line = CleanString(self._Content[Index], AllowCppStyleComment=True)
+ NextLine = ''
+ if Index + 1 < len(self._Content):
+ NextLine = CleanString(self._Content[Index + 1])
+ if Line == '':
+ continue
+ if Line.find(DataType.TAB_COMMENT_R8_START) > -1:
+ IsFindBlockComment = True
+ continue
+ if Line.find(DataType.TAB_COMMENT_R8_END) > -1:
+ IsFindBlockComment = False
+ continue
+ if IsFindBlockComment:
+ continue
+
+ self._LineIndex = Index
+ self._CurrentLine = Line
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ continue
+ # merge two lines specified by '\' in section NMAKE
+ elif self._SectionType == MODEL_META_DATA_NMAKE:
+ if Line[-1] == '\\':
+ if NextLine == '':
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
+ self._CurrentLine = NmakeLine + Line[0:-1]
+ NmakeLine = ''
+ else:
+ NmakeLine = NmakeLine + ' ' + Line[0:-1]
+ continue
+ else:
+ self._CurrentLine = NmakeLine + Line
+ NmakeLine = ''
+ elif Line.upper().startswith('DEFINE '):
+ # file private macros
+ self._MacroParser()
+ continue
+
+ # section content
+ self._ValueList = ['','','']
+ # parse current line, result will be put in self._ValueList
+ self._SectionParser[self._SectionType](self)
+ if self._ValueList == None:
+ continue
+ #
+ # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ for Arch, Platform in self._Scope:
+ self._Store(self._SectionType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ Platform,
+ self._Owner,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ 0
+ )
+ self._Done()
+
+ ## Data parser for the format in which there's path
+ #
+ # Only paths can contain macros, so they must be replaced before the paths are used.
+ #
+ def _IncludeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ if len(self._Macros) > 0:
+ for Index in range(0, len(self._ValueList)):
+ Value = self._ValueList[Index]
+ if Value.upper().find('$(EFI_SOURCE)\Edk'.upper()) > -1 or Value.upper().find('$(EFI_SOURCE)/Edk'.upper()) > -1:
+ Value = '$(EDK_SOURCE)' + Value[17:]
+ if Value.find('$(EFI_SOURCE)') > -1 or Value.find('$(EDK_SOURCE)') > -1:
+ pass
+ elif Value.startswith('.'):
+ pass
+ elif Value.startswith('$('):
+ pass
+ else:
+ Value = '$(EFI_SOURCE)/' + Value
+
+ if Value == None or Value == '':
+ continue
+ self._ValueList[Index] = NormPath(Value, self._Macros)
+
+ ## Parse [Sources] section
+ #
+ # Only paths can contain macros, so they must be replaced before the paths are used.
+ #
+ def _SourceFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # For Acpi tables, remove macro like ' TABLE_NAME=Sata1'
+ if 'COMPONENT_TYPE' in self._Macros:
+ if self._Macros['COMPONENT_TYPE'].upper() == 'ACPITABLE':
+ self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
+ if self._Macros['BASE_NAME'] == 'Microcode':
+ pass
+ if len(self._Macros) > 0:
+ for Index in range(0, len(self._ValueList)):
+ Value = self._ValueList[Index]
+ if Value == None or Value == '':
+ continue
+ self._ValueList[Index] = NormPath(Value, self._Macros)
+
+ ## Parse [Binaries] section
+ #
+ # Only paths can contain macros, so they must be replaced before the paths are used.
+ #
+ def _BinaryFileParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if not TokenList[0]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if not TokenList[1]:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
+ ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ self._ValueList[1] = NormPath(self._ValueList[1], self._Macros)
+
+ ## [defines] section parser
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ self._Macros[TokenList[0]] = ReplaceMacro(TokenList[1], self._Macros, False)
+ if self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+
+ ## [nmake] section parser (R8.x style only)
+ def _NmakeParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ # remove macros
+ self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, False)
+ # remove self-reference in macro setting
+ #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
+
+ ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(TokenList) > 1:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+ ## [depex] section parser
+ def _DepexParser(self):
+ self._ValueList[0:1] = [self._CurrentLine]
+
+ _SectionParser = {
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_HEADER : _DefineParser,
+ MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
+ MODEL_EFI_INCLUDE : _IncludeParser, # for R8.x modules
+ MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for R8.x modules
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
+ MODEL_META_DATA_NMAKE : _NmakeParser, # for R8.x modules
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_EFI_SOURCE_FILE : _SourceFileParser,
+ MODEL_EFI_GUID : MetaFileParser._CommonParser,
+ MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
+ MODEL_EFI_PPI : MetaFileParser._CommonParser,
+ MODEL_EFI_DEPEX : _DepexParser,
+ MODEL_EFI_BINARY_FILE : _BinaryFileParser,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ }
+
+## DSC file parser class
+#
+# @param FilePath The path of platform description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+# @param Owner Owner ID (for sub-section parsing)
+# @param From ID from which the data comes (for !INCLUDE directive)
+#
+class DscParser(MetaFileParser):
+ # DSC file supported data types (one type per section)
+ DataType = {
+ TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
+ TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
+ TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
+ TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
+ TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_COMPONENTS_SOURCE_OVERRIDE_PATH.upper() : MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH,
+ TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE,
+ TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
+ TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
+ TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
+ TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
+ TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
+ TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
+ }
+
+ # sections which allow "!include" directive
+ _IncludeAllowedSection = [
+ TAB_LIBRARIES.upper(),
+ TAB_LIBRARY_CLASSES.upper(),
+ TAB_SKUIDS.upper(),
+ TAB_COMPONENTS.upper(),
+ TAB_BUILD_OPTIONS.upper(),
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper(),
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper(),
+ TAB_PCDS_FEATURE_FLAG_NULL.upper(),
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper(),
+ TAB_PCDS_DYNAMIC_HII_NULL.upper(),
+ TAB_PCDS_DYNAMIC_VPD_NULL.upper(),
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper(),
+ TAB_PCDS_DYNAMIC_EX_HII_NULL.upper(),
+ TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper(),
+ ]
+
+ # operators which can be used in "!if/!ifdef/!ifndef" directives
+ _OP_ = {
+ "!" : lambda a: not a,
+ "!=" : lambda a,b: a!=b,
+ "==" : lambda a,b: a==b,
+ ">" : lambda a,b: a>b,
+ "<" : lambda a,b: a<b,
+ "=>" : lambda a,b: a>=b,
+ ">=" : lambda a,b: a>=b,
+ "<=" : lambda a,b: a<=b,
+ "=<" : lambda a,b: a<=b,
+ }
+
+ ## Constructor of DscParser
+ #
+ # Initialize object of DscParser
+ #
+ # @param FilePath The path of platform description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ # @param Owner Owner ID (for sub-section parsing)
+ # @param From ID from which the data comes (for !INCLUDE directive)
+ #
+ def __init__(self, FilePath, FileType, Table, Macros=None, Owner=-1, From=-1):
+ MetaFileParser.__init__(self, FilePath, FileType, Table, Macros, Owner, From)
+ # to store conditional directive evaluation result
+ self._Eval = Blist()
+
+ ## Parser starter
+ def Start(self):
+ try:
+ if self._Content == None:
+ self._Content = open(self.MetaFile, 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ for Index in range(0, len(self._Content)):
+ Line = CleanString(self._Content[Index])
+ # skip empty line
+ if Line == '':
+ continue
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ continue
+ # subsection ending
+ elif Line[0] == '}':
+ self._InSubsection = False
+ self._SubsectionType = MODEL_UNKNOWN
+ self._SubsectionName = ''
+ self._Owner = -1
+ continue
+ # subsection header
+ elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
+ self._SubsectionHeaderParser()
+ continue
+ # directive line
+ elif Line[0] == '!':
+ self._DirectiveParser()
+ continue
+ # file private macros
+ elif Line.upper().startswith('DEFINE '):
+ self._MacroParser()
+ continue
+ elif Line.upper().startswith('EDK_GLOBAL '):
+ (Name, Value) = self._MacroParser()
+ for Arch, ModuleType in self._Scope:
+ self._LastItem = self._Store(
+ MODEL_META_DATA_DEFINE,
+ Name,
+ Value,
+ '',
+ Arch,
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
+ continue
+
+ # section content
+ if self._InSubsection:
+ SectionType = self._SubsectionType
+ SectionName = self._SubsectionName
+ if self._Owner == -1:
+ self._Owner = self._LastItem
+ else:
+ SectionType = self._SectionType
+ SectionName = self._SectionName
+
+ self._ValueList = ['', '', '']
+ self._SectionParser[SectionType](self)
+ if self._ValueList == None:
+ continue
+
+ #
+ # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
+ # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
+ #
+ for Arch, ModuleType in self._Scope:
+ self._LastItem = self._Store(
+ SectionType,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
+ self._Done()
+
+ ## [defines] section parser
+ def _DefineParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
+ ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex+1)
+ # 'FLASH_DEFINITION', 'OUTPUT_DIRECTORY' need special processing
+ if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
+ TokenList[1] = NormPath(TokenList[1], self._Macros)
+ self._ValueList[0:len(TokenList)] = TokenList
+
+ ## <subsection_header> parser
+ def _SubsectionHeaderParser(self):
+ self._SubsectionName = self._CurrentLine[1:-1].upper()
+ if self._SubsectionName in self.DataType:
+ self._SubsectionType = self.DataType[self._SubsectionName]
+ else:
+ self._SubsectionType = MODEL_UNKNOWN
+ EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+
+ ## Directive statement parser
+ def _DirectiveParser(self):
+ self._ValueList = ['','','']
+ TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ DirectiveName = self._ValueList[0].upper()
+ if DirectiveName not in self.DataType:
+ EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
+ EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
+ File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine)
+ # keep the directive in database first
+ self._LastItem = self._Store(
+ self.DataType[DirectiveName],
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex + 1,
+ -1,
+ self._LineIndex + 1,
+ -1,
+ 0
+ )
+
+ # process the directive
+ if DirectiveName == "!INCLUDE":
+ if not self._SectionName in self._IncludeAllowedSection:
+ EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
+ # the included file must be relative to the parsing file
+ IncludedFile = os.path.join(self._FileDir, self._ValueList[1])
+ Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
+ # set the parser status with current status
+ Parser._SectionName = self._SectionName
+ Parser._SectionType = self._SectionType
+ Parser._Scope = self._Scope
+ Parser._Enabled = self._Enabled
+ try:
+ Parser.Start()
+ except:
+ EdkLogger.error("Parser", PARSER_ERROR, File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData="Failed to parse content in file %s" % IncludedFile)
+ # update current status with sub-parser's status
+ self._SectionName = Parser._SectionName
+ self._SectionType = Parser._SectionType
+ self._Scope = Parser._Scope
+ self._Enabled = Parser._Enabled
+ else:
+ if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
+ # evaluate the expression
+ Result = self._Evaluate(self._ValueList[1])
+ if DirectiveName == "!IFNDEF":
+ Result = not Result
+ self._Eval.append(Result)
+ elif DirectiveName in ["!ELSEIF"]:
+ # evaluate the expression
+ self._Eval[-1] = (not self._Eval[-1]) & self._Evaluate(self._ValueList[1])
+ elif DirectiveName in ["!ELSE"]:
+ self._Eval[-1] = not self._Eval[-1]
+ elif DirectiveName in ["!ENDIF"]:
+ if len(self._Eval) > 0:
+ self._Eval.pop()
+ else:
+ EdkLogger.error("Parser", FORMAT_INVALID, "!IF..[!ELSE]..!ENDIF doesn't match",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if self._Eval.Result == False:
+ self._Enabled = 0 - len(self._Eval)
+ else:
+ self._Enabled = len(self._Eval)
+
+ ## Evaluate the value of expression in "if/ifdef/ifndef" directives
+ def _Evaluate(self, Expression):
+ TokenList = Expression.split()
+ TokenNumber = len(TokenList)
+ # one operand, guess it's just a macro name
+ if TokenNumber == 1:
+ return TokenList[0] in self._Macros
+ # two operands, suppose it's "!xxx" format
+ elif TokenNumber == 2:
+ Op = TokenList[0]
+ if Op not in self._OP_:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Unsupported operator [%s]" % Op, File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=Expression)
+ if TokenList[1].upper() == 'TRUE':
+ Value = True
+ else:
+ Value = False
+ return self._OP_[Op](Value)
+ # three operands
+ elif TokenNumber == 3:
+ Name = TokenList[0]
+ if Name not in self._Macros:
+ return False
+ Value = TokenList[2]
+ if Value[0] in ["'", '"'] and Value[-1] in ["'", '"']:
+ Value = Value[1:-1]
+ Op = TokenList[1]
+ if Op not in self._OP_:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Unsupported operator [%s]" % Op, File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=Expression)
+ return self._OP_[Op](self._Macros[Name], Value)
+ else:
+ EdkLogger.error('Parser', FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
+ ExtraData=Expression)
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx]
+ # [PcdsDynamicExDefault]
+ # [PcdsDynamicExVpd]
+ # [PcdsDynamicExHii]
+ # [PcdsDynamic]
+ # [PcdsDynamicDefault]
+ # [PcdsDynamicVpd]
+ # [PcdsDynamicHii]
+ #
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(TokenList) == 2:
+ self._ValueList[2] = TokenList[1]
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if self._ValueList[2] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+ ## [components] section parser
+ def _ComponentParser(self):
+ if self._CurrentLine[-1] == '{':
+ self._ValueList[0] = self._CurrentLine[0:-1].strip()
+ self._InSubsection = True
+ else:
+ self._ValueList[0] = self._CurrentLine
+ if len(self._Macros) > 0:
+ self._ValueList[0] = NormPath(self._ValueList[0], self._Macros)
+
+ def _LibraryClassParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
+ ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:len(TokenList)] = TokenList
+ if len(self._Macros) > 0:
+ self._ValueList[1] = NormPath(self._ValueList[1], self._Macros)
+
+ def _ComponentSourceOverridePathParser(self):
+ if len(self._Macros) > 0:
+ self._ValueList[0] = NormPath(self._CurrentLine, self._Macros)
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : _DefineParser,
+ MODEL_EFI_SKU_ID : MetaFileParser._CommonParser,
+ MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._PathParser,
+ MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_VPD : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
+ MODEL_META_DATA_COMPONENT : _ComponentParser,
+ MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH : _ComponentSourceOverridePathParser,
+ MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ }
+
+## DEC file parser class
+#
+# @param FilePath The path of package description file
+# @param FileType The type of the meta file
+# @param Table Database used to retrieve module/package information
+# @param Macros Macros used for replacement in file
+#
+class DecParser(MetaFileParser):
+ # DEC file supported data types (one type per section)
+ DataType = {
+ TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ }
+
+ ## Constructor of DecParser
+ #
+ # Initialize object of DecParser
+ #
+ # @param FilePath The path of package description file
+ # @param FileType The type of the meta file
+ # @param Table Database used to retrieve module/package information
+ # @param Macros Macros used for replacement in file
+ #
+ def __init__(self, FilePath, FileType, Table, Macro=None):
+ MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
+
+ ## Parser starter
+ def Start(self):
+ try:
+ if self._Content == None:
+ self._Content = open(self.MetaFile, 'r').readlines()
+ except:
+ EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
+
+ for Index in range(0, len(self._Content)):
+ Line = CleanString(self._Content[Index])
+ # skip empty line
+ if Line == '':
+ continue
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # section header
+ if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
+ self._SectionHeaderParser()
+ continue
+ elif Line.startswith('DEFINE '):
+ self._MacroParser()
+ continue
+ elif len(self._SectionType) == 0:
+ continue
+
+ # section content
+ self._ValueList = ['','','']
+ self._SectionParser[self._SectionType[0]](self)
+ if self._ValueList == None:
+ continue
+
+ #
+ # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
+ # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
+ #
+ for Arch, ModuleType, Type in self._Scope:
+ self._LastItem = self._Store(
+ Type,
+ self._ValueList[0],
+ self._ValueList[1],
+ self._ValueList[2],
+ Arch,
+ ModuleType,
+ self._Owner,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ 0
+ )
+ self._Done()
+
+ ## Section header parser
+ #
+ # The section header is always in the following format:
+ #
+ # [section_name.arch<.platform|module_type>]
+ #
+ def _SectionHeaderParser(self):
+ self._Scope = []
+ self._SectionName = ''
+ self._SectionType = []
+ ArchList = set()
+ for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
+ if Item == '':
+ continue
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+
+ # different types of PCD are permissible in one section
+ self._SectionName = ItemList[0].upper()
+ if self._SectionName in self.DataType:
+ if self.DataType[self._SectionName] not in self._SectionType:
+ self._SectionType.append(self.DataType[self._SectionName])
+ else:
+ EdkLogger.warn("Parser", "Unrecognized section", File=self.MetaFile,
+ Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+ continue
+
+ if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
+ EdkLogger.error(
+ 'Parser',
+ FORMAT_INVALID,
+ "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
+ File=self.MetaFile,
+ Line=self._LineIndex+1,
+ ExtraData=self._CurrentLine
+ )
+ # S1 is always Arch
+ if len(ItemList) > 1:
+ S1 = ItemList[1].upper()
+ else:
+ S1 = 'COMMON'
+ ArchList.add(S1)
+ # S2 may be Platform or ModuleType
+ if len(ItemList) > 2:
+ S2 = ItemList[2].upper()
+ else:
+ S2 = 'COMMON'
+ if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
+ self._Scope.append([S1, S2, self.DataType[self._SectionName]])
+
+ # 'COMMON' must not be used with specific ARCHs at the same section
+ if 'COMMON' in ArchList and len(ArchList) > 1:
+ EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
+ File=self.MetaFile, Line=self._LineIndex+1, ExtraData=self._CurrentLine)
+
+ ## [guids], [ppis] and [protocols] section parser
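+ #
+ # Each line has the form (hypothetical values):
+ # gExampleGuid = {0x12345678, 0x1234, 0x5678, {0x90, 0xab, 0xcd, 0xef, 0x12, 0x34, 0x56, 0x78}}
+ #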
+ def _GuidParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
+ if len(TokenList) < 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
+ ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
+ ExtraData=self._CurrentLine + \
+ " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0] = TokenList[0]
+ self._ValueList[1] = TokenList[1]
+
+ ## PCD sections parser
+ #
+ # [PcdsFixedAtBuild]
+ # [PcdsPatchableInModule]
+ # [PcdsFeatureFlag]
+ # [PcdsDynamicEx]
+ # [PcdsDynamic]
+ #
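+ # A DEC PCD declaration line has the form (hypothetical example):
+ # gExampleTokenSpaceGuid.PcdExampleValue|0x0|UINT32|0x00000001
+ # i.e. <TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>
+ #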
+ def _PcdParser(self):
+ TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
+ self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ # check PCD information
+ if self._ValueList[0] == '' or self._ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check PCD datum information
+ if len(TokenList) < 2 or TokenList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+
+ ValueList = GetSplitValueList(TokenList[1])
+ # check if there's enough datum information given
+ if len(ValueList) != 3:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check default value
+ if ValueList[0] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check datum type
+ if ValueList[1] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check token of the PCD
+ if ValueList[2] == '':
+ EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
+ ExtraData=self._CurrentLine + \
+ " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ # check format of default value against the datum type
+ IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
+ if not IsValid:
+ EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[2] = TokenList[1]
+
+ _SectionParser = {
+ MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
+ MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
+ MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
+ MODEL_EFI_GUID : _GuidParser,
+ MODEL_EFI_PPI : _GuidParser,
+ MODEL_EFI_PROTOCOL : _GuidParser,
+ MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
+ MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
+ MODEL_PCD_FEATURE_FLAG : _PcdParser,
+ MODEL_PCD_DYNAMIC : _PcdParser,
+ MODEL_PCD_DYNAMIC_EX : _PcdParser,
+ MODEL_UNKNOWN : MetaFileParser._Skip,
+ MODEL_META_DATA_USER_EXTENSION : MetaFileParser._Skip,
+ }
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py
new file mode 100644
index 0000000000..22e2afa4c9
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py
@@ -0,0 +1,275 @@
+## @file
+# This file is used to create/update/query/erase a meta file table
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import Common.EdkLogger as EdkLogger
+from MetaDataTable import Table
+from MetaDataTable import ConvertToSqlString
+
+## Python class representation of table storing module data
+class ModuleTable(Table):
+ # TRICK: use file ID as the part before '.'
+ _ID_STEP_ = 0.00000001
+ _ID_MAX_ = 0.99999999
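+ # For example (assuming the integer file ID is used as IdBase), records of the
+ # file with ID 2 would presumably get IDs 2.00000001, 2.00000002, ... up to 2.99999999,
+ # so the integer part identifies the file and the fraction orders its records.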
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor, Name='Inf', IdBase=0, Temporary=False):
+ Table.__init__(self, Cursor, Name, IdBase, Temporary)
+
+ ## Insert a record into table Inf
+ #
+ # @param Model: Model of an Inf item
+ # @param Value1: Value1 of an Inf item
+ # @param Value2: Value2 of an Inf item
+ # @param Value3: Value3 of an Inf item
+ # @param Scope1: Arch of an Inf item
+ # @param Scope2: Platform of an Inf item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param StartLine: StartLine of an Inf item
+ # @param StartColumn: StartColumn of an Inf item
+ # @param EndLine: EndLine of an Inf item
+ # @param EndColumn: EndColumn of an Inf item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
+ BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ # @param Platform: The Platform attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
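+ # Illustrative example (hypothetical values): Query(Model, Arch='IA32') builds
+ # roughly the following SQL before executing it:
+ # SELECT Value1,Value2,Value3,Scope1,Scope2,ID,StartLine FROM <table>
+ # WHERE Model=<Model> AND Enabled>=0 AND (Scope1='IA32' OR Scope1='COMMON')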
+ def Query(self, Model, Arch=None, Platform=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
+
+ if Arch != None and Arch != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
+ if Platform != None and Platform != 'COMMON':
+ ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
+## Python class representation of table storing package data
+class PackageTable(Table):
+ _ID_STEP_ = 0.00000001
+ _ID_MAX_ = 0.99999999
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor, Name='Dec', IdBase=0, Temporary=False):
+ Table.__init__(self, Cursor, Name, IdBase, Temporary)
+
+ ## Insert table
+ #
+ # Insert a record into table Dec
+ #
+ # @param Model: Model of a Dec item
+ # @param Value1: Value1 of a Dec item
+ # @param Value2: Value2 of a Dec item
+ # @param Value3: Value3 of a Dec item
+ # @param Scope1: Arch of a Dec item
+ # @param Scope2: Module type of a Dec item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param StartLine: StartLine of a Dec item
+ # @param StartColumn: StartColumn of a Dec item
+ # @param EndLine: EndLine of a Dec item
+ # @param EndColumn: EndColumn of a Dec item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
+ BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Arch: The Arch attribute of Record
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Arch=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,ID,StartLine"
+
+ if Arch != None and Arch != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
+## Python class representation of table storing platform data
+class PlatformTable(Table):
+ _ID_STEP_ = 0.00000001
+ _ID_MAX_ = 0.99999999
+ _COLUMN_ = '''
+ ID REAL PRIMARY KEY,
+ Model INTEGER NOT NULL,
+ Value1 TEXT NOT NULL,
+ Value2 TEXT,
+ Value3 TEXT,
+ Scope1 TEXT,
+ Scope2 TEXT,
+ BelongsToItem REAL NOT NULL,
+ FromItem REAL NOT NULL,
+ StartLine INTEGER NOT NULL,
+ StartColumn INTEGER NOT NULL,
+ EndLine INTEGER NOT NULL,
+ EndColumn INTEGER NOT NULL,
+ Enabled INTEGER DEFAULT 0
+ '''
+ # used as table end flag, in case the changes to the database are not committed to the db file
+ _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"
+
+ ## Constructor
+ def __init__(self, Cursor, Name='Dsc', IdBase=0, Temporary=False):
+ Table.__init__(self, Cursor, Name, IdBase, Temporary)
+
+ ## Insert table
+ #
+ # Insert a record into table Dsc
+ #
+ # @param Model: Model of a Dsc item
+ # @param Value1: Value1 of a Dsc item
+ # @param Value2: Value2 of a Dsc item
+ # @param Value3: Value3 of a Dsc item
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param FromItem: The item belongs to which dsc file
+ # @param StartLine: StartLine of a Dsc item
+ # @param StartColumn: StartColumn of a Dsc item
+ # @param EndLine: EndLine of a Dsc item
+ # @param EndColumn: EndColumn of a Dsc item
+ # @param Enabled: Whether this item is enabled
+ #
+ def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1,
+ FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
+ (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
+ return Table.Insert(
+ self,
+ Model,
+ Value1,
+ Value2,
+ Value3,
+ Scope1,
+ Scope2,
+ BelongsToItem,
+ FromItem,
+ StartLine,
+ StartColumn,
+ EndLine,
+ EndColumn,
+ Enabled
+ )
+
+ ## Query table
+ #
+ # @param Model: The Model of Record
+ # @param Scope1: Arch of a Dsc item
+ # @param Scope2: Module type of a Dsc item
+ # @param BelongsToItem: The item belongs to which another item
+ # @param FromItem: The item belongs to which dsc file
+ #
+ # @retval: A recordSet of all found records
+ #
+ def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
+ ConditionString = "Model=%s AND Enabled>=0" % Model
+ ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
+
+ if Scope1 != None and Scope1 != 'COMMON':
+ ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
+ if Scope2 != None and Scope2 != 'COMMON':
+ ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
+
+ if BelongsToItem != None:
+ ConditionString += " AND BelongsToItem=%s" % BelongsToItem
+ else:
+ ConditionString += " AND BelongsToItem<0"
+
+ if FromItem != None:
+ ConditionString += " AND FromItem=%s" % FromItem
+
+ SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
+ return self.Exec(SqlCommand)
+
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
new file mode 100644
index 0000000000..8f0056e197
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -0,0 +1,2274 @@
+## @file
+# This file is used to create a database used by build tool
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os
+import os.path
+
+import Common.EdkLogger as EdkLogger
+import Common.GlobalData as GlobalData
+
+from Common.String import *
+from Common.DataType import *
+from Common.Misc import *
+
+from CommonDataClass.CommonClass import SkuInfoClass
+
+from MetaDataTable import *
+from MetaFileTable import *
+from MetaFileParser import *
+from BuildClassObject import *
+
+## Platform build information from DSC file
+#
+# This class is used to retrieve information stored in database and convert them
+# into PlatformBuildClassObject form for easier use for AutoGen.
+#
+class DscBuildData(PlatformBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
+ MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
+ MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
+ MODEL_PCD_DYNAMIC : "Dynamic",
+ MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
+ MODEL_PCD_DYNAMIC_HII : "DynamicHii",
+ MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
+ MODEL_PCD_DYNAMIC_EX : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
+ MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
+ }
+
+ # dict used to convert part of [Defines] to members of DscBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_DSC_DEFINES_PLATFORM_NAME : "_PlatformName",
+ TAB_DSC_DEFINES_PLATFORM_GUID : "_Guid",
+ TAB_DSC_DEFINES_PLATFORM_VERSION : "_Version",
+ TAB_DSC_DEFINES_DSC_SPECIFICATION : "_DscSpecification",
+ #TAB_DSC_DEFINES_OUTPUT_DIRECTORY : "_OutputDirectory",
+ #TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES : "_SupArchList",
+ #TAB_DSC_DEFINES_BUILD_TARGETS : "_BuildTargets",
+ #TAB_DSC_DEFINES_SKUID_IDENTIFIER : "_SkuName",
+ #TAB_DSC_DEFINES_FLASH_DEFINITION : "_FlashDefinition",
+ TAB_DSC_DEFINES_BUILD_NUMBER : "_BuildNumber",
+ TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName",
+ TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress",
+ TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress",
+ }
+
+ # used to compose dummy library class name for those forced library instances
+ _NullLibraryNumber = 0
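+ # e.g. (illustrative) the first forced NULL library instance listed under a
+ # component gets the synthetic class name 'NULL1', the next one 'NULL2', etc.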
+
+ ## Constructor of DscBuildData
+ #
+ # Initialize object of DscBuildData
+ #
+ # @param FilePath The path of platform description file
+ # @param RawData The raw data of DSC file
+ # @param BuildDataBase Database used to retrieve module/package information
+ # @param Arch The target architecture
+ # @param Platform (not used for DscBuildData)
+ # @param Macros Macros used for replacement in DSC file
+ #
+ def __init__(self, FilePath, RawData, BuildDataBase, Arch='COMMON', Platform='DUMMY', Macros={}):
+ self.MetaFile = FilePath
+ self._RawData = RawData
+ self._Bdb = BuildDataBase
+ self._Arch = Arch
+ self._Macros = Macros
+ self._Clear()
+ RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
+ for Record in RecordList:
+ GlobalData.gEdkGlobal[Record[0]] = Record[1]
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
+
+ ## Set all internally used members of DscBuildData to None
+ def _Clear(self):
+ self._Header = None
+ self._PlatformName = None
+ self._Guid = None
+ self._Version = None
+ self._DscSpecification = None
+ self._OutputDirectory = None
+ self._SupArchList = None
+ self._BuildTargets = None
+ self._SkuName = None
+ self._FlashDefinition = None
+ self._BuildNumber = None
+ self._MakefileName = None
+ self._BsBaseAddress = None
+ self._RtBaseAddress = None
+ self._SkuIds = None
+ self._Modules = None
+ self._LibraryInstances = None
+ self._LibraryClasses = None
+ self._Pcds = None
+ self._BuildOptions = None
+
+ ## Get architecture
+ def _GetArch(self):
+ return self._Arch
+
+ ## Set architecture
+ #
+ # Changing the default ARCH to another may affect all other information
+ # because all information in a platform may be ARCH-related. That's
+ # why we need to clear all internally used members, in order to cause all
+ # information to be re-retrieved.
+ #
+ # @param Value The value of ARCH
+ #
+ def _SetArch(self, Value):
+ if self._Arch == Value:
+ return
+ self._Arch = Value
+ self._Clear()
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
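+ # A typical DSC [Defines] section handled here looks like (hypothetical values):
+ # PLATFORM_NAME = ExamplePkg
+ # PLATFORM_GUID = 11111111-2222-3333-4444-555555555555
+ # SUPPORTED_ARCHITECTURES = IA32|X64
+ # BUILD_TARGETS = DEBUG|RELEASE
+ # OUTPUT_DIRECTORY = Build/ExamplePkg
+ # SKUID_IDENTIFIER = DEFAULT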
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
+ for Record in RecordList:
+ Name = Record[0]
+ # items defined in _PROPERTY_ don't need additional processing
+ if Name in self:
+ self[Name] = Record[1]
+ # some special items in [Defines] section need special treatment
+ elif Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY:
+ self._OutputDirectory = NormPath(Record[1], self._Macros)
+ if ' ' in self._OutputDirectory:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY",
+ File=self.MetaFile, Line=Record[-1],
+ ExtraData=self._OutputDirectory)
+ elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION:
+ self._FlashDefinition = PathClass(NormPath(Record[1], self._Macros), GlobalData.gWorkspace)
+ ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1],
+ ExtraData=ErrorInfo)
+ elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES:
+ self._SupArchList = GetSplitValueList(Record[1], TAB_VALUE_SPLIT)
+ elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
+ self._BuildTargets = GetSplitValueList(Record[1])
+ elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
+ if self._SkuName == None:
+ self._SkuName = Record[1]
+ # set _Header to non-None in order to avoid database re-querying
+ self._Header = 'DUMMY'
+
+ ## Retrieve platform name
+ def _GetPlatformName(self):
+ if self._PlatformName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._PlatformName == None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
+ return self._PlatformName
+
+ ## Retrieve file guid
+ def _GetFileGuid(self):
+ if self._Guid == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._Guid == None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No FILE_GUID", File=self.MetaFile)
+ return self._Guid
+
+ ## Retrieve platform version
+ def _GetVersion(self):
+ if self._Version == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._Version == None:
+ self._Version = ''
+ return self._Version
+
+ ## Retrieve platform description file version
+ def _GetDscSpec(self):
+ if self._DscSpecification == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._DscSpecification == None:
+ self._DscSpecification = ''
+ return self._DscSpecification
+
+ ## Retrieve OUTPUT_DIRECTORY
+ def _GetOutpuDir(self):
+ if self._OutputDirectory == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._OutputDirectory == None:
+ self._OutputDirectory = os.path.join("Build", self._PlatformName)
+ return self._OutputDirectory
+
+ ## Retrieve SUPPORTED_ARCHITECTURES
+ def _GetSupArch(self):
+ if self._SupArchList == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._SupArchList == None:
+ self._SupArchList = ARCH_LIST
+ return self._SupArchList
+
+ ## Retrieve BUILD_TARGETS
+ def _GetBuildTarget(self):
+ if self._BuildTargets == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._BuildTargets == None:
+ self._BuildTargets = ['DEBUG', 'RELEASE']
+ return self._BuildTargets
+
+ ## Retrieve SKUID_IDENTIFIER
+ def _GetSkuName(self):
+ if self._SkuName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._SkuName == None or self._SkuName not in self.SkuIds:
+ self._SkuName = 'DEFAULT'
+ return self._SkuName
+
+ ## Override SKUID_IDENTIFIER
+ def _SetSkuName(self, Value):
+ if Value in self.SkuIds:
+ self._SkuName = Value
+
+ ## Retrieve FLASH_DEFINITION
+ def _GetFdfFile(self):
+ if self._FlashDefinition == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._FlashDefinition == None:
+ self._FlashDefinition = ''
+ return self._FlashDefinition
+
+ ## Retrieve BUILD_NUMBER
+ def _GetBuildNumber(self):
+ if self._BuildNumber == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._BuildNumber == None:
+ self._BuildNumber = ''
+ return self._BuildNumber
+
+ ## Retrieve MAKEFILE_NAME
+ def _GetMakefileName(self):
+ if self._MakefileName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._MakefileName == None:
+ self._MakefileName = ''
+ return self._MakefileName
+
+ ## Retrieve BsBaseAddress
+ def _GetBsBaseAddress(self):
+ if self._BsBaseAddress == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._BsBaseAddress == None:
+ self._BsBaseAddress = ''
+ return self._BsBaseAddress
+
+ ## Retrieve RtBaseAddress
+ def _GetRtBaseAddress(self):
+ if self._RtBaseAddress == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._RtBaseAddress == None:
+ self._RtBaseAddress = ''
+ return self._RtBaseAddress
+
+ ## Retrieve [SkuIds] section information
+ def _GetSkuIds(self):
+ if self._SkuIds == None:
+ self._SkuIds = {}
+ RecordList = self._RawData[MODEL_EFI_SKU_ID]
+ for Record in RecordList:
+ if Record[0] in [None, '']:
+ EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number',
+ File=self.MetaFile, Line=Record[-1])
+ if Record[1] in [None, '']:
+ EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
+ File=self.MetaFile, Line=Record[-1])
+ self._SkuIds[Record[1]] = Record[0]
+ if 'DEFAULT' not in self._SkuIds:
+ self._SkuIds['DEFAULT'] = 0
+ return self._SkuIds
+
+ ## Retrieve [Components] section information
+ def _GetModules(self):
+ if self._Modules != None:
+ return self._Modules
+
+ self._Modules = sdict()
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ ModuleId = Record[5]
+ LineNo = Record[6]
+
+ # check the file validation
+ ErrorCode, ErrorInfo = ModuleFile.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+ # Check duplication
+ if ModuleFile in self._Modules:
+ EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
+
+ Module = ModuleBuildClassObject()
+ Module.MetaFile = ModuleFile
+
+ # get module override path
+ RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId]
+ if RecordList != []:
+ Module.SourceOverridePath = os.path.join(GlobalData.gWorkspace, NormPath(RecordList[0][0], Macros))
+
+ # Check if the source override path exists
+ if not os.path.isdir(Module.SourceOverridePath):
+ EdkLogger.error('build', FILE_NOT_FOUND, Message = 'Source override path does not exist:', File=self.MetaFile, ExtraData=Module.SourceOverridePath, Line=LineNo)
+
+ #Add to GlobalData Variables
+ GlobalData.gOverrideDir[ModuleFile.Key] = Module.SourceOverridePath
+
+ # get module private library instance
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
+ for Record in RecordList:
+ LibraryClass = Record[0]
+ LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ LineNo = Record[-1]
+
+ # check the file validation
+ ErrorCode, ErrorInfo = LibraryPath.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+
+ if LibraryClass == '' or LibraryClass == 'NULL':
+ self._NullLibraryNumber += 1
+ LibraryClass = 'NULL%d' % self._NullLibraryNumber
+ EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass))
+ Module.LibraryClasses[LibraryClass] = LibraryPath
+ if LibraryPath not in self.LibraryInstances:
+ self.LibraryInstances.append(LibraryPath)
+
+ # get module private PCD setting
+ for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
+ MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
+ RecordList = self._RawData[Type, self._Arch, None, ModuleId]
+ for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
+ TokenList = GetSplitValueList(Setting)
+ DefaultValue = TokenList[0]
+ if len(TokenList) > 1:
+ MaxDatumSize = TokenList[1]
+ else:
+ MaxDatumSize = ''
+ TypeString = self._PCD_TYPE_STRING_[Type]
+ Pcd = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ TypeString,
+ '',
+ DefaultValue,
+ '',
+ MaxDatumSize,
+ {},
+ None
+ )
+ Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
+
+ # get module private build options
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
+ if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
+ Module.BuildOptions[ToolChainFamily, ToolChain] = Option
+ else:
+ OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
+ Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
+
+ self._Modules[ModuleFile] = Module
+ return self._Modules
+
+ ## Retrieve all possible library instances used in this platform
+ def _GetLibraryInstances(self):
+ if self._LibraryInstances == None:
+ self._GetLibraryClasses()
+ return self._LibraryInstances
+
+ ## Retrieve [LibraryClasses] information
+ def _GetLibraryClasses(self):
+ if self._LibraryClasses == None:
+ self._LibraryInstances = []
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # library instance for given library class and module type
+ #
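+ # e.g. (illustrative) LibraryClassDict['IA32', 'DXE_DRIVER', 'DebugLib'] returns
+ # the instance registered for that exact scope, falling back to entries keyed
+ # with 'COMMON' when there is no exact match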
+ LibraryClassDict = tdict(True, 3)
+ # track all library class names
+ LibraryClassSet = set()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, LineNo = Record
+ LibraryClassSet.add(LibraryClass)
+ LibraryInstance = PathClass(NormPath(LibraryInstance, Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ # check the file validation
+ ErrorCode, ErrorInfo = LibraryInstance.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+
+ if ModuleType != 'COMMON' and ModuleType not in SUP_MODULE_LIST:
+ EdkLogger.error('build', OPTION_UNKNOWN, "Unknown module type [%s]" % ModuleType,
+ File=self.MetaFile, ExtraData=LibraryInstance, Line=LineNo)
+ LibraryClassDict[Arch, ModuleType, LibraryClass] = LibraryInstance
+ if LibraryInstance not in self._LibraryInstances:
+ self._LibraryInstances.append(LibraryInstance)
+
+ # resolve the specific library instance for each class and each module type
+ self._LibraryClasses = tdict(True)
+ for LibraryClass in LibraryClassSet:
+ # try all possible module types
+ for ModuleType in SUP_MODULE_LIST:
+ LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
+ if LibraryInstance == None:
+ continue
+ self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
+
+ # for R8 style library instances, which are listed in different section
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch]
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ LineNo = Record[-1]
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate('.inf')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
+ ExtraData=ErrorInfo)
+ if File not in self._LibraryInstances:
+ self._LibraryInstances.append(File)
+ #
+ # we need the module name as the library class name, so we have
+ # to parse it here. (self._Bdb[] will trigger a file parse if it
+ # hasn't been parsed)
+ #
+ Library = self._Bdb[File, self._Arch]
+ self._LibraryClasses[Library.BaseName, ':dummy:'] = Library
+ return self._LibraryClasses
+
+ ## Retrieve all PCD settings in platform
+ def _GetPcds(self):
+ if self._Pcds == None:
+ self._Pcds = {}
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_DEFAULT))
+ self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_HII))
+ self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_VPD))
+ self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_EX_DEFAULT))
+ self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_EX_HII))
+ self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_EX_VPD))
+ return self._Pcds
+
+ ## Retrieve [BuildOptions]
+ def _GetBuildOptions(self):
+ if self._BuildOptions == None:
+ self._BuildOptions = {}
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION]
+ for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
+ self._BuildOptions[ToolChainFamily, ToolChain] = Option
+ return self._BuildOptions
+
+ ## Retrieve non-dynamic PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object contains settings of given PCD type
+ #
+ def _GetPcd(self, Type):
+ Pcds = {}
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # PCD settings for certain ARCH
+ #
+ PcdDict = tdict(True, 3)
+ PcdSet = set()
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
+ # Remove redundant PCD candidates
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '', '']
+ Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ PcdValue, DatumType, MaxDatumSize = ValueList
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ PcdValue,
+ '',
+ MaxDatumSize,
+ {},
+ None
+ )
+ return Pcds
+
+ ## Retrieve dynamic PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object contains settings of given PCD type
+ #
+ def _GetDynamicPcd(self, Type):
+ Pcds = {}
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # PCD settings for certain ARCH and SKU
+ #
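+ # here the lookup key has four parts, e.g. (illustrative)
+ # PcdDict['IA32', 'DEFAULT', 'PcdExampleValue', 'gExampleTokenSpaceGuid'],
+ # with 'COMMON' keys acting as the wildcard fallback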
+ PcdDict = tdict(True, 4)
+ PcdSet = set()
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '', '']
+ Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ PcdValue, DatumType, MaxDatumSize = ValueList
+
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', '', PcdValue)
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ PcdValue,
+ '',
+ MaxDatumSize,
+ {self.SkuName : SkuInfo},
+ None
+ )
+ return Pcds
+
+ ## Retrieve dynamic HII PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object contains settings of given PCD type
+ #
+ def _GetDynamicHiiPcd(self, Type):
+ Pcds = {}
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # PCD settings for certain ARCH and SKU
+ #
+ PcdDict = tdict(True, 4)
+ PcdSet = set()
+ RecordList = self._RawData[Type, self._Arch]
+ # Find out all possible PCD candidates for self._Arch
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '', '', '']
+ Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ VariableName, VariableGuid, VariableOffset, DefaultValue = ValueList
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue)
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ '',
+ DefaultValue,
+ '',
+ '',
+ {self.SkuName : SkuInfo},
+ None
+ )
+ return Pcds
+
+ ## Retrieve dynamic VPD PCD settings
+ #
+ # @param Type PCD type
+ #
+ # @retval a dict object contains settings of given PCD type
+ #
+ def _GetDynamicVpdPcd(self, Type):
+ Pcds = {}
+ #
+ # tdict is a special dict kind of type, used for selecting correct
+ # PCD settings for certain ARCH and SKU
+ #
+ PcdDict = tdict(True, 4)
+ PcdSet = set()
+ # Find out all possible PCD candidates for self._Arch
+ RecordList = self._RawData[Type, self._Arch]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+ PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
+ # Remove redundant PCD candidates, per the ARCH and SKU
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '']
+ Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ VpdOffset, MaxDatumSize = ValueList
+
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)
+ Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ '',
+ '',
+ '',
+ MaxDatumSize,
+ {self.SkuName : SkuInfo},
+ None
+ )
+ return Pcds
+
+ ## Add external modules
+ #
+ # The external modules are mostly those listed in FDF file, which don't
+ # need "build".
+ #
+ # @param FilePath The path of module description file
+ #
+ def AddModule(self, FilePath):
+ FilePath = NormPath(FilePath)
+ if FilePath not in self.Modules:
+ Module = ModuleBuildClassObject()
+ Module.MetaFile = FilePath
+ self.Modules.append(Module)
+
+ ## Add external PCDs
+ #
+ # The external PCDs are mostly those listed in FDF file to specify address
+ # or offset information.
+ #
+ # @param Name Name of the PCD
+ # @param Guid Token space guid of the PCD
+ # @param Value Value of the PCD
+ #
+ def AddPcd(self, Name, Guid, Value):
+ if (Name, Guid) not in self.Pcds:
+ self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None)
+ self.Pcds[Name, Guid].DefaultValue = Value
+
+ Arch = property(_GetArch, _SetArch)
+ Platform = property(_GetPlatformName)
+ PlatformName = property(_GetPlatformName)
+ Guid = property(_GetFileGuid)
+ Version = property(_GetVersion)
+ DscSpecification = property(_GetDscSpec)
+ OutputDirectory = property(_GetOutpuDir)
+ SupArchList = property(_GetSupArch)
+ BuildTargets = property(_GetBuildTarget)
+ SkuName = property(_GetSkuName, _SetSkuName)
+ FlashDefinition = property(_GetFdfFile)
+ BuildNumber = property(_GetBuildNumber)
+ MakefileName = property(_GetMakefileName)
+ BsBaseAddress = property(_GetBsBaseAddress)
+ RtBaseAddress = property(_GetRtBaseAddress)
+
+ SkuIds = property(_GetSkuIds)
+ Modules = property(_GetModules)
+ LibraryInstances = property(_GetLibraryInstances)
+ LibraryClasses = property(_GetLibraryClasses)
+ Pcds = property(_GetPcds)
+ BuildOptions = property(_GetBuildOptions)
+
+## Package build information from DEC file
+#
+# This class is used to retrieve information stored in database and convert them
+# into PackageBuildClassObject form for easier use for AutoGen.
+#
+class DecBuildData(PackageBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
+ MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
+ MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
+ MODEL_PCD_DYNAMIC : "Dynamic",
+ MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
+ MODEL_PCD_DYNAMIC_HII : "DynamicHii",
+ MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
+ MODEL_PCD_DYNAMIC_EX : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
+ MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
+ }
+
+ # dict used to convert part of [Defines] to members of DecBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
+ TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
+ TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
+ }
+
+
+ ## Constructor of DecBuildData
+ #
+ # Initialize object of DecBuildData
+ #
+ # @param File The path of package declaration (DEC) file
+ # @param RawData The raw data of DEC file
+ # @param BuildDataBase Database used to retrieve module information
+ # @param Arch The target architecture
+ # @param Platform (not used for DecBuildData)
+ # @param Macros Macros used for replacement in the DEC file
+ #
+ def __init__(self, File, RawData, BuildDataBase, Arch='COMMON', Platform='DUMMY', Macros={}):
+ self.MetaFile = File
+ self._PackageDir = File.Dir
+ self._RawData = RawData
+ self._Bdb = BuildDataBase
+ self._Arch = Arch
+ self._Macros = Macros
+ self._Clear()
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
+
+ ## Set all internally used members of DecBuildData to None
+ def _Clear(self):
+ self._Header = None
+ self._PackageName = None
+ self._Guid = None
+ self._Version = None
+ self._Protocols = None
+ self._Ppis = None
+ self._Guids = None
+ self._Includes = None
+ self._LibraryClasses = None
+ self._Pcds = None
+
+ ## Get architecture
+ def _GetArch(self):
+ return self._Arch
+
+ ## Set architecture
+ #
+ # Changing the default ARCH to another may affect all other information
+ # because all information in a platform may be ARCH-related. That's
+ # why we need to clear all internally used members, in order to cause all
+ # information to be re-retrieved.
+ #
+ # @param Value The value of ARCH
+ #
+ def _SetArch(self, Value):
+ if self._Arch == Value:
+ return
+ self._Arch = Value
+ self._Clear()
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER]
+ for Record in RecordList:
+ Name = Record[0]
+ if Name in self:
+ self[Name] = Record[1]
+ self._Header = 'DUMMY'
+
+ ## Retrieve package name
+ def _GetPackageName(self):
+ if self._PackageName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._PackageName == None:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
+ return self._PackageName
+
+ ## Retrieve file guid
+ def _GetFileGuid(self):
+ if self._Guid == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._Guid == None:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
+ return self._Guid
+
+ ## Retrieve package version
+ def _GetVersion(self):
+ if self._Version == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._Version == None:
+ self._Version = ''
+ return self._Version
+
+ ## Retrieve protocol definitions (name/value pairs)
+ def _GetProtocol(self):
+ if self._Protocols == None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # protocol definition for given ARCH
+ #
+ ProtocolDict = tdict(True)
+ NameList = []
+ # find out all protocol definitions for specific and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
+ for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
+ if Name not in NameList:
+ NameList.append(Name)
+ ProtocolDict[Arch, Name] = Guid
+ # use sdict to keep the order
+ self._Protocols = sdict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch; if self._Arch is not found, tdict
+ # automatically falls back to the 'common' ARCH
+ #
+ self._Protocols[Name] = ProtocolDict[self._Arch, Name]
+ return self._Protocols
+
+ ## Retrieve PPI definitions (name/value pairs)
+ def _GetPpi(self):
+ if self._Ppis == None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # PPI definition for given ARCH
+ #
+ PpiDict = tdict(True)
+ NameList = []
+ # find out all PPI definitions for specific arch and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
+ for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
+ if Name not in NameList:
+ NameList.append(Name)
+ PpiDict[Arch, Name] = Guid
+ # use sdict to keep the order
+ self._Ppis = sdict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch; if self._Arch is not found, tdict
+ # automatically falls back to the 'common' ARCH
+ #
+ self._Ppis[Name] = PpiDict[self._Arch, Name]
+ return self._Ppis
+
+ ## Retrieve GUID definitions (name/value pairs)
+ def _GetGuid(self):
+ if self._Guids == None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # GUID definition for given ARCH
+ #
+ GuidDict = tdict(True)
+ NameList = []
+ # find out all GUID definitions for specific and 'common' arch
+ RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
+ for Name, Guid, Dummy, Arch, ID, LineNo in RecordList:
+ if Name not in NameList:
+ NameList.append(Name)
+ GuidDict[Arch, Name] = Guid
+ # use sdict to keep the order
+ self._Guids = sdict()
+ for Name in NameList:
+ #
+ # limit the ARCH to self._Arch; if self._Arch is not found, tdict
+ # automatically falls back to the 'common' ARCH
+ #
+ self._Guids[Name] = GuidDict[self._Arch, Name]
+ return self._Guids
+
+ ## Retrieve public include paths declared in this package
+ def _GetInclude(self):
+ if self._Includes == None:
+ self._Includes = []
+ RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
+ LineNo = Record[-1]
+ # validate the path
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+
+ # avoid duplicate include path
+ if File not in self._Includes:
+ self._Includes.append(File)
+ return self._Includes
+
+ ## Retrieve library class declarations (not used in build at present)
+ def _GetLibraryClass(self):
+ if self._LibraryClasses == None:
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # library class declaration for given ARCH
+ #
+ LibraryClassDict = tdict(True)
+ LibraryClassSet = set()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ for LibraryClass, File, Dummy, Arch, ID, LineNo in RecordList:
+ File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ LibraryClassSet.add(LibraryClass)
+ LibraryClassDict[Arch, LibraryClass] = File
+ self._LibraryClasses = sdict()
+ for LibraryClass in LibraryClassSet:
+ self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
+ return self._LibraryClasses
+
+ ## Retrieve PCD declarations
+ def _GetPcds(self):
+ if self._Pcds == None:
+ self._Pcds = {}
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
+ return self._Pcds
+
+ ## Retrieve PCD declarations for given type
+ def _GetPcd(self, Type):
+ Pcds = {}
+ #
+ # tdict is a special kind of dict, used for selecting correct
+ # PCD declaration for given ARCH
+ #
+ PcdDict = tdict(True, 3)
+ # for summarizing PCD
+ PcdSet = set()
+ # find out all PCDs of the 'type'
+ RecordList = self._RawData[Type, self._Arch]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, Dummy1, Dummy2 in RecordList:
+ PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '', '']
+ #
+ # limit the ARCH to self._Arch; if self._Arch is not found, tdict
+ # automatically falls back to the 'common' ARCH and tries again
+ #
+ Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ DefaultValue, DatumType, TokenNumber = ValueList
+ Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ self._PCD_TYPE_STRING_[Type],
+ DatumType,
+ DefaultValue,
+ TokenNumber,
+ '',
+ {},
+ None
+ )
+ return Pcds
+
+
+ Arch = property(_GetArch, _SetArch)
+ PackageName = property(_GetPackageName)
+ Guid = property(_GetFileGuid)
+ Version = property(_GetVersion)
+
+ Protocols = property(_GetProtocol)
+ Ppis = property(_GetPpi)
+ Guids = property(_GetGuid)
+ Includes = property(_GetInclude)
+ LibraryClasses = property(_GetLibraryClass)
+ Pcds = property(_GetPcds)
+
+## Module build information from INF file
+#
+# This class is used to retrieve information stored in database and convert them
+# into ModuleBuildClassObject form for easier use for AutoGen.
+#
+class InfBuildData(ModuleBuildClassObject):
+ # dict used to convert PCD type in database to string used by build tool
+ _PCD_TYPE_STRING_ = {
+ MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
+ MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
+ MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
+ MODEL_PCD_DYNAMIC : "Dynamic",
+ MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
+ MODEL_PCD_DYNAMIC_HII : "DynamicHii",
+ MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
+ MODEL_PCD_DYNAMIC_EX : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
+ MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
+ MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
+ }
+
+ # dict used to convert part of [Defines] to members of InfBuildData directly
+ _PROPERTY_ = {
+ #
+ # Required Fields
+ #
+ TAB_INF_DEFINES_BASE_NAME : "_BaseName",
+ TAB_INF_DEFINES_FILE_GUID : "_Guid",
+ TAB_INF_DEFINES_MODULE_TYPE : "_ModuleType",
+ #
+ # Optional Fields
+ #
+ TAB_INF_DEFINES_INF_VERSION : "_AutoGenVersion",
+ TAB_INF_DEFINES_COMPONENT_TYPE : "_ComponentType",
+ TAB_INF_DEFINES_MAKEFILE_NAME : "_MakefileName",
+ #TAB_INF_DEFINES_CUSTOM_MAKEFILE : "_CustomMakefile",
+ TAB_INF_DEFINES_VERSION_NUMBER : "_Version",
+ TAB_INF_DEFINES_VERSION_STRING : "_Version",
+ TAB_INF_DEFINES_VERSION : "_Version",
+ TAB_INF_DEFINES_PCD_IS_DRIVER : "_PcdIsDriver",
+ TAB_INF_DEFINES_SHADOW : "_Shadow",
+
+ TAB_COMPONENTS_SOURCE_OVERRIDE_PATH : "_SourceOverridePath",
+ }
+
+ # dict used to convert Component type to Module type
+ _MODULE_TYPE_ = {
+ "LIBRARY" : "BASE",
+ "SECURITY_CORE" : "SEC",
+ "PEI_CORE" : "PEI_CORE",
+ "COMBINED_PEIM_DRIVER" : "PEIM",
+ "PIC_PEIM" : "PEIM",
+ "RELOCATABLE_PEIM" : "PEIM",
+ "PE32_PEIM" : "PEIM",
+ "BS_DRIVER" : "DXE_DRIVER",
+ "RT_DRIVER" : "DXE_RUNTIME_DRIVER",
+ "SAL_RT_DRIVER" : "DXE_SAL_DRIVER",
+ # "BS_DRIVER" : "DXE_SMM_DRIVER",
+ # "BS_DRIVER" : "UEFI_DRIVER",
+ "APPLICATION" : "UEFI_APPLICATION",
+ "LOGO" : "BASE",
+ }
+
+ # regular expression for converting XXX_FLAGS in [nmake] section to new type
+ _NMAKE_FLAG_PATTERN_ = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
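+ # e.g. (illustrative) 'EBC_C_PROJ_FLAGS_DLL' and 'LINK_FLAGS' both match, with the
+ # captured group giving the old tool name ('C' and 'LINK' respectively)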
+ # dict used to convert old tool name used in [nmake] section to new ones
+ _TOOL_CODE_ = {
+ "C" : "CC",
+ "LIB" : "SLINK",
+ "LINK" : "DLINK",
+ }
+
+
+ ## Constructor of InfBuildData
+ #
+ # Initialize object of InfBuildData
+ #
+ # @param FilePath The path of module description (INF) file
+ # @param RawData The raw data of INF file
+ # @param BuildDatabase Database used to retrieve module/package information
+ # @param Arch The target architecture
+ # @param Platform The name of platform employing this module
+ # @param Macros Macros used for replacement in the INF file
+ #
+ def __init__(self, FilePath, RawData, BuildDatabase, Arch='COMMON', Platform='COMMON', Macros={}):
+ self.MetaFile = FilePath
+ self._ModuleDir = FilePath.Dir
+ self._RawData = RawData
+ self._Bdb = BuildDatabase
+ self._Arch = Arch
+ self._Platform = 'COMMON'
+ self._Macros = Macros
+ self._SourceOverridePath = None
+ if FilePath.Key in GlobalData.gOverrideDir:
+ self._SourceOverridePath = GlobalData.gOverrideDir[FilePath.Key]
+ self._Clear()
+
+ ## XXX[key] = value
+ def __setitem__(self, key, value):
+ self.__dict__[self._PROPERTY_[key]] = value
+
+ ## value = XXX[key]
+ def __getitem__(self, key):
+ return self.__dict__[self._PROPERTY_[key]]
+
+ ## "in" test support
+ def __contains__(self, key):
+ return key in self._PROPERTY_
+
+ ## Set all internally used members of InfBuildData to None
+ def _Clear(self):
+ self._Header_ = None
+ self._AutoGenVersion = None
+ self._BaseName = None
+ self._ModuleType = None
+ self._ComponentType = None
+ self._BuildType = None
+ self._Guid = None
+ self._Version = None
+ self._PcdIsDriver = None
+ self._BinaryModule = None
+ self._Shadow = None
+ self._MakefileName = None
+ self._CustomMakefile = None
+ self._Specification = None
+ self._LibraryClass = None
+ self._ModuleEntryPointList = None
+ self._ModuleUnloadImageList = None
+ self._ConstructorList = None
+ self._DestructorList = None
+ self._Defs = None
+ self._Binaries = None
+ self._Sources = None
+ self._LibraryClasses = None
+ self._Libraries = None
+ self._Protocols = None
+ self._Ppis = None
+ self._Guids = None
+ self._Includes = None
+ self._Packages = None
+ self._Pcds = None
+ self._BuildOptions = None
+ self._Depex = None
+ #self._SourceOverridePath = None
+
+ ## Get architecture
+ def _GetArch(self):
+ return self._Arch
+
+ ## Set architecture
+ #
+ # Changing the default ARCH to another may affect all other information
+ # because all information in a platform may be ARCH-related. That's
+ # why we need to clear all internally used members, in order to cause all
+ # information to be re-retrieved.
+ #
+ # @param Value The value of ARCH
+ #
+ def _SetArch(self, Value):
+ if self._Arch == Value:
+ return
+ self._Arch = Value
+ self._Clear()
+
+ ## Return the name of platform employing this module
+ def _GetPlatform(self):
+ return self._Platform
+
+ ## Change the name of platform employing this module
+ #
+ # Changing the default name of platform to another may affect some information
+ # because they may be PLATFORM-related. That's why we need to clear all internally
+ # used members, in order to cause all information to be re-retrieved.
+ #
+ def _SetPlatform(self, Value):
+ if self._Platform == Value:
+ return
+ self._Platform = Value
+ self._Clear()
+
+ ## Retrieve all information in [Defines] section
+ #
+ # (Retrieving all [Defines] information in one-shot is just to save time.)
+ #
+ def _GetHeaderInfo(self):
+ RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ Name = Record[0]
+ # items defined in _PROPERTY_ don't need additional processing
+ if Name in self:
+ self[Name] = Record[1]
+ # some special items in [Defines] section need special treatment
+ elif Name == 'EFI_SPECIFICATION_VERSION':
+ if self._Specification == None:
+ self._Specification = sdict()
+ self._Specification[Name] = Record[1]
+ elif Name == 'EDK_RELEASE_VERSION':
+ if self._Specification == None:
+ self._Specification = sdict()
+ self._Specification[Name] = Record[1]
+ elif Name == 'PI_SPECIFICATION_VERSION':
+ if self._Specification == None:
+ self._Specification = sdict()
+ self._Specification[Name] = Record[1]
+ elif Name == 'LIBRARY_CLASS':
+ if self._LibraryClass == None:
+ self._LibraryClass = []
+ ValueList = GetSplitValueList(Record[1])
+ LibraryClass = ValueList[0]
+ if len(ValueList) > 1:
+ SupModuleList = GetSplitValueList(ValueList[1], ' ')
+ else:
+ SupModuleList = SUP_MODULE_LIST
+ self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
+ elif Name == 'ENTRY_POINT':
+ if self._ModuleEntryPointList == None:
+ self._ModuleEntryPointList = []
+ self._ModuleEntryPointList.append(Record[1])
+ elif Name == 'UNLOAD_IMAGE':
+ if self._ModuleUnloadImageList == None:
+ self._ModuleUnloadImageList = []
+ if Record[1] == '':
+ continue
+ self._ModuleUnloadImageList.append(Record[1])
+ elif Name == 'CONSTRUCTOR':
+ if self._ConstructorList == None:
+ self._ConstructorList = []
+ if Record[1] == '':
+ continue
+ self._ConstructorList.append(Record[1])
+ elif Name == 'DESTRUCTOR':
+ if self._DestructorList == None:
+ self._DestructorList = []
+ if Record[1] == '':
+ continue
+ self._DestructorList.append(Record[1])
+ elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
+ TokenList = GetSplitValueList(Record[1])
+ if self._CustomMakefile == None:
+ self._CustomMakefile = {}
+ if len(TokenList) < 2:
+ self._CustomMakefile['MSFT'] = TokenList[0]
+ self._CustomMakefile['GCC'] = TokenList[0]
+ else:
+ if TokenList[0] not in ['MSFT', 'GCC']:
+ EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
+ "No supported family [%s]" % TokenList[0],
+ File=self.MetaFile, Line=Record[-1])
+ self._CustomMakefile[TokenList[0]] = TokenList[1]
+ else:
+ if self._Defs == None:
+ self._Defs = sdict()
+ self._Defs[Name] = Record[1]
+
+ #
+ # Retrieve information in sections specific to R8.x modules
+ #
+ if self._AutoGenVersion >= 0x00010005: # _AutoGenVersion may be None, which is less than anything
+ if not self._ModuleType:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "MODULE_TYPE is not given", File=self.MetaFile)
+ if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \
+ and 'PCI_CLASS_CODE' in self._Defs:
+ self._BuildType = 'UEFI_OPTIONROM'
+ else:
+ self._BuildType = self._ModuleType.upper()
+ else:
+ if not self._ComponentType:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "COMPONENT_TYPE is not given", File=self.MetaFile)
+ self._BuildType = self._ComponentType.upper()
+ if self._ComponentType in self._MODULE_TYPE_:
+ self._ModuleType = self._MODULE_TYPE_[self._ComponentType]
+ if self._ComponentType == 'LIBRARY':
+ self._LibraryClass = [LibraryClassObject(self._BaseName, SUP_MODULE_LIST)]
+ # make use of some [nmake] section macros
+ RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform]
+ for Name,Value,Dummy,Arch,Platform,ID,LineNo in RecordList:
+ Value = Value.replace('$(PROCESSOR)', self._Arch)
+ Name = Name.replace('$(PROCESSOR)', self._Arch)
+ Name, Value = ReplaceMacros((Name, Value), GlobalData.gEdkGlobal, True)
+ if Name == "IMAGE_ENTRY_POINT":
+ if self._ModuleEntryPointList == None:
+ self._ModuleEntryPointList = []
+ self._ModuleEntryPointList.append(Value)
+ elif Name == "DPX_SOURCE":
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ File = PathClass(NormPath(Value, Macros), self._ModuleDir, Arch=self._Arch)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False)
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
+ File=self.MetaFile, Line=LineNo)
+ if self._Sources == None:
+ self._Sources = []
+ self._Sources.append(File)
+ else:
+ ToolList = self._NMAKE_FLAG_PATTERN_.findall(Name)
+ if len(ToolList) != 1:
+ pass
+# EdkLogger.warn("build", "Don't know how to do with macro [%s]" % Name,
+# File=self.MetaFile, Line=LineNo)
+ else:
+ if self._BuildOptions == None:
+ self._BuildOptions = sdict()
+
+ if ToolList[0] in self._TOOL_CODE_:
+ Tool = self._TOOL_CODE_[ToolList[0]]
+ else:
+ Tool = ToolList[0]
+ ToolChain = "*_*_*_%s_FLAGS" % Tool
+ ToolChainFamily = 'MSFT' # R8.x only supports the MSFT tool chain
+ # ignore macros in the value that were not replaced
+ ValueList = GetSplitValueList(' ' + Value, '/D')
+ Dummy = ValueList[0]
+ for Index in range(1, len(ValueList)):
+ if ValueList[Index][-1] == '=' or ValueList[Index] == '':
+ continue
+ Dummy = Dummy + ' /D ' + ValueList[Index]
+ Value = Dummy.strip()
+ if (ToolChainFamily, ToolChain) not in self._BuildOptions:
+ self._BuildOptions[ToolChainFamily, ToolChain] = Value
+ else:
+ OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
+ self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Value
+ # set _Header_ to a non-None value in order to avoid re-querying the database
+ self._Header_ = 'DUMMY'
+
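+ # Illustrative only (macro name and tool mapping assumed): an R8.x [nmake]
+ # entry such as
+ #   C_FLAGS = /D EFI32
+ # is matched by _NMAKE_FLAG_PATTERN_, its tool letter is translated through
+ # _TOOL_CODE_ (e.g. 'C' -> 'CC'), and the value is recorded as a module
+ # build option under the key ('MSFT', '*_*_*_CC_FLAGS').
+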
+ ## Retrieve file version
+ def _GetInfVersion(self):
+ if self._AutoGenVersion == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._AutoGenVersion == None:
+ self._AutoGenVersion = 0x00010000
+ return self._AutoGenVersion
+
+ ## Retrieve BASE_NAME
+ def _GetBaseName(self):
+ if self._BaseName == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._BaseName == None:
+ EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "BASE_NAME is not given", File=self.MetaFile)
+ return self._BaseName
+
+ ## Retrieve MODULE_TYPE
+ def _GetModuleType(self):
+ if self._ModuleType == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._ModuleType == None:
+ self._ModuleType = 'BASE'
+ if self._ModuleType not in SUP_MODULE_LIST:
+ self._ModuleType = "USER_DEFINED"
+ return self._ModuleType
+
+ ## Retrieve COMPONENT_TYPE
+ def _GetComponentType(self):
+ if self._ComponentType == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._ComponentType == None:
+ self._ComponentType = 'USER_DEFINED'
+ return self._ComponentType
+
+ ## Retrieve "BUILD_TYPE"
+ def _GetBuildType(self):
+ if self._BuildType == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if not self._BuildType:
+ self._BuildType = "BASE"
+ return self._BuildType
+
+ ## Retrieve file guid
+ def _GetFileGuid(self):
+ if self._Guid == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._Guid == None:
+ self._Guid = '00000000-0000-0000-0000-000000000000'
+ return self._Guid
+
+ ## Retrieve module version
+ def _GetVersion(self):
+ if self._Version == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._Version == None:
+ self._Version = '0.0'
+ return self._Version
+
+ ## Retrieve PCD_IS_DRIVER
+ def _GetPcdIsDriver(self):
+ if self._PcdIsDriver == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._PcdIsDriver == None:
+ self._PcdIsDriver = ''
+ return self._PcdIsDriver
+
+ ## Retrieve SHADOW
+ def _GetShadow(self):
+ if self._Shadow == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._Shadow != None and self._Shadow.upper() == 'TRUE':
+ self._Shadow = True
+ else:
+ self._Shadow = False
+ return self._Shadow
+
+ ## Retrieve CUSTOM_MAKEFILE
+ def _GetMakefile(self):
+ if self._CustomMakefile == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._CustomMakefile == None:
+ self._CustomMakefile = {}
+ return self._CustomMakefile
+
+ ## Retrieve EFI_SPECIFICATION_VERSION
+ def _GetSpec(self):
+ if self._Specification == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._Specification == None:
+ self._Specification = {}
+ return self._Specification
+
+ ## Retrieve LIBRARY_CLASS
+ def _GetLibraryClass(self):
+ if self._LibraryClass == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._LibraryClass == None:
+ self._LibraryClass = []
+ return self._LibraryClass
+
+ ## Retrieve ENTRY_POINT
+ def _GetEntryPoint(self):
+ if self._ModuleEntryPointList == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._ModuleEntryPointList == None:
+ self._ModuleEntryPointList = []
+ return self._ModuleEntryPointList
+
+ ## Retrieve UNLOAD_IMAGE
+ def _GetUnloadImage(self):
+ if self._ModuleUnloadImageList == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._ModuleUnloadImageList == None:
+ self._ModuleUnloadImageList = []
+ return self._ModuleUnloadImageList
+
+ ## Retrieve CONSTRUCTOR
+ def _GetConstructor(self):
+ if self._ConstructorList == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._ConstructorList == None:
+ self._ConstructorList = []
+ return self._ConstructorList
+
+ ## Retrieve DESTRUCTOR
+ def _GetDestructor(self):
+ if self._DestructorList == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._DestructorList == None:
+ self._DestructorList = []
+ return self._DestructorList
+
+ ## Retrieve defines other than the above ones
+ def _GetDefines(self):
+ if self._Defs == None:
+ if self._Header_ == None:
+ self._GetHeaderInfo()
+ if self._Defs == None:
+ self._Defs = sdict()
+ return self._Defs
+
+ ## Retrieve binary files
+ def _GetBinaryFiles(self):
+ if self._Binaries == None:
+ self._Binaries = []
+ RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource, 'PROCESSOR':self._Arch}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ FileType = Record[0]
+ LineNo = Record[-1]
+ Target = 'COMMON'
+ FeatureFlag = []
+ if Record[2]:
+ TokenList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
+ if TokenList:
+ Target = TokenList[0]
+ if len(TokenList) > 1:
+ FeatureFlag = Record[1:]
+
+ File = PathClass(NormPath(Record[1], Macros), self._ModuleDir, '', FileType, True, self._Arch, '', Target)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ self._Binaries.append(File)
+ return self._Binaries
+
+ ## Retrieve source files
+ def _GetSourceFiles(self):
+ if self._Sources == None:
+ self._Sources = []
+ RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource, 'PROCESSOR':self._Arch}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ LineNo = Record[-1]
+ ToolChainFamily = Record[1]
+ TagName = Record[2]
+ ToolCode = Record[3]
+ FeatureFlag = Record[4]
+ if self._AutoGenVersion < 0x00010005:
+ # old module source files (R8)
+ File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, self._SourceOverridePath,
+ '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate(CaseSensitive=False)
+ if ErrorCode != 0:
+ if File.Ext.lower() == '.h':
+ EdkLogger.warn('build', 'Include file not found', ExtraData=ErrorInfo,
+ File=self.MetaFile, Line=LineNo)
+ continue
+ else:
+ EdkLogger.error('build', ErrorCode, ExtraData=File, File=self.MetaFile, Line=LineNo)
+ else:
+ File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, '',
+ '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate()
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+
+ self._Sources.append(File)
+ return self._Sources
+
+ ## Retrieve library classes employed by this module
+ def _GetLibraryClassUses(self):
+ if self._LibraryClasses == None:
+ self._LibraryClasses = sdict()
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ Lib = Record[0]
+ Instance = Record[1]
+ if Instance != None and Instance != '':
+ Instance = NormPath(Instance, self._Macros)
+ self._LibraryClasses[Lib] = Instance
+ return self._LibraryClasses
+
+ ## Retrieve library names (for R8.x style of modules)
+ def _GetLibraryNames(self):
+ if self._Libraries == None:
+ self._Libraries = []
+ RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
+ for Record in RecordList:
+ # in case of name with '.lib' extension, which is unusual in R8.x inf
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ LibraryName = os.path.splitext(Record[0])[0]
+ if LibraryName not in self._Libraries:
+ self._Libraries.append(LibraryName)
+ return self._Libraries
+
+ ## Retrieve protocols consumed/produced by this module
+ def _GetProtocols(self):
+ if self._Protocols == None:
+ self._Protocols = sdict()
+ RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = ProtocolValue(CName, self.Packages)
+ if Value == None:
+ PackageList = "\n\t".join([str(P) for P in self.Packages])
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Protocol [%s] is not found under [Protocols] section in" % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ self._Protocols[CName] = Value
+ return self._Protocols
+
+ ## Retrieve PPIs consumed/produced by this module
+ def _GetPpis(self):
+ if self._Ppis == None:
+ self._Ppis = sdict()
+ RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = PpiValue(CName, self.Packages)
+ if Value == None:
+ PackageList = "\n\t".join([str(P) for P in self.Packages])
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of PPI [%s] is not found under [Ppis] section in " % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ self._Ppis[CName] = Value
+ return self._Ppis
+
+ ## Retrieve GUIDs consumed/produced by this module
+ def _GetGuids(self):
+ if self._Guids == None:
+ self._Guids = sdict()
+ RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
+ for Record in RecordList:
+ CName = Record[0]
+ Value = GuidValue(CName, self.Packages)
+ if Value == None:
+ PackageList = "\n\t".join([str(P) for P in self.Packages])
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Guid [%s] is not found under [Guids] section in" % CName,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ self._Guids[CName] = Value
+ return self._Guids
+
+ ## Retrieve include paths necessary for this module (for R8.x style of modules)
+ def _GetIncludes(self):
+ if self._Includes == None:
+ self._Includes = []
+ if self._SourceOverridePath:
+ self._Includes.append(self._SourceOverridePath)
+ RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch, self._Platform]
+ # [includes] section must be used only in old (R8.x) inf file
+ if self.AutoGenVersion >= 0x00010005 and len(RecordList) > 0:
+ EdkLogger.error('build', FORMAT_NOT_SUPPORTED, "No [include] section allowed",
+ File=self.MetaFile, Line=RecordList[0][-1]-1)
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ Record[0] = Record[0].replace('$(PROCESSOR)', self._Arch)
+ Record[0] = ReplaceMacro(Record[0], {'EFI_SOURCE' : GlobalData.gEfiSource}, False)
+ if Record[0].find('EDK_SOURCE') > -1:
+ File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEcpSource}, False), self._Macros)
+ if File[0] == '.':
+ File = os.path.join(self._ModuleDir, File)
+ else:
+ File = os.path.join(GlobalData.gWorkspace, File)
+ File = RealPath(os.path.normpath(File))
+ if File:
+ self._Includes.append(File)
+
+ # TRICK: let the compiler choose the correct header file
+ File = NormPath(ReplaceMacro(Record[0], {'EDK_SOURCE' : GlobalData.gEdkSource}, False), self._Macros)
+ if File[0] == '.':
+ File = os.path.join(self._ModuleDir, File)
+ else:
+ File = os.path.join(GlobalData.gWorkspace, File)
+ File = RealPath(os.path.normpath(File))
+ if File:
+ self._Includes.append(File)
+ else:
+ File = NormPath(Record[0], self._Macros)
+ if File[0] == '.':
+ File = os.path.join(self._ModuleDir, File)
+ else:
+ File = os.path.join(GlobalData.gWorkspace, File)
+ File = RealPath(os.path.normpath(File))
+ if File:
+ self._Includes.append(File)
+ return self._Includes
+
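+ # Note (illustrative): an R8.x [Includes] entry containing EDK_SOURCE is
+ # added twice, once with EDK_SOURCE replaced by GlobalData.gEcpSource and
+ # once with GlobalData.gEdkSource, so that the compiler can pick up the
+ # header from whichever tree actually provides it.
+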
+ ## Retrieve packages this module depends on
+ def _GetPackages(self):
+ if self._Packages == None:
+ self._Packages = []
+ RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
+ Macros = {"EDK_SOURCE":GlobalData.gEcpSource, "EFI_SOURCE":GlobalData.gEfiSource}
+ Macros.update(self._Macros)
+ for Record in RecordList:
+ File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
+ LineNo = Record[-1]
+ # check the file validation
+ ErrorCode, ErrorInfo = File.Validate('.dec')
+ if ErrorCode != 0:
+ EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
+ # parse this package now. we need it to get protocol/ppi/guid value
+ Package = self._Bdb[File, self._Arch]
+ self._Packages.append(Package)
+ return self._Packages
+
+ ## Retrieve PCDs used in this module
+ def _GetPcds(self):
+ if self._Pcds == None:
+ self._Pcds = {}
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
+ self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
+ return self._Pcds
+
+ ## Retrieve build options specific to this module
+ def _GetBuildOptions(self):
+ if self._BuildOptions == None:
+ self._BuildOptions = sdict()
+ RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
+ for Record in RecordList:
+ ToolChainFamily = Record[0]
+ ToolChain = Record[1]
+ Option = Record[2]
+ if (ToolChainFamily, ToolChain) not in self._BuildOptions:
+ self._BuildOptions[ToolChainFamily, ToolChain] = Option
+ else:
+ # concatenate the option string if they're for the same tool
+ OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
+ self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
+ return self._BuildOptions
+
+ ## Retrieve dependency expression
+ def _GetDepex(self):
+ if self._Depex == None:
+ self._Depex = tdict(False, 2)
+ RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
+ Depex = {}
+ for Record in RecordList:
+ Record = ReplaceMacros(Record, GlobalData.gEdkGlobal, False)
+ Arch = Record[3]
+ ModuleType = Record[4]
+ TokenList = Record[0].split()
+ if (Arch, ModuleType) not in Depex:
+ Depex[Arch, ModuleType] = []
+ DepexList = Depex[Arch, ModuleType]
+ for Token in TokenList:
+ if Token in DEPEX_SUPPORTED_OPCODE:
+ DepexList.append(Token)
+ elif Token.endswith(".inf"): # module file name
+ ModuleFile = os.path.normpath(Token)
+ Module = self.BuildDatabase[ModuleFile]
+ if Module == None:
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
+ ExtraData=Token, File=self.MetaFile, Line=Record[-1])
+ DepexList.append(Module.Guid)
+ else:
+ # get the GUID value now
+ Value = ProtocolValue(Token, self.Packages)
+ if Value == None:
+ Value = PpiValue(Token, self.Packages)
+ if Value == None:
+ Value = GuidValue(Token, self.Packages)
+ if Value == None:
+ PackageList = "\n\t".join([str(P) for P in self.Packages])
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of [%s] is not found in" % Token,
+ ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
+ DepexList.append(Value)
+ for Arch, ModuleType in Depex:
+ self._Depex[Arch, ModuleType] = Depex[Arch, ModuleType]
+ return self._Depex
+
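+ # Illustrative only (GUID names assumed): a [Depex] line such as
+ #   gEfiPcdProtocolGuid AND gEfiHiiDatabaseProtocolGuid
+ # is split into tokens; opcodes are kept as-is and each C name is resolved
+ # to its GUID value through the dependent packages, yielding roughly
+ #   ['<guid-of-PcdProtocol>', 'AND', '<guid-of-HiiDatabaseProtocol>'].
+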
+ ## Retrieve PCD for given type
+ def _GetPcd(self, Type):
+ Pcds = {}
+ PcdDict = tdict(True, 4)
+ PcdSet = set()
+ RecordList = self._RawData[Type, self._Arch, self._Platform]
+ for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Dummy1, LineNo in RecordList:
+ PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo)
+ PcdSet.add((PcdCName, TokenSpaceGuid))
+ # get the guid value
+ if TokenSpaceGuid not in self.Guids:
+ Value = GuidValue(TokenSpaceGuid, self.Packages)
+ if Value == None:
+ PackageList = "\n\t".join([str(P) for P in self.Packages])
+ EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
+ "Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
+ ExtraData=PackageList, File=self.MetaFile, Line=LineNo)
+ self.Guids[TokenSpaceGuid] = Value
+
+ # resolve PCD type, value, datum info, etc. by getting its definition from package
+ for PcdCName, TokenSpaceGuid in PcdSet:
+ ValueList = ['', '']
+ Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
+ if Setting == None:
+ continue
+ TokenList = Setting.split(TAB_VALUE_SPLIT)
+ ValueList[0:len(TokenList)] = TokenList
+ DefaultValue = ValueList[0]
+ Pcd = PcdClassObject(
+ PcdCName,
+ TokenSpaceGuid,
+ '',
+ '',
+ DefaultValue,
+ '',
+ '',
+ {},
+ self.Guids[TokenSpaceGuid]
+ )
+
+ # get necessary info from package declaring this PCD
+ for Package in self.Packages:
+ #
+ # 'dynamic' in INF means its type is determined by platform;
+ # if platform doesn't give its type, use 'lowest' one in the
+ # following order, if any
+ #
+ # "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
+ #
+ PcdType = self._PCD_TYPE_STRING_[Type]
+ if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
+ Pcd.Pending = True
+ for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
+ if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:
+ PcdType = T
+ break
+ else:
+ Pcd.Pending = False
+
+ if (PcdCName, TokenSpaceGuid, PcdType) in Package.Pcds:
+ PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType]
+ Pcd.Type = PcdType
+ Pcd.TokenValue = PcdInPackage.TokenValue
+ Pcd.DatumType = PcdInPackage.DatumType
+ Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
+ if Pcd.DefaultValue in [None, '']:
+ Pcd.DefaultValue = PcdInPackage.DefaultValue
+ break
+ else:
+ EdkLogger.error(
+ 'build',
+ PARSER_ERROR,
+ "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdCName, self.MetaFile),
+ File =self.MetaFile, Line=LineNo,
+ ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])
+ )
+ Pcds[PcdCName, TokenSpaceGuid] = Pcd
+ return Pcds
+
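+ # Illustrative only (PCD name assumed): an INF entry like
+ #   [PcdsDynamic]
+ #     gEfiMdePkgTokenSpaceGuid.PcdDebugPrintErrorLevel
+ # produces a PcdClassObject whose Type, DatumType, TokenValue and default
+ # value are completed from whichever dependent package declares the PCD,
+ # using the FixedAtBuild -> ... -> DynamicEx fallback order noted above.
+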
+ Arch = property(_GetArch, _SetArch)
+ Platform = property(_GetPlatform, _SetPlatform)
+
+ AutoGenVersion = property(_GetInfVersion)
+ BaseName = property(_GetBaseName)
+ ModuleType = property(_GetModuleType)
+ ComponentType = property(_GetComponentType)
+ BuildType = property(_GetBuildType)
+ Guid = property(_GetFileGuid)
+ Version = property(_GetVersion)
+ PcdIsDriver = property(_GetPcdIsDriver)
+ Shadow = property(_GetShadow)
+ CustomMakefile = property(_GetMakefile)
+ Specification = property(_GetSpec)
+ LibraryClass = property(_GetLibraryClass)
+ ModuleEntryPointList = property(_GetEntryPoint)
+ ModuleUnloadImageList = property(_GetUnloadImage)
+ ConstructorList = property(_GetConstructor)
+ DestructorList = property(_GetDestructor)
+ Defines = property(_GetDefines)
+
+ Binaries = property(_GetBinaryFiles)
+ Sources = property(_GetSourceFiles)
+ LibraryClasses = property(_GetLibraryClassUses)
+ Libraries = property(_GetLibraryNames)
+ Protocols = property(_GetProtocols)
+ Ppis = property(_GetPpis)
+ Guids = property(_GetGuids)
+ Includes = property(_GetIncludes)
+ Packages = property(_GetPackages)
+ Pcds = property(_GetPcds)
+ BuildOptions = property(_GetBuildOptions)
+ Depex = property(_GetDepex)
+
+## Database
+#
+ # This class defines the build database for all modules, packages and platforms.
+ # It will call the corresponding parser for a given file if it cannot find it in
+ # the database.
+#
+# @param DbPath Path of database file
+# @param GlobalMacros Global macros used for replacement during file parsing
+ # @param RenewDb=False Create a new database file even if one already exists
+#
+class WorkspaceDatabase(object):
+ # file parser
+ _FILE_PARSER_ = {
+ MODEL_FILE_INF : InfParser,
+ MODEL_FILE_DEC : DecParser,
+ MODEL_FILE_DSC : DscParser,
+ MODEL_FILE_FDF : None, #FdfParser,
+ MODEL_FILE_CIF : None
+ }
+
+ # file table
+ _FILE_TABLE_ = {
+ MODEL_FILE_INF : ModuleTable,
+ MODEL_FILE_DEC : PackageTable,
+ MODEL_FILE_DSC : PlatformTable,
+ }
+
+ # default database file path
+ _DB_PATH_ = "Conf/.cache/build.db"
+
+ #
+ # internal class used to call the corresponding file parser and cache the result
+ # to avoid unnecessary re-parsing
+ #
+ class BuildObjectFactory(object):
+ _FILE_TYPE_ = {
+ ".inf" : MODEL_FILE_INF,
+ ".dec" : MODEL_FILE_DEC,
+ ".dsc" : MODEL_FILE_DSC,
+ ".fdf" : MODEL_FILE_FDF,
+ }
+
+ # convert to xxxBuildData object
+ _GENERATOR_ = {
+ MODEL_FILE_INF : InfBuildData,
+ MODEL_FILE_DEC : DecBuildData,
+ MODEL_FILE_DSC : DscBuildData,
+ MODEL_FILE_FDF : None #FlashDefTable,
+ }
+
+ _CACHE_ = {} # (FilePath, Arch) : <object>
+
+ # constructor
+ def __init__(self, WorkspaceDb):
+ self.WorkspaceDb = WorkspaceDb
+
+ # key = (FilePath, Arch='COMMON')
+ def __contains__(self, Key):
+ FilePath = Key[0]
+ Arch = 'COMMON'
+ if len(Key) > 1:
+ Arch = Key[1]
+ return (FilePath, Arch) in self._CACHE_
+
+ # key = (FilePath, Arch='COMMON')
+ def __getitem__(self, Key):
+ FilePath = Key[0]
+ Arch = 'COMMON'
+ Platform = 'COMMON'
+ if len(Key) > 1:
+ Arch = Key[1]
+ if len(Key) > 2:
+ Platform = Key[2]
+
+ # if it's generated before, just return the cached one
+ Key = (FilePath, Arch)
+ if Key in self._CACHE_:
+ return self._CACHE_[Key]
+
+ # check file type
+ Ext = FilePath.Ext.lower()
+ if Ext not in self._FILE_TYPE_:
+ return None
+ FileType = self._FILE_TYPE_[Ext]
+ if FileType not in self._GENERATOR_:
+ return None
+
+ # get table for current file
+ MetaFile = self.WorkspaceDb[FilePath, FileType, self.WorkspaceDb._GlobalMacros]
+ BuildObject = self._GENERATOR_[FileType](
+ FilePath,
+ MetaFile,
+ self,
+ Arch,
+ Platform,
+ self.WorkspaceDb._GlobalMacros,
+ )
+ self._CACHE_[Key] = BuildObject
+ return BuildObject
+
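+ # Illustrative usage only (file path assumed): build objects are looked up
+ # by (file, arch) and cached, so repeated queries are cheap:
+ #   Db.BuildObject[PathClass('FooPkg/Drivers/Foo/Foo.inf'), 'IA32']
+ # A second lookup with the same key is served from _CACHE_ without
+ # re-generating the build data.
+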
+ # placeholder for file format conversion
+ class TransformObjectFactory:
+ def __init__(self, WorkspaceDb):
+ self.WorkspaceDb = WorkspaceDb
+
+ # key = FilePath, Arch
+ def __getitem__(self, Key):
+ pass
+
+ ## Constructor of WorkspaceDatabase
+ #
+ # @param DbPath Path of database file
+ # @param GlobalMacros Global macros used for replacement during file parsing
+ # @param RenewDb=False Create a new database file even if one already exists
+ #
+ def __init__(self, DbPath, GlobalMacros={}, RenewDb=False):
+ self._GlobalMacros = GlobalMacros
+
+ if DbPath == None or DbPath == '':
+ DbPath = os.path.normpath(os.path.join(GlobalData.gWorkspace, self._DB_PATH_))
+
+ # no need to create a directory for an in-memory db
+ if DbPath != ':memory:':
+ DbDir = os.path.split(DbPath)[0]
+ if not os.path.exists(DbDir):
+ os.makedirs(DbDir)
+
+ # remove the db file in case of inconsistency between the db and the files in the file system
+ if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
+ os.remove(DbPath)
+
+ # create db with optimized parameters
+ self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
+ self.Conn.execute("PRAGMA synchronous=OFF")
+ self.Conn.execute("PRAGMA temp_store=MEMORY")
+ self.Conn.execute("PRAGMA count_changes=OFF")
+ self.Conn.execute("PRAGMA cache_size=8192")
+ #self.Conn.execute("PRAGMA page_size=8192")
+
+ # to avoid non-ascii character conversion issue
+ self.Conn.text_factory = str
+ self.Cur = self.Conn.cursor()
+
+ # create table for internal uses
+ self.TblDataModel = TableDataModel(self.Cur)
+ self.TblFile = TableFile(self.Cur)
+
+ # conversion object for build or file format conversion purpose
+ self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
+ self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)
+
+ ## Check whether the workspace database needs to be renewed.
+ # The database is renewed when:
+ # 1) The user forces a renew; or
+ # 2) The user does not force a renew, and
+ # a) the last-modified time of the python sources is newer than that of the database file, or
+ # b) the last-modified time of the frozen executable is newer than that of the database file.
+ #
+ # @param force User forces a database renew
+ # @param DbPath The absolute path of the workspace database file
+ #
+ # @return Bool value indicating whether the workspace database needs to be renewed
+ #
+ def _CheckWhetherDbNeedRenew (self, force, DbPath):
+ # if the database does not exist, nothing needs to be done
+ if not os.path.exists(DbPath): return False
+
+ # if the user forces a database renew, skip checking whether the database is out of date
+ if force: return True
+
+ #
+ # Check the last-modified time of the source files or build.exe;
+ # if it is newer than the database's time, the database needs to be re-created.
+ #
+ timeOfToolModified = 0
+ if hasattr(sys, "frozen"):
+ exePath = os.path.abspath(sys.executable)
+ timeOfToolModified = os.stat(exePath).st_mtime
+ else:
+ curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
+ rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
+ if rootPath == "" or rootPath == None:
+ EdkLogger.verbose("\nFailed to find the root path of build.exe or the python sources, so cannot \
+determine whether the database file is out of date!\n")
+
+ # walk the root path of the sources or the build binary to get the last-modified time.
+
+ for root, dirs, files in os.walk (rootPath):
+ for dir in dirs:
+ # bypass source control folder
+ if dir.lower() in [".svn", "_svn", "cvs"]:
+ dirs.remove(dir)
+
+ for file in files:
+ ext = os.path.splitext(file)[1]
+ if ext.lower() == ".py": # only check .py files
+ fd = os.stat(os.path.join(root, file))
+ if timeOfToolModified < fd.st_mtime:
+ timeOfToolModified = fd.st_mtime
+ if timeOfToolModified > os.stat(DbPath).st_mtime:
+ EdkLogger.verbose("\nWorkspace database is out of date!")
+ return True
+
+ return False
+
+ ## Initialize build database
+ def InitDatabase(self):
+ EdkLogger.verbose("\nInitialize build database started ...")
+
+ #
+ # Create new tables
+ #
+ self.TblDataModel.Create(False)
+ self.TblFile.Create(False)
+
+ #
+ # Initialize table DataModel
+ #
+ self.TblDataModel.InitTable()
+ EdkLogger.verbose("Initialize build database ... DONE!")
+
+ ## Query a table
+ #
+ # @param Table: The instance of the table to be queried
+ #
+ def QueryTable(self, Table):
+ Table.Query()
+
+ ## Close entire database
+ #
+ # Commit all first
+ # Close the connection and cursor
+ #
+ def Close(self):
+ self.Conn.commit()
+ self.Cur.close()
+ self.Conn.close()
+
+ ## Get unique file ID for the given file
+ def GetFileId(self, FilePath):
+ return self.TblFile.GetFileId(FilePath)
+
+ ## Get file type value for the given file ID
+ def GetFileType(self, FileId):
+ return self.TblFile.GetFileType(FileId)
+
+ ## Get time stamp stored in file table
+ def GetTimeStamp(self, FileId):
+ return self.TblFile.GetFileTimeStamp(FileId)
+
+ ## Update time stamp in file table
+ def SetTimeStamp(self, FileId, TimeStamp):
+ return self.TblFile.SetFileTimeStamp(FileId, TimeStamp)
+
+ ## Check if a table integrity flag exists or not
+ def CheckIntegrity(self, TableName):
+ try:
+ Result = self.Cur.execute("select min(ID) from %s" % (TableName)).fetchall()
+ if Result[0][0] != -1:
+ return False
+ except:
+ return False
+ return True
+
+ ## Compose table name for given file type and file ID
+ def GetTableName(self, FileType, FileId):
+ return "_%s_%s" % (FileType, FileId)
+
+ ## Return a temp table containing all content of the given file
+ #
+ # @param FileInfo The tuple containing path and type of a file
+ #
+ def __getitem__(self, FileInfo):
+ FilePath, FileType, Macros = FileInfo
+ if FileType not in self._FILE_TABLE_:
+ return None
+
+ # flag used to indicate if it's parsed or not
+ FilePath = str(FilePath)
+ Parsed = False
+ FileId = self.GetFileId(FilePath)
+ if FileId != None:
+ TimeStamp = os.stat(FilePath)[8]
+ TableName = self.GetTableName(FileType, FileId)
+ if TimeStamp != self.GetTimeStamp(FileId):
+ # update the timestamp in database
+ self.SetTimeStamp(FileId, TimeStamp)
+ else:
+ # if the table exists and its integrity check passes, don't parse the file again
+ Parsed = self.CheckIntegrity(TableName)
+ else:
+ FileId = self.TblFile.InsertFile(FilePath, FileType)
+ TableName = self.GetTableName(FileType, FileId)
+
+ FileTable = self._FILE_TABLE_[FileType](self.Cur, TableName, FileId)
+ FileTable.Create(not Parsed)
+ Parser = self._FILE_PARSER_[FileType](FilePath, FileType, FileTable, Macros)
+ # set the "Finished" flag in parser in order to avoid re-parsing (if parsed)
+ Parser.Finished = Parsed
+ return Parser
+
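+ # Illustrative only (path assumed): callers index the database with a
+ # (path, type, macros) tuple to get a ready parser for that meta file:
+ #   Parser = Db[PathClass('FooPkg/FooPkg.dec'), MODEL_FILE_DEC, {}]
+ # If the file was parsed before and its table passes the integrity check,
+ # Parser.Finished is True and the cached table content is reused.
+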
+ ## Summarize all packages in the database
+ def _GetPackageList(self):
+ PackageList = []
+ for Module in self.ModuleList:
+ for Package in Module.Packages:
+ if Package not in PackageList:
+ PackageList.append(Package)
+ return PackageList
+
+ ## Summarize all platforms in the database
+ def _GetPlatformList(self):
+ PlatformList = []
+ for PlatformFile in self.TblFile.GetFileList(MODEL_FILE_DSC):
+ try:
+ Platform = self.BuildObject[PathClass(PlatformFile), 'COMMON']
+ except:
+ Platform = None
+ if Platform != None:
+ PlatformList.append(Platform)
+ return PlatformList
+
+ ## Summarize all modules in the database
+ def _GetModuleList(self):
+ ModuleList = []
+ for ModuleFile in self.TblFile.GetFileList(MODEL_FILE_INF):
+ try:
+ Module = self.BuildObject[PathClass(ModuleFile), 'COMMON']
+ except:
+ Module = None
+ if Module != None:
+ ModuleList.append(Module)
+ return ModuleList
+
+ PlatformList = property(_GetPlatformList)
+ PackageList = property(_GetPackageList)
+ ModuleList = property(_GetModuleList)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
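+ # A minimal usage sketch (illustrative only; the DSC path and macros are
+ # assumed):
+ #
+ #   Db = WorkspaceDatabase(':memory:', {'TARGET': 'DEBUG'})
+ #   Db.InitDatabase()
+ #   Platform = Db.BuildObject[PathClass('Nt32Pkg/Nt32Pkg.dsc'), 'IA32']
+ #   for Module in Db.ModuleList:
+ #       print Module.BaseName, Module.Guid
+ #   Db.Close()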
+
diff --git a/BaseTools/Source/Python/Workspace/__init__.py b/BaseTools/Source/Python/Workspace/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/BaseTools/Source/Python/Workspace/__init__.py