Diffstat (limited to 'BaseTools/Source/Python/Workspace')
-rw-r--r--  BaseTools/Source/Python/Workspace/BuildClassObject.py        371
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaDataTable.py           338
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileCommentParser.py    51
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileParser.py         1951
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileTable.py           395
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceCommon.py         246
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceDatabase.py      3154
-rw-r--r--  BaseTools/Source/Python/Workspace/__init__.py                 15
8 files changed, 0 insertions, 6521 deletions
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
deleted file mode 100644
index ea26e5e5a1..0000000000
--- a/BaseTools/Source/Python/Workspace/BuildClassObject.py
+++ /dev/null
@@ -1,371 +0,0 @@
-## @file
-# This file is used to define each component of the build database
-#
-# Copyright (c) 2007 - 2015, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-import Common.LongFilePathOs as os
-
-from Common.Misc import sdict
-from Common.Misc import RealPath2
-from Common.BuildToolError import *
-
-## PcdClassObject
-#
-# This Class is used for PcdObject
-#
-# @param object: Inherited from object class
-# @param Name: Input value for Name of Pcd, default is None
-# @param Guid: Input value for Guid of Pcd, default is None
-# @param Type: Input value for Type of Pcd, default is None
-# @param DatumType: Input value for DatumType of Pcd, default is None
-# @param Value: Input value for Value of Pcd, default is None
-# @param Token: Input value for Token of Pcd, default is None
-# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
-# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
-# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
-# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
-#
-# @var TokenCName: To store value for TokenCName
-# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
-# @var Type: To store value for Type
-# @var DatumType: To store value for DatumType
-# @var TokenValue: To store value for TokenValue
-# @var MaxDatumSize: To store value for MaxDatumSize
-# @var SkuInfoList: To store value for SkuInfoList
-# @var IsOverrided: To store value for IsOverrided
-# @var Phase: To store value for Phase, default is "DXE"
-#
-class PcdClassObject(object):
- def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, IsOverrided = False, GuidValue = None, validateranges = [], validlists = [], expressions = []):
- self.TokenCName = Name
- self.TokenSpaceGuidCName = Guid
- self.TokenSpaceGuidValue = GuidValue
- self.Type = Type
- self.DatumType = DatumType
- self.DefaultValue = Value
- self.TokenValue = Token
- self.MaxDatumSize = MaxDatumSize
- self.SkuInfoList = SkuInfoList
- self.Phase = "DXE"
- self.Pending = False
- self.IsOverrided = IsOverrided
- self.IsFromBinaryInf = False
- self.IsFromDsc = False
- self.validateranges = validateranges
- self.validlists = validlists
- self.expressions = expressions
-
- ## Convert the class to a string
- #
- # Convert each member of the class to string
-    # Organize into a single-line formatted string
- #
- # @retval Rtn Formatted String
- #
- def __str__(self):
- Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
- 'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
- 'Type=' + str(self.Type) + ', ' + \
- 'DatumType=' + str(self.DatumType) + ', ' + \
- 'DefaultValue=' + str(self.DefaultValue) + ', ' + \
- 'TokenValue=' + str(self.TokenValue) + ', ' + \
- 'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
- for Item in self.SkuInfoList.values():
- Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
- Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
-
- return Rtn
-
- ## Override __eq__ function
- #
- # Check whether pcds are the same
- #
- # @retval False The two pcds are different
- # @retval True The two pcds are the same
- #
- def __eq__(self, Other):
- return Other and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName
-
- ## Override __hash__ function
- #
- # Use (TokenCName, TokenSpaceGuidCName) as key in hash table
- #
-    # @retval tuple() Key for hash table
- #
- def __hash__(self):
- return hash((self.TokenCName, self.TokenSpaceGuidCName))
-
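The class above keys equality and hashing on (TokenCName, TokenSpaceGuidCName) only, so two PCD objects with different values still collapse to a single entry in a set or dictionary. A minimal sketch of that behavior (the PCD and GUID names are illustrative, not taken from this file):

    DebugPcdA = PcdClassObject(Name='PcdDebugPrintErrorLevel',
                               Guid='gEfiMdePkgTokenSpaceGuid', Value='0x80000000')
    DebugPcdB = PcdClassObject(Name='PcdDebugPrintErrorLevel',
                               Guid='gEfiMdePkgTokenSpaceGuid', Value='0x0')
    assert DebugPcdA == DebugPcdB            # same token name and token space GUID
    assert len({DebugPcdA, DebugPcdB}) == 1  # deduplicated despite different values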
-## LibraryClassObject
-#
-# This Class defines LibraryClassObject used in BuildDatabase
-#
-# @param object: Inherited from object class
-# @param Name: Input value for LibraryClassName, default is None
-# @param SupModList: Input value for SupModList, default is []
-# @param Type: Input value for Type, default is None
-#
-# @var LibraryClass: To store value for LibraryClass
-# @var SupModList: To store value for SupModList
-# @var Type: To store value for Type
-#
-class LibraryClassObject(object):
- def __init__(self, Name = None, SupModList = [], Type = None):
- self.LibraryClass = Name
- self.SupModList = SupModList
- if Type != None:
- self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
-
-## ModuleBuildClassObject
-#
-# This Class defines ModuleBuildClass
-#
-# @param object: Inherited from object class
-#
-# @var MetaFile: To store value for module meta file path
-# @var BaseName: To store value for BaseName
-# @var ModuleType: To store value for ModuleType
-# @var Guid: To store value for Guid
-# @var Version: To store value for Version
-# @var PcdIsDriver: To store value for PcdIsDriver
-# @var BinaryModule: To store value for BinaryModule
-# @var CustomMakefile: To store value for CustomMakefile
-# @var Specification: To store value for Specification
-# @var Shadow To store value for Shadow
-# @var LibraryClass: To store value for LibraryClass, it is a list structure as
-# [ LibraryClassObject, ...]
-# @var ModuleEntryPointList: To store value for ModuleEntryPointList
-# @var ModuleUnloadImageList: To store value for ModuleUnloadImageList
-# @var ConstructorList: To store value for ConstructorList
-# @var DestructorList: To store value for DestructorList
-# @var Binaries: To store value for Binaries, it is a list structure as
-# [ ModuleBinaryClassObject, ...]
-# @var Sources: To store value for Sources, it is a list structure as
-# [ ModuleSourceFilesClassObject, ... ]
-# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
-# { [LibraryClassName, ModuleType] : LibraryClassInfFile }
-# @var Protocols: To store value for Protocols, it is a list structure as
-# [ ProtocolName, ... ]
-# @var Ppis: To store value for Ppis, it is a list structure as
-# [ PpiName, ... ]
-# @var Guids: To store value for Guids, it is a list structure as
-# [ GuidName, ... ]
-# @var Includes: To store value for Includes, it is a list structure as
-# [ IncludePath, ... ]
-# @var Packages: To store value for Packages, it is a list structure as
-# [ DecFileName, ... ]
-# @var Pcds: To store value for Pcds, it is a set structure as
-# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
-# @var BuildOptions: To store value for BuildOptions, it is a set structure as
-# { [BuildOptionKey] : BuildOptionValue}
-# @var Depex: To store value for Depex
-#
-class ModuleBuildClassObject(object):
- def __init__(self):
- self.AutoGenVersion = 0
- self.MetaFile = ''
- self.BaseName = ''
- self.ModuleType = ''
- self.Guid = ''
- self.Version = ''
- self.PcdIsDriver = ''
- self.BinaryModule = ''
- self.Shadow = ''
- self.SourceOverridePath = ''
- self.CustomMakefile = {}
- self.Specification = {}
- self.LibraryClass = []
- self.ModuleEntryPointList = []
- self.ModuleUnloadImageList = []
- self.ConstructorList = []
- self.DestructorList = []
-
- self.Binaries = []
- self.Sources = []
- self.LibraryClasses = sdict()
- self.Libraries = []
- self.Protocols = []
- self.Ppis = []
- self.Guids = []
- self.Includes = []
- self.Packages = []
- self.Pcds = {}
- self.BuildOptions = {}
- self.Depex = {}
-
- ## Convert the class to a string
- #
- # Convert member MetaFile of the class to a string
- #
- # @retval string Formatted String
- #
- def __str__(self):
- return str(self.MetaFile)
-
- ## Override __eq__ function
- #
- # Check whether ModuleBuildClassObjects are the same
- #
- # @retval False The two ModuleBuildClassObjects are different
- # @retval True The two ModuleBuildClassObjects are the same
- #
- def __eq__(self, Other):
- return self.MetaFile == Other
-
- ## Override __hash__ function
- #
- # Use MetaFile as key in hash table
- #
- # @retval string Key for hash table
- #
- def __hash__(self):
- return hash(self.MetaFile)
-
-## PackageBuildClassObject
-#
-# This Class defines PackageBuildClass
-#
-# @param object: Inherited from object class
-#
-# @var MetaFile: To store value for package meta file path
-# @var PackageName: To store value for PackageName
-# @var Guid: To store value for Guid
-# @var Version: To store value for Version
-# @var Protocols: To store value for Protocols, it is a set structure as
-# { [ProtocolName] : Protocol Guid, ... }
-# @var Ppis: To store value for Ppis, it is a set structure as
-# { [PpiName] : Ppi Guid, ... }
-# @var Guids: To store value for Guids, it is a set structure as
-# { [GuidName] : Guid, ... }
-# @var Includes: To store value for Includes, it is a list structure as
-# [ IncludePath, ... ]
-# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
-# { [LibraryClassName] : LibraryClassInfFile }
-# @var Pcds: To store value for Pcds, it is a set structure as
-# { [(PcdCName, PcdGuidCName)] : PcdClassObject}
-#
-class PackageBuildClassObject(object):
- def __init__(self):
- self.MetaFile = ''
- self.PackageName = ''
- self.Guid = ''
- self.Version = ''
-
- self.Protocols = {}
- self.Ppis = {}
- self.Guids = {}
- self.Includes = []
- self.LibraryClasses = {}
- self.Pcds = {}
-
- ## Convert the class to a string
- #
- # Convert member MetaFile of the class to a string
- #
- # @retval string Formatted String
- #
- def __str__(self):
- return str(self.MetaFile)
-
- ## Override __eq__ function
- #
- # Check whether PackageBuildClassObjects are the same
- #
- # @retval False The two PackageBuildClassObjects are different
- # @retval True The two PackageBuildClassObjects are the same
- #
- def __eq__(self, Other):
- return self.MetaFile == Other
-
- ## Override __hash__ function
- #
- # Use MetaFile as key in hash table
- #
- # @retval string Key for hash table
- #
- def __hash__(self):
- return hash(self.MetaFile)
-
-## PlatformBuildClassObject
-#
-# This Class defines PlatformBuildClass
-#
-# @param object: Inherited from object class
-#
-# @var MetaFile: To store value for platform meta-file path
-# @var PlatformName: To store value for PlatformName
-# @var Guid: To store value for Guid
-# @var Version: To store value for Version
-# @var DscSpecification: To store value for DscSpecification
-# @var OutputDirectory: To store value for OutputDirectory
-# @var FlashDefinition: To store value for FlashDefinition
-# @var BuildNumber: To store value for BuildNumber
-# @var MakefileName: To store value for MakefileName
-# @var SkuIds: To store value for SkuIds, it is a set structure as
-# { 'SkuName' : SkuId, '!include' : includefilename, ...}
-# @var Modules: To store value for Modules, it is a list structure as
-# [ InfFileName, ... ]
-# @var Libraries: To store value for Libraries, it is a list structure as
-# [ InfFileName, ... ]
-# @var LibraryClasses: To store value for LibraryClasses, it is a set structure as
-# { (LibraryClassName, ModuleType) : LibraryClassInfFile }
-# @var Pcds: To store value for Pcds, it is a set structure as
-# { [(PcdCName, PcdGuidCName)] : PcdClassObject }
-# @var BuildOptions: To store value for BuildOptions, it is a set structure as
-# { [BuildOptionKey] : BuildOptionValue }
-#
-class PlatformBuildClassObject(object):
- def __init__(self):
- self.MetaFile = ''
- self.PlatformName = ''
- self.Guid = ''
- self.Version = ''
- self.DscSpecification = ''
- self.OutputDirectory = ''
- self.FlashDefinition = ''
- self.BuildNumber = ''
- self.MakefileName = ''
-
- self.SkuIds = {}
- self.Modules = []
- self.LibraryInstances = []
- self.LibraryClasses = {}
- self.Libraries = {}
- self.Pcds = {}
- self.BuildOptions = {}
-
- ## Convert the class to a string
- #
- # Convert member MetaFile of the class to a string
- #
- # @retval string Formatted String
- #
- def __str__(self):
- return str(self.MetaFile)
-
- ## Override __eq__ function
- #
- # Check whether PlatformBuildClassObjects are the same
- #
- # @retval False The two PlatformBuildClassObjects are different
- # @retval True The two PlatformBuildClassObjects are the same
- #
- def __eq__(self, Other):
- return self.MetaFile == Other
-
- ## Override __hash__ function
- #
- # Use MetaFile as key in hash table
- #
- # @retval string Key for hash table
- #
- def __hash__(self):
- return hash(self.MetaFile)
-
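All three build-object classes above (module, package, platform) hash on MetaFile and compare themselves against Other directly rather than Other.MetaFile, so an object also compares equal to the meta-file path it was built from. A hedged sketch (the INF path is hypothetical):

    Module = ModuleBuildClassObject()
    Module.MetaFile = 'MdeModulePkg/Core/Dxe/DxeMain.inf'
    assert Module == 'MdeModulePkg/Core/Dxe/DxeMain.inf'  # __eq__ compares MetaFile with Other
    assert hash(Module) == hash(Module.MetaFile)          # __hash__ delegates to MetaFile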
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py
deleted file mode 100644
index ee4ba6869f..0000000000
--- a/BaseTools/Source/Python/Workspace/MetaDataTable.py
+++ /dev/null
@@ -1,338 +0,0 @@
-## @file
-# This file is used to create/update/query/erase table for files
-#
-# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import Common.LongFilePathOs as os
-
-import Common.EdkLogger as EdkLogger
-from CommonDataClass import DataClass
-from CommonDataClass.DataClass import FileClass
-
-## Convert to SQL required string format
-def ConvertToSqlString(StringList):
- return map(lambda s: "'" + s.replace("'", "''") + "'", StringList)
-
-## TableFile
-#
-# This class defines a common table
-#
-# @param object: Inherited from object class
-#
-# @param Cursor: Cursor of the database
-# @param TableName: Name of the table
-#
-class Table(object):
- _COLUMN_ = ''
- _ID_STEP_ = 1
- _ID_MAX_ = 0x80000000
- _DUMMY_ = 0
-
- def __init__(self, Cursor, Name='', IdBase=0, Temporary=False):
- self.Cur = Cursor
- self.Table = Name
- self.IdBase = int(IdBase)
- self.ID = int(IdBase)
- self.Temporary = Temporary
-
- def __str__(self):
- return self.Table
-
- ## Create table
- #
- # Create a table
- #
- def Create(self, NewTable=True):
- if NewTable:
- self.Drop()
-
- if self.Temporary:
- SqlCommand = """create temp table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
- else:
- SqlCommand = """create table IF NOT EXISTS %s (%s)""" % (self.Table, self._COLUMN_)
- EdkLogger.debug(EdkLogger.DEBUG_8, SqlCommand)
- self.Cur.execute(SqlCommand)
- self.ID = self.GetId()
-
- ## Insert table
- #
- # Insert a record into a table
- #
- def Insert(self, *Args):
- self.ID = self.ID + self._ID_STEP_
- if self.ID >= (self.IdBase + self._ID_MAX_):
- self.ID = self.IdBase + self._ID_STEP_
- Values = ", ".join([str(Arg) for Arg in Args])
- SqlCommand = "insert into %s values(%s, %s)" % (self.Table, self.ID, Values)
- EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
- self.Cur.execute(SqlCommand)
- return self.ID
-
- ## Query table
- #
- # Query all records of the table
- #
- def Query(self):
- SqlCommand = """select * from %s""" % self.Table
- self.Cur.execute(SqlCommand)
- for Rs in self.Cur:
- EdkLogger.verbose(str(Rs))
- TotalCount = self.GetId()
-
- ## Drop a table
- #
- # Drop the table
- #
- def Drop(self):
- SqlCommand = """drop table IF EXISTS %s""" % self.Table
- self.Cur.execute(SqlCommand)
-
- ## Get count
- #
- # Get a count of all records of the table
- #
- # @retval Count: Total count of all records
- #
- def GetCount(self):
- SqlCommand = """select count(ID) from %s""" % self.Table
- Record = self.Cur.execute(SqlCommand).fetchall()
- return Record[0][0]
-
- def GetId(self):
- SqlCommand = """select max(ID) from %s""" % self.Table
- Record = self.Cur.execute(SqlCommand).fetchall()
- Id = Record[0][0]
- if Id == None:
- Id = self.IdBase
- return Id
-
- ## Init the ID of the table
- #
- # Init the ID of the table
- #
- def InitID(self):
- self.ID = self.GetId()
-
- ## Exec
- #
- # Exec Sql Command, return result
- #
- # @param SqlCommand: The SqlCommand to be executed
- #
- # @retval RecordSet: The result after executed
- #
- def Exec(self, SqlCommand):
- EdkLogger.debug(EdkLogger.DEBUG_5, SqlCommand)
- self.Cur.execute(SqlCommand)
- RecordSet = self.Cur.fetchall()
- return RecordSet
-
- def SetEndFlag(self):
- self.Exec("insert into %s values(%s)" % (self.Table, self._DUMMY_))
- #
-        # Need to execute a commit because the table data changed.
- #
- self.Cur.connection.commit()
-
- def IsIntegral(self):
- Result = self.Exec("select min(ID) from %s" % (self.Table))
- if Result[0][0] != -1:
- return False
- return True
-
- def GetAll(self):
- return self.Exec("select * from %s where ID > 0 order by ID" % (self.Table))
-
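The base class above is a thin wrapper over a DB-API cursor: Create() builds the table from _COLUMN_, Insert() allocates record IDs starting at IdBase, and callers are expected to pre-quote string values (see ConvertToSqlString). A minimal sketch against an in-memory SQLite database; TableDemo and its column layout are invented for illustration, and EdkLogger is assumed to be usable:

    import sqlite3

    class TableDemo(Table):
        _COLUMN_ = 'ID INTEGER PRIMARY KEY, Name VARCHAR NOT NULL'

    Cursor = sqlite3.connect(':memory:').cursor()
    Demo = TableDemo(Cursor, 'Demo')
    Demo.Create()                   # drop and (re)create table 'Demo'
    Demo.Insert("'HelloWorld'")     # values must already be SQL-quoted
    assert Demo.GetCount() == 1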
-## TableFile
-#
-# This class defines a table used for files
-#
-# @param object: Inherited from object class
-#
-class TableFile(Table):
- _COLUMN_ = '''
- ID INTEGER PRIMARY KEY,
- Name VARCHAR NOT NULL,
- ExtName VARCHAR,
- Path VARCHAR,
- FullPath VARCHAR NOT NULL,
- Model INTEGER DEFAULT 0,
- TimeStamp SINGLE NOT NULL
- '''
- def __init__(self, Cursor):
- Table.__init__(self, Cursor, 'File')
-
- ## Insert table
- #
- # Insert a record into table File
- #
- # @param Name: Name of a File
- # @param ExtName: ExtName of a File
- # @param Path: Path of a File
- # @param FullPath: FullPath of a File
- # @param Model: Model of a File
- # @param TimeStamp: TimeStamp of a File
- #
- def Insert(self, Name, ExtName, Path, FullPath, Model, TimeStamp):
- (Name, ExtName, Path, FullPath) = ConvertToSqlString((Name, ExtName, Path, FullPath))
- return Table.Insert(
- self,
- Name,
- ExtName,
- Path,
- FullPath,
- Model,
- TimeStamp
- )
-
- ## InsertFile
- #
-    # Insert one file into the table
- #
- # @param FileFullPath: The full path of the file
- # @param Model: The model of the file
- #
- # @retval FileID: The ID after record is inserted
- #
- def InsertFile(self, File, Model):
- return self.Insert(
- File.Name,
- File.Ext,
- File.Dir,
- File.Path,
- Model,
- File.TimeStamp
- )
-
- ## Get ID of a given file
- #
- # @param FilePath Path of file
- #
- # @retval ID ID value of given file in the table
- #
- def GetFileId(self, File):
- QueryScript = "select ID from %s where FullPath = '%s'" % (self.Table, str(File))
- RecordList = self.Exec(QueryScript)
- if len(RecordList) == 0:
- return None
- return RecordList[0][0]
-
- ## Get type of a given file
- #
- # @param FileId ID of a file
- #
- # @retval file_type Model value of given file in the table
- #
- def GetFileType(self, FileId):
- QueryScript = "select Model from %s where ID = '%s'" % (self.Table, FileId)
- RecordList = self.Exec(QueryScript)
- if len(RecordList) == 0:
- return None
- return RecordList[0][0]
-
- ## Get file timestamp of a given file
- #
- # @param FileId ID of file
- #
- # @retval timestamp TimeStamp value of given file in the table
- #
- def GetFileTimeStamp(self, FileId):
- QueryScript = "select TimeStamp from %s where ID = '%s'" % (self.Table, FileId)
- RecordList = self.Exec(QueryScript)
- if len(RecordList) == 0:
- return None
- return RecordList[0][0]
-
- ## Update the timestamp of a given file
- #
- # @param FileId ID of file
- # @param TimeStamp Time stamp of file
- #
- def SetFileTimeStamp(self, FileId, TimeStamp):
- self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))
-
-    ## Get list of files with a given type
- #
- # @param FileType Type value of file
- #
- # @retval file_list List of files with the given type
- #
- def GetFileList(self, FileType):
- RecordList = self.Exec("select FullPath from %s where Model=%s" % (self.Table, FileType))
- if len(RecordList) == 0:
- return []
- return [R[0] for R in RecordList]
-
-## TableDataModel
-#
-# This class defines a table used for the data model
-#
-# @param object: Inherited from object class
-#
-#
-class TableDataModel(Table):
- _COLUMN_ = """
- ID INTEGER PRIMARY KEY,
- CrossIndex INTEGER NOT NULL,
- Name VARCHAR NOT NULL,
- Description VARCHAR
- """
- def __init__(self, Cursor):
- Table.__init__(self, Cursor, 'DataModel')
-
- ## Insert table
- #
- # Insert a record into table DataModel
- #
- # @param ID: ID of a ModelType
- # @param CrossIndex: CrossIndex of a ModelType
- # @param Name: Name of a ModelType
- # @param Description: Description of a ModelType
- #
- def Insert(self, CrossIndex, Name, Description):
- (Name, Description) = ConvertToSqlString((Name, Description))
- return Table.Insert(self, CrossIndex, Name, Description)
-
- ## Init table
- #
- # Create all default records of table DataModel
- #
- def InitTable(self):
- EdkLogger.verbose("\nInitialize table DataModel started ...")
- Count = self.GetCount()
- if Count != None and Count != 0:
- return
- for Item in DataClass.MODEL_LIST:
- CrossIndex = Item[1]
- Name = Item[0]
- Description = Item[0]
- self.Insert(CrossIndex, Name, Description)
- EdkLogger.verbose("Initialize table DataModel ... DONE!")
-
- ## Get CrossIndex
- #
- # Get a model's cross index from its name
- #
- # @param ModelName: Name of the model
- # @retval CrossIndex: CrossIndex of the model
- #
- def GetCrossIndex(self, ModelName):
- CrossIndex = -1
- SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
- self.Cur.execute(SqlCommand)
- for Item in self.Cur:
- CrossIndex = Item[0]
-
- return CrossIndex
-
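A hedged usage sketch of TableDataModel: InitTable() seeds one row per entry of DataClass.MODEL_LIST and GetCrossIndex() maps a model name back to its numeric index (or -1 if unknown). The model name used below is an assumption about MODEL_LIST's contents:

    import sqlite3

    Cursor = sqlite3.connect(':memory:').cursor()
    DataModel = TableDataModel(Cursor)
    DataModel.Create()
    DataModel.InitTable()                            # one record per MODEL_LIST entry
    print DataModel.GetCrossIndex('MODEL_FILE_INF')  # numeric cross index, assumed name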
diff --git a/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py b/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
deleted file mode 100644
index df1e90faf5..0000000000
--- a/BaseTools/Source/Python/Workspace/MetaFileCommentParser.py
+++ /dev/null
@@ -1,51 +0,0 @@
-## @file
-# This file is used to check format of comments
-#
-# Copyright (c) 2012, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-from CommonDataClass.DataClass import (
- MODEL_PCD_PATCHABLE_IN_MODULE,
- MODEL_PCD_DYNAMIC_EX,
- MODEL_PCD_DYNAMIC,
- MODEL_EFI_GUID,
- MODEL_EFI_PPI,
- MODEL_EFI_PROTOCOL
-)
-from Common.BuildToolError import FORMAT_INVALID
-import Common.EdkLogger as EdkLogger
-
-UsageList = ("PRODUCES", "PRODUCED", "ALWAYS_PRODUCES", "ALWAYS_PRODUCED", "SOMETIMES_PRODUCES",
- "SOMETIMES_PRODUCED", "CONSUMES", "CONSUMED", "ALWAYS_CONSUMES", "ALWAYS_CONSUMED",
- "SOMETIMES_CONSUMES", "SOMETIMES_CONSUMED", "SOMETIME_CONSUMES")
-ErrorMsgMap = {
- MODEL_EFI_GUID : "The usage for this GUID is not listed in this INF: %s[%d]:%s",
- MODEL_EFI_PPI : "The usage for this PPI is not listed in this INF: %s[%d]:%s.",
- MODEL_EFI_PROTOCOL : "The usage for this Protocol is not listed in this INF: %s[%d]:%s.",
- MODEL_PCD_DYNAMIC : "The usage for this PCD is not listed in this INF: %s[%d]:%s."
-}
-
-def CheckInfComment(SectionType, Comments, InfFile, LineNo, ValueList):
- if SectionType in [MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_EX, MODEL_PCD_DYNAMIC]:
- CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0]+'.'+ValueList[1], ErrorMsgMap[MODEL_PCD_DYNAMIC])
- elif SectionType in [MODEL_EFI_GUID, MODEL_EFI_PPI]:
- CheckUsage(Comments, UsageList, InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
- elif SectionType == MODEL_EFI_PROTOCOL:
- CheckUsage(Comments, UsageList + ("TO_START", "BY_START"), InfFile, LineNo, ValueList[0], ErrorMsgMap[SectionType])
-
-def CheckUsage(Comments, Usages, InfFile, LineNo, Value, ErrorMsg):
- for Comment in Comments:
- for Word in Comment[0].replace('#', ' ').split():
- if Word in Usages:
- return
- EdkLogger.error(
- "Parser", FORMAT_INVALID,
- ErrorMsg % (InfFile, LineNo, Value)
- )
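A hedged example of how the checker above is driven (data made up for illustration): Comments is a list of (text, line-number) pairs collected by the INF parser, and any recognized usage keyword such as CONSUMES satisfies the check; otherwise EdkLogger.error() raises a FORMAT_INVALID build error.

    Comments = [('## CONSUMES  ## Event signalled when the driver is ready', 42)]
    CheckInfComment(MODEL_EFI_GUID, Comments, 'MyDriver.inf', 42, ['gSomeEventGuid'])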
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
deleted file mode 100644
index d094403a00..0000000000
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ /dev/null
@@ -1,1951 +0,0 @@
-## @file
-# This file is used to parse meta files
-#
-# Copyright (c) 2008 - 2017, Intel Corporation. All rights reserved.<BR>
-# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import Common.LongFilePathOs as os
-import re
-import time
-import copy
-
-import Common.EdkLogger as EdkLogger
-import Common.GlobalData as GlobalData
-
-from CommonDataClass.DataClass import *
-from Common.DataType import *
-from Common.String import *
-from Common.Misc import GuidStructureStringToGuidString, CheckPcdDatum, PathClass, AnalyzePcdData, AnalyzeDscPcd, AnalyzePcdExpression
-from Common.Expression import *
-from CommonDataClass.Exceptions import *
-from Common.LongFilePathSupport import OpenLongFilePath as open
-
-from MetaFileTable import MetaFileStorage
-from MetaFileCommentParser import CheckInfComment
-
-## A decorator used to parse macro definition
-def ParseMacro(Parser):
- def MacroParser(self):
- Match = gMacroDefPattern.match(self._CurrentLine)
- if not Match:
- # Not 'DEFINE/EDK_GLOBAL' statement, call decorated method
- Parser(self)
- return
-
- TokenList = GetSplitValueList(self._CurrentLine[Match.end(1):], TAB_EQUAL_SPLIT, 1)
- # Syntax check
- if not TokenList[0]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No macro name given",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if len(TokenList) < 2:
- TokenList.append('')
-
- Type = Match.group(1)
- Name, Value = TokenList
- # Global macros can be only defined via environment variable
- if Name in GlobalData.gGlobalDefines:
- EdkLogger.error('Parser', FORMAT_INVALID, "%s can only be defined via environment variable" % Name,
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-        # Only upper case letters, digits and '_' are allowed
- if not gMacroNamePattern.match(Name):
- EdkLogger.error('Parser', FORMAT_INVALID, "The macro name must be in the pattern [A-Z][A-Z0-9_]*",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-
- Value = ReplaceMacro(Value, self._Macros)
- if Type in self.DataType:
- self._ItemType = self.DataType[Type]
- else:
- self._ItemType = MODEL_META_DATA_DEFINE
- # DEFINE defined macros
- if Type == TAB_DSC_DEFINES_DEFINE:
- #
-                # First check whether this DEFINE is inside a conditional directive statement.
- #
- if type(self) == DscParser and self._InDirective > -1:
- pass
- else:
- if type(self) == DecParser:
- if MODEL_META_DATA_HEADER in self._SectionType:
- self._FileLocalMacros[Name] = Value
- else:
- self._ConstructSectionMacroDict(Name, Value)
- elif self._SectionType == MODEL_META_DATA_HEADER:
- self._FileLocalMacros[Name] = Value
- else:
- self._ConstructSectionMacroDict(Name, Value)
-
- # EDK_GLOBAL defined macros
- elif type(self) != DscParser:
- EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used in .dsc file",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- elif self._SectionType != MODEL_META_DATA_HEADER:
- EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL can only be used under [Defines] section",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- elif (Name in self._FileLocalMacros) and (self._FileLocalMacros[Name] != Value):
- EdkLogger.error('Parser', FORMAT_INVALID, "EDK_GLOBAL defined a macro with the same name and different value as one defined by 'DEFINE'",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-
- self._ValueList = [Type, Name, Value]
-
- return MacroParser
-
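As a concrete reference for the decorator above, here is a hedged sketch of the statements it intercepts before the wrapped section parser runs (gMacroDefPattern and GetSplitValueList are available through the wildcard imports at the top of this file; the macro name and flags are invented):

    Line = 'DEFINE COMMON_BUILD_FLAGS = -Os -fshort-wchar'
    Match = gMacroDefPattern.match(Line)          # group(1) is the keyword, e.g. 'DEFINE'
    Name, Value = GetSplitValueList(Line[Match.end(1):], TAB_EQUAL_SPLIT, 1)
    # Name -> 'COMMON_BUILD_FLAGS', Value -> '-Os -fshort-wchar'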
-## Base class of parser
-#
-# This class is intended for derivation. The parser for each specific file
-# type must derive from this class and implement some public interfaces.
-#
-# @param FilePath The path of platform description file
-# @param FileType The raw data of DSC file
-# @param Table Database used to retrieve module/package information
-# @param Macros Macros used for replacement in file
-# @param Owner Owner ID (for sub-section parsing)
-# @param From ID from which the data comes (for !INCLUDE directive)
-#
-class MetaFileParser(object):
- # data type (file content) for specific file type
- DataType = {}
-
- # Parser objects used to implement singleton
- MetaFiles = {}
-
- ## Factory method
- #
- # One file, one parser object. This factory method makes sure that there's
- # only one object constructed for one meta file.
- #
- # @param Class class object of real AutoGen class
- # (InfParser, DecParser or DscParser)
- # @param FilePath The path of meta file
- # @param *args The specific class related parameters
- # @param **kwargs The specific class related dict parameters
- #
- def __new__(Class, FilePath, *args, **kwargs):
- if FilePath in Class.MetaFiles:
- return Class.MetaFiles[FilePath]
- else:
- ParserObject = super(MetaFileParser, Class).__new__(Class)
- Class.MetaFiles[FilePath] = ParserObject
- return ParserObject
-
- ## Constructor of MetaFileParser
- #
- # Initialize object of MetaFileParser
- #
- # @param FilePath The path of platform description file
- # @param FileType The raw data of DSC file
- # @param Arch Default Arch value for filtering sections
- # @param Table Database used to retrieve module/package information
- # @param Owner Owner ID (for sub-section parsing)
- # @param From ID from which the data comes (for !INCLUDE directive)
- #
- def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1):
- self._Table = Table
- self._RawTable = Table
- self._Arch = Arch
- self._FileType = FileType
- self.MetaFile = FilePath
- self._FileDir = self.MetaFile.Dir
- self._Defines = {}
- self._FileLocalMacros = {}
- self._SectionsMacroDict = {}
-
- # for recursive parsing
- self._Owner = [Owner]
- self._From = From
-
-        # parser status for parsing
- self._ValueList = ['', '', '', '', '']
- self._Scope = []
- self._LineIndex = 0
- self._CurrentLine = ''
- self._SectionType = MODEL_UNKNOWN
- self._SectionName = ''
- self._InSubsection = False
- self._SubsectionType = MODEL_UNKNOWN
- self._SubsectionName = ''
- self._ItemType = MODEL_UNKNOWN
- self._LastItem = -1
- self._Enabled = 0
- self._Finished = False
- self._PostProcessed = False
- # Different version of meta-file has different way to parse.
- self._Version = 0
-
- ## Store the parsed data in table
- def _Store(self, *Args):
- return self._Table.Insert(*Args)
-
- ## Virtual method for starting parse
- def Start(self):
- raise NotImplementedError
-
- ## Notify a post-process is needed
- def DoPostProcess(self):
- self._PostProcessed = False
-
- ## Set parsing complete flag in both class and table
- def _Done(self):
- self._Finished = True
- ## Do not set end flag when processing included files
- if self._From == -1:
- self._Table.SetEndFlag()
-
- def _PostProcess(self):
- self._PostProcessed = True
-
- ## Get the parse complete flag
- def _GetFinished(self):
- return self._Finished
-
- ## Set the complete flag
- def _SetFinished(self, Value):
- self._Finished = Value
-
- ## Remove records that do not match given Filter Arch
- def _FilterRecordList(self, RecordList, FilterArch):
- NewRecordList = []
- for Record in RecordList:
- Arch = Record[3]
- if Arch == 'COMMON' or Arch == FilterArch:
- NewRecordList.append(Record)
- return NewRecordList
-
- ## Use [] style to query data in table, just for readability
- #
- # DataInfo = [data_type, scope1(arch), scope2(platform/moduletype)]
- #
- def __getitem__(self, DataInfo):
- if type(DataInfo) != type(()):
- DataInfo = (DataInfo,)
-
- # Parse the file first, if necessary
- if not self._Finished:
- if self._RawTable.IsIntegrity():
- self._Finished = True
- else:
- self._Table = self._RawTable
- self._PostProcessed = False
- self.Start()
-
- # No specific ARCH or Platform given, use raw data
- if self._RawTable and (len(DataInfo) == 1 or DataInfo[1] == None):
- return self._FilterRecordList(self._RawTable.Query(*DataInfo), self._Arch)
-
- # Do post-process if necessary
- if not self._PostProcessed:
- self._PostProcess()
-
- return self._FilterRecordList(self._Table.Query(*DataInfo), DataInfo[1])
-
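A hedged usage sketch of the subscript access defined above (SomeInfParser is a hypothetical, already-constructed parser instance; the first access triggers a parse if one has not happened yet):

    # all [Sources] records applicable to IA32 (plus COMMON) from the parsed table
    SourceRecords = SomeInfParser[MODEL_EFI_SOURCE_FILE, 'IA32']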
- ## Data parser for the common format in different type of file
- #
-    # The common format in the meta file is like
- #
- # xxx1 | xxx2 | xxx3
- #
- @ParseMacro
- def _CommonParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- self._ValueList[0:len(TokenList)] = TokenList
-
-    ## Data parser for formats that contain a path
-    #
-    # Only paths can contain macros, so we need to replace them before use.
- #
- @ParseMacro
- def _PathParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- self._ValueList[0:len(TokenList)] = TokenList
- # Don't do macro replacement for dsc file at this point
- if type(self) != DscParser:
- Macros = self._Macros
- self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
-
- ## Skip unsupported data
- def _Skip(self):
- EdkLogger.warn("Parser", "Unrecognized content", File=self.MetaFile,
- Line=self._LineIndex + 1, ExtraData=self._CurrentLine);
- self._ValueList[0:1] = [self._CurrentLine]
-
- ## Skip unsupported data for UserExtension Section
- def _SkipUserExtension(self):
- self._ValueList[0:1] = [self._CurrentLine]
-
- ## Section header parser
- #
- # The section header is always in following format:
- #
- # [section_name.arch<.platform|module_type>]
- #
- def _SectionHeaderParser(self):
- self._Scope = []
- self._SectionName = ''
- ArchList = set()
- for Item in GetSplitValueList(self._CurrentLine[1:-1], TAB_COMMA_SPLIT):
- if Item == '':
- continue
- ItemList = GetSplitValueList(Item, TAB_SPLIT,2)
-            # different section names must not be mixed in one section header
- if self._SectionName != '' and self._SectionName != ItemList[0].upper():
- EdkLogger.error('Parser', FORMAT_INVALID, "Different section names in the same section",
- File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
- self._SectionName = ItemList[0].upper()
- if self._SectionName in self.DataType:
- self._SectionType = self.DataType[self._SectionName]
- # Check if the section name is valid
- if self._SectionName not in SECTIONS_HAVE_ITEM_AFTER_ARCH and len(ItemList) > 3:
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
- elif self._Version >= 0x00010005:
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
- else:
- self._SectionType = MODEL_UNKNOWN
-
- # S1 is always Arch
- if len(ItemList) > 1:
- S1 = ItemList[1].upper()
- else:
- S1 = 'COMMON'
- ArchList.add(S1)
-
- # S2 may be Platform or ModuleType
- if len(ItemList) > 2:
- if self._SectionName.upper() in SECTIONS_HAVE_ITEM_PCD:
- S2 = ItemList[2]
- else:
- S2 = ItemList[2].upper()
- else:
- S2 = 'COMMON'
- self._Scope.append([S1, S2])
-
-        # 'COMMON' must not be used together with specific ARCHs in the same section
- if 'COMMON' in ArchList and len(ArchList) > 1:
- EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
- File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
- # If the section information is needed later, it should be stored in database
- self._ValueList[0] = self._SectionName
-
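A worked example of the header parser above, derived directly from the code rather than the original file:

    #   [Sources.IA32, Sources.X64]  ->  _SectionName == 'SOURCES'
    #                                    _Scope == [['IA32', 'COMMON'], ['X64', 'COMMON']]
    #   [Sources]                    ->  _Scope == [['COMMON', 'COMMON']]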
- ## [defines] section parser
- @ParseMacro
- def _DefineParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- self._ValueList[1:len(TokenList)] = TokenList
- if not self._ValueList[1]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if not self._ValueList[2]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-
- self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
- Name, Value = self._ValueList[1], self._ValueList[2]
- MacroUsed = GlobalData.gMacroRefPattern.findall(Value)
- if len(MacroUsed) != 0:
- for Macro in MacroUsed:
- if Macro in GlobalData.gGlobalDefines:
- EdkLogger.error("Parser", FORMAT_INVALID, "Global macro %s is not permitted." % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- else:
- EdkLogger.error("Parser", FORMAT_INVALID, "%s not defined" % (Macro), ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-        # Sometimes we need to distinguish between EDK and EDK2 modules
- if Name == 'INF_VERSION':
-            if re.match(r'0[xX][\da-fA-F]{5,8}', Value):
- self._Version = int(Value, 0)
- elif re.match(r'\d+\.\d+', Value):
- ValueList = Value.split('.')
- Major = '%04o' % int(ValueList[0], 0)
- Minor = '%04o' % int(ValueList[1], 0)
- self._Version = int('0x' + Major + Minor, 0)
- else:
- EdkLogger.error('Parser', FORMAT_INVALID, "Invalid version number",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-
- if type(self) == InfParser and self._Version < 0x00010005:
- # EDK module allows using defines as macros
- self._FileLocalMacros[Name] = Value
- self._Defines[Name] = Value
-
- ## [BuildOptions] section parser
- @ParseMacro
- def _BuildOptionParser(self):
- self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
- if len(TokenList2) == 2:
- self._ValueList[0] = TokenList2[0] # toolchain family
- self._ValueList[1] = TokenList2[1] # keys
- else:
- self._ValueList[1] = TokenList[0]
- if len(TokenList) == 2 and type(self) != DscParser: # value
- self._ValueList[2] = ReplaceMacro(TokenList[1], self._Macros)
-
- if self._ValueList[1].count('_') != 4:
- EdkLogger.error(
- 'Parser',
- FORMAT_INVALID,
- "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
- ExtraData=self._CurrentLine,
- File=self.MetaFile,
- Line=self._LineIndex + 1
- )
- def GetValidExpression(self, TokenSpaceGuid, PcdCName):
- return self._Table.GetValidExpression(TokenSpaceGuid, PcdCName)
- def _GetMacros(self):
- Macros = {}
- Macros.update(self._FileLocalMacros)
- Macros.update(self._GetApplicableSectionMacro())
- return Macros
-
- ## Construct section Macro dict
- def _ConstructSectionMacroDict(self, Name, Value):
- ScopeKey = [(Scope[0], Scope[1]) for Scope in self._Scope]
- ScopeKey = tuple(ScopeKey)
- SectionDictKey = self._SectionType, ScopeKey
- #
-        # DecParser SectionType is a list; it contains more than one item only in a Pcd section.
-        # Since macro usage is not allowed in Pcd sections, it is safe to use the first item here.
- #
- if type(self) == DecParser:
- SectionDictKey = self._SectionType[0], ScopeKey
- if SectionDictKey not in self._SectionsMacroDict:
- self._SectionsMacroDict[SectionDictKey] = {}
- SectionLocalMacros = self._SectionsMacroDict[SectionDictKey]
- SectionLocalMacros[Name] = Value
-
-    ## Get section macros applicable to the current line; they may come from other
-    ## sections that share the same name but have a wider scope
- def _GetApplicableSectionMacro(self):
- Macros = {}
-
- ComComMacroDict = {}
- ComSpeMacroDict = {}
- SpeSpeMacroDict = {}
-
- ActiveSectionType = self._SectionType
- if type(self) == DecParser:
- ActiveSectionType = self._SectionType[0]
-
- for (SectionType, Scope) in self._SectionsMacroDict:
- if SectionType != ActiveSectionType:
- continue
-
- for ActiveScope in self._Scope:
- Scope0, Scope1 = ActiveScope[0], ActiveScope[1]
- if(Scope0, Scope1) not in Scope:
- break
- else:
- SpeSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
-
- for ActiveScope in self._Scope:
- Scope0, Scope1 = ActiveScope[0], ActiveScope[1]
- if(Scope0, Scope1) not in Scope and (Scope0, "COMMON") not in Scope and ("COMMON", Scope1) not in Scope:
- break
- else:
- ComSpeMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
-
- if ("COMMON", "COMMON") in Scope:
- ComComMacroDict.update(self._SectionsMacroDict[(SectionType, Scope)])
-
- Macros.update(ComComMacroDict)
- Macros.update(ComSpeMacroDict)
- Macros.update(SpeSpeMacroDict)
-
- return Macros
-
- _SectionParser = {}
- Finished = property(_GetFinished, _SetFinished)
- _Macros = property(_GetMacros)
-
-
-## INF file parser class
-#
-# @param FilePath The path of platform description file
-# @param FileType The raw data of DSC file
-# @param Table Database used to retrieve module/package information
-# @param Macros Macros used for replacement in file
-#
-class InfParser(MetaFileParser):
- # INF file supported data types (one type per section)
- DataType = {
- TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
- TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
- TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
- TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
- TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
- TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
- TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
- TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
- TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
- TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
- TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
- TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
- TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
- TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
- TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
- TAB_GUIDS.upper() : MODEL_EFI_GUID,
- TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
- TAB_PPIS.upper() : MODEL_EFI_PPI,
- TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
- TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
- TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
- }
-
- ## Constructor of InfParser
- #
- # Initialize object of InfParser
- #
- # @param FilePath The path of module description file
- # @param FileType The raw data of DSC file
- # @param Arch Default Arch value for filtering sections
- # @param Table Database used to retrieve module/package information
- #
- def __init__(self, FilePath, FileType, Arch, Table):
- # prevent re-initialization
- if hasattr(self, "_Table"):
- return
- MetaFileParser.__init__(self, FilePath, FileType, Arch, Table)
- self.PcdsDict = {}
-
- ## Parser starter
- def Start(self):
- NmakeLine = ''
- Content = ''
- try:
- Content = open(str(self.MetaFile), 'r').readlines()
- except:
- EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
-
- # parse the file line by line
- IsFindBlockComment = False
- GetHeaderComment = False
- TailComments = []
- SectionComments = []
- Comments = []
-
- for Index in range(0, len(Content)):
- # skip empty, commented, block commented lines
- Line, Comment = CleanString2(Content[Index], AllowCppStyleComment=True)
- NextLine = ''
- if Index + 1 < len(Content):
- NextLine, NextComment = CleanString2(Content[Index + 1])
- if Line == '':
- if Comment:
- Comments.append((Comment, Index + 1))
- elif GetHeaderComment:
- SectionComments.extend(Comments)
- Comments = []
- continue
- if Line.find(DataType.TAB_COMMENT_EDK_START) > -1:
- IsFindBlockComment = True
- continue
- if Line.find(DataType.TAB_COMMENT_EDK_END) > -1:
- IsFindBlockComment = False
- continue
- if IsFindBlockComment:
- continue
-
- self._LineIndex = Index
- self._CurrentLine = Line
-
- # section header
- if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
- if not GetHeaderComment:
- for Cmt, LNo in Comments:
- self._Store(MODEL_META_DATA_HEADER_COMMENT, Cmt, '', '', 'COMMON',
- 'COMMON', self._Owner[-1], LNo, -1, LNo, -1, 0)
- GetHeaderComment = True
- else:
- TailComments.extend(SectionComments + Comments)
- Comments = []
- self._SectionHeaderParser()
- # Check invalid sections
- if self._Version < 0x00010005:
- if self._SectionType in [MODEL_META_DATA_BUILD_OPTION,
- MODEL_EFI_LIBRARY_CLASS,
- MODEL_META_DATA_PACKAGE,
- MODEL_PCD_FIXED_AT_BUILD,
- MODEL_PCD_PATCHABLE_IN_MODULE,
- MODEL_PCD_FEATURE_FLAG,
- MODEL_PCD_DYNAMIC_EX,
- MODEL_PCD_DYNAMIC,
- MODEL_EFI_GUID,
- MODEL_EFI_PROTOCOL,
- MODEL_EFI_PPI,
- MODEL_META_DATA_USER_EXTENSION]:
- EdkLogger.error('Parser', FORMAT_INVALID,
- "Section [%s] is not allowed in inf file without version" % (self._SectionName),
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- elif self._SectionType in [MODEL_EFI_INCLUDE,
- MODEL_EFI_LIBRARY_INSTANCE,
- MODEL_META_DATA_NMAKE]:
- EdkLogger.error('Parser', FORMAT_INVALID,
- "Section [%s] is not allowed in inf file with version 0x%08x" % (self._SectionName, self._Version),
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- continue
- # merge two lines specified by '\' in section NMAKE
- elif self._SectionType == MODEL_META_DATA_NMAKE:
- if Line[-1] == '\\':
- if NextLine == '':
- self._CurrentLine = NmakeLine + Line[0:-1]
- NmakeLine = ''
- else:
- if NextLine[0] == TAB_SECTION_START and NextLine[-1] == TAB_SECTION_END:
- self._CurrentLine = NmakeLine + Line[0:-1]
- NmakeLine = ''
- else:
- NmakeLine = NmakeLine + ' ' + Line[0:-1]
- continue
- else:
- self._CurrentLine = NmakeLine + Line
- NmakeLine = ''
-
- # section content
- self._ValueList = ['', '', '']
- # parse current line, result will be put in self._ValueList
- self._SectionParser[self._SectionType](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
- self._ItemType = -1
- Comments = []
- continue
- if Comment:
- Comments.append((Comment, Index + 1))
- if GlobalData.gOptions and GlobalData.gOptions.CheckUsage:
- CheckInfComment(self._SectionType, Comments, str(self.MetaFile), Index + 1, self._ValueList)
- #
- # Model, Value1, Value2, Value3, Arch, Platform, BelongsToItem=-1,
- # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
- #
- for Arch, Platform in self._Scope:
- LastItem = self._Store(self._SectionType,
- self._ValueList[0],
- self._ValueList[1],
- self._ValueList[2],
- Arch,
- Platform,
- self._Owner[-1],
- self._LineIndex + 1,
- - 1,
- self._LineIndex + 1,
- - 1,
- 0
- )
- for Comment, LineNo in Comments:
- self._Store(MODEL_META_DATA_COMMENT, Comment, '', '', Arch, Platform,
- LastItem, LineNo, -1, LineNo, -1, 0)
- Comments = []
- SectionComments = []
- TailComments.extend(SectionComments + Comments)
- if IsFindBlockComment:
- EdkLogger.error("Parser", FORMAT_INVALID, "Open block comments (starting with /*) are expected to end with */",
- File=self.MetaFile)
-
-        # If there are tail comments in the INF file, save them to the database regardless of content
- for Comment in TailComments:
- self._Store(MODEL_META_DATA_TAIL_COMMENT, Comment[0], '', '', 'COMMON',
- 'COMMON', self._Owner[-1], -1, -1, -1, -1, 0)
- self._Done()
-
-    ## Data parser for formats that contain a path
-    #
-    # Only paths can contain macros, so we need to replace them before use.
- #
- def _IncludeParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- self._ValueList[0:len(TokenList)] = TokenList
- Macros = self._Macros
- if Macros:
- for Index in range(0, len(self._ValueList)):
- Value = self._ValueList[Index]
- if not Value:
- continue
-
- if Value.upper().find('$(EFI_SOURCE)\Edk'.upper()) > -1 or Value.upper().find('$(EFI_SOURCE)/Edk'.upper()) > -1:
- Value = '$(EDK_SOURCE)' + Value[17:]
- if Value.find('$(EFI_SOURCE)') > -1 or Value.find('$(EDK_SOURCE)') > -1:
- pass
- elif Value.startswith('.'):
- pass
- elif Value.startswith('$('):
- pass
- else:
- Value = '$(EFI_SOURCE)/' + Value
-
- self._ValueList[Index] = ReplaceMacro(Value, Macros)
-
- ## Parse [Sources] section
- #
-    # Only paths can contain macros, so we need to replace them before use.
- #
- @ParseMacro
- def _SourceFileParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- self._ValueList[0:len(TokenList)] = TokenList
- Macros = self._Macros
- # For Acpi tables, remove macro like ' TABLE_NAME=Sata1'
- if 'COMPONENT_TYPE' in Macros:
- if self._Defines['COMPONENT_TYPE'].upper() == 'ACPITABLE':
- self._ValueList[0] = GetSplitValueList(self._ValueList[0], ' ', 1)[0]
- if self._Defines['BASE_NAME'] == 'Microcode':
- pass
- self._ValueList = [ReplaceMacro(Value, Macros) for Value in self._ValueList]
-
- ## Parse [Binaries] section
- #
-    # Only paths can contain macros, so we need to replace them before use.
- #
- @ParseMacro
- def _BinaryFileParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 2)
- if len(TokenList) < 2:
- EdkLogger.error('Parser', FORMAT_INVALID, "No file type or path specified",
- ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if not TokenList[0]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No file type specified",
- ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if not TokenList[1]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No file path specified",
- ExtraData=self._CurrentLine + " (<FileType> | <FilePath> [| <Target>])",
- File=self.MetaFile, Line=self._LineIndex + 1)
- self._ValueList[0:len(TokenList)] = TokenList
- self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
-
- ## [nmake] section parser (Edk.x style only)
- def _NmakeParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- self._ValueList[0:len(TokenList)] = TokenList
- # remove macros
- self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros)
- # remove self-reference in macro setting
- #self._ValueList[1] = ReplaceMacro(self._ValueList[1], {self._ValueList[0]:''})
-
- ## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
- @ParseMacro
- def _PcdParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
- if len(ValueList) != 2:
- EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
- ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- self._ValueList[0:1] = ValueList
- if len(TokenList) > 1:
- self._ValueList[2] = TokenList[1]
- if self._ValueList[0] == '' or self._ValueList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
- ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
-        # If the value is 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace it with integer 1 or 0.
- if self._ValueList[2] != '':
- InfPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
- if InfPcdValueList[0] in ['True', 'true', 'TRUE']:
- self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '1', 1);
- elif InfPcdValueList[0] in ['False', 'false', 'FALSE']:
- self._ValueList[2] = TokenList[1].replace(InfPcdValueList[0], '0', 1);
- if (self._ValueList[0], self._ValueList[1]) not in self.PcdsDict:
- self.PcdsDict[self._ValueList[0], self._ValueList[1]] = self._SectionType
- elif self.PcdsDict[self._ValueList[0], self._ValueList[1]] != self._SectionType:
- EdkLogger.error('Parser', FORMAT_INVALID, "It is not permissible to list a specified PCD in different PCD type sections.",
- ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
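A worked example of the PCD parser above (the PCD names are illustrative): the token space GUID, PCD name and default value land in _ValueList[0..2], and a boolean default is normalized to '1' or '0'.

    #   gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask | 0x0f
    #     -> _ValueList[0:3] == ['gEfiMdePkgTokenSpaceGuid', 'PcdDebugPropertyMask', '0x0f']
    #   gSomeTokenSpaceGuid.PcdFeatureEnable | TRUE
    #     -> default value stored as '1'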
- ## [depex] section parser
- @ParseMacro
- def _DepexParser(self):
- self._ValueList[0:1] = [self._CurrentLine]
-
- _SectionParser = {
- MODEL_UNKNOWN : MetaFileParser._Skip,
- MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
- MODEL_META_DATA_BUILD_OPTION : MetaFileParser._BuildOptionParser,
- MODEL_EFI_INCLUDE : _IncludeParser, # for Edk.x modules
- MODEL_EFI_LIBRARY_INSTANCE : MetaFileParser._CommonParser, # for Edk.x modules
- MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
- MODEL_META_DATA_PACKAGE : MetaFileParser._PathParser,
- MODEL_META_DATA_NMAKE : _NmakeParser, # for Edk.x modules
- MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
- MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
- MODEL_PCD_FEATURE_FLAG : _PcdParser,
- MODEL_PCD_DYNAMIC_EX : _PcdParser,
- MODEL_PCD_DYNAMIC : _PcdParser,
- MODEL_EFI_SOURCE_FILE : _SourceFileParser,
- MODEL_EFI_GUID : MetaFileParser._CommonParser,
- MODEL_EFI_PROTOCOL : MetaFileParser._CommonParser,
- MODEL_EFI_PPI : MetaFileParser._CommonParser,
- MODEL_EFI_DEPEX : _DepexParser,
- MODEL_EFI_BINARY_FILE : _BinaryFileParser,
- MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
- }
-
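The _SectionParser mapping above is a plain dispatch table: Start() looks up the current section's model and calls the stored, unbound method with the parser instance. A minimal sketch of the same pattern with hypothetical names:

    class TinyParser(object):
        def _DefineParser(self):
            self.Kind = 'define'
        def _SourceParser(self):
            self.Kind = 'source'
        _SectionParser = {
            'DEFINES': _DefineParser,   # plain functions stored in a class-level dict,
            'SOURCES': _SourceParser,   # invoked with the instance passed explicitly
        }

    Parser = TinyParser()
    Parser._SectionParser['SOURCES'](Parser)
    assert Parser.Kind == 'source'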
-## DSC file parser class
-#
-# @param FilePath The path of platform description file
-# @param FileType The raw data of DSC file
-# @param Table Database used to retrieve module/package information
-# @param Macros Macros used for replacement in file
-# @param Owner Owner ID (for sub-section parsing)
-# @param From ID from which the data comes (for !INCLUDE directive)
-#
-class DscParser(MetaFileParser):
- # DSC file supported data types (one type per section)
- DataType = {
- TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
- TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
- TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
- TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
- TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
- TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
- TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
- TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
- TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
- TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
- TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
- TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
- TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
- TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
- TAB_COMPONENTS_SOURCE_OVERRIDE_PATH.upper() : MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH,
- TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
- TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
- TAB_DSC_DEFINES_EDKGLOBAL : MODEL_META_DATA_GLOBAL_DEFINE,
- TAB_INCLUDE.upper() : MODEL_META_DATA_INCLUDE,
- TAB_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
- TAB_IF_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
- TAB_IF_N_DEF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF,
- TAB_ELSE_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF,
- TAB_ELSE.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE,
- TAB_END_IF.upper() : MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF,
- TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION,
- }
-
- # Valid names in define section
- DefineKeywords = [
- "DSC_SPECIFICATION",
- "PLATFORM_NAME",
- "PLATFORM_GUID",
- "PLATFORM_VERSION",
- "SKUID_IDENTIFIER",
- "PCD_INFO_GENERATION",
- "PCD_VAR_CHECK_GENERATION",
- "SUPPORTED_ARCHITECTURES",
- "BUILD_TARGETS",
- "OUTPUT_DIRECTORY",
- "FLASH_DEFINITION",
- "BUILD_NUMBER",
- "RFC_LANGUAGES",
- "ISO_LANGUAGES",
- "TIME_STAMP_FILE",
- "VPD_TOOL_GUID",
- "FIX_LOAD_TOP_MEMORY_ADDRESS",
- "PREBUILD",
- "POSTBUILD"
- ]
-
- SubSectionDefineKeywords = [
- "FILE_GUID"
- ]
-
- SymbolPattern = ValueExpression.SymbolPattern
-
- IncludedFiles = set()
-
- ## Constructor of DscParser
- #
- # Initialize object of DscParser
- #
- # @param FilePath The path of platform description file
- # @param FileType The type of the meta file
- # @param Arch Default Arch value for filtering sections
- # @param Table Database used to retrieve module/package information
- # @param Owner Owner ID (for sub-section parsing)
- # @param From ID from which the data comes (for !INCLUDE directive)
- #
- def __init__(self, FilePath, FileType, Arch, Table, Owner= -1, From= -1):
- # prevent re-initialization
- if hasattr(self, "_Table"):
- return
- MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, Owner, From)
- self._Version = 0x00010005 # Only EDK2 dsc file is supported
- # to store conditional directive evaluation result
- self._DirectiveStack = []
- self._DirectiveEvalStack = []
- self._Enabled = 1
-
- #
- # Track whether the current line is inside a conditional directive block
- #
- self._InDirective = -1
-
- # Final valid replaceable symbols
- self._Symbols = {}
- #
- # Map the ID between the original table and new table to track
- # the owner item
- #
- self._IdMapping = {-1:-1}
-
- ## Parser starter
- def Start(self):
- Content = ''
- try:
- Content = open(str(self.MetaFile), 'r').readlines()
- except:
- EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
-
- OwnerId = {}
- for Index in range(0, len(Content)):
- Line = CleanString(Content[Index])
- # skip empty line
- if Line == '':
- continue
-
- self._CurrentLine = Line
- self._LineIndex = Index
- if self._InSubsection and self._Owner[-1] == -1:
- self._Owner.append(self._LastItem)
-
- # section header
- if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
- self._SectionType = MODEL_META_DATA_SECTION_HEADER
- # subsection ending
- elif Line[0] == '}' and self._InSubsection:
- self._InSubsection = False
- self._SubsectionType = MODEL_UNKNOWN
- self._SubsectionName = ''
- self._Owner[-1] = -1
- OwnerId = {}
- continue
- # subsection header
- elif Line[0] == TAB_OPTION_START and Line[-1] == TAB_OPTION_END:
- self._SubsectionType = MODEL_META_DATA_SUBSECTION_HEADER
- # directive line
- elif Line[0] == '!':
- self._DirectiveParser()
- continue
- if Line[0] == TAB_OPTION_START and not self._InSubsection:
- EdkLogger.error("Parser", FILE_READ_FAILURE, "Missing the '{' before %s in Line %s" % (Line, Index+1),ExtraData=self.MetaFile)
-
- if self._InSubsection:
- SectionType = self._SubsectionType
- else:
- SectionType = self._SectionType
- self._ItemType = SectionType
-
- self._ValueList = ['', '', '']
- self._SectionParser[SectionType](self)
- if self._ValueList == None:
- continue
- #
- # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
- # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
- #
- for Arch, ModuleType in self._Scope:
- Owner = self._Owner[-1]
- if self._SubsectionType != MODEL_UNKNOWN:
- Owner = OwnerId[Arch]
- self._LastItem = self._Store(
- self._ItemType,
- self._ValueList[0],
- self._ValueList[1],
- self._ValueList[2],
- Arch,
- ModuleType,
- Owner,
- self._From,
- self._LineIndex + 1,
- - 1,
- self._LineIndex + 1,
- - 1,
- self._Enabled
- )
- if self._SubsectionType == MODEL_UNKNOWN and self._InSubsection:
- OwnerId[Arch] = self._LastItem
-
- if self._DirectiveStack:
- Type, Line, Text = self._DirectiveStack[-1]
- EdkLogger.error('Parser', FORMAT_INVALID, "No matching '!endif' found",
- ExtraData=Text, File=self.MetaFile, Line=Line)
- self._Done()
-
- ## <subsection_header> parser
- def _SubsectionHeaderParser(self):
- self._SubsectionName = self._CurrentLine[1:-1].upper()
- if self._SubsectionName in self.DataType:
- self._SubsectionType = self.DataType[self._SubsectionName]
- else:
- self._SubsectionType = MODEL_UNKNOWN
- EdkLogger.warn("Parser", "Unrecognized sub-section", File=self.MetaFile,
- Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
- self._ValueList[0] = self._SubsectionName
-
- ## Directive statement parser
- def _DirectiveParser(self):
- self._ValueList = ['', '', '']
- TokenList = GetSplitValueList(self._CurrentLine, ' ', 1)
- self._ValueList[0:len(TokenList)] = TokenList
-
- # Syntax check
- DirectiveName = self._ValueList[0].upper()
- if DirectiveName not in self.DataType:
- EdkLogger.error("Parser", FORMAT_INVALID, "Unknown directive [%s]" % DirectiveName,
- File=self.MetaFile, Line=self._LineIndex + 1)
-
- if DirectiveName in ['!IF', '!IFDEF', '!IFNDEF']:
- self._InDirective += 1
-
- if DirectiveName in ['!ENDIF']:
- self._InDirective -= 1
-
- if DirectiveName in ['!IF', '!IFDEF', '!INCLUDE', '!IFNDEF', '!ELSEIF'] and self._ValueList[1] == '':
- EdkLogger.error("Parser", FORMAT_INVALID, "Missing expression",
- File=self.MetaFile, Line=self._LineIndex + 1,
- ExtraData=self._CurrentLine)
-
- ItemType = self.DataType[DirectiveName]
- Scope = [['COMMON', 'COMMON']]
- if ItemType == MODEL_META_DATA_INCLUDE:
- Scope = self._Scope
- if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
- # Remove all directives between !if and !endif, including themselves
- while self._DirectiveStack:
- # Remove any !else or !elseif
- DirectiveInfo = self._DirectiveStack.pop()
- if DirectiveInfo[0] in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
- break
- else:
- EdkLogger.error("Parser", FORMAT_INVALID, "Redundant '!endif'",
- File=self.MetaFile, Line=self._LineIndex + 1,
- ExtraData=self._CurrentLine)
- elif ItemType != MODEL_META_DATA_INCLUDE:
- # Report an error if a '!elseif' follows a '!else'
- if ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF and \
- self._DirectiveStack and \
- self._DirectiveStack[-1][0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
- EdkLogger.error("Parser", FORMAT_INVALID, "'!elseif' after '!else'",
- File=self.MetaFile, Line=self._LineIndex + 1,
- ExtraData=self._CurrentLine)
- self._DirectiveStack.append((ItemType, self._LineIndex + 1, self._CurrentLine))
-
- #
- # Model, Value1, Value2, Value3, Arch, ModuleType, BelongsToItem=-1, BelongsToFile=-1,
- # LineBegin=-1, ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, Enabled=-1
- #
- for Arch, ModuleType in Scope:
- self._LastItem = self._Store(
- ItemType,
- self._ValueList[0],
- self._ValueList[1],
- self._ValueList[2],
- Arch,
- ModuleType,
- self._Owner[-1],
- self._From,
- self._LineIndex + 1,
- - 1,
- self._LineIndex + 1,
- - 1,
- 0
- )
-
- ## [defines] section parser
- @ParseMacro
- def _DefineParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- self._ValueList[1:len(TokenList)] = TokenList
-
- # Syntax check
- if not self._ValueList[1]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No name specified",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if not self._ValueList[2]:
- EdkLogger.error('Parser', FORMAT_INVALID, "No value specified",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if (not self._ValueList[1] in self.DefineKeywords and
- (self._InSubsection and self._ValueList[1] not in self.SubSectionDefineKeywords)):
- EdkLogger.error('Parser', FORMAT_INVALID,
- "Unknown keyword found: %s. "
- "If this is a macro you must "
- "add it as a DEFINE in the DSC" % self._ValueList[1],
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- if not self._InSubsection:
- self._Defines[self._ValueList[1]] = self._ValueList[2]
- self._ItemType = self.DataType[TAB_DSC_DEFINES.upper()]
-
- @ParseMacro
- def _SkuIdParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- if len(TokenList) != 2:
- EdkLogger.error('Parser', FORMAT_INVALID, "Correct format is '<Integer>|<UiName>'",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- self._ValueList[0:len(TokenList)] = TokenList
-
- ## Parse Edk style of library modules
- @ParseMacro
- def _LibraryInstanceParser(self):
- self._ValueList[0] = self._CurrentLine
-
- ## PCD sections parser
- #
- # [PcdsFixedAtBuild]
- # [PcdsPatchableInModule]
- # [PcdsFeatureFlag]
- # [PcdsDynamicEx]
- # [PcdsDynamicExDefault]
- # [PcdsDynamicExVpd]
- # [PcdsDynamicExHii]
- # [PcdsDynamic]
- # [PcdsDynamicDefault]
- # [PcdsDynamicVpd]
- # [PcdsDynamicHii]
- #
- @ParseMacro
- def _PcdParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
- if len(TokenList) == 2:
- self._ValueList[2] = TokenList[1]
- if self._ValueList[0] == '' or self._ValueList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
- ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if self._ValueList[2] == '':
- #
- # The PCD values are optional for FIXEDATBUILD and PATCHABLEINMODULE
- #
- if self._SectionType in (MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE):
- return
- EdkLogger.error('Parser', FORMAT_INVALID, "No PCD value given",
- ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<TokenCName>|<PcdValue>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
- # Validate the datum type of Dynamic Default PCD and DynamicEx Default PCD
- ValueList = GetSplitValueList(self._ValueList[2])
- if len(ValueList) > 1 and ValueList[1] != TAB_VOID \
- and self._ItemType in [MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_EX_DEFAULT]:
- EdkLogger.error('Parser', FORMAT_INVALID, "The datum type '%s' of PCD is wrong" % ValueList[1],
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
-
- # If the value is 'True', 'true', 'TRUE' or 'False', 'false', 'FALSE', replace it with integer 1 or 0.
- DscPcdValueList = GetSplitValueList(TokenList[1], TAB_VALUE_SPLIT, 1)
- if DscPcdValueList[0] in ['True', 'true', 'TRUE']:
- self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '1', 1);
- elif DscPcdValueList[0] in ['False', 'false', 'FALSE']:
- self._ValueList[2] = TokenList[1].replace(DscPcdValueList[0], '0', 1);
-
-
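# Illustrative sketch (standalone, using plain str.split in place of the
# project's GetSplitValueList helper, which also strips surrounding
# whitespace): how a DSC PCD line decomposes into token space GUID, PCD name
# and value, as _PcdParser above does.
Line = 'gEfiMdePkgTokenSpaceGuid.PcdDebugPropertyMask|0x2f'
TokenList = Line.split('|', 1)
print TokenList[0].split('.')    # -> ['gEfiMdePkgTokenSpaceGuid', 'PcdDebugPropertyMask']
print TokenList[1]               # -> '0x2f'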
- ## [components] section parser
- @ParseMacro
- def _ComponentParser(self):
- if self._CurrentLine[-1] == '{':
- self._ValueList[0] = self._CurrentLine[0:-1].strip()
- self._InSubsection = True
- else:
- self._ValueList[0] = self._CurrentLine
-
- ## [LibraryClasses] section
- @ParseMacro
- def _LibraryClassParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT)
- if len(TokenList) < 2:
- EdkLogger.error('Parser', FORMAT_INVALID, "No library class or instance specified",
- ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if TokenList[0] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No library class specified",
- ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if TokenList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No library instance specified",
- ExtraData=self._CurrentLine + " (<LibraryClassName>|<LibraryInstancePath>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
- self._ValueList[0:len(TokenList)] = TokenList
-
- def _CompponentSourceOverridePathParser(self):
- self._ValueList[0] = self._CurrentLine
-
- ## [BuildOptions] section parser
- @ParseMacro
- def _BuildOptionParser(self):
- self._CurrentLine = CleanString(self._CurrentLine, BuildOption=True)
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- TokenList2 = GetSplitValueList(TokenList[0], ':', 1)
- if len(TokenList2) == 2:
- self._ValueList[0] = TokenList2[0] # toolchain family
- self._ValueList[1] = TokenList2[1] # keys
- else:
- self._ValueList[1] = TokenList[0]
- if len(TokenList) == 2: # value
- self._ValueList[2] = TokenList[1]
-
- if self._ValueList[1].count('_') != 4:
- EdkLogger.error(
- 'Parser',
- FORMAT_INVALID,
- "'%s' must be in format of <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS" % self._ValueList[1],
- ExtraData=self._CurrentLine,
- File=self.MetaFile,
- Line=self._LineIndex + 1
- )
-
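# Illustrative sketch (standalone): the check above only counts underscores,
# so any key shaped like <TARGET>_<TOOLCHAIN>_<ARCH>_<TOOL>_FLAGS passes,
# wildcards included. The tool chain tag "GCC5" is just an example value.
for Key in ("DEBUG_GCC5_IA32_CC_FLAGS", "*_*_*_CC_FLAGS", "CC_FLAGS"):
    print Key, "-> valid" if Key.count('_') == 4 else "-> invalid"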
- ## Override parent's method since we'll do all macro replacements in parser
- def _GetMacros(self):
- Macros = {}
- Macros.update(self._FileLocalMacros)
- Macros.update(self._GetApplicableSectionMacro())
- Macros.update(GlobalData.gEdkGlobal)
- Macros.update(GlobalData.gPlatformDefines)
- Macros.update(GlobalData.gCommandLineDefines)
- # PCD cannot be referenced in macro definition
- if self._ItemType not in [MODEL_META_DATA_DEFINE, MODEL_META_DATA_GLOBAL_DEFINE]:
- Macros.update(self._Symbols)
- return Macros
-
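# Illustrative sketch (standalone, made-up macro name): each update() call in
# _GetMacros above overwrites earlier entries on key collision, so the
# effective precedence runs file-local DEFINEs < section DEFINEs < gEdkGlobal
# < gPlatformDefines < gCommandLineDefines < PCD symbols.
Macros = {'MY_MACRO': 'from [Defines] DEFINE'}
Macros.update({'MY_MACRO': 'from the command line'})
print Macros['MY_MACRO']    # -> 'from the command line'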
- def _PostProcess(self):
- Processer = {
- MODEL_META_DATA_SECTION_HEADER : self.__ProcessSectionHeader,
- MODEL_META_DATA_SUBSECTION_HEADER : self.__ProcessSubsectionHeader,
- MODEL_META_DATA_HEADER : self.__ProcessDefine,
- MODEL_META_DATA_DEFINE : self.__ProcessDefine,
- MODEL_META_DATA_GLOBAL_DEFINE : self.__ProcessDefine,
- MODEL_META_DATA_INCLUDE : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IF : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF : self.__ProcessDirective,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF : self.__ProcessDirective,
- MODEL_EFI_SKU_ID : self.__ProcessSkuId,
- MODEL_EFI_LIBRARY_INSTANCE : self.__ProcessLibraryInstance,
- MODEL_EFI_LIBRARY_CLASS : self.__ProcessLibraryClass,
- MODEL_PCD_FIXED_AT_BUILD : self.__ProcessPcd,
- MODEL_PCD_PATCHABLE_IN_MODULE : self.__ProcessPcd,
- MODEL_PCD_FEATURE_FLAG : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_DEFAULT : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_HII : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_VPD : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_EX_DEFAULT : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_EX_HII : self.__ProcessPcd,
- MODEL_PCD_DYNAMIC_EX_VPD : self.__ProcessPcd,
- MODEL_META_DATA_COMPONENT : self.__ProcessComponent,
- MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH : self.__ProcessSourceOverridePath,
- MODEL_META_DATA_BUILD_OPTION : self.__ProcessBuildOption,
- MODEL_UNKNOWN : self._Skip,
- MODEL_META_DATA_USER_EXTENSION : self._SkipUserExtension,
- }
-
- self._Table = MetaFileStorage(self._RawTable.Cur, self.MetaFile, MODEL_FILE_DSC, True)
- self._Table.Create()
- self._DirectiveStack = []
- self._DirectiveEvalStack = []
- self._FileWithError = self.MetaFile
- self._FileLocalMacros = {}
- self._SectionsMacroDict = {}
- GlobalData.gPlatformDefines = {}
-
- # Get all macros and PCDs which have straightforward values
- self.__RetrievePcdValue()
- self._Content = self._RawTable.GetAll()
- self._ContentIndex = 0
- self._InSubsection = False
- while self._ContentIndex < len(self._Content) :
- Id, self._ItemType, V1, V2, V3, S1, S2, Owner, self._From, \
- LineStart, ColStart, LineEnd, ColEnd, Enabled = self._Content[self._ContentIndex]
-
- if self._From < 0:
- self._FileWithError = self.MetaFile
-
- self._ContentIndex += 1
-
- self._Scope = [[S1, S2]]
- #
- # For !include directive, handle it specially,
- # merge arch and module type in case of duplicate items
- #
- while self._ItemType == MODEL_META_DATA_INCLUDE:
- if self._ContentIndex >= len(self._Content):
- break
- Record = self._Content[self._ContentIndex]
- if LineStart == Record[9] and LineEnd == Record[11]:
- if [Record[5], Record[6]] not in self._Scope:
- self._Scope.append([Record[5], Record[6]])
- self._ContentIndex += 1
- else:
- break
-
- self._LineIndex = LineStart - 1
- self._ValueList = [V1, V2, V3]
-
- if Owner > 0 and Owner in self._IdMapping:
- self._InSubsection = True
- else:
- self._InSubsection = False
- try:
- Processer[self._ItemType]()
- except EvaluationException, Excpt:
- #
- # Only catch expression evaluation error here. We need to report
- # the precise line number on which the error occurred
- #
- if hasattr(Excpt, 'Pcd'):
- if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
- Info = GlobalData.gPlatformOtherPcds[Excpt.Pcd]
- EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
- " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
- " of the DSC file, and it is currently defined in this section:"
- " %s, line #: %d." % (Excpt.Pcd, Info[0], Info[1]),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
- Line=self._LineIndex + 1)
- else:
- EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
- Line=self._LineIndex + 1)
- else:
- EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
- Line=self._LineIndex + 1)
- except MacroException, Excpt:
- EdkLogger.error('Parser', FORMAT_INVALID, str(Excpt),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
- Line=self._LineIndex + 1)
-
- if self._ValueList == None:
- continue
-
- NewOwner = self._IdMapping.get(Owner, -1)
- self._Enabled = int((not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack))
- self._LastItem = self._Store(
- self._ItemType,
- self._ValueList[0],
- self._ValueList[1],
- self._ValueList[2],
- S1,
- S2,
- NewOwner,
- self._From,
- self._LineIndex + 1,
- - 1,
- self._LineIndex + 1,
- - 1,
- self._Enabled
- )
- self._IdMapping[Id] = self._LastItem
-
- GlobalData.gPlatformDefines.update(self._FileLocalMacros)
- self._PostProcessed = True
- self._Content = None
-
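# Illustrative sketch (standalone): a record survives post-processing only
# when every enclosing conditional evaluated to True, which is exactly what
# int((not stack) or (False not in stack)) computes in _PostProcess above.
for Stack in ([], [True], [True, True], [True, False]):
    print Stack, '->', int((not Stack) or (False not in Stack))
# prints 1 for the first three stacks and 0 for [True, False]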
- def __ProcessSectionHeader(self):
- self._SectionName = self._ValueList[0]
- if self._SectionName in self.DataType:
- self._SectionType = self.DataType[self._SectionName]
- else:
- self._SectionType = MODEL_UNKNOWN
-
- def __ProcessSubsectionHeader(self):
- self._SubsectionName = self._ValueList[0]
- if self._SubsectionName in self.DataType:
- self._SubsectionType = self.DataType[self._SubsectionName]
- else:
- self._SubsectionType = MODEL_UNKNOWN
-
- def __RetrievePcdValue(self):
- Content = open(str(self.MetaFile), 'r').readlines()
- GlobalData.gPlatformOtherPcds['DSCFILE'] = str(self.MetaFile)
- for PcdType in (MODEL_PCD_PATCHABLE_IN_MODULE, MODEL_PCD_DYNAMIC_DEFAULT, MODEL_PCD_DYNAMIC_HII,
- MODEL_PCD_DYNAMIC_VPD, MODEL_PCD_DYNAMIC_EX_DEFAULT, MODEL_PCD_DYNAMIC_EX_HII,
- MODEL_PCD_DYNAMIC_EX_VPD):
- Records = self._RawTable.Query(PcdType, BelongsToItem= -1.0)
- for TokenSpaceGuid, PcdName, Value, Dummy2, Dummy3, ID, Line in Records:
- Name = TokenSpaceGuid + '.' + PcdName
- if Name not in GlobalData.gPlatformOtherPcds:
- PcdLine = Line
- while not Content[Line - 1].lstrip().startswith(TAB_SECTION_START):
- Line -= 1
- GlobalData.gPlatformOtherPcds[Name] = (CleanString(Content[Line - 1]), PcdLine, PcdType)
-
- def __ProcessDefine(self):
- if not self._Enabled:
- return
-
- Type, Name, Value = self._ValueList
- Value = ReplaceMacro(Value, self._Macros, False)
- #
- # If it is <Defines>, return
- #
- if self._InSubsection:
- self._ValueList = [Type, Name, Value]
- return
-
- if self._ItemType == MODEL_META_DATA_DEFINE:
- if self._SectionType == MODEL_META_DATA_HEADER:
- self._FileLocalMacros[Name] = Value
- else:
- self._ConstructSectionMacroDict(Name, Value)
- elif self._ItemType == MODEL_META_DATA_GLOBAL_DEFINE:
- GlobalData.gEdkGlobal[Name] = Value
-
- #
- # Keywords in the [Defines] section can be used as macros
- #
- if (self._ItemType == MODEL_META_DATA_HEADER) and (self._SectionType == MODEL_META_DATA_HEADER):
- self._FileLocalMacros[Name] = Value
-
- self._ValueList = [Type, Name, Value]
-
- def __ProcessDirective(self):
- Result = None
- if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF]:
- Macros = self._Macros
- Macros.update(GlobalData.gGlobalDefines)
- try:
- Result = ValueExpression(self._ValueList[1], Macros)()
- except SymbolNotFound, Exc:
- EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc), self._ValueList[1])
- Result = False
- except WrnExpression, Excpt:
- #
- # Catch expression evaluation warning here. We need to report
- # the precise line number and return the evaluation result
- #
- EdkLogger.warn('Parser', "Suspicious expression: %s" % str(Excpt),
- File=self._FileWithError, ExtraData=' '.join(self._ValueList),
- Line=self._LineIndex + 1)
- Result = Excpt.result
-
- if self._ItemType in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
- self._DirectiveStack.append(self._ItemType)
- if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
- Result = bool(Result)
- else:
- Macro = self._ValueList[1]
- Macro = Macro[2:-1] if (Macro.startswith("$(") and Macro.endswith(")")) else Macro
- Result = Macro in self._Macros
- if self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF:
- Result = not Result
- self._DirectiveEvalStack.append(Result)
- elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF:
- self._DirectiveStack.append(self._ItemType)
- self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
- self._DirectiveEvalStack.append(bool(Result))
- elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE:
- self._DirectiveStack.append(self._ItemType)
- self._DirectiveEvalStack[-1] = not self._DirectiveEvalStack[-1]
- self._DirectiveEvalStack.append(True)
- elif self._ItemType == MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF:
- # Back to the nearest !if/!ifdef/!ifndef
- while self._DirectiveStack:
- self._DirectiveEvalStack.pop()
- Directive = self._DirectiveStack.pop()
- if Directive in [MODEL_META_DATA_CONDITIONAL_STATEMENT_IF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF,
- MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF]:
- break
- elif self._ItemType == MODEL_META_DATA_INCLUDE:
- # The included file must be relative to the workspace or to the same directory as the DSC file
- __IncludeMacros = {}
- #
- # Allow using system environment variables in path after !include
- #
- __IncludeMacros['WORKSPACE'] = GlobalData.gGlobalDefines['WORKSPACE']
- if "ECP_SOURCE" in GlobalData.gGlobalDefines.keys():
- __IncludeMacros['ECP_SOURCE'] = GlobalData.gGlobalDefines['ECP_SOURCE']
- #
- # The DSC parser called during the GenFds phase will go into this branch.
- #
- elif "ECP_SOURCE" in GlobalData.gCommandLineDefines.keys():
- __IncludeMacros['ECP_SOURCE'] = GlobalData.gCommandLineDefines['ECP_SOURCE']
-
- __IncludeMacros['EFI_SOURCE'] = GlobalData.gGlobalDefines['EFI_SOURCE']
- __IncludeMacros['EDK_SOURCE'] = GlobalData.gGlobalDefines['EDK_SOURCE']
- #
- # Allow using MACROs from the [Defines] section to keep compatibility.
- #
- __IncludeMacros.update(self._Macros)
-
- IncludedFile = NormPath(ReplaceMacro(self._ValueList[1], __IncludeMacros, RaiseError=True))
- #
- # First search the include file under the same directory as DSC file
- #
- IncludedFile1 = PathClass(IncludedFile, self.MetaFile.Dir)
- ErrorCode, ErrorInfo1 = IncludedFile1.Validate()
- if ErrorCode != 0:
- #
- # Also search file under the WORKSPACE directory
- #
- IncludedFile1 = PathClass(IncludedFile, GlobalData.gWorkspace)
- ErrorCode, ErrorInfo2 = IncludedFile1.Validate()
- if ErrorCode != 0:
- EdkLogger.error('parser', ErrorCode, File=self._FileWithError,
- Line=self._LineIndex + 1, ExtraData=ErrorInfo1 + "\n" + ErrorInfo2)
-
- self._FileWithError = IncludedFile1
-
- IncludedFileTable = MetaFileStorage(self._Table.Cur, IncludedFile1, MODEL_FILE_DSC, False)
- Owner = self._Content[self._ContentIndex - 1][0]
- Parser = DscParser(IncludedFile1, self._FileType, self._Arch, IncludedFileTable,
- Owner=Owner, From=Owner)
-
- self.IncludedFiles.add (IncludedFile1)
-
- # Do not allow a lower-level included file to include an upper-level included file
- if Parser._From != Owner and int(Owner) > int (Parser._From):
- EdkLogger.error('parser', FILE_ALREADY_EXIST, File=self._FileWithError,
- Line=self._LineIndex + 1, ExtraData="{0} is already included at a higher level.".format(IncludedFile1))
-
-
- # set the parser status with current status
- Parser._SectionName = self._SectionName
- Parser._SectionType = self._SectionType
- Parser._Scope = self._Scope
- Parser._Enabled = self._Enabled
- # Parse the included file
- Parser.Start()
-
- # update current status with sub-parser's status
- self._SectionName = Parser._SectionName
- self._SectionType = Parser._SectionType
- self._Scope = Parser._Scope
- self._Enabled = Parser._Enabled
-
- # Insert all records in the table for the included file into dsc file table
- Records = IncludedFileTable.GetAll()
- if Records:
- self._Content[self._ContentIndex:self._ContentIndex] = Records
- self._Content.pop(self._ContentIndex - 1)
- self._ValueList = None
- self._ContentIndex -= 1
-
- def __ProcessSkuId(self):
- self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=True)
- for Value in self._ValueList]
-
- def __ProcessLibraryInstance(self):
- self._ValueList = [ReplaceMacro(Value, self._Macros) for Value in self._ValueList]
-
- def __ProcessLibraryClass(self):
- self._ValueList[1] = ReplaceMacro(self._ValueList[1], self._Macros, RaiseError=True)
-
- def __ProcessPcd(self):
- if self._ItemType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
- self._ValueList[2] = ReplaceMacro(self._ValueList[2], self._Macros, RaiseError=True)
- return
-
- ValList, Valid, Index = AnalyzeDscPcd(self._ValueList[2], self._ItemType)
- if not Valid:
- EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self._FileWithError, Line=self._LineIndex + 1,
- ExtraData="%s.%s|%s" % (self._ValueList[0], self._ValueList[1], self._ValueList[2]))
- PcdValue = ValList[Index]
- if PcdValue:
- try:
- ValList[Index] = ValueExpression(PcdValue, self._Macros)(True)
- except WrnExpression, Value:
- ValList[Index] = Value.result
-
- if ValList[Index] == 'True':
- ValList[Index] = '1'
- if ValList[Index] == 'False':
- ValList[Index] = '0'
-
- if (not self._DirectiveEvalStack) or (False not in self._DirectiveEvalStack):
- GlobalData.gPlatformPcds[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
- self._Symbols[TAB_SPLIT.join(self._ValueList[0:2])] = PcdValue
- self._ValueList[2] = '|'.join(ValList)
-
- def __ProcessComponent(self):
- self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
-
- def __ProcessSourceOverridePath(self):
- self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
-
- def __ProcessBuildOption(self):
- self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
- for Value in self._ValueList]
-
- _SectionParser = {
- MODEL_META_DATA_HEADER : _DefineParser,
- MODEL_EFI_SKU_ID : _SkuIdParser,
- MODEL_EFI_LIBRARY_INSTANCE : _LibraryInstanceParser,
- MODEL_EFI_LIBRARY_CLASS : _LibraryClassParser,
- MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
- MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
- MODEL_PCD_FEATURE_FLAG : _PcdParser,
- MODEL_PCD_DYNAMIC_DEFAULT : _PcdParser,
- MODEL_PCD_DYNAMIC_HII : _PcdParser,
- MODEL_PCD_DYNAMIC_VPD : _PcdParser,
- MODEL_PCD_DYNAMIC_EX_DEFAULT : _PcdParser,
- MODEL_PCD_DYNAMIC_EX_HII : _PcdParser,
- MODEL_PCD_DYNAMIC_EX_VPD : _PcdParser,
- MODEL_META_DATA_COMPONENT : _ComponentParser,
- MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH : _CompponentSourceOverridePathParser,
- MODEL_META_DATA_BUILD_OPTION : _BuildOptionParser,
- MODEL_UNKNOWN : MetaFileParser._Skip,
- MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
- MODEL_META_DATA_SECTION_HEADER : MetaFileParser._SectionHeaderParser,
- MODEL_META_DATA_SUBSECTION_HEADER : _SubsectionHeaderParser,
- }
-
- _Macros = property(_GetMacros)
-
-## DEC file parser class
-#
-# @param FilePath The path of package declaration file
-# @param FileType The type of the meta file
-# @param Table Database used to retrieve module/package information
-# @param Macros Macros used for replacement in file
-#
-class DecParser(MetaFileParser):
- # DEC file supported data types (one type per section)
- DataType = {
- TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
- TAB_DSC_DEFINES_DEFINE : MODEL_META_DATA_DEFINE,
- TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
- TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
- TAB_GUIDS.upper() : MODEL_EFI_GUID,
- TAB_PPIS.upper() : MODEL_EFI_PPI,
- TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
- TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
- TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
- TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
- TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
- TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
- TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION,
- }
-
- ## Constructor of DecParser
- #
- # Initialize object of DecParser
- #
- # @param FilePath The path of package declaration file
- # @param FileType The type of the meta file
- # @param Arch Default Arch value for filtering sections
- # @param Table Database used to retrieve module/package information
- #
- def __init__(self, FilePath, FileType, Arch, Table):
- # prevent re-initialization
- if hasattr(self, "_Table"):
- return
- MetaFileParser.__init__(self, FilePath, FileType, Arch, Table, -1)
- self._Comments = []
- self._Version = 0x00010005 # Only EDK2 dec file is supported
- self._AllPCDs = [] # Only for check duplicate PCD
- self._AllPcdDict = {}
-
- ## Parser starter
- def Start(self):
- Content = ''
- try:
- Content = open(str(self.MetaFile), 'r').readlines()
- except:
- EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
-
- for Index in range(0, len(Content)):
- Line, Comment = CleanString2(Content[Index])
- self._CurrentLine = Line
- self._LineIndex = Index
-
- # save comment for later use
- if Comment:
- self._Comments.append((Comment, self._LineIndex + 1))
- # skip empty line
- if Line == '':
- continue
-
- # section header
- if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
- self._SectionHeaderParser()
- self._Comments = []
- continue
- elif len(self._SectionType) == 0:
- self._Comments = []
- continue
-
- # section content
- self._ValueList = ['', '', '']
- self._SectionParser[self._SectionType[0]](self)
- if self._ValueList == None or self._ItemType == MODEL_META_DATA_DEFINE:
- self._ItemType = -1
- self._Comments = []
- continue
-
- #
- # Model, Value1, Value2, Value3, Arch, BelongsToItem=-1, LineBegin=-1,
- # ColumnBegin=-1, LineEnd=-1, ColumnEnd=-1, FeatureFlag='', Enabled=-1
- #
- for Arch, ModuleType, Type in self._Scope:
- self._LastItem = self._Store(
- Type,
- self._ValueList[0],
- self._ValueList[1],
- self._ValueList[2],
- Arch,
- ModuleType,
- self._Owner[-1],
- self._LineIndex + 1,
- - 1,
- self._LineIndex + 1,
- - 1,
- 0
- )
- for Comment, LineNo in self._Comments:
- self._Store(
- MODEL_META_DATA_COMMENT,
- Comment,
- self._ValueList[0],
- self._ValueList[1],
- Arch,
- ModuleType,
- self._LastItem,
- LineNo,
- - 1,
- LineNo,
- - 1,
- 0
- )
- self._Comments = []
- self._Done()
-
-
- ## Section header parser
- #
- # The section header is always in the following format:
- #
- # [section_name.arch<.platform|module_type>]
- #
- def _SectionHeaderParser(self):
- self._Scope = []
- self._SectionName = ''
- self._SectionType = []
- ArchList = set()
- PrivateList = set()
- Line = self._CurrentLine.replace("%s%s" % (TAB_COMMA_SPLIT, TAB_SPACE_SPLIT), TAB_COMMA_SPLIT)
- for Item in Line[1:-1].split(TAB_COMMA_SPLIT):
- if Item == '':
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR,
- "section name can NOT be empty or incorrectly use separator comma",
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
- ItemList = Item.split(TAB_SPLIT)
-
- # different types of PCD are permissible in one section
- self._SectionName = ItemList[0].upper()
- if self._SectionName in self.DataType:
- if self.DataType[self._SectionName] not in self._SectionType:
- self._SectionType.append(self.DataType[self._SectionName])
- else:
- EdkLogger.error("Parser", FORMAT_UNKNOWN_ERROR, "%s is not a valid section name" % Item,
- self.MetaFile, self._LineIndex + 1, self._CurrentLine)
-
- if MODEL_PCD_FEATURE_FLAG in self._SectionType and len(self._SectionType) > 1:
- EdkLogger.error(
- 'Parser',
- FORMAT_INVALID,
- "%s must not be in the same section of other types of PCD" % TAB_PCDS_FEATURE_FLAG_NULL,
- File=self.MetaFile,
- Line=self._LineIndex + 1,
- ExtraData=self._CurrentLine
- )
- # S1 is always Arch
- if len(ItemList) > 1:
- S1 = ItemList[1].upper()
- else:
- S1 = 'COMMON'
- ArchList.add(S1)
- # S2 may be Platform or ModuleType
- if len(ItemList) > 2:
- S2 = ItemList[2].upper()
- # only the Includes, GUIDs, PPIs and Protocols sections may have the Private tag
- if self._SectionName in [TAB_INCLUDES.upper(), TAB_GUIDS.upper(), TAB_PROTOCOLS.upper(), TAB_PPIS.upper()]:
- if S2 != 'PRIVATE':
- EdkLogger.error("Parser", FORMAT_INVALID, 'Please use keyword "Private" as section tag modifier.',
- File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
- else:
- S2 = 'COMMON'
- PrivateList.add(S2)
- if [S1, S2, self.DataType[self._SectionName]] not in self._Scope:
- self._Scope.append([S1, S2, self.DataType[self._SectionName]])
-
- # 'COMMON' must not be used with specific ARCHs in the same section
- if 'COMMON' in ArchList and len(ArchList) > 1:
- EdkLogger.error('Parser', FORMAT_INVALID, "'common' ARCH must not be used with specific ARCHs",
- File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
-
- # It is not permissible to mix section tags without the Private attribute with section tags with the Private attribute
- if 'COMMON' in PrivateList and len(PrivateList) > 1:
- EdkLogger.error('Parser', FORMAT_INVALID, "Can't mix section tags without the Private attribute with section tags with the Private attribute",
- File=self.MetaFile, Line=self._LineIndex + 1, ExtraData=self._CurrentLine)
-
- ## [guids], [ppis] and [protocols] section parser
- @ParseMacro
- def _GuidParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_EQUAL_SPLIT, 1)
- if len(TokenList) < 2:
- EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name or value specified",
- ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if TokenList[0] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No GUID name specified",
- ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if TokenList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No GUID value specified",
- ExtraData=self._CurrentLine + " (<CName> = <GuidValueInCFormat>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- if TokenList[1][0] != '{' or TokenList[1][-1] != '}' or GuidStructureStringToGuidString(TokenList[1]) == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "Invalid GUID value format",
- ExtraData=self._CurrentLine + \
- " (<CName> = <GuidValueInCFormat:{8,4,4,{2,2,2,2,2,2,2,2}}>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- self._ValueList[0] = TokenList[0]
- self._ValueList[1] = TokenList[1]
-
- ## PCD sections parser
- #
- # [PcdsFixedAtBuild]
- # [PcdsPatchableInModule]
- # [PcdsFeatureFlag]
- # [PcdsDynamicEx]
- # [PcdsDynamic]
- #
- @ParseMacro
- def _PcdParser(self):
- TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
- ValueRe = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*')
- # check PCD information
- if self._ValueList[0] == '' or self._ValueList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No token space GUID or PCD name specified",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check format of token space GUID CName
- if not ValueRe.match(self._ValueList[0]):
- EdkLogger.error('Parser', FORMAT_INVALID, "The format of the token space GUID CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check format of PCD CName
- if not ValueRe.match(self._ValueList[1]):
- EdkLogger.error('Parser', FORMAT_INVALID, "The format of the PCD CName is invalid. The correct format is '(a-zA-Z_)[a-zA-Z0-9_]*'",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check PCD datum information
- if len(TokenList) < 2 or TokenList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "No PCD Datum information given",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
-
- ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
- PtrValue = ValueRe.findall(TokenList[1])
-
- # A VOID* type string may contain the "|" character inside the string.
- if len(PtrValue) != 0:
- ptrValueList = re.sub(ValueRe, '', TokenList[1])
- ValueList = AnalyzePcdExpression(ptrValueList)
- ValueList[0] = PtrValue[0]
- else:
- ValueList = AnalyzePcdExpression(TokenList[1])
-
-
- # check if there's enough datum information given
- if len(ValueList) != 3:
- EdkLogger.error('Parser', FORMAT_INVALID, "Invalid PCD Datum information given",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check default value
- if ValueList[0] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "Missing DefaultValue in PCD Datum information",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check datum type
- if ValueList[1] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "Missing DatumType in PCD Datum information",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
- # check token of the PCD
- if ValueList[2] == '':
- EdkLogger.error('Parser', FORMAT_INVALID, "Missing Token in PCD Datum information",
- ExtraData=self._CurrentLine + \
- " (<TokenSpaceGuidCName>.<PcdCName>|<DefaultValue>|<DatumType>|<Token>)",
- File=self.MetaFile, Line=self._LineIndex + 1)
-
- PcdValue = ValueList[0]
- if PcdValue:
- try:
- ValueList[0] = ValueExpression(PcdValue, self._AllPcdDict)(True)
- except WrnExpression, Value:
- ValueList[0] = Value.result
-
- if ValueList[0] == 'True':
- ValueList[0] = '1'
- if ValueList[0] == 'False':
- ValueList[0] = '0'
-
- # check format of default value against the datum type
- IsValid, Cause = CheckPcdDatum(ValueList[1], ValueList[0])
- if not IsValid:
- EdkLogger.error('Parser', FORMAT_INVALID, Cause, ExtraData=self._CurrentLine,
- File=self.MetaFile, Line=self._LineIndex + 1)
-
- if ValueList[0] in ['True', 'true', 'TRUE']:
- ValueList[0] = '1'
- elif ValueList[0] in ['False', 'false', 'FALSE']:
- ValueList[0] = '0'
-
- # check for duplicate PCD definition
- if (self._Scope[0], self._ValueList[0], self._ValueList[1]) in self._AllPCDs:
- EdkLogger.error('Parser', FORMAT_INVALID,
- "The same PCD name and GUID have been already defined",
- ExtraData=self._CurrentLine, File=self.MetaFile, Line=self._LineIndex + 1)
- else:
- self._AllPCDs.append((self._Scope[0], self._ValueList[0], self._ValueList[1]))
- self._AllPcdDict[TAB_SPLIT.join(self._ValueList[0:2])] = ValueList[0]
-
- self._ValueList[2] = ValueList[0].strip() + '|' + ValueList[1].strip() + '|' + ValueList[2].strip()
-
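# Illustrative sketch (standalone, made-up value): why the VOID* special case
# above is needed -- a quoted default may itself contain '|', so it is
# captured first and only the remainder is split into DatumType and Token.
import re
Datum = 'L"A|B"|VOID*|0x00000001'
ValueRe = re.compile(r'^\s*L?\".*\|.*\"')
PtrValue = ValueRe.findall(Datum)
print PtrValue[0]                  # -> 'L"A|B"'
print re.sub(ValueRe, '', Datum)   # -> '|VOID*|0x00000001'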
- _SectionParser = {
- MODEL_META_DATA_HEADER : MetaFileParser._DefineParser,
- MODEL_EFI_INCLUDE : MetaFileParser._PathParser,
- MODEL_EFI_LIBRARY_CLASS : MetaFileParser._PathParser,
- MODEL_EFI_GUID : _GuidParser,
- MODEL_EFI_PPI : _GuidParser,
- MODEL_EFI_PROTOCOL : _GuidParser,
- MODEL_PCD_FIXED_AT_BUILD : _PcdParser,
- MODEL_PCD_PATCHABLE_IN_MODULE : _PcdParser,
- MODEL_PCD_FEATURE_FLAG : _PcdParser,
- MODEL_PCD_DYNAMIC : _PcdParser,
- MODEL_PCD_DYNAMIC_EX : _PcdParser,
- MODEL_UNKNOWN : MetaFileParser._Skip,
- MODEL_META_DATA_USER_EXTENSION : MetaFileParser._SkipUserExtension,
- }
-
-##
-#
-# This acts like the main() function for the script, unless it is 'import'ed into another
-# script.
-#
-if __name__ == '__main__':
- pass
-
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py
deleted file mode 100644
index aedcacada1..0000000000
--- a/BaseTools/Source/Python/Workspace/MetaFileTable.py
+++ /dev/null
@@ -1,395 +0,0 @@
-## @file
-# This file is used to create/update/query/erase a meta file table
-#
-# Copyright (c) 2008 - 2016, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import uuid
-
-import Common.EdkLogger as EdkLogger
-from Common.BuildToolError import FORMAT_INVALID
-
-from MetaDataTable import Table, TableFile
-from MetaDataTable import ConvertToSqlString
-from CommonDataClass.DataClass import MODEL_FILE_DSC, MODEL_FILE_DEC, MODEL_FILE_INF, \
- MODEL_FILE_OTHERS
-
-class MetaFileTable(Table):
- # TRICK: use file ID as the part before '.'
- _ID_STEP_ = 0.00000001
- _ID_MAX_ = 0.99999999
-
- ## Constructor
- def __init__(self, Cursor, MetaFile, FileType, Temporary):
- self.MetaFile = MetaFile
-
- self._FileIndexTable = TableFile(Cursor)
- self._FileIndexTable.Create(False)
-
- FileId = self._FileIndexTable.GetFileId(MetaFile)
- if not FileId:
- FileId = self._FileIndexTable.InsertFile(MetaFile, FileType)
-
- if Temporary:
- TableName = "_%s_%s_%s" % (FileType, FileId, uuid.uuid4().hex)
- else:
- TableName = "_%s_%s" % (FileType, FileId)
-
- #Table.__init__(self, Cursor, TableName, FileId, False)
- Table.__init__(self, Cursor, TableName, FileId, Temporary)
- self.Create(not self.IsIntegrity())
-
- def IsIntegrity(self):
- try:
- TimeStamp = self.MetaFile.TimeStamp
- Result = self.Cur.execute("select ID from %s where ID<0" % (self.Table)).fetchall()
- if not Result:
- # update the timestamp in database
- self._FileIndexTable.SetFileTimeStamp(self.IdBase, TimeStamp)
- return False
-
- if TimeStamp != self._FileIndexTable.GetFileTimeStamp(self.IdBase):
- # update the timestamp in database
- self._FileIndexTable.SetFileTimeStamp(self.IdBase, TimeStamp)
- return False
- except Exception, Exc:
- EdkLogger.debug(EdkLogger.DEBUG_5, str(Exc))
- return False
- return True
-
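# Illustrative sketch (an assumption about the Table base class in
# MetaDataTable.py, which is not shown here): the integer part of an ID is the
# file ID and each stored record advances by one _ID_STEP_, so records of file
# 2 get IDs of roughly 2.00000001, 2.00000002, ... bounded by _ID_MAX_.
FileId, Step = 2, 0.00000001
print [round(FileId + (n + 1) * Step, 8) for n in range(3)]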
-## Python class representation of table storing module data
-class ModuleTable(MetaFileTable):
- _ID_STEP_ = 0.00000001
- _ID_MAX_ = 0.99999999
- _COLUMN_ = '''
- ID REAL PRIMARY KEY,
- Model INTEGER NOT NULL,
- Value1 TEXT NOT NULL,
- Value2 TEXT,
- Value3 TEXT,
- Scope1 TEXT,
- Scope2 TEXT,
- BelongsToItem REAL NOT NULL,
- StartLine INTEGER NOT NULL,
- StartColumn INTEGER NOT NULL,
- EndLine INTEGER NOT NULL,
- EndColumn INTEGER NOT NULL,
- Enabled INTEGER DEFAULT 0
- '''
- # used as table end flag, in case the changes to the database are not committed to the db file
- _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"
-
- ## Constructor
- def __init__(self, Cursor, MetaFile, Temporary):
- MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_INF, Temporary)
-
- ## Insert a record into table Inf
- #
- # @param Model: Model of a Inf item
- # @param Value1: Value1 of a Inf item
- # @param Value2: Value2 of a Inf item
- # @param Value3: Value3 of a Inf item
- # @param Scope1: Arch of a Inf item
- # @param Scope2: Platform of an Inf item
- # @param BelongsToItem: The item that this item belongs to
- # @param StartLine: StartLine of a Inf item
- # @param StartColumn: StartColumn of a Inf item
- # @param EndLine: EndLine of a Inf item
- # @param EndColumn: EndColumn of a Inf item
- # @param Enabled: Whether this item is enabled
- #
- def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
- BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
- (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
- return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
- Scope2,
- BelongsToItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
- Enabled
- )
-
- ## Query table
- #
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
- # @param Platform The Platform attribute of Record
- #
- # @retval: A recordSet of all found records
- #
- def Query(self, Model, Arch=None, Platform=None, BelongsToItem=None):
- ConditionString = "Model=%s AND Enabled>=0" % Model
- ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
-
- if Arch != None and Arch != 'COMMON':
- ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
- if Platform != None and Platform != 'COMMON':
- ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Platform
- if BelongsToItem != None:
- ConditionString += " AND BelongsToItem=%s" % BelongsToItem
-
- SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
- return self.Exec(SqlCommand)
-
-## Python class representation of table storing package data
-class PackageTable(MetaFileTable):
- _COLUMN_ = '''
- ID REAL PRIMARY KEY,
- Model INTEGER NOT NULL,
- Value1 TEXT NOT NULL,
- Value2 TEXT,
- Value3 TEXT,
- Scope1 TEXT,
- Scope2 TEXT,
- BelongsToItem REAL NOT NULL,
- StartLine INTEGER NOT NULL,
- StartColumn INTEGER NOT NULL,
- EndLine INTEGER NOT NULL,
- EndColumn INTEGER NOT NULL,
- Enabled INTEGER DEFAULT 0
- '''
- # used as table end flag, in case the changes to the database are not committed to the db file
- _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1"
-
- ## Constructor
- def __init__(self, Cursor, MetaFile, Temporary):
- MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DEC, Temporary)
-
- ## Insert table
- #
- # Insert a record into table Dec
- #
- # @param Model: Model of a Dec item
- # @param Value1: Value1 of a Dec item
- # @param Value2: Value2 of a Dec item
- # @param Value3: Value3 of a Dec item
- # @param Scope1: Arch of a Dec item
- # @param Scope2: Module type of a Dec item
- # @param BelongsToItem: The item that this item belongs to
- # @param StartLine: StartLine of a Dec item
- # @param StartColumn: StartColumn of a Dec item
- # @param EndLine: EndLine of a Dec item
- # @param EndColumn: EndColumn of a Dec item
- # @param Enabled: Whether this item is enabled
- #
- def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON',
- BelongsToItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=0):
- (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
- return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
- Scope2,
- BelongsToItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
- Enabled
- )
-
- ## Query table
- #
- # @param Model: The Model of Record
- # @param Arch: The Arch attribute of Record
- #
- # @retval: A recordSet of all found records
- #
- def Query(self, Model, Arch=None):
- ConditionString = "Model=%s AND Enabled>=0" % Model
- ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
-
- if Arch != None and Arch != 'COMMON':
- ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Arch
-
- SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
- return self.Exec(SqlCommand)
-
- def GetValidExpression(self, TokenSpaceGuid, PcdCName):
- SqlCommand = "select Value1,StartLine from %s WHERE Value2='%s' and Value3='%s'" % (self.Table, TokenSpaceGuid, PcdCName)
- self.Cur.execute(SqlCommand)
- validateranges = []
- validlists = []
- expressions = []
- try:
- for row in self.Cur:
- comment = row[0]
-
- LineNum = row[1]
- comment = comment.strip("#")
- comment = comment.strip()
- oricomment = comment
- if comment.startswith("@ValidRange"):
- comment = comment.replace("@ValidRange", "", 1)
- validateranges.append(comment.split("|")[1].strip())
- if comment.startswith("@ValidList"):
- comment = comment.replace("@ValidList", "", 1)
- validlists.append(comment.split("|")[1].strip())
- if comment.startswith("@Expression"):
- comment = comment.replace("@Expression", "", 1)
- expressions.append(comment.split("|")[1].strip())
- except Exception, Exc:
- ValidType = ""
- if oricomment.startswith("@ValidRange"):
- ValidType = "@ValidRange"
- if oricomment.startswith("@ValidList"):
- ValidType = "@ValidList"
- if oricomment.startswith("@Expression"):
- ValidType = "@Expression"
- EdkLogger.error('Parser', FORMAT_INVALID, "The syntax for %s of PCD %s.%s is incorrect" % (ValidType,TokenSpaceGuid, PcdCName),
- ExtraData=oricomment,File=self.MetaFile, Line=LineNum)
- return set(), set(), set()
- return set(validateranges), set(validlists), set(expressions)
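# Illustrative sketch (standalone, hypothetical comment text): how one stored
# comment row is reduced to the expression kept in the validateranges set above.
comment = "## @ValidRange 1.1 | 0 - 8"
comment = comment.strip("#").strip()
if comment.startswith("@ValidRange"):
    comment = comment.replace("@ValidRange", "", 1)
    print comment.split("|")[1].strip()    # -> '0 - 8'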
-## Python class representation of table storing platform data
-class PlatformTable(MetaFileTable):
- _COLUMN_ = '''
- ID REAL PRIMARY KEY,
- Model INTEGER NOT NULL,
- Value1 TEXT NOT NULL,
- Value2 TEXT,
- Value3 TEXT,
- Scope1 TEXT,
- Scope2 TEXT,
- BelongsToItem REAL NOT NULL,
- FromItem REAL NOT NULL,
- StartLine INTEGER NOT NULL,
- StartColumn INTEGER NOT NULL,
- EndLine INTEGER NOT NULL,
- EndColumn INTEGER NOT NULL,
- Enabled INTEGER DEFAULT 0
- '''
- # used as table end flag, in case the changes to the database are not committed to the db file
- _DUMMY_ = "-1, -1, '====', '====', '====', '====', '====', -1, -1, -1, -1, -1, -1, -1"
-
- ## Constructor
- def __init__(self, Cursor, MetaFile, Temporary):
- MetaFileTable.__init__(self, Cursor, MetaFile, MODEL_FILE_DSC, Temporary)
-
- ## Insert table
- #
- # Insert a record into table Dsc
- #
- # @param Model: Model of a Dsc item
- # @param Value1: Value1 of a Dsc item
- # @param Value2: Value2 of a Dsc item
- # @param Value3: Value3 of a Dsc item
- # @param Scope1: Arch of a Dsc item
- # @param Scope2: Module type of a Dsc item
- # @param BelongsToItem: The item that this item belongs to
- # @param FromItem: The item from which this record comes (for !include handling)
- # @param StartLine: StartLine of a Dsc item
- # @param StartColumn: StartColumn of a Dsc item
- # @param EndLine: EndLine of a Dsc item
- # @param EndColumn: EndColumn of a Dsc item
- # @param Enabled: Whether this item is enabled
- #
- def Insert(self, Model, Value1, Value2, Value3, Scope1='COMMON', Scope2='COMMON', BelongsToItem=-1,
- FromItem=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1, Enabled=1):
- (Value1, Value2, Value3, Scope1, Scope2) = ConvertToSqlString((Value1, Value2, Value3, Scope1, Scope2))
- return Table.Insert(
- self,
- Model,
- Value1,
- Value2,
- Value3,
- Scope1,
- Scope2,
- BelongsToItem,
- FromItem,
- StartLine,
- StartColumn,
- EndLine,
- EndColumn,
- Enabled
- )
-
- ## Query table
- #
- # @param Model: The Model of Record
- # @param Scope1: Arch of a Dsc item
- # @param Scope2: Module type of a Dsc item
- # @param BelongsToItem: The item that this item belongs to
- # @param FromItem: The item from which this record comes (for !include handling)
- #
- # @retval: A recordSet of all found records
- #
- def Query(self, Model, Scope1=None, Scope2=None, BelongsToItem=None, FromItem=None):
- ConditionString = "Model=%s AND Enabled>0" % Model
- ValueString = "Value1,Value2,Value3,Scope1,Scope2,ID,StartLine"
-
- if Scope1 != None and Scope1 != 'COMMON':
- ConditionString += " AND (Scope1='%s' OR Scope1='COMMON')" % Scope1
- if Scope2 != None and Scope2 != 'COMMON':
- # Cover the case that CodeBase is 'COMMON' for BuildOptions section
- if '.' in Scope2:
- Index = Scope2.index('.')
- NewScope = 'COMMON'+ Scope2[Index:]
- ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT' OR Scope2='%s')" % (Scope2, NewScope)
- else:
- ConditionString += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT')" % Scope2
-
- if BelongsToItem != None:
- ConditionString += " AND BelongsToItem=%s" % BelongsToItem
- else:
- ConditionString += " AND BelongsToItem<0"
-
- if FromItem != None:
- ConditionString += " AND FromItem=%s" % FromItem
-
- SqlCommand = "SELECT %s FROM %s WHERE %s" % (ValueString, self.Table, ConditionString)
- return self.Exec(SqlCommand)
-
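# Illustrative sketch (standalone, hypothetical Scope2 value and model
# number): how the dotted-Scope2 branch above widens the match so that rows
# whose leading field is 'COMMON' are also returned.
Model, Scope2 = 11, 'EDKII.DXE_DRIVER'
Condition = "Model=%s AND Enabled>0" % Model
NewScope = 'COMMON' + Scope2[Scope2.index('.'):]
Condition += " AND (Scope2='%s' OR Scope2='COMMON' OR Scope2='DEFAULT' OR Scope2='%s')" % (Scope2, NewScope)
print Condition
# -> Model=11 AND Enabled>0 AND (Scope2='EDKII.DXE_DRIVER' OR Scope2='COMMON' OR Scope2='DEFAULT' OR Scope2='COMMON.DXE_DRIVER')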
-## Factory class to produce different storage for different type of meta-file
-class MetaFileStorage(object):
- _FILE_TABLE_ = {
- MODEL_FILE_INF : ModuleTable,
- MODEL_FILE_DEC : PackageTable,
- MODEL_FILE_DSC : PlatformTable,
- MODEL_FILE_OTHERS : MetaFileTable,
- }
-
- _FILE_TYPE_ = {
- ".inf" : MODEL_FILE_INF,
- ".dec" : MODEL_FILE_DEC,
- ".dsc" : MODEL_FILE_DSC,
- }
-
- ## Constructor
- def __new__(Class, Cursor, MetaFile, FileType=None, Temporary=False):
- # no type given, try to find one
- if not FileType:
- if MetaFile.Type in Class._FILE_TYPE_:
- FileType = Class._FILE_TYPE_[MetaFile.Type]
- else:
- FileType = MODEL_FILE_OTHERS
-
- # don't pass the type around if it's well known
- if FileType == MODEL_FILE_OTHERS:
- Args = (Cursor, MetaFile, FileType, Temporary)
- else:
- Args = (Cursor, MetaFile, Temporary)
-
- # create the storage object and return it to caller
- return Class._FILE_TABLE_[FileType](*Args)
-
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
deleted file mode 100644
index c224b8e0e6..0000000000
--- a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py
+++ /dev/null
@@ -1,246 +0,0 @@
-## @file
-# Common routines used by workspace
-#
-# Copyright (c) 2012 - 2016, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-from Common.Misc import sdict
-from Common.DataType import SUP_MODULE_USER_DEFINED
-from BuildClassObject import LibraryClassObject
-import Common.GlobalData as GlobalData
-
-## Get all packages from platform for specified arch, target and toolchain
-#
-# @param Platform: DscBuildData instance
- # @param BuildDatabase: The database that saves all data for all metafiles
-# @param Arch: Current arch
-# @param Target: Current target
-# @param Toolchain: Current toolchain
-# @retval: List of packages which are DecBuildData instances
-#
-def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain):
- PkgSet = set()
- for ModuleFile in Platform.Modules:
- Data = BuildDatabase[ModuleFile, Arch, Target, Toolchain]
- PkgSet.update(Data.Packages)
- for Lib in GetLiabraryInstances(Data, Platform, BuildDatabase, Arch, Target, Toolchain):
- PkgSet.update(Lib.Packages)
- return list(PkgSet)
-
-## Get all declared PCD from platform for specified arch, target and toolchain
-#
-# @param Platform: DscBuildData instance
- # @param BuildDatabase: The database that saves all data for all metafiles
-# @param Arch: Current arch
-# @param Target: Current target
-# @param Toolchain: Current toolchain
- # @retval: A dictionary containing instances of PcdClassObject keyed by (PcdCName, TokenSpaceGuid)
-#
-def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain):
- PkgList = GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain)
- DecPcds = {}
- for Pkg in PkgList:
- for Pcd in Pkg.Pcds:
- PcdCName = Pcd[0]
- PcdTokenName = Pcd[1]
- if GlobalData.MixedPcd:
- for PcdItem in GlobalData.MixedPcd.keys():
- if (PcdCName, PcdTokenName) in GlobalData.MixedPcd[PcdItem]:
- PcdCName = PcdItem[0]
- break
- if (PcdCName, PcdTokenName) not in DecPcds.keys():
- DecPcds[PcdCName, PcdTokenName] = Pkg.Pcds[Pcd]
- return DecPcds
-
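# Illustrative sketch (standalone, entirely made-up PCD names): the MixedPcd
# lookup above remaps an aliased (CName, TokenSpace) pair to the first element
# of its MixedPcd key before it is recorded in DecPcds.
MixedPcd = {('PcdFoo_Alias', 'gHypotheticalTokenSpaceGuid'):
                [('PcdFoo', 'gHypotheticalTokenSpaceGuid')]}
PcdCName, PcdTokenName = 'PcdFoo', 'gHypotheticalTokenSpaceGuid'
for PcdItem in MixedPcd.keys():
    if (PcdCName, PcdTokenName) in MixedPcd[PcdItem]:
        PcdCName = PcdItem[0]
        break
print PcdCName    # -> 'PcdFoo_Alias'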
-## Get all dependent libraries for a module
-#
-# @param Module: InfBuildData instance
-# @param Platform: DscBuildData instance
- # @param BuildDatabase: The database that saves all data for all metafiles
-# @param Arch: Current arch
-# @param Target: Current target
-# @param Toolchain: Current toolchain
-# @retval: List of dependent libraries which are InfBuildData instances
-#
-def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
- if Module.AutoGenVersion >= 0x00010005:
- return _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain)
- else:
- return _ResolveLibraryReference(Module, Platform)
-
-def _GetModuleLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
- ModuleType = Module.ModuleType
-
- # for overriding library instances with module specific setting
- PlatformModule = Platform.Modules[str(Module)]
-
- # add forced library instances (specified under LibraryClasses sections)
- #
- # If a module has a MODULE_TYPE of USER_DEFINED,
- # do not link in NULL library class instances from the global [LibraryClasses.*] sections.
- #
- if Module.ModuleType != SUP_MODULE_USER_DEFINED:
- for LibraryClass in Platform.LibraryClasses.GetKeys():
- if LibraryClass.startswith("NULL") and Platform.LibraryClasses[LibraryClass, Module.ModuleType]:
- Module.LibraryClasses[LibraryClass] = Platform.LibraryClasses[LibraryClass, Module.ModuleType]
-
- # add forced library instances (specified in module overrides)
- for LibraryClass in PlatformModule.LibraryClasses:
- if LibraryClass.startswith("NULL"):
- Module.LibraryClasses[LibraryClass] = PlatformModule.LibraryClasses[LibraryClass]
-
- # EdkII module
- LibraryConsumerList = [Module]
- Constructor = []
- ConsumedByList = sdict()
- LibraryInstance = sdict()
-
- while len(LibraryConsumerList) > 0:
- M = LibraryConsumerList.pop()
- for LibraryClassName in M.LibraryClasses:
- if LibraryClassName not in LibraryInstance:
- # override library instance for this module
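-                # Resolution order, as implemented below: a module-scoped override from the
-                # [Components] section wins over the platform-level [LibraryClasses] setting
-                # for this module type, falling back to whatever instance the consuming
-                # module's own INF names for the class, if any.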
- if LibraryClassName in PlatformModule.LibraryClasses:
- LibraryPath = PlatformModule.LibraryClasses[LibraryClassName]
- else:
- LibraryPath = Platform.LibraryClasses[LibraryClassName, ModuleType]
- if LibraryPath == None or LibraryPath == "":
- LibraryPath = M.LibraryClasses[LibraryClassName]
- if LibraryPath == None or LibraryPath == "":
- return []
-
- LibraryModule = BuildDatabase[LibraryPath, Arch, Target, Toolchain]
- # for those forced library instance (NULL library), add a fake library class
- if LibraryClassName.startswith("NULL"):
- LibraryModule.LibraryClass.append(LibraryClassObject(LibraryClassName, [ModuleType]))
- elif LibraryModule.LibraryClass == None \
- or len(LibraryModule.LibraryClass) == 0 \
- or (ModuleType != 'USER_DEFINED'
- and ModuleType not in LibraryModule.LibraryClass[0].SupModList):
-                    # only USER_DEFINED can link against any library instance regardless of its SupModList
- return []
-
- LibraryInstance[LibraryClassName] = LibraryModule
- LibraryConsumerList.append(LibraryModule)
- else:
- LibraryModule = LibraryInstance[LibraryClassName]
-
- if LibraryModule == None:
- continue
-
- if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
- Constructor.append(LibraryModule)
-
- if LibraryModule not in ConsumedByList:
- ConsumedByList[LibraryModule] = []
- # don't add current module itself to consumer list
- if M != Module:
- if M in ConsumedByList[LibraryModule]:
- continue
- ConsumedByList[LibraryModule].append(M)
- #
- # Initialize the sorted output list to the empty set
- #
- SortedLibraryList = []
- #
- # Q <- Set of all nodes with no incoming edges
- #
- LibraryList = [] #LibraryInstance.values()
- Q = []
- for LibraryClassName in LibraryInstance:
- M = LibraryInstance[LibraryClassName]
- LibraryList.append(M)
- if ConsumedByList[M] == []:
- Q.append(M)
-
- #
- # start the DAG algorithm
- #
- while True:
- EdgeRemoved = True
- while Q == [] and EdgeRemoved:
- EdgeRemoved = False
- # for each node Item with a Constructor
- for Item in LibraryList:
- if Item not in Constructor:
- continue
-                # for each Node without a constructor that has an edge e from Item to Node
- for Node in ConsumedByList[Item]:
- if Node in Constructor:
- continue
- # remove edge e from the graph if Node has no constructor
- ConsumedByList[Item].remove(Node)
- EdgeRemoved = True
- if ConsumedByList[Item] == []:
- # insert Item into Q
- Q.insert(0, Item)
- break
- if Q != []:
- break
-        # DAG is done when there are no more incoming edges for any node
- if Q == []:
- break
-
- # remove node from Q
- Node = Q.pop()
- # output Node
- SortedLibraryList.append(Node)
-
- # for each node Item with an edge e from Node to Item do
- for Item in LibraryList:
- if Node not in ConsumedByList[Item]:
- continue
- # remove edge e from the graph
- ConsumedByList[Item].remove(Node)
-
- if ConsumedByList[Item] != []:
- continue
- # insert Item into Q, if Item has no other incoming edges
- Q.insert(0, Item)
-
- #
- # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
- #
- for Item in LibraryList:
- if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
- return []
- if Item not in SortedLibraryList:
- SortedLibraryList.append(Item)
-
- #
-    # Build the list of constructor and destructor names
-    # The DAG Topo sort produces the destructor order, so the list of constructors must be generated in the reverse order
- #
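-    # Worked example (illustrative): if this Module consumes LibB and LibB consumes
-    # LibC, the topological sort above yields [LibB, LibC] (destructor order);
-    # reversing it gives [LibC, LibB], so LibC's constructor is invoked before LibB's.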
- SortedLibraryList.reverse()
- return SortedLibraryList
-
-def _ResolveLibraryReference(Module, Platform):
- LibraryConsumerList = [Module]
-
- # "CompilerStub" is a must for Edk modules
- if Module.Libraries:
- Module.Libraries.append("CompilerStub")
- LibraryList = []
- while len(LibraryConsumerList) > 0:
- M = LibraryConsumerList.pop()
- for LibraryName in M.Libraries:
- Library = Platform.LibraryClasses[LibraryName, ':dummy:']
- if Library == None:
- for Key in Platform.LibraryClasses.data.keys():
- if LibraryName.upper() == Key.upper():
- Library = Platform.LibraryClasses[Key, ':dummy:']
- break
- if Library == None:
- continue
-
- if Library not in LibraryList:
- LibraryList.append(Library)
- LibraryConsumerList.append(Library)
- return LibraryList
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
deleted file mode 100644
index c1af5c7fe3..0000000000
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ /dev/null
@@ -1,3154 +0,0 @@
-## @file
-# This file is used to create a database used by build tool
-#
-# Copyright (c) 2008 - 2017, Intel Corporation. All rights reserved.<BR>
-# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#
-
-##
-# Import Modules
-#
-import sqlite3
-import Common.LongFilePathOs as os
-import pickle
-import uuid
-
-import Common.EdkLogger as EdkLogger
-import Common.GlobalData as GlobalData
-from Common.MultipleWorkspace import MultipleWorkspace as mws
-
-from Common.String import *
-from Common.DataType import *
-from Common.Misc import *
-from types import *
-
-from CommonDataClass.CommonClass import SkuInfoClass
-
-from MetaDataTable import *
-from MetaFileTable import *
-from MetaFileParser import *
-from BuildClassObject import *
-from WorkspaceCommon import GetDeclaredPcd
-from Common.Misc import AnalyzeDscPcd
-from Common.Misc import ProcessDuplicatedInf
-import re
-from Common.Parsing import IsValidWord
-from Common.VariableAttributes import VariableAttributes
-import Common.GlobalData as GlobalData
-
-## Platform build information from DSC file
-#
-# This class is used to retrieve information stored in the database and convert it
-# into PlatformBuildClassObject form for easier use by AutoGen.
-#
-class DscBuildData(PlatformBuildClassObject):
- # dict used to convert PCD type in database to string used by build tool
- _PCD_TYPE_STRING_ = {
- MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
- MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
- MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
- MODEL_PCD_DYNAMIC : "Dynamic",
- MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
- MODEL_PCD_DYNAMIC_HII : "DynamicHii",
- MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
- MODEL_PCD_DYNAMIC_EX : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
- MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
- }
-
- # dict used to convert part of [Defines] to members of DscBuildData directly
- _PROPERTY_ = {
- #
- # Required Fields
- #
- TAB_DSC_DEFINES_PLATFORM_NAME : "_PlatformName",
- TAB_DSC_DEFINES_PLATFORM_GUID : "_Guid",
- TAB_DSC_DEFINES_PLATFORM_VERSION : "_Version",
- TAB_DSC_DEFINES_DSC_SPECIFICATION : "_DscSpecification",
- #TAB_DSC_DEFINES_OUTPUT_DIRECTORY : "_OutputDirectory",
- #TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES : "_SupArchList",
- #TAB_DSC_DEFINES_BUILD_TARGETS : "_BuildTargets",
- TAB_DSC_DEFINES_SKUID_IDENTIFIER : "_SkuName",
- #TAB_DSC_DEFINES_FLASH_DEFINITION : "_FlashDefinition",
- TAB_DSC_DEFINES_BUILD_NUMBER : "_BuildNumber",
- TAB_DSC_DEFINES_MAKEFILE_NAME : "_MakefileName",
- TAB_DSC_DEFINES_BS_BASE_ADDRESS : "_BsBaseAddress",
- TAB_DSC_DEFINES_RT_BASE_ADDRESS : "_RtBaseAddress",
- #TAB_DSC_DEFINES_RFC_LANGUAGES : "_RFCLanguages",
- #TAB_DSC_DEFINES_ISO_LANGUAGES : "_ISOLanguages",
- }
-
- # used to compose dummy library class name for those forced library instances
- _NullLibraryNumber = 0
-
- ## Constructor of DscBuildData
- #
- # Initialize object of DscBuildData
- #
- # @param FilePath The path of platform description file
- # @param RawData The raw data of DSC file
- # @param BuildDataBase Database used to retrieve module/package information
- # @param Arch The target architecture
-    # @param Target        The current build target
-    # @param Toolchain     The current toolchain
- #
- def __init__(self, FilePath, RawData, BuildDataBase, Arch='COMMON', Target=None, Toolchain=None):
- self.MetaFile = FilePath
- self._RawData = RawData
- self._Bdb = BuildDataBase
- self._Arch = Arch
- self._Target = Target
- self._Toolchain = Toolchain
- self._Clear()
- self._HandleOverridePath()
-
- ## XXX[key] = value
- def __setitem__(self, key, value):
- self.__dict__[self._PROPERTY_[key]] = value
-
- ## value = XXX[key]
- def __getitem__(self, key):
- return self.__dict__[self._PROPERTY_[key]]
-
- ## "in" test support
- def __contains__(self, key):
- return key in self._PROPERTY_
-
-    ## Set all internally used members of DscBuildData to None
- def _Clear(self):
- self._Header = None
- self._PlatformName = None
- self._Guid = None
- self._Version = None
- self._DscSpecification = None
- self._OutputDirectory = None
- self._SupArchList = None
- self._BuildTargets = None
- self._SkuName = None
- self._SkuIdentifier = None
- self._AvilableSkuIds = None
- self._PcdInfoFlag = None
- self._VarCheckFlag = None
- self._FlashDefinition = None
- self._Prebuild = None
- self._Postbuild = None
- self._BuildNumber = None
- self._MakefileName = None
- self._BsBaseAddress = None
- self._RtBaseAddress = None
- self._SkuIds = None
- self._Modules = None
- self._LibraryInstances = None
- self._LibraryClasses = None
- self._Pcds = None
- self._DecPcds = None
- self._BuildOptions = None
- self._ModuleTypeOptions = None
- self._LoadFixAddress = None
- self._RFCLanguages = None
- self._ISOLanguages = None
- self._VpdToolGuid = None
- self.__Macros = None
-
-
- ## handle Override Path of Module
- def _HandleOverridePath(self):
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
- Macros = self._Macros
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- for Record in RecordList:
- ModuleId = Record[5]
- LineNo = Record[6]
- ModuleFile = PathClass(NormPath(Record[0]), GlobalData.gWorkspace, Arch=self._Arch)
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH, self._Arch, None, ModuleId]
- if RecordList != []:
- SourceOverridePath = mws.join(GlobalData.gWorkspace, NormPath(RecordList[0][0]))
-
- # Check if the source override path exists
- if not os.path.isdir(SourceOverridePath):
- EdkLogger.error('build', FILE_NOT_FOUND, Message='Source override path does not exist:', File=self.MetaFile, ExtraData=SourceOverridePath, Line=LineNo)
-
- #Add to GlobalData Variables
- GlobalData.gOverrideDir[ModuleFile.Key] = SourceOverridePath
-
- ## Get current effective macros
- def _GetMacros(self):
- if self.__Macros == None:
- self.__Macros = {}
- self.__Macros.update(GlobalData.gPlatformDefines)
- self.__Macros.update(GlobalData.gGlobalDefines)
- self.__Macros.update(GlobalData.gCommandLineDefines)
- return self.__Macros
-
- ## Get architecture
- def _GetArch(self):
- return self._Arch
-
- ## Set architecture
- #
-    # Changing the default ARCH to another one may affect all other information
-    # because all information in a platform may be ARCH-related. That's
-    # why we need to clear all internally used members, in order to cause all
-    # information to be re-retrieved.
- #
- # @param Value The value of ARCH
- #
- def _SetArch(self, Value):
- if self._Arch == Value:
- return
- self._Arch = Value
- self._Clear()
-
- ## Retrieve all information in [Defines] section
- #
-    # (Retrieving all [Defines] information in one shot is just to save time.)
- #
- def _GetHeaderInfo(self):
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
- for Record in RecordList:
- Name = Record[1]
-            # items defined in _PROPERTY_ don't need additional processing
-
- # some special items in [Defines] section need special treatment
- if Name == TAB_DSC_DEFINES_OUTPUT_DIRECTORY:
- self._OutputDirectory = NormPath(Record[2], self._Macros)
- if ' ' in self._OutputDirectory:
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in OUTPUT_DIRECTORY",
- File=self.MetaFile, Line=Record[-1],
- ExtraData=self._OutputDirectory)
- elif Name == TAB_DSC_DEFINES_FLASH_DEFINITION:
- self._FlashDefinition = PathClass(NormPath(Record[2], self._Macros), GlobalData.gWorkspace)
- ErrorCode, ErrorInfo = self._FlashDefinition.Validate('.fdf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=Record[-1],
- ExtraData=ErrorInfo)
- elif Name == TAB_DSC_PREBUILD:
- PrebuildValue = Record[2]
- if Record[2][0] == '"':
- if Record[2][-1] != '"':
- EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes in the end of %s statement.' % TAB_DSC_PREBUILD,
- File=self.MetaFile, Line=Record[-1])
- PrebuildValue = Record[2][1:-1]
- self._Prebuild = PathClass(NormPath(PrebuildValue, self._Macros), GlobalData.gWorkspace)
- elif Name == TAB_DSC_POSTBUILD:
- PostbuildValue = Record[2]
- if Record[2][0] == '"':
- if Record[2][-1] != '"':
- EdkLogger.error('build', FORMAT_INVALID, 'Missing double quotes in the end of %s statement.' % TAB_DSC_POSTBUILD,
- File=self.MetaFile, Line=Record[-1])
- PostbuildValue = Record[2][1:-1]
- self._Postbuild = PathClass(NormPath(PostbuildValue, self._Macros), GlobalData.gWorkspace)
- elif Name == TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES:
- self._SupArchList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
- elif Name == TAB_DSC_DEFINES_BUILD_TARGETS:
- self._BuildTargets = GetSplitValueList(Record[2])
- elif Name == TAB_DSC_DEFINES_SKUID_IDENTIFIER:
- if self._SkuName == None:
- self._SkuName = Record[2]
- self._SkuIdentifier = Record[2]
- self._AvilableSkuIds = Record[2]
- elif Name == TAB_DSC_DEFINES_PCD_INFO_GENERATION:
- self._PcdInfoFlag = Record[2]
- elif Name == TAB_DSC_DEFINES_PCD_VAR_CHECK_GENERATION:
- self._VarCheckFlag = Record[2]
- elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
- try:
- self._LoadFixAddress = int (Record[2], 0)
- except:
- EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (Record[2]))
- elif Name == TAB_DSC_DEFINES_RFC_LANGUAGES:
- if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for RFC_LANGUAGES must have double quotes around it, for example: RFC_LANGUAGES = "en-us;zh-hans"',
- File=self.MetaFile, Line=Record[-1])
- LanguageCodes = Record[2][1:-1]
- if not LanguageCodes:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more RFC4646 format language code must be provided for RFC_LANGUAGES statement',
- File=self.MetaFile, Line=Record[-1])
- LanguageList = GetSplitValueList(LanguageCodes, TAB_SEMI_COLON_SPLIT)
-                # check whether there are empty entries in the list
- if None in LanguageList:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more empty language code is in RFC_LANGUAGES statement',
- File=self.MetaFile, Line=Record[-1])
- self._RFCLanguages = LanguageList
- elif Name == TAB_DSC_DEFINES_ISO_LANGUAGES:
- if not Record[2] or Record[2][0] != '"' or Record[2][-1] != '"' or len(Record[2]) == 1:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'language code for ISO_LANGUAGES must have double quotes around it, for example: ISO_LANGUAGES = "engchn"',
- File=self.MetaFile, Line=Record[-1])
- LanguageCodes = Record[2][1:-1]
- if not LanguageCodes:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'one or more ISO639-2 format language code must be provided for ISO_LANGUAGES statement',
- File=self.MetaFile, Line=Record[-1])
- if len(LanguageCodes)%3:
- EdkLogger.error('build', FORMAT_NOT_SUPPORTED, 'bad ISO639-2 format for ISO_LANGUAGES',
- File=self.MetaFile, Line=Record[-1])
- LanguageList = []
- for i in range(0, len(LanguageCodes), 3):
- LanguageList.append(LanguageCodes[i:i+3])
- self._ISOLanguages = LanguageList
- elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
- #
-                # try to convert the GUID to a real UUID value to see whether the GUID format
-                # for VPD_TOOL_GUID is correct.
- #
- try:
- uuid.UUID(Record[2])
- except:
- EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
- self._VpdToolGuid = Record[2]
- elif Name in self:
- self[Name] = Record[2]
- # set _Header to non-None in order to avoid database re-querying
- self._Header = 'DUMMY'
-
- ## Retrieve platform name
- def _GetPlatformName(self):
- if self._PlatformName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._PlatformName == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_NAME", File=self.MetaFile)
- return self._PlatformName
-
- ## Retrieve file guid
- def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Guid == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_GUID", File=self.MetaFile)
- return self._Guid
-
- ## Retrieve platform version
- def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Version == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
- return self._Version
-
- ## Retrieve platform description file version
- def _GetDscSpec(self):
- if self._DscSpecification == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._DscSpecification == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
- return self._DscSpecification
-
- ## Retrieve OUTPUT_DIRECTORY
- def _GetOutpuDir(self):
- if self._OutputDirectory == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._OutputDirectory == None:
- self._OutputDirectory = os.path.join("Build", self._PlatformName)
- return self._OutputDirectory
-
- ## Retrieve SUPPORTED_ARCHITECTURES
- def _GetSupArch(self):
- if self._SupArchList == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._SupArchList == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No SUPPORTED_ARCHITECTURES", File=self.MetaFile)
- return self._SupArchList
-
- ## Retrieve BUILD_TARGETS
- def _GetBuildTarget(self):
- if self._BuildTargets == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BuildTargets == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BUILD_TARGETS", File=self.MetaFile)
- return self._BuildTargets
-
- def _GetPcdInfoFlag(self):
- if self._PcdInfoFlag == None or self._PcdInfoFlag.upper() == 'FALSE':
- return False
- elif self._PcdInfoFlag.upper() == 'TRUE':
- return True
- else:
- return False
- def _GetVarCheckFlag(self):
- if self._VarCheckFlag == None or self._VarCheckFlag.upper() == 'FALSE':
- return False
- elif self._VarCheckFlag.upper() == 'TRUE':
- return True
- else:
- return False
- def _GetAviableSkuIds(self):
- if self._AvilableSkuIds:
- return self._AvilableSkuIds
- return self.SkuIdentifier
- def _GetSkuIdentifier(self):
- if self._SkuName:
- return self._SkuName
- if self._SkuIdentifier == None:
- if self._Header == None:
- self._GetHeaderInfo()
- return self._SkuIdentifier
- ## Retrieve SKUID_IDENTIFIER
- def _GetSkuName(self):
- if self._SkuName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if (self._SkuName == None or self._SkuName not in self.SkuIds):
- self._SkuName = 'DEFAULT'
- return self._SkuName
-
- ## Override SKUID_IDENTIFIER
- def _SetSkuName(self, Value):
- self._SkuName = Value
- self._Pcds = None
-
- def _GetFdfFile(self):
- if self._FlashDefinition == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._FlashDefinition == None:
- self._FlashDefinition = ''
- return self._FlashDefinition
-
- def _GetPrebuild(self):
- if self._Prebuild == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Prebuild == None:
- self._Prebuild = ''
- return self._Prebuild
-
- def _GetPostbuild(self):
- if self._Postbuild == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Postbuild == None:
- self._Postbuild = ''
- return self._Postbuild
-
-    ## Retrieve BUILD_NUMBER
- def _GetBuildNumber(self):
- if self._BuildNumber == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BuildNumber == None:
- self._BuildNumber = ''
- return self._BuildNumber
-
- ## Retrieve MAKEFILE_NAME
- def _GetMakefileName(self):
- if self._MakefileName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._MakefileName == None:
- self._MakefileName = ''
- return self._MakefileName
-
- ## Retrieve BsBaseAddress
- def _GetBsBaseAddress(self):
- if self._BsBaseAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._BsBaseAddress == None:
- self._BsBaseAddress = ''
- return self._BsBaseAddress
-
- ## Retrieve RtBaseAddress
- def _GetRtBaseAddress(self):
- if self._RtBaseAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._RtBaseAddress == None:
- self._RtBaseAddress = ''
- return self._RtBaseAddress
-
- ## Retrieve the top address for the load fix address
- def _GetLoadFixAddress(self):
- if self._LoadFixAddress == None:
- if self._Header == None:
- self._GetHeaderInfo()
-
- if self._LoadFixAddress == None:
- self._LoadFixAddress = self._Macros.get(TAB_FIX_LOAD_TOP_MEMORY_ADDRESS, '0')
-
- try:
- self._LoadFixAddress = int (self._LoadFixAddress, 0)
- except:
- EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (self._LoadFixAddress))
-
- #
-        # If defined on the command line, it should override the value in the DSC file.
- #
- if 'FIX_LOAD_TOP_MEMORY_ADDRESS' in GlobalData.gCommandLineDefines.keys():
- try:
- self._LoadFixAddress = int(GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS'], 0)
- except:
- EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS %s is not valid dec or hex string" % (GlobalData.gCommandLineDefines['FIX_LOAD_TOP_MEMORY_ADDRESS']))
-
- if self._LoadFixAddress < 0:
- EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid negative value 0x%x" % (self._LoadFixAddress))
- if self._LoadFixAddress != 0xFFFFFFFFFFFFFFFF and self._LoadFixAddress % 0x1000 != 0:
- EdkLogger.error("build", PARAMETER_INVALID, "FIX_LOAD_TOP_MEMORY_ADDRESS is set to the invalid unaligned 4K value 0x%x" % (self._LoadFixAddress))
-
- return self._LoadFixAddress
-
- ## Retrieve RFCLanguage filter
- def _GetRFCLanguages(self):
- if self._RFCLanguages == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._RFCLanguages == None:
- self._RFCLanguages = []
- return self._RFCLanguages
-
- ## Retrieve ISOLanguage filter
- def _GetISOLanguages(self):
- if self._ISOLanguages == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._ISOLanguages == None:
- self._ISOLanguages = []
- return self._ISOLanguages
- ## Retrieve the GUID string for VPD tool
- def _GetVpdToolGuid(self):
- if self._VpdToolGuid == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._VpdToolGuid == None:
- self._VpdToolGuid = ''
- return self._VpdToolGuid
-
- ## Retrieve [SkuIds] section information
- def _GetSkuIds(self):
- if self._SkuIds == None:
- self._SkuIds = sdict()
- RecordList = self._RawData[MODEL_EFI_SKU_ID, self._Arch]
- for Record in RecordList:
- if Record[0] in [None, '']:
- EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID number',
- File=self.MetaFile, Line=Record[-1])
- if Record[1] in [None, '']:
- EdkLogger.error('build', FORMAT_INVALID, 'No Sku ID name',
- File=self.MetaFile, Line=Record[-1])
- self._SkuIds[Record[1]] = Record[0]
- if 'DEFAULT' not in self._SkuIds:
- self._SkuIds['DEFAULT'] = '0'
- if 'COMMON' not in self._SkuIds:
- self._SkuIds['COMMON'] = '0'
- return self._SkuIds
-
- ## Retrieve [Components] section information
- def _GetModules(self):
- if self._Modules != None:
- return self._Modules
-
- self._Modules = sdict()
- RecordList = self._RawData[MODEL_META_DATA_COMPONENT, self._Arch]
- Macros = self._Macros
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- for Record in RecordList:
- DuplicatedFile = False
-
- ModuleFile = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- ModuleId = Record[5]
- LineNo = Record[6]
-
- # check the file validation
- ErrorCode, ErrorInfo = ModuleFile.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
- # Check duplication
- # If arch is COMMON, no duplicate module is checked since all modules in all component sections are selected
- if self._Arch != 'COMMON' and ModuleFile in self._Modules:
- DuplicatedFile = True
-
- Module = ModuleBuildClassObject()
- Module.MetaFile = ModuleFile
-
- # get module private library instance
- RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, ModuleId]
- for Record in RecordList:
- LibraryClass = Record[0]
- LibraryPath = PathClass(NormPath(Record[1], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- LineNo = Record[-1]
-
- # check the file validation
- ErrorCode, ErrorInfo = LibraryPath.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
-
- if LibraryClass == '' or LibraryClass == 'NULL':
- self._NullLibraryNumber += 1
- LibraryClass = 'NULL%d' % self._NullLibraryNumber
- EdkLogger.verbose("Found forced library for %s\n\t%s [%s]" % (ModuleFile, LibraryPath, LibraryClass))
- Module.LibraryClasses[LibraryClass] = LibraryPath
- if LibraryPath not in self.LibraryInstances:
- self.LibraryInstances.append(LibraryPath)
-
- # get module private PCD setting
- for Type in [MODEL_PCD_FIXED_AT_BUILD, MODEL_PCD_PATCHABLE_IN_MODULE, \
- MODEL_PCD_FEATURE_FLAG, MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
- RecordList = self._RawData[Type, self._Arch, None, ModuleId]
- for TokenSpaceGuid, PcdCName, Setting, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- TokenList = GetSplitValueList(Setting)
- DefaultValue = TokenList[0]
- if len(TokenList) > 1:
- MaxDatumSize = TokenList[1]
- else:
- MaxDatumSize = ''
- TypeString = self._PCD_TYPE_STRING_[Type]
- Pcd = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- TypeString,
- '',
- DefaultValue,
- '',
- MaxDatumSize,
- {},
- False,
- None
- )
- Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
-
- # get module private build options
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, None, ModuleId]
- for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- if (ToolChainFamily, ToolChain) not in Module.BuildOptions:
- Module.BuildOptions[ToolChainFamily, ToolChain] = Option
- else:
- OptionString = Module.BuildOptions[ToolChainFamily, ToolChain]
- Module.BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
-
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, None, ModuleId]
- if DuplicatedFile and not RecordList:
- EdkLogger.error('build', FILE_DUPLICATED, File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
- if RecordList:
- if len(RecordList) != 1:
- EdkLogger.error('build', OPTION_UNKNOWN, 'Only FILE_GUID can be listed in <Defines> section.',
- File=self.MetaFile, ExtraData=str(ModuleFile), Line=LineNo)
- ModuleFile = ProcessDuplicatedInf(ModuleFile, RecordList[0][2], GlobalData.gWorkspace)
- ModuleFile.Arch = self._Arch
-
- self._Modules[ModuleFile] = Module
- return self._Modules
-
- ## Retrieve all possible library instances used in this platform
- def _GetLibraryInstances(self):
- if self._LibraryInstances == None:
- self._GetLibraryClasses()
- return self._LibraryInstances
-
- ## Retrieve [LibraryClasses] information
- def _GetLibraryClasses(self):
- if self._LibraryClasses == None:
- self._LibraryInstances = []
- #
-            # tdict is a special kind of dict, used for selecting the correct
-            # library instance for a given library class and module type
- #
- LibraryClassDict = tdict(True, 3)
- # track all library class names
- LibraryClassSet = set()
- RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, None, -1]
- Macros = self._Macros
- for Record in RecordList:
- LibraryClass, LibraryInstance, Dummy, Arch, ModuleType, Dummy, LineNo = Record
- if LibraryClass == '' or LibraryClass == 'NULL':
- self._NullLibraryNumber += 1
- LibraryClass = 'NULL%d' % self._NullLibraryNumber
- EdkLogger.verbose("Found forced library for arch=%s\n\t%s [%s]" % (Arch, LibraryInstance, LibraryClass))
- LibraryClassSet.add(LibraryClass)
- LibraryInstance = PathClass(NormPath(LibraryInstance, Macros), GlobalData.gWorkspace, Arch=self._Arch)
- # check the file validation
- ErrorCode, ErrorInfo = LibraryInstance.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
-
- if ModuleType != 'COMMON' and ModuleType not in SUP_MODULE_LIST:
- EdkLogger.error('build', OPTION_UNKNOWN, "Unknown module type [%s]" % ModuleType,
- File=self.MetaFile, ExtraData=LibraryInstance, Line=LineNo)
- LibraryClassDict[Arch, ModuleType, LibraryClass] = LibraryInstance
- if LibraryInstance not in self._LibraryInstances:
- self._LibraryInstances.append(LibraryInstance)
-
- # resolve the specific library instance for each class and each module type
- self._LibraryClasses = tdict(True)
- for LibraryClass in LibraryClassSet:
- # try all possible module types
- for ModuleType in SUP_MODULE_LIST:
- LibraryInstance = LibraryClassDict[self._Arch, ModuleType, LibraryClass]
- if LibraryInstance == None:
- continue
- self._LibraryClasses[LibraryClass, ModuleType] = LibraryInstance
-
-            # for Edk style library instances, which are listed in a different section
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch]
- for Record in RecordList:
- File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- LineNo = Record[-1]
- # check the file validation
- ErrorCode, ErrorInfo = File.Validate('.inf')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, File=self.MetaFile, Line=LineNo,
- ExtraData=ErrorInfo)
- if File not in self._LibraryInstances:
- self._LibraryInstances.append(File)
- #
- # we need the module name as the library class name, so we have
- # to parse it here. (self._Bdb[] will trigger a file parse if it
- # hasn't been parsed)
- #
- Library = self._Bdb[File, self._Arch, self._Target, self._Toolchain]
- self._LibraryClasses[Library.BaseName, ':dummy:'] = Library
- return self._LibraryClasses
-
- def _ValidatePcd(self, PcdCName, TokenSpaceGuid, Setting, PcdType, LineNo):
- if self._DecPcds == None:
- self._DecPcds = GetDeclaredPcd(self, self._Bdb, self._Arch, self._Target, self._Toolchain)
- FdfInfList = []
- if GlobalData.gFdfParser:
- FdfInfList = GlobalData.gFdfParser.Profile.InfList
-
- PkgSet = set()
- for Inf in FdfInfList:
- ModuleFile = PathClass(NormPath(Inf), GlobalData.gWorkspace, Arch=self._Arch)
- if ModuleFile in self._Modules:
- continue
- ModuleData = self._Bdb[ModuleFile, self._Arch, self._Target, self._Toolchain]
- PkgSet.update(ModuleData.Packages)
- DecPcds = {}
- for Pkg in PkgSet:
- for Pcd in Pkg.Pcds:
- DecPcds[Pcd[0], Pcd[1]] = Pkg.Pcds[Pcd]
- self._DecPcds.update(DecPcds)
-
- if (PcdCName, TokenSpaceGuid) not in self._DecPcds:
- EdkLogger.error('build', PARSER_ERROR,
- "Pcd (%s.%s) defined in DSC is not declared in DEC files. Arch: ['%s']" % (TokenSpaceGuid, PcdCName, self._Arch),
- File=self.MetaFile, Line=LineNo)
- ValueList, IsValid, Index = AnalyzeDscPcd(Setting, PcdType, self._DecPcds[PcdCName, TokenSpaceGuid].DatumType)
- if not IsValid and PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
- EdkLogger.error('build', FORMAT_INVALID, "Pcd format incorrect.", File=self.MetaFile, Line=LineNo,
- ExtraData="%s.%s|%s" % (TokenSpaceGuid, PcdCName, Setting))
- if ValueList[Index] and PcdType not in [MODEL_PCD_FEATURE_FLAG, MODEL_PCD_FIXED_AT_BUILD]:
- try:
- ValueList[Index] = ValueExpression(ValueList[Index], GlobalData.gPlatformPcds)(True)
- except WrnExpression, Value:
- ValueList[Index] = Value.result
- except EvaluationException, Excpt:
- if hasattr(Excpt, 'Pcd'):
- if Excpt.Pcd in GlobalData.gPlatformOtherPcds:
- EdkLogger.error('Parser', FORMAT_INVALID, "Cannot use this PCD (%s) in an expression as"
- " it must be defined in a [PcdsFixedAtBuild] or [PcdsFeatureFlag] section"
- " of the DSC file" % Excpt.Pcd,
- File=self.MetaFile, Line=LineNo)
- else:
- EdkLogger.error('Parser', FORMAT_INVALID, "PCD (%s) is not defined in DSC file" % Excpt.Pcd,
- File=self.MetaFile, Line=LineNo)
- else:
- EdkLogger.error('Parser', FORMAT_INVALID, "Invalid expression: %s" % str(Excpt),
- File=self.MetaFile, Line=LineNo)
- if ValueList[Index] == 'True':
- ValueList[Index] = '1'
- elif ValueList[Index] == 'False':
- ValueList[Index] = '0'
- if ValueList[Index]:
- Valid, ErrStr = CheckPcdDatum(self._DecPcds[PcdCName, TokenSpaceGuid].DatumType, ValueList[Index])
- if not Valid:
- EdkLogger.error('build', FORMAT_INVALID, ErrStr, File=self.MetaFile, Line=LineNo,
- ExtraData="%s.%s" % (TokenSpaceGuid, PcdCName))
- return ValueList
-
- ## Retrieve all PCD settings in platform
- def _GetPcds(self):
- if self._Pcds == None:
- self._Pcds = sdict()
- self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
- self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
- self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
- self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_DEFAULT))
- self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_HII))
- self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_VPD))
- self._Pcds.update(self._GetDynamicPcd(MODEL_PCD_DYNAMIC_EX_DEFAULT))
- self._Pcds.update(self._GetDynamicHiiPcd(MODEL_PCD_DYNAMIC_EX_HII))
- self._Pcds.update(self._GetDynamicVpdPcd(MODEL_PCD_DYNAMIC_EX_VPD))
- return self._Pcds
-
- ## Retrieve [BuildOptions]
- def _GetBuildOptions(self):
- if self._BuildOptions == None:
- self._BuildOptions = sdict()
- #
- # Retrieve build option for EDKII and EDK style module
- #
- for CodeBase in (EDKII_NAME, EDK_NAME):
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, CodeBase]
- for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4 in RecordList:
- CurKey = (ToolChainFamily, ToolChain, CodeBase)
- #
- # Only flags can be appended
- #
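-                    # e.g. (illustrative) two *_*_*_CC_FLAGS entries for the same key are
-                    # concatenated, while an option starting with '=' (or any non-FLAGS
-                    # option) simply replaces the previously recorded value.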
- if CurKey not in self._BuildOptions or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
- self._BuildOptions[CurKey] = Option
- else:
- self._BuildOptions[CurKey] += ' ' + Option
- return self._BuildOptions
-
- def GetBuildOptionsByModuleType(self, Edk, ModuleType):
- if self._ModuleTypeOptions == None:
- self._ModuleTypeOptions = sdict()
- if (Edk, ModuleType) not in self._ModuleTypeOptions:
- options = sdict()
- self._ModuleTypeOptions[Edk, ModuleType] = options
- DriverType = '%s.%s' % (Edk, ModuleType)
- CommonDriverType = '%s.%s' % ('COMMON', ModuleType)
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, DriverType]
- for ToolChainFamily, ToolChain, Option, Arch, Type, Dummy3, Dummy4 in RecordList:
- if Type == DriverType or Type == CommonDriverType:
- Key = (ToolChainFamily, ToolChain, Edk)
- if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
- options[Key] = Option
- else:
- options[Key] += ' ' + Option
- return self._ModuleTypeOptions[Edk, ModuleType]
-
- ## Retrieve non-dynamic PCD settings
- #
- # @param Type PCD type
- #
- # @retval a dict object contains settings of given PCD type
- #
- def _GetPcd(self, Type):
- Pcds = sdict()
- #
-        # tdict is a special kind of dict, used for selecting the correct
-        # PCD settings for a given ARCH
- #
-
- SkuObj = SkuClass(self.SkuIdentifier,self.SkuIds)
-
- PcdDict = tdict(True, 3)
- PcdSet = set()
- # Find out all possible PCD candidates for self._Arch
- RecordList = self._RawData[Type, self._Arch]
- PcdValueDict = sdict()
- for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- if SkuName in (SkuObj.SystemSkuId,'DEFAULT','COMMON'):
- PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
- PcdDict[Arch, PcdCName, TokenSpaceGuid,SkuName] = Setting
-
- #handle pcd value override
- for PcdCName, TokenSpaceGuid, SkuName,Dummy4 in PcdSet:
- Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid,SkuName]
- if Setting == None:
- continue
- PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
- if (PcdCName, TokenSpaceGuid) in PcdValueDict:
- PcdValueDict[PcdCName, TokenSpaceGuid][SkuName] = (PcdValue,DatumType,MaxDatumSize)
- else:
- PcdValueDict[PcdCName, TokenSpaceGuid] = {SkuName:(PcdValue,DatumType,MaxDatumSize)}
-
- PcdsKeys = PcdValueDict.keys()
- for PcdCName,TokenSpaceGuid in PcdsKeys:
-
- PcdSetting = PcdValueDict[PcdCName, TokenSpaceGuid]
- PcdValue = None
- DatumType = None
- MaxDatumSize = None
- if 'COMMON' in PcdSetting:
- PcdValue,DatumType,MaxDatumSize = PcdSetting['COMMON']
- if 'DEFAULT' in PcdSetting:
- PcdValue,DatumType,MaxDatumSize = PcdSetting['DEFAULT']
- if SkuObj.SystemSkuId in PcdSetting:
- PcdValue,DatumType,MaxDatumSize = PcdSetting[SkuObj.SystemSkuId]
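-            # Effective value precedence (per the cascade above): COMMON is overridden
-            # by DEFAULT, which is overridden by the setting for the active system SKU,
-            # since each later assignment replaces the previous one.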
-
- Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- self._PCD_TYPE_STRING_[Type],
- DatumType,
- PcdValue,
- '',
- MaxDatumSize,
- {},
- False,
- None
- )
- return Pcds
-
- ## Retrieve dynamic PCD settings
- #
- # @param Type PCD type
- #
- # @retval a dict object contains settings of given PCD type
- #
- def _GetDynamicPcd(self, Type):
-
- SkuObj = SkuClass(self.SkuIdentifier,self.SkuIds)
-
- Pcds = sdict()
- #
-        # tdict is a special kind of dict, used for selecting the correct
-        # PCD settings for a given ARCH and SKU
- #
- PcdDict = tdict(True, 4)
- PcdList = []
- # Find out all possible PCD candidates for self._Arch
- RecordList = self._RawData[Type, self._Arch]
- AvailableSkuIdSet = SkuObj.AvailableSkuIdSet.copy()
-
- AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
- for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- if SkuName not in AvailableSkuIdSet:
- continue
-
- PcdList.append((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
- PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
- # Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid, SkuName, Dummy4 in PcdList:
-
- Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
- continue
-
- PcdValue, DatumType, MaxDatumSize = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
- SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName], '', '', '', '', '', PcdValue)
- if (PcdCName,TokenSpaceGuid) in Pcds.keys():
- pcdObject = Pcds[PcdCName,TokenSpaceGuid]
- pcdObject.SkuInfoList[SkuName] = SkuInfo
- if MaxDatumSize.strip():
- CurrentMaxSize = int(MaxDatumSize.strip(),0)
- else:
- CurrentMaxSize = 0
- if pcdObject.MaxDatumSize:
- PcdMaxSize = int(pcdObject.MaxDatumSize,0)
- else:
- PcdMaxSize = 0
- if CurrentMaxSize > PcdMaxSize:
- pcdObject.MaxDatumSize = str(CurrentMaxSize)
- else:
- Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- self._PCD_TYPE_STRING_[Type],
- DatumType,
- PcdValue,
- '',
- MaxDatumSize,
- {SkuName : SkuInfo},
- False,
- None
- )
-
- for pcd in Pcds.values():
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
- if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- valuefromDec = pcdDecObject.DefaultValue
- SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '', '', '', valuefromDec)
- pcd.SkuInfoList['DEFAULT'] = SkuInfo
- elif 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- pcd.SkuInfoList['DEFAULT'] = pcd.SkuInfoList['COMMON']
- del(pcd.SkuInfoList['COMMON'])
- elif 'DEFAULT' in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- del(pcd.SkuInfoList['COMMON'])
- if SkuObj.SkuUsageType == SkuObj.SINGLE:
- if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
- pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
- del(pcd.SkuInfoList['DEFAULT'])
-
- return Pcds
-
- def CompareVarAttr(self, Attr1, Attr2):
- if not Attr1 or not Attr2: # for empty string
- return True
- Attr1s = [attr.strip() for attr in Attr1.split(",")]
- Attr1Set = set(Attr1s)
- Attr2s = [attr.strip() for attr in Attr2.split(",")]
- Attr2Set = set(Attr2s)
- if Attr2Set == Attr1Set:
- return True
- else:
- return False
- ## Retrieve dynamic HII PCD settings
- #
- # @param Type PCD type
- #
- # @retval a dict object contains settings of given PCD type
- #
- def _GetDynamicHiiPcd(self, Type):
-
- SkuObj = SkuClass(self.SkuIdentifier,self.SkuIds)
- VariableAttrs = {}
-
- Pcds = sdict()
- #
-        # tdict is a special kind of dict, used for selecting the correct
-        # PCD settings for a given ARCH and SKU
- #
- PcdDict = tdict(True, 4)
- PcdSet = set()
- RecordList = self._RawData[Type, self._Arch]
- # Find out all possible PCD candidates for self._Arch
- AvailableSkuIdSet = SkuObj.AvailableSkuIdSet.copy()
-
- AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
- for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- if SkuName not in AvailableSkuIdSet:
- continue
- PcdSet.add((PcdCName, TokenSpaceGuid, SkuName,Dummy4))
- PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
- # Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid,SkuName, Dummy4 in PcdSet:
-
- Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
- continue
- VariableName, VariableGuid, VariableOffset, DefaultValue, VarAttribute = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
-
- rt, Msg = VariableAttributes.ValidateVarAttributes(VarAttribute)
- if not rt:
- EdkLogger.error("build", PCD_VARIABLE_ATTRIBUTES_ERROR, "Variable attributes settings for %s is incorrect.\n %s" % (".".join((TokenSpaceGuid, PcdCName)), Msg),
- ExtraData = "[%s]" % VarAttribute)
- ExceedMax = False
- FormatCorrect = True
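-            # Accepted offset forms, per the checks below: a decimal or 0x-prefixed hex
-            # number no larger than 0xFFFF, or a two-part "A.B" value where both parts
-            # pass IsValidWord(); anything else is reported as a format error.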
- if VariableOffset.isdigit():
- if int(VariableOffset,10) > 0xFFFF:
- ExceedMax = True
- elif re.match(r'[\t\s]*0[xX][a-fA-F0-9]+$',VariableOffset):
- if int(VariableOffset,16) > 0xFFFF:
- ExceedMax = True
- # For Offset written in "A.B"
- elif VariableOffset.find('.') > -1:
- VariableOffsetList = VariableOffset.split(".")
- if not (len(VariableOffsetList) == 2
- and IsValidWord(VariableOffsetList[0])
- and IsValidWord(VariableOffsetList[1])):
- FormatCorrect = False
- else:
- FormatCorrect = False
- if not FormatCorrect:
-                EdkLogger.error('Build', FORMAT_INVALID, "Invalid syntax or format of the variable offset value for %s." % ".".join((TokenSpaceGuid,PcdCName)))
-
- if ExceedMax:
- EdkLogger.error('Build', OPTION_VALUE_INVALID, "The variable offset value must not exceed the maximum value of 0xFFFF (UINT16) for %s." % ".".join((TokenSpaceGuid,PcdCName)))
- if (VariableName, VariableGuid) not in VariableAttrs:
- VariableAttrs[(VariableName, VariableGuid)] = VarAttribute
- else:
- if not self.CompareVarAttr(VariableAttrs[(VariableName, VariableGuid)], VarAttribute):
- EdkLogger.error('Build', PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR, "The variable %s.%s for DynamicHii PCDs has conflicting attributes [%s] and [%s] " % (VariableGuid, VariableName, VarAttribute, VariableAttrs[(VariableName, VariableGuid)]))
-
- SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName], VariableName, VariableGuid, VariableOffset, DefaultValue, VariableAttribute = VarAttribute)
- pcdDecObject = self._DecPcds[PcdCName, TokenSpaceGuid]
- if (PcdCName,TokenSpaceGuid) in Pcds.keys():
- pcdObject = Pcds[PcdCName,TokenSpaceGuid]
- pcdObject.SkuInfoList[SkuName] = SkuInfo
- else:
- Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- self._PCD_TYPE_STRING_[Type],
- '',
- DefaultValue,
- '',
- '',
- {SkuName : SkuInfo},
- False,
- None,
- pcdDecObject.validateranges,
- pcdDecObject.validlists,
- pcdDecObject.expressions
- )
-
-
- for pcd in Pcds.values():
- SkuInfoObj = pcd.SkuInfoList.values()[0]
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
-            # Only fix up the value when no value is provided in the DSC file.
- for sku in pcd.SkuInfoList.values():
- if (sku.HiiDefaultValue == "" or sku.HiiDefaultValue==None):
- sku.HiiDefaultValue = pcdDecObject.DefaultValue
- if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- valuefromDec = pcdDecObject.DefaultValue
- SkuInfo = SkuInfoClass('DEFAULT', '0', SkuInfoObj.VariableName, SkuInfoObj.VariableGuid, SkuInfoObj.VariableOffset, valuefromDec)
- pcd.SkuInfoList['DEFAULT'] = SkuInfo
- elif 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- pcd.SkuInfoList['DEFAULT'] = pcd.SkuInfoList['COMMON']
- del(pcd.SkuInfoList['COMMON'])
- elif 'DEFAULT' in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- del(pcd.SkuInfoList['COMMON'])
-
- if SkuObj.SkuUsageType == SkuObj.SINGLE:
- if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
- pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
- del(pcd.SkuInfoList['DEFAULT'])
-
-
- if pcd.MaxDatumSize.strip():
- MaxSize = int(pcd.MaxDatumSize,0)
- else:
- MaxSize = 0
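-            # Size calculation example (illustrative) for VOID* HII defaults: L"abc" has
-            # len 6, so (6 - 3 + 1) * 2 = 8 bytes (UCS-2 chars plus the terminator);
-            # "{0x1, 0x2}" counts its 2 comma-separated bytes; a plain "abc" (with quotes)
-            # gives 5 - 2 + 1 = 4 bytes (ASCII chars plus the terminator).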
- if pcdDecObject.DatumType == 'VOID*':
- for (skuname,skuobj) in pcd.SkuInfoList.items():
- datalen = 0
- if skuobj.HiiDefaultValue.startswith("L"):
- datalen = (len(skuobj.HiiDefaultValue)- 3 + 1) * 2
- elif skuobj.HiiDefaultValue.startswith("{"):
- datalen = len(skuobj.HiiDefaultValue.split(","))
- else:
- datalen = len(skuobj.HiiDefaultValue) -2 + 1
- if datalen>MaxSize:
- MaxSize = datalen
- pcd.MaxDatumSize = str(MaxSize)
- return Pcds
-
- ## Retrieve dynamic VPD PCD settings
- #
- # @param Type PCD type
- #
- # @retval a dict object contains settings of given PCD type
- #
- def _GetDynamicVpdPcd(self, Type):
-
- SkuObj = SkuClass(self.SkuIdentifier,self.SkuIds)
-
- Pcds = sdict()
- #
-        # tdict is a special kind of dict, used for selecting the correct
-        # PCD settings for a given ARCH and SKU
- #
- PcdDict = tdict(True, 4)
- PcdList = []
- # Find out all possible PCD candidates for self._Arch
- RecordList = self._RawData[Type, self._Arch]
- AvailableSkuIdSet = SkuObj.AvailableSkuIdSet.copy()
-
- AvailableSkuIdSet.update({'DEFAULT':0,'COMMON':0})
- for TokenSpaceGuid, PcdCName, Setting, Arch, SkuName, Dummy3, Dummy4 in RecordList:
- if SkuName not in AvailableSkuIdSet:
- continue
-
- PcdList.append((PcdCName, TokenSpaceGuid,SkuName, Dummy4))
- PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
- # Remove redundant PCD candidates, per the ARCH and SKU
- for PcdCName, TokenSpaceGuid, SkuName,Dummy4 in PcdList:
- Setting = PcdDict[self._Arch, SkuName, PcdCName, TokenSpaceGuid]
- if Setting == None:
- continue
- #
-            # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue.
-            # For the Integer & Boolean types, the optional data can only be InitialValue.
-            # At this point, we put all the data into the PcdClassObject because we don't know
-            # the PCD's datum type until the DEC parser has been called.
- #
- VpdOffset, MaxDatumSize, InitialValue = self._ValidatePcd(PcdCName, TokenSpaceGuid, Setting, Type, Dummy4)
- SkuInfo = SkuInfoClass(SkuName, self.SkuIds[SkuName], '', '', '', '', VpdOffset, InitialValue)
- if (PcdCName,TokenSpaceGuid) in Pcds.keys():
- pcdObject = Pcds[PcdCName,TokenSpaceGuid]
- pcdObject.SkuInfoList[SkuName] = SkuInfo
- if MaxDatumSize.strip():
- CurrentMaxSize = int(MaxDatumSize.strip(),0)
- else:
- CurrentMaxSize = 0
- if pcdObject.MaxDatumSize:
- PcdMaxSize = int(pcdObject.MaxDatumSize,0)
- else:
- PcdMaxSize = 0
- if CurrentMaxSize > PcdMaxSize:
- pcdObject.MaxDatumSize = str(CurrentMaxSize)
- else:
- Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- self._PCD_TYPE_STRING_[Type],
- '',
- InitialValue,
- '',
- MaxDatumSize,
- {SkuName : SkuInfo},
- False,
- None
- )
- for pcd in Pcds.values():
- SkuInfoObj = pcd.SkuInfoList.values()[0]
- pcdDecObject = self._DecPcds[pcd.TokenCName,pcd.TokenSpaceGuidCName]
- if 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' not in pcd.SkuInfoList.keys():
- valuefromDec = pcdDecObject.DefaultValue
- SkuInfo = SkuInfoClass('DEFAULT', '0', '', '', '','',SkuInfoObj.VpdOffset, valuefromDec)
- pcd.SkuInfoList['DEFAULT'] = SkuInfo
- elif 'DEFAULT' not in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- pcd.SkuInfoList['DEFAULT'] = pcd.SkuInfoList['COMMON']
- del(pcd.SkuInfoList['COMMON'])
- elif 'DEFAULT' in pcd.SkuInfoList.keys() and 'COMMON' in pcd.SkuInfoList.keys():
- del(pcd.SkuInfoList['COMMON'])
- if SkuObj.SkuUsageType == SkuObj.SINGLE:
- if 'DEFAULT' in pcd.SkuInfoList.keys() and SkuObj.SystemSkuId not in pcd.SkuInfoList.keys():
- pcd.SkuInfoList[SkuObj.SystemSkuId] = pcd.SkuInfoList['DEFAULT']
- del(pcd.SkuInfoList['DEFAULT'])
-
- return Pcds
-
- ## Add external modules
- #
- # The external modules are mostly those listed in FDF file, which don't
- # need "build".
- #
- # @param FilePath The path of module description file
- #
- def AddModule(self, FilePath):
- FilePath = NormPath(FilePath)
- if FilePath not in self.Modules:
- Module = ModuleBuildClassObject()
- Module.MetaFile = FilePath
- self.Modules.append(Module)
-
- ## Add external PCDs
- #
- # The external PCDs are mostly those listed in FDF file to specify address
- # or offset information.
- #
- # @param Name Name of the PCD
- # @param Guid Token space guid of the PCD
- # @param Value Value of the PCD
- #
- def AddPcd(self, Name, Guid, Value):
- if (Name, Guid) not in self.Pcds:
- self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
- self.Pcds[Name, Guid].DefaultValue = Value
-
- _Macros = property(_GetMacros)
- Arch = property(_GetArch, _SetArch)
- Platform = property(_GetPlatformName)
- PlatformName = property(_GetPlatformName)
- Guid = property(_GetFileGuid)
- Version = property(_GetVersion)
- DscSpecification = property(_GetDscSpec)
- OutputDirectory = property(_GetOutpuDir)
- SupArchList = property(_GetSupArch)
- BuildTargets = property(_GetBuildTarget)
- SkuName = property(_GetSkuName, _SetSkuName)
- SkuIdentifier = property(_GetSkuIdentifier)
- AvilableSkuIds = property(_GetAviableSkuIds)
- PcdInfoFlag = property(_GetPcdInfoFlag)
- VarCheckFlag = property(_GetVarCheckFlag)
- FlashDefinition = property(_GetFdfFile)
- Prebuild = property(_GetPrebuild)
- Postbuild = property(_GetPostbuild)
- BuildNumber = property(_GetBuildNumber)
- MakefileName = property(_GetMakefileName)
- BsBaseAddress = property(_GetBsBaseAddress)
- RtBaseAddress = property(_GetRtBaseAddress)
- LoadFixAddress = property(_GetLoadFixAddress)
- RFCLanguages = property(_GetRFCLanguages)
- ISOLanguages = property(_GetISOLanguages)
- VpdToolGuid = property(_GetVpdToolGuid)
- SkuIds = property(_GetSkuIds)
- Modules = property(_GetModules)
- LibraryInstances = property(_GetLibraryInstances)
- LibraryClasses = property(_GetLibraryClasses)
- Pcds = property(_GetPcds)
- BuildOptions = property(_GetBuildOptions)
-
-## Package build information from DEC file
-#
-# This class is used to retrieve information stored in the database and convert it
-# into PackageBuildClassObject form for easier use by AutoGen.
-#
-class DecBuildData(PackageBuildClassObject):
- # dict used to convert PCD type in database to string used by build tool
- _PCD_TYPE_STRING_ = {
- MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
- MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
- MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
- MODEL_PCD_DYNAMIC : "Dynamic",
- MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
- MODEL_PCD_DYNAMIC_HII : "DynamicHii",
- MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
- MODEL_PCD_DYNAMIC_EX : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
- MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
- }
-
- # dict used to convert part of [Defines] to members of DecBuildData directly
- _PROPERTY_ = {
- #
- # Required Fields
- #
- TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
- TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
- TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
- TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
- }
-
-
- ## Constructor of DecBuildData
- #
- # Initialize object of DecBuildData
- #
- # @param FilePath The path of package description file
- # @param RawData The raw data of DEC file
- # @param BuildDataBase Database used to retrieve module information
- # @param Arch The target architecture
-    # @param Target          The current build target
-    # @param Toolchain       The current toolchain
- #
- def __init__(self, File, RawData, BuildDataBase, Arch='COMMON', Target=None, Toolchain=None):
- self.MetaFile = File
- self._PackageDir = File.Dir
- self._RawData = RawData
- self._Bdb = BuildDataBase
- self._Arch = Arch
- self._Target = Target
- self._Toolchain = Toolchain
- self._Clear()
-
- ## XXX[key] = value
- def __setitem__(self, key, value):
- self.__dict__[self._PROPERTY_[key]] = value
-
- ## value = XXX[key]
- def __getitem__(self, key):
- return self.__dict__[self._PROPERTY_[key]]
-
- ## "in" test support
- def __contains__(self, key):
- return key in self._PROPERTY_
-
-    ## Set all internally used members of DecBuildData to None
- def _Clear(self):
- self._Header = None
- self._PackageName = None
- self._Guid = None
- self._Version = None
- self._PkgUniFile = None
- self._Protocols = None
- self._Ppis = None
- self._Guids = None
- self._Includes = None
- self._LibraryClasses = None
- self._Pcds = None
- self.__Macros = None
- self._PrivateProtocols = None
- self._PrivatePpis = None
- self._PrivateGuids = None
- self._PrivateIncludes = None
-
- ## Get current effective macros
- def _GetMacros(self):
- if self.__Macros == None:
- self.__Macros = {}
- self.__Macros.update(GlobalData.gGlobalDefines)
- return self.__Macros
-
- ## Get architecture
- def _GetArch(self):
- return self._Arch
-
- ## Set architecture
- #
-    # Changing the default ARCH to another one may affect all other information
-    # because all information in a platform may be ARCH-related. That's
-    # why we need to clear all internally used members, in order to cause all
-    # information to be re-retrieved.
- #
- # @param Value The value of ARCH
- #
- def _SetArch(self, Value):
- if self._Arch == Value:
- return
- self._Arch = Value
- self._Clear()
-
- ## Retrieve all information in [Defines] section
- #
-    # (Retrieving all [Defines] information in one shot is just to save time.)
- #
- def _GetHeaderInfo(self):
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch]
- for Record in RecordList:
- Name = Record[1]
- if Name in self:
- self[Name] = Record[2]
- self._Header = 'DUMMY'
-
- ## Retrieve package name
- def _GetPackageName(self):
- if self._PackageName == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._PackageName == None:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_NAME", File=self.MetaFile)
- return self._PackageName
-
- ## Retrieve file guid
- def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Guid == None:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "No PACKAGE_GUID", File=self.MetaFile)
- return self._Guid
-
- ## Retrieve package version
- def _GetVersion(self):
- if self._Version == None:
- if self._Header == None:
- self._GetHeaderInfo()
- if self._Version == None:
- self._Version = ''
- return self._Version
-
- ## Retrieve protocol definitions (name/value pairs)
- def _GetProtocol(self):
- if self._Protocols == None:
- #
- # tdict is a special kind of dict, used for selecting correct
- # protocol definition for given ARCH
- #
- ProtocolDict = tdict(True)
- PrivateProtocolDict = tdict(True)
- NameList = []
- PrivateNameList = []
- PublicNameList = []
- # find out all protocol definitions for specific and 'common' arch
- RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch]
- for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
- if PrivateFlag == 'PRIVATE':
- if Name not in PrivateNameList:
- PrivateNameList.append(Name)
- PrivateProtocolDict[Arch, Name] = Guid
- if Name in PublicNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- else:
- if Name not in PublicNameList:
- PublicNameList.append(Name)
- if Name in PrivateNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- if Name not in NameList:
- NameList.append(Name)
- ProtocolDict[Arch, Name] = Guid
- # use sdict to keep the order
- self._Protocols = sdict()
- self._PrivateProtocols = sdict()
- for Name in NameList:
- #
- # limit the ARCH to self._Arch; if self._Arch is not found, tdict
- # will automatically fall back to the 'common' ARCH
- #
- self._Protocols[Name] = ProtocolDict[self._Arch, Name]
- for Name in PrivateNameList:
- self._PrivateProtocols[Name] = PrivateProtocolDict[self._Arch, Name]
- return self._Protocols
-
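# A minimal illustrative sketch (not from the deleted source): the lookup
# above relies on tdict falling back to the 'common' ARCH when the requested
# ARCH has no entry. The same fallback idea with a plain dict keyed by
# (Arch, Name) tuples, purely for illustration:
def _lookup_with_common_fallback(table, arch, name):
    # return the value declared for the specific ARCH, else the common one
    if (arch, name) in table:
        return table[(arch, name)]
    return table.get(('COMMON', name))

# Example: a protocol declared only in the arch-neutral section is still
# resolved when asking for X64.
_decls = {('COMMON', 'gExampleProtocolGuid'): '11111111-2222-3333-4444-555555555555'}
assert _lookup_with_common_fallback(_decls, 'X64', 'gExampleProtocolGuid') is not None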
- ## Retrieve PPI definitions (name/value pairs)
- def _GetPpi(self):
- if self._Ppis == None:
- #
- # tdict is a special kind of dict, used for selecting correct
- # PPI definition for given ARCH
- #
- PpiDict = tdict(True)
- PrivatePpiDict = tdict(True)
- NameList = []
- PrivateNameList = []
- PublicNameList = []
- # find out all PPI definitions for specific arch and 'common' arch
- RecordList = self._RawData[MODEL_EFI_PPI, self._Arch]
- for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
- if PrivateFlag == 'PRIVATE':
- if Name not in PrivateNameList:
- PrivateNameList.append(Name)
- PrivatePpiDict[Arch, Name] = Guid
- if Name in PublicNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- else:
- if Name not in PublicNameList:
- PublicNameList.append(Name)
- if Name in PrivateNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- if Name not in NameList:
- NameList.append(Name)
- PpiDict[Arch, Name] = Guid
- # use sdict to keep the order
- self._Ppis = sdict()
- self._PrivatePpis = sdict()
- for Name in NameList:
- #
- # limit the ARCH to self._Arch; if self._Arch is not found, tdict
- # will automatically fall back to the 'common' ARCH
- #
- self._Ppis[Name] = PpiDict[self._Arch, Name]
- for Name in PrivateNameList:
- self._PrivatePpis[Name] = PrivatePpiDict[self._Arch, Name]
- return self._Ppis
-
- ## Retrieve GUID definitions (name/value pairs)
- def _GetGuid(self):
- if self._Guids == None:
- #
- # tdict is a special kind of dict, used for selecting correct
- # GUID definition for given ARCH
- #
- GuidDict = tdict(True)
- PrivateGuidDict = tdict(True)
- NameList = []
- PrivateNameList = []
- PublicNameList = []
- # find out all GUID definitions for specific and 'common' arch
- RecordList = self._RawData[MODEL_EFI_GUID, self._Arch]
- for Name, Guid, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
- if PrivateFlag == 'PRIVATE':
- if Name not in PrivateNameList:
- PrivateNameList.append(Name)
- PrivateGuidDict[Arch, Name] = Guid
- if Name in PublicNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- else:
- if Name not in PublicNameList:
- PublicNameList.append(Name)
- if Name in PrivateNameList:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % Name, File=self.MetaFile, Line=LineNo)
- if Name not in NameList:
- NameList.append(Name)
- GuidDict[Arch, Name] = Guid
- # use sdict to keep the order
- self._Guids = sdict()
- self._PrivateGuids = sdict()
- for Name in NameList:
- #
- # limit the ARCH to self._Arch; if self._Arch is not found, tdict
- # will automatically fall back to the 'common' ARCH
- #
- self._Guids[Name] = GuidDict[self._Arch, Name]
- for Name in PrivateNameList:
- self._PrivateGuids[Name] = PrivateGuidDict[self._Arch, Name]
- return self._Guids
-
- ## Retrieve public include paths declared in this package
- def _GetInclude(self):
- if self._Includes == None:
- self._Includes = []
- self._PrivateIncludes = []
- PublicIncludes = []
- RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch]
- Macros = self._Macros
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- for Record in RecordList:
- File = PathClass(NormPath(Record[0], Macros), self._PackageDir, Arch=self._Arch)
- LineNo = Record[-1]
- # validate the path
- ErrorCode, ErrorInfo = File.Validate()
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
-
- # avoid duplicate include path
- if File not in self._Includes:
- self._Includes.append(File)
- if Record[4] == 'PRIVATE':
- if File not in self._PrivateIncludes:
- self._PrivateIncludes.append(File)
- if File in PublicIncludes:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
- else:
- if File not in PublicIncludes:
- PublicIncludes.append(File)
- if File in self._PrivateIncludes:
- EdkLogger.error('build', OPTION_CONFLICT, "Can't determine %s's attribute, it is both defined as Private and non-Private attribute in DEC file." % File, File=self.MetaFile, Line=LineNo)
-
- return self._Includes
-
- ## Retrieve library class declarations (not used in build at present)
- def _GetLibraryClass(self):
- if self._LibraryClasses == None:
- #
- # tdict is a special kind of dict, used for selecting correct
- # library class declaration for given ARCH
- #
- LibraryClassDict = tdict(True)
- LibraryClassSet = set()
- RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch]
- Macros = self._Macros
- for LibraryClass, File, Dummy, Arch, PrivateFlag, ID, LineNo in RecordList:
- File = PathClass(NormPath(File, Macros), self._PackageDir, Arch=self._Arch)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate()
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
- LibraryClassSet.add(LibraryClass)
- LibraryClassDict[Arch, LibraryClass] = File
- self._LibraryClasses = sdict()
- for LibraryClass in LibraryClassSet:
- self._LibraryClasses[LibraryClass] = LibraryClassDict[self._Arch, LibraryClass]
- return self._LibraryClasses
-
- ## Retrieve PCD declarations
- def _GetPcds(self):
- if self._Pcds == None:
- self._Pcds = sdict()
- self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
- self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
- self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
- self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
- self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
- return self._Pcds
-
- ## Retrieve PCD declarations for given type
- def _GetPcd(self, Type):
- Pcds = sdict()
- #
- # tdict is a special kind of dict, used for selecting correct
- # PCD declaration for given ARCH
- #
- PcdDict = tdict(True, 3)
- # for summarizing PCD
- PcdSet = set()
- # find out all PCDs of the 'type'
- RecordList = self._RawData[Type, self._Arch]
- for TokenSpaceGuid, PcdCName, Setting, Arch, PrivateFlag, Dummy1, Dummy2 in RecordList:
- PcdDict[Arch, PcdCName, TokenSpaceGuid] = Setting
- PcdSet.add((PcdCName, TokenSpaceGuid))
-
- for PcdCName, TokenSpaceGuid in PcdSet:
- #
- # limit the ARCH to self._Arch; if self._Arch is not found, tdict
- # will automatically fall back to the 'common' ARCH and try again
- #
- Setting = PcdDict[self._Arch, PcdCName, TokenSpaceGuid]
- if Setting == None:
- continue
-
- DefaultValue, DatumType, TokenNumber = AnalyzePcdData(Setting)
-
- validateranges, validlists, expressions = self._RawData.GetValidExpression(TokenSpaceGuid, PcdCName)
- Pcds[PcdCName, TokenSpaceGuid, self._PCD_TYPE_STRING_[Type]] = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- self._PCD_TYPE_STRING_[Type],
- DatumType,
- DefaultValue,
- TokenNumber,
- '',
- {},
- False,
- None,
- list(validateranges),
- list(validlists),
- list(expressions)
- )
- return Pcds
-
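# A minimal illustrative sketch (not from the deleted source): AnalyzePcdData()
# above splits a DEC PCD declaration's setting string, which has the form
# "DefaultValue|DatumType|TokenNumber". A simplified version of that split
# (the real helper also copes with quoted values containing '|'):
def _split_dec_pcd_setting(setting):
    parts = [part.strip() for part in setting.split('|')]
    default_value, datum_type, token_number = parts[0], parts[1], parts[2]
    return default_value, datum_type, token_number

# e.g. "gTokenSpaceGuid.PcdExample|0x10|UINT32|0x00000005" is stored with
# Setting == "0x10|UINT32|0x00000005":
print(_split_dec_pcd_setting("0x10|UINT32|0x00000005"))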
-
- _Macros = property(_GetMacros)
- Arch = property(_GetArch, _SetArch)
- PackageName = property(_GetPackageName)
- Guid = property(_GetFileGuid)
- Version = property(_GetVersion)
-
- Protocols = property(_GetProtocol)
- Ppis = property(_GetPpi)
- Guids = property(_GetGuid)
- Includes = property(_GetInclude)
- LibraryClasses = property(_GetLibraryClass)
- Pcds = property(_GetPcds)
-
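# A minimal illustrative sketch (not from the deleted source): DecBuildData
# above and InfBuildData below share the same lazy-evaluation pattern: every
# property is backed by a member that _Clear() resets to None, and the getter
# parses the raw records only on first access. The pattern in isolation, with
# a hypothetical _ParseItems() standing in for the database queries:
class _LazyExample(object):
    def __init__(self):
        self._Clear()

    def _Clear(self):
        # None means "not retrieved yet"; changing Arch calls this again
        self._Items = None

    def _GetItems(self):
        if self._Items is None:
            self._Items = self._ParseItems()    # parse once, then reuse
        return self._Items

    def _ParseItems(self):
        return ['a', 'b']                       # hypothetical stand-in

    Items = property(_GetItems)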
-## Module build information from INF file
-#
-# This class is used to retrieve information stored in database and convert them
-# into ModuleBuildClassObject form for easier use for AutoGen.
-#
-class InfBuildData(ModuleBuildClassObject):
- # dict used to convert PCD type in database to string used by build tool
- _PCD_TYPE_STRING_ = {
- MODEL_PCD_FIXED_AT_BUILD : "FixedAtBuild",
- MODEL_PCD_PATCHABLE_IN_MODULE : "PatchableInModule",
- MODEL_PCD_FEATURE_FLAG : "FeatureFlag",
- MODEL_PCD_DYNAMIC : "Dynamic",
- MODEL_PCD_DYNAMIC_DEFAULT : "Dynamic",
- MODEL_PCD_DYNAMIC_HII : "DynamicHii",
- MODEL_PCD_DYNAMIC_VPD : "DynamicVpd",
- MODEL_PCD_DYNAMIC_EX : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_DEFAULT : "DynamicEx",
- MODEL_PCD_DYNAMIC_EX_HII : "DynamicExHii",
- MODEL_PCD_DYNAMIC_EX_VPD : "DynamicExVpd",
- }
-
- # dict used to convert part of [Defines] to members of InfBuildData directly
- _PROPERTY_ = {
- #
- # Required Fields
- #
- TAB_INF_DEFINES_BASE_NAME : "_BaseName",
- TAB_INF_DEFINES_FILE_GUID : "_Guid",
- TAB_INF_DEFINES_MODULE_TYPE : "_ModuleType",
- #
- # Optional Fields
- #
- #TAB_INF_DEFINES_INF_VERSION : "_AutoGenVersion",
- TAB_INF_DEFINES_COMPONENT_TYPE : "_ComponentType",
- TAB_INF_DEFINES_MAKEFILE_NAME : "_MakefileName",
- #TAB_INF_DEFINES_CUSTOM_MAKEFILE : "_CustomMakefile",
- TAB_INF_DEFINES_DPX_SOURCE :"_DxsFile",
- TAB_INF_DEFINES_VERSION_NUMBER : "_Version",
- TAB_INF_DEFINES_VERSION_STRING : "_Version",
- TAB_INF_DEFINES_VERSION : "_Version",
- TAB_INF_DEFINES_PCD_IS_DRIVER : "_PcdIsDriver",
- TAB_INF_DEFINES_SHADOW : "_Shadow",
-
- TAB_COMPONENTS_SOURCE_OVERRIDE_PATH : "_SourceOverridePath",
- }
-
- # dict used to convert Component type to Module type
- _MODULE_TYPE_ = {
- "LIBRARY" : "BASE",
- "SECURITY_CORE" : "SEC",
- "PEI_CORE" : "PEI_CORE",
- "COMBINED_PEIM_DRIVER" : "PEIM",
- "PIC_PEIM" : "PEIM",
- "RELOCATABLE_PEIM" : "PEIM",
- "PE32_PEIM" : "PEIM",
- "BS_DRIVER" : "DXE_DRIVER",
- "RT_DRIVER" : "DXE_RUNTIME_DRIVER",
- "SAL_RT_DRIVER" : "DXE_SAL_DRIVER",
- "DXE_SMM_DRIVER" : "DXE_SMM_DRIVER",
- # "SMM_DRIVER" : "DXE_SMM_DRIVER",
- # "BS_DRIVER" : "DXE_SMM_DRIVER",
- # "BS_DRIVER" : "UEFI_DRIVER",
- "APPLICATION" : "UEFI_APPLICATION",
- "LOGO" : "BASE",
- }
-
- # regular expression for converting XXX_FLAGS in [nmake] section to new type
- _NMAKE_FLAG_PATTERN_ = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
- # dict used to convert old tool name used in [nmake] section to new ones
- _TOOL_CODE_ = {
- "C" : "CC",
- "LIB" : "SLINK",
- "LINK" : "DLINK",
- }
-
-
- ## Constructor of InfBuildData
- #
- # Initialize object of InfBuildData
- #
- # @param FilePath The path of module description (INF) file
- # @param RawData The raw data of INF file
- # @param BuildDatabase Database used to retrieve module/package information
- # @param Arch The target architecture
- # @param Target The build target
- # @param Toolchain The tool chain used to build the module
- #
- def __init__(self, FilePath, RawData, BuildDatabase, Arch='COMMON', Target=None, Toolchain=None):
- self.MetaFile = FilePath
- self._ModuleDir = FilePath.Dir
- self._RawData = RawData
- self._Bdb = BuildDatabase
- self._Arch = Arch
- self._Target = Target
- self._Toolchain = Toolchain
- self._Platform = 'COMMON'
- self._SourceOverridePath = None
- if FilePath.Key in GlobalData.gOverrideDir:
- self._SourceOverridePath = GlobalData.gOverrideDir[FilePath.Key]
- self._Clear()
-
- ## XXX[key] = value
- def __setitem__(self, key, value):
- self.__dict__[self._PROPERTY_[key]] = value
-
- ## value = XXX[key]
- def __getitem__(self, key):
- return self.__dict__[self._PROPERTY_[key]]
-
- ## "in" test support
- def __contains__(self, key):
- return key in self._PROPERTY_
-
- ## Set all internal used members of InfBuildData to None
- def _Clear(self):
- self._HeaderComments = None
- self._TailComments = None
- self._Header_ = None
- self._AutoGenVersion = None
- self._BaseName = None
- self._DxsFile = None
- self._ModuleType = None
- self._ComponentType = None
- self._BuildType = None
- self._Guid = None
- self._Version = None
- self._PcdIsDriver = None
- self._BinaryModule = None
- self._Shadow = None
- self._MakefileName = None
- self._CustomMakefile = None
- self._Specification = None
- self._LibraryClass = None
- self._ModuleEntryPointList = None
- self._ModuleUnloadImageList = None
- self._ConstructorList = None
- self._DestructorList = None
- self._Defs = None
- self._Binaries = None
- self._Sources = None
- self._LibraryClasses = None
- self._Libraries = None
- self._Protocols = None
- self._ProtocolComments = None
- self._Ppis = None
- self._PpiComments = None
- self._Guids = None
- self._GuidsUsedByPcd = sdict()
- self._GuidComments = None
- self._Includes = None
- self._Packages = None
- self._Pcds = None
- self._PcdComments = None
- self._BuildOptions = None
- self._Depex = None
- self._DepexExpression = None
- self.__Macros = None
-
- ## Get current effective macros
- def _GetMacros(self):
- if self.__Macros == None:
- self.__Macros = {}
- # EDK_GLOBAL defined macros can be applied to EDK module
- if self.AutoGenVersion < 0x00010005:
- self.__Macros.update(GlobalData.gEdkGlobal)
- self.__Macros.update(GlobalData.gGlobalDefines)
- return self.__Macros
-
- ## Get architecture
- def _GetArch(self):
- return self._Arch
-
- ## Set architecture
- #
- # Changing the default ARCH to another may affect all other information
- # because all information in a platform may be ARCH-related. That's
- # why we need to clear all internal used members, in order to cause all
- # information to be re-retrieved.
- #
- # @param Value The value of ARCH
- #
- def _SetArch(self, Value):
- if self._Arch == Value:
- return
- self._Arch = Value
- self._Clear()
-
- ## Return the name of platform employing this module
- def _GetPlatform(self):
- return self._Platform
-
- ## Change the name of platform employing this module
- #
- # Changing the default name of platform to another may affect some information
- # because they may be PLATFORM-related. That's why we need to clear all internal
- # used members, in order to cause all information to be re-retrieved.
- #
- def _SetPlatform(self, Value):
- if self._Platform == Value:
- return
- self._Platform = Value
- self._Clear()
- def _GetHeaderComments(self):
- if not self._HeaderComments:
- self._HeaderComments = []
- RecordList = self._RawData[MODEL_META_DATA_HEADER_COMMENT]
- for Record in RecordList:
- self._HeaderComments.append(Record[0])
- return self._HeaderComments
- def _GetTailComments(self):
- if not self._TailComments:
- self._TailComments = []
- RecordList = self._RawData[MODEL_META_DATA_TAIL_COMMENT]
- for Record in RecordList:
- self._TailComments.append(Record[0])
- return self._TailComments
- ## Retrieve all information in [Defines] section
- #
- # (Retrieving all [Defines] information in one shot is just to save time.)
- #
- def _GetHeaderInfo(self):
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
- for Record in RecordList:
- Name, Value = Record[1], ReplaceMacro(Record[2], self._Macros, False)
- # items defined in _PROPERTY_ don't need additional processing
- if Name in self:
- self[Name] = Value
- if self._Defs == None:
- self._Defs = sdict()
- self._Defs[Name] = Value
- self._Macros[Name] = Value
- # some special items in [Defines] section need special treatment
- elif Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION', 'EDK_RELEASE_VERSION', 'PI_SPECIFICATION_VERSION'):
- if Name in ('EFI_SPECIFICATION_VERSION', 'UEFI_SPECIFICATION_VERSION'):
- Name = 'UEFI_SPECIFICATION_VERSION'
- if self._Specification == None:
- self._Specification = sdict()
- self._Specification[Name] = GetHexVerValue(Value)
- if self._Specification[Name] == None:
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
- "'%s' format is not supported for %s" % (Value, Name),
- File=self.MetaFile, Line=Record[-1])
- elif Name == 'LIBRARY_CLASS':
- if self._LibraryClass == None:
- self._LibraryClass = []
- ValueList = GetSplitValueList(Value)
- LibraryClass = ValueList[0]
- if len(ValueList) > 1:
- SupModuleList = GetSplitValueList(ValueList[1], ' ')
- else:
- SupModuleList = SUP_MODULE_LIST
- self._LibraryClass.append(LibraryClassObject(LibraryClass, SupModuleList))
- elif Name == 'ENTRY_POINT':
- if self._ModuleEntryPointList == None:
- self._ModuleEntryPointList = []
- self._ModuleEntryPointList.append(Value)
- elif Name == 'UNLOAD_IMAGE':
- if self._ModuleUnloadImageList == None:
- self._ModuleUnloadImageList = []
- if not Value:
- continue
- self._ModuleUnloadImageList.append(Value)
- elif Name == 'CONSTRUCTOR':
- if self._ConstructorList == None:
- self._ConstructorList = []
- if not Value:
- continue
- self._ConstructorList.append(Value)
- elif Name == 'DESTRUCTOR':
- if self._DestructorList == None:
- self._DestructorList = []
- if not Value:
- continue
- self._DestructorList.append(Value)
- elif Name == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
- TokenList = GetSplitValueList(Value)
- if self._CustomMakefile == None:
- self._CustomMakefile = {}
- if len(TokenList) < 2:
- self._CustomMakefile['MSFT'] = TokenList[0]
- self._CustomMakefile['GCC'] = TokenList[0]
- else:
- if TokenList[0] not in ['MSFT', 'GCC']:
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
- "No supported family [%s]" % TokenList[0],
- File=self.MetaFile, Line=Record[-1])
- self._CustomMakefile[TokenList[0]] = TokenList[1]
- else:
- if self._Defs == None:
- self._Defs = sdict()
- self._Defs[Name] = Value
- self._Macros[Name] = Value
-
- #
- # Retrieve information in sections specific to Edk.x modules
- #
- if self.AutoGenVersion >= 0x00010005:
- if not self._ModuleType:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
- "MODULE_TYPE is not given", File=self.MetaFile)
- if self._ModuleType not in SUP_MODULE_LIST:
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
- for Record in RecordList:
- Name = Record[1]
- if Name == "MODULE_TYPE":
- LineNo = Record[6]
- break
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED,
- "MODULE_TYPE %s is not supported for EDK II, valid values are:\n %s" % (self._ModuleType, ' '.join(l for l in SUP_MODULE_LIST)),
- File=self.MetaFile, Line=LineNo)
- if (self._Specification == None) or (not 'PI_SPECIFICATION_VERSION' in self._Specification) or (int(self._Specification['PI_SPECIFICATION_VERSION'], 16) < 0x0001000A):
- if self._ModuleType == SUP_MODULE_SMM_CORE:
- EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "SMM_CORE module type can't be used in the module with PI_SPECIFICATION_VERSION less than 0x0001000A", File=self.MetaFile)
- if self._Defs and 'PCI_DEVICE_ID' in self._Defs and 'PCI_VENDOR_ID' in self._Defs \
- and 'PCI_CLASS_CODE' in self._Defs and 'PCI_REVISION' in self._Defs:
- self._BuildType = 'UEFI_OPTIONROM'
- if 'PCI_COMPRESS' in self._Defs:
- if self._Defs['PCI_COMPRESS'] not in ('TRUE', 'FALSE'):
- EdkLogger.error("build", FORMAT_INVALID, "Expected TRUE/FALSE for PCI_COMPRESS: %s" %self.MetaFile)
-
- elif self._Defs and 'UEFI_HII_RESOURCE_SECTION' in self._Defs \
- and self._Defs['UEFI_HII_RESOURCE_SECTION'] == 'TRUE':
- self._BuildType = 'UEFI_HII'
- else:
- self._BuildType = self._ModuleType.upper()
-
- if self._DxsFile:
- File = PathClass(NormPath(self._DxsFile), self._ModuleDir, Arch=self._Arch)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False)
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
- File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
- self._Sources = []
- self._Sources.append(File)
- else:
- if not self._ComponentType:
- EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
- "COMPONENT_TYPE is not given", File=self.MetaFile)
- self._BuildType = self._ComponentType.upper()
- if self._ComponentType in self._MODULE_TYPE_:
- self._ModuleType = self._MODULE_TYPE_[self._ComponentType]
- if self._ComponentType == 'LIBRARY':
- self._LibraryClass = [LibraryClassObject(self._BaseName, SUP_MODULE_LIST)]
- # make use of some [nmake] section macros
- Macros = self._Macros
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- Macros['PROCESSOR'] = self._Arch
- RecordList = self._RawData[MODEL_META_DATA_NMAKE, self._Arch, self._Platform]
- for Name, Value, Dummy, Arch, Platform, ID, LineNo in RecordList:
- Value = ReplaceMacro(Value, Macros, True)
- if Name == "IMAGE_ENTRY_POINT":
- if self._ModuleEntryPointList == None:
- self._ModuleEntryPointList = []
- self._ModuleEntryPointList.append(Value)
- elif Name == "DPX_SOURCE":
- File = PathClass(NormPath(Value), self._ModuleDir, Arch=self._Arch)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate(".dxs", CaseSensitive=False)
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo,
- File=self.MetaFile, Line=LineNo)
- if self.Sources == None:
- self._Sources = []
- self._Sources.append(File)
- else:
- ToolList = self._NMAKE_FLAG_PATTERN_.findall(Name)
- if len(ToolList) != 1:
- pass
-# EdkLogger.warn("build", "Don't know how to do with macro [%s]" % Name,
-# File=self.MetaFile, Line=LineNo)
- else:
- if self._BuildOptions == None:
- self._BuildOptions = sdict()
-
- if ToolList[0] in self._TOOL_CODE_:
- Tool = self._TOOL_CODE_[ToolList[0]]
- else:
- Tool = ToolList[0]
- ToolChain = "*_*_*_%s_FLAGS" % Tool
- ToolChainFamily = 'MSFT' # Edk.x only supports MSFT tool chain
- # ignore macros that were not replaced in the value
- ValueList = GetSplitList(' ' + Value, '/D')
- Dummy = ValueList[0]
- for Index in range(1, len(ValueList)):
- if ValueList[Index][-1] == '=' or ValueList[Index] == '':
- continue
- Dummy = Dummy + ' /D ' + ValueList[Index]
- Value = Dummy.strip()
- if (ToolChainFamily, ToolChain) not in self._BuildOptions:
- self._BuildOptions[ToolChainFamily, ToolChain] = Value
- else:
- OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
- self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Value
- # set _Header to non-None in order to avoid database re-querying
- self._Header_ = 'DUMMY'
-
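# A minimal illustrative sketch (not from the deleted source): the [nmake]
# handling above uses _NMAKE_FLAG_PATTERN_ to pull the tool code out of legacy
# macro names and _TOOL_CODE_ to rename old tool codes. A standalone check of
# that behaviour on a few representative names:
import re

_pattern = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?")
_tool_code = {"C": "CC", "LIB": "SLINK", "LINK": "DLINK"}

for _name in ("C_STD_FLAGS", "EBC_C_FLAGS", "LIB_FLAGS", "LINK_FLAGS_DLL"):
    _tools = _pattern.findall(_name)            # e.g. ['C'] for "C_STD_FLAGS"
    _tool = _tool_code.get(_tools[0], _tools[0])
    # C -> CC, LIB -> SLINK, LINK -> DLINK; the result feeds *_*_*_<tool>_FLAGS
    print("%s maps to *_*_*_%s_FLAGS" % (_name, _tool))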
- ## Retrieve file version
- def _GetInfVersion(self):
- if self._AutoGenVersion == None:
- RecordList = self._RawData[MODEL_META_DATA_HEADER, self._Arch, self._Platform]
- for Record in RecordList:
- if Record[1] == TAB_INF_DEFINES_INF_VERSION:
- if '.' in Record[2]:
- ValueList = Record[2].split('.')
- Major = '%04o' % int(ValueList[0], 0)
- Minor = '%04o' % int(ValueList[1], 0)
- self._AutoGenVersion = int('0x' + Major + Minor, 0)
- else:
- self._AutoGenVersion = int(Record[2], 0)
- break
- if self._AutoGenVersion == None:
- self._AutoGenVersion = 0x00010000
- return self._AutoGenVersion
-
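# A minimal illustrative sketch (not from the deleted source): _GetInfVersion()
# above accepts INF_VERSION either as an integer literal ("0x00010005") or in
# dotted form ("1.10"), packing major and minor into one number. A standalone
# replica of that conversion, kept faithful to the octal formatting used above:
def _inf_version_to_autogen(value):
    if '.' in value:
        parts = value.split('.')
        major = '%04o' % int(parts[0], 0)
        minor = '%04o' % int(parts[1], 0)
        return int('0x' + major + minor, 0)
    return int(value, 0)

print(hex(_inf_version_to_autogen("0x00010005")))   # 0x10005
print(hex(_inf_version_to_autogen("1.10")))         # 0x10012 (minor 10 -> octal 0012)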
- ## Retrieve BASE_NAME
- def _GetBaseName(self):
- if self._BaseName == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._BaseName == None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No BASE_NAME name", File=self.MetaFile)
- return self._BaseName
-
- ## Retrieve DxsFile
- def _GetDxsFile(self):
- if self._DxsFile == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._DxsFile == None:
- self._DxsFile = ''
- return self._DxsFile
-
- ## Retrieve MODULE_TYPE
- def _GetModuleType(self):
- if self._ModuleType == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._ModuleType == None:
- self._ModuleType = 'BASE'
- if self._ModuleType not in SUP_MODULE_LIST:
- self._ModuleType = "USER_DEFINED"
- return self._ModuleType
-
- ## Retrieve COMPONENT_TYPE
- def _GetComponentType(self):
- if self._ComponentType == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._ComponentType == None:
- self._ComponentType = 'USER_DEFINED'
- return self._ComponentType
-
- ## Retrieve "BUILD_TYPE"
- def _GetBuildType(self):
- if self._BuildType == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if not self._BuildType:
- self._BuildType = "BASE"
- return self._BuildType
-
- ## Retrieve file guid
- def _GetFileGuid(self):
- if self._Guid == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._Guid == None:
- self._Guid = '00000000-0000-0000-0000-000000000000'
- return self._Guid
-
- ## Retrieve module version
- def _GetVersion(self):
- if self._Version == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._Version == None:
- self._Version = '0.0'
- return self._Version
-
- ## Retrieve PCD_IS_DRIVER
- def _GetPcdIsDriver(self):
- if self._PcdIsDriver == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._PcdIsDriver == None:
- self._PcdIsDriver = ''
- return self._PcdIsDriver
-
- ## Retrieve SHADOW
- def _GetShadow(self):
- if self._Shadow == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._Shadow != None and self._Shadow.upper() == 'TRUE':
- self._Shadow = True
- else:
- self._Shadow = False
- return self._Shadow
-
- ## Retrieve CUSTOM_MAKEFILE
- def _GetMakefile(self):
- if self._CustomMakefile == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._CustomMakefile == None:
- self._CustomMakefile = {}
- return self._CustomMakefile
-
- ## Retrieve EFI_SPECIFICATION_VERSION
- def _GetSpec(self):
- if self._Specification == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._Specification == None:
- self._Specification = {}
- return self._Specification
-
- ## Retrieve LIBRARY_CLASS
- def _GetLibraryClass(self):
- if self._LibraryClass == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._LibraryClass == None:
- self._LibraryClass = []
- return self._LibraryClass
-
- ## Retrieve ENTRY_POINT
- def _GetEntryPoint(self):
- if self._ModuleEntryPointList == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._ModuleEntryPointList == None:
- self._ModuleEntryPointList = []
- return self._ModuleEntryPointList
-
- ## Retrieve UNLOAD_IMAGE
- def _GetUnloadImage(self):
- if self._ModuleUnloadImageList == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._ModuleUnloadImageList == None:
- self._ModuleUnloadImageList = []
- return self._ModuleUnloadImageList
-
- ## Retrieve CONSTRUCTOR
- def _GetConstructor(self):
- if self._ConstructorList == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._ConstructorList == None:
- self._ConstructorList = []
- return self._ConstructorList
-
- ## Retrieve DESTRUCTOR
- def _GetDestructor(self):
- if self._DestructorList == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._DestructorList == None:
- self._DestructorList = []
- return self._DestructorList
-
- ## Retrieve defines other than the ones above
- def _GetDefines(self):
- if self._Defs == None:
- if self._Header_ == None:
- self._GetHeaderInfo()
- if self._Defs == None:
- self._Defs = sdict()
- return self._Defs
-
- ## Retrieve binary files
- def _GetBinaries(self):
- if self._Binaries == None:
- self._Binaries = []
- RecordList = self._RawData[MODEL_EFI_BINARY_FILE, self._Arch, self._Platform]
- Macros = self._Macros
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- Macros['PROCESSOR'] = self._Arch
- for Record in RecordList:
- FileType = Record[0]
- LineNo = Record[-1]
- Target = 'COMMON'
- FeatureFlag = []
- if Record[2]:
- TokenList = GetSplitValueList(Record[2], TAB_VALUE_SPLIT)
- if TokenList:
- Target = TokenList[0]
- if len(TokenList) > 1:
- FeatureFlag = Record[1:]
-
- File = PathClass(NormPath(Record[1], Macros), self._ModuleDir, '', FileType, True, self._Arch, '', Target)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate()
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
- self._Binaries.append(File)
- return self._Binaries
-
- ## Retrieve binary files with error check.
- def _GetBinaryFiles(self):
- Binaries = self._GetBinaries()
- if GlobalData.gIgnoreSource and Binaries == []:
- ErrorInfo = "The INF file does not contain any Binaries to use in creating the image\n"
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, ExtraData=ErrorInfo, File=self.MetaFile)
-
- return Binaries
- ## Check whether binaries exist for the current ARCH in an AsBuilt INF
- def _IsSupportedArch(self):
- if self._GetBinaries() and not self._GetSourceFiles():
- return True
- else:
- return False
- ## Retrieve source files
- def _GetSourceFiles(self):
- #Ignore all source files in a binary build mode
- if GlobalData.gIgnoreSource:
- self._Sources = []
- return self._Sources
-
- if self._Sources == None:
- self._Sources = []
- RecordList = self._RawData[MODEL_EFI_SOURCE_FILE, self._Arch, self._Platform]
- Macros = self._Macros
- for Record in RecordList:
- LineNo = Record[-1]
- ToolChainFamily = Record[1]
- TagName = Record[2]
- ToolCode = Record[3]
- FeatureFlag = Record[4]
- if self.AutoGenVersion < 0x00010005:
- Macros["EDK_SOURCE"] = GlobalData.gEcpSource
- Macros['PROCESSOR'] = self._Arch
- SourceFile = NormPath(Record[0], Macros)
- if SourceFile[0] == os.path.sep:
- SourceFile = mws.join(GlobalData.gWorkspace, SourceFile[1:])
- # old module source files (Edk)
- File = PathClass(SourceFile, self._ModuleDir, self._SourceOverridePath,
- '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate(CaseSensitive=False)
- if ErrorCode != 0:
- if File.Ext.lower() == '.h':
- EdkLogger.warn('build', 'Include file not found', ExtraData=ErrorInfo,
- File=self.MetaFile, Line=LineNo)
- continue
- else:
- EdkLogger.error('build', ErrorCode, ExtraData=File, File=self.MetaFile, Line=LineNo)
- else:
- File = PathClass(NormPath(Record[0], Macros), self._ModuleDir, '',
- '', False, self._Arch, ToolChainFamily, '', TagName, ToolCode)
- # validate the file
- ErrorCode, ErrorInfo = File.Validate()
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
-
- self._Sources.append(File)
- return self._Sources
-
- ## Retrieve library classes employed by this module
- def _GetLibraryClassUses(self):
- if self._LibraryClasses == None:
- self._LibraryClasses = sdict()
- RecordList = self._RawData[MODEL_EFI_LIBRARY_CLASS, self._Arch, self._Platform]
- for Record in RecordList:
- Lib = Record[0]
- Instance = Record[1]
- if Instance:
- Instance = NormPath(Instance, self._Macros)
- self._LibraryClasses[Lib] = Instance
- return self._LibraryClasses
-
- ## Retrieve library names (for Edk.x style of modules)
- def _GetLibraryNames(self):
- if self._Libraries == None:
- self._Libraries = []
- RecordList = self._RawData[MODEL_EFI_LIBRARY_INSTANCE, self._Arch, self._Platform]
- for Record in RecordList:
- LibraryName = ReplaceMacro(Record[0], self._Macros, False)
- # in case of name with '.lib' extension, which is unusual in Edk.x inf
- LibraryName = os.path.splitext(LibraryName)[0]
- if LibraryName not in self._Libraries:
- self._Libraries.append(LibraryName)
- return self._Libraries
-
- def _GetProtocolComments(self):
- self._GetProtocols()
- return self._ProtocolComments
- ## Retrieve protocols consumed/produced by this module
- def _GetProtocols(self):
- if self._Protocols == None:
- self._Protocols = sdict()
- self._ProtocolComments = sdict()
- RecordList = self._RawData[MODEL_EFI_PROTOCOL, self._Arch, self._Platform]
- for Record in RecordList:
- CName = Record[0]
- Value = ProtocolValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
- PackageList = "\n\t".join([str(P) for P in self.Packages])
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
- "Value of Protocol [%s] is not found under [Protocols] section in" % CName,
- ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
- self._Protocols[CName] = Value
- CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
- Comments = []
- for CmtRec in CommentRecords:
- Comments.append(CmtRec[0])
- self._ProtocolComments[CName] = Comments
- return self._Protocols
-
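# A minimal illustrative sketch (not from the deleted source): ProtocolValue()
# above (like PpiValue/GuidValue in the methods that follow) resolves a C name
# against the packages listed in [Packages]. The lookup amounts to scanning
# each package's name-to-GUID table, roughly like this, assuming the package
# objects expose a dict the way DecBuildData.Protocols does:
def _resolve_cname(cname, packages, table_name='Protocols'):
    for package in packages:
        table = getattr(package, table_name, {})
        if cname in table:
            return table[cname]
    return None     # the caller reports RESOURCE_NOT_AVAILABLE on None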
- def _GetPpiComments(self):
- self._GetPpis()
- return self._PpiComments
- ## Retrieve PPIs consumed/produced by this module
- def _GetPpis(self):
- if self._Ppis == None:
- self._Ppis = sdict()
- self._PpiComments = sdict()
- RecordList = self._RawData[MODEL_EFI_PPI, self._Arch, self._Platform]
- for Record in RecordList:
- CName = Record[0]
- Value = PpiValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
- PackageList = "\n\t".join([str(P) for P in self.Packages])
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
- "Value of PPI [%s] is not found under [Ppis] section in " % CName,
- ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
- self._Ppis[CName] = Value
- CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
- Comments = []
- for CmtRec in CommentRecords:
- Comments.append(CmtRec[0])
- self._PpiComments[CName] = Comments
- return self._Ppis
-
- def _GetGuidComments(self):
- self._GetGuids()
- return self._GuidComments
- ## Retrieve GUIDs consumed/produced by this module
- def _GetGuids(self):
- if self._Guids == None:
- self._Guids = sdict()
- self._GuidComments = sdict()
- RecordList = self._RawData[MODEL_EFI_GUID, self._Arch, self._Platform]
- for Record in RecordList:
- CName = Record[0]
- Value = GuidValue(CName, self.Packages, self.MetaFile.Path)
- if Value == None:
- PackageList = "\n\t".join([str(P) for P in self.Packages])
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
- "Value of Guid [%s] is not found under [Guids] section in" % CName,
- ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
- self._Guids[CName] = Value
- CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Record[5]]
- Comments = []
- for CmtRec in CommentRecords:
- Comments.append(CmtRec[0])
- self._GuidComments[CName] = Comments
- return self._Guids
-
- ## Retrieve include paths necessary for this module (for Edk.x style of modules)
- def _GetIncludes(self):
- if self._Includes == None:
- self._Includes = []
- if self._SourceOverridePath:
- self._Includes.append(self._SourceOverridePath)
-
- Macros = self._Macros
- if 'PROCESSOR' in GlobalData.gEdkGlobal.keys():
- Macros['PROCESSOR'] = GlobalData.gEdkGlobal['PROCESSOR']
- else:
- Macros['PROCESSOR'] = self._Arch
- RecordList = self._RawData[MODEL_EFI_INCLUDE, self._Arch, self._Platform]
- for Record in RecordList:
- if Record[0].find('EDK_SOURCE') > -1:
- Macros['EDK_SOURCE'] = GlobalData.gEcpSource
- File = NormPath(Record[0], self._Macros)
- if File[0] == '.':
- File = os.path.join(self._ModuleDir, File)
- else:
- File = os.path.join(GlobalData.gWorkspace, File)
- File = RealPath(os.path.normpath(File))
- if File:
- self._Includes.append(File)
-
- # TRICK: let the compiler choose the correct header file
- Macros['EDK_SOURCE'] = GlobalData.gEdkSource
- File = NormPath(Record[0], self._Macros)
- if File[0] == '.':
- File = os.path.join(self._ModuleDir, File)
- else:
- File = os.path.join(GlobalData.gWorkspace, File)
- File = RealPath(os.path.normpath(File))
- if File:
- self._Includes.append(File)
- else:
- File = NormPath(Record[0], Macros)
- if File[0] == '.':
- File = os.path.join(self._ModuleDir, File)
- else:
- File = mws.join(GlobalData.gWorkspace, File)
- File = RealPath(os.path.normpath(File))
- if File:
- self._Includes.append(File)
- if not File and Record[0].find('EFI_SOURCE') > -1:
- # tricky to regard WorkSpace as EFI_SOURCE
- Macros['EFI_SOURCE'] = GlobalData.gWorkspace
- File = NormPath(Record[0], Macros)
- if File[0] == '.':
- File = os.path.join(self._ModuleDir, File)
- else:
- File = os.path.join(GlobalData.gWorkspace, File)
- File = RealPath(os.path.normpath(File))
- if File:
- self._Includes.append(File)
- return self._Includes
-
- ## Retrieve packages this module depends on
- def _GetPackages(self):
- if self._Packages == None:
- self._Packages = []
- RecordList = self._RawData[MODEL_META_DATA_PACKAGE, self._Arch, self._Platform]
- Macros = self._Macros
- Macros['EDK_SOURCE'] = GlobalData.gEcpSource
- for Record in RecordList:
- File = PathClass(NormPath(Record[0], Macros), GlobalData.gWorkspace, Arch=self._Arch)
- LineNo = Record[-1]
- # validate the file
- ErrorCode, ErrorInfo = File.Validate('.dec')
- if ErrorCode != 0:
- EdkLogger.error('build', ErrorCode, ExtraData=ErrorInfo, File=self.MetaFile, Line=LineNo)
- # parse this package now. we need it to get protocol/ppi/guid value
- Package = self._Bdb[File, self._Arch, self._Target, self._Toolchain]
- self._Packages.append(Package)
- return self._Packages
-
- ## Retrieve PCD comments
- def _GetPcdComments(self):
- self._GetPcds()
- return self._PcdComments
- ## Retrieve PCDs used in this module
- def _GetPcds(self):
- if self._Pcds == None:
- self._Pcds = sdict()
- self._PcdComments = sdict()
- self._Pcds.update(self._GetPcd(MODEL_PCD_FIXED_AT_BUILD))
- self._Pcds.update(self._GetPcd(MODEL_PCD_PATCHABLE_IN_MODULE))
- self._Pcds.update(self._GetPcd(MODEL_PCD_FEATURE_FLAG))
- self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC))
- self._Pcds.update(self._GetPcd(MODEL_PCD_DYNAMIC_EX))
- return self._Pcds
-
- ## Retrieve build options specific to this module
- def _GetBuildOptions(self):
- if self._BuildOptions == None:
- self._BuildOptions = sdict()
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch, self._Platform]
- for Record in RecordList:
- ToolChainFamily = Record[0]
- ToolChain = Record[1]
- Option = Record[2]
- if (ToolChainFamily, ToolChain) not in self._BuildOptions or Option.startswith('='):
- self._BuildOptions[ToolChainFamily, ToolChain] = Option
- else:
- # concatenate the option string if they're for the same tool
- OptionString = self._BuildOptions[ToolChainFamily, ToolChain]
- self._BuildOptions[ToolChainFamily, ToolChain] = OptionString + " " + Option
- return self._BuildOptions
-
- ## Retrieve dependency expression
- def _GetDepex(self):
- if self._Depex == None:
- self._Depex = tdict(False, 2)
- RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
-
- # If the module has only Binaries and no Sources, then ignore [Depex]
- if self.Sources == None or self.Sources == []:
- if self.Binaries != None and self.Binaries != []:
- return self._Depex
-
- # PEIM and DXE drivers must have a valid [Depex] section
- if len(self.LibraryClass) == 0 and len(RecordList) == 0:
- if self.ModuleType == 'DXE_DRIVER' or self.ModuleType == 'PEIM' or self.ModuleType == 'DXE_SMM_DRIVER' or \
- self.ModuleType == 'DXE_SAL_DRIVER' or self.ModuleType == 'DXE_RUNTIME_DRIVER':
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No [Depex] section or no valid expression in [Depex] section for [%s] module" \
- % self.ModuleType, File=self.MetaFile)
-
- if len(RecordList) != 0 and self.ModuleType == 'USER_DEFINED':
- for Record in RecordList:
- if Record[4] not in ['PEIM', 'DXE_DRIVER', 'DXE_SMM_DRIVER']:
- EdkLogger.error('build', FORMAT_INVALID,
- "'%s' module must specify the type of [Depex] section" % self.ModuleType,
- File=self.MetaFile)
-
- Depex = sdict()
- for Record in RecordList:
- DepexStr = ReplaceMacro(Record[0], self._Macros, False)
- Arch = Record[3]
- ModuleType = Record[4]
- TokenList = DepexStr.split()
- if (Arch, ModuleType) not in Depex:
- Depex[Arch, ModuleType] = []
- DepexList = Depex[Arch, ModuleType]
- for Token in TokenList:
- if Token in DEPEX_SUPPORTED_OPCODE:
- DepexList.append(Token)
- elif Token.endswith(".inf"): # module file name
- ModuleFile = os.path.normpath(Token)
- Module = self.BuildDatabase[ModuleFile]
- if Module == None:
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "Module is not found in active platform",
- ExtraData=Token, File=self.MetaFile, Line=Record[-1])
- DepexList.append(Module.Guid)
- else:
- # get the GUID value now
- Value = ProtocolValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
- Value = PpiValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
- Value = GuidValue(Token, self.Packages, self.MetaFile.Path)
- if Value == None:
- PackageList = "\n\t".join([str(P) for P in self.Packages])
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
- "Value of [%s] is not found in" % Token,
- ExtraData=PackageList, File=self.MetaFile, Line=Record[-1])
- DepexList.append(Value)
- for Arch, ModuleType in Depex:
- self._Depex[Arch, ModuleType] = Depex[Arch, ModuleType]
- return self._Depex
-
- ## Retrieve dependency expression
- def _GetDepexExpression(self):
- if self._DepexExpression == None:
- self._DepexExpression = tdict(False, 2)
- RecordList = self._RawData[MODEL_EFI_DEPEX, self._Arch]
- DepexExpression = sdict()
- for Record in RecordList:
- DepexStr = ReplaceMacro(Record[0], self._Macros, False)
- Arch = Record[3]
- ModuleType = Record[4]
- TokenList = DepexStr.split()
- if (Arch, ModuleType) not in DepexExpression:
- DepexExpression[Arch, ModuleType] = ''
- for Token in TokenList:
- DepexExpression[Arch, ModuleType] = DepexExpression[Arch, ModuleType] + Token.strip() + ' '
- for Arch, ModuleType in DepexExpression:
- self._DepexExpression[Arch, ModuleType] = DepexExpression[Arch, ModuleType]
- return self._DepexExpression
-
- def GetGuidsUsedByPcd(self):
- return self._GuidsUsedByPcd
- ## Retrieve PCD for given type
- def _GetPcd(self, Type):
- Pcds = sdict()
- PcdDict = tdict(True, 4)
- PcdList = []
- RecordList = self._RawData[Type, self._Arch, self._Platform]
- for TokenSpaceGuid, PcdCName, Setting, Arch, Platform, Id, LineNo in RecordList:
- PcdDict[Arch, Platform, PcdCName, TokenSpaceGuid] = (Setting, LineNo)
- PcdList.append((PcdCName, TokenSpaceGuid))
- # get the guid value
- if TokenSpaceGuid not in self.Guids:
- Value = GuidValue(TokenSpaceGuid, self.Packages, self.MetaFile.Path)
- if Value == None:
- PackageList = "\n\t".join([str(P) for P in self.Packages])
- EdkLogger.error('build', RESOURCE_NOT_AVAILABLE,
- "Value of Guid [%s] is not found under [Guids] section in" % TokenSpaceGuid,
- ExtraData=PackageList, File=self.MetaFile, Line=LineNo)
- self.Guids[TokenSpaceGuid] = Value
- self._GuidsUsedByPcd[TokenSpaceGuid] = Value
- CommentRecords = self._RawData[MODEL_META_DATA_COMMENT, self._Arch, self._Platform, Id]
- Comments = []
- for CmtRec in CommentRecords:
- Comments.append(CmtRec[0])
- self._PcdComments[TokenSpaceGuid, PcdCName] = Comments
-
- # resolve PCD type, value, datum info, etc. by getting its definition from package
- for PcdCName, TokenSpaceGuid in PcdList:
- PcdRealName = PcdCName
- Setting, LineNo = PcdDict[self._Arch, self.Platform, PcdCName, TokenSpaceGuid]
- if Setting == None:
- continue
- ValueList = AnalyzePcdData(Setting)
- DefaultValue = ValueList[0]
- Pcd = PcdClassObject(
- PcdCName,
- TokenSpaceGuid,
- '',
- '',
- DefaultValue,
- '',
- '',
- {},
- False,
- self.Guids[TokenSpaceGuid]
- )
- if Type == MODEL_PCD_PATCHABLE_IN_MODULE and ValueList[1]:
- # Patch PCD: TokenSpace.PcdCName|Value|Offset
- Pcd.Offset = ValueList[1]
-
- if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
- for Package in self.Packages:
- for key in Package.Pcds:
- if (Package.Pcds[key].TokenCName, Package.Pcds[key].TokenSpaceGuidCName) == (PcdRealName, TokenSpaceGuid):
- for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
- Pcd_Type = item[0].split('_')[-1]
- if Pcd_Type == Package.Pcds[key].Type:
- Value = Package.Pcds[key]
- Value.TokenCName = Package.Pcds[key].TokenCName + '_' + Pcd_Type
- if len(key) == 2:
- newkey = (Value.TokenCName, key[1])
- elif len(key) == 3:
- newkey = (Value.TokenCName, key[1], key[2])
- del Package.Pcds[key]
- Package.Pcds[newkey] = Value
- break
- else:
- pass
- else:
- pass
-
- # get necessary info from package declaring this PCD
- for Package in self.Packages:
- #
- # 'dynamic' in INF means its type is determined by platform;
- # if platform doesn't give its type, use 'lowest' one in the
- # following order, if any
- #
- # "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
- #
- PcdType = self._PCD_TYPE_STRING_[Type]
- if Type == MODEL_PCD_DYNAMIC:
- Pcd.Pending = True
- for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
- if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
- for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
- if str(item[0]).endswith(T) and (item[0], item[1], T) in Package.Pcds:
- PcdType = T
- PcdCName = item[0]
- break
- else:
- pass
- break
- else:
- if (PcdRealName, TokenSpaceGuid, T) in Package.Pcds:
- PcdType = T
- break
-
- else:
- Pcd.Pending = False
- if (PcdRealName, TokenSpaceGuid) in GlobalData.MixedPcd:
- for item in GlobalData.MixedPcd[(PcdRealName, TokenSpaceGuid)]:
- Pcd_Type = item[0].split('_')[-1]
- if Pcd_Type == PcdType:
- PcdCName = item[0]
- break
- else:
- pass
- else:
- pass
-
- if (PcdCName, TokenSpaceGuid, PcdType) in Package.Pcds:
- PcdInPackage = Package.Pcds[PcdCName, TokenSpaceGuid, PcdType]
- Pcd.Type = PcdType
- Pcd.TokenValue = PcdInPackage.TokenValue
-
- #
- # Check whether the token value exist or not.
- #
- if Pcd.TokenValue == None or Pcd.TokenValue == "":
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "No TokenValue for PCD [%s.%s] in [%s]!" % (TokenSpaceGuid, PcdRealName, str(Package)),
- File=self.MetaFile, Line=LineNo,
- ExtraData=None
- )
- #
- # Check hexadecimal token value length and format.
- #
- ReIsValidPcdTokenValue = re.compile(r"^0[xX]0*[0-9a-fA-F]{1,8}$", re.DOTALL)
- if Pcd.TokenValue.startswith("0x") or Pcd.TokenValue.startswith("0X"):
- if ReIsValidPcdTokenValue.match(Pcd.TokenValue) == None:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid:" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
- File=self.MetaFile, Line=LineNo,
- ExtraData=None
- )
-
- #
- # Check decimal token value length and format.
- #
- else:
- try:
- TokenValueInt = int (Pcd.TokenValue, 10)
- if (TokenValueInt < 0 or TokenValueInt > 4294967295):
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, as a decimal it should between: 0 - 4294967295!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
- File=self.MetaFile, Line=LineNo,
- ExtraData=None
- )
- except:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "The format of TokenValue [%s] of PCD [%s.%s] in [%s] is invalid, it should be hexadecimal or decimal!" % (Pcd.TokenValue, TokenSpaceGuid, PcdRealName, str(Package)),
- File=self.MetaFile, Line=LineNo,
- ExtraData=None
- )
-
- Pcd.DatumType = PcdInPackage.DatumType
- Pcd.MaxDatumSize = PcdInPackage.MaxDatumSize
- Pcd.InfDefaultValue = Pcd.DefaultValue
- if Pcd.DefaultValue in [None, '']:
- Pcd.DefaultValue = PcdInPackage.DefaultValue
- break
- else:
- EdkLogger.error(
- 'build',
- FORMAT_INVALID,
- "PCD [%s.%s] in [%s] is not found in dependent packages:" % (TokenSpaceGuid, PcdRealName, self.MetaFile),
- File=self.MetaFile, Line=LineNo,
- ExtraData="\t%s" % '\n\t'.join([str(P) for P in self.Packages])
- )
- Pcds[PcdCName, TokenSpaceGuid] = Pcd
-
- return Pcds
-
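# A minimal illustrative sketch (not from the deleted source): the TokenValue
# checks above accept either a hex literal with at most 8 significant digits
# or a decimal value in the UINT32 range. The same intent as a compact
# stand-alone predicate:
import re

_HEX_TOKEN = re.compile(r"^0[xX]0*[0-9a-fA-F]{1,8}$")

def _is_valid_token_value(token):
    if token.lower().startswith("0x"):
        return _HEX_TOKEN.match(token) is not None
    try:
        return 0 <= int(token, 10) <= 4294967295
    except ValueError:
        return False

# "0x00000005" and "5" pass; "0x123456789" (9 significant digits) does not.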
- ## Check whether the current module is a binary module
- def _IsBinaryModule(self):
- if self.Binaries and not self.Sources:
- return True
- elif GlobalData.gIgnoreSource:
- return True
- else:
- return False
-
- _Macros = property(_GetMacros)
- Arch = property(_GetArch, _SetArch)
- Platform = property(_GetPlatform, _SetPlatform)
-
- HeaderComments = property(_GetHeaderComments)
- TailComments = property(_GetTailComments)
- AutoGenVersion = property(_GetInfVersion)
- BaseName = property(_GetBaseName)
- ModuleType = property(_GetModuleType)
- ComponentType = property(_GetComponentType)
- BuildType = property(_GetBuildType)
- Guid = property(_GetFileGuid)
- Version = property(_GetVersion)
- PcdIsDriver = property(_GetPcdIsDriver)
- Shadow = property(_GetShadow)
- CustomMakefile = property(_GetMakefile)
- Specification = property(_GetSpec)
- LibraryClass = property(_GetLibraryClass)
- ModuleEntryPointList = property(_GetEntryPoint)
- ModuleUnloadImageList = property(_GetUnloadImage)
- ConstructorList = property(_GetConstructor)
- DestructorList = property(_GetDestructor)
- Defines = property(_GetDefines)
- DxsFile = property(_GetDxsFile)
-
- Binaries = property(_GetBinaryFiles)
- Sources = property(_GetSourceFiles)
- LibraryClasses = property(_GetLibraryClassUses)
- Libraries = property(_GetLibraryNames)
- Protocols = property(_GetProtocols)
- ProtocolComments = property(_GetProtocolComments)
- Ppis = property(_GetPpis)
- PpiComments = property(_GetPpiComments)
- Guids = property(_GetGuids)
- GuidComments = property(_GetGuidComments)
- Includes = property(_GetIncludes)
- Packages = property(_GetPackages)
- Pcds = property(_GetPcds)
- PcdComments = property(_GetPcdComments)
- BuildOptions = property(_GetBuildOptions)
- Depex = property(_GetDepex)
- DepexExpression = property(_GetDepexExpression)
- IsBinaryModule = property(_IsBinaryModule)
- IsSupportedArch = property(_IsSupportedArch)
-
-## Database
-#
- # This class defines the build database for all modules, packages and platforms.
- # It will call the corresponding parser for a given file if the file cannot be
- # found in the database.
-#
-# @param DbPath Path of database file
-# @param GlobalMacros Global macros used for replacement during file parsing
- # @param RenewDb=False Create a new database file even if one is already there
-#
-class WorkspaceDatabase(object):
-
-
- #
- # internal class used to call the corresponding file parser and cache the result
- # to avoid unnecessary re-parsing
- #
- class BuildObjectFactory(object):
-
- _FILE_TYPE_ = {
- ".inf" : MODEL_FILE_INF,
- ".dec" : MODEL_FILE_DEC,
- ".dsc" : MODEL_FILE_DSC,
- }
-
- # file parser
- _FILE_PARSER_ = {
- MODEL_FILE_INF : InfParser,
- MODEL_FILE_DEC : DecParser,
- MODEL_FILE_DSC : DscParser,
- }
-
- # convert to xxxBuildData object
- _GENERATOR_ = {
- MODEL_FILE_INF : InfBuildData,
- MODEL_FILE_DEC : DecBuildData,
- MODEL_FILE_DSC : DscBuildData,
- }
-
- _CACHE_ = {} # (FilePath, Arch) : <object>
-
- # constructor
- def __init__(self, WorkspaceDb):
- self.WorkspaceDb = WorkspaceDb
-
- # key = (FilePath, Arch=None)
- def __contains__(self, Key):
- FilePath = Key[0]
- if len(Key) > 1:
- Arch = Key[1]
- else:
- Arch = None
- return (FilePath, Arch) in self._CACHE_
-
- # key = (FilePath, Arch=None, Target=None, Toolchain=None)
- def __getitem__(self, Key):
- FilePath = Key[0]
- KeyLength = len(Key)
- if KeyLength > 1:
- Arch = Key[1]
- else:
- Arch = None
- if KeyLength > 2:
- Target = Key[2]
- else:
- Target = None
- if KeyLength > 3:
- Toolchain = Key[3]
- else:
- Toolchain = None
-
- # if it's generated before, just return the cached one
- Key = (FilePath, Arch, Target, Toolchain)
- if Key in self._CACHE_:
- return self._CACHE_[Key]
-
- # check file type
- Ext = FilePath.Type
- if Ext not in self._FILE_TYPE_:
- return None
- FileType = self._FILE_TYPE_[Ext]
- if FileType not in self._GENERATOR_:
- return None
-
- # get the parser ready for this file
- MetaFile = self._FILE_PARSER_[FileType](
- FilePath,
- FileType,
- Arch,
- MetaFileStorage(self.WorkspaceDb.Cur, FilePath, FileType)
- )
- # always do post-processing, in case macros change
- MetaFile.DoPostProcess()
- # object the build is based on
- BuildObject = self._GENERATOR_[FileType](
- FilePath,
- MetaFile,
- self,
- Arch,
- Target,
- Toolchain
- )
- self._CACHE_[Key] = BuildObject
- return BuildObject
-
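# A minimal illustrative sketch (not from the deleted source): the factory
# above memoizes parsed build objects per (FilePath, Arch, Target, Toolchain)
# so each meta file is parsed at most once per configuration. The same caching
# shape in miniature, with a hypothetical parse callable standing in for the
# real parser/generator pair:
class _MemoizingFactory(object):
    def __init__(self, parse):
        self._parse = parse
        self._cache = {}    # (path, arch, target, toolchain) -> build object

    def get(self, path, arch=None, target=None, toolchain=None):
        key = (path, arch, target, toolchain)
        if key not in self._cache:
            self._cache[key] = self._parse(path, arch, target, toolchain)
        return self._cache[key]

# Repeated lookups with the same key hit the cache instead of re-parsing.
_factory = _MemoizingFactory(lambda *key: {'parsed': key})
assert _factory.get('Pkg/Pkg.dec', 'X64') is _factory.get('Pkg/Pkg.dec', 'X64')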
- # placeholder for file format conversion
- class TransformObjectFactory:
- def __init__(self, WorkspaceDb):
- self.WorkspaceDb = WorkspaceDb
-
- # key = FilePath, Arch
- def __getitem__(self, Key):
- pass
-
- ## Constructor of WorkspaceDatabase
- #
- # @param DbPath Path of database file
- # @param GlobalMacros Global macros used for replacement during file parsing
- # @param RenewDb=False Create a new database file even if one is already there
- #
- def __init__(self, DbPath, RenewDb=False):
- self._DbClosedFlag = False
- if not DbPath:
- DbPath = os.path.normpath(mws.join(GlobalData.gWorkspace, 'Conf', GlobalData.gDatabasePath))
-
- # no directory needs to be created for an in-memory db
- if DbPath != ':memory:':
- DbDir = os.path.split(DbPath)[0]
- if not os.path.exists(DbDir):
- os.makedirs(DbDir)
-
- # remove the db file in case of inconsistency between the db and the files in the file system
- if self._CheckWhetherDbNeedRenew(RenewDb, DbPath):
- os.remove(DbPath)
-
- # create db with optimized parameters
- self.Conn = sqlite3.connect(DbPath, isolation_level='DEFERRED')
- self.Conn.execute("PRAGMA synchronous=OFF")
- self.Conn.execute("PRAGMA temp_store=MEMORY")
- self.Conn.execute("PRAGMA count_changes=OFF")
- self.Conn.execute("PRAGMA cache_size=8192")
- #self.Conn.execute("PRAGMA page_size=8192")
-
- # to avoid non-ascii character conversion issue
- self.Conn.text_factory = str
- self.Cur = self.Conn.cursor()
-
- # create table for internal uses
- self.TblDataModel = TableDataModel(self.Cur)
- self.TblFile = TableFile(self.Cur)
- self.Platform = None
-
- # conversion object for build or file format conversion purpose
- self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
- self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)
-
- ## Check whether the workspace database needs to be renewed.
- # The renewal reasons may be:
- # 1) The user forces a renewal;
- # 2) The user does not force a renewal, and
- # a) The last-modified time of the python sources is newer than the database file;
- # b) The last-modified time of the frozen executable file is newer than the database file;
- #
- # @param force Whether the user forces a database renewal
- # @param DbPath The absolute path of the workspace database file
- #
- # @return Bool value for whether the workspace database needs to be renewed
- #
- def _CheckWhetherDbNeedRenew (self, force, DbPath):
- # if the database does not exist, there is nothing to do
- if not os.path.exists(DbPath): return False
-
- # if the user forces a renewal, don't check whether the database is out of date
- if force: return True
-
- #
- # Check the last-modified time of the source files or build.exe;
- # if it is newer than the database's, the database needs to be re-created.
- #
- timeOfToolModified = 0
- if hasattr(sys, "frozen"):
- exePath = os.path.abspath(sys.executable)
- timeOfToolModified = os.stat(exePath).st_mtime
- else:
- curPath = os.path.dirname(__file__) # curPath is the path of WorkspaceDatabase.py
- rootPath = os.path.split(curPath)[0] # rootPath is root path of python source, such as /BaseTools/Source/Python
- if rootPath == "" or rootPath == None:
- EdkLogger.verbose("\nFail to find the root path of build.exe or python sources, so can not \
-determine whether database file is out of date!\n")
-
- # walk the root path of the sources or the build binary to get the last-modified time.
-
- for root, dirs, files in os.walk (rootPath):
- for dir in dirs:
- # bypass source control folder
- if dir.lower() in [".svn", "_svn", "cvs"]:
- dirs.remove(dir)
-
- for file in files:
- ext = os.path.splitext(file)[1]
- if ext.lower() == ".py": # only check .py files
- fd = os.stat(os.path.join(root, file))
- if timeOfToolModified < fd.st_mtime:
- timeOfToolModified = fd.st_mtime
- if timeOfToolModified > os.stat(DbPath).st_mtime:
- EdkLogger.verbose("\nWorkspace database is out of data!")
- return True
-
- return False
-
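# A minimal illustrative sketch (not from the deleted source): the staleness
# check above boils down to taking the newest modification time among the
# tool's .py sources (or the frozen executable) and comparing it with the
# database file's mtime. A condensed version of that comparison:
import os

def _db_is_stale(db_path, source_root):
    newest_tool_mtime = 0
    for root, dirs, files in os.walk(source_root):
        # skip source control folders, as the original loop does
        dirs[:] = [d for d in dirs if d.lower() not in ('.svn', '_svn', 'cvs')]
        for name in files:
            if name.lower().endswith('.py'):
                path = os.path.join(root, name)
                newest_tool_mtime = max(newest_tool_mtime, os.stat(path).st_mtime)
    return newest_tool_mtime > os.stat(db_path).st_mtime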
- ## Initialize build database
- def InitDatabase(self):
- EdkLogger.verbose("\nInitialize build database started ...")
-
- #
- # Create new tables
- #
- self.TblDataModel.Create(False)
- self.TblFile.Create(False)
-
- #
- # Initialize table DataModel
- #
- self.TblDataModel.InitTable()
- EdkLogger.verbose("Initialize build database ... DONE!")
-
- ## Query a table
- #
- # @param Table: The instance of the table to be queried
- #
- def QueryTable(self, Table):
- Table.Query()
-
- def __del__(self):
- self.Close()
-
- ## Close entire database
- #
- # Commit all first
- # Close the connection and cursor
- #
- def Close(self):
- if not self._DbClosedFlag:
- self.Conn.commit()
- self.Cur.close()
- self.Conn.close()
- self._DbClosedFlag = True
-
- ## Summarize all packages in the database
- def GetPackageList(self, Platform, Arch, TargetName, ToolChainTag):
- self.Platform = Platform
- PackageList = []
- Pa = self.BuildObject[self.Platform, 'COMMON']
- #
- # Get Package related to Modules
- #
- for Module in Pa.Modules:
- ModuleObj = self.BuildObject[Module, Arch, TargetName, ToolChainTag]
- for Package in ModuleObj.Packages:
- if Package not in PackageList:
- PackageList.append(Package)
- #
- # Get Packages related to Libraries
- #
- for Lib in Pa.LibraryInstances:
- LibObj = self.BuildObject[Lib, Arch, TargetName, ToolChainTag]
- for Package in LibObj.Packages:
- if Package not in PackageList:
- PackageList.append(Package)
-
- return PackageList
-
- ## Summarize all platforms in the database
- def _GetPlatformList(self):
- PlatformList = []
- for PlatformFile in self.TblFile.GetFileList(MODEL_FILE_DSC):
- try:
- Platform = self.BuildObject[PathClass(PlatformFile), 'COMMON']
- except:
- Platform = None
- if Platform != None:
- PlatformList.append(Platform)
- return PlatformList
-
- def _MapPlatform(self, Dscfile):
- Platform = self.BuildObject[PathClass(Dscfile), 'COMMON']
- if Platform == None:
- EdkLogger.error('build', PARSER_ERROR, "Failed to parser DSC file: %s" % Dscfile)
- return Platform
-
- PlatformList = property(_GetPlatformList)
-
-##
-#
-# This acts like the main() function for the script, unless it is 'import'ed into another
-# script.
-#
-if __name__ == '__main__':
- pass
-
diff --git a/BaseTools/Source/Python/Workspace/__init__.py b/BaseTools/Source/Python/Workspace/__init__.py
deleted file mode 100644
index 05cd34bad5..0000000000
--- a/BaseTools/Source/Python/Workspace/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-## @file
-# Python 'Workspace' package initialization file.
-#
- # This file is required to make the Python interpreter treat the directory
- # as a package.
-#
-# Copyright (c) 2008 - 2010, Intel Corporation. All rights reserved.<BR>
-# This program and the accompanying materials
-# are licensed and made available under the terms and conditions of the BSD License
-# which accompanies this distribution. The full text of the license may be found at
-# http://opensource.org/licenses/bsd-license.php
-#
-# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-#