summaryrefslogtreecommitdiff
path: root/BaseTools/Source/Python/Common
diff options
context:
space:
mode:
authorlgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>2009-07-17 09:10:31 +0000
committerlgao4 <lgao4@6f19259b-4bc3-4df7-8a09-765794883524>2009-07-17 09:10:31 +0000
commit30fdf1140b8d1ce93f3821d986fa165552023440 (patch)
treec45c336a8955b1d03ea56d6c915a0e68a43b4ee9 /BaseTools/Source/Python/Common
parent577e30cdb473e4af8e65fd6f75236691d0c8dfb3 (diff)
downloadedk2-platforms-30fdf1140b8d1ce93f3821d986fa165552023440.tar.xz
Check In tool source code based on Build tool project revision r1655.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@8964 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'BaseTools/Source/Python/Common')
-rw-r--r--BaseTools/Source/Python/Common/BuildToolError.py152
-rw-r--r--BaseTools/Source/Python/Common/DataType.py401
-rw-r--r--BaseTools/Source/Python/Common/Database.py120
-rw-r--r--BaseTools/Source/Python/Common/DecClassObject.py563
-rw-r--r--BaseTools/Source/Python/Common/DecClassObjectLight.py580
-rw-r--r--BaseTools/Source/Python/Common/Dictionary.py75
-rw-r--r--BaseTools/Source/Python/Common/DscClassObject.py1434
-rw-r--r--BaseTools/Source/Python/Common/EdkIIWorkspace.py318
-rw-r--r--BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py1669
-rw-r--r--BaseTools/Source/Python/Common/EdkLogger.py269
-rw-r--r--BaseTools/Source/Python/Common/FdfClassObject.py116
-rw-r--r--BaseTools/Source/Python/Common/FdfParserLite.py3603
-rw-r--r--BaseTools/Source/Python/Common/GlobalData.py37
-rw-r--r--BaseTools/Source/Python/Common/Identification.py58
-rw-r--r--BaseTools/Source/Python/Common/InfClassObject.py1116
-rw-r--r--BaseTools/Source/Python/Common/InfClassObjectLight.py876
-rw-r--r--BaseTools/Source/Python/Common/MigrationUtilities.py567
-rw-r--r--BaseTools/Source/Python/Common/Misc.py1327
-rw-r--r--BaseTools/Source/Python/Common/Parsing.py935
-rw-r--r--BaseTools/Source/Python/Common/PyUtility.pydbin0 -> 4608 bytes
-rw-r--r--BaseTools/Source/Python/Common/String.py703
-rw-r--r--BaseTools/Source/Python/Common/TargetTxtClassObject.py174
-rw-r--r--BaseTools/Source/Python/Common/ToolDefClassObject.py217
-rw-r--r--BaseTools/Source/Python/Common/XmlParser.py1754
-rw-r--r--BaseTools/Source/Python/Common/XmlRoutines.py228
-rw-r--r--BaseTools/Source/Python/Common/__init__.py0
26 files changed, 17292 insertions, 0 deletions
diff --git a/BaseTools/Source/Python/Common/BuildToolError.py b/BaseTools/Source/Python/Common/BuildToolError.py
new file mode 100644
index 0000000000..982ea93659
--- /dev/null
+++ b/BaseTools/Source/Python/Common/BuildToolError.py
@@ -0,0 +1,152 @@
+## @file
+# Standardized Error Handling infrastructures.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+FILE_OPEN_FAILURE = 1
+FILE_WRITE_FAILURE = 2
+FILE_PARSE_FAILURE = 3
+FILE_READ_FAILURE = 4
+FILE_CREATE_FAILURE = 5
+FILE_CHECKSUM_FAILURE = 6
+FILE_COMPRESS_FAILURE = 7
+FILE_DECOMPRESS_FAILURE = 8
+FILE_MOVE_FAILURE = 9
+FILE_DELETE_FAILURE = 10
+FILE_COPY_FAILURE = 11
+FILE_POSITIONING_FAILURE = 12
+FILE_ALREADY_EXIST = 13
+FILE_NOT_FOUND = 14
+FILE_TYPE_MISMATCH = 15
+FILE_CASE_MISMATCH = 16
+FILE_DUPLICATED = 17
+FILE_UNKNOWN_ERROR = 0x0FFF
+
+OPTION_UNKNOWN = 0x1000
+OPTION_MISSING = 0x1001
+OPTION_CONFLICT = 0x1002
+OPTION_VALUE_INVALID = 0x1003
+OPTION_DEPRECATED = 0x1004
+OPTION_NOT_SUPPORTED = 0x1005
+OPTION_UNKNOWN_ERROR = 0x1FFF
+
+PARAMETER_INVALID = 0x2000
+PARAMETER_MISSING = 0x2001
+PARAMETER_UNKNOWN_ERROR =0x2FFF
+
+FORMAT_INVALID = 0x3000
+FORMAT_NOT_SUPPORTED = 0x3001
+FORMAT_UNKNOWN = 0x3002
+FORMAT_UNKNOWN_ERROR = 0x3FFF
+
+RESOURCE_NOT_AVAILABLE = 0x4000
+RESOURCE_ALLOCATE_FAILURE = 0x4001
+RESOURCE_FULL = 0x4002
+RESOURCE_OVERFLOW = 0x4003
+RESOURCE_UNDERRUN = 0x4004
+RESOURCE_UNKNOWN_ERROR = 0x4FFF
+
+ATTRIBUTE_NOT_AVAILABLE = 0x5000
+ATTRIBUTE_GET_FAILURE = 0x5001
+ATTRIBUTE_SET_FAILURE = 0x5002
+ATTRIBUTE_UPDATE_FAILURE = 0x5003
+ATTRIBUTE_ACCESS_DENIED = 0x5004
+ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF
+
+IO_NOT_READY = 0x6000
+IO_BUSY = 0x6001
+IO_TIMEOUT = 0x6002
+IO_UNKNOWN_ERROR = 0x6FFF
+
+COMMAND_FAILURE = 0x7000
+
+CODE_ERROR = 0xC0DE
+
+AUTOGEN_ERROR = 0xF000
+PARSER_ERROR = 0xF001
+BUILD_ERROR = 0xF002
+GENFDS_ERROR = 0xF003
+ECC_ERROR = 0xF004
+EOT_ERROR = 0xF005
+DDC_ERROR = 0xF009
+WARNING_AS_ERROR = 0xF006
+MIGRATION_ERROR = 0xF010
+ABORT_ERROR = 0xFFFE
+UNKNOWN_ERROR = 0xFFFF
+
+## Error message of each error code
+gErrorMessage = {
+ FILE_NOT_FOUND : "File/directory not found",
+ FILE_OPEN_FAILURE : "File open failure",
+ FILE_WRITE_FAILURE : "File write failure",
+ FILE_PARSE_FAILURE : "File parse failure",
+ FILE_READ_FAILURE : "File read failure",
+ FILE_CREATE_FAILURE : "File create failure",
+ FILE_CHECKSUM_FAILURE : "Invalid checksum of file",
+ FILE_COMPRESS_FAILURE : "File compress failure",
+ FILE_DECOMPRESS_FAILURE : "File decompress failure",
+ FILE_MOVE_FAILURE : "File move failure",
+ FILE_DELETE_FAILURE : "File delete failure",
+ FILE_COPY_FAILURE : "File copy failure",
+ FILE_POSITIONING_FAILURE: "Failed to seeking position",
+ FILE_ALREADY_EXIST : "File or directory already exists",
+ FILE_TYPE_MISMATCH : "Incorrect file type",
+ FILE_CASE_MISMATCH : "File name case mismatch",
+ FILE_DUPLICATED : "Duplicated file found",
+ FILE_UNKNOWN_ERROR : "Unknown error encountered on file",
+
+ OPTION_UNKNOWN : "Unknown option",
+ OPTION_MISSING : "Missing option",
+ OPTION_CONFLICT : "Conflict options",
+ OPTION_VALUE_INVALID : "Invalid value of option",
+ OPTION_DEPRECATED : "Deprecated option",
+ OPTION_NOT_SUPPORTED : "Unsupported option",
+ OPTION_UNKNOWN_ERROR : "Unknown error when processing options",
+
+ PARAMETER_INVALID : "Invalid parameter",
+ PARAMETER_MISSING : "Missing parameter",
+ PARAMETER_UNKNOWN_ERROR : "Unknown error in parameters",
+
+ FORMAT_INVALID : "Invalid syntax/format",
+ FORMAT_NOT_SUPPORTED : "Not supported syntax/format",
+ FORMAT_UNKNOWN : "Unknown format",
+ FORMAT_UNKNOWN_ERROR : "Unknown error in syntax/format ",
+
+ RESOURCE_NOT_AVAILABLE : "Not available",
+ RESOURCE_ALLOCATE_FAILURE : "Allocate failure",
+ RESOURCE_FULL : "Full",
+ RESOURCE_OVERFLOW : "Overflow",
+ RESOURCE_UNDERRUN : "Underrun",
+ RESOURCE_UNKNOWN_ERROR : "Unkown error",
+
+ ATTRIBUTE_NOT_AVAILABLE : "Not available",
+ ATTRIBUTE_GET_FAILURE : "Failed to retrieve",
+ ATTRIBUTE_SET_FAILURE : "Failed to set",
+ ATTRIBUTE_UPDATE_FAILURE: "Failed to update",
+ ATTRIBUTE_ACCESS_DENIED : "Access denied",
+ ATTRIBUTE_UNKNOWN_ERROR : "Unknown error when accessing",
+
+ COMMAND_FAILURE : "Failed to execute command",
+
+ IO_NOT_READY : "Not ready",
+ IO_BUSY : "Busy",
+ IO_TIMEOUT : "Timeout",
+ IO_UNKNOWN_ERROR : "Unknown error in IO operation",
+
+ UNKNOWN_ERROR : "Unknown error",
+}
+
+## Exception indicating a fatal error
+class FatalError(Exception):
+ pass
+
+if __name__ == "__main__":
+ pass
diff --git a/BaseTools/Source/Python/Common/DataType.py b/BaseTools/Source/Python/Common/DataType.py
new file mode 100644
index 0000000000..8b6c4e4921
--- /dev/null
+++ b/BaseTools/Source/Python/Common/DataType.py
@@ -0,0 +1,401 @@
+## @file
+# This file is used to define common static strings used by INF/DEC/DSC files
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+##
+# Common Definitions
+#
+TAB_SPLIT = '.'
+TAB_COMMENT_R8_START = '/*'
+TAB_COMMENT_R8_END = '*/'
+TAB_COMMENT_R8_SPLIT = '//'
+TAB_COMMENT_SPLIT = '#'
+TAB_EQUAL_SPLIT = '='
+TAB_VALUE_SPLIT = '|'
+TAB_COMMA_SPLIT = ','
+TAB_SPACE_SPLIT = ' '
+TAB_SECTION_START = '['
+TAB_SECTION_END = ']'
+TAB_OPTION_START = '<'
+TAB_OPTION_END = '>'
+TAB_SLASH = '\\'
+TAB_BACK_SLASH = '/'
+
+TAB_EDK_SOURCE = '$(EDK_SOURCE)'
+TAB_EFI_SOURCE = '$(EFI_SOURCE)'
+TAB_WORKSPACE = '$(WORKSPACE)'
+
+TAB_ARCH_NULL = ''
+TAB_ARCH_COMMON = 'COMMON'
+TAB_ARCH_IA32 = 'IA32'
+TAB_ARCH_X64 = 'X64'
+TAB_ARCH_IPF = 'IPF'
+TAB_ARCH_ARM = 'ARM'
+TAB_ARCH_EBC = 'EBC'
+
+ARCH_LIST = [TAB_ARCH_IA32, TAB_ARCH_X64, TAB_ARCH_IPF, TAB_ARCH_ARM, TAB_ARCH_EBC]
+ARCH_LIST_FULL = [TAB_ARCH_COMMON] + ARCH_LIST
+
+SUP_MODULE_BASE = 'BASE'
+SUP_MODULE_SEC = 'SEC'
+SUP_MODULE_PEI_CORE = 'PEI_CORE'
+SUP_MODULE_PEIM = 'PEIM'
+SUP_MODULE_DXE_CORE = 'DXE_CORE'
+SUP_MODULE_DXE_DRIVER = 'DXE_DRIVER'
+SUP_MODULE_DXE_RUNTIME_DRIVER = 'DXE_RUNTIME_DRIVER'
+SUP_MODULE_DXE_SAL_DRIVER = 'DXE_SAL_DRIVER'
+SUP_MODULE_DXE_SMM_DRIVER = 'DXE_SMM_DRIVER'
+SUP_MODULE_UEFI_DRIVER = 'UEFI_DRIVER'
+SUP_MODULE_UEFI_APPLICATION = 'UEFI_APPLICATION'
+SUP_MODULE_USER_DEFINED = 'USER_DEFINED'
+SUP_MODULE_SMM_DRIVER = 'SMM_DRIVER'
+SUP_MODULE_SMM_CORE = 'SMM_CORE'
+
+SUP_MODULE_LIST = [SUP_MODULE_BASE, SUP_MODULE_SEC, SUP_MODULE_PEI_CORE, SUP_MODULE_PEIM, SUP_MODULE_DXE_CORE, SUP_MODULE_DXE_DRIVER, \
+ SUP_MODULE_DXE_RUNTIME_DRIVER, SUP_MODULE_DXE_SAL_DRIVER, SUP_MODULE_DXE_SMM_DRIVER, SUP_MODULE_UEFI_DRIVER, \
+ SUP_MODULE_UEFI_APPLICATION, SUP_MODULE_USER_DEFINED, SUP_MODULE_SMM_DRIVER, SUP_MODULE_SMM_CORE]
+SUP_MODULE_LIST_STRING = TAB_VALUE_SPLIT.join(l for l in SUP_MODULE_LIST)
+
+EDK_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
+EDK_COMPONENT_TYPE_SECUARITY_CORE = 'SECUARITY_CORE'
+EDK_COMPONENT_TYPE_PEI_CORE = 'PEI_CORE'
+EDK_COMPONENT_TYPE_COMBINED_PEIM_DRIVER = 'COMBINED_PEIM_DRIVER'
+EDK_COMPONENT_TYPE_PIC_PEIM = 'PIC_PEIM'
+EDK_COMPONENT_TYPE_RELOCATABLE_PEIM = 'RELOCATABLE_PEIM'
+EDK_COMPONENT_TYPE_BS_DRIVER = 'BS_DRIVER'
+EDK_COMPONENT_TYPE_RT_DRIVER = 'RT_DRIVER'
+EDK_COMPONENT_TYPE_SAL_RT_DRIVER = 'SAL_RT_DRIVER'
+EDK_COMPONENT_TYPE_APPLICATION = 'APPLICATION'
+
+BINARY_FILE_TYPE_FW = 'FW'
+BINARY_FILE_TYPE_GUID = 'GUID'
+BINARY_FILE_TYPE_PREEFORM = 'PREEFORM'
+BINARY_FILE_TYPE_UEFI_APP = 'UEFI_APP'
+BINARY_FILE_TYPE_UNI_UI = 'UNI_UI'
+BINARY_FILE_TYPE_UNI_VER = 'UNI_VER'
+BINARY_FILE_TYPE_LIB = 'LIB'
+BINARY_FILE_TYPE_PE32 = 'PE32'
+BINARY_FILE_TYPE_PIC = 'PIC'
+BINARY_FILE_TYPE_PEI_DEPEX = 'PEI_DEPEX'
+BINARY_FILE_TYPE_DXE_DEPEX = 'DXE_DEPEX'
+BINARY_FILE_TYPE_TE = 'TE'
+BINARY_FILE_TYPE_VER = 'VER'
+BINARY_FILE_TYPE_UI = 'UI'
+BINARY_FILE_TYPE_BIN = 'BIN'
+BINARY_FILE_TYPE_FV = 'FV'
+
+PLATFORM_COMPONENT_TYPE_LIBRARY = 'LIBRARY'
+PLATFORM_COMPONENT_TYPE_LIBRARY_CLASS = 'LIBRARY_CLASS'
+PLATFORM_COMPONENT_TYPE_MODULE = 'MODULE'
+
+TAB_LIBRARIES = 'Libraries'
+
+TAB_SOURCES = 'Sources'
+TAB_SOURCES_COMMON = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_SOURCES_IA32 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_SOURCES_X64 = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_X64
+TAB_SOURCES_IPF = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_SOURCES_ARM = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_SOURCES_EBC = TAB_SOURCES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_BINARIES = 'Binaries'
+TAB_BINARIES_COMMON = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_BINARIES_IA32 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_BINARIES_X64 = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_BINARIES_IPF = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_BINARIES_ARM = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_BINARIES_EBC = TAB_BINARIES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_INCLUDES = 'Includes'
+TAB_INCLUDES_COMMON = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_INCLUDES_IA32 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_INCLUDES_X64 = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_X64
+TAB_INCLUDES_IPF = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_INCLUDES_ARM = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_INCLUDES_EBC = TAB_INCLUDES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_GUIDS = 'Guids'
+TAB_GUIDS_COMMON = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_GUIDS_IA32 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_GUIDS_X64 = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_X64
+TAB_GUIDS_IPF = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_GUIDS_ARM = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_GUIDS_EBC = TAB_GUIDS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PROTOCOLS = 'Protocols'
+TAB_PROTOCOLS_COMMON = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PROTOCOLS_IA32 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PROTOCOLS_X64 = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PROTOCOLS_IPF = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PROTOCOLS_ARM = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PROTOCOLS_EBC = TAB_PROTOCOLS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PPIS = 'Ppis'
+TAB_PPIS_COMMON = TAB_PPIS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PPIS_IA32 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PPIS_X64 = TAB_PPIS + TAB_SPLIT + TAB_ARCH_X64
+TAB_PPIS_IPF = TAB_PPIS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PPIS_ARM = TAB_PPIS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PPIS_EBC = TAB_PPIS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_LIBRARY_CLASSES = 'LibraryClasses'
+TAB_LIBRARY_CLASSES_COMMON = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARY_CLASSES_IA32 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARY_CLASSES_X64 = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARY_CLASSES_IPF = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_LIBRARY_CLASSES_ARM = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARY_CLASSES_EBC = TAB_LIBRARY_CLASSES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PACKAGES = 'Packages'
+TAB_PACKAGES_COMMON = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PACKAGES_IA32 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PACKAGES_X64 = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_X64
+TAB_PACKAGES_IPF = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PACKAGES_ARM = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PACKAGES_EBC = TAB_PACKAGES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS = 'Pcds'
+TAB_PCDS_FIXED_AT_BUILD = 'FixedAtBuild'
+TAB_PCDS_PATCHABLE_IN_MODULE = 'PatchableInModule'
+TAB_PCDS_FEATURE_FLAG = 'FeatureFlag'
+TAB_PCDS_DYNAMIC_EX = 'DynamicEx'
+TAB_PCDS_DYNAMIC_EX_DEFAULT = 'DynamicExDefault'
+TAB_PCDS_DYNAMIC_EX_VPD = 'DynamicExVpd'
+TAB_PCDS_DYNAMIC_EX_HII = 'DynamicExHii'
+TAB_PCDS_DYNAMIC = 'Dynamic'
+TAB_PCDS_DYNAMIC_DEFAULT = 'DynamicDefault'
+TAB_PCDS_DYNAMIC_VPD = 'DynamicVpd'
+TAB_PCDS_DYNAMIC_HII = 'DynamicHii'
+
+PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC, TAB_PCDS_DYNAMIC_DEFAULT, TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_HII]
+PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
+
+## Dynamic-ex PCD types
+gDynamicExPcd = [TAB_PCDS_DYNAMIC_EX, TAB_PCDS_DYNAMIC_EX_DEFAULT, TAB_PCDS_DYNAMIC_EX_VPD, TAB_PCDS_DYNAMIC_EX_HII]
+
+TAB_PCDS_FIXED_AT_BUILD_NULL = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD
+TAB_PCDS_FIXED_AT_BUILD_COMMON = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_FIXED_AT_BUILD_IA32 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_FIXED_AT_BUILD_X64 = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_FIXED_AT_BUILD_IPF = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_FIXED_AT_BUILD_ARM = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_FIXED_AT_BUILD_EBC = TAB_PCDS + TAB_PCDS_FIXED_AT_BUILD + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_PATCHABLE_IN_MODULE_NULL = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE
+TAB_PCDS_PATCHABLE_IN_MODULE_COMMON = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_PATCHABLE_IN_MODULE_IA32 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_PATCHABLE_IN_MODULE_X64 = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_PATCHABLE_IN_MODULE_IPF = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_PATCHABLE_IN_MODULE_ARM = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_PATCHABLE_IN_MODULE_EBC = TAB_PCDS + TAB_PCDS_PATCHABLE_IN_MODULE + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_FEATURE_FLAG_NULL = TAB_PCDS + TAB_PCDS_FEATURE_FLAG
+TAB_PCDS_FEATURE_FLAG_COMMON = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_FEATURE_FLAG_IA32 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_FEATURE_FLAG_X64 = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_FEATURE_FLAG_IPF = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_FEATURE_FLAG_ARM = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_FEATURE_FLAG_EBC = TAB_PCDS + TAB_PCDS_FEATURE_FLAG + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_DYNAMIC_EX_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX
+TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_DEFAULT
+TAB_PCDS_DYNAMIC_EX_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_HII
+TAB_PCDS_DYNAMIC_EX_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_EX_VPD
+TAB_PCDS_DYNAMIC_EX_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_EX_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_EX_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_EX_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_DYNAMIC_EX_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EX_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC_EX + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCDS_DYNAMIC_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC
+TAB_PCDS_DYNAMIC_DEFAULT_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_DEFAULT
+TAB_PCDS_DYNAMIC_HII_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_HII
+TAB_PCDS_DYNAMIC_VPD_NULL = TAB_PCDS + TAB_PCDS_DYNAMIC_VPD
+TAB_PCDS_DYNAMIC_COMMON = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_PCDS_DYNAMIC_IA32 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IA32
+TAB_PCDS_DYNAMIC_X64 = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_X64
+TAB_PCDS_DYNAMIC_IPF = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_IPF
+TAB_PCDS_DYNAMIC_ARM = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_ARM
+TAB_PCDS_DYNAMIC_EBC = TAB_PCDS + TAB_PCDS_DYNAMIC + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_PCD_DYNAMIC_TYPE_LIST = [TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, TAB_PCDS_DYNAMIC_HII_NULL]
+TAB_PCD_DYNAMIC_EX_TYPE_LIST = [TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL]
+
+TAB_DEPEX = 'Depex'
+TAB_DEPEX_COMMON = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_DEPEX_IA32 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IA32
+TAB_DEPEX_X64 = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_X64
+TAB_DEPEX_IPF = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_IPF
+TAB_DEPEX_ARM = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_ARM
+TAB_DEPEX_EBC = TAB_DEPEX + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_SKUIDS = 'SkuIds'
+
+TAB_LIBRARIES = 'Libraries'
+TAB_LIBRARIES_COMMON = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_LIBRARIES_IA32 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IA32
+TAB_LIBRARIES_X64 = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_X64
+TAB_LIBRARIES_IPF = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_IPF
+TAB_LIBRARIES_ARM = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_ARM
+TAB_LIBRARIES_EBC = TAB_LIBRARIES + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_COMPONENTS = 'Components'
+TAB_COMPONENTS_COMMON = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_COMMON
+TAB_COMPONENTS_IA32 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IA32
+TAB_COMPONENTS_X64 = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_X64
+TAB_COMPONENTS_IPF = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_IPF
+TAB_COMPONENTS_ARM = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_ARM
+TAB_COMPONENTS_EBC = TAB_COMPONENTS + TAB_SPLIT + TAB_ARCH_EBC
+
+TAB_COMPONENTS_SOURCE_OVERRIDE_PATH = 'SOURCE_OVERRIDE_PATH'
+
+TAB_BUILD_OPTIONS = 'BuildOptions'
+
+TAB_DEFINE = 'DEFINE'
+TAB_NMAKE = 'Nmake'
+TAB_USER_EXTENSIONS = 'UserExtensions'
+TAB_INCLUDE = '!include'
+
+#
+# Common Define
+#
+TAB_COMMON_DEFINES = 'Defines'
+
+#
+# Inf Definitions
+#
+TAB_INF_DEFINES = TAB_COMMON_DEFINES
+TAB_INF_DEFINES_INF_VERSION = 'INF_VERSION'
+TAB_INF_DEFINES_BASE_NAME = 'BASE_NAME'
+TAB_INF_DEFINES_FILE_GUID = 'FILE_GUID'
+TAB_INF_DEFINES_MODULE_TYPE = 'MODULE_TYPE'
+TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION = 'EFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION = 'UEFI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_PI_SPECIFICATION_VERSION = 'PI_SPECIFICATION_VERSION'
+TAB_INF_DEFINES_EDK_RELEASE_VERSION = 'EDK_RELEASE_VERSION'
+TAB_INF_DEFINES_BINARY_MODULE = 'BINARY_MODULE'
+TAB_INF_DEFINES_LIBRARY_CLASS = 'LIBRARY_CLASS'
+TAB_INF_DEFINES_COMPONENT_TYPE = 'COMPONENT_TYPE'
+TAB_INF_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_INF_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_INF_DEFINES_BUILD_TYPE = 'BUILD_TYPE'
+TAB_INF_DEFINES_FFS_EXT = 'FFS_EXT'
+TAB_INF_DEFINES_FV_EXT = 'FV_EXT'
+TAB_INF_DEFINES_SOURCE_FV = 'SOURCE_FV'
+TAB_INF_DEFINES_VERSION_NUMBER = 'VERSION_NUMBER'
+TAB_INF_DEFINES_VERSION = 'VERSION' # for R8 inf, the same as VERSION_NUMBER
+TAB_INF_DEFINES_VERSION_STRING = 'VERSION_STRING'
+TAB_INF_DEFINES_PCD_IS_DRIVER = 'PCD_IS_DRIVER'
+TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H = 'TIANO_R8_FLASHMAP_H'
+TAB_INF_DEFINES_ENTRY_POINT = 'ENTRY_POINT'
+TAB_INF_DEFINES_UNLOAD_IMAGE = 'UNLOAD_IMAGE'
+TAB_INF_DEFINES_CONSTRUCTOR = 'CONSTRUCTOR'
+TAB_INF_DEFINES_DESTRUCTOR = 'DESTRUCTOR'
+TAB_INF_DEFINES_DEFINE = 'DEFINE'
+TAB_INF_DEFINES_SPEC = 'SPEC'
+TAB_INF_DEFINES_CUSTOM_MAKEFILE = 'CUSTOM_MAKEFILE'
+TAB_INF_DEFINES_MACRO = '__MACROS__'
+TAB_INF_DEFINES_SHADOW = 'SHADOW'
+TAB_INF_FIXED_PCD = 'FixedPcd'
+TAB_INF_FEATURE_PCD = 'FeaturePcd'
+TAB_INF_PATCH_PCD = 'PatchPcd'
+TAB_INF_PCD = 'Pcd'
+TAB_INF_PCD_EX = 'PcdEx'
+
+#
+# Dec Definitions
+#
+TAB_DEC_DEFINES = TAB_COMMON_DEFINES
+TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
+TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
+TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
+TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
+
+#
+# Dsc Definitions
+#
+TAB_DSC_DEFINES = TAB_COMMON_DEFINES
+TAB_DSC_DEFINES_PLATFORM_NAME = 'PLATFORM_NAME'
+TAB_DSC_DEFINES_PLATFORM_GUID = 'PLATFORM_GUID'
+TAB_DSC_DEFINES_PLATFORM_VERSION = 'PLATFORM_VERSION'
+TAB_DSC_DEFINES_DSC_SPECIFICATION = 'DSC_SPECIFICATION'
+TAB_DSC_DEFINES_OUTPUT_DIRECTORY = 'OUTPUT_DIRECTORY'
+TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES = 'SUPPORTED_ARCHITECTURES'
+TAB_DSC_DEFINES_BUILD_TARGETS = 'BUILD_TARGETS'
+TAB_DSC_DEFINES_SKUID_IDENTIFIER = 'SKUID_IDENTIFIER'
+TAB_DSC_DEFINES_FLASH_DEFINITION = 'FLASH_DEFINITION'
+TAB_DSC_DEFINES_BUILD_NUMBER = 'BUILD_NUMBER'
+TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
+TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
+TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
+TAB_DSC_DEFINES_DEFINE = 'DEFINE'
+
+#
+# TargetTxt Definitions
+#
+TAB_TAT_DEFINES_ACTIVE_PLATFORM = 'ACTIVE_PLATFORM'
+TAB_TAT_DEFINES_ACTIVE_MODULE = 'ACTIVE_MODULE'
+TAB_TAT_DEFINES_TOOL_CHAIN_CONF = 'TOOL_CHAIN_CONF'
+TAB_TAT_DEFINES_MULTIPLE_THREAD = 'MULTIPLE_THREAD'
+TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER = 'MAX_CONCURRENT_THREAD_NUMBER'
+TAB_TAT_DEFINES_TARGET = 'TARGET'
+TAB_TAT_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TAT_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TAT_DEFINES_BUILD_RULE_CONF = "BUILD_RULE_CONF"
+
+#
+# ToolDef Definitions
+#
+TAB_TOD_DEFINES_TARGET = 'TARGET'
+TAB_TOD_DEFINES_TOOL_CHAIN_TAG = 'TOOL_CHAIN_TAG'
+TAB_TOD_DEFINES_TARGET_ARCH = 'TARGET_ARCH'
+TAB_TOD_DEFINES_COMMAND_TYPE = 'COMMAND_TYPE'
+TAB_TOD_DEFINES_FAMILY = 'FAMILY'
+TAB_TOD_DEFINES_BUILDRULEFAMILY = 'BUILDRULEFAMILY'
+
+#
+# Conditional Statements
+#
+TAB_IF = '!if'
+TAB_END_IF = '!endif'
+TAB_ELSE_IF = '!elseif'
+TAB_ELSE = '!else'
+TAB_IF_DEF = '!ifdef'
+TAB_IF_N_DEF = '!ifndef'
+TAB_IF_EXIST = '!if exist'
+
+#
+# Unknown section
+#
+TAB_UNKNOWN = 'UNKNOWN'
+
+#
+# Build database path
+#
+DATABASE_PATH = ":memory:" #"BuildDatabase.db"
+
+# used by ECC
+MODIFIER_LIST = ['IN', 'OUT', 'OPTIONAL', 'UNALIGNED', 'EFI_RUNTIMESERVICE', 'EFI_BOOTSERVICE', 'EFIAPI']
+
+# Dependency Expression
+DEPEX_SUPPORTED_OPCODE = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "END", "SOR", "TRUE", "FALSE", '(', ')']
+
+TAB_STATIC_LIBRARY = "STATIC-LIBRARY-FILE"
+TAB_DYNAMIC_LIBRARY = "DYNAMIC-LIBRARY-FILE"
+TAB_FRAMEWORK_IMAGE = "EFI-IMAGE-FILE"
+TAB_C_CODE_FILE = "C-CODE-FILE"
+TAB_C_HEADER_FILE = "C-HEADER-FILE"
+TAB_UNICODE_FILE = "UNICODE-TEXT-FILE"
+TAB_DEPENDENCY_EXPRESSION_FILE = "DEPENDENCY-EXPRESSION-FILE"
+TAB_UNKNOWN_FILE = "UNKNOWN-TYPE-FILE"
+TAB_DEFAULT_BINARY_FILE = "_BINARY_FILE_"
+
diff --git a/BaseTools/Source/Python/Common/Database.py b/BaseTools/Source/Python/Common/Database.py
new file mode 100644
index 0000000000..e645337a39
--- /dev/null
+++ b/BaseTools/Source/Python/Common/Database.py
@@ -0,0 +1,120 @@
+## @file
+# This file is used to create a database used by ECC tool
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import sqlite3
+import os
+
+import EdkLogger as EdkLogger
+from CommonDataClass.DataClass import *
+from String import *
+from DataType import *
+
+from Table.TableDataModel import TableDataModel
+from Table.TableFile import TableFile
+from Table.TableInf import TableInf
+from Table.TableDec import TableDec
+from Table.TableDsc import TableDsc
+
+## Database
+#
+# This class defines the build database
+# During the phase of initialization, the database will create all tables and
+# insert all records of table DataModel
+#
+# @param object: Inherited from object class
+# @param DbPath: A string for the path of the ECC database
+#
+# @var Conn: Connection of the ECC database
+# @var Cur: Cursor of the connection
+# @var TblDataModel: Local instance for TableDataModel
+#
+class Database(object):
+ def __init__(self, DbPath):
+ if os.path.exists(DbPath):
+ os.remove(DbPath)
+ self.Conn = sqlite3.connect(DbPath, isolation_level = 'DEFERRED')
+ self.Conn.execute("PRAGMA page_size=8192")
+ self.Conn.execute("PRAGMA synchronous=OFF")
+ self.Cur = self.Conn.cursor()
+ self.TblDataModel = TableDataModel(self.Cur)
+ self.TblFile = TableFile(self.Cur)
+ self.TblInf = TableInf(self.Cur)
+ self.TblDec = TableDec(self.Cur)
+ self.TblDsc = TableDsc(self.Cur)
+
+ ## Initialize build database
+ #
+ # 1. Delete all old existing tables
+ # 2. Create new tables
+ # 3. Initialize table DataModel
+ #
+ def InitDatabase(self):
+ EdkLogger.verbose("\nInitialize ECC database started ...")
+ #
+ # Drop all old existing tables
+ #
+# self.TblDataModel.Drop()
+# self.TblDsc.Drop()
+# self.TblFile.Drop()
+
+ #
+ # Create new tables
+ #
+ self.TblDataModel.Create()
+ self.TblFile.Create()
+ self.TblInf.Create()
+ self.TblDec.Create()
+ self.TblDsc.Create()
+
+ #
+ # Initialize table DataModel
+ #
+ self.TblDataModel.InitTable()
+ EdkLogger.verbose("Initialize ECC database ... DONE!")
+
+ ## Query a table
+ #
+ # @param Table: The instance of the table to be queried
+ #
+ def QueryTable(self, Table):
+ Table.Query()
+
+ ## Close entire database
+ #
+ # Commit all first
+ # Close the connection and cursor
+ #
+ def Close(self):
+ self.Conn.commit()
+ self.Cur.close()
+ self.Conn.close()
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+
+ Db = Database(DATABASE_PATH)
+ Db.InitDatabase()
+ Db.QueryTable(Db.TblDataModel)
+ Db.QueryTable(Db.TblFile)
+ Db.QueryTable(Db.TblDsc)
+ Db.Close()
+ \ No newline at end of file
diff --git a/BaseTools/Source/Python/Common/DecClassObject.py b/BaseTools/Source/Python/Common/DecClassObject.py
new file mode 100644
index 0000000000..b95ff621cc
--- /dev/null
+++ b/BaseTools/Source/Python/Common/DecClassObject.py
@@ -0,0 +1,563 @@
+## @file
+# This file is used to define each component of DEC file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from String import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass.CommonClass import PcdClass
+from BuildToolError import *
+from Table.TableDec import TableDec
+import Database
+from Parsing import *
+import GlobalData
+
+#
+# Global variable
+#
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+
+## DecObject
+#
+# This class defined basic Dec object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
+class DecObject(object):
+ def __init__(self):
+ object.__init__()
+
+## Dec
+#
+# This class defined the structure used in Dec object
+#
+# @param DecObject: Inherited from DecObject class
+# @param Filename: Input value for Filename of Dec file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+#                            False is not to merge all arches
+# default is False
+# @param IsToPackage: Input value for IsToPackage
+# True is to transfer to PackageObject automatically
+# False is not to transfer to PackageObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var Defines: To store value for Defines, it is a structure as DecDefines
+# @var UserExtensions: To store value for UserExtensions
+# @var Package: To store value for Package, it is a structure as PackageClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var Contents: To store value for Contents, it is a structure as DecContents
+# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
+#
+class Dec(DecObject):
+ def __init__(self, Filename = None, IsToDatabase = False, IsToPackage = False, WorkspaceDir = None, Database = None, SupArchList = DataType.ARCH_LIST):
+ self.Identification = Identification()
+ self.Package = PackageClass()
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = SupArchList
+ self.IsToDatabase = IsToDatabase
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblDec = Database.TblDec
+ self.FileID = -1
+
+ self.KeyList = [
+ TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
+ TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
+ TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
+ ]
+ #
+ # Upper all KEYs to ignore case sensitive when parsing
+ #
+ self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+ #
+ # Init RecordSet
+ #
+ self.RecordSet = {}
+ for Key in self.KeyList:
+ self.RecordSet[Section[Key]] = []
+
+ #
+ # Load Dec file if filename is not None
+ #
+ if Filename != None:
+ self.LoadDecFile(Filename)
+
+ #
+ # Transfer to Package Object if IsToPackage is True
+ #
+ if IsToPackage:
+ self.DecToPackage()
+
+ ## Load Dec file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Dec file
+ #
+ def LoadDecFile(self, Filename):
+ #
+ # Insert a record for file
+ #
+ Filename = NormPath(Filename)
+ self.Identification.FileFullPath = Filename
+ (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
+ self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DEC)
+
+ #
+ # Init DecTable
+ #
+ #self.TblDec.Table = "Dec%s" % self.FileID
+ #self.TblDec.Create()
+
+ #
+        # Init common data
+ #
+ IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+ [], [], TAB_UNKNOWN, [], [], []
+ LineNo = 0
+
+ #
+ # Parse file content
+ #
+ IsFindBlockComment = False
+ ReservedLine = ''
+ for Line in open(Filename, 'r'):
+ LineNo = LineNo + 1
+ #
+ # Remove comment block
+ #
+ if Line.find(TAB_COMMENT_R8_START) > -1:
+ ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+ IsFindBlockComment = True
+ if Line.find(TAB_COMMENT_R8_END) > -1:
+ Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ continue
+
+ #
+ # Remove comments at tail and remove spaces again
+ #
+ Line = CleanString(Line)
+ if Line == '':
+ continue
+
+ #
+ # Find a new section tab
+ # First insert previous section items
+ # And then parse the content of the new section
+ #
+ if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+ #
+ # Insert items data of previous section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+ #
+ # Parse the new section
+ #
+ SectionItemList = []
+ ArchList = []
+ ThirdList = []
+
+ CurrentSection = ''
+ LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+ for Item in LineList:
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ if CurrentSection == '':
+ CurrentSection = ItemList[0]
+ else:
+ if CurrentSection != ItemList[0]:
+ EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ if CurrentSection.upper() not in self.KeyList:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ ItemList.append('')
+ ItemList.append('')
+ if len(ItemList) > 5:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ else:
+ if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+ EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ ArchList.append(ItemList[1].upper())
+ ThirdList.append(ItemList[2])
+
+ continue
+
+ #
+ # Not in any defined section
+ #
+ if CurrentSection == TAB_UNKNOWN:
+ ErrorMsg = "%s is not in any defined section" % Line
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+ #
+ # Add a section item
+ #
+ SectionItemList.append([Line, LineNo])
+ # End of parse
+ #End of For
+
+ #
+ # Insert items data of last section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItemsIntoDatabase(self.TblDec, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+ #
+ # Replace all DEFINE macros with its actual values
+ #
+ ParseDefineMacro2(self.TblDec, self.RecordSet, GlobalData.gGlobalDefines)
+
+ ## Transfer to Package Object
+ #
+ # Transfer all contents of a Dec file to a standard Package Object
+ #
+ def DecToPackage(self):
+ #
+ # Init global information for the file
+ #
+ ContainerFile = self.Identification.FileFullPath
+
+ #
+ # Generate Package Header
+ #
+ self.GenPackageHeader(ContainerFile)
+
+ #
+ # Generate Includes
+ #
+ self.GenIncludes(ContainerFile)
+
+ #
+ # Generate Guids
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+
+ #
+ # Generate Protocols
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+
+ #
+ # Generate Ppis
+ #
+ self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+ #
+ # Generate LibraryClasses
+ #
+ self.GenLibraryClasses(ContainerFile)
+
+ #
+ # Generate Pcds
+ #
+ self.GenPcds(ContainerFile)
+
+ ## Get Package Header
+ #
+ # Gen Package Header of Dec as <Key> = <Value>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPackageHeader(self, ContainerFile):
+ EdkLogger.debug(2, "Generate PackageHeader ...")
+ #
+ # Update all defines item in database
+ #
+ RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
+ for Record in RecordSet:
+ ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
+ if len(ValueList) != 2:
+ RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2])
+ ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2]
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
+ self.TblDec.Exec(SqlCommand)
+
+ #
+ # Get detailed information
+ #
+ for Arch in self.SupArchList:
+ PackageHeader = PackageHeaderClass()
+
+ PackageHeader.Name = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_NAME, Arch, self.FileID)[0]
+ PackageHeader.Guid = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_GUID, Arch, self.FileID)[0]
+ PackageHeader.Version = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_PACKAGE_VERSION, Arch, self.FileID)[0]
+ PackageHeader.FileName = self.Identification.FileName
+ PackageHeader.FullPath = self.Identification.FileFullPath
+ PackageHeader.DecSpecification = QueryDefinesItem(self.TblDec, TAB_DEC_DEFINES_DEC_SPECIFICATION, Arch, self.FileID)[0]
+
+ self.Package.Header[Arch] = PackageHeader
+
+ ## GenIncludes
+ #
+ # Gen Includes of Dec
+ #
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenIncludes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+ Includes = {}
+ #
+ # Get all Includes
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ MergeArches(Includes, Record[0], Arch)
+
+ for Key in Includes.keys():
+ Include = IncludeClass()
+ Include.FilePath = NormPath(Key)
+ Include.SupArchList = Includes[Key]
+ self.Package.Includes.append(Include)
+
+    ## GenGuidProtocolPpis
+ #
+ # Gen Ppis of Dec
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenGuidProtocolPpis(self, Type, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+ Lists = {}
+ #
+ # Get all Items
+ #
+ RecordSet = self.RecordSet[Section[Type.upper()]]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Lists, (Name, Value), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3])
+ self.TblDec.Exec(SqlCommand)
+
+ ListMember = None
+ if Type == TAB_GUIDS:
+ ListMember = self.Package.GuidDeclarations
+ elif Type == TAB_PROTOCOLS:
+ ListMember = self.Package.ProtocolDeclarations
+ elif Type == TAB_PPIS:
+ ListMember = self.Package.PpiDeclarations
+
+ for Key in Lists.keys():
+ ListClass = GuidProtocolPpiCommonClass()
+ ListClass.CName = Key[0]
+ ListClass.Guid = Key[1]
+ ListClass.SupArchList = Lists[Key]
+ ListMember.append(ListClass)
+
+
+ ## GenLibraryClasses
+ #
+ # Gen LibraryClasses of Dec
+    # <LibraryClassName>|<LibraryClassInstanceFilename>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenLibraryClasses(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+ LibraryClasses = {}
+ #
+        # Get all LibraryClasses
+ #
+ RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
+ if len(List) != 2:
+ RaiseParserError(Record[0], 'LibraryClasses', ContainerFile, '<LibraryClassName>|<LibraryClassInstanceFilename>', Record[2])
+ else:
+ CheckFileExist(self.Identification.FileRelativePath, List[1], ContainerFile, 'LibraryClasses', Record[0])
+ MergeArches(LibraryClasses, (List[0], List[1]), Arch)
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+ where ID = %s""" % (self.TblDec.Table, ConvertToSqlString2(List[0]), ConvertToSqlString2(List[1]), SUP_MODULE_LIST_STRING, Record[3])
+ self.TblDec.Exec(SqlCommand)
+
+
+ for Key in LibraryClasses.keys():
+ LibraryClass = LibraryClassClass()
+ LibraryClass.LibraryClass = Key[0]
+ LibraryClass.RecommendedInstance = NormPath(Key[1])
+ LibraryClass.SupModuleList = SUP_MODULE_LIST
+ LibraryClass.SupArchList = LibraryClasses[Key]
+ self.Package.LibraryClassDeclarations.append(LibraryClass)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Dec
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPcds(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+ Pcds = {}
+ PcdToken = {}
+ #
+        # Get all Pcds
+ #
+ RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+ RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+ RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+ RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+ RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+ #
+ # Go through each arch
+ #
+ for Arch in self.SupArchList:
+ for Record in RecordSet1:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet2:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet3:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet4:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ for Record in RecordSet5:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+ (TokenGuidCName, TokenName, Value, DatumType, Token, Type) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenGuidCName, TokenName, Value, DatumType, Token, Type), Arch)
+ PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+ #
+ # Update to database
+ #
+ if self.IsToDatabase:
+ for Key in PcdToken.keys():
+ SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblDec.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key)
+ self.TblDec.Exec(SqlCommand)
+
+ for Key in Pcds.keys():
+ Pcd = PcdClass()
+ Pcd.CName = Key[1]
+ Pcd.Token = Key[4]
+ Pcd.TokenSpaceGuidCName = Key[0]
+ Pcd.DatumType = Key[3]
+ Pcd.DefaultValue = Key[2]
+ Pcd.ItemType = Key[5]
+ Pcd.SupArchList = Pcds[Key]
+ self.Package.PcdDeclarations.append(Pcd)
+
+ ## Show detailed information of Package
+ #
+ # Print all members and their values of Package class
+ #
+ def ShowPackage(self):
+ M = self.Package
+ for Arch in M.Header.keys():
+ print '\nArch =', Arch
+ print 'Filename =', M.Header[Arch].FileName
+ print 'FullPath =', M.Header[Arch].FullPath
+ print 'BaseName =', M.Header[Arch].Name
+ print 'Guid =', M.Header[Arch].Guid
+ print 'Version =', M.Header[Arch].Version
+ print 'DecSpecification =', M.Header[Arch].DecSpecification
+ print '\nIncludes =', M.Includes
+ for Item in M.Includes:
+ print Item.FilePath, Item.SupArchList
+ print '\nGuids =', M.GuidDeclarations
+ for Item in M.GuidDeclarations:
+ print Item.CName, Item.Guid, Item.SupArchList
+ print '\nProtocols =', M.ProtocolDeclarations
+ for Item in M.ProtocolDeclarations:
+ print Item.CName, Item.Guid, Item.SupArchList
+ print '\nPpis =', M.PpiDeclarations
+ for Item in M.PpiDeclarations:
+ print Item.CName, Item.Guid, Item.SupArchList
+ print '\nLibraryClasses =', M.LibraryClassDeclarations
+ for Item in M.LibraryClassDeclarations:
+ print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
+ print '\nPcds =', M.PcdDeclarations
+ for Item in M.PcdDeclarations:
+ print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+
+ W = os.getenv('WORKSPACE')
+ F = os.path.join(W, 'Nt32Pkg/Nt32Pkg.dec')
+
+ Db = Database.Database('Dec.db')
+ Db.InitDatabase()
+
+ P = Dec(os.path.normpath(F), True, True, W, Db)
+ P.ShowPackage()
+
+ Db.Close()
diff --git a/BaseTools/Source/Python/Common/DecClassObjectLight.py b/BaseTools/Source/Python/Common/DecClassObjectLight.py
new file mode 100644
index 0000000000..7c572a56f0
--- /dev/null
+++ b/BaseTools/Source/Python/Common/DecClassObjectLight.py
@@ -0,0 +1,580 @@
+## @file
+# This file is used to define each component of DEC file in light mode
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+from Misc import GetFiles
+from String import *
+from DataType import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass import CommonClass
+from BuildToolError import *
+from Parsing import *
+
+# Global variable
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+ TAB_DEC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+ TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+ TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+ TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+ TAB_GUIDS.upper() : MODEL_EFI_GUID,
+ TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+ TAB_PPIS.upper() : MODEL_EFI_PPI,
+ TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+ TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+ TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+ TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+ TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+ }
+
+## DecObject
+#
+# This class defined basic Dec object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
+class DecObject(object):
+ def __init__(self):
+ object.__init__()
+
+## Dec
+#
+# This class defined the structure used in Dec object
+#
+# @param DecObject: Inherited from DecObject class
+# @param Filename: Input value for Filename of Dec file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+#                            False is not to merge all arches
+# default is False
+# @param IsToPackage: Input value for IsToPackage
+# True is to transfer to PackageObject automatically
+# False is not to transfer to PackageObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var Defines: To store value for Defines, it is a structure as DecDefines
+# @var UserExtensions: To store value for UserExtensions
+# @var Package: To store value for Package, it is a structure as PackageClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var Contents: To store value for Contents, it is a structure as DecContents
+# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
+#
+class Dec(DecObject):
+ def __init__(self, Filename = None, IsToPackage = False, WorkspaceDir = None, AllGuidVersionDict = None, SupArchList = DataType.ARCH_LIST):
+ self.Identification = IdentificationClass()
+ self.Package = PackageClass()
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.SupArchList = SupArchList
+ self.AllGuidVersionDict = {}
+ if AllGuidVersionDict:
+ self.AllGuidVersionDict = AllGuidVersionDict
+
+ self.KeyList = [
+ TAB_INCLUDES, TAB_GUIDS, TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, \
+ TAB_PCDS_FIXED_AT_BUILD_NULL, TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
+ TAB_PCDS_DYNAMIC_NULL, TAB_PCDS_DYNAMIC_EX_NULL, TAB_DEC_DEFINES
+ ]
+ # Upper all KEYs to ignore case sensitive when parsing
+ self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+ # Init RecordSet
+ self.RecordSet = {}
+ for Key in self.KeyList:
+ self.RecordSet[Section[Key]] = []
+
+ # Init Comment
+ self.SectionHeaderCommentDict = {}
+
+ # Load Dec file if filename is not None
+ if Filename != None:
+ self.LoadDecFile(Filename)
+
+ # Transfer to Package Object if IsToPackage is True
+ if IsToPackage:
+ self.DecToPackage()
+
+ ## Load Dec file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Dec file
+ #
+ def LoadDecFile(self, Filename):
+ # Insert a record for file
+ Filename = NormPath(Filename)
+ self.Identification.FullPath = Filename
+ (self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
+ if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
+ self.Identification.PackagePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])
+
+        # Init common data
+ IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+ [], [], TAB_UNKNOWN, [], [], []
+ LineNo = 0
+
+ # Parse file content
+ IsFindBlockComment = False
+ ReservedLine = ''
+ Comment = ''
+ for Line in open(Filename, 'r'):
+ LineNo = LineNo + 1
+ # Remove comment block
+ if Line.find(TAB_COMMENT_R8_START) > -1:
+ ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+ if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
+ Comment = Comment + Line.strip() + '\n'
+ ReservedLine = ''
+ else:
+ Comment = Comment + Line[len(ReservedLine):] + '\n'
+ IsFindBlockComment = True
+ if not ReservedLine:
+ continue
+ if Line.find(TAB_COMMENT_R8_END) > -1:
+ Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
+ Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+ ReservedLine = ''
+ IsFindBlockComment = False
+ if IsFindBlockComment:
+ Comment = Comment + Line.strip() + '\n'
+ continue
+
+ # Remove comments at tail and remove spaces again
+ if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
+ Comment = Comment + Line.strip() + '\n'
+ Line = CleanString(Line)
+ if Line == '':
+ continue
+
+ ## Find a new section tab
+ # First insert previous section items
+ # And then parse the content of the new section
+ #
+ if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+ # Insert items data of previous section
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
+ # Parse the new section
+ SectionItemList = []
+ ArchList = []
+ ThirdList = []
+
+ CurrentSection = ''
+ LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+ for Item in LineList:
+ ItemList = GetSplitValueList(Item, TAB_SPLIT)
+ if CurrentSection == '':
+ CurrentSection = ItemList[0]
+ else:
+ if CurrentSection != ItemList[0]:
+ EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ if CurrentSection.upper() not in self.KeyList:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ ItemList.append('')
+ ItemList.append('')
+ if len(ItemList) > 5:
+ RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+ else:
+ if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+ EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+ ArchList.append(ItemList[1].upper())
+ ThirdList.append(ItemList[2])
+
+ if Comment:
+ if Comment.endswith('\n'):
+ Comment = Comment[:len(Comment) - len('\n')]
+ self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
+ Comment = ''
+ continue
+
+ # Not in any defined section
+ if CurrentSection == TAB_UNKNOWN:
+ ErrorMsg = "%s is not in any defined section" % Line
+ EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+ # Add a section item
+ SectionItemList.append([Line, LineNo, Comment])
+ Comment = ''
+ # End of parse
+ #End of For
+
+ #
+ # Insert items data of last section
+ #
+ Model = Section[CurrentSection.upper()]
+ InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
+ if Comment != '':
+ self.SectionHeaderCommentDict[Model] = Comment
+ Comment = ''
+
+ ## Package Object to DEC file
+ def PackageToDec(self, Package):
+ Dec = ''
+ DecList = sdict()
+ SectionHeaderCommentDict = {}
+ if Package == None:
+ return Dec
+
+ PackageHeader = Package.PackageHeader
+ TmpList = []
+ if PackageHeader.Name:
+ TmpList.append(TAB_DEC_DEFINES_PACKAGE_NAME + ' = ' + PackageHeader.Name)
+ if PackageHeader.Guid:
+ TmpList.append(TAB_DEC_DEFINES_PACKAGE_GUID + ' = ' + PackageHeader.Guid)
+ if PackageHeader.Version:
+ TmpList.append(TAB_DEC_DEFINES_PACKAGE_VERSION + ' = ' + PackageHeader.Version)
+ if PackageHeader.DecSpecification:
+ TmpList.append(TAB_DEC_DEFINES_DEC_SPECIFICATION + ' = ' + PackageHeader.DecSpecification)
+ if Package.UserExtensions != None:
+ for Item in Package.UserExtensions.Defines:
+ TmpList.append(Item)
+ DecList['Defines'] =TmpList
+ if PackageHeader.Description != '':
+ SectionHeaderCommentDict['Defines'] = PackageHeader.Description
+
+ for Item in Package.Includes:
+ Key = 'Includes.' + Item.SupArchList
+ Value = Item.FilePath
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+ for Item in Package.GuidDeclarations:
+ Key = 'Guids.' + Item.SupArchList
+ Value = Item.CName + '=' + Item.Guid
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+ for Item in Package.ProtocolDeclarations:
+ Key = 'Protocols.' + Item.SupArchList
+ Value = Item.CName + '=' + Item.Guid
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+ for Item in Package.PpiDeclarations:
+ Key = 'Ppis.' + Item.SupArchList
+ Value = Item.CName + '=' + Item.Guid
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+ for Item in Package.LibraryClassDeclarations:
+ Key = 'LibraryClasses.' + Item.SupArchList
+ Value = Item.LibraryClass + '|' + Item.RecommendedInstance
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+ for Item in Package.PcdDeclarations:
+ Key = 'Pcds' + Item.ItemType + '.' + Item.SupArchList
+ Value = Item.TokenSpaceGuidCName + '.' + Item.CName
+ if Item.DefaultValue != '':
+ Value = Value + '|' + Item.DefaultValue
+ if Item.DatumType != '':
+ Value = Value + '|' + Item.DatumType
+ if Item.Token != '':
+ Value = Value + '|' + Item.Token
+ GenMetaDatSectionItem(Key, Value, DecList)
+
+        # Transfer Package to Dec
+ for Key in DecList:
+ if Key in SectionHeaderCommentDict:
+ List = SectionHeaderCommentDict[Key].split('\r')
+ for Item in List:
+ Dec = Dec + Item + '\n'
+ Dec = Dec + '[' + Key + ']' + '\n'
+ for Value in DecList[Key]:
+ if type(Value) == type([]):
+ for SubValue in Value:
+ Dec = Dec + ' ' + SubValue + '\n'
+ else:
+ Dec = Dec + ' ' + Value + '\n'
+ Dec = Dec + '\n'
+
+ return Dec
+
+ ## Transfer to Package Object
+ #
+ # Transfer all contents of a Dec file to a standard Package Object
+ #
+ def DecToPackage(self):
+ # Init global information for the file
+ ContainerFile = self.Identification.FullPath
+
+ # Generate Package Header
+ self.GenPackageHeader(ContainerFile)
+
+ # Generate Includes
+ # Only for R8
+ self.GenIncludes(ContainerFile)
+
+ # Generate Guids
+ self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+
+ # Generate Protocols
+ self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+
+ # Generate Ppis
+ self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+ # Generate LibraryClasses
+ self.GenLibraryClasses(ContainerFile)
+
+ # Generate Pcds
+ self.GenPcds(ContainerFile)
+
+ # Init MiscFiles
+ self.GenMiscFiles(ContainerFile)
+
+ ## GenMiscFiles
+ #
+ def GenMiscFiles(self, ContainerFile):
+ MiscFiles = MiscFileClass()
+ MiscFiles.Name = 'ModuleFiles'
+ for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
+ File = CommonClass.FileClass()
+ File.Filename = Item
+ MiscFiles.Files.append(File)
+ self.Package.MiscFiles = MiscFiles
+
+    ## Get Package Header
+    #
+    # Gen Package Header of Dec as <Key> = <Value>
+    #
+    # The four well-known keys (PACKAGE_NAME, PACKAGE_GUID, PACKAGE_VERSION,
+    # DEC_SPECIFICATION) populate a PackageHeaderClass; every other record,
+    # including lines that do not split into exactly <Key> = <Value>, is
+    # preserved verbatim as a UserExtensions define.
+    #
+    # @param ContainerFile: The Dec file full path
+    #
+    def GenPackageHeader(self, ContainerFile):
+        EdkLogger.debug(2, "Generate PackageHeader ...")
+        #
+        # Walk every record of the [Defines] section
+        #
+        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
+        PackageHeader = PackageHeaderClass()
+        OtherDefines = []
+        for Record in RecordSet:
+            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
+            if len(ValueList) != 2:
+                # Malformed define: keep the raw text instead of discarding it
+                OtherDefines.append(Record[0])
+            else:
+                Name = ValueList[0]
+                Value = ValueList[1]
+                if Name == TAB_DEC_DEFINES_PACKAGE_NAME:
+                    PackageHeader.Name = Value
+                elif Name == TAB_DEC_DEFINES_PACKAGE_GUID:
+                    PackageHeader.Guid = Value
+                elif Name == TAB_DEC_DEFINES_PACKAGE_VERSION:
+                    PackageHeader.Version = Value
+                elif Name == TAB_DEC_DEFINES_DEC_SPECIFICATION:
+                    PackageHeader.DecSpecification = Value
+                else:
+                    # Unrecognized key: preserved as a user-extension define
+                    OtherDefines.append(Record[0])
+
+        # Path/identity fields come from the Identification filled at load time
+        PackageHeader.FileName = self.Identification.FileName
+        PackageHeader.FullPath = self.Identification.FullPath
+        PackageHeader.RelaPath = self.Identification.RelaPath
+        PackageHeader.PackagePath = self.Identification.PackagePath
+        PackageHeader.ModulePath = self.Identification.ModulePath
+        PackageHeader.CombinePath = os.path.normpath(os.path.join(PackageHeader.PackagePath, PackageHeader.ModulePath, PackageHeader.FileName))
+
+        # The comment block preceding [Defines] becomes the description
+        if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
+            PackageHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]
+
+        self.Package.PackageHeader = PackageHeader
+        UE = UserExtensionsClass()
+        UE.Defines = OtherDefines
+        self.Package.UserExtensions = UE
+
+
+ ## GenIncludes
+ #
+ # Gen Includes of Dec
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenIncludes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+ Includes = {}
+ # Get all Includes
+ RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+
+ # Go through each arch
+ for Record in RecordSet:
+ Arch = Record[1]
+ Key = Record[0]
+ Include = IncludeClass()
+ Include.FilePath = NormPath(Key)
+ Include.SupArchList = Arch
+ self.Package.Includes.append(Include)
+
+ ## GenPpis
+ #
+ # Gen Ppis of Dec
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenGuidProtocolPpis(self, Type, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+ Lists = {}
+ # Get all Items
+ RecordSet = self.RecordSet[Section[Type.upper()]]
+
+ # Go through each arch
+ for Record in RecordSet:
+ Arch = Record[1]
+ (Name, Value) = GetGuidsProtocolsPpisOfDec(Record[0], Type, ContainerFile, Record[2])
+
+ ListMember = None
+ if Type == TAB_GUIDS:
+ ListMember = self.Package.GuidDeclarations
+ elif Type == TAB_PROTOCOLS:
+ ListMember = self.Package.ProtocolDeclarations
+ elif Type == TAB_PPIS:
+ ListMember = self.Package.PpiDeclarations
+
+ ListClass = GuidProtocolPpiCommonClass()
+ ListClass.CName = Name
+ ListClass.Guid = Value
+ ListClass.SupArchList = Arch
+ ListMember.append(ListClass)
+
+ ## GenLibraryClasses
+ #
+ # Gen LibraryClasses of Dec
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenLibraryClasses(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+ LibraryClasses = {}
+ # Get all Guids
+ RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
+
+ # Go through each arch
+ for Record in RecordSet:
+ Arch = Record[1]
+ List = GetSplitValueList(Record[0], DataType.TAB_VALUE_SPLIT)
+ if len(List) != 2:
+ continue
+ LibraryClass = LibraryClassClass()
+ LibraryClass.LibraryClass = List[0]
+ LibraryClass.RecommendedInstance = NormPath(List[1])
+ LibraryClass.SupArchList = Arch
+ self.Package.LibraryClassDeclarations.append(LibraryClass)
+
+ def AddPcd(self, CName, Token, TokenSpaceGuidCName, DatumType, DefaultValue, ItemType, Arch):
+ Pcd = CommonClass.PcdClass()
+ Pcd.CName = CName
+ Pcd.Token = Token
+ Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
+ Pcd.DatumType = DatumType
+ Pcd.DefaultValue = DefaultValue
+ Pcd.ItemType = ItemType
+ Pcd.SupArchList = Arch
+ self.Package.PcdDeclarations.append(Pcd)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Dec
+ # <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+ def GenPcds(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+ Pcds = {}
+ PcdToken = {}
+ # Get all Pcds
+ RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+ RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+ RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+ RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+ RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+ # Go through each pcd
+ for Record in RecordSet1:
+ Arch = Record[1]
+ (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+ self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
+ for Record in RecordSet2:
+ Arch = Record[1]
+ (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+ self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
+ for Record in RecordSet3:
+ Arch = Record[1]
+ (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+ self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
+ for Record in RecordSet4:
+ Arch = Record[1]
+ (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+ self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
+ for Record in RecordSet5:
+ Arch = Record[1]
+ (TokenGuidCName, TokenName, DefaultValue, DatumType, Token, ItemType) = GetPcdOfDec(Record[0], TAB_PCDS_DYNAMIC, ContainerFile, Record[2])
+ self.AddPcd(TokenName, Token, TokenGuidCName, DatumType, DefaultValue, ItemType, Arch)
+
+    ## Show detailed information of Package
+    #
+    # Print all members and their values of Package class
+    #
+    # Debug helper (Python 2 print statements): dumps the header fields,
+    # then each declaration list, then the collected misc files.
+    #
+    def ShowPackage(self):
+        M = self.Package
+        print 'Filename =', M.PackageHeader.FileName
+        print 'FullPath =', M.PackageHeader.FullPath
+        print 'RelaPath =', M.PackageHeader.RelaPath
+        print 'PackagePath =', M.PackageHeader.PackagePath
+        print 'ModulePath =', M.PackageHeader.ModulePath
+        print 'CombinePath =', M.PackageHeader.CombinePath
+
+        print 'BaseName =', M.PackageHeader.Name
+        print 'Guid =', M.PackageHeader.Guid
+        print 'Version =', M.PackageHeader.Version
+        print 'DecSpecification =', M.PackageHeader.DecSpecification
+
+        # Each list is printed item-by-item rather than as the raw list
+        print '\nIncludes ='#, M.Includes
+        for Item in M.Includes:
+            print Item.FilePath, Item.SupArchList
+        print '\nGuids ='#, M.GuidDeclarations
+        for Item in M.GuidDeclarations:
+            print Item.CName, Item.Guid, Item.SupArchList
+        print '\nProtocols ='#, M.ProtocolDeclarations
+        for Item in M.ProtocolDeclarations:
+            print Item.CName, Item.Guid, Item.SupArchList
+        print '\nPpis ='#, M.PpiDeclarations
+        for Item in M.PpiDeclarations:
+            print Item.CName, Item.Guid, Item.SupArchList
+        print '\nLibraryClasses ='#, M.LibraryClassDeclarations
+        for Item in M.LibraryClassDeclarations:
+            print Item.LibraryClass, Item.RecommendedInstance, Item.SupModuleList, Item.SupArchList
+        print '\nPcds ='#, M.PcdDeclarations
+        for Item in M.PcdDeclarations:
+            print 'CName=', Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, 'Token=', Item.Token, 'DatumType=', Item.DatumType, Item.SupArchList
+        print '\nUserExtensions =', M.UserExtensions.Defines
+        print '\n*** FileList ***'
+        for Item in M.MiscFiles.Files:
+            print Item.Filename
+        print '****************\n'
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    EdkLogger.Initialize()
+    EdkLogger.SetLevel(EdkLogger.QUIET)
+
+    # WORKSPACE must be set in the environment; if it is not, W is None and
+    # os.path.join below will raise a TypeError.
+    W = os.getenv('WORKSPACE')
+    F = os.path.join(W, 'MdeModulePkg/MdeModulePkg.dec')
+
+    # Parse the DEC file and dump it (second argument presumably enables
+    # conversion to the Package object -- TODO confirm against Dec.__init__)
+    P = Dec(os.path.normpath(F), True, W)
+    P.ShowPackage()
+    print P.PackageToDec(P.Package)
diff --git a/BaseTools/Source/Python/Common/Dictionary.py b/BaseTools/Source/Python/Common/Dictionary.py
new file mode 100644
index 0000000000..3c968f5ec6
--- /dev/null
+++ b/BaseTools/Source/Python/Common/Dictionary.py
@@ -0,0 +1,75 @@
+## @file
+# Define a dictionary structure
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import EdkLogger
+from DataType import *
+
+## Convert a text file to a dictionary
+#
+# Convert a text file to a dictionary of (name:value) pairs.
+#
+# @retval 0 Convert successful
+# @retval 1 Open file failed
+#
+def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
+ try:
+ F = open(FileName,'r')
+ Keys = []
+ for Line in F:
+ if Line.startswith(CommentCharacter):
+ continue
+ LineList = Line.split(KeySplitCharacter,1)
+ if len(LineList) >= 2:
+ Key = LineList[0].split()
+ if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
+ if ValueSplitFlag:
+ Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
+ else:
+ Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
+ Keys += [Key[0]]
+ F.close()
+ return 0
+ except:
+ EdkLogger.info('Open file failed')
+ return 1
+
+## Print the dictionary
+#
+# Print all items of dictionary one by one
+#
+# @param Dict: The dictionary to be printed
+#
+def printDict(Dict):
+ if Dict != None:
+ KeyList = Dict.keys()
+ for Key in KeyList:
+ if Dict[Key] != '':
+ print Key + ' = ' + str(Dict[Key])
+
+## Print the dictionary
+#
+# Print the items of dictionary which matched with input key
+#
+# @param list: The dictionary to be printed
+# @param key: The key of the item to be printed
+#
+def printList(Key, List):
+ if type(List) == type([]):
+ if len(List) > 0:
+ if key.find(TAB_SPLIT) != -1:
+ print "\n" + Key
+ for Item in List:
+ print Item
diff --git a/BaseTools/Source/Python/Common/DscClassObject.py b/BaseTools/Source/Python/Common/DscClassObject.py
new file mode 100644
index 0000000000..ddccf6507d
--- /dev/null
+++ b/BaseTools/Source/Python/Common/DscClassObject.py
@@ -0,0 +1,1434 @@
+## @file
+# This file is used to define each component of DSC file
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import EdkLogger as EdkLogger
+import Database
+from String import *
+from Parsing import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from CommonDataClass.PlatformClass import *
+from CommonDataClass.CommonClass import SkuInfoClass
+from BuildToolError import *
+from Misc import sdict
+import GlobalData
+from Table.TableDsc import TableDsc
+
+#
+# Global variable
+#
+# Maps each DSC section name (upper-cased, so lookup is case-insensitive)
+# to the MODEL_* constant under which its records are stored in the
+# parse database.
+#
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+           TAB_DSC_DEFINES.upper() : MODEL_META_DATA_HEADER,
+           TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+           TAB_SKUIDS.upper() : MODEL_EFI_SKU_ID,
+           TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+           TAB_PCDS_FIXED_AT_BUILD_NULL.upper() : MODEL_PCD_FIXED_AT_BUILD,
+           TAB_PCDS_PATCHABLE_IN_MODULE_NULL.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+           TAB_PCDS_FEATURE_FLAG_NULL.upper() : MODEL_PCD_FEATURE_FLAG,
+           TAB_PCDS_DYNAMIC_EX_NULL.upper() : MODEL_PCD_DYNAMIC_EX,
+           TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_EX_DEFAULT,
+           TAB_PCDS_DYNAMIC_EX_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_EX_VPD,
+           TAB_PCDS_DYNAMIC_EX_HII_NULL.upper() : MODEL_PCD_DYNAMIC_EX_HII,
+           TAB_PCDS_DYNAMIC_NULL.upper() : MODEL_PCD_DYNAMIC,
+           TAB_PCDS_DYNAMIC_DEFAULT_NULL.upper() : MODEL_PCD_DYNAMIC_DEFAULT,
+           TAB_PCDS_DYNAMIC_VPD_NULL.upper() : MODEL_PCD_DYNAMIC_VPD,
+           TAB_PCDS_DYNAMIC_HII_NULL.upper() : MODEL_PCD_DYNAMIC_HII,
+           TAB_COMPONENTS.upper() : MODEL_META_DATA_COMPONENT,
+           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+           }
+
+## DscObject
+#
+# This class defined basic Dsc object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
+class DscObject(object):
+ def __init__(self):
+ object.__init__()
+
+## Dsc
+#
+# This class defined the structure used in Dsc object
+#
+# @param DscObject: Inherited from InfObject class
+# @param Ffilename: Input value for Ffilename of Inf file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# Fales is not to merge all arches
+# default is False
+# @param IsToPlatform: Input value for IsToPlatform
+# True is to transfer to ModuleObject automatically
+# False is not to transfer to ModuleObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var _NullClassIndex: To store value for _NullClassIndex, default is 0
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var Defines: To store value for Defines, it is a structure as DscDefines
+# @var Contents: To store value for Contents, it is a structure as DscContents
+# @var UserExtensions: To store value for UserExtensions
+# @var Platform: To store value for Platform, it is a structure as PlatformClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var KeyList: To store value for KeyList, a list for all Keys used in Dec
+#
+class Dsc(DscObject):
+ _NullClassIndex = 0
+
+ def __init__(self, Filename = None, IsToDatabase = False, IsToPlatform = False, WorkspaceDir = None, Database = None):
+ self.Identification = Identification()
+ self.Platform = PlatformClass()
+ self.UserExtensions = ''
+ self.WorkspaceDir = WorkspaceDir
+ self.IsToDatabase = IsToDatabase
+
+ self.Cur = Database.Cur
+ self.TblFile = Database.TblFile
+ self.TblDsc = Database.TblDsc
+
+
+ self.KeyList = [
+ TAB_SKUIDS, TAB_LIBRARIES, TAB_LIBRARY_CLASSES, TAB_BUILD_OPTIONS, TAB_PCDS_FIXED_AT_BUILD_NULL, \
+ TAB_PCDS_PATCHABLE_IN_MODULE_NULL, TAB_PCDS_FEATURE_FLAG_NULL, \
+ TAB_PCDS_DYNAMIC_DEFAULT_NULL, TAB_PCDS_DYNAMIC_HII_NULL, TAB_PCDS_DYNAMIC_VPD_NULL, \
+ TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, TAB_PCDS_DYNAMIC_EX_HII_NULL, TAB_PCDS_DYNAMIC_EX_VPD_NULL, \
+ TAB_COMPONENTS, TAB_DSC_DEFINES
+ ]
+
+ self.PcdToken = {}
+
+ #
+ # Upper all KEYs to ignore case sensitive when parsing
+ #
+ self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+ #
+ # Init RecordSet
+ #
+# self.RecordSet = {}
+# for Key in self.KeyList:
+# self.RecordSet[Section[Key]] = []
+
+ #
+ # Load Dsc file if filename is not None
+ #
+ if Filename != None:
+ self.LoadDscFile(Filename)
+
+ #
+ # Transfer to Platform Object if IsToPlatform is True
+ #
+ if IsToPlatform:
+ self.DscToPlatform()
+
+    ## Transfer to Platform Object
+    #
+    # Transfer all contents of a Dsc file to a standard Platform Object
+    # (the original comment said "Inf file ... Module Object" -- a
+    # copy/paste leftover). Calls one Gen* method per DSC section, then
+    # flushes the collected PCD token names back to the database.
+    #
+    def DscToPlatform(self):
+        #
+        # Init global information for the file
+        #
+        ContainerFile = self.Identification.FileFullPath
+
+        #
+        # Generate Platform Header
+        #
+        self.GenPlatformHeader(ContainerFile)
+
+        #
+        # Generate BuildOptions
+        #
+        self.GenBuildOptions(ContainerFile)
+
+        #
+        # Generate SkuInfos
+        #
+        self.GenSkuInfos(ContainerFile)
+
+        #
+        # Generate Libraries
+        #
+        self.GenLibraries(ContainerFile)
+
+        #
+        # Generate LibraryClasses
+        #
+        self.GenLibraryClasses(ContainerFile)
+
+        #
+        # Generate Pcds (one call per PCD section flavor)
+        #
+        self.GenPcds(DataType.TAB_PCDS_FIXED_AT_BUILD, ContainerFile)
+        self.GenPcds(DataType.TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile)
+        self.GenFeatureFlagPcds(DataType.TAB_PCDS_FEATURE_FLAG, ContainerFile)
+        self.GenDynamicDefaultPcds(DataType.TAB_PCDS_DYNAMIC_DEFAULT, ContainerFile)
+        self.GenDynamicDefaultPcds(DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT, ContainerFile)
+        self.GenDynamicHiiPcds(DataType.TAB_PCDS_DYNAMIC_HII, ContainerFile)
+        self.GenDynamicHiiPcds(DataType.TAB_PCDS_DYNAMIC_EX_HII, ContainerFile)
+        self.GenDynamicVpdPcds(DataType.TAB_PCDS_DYNAMIC_VPD, ContainerFile)
+        self.GenDynamicVpdPcds(DataType.TAB_PCDS_DYNAMIC_EX_VPD, ContainerFile)
+
+        #
+        # Generate Components
+        #
+        self.GenComponents(ContainerFile)
+
+        #
+        # Update to database: set each PCD record's Value2 column to
+        # 'TokenSpaceGuidCName.TokenCName' as collected in self.PcdToken
+        #
+        if self.IsToDatabase:
+            for Key in self.PcdToken.keys():
+                SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblDsc.Table, ".".join((self.PcdToken[Key][0], self.PcdToken[Key][1])), Key)
+                self.TblDsc.Exec(SqlCommand)
+    #End of DscToPlatform
+
+    ## Get Platform Header
+    #
+    # Gen Platform Header of Dsc as <Key> = <Value>
+    #
+    # First pass splits every [Defines] record into Value1/Value2 columns
+    # in the database; second pass builds one PlatformHeaderClass per arch
+    # from the stored defines.
+    #
+    # @param ContainerFile: The Dsc file full path
+    #
+    def GenPlatformHeader(self, ContainerFile):
+        EdkLogger.debug(2, "Generate PlatformHeader ...")
+        #
+        # Update all defines item in database
+        # (self.FileID is presumably assigned by LoadDscFile -- TODO confirm)
+        #
+        SqlCommand = """select ID, Value1, Arch, StartLine from %s
+                        where Model = %s
+                        and BelongsToFile = %s
+                        and Enabled > -1""" % (self.TblDsc.Table, MODEL_META_DATA_HEADER, self.FileID)
+        RecordSet = self.TblDsc.Exec(SqlCommand)
+        for Record in RecordSet:
+            ValueList = GetSplitValueList(Record[1], TAB_EQUAL_SPLIT)
+            if len(ValueList) != 2:
+                # A define must be exactly <Key> = <Value>
+                RaiseParserError(Record[1], 'Defines', ContainerFile, '<Key> = <Value>', Record[3])
+            ID, Value1, Value2, Arch = Record[0], ValueList[0], ValueList[1], Record[2]
+            SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+                            where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
+            self.TblDsc.Exec(SqlCommand)
+
+        #
+        # Get detailed information
+        #
+        for Arch in DataType.ARCH_LIST:
+            PlatformHeader = PlatformHeaderClass()
+
+            # Single-valued defines: QueryDefinesItem returns a list, [0] taken
+            PlatformHeader.Name = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_NAME, Arch, self.FileID)[0]
+            PlatformHeader.Guid = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_GUID, Arch, self.FileID)[0]
+            PlatformHeader.Version = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_PLATFORM_VERSION, Arch, self.FileID)[0]
+            PlatformHeader.FileName = self.Identification.FileName
+            PlatformHeader.FullPath = self.Identification.FileFullPath
+            PlatformHeader.DscSpecification = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_DSC_SPECIFICATION, Arch, self.FileID)[0]
+
+            # List-valued defines keep the whole query result
+            PlatformHeader.SkuIdName = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_SKUID_IDENTIFIER, Arch, self.FileID)
+            PlatformHeader.SupArchList = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_SUPPORTED_ARCHITECTURES, Arch, self.FileID)
+            PlatformHeader.BuildTargets = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BUILD_TARGETS, Arch, self.FileID)
+            PlatformHeader.OutputDirectory = NormPath(QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_OUTPUT_DIRECTORY, Arch, self.FileID)[0])
+            PlatformHeader.BuildNumber = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BUILD_NUMBER, Arch, self.FileID)[0]
+            PlatformHeader.MakefileName = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_MAKEFILE_NAME, Arch, self.FileID)[0]
+
+            PlatformHeader.BsBaseAddress = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_BS_BASE_ADDRESS, Arch, self.FileID)[0]
+            PlatformHeader.RtBaseAddress = QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_RT_BASE_ADDRESS, Arch, self.FileID)[0]
+
+            self.Platform.Header[Arch] = PlatformHeader
+            # Overwritten every iteration: the last arch in ARCH_LIST wins
+            Fdf = PlatformFlashDefinitionFileClass()
+            Fdf.FilePath = NormPath(QueryDefinesItem(self.TblDsc, TAB_DSC_DEFINES_FLASH_DEFINITION, Arch, self.FileID)[0])
+            self.Platform.FlashDefinitionFile = Fdf
+
+ ## GenBuildOptions
+ #
+ # Gen BuildOptions of Dsc
+ # [<Family>:]<ToolFlag>=Flag
+ #
+ # @param ContainerFile: The Dsc file full path
+ #
+ def GenBuildOptions(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
+ BuildOptions = {}
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_META_DATA_BUILD_OPTION, self.FileID)
+
+ #
+ # Get all BuildOptions
+ #
+ RecordSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_BUILD_OPTION, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_BUILD_OPTIONS, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (Family, ToolChain, Flag) = GetBuildOption(NewItem, Filename, -1)
+ MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (Family, ToolChain, Flag) = GetBuildOption(Record[0], ContainerFile, Record[2])
+ MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)
+ #
+ # Update to Database
+ #
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+ where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(Family), ConvertToSqlString2(ToolChain), ConvertToSqlString2(Flag), Record[3])
+ self.TblDsc.Exec(SqlCommand)
+
+ for Key in BuildOptions.keys():
+ BuildOption = BuildOptionClass(Key[0], Key[1], Key[2])
+ BuildOption.SupArchList = BuildOptions[Key]
+ self.Platform.BuildOptions.BuildOptionList.append(BuildOption)
+
+ ## GenSkuInfos
+ #
+ # Gen SkuInfos of Dsc
+ # <Integer>|<UiName>
+ #
+ # @param ContainerFile: The Dsc file full path
+ #
+ def GenSkuInfos(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_SKUIDS)
+ #
+ # SkuIds
+ # <Integer>|<UiName>
+ #
+ self.Platform.SkuInfos.SkuInfoList['DEFAULT'] = '0'
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_SKU_ID, self.FileID)
+
+ #
+ # Get all SkuInfos
+ #
+ RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_SKU_ID, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_SKUIDS, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ List = GetSplitValueList(NewItem)
+ if len(List) != 2:
+ RaiseParserError(NewItem, TAB_SKUIDS, Filename, '<Integer>|<UiName>')
+ else:
+ self.Platform.SkuInfos.SkuInfoList[List[1]] = List[0]
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ List = GetSplitValueList(Record[0])
+ if len(List) != 2:
+ RaiseParserError(Record[0], TAB_SKUIDS, ContainerFile, '<Integer>|<UiName>')
+ else:
+ self.Platform.SkuInfos.SkuInfoList[List[1]] = List[0]
+ #
+ # Update to Database
+ #
+ if self.IsToDatabase:
+ SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+ where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(List[0]), ConvertToSqlString2(List[1]), Record[3])
+ self.TblDsc.Exec(SqlCommand)
+
+    ## GenLibraries
+    #
+    # Gen Libraries of Dsc
+    # <PathAndFilename>
+    #
+    # @param ContainerFile: The Dsc file full path
+    #
+    def GenLibraries(self, ContainerFile):
+        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARIES)
+        Libraries = {}
+        #
+        # Get all include files
+        #
+        IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_LIBRARY_INSTANCE, self.FileID)
+
+        #
+        # Get all Libraries
+        #
+        RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_INSTANCE, -1, self.FileID)
+
+        #
+        # Go through each arch; MergeArches accumulates, per library line,
+        # the arches it applies to
+        #
+        for Arch in DataType.ARCH_LIST:
+            for IncludeFile in IncludeFiles:
+                if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+                    Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_LIBRARIES, '', IncludeFile[2])
+                    # NOTE(review): file handle is never closed explicitly;
+                    # relies on garbage collection
+                    for NewItem in open(Filename, 'r').readlines():
+                        if CleanString(NewItem) == '':
+                            continue
+                        MergeArches(Libraries, NewItem, Arch)
+
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+                    MergeArches(Libraries, Record[0], Arch)
+
+        for Key in Libraries.keys():
+            Library = PlatformLibraryClass()
+            Library.FilePath = NormPath(Key)
+            Library.SupArchList = Libraries[Key]
+            self.Platform.Libraries.LibraryList.append(Library)
+
+    ## GenLibraryClasses
+    #
+    # Get LibraryClasses of Dsc
+    # <LibraryClassKeyWord>|<LibraryInstance>
+    #
+    # @param ContainerFile: The Dsc file full path
+    #
+    def GenLibraryClasses(self, ContainerFile):
+        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+        LibraryClasses = {}
+        #
+        # Get all include files
+        #
+        IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_EFI_LIBRARY_CLASS, self.FileID)
+
+        #
+        # Get all LibraryClasses
+        #
+        RecordSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_CLASS, -1, self.FileID)
+
+        #
+        # Go through each arch; MergeArches accumulates, per
+        # (name, instance, modules) triple, the arches it applies to
+        #
+        for Arch in DataType.ARCH_LIST:
+            for IncludeFile in IncludeFiles:
+                if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+                    Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_LIBRARY_CLASSES, '', IncludeFile[2])
+                    for NewItem in open(Filename, 'r').readlines():
+                        if CleanString(NewItem) == '':
+                            continue
+                        MergeArches(LibraryClasses, GetLibraryClass([NewItem, IncludeFile[4]], Filename, self.WorkspaceDir, -1), Arch)
+
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+                    (LibClassName, LibClassIns, SupModelList) = GetLibraryClass([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
+                    MergeArches(LibraryClasses, (LibClassName, LibClassIns, SupModelList), Arch)
+                #
+                # Update to Database
+                # (per-record: each row gets its own parsed name/instance/modules)
+                #
+                if self.IsToDatabase:
+                    SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+                                    where ID = %s""" % (self.TblDsc.Table, ConvertToSqlString2(LibClassName), ConvertToSqlString2(LibClassIns), ConvertToSqlString2(SupModelList), Record[3])
+                    self.TblDsc.Exec(SqlCommand)
+
+        for Key in LibraryClasses.keys():
+            Library = PlatformLibraryClass()
+            Library.Name = Key[0]
+            Library.FilePath = NormPath(Key[1])
+            Library.SupModuleList = GetSplitValueList(Key[2])
+            Library.SupArchList = LibraryClasses[Key]
+            self.Platform.LibraryClasses.LibraryList.append(Library)
+
+ ## Gen Pcds
+ #
+ # Gen Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ if Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
+ Model = MODEL_PCD_PATCHABLE_IN_MODULE
+ elif Type == DataType.TAB_PCDS_FIXED_AT_BUILD:
+ Model = MODEL_PCD_FIXED_AT_BUILD
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all Pcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type) = GetPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type) = GetPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, DatumType, MaxDatumSize, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Key in Pcds:
+ Pcd = PcdClass(Key[0], '', Key[1], Key[3], Key[4], Key[2], Key[5], [], {}, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen FeatureFlagPcds
+ #
+ # Gen FeatureFlagPcds of Dsc file as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenFeatureFlagPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ if Type == DataType.TAB_PCDS_FEATURE_FLAG:
+ Model = MODEL_PCD_FEATURE_FLAG
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all FeatureFlagPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (TokenName, TokenGuidCName, Value, Type) = GetFeatureFlagPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (TokenName, TokenGuidCName, Value, Type) = GetFeatureFlagPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (TokenName, TokenGuidCName, Value, Type), Arch)
+ self.PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+
+ for Key in Pcds:
+ Pcd = PcdClass(Key[0], '', Key[1], '', '', Key[2], Key[3], [], {}, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicDefaultPcds
+ #
+ # Gen DynamicDefaultPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicDefaultPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_DEFAULT:
+ Model = MODEL_PCD_DYNAMIC_DEFAULT
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT:
+ Model = MODEL_PCD_DYNAMIC_EX_DEFAULT
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicDefaultPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5, K6) = GetDynamicDefaultPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5, K6) = GetDynamicDefaultPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[6], self.Platform.SkuInfos.SkuInfoList, '', '', '', '', '', Key[2])
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], Key[3], Key[4], Key[2], Key[5], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicHiiPcds
+ #
+ # Gen DynamicHiiPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicHiiPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_HII:
+ Model = MODEL_PCD_DYNAMIC_HII
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_HII:
+ Model = MODEL_PCD_DYNAMIC_EX_HII
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicHiiPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5, K6, K7, K8) = GetDynamicHiiPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, K7, K8, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5, K6, K7, K8) = GetDynamicHiiPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, K6, K7, K8, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[8], self.Platform.SkuInfos.SkuInfoList, Key[2], Key[3], Key[4], Key[5], '', '')
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], '', Key[6], Key[5], Key[7], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+ ## Gen DynamicVpdPcds
+ #
+ # Gen DynamicVpdPcds of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
+ #
+ # @param Type: The type of Pcd
+ # @param ContainerFile: The file which describes the pcd, used for error report
+ #
+ def GenDynamicVpdPcds(self, Type = '', ContainerFile = ''):
+ Pcds = {}
+ SkuInfoList = {}
+ if Type == DataType.TAB_PCDS_DYNAMIC_VPD:
+ Model = MODEL_PCD_DYNAMIC_VPD
+ elif Type == DataType.TAB_PCDS_DYNAMIC_EX_VPD:
+ Model = MODEL_PCD_DYNAMIC_EX_VPD
+ else:
+ pass
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, Model, self.FileID)
+
+ #
+ # Get all DynamicVpdPcds
+ #
+ RecordSet = QueryDscItem(self.TblDsc, Model, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, Type, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ (K1, K2, K3, K4, K5) = GetDynamicVpdPcd(NewItem, Type, Filename, -1)
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, IncludeFile[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ (K1, K2, K3, K4, K5) = GetDynamicVpdPcd(Record[0], Type, ContainerFile, Record[2])
+ MergeArches(Pcds, (K1, K2, K3, K4, K5, Record[4]), Arch)
+ self.PcdToken[Record[3]] = (K2, K1)
+
+ for Key in Pcds:
+ (Status, SkuInfoList) = self.GenSkuInfoList(Key[5], self.Platform.SkuInfos.SkuInfoList, '', '', '', '', Key[2], '')
+ if Status == False:
+ ErrorMsg = "The SKUID '%s' used in section '%s' is not defined in section [SkuIds]" % (SkuInfoList, Type)
+ EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, ContainerFile, RaiseError = EdkLogger.IsRaiseError)
+ Pcd = PcdClass(Key[0], '', Key[1], '', Key[3], '', Key[4], [], SkuInfoList, [])
+ Pcd.SupArchList = Pcds[Key]
+ self.Platform.DynamicPcdBuildDefinitions.append(Pcd)
+
+
+ ## Get Component
+ #
+ # Get Component section defined in Dsc file
+ #
+ # @param ContainerFile: The file which describes the Components, used for error report
+ #
+ # @retval PlatformModuleClass() A instance for PlatformModuleClass
+ #
+ def GenComponents(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_COMPONENTS)
+ Components = sdict()
+ #
+ # Get all include files
+ #
+ IncludeFiles = QueryDscItem(self.TblDsc, MODEL_META_DATA_INCLUDE, MODEL_META_DATA_COMPONENT, self.FileID)
+
+ #
+ # Get all Components
+ #
+ RecordSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_COMPONENT, -1, self.FileID)
+
+ #
+ # Go through each arch
+ #
+ for Arch in DataType.ARCH_LIST:
+ for IncludeFile in IncludeFiles:
+ if IncludeFile[1] == Arch or IncludeFile[1] == TAB_ARCH_COMMON.upper():
+ Filename = CheckFileExist(self.WorkspaceDir, IncludeFile[0], ContainerFile, TAB_COMPONENTS, '', IncludeFile[2])
+ for NewItem in open(Filename, 'r').readlines():
+ if CleanString(NewItem) == '':
+ continue
+ NewItems = []
+ GetComponents(open(Filename, 'r').read(), TAB_COMPONENTS, NewItems, TAB_COMMENT_SPLIT)
+ for NewComponent in NewItems:
+ MergeArches(Components, self.GenComponent(NewComponent, Filename), Arch)
+
+ for Record in RecordSet:
+ if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON.upper():
+ Lib, Bo, Pcd = [], [], []
+
+ SubLibSet = QueryDscItem(self.TblDsc, MODEL_EFI_LIBRARY_CLASS, Record[3], self.FileID)
+ for SubLib in SubLibSet:
+ Lib.append(TAB_VALUE_SPLIT.join([SubLib[0],SubLib[4]]))
+
+ SubBoSet = QueryDscItem(self.TblDsc, MODEL_META_DATA_BUILD_OPTION, Record[3], self.FileID)
+ for SubBo in SubBoSet:
+ Bo.append(SubBo[0])
+
+ SubPcdSet1 = QueryDscItem(self.TblDsc, MODEL_PCD_FIXED_AT_BUILD, Record[3], self.FileID)
+ SubPcdSet2 = QueryDscItem(self.TblDsc, MODEL_PCD_PATCHABLE_IN_MODULE, Record[3], self.FileID)
+ SubPcdSet3 = QueryDscItem(self.TblDsc, MODEL_PCD_FEATURE_FLAG, Record[3], self.FileID)
+ SubPcdSet4 = QueryDscItem(self.TblDsc, MODEL_PCD_DYNAMIC_EX_DEFAULT, Record[3], self.FileID)
+ SubPcdSet5 = QueryDscItem(self.TblDsc, MODEL_PCD_DYNAMIC_DEFAULT, Record[3], self.FileID)
+ for SubPcd in SubPcdSet1:
+ Pcd.append([DataType.TAB_PCDS_FIXED_AT_BUILD, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet2:
+ Pcd.append([DataType.TAB_PCDS_PATCHABLE_IN_MODULE, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet3:
+ Pcd.append([DataType.TAB_PCDS_FEATURE_FLAG, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet4:
+ Pcd.append([DataType.TAB_PCDS_DYNAMIC_EX, SubPcd[0], SubPcd[3]])
+ for SubPcd in SubPcdSet5:
+ Pcd.append([DataType.TAB_PCDS_DYNAMIC, SubPcd[0], SubPcd[3]])
+ Item = [Record[0], Lib, Bo, Pcd]
+ MergeArches(Components, self.GenComponent(Item, ContainerFile), Arch)
+
+ for Key in Components.keys():
+ Key.SupArchList = Components[Key]
+ self.Platform.Modules.ModuleList.append(Key)
+
+ ## Get Component
+ #
+ # Get Component section defined in Dsc file
+ #
+ # @param Item: Contents includes a component block
+ # @param ContainerFile: The file which describes the library class, used for error report
+ #
+ # @retval PlatformModuleClass() A instance for PlatformModuleClass
+ #
+ def GenComponent(self, Item, ContainerFile, LineNo = -1):
+ (InfFilename, ExecFilename) = GetExec(Item[0])
+ LibraryClasses = Item[1]
+ BuildOptions = Item[2]
+ Pcds = Item[3]
+ Component = PlatformModuleClass()
+ Component.FilePath = NormPath(InfFilename)
+ Component.ExecFilePath = NormPath(ExecFilename)
+ CheckFileType(Component.FilePath, '.Inf', ContainerFile, 'component name', Item[0], LineNo)
+ CheckFileExist(self.WorkspaceDir, Component.FilePath, ContainerFile, 'component', Item[0], LineNo)
+ for Lib in LibraryClasses:
+ List = GetSplitValueList(Lib)
+ if len(List) != 2:
+ RaiseParserError(Lib, 'LibraryClasses', ContainerFile, '<ClassName>|<InfFilename>')
+ LibName = List[0]
+ LibFile = NormPath(List[1])
+ if LibName == "" or LibName == "NULL":
+ LibName = "NULL%d" % self._NullClassIndex
+ self._NullClassIndex += 1
+ CheckFileType(List[1], '.Inf', ContainerFile, 'library instance of component ', Lib, LineNo)
+ CheckFileExist(self.WorkspaceDir, LibFile, ContainerFile, 'library instance of component', Lib, LineNo)
+ Component.LibraryClasses.LibraryList.append(PlatformLibraryClass(LibName, LibFile))
+ for BuildOption in BuildOptions:
+ Key = GetBuildOption(BuildOption, ContainerFile)
+ Component.ModuleSaBuildOption.BuildOptionList.append(BuildOptionClass(Key[0], Key[1], Key[2]))
+ for Pcd in Pcds:
+ Type = Pcd[0]
+ List = GetSplitValueList(Pcd[1])
+ PcdId = Pcd[2]
+
+ TokenInfo = None
+ #
+ # For FeatureFlag
+ #
+ if Type == DataType.TAB_PCDS_FEATURE_FLAG:
+ if len(List) != 2:
+ RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>|TRUE/FALSE')
+
+ CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', '', List[1], Type, [], {}, []))
+ #
+ # For FixedAtBuild or PatchableInModule
+ #
+ if Type == DataType.TAB_PCDS_FIXED_AT_BUILD or Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
+ List.append('')
+ if len(List) != 3 and len(List) != 4:
+ RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>|<Value>[|<MaxDatumSize>]')
+
+ CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', List[2], List[1], Type, [], {}, []))
+
+ #
+ # For Dynamic or DynamicEx
+ #
+ if Type == DataType.TAB_PCDS_DYNAMIC or Type == DataType.TAB_PCDS_DYNAMIC_EX:
+ if len(List) != 1:
+ RaiseParserError(Pcd[1], 'Components', ContainerFile, '<PcdTokenSpaceGuidCName>.<PcdTokenName>')
+
+ CheckPcdTokenInfo(List[0], 'Components', ContainerFile)
+ TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
+ Component.PcdBuildDefinitions.append(PcdClass(TokenInfo[1], '', TokenInfo[0], '', '', '', Type, [], {}, []))
+
+ #
+ # Add to PcdToken
+ #
+ self.PcdToken[PcdId] = (TokenInfo[0], TokenInfo[1])
+
+ return Component
+ #End of GenComponent
+
+ ## Gen SkuInfoList
+ #
+ # Gen SkuInfoList section defined in Dsc file
+ #
+ # @param SkuNameList: Input value for SkuNameList
+ # @param SkuInfo: Input value for SkuInfo
+ # @param VariableName: Input value for VariableName
+ # @param VariableGuid: Input value for VariableGuid
+ # @param VariableOffset: Input value for VariableOffset
+ # @param HiiDefaultValue: Input value for HiiDefaultValue
+ # @param VpdOffset: Input value for VpdOffset
+ # @param DefaultValue: Input value for DefaultValue
+ #
+ # @retval (False, SkuName) Not found in section SkuId Dsc file
+ # @retval (True, SkuInfoList) Found in section SkuId of Dsc file
+ #
+ def GenSkuInfoList(self, SkuNameList, SkuInfo, VariableName = '', VariableGuid = '', VariableOffset = '', HiiDefaultValue = '', VpdOffset = '', DefaultValue = ''):
+ SkuNameList = GetSplitValueList(SkuNameList)
+ if SkuNameList == None or SkuNameList == [] or SkuNameList == ['']:
+ SkuNameList = ['DEFAULT']
+ SkuInfoList = {}
+ for Item in SkuNameList:
+ if Item not in SkuInfo:
+ return False, Item
+ Sku = SkuInfoClass(Item, SkuInfo[Item], VariableName, VariableGuid, VariableOffset, HiiDefaultValue, VpdOffset, DefaultValue)
+ SkuInfoList[Item] = Sku
+
+ return True, SkuInfoList
+
+ ## Parse Include statement
+ #
+ # Get include file path
+ #
+ # 1. Insert a record into TblFile ???
+ # 2. Insert a record into TblDsc
+ # Value1: IncludeFilePath
+ #
+ # @param LineValue: The line of incude statement
+ def ParseInclude(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "!include statement '%s' found in section %s" % (LineValue, SectionName))
+ SectionModel = Section[SectionName.upper()]
+ IncludeFile = CleanString(LineValue[LineValue.upper().find(DataType.TAB_INCLUDE.upper() + ' ') + len(DataType.TAB_INCLUDE + ' ') : ])
+ Table.Insert(Model, IncludeFile, '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Parse DEFINE statement
+ #
+ # Get DEFINE macros
+ #
+ # 1. Insert a record into TblDsc
+ # Value1: Macro Name
+ # Value2: Macro Value
+ #
+ def ParseDefine(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
+ SectionModel = Section[SectionName.upper()]
+ Define = GetSplitValueList(CleanString(LineValue[LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') + len(DataType.TAB_DEFINE + ' ') : ]), TAB_EQUAL_SPLIT, 1)
+ Table.Insert(Model, Define[0], Define[1], '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Parse Defines section
+ #
+ # Get one item in defines section
+ #
+ # Value1: Item Name
+ # Value2: Item Value
+ #
+ def ParseDefinesSection(self, LineValue, StartLine, Table, FileID, Filename, SectionName, Model, Arch):
+ EdkLogger.debug(EdkLogger.DEBUG_2, "Parse '%s' found in section %s" % (LineValue, SectionName))
+ Defines = GetSplitValueList(LineValue, TAB_EQUAL_SPLIT, 1)
+ if len(Defines) != 2:
+ RaiseParserError(LineValue, SectionName, Filename, '', StartLine)
+ self.TblDsc.Insert(Model, Defines[0], Defines[1], '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
+
    ## Insert conditional statements
    #
    # Pop the most recently opened conditional from IfDefList and insert the
    # now-closed statement into the database with its start/end lines.
    #
    # @param Filename: Path of parsing file
    # @param FileID: ID of the file being parsed (NOTE: self.FileID is used
    #                for the actual insert below, not this parameter)
    # @param BelongsToItem: Item ID the conditional belongs to
    # @param IfDefList: A list stored current conditional statements; each
    #                   entry is (expression-text, start-line, model)
    # @param EndLine: The end line no
    # @param ArchList: Support arch list
    #
    def InsertConditionalStatement(self, Filename, FileID, BelongsToItem, IfDefList, EndLine, ArchList):
        # Defaults for the record fields; Model = -1 means "not decided yet"
        (Value1, Value2, Value3, Model, StartColumn, EndColumn, Enabled) = ('', '', '', -1, -1, -1, 0)
        if IfDefList == []:
            # Closing directive (!endif/!else/!elseif) with no matching opener
            ErrorMsg = 'Not suited conditional statement in file %s' % Filename
            EdkLogger.error("DSC File Parser", PARSER_ERROR, ErrorMsg, Filename, RaiseError = EdkLogger.IsRaiseError)
        else:
            #
            # Get New Dsc item ID
            # NOTE(review): DscID is computed but never used below.
            #
            DscID = self.TblDsc.GetCount() + 1

            #
            # Pop the conditional statements which is closed
            #
            PreviousIf = IfDefList.pop()
            EdkLogger.debug(EdkLogger.DEBUG_5, 'Previous IfDef: ' + str(PreviousIf))

            #
            # !ifdef and !ifndef: the whole expression is the macro name
            #
            if PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF):
                Value1 = PreviousIf[0]
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
            #
            # !if and !elseif: split into <lhs> <op> <rhs>
            # NOTE(review): the membership tuple includes 'Model', which is
            # still -1 at this point, so effectively only the IF model
            # matches. A bare '!if X' with fewer than three tokens would
            # raise IndexError here — confirm against callers.
            #
            elif PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, Model):
                List = PreviousIf[0].split(' ')
                Value1 = List[0]
                Value2 = List[1]
                Value3 = List[2]
                Value3 = SplitString(Value3)
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
            #
            # !else: carries only the key of the matching !if
            #
            elif PreviousIf[2] in (MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, Model):
                Value1 = PreviousIf[0].strip()
                Model = PreviousIf[2]
                self.TblDsc.Insert(Model, Value1, Value2, Value3, ArchList, BelongsToItem, self.FileID, PreviousIf[1], StartColumn, EndLine, EndColumn, Enabled)
+
    ## Load Dsc file
    #
    # Parse the whole DSC file line by line, inserting each section's items
    # into the database, then resolve conditionals and DEFINE macros.
    #
    # @param Filename: Input value for filename of Dsc file
    #
    def LoadDscFile(self, Filename):
        #
        # Insert a record for file
        #
        Filename = NormPath(Filename)
        self.Identification.FileFullPath = Filename
        (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
        self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_DSC)

        #
        # Init DscTable
        #
        #self.TblDsc.Table = "Dsc%s" % FileID
        #self.TblDsc.Create()

        #
        # Init common datas
        #
        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
        [], [], TAB_UNKNOWN, [], [], []
        LineNo = 0

        #
        # Parse file content
        #
        IsFindBlockComment = False
        ReservedLine = ''
        for Line in open(Filename, 'r'):
            LineNo = LineNo + 1
            #
            # Remove comment block (R8-style /* ... */ markers); text before
            # the start marker is kept in ReservedLine and rejoined with the
            # text after the end marker.
            #
            if Line.find(TAB_COMMENT_R8_START) > -1:
                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
                IsFindBlockComment = True
            if Line.find(TAB_COMMENT_R8_END) > -1:
                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
                ReservedLine = ''
                IsFindBlockComment = False
            if IsFindBlockComment:
                continue

            #
            # Remove comments at tail and remove spaces again
            #
            Line = CleanString(Line)
            if Line == '':
                continue

            #
            # Find a new section tab
            # First insert previous section items
            # And then parse the content of the new section
            #
            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
                #
                # Insert items data of previous section
                #
                self.InsertSectionItemsIntoDatabase(self.FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
                #
                # Parse the new section
                #
                SectionItemList = []
                ArchList = []
                ThirdList = []

                CurrentSection = ''
                # A header may define several comma-separated sub-sections,
                # e.g. [Pcds.IA32, Pcds.X64]; all must share one section name
                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
                for Item in LineList:
                    # Each sub-section splits into name.arch.third
                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
                    if CurrentSection == '':
                        CurrentSection = ItemList[0]
                    else:
                        if CurrentSection != ItemList[0]:
                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                    if CurrentSection.upper() not in self.KeyList:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                        CurrentSection = TAB_UNKNOWN
                        continue
                    # Pad so arch and third part are always addressable
                    ItemList.append('')
                    ItemList.append('')
                    if len(ItemList) > 5:
                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                    else:
                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
                        ArchList.append(ItemList[1].upper())
                        ThirdList.append(ItemList[2])

                continue

            #
            # Not in any defined section
            #
            if CurrentSection == TAB_UNKNOWN:
                ErrorMsg = "%s is not in any defined section" % Line
                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)

            #
            # Add a section item
            #
            SectionItemList.append([Line, LineNo])
            # End of parse
        #End of For

        #
        # Insert items data of last section
        #
        self.InsertSectionItemsIntoDatabase(self.FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)

        #
        # Parse conditional statements
        #
        self.ParseConditionalStatement()

        #
        # Replace all DEFINE macros with its actual values
        #
        #ParseDefineMacro2(self.TblDsc, self.RecordSet, GlobalData.gGlobalDefines)
        ParseDefineMacro(self.TblDsc, GlobalData.gGlobalDefines)
+
+
+ ## ParseConditionalStatement
+ #
+ # Search all conditional statement and disable no match records
+ #
+ def ParseConditionalStatement(self):
+ #
+ # Disabled all !if/!elif/!ifdef statements without DEFINE
+ #
+ SqlCommand = """select A.StartLine, A.EndLine from %s as A
+ where A.Model in (%s, %s, %s)
+ and A.Enabled = 0
+ and A.BelongsToFile = %s
+ and A.Value1 not in (select B.Value1 from %s as B
+ where B.Model = %s
+ and B.Enabled = 0
+ and A.StartLine > B.StartLine
+ and A.Arch = B.Arch
+ and A.BelongsToItem = B.BelongsToItem
+ and A.BelongsToFile = B.BelongsToFile) """ % \
+ (self.TblDsc.Table, \
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF, \
+ self.FileID, \
+ self.TblDsc.Table, \
+ MODEL_META_DATA_DEFINE)
+ RecordSet = self.TblDsc.Exec(SqlCommand)
+ for Record in RecordSet:
+ SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[0], Record[1])
+ self.TblDsc.Exec(SqlCommand)
+
+ #
+ # Disabled !ifndef with DEFINE
+ #
+ SqlCommand = """select A.StartLine, A.EndLine from %s as A
+ where A.Model = %s
+ and A.Enabled = 0
+ and A.BelongsToFile = %s
+ and A.Value1 in (select B.Value1 from %s as B
+ where B.Model = %s
+ and B.Enabled = 0
+ and A.StartLine > B.StartLine
+ and A.Arch = B.Arch
+ and A.BelongsToItem = B.BelongsToItem
+ and A.BelongsToFile = B.BelongsToFile)""" % \
+ (self.TblDsc.Table, \
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF, \
+ self.FileID, \
+ self.TblDsc.Table, \
+ MODEL_META_DATA_DEFINE)
+ RecordSet = self.TblDsc.Exec(SqlCommand)
+ for Record in RecordSet:
+ SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[0], Record[1])
+ EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
+ self.Cur.execute(SqlCommand)
+
+ #
+ # Disabled !if, !elif and !else with un-match value
+ #
+ SqlCommand = """select A.Model, A.Value1, A.Value2, A.Value3, A.StartLine, A.EndLine, B.Value2 from %s as A join %s as B
+ where A.Model in (%s, %s)
+ and A.Enabled = 0
+ and A.BelongsToFile = %s
+ and B.Enabled = 0
+ and B.Model = %s
+ and A.Value1 = B.Value1
+ and A.StartLine > B.StartLine
+ and A.BelongsToItem = B.BelongsToItem
+ and A.BelongsToFile = B.BelongsToFile""" % \
+ (self.TblDsc.Table, self.TblDsc.Table, \
+ MODEL_META_DATA_CONDITIONAL_STATEMENT_IF, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE, \
+ self.FileID, MODEL_META_DATA_DEFINE)
+ RecordSet = self.TblDsc.Exec(SqlCommand)
+ DisabledList = []
+ for Record in RecordSet:
+ if Record[0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_IF:
+ if not self.Compare(Record[6], Record[2], Record[3]):
+ SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[4], Record[5])
+ self.TblDsc.Exec(SqlCommand)
+ else:
+ DisabledList.append(Record[1])
+ continue
+ if Record[0] == MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE and Record[1] in DisabledList:
+ SqlCommand = """Update %s set Enabled = -1 where StartLine >= %s and EndLine <= %s""" %(self.TblDsc.Table, Record[4], Record[5])
+ self.TblDsc.Exec(SqlCommand)
+
+ ## Compare
+ #
+ # Compare two values
+ # @param Value1:
+ # @param CompareType:
+ # @param Value2:
+ #
+ def Compare(self, Value1, CompareType, Value2):
+ Command = """Value1 %s Value2""" %CompareType
+ return eval(Command)
+
    ## First time to insert records to database
    #
    # Insert item data of a section to database
    # @param FileID: The ID of belonging file
    # @param Filename: The name of belonging file
    # @param CurrentSection: The name of currect section
    # @param SectionItemList: A list of items of the section
    # @param ArchList: A list of arches
    # @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
    # @param IfDefList: A list of all conditional statements
    #
    def InsertSectionItemsIntoDatabase(self, FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList):
        #
        # Insert each item data of a section, once per (Arch, Third) pair
        # declared in the section header
        #
        for Index in range(0, len(ArchList)):
            Arch = ArchList[Index]
            Third = ThirdList[Index]
            if Arch == '':
                Arch = TAB_ARCH_COMMON.upper()

            Model = Section[CurrentSection.upper()]
            #Records = self.RecordSet[Model]

            for SectionItem in SectionItemList:
                BelongsToItem, EndLine, EndColumn = -1, -1, -1
                LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]


                EdkLogger.debug(4, "Parsing %s ..." %LineValue)
                #
                # Parse '!ifdef'
                # NOTE(review): ordering matters — '!ifdef' must be tested
                # before '!if' because find() would also match the '!if'
                # prefix. Also the slice uses len(TAB_IF_N_DEF) ('!ifndef'),
                # one longer than '!ifdef'; this only yields the bare macro
                # name when exactly one space follows the keyword — confirm.
                #
                if LineValue.upper().find(TAB_IF_DEF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF_N_DEF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF))
                    continue

                #
                # Parse '!ifndef'
                #
                if LineValue.upper().find(TAB_IF_N_DEF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF_N_DEF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF))
                    continue

                #
                # Parse '!endif': close the innermost open conditional
                #
                if LineValue.upper().find(TAB_END_IF.upper()) > -1:
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine, Arch)
                    continue
                #
                # Parse '!if'
                #
                if LineValue.upper().find(TAB_IF.upper()) > -1:
                    IfDefList.append((LineValue[len(TAB_IF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IF))
                    continue

                #
                # Parse '!elseif': close the previous branch (one line above)
                # and open a new IF-model branch
                #
                if LineValue.upper().find(TAB_ELSE_IF.upper()) > -1:
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine - 1, Arch)
                    IfDefList.append((LineValue[len(TAB_ELSE_IF):].strip(), StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_IF))
                    continue

                #
                # Parse '!else': reuse the key of the branch being closed
                #
                if LineValue.upper().find(TAB_ELSE.upper()) > -1:
                    Key = IfDefList[-1][0].split(' ' , 1)[0].strip()
                    self.InsertConditionalStatement(Filename, FileID, Model, IfDefList, StartLine, Arch)
                    IfDefList.append((Key, StartLine, MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE))
                    continue

                #
                # Parse !include statement first
                #
                if LineValue.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1:
                    self.ParseInclude(LineValue, StartLine, self.TblDsc, FileID, Filename, CurrentSection, MODEL_META_DATA_INCLUDE, Arch)
                    continue

                #
                # And then parse DEFINE statement
                #
                if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                    self.ParseDefine(LineValue, StartLine, self.TblDsc, FileID, Filename, CurrentSection, MODEL_META_DATA_DEFINE, Arch)
                    continue

                #
                # At last parse other sections; sections carrying a third
                # value (module type / sku id) store it in Value2
                #
                if CurrentSection == TAB_LIBRARY_CLASSES or CurrentSection in TAB_PCD_DYNAMIC_TYPE_LIST or CurrentSection in TAB_PCD_DYNAMIC_EX_TYPE_LIST:
                    ID = self.TblDsc.Insert(Model, LineValue, Third, '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                    #Records.append([LineValue, Arch, StartLine, ID, Third])
                    continue
                elif CurrentSection != TAB_COMPONENTS:
                    ID = self.TblDsc.Insert(Model, LineValue, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                    #Records.append([LineValue, Arch, StartLine, ID, Third])
                    continue

                #
                # Parse COMPONENT section
                # NOTE(review): this branch runs inside the per-item loop but
                # parses the whole SectionItemList each time — confirm
                # whether duplicate component records are intended.
                #
                if CurrentSection == TAB_COMPONENTS:
                    Components = []
                    GetComponent(SectionItemList, Components)
                    for Component in Components:
                        EdkLogger.debug(4, "Parsing component %s ..." %Component)
                        # Component record is the parent item of its
                        # libraries, build options and pcd overrides
                        DscItmeID = self.TblDsc.Insert(MODEL_META_DATA_COMPONENT, Component[0], '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[1]:
                            List = GetSplitValueList(Item, MaxSplit = 2)
                            LibName, LibIns = '', ''
                            if len(List) == 2:
                                LibName = List[0]
                                LibIns = List[1]
                            else:
                                LibName = List[0]
                            self.TblDsc.Insert(MODEL_EFI_LIBRARY_CLASS, LibName, LibIns, '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[2]:
                            self.TblDsc.Insert(MODEL_META_DATA_BUILD_OPTION, Item, '', '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
                        for Item in Component[3]:
                            Model = Section[Item[0].upper()]
                            self.TblDsc.Insert(Model, Item[1], '', '', Arch, DscItmeID, FileID, StartLine, -1, StartLine, -1, 0)
+
+ ## Show detailed information of Dsc
+ #
+ # Print all members and their values of Dsc class
+ #
+ def ShowDsc(self):
+ print TAB_SECTION_START + TAB_INF_DEFINES + TAB_SECTION_END
+ printDict(self.Defines.DefinesDictionary)
+
+ for Key in self.KeyList:
+ for Arch in DataType.ARCH_LIST_FULL:
+ Command = "printList(TAB_SECTION_START + '" + \
+ Key + DataType.TAB_SPLIT + Arch + \
+ "' + TAB_SECTION_END, self.Contents[arch]." + Key + ')'
+ eval(Command)
+
    ## Show detailed information of Platform
    #
    # Print all members and their values of Platform class.
    # This is a debug dump; the exact output format is relied on for
    # eyeball comparison, so the code is left untouched.
    #
    def ShowPlatform(self):
        M = self.Platform
        # Per-arch header information
        for Arch in M.Header.keys():
            print '\nArch =', Arch
            print 'Filename =', M.Header[Arch].FileName
            print 'FullPath =', M.Header[Arch].FullPath
            print 'BaseName =', M.Header[Arch].Name
            print 'Guid =', M.Header[Arch].Guid
            print 'Version =', M.Header[Arch].Version
            print 'DscSpecification =', M.Header[Arch].DscSpecification
            print 'SkuId =', M.Header[Arch].SkuIdName
            print 'SupArchList =', M.Header[Arch].SupArchList
            print 'BuildTargets =', M.Header[Arch].BuildTargets
            print 'OutputDirectory =', M.Header[Arch].OutputDirectory
            print 'BuildNumber =', M.Header[Arch].BuildNumber
            print 'MakefileName =', M.Header[Arch].MakefileName
            print 'BsBaseAddress =', M.Header[Arch].BsBaseAddress
            print 'RtBaseAddress =', M.Header[Arch].RtBaseAddress
            print 'Define =', M.Header[Arch].Define
        print 'Fdf =', M.FlashDefinitionFile.FilePath
        # Build options
        print '\nBuildOptions =', M.BuildOptions, M.BuildOptions.IncludeFiles
        for Item in M.BuildOptions.BuildOptionList:
            print '\t', 'ToolChainFamily =', Item.ToolChainFamily, 'ToolChain =', Item.ToolChain, 'Option =', Item.Option, 'Arch =', Item.SupArchList
        # Sku ids, libraries and library classes
        print '\nSkuIds =', M.SkuInfos.SkuInfoList, M.SkuInfos.IncludeFiles
        print '\nLibraries =', M.Libraries, M.Libraries.IncludeFiles
        for Item in M.Libraries.LibraryList:
            print '\t', Item.FilePath, Item.SupArchList, Item.Define
        print '\nLibraryClasses =', M.LibraryClasses, M.LibraryClasses.IncludeFiles
        for Item in M.LibraryClasses.LibraryList:
            print '\t', Item.Name, Item.FilePath, Item.SupModuleList, Item.SupArchList, Item.Define
        # Dynamic pcd build definitions, with per-sku details
        print '\nPcds =', M.DynamicPcdBuildDefinitions
        for Item in M.DynamicPcdBuildDefinitions:
            print '\tCname=', Item.CName, 'TSG=', Item.TokenSpaceGuidCName, 'Value=', Item.DefaultValue, 'Token=', Item.Token, 'Type=', Item.ItemType, 'Datum=', Item.DatumType, 'Size=', Item.MaxDatumSize, 'Arch=', Item.SupArchList, Item.SkuInfoList
            for Sku in Item.SkuInfoList.values():
                print '\t\t', str(Sku)
        # Components with their per-module overrides
        print '\nComponents =', M.Modules.ModuleList, M.Modules.IncludeFiles
        for Item in M.Modules.ModuleList:
            print '\t', Item.FilePath, Item.ExecFilePath, Item.SupArchList
            for Lib in Item.LibraryClasses.LibraryList:
                print '\t\tLib:', Lib.Name, Lib.FilePath
            for Bo in Item.ModuleSaBuildOption.BuildOptionList:
                print '\t\tBuildOption:', Bo.ToolChainFamily, Bo.ToolChain, Bo.Option
            for Pcd in Item.PcdBuildDefinitions:
                print '\t\tPcd:', Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.MaxDatumSize, Pcd.DefaultValue, Pcd.ItemType
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    EdkLogger.Initialize()
    EdkLogger.SetLevel(EdkLogger.DEBUG_0)

    # NOTE(review): os.getenv returns None when WORKSPACE is unset, which
    # makes os.path.join below raise — this debug entry point assumes a
    # configured EDK II environment.
    W = os.getenv('WORKSPACE')
    F = os.path.join(W, 'Nt32Pkg/Nt32Pkg.dsc')

    # Parse the sample platform description into a fresh database
    Db = Database.Database('Dsc.db')
    Db.InitDatabase()

    P = Dsc(os.path.normpath(F), True, True, W, Db)
    P.ShowPlatform()

    Db.Close()
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspace.py b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
new file mode 100644
index 0000000000..a494e814a6
--- /dev/null
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspace.py
@@ -0,0 +1,318 @@
+## @file
+# This is the base class for applications that operate on an EDK II Workspace
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, sys, time
+from DataType import *
+
+## EdkIIWorkspace
+#
+# Collect WorkspaceDir from the environment, the Verbose command line flag, and detect an icon bitmap file.
+#
+# @var StartTime: Time of build system starting
+# @var PrintRunTime: Printable time of build system running
+# @var PrintRunStatus: Printable status of build system running
+# @var RunStatus: Status of build system running
+#
class EdkIIWorkspace:
    ## Constructor
    #
    # Record the start time, resolve the workspace root from the WORKSPACE
    # environment variable (normalizing the drive letter on Windows), try to
    # load the GUI icon, and scan sys.argv for a '-v' verbose flag.
    #
    def __init__(self):
        self.StartTime = time.time()
        self.PrintRunTime = False
        self.PrintRunStatus = False
        self.RunStatus = ''

        #
        # Check environment variable 'WORKSPACE'
        #
        if os.environ.get('WORKSPACE') == None:
            print 'ERROR: WORKSPACE not defined. Please run EdkSetup from the EDK II install directory.'
            # NOTE(review): returning a non-None value from __init__ raises
            # TypeError at runtime; the message above is printed first, but
            # construction still fails with the wrong exception. Consider
            # raising explicitly or calling sys.exit() instead.
            return False

        self.CurrentWorkingDir = os.getcwd()

        self.WorkspaceDir = os.path.realpath(os.environ.get('WORKSPACE'))
        (Drive, Path) = os.path.splitdrive(self.WorkspaceDir)
        if Drive == '':
            # WORKSPACE carries no drive letter: borrow the drive of the
            # current working directory (Windows case), if it has one.
            (Drive, CwdPath) = os.path.splitdrive(self.CurrentWorkingDir)
            if Drive != '':
                self.WorkspaceDir = Drive + Path
        else:
            # Normalize the drive letter to upper case.
            self.WorkspaceDir = Drive.upper() + Path

        self.WorkspaceRelativeWorkingDir = self.WorkspaceRelativePath (self.CurrentWorkingDir)

        try:
            #
            # Load TianoCoreOrgLogo, used for GUI tool
            #
            # NOTE(review): 'wx' is never imported by this module, so this
            # normally fails with NameError (masked by the bare except) and
            # Icon stays None unless the importing script has provided wx.
            #
            self.Icon = wx.Icon(self.WorkspaceFile('tools/Python/TianoCoreOrgLogo.gif'),wx.BITMAP_TYPE_GIF)
        except:
            self.Icon = None

        # '-v' anywhere on the command line switches on verbose tracing of
        # the file-based helper methods below.
        self.Verbose = False
        for Arg in sys.argv:
            if Arg.lower() == '-v':
                self.Verbose = True

    ## Close build system
    #
    # Print the accumulated run time (when PrintRunTime is set) and the final
    # run status message (when one was recorded).
    #
    def Close(self):
        if self.PrintRunTime:
            Seconds = int(time.time() - self.StartTime)
            if Seconds < 60:
                print 'Run Time: %d seconds' % (Seconds)
            else:
                # NOTE(review): integer division under Python 2; under
                # Python 3 this would need '//' to keep whole minutes/hours.
                Minutes = Seconds / 60
                Seconds = Seconds % 60
                if Minutes < 60:
                    print 'Run Time: %d minutes %d seconds' % (Minutes, Seconds)
                else:
                    Hours = Minutes / 60
                    Minutes = Minutes % 60
                    print 'Run Time: %d hours %d minutes %d seconds' % (Hours, Minutes, Seconds)
        if self.RunStatus != '':
            print self.RunStatus

    ## Convert to a workspace relative filename
    #
    # Convert a full path filename to a workspace relative filename.
    #
    # @param FileName: The filename to be converted
    #
    # @retval None Workspace dir is not a prefix of the full path
    # @retval string The relative filename (leading slashes/backslashes removed)
    #
    def WorkspaceRelativePath(self, FileName):
        FileName = os.path.realpath(FileName)
        # find(...) != 0 means WorkspaceDir is not a prefix of the path.
        if FileName.find(self.WorkspaceDir) != 0:
            return None
        return FileName.replace (self.WorkspaceDir, '').strip('\\').strip('/')

    ## Convert to a full path filename
    #
    # Convert a workspace relative filename to a full path filename.
    #
    # @param FileName: The filename to be converted
    #
    # @retval string The full path filename
    #
    def WorkspaceFile(self, FileName):
        return os.path.realpath(os.path.join(self.WorkspaceDir,FileName))

    ## Convert to a real path filename
    #
    # Replace the ${WORKSPACE} placeholder (TAB_WORKSPACE from DataType) with
    # the real workspace directory.
    #
    # @param FileName: The filename to be converted
    #
    # @retval string The full path filename
    #
    def WorkspacePathConvert(self, FileName):
        return os.path.realpath(FileName.replace(TAB_WORKSPACE, self.WorkspaceDir))

    ## Convert XML into a DOM
    #
    # Parse an XML file into a DOM and return the DOM.
    #
    # NOTE(review): XmlParseFile / XmlParseFileSection / XmlSaveFile are not
    # defined or imported in this module; the importing script must bring
    # them into scope — confirm against callers.
    #
    # @param FileName: The filename to be parsed
    #
    # @retval XmlParseFile (self.WorkspaceFile(FileName))
    #
    def XmlParseFile (self, FileName):
        if self.Verbose:
            print FileName
        return XmlParseFile (self.WorkspaceFile(FileName))

    ## Convert a XML section
    #
    # Parse a section of an XML file into a DOM(Document Object Model) and return the DOM.
    #
    # @param FileName: The filename to be parsed
    # @param SectionTag: The tag name of the section to be parsed
    #
    # @retval XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)
    #
    def XmlParseFileSection (self, FileName, SectionTag):
        if self.Verbose:
            print FileName
        return XmlParseFileSection (self.WorkspaceFile(FileName), SectionTag)

    ## Save a XML file
    #
    # Save a DOM(Document Object Model) into an XML file.
    #
    # @param Dom: The Dom to be saved
    # @param FileName: The filename
    #
    # @retval XmlSaveFile (Dom, self.WorkspaceFile(FileName))
    #
    def XmlSaveFile (self, Dom, FileName):
        if self.Verbose:
            print FileName
        return XmlSaveFile (Dom, self.WorkspaceFile(FileName))

    ## Convert Text File To Dictionary
    #
    # Convert a workspace relative text file to a dictionary of (name:value)
    # pairs; thin wrapper over the module-level function of the same name.
    #
    # @param FileName: Text filename (workspace relative)
    # @param Dictionary: Dictionary to store data
    # @param CommentCharacter: Comment char, used to ignore comment content
    # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
    # @param ValueSplitFlag: Value split flag, used to decide if the value has multiple parts
    # @param ValueSplitCharacter: Value split char, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
    #
    # @retval ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
    #
    def ConvertTextFileToDictionary(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
        if self.Verbose:
            print FileName
        return ConvertTextFileToDictionary(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)

    ## Convert Dictionary To Text File
    #
    # Convert a dictionary of (name:value) pairs to a workspace relative text
    # file; thin wrapper over the module-level function of the same name.
    #
    # @param FileName: Text filename (workspace relative)
    # @param Dictionary: Dictionary to store data
    # @param CommentCharacter: Comment char, used to ignore comment content
    # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
    # @param ValueSplitFlag: Value split flag, used to decide if the value has multiple parts
    # @param ValueSplitCharacter: Value split char, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
    #
    # @retval ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
    #
    def ConvertDictionaryToTextFile(self, FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
        if self.Verbose:
            print FileName
        return ConvertDictionaryToTextFile(self.WorkspaceFile(FileName), Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter)
+
## Convert Text File To Dictionary
#
# Parse a 'Key <sep> Value' text file into a dictionary of (name:value) pairs.
# Comment lines (first non-blank token starts with CommentCharacter), lines
# without the separator, multi-word keys, and duplicate keys are skipped;
# the first occurrence of a key wins. Backslashes in values are normalized
# to forward slashes.
#
# @param FileName: Text filename
# @param Dictionary: Dictionary to store data (filled in place)
# @param CommentCharacter: Comment char, used to ignore comment content
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
# @param ValueSplitFlag: Value split flag, used to decide if the value has multiple parts
# @param ValueSplitCharacter: Value split char, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
#
# @retval True Convert successfully
# @retval False Open file failed
#
def ConvertTextFileToDictionary(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    try:
        F = open(FileName,'r')
    except IOError:
        # Only a failure to open the file reports False; a bare except here
        # would also have swallowed programming errors during parsing.
        return False
    try:
        Keys = []
        for Line in F:
            LineList = Line.split(KeySplitCharacter,1)
            if len(LineList) >= 2:
                Key = LineList[0].split()
                # Accept single-word, non-comment keys not seen before.
                if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] not in Keys:
                    if ValueSplitFlag:
                        Dictionary[Key[0]] = LineList[1].replace('\\','/').split(ValueSplitCharacter)
                    else:
                        Dictionary[Key[0]] = LineList[1].strip().replace('\\','/')
                    Keys += [Key[0]]
    finally:
        # Close the handle even if parsing raises (the original leaked it).
        F.close()
    return True
+
## Convert Dictionary To Text File
#
# Merge a dictionary of (name:value) pairs into a text file: existing lines
# whose key is in the dictionary are rewritten in place (first occurrence
# only), unrelated lines are kept verbatim, and keys with no existing line
# are appended at the end. All keys are left-padded to the longest key width.
#
# @param FileName: Text filename
# @param Dictionary: Dictionary holding the data to write
# @param CommentCharacter: Comment char, used to ignore comment content
# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
# @param ValueSplitFlag: Value split flag, used to decide if the value has multiple parts
# @param ValueSplitCharacter: Value split char, used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
#
# @retval True Convert successfully
# @retval False Open file for writing failed
#
def ConvertDictionaryToTextFile(FileName, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    # Read any existing file so unrelated lines and their order are kept;
    # a missing/unreadable file simply starts from an empty line list.
    try:
        F = open(FileName,'r')
        Lines = F.readlines()
        F.close()
    except IOError:
        Lines = []
    # list(...) so entries can be .remove()d as they are rewritten —
    # dict.keys() returns a non-removable view object on Python 3.
    Keys = list(Dictionary.keys())
    MaxLength = 0
    for Key in Keys:
        if len(Key) > MaxLength:
            MaxLength = len(Key)
    Index = 0
    for Line in Lines:
        LineList = Line.split(KeySplitCharacter,1)
        if len(LineList) >= 2:
            Key = LineList[0].split()
            if len(Key) == 1 and Key[0][0] != CommentCharacter and Key[0] in Dictionary:
                if ValueSplitFlag:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, ' '.join(Dictionary[Key[0]]))
                else:
                    Line = '%-*s %c %s\n' % (MaxLength, Key[0], KeySplitCharacter, Dictionary[Key[0]])
                # Replace the line in place the first time a key is seen;
                # later duplicates of an already-written key are dropped.
                Lines.pop(Index)
                if Key[0] in Keys:
                    Lines.insert(Index,Line)
                    Keys.remove(Key[0])
        Index += 1
    # Append entries that had no existing line, in dictionary order.
    for RemainingKey in Keys:
        if ValueSplitFlag:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter,' '.join(Dictionary[RemainingKey]))
        else:
            Line = '%-*s %c %s\n' % (MaxLength, RemainingKey, KeySplitCharacter, Dictionary[RemainingKey])
        Lines.append(Line)
    try:
        F = open(FileName,'w')
    except IOError:
        return False
    F.writelines(Lines)
    F.close()
    return True
+
## Create a new directory
#
# Create Directory (and any missing parent directories) if it does not
# already exist; an existing directory is a silent no-op.
#
# @param Directory: Directory to be created
#
def CreateDirectory(Directory):
    # os.path.exists instead of os.access(..., os.F_OK): access() answers
    # for the process's *real* uid/gid, which is not the right question for
    # a plain existence test (see the os module documentation).
    if not os.path.exists(Directory):
        os.makedirs (Directory)
+
## Create a new file
#
# Open FileName under Directory, creating the directory chain first if it
# does not exist yet.
#
# @param Directory: Directory the file lives in (created if missing)
# @param FileName: Filename to be created
# @param Mode: The mode used to open the file, default is 'w'
#
# @retval file The open file object; the caller is responsible for closing it
#
def CreateFile(Directory, FileName, Mode='w'):
    CreateDirectory (Directory)
    return open(os.path.join(Directory, FileName), Mode)
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
#
if __name__ == '__main__':
    # Nothing to do here. Could do some unit tests.
    pass
diff --git a/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
new file mode 100644
index 0000000000..82ab1796ad
--- /dev/null
+++ b/BaseTools/Source/Python/Common/EdkIIWorkspaceBuild.py
@@ -0,0 +1,1669 @@
+## @file
+# This file is used to define each component of the build database
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os, string, copy, pdb, copy
+import EdkLogger
+import DataType
+from InfClassObject import *
+from DecClassObject import *
+from DscClassObject import *
+from String import *
+from BuildToolError import *
+from Misc import sdict
+import Database as Database
+import time as time
+
## PcdClassObject
#
# Build-database record for one PCD entry.
#
# @param object: Inherited from object class
# @param Name: Input value for Name of Pcd, default is None
# @param Guid: Input value for Guid of Pcd, default is None
# @param Type: Input value for Type of Pcd, default is None
# @param DatumType: Input value for DatumType of Pcd, default is None
# @param Value: Input value for Value of Pcd, default is None
# @param Token: Input value for Token of Pcd, default is None
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is None
#                     (each instance then gets its own empty dict)
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
#
# @var TokenCName: To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
# @var Type: To store value for Type
# @var DatumType: To store value for DatumType
# @var TokenValue: To store value for TokenValue
# @var MaxDatumSize: To store value for MaxDatumSize
# @var SkuInfoList: To store value for SkuInfoList
# @var IsOverrided: To store value for IsOverrided
# @var Phase: To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
    def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = None, IsOverrided = False):
        self.TokenCName = Name
        self.TokenSpaceGuidCName = Guid
        self.Type = Type
        self.DatumType = DatumType
        self.DefaultValue = Value
        self.TokenValue = Token
        self.MaxDatumSize = MaxDatumSize
        # A literal {} default argument would be a single dict shared by
        # every instance constructed without an explicit SkuInfoList, so any
        # mutation would leak into all of them; create a fresh dict instead.
        self.SkuInfoList = {} if SkuInfoList is None else SkuInfoList
        self.IsOverrided = IsOverrided
        self.Phase = "DXE"

    ## Convert the class to a string
    #
    # Convert each member of the class to string
    # Organize to a single line format string
    #
    # @retval Rtn Formatted String
    #
    def __str__(self):
        Rtn = '\tTokenCName=' + str(self.TokenCName) + ', ' + \
              'TokenSpaceGuidCName=' + str(self.TokenSpaceGuidCName) + ', ' + \
              'Type=' + str(self.Type) + ', ' + \
              'DatumType=' + str(self.DatumType) + ', ' + \
              'DefaultValue=' + str(self.DefaultValue) + ', ' + \
              'TokenValue=' + str(self.TokenValue) + ', ' + \
              'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
        for Item in self.SkuInfoList.values():
            Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
        Rtn = Rtn + str(self.IsOverrided)

        return Rtn

    ## Override __eq__ function
    #
    # Two pcds are equal when token C name and token-space GUID C name match;
    # Type/DatumType/value differences are deliberately ignored.
    #
    # @retval False The two pcds are different
    # @retval True  The two pcds are the same
    #
    def __eq__(self, Other):
        return Other != None and self.TokenCName == Other.TokenCName and self.TokenSpaceGuidCName == Other.TokenSpaceGuidCName

    ## Override __hash__ function
    #
    # Use (TokenCName, TokenSpaceGuidCName) as key in hash table,
    # consistent with __eq__ above.
    #
    # @retval tuple hash Key for hash table
    #
    def __hash__(self):
        return hash((self.TokenCName, self.TokenSpaceGuidCName))
+
## LibraryClassObject
#
# This Class defines LibraryClassObject used in BuildDatabase
#
# @param object: Inherited from object class
# @param Name: Input value for LibraryClassName, default is None
# @param SupModList: Input value for SupModList, default is None (each
#                    instance then gets its own empty list)
# @param Type: Input value for Type, default is None; when given, it is
#              split into SupModList and overrides the SupModList argument
#
# @var LibraryClass: To store value for LibraryClass
# @var SupModList: To store value for SupModList
#
class LibraryClassObject(object):
    def __init__(self, Name = None, SupModList = None, Type = None):
        self.LibraryClass = Name
        # A literal [] default argument would be one list shared by every
        # instance constructed without SupModList; use a fresh list instead.
        self.SupModList = [] if SupModList is None else SupModList
        if Type != None:
            self.SupModList = CleanString(Type).split(DataType.TAB_SPACE_SPLIT)
+
## ModuleBuildClassObject
#
# Build-database record describing one module (one INF file): its [Defines]
# scalars, its per-section lists (sources, binaries, protocols, PPIs, GUIDs,
# includes, packages, entry points, constructors/destructors, ...), and its
# keyed tables:
#   LibraryClasses : sdict { (LibraryClassName, ModuleType) : LibraryClassInfFile }
#   Pcds           : { (PcdCName, PcdGuidCName) : PcdClassObject }
#   BuildOptions   : { BuildOptionKey : BuildOptionValue }
#   CustomMakefile / Specification : plain {name : value} maps
#
# Records compare equal (and hash alike) by DescFilePath alone, so the INF
# path is the record's identity.
#
class ModuleBuildClassObject(object):
    def __init__(self):
        self.AutoGenVersion = 0
        # Scalar values from the module [Defines] section, all empty until
        # the INF is parsed.
        for StringAttr in ('DescFilePath', 'BaseName', 'ModuleType', 'Guid',
                           'Version', 'PcdIsDriver', 'BinaryModule', 'Shadow',
                           'Depex'):
            setattr(self, StringAttr, '')
        # Per-section list values, each starting out empty.
        for ListAttr in ('LibraryClass', 'ModuleEntryPointList',
                         'ModuleUnloadImageList', 'ConstructorList',
                         'DestructorList', 'Binaries', 'Sources', 'Libraries',
                         'Protocols', 'Ppis', 'Guids', 'Includes', 'Packages'):
            setattr(self, ListAttr, [])
        # Keyed tables; LibraryClasses keeps insertion order via sdict.
        self.CustomMakefile = {}
        self.Specification = {}
        self.LibraryClasses = sdict()
        self.Pcds = {}
        self.BuildOptions = {}

    ## String form: the INF file path identifies the module.
    #
    # @retval string DescFilePath
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two records describe the same module when their INF file
    # paths match (the other operand is coerced through str()).
    #
    # @retval True  Same module
    # @retval False Different modules
    #
    def __eq__(self, Other):
        return str(Other) == self.DescFilePath

    ## Hash on the INF file path, consistent with __eq__, so records can be
    # used as dict keys or set members.
    #
    # @retval string hash Key for hash table
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## PackageBuildClassObject
#
# Build-database record describing one package (one DEC file): its [Defines]
# scalars plus the per-section declaration tables
#   Protocols      : { ProtocolName : Protocol Guid }
#   Ppis           : { PpiName : Ppi Guid }
#   Guids          : { GuidName : Guid }
#   LibraryClasses : { LibraryClassName : LibraryClassInfFile }
#   Pcds           : { (PcdCName, PcdGuidCName) : PcdClassObject }
# and the Includes path list.
#
# Records compare equal (and hash alike) by DescFilePath alone, so the DEC
# path is the record's identity.
#
class PackageBuildClassObject(object):
    def __init__(self):
        # Scalar values from the package [Defines] section.
        for ScalarAttr in ('DescFilePath', 'PackageName', 'Guid', 'Version'):
            setattr(self, ScalarAttr, '')
        # Per-section declaration tables, each its own fresh dict.
        for MapAttr in ('Protocols', 'Ppis', 'Guids', 'LibraryClasses', 'Pcds'):
            setattr(self, MapAttr, {})
        # Include search paths declared by the package.
        self.Includes = []

    ## String form: the DEC file path identifies the package.
    #
    # @retval string DescFilePath
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two records describe the same package when their DEC file
    # paths match (the other operand is coerced through str()).
    #
    # @retval True  Same package
    # @retval False Different packages
    #
    def __eq__(self, Other):
        return str(Other) == self.DescFilePath

    ## Hash on the DEC file path, consistent with __eq__.
    #
    # @retval string hash Key for hash table
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## PlatformBuildClassObject
#
# Build-database record describing one platform (one DSC file): its
# [Defines] scalars plus the keyed tables
#   SkuIds         : { SkuName : SkuId, '!include' : includefilename, ... }
#   LibraryClasses : { (LibraryClassName, ModuleType) : LibraryClassInfFile }
#   Libraries      : { LibraryBaseName : InfFileName }
#   Pcds           : { (PcdCName, PcdGuidCName) : PcdClassObject }
#   BuildOptions   : { BuildOptionKey : BuildOptionValue }
# and the Modules / LibraryInstances INF path lists.
#
# Records compare equal (and hash alike) by DescFilePath alone, so the DSC
# path is the record's identity.
#
class PlatformBuildClassObject(object):
    def __init__(self):
        # Scalar values from the platform [Defines] section.
        for ScalarAttr in ('DescFilePath', 'PlatformName', 'Guid', 'Version',
                           'DscSpecification', 'OutputDirectory',
                           'FlashDefinition', 'BuildNumber', 'MakefileName'):
            setattr(self, ScalarAttr, '')
        # Keyed tables, each its own fresh dict.
        for MapAttr in ('SkuIds', 'LibraryClasses', 'Libraries', 'Pcds',
                        'BuildOptions'):
            setattr(self, MapAttr, {})
        # INF path lists for the platform's components and library instances.
        self.Modules = []
        self.LibraryInstances = []

    ## String form: the DSC file path identifies the platform.
    #
    # @retval string DescFilePath
    #
    def __str__(self):
        return self.DescFilePath

    ## Equality: two records describe the same platform when their DSC file
    # paths match (the other operand is coerced through str()).
    #
    # @retval True  Same platform
    # @retval False Different platforms
    #
    def __eq__(self, other):
        return str(other) == self.DescFilePath

    ## Hash on the DSC file path, consistent with __eq__.
    #
    # @retval string hash Key for hash table
    #
    def __hash__(self):
        return hash(self.DescFilePath)
+
## ItemBuild
#
# Per-architecture container for the three build databases:
#   PlatformDatabase : { DscFileName : PlatformBuildClassObject, ... }
#   PackageDatabase  : { DecFileName : PackageBuildClassObject, ... }
#   ModuleDatabase   : { InfFileName : ModuleBuildClassObject, ... }
#
# @param Arch:     Build arch this container describes
# @param Platform: Accepted for interface compatibility (unused here)
# @param Package:  Accepted for interface compatibility (unused here)
# @param Module:   Accepted for interface compatibility (unused here)
#
class ItemBuild(object):
    def __init__(self, Arch, Platform = None, Package = None, Module = None):
        # Target architecture this set of databases belongs to.
        self.Arch = Arch
        # One empty lookup table per item kind; WorkspaceBuild fills these
        # in during its Gen*Database passes.
        self.PlatformDatabase, self.PackageDatabase, self.ModuleDatabase = {}, {}, {}
+
+## WorkspaceBuild
+#
+# This class is used to parse active platform to init all inf/dec/dsc files
+# Generate module/package/platform databases for build
+#
+# @param object: Inherited from object class
+# @param ActivePlatform: Input value for current active platform
+# @param WorkspaceDir: Input value for current WorkspaceDir
+#
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var SupArchList: To store value for SupArchList, selection scope is in below list
+# EBC | IA32 | X64 | IPF | ARM | PPC
+# @var BuildTarget: To store value for WorkspaceDir, selection scope is in below list
+# RELEASE | DEBUG
+# @var SkuId: To store value for SkuId
+# @var Fdf: To store value for Fdf
+# @var FdTargetList: To store value for FdTargetList
+# @var FvTargetList: To store value for FvTargetList
+# @var TargetTxt: To store value for TargetTxt, it is a set structure as
+# TargetTxtClassObject
+# @var ToolDef: To store value for ToolDef, it is a set structure as
+# ToolDefClassObject
+# @var InfDatabase: To store value for InfDatabase, it is a set structure as
+# { [InfFileName] : InfClassObject}
+# @var DecDatabase: To store value for DecDatabase, it is a set structure as
+# { [DecFileName] : DecClassObject}
+# @var DscDatabase: To store value for DscDatabase, it is a set structure as
+# { [DscFileName] : DscClassObject}
+# @var Build: To store value for DscDatabase, it is a set structure as
+# ItemBuild
+# @var DscFileName: To store value for Active Platform
+# @var UnFoundPcdInDsc: To store values for the pcds defined in INF/DEC but not found in DSC, it is a set structure as
+# { (PcdGuid, PcdCName, Arch) : DecFileName }
+#
+class WorkspaceBuild(object):
    def __init__(self, ActivePlatform, WorkspaceDir):
        # Parse the active platform DSC, then walk it to discover every INF
        # (modules + libraries) and every DEC those INFs depend on, filling
        # InfDatabase / DecDatabase / DscDatabase along the way.
        self.WorkspaceDir = NormPath(WorkspaceDir)
        self.SupArchList = []
        self.BuildTarget = []
        self.SkuId = ''
        self.Fdf = ''
        self.FdTargetList = []
        self.FvTargetList = []
        self.TargetTxt = None
        self.ToolDef = None

        self.InfDatabase = {}
        self.DecDatabase = {}
        self.DscDatabase = {}

        self.UnFoundPcdInDsc = {}

        #
        # Init build for all arches
        #
        self.Build = {}
        for Arch in DataType.ARCH_LIST:
            self.Build[Arch] = ItemBuild(Arch)

        #
        # Init build database
        #
        # NOTE(review): DATABASE_PATH is not defined in this module; it must
        # come from one of the star-imports above — confirm its origin.
        #
        self.Db = Database.Database(DATABASE_PATH)
        self.Db.InitDatabase()

        #
        # Get active platform
        #
        self.DscFileName = NormPath(ActivePlatform)
        File = self.WorkspaceFile(self.DscFileName)
        if os.path.exists(File) and os.path.isfile(File):
            self.DscDatabase[self.DscFileName] = Dsc(File, False, True, self.WorkspaceDir, self.Db)
        else:
            EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData = File)

        #
        # Parse platform to get module
        #
        for DscFile in self.DscDatabase.keys():
            Platform = self.DscDatabase[DscFile].Platform

            #
            # Get global information: union of the per-arch SupArchList and
            # BuildTargets headers across all arches.
            #
            Tmp = set()
            for Arch in DataType.ARCH_LIST:
                for Item in Platform.Header[Arch].SupArchList:
                    Tmp.add(Item)
            self.SupArchList = list(Tmp)
            Tmp = set()
            for Arch in DataType.ARCH_LIST:
                for Item in Platform.Header[Arch].BuildTargets:
                    Tmp.add(Item)
            self.BuildTarget = list(Tmp)
            # Each iteration overwrites SkuId, so the last supported arch's
            # SkuIdName wins.
            for Arch in self.SupArchList:
                self.SkuId = Platform.Header[Arch].SkuIdName
            self.Fdf = Platform.FlashDefinitionFile.FilePath

            #
            # Get all inf files referenced by the platform: library-class
            # instances, plain libraries, and component modules.
            #
            for Item in Platform.LibraryClasses.LibraryList:
                for Arch in Item.SupArchList:
                    self.AddToInfDatabase(Item.FilePath)

            for Item in Platform.Libraries.LibraryList:
                for Arch in Item.SupArchList:
                    self.AddToInfDatabase(Item.FilePath)

            for Item in Platform.Modules.ModuleList:
                for Arch in Item.SupArchList:
                    #
                    # Add modules
                    #
                    Module = Item.FilePath
                    self.AddToInfDatabase(Module)
                    #
                    # Add libraries used in modules, and record the per-module
                    # library-class override for this arch.
                    #
                    for Lib in Item.LibraryClasses.LibraryList:
                        self.AddToInfDatabase(Lib.FilePath)
                        self.UpdateLibraryClassOfModule(Module, Lib.Name, Arch, Lib.FilePath)

        #
        # Parse each discovered module to collect its package (DEC)
        # dependencies.
        #
        for InfFile in self.InfDatabase.keys():
            Module = self.InfDatabase[InfFile].Module
            #
            # Get all dec
            #
            for Item in Module.PackageDependencies:
                for Arch in Item.SupArchList:
                    self.AddToDecDatabase(Item.FilePath)
        # End of self.Init()
+
+ ## Generate PlatformDatabase
+ #
+ # Go through each arch to get all items in DscDatabase to PlatformDatabase
+ #
+ def GenPlatformDatabase(self, PcdsSet={}):
+ for Dsc in self.DscDatabase.keys():
+ Platform = self.DscDatabase[Dsc].Platform
+ for Arch in self.SupArchList:
+ Pb = PlatformBuildClassObject()
+
+ #
+ # Defines
+ #
+ Pb.DescFilePath = Dsc
+ Pb.PlatformName = Platform.Header[Arch].Name
+ if Pb.PlatformName == '':
+ EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of platform %s is not defined for arch %s" % (Dsc, Arch))
+ Pb.Guid = Platform.Header[Arch].Guid
+ Pb.Version = Platform.Header[Arch].Version
+ Pb.DscSpecification = Platform.Header[Arch].DscSpecification
+ Pb.OutputDirectory = Platform.Header[Arch].OutputDirectory
+ Pb.FlashDefinition = Platform.FlashDefinitionFile.FilePath
+ Pb.BuildNumber = Platform.Header[Arch].BuildNumber
+
+ #
+ # SkuId
+ #
+ for Key in Platform.SkuInfos.SkuInfoList.keys():
+ Pb.SkuIds[Key] = Platform.SkuInfos.SkuInfoList[Key]
+
+ #
+ # Module
+ #
+ for Item in Platform.Modules.ModuleList:
+ if Arch in Item.SupArchList:
+ Pb.Modules.append(Item.FilePath)
+
+ #
+ # BuildOptions
+ #
+ for Item in Platform.BuildOptions.BuildOptionList:
+ if Arch in Item.SupArchList:
+ Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = Item.Option
+
+ #
+ # LibraryClass
+ #
+ for Item in Platform.LibraryClasses.LibraryList:
+ SupModuleList = self.FindSupModuleListOfLibraryClass(Item, Platform.LibraryClasses.LibraryList, Arch)
+ if Arch in Item.SupArchList:
+ for ModuleType in SupModuleList:
+ Pb.LibraryClasses[(Item.Name, ModuleType)] = Item.FilePath
+
+ #
+ # Libraries
+ #
+ for Item in Platform.Libraries.LibraryList:
+ for ItemArch in Item.SupArchList:
+ Library = self.InfDatabase[Item.FilePath]
+ if ItemArch not in Library.Module.Header:
+ continue
+ Pb.Libraries[Library.Module.Header[ItemArch].Name] = Item.FilePath
+
+ #
+ # Pcds
+ #
+ for Item in Platform.DynamicPcdBuildDefinitions:
+ if Arch in Item.SupArchList:
+ Name = Item.CName
+ Guid = Item.TokenSpaceGuidCName
+ Type = Item.ItemType
+ DatumType = Item.DatumType
+ Value = Item.DefaultValue
+ Token = Item.Token
+ MaxDatumSize = Item.MaxDatumSize
+ SkuInfoList = Item.SkuInfoList
+ Pb.Pcds[(Name, Guid)] = PcdClassObject(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, False)
+
+ for (Name, Guid) in PcdsSet:
+ Value = PcdsSet[Name, Guid]
+ for PcdType in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
+ for Dec in self.Build[Arch].PackageDatabase:
+ Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
+ if (Name, Guid, PcdType) in Pcds:
+ Pcd = Pcds[(Name, Guid, PcdType)]
+ Type = PcdType
+ DatumType = Pcd.DatumType
+ Token = Pcd.TokenValue
+ MaxDatumSize = Pcd.MaxDatumSize
+ SkuInfoList = Pcd.SkuInfoList
+ Pb.Pcds[(Name, Guid)] = PcdClassObject(Name, Guid, Type, DatumType, Value, Token, MaxDatumSize, SkuInfoList, False)
+ break
+ else:
+ # nothing found
+ continue
+ # found in one package, find next PCD
+ break
+ else:
+ EdkLogger.error("AutoGen", PARSER_ERROR, "PCD is not found in any package", ExtraData="%s.%s" % (Guid, Name))
+ #
+ # Add to database
+ #
+ self.Build[Arch].PlatformDatabase[Dsc] = Pb
+ Pb = None
+
+ ## Generate PackageDatabase
+ #
+ # Go through each arch to get all items in DecDatabase to PackageDatabase
+ #
def GenPackageDatabase(self):
    """Translate every DEC file in DecDatabase into a PackageBuildClassObject
    per supported arch and record it in self.Build[Arch].PackageDatabase.
    """
    for Dec, DecObject in self.DecDatabase.items():
        Package = DecObject.Package

        for Arch in self.SupArchList:
            Pb = PackageBuildClassObject()

            #
            # Header defines
            #
            Pb.DescFilePath = Dec
            Pb.PackageName = Package.Header[Arch].Name
            if Pb.PackageName == '':
                EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of package %s is not defined for arch %s" % (Dec, Arch))
            Pb.Guid = Package.Header[Arch].Guid
            Pb.Version = Package.Header[Arch].Version

            #
            # Protocol/Ppi/Guid declarations all map CName -> Guid
            #
            for Item in Package.ProtocolDeclarations:
                if Arch in Item.SupArchList:
                    Pb.Protocols[Item.CName] = Item.Guid
            for Item in Package.PpiDeclarations:
                if Arch in Item.SupArchList:
                    Pb.Ppis[Item.CName] = Item.Guid
            for Item in Package.GuidDeclarations:
                if Arch in Item.SupArchList:
                    Pb.Guids[Item.CName] = Item.Guid

            #
            # Include paths
            #
            for Item in Package.Includes:
                if Arch in Item.SupArchList:
                    Pb.Includes.append(Item.FilePath)

            #
            # Library class declarations: class name -> recommended instance
            #
            for Item in Package.LibraryClassDeclarations:
                if Arch in Item.SupArchList:
                    Pb.LibraryClasses[Item.LibraryClass] = Item.RecommendedInstance

            #
            # PCD declarations, keyed by (CName, TokenSpaceGuid, ItemType)
            #
            for Item in Package.PcdDeclarations:
                if Arch in Item.SupArchList:
                    Key = (Item.CName, Item.TokenSpaceGuidCName, Item.ItemType)
                    Pb.Pcds[Key] = PcdClassObject(Item.CName, Item.TokenSpaceGuidCName, Item.ItemType,
                                                  Item.DatumType, Item.DefaultValue, Item.Token,
                                                  Item.MaxDatumSize, Item.SkuInfoList, False)

            #
            # Add to database
            #
            self.Build[Arch].PackageDatabase[Dec] = Pb
+
+ ## Generate ModuleDatabase
+ #
+ # Go through each arch to get all items in InfDatabase to ModuleDatabase
+ #
def GenModuleDatabase(self, InfList = []):
    """Translate every INF file in InfDatabase into a ModuleBuildClassObject
    per supported arch and record it in self.Build[Arch].ModuleDatabase.

    @param InfList: Inf override list from the Fdf parse result; modules not
                    referenced by any platform (or this list) are skipped
    """
    for Inf in self.InfDatabase.keys():
        Module = self.InfDatabase[Inf].Module

        for Arch in self.SupArchList:
            # Skip modules the platform does not use for this arch, or that
            # have no header section for this arch
            if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList) or Arch not in Module.Header:
                continue

            ModuleHeader = Module.Header[Arch]
            Pb = ModuleBuildClassObject()

            #
            # Defines
            #
            Pb.DescFilePath = Inf
            Pb.BaseName = ModuleHeader.Name
            if Pb.BaseName == '':
                EdkLogger.error("AutoGen", PARSER_ERROR, "The BaseName of module %s is not defined for arch %s" % (Inf, Arch))
            Pb.Guid = ModuleHeader.Guid
            Pb.Version = ModuleHeader.Version
            Pb.ModuleType = ModuleHeader.ModuleType
            Pb.PcdIsDriver = ModuleHeader.PcdIsDriver
            Pb.BinaryModule = ModuleHeader.BinaryModule
            Pb.CustomMakefile = ModuleHeader.CustomMakefile
            Pb.Shadow = ModuleHeader.Shadow

            #
            # Specs of Defines
            #
            Pb.Specification = ModuleHeader.Specification
            Pb.Specification[TAB_INF_DEFINES_EDK_RELEASE_VERSION] = ModuleHeader.EdkReleaseVersion
            Pb.Specification[TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION] = ModuleHeader.EfiSpecificationVersion
            # INF_VERSION may be written decimal or hex; base 0 lets int() decide
            Pb.AutoGenVersion = int(ModuleHeader.InfVersion, 0)

            #
            # LibraryClass of Defines
            #
            for Item in ModuleHeader.LibraryClass:
                Pb.LibraryClass.append(LibraryClassObject(Item.LibraryClass, Item.SupModuleList, None))

            #
            # Module entry points / unload images and library constructors /
            # destructors of Defines (duplicates are dropped)
            #
            for Item in Module.ExternImages:
                if Item.ModuleEntryPoint != '' and Item.ModuleEntryPoint not in Pb.ModuleEntryPointList:
                    Pb.ModuleEntryPointList.append(Item.ModuleEntryPoint)
                if Item.ModuleUnloadImage != '' and Item.ModuleUnloadImage not in Pb.ModuleUnloadImageList:
                    Pb.ModuleUnloadImageList.append(Item.ModuleUnloadImage)
            for Item in Module.ExternLibraries:
                if Item.Constructor != '' and Item.Constructor not in Pb.ConstructorList:
                    Pb.ConstructorList.append(Item.Constructor)
                if Item.Destructor != '' and Item.Destructor not in Pb.DestructorList:
                    Pb.DestructorList.append(Item.Destructor)

            #
            # Binaries
            #
            for Item in Module.Binaries:
                if Arch in Item.SupArchList:
                    Pb.Binaries.append(ModuleBinaryFileClass(Item.BinaryFile, Item.FileType, Item.Target, Item.FeatureFlag, Arch.split()))

            #
            # Sources
            #
            for Item in Module.Sources:
                if Arch in Item.SupArchList:
                    Pb.Sources.append(ModuleSourceFileClass(Item.SourceFile, Item.TagName, Item.ToolCode, Item.ToolChainFamily, Item.FeatureFlag))

            #
            # Protocols
            #
            for Item in Module.Protocols:
                if Arch in Item.SupArchList:
                    Pb.Protocols.append(Item.CName)

            #
            # Ppis
            #
            for Item in Module.Ppis:
                if Arch in Item.SupArchList:
                    Pb.Ppis.append(Item.CName)

            #
            # Guids
            #
            for Item in Module.Guids:
                if Arch in Item.SupArchList:
                    # BUGFIX: GUID CNames were appended to Pb.Ppis (copy/paste
                    # from the Ppis loop above); they belong in Pb.Guids
                    Pb.Guids.append(Item.CName)

            #
            # Includes
            #
            for Item in Module.Includes:
                if Arch in Item.SupArchList:
                    Pb.Includes.append(Item.FilePath)

            #
            # Packages
            #
            for Item in Module.PackageDependencies:
                if Arch in Item.SupArchList:
                    Pb.Packages.append(Item.FilePath)

            #
            # BuildOptions: repeated (family, toolchain) keys are concatenated
            # with a space; platform <Components> options then override
            #
            for Item in Module.BuildOptions:
                if Arch in Item.SupArchList:
                    if (Item.ToolChainFamily, Item.ToolChain) not in Pb.BuildOptions:
                        Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = Item.Option
                    else:
                        OptionString = Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)]
                        Pb.BuildOptions[(Item.ToolChainFamily, Item.ToolChain)] = OptionString + " " + Item.Option
            self.FindBuildOptions(Arch, Inf, Pb.BuildOptions)

            #
            # Depex: fragments for this arch are joined with spaces
            #
            for Item in Module.Depex:
                if Arch in Item.SupArchList:
                    Pb.Depex = Pb.Depex + Item.Depex + ' '
            Pb.Depex = Pb.Depex.strip()

            #
            # LibraryClasses
            #
            for Item in Module.LibraryClasses:
                if Arch in Item.SupArchList:
                    Lib = Item.LibraryClass
                    RecommendedInstance = Item.RecommendedInstance
                    if Pb.LibraryClass != []:
                        #
                        # This INF is itself a library: resolve an instance for
                        # every module type it supports
                        #
                        for Libs in Pb.LibraryClass:
                            for Type in Libs.SupModList:
                                Instance = self.FindLibraryClassInstanceOfLibrary(Lib, Arch, Type)
                                if Instance == None:
                                    Instance = RecommendedInstance
                                Pb.LibraryClasses[(Lib, Type)] = Instance
                    else:
                        #
                        # Plain module: resolve for its own module type only
                        #
                        Instance = self.FindLibraryClassInstanceOfModule(Lib, Arch, Pb.ModuleType, Inf)
                        if Instance == None:
                            Instance = RecommendedInstance
                        Pb.LibraryClasses[(Lib, Pb.ModuleType)] = Instance

            #
            # Libraries
            #
            for Item in Module.Libraries:
                if Arch in Item.SupArchList:
                    Pb.Libraries.append(Item.Library)

            #
            # Pcds
            #
            for Item in Module.PcdCodes:
                if Arch in Item.SupArchList:
                    Pb.Pcds[(Item.CName, Item.TokenSpaceGuidCName)] = self.FindPcd(Arch, Inf, Item.CName, Item.TokenSpaceGuidCName, Item.ItemType)

            #
            # Add to database
            #
            self.Build[Arch].ModuleDatabase[Inf] = Pb
            Pb = None
+
+ ## Update Libraries Of Platform Database
+ #
+ # @param InfList: A list for all inf files
+ #
+ def UpdateLibrariesOfPlatform(self, InfList = []):
+ # For every module of every platform/arch: resolve the module's library
+ # instances and record each distinct instance file path in
+ # Platform.LibraryInstances.
+ # @param InfList: A list for all inf files (Fdf overrides)
+ for Arch in self.SupArchList:
+ PlatformDatabase = self.Build[Arch].PlatformDatabase
+ for Dsc in PlatformDatabase:
+ Platform = PlatformDatabase[Dsc]
+ for Inf in Platform.Modules:
+ if not self.IsModuleDefinedInPlatform(Inf, Arch, InfList):
+ continue
+ Module = self.Build[Arch].ModuleDatabase[Inf]
+ # Presumably an empty LibraryClass list marks a non-library
+ # module; only those get full resolution here — confirm
+ if Module.LibraryClass == None or Module.LibraryClass == []:
+ self.UpdateLibrariesOfModule(Platform, Module, Arch)
+ for Key in Module.LibraryClasses:
+ Lib = Module.LibraryClasses[Key]
+ if Lib not in Platform.LibraryInstances:
+ Platform.LibraryInstances.append(Lib)
+
+
+ ## Update Libraries Of Module Database
+ #
+ # @param Module: The module need to be updated libraries
+ # @param Arch: The supportted arch of the module
+ #
+ def UpdateLibrariesOfModule(self, Platform, Module, Arch):
+ # Resolve this module's library instances and rewrite
+ # Module.LibraryClasses in constructor-dependency order.
+ # R8 (EDK, AutoGenVersion < 0x00010005) modules: simple transitive
+ # closure of name -> file from Platform.Libraries.
+ # R9 (EDKII) modules: build a consumer graph and topologically sort it.
+ # @param Module: The module whose libraries need updating
+ # @param Arch: The arch being resolved
+ ModuleDatabase = self.Build[Arch].ModuleDatabase
+ ModuleType = Module.ModuleType
+
+ # check R8 module
+ if Module.AutoGenVersion < 0x00010005:
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
+ LibraryConsumerList = [Module]
+
+ # "CompilerStub" is a must for R8 modules
+ Module.Libraries.append("CompilerStub")
+ # Transitive closure: each consumed library may consume more libraries
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for LibraryName in M.Libraries:
+ if LibraryName not in Platform.Libraries:
+ EdkLogger.warn("AutoGen", "Library [%s] is not found" % LibraryName,
+ ExtraData="\t%s [%s]" % (str(Module), Arch))
+ continue
+
+ LibraryFile = Platform.Libraries[LibraryName]
+ if (LibraryName, ModuleType) not in Module.LibraryClasses:
+ Module.LibraryClasses[LibraryName, ModuleType] = LibraryFile
+ LibraryConsumerList.append(ModuleDatabase[LibraryFile])
+ EdkLogger.verbose("\t" + LibraryName + " : " + LibraryFile)
+ return
+
+ # R9 module
+ # LibraryInstance: class name -> resolved library module (ordered dict)
+ # ConsumedByList: library module -> consumers (the graph's incoming edges)
+ LibraryConsumerList = [Module]
+ Constructor = []
+ ConsumedByList = sdict()
+ LibraryInstance = sdict()
+
+ EdkLogger.verbose("")
+ EdkLogger.verbose("Library instances of module [%s] [%s]:" % (str(Module), Arch))
+ while len(LibraryConsumerList) > 0:
+ M = LibraryConsumerList.pop()
+ for Key, LibraryPath in M.LibraryClasses.iteritems():
+ # The "Key" is in format of (library_class_name, supported_module_type)
+ if ModuleType != "USER_DEFINED" and ModuleType not in Key:
+ EdkLogger.debug(EdkLogger.DEBUG_3, "%s for module type %s is not supported (%s)" % (Key + (LibraryPath,)))
+ continue
+
+ LibraryClassName = Key[0]
+ if LibraryClassName not in LibraryInstance or LibraryInstance[LibraryClassName] == None:
+ if LibraryPath == None or LibraryPath == "":
+ LibraryInstance[LibraryClassName] = None
+ continue
+ LibraryModule = ModuleDatabase[LibraryPath]
+ LibraryInstance[LibraryClassName] = LibraryModule
+ LibraryConsumerList.append(LibraryModule)
+ EdkLogger.verbose("\t" + LibraryClassName + " : " + str(LibraryModule))
+ elif LibraryPath == None or LibraryPath == "":
+ continue
+ else:
+ LibraryModule = LibraryInstance[LibraryClassName]
+
+ if LibraryModule.ConstructorList != [] and LibraryModule not in Constructor:
+ Constructor.append(LibraryModule)
+
+ if LibraryModule not in ConsumedByList:
+ ConsumedByList[LibraryModule] = []
+ if M != Module:
+ if M in ConsumedByList[LibraryModule]:
+ continue
+ ConsumedByList[LibraryModule].append(M)
+ #
+ # Kahn's algorithm follows.
+ # Initialize the sorted output list to the empty set
+ #
+ SortedLibraryList = []
+ #
+ # Q <- Set of all nodes with no incoming edges
+ #
+ LibraryList = [] #LibraryInstance.values()
+ Q = []
+ for LibraryClassName in LibraryInstance:
+ M = LibraryInstance[LibraryClassName]
+ if M == None:
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "Library instance for library class [%s] is not found" % LibraryClassName,
+ ExtraData="\t%s [%s]" % (str(Module), Arch))
+ LibraryList.append(M)
+ #
+ # check if there're duplicate library classes
+ #
+ for Lc in M.LibraryClass:
+ if Lc.SupModList != None and ModuleType not in Lc.SupModList:
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "Module type [%s] is not supported by library instance [%s]" % (ModuleType, str(M)),
+ ExtraData="\t%s" % str(Module))
+
+ if Lc.LibraryClass in LibraryInstance and str(M) != str(LibraryInstance[Lc.LibraryClass]):
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR,
+ "More than one library instance found for library class [%s] in module [%s]" % (Lc.LibraryClass, Module),
+ ExtraData="\t%s\n\t%s" % (LibraryInstance[Lc.LibraryClass], str(M))
+ )
+ if ConsumedByList[M] == []:
+ Q.insert(0, M)
+ #
+ # while Q is not empty do
+ #
+ while Q != []:
+ #
+ # remove node from Q
+ #
+ Node = Q.pop()
+ #
+ # output Node
+ #
+ SortedLibraryList.append(Node)
+ #
+ # for each node Item with an edge e from Node to Item do
+ #
+ for Item in LibraryList:
+ if Node not in ConsumedByList[Item]:
+ continue
+ #
+ # remove edge e from the graph
+ #
+ ConsumedByList[Item].remove(Node)
+ #
+ # If Item has no other incoming edges then
+ #
+ if ConsumedByList[Item] == []:
+ #
+ # insert Item into Q
+ #
+ Q.insert(0, Item)
+
+ # If the sort stalls (cycle), break one edge between a constructor
+ # library and a constructor-less consumer and resume
+ EdgeRemoved = True
+ while Q == [] and EdgeRemoved:
+ EdgeRemoved = False
+ #
+ # for each node Item with a Constructor
+ #
+ for Item in LibraryList:
+ if Item in Constructor:
+ #
+ # for each Node without a constructor with an edge e from Item to Node
+ #
+ for Node in ConsumedByList[Item]:
+ if Node not in Constructor:
+ #
+ # remove edge e from the graph
+ #
+ ConsumedByList[Item].remove(Node)
+ EdgeRemoved = True
+ if ConsumedByList[Item] == []:
+ #
+ # insert Item into Q
+ #
+ Q.insert(0, Item)
+ break
+ if Q != []:
+ break
+
+ #
+ # if any remaining node Item in the graph has a constructor and an incoming edge, then the graph has a cycle
+ #
+ for Item in LibraryList:
+ if ConsumedByList[Item] != [] and Item in Constructor and len(Constructor) > 1:
+ ErrorMessage = 'Library [%s] with constructors has a cycle' % str(Item)
+ EdkLogger.error("AutoGen", AUTOGEN_ERROR, ErrorMessage,
+ "\tconsumed by " + "\n\tconsumed by ".join([str(L) for L in ConsumedByList[Item]]))
+ if Item not in SortedLibraryList:
+ SortedLibraryList.append(Item)
+
+ #
+ # Build the list of constructor and destructor names
+ # The DAG Topo sort produces the destructor order, so the list of constructors must be generated in the reverse order
+ #
+ SortedLibraryList.reverse()
+ Module.LibraryClasses = sdict()
+ for L in SortedLibraryList:
+ for Lc in L.LibraryClass:
+ Module.LibraryClasses[Lc.LibraryClass, ModuleType] = str(L)
+ #
+ # Merge PCDs from library instance
+ #
+ for Key in L.Pcds:
+ if Key not in Module.Pcds:
+ LibPcd = L.Pcds[Key]
+ Module.Pcds[Key] = self.FindPcd(Arch, str(Module), LibPcd.TokenCName, LibPcd.TokenSpaceGuidCName, LibPcd.Type)
+ #
+ # Merge GUIDs from library instance
+ #
+ for CName in L.Guids:
+ if CName not in Module.Guids:
+ Module.Guids.append(CName)
+ #
+ # Merge Protocols from library instance
+ #
+ for CName in L.Protocols:
+ if CName not in Module.Protocols:
+ Module.Protocols.append(CName)
+ #
+ # Merge Ppis from library instance
+ #
+ for CName in L.Ppis:
+ if CName not in Module.Ppis:
+ Module.Ppis.append(CName)
+
+ ## GenBuildDatabase
+ #
+ # Generate build database for all arches
+ #
+ # @param PcdsSet: Pcd list for override from Fdf parse result
+ # @param InfList: Inf list for override from Fdf parse result
+ #
+ def GenBuildDatabase(self, PcdsSet = {}, InfList = []):
+ # Top-level driver: packages first, then platforms, then modules —
+ # GenPlatformDatabase resolves PCDs against the package database, so
+ # the order matters. Afterwards resolve library link order and report
+ # PCDs that were used but never defined in the DSC.
+ # @param PcdsSet: Pcd overrides from the Fdf parse result (read-only here)
+ # @param InfList: Extra Inf files from the Fdf parse result (read-only here)
+ #
+ # Add additional inf file defined in Fdf file
+ #
+ for InfFile in InfList:
+ self.AddToInfDatabase(NormPath(InfFile))
+
+ #
+ # Generate PlatformDatabase, PackageDatabase and ModuleDatabase
+ #
+ self.GenPackageDatabase()
+ self.GenPlatformDatabase(PcdsSet)
+ self.GenModuleDatabase(InfList)
+
+ # Parsing is done; release the backing database connection
+ self.Db.Close()
+
+ #
+ # Update Libraries Of Platform
+ #
+ self.UpdateLibrariesOfPlatform(InfList)
+
+ #
+ # Output used Pcds not found in DSC file
+ #
+ self.ShowUnFoundPcds()
+
+ ## ShowUnFoundPcds()
+ #
+ # If there is any pcd used but not defined in DSC
+ # Print warning message on screen and output a list of pcds
+ #
def ShowUnFoundPcds(self):
    """Emit a verbose-level warning listing every PCD that was used by some
    module but never defined in the DSC file, along with the DEC file whose
    declaration supplied the default value."""
    if self.UnFoundPcdInDsc == {}:
        return
    MessageParts = ['**** WARNING ****\n']
    MessageParts.append('The following Pcds were not defined in the DSC file: %s\n' % self.DscFileName)
    MessageParts.append('The default values were obtained from the DEC file that declares the PCD and the PCD default value\n')
    for (Guid, Name, Type, Arch) in self.UnFoundPcdInDsc:
        Dec = self.UnFoundPcdInDsc[(Guid, Name, Type, Arch)]
        Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
        if (Name, Guid, Type) in Pcds:
            Pcd = Pcds[(Name, Guid, Type)]
            MessageParts.append('%s.%s: Defined in file %s, PcdItemType is Pcds%s, DefaultValue is %s\n' % (Guid, Name, Dec, Pcd.Type, Pcd.DefaultValue))
    EdkLogger.verbose(''.join(MessageParts))
+
+ ## Create a full path with workspace dir
+ #
+ # Convert Filename with workspace dir to create a full path
+ #
+ # @param Filename: The filename need to be added workspace dir
+ #
+ # @retval string Full path
+ #
+ def WorkspaceFile(self, Filename):
+ # Delegates to the module-level WorkspaceFile() helper, supplying this
+ # workspace's root directory to build the full path
+ return WorkspaceFile(self.WorkspaceDir, Filename)
+
+ ## Update LibraryClass of Module
+ #
+ # If a module of a platform has its own override libraryclass but the libraryclass not defined in the module
+ # Add this libraryclass to the module
+ #
+ # @param InfFileName: InfFileName specified in platform
+ # @param LibraryClass: LibraryClass specified in platform
+ # @param Arch: Supported Arch
+ # @param InstanceFilePath: InstanceFilePath specified in platform
+ #
def UpdateLibraryClassOfModule(self, InfFileName, LibraryClass, Arch, InstanceFilePath):
    """Propagate a platform-level library class override into the INF database.

    Ensures (1) the instance INF itself declares LibraryClass for Arch, and
    (2) the consuming module lists LibraryClass in its [LibraryClasses] for
    Arch, adding entries where missing.

    @param InfFileName: consuming module INF specified in the platform
    @param LibraryClass: library class name specified in the platform
    @param Arch: supported arch
    @param InstanceFilePath: instance INF specified in the platform
    """
    #
    # Part 1: make sure the instance INF declares this library class
    #
    LibraryModule = self.InfDatabase[InstanceFilePath].Module
    Declared = False
    for Lib in LibraryModule.Header[Arch].LibraryClass:
        if Lib.LibraryClass == LibraryClass:
            Declared = True
            break
    if not Declared:
        NewLib = LibraryClassClass()
        NewLib.LibraryClass = LibraryClass
        NewLib.SupModuleList = DataType.SUP_MODULE_LIST # LibraryModule.Header[Arch].ModuleType.split()
        LibraryModule.Header[Arch].LibraryClass.append(NewLib)

    #
    # Part 2: make sure the consuming module lists the class for this arch
    #
    Module = self.InfDatabase[InfFileName].Module
    for Lib in Module.LibraryClasses:
        if Lib.LibraryClass == LibraryClass:
            if Arch not in Lib.SupArchList:
                Lib.SupArchList.append(Arch)
            return
    NewLib = LibraryClassClass()
    NewLib.LibraryClass = LibraryClass
    NewLib.SupArchList = [Arch]
    Module.LibraryClasses.append(NewLib)
+
+ ## Add Inf file to InfDatabase
+ #
+ # Create a Inf instance for input inf file and add it to InfDatabase
+ #
+ # @param InfFileName: The InfFileName need to be added to database
+ #
def AddToInfDatabase(self, InfFileName):
    """Parse the given workspace-relative INF file and cache the resulting
    Inf object in self.InfDatabase; raise a build error if it is missing."""
    File = self.WorkspaceFile(InfFileName)
    if not (os.path.exists(File) and os.path.isfile(File)):
        EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData=File)
    elif InfFileName not in self.InfDatabase:
        self.InfDatabase[InfFileName] = Inf(File, False, True, self.WorkspaceDir, self.Db, self.SupArchList)
+
+ ## Add Dec file to DecDatabase
+ #
+ # Create a Dec instance for input dec file and add it to DecDatabase
+ #
+ # @param DecFileName: The DecFileName need to be added to database
+ #
def AddToDecDatabase(self, DecFileName):
    """Parse the given workspace-relative DEC file and cache the resulting
    Dec object in self.DecDatabase; raise a build error if it is missing."""
    File = self.WorkspaceFile(DecFileName)
    if not (os.path.exists(File) and os.path.isfile(File)):
        EdkLogger.error("AutoGen", FILE_NOT_FOUND, ExtraData=File)
    elif DecFileName not in self.DecDatabase:
        self.DecDatabase[DecFileName] = Dec(File, False, True, self.WorkspaceDir, self.Db, self.SupArchList)
+
+ ## Search LibraryClass Instance for Module
+ #
+ # Search PlatformBuildDatabase to find LibraryClass Instance for Module
+ # Return the instance if found
+ #
+ # @param Lib: Input value for Library Class Name
+ # @param Arch: Supportted Arch
+ # @param ModuleType: Supportted ModuleType
+ # @param ModuleName: Input value for Module Name
+ #
+ # @retval string Found LibraryClass Instance file path
+ #
def FindLibraryClassInstanceOfModule(self, Lib, Arch, ModuleType, ModuleName):
    """Find the library-class instance for a module.

    A <LibraryClasses> override inside the module's <Components> entry wins;
    otherwise fall back to the platform-scope [LibraryClasses] lookup.

    @retval string  instance INF path, or whatever the fallback returns
    """
    for Dsc in self.DscDatabase.keys():
        Platform = self.DscDatabase[Dsc].Platform
        for Module in Platform.Modules.ModuleList:
            if Arch not in Module.SupArchList or Module.FilePath != ModuleName:
                continue
            for LibraryClass in Module.LibraryClasses.LibraryList:
                if LibraryClass.Name == Lib:
                    return LibraryClass.FilePath
    # Not overridden per-component: use the platform-wide definition
    return self.FindLibraryClassInstanceOfLibrary(Lib, Arch, ModuleType)
+
+ ## Search LibraryClass Instance for Library
+ #
+ # Search PlatformBuildDatabase to find LibraryClass Instance for Library
+ # Return the instance if found
+ #
+ # @param Lib: Input value for Library Class Name
+ # @param Arch: Supportted Arch
+ # @param Type: Supportted Library Usage Type
+ #
+ # @retval string Found LibraryClass Instance file path
+ # @retval None Not Found
+ #
def FindLibraryClassInstanceOfLibrary(self, Lib, Arch, Type):
    """Find the platform-scope instance for a library class.

    Searches each platform's LibraryClasses for an exact (Lib, Type) match,
    then for the wildcard (Lib, '') entry.

    @param Lib:  library class name
    @param Arch: supported arch
    @param Type: supported module type

    @retval string  instance INF path when found
    @retval None    not found in any platform
    """
    # Fix: the original also bound self.DscDatabase[Dsc].Platform to an
    # unused local; the loop only needs the platform build database
    for Dsc in self.DscDatabase.keys():
        LibraryClasses = self.Build[Arch].PlatformDatabase[Dsc].LibraryClasses
        if (Lib, Type) in LibraryClasses:
            return LibraryClasses[(Lib, Type)]
        if (Lib, '') in LibraryClasses:
            return LibraryClasses[(Lib, '')]
    return None
+
+ ## Find BuildOptions
+ #
+ # Search DscDatabase to find component definition of ModuleName
+ # Override BuildOption if it is defined in component
+ #
+ # @param Arch: Supportted Arch
+ # @param ModuleName: The module which has buildoption definition in component of platform
+ # @param BuildOptions: The set of all buildopitons
+ #
def FindBuildOptions(self, Arch, ModuleName, BuildOptions):
    """Override BuildOptions with any <BuildOptions> defined for this module
    inside a platform's <Components> section.

    @param Arch:         supported arch
    @param ModuleName:   module INF path to look up in the platforms
    @param BuildOptions: dict keyed by (ToolChainFamily, ToolChain), mutated
                         in place
    """
    for DscObject in self.DscDatabase.values():
        for Module in DscObject.Platform.Modules.ModuleList:
            if Arch not in Module.SupArchList or Module.FilePath != ModuleName:
                continue
            # Component-level options replace the module's own entries
            for BuildOption in Module.ModuleSaBuildOption.BuildOptionList:
                BuildOptions[(BuildOption.ToolChainFamily, BuildOption.ToolChain)] = BuildOption.Option
+
+ ## Find Pcd
+ #
+ # Search platform database, package database, module database and PcdsSet from Fdf
+ # Return found Pcd
+ #
+ # @param Arch: Supportted Arch
+ # @param ModuleName: The module which has pcd definition in component of platform
+ # @param Name: Name of Pcd
+ # @param Guid: Guid of Pcd
+ # @param Type: Type of Pcd
+ #
+ # @retval PcdClassObject An instance for PcdClassObject with all members filled
+ #
+ def FindPcd(self, Arch, ModuleName, Name, Guid, Type):
+ # Resolve one PCD for a module by merging, in order: the platform (DSC)
+ # database, the module's <Components> overrides in the DSC, and finally
+ # the declaring package (DEC). A PCD missing from every DEC is an error;
+ # one missing from the DSC is recorded in self.UnFoundPcdInDsc.
+ # @retval PcdClassObject with all members filled
+ NewType = ''
+ DatumType = ''
+ Value = ''
+ Token = ''
+ MaxDatumSize = ''
+ SkuInfoList = {}
+ IsOverrided = False
+ IsFoundInDsc = False
+ IsFoundInDec = False
+ FoundInDecFile = ''
+
+ #
+ # Step 1: platform (DSC) database — type, datum type, token, default
+ # value and SKUs; Dynamic*/DynamicEx* subtypes collapse to their group
+ #
+ OwnerPlatform = ''
+ for Dsc in self.Build[Arch].PlatformDatabase.keys():
+ Pcds = self.Build[Arch].PlatformDatabase[Dsc].Pcds
+ if (Name, Guid) in Pcds:
+ OwnerPlatform = Dsc
+ Pcd = Pcds[(Name, Guid)]
+ if Pcd.Type != '' and Pcd.Type != None:
+ NewType = Pcd.Type
+ if NewType in DataType.PCD_DYNAMIC_TYPE_LIST:
+ NewType = DataType.TAB_PCDS_DYNAMIC
+ elif NewType in DataType.PCD_DYNAMIC_EX_TYPE_LIST:
+ NewType = DataType.TAB_PCDS_DYNAMIC_EX
+ else:
+ NewType = Type
+
+ # Module and platform must agree on the PCD item type
+ if Type != '' and Type != NewType:
+ ErrorMsg = "PCD %s.%s is declared as [%s] in module\n\t%s\n\n"\
+ " But it's used as [%s] in platform\n\t%s"\
+ % (Guid, Name, Type, ModuleName, NewType, OwnerPlatform)
+ EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)
+
+
+ if Pcd.DatumType != '' and Pcd.DatumType != None:
+ DatumType = Pcd.DatumType
+ if Pcd.TokenValue != '' and Pcd.TokenValue != None:
+ Token = Pcd.TokenValue
+ if Pcd.DefaultValue != '' and Pcd.DefaultValue != None:
+ Value = Pcd.DefaultValue
+ if Pcd.MaxDatumSize != '' and Pcd.MaxDatumSize != None:
+ MaxDatumSize = Pcd.MaxDatumSize
+ SkuInfoList = Pcd.SkuInfoList
+
+ IsOverrided = True
+ IsFoundInDsc = True
+ break
+
+ #
+ # Step 2: per-module <Components> overrides in the DSC — value and
+ # max datum size only
+ #
+ for Dsc in self.DscDatabase.keys():
+ for Module in self.DscDatabase[Dsc].Platform.Modules.ModuleList:
+ if Arch in Module.SupArchList:
+ if Module.FilePath == ModuleName:
+ for Pcd in Module.PcdBuildDefinitions:
+ if (Name, Guid) == (Pcd.CName, Pcd.TokenSpaceGuidCName):
+ if Pcd.DefaultValue != '':
+ Value = Pcd.DefaultValue
+ if Pcd.MaxDatumSize != '':
+ MaxDatumSize = Pcd.MaxDatumSize
+
+ IsFoundInDsc = True
+ IsOverrided = True
+ break
+
+ #
+ # Step 3: package (DEC) declaration — mandatory; when the type is still
+ # unknown every item type is tried in turn
+ #
+ Pcd = None
+ if NewType == '':
+ if Type != '':
+ PcdTypeList = [Type]
+ else:
+ PcdTypeList = ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]
+
+ for Dec in self.Build[Arch].PackageDatabase.keys():
+ Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
+ for PcdType in PcdTypeList:
+ if (Name, Guid, PcdType) in Pcds:
+ Pcd = Pcds[(Name, Guid, PcdType)]
+ NewType = PcdType
+ IsOverrided = True
+ IsFoundInDec = True
+ FoundInDecFile = Dec
+ break
+ else:
+ continue
+ break
+ else:
+ for Dec in self.Build[Arch].PackageDatabase.keys():
+ Pcds = self.Build[Arch].PackageDatabase[Dec].Pcds
+ if (Name, Guid, NewType) in Pcds:
+ Pcd = Pcds[(Name, Guid, NewType)]
+ IsOverrided = True
+ IsFoundInDec = True
+ FoundInDecFile = Dec
+ break
+
+ if not IsFoundInDec:
+ ErrorMsg = "Pcd '%s.%s [%s]' defined in module '%s' is not found in any package for Arch '%s'" % (Guid, Name, NewType, ModuleName, Arch)
+ EdkLogger.error("AutoGen", PARSER_ERROR, ErrorMsg)
+
+ #
+ # Not found in any platform and fdf: fall back to the DEC default and
+ # synthesize a DEFAULT SKU for dynamic PCDs; remember it for the
+ # ShowUnFoundPcds() warning
+ #
+ if not IsFoundInDsc:
+ Value = Pcd.DefaultValue
+ if NewType.startswith("Dynamic") and SkuInfoList == {}:
+ SkuIds = self.Build[Arch].PlatformDatabase.values()[0].SkuIds
+ SkuInfoList['DEFAULT'] = SkuInfoClass(SkuIdName='DEFAULT', SkuId=SkuIds['DEFAULT'], DefaultValue=Value)
+ self.UnFoundPcdInDsc[(Guid, Name, NewType, Arch)] = FoundInDecFile
+ #elif Type != '' and NewType.startswith("Dynamic"):
+ # NewType = Pcd.Type
+ DatumType = Pcd.DatumType
+ if Token in [None, '']:
+ Token = Pcd.TokenValue
+ # Derive a MaxDatumSize for VOID* PCDs that did not specify one
+ if DatumType == "VOID*" and MaxDatumSize in ['', None]:
+ EdkLogger.verbose("No MaxDatumSize specified for PCD %s.%s in module [%s]" % (Guid, Name, ModuleName))
+ # NOTE(review): assumes Value is non-empty here; an empty default
+ # would raise IndexError — confirm upstream guarantees a value
+ if Value[0] == 'L':
+ MaxDatumSize = str(len(Value) * 2)
+ elif Value[0] == '{':
+ MaxDatumSize = str(len(Value.split(',')))
+ else:
+ MaxDatumSize = str(len(Value))
+
+ return PcdClassObject(Name, Guid, NewType, DatumType, Value, Token, MaxDatumSize, SkuInfoList, IsOverrided)
+
+ ## Find Supportted Module List Of LibraryClass
+ #
+ # Search in InfDatabase, find the supmodulelist of the libraryclass
+ #
+ # @param LibraryClass: LibraryClass name for search
+ # @param OverridedLibraryClassList: A list of all LibraryClass
+ # @param Arch: Supportted Arch
+ #
+ # @retval list SupModuleList
+ #
def FindSupModuleListOfLibraryClass(self, LibraryClass, OverridedLibraryClassList, Arch):
    """Compute the module types a library class actually supports.

    Starts from the class's own SupModuleList; when that is the "all module
    types" list, remove every module type that some other same-named entry
    in OverridedLibraryClassList claims for this arch.

    @param LibraryClass:              the library class being resolved
    @param OverridedLibraryClassList: all library class entries of the platform
    @param Arch:                      supported arch

    @retval list  the remaining supported module types
    """
    Name = LibraryClass.Name
    # Fix: the original also bound LibraryClass.FilePath to an unused local
    SupModuleList = copy.copy(LibraryClass.SupModuleList)

    # Only the "supports everything" case needs pruning
    if SupModuleList != DataType.SUP_MODULE_LIST:
        return SupModuleList

    EdkLogger.debug(EdkLogger.DEBUG_3, "\tLibraryClass %s supports all module types" % Name)
    for Item in OverridedLibraryClassList:
        # Only same-named entries can narrow this class
        if Item.Name != Name:
            continue
        # An entry that itself supports everything is this entry; skip it
        if Item.SupModuleList == DataType.SUP_MODULE_LIST:
            continue
        # Both entries must cover this arch before any pruning happens
        if Arch in LibraryClass.SupArchList and Arch in Item.SupArchList:
            for ModuleType in Item.SupModuleList:
                EdkLogger.debug(EdkLogger.DEBUG_3, "\tLibraryClass %s has specific defined module types" % Name)
                if ModuleType in SupModuleList:
                    SupModuleList.remove(ModuleType)

    return SupModuleList
+
+ ## Find Module inf Platform
+ #
+ # Check if the module is defined in <Component> of <Platform>
+ #
+ # @param Inf: Inf file (Module) need to be searched
+ # @param Arch: Supported Arch
+ # @param InfList: A list for all Inf file
+ #
+ # @retval True Module Found
+ # @retval False Module Not Found
+ #
def IsModuleDefinedInPlatform(self, Inf, Arch, InfList):
    """Return True when the given INF is referenced by any platform for Arch:
    as a platform [LibraryClasses] instance, a <Components> module, a
    per-component library override, or a [Libraries] entry.

    @param Inf:     INF file path to search for
    @param Arch:    supported arch
    @param InfList: list of all INF files (unused; kept for the interface)
    """
    for Dsc in self.DscDatabase.values():
        P = Dsc.Platform
        if any(Inf == L.FilePath and Arch in L.SupArchList for L in P.LibraryClasses.LibraryList):
            return True
        for Module in P.Modules.ModuleList:
            if Inf == Module.FilePath and Arch in Module.SupArchList:
                return True
            # Per-component library overrides match regardless of arch
            if any(Inf == Item.FilePath for Item in Module.LibraryClasses.LibraryList):
                return True
        if any(Inf == L.FilePath and Arch in L.SupArchList for L in P.Libraries.LibraryList):
            return True

    return False
+
+ ## Show all content of the workspacebuild
+ #
+ # Print each item of the workspacebuild with (Key = Value) pair
+ #
+ def ShowWorkspaceBuild(self):
+ # Debug dump: print the raw parse databases and then, per arch, every
+ # generated platform/package/module build object as "Key = Value" lines.
+ # Python 2 print statements — consistent with the rest of this file.
+ print self.DscDatabase
+ print self.InfDatabase
+ print self.DecDatabase
+ print 'SupArchList', self.SupArchList
+ print 'BuildTarget', self.BuildTarget
+ print 'SkuId', self.SkuId
+
+ for Arch in self.SupArchList:
+ print Arch
+ print 'Platform'
+ for Platform in self.Build[Arch].PlatformDatabase.keys():
+ P = self.Build[Arch].PlatformDatabase[Platform]
+ print 'DescFilePath = ', P.DescFilePath
+ print 'PlatformName = ', P.PlatformName
+ print 'Guid = ', P.Guid
+ print 'Version = ', P.Version
+ print 'OutputDirectory = ', P.OutputDirectory
+ print 'FlashDefinition = ', P.FlashDefinition
+ print 'SkuIds = ', P.SkuIds
+ print 'Modules = ', P.Modules
+ print 'LibraryClasses = ', P.LibraryClasses
+ print 'Pcds = ', P.Pcds
+ for item in P.Pcds.keys():
+ print P.Pcds[item]
+ print 'BuildOptions = ', P.BuildOptions
+ print ''
+ # End of Platform
+
+ print 'package'
+ for Package in self.Build[Arch].PackageDatabase.keys():
+ P = self.Build[Arch].PackageDatabase[Package]
+ print 'DescFilePath = ', P.DescFilePath
+ print 'PackageName = ', P.PackageName
+ print 'Guid = ', P.Guid
+ print 'Version = ', P.Version
+ print 'Protocols = ', P.Protocols
+ print 'Ppis = ', P.Ppis
+ print 'Guids = ', P.Guids
+ print 'Includes = ', P.Includes
+ print 'LibraryClasses = ', P.LibraryClasses
+ print 'Pcds = ', P.Pcds
+ for item in P.Pcds.keys():
+ print P.Pcds[item]
+ print ''
+ # End of Package
+
+ print 'module'
+ for Module in self.Build[Arch].ModuleDatabase.keys():
+ P = self.Build[Arch].ModuleDatabase[Module]
+ print 'DescFilePath = ', P.DescFilePath
+ print 'BaseName = ', P.BaseName
+ print 'ModuleType = ', P.ModuleType
+ print 'Guid = ', P.Guid
+ print 'Version = ', P.Version
+ print 'CustomMakefile = ', P.CustomMakefile
+ print 'Specification = ', P.Specification
+ print 'Shadow = ', P.Shadow
+ print 'PcdIsDriver = ', P.PcdIsDriver
+ for Lib in P.LibraryClass:
+ print 'LibraryClassDefinition = ', Lib.LibraryClass, 'SupModList = ', Lib.SupModList
+ print 'ModuleEntryPointList = ', P.ModuleEntryPointList
+ print 'ModuleUnloadImageList = ', P.ModuleUnloadImageList
+ print 'ConstructorList = ', P.ConstructorList
+ print 'DestructorList = ', P.DestructorList
+
+ print 'Binaries = '
+ for item in P.Binaries:
+ print item.BinaryFile, item.FeatureFlag, item.SupArchList
+ print 'Sources = '
+ for item in P.Sources:
+ print item.SourceFile
+ print 'LibraryClasses = ', P.LibraryClasses
+ print 'Protocols = ', P.Protocols
+ print 'Ppis = ', P.Ppis
+ print 'Guids = ', P.Guids
+ print 'Includes = ', P.Includes
+ print 'Packages = ', P.Packages
+ print 'Pcds = ', P.Pcds
+ for item in P.Pcds.keys():
+ print P.Pcds[item]
+ print 'BuildOptions = ', P.BuildOptions
+ print 'Depex = ', P.Depex
+ print ''
+ # End of Module
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ # Self-test driver: requires a WORKSPACE environment variable pointing at
+ # an EDK II tree containing Nt32Pkg/Nt32Pkg.dsc (and a Test.Inf) — the
+ # paths below are hard-coded for that workspace
+ print 'Start!', time.strftime('%H:%M:%S', time.localtime())
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+
+ W = os.getenv('WORKSPACE')
+ Ewb = WorkspaceBuild('Nt32Pkg/Nt32Pkg.dsc', W)
+ # Exercise the Fdf-override path: one PCD value override plus one extra INF
+ Ewb.GenBuildDatabase({('PcdDevicePathSupportDevicePathFromText', 'gEfiMdeModulePkgTokenSpaceGuid') : 'KKKKKKKKKKKKKKKKKKKKK'}, ['Test.Inf'])
+ print 'Done!', time.strftime('%H:%M:%S', time.localtime())
+ Ewb.ShowWorkspaceBuild()
diff --git a/BaseTools/Source/Python/Common/EdkLogger.py b/BaseTools/Source/Python/Common/EdkLogger.py
new file mode 100644
index 0000000000..ce4cfa14bb
--- /dev/null
+++ b/BaseTools/Source/Python/Common/EdkLogger.py
@@ -0,0 +1,269 @@
+## @file
+# This file implements the log mechanism for Python tools.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+## Import modules
+import sys, os, logging
+import traceback
+from BuildToolError import *
+
## Log level constants
# DEBUG_0..DEBUG_9 occupy 1..10; VERBOSE/INFO/WARN/QUIET/ERROR follow the
# conventional "higher value == higher severity" ordering of the logging module.
DEBUG_0 = 1
DEBUG_1 = 2
DEBUG_2 = 3
DEBUG_3 = 4
DEBUG_4 = 5
DEBUG_5 = 6
DEBUG_6 = 7
DEBUG_7 = 8
DEBUG_8 = 9
DEBUG_9 = 10
VERBOSE = 15
INFO = 20
WARN = 30
QUIET = 40
ERROR = 50

# Default for error()'s RaiseError parameter: raise FatalError after logging
IsRaiseError = True

# Tool name (basename of the running script)
_ToolName = os.path.basename(sys.argv[0])

# For validation purpose
_LogLevels = [DEBUG_0, DEBUG_1, DEBUG_2, DEBUG_3, DEBUG_4, DEBUG_5, DEBUG_6, DEBUG_7, DEBUG_8, DEBUG_9, VERBOSE, WARN, INFO, ERROR, QUIET]

# For DEBUG level (All DEBUG_0~9 are applicable)
_DebugLogger = logging.getLogger("tool_debug")
_DebugFormatter = logging.Formatter("[%(asctime)s.%(msecs)d]: %(message)s", datefmt="%H:%M:%S")

# For VERBOSE, INFO, WARN level
_InfoLogger = logging.getLogger("tool_info")
_InfoFormatter = logging.Formatter("%(message)s")

# For ERROR level
_ErrorLogger = logging.getLogger("tool_error")
_ErrorFormatter = logging.Formatter("%(message)s")

# String templates for ERROR/WARN/DEBUG log message
_ErrorMessageTemplate = '\n\n%(tool)s...\n%(file)s(%(line)s): error %(errorcode)04X: %(msg)s\n\t%(extra)s'
_ErrorMessageTemplateWithoutFile = '\n\n%(tool)s...\n : error %(errorcode)04X: %(msg)s\n\t%(extra)s'
_WarningMessageTemplate = '%(tool)s...\n%(file)s(%(line)s): warning: %(msg)s'
_WarningMessageTemplateWithoutFile = '%(tool)s: : warning: %(msg)s'
_DebugMessageTemplate = '%(file)s(%(line)s): debug: \n %(msg)s'

#
# Flag used to take WARN as ERROR.
# By default, only ERROR message will break the tools execution.
#
_WarningAsError = False
+
## Log debug message
#
# @param Level DEBUG level (DEBUG0~9)
# @param Message Debug information
# @param ExtraData More information associated with "Message"
#
def debug(Level, Message, ExtraData=None):
    # Nothing to do when the level is filtered out or beyond DEBUG_9
    if _DebugLogger.level > Level or Level > DEBUG_9:
        return

    # Identify the calling method for the "file(line)" header of the message
    CallerFile, CallerLine = traceback.extract_stack()[-2][0:2]
    LogText = _DebugMessageTemplate % {
        "file" : CallerFile,
        "line" : CallerLine,
        "msg"  : Message,
    }
    if ExtraData != None:
        LogText += "\n %s" % ExtraData

    _DebugLogger.log(Level, LogText)
+
## Log verbose message
#
# VERBOSE (15) sits between DEBUG and INFO; messages go through the info
# logger so they share the plain "%(message)s" format.
#
# @param Message Verbose information
#
def verbose(Message):
    return _InfoLogger.log(VERBOSE, Message)
+
## Log warning message
#
# Warning messages are those which might be wrong but won't fail the tool.
#
# @param ToolName The name of the tool. If not given, the name of caller
#                   method will be used.
# @param Message Warning information
# @param File The name of file which caused the warning.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
#
def warn(ToolName, Message, File=None, Line=None, ExtraData=None):
    # Skip entirely if WARN is filtered out by the current log level
    if _InfoLogger.level > WARN:
        return

    # if no tool name given, use caller's source file name as tool name
    if ToolName is None or ToolName == "":
        ToolName = os.path.basename(traceback.extract_stack()[-2][0])

    # "..." marks an unknown line number in the message template
    if Line is None:
        Line = "..."
    else:
        Line = "%d" % Line

    TemplateDict = {
        "tool" : ToolName,
        "file" : File,
        "line" : Line,
        "msg"  : Message,
    }

    if File is not None:
        LogText = _WarningMessageTemplate % TemplateDict
    else:
        LogText = _WarningMessageTemplateWithoutFile % TemplateDict

    if ExtraData is not None:
        LogText += "\n %s" % ExtraData

    _InfoLogger.log(WARN, LogText)

    # Raise an exception if warnings are being escalated to errors
    if _WarningAsError:
        raise FatalError(WARNING_AS_ERROR)
+
## Log INFO message
# Direct alias of the info logger's info() method (plain message format)
info = _InfoLogger.info
+
## Log ERROR message
#
# Once an error messages is logged, the tool's execution will be broken by raising
# an execption. If you don't want to break the execution later, you can give
# "RaiseError" with "False" value.
#
# @param ToolName The name of the tool. If not given, the name of this
#                   process (_ToolName) will be used.
# @param ErrorCode The error code
# @param Message Warning information
# @param File The name of file which caused the error.
# @param Line The line number in the "File" which caused the warning.
# @param ExtraData More information associated with "Message"
# @param RaiseError Raise an exception to break the tool's executuion if
#                   it's True. This is the default behavior.
#                   (Note: the default is bound to IsRaiseError at import time.)
#
def error(ToolName, ErrorCode, Message=None, File=None, Line=None, ExtraData=None, RaiseError=IsRaiseError):
    # "..." marks an unknown line number in the message template
    if Line is None:
        Line = "..."
    else:
        Line = "%d" % Line

    # Fall back to the canonical message registered for this error code
    if Message is None:
        if ErrorCode in gErrorMessage:
            Message = gErrorMessage[ErrorCode]
        else:
            Message = gErrorMessage[UNKNOWN_ERROR]

    if ExtraData is None:
        ExtraData = ""

    TemplateDict = {
        # Honor an explicitly supplied tool name (the original ignored it);
        # default to this process's name.
        "tool" : ToolName if ToolName else _ToolName,
        "file" : File,
        "line" : Line,
        "errorcode" : ErrorCode,
        "msg" : Message,
        "extra" : ExtraData
    }

    if File is not None:
        LogText = _ErrorMessageTemplate % TemplateDict
    else:
        LogText = _ErrorMessageTemplateWithoutFile % TemplateDict

    _ErrorLogger.log(ERROR, LogText)
    if RaiseError:
        raise FatalError(ErrorCode)
+
# Log information which should be always put out
# (routed through the error logger so it survives the QUIET filter level)
quiet = _ErrorLogger.error
+
## Initialize log system
#
# Each message family (debug / info / error) logs through its own Logger so
# that format and destination (stdout vs stderr) can differ per family.
#
def Initialize():
    LoggerSetup = (
        # For DEBUG level (All DEBUG_0~9 are applicable)
        (_DebugLogger, _DebugFormatter, sys.stdout),
        # For VERBOSE, INFO, WARN level
        (_InfoLogger,  _InfoFormatter,  sys.stdout),
        # For ERROR level
        (_ErrorLogger, _ErrorFormatter, sys.stderr),
    )
    for Logger, Formatter, Stream in LoggerSetup:
        Logger.setLevel(INFO)
        Channel = logging.StreamHandler(Stream)
        Channel.setFormatter(Formatter)
        Logger.addHandler(Channel)
+
## Set log level
#
# An unsupported level is reported through info() and replaced with INFO.
# All three loggers are kept in sync so every message family is filtered alike.
#
# @param Level One of log level in _LogLevel
def SetLevel(Level):
    if Level not in _LogLevels:
        info("Not supported log level (%d). Use default level instead." % Level)
        Level = INFO
    _DebugLogger.setLevel(Level)
    _InfoLogger.setLevel(Level)
    _ErrorLogger.setLevel(Level)
+
## Get current log level
#
# @retval The effective level of the info logger (all three loggers are kept
#         in sync by SetLevel, so this represents the tool-wide level)
def GetLevel():
    return _InfoLogger.getEffectiveLevel()
+
## Raise up warning as error
#
# After this call, warn() raises FatalError(WARNING_AS_ERROR) after logging.
def SetWarningAsError():
    global _WarningAsError
    _WarningAsError = True
+
## Specify a file to store the log message as well as put on console
#
# Any existing file at LogFile is removed first; one FileHandler per logger
# is attached so all three message families are captured in the same file.
#
# @param LogFile The file path used to store the log message
#
def SetLogFile(LogFile):
    # Start from a fresh log file
    if os.path.exists(LogFile):
        os.remove(LogFile)

    for Logger, Formatter in ((_DebugLogger, _DebugFormatter),
                              (_InfoLogger, _InfoFormatter),
                              (_ErrorLogger, _ErrorFormatter)):
        Handler = logging.FileHandler(LogFile)
        Handler.setFormatter(Formatter)
        Logger.addHandler(Handler)
+
# No standalone behavior; this module is only useful when imported
if __name__ == '__main__':
    pass
diff --git a/BaseTools/Source/Python/Common/FdfClassObject.py b/BaseTools/Source/Python/Common/FdfClassObject.py
new file mode 100644
index 0000000000..e0df1c20c2
--- /dev/null
+++ b/BaseTools/Source/Python/Common/FdfClassObject.py
@@ -0,0 +1,116 @@
+## @file
+# This file is used to define each component of FDF file
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from FdfParserLite import FdfParser
+from Table.TableFdf import TableFdf
+from CommonDataClass.DataClass import MODEL_FILE_FDF, MODEL_PCD, MODEL_META_DATA_COMPONENT
+from String import NormPath
+
## FdfObject
#
# This class defined basic Fdf object which is used by inheriting
#
# @param object: Inherited from object class
#
class FdfObject(object):
    def __init__(self):
        # Pass the instance to the base initializer; the original called
        # object.__init__() without 'self', which raises TypeError.
        object.__init__(self)
+
## Fdf
#
# This class defined the structure used in Fdf object
#
# @param FdfObject: Inherited from FdfObject class
# @param Filename: Input value for Ffilename of Fdf file, default is None
# @param IsToDatabase: Whether parse results are persisted to the database
# @param WorkspaceDir: Input value for current workspace directory, default is None
# @param Database: Database object providing Cur/TblFile/TblFdf handles
#
class Fdf(FdfObject):
    def __init__(self, Filename = None, IsToDatabase = False, WorkspaceDir = None, Database = None):
        self.WorkspaceDir = WorkspaceDir
        self.IsToDatabase = IsToDatabase

        # Database handles used when persisting parse results
        self.Cur = Database.Cur
        self.TblFile = Database.TblFile
        self.TblFdf = Database.TblFdf
        self.FileID = -1
        # Map of normalized file path -> file table ID (one insert per file)
        self.FileList = {}

        #
        # Load Fdf file if filename is not None
        #
        if Filename is not None:
            self.LoadFdfFile(Filename)

    #
    # Insert a FDF file record into database (at most once per file)
    #
    def InsertFile(self, Filename):
        Filename = NormPath(Filename)
        if Filename not in self.FileList:
            self.FileList[Filename] = self.TblFile.InsertFile(Filename, MODEL_FILE_FDF)

        return self.FileList[Filename]

    ## Load Fdf file
    #
    # Load the file if it exists
    #
    # @param Filename: Input value for filename of Fdf file
    #
    def LoadFdfFile(self, Filename):
        #
        # Parse Fdf file
        #
        Filename = NormPath(Filename)
        # Local renamed from 'Fdf' to avoid shadowing this class's name
        Parser = FdfParser(Filename)
        Parser.ParseFile()

        #
        # Insert inf file and pcd information
        #
        if self.IsToDatabase:
            (Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled) = \
            (0, '', '', '', 'COMMON', -1, -1, -1, -1, -1, -1, 0)
            # One PCD record per (name, token-space-guid) key found in the FDF
            for Key in Parser.Profile.PcdDict:
                Model = MODEL_PCD
                Value1 = ''
                Value2 = ".".join((Key[1], Key[0]))
                FileName = Parser.Profile.PcdFileLineDict[Key][0]
                StartLine = Parser.Profile.PcdFileLineDict[Key][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
            # One component record per INF referenced by the FDF
            for Index in range(0, len(Parser.Profile.InfList)):
                Model = MODEL_META_DATA_COMPONENT
                Value1 = Parser.Profile.InfList[Index]
                Value2 = ''
                FileName = Parser.Profile.InfFileLineList[Index][0]
                StartLine = Parser.Profile.InfFileLineList[Index][1]
                BelongsToFile = self.InsertFile(FileName)
                self.TblFdf.Insert(Model, Value1, Value2, Value3, Arch, BelongsToItem, BelongsToFile, StartLine, StartColumn, EndLine, EndColumn, Enabled)
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # No standalone behavior; this module is only useful when imported
    pass
diff --git a/BaseTools/Source/Python/Common/FdfParserLite.py b/BaseTools/Source/Python/Common/FdfParserLite.py
new file mode 100644
index 0000000000..59006fa5c5
--- /dev/null
+++ b/BaseTools/Source/Python/Common/FdfParserLite.py
@@ -0,0 +1,3603 @@
+## @file
+# parse FDF file
+#
+# Copyright (c) 2007, Intel Corporation
+#
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import re
+import os
+
+import CommonDataClass.FdfClass
+
##define T_CHAR_SPACE ' '
##define T_CHAR_NULL '\0'
##define T_CHAR_CR '\r'
##define T_CHAR_TAB '\t'
##define T_CHAR_LF '\n'
##define T_CHAR_SLASH '/'
##define T_CHAR_BACKSLASH '\\'
##define T_CHAR_DOUBLE_QUOTE '\"'
##define T_CHAR_SINGLE_QUOTE '\''
##define T_CHAR_STAR '*'
##define T_CHAR_HASH '#'

# Single-character token constants, mirroring the C-style defines above
(T_CHAR_SPACE, T_CHAR_NULL, T_CHAR_CR, T_CHAR_TAB, T_CHAR_LF, T_CHAR_SLASH, \
T_CHAR_BACKSLASH, T_CHAR_DOUBLE_QUOTE, T_CHAR_SINGLE_QUOTE, T_CHAR_STAR, T_CHAR_HASH) = \
(' ', '\0', '\r', '\t', '\n', '/', '\\', '\"', '\'', '*', '#')

# NOTE(review): "SEPERATOR" is a misspelling of "SEPARATOR", kept as-is for compatibility
SEPERATOR_TUPLE = ('=', '|', ',', '{', '}')

# Profiles of files pulled in via !include, in processing order
IncludeFileList = []
# Macro passed from command line, which has greatest priority and can NOT be overridden by those in FDF
InputMacroDict = {}
# All Macro values when parsing file, not replace existing Macro
AllMacroList = []
+
## Map a line number in the preprocessed buffer back to its origin
#
# After !include expansion the in-memory line numbers no longer match the
# on-disk files; walk the recorded include profiles to translate back.
#
# @param File The name of the file owning the preprocessed buffer
# @param Line 1-based line number within the preprocessed buffer
# @retval Tuple (original file name, original 1-based line number)
#
def GetRealFileLine (File, Line):

    InsertedLines = 0
    for Profile in IncludeFileList:
        # Line falls inside this include file's inserted region
        if Line >= Profile.InsertStartLineNumber and Line < Profile.InsertStartLineNumber + Profile.InsertAdjust + len(Profile.FileLinesList):
            return (Profile.FileName, Line - Profile.InsertStartLineNumber + 1)
        # Line is past this include; discount the lines it inserted
        if Line >= Profile.InsertStartLineNumber + Profile.InsertAdjust + len(Profile.FileLinesList):
            InsertedLines += Profile.InsertAdjust + len(Profile.FileLinesList)

    return (File, Line - InsertedLines)
+
## The exception class that used to report error messages when parsing FDF
#
# Currently the "ToolName" is set to be "FDF Parser".
#
class Warning (Exception):
    ## The constructor
    #
    # @param self The object pointer
    # @param Str The message to record
    # @param File The FDF name
    # @param Line The Line number that error occurs
    #
    def __init__(self, Str, File = None, Line = None):

        # Translate the buffer position back to the pre-!include location
        FileLineTuple = GetRealFileLine(File, Line)
        self.FileName = FileLineTuple[0]
        self.LineNumber = FileLineTuple[1]
        # Callers pass messages ending in "At Line "; append the real number
        self.message = Str + str(self.LineNumber)
        self.ToolName = 'FDF Parser'
+
## Record of one macro definition encountered while parsing
#
# Captures where a DEFINE statement appeared; the name/value pair is filled
# in by the parser after construction.
#
class MacroProfile:
    ## The constructor
    #
    # @param self The object pointer
    # @param FileName The file in which the macro was defined
    # @param Line The line at which the macro was defined
    #
    def __init__(self, FileName, Line):
        # Definition site
        self.FileName = FileName
        self.DefinedAtLine = Line
        # Populated later by PreprocessConditionalStatement
        self.MacroName = None
        self.MacroValue = None
+
## The Include file content class that used to record file data when parsing include file
#
# May raise Exception when opening file.
#
class IncludeFileProfile :
    ## The constructor
    #
    # Reads the whole include file into memory up front.
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileName = FileName
        self.FileLinesList = []
        try:
            # Binary, unbuffered read; lines keep their original terminators
            fsock = open(FileName, "rb", 0)
            try:
                self.FileLinesList = fsock.readlines()
            finally:
                fsock.close()

        except IOError:
            raise Warning("Error when opening file %s" % FileName)

        # Filled in by PreprocessIncludeFile() once the insertion point is known:
        # InsertStartLineNumber is the buffer line where this file was spliced in;
        # InsertAdjust counts extra lines added when splitting the !include line.
        self.InsertStartLineNumber = None
        self.InsertAdjust = 0
+
## The FDF content class that used to record file data when parsing FDF
#
# May raise Exception when opening file.
#
class FileProfile :
    ## The constructor
    #
    # Reads the whole FDF file into memory; parse results are accumulated
    # into the dict/list attributes below.
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.FileLinesList = []
        try:
            # Binary, unbuffered read; lines keep their original terminators
            fsock = open(FileName, "rb", 0)
            try:
                self.FileLinesList = fsock.readlines()
            finally:
                fsock.close()

        except IOError:
            raise Warning("Error when opening file %s" % FileName)

        # PCD settings: key -> value; locations kept in PcdFileLineDict
        self.PcdDict = {}
        # INF files referenced by the FDF; locations kept in InfFileLineList
        self.InfList = []

        self.PcdFileLineDict = {}
        self.InfFileLineList = []

        # Parsed [FD.*], [FV.*] and [Capsule.*] sections
        self.FdDict = {}
        self.FvDict = {}
        self.CapsuleList = []
# self.VtfList = []
# self.RuleDict = {}
+
## The syntax parser for FDF
#
# PreprocessFile method should be called prior to ParseFile
# CycleReferenceCheck method can detect cycles in FDF contents
#
# GetNext*** procedures mean these procedures will get next token first, then make judgement.
# Get*** procedures mean these procedures will make judgement on current token only.
#
class FdfParser(object):
    ## The constructor
    #
    # @param self The object pointer
    # @param FileName The file that to be parsed
    #
    def __init__(self, FileName):
        self.Profile = FileProfile(FileName)
        self.FileName = FileName
        # Buffer cursor: 1-based line number, 0-based offset within the line
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0
        self.CurrentFdName = None
        self.CurrentFvName = None
        self.__Token = ""
        self.__SkippedChars = ""

        # Regions [(start, end)] to blank out after conditional preprocessing
        self.__WipeOffArea = []
+
+ ## __IsWhiteSpace() method
+ #
+ # Whether char at current FileBufferPos is whitespace
+ #
+ # @param self The object pointer
+ # @param Char The char to test
+ # @retval True The char is a kind of white space
+ # @retval False The char is NOT a kind of white space
+ #
+ def __IsWhiteSpace(self, Char):
+ if Char in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_SPACE, T_CHAR_TAB, T_CHAR_LF):
+ return True
+ else:
+ return False
+
+ ## __SkipWhiteSpace() method
+ #
+ # Skip white spaces from current char, return number of chars skipped
+ #
+ # @param self The object pointer
+ # @retval Count The number of chars skipped
+ #
+ def __SkipWhiteSpace(self):
+ Count = 0
+ while not self.__EndOfFile():
+ Count += 1
+ if self.__CurrentChar() in (T_CHAR_NULL, T_CHAR_CR, T_CHAR_LF, T_CHAR_SPACE, T_CHAR_TAB):
+ self.__SkippedChars += str(self.__CurrentChar())
+ self.__GetOneChar()
+
+ else:
+ Count = Count - 1
+ return Count
+
    ## __EndOfFile() method
    #
    # Judge current buffer pos is at file end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at file end
    # @retval False Current File buffer position is NOT at file end
    #
    def __EndOfFile(self):
        NumberOfLines = len(self.Profile.FileLinesList)
        SizeOfLastLine = len(self.Profile.FileLinesList[-1])
        # EOF when on the last line at (or past) its final character,
        # or when the cursor has moved beyond the last line
        if self.CurrentLineNumber == NumberOfLines and self.CurrentOffsetWithinLine >= SizeOfLastLine - 1:
            return True
        elif self.CurrentLineNumber > NumberOfLines:
            return True
        else:
            return False

    ## __EndOfLine() method
    #
    # Judge current buffer pos is at line end
    #
    # @param self The object pointer
    # @retval True Current File buffer position is at line end
    # @retval False Current File buffer position is NOT at line end
    #
    def __EndOfLine(self):
        # Past the last line counts as end-of-line as well
        if self.CurrentLineNumber > len(self.Profile.FileLinesList):
            return True
        SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
        if self.CurrentOffsetWithinLine >= SizeOfCurrentLine:
            return True
        else:
            return False
+
    ## Rewind() method
    #
    # Reset file data buffer to the initial state
    #
    # @param self The object pointer
    #
    def Rewind(self):
        self.CurrentLineNumber = 1
        self.CurrentOffsetWithinLine = 0

    ## __UndoOneChar() method
    #
    # Go back one char in the file buffer
    #
    # @param self The object pointer
    # @retval True Successfully go back one char
    # @retval False Not able to go back one char as file beginning reached
    #
    def __UndoOneChar(self):

        if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
            return False
        elif self.CurrentOffsetWithinLine == 0:
            # Step back onto the last character of the previous line
            self.CurrentLineNumber -= 1
            self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
        else:
            self.CurrentOffsetWithinLine -= 1
        return True

    ## __GetOneChar() method
    #
    # Move forward one char in the file buffer
    #
    # @param self The object pointer
    #
    def __GetOneChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            # Wrap to the beginning of the next line
            self.CurrentLineNumber += 1
            self.CurrentOffsetWithinLine = 0
        else:
            self.CurrentOffsetWithinLine += 1

    ## __CurrentChar() method
    #
    # Get the char pointed to by the file buffer pointer
    #
    # @param self The object pointer
    # @retval Char Current char
    #
    def __CurrentChar(self):
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine]

    ## __NextChar() method
    #
    # Get the one char pass the char pointed to by the file buffer pointer
    #
    # NOTE(review): assumes a following line exists when at end of line —
    # callers are expected to have checked __EndOfFile() first.
    #
    # @param self The object pointer
    # @retval Char Next char
    #
    def __NextChar(self):
        if self.CurrentOffsetWithinLine == len(self.Profile.FileLinesList[self.CurrentLineNumber - 1]) - 1:
            return self.Profile.FileLinesList[self.CurrentLineNumber][0]
        else:
            return self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine + 1]

    ## __SetCurrentCharValue() method
    #
    # Modify the value of current char
    #
    # Only valid after __StringToList() has turned each line into a list of
    # characters; plain strings do not support item assignment.
    #
    # @param self The object pointer
    # @param Value The new value of current char
    #
    def __SetCurrentCharValue(self, Value):
        self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine] = Value

    ## __CurrentLine() method
    #
    # Get the list that contains current line contents
    #
    # @param self The object pointer
    # @retval List current line contents
    #
    def __CurrentLine(self):
        return self.Profile.FileLinesList[self.CurrentLineNumber - 1]

    ## Convert each buffered line into a mutable list of characters
    #
    # A trailing space is appended to the last line as a sentinel so the
    # cursor can always inspect one more character.
    #
    def __StringToList(self):
        self.Profile.FileLinesList = [list(s) for s in self.Profile.FileLinesList]
        self.Profile.FileLinesList[-1].append(' ')
+
    ## Expand $(MACRO) references in a string
    #
    # Command-line macros (InputMacroDict) take precedence; otherwise the
    # latest DEFINE recorded in AllMacroList for this file at or before
    # 'Line' is used. Undefined macros are left in place unchanged.
    # Raises Warning when a '$(' has no closing ')'.
    #
    # @param self The object pointer
    # @param Str The string to expand
    # @param File File name used to scope DEFINE lookups
    # @param Line Line number used to scope DEFINE lookups
    # @retval The string with all resolvable macros replaced
    #
    def __ReplaceMacros(self, Str, File, Line):
        MacroEnd = 0
        while Str.find('$(', MacroEnd) >= 0:
            MacroStart = Str.find('$(', MacroEnd)
            if Str.find(')', MacroStart) > 0:
                MacroEnd = Str.find(')', MacroStart)
                Name = Str[MacroStart + 2 : MacroEnd]
                Value = None
                if Name in InputMacroDict:
                    Value = InputMacroDict[Name]

                else:
                    # Last matching definition wins (list is in define order)
                    for Profile in AllMacroList:
                        if Profile.FileName == File and Profile.MacroName == Name and Profile.DefinedAtLine <= Line:
                            Value = Profile.MacroValue

                if Value != None:
                    Str = Str.replace('$(' + Name + ')', Value)
                    # Continue scanning right after the substituted text
                    MacroEnd = MacroStart + len(Value)

            else:
                raise Warning("Macro not complete At Line ", self.FileName, self.CurrentLineNumber)
        return Str

    ## Blank out the characters between two buffer positions (inclusive)
    #
    # Positions are 0-based (line index, offset) pairs. Line terminators
    # ('\r'/'\n') are preserved so line numbering stays intact.
    #
    # @param self The object pointer
    # @param StartPos First position to overwrite
    # @param EndPos Last position to overwrite
    # @param Value Replacement character, a space by default
    #
    def __ReplaceFragment(self, StartPos, EndPos, Value = ' '):
        # Single-line fragment: overwrite the inclusive offset range
        if StartPos[0] == EndPos[0]:
            Offset = StartPos[1]
            while Offset <= EndPos[1]:
                self.Profile.FileLinesList[StartPos[0]][Offset] = Value
                Offset += 1
            return

        # First line: from StartPos's offset up to the line terminator
        Offset = StartPos[1]
        while self.Profile.FileLinesList[StartPos[0]][Offset] not in ('\r', '\n'):
            self.Profile.FileLinesList[StartPos[0]][Offset] = Value
            Offset += 1

        # Lines up to (not including) EndPos's line: blank entirely
        Line = StartPos[0]
        while Line < EndPos[0]:
            Offset = 0
            while self.Profile.FileLinesList[Line][Offset] not in ('\r', '\n'):
                self.Profile.FileLinesList[Line][Offset] = Value
                Offset += 1
            Line += 1

        # Last line: from the start of the line through EndPos's offset
        Offset = 0
        while Offset <= EndPos[1]:
            self.Profile.FileLinesList[EndPos[0]][Offset] = Value
            Offset += 1
+
+
+ ## PreprocessFile() method
+ #
+ # Preprocess file contents, replace comments with spaces.
+ # In the end, rewind the file buffer pointer to the beginning
+ # BUGBUG: No !include statement processing contained in this procedure
+ # !include statement should be expanded at the same FileLinesList[CurrentLineNumber - 1]
+ #
+ # @param self The object pointer
+ #
+ def PreprocessFile(self):
+
+ self.Rewind()
+ InComment = False
+ DoubleSlashComment = False
+ HashComment = False
+ # HashComment in quoted string " " is ignored.
+ InString = False
+
+ while not self.__EndOfFile():
+
+ if self.__CurrentChar() == T_CHAR_DOUBLE_QUOTE and not InComment:
+ InString = not InString
+ # meet new line, then no longer in a comment for // and '#'
+ if self.__CurrentChar() == T_CHAR_LF:
+ self.CurrentLineNumber += 1
+ self.CurrentOffsetWithinLine = 0
+ if InComment and DoubleSlashComment:
+ InComment = False
+ DoubleSlashComment = False
+ if InComment and HashComment:
+ InComment = False
+ HashComment = False
+ # check for */ comment end
+ elif InComment and not DoubleSlashComment and not HashComment and self.__CurrentChar() == T_CHAR_STAR and self.__NextChar() == T_CHAR_SLASH:
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = False
+ # set comments to spaces
+ elif InComment:
+ self.__SetCurrentCharValue(T_CHAR_SPACE)
+ self.__GetOneChar()
+ # check for // comment
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_SLASH and not self.__EndOfLine():
+ InComment = True
+ DoubleSlashComment = True
+ # check for '#' comment
+ elif self.__CurrentChar() == T_CHAR_HASH and not self.__EndOfLine() and not InString:
+ InComment = True
+ HashComment = True
+ # check for /* comment start
+ elif self.__CurrentChar() == T_CHAR_SLASH and self.__NextChar() == T_CHAR_STAR:
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ self.__SetCurrentCharValue( T_CHAR_SPACE)
+ self.__GetOneChar()
+ InComment = True
+ else:
+ self.__GetOneChar()
+
+ # restore from ListOfList to ListOfString
+ self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]
+ self.Rewind()
+
    ## PreprocessIncludeFile() method
    #
    # Preprocess file contents, replace !include statements with file contents.
    # In the end, rewind the file buffer pointer to the beginning
    #
    # @param self The object pointer
    #
    def PreprocessIncludeFile(self):

        while self.__GetNextToken():

            if self.__Token == '!include':
                IncludeLine = self.CurrentLineNumber
                IncludeOffset = self.CurrentOffsetWithinLine - len('!include')
                if not self.__GetNextToken():
                    raise Warning("expected include file name At Line ", self.FileName, self.CurrentLineNumber)
                IncFileName = self.__Token
                if not os.path.isabs(IncFileName):
                    if IncFileName.startswith('$(WORKSPACE)'):
                        # $(WORKSPACE)-relative include path
                        Str = IncFileName.replace('$(WORKSPACE)', os.environ.get('WORKSPACE'))
                        if os.path.exists(Str):
                            if not os.path.isabs(Str):
                                Str = os.path.abspath(Str)
                            IncFileName = Str
                    else:
                        # file is in the same dir with FDF file
                        FullFdf = self.FileName
                        if not os.path.isabs(self.FileName):
                            FullFdf = os.path.join(os.environ.get('WORKSPACE'), self.FileName)

                        IncFileName = os.path.join(os.path.dirname(FullFdf), IncFileName)

                if not os.path.exists(os.path.normpath(IncFileName)):
                    raise Warning("Include file not exists At Line ", self.FileName, self.CurrentLineNumber)

                IncFileProfile = IncludeFileProfile(os.path.normpath(IncFileName))

                CurrentLine = self.CurrentLineNumber
                CurrentOffset = self.CurrentOffsetWithinLine
                # list index of the insertion, note that line number is 'CurrentLine + 1'
                InsertAtLine = CurrentLine
                IncFileProfile.InsertStartLineNumber = InsertAtLine + 1
                # deal with remaining portions after "!include filename", if exists.
                if self.__GetNextToken():
                    if self.CurrentLineNumber == CurrentLine:
                        # Split the trailing text onto its own line so the
                        # include file can be spliced in cleanly
                        RemainingLine = self.__CurrentLine()[CurrentOffset:]
                        self.Profile.FileLinesList.insert(self.CurrentLineNumber, RemainingLine)
                        IncFileProfile.InsertAdjust += 1
                        self.CurrentLineNumber += 1
                        self.CurrentOffsetWithinLine = 0

                # Splice the include file's lines into the main buffer
                for Line in IncFileProfile.FileLinesList:
                    self.Profile.FileLinesList.insert(InsertAtLine, Line)
                    self.CurrentLineNumber += 1
                    InsertAtLine += 1

                # Record the profile so GetRealFileLine can translate back
                IncludeFileList.append(IncFileProfile)

                # comment out the processed include file statement
                TempList = list(self.Profile.FileLinesList[IncludeLine - 1])
                TempList.insert(IncludeOffset, '#')
                self.Profile.FileLinesList[IncludeLine - 1] = ''.join(TempList)

        self.Rewind()
+
    ## PreprocessConditionalStatement() method
    #
    # Preprocess file contents, resolving DEFINE / !if / !ifdef / !ifndef /
    # !elseif / !else / !endif statements. Regions belonging to untaken
    # branches (and the directives themselves) are recorded in __WipeOffArea
    # for later blanking. In the end, rewind the file buffer pointer to the
    # beginning.
    #
    # @param self The object pointer
    #
    def PreprocessConditionalStatement(self):
        # IfList is a stack of if branches with elements of list [Pos, CondSatisfied, BranchDetermined]
        IfList = []
        while self.__GetNextToken():
            if self.__Token == 'DEFINE':
                DefineLine = self.CurrentLineNumber - 1
                DefineOffset = self.CurrentOffsetWithinLine - len('DEFINE')
                if not self.__GetNextToken():
                    raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                Macro = self.__Token
                if not self.__IsToken( "="):
                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

                if not self.__GetNextToken():
                    raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)

                # __GetStringData strips surrounding quotes from __Token if any
                if self.__GetStringData():
                    pass
                Value = self.__Token
                # Command-line macros (InputMacroDict) may not be overridden
                if not Macro in InputMacroDict:
                    FileLineTuple = GetRealFileLine(self.FileName, DefineLine + 1)
                    MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
                    MacProfile.MacroName = Macro
                    MacProfile.MacroValue = Value
                    AllMacroList.append(MacProfile)
                # The DEFINE statement itself is removed from the buffer
                self.__WipeOffArea.append(((DefineLine, DefineOffset), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!ifdef', '!ifndef', '!if'):
                IfStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                IfList.append([IfStartPos, None, None])
                CondLabel = self.__Token

                if not self.__GetNextToken():
                    raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                MacroName = self.__Token
                # A leading '!' on the macro name negates the condition (only for !if)
                NotFlag = False
                if MacroName.startswith('!'):
                    NotFlag = True
                    MacroName = MacroName[1:]

                NotDefineFlag = False
                if CondLabel == '!ifndef':
                    NotDefineFlag = True
                if CondLabel == '!ifdef' or CondLabel == '!ifndef':
                    if NotFlag:
                        raise Warning("'NOT' operation not allowed for Macro name At Line ", self.FileName, self.CurrentLineNumber)

                if CondLabel == '!if':

                    if not self.__GetNextOp():
                        raise Warning("expected !endif At Line ", self.FileName, self.CurrentLineNumber)

                    if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                        Op = self.__Token
                        if not self.__GetNextToken():
                            raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
                        if self.__GetStringData():
                            pass
                        MacroValue = self.__Token
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    else:
                        # Bare "!if MACRO": treat the macro as a boolean;
                        # push the token back for normal re-parsing
                        self.CurrentOffsetWithinLine -= len(self.__Token)
                        ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                        if NotFlag:
                            ConditionSatisfied = not ConditionSatisfied
                        BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        # Wipe only the directive itself; the branch body stays
                        self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                else:
                    ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1)
                    if NotDefineFlag:
                        ConditionSatisfied = not ConditionSatisfied
                    BranchDetermined = ConditionSatisfied
                    IfList[-1] = [IfList[-1][0], ConditionSatisfied, BranchDetermined]
                    if ConditionSatisfied:
                        self.__WipeOffArea.append((IfStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

            elif self.__Token in ('!elseif', '!else'):
                ElseStartPos = (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len(self.__Token))
                if len(IfList) <= 0:
                    raise Warning("Missing !if statement At Line ", self.FileName, self.CurrentLineNumber)
                if IfList[-1][1]:
                    # Previous branch was taken: everything from here on is dead
                    IfList[-1] = [ElseStartPos, False, True]
                    self.__WipeOffArea.append((ElseStartPos, (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    # Previous branch untaken: wipe it and tentatively take this one
                    self.__WipeOffArea.append((IfList[-1][0], ElseStartPos))
                    IfList[-1] = [ElseStartPos, True, IfList[-1][2]]
                    if self.__Token == '!elseif':
                        if not self.__GetNextToken():
                            raise Warning("expected Macro name At Line ", self.FileName, self.CurrentLineNumber)
                        MacroName = self.__Token
                        NotFlag = False
                        if MacroName.startswith('!'):
                            NotFlag = True
                            MacroName = MacroName[1:]

                        if not self.__GetNextOp():
                            raise Warning("expected !endif At Line ", self.FileName, self.CurrentLineNumber)

                        if self.__Token in ('!=', '==', '>', '<', '>=', '<='):
                            Op = self.__Token
                            if not self.__GetNextToken():
                                raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
                            if self.__GetStringData():
                                pass
                            MacroValue = self.__Token
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, Op, MacroValue)
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        else:
                            self.CurrentOffsetWithinLine -= len(self.__Token)
                            ConditionSatisfied = self.__EvaluateConditional(MacroName, IfList[-1][0][0] + 1, None, 'Bool')
                            if NotFlag:
                                ConditionSatisfied = not ConditionSatisfied

                        IfList[-1] = [IfList[-1][0], ConditionSatisfied, IfList[-1][2]]

                    if IfList[-1][1]:
                        if IfList[-1][2]:
                            # An earlier branch already won; suppress this one
                            IfList[-1][1] = False
                        else:
                            IfList[-1][2] = True
                            self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))


            elif self.__Token == '!endif':
                # NOTE(review): a stray !endif with an empty IfList raises
                # IndexError here rather than a Warning like !elseif/!else do —
                # confirm whether upstream guards this case.
                if IfList[-1][1]:
                    # Branch taken: wipe only the !endif directive itself
                    self.__WipeOffArea.append(((self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - len('!endif')), (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))
                else:
                    # Branch untaken: wipe from the branch start through !endif
                    self.__WipeOffArea.append((IfList[-1][0], (self.CurrentLineNumber - 1, self.CurrentOffsetWithinLine - 1)))

                IfList.pop()


        if len(IfList) > 0:
            raise Warning("Missing !endif At Line ", self.FileName, self.CurrentLineNumber)
        self.Rewind()
+
+ def __EvaluateConditional(self, Name, Line, Op = None, Value = None):
+
+ FileLineTuple = GetRealFileLine(self.FileName, Line)
+ if Name in InputMacroDict:
+ MacroValue = InputMacroDict[Name]
+ if Op == None:
+ if Value == 'Bool' and MacroValue == None or MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(MacroValue) or (MacroValue != None and MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number At Line ", self.FileName, Line)
+
+ for Profile in AllMacroList:
+ if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
+ if Op == None:
+ if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
+ return False
+ return True
+ elif Op == '!=':
+ if Value != Profile.MacroValue:
+ return True
+ else:
+ return False
+ elif Op == '==':
+ if Value == Profile.MacroValue:
+ return True
+ else:
+ return False
+ else:
+ if (self.__IsHex(Value) or Value.isdigit()) and (self.__IsHex(Profile.MacroValue) or (Profile.MacroValue != None and Profile.MacroValue.isdigit())):
+ InputVal = long(Value, 0)
+ MacroVal = long(Profile.MacroValue, 0)
+ if Op == '>':
+ if MacroVal > InputVal:
+ return True
+ else:
+ return False
+ elif Op == '>=':
+ if MacroVal >= InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<':
+ if MacroVal < InputVal:
+ return True
+ else:
+ return False
+ elif Op == '<=':
+ if MacroVal <= InputVal:
+ return True
+ else:
+ return False
+ else:
+ return False
+ else:
+ raise Warning("Value %s is not a number At Line ", self.FileName, Line)
+
+ return False
+
+ ## __IsToken() method
+ #
+ # Check whether input string is found from current char position along
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsToken(self, String, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __IsKeyword() method
+ #
+ # Check whether input keyword is found from current char position along, whole word only!
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @param Keyword The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find string, file buffer pointer moved forward
+ # @retval False Not able to find string, file buffer pointer not changed
+ #
+ def __IsKeyword(self, KeyWord, IgnoreCase = False):
+ self.__SkipWhiteSpace()
+
+ # Only consider the same line, no multi-line token allowed
+ StartPos = self.CurrentOffsetWithinLine
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(KeyWord.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(KeyWord)
+ if index == 0:
+ followingChar = self.__CurrentLine()[self.CurrentOffsetWithinLine + len(KeyWord)]
+ if not str(followingChar).isspace() and followingChar not in SEPERATOR_TUPLE:
+ return False
+ self.CurrentOffsetWithinLine += len(KeyWord)
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+ return False
+
+ ## __GetNextWord() method
+ #
+ # Get next C name from file lines
+ # If found, the string value is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a C name string, file buffer pointer moved forward
+ # @retval False Not able to find a C name string, file buffer pointer not changed
+ #
+ def __GetNextWord(self):
+ self.__SkipWhiteSpace()
+ if self.__EndOfFile():
+ return False
+
+ TempChar = self.__CurrentChar()
+ StartPos = self.CurrentOffsetWithinLine
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') or TempChar == '_':
+ self.__GetOneChar()
+ while not self.__EndOfLine():
+ TempChar = self.__CurrentChar()
+ if (TempChar >= 'a' and TempChar <= 'z') or (TempChar >= 'A' and TempChar <= 'Z') \
+ or (TempChar >= '0' and TempChar <= '9') or TempChar == '_' or TempChar == '-':
+ self.__GetOneChar()
+
+ else:
+ break
+
+ self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
+ return True
+
+ return False
+
    ## __GetNextToken() method
    #
    # Get next token unit before a seperator
    # If found, the string value is put into self.__Token
    #
    # @param self The object pointer
    # @retval True Successfully find a token unit, file buffer pointer moved forward
    # @retval False Not able to find a token unit, file buffer pointer not changed
    #
    def __GetNextToken(self):
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        StartLine = self.CurrentLineNumber
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and TempChar not in SEPERATOR_TUPLE:
                self.__GetOneChar()
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                self.__GetOneChar()
                break
            else:
                break
#        else:
#            return False

        # If the scan advanced past the line the token started on (presumably
        # __GetOneChar steps to the next line at end-of-line - confirm), clip
        # the token at the end of its starting line so it never spans lines.
        EndPos = self.CurrentOffsetWithinLine
        if self.CurrentLineNumber != StartLine:
            EndPos = len(self.Profile.FileLinesList[StartLine-1])
        self.__Token = self.Profile.FileLinesList[StartLine-1][StartPos : EndPos]
        # Success only if at least one char was consumed.
        if StartPos != self.CurrentOffsetWithinLine:
            return True
        else:
            return False
+
    ## __GetNextOp() method
    #
    # Get the next whitespace-delimited word on the current line
    # If found, the string value is put into self.__Token
    #
    # @param self The object pointer
    # @retval True Successfully find a word, file buffer pointer moved forward
    # @retval False Not able to find a word
    #
    def __GetNextOp(self):
        # Skip leading spaces, if exist.
        self.__SkipWhiteSpace()
        if self.__EndOfFile():
            return False
        # Record the token start position, the position of the first non-space char.
        StartPos = self.CurrentOffsetWithinLine
        while not self.__EndOfLine():
            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space
            if not str(TempChar).isspace():
                self.__GetOneChar()
            else:
                break
        else:
            # NOTE(review): this is a while-else - it runs when the scan hits
            # end-of-line without a break, returning False even though chars
            # may already have been consumed. Presumably operands never sit at
            # the very end of a line here - confirm against callers.
            return False

        if StartPos != self.CurrentOffsetWithinLine:
            self.__Token = self.__CurrentLine()[StartPos : self.CurrentOffsetWithinLine]
            return True
        else:
            return False
+ ## __GetNextGuid() method
+ #
+ # Get next token unit before a seperator
+ # If found, the GUID string is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a registry format GUID, file buffer pointer moved forward
+ # @retval False Not able to find a registry format GUID, file buffer pointer not changed
+ #
+ def __GetNextGuid(self):
+
+ if not self.__GetNextToken():
+ return False
+ p = re.compile('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}')
+ if p.match(self.__Token) != None:
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
    ## __UndoToken() method
    #
    # Go back one token unit in file buffer
    #
    # @param self The object pointer
    #
    def __UndoToken(self):
        # Step back over the char just consumed, then back over any
        # intervening whitespace, to land on the last char of the previous
        # token.
        self.__UndoOneChar()
        while self.__CurrentChar().isspace():
            if not self.__UndoOneChar():
                # Reached the start of the buffer: re-advance one char and
                # give up.
                self.__GetOneChar()
                return


        StartPos = self.CurrentOffsetWithinLine
        CurrentLine = self.CurrentLineNumber
        # Walk backwards over the token's chars, never crossing the line
        # boundary (tokens are single-line).
        while CurrentLine == self.CurrentLineNumber:

            TempChar = self.__CurrentChar()
            # Try to find the end char that is not a space and not in seperator tuple.
            # That is, when we got a space or any char in the tuple, we got the end of token.
            if not str(TempChar).isspace() and not TempChar in SEPERATOR_TUPLE:
                if not self.__UndoOneChar():
                    # Hit the start of the buffer; stop at this char.
                    break
            # if we happen to meet a seperator as the first char, we must proceed to get it.
            # That is, we get a token that is a seperator char. nomally it is the boundary of other tokens.
            elif StartPos == self.CurrentOffsetWithinLine and TempChar in SEPERATOR_TUPLE:
                return
            else:
                break

        # Overshot by one char while scanning backwards; step forward to the
        # token's first char.
        self.__GetOneChar()
+
+ ## __HexDigit() method
+ #
+ # Whether char input is a Hex data bit
+ #
+ # @param self The object pointer
+ # @param TempChar The char to test
+ # @retval True The char is a Hex data bit
+ # @retval False The char is NOT a Hex data bit
+ #
+ def __HexDigit(self, TempChar):
+ if (TempChar >= 'a' and TempChar <= 'f') or (TempChar >= 'A' and TempChar <= 'F') \
+ or (TempChar >= '0' and TempChar <= '9'):
+ return True
+ else:
+ return False
+
+ def __IsHex(self, HexStr):
+ if not HexStr.upper().startswith("0X"):
+ return False
+ if len(self.__Token) <= 2:
+ return False
+ charList = [c for c in HexStr[2 : ] if not self.__HexDigit( c)]
+ if len(charList) == 0:
+ return True
+ else:
+ return False
+ ## __GetNextHexNumber() method
+ #
+ # Get next HEX data before a seperator
+ # If found, the HEX data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a HEX data, file buffer pointer moved forward
+ # @retval False Not able to find a HEX data, file buffer pointer not changed
+ #
+ def __GetNextHexNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__IsHex(self.__Token):
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextDecimalNumber() method
+ #
+ # Get next decimal data before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a decimal data, file buffer pointer moved forward
+ # @retval False Not able to find a decimal data, file buffer pointer not changed
+ #
+ def __GetNextDecimalNumber(self):
+ if not self.__GetNextToken():
+ return False
+ if self.__Token.isdigit():
+ return True
+ else:
+ self.__UndoToken()
+ return False
+
+ ## __GetNextPcdName() method
+ #
+ # Get next PCD token space C name and PCD C name pair before a seperator
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval Tuple PCD C name and PCD token space C name pair
+ #
+ def __GetNextPcdName(self):
+ if not self.__GetNextWord():
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+ pcdTokenSpaceCName = self.__Token
+
+ if not self.__IsToken( "."):
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextWord():
+ raise Warning("expected PcdTokenSpaceCName.PcdCName At Line ", self.FileName, self.CurrentLineNumber)
+ pcdCName = self.__Token
+
+ return (pcdCName, pcdTokenSpaceCName)
+
+ ## __GetStringData() method
+ #
+ # Get string contents quoted in ""
+ # If found, the decimal data is put into self.__Token
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a string data, file buffer pointer moved forward
+ # @retval False Not able to find a string data, file buffer pointer not changed
+ #
+ def __GetStringData(self):
+ if self.__Token.startswith("\"") or self.__Token.startswith("L\""):
+ self.__UndoToken()
+ self.__SkipToToken("\"")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\""):
+ raise Warning("Missing Quote \" for String At Line ", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \" for String At Line ", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\"')
+ return True
+
+ elif self.__Token.startswith("\'") or self.__Token.startswith("L\'"):
+ self.__UndoToken()
+ self.__SkipToToken("\'")
+ currentLineNumber = self.CurrentLineNumber
+
+ if not self.__SkipToToken("\'"):
+ raise Warning("Missing Quote \' for String At Line ", self.FileName, self.CurrentLineNumber)
+ if currentLineNumber != self.CurrentLineNumber:
+ raise Warning("Missing Quote \' for String At Line ", self.FileName, self.CurrentLineNumber)
+ self.__Token = self.__SkippedChars.rstrip('\'')
+ return True
+
+ else:
+ return False
+
+ ## __SkipToToken() method
+ #
+ # Search forward in file buffer for the string
+ # The skipped chars are put into self.__SkippedChars
+ #
+ # @param self The object pointer
+ # @param String The string to search
+ # @param IgnoreCase Indicate case sensitive/non-sensitive search, default is case sensitive
+ # @retval True Successfully find the string, file buffer pointer moved forward
+ # @retval False Not able to find the string, file buffer pointer not changed
+ #
+ def __SkipToToken(self, String, IgnoreCase = False):
+ StartPos = self.GetFileBufferPos()
+
+ self.__SkippedChars = ""
+ while not self.__EndOfFile():
+ index = -1
+ if IgnoreCase:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].upper().find(String.upper())
+ else:
+ index = self.__CurrentLine()[self.CurrentOffsetWithinLine : ].find(String)
+ if index == 0:
+ self.CurrentOffsetWithinLine += len(String)
+ self.__SkippedChars += String
+ return True
+ self.__SkippedChars += str(self.__CurrentChar())
+ self.__GetOneChar()
+
+ self.SetFileBufferPos( StartPos)
+ self.__SkippedChars = ""
+ return False
+
+ ## GetFileBufferPos() method
+ #
+ # Return the tuple of current line and offset within the line
+ #
+ # @param self The object pointer
+ # @retval Tuple Line number and offset pair
+ #
+ def GetFileBufferPos(self):
+ return (self.CurrentLineNumber, self.CurrentOffsetWithinLine)
+
+ ## SetFileBufferPos() method
+ #
+ # Restore the file buffer position
+ #
+ # @param self The object pointer
+ # @param Pos The new file buffer position
+ #
+ def SetFileBufferPos(self, Pos):
+ (self.CurrentLineNumber, self.CurrentOffsetWithinLine) = Pos
+
    ## ParseFile() method
    #
    # Parse the file profile buffer to extract fd, fv ... information
    # Exception will be raised if syntax error found
    #
    # @param self The object pointer
    #
    def ParseFile(self):

        try:
            # Pass 1: strip comments, then merge !include files into the buffer.
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessIncludeFile()
            # Pass 2: re-tokenize the merged buffer and evaluate conditional
            # directives, recording the false-branch regions in __WipeOffArea.
            self.__StringToList()
            self.PreprocessFile()
            self.PreprocessConditionalStatement()
            self.__StringToList()
            # Blank out text excluded by false conditional branches.
            for Pos in self.__WipeOffArea:
                self.__ReplaceFragment(Pos[0], Pos[1])
            self.Profile.FileLinesList = ["".join(list) for list in self.Profile.FileLinesList]

            # Collect [Defines] macros before macro substitution.
            while self.__GetDefines():
                pass

            # Substitute $(MACRO) references line by line.
            Index = 0
            while Index < len(self.Profile.FileLinesList):
                FileLineTuple = GetRealFileLine(self.FileName, Index + 1)
                self.Profile.FileLinesList[Index] = self.__ReplaceMacros(self.Profile.FileLinesList[Index], FileLineTuple[0], FileLineTuple[1])
                Index += 1

            # Parse the major sections in their required order.
            while self.__GetFd():
                pass

            while self.__GetFv():
                pass

            while self.__GetCapsule():
                pass

#            while self.__GetVtf():
#                pass
#
#            while self.__GetRule():
#                pass


        except Warning, X:
            self.__UndoToken()
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            # Enrich the warning with the offending token and its real
            # file/line position before re-raising.
            X.message += '\nGot Token: \"%s\" from File %s\n' % (self.__Token, FileLineTuple[0]) + \
                'Previous Token: \"%s\" At line: %d, Offset Within Line: %d\n' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :].rstrip('\n').rstrip('\r'), FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise
+
+ ## __GetDefines() method
+ #
+ # Get Defines section contents and store its data into AllMacrosList
+ #
+ # @param self The object pointer
+ # @retval True Successfully find a Defines
+ # @retval False Not able to find a Defines
+ #
+ def __GetDefines(self):
+
+ if not self.__GetNextToken():
+ return False
+
+ S = self.__Token.upper()
+ if S.startswith("[") and not S.startswith("[DEFINES"):
+ if not S.startswith("[FD.") and not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
+ and not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
+ raise Warning("Unknown section or section appear sequence error (The correct sequence should be [DEFINES], [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+ self.__UndoToken()
+ return False
+
+ self.__UndoToken()
+ if not self.__IsToken("[DEFINES", True):
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ #print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+ # % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+ raise Warning("expected [DEFINES", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "]"):
+ raise Warning("expected ']'", self.FileName, self.CurrentLineNumber)
+
+ while self.__GetNextWord():
+ Macro = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken() or self.__Token.startswith('['):
+ raise Warning("expected MACRO value", self.FileName, self.CurrentLineNumber)
+ Value = self.__Token
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ MacProfile = MacroProfile(FileLineTuple[0], FileLineTuple[1])
+ MacProfile.MacroName = Macro
+ MacProfile.MacroValue = Value
+ AllMacroList.append(MacProfile)
+
+ return False
+
    ## __GetFd() method
    #
    # Get FD section contents and store its data into FD dictionary of self.Profile
    #
    # @param self The object pointer
    # @retval True Successfully find a FD
    # @retval False Not able to find a FD
    #
    def __GetFd(self):

        if not self.__GetNextToken():
            return False

        # Peek at the next section header; if it is a later-section header,
        # push it back so that section's parser can consume it.
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[FD."):
            if not S.startswith("[FV.") and not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE."):
                raise Warning("Unknown section At Line ", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[FD.", True):
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [FD.] At Line ", self.FileName, self.CurrentLineNumber)

        # [FD.<UiName>] - FD names are stored upper-cased.
        FdName = self.__GetUiName()
        self.CurrentFdName = FdName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        FdObj = CommonDataClass.FdfClass.FDClassObject()
        FdObj.FdUiName = self.CurrentFdName
        self.Profile.FdDict[self.CurrentFdName] = FdObj
        Status = self.__GetCreateFile(FdObj)
        if not Status:
            raise Warning("FD name error At Line ", self.FileName, self.CurrentLineNumber)

        # Mandatory token statements (BaseAddress/Size/ErasePolarity/BlockSize).
        if not self.__GetTokenStatements(FdObj):
            return False

        # Optional DEFINE and SET statements.
        self.__GetDefineStatements(FdObj)

        self.__GetSetStatements(FdObj)

        # At least one region layout is required; consume all that follow.
        if not self.__GetRegionLayout(FdObj):
            raise Warning("expected region layout At Line ", self.FileName, self.CurrentLineNumber)

        while self.__GetRegionLayout(FdObj):
            pass
        return True
+
+ ## __GetUiName() method
+ #
+ # Return the UI name of a section
+ #
+ # @param self The object pointer
+ # @retval FdName UI name
+ #
+ def __GetUiName(self):
+ FdName = ""
+ if self.__GetNextWord():
+ FdName = self.__Token
+
+ return FdName
+
+ ## __GetCreateFile() method
+ #
+ # Return the output file name of object
+ #
+ # @param self The object pointer
+ # @param Obj object whose data will be stored in file
+ # @retval FdName UI name
+ #
+ def __GetCreateFile(self, Obj):
+
+ if self.__IsKeyword( "CREATE_FILE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected file name At Line ", self.FileName, self.CurrentLineNumber)
+
+ FileName = self.__Token
+ Obj.CreateFileName = FileName
+
+ return True
+
    ## __GetTokenStatements() method
    #
    # Get token statements
    #
    # @param self The object pointer
    # @param Obj for whom token statement is got
    # @retval True Successfully find a token statement
    # @retval False Not able to find a token statement
    #
    def __GetTokenStatements(self, Obj):
        # BaseAddress = <hex> [ | <TokenSpace>.<PcdName> ]  (mandatory)
        if not self.__IsKeyword( "BaseAddress"):
            raise Warning("BaseAddress missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex base address At Line ", self.FileName, self.CurrentLineNumber)

        Obj.BaseAddress = self.__Token

        # Optional PCD binding: record the value and its source location.
        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.BaseAddressPcd = pcdPair
            self.Profile.PcdDict[pcdPair] = long(Obj.BaseAddress, 0)
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple

        # Size = <hex> [ | <TokenSpace>.<PcdName> ]  (mandatory)
        if not self.__IsKeyword( "Size"):
            raise Warning("Size missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Hex size At Line ", self.FileName, self.CurrentLineNumber)


        Obj.Size = long(self.__Token, 0)

        if self.__IsToken( "|"):
            pcdPair = self.__GetNextPcdName()
            Obj.SizePcd = pcdPair
            self.Profile.PcdDict[pcdPair] = Obj.Size
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple

        # ErasePolarity = 1|0  (mandatory)
        if not self.__IsKeyword( "ErasePolarity"):
            raise Warning("ErasePolarity missing At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected Erase Polarity At Line ", self.FileName, self.CurrentLineNumber)

        if self.__Token != "1" and self.__Token != "0":
            raise Warning("expected 1 or 0 Erase Polarity At Line ", self.FileName, self.CurrentLineNumber)

        Obj.ErasePolarity = self.__Token

        # One or more BlockSize statements complete the header.
        Status = self.__GetBlockStatements(Obj)
        return Status
+
+ ## __GetAddressStatements() method
+ #
+ # Get address statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom address statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetAddressStatements(self, Obj):
+
+ if self.__IsKeyword("BsBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address At Line ", self.FileName, self.CurrentLineNumber)
+
+ BsAddress = long(self.__Token, 0)
+ Obj.BsBaseAddress = BsAddress
+
+ if self.__IsKeyword("RtBaseAddress"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected address At Line ", self.FileName, self.CurrentLineNumber)
+
+ RtAddress = long(self.__Token, 0)
+ Obj.RtBaseAddress = RtAddress
+
+ ## __GetBlockStatements() method
+ #
+ # Get block statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatements(self, Obj):
+
+ if not self.__GetBlockStatement(Obj):
+ raise Warning("expected block statement At Line ", self.FileName, self.CurrentLineNumber)
+
+ while self.__GetBlockStatement(Obj):
+ pass
+ return True
+
+ ## __GetBlockStatement() method
+ #
+ # Get block statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom block statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetBlockStatement(self, Obj):
+ if not self.__IsKeyword( "BlockSize"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber() and not self.__GetNextDecimalNumber():
+ raise Warning("expected Hex block size At Line ", self.FileName, self.CurrentLineNumber)
+
+ BlockSize = long(self.__Token, 0)
+ BlockSizePcd = None
+ if self.__IsToken( "|"):
+ PcdPair = self.__GetNextPcdName()
+ BlockSizePcd = PcdPair
+ self.Profile.PcdDict[PcdPair] = BlockSize
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[pcdPair] = FileLineTuple
+
+ BlockNumber = None
+ if self.__IsKeyword( "NumBlocks"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextDecimalNumber() and not self.__GetNextHexNumber():
+ raise Warning("expected block numbers At Line ", self.FileName, self.CurrentLineNumber)
+
+ BlockNumber = long(self.__Token, 0)
+
+ Obj.BlockSizeList.append((BlockSize, BlockNumber, BlockSizePcd))
+ return True
+
+ ## __GetDefineStatements() method
+ #
+ # Get define statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatements(self, Obj):
+ while self.__GetDefineStatement( Obj):
+ pass
+
+ ## __GetDefineStatement() method
+ #
+ # Get define statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom define statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetDefineStatement(self, Obj):
+ if self.__IsKeyword("DEFINE"):
+ self.__GetNextToken()
+ Macro = self.__Token
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ Macro = '$(' + Macro + ')'
+ Obj.DefineVarDict[Macro] = Value
+ return True
+
+ return False
+
+ ## __GetSetStatements() method
+ #
+ # Get set statements
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatements(self, Obj):
+ while self.__GetSetStatement(Obj):
+ pass
+
+ ## __GetSetStatement() method
+ #
+ # Get set statement
+ #
+ # @param self The object pointer
+ # @param Obj for whom set statement is got
+ # @retval True Successfully find
+ # @retval False Not able to find
+ #
+ def __GetSetStatement(self, Obj):
+ if self.__IsKeyword("SET"):
+ PcdPair = self.__GetNextPcdName()
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected value At Line ", self.FileName, self.CurrentLineNumber)
+
+ Value = self.__Token
+ if Value.startswith("{"):
+ # deal with value with {}
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Value += self.__SkippedChars
+
+ Obj.SetVarDict[PcdPair] = Value
+ self.Profile.PcdDict[PcdPair] = Value
+ FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+ self.Profile.PcdFileLineDict[PcdPair] = FileLineTuple
+ return True
+
+ return False
+
    ## __GetRegionLayout() method
    #
    # Get region layout for FD
    #
    # @param self The object pointer
    # @param Fd for whom region is got
    # @retval True Successfully find
    # @retval False Not able to find
    #
    def __GetRegionLayout(self, Fd):
        # A region starts with "<HexOffset> | <HexSize>".
        if not self.__GetNextHexNumber():
            return False

        RegionObj = CommonDataClass.FdfClass.RegionClassObject()
        RegionObj.Offset = long(self.__Token, 0)
        Fd.RegionList.append(RegionObj)

        if not self.__IsToken( "|"):
            raise Warning("expected '|' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextHexNumber():
            raise Warning("expected Region Size At Line ", self.FileName, self.CurrentLineNumber)
        RegionObj.Size = long(self.__Token, 0)

        # Everything after offset|size is optional.
        if not self.__GetNextWord():
            return True

        # A word that is not a region-type keyword must be the optional
        # "<TokenSpace>.<PcdOffset> [ | <TokenSpace>.<PcdSize> ]" binding.
        if not self.__Token in ("SET", "FV", "FILE", "DATA"):
            self.__UndoToken()
            RegionObj.PcdOffset = self.__GetNextPcdName()
            # The PCD records the region's absolute address (FD base + offset).
            self.Profile.PcdDict[RegionObj.PcdOffset] = RegionObj.Offset + long(Fd.BaseAddress, 0)
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.PcdFileLineDict[RegionObj.PcdOffset] = FileLineTuple
            if self.__IsToken( "|"):
                RegionObj.PcdSize = self.__GetNextPcdName()
                self.Profile.PcdDict[RegionObj.PcdSize] = RegionObj.Size
                FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
                self.Profile.PcdFileLineDict[RegionObj.PcdSize] = FileLineTuple

            if not self.__GetNextWord():
                return True

        # Optional SET statements precede the region data type.
        if self.__Token == "SET":
            self.__UndoToken()
            self.__GetSetStatements( RegionObj)
            if not self.__GetNextWord():
                return True

        # Dispatch on the region data type; DATA is the fallback.
        if self.__Token == "FV":
            self.__UndoToken()
            self.__GetRegionFvType( RegionObj)

        elif self.__Token == "FILE":
            self.__UndoToken()
            self.__GetRegionFileType( RegionObj)

        else:
            self.__UndoToken()
            self.__GetRegionDataType( RegionObj)

        return True
+
    ## __GetRegionFvType() method
    #
    #   Get region fv data for region
    #
    #   @param  self        The object pointer
    #   @param  RegionObj   for whom region data is got
    #
    def __GetRegionFvType(self, RegionObj):

        # First "FV = name" entry is mandatory once this method is entered.
        if not self.__IsKeyword( "FV"):
            raise Warning("expected Keyword 'FV' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)

        RegionObj.RegionType = "FV"
        RegionObj.RegionDataList.append(self.__Token)

        # A region may list several FVs; each repeats the full "FV = name" form.
        while self.__IsKeyword( "FV"):

            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)

            RegionObj.RegionDataList.append(self.__Token)
+
    ## __GetRegionFileType() method
    #
    #   Get region file data for region
    #
    #   @param  self        The object pointer
    #   @param  RegionObj   for whom region data is got
    #
    def __GetRegionFileType(self, RegionObj):

        # First "FILE = name" entry is mandatory once this method is entered.
        if not self.__IsKeyword( "FILE"):
            raise Warning("expected Keyword 'FILE' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected File name At Line ", self.FileName, self.CurrentLineNumber)

        RegionObj.RegionType = "FILE"
        RegionObj.RegionDataList.append( self.__Token)

        # A region may list several files; each repeats the "FILE = name" form.
        while self.__IsKeyword( "FILE"):

            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected FILE name At Line ", self.FileName, self.CurrentLineNumber)

            RegionObj.RegionDataList.append(self.__Token)
+
+ ## __GetRegionDataType() method
+ #
+ # Get region array data for region
+ #
+ # @param self The object pointer
+ # @param RegionObj for whom region data is got
+ #
+ def __GetRegionDataType(self, RegionObj):
+
+ if not self.__IsKeyword( "DATA"):
+ raise Warning("expected Region Data type At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ if not self.__GetNextHexNumber():
+ raise Warning("Invalid Hex number At Line ", self.FileName, self.CurrentLineNumber)
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionType = "DATA"
+ RegionObj.RegionDataList.append( DataString)
+
+ while self.__IsKeyword( "DATA"):
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextHexNumber():
+ raise Warning("expected Hex byte At Line ", self.FileName, self.CurrentLineNumber)
+
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = self.__Token
+ DataString += ","
+
+ while self.__IsToken(","):
+ self.__GetNextHexNumber()
+ if len(self.__Token) > 4:
+ raise Warning("Hex byte(must be 2 digits) too long At Line ", self.FileName, self.CurrentLineNumber)
+ DataString += self.__Token
+ DataString += ","
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DataString = DataString.rstrip(",")
+ RegionObj.RegionDataList.append( DataString)
+
    ## __GetFv() method
    #
    #   Get FV section contents and store its data into FV dictionary of self.Profile
    #
    #   @param  self        The object pointer
    #   @retval True        Successfully find a FV
    #   @retval False       Not able to find a FV
    #
    def __GetFv(self):
        if not self.__GetNextToken():
            return False

        # Enforce section ordering: after [FD.] sections, only [FV.],
        # [Capsule.], [VTF.] or [Rule.] may follow. Any other section header
        # here is an ordering error; a known-later header is pushed back so
        # its own parser can pick it up.
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[FV."):
            if not S.startswith("[CAPSULE.") \
                and not S.startswith("[VTF.") and not S.startswith("[RULE."):
                raise Warning("Unknown section or section appear sequence error \n(The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.]) At Line ", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[FV.", True):
            # Diagnostic dump of the unparsable text before raising.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("Unknown Keyword At Line ", self.FileName, self.CurrentLineNumber)

        # FV names are stored and compared upper-cased.
        FvName = self.__GetUiName()
        self.CurrentFvName = FvName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        FvObj = CommonDataClass.FdfClass.FvClassObject()
        FvObj.UiFvName = self.CurrentFvName
        self.Profile.FvDict[self.CurrentFvName] = FvObj

        Status = self.__GetCreateFile(FvObj)
        if not Status:
            raise Warning("FV name error At Line ", self.FileName, self.CurrentLineNumber)

        # Optional FV header statements, each parser is a no-op if its
        # keyword is absent.
        self.__GetDefineStatements(FvObj)

        self.__GetAddressStatements(FvObj)

        self.__GetBlockStatement(FvObj)

        self.__GetSetStatements(FvObj)

        self.__GetFvAlignment(FvObj)

        self.__GetFvAttributes(FvObj)

        self.__GetFvNameGuid(FvObj)

        # Called twice: an FV may carry up to two apriori sections (one PEI,
        # one DXE) — presumably deliberate; verify against the FDF spec.
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())
        self.__GetAprioriSection(FvObj, FvObj.DefineVarDict.copy())

        # Consume INF and FILE statements until a pass finds neither.
        while True:
            isInf = self.__GetInfStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            isFile = self.__GetFileStatement(FvObj, MacroDict = FvObj.DefineVarDict.copy())
            if not isInf and not isFile:
                break

        return True
+
+ ## __GetFvAlignment() method
+ #
+ # Get alignment for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom alignment is got
+ # @retval True Successfully find a alignment statement
+ # @retval False Not able to find a alignment statement
+ #
+ def __GetFvAlignment(self, Obj):
+
+ if not self.__IsKeyword( "FvAlignment"):
+ return False
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value At Line ", self.FileName, self.CurrentLineNumber)
+
+ if self.__Token.upper() not in ("1", "2", "4", "8", "16", "32", "64", "128", "256", "512", \
+ "1K", "2K", "4K", "8K", "16K", "32K", "64K", "128K", "256K", "512K", \
+ "1M", "2M", "4M", "8M", "16M", "32M", "64M", "128M", "256M", "512M", \
+ "1G", "2G"):
+ raise Warning("Unknown alignment value At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.FvAlignment = self.__Token
+ return True
+
    ## __GetFvAttributes() method
    #
    #   Get attributes for FV
    #
    #   @param  self        The object pointer
    #   @param  FvObj       for whom attribute is got
    #   @retval None
    #
    def __GetFvAttributes(self, FvObj):

        # Consume "NAME = TRUE|FALSE|1|0" pairs until a word that is not a
        # known FV attribute; that word is pushed back for the next parser.
        # Attribute names are matched case-sensitively; values are not.
        while self.__GetNextWord():
            name = self.__Token
            if name not in ("ERASE_POLARITY", "MEMORY_MAPPED", \
                           "STICKY_WRITE", "LOCK_CAP", "LOCK_STATUS", "WRITE_ENABLED_CAP", \
                           "WRITE_DISABLED_CAP", "WRITE_STATUS", "READ_ENABLED_CAP", \
                           "READ_DISABLED_CAP", "READ_STATUS", "READ_LOCK_CAP", \
                           "READ_LOCK_STATUS", "WRITE_LOCK_CAP", "WRITE_LOCK_STATUS", \
                           "WRITE_POLICY_RELIABLE"):
                self.__UndoToken()
                return

            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
                raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)

            # Value is stored as written (e.g. "TRUE" vs "1" is preserved).
            FvObj.FvAttributeDict[name] = self.__Token

        return
+
+ ## __GetFvNameGuid() method
+ #
+ # Get FV GUID for FV
+ #
+ # @param self The object pointer
+ # @param Obj for whom GUID is got
+ # @retval None
+ #
+ def __GetFvNameGuid(self, FvObj):
+
+ if not self.__IsKeyword( "FvNameGuid"):
+ return
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextGuid():
+ raise Warning("expected FV GUID value", self.FileName, self.CurrentLineNumber)
+
+ FvObj.FvNameGuid = self.__Token
+
+ return
+
+ ## __GetAprioriSection() method
+ #
+ # Get token statements
+ #
+ # @param self The object pointer
+ # @param FvObj for whom apriori is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find apriori statement
+ # @retval False Not able to find apriori statement
+ #
+ def __GetAprioriSection(self, FvObj, MacroDict = {}):
+
+ if not self.__IsKeyword( "APRIORI"):
+ return False
+
+ if not self.__IsKeyword("PEI") and not self.__IsKeyword("DXE"):
+ raise Warning("expected Apriori file type At Line ", self.FileName, self.CurrentLineNumber)
+ AprType = self.__Token
+
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ AprSectionObj = CommonDataClass.FdfClass.AprioriSectionClassObject()
+ AprSectionObj.AprioriType = AprType
+
+ self.__GetDefineStatements(AprSectionObj)
+ MacroDict.update(AprSectionObj.DefineVarDict)
+
+ while True:
+ IsInf = self.__GetInfStatement( AprSectionObj, MacroDict = MacroDict)
+ IsFile = self.__GetFileStatement( AprSectionObj)
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvObj.AprioriSectionList.append(AprSectionObj)
+ return True
+
    ## __GetInfStatement() method
    #
    #   Get INF statements
    #
    #   @param  self        The object pointer
    #   @param  Obj         for whom inf statement is got
    #   @param  ForCapsule  True when parsing inside a [Capsule.] body; the
    #                       statement is then wrapped in a CapsuleFfs entry
    #   @param  MacroDict   dictionary used to replace macro (currently unused,
    #                       see the commented-out macro expansion below)
    #   @retval True        Successfully find inf statement
    #   @retval False       Not able to find inf statement
    #
    def __GetInfStatement(self, Obj, ForCapsule = False, MacroDict = {}):

        if not self.__IsKeyword( "INF"):
            return False

        ffsInf = CommonDataClass.FdfClass.FfsInfStatementClassObject()
        self.__GetInfOptions( ffsInf)

        if not self.__GetNextToken():
            raise Warning("expected INF file path At Line ", self.FileName, self.CurrentLineNumber)
        ffsInf.InfFileName = self.__Token

#        if ffsInf.InfFileName.find('$') >= 0:
#            ffsInf.InfFileName = GenFdsGlobalVariable.GenFdsGlobalVariable.MacroExtend(ffsInf.InfFileName, MacroDict)

        # Record each INF (with its file/line) only once in the profile.
        if not ffsInf.InfFileName in self.Profile.InfList:
            self.Profile.InfList.append(ffsInf.InfFileName)
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            self.Profile.InfFileLineList.append(FileLineTuple)

        # Optional relocation-strip override: "| RELOCS_STRIPPED" or
        # "| RELOCS_RETAINED".
        if self.__IsToken('|'):
            if self.__IsKeyword('RELOCS_STRIPPED'):
                ffsInf.KeepReloc = False
            elif self.__IsKeyword('RELOCS_RETAINED'):
                ffsInf.KeepReloc = True
            else:
                raise Warning("Unknown reloc strip flag At Line ", self.FileName, self.CurrentLineNumber)
        
        if ForCapsule:
            capsuleFfs = CapsuleData.CapsuleFfs()
            capsuleFfs.Ffs = ffsInf
            Obj.CapsuleDataList.append(capsuleFfs)
        else:
            Obj.FfsList.append(ffsInf)
        return True
+
    ## __GetInfOptions() method
    #
    #   Get options for INF
    #
    #   @param  self        The object pointer
    #   @param  FfsInfObj   for whom option is got
    #
    def __GetInfOptions(self, FfsInfObj):

        # All options are optional and appear in this fixed order:
        # RuleOverride, VERSION, UI, USE, then a comma-separated list of
        # Target_Tag_Arch key strings.
        if self.__IsKeyword( "RuleOverride"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Rule name At Line ", self.FileName, self.CurrentLineNumber)
            FfsInfObj.Rule = self.__Token

        if self.__IsKeyword( "VERSION"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected Version At Line ", self.FileName, self.CurrentLineNumber)

            # Quoted value is taken as literal data; unquoted as a file name.
            if self.__GetStringData():
                FfsInfObj.Version = self.__Token

        if self.__IsKeyword( "UI"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected UI name At Line ", self.FileName, self.CurrentLineNumber)

            if self.__GetStringData():
                FfsInfObj.Ui = self.__Token

        if self.__IsKeyword( "USE"):
            if not self.__IsToken( "="):
                raise Warning("expected '='", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected ARCH name", self.FileName, self.CurrentLineNumber)
            FfsInfObj.UseArch = self.__Token

                
        # A key string is Target_Tag_Arch where each part may be a word,
        # a macro ($(TARGET)/$(TOOL_CHAIN_TAG)/$(ARCH)) or '*'. A token
        # that does not match is pushed back for the caller.
        if self.__GetNextToken():
            p = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if p.match(self.__Token):
                FfsInfObj.KeyStringList.append(self.__Token)
                if not self.__IsToken(","):
                    return
            else:
                self.__UndoToken()
                return

            # After a comma, every further token must be a key string.
            while self.__GetNextToken():
                if not p.match(self.__Token):
                    raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
                FfsInfObj.KeyStringList.append(self.__Token)

                if not self.__IsToken(","):
                    break
+
    ## __GetFileStatement() method
    #
    #   Get FILE statements
    #
    #   @param  self        The object pointer
    #   @param  Obj         for whom FILE statement is got
    #   @param  ForCapsule  True when parsing inside a [Capsule.] body; the
    #                       statement is then wrapped in a CapsuleFfs entry
    #   @param  MacroDict   dictionary used to replace macro
    #   @retval True        Successfully find FILE statement
    #   @retval False       Not able to find FILE statement
    #
    def __GetFileStatement(self, Obj, ForCapsule = False, MacroDict = {}):

        if not self.__IsKeyword( "FILE"):
            return False

        FfsFileObj = CommonDataClass.FdfClass.FileStatementClassObject()

        if not self.__GetNextWord():
            raise Warning("expected FFS type At Line ", self.FileName, self.CurrentLineNumber)
        FfsFileObj.FvFileType = self.__Token

        if not self.__IsToken( "="):
            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

        # The file name is either a literal GUID or a PCD(Space.Name)
        # reference; the PCD form is normalized into a "PCD(...)" string.
        if not self.__GetNextGuid():
            if not self.__GetNextWord():
                raise Warning("expected File GUID", self.FileName, self.CurrentLineNumber)
            if self.__Token == 'PCD':
                if not self.__IsToken( "("):
                    raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
                PcdPair = self.__GetNextPcdName()
                if not self.__IsToken( ")"):
                    raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
                self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
                
        FfsFileObj.NameGuid = self.__Token
    
        self.__GetFilePart( FfsFileObj, MacroDict.copy())

        if ForCapsule:
            capsuleFfs = CapsuleData.CapsuleFfs()
            capsuleFfs.Ffs = FfsFileObj
            Obj.CapsuleDataList.append(capsuleFfs)
        else:
            Obj.FfsList.append(FfsFileObj)

        return True
+
+ ## __FileCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a file type.
+ #
+ # @param self The object pointer
+ # @param FileType The file type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __FileCouldHaveRelocFlag (self, FileType):
+ if FileType in ('SEC', 'PEI_CORE', 'PEIM', 'PEI_DXE_COMBO'):
+ return True
+ else:
+ return False
+
+ ## __SectionCouldHaveRelocFlag() method
+ #
+ # Check whether reloc strip flag can be set for a section type.
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check with
+ # @retval True This type could have relocation strip flag
+ # @retval False No way to have it
+ #
+
+ def __SectionCouldHaveRelocFlag (self, SectionType):
+ if SectionType in ('TE', 'PE32'):
+ return True
+ else:
+ return False
+
    ## __GetFilePart() method
    #
    #   Get components for FILE statement
    #
    #   @param  self        The object pointer
    #   @param  FfsFileObj   for whom component is got
    #   @param  MacroDict   dictionary used to replace macro
    #
    def __GetFilePart(self, FfsFileObj, MacroDict = {}):

        self.__GetFileOpts( FfsFileObj)

        if not self.__IsToken("{"):
#            if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
#                if self.__FileCouldHaveRelocFlag(FfsFileObj.FvFileType):
#                    if self.__Token == 'RELOCS_STRIPPED':
#                        FfsFileObj.KeepReloc = False
#                    else:
#                        FfsFileObj.KeepReloc = True
#                else:
#                    raise Warning("File type %s could not have reloc strip flag At Line %d" % (FfsFileObj.FvFileType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
#        
#            if not self.__IsToken("{"):
            raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)

        if not self.__GetNextToken():
            raise Warning("expected File name or section data At Line ", self.FileName, self.CurrentLineNumber)

        # Body is one of: "FV = name", "FD = name", section data
        # (DEFINE/APRIORI/SECTION), or a plain file name.
        if self.__Token == "FV":
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FvName = self.__Token

        elif self.__Token == "FD":
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
            if not self.__GetNextToken():
                raise Warning("expected FD name At Line ", self.FileName, self.CurrentLineNumber)
            FfsFileObj.FdName = self.__Token

        elif self.__Token in ("DEFINE", "APRIORI", "SECTION"):
            self.__UndoToken()
            self.__GetSectionData( FfsFileObj, MacroDict)
        else:
            FfsFileObj.FileName = self.__Token

        if not self.__IsToken( "}"):
            raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
    ## __GetFileOpts() method
    #
    #   Get options for FILE statement
    #
    #   @param  self        The object pointer
    #   @param  FfsFileObj   for whom options is got
    #
    def __GetFileOpts(self, FfsFileObj):

        # Optional comma-separated Target_Tag_Arch key strings; a first token
        # that does not match the pattern is pushed back untouched.
        if self.__GetNextToken():
            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
            if Pattern.match(self.__Token):
                FfsFileObj.KeyStringList.append(self.__Token)
                if self.__IsToken(","):
                    while self.__GetNextToken():
                        if not Pattern.match(self.__Token):
                            raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
                        FfsFileObj.KeyStringList.append(self.__Token)

                        if not self.__IsToken(","):
                            break

            else:
                self.__UndoToken()

        # Optional boolean flags and alignment, in this fixed order.
        if self.__IsKeyword( "FIXED", True):
            FfsFileObj.Fixed = True

        if self.__IsKeyword( "CHECKSUM", True):
            FfsFileObj.CheckSum = True

        if self.__GetAlignment():
            # __GetAlignment leaves the alignment value in self.__Token.
            FfsFileObj.Alignment = self.__Token
+
+ ## __GetAlignment() method
+ #
+ # Return the alignment value
+ #
+ # @param self The object pointer
+ # @retval True Successfully find alignment
+ # @retval False Not able to find alignment
+ #
+ def __GetAlignment(self):
+ if self.__IsKeyword( "Align", True):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected alignment value At Line ", self.FileName, self.CurrentLineNumber)
+ return True
+
+ return False
+
    ## __GetSectionData() method
    #
    #   Get section data for FILE statement
    #   (header previously mis-titled "__GetFilePart() method")
    #
    #   @param  self        The object pointer
    #   @param  FfsFileObj   for whom section is got
    #   @param  MacroDict   dictionary used to replace macro
    #
    def __GetSectionData(self, FfsFileObj, MacroDict = {}):
        # Work on a merged copy so the caller's dictionary is not modified.
        Dict = {}
        Dict.update(MacroDict)

        self.__GetDefineStatements(FfsFileObj)

        Dict.update(FfsFileObj.DefineVarDict)
        # Called twice: a file may carry up to two apriori sections (one PEI,
        # one DXE) — presumably deliberate; verify against the FDF spec.
        self.__GetAprioriSection(FfsFileObj, Dict.copy())
        self.__GetAprioriSection(FfsFileObj, Dict.copy())

        # Consume leaf and encapsulation sections until neither is found.
        while True:
            IsLeafSection = self.__GetLeafSection(FfsFileObj, Dict)
            IsEncapSection = self.__GetEncapsulationSec(FfsFileObj)
            if not IsLeafSection and not IsEncapSection:
                break
+
+ ## __GetLeafSection() method
+ #
+ # Get leaf section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param MacroDict dictionary used to replace macro
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetLeafSection(self, Obj, MacroDict = {}):
+
+ OldPos = self.GetFileBufferPos()
+
+ if not self.__IsKeyword( "SECTION"):
+ if len(Obj.SectionList) == 0:
+ raise Warning("expected SECTION At Line ", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ BuildNum = None
+ if self.__IsKeyword( "BUILD_NUM"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken():
+ raise Warning("expected Build number value At Line ", self.FileName, self.CurrentLineNumber)
+
+ BuildNum = self.__Token
+
+ if self.__IsKeyword( "VERSION"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected version At Line ", self.FileName, self.CurrentLineNumber)
+ VerSectionObj = CommonDataClass.FdfClass.VerSectionClassObject()
+ VerSectionObj.Alignment = AlignValue
+ VerSectionObj.BuildNum = BuildNum
+ if self.__GetStringData():
+ VerSectionObj.StringData = self.__Token
+ else:
+ VerSectionObj.FileName = self.__Token
+ Obj.SectionList.append(VerSectionObj)
+
+ elif self.__IsKeyword( "UI"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextToken():
+ raise Warning("expected UI At Line ", self.FileName, self.CurrentLineNumber)
+ UiSectionObj = CommonDataClass.FdfClass.UiSectionClassObject()
+ UiSectionObj.Alignment = AlignValue
+ if self.__GetStringData():
+ UiSectionObj.StringData = self.__Token
+ else:
+ UiSectionObj.FileName = self.__Token
+ Obj.SectionList.append(UiSectionObj)
+
+ elif self.__IsKeyword( "FV_IMAGE"):
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__GetNextWord():
+ raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvName = self.__Token.upper()
+ FvObj = None
+
+ if self.__IsToken( "{"):
+ FvObj = Fv.FV()
+ FvObj.UiFvName = FvName
+ self.__GetDefineStatements(FvObj)
+ MacroDict.update(FvObj.DefineVarDict)
+ self.__GetBlockStatement(FvObj)
+ self.__GetSetStatements(FvObj)
+ self.__GetFvAlignment(FvObj)
+ self.__GetFvAttributes(FvObj)
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+ self.__GetAprioriSection(FvObj, MacroDict.copy())
+
+ while True:
+ IsInf = self.__GetInfStatement(FvObj, MacroDict.copy())
+ IsFile = self.__GetFileStatement(FvObj, MacroDict.copy())
+ if not IsInf and not IsFile:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ FvImageSectionObj = CommonDataClass.FdfClass.FvImageSectionClassObject()
+ FvImageSectionObj.Alignment = AlignValue
+ if FvObj != None:
+ FvImageSectionObj.Fv = FvObj
+ FvImageSectionObj.FvName = None
+ else:
+ FvImageSectionObj.FvName = FvName
+
+ Obj.SectionList.append(FvImageSectionObj)
+
+ elif self.__IsKeyword("PEI_DEPEX_EXP") or self.__IsKeyword("DXE_DEPEX_EXP"):
+ DepexSectionObj = CommonDataClass.FdfClass.DepexSectionClassObject()
+ DepexSectionObj.Alignment = AlignValue
+ DepexSectionObj.DepexType = self.__Token
+
+ if not self.__IsToken( "="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__IsToken( "{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+ if not self.__SkipToToken( "}"):
+ raise Warning("expected Depex expression ending '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+ DepexSectionObj.Expression = self.__SkippedChars.rstrip('}')
+ Obj.SectionList.append(DepexSectionObj)
+
+ else:
+
+ if not self.__GetNextWord():
+ raise Warning("expected section type At Line ", self.FileName, self.CurrentLineNumber)
+
+ # Encapsulation section appear, UndoToken and return
+ if self.__Token == "COMPRESS" or self.__Token == "GUIDED":
+ self.SetFileBufferPos(OldPos)
+ return False
+
+ if self.__Token not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+ "UI", "VERSION", "PEI_DEPEX", "SUBTYPE_GUID", "SMM_DEPEX"):
+ raise Warning("Unknown section type '%s'" % self.__Token, self.FileName, self.CurrentLineNumber)
+ # DataSection
+ DataSectionObj = CommonDataClass.FdfClass.DataSectionClassObject()
+ DataSectionObj.Alignment = AlignValue
+ DataSectionObj.SecType = self.__Token
+
+ if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
+ if self.__FileCouldHaveRelocFlag(Obj.FvFileType) and self.__SectionCouldHaveRelocFlag(DataSectionObj.SecType):
+ if self.__Token == 'RELOCS_STRIPPED':
+ DataSectionObj.KeepReloc = False
+ else:
+ DataSectionObj.KeepReloc = True
+ else:
+ raise Warning("File type %s, section type %s, could not have reloc strip flag At Line %d" % (Obj.FvFileType, DataSectionObj.SecType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+ if self.__IsToken("="):
+ if not self.__GetNextToken():
+ raise Warning("expected section file path At Line ", self.FileName, self.CurrentLineNumber)
+ DataSectionObj.SectFileName = self.__Token
+ else:
+ if not self.__GetCglSection(DataSectionObj):
+ return False
+
+ Obj.SectionList.append(DataSectionObj)
+
+ return True
+
+ ## __GetCglSection() method
+ #
+ # Get compressed or GUIDed section for Obj
+ #
+ # @param self The object pointer
+ # @param Obj for whom leaf section is got
+ # @param AlignValue alignment value for complex section
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetCglSection(self, Obj, AlignValue = None):
+
+ if self.__IsKeyword( "COMPRESS"):
+ type = "PI_STD"
+ if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
+ type = self.__Token
+
+ if not self.__IsToken("{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+ CompressSectionObj = CommonDataClass.FdfClass.CompressSectionClassObject()
+ CompressSectionObj.Alignment = AlignValue
+ CompressSectionObj.CompType = type
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(CompressSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(CompressSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(CompressSectionObj)
+
+# else:
+# raise Warning("Compress type not known At Line ")
+
+ return True
+
+ elif self.__IsKeyword( "GUIDED"):
+ GuidValue = None
+ if self.__GetNextGuid():
+ GuidValue = self.__Token
+
+ AttribDict = self.__GetGuidAttrib()
+ if not self.__IsToken("{"):
+ raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+ GuidSectionObj = CommonDataClass.FdfClass.GuidSectionClassObject()
+ GuidSectionObj.Alignment = AlignValue
+ GuidSectionObj.NameGuid = GuidValue
+ GuidSectionObj.SectionType = "GUIDED"
+ GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+ GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+ # Recursive sections...
+ while True:
+ IsLeafSection = self.__GetLeafSection(GuidSectionObj)
+ IsEncapSection = self.__GetEncapsulationSec(GuidSectionObj)
+ if not IsLeafSection and not IsEncapSection:
+ break
+
+ if not self.__IsToken( "}"):
+ raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+ Obj.SectionList.append(GuidSectionObj)
+
+ return True
+
+ return False
+
+ ## __GetGuidAttri() method
+ #
+ # Get attributes for GUID section
+ #
+ # @param self The object pointer
+ # @retval AttribDict Dictionary of key-value pair of section attributes
+ #
+ def __GetGuidAttrib(self):
+
+ AttribDict = {}
+ AttribDict["PROCESSING_REQUIRED"] = False
+ AttribDict["AUTH_STATUS_VALID"] = False
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ if self.__IsKeyword("PROCESSING_REQUIRED") or self.__IsKeyword("AUTH_STATUS_VALID"):
+ AttribKey = self.__Token
+
+ if not self.__IsToken("="):
+ raise Warning("expected '=' At Line ")
+
+ if not self.__GetNextToken() or self.__Token.upper() not in ("TRUE", "FALSE", "1", "0"):
+ raise Warning("expected TRUE/FALSE (1/0) At Line ", self.FileName, self.CurrentLineNumber)
+ AttribDict[AttribKey] = self.__Token
+
+ return AttribDict
+
+ ## __GetEncapsulationSec() method
+ #
+ # Get encapsulation section for FILE
+ #
+ # @param self The object pointer
+ # @param FfsFile for whom section is got
+ # @retval True Successfully find section statement
+ # @retval False Not able to find section statement
+ #
+ def __GetEncapsulationSec(self, FfsFileObj):
+
+ OldPos = self.GetFileBufferPos()
+ if not self.__IsKeyword( "SECTION"):
+ if len(FfsFileObj.SectionList) == 0:
+ raise Warning("expected SECTION At Line ", self.FileName, self.CurrentLineNumber)
+ else:
+ return False
+
+ AlignValue = None
+ if self.__GetAlignment():
+ AlignValue = self.__Token
+
+ if not self.__GetCglSection(FfsFileObj, AlignValue):
+ self.SetFileBufferPos(OldPos)
+ return False
+ else:
+ return True
+
    ## __GetCapsule() method
    #
    #   Get capsule section contents and store its data into capsule list of self.Profile
    #
    #   @param  self        The object pointer
    #   @retval True        Successfully find a capsule
    #   @retval False       Not able to find a capsule
    #
    def __GetCapsule(self):

        if not self.__GetNextToken():
            return False

        # Enforce section ordering: only [VTF.], [Rule.] or [OptionRom.]
        # may legally follow a [Capsule.] block; those are pushed back for
        # their own parsers.
        S = self.__Token.upper()
        if S.startswith("[") and not S.startswith("[CAPSULE."):
            if not S.startswith("[VTF.") and not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
            self.__UndoToken()
            return False

        self.__UndoToken()
        if not self.__IsToken("[CAPSULE.", True):
            # Diagnostic dump of the unparsable text before raising.
            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
            raise Warning("expected [Capsule.] At Line ", self.FileName, self.CurrentLineNumber)

        CapsuleObj = CommonDataClass.FdfClass.CapsuleClassObject()

        CapsuleName = self.__GetUiName()
        if not CapsuleName:
            raise Warning("expected capsule name At line ", self.FileName, self.CurrentLineNumber)

        # Capsule names are stored upper-cased.
        CapsuleObj.UiCapsuleName = CapsuleName.upper()

        if not self.__IsToken( "]"):
            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)

        # Optional CREATE_FILE = <name> override.
        if self.__IsKeyword("CREATE_FILE"):
            if not self.__IsToken( "="):
                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)

            if not self.__GetNextToken():
                raise Warning("expected file name At Line ", self.FileName, self.CurrentLineNumber)

            CapsuleObj.CreateFile = self.__Token

        self.__GetCapsuleStatements(CapsuleObj)
        self.Profile.CapsuleList.append(CapsuleObj)
        return True
+
+    ## __GetCapsuleStatements() method
+    #
+    # Get statements for capsule
+    #
+    # @param self The object pointer
+    # @param Obj for whom statements are got
+    #
+    def __GetCapsuleStatements(self, Obj):
+        # Order matters: header tokens first, then DEFINE/SET statements,
+        # and finally the capsule data (INF/FILE/FV statements).
+        self.__GetCapsuleTokens(Obj)
+        self.__GetDefineStatements(Obj)
+        self.__GetSetStatements(Obj)
+
+        self.__GetCapsuleData(Obj)
+
+    ## __GetCapsuleTokens() method
+    #
+    # Get token statements for capsule
+    #
+    # @param self The object pointer
+    # @param Obj for whom token statements are got
+    #
+    def __GetCapsuleTokens(self, Obj):
+
+        # A CAPSULE_GUID token is mandatory as the first statement.
+        if not self.__IsKeyword("CAPSULE_GUID"):
+            raise Warning("expected 'CAPSULE_GUID' At Line ", self.FileName, self.CurrentLineNumber)
+
+        # Consume consecutive "NAME = VALUE" lines into Obj.TokensDict,
+        # advancing the cursor a whole line at a time.
+        while self.__CurrentLine().find("=") != -1:
+            NameValue = self.__CurrentLine().split("=")
+            Obj.TokensDict[NameValue[0].strip()] = NameValue[1].strip()
+            self.CurrentLineNumber += 1
+            self.CurrentOffsetWithinLine = 0
+
+    ## __GetCapsuleData() method
+    #
+    # Get capsule data for capsule
+    #
+    # @param self The object pointer
+    # @param Obj for whom capsule data are got
+    #
+    def __GetCapsuleData(self, Obj):
+
+        # Keep consuming INF / FILE / FV statements in any order until one
+        # full pass finds none of the three.
+        while True:
+            IsInf = self.__GetInfStatement(Obj, True)
+            IsFile = self.__GetFileStatement(Obj, True)
+            IsFv = self.__GetFvStatement(Obj)
+            if not IsInf and not IsFile and not IsFv:
+                break
+
+    ## __GetFvStatement() method
+    #
+    # Get FV for capsule
+    #
+    # @param self The object pointer
+    # @param CapsuleObj for whom FV is got
+    # @retval True Successfully find a FV statement
+    # @retval False Not able to find a FV statement
+    #
+    def __GetFvStatement(self, CapsuleObj):
+
+        if not self.__IsKeyword("FV"):
+            return False
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected FV name At Line ", self.FileName, self.CurrentLineNumber)
+
+        # NOTE(review): the FV name token is validated but then discarded —
+        # the code that would attach it to CapsuleObj is commented out below,
+        # so CapsuleObj is currently unused here.
+#        CapsuleFv = CapsuleData.CapsuleFv()
+#        CapsuleFv.FvName = self.__Token
+#        CapsuleObj.CapsuleDataList.append(CapsuleFv)
+        return True
+
+    ## __GetRule() method
+    #
+    # Get Rule section contents and store its data into rule list of self.Profile
+    #
+    # Parses "[Rule.<Arch>.<ModuleType>[.<TemplateName>]]" and registers the
+    # resulting rule object in self.Profile.RuleDict keyed by
+    # "RULE.<ARCH>.<MODULETYPE>[.<TEMPLATENAME>]".
+    #
+    # @param self The object pointer
+    # @retval True Successfully find a Rule
+    # @retval False Not able to find a Rule
+    #
+    def __GetRule(self):
+
+        if not self.__GetNextToken():
+            return False
+
+        S = self.__Token.upper()
+        if S.startswith("[") and not S.startswith("[RULE."):
+            # Only [OptionRom.] may legally follow the rule sections.
+            if not S.startswith("[OPTIONROM."):
+                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+            self.__UndoToken()
+            return False
+        self.__UndoToken()
+        if not self.__IsToken("[Rule.", True):
+            # Dump the unparsed remainder of the current line as a diagnostic.
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+            raise Warning("expected [Rule.] At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__SkipToToken("."):
+            raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)
+
+        # The characters skipped up to the '.' form the architecture part.
+        Arch = self.__SkippedChars.rstrip(".")
+        if Arch.upper() not in ("IA32", "X64", "IPF", "EBC", "ARM", "COMMON"):
+            raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)
+
+        ModuleType = self.__GetModuleType()
+
+        # The template name is optional; it distinguishes multiple rules for
+        # the same Arch/ModuleType pair.
+        TemplateName = ""
+        if self.__IsToken("."):
+            if not self.__GetNextWord():
+                raise Warning("expected template name At Line ", self.FileName, self.CurrentLineNumber)
+            TemplateName = self.__Token
+
+        if not self.__IsToken( "]"):
+            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)
+
+        RuleObj = self.__GetRuleFileStatements()
+        RuleObj.Arch = Arch.upper()
+        RuleObj.ModuleType = ModuleType
+        RuleObj.TemplateName = TemplateName
+        if TemplateName == '' :
+            self.Profile.RuleDict['RULE'             + \
+                              '.'                    + \
+                              Arch.upper()           + \
+                              '.'                    + \
+                              ModuleType.upper()     ] = RuleObj
+        else :
+            self.Profile.RuleDict['RULE'             + \
+                              '.'                    + \
+                              Arch.upper()           + \
+                              '.'                    + \
+                              ModuleType.upper()     + \
+                              '.'                    + \
+                              TemplateName.upper() ] = RuleObj
+#        self.Profile.RuleList.append(rule)
+        return True
+
+ ## __GetModuleType() method
+ #
+ # Return the module type
+ #
+ # @param self The object pointer
+ # @retval string module type
+ #
+ def __GetModuleType(self):
+
+ if not self.__GetNextWord():
+ raise Warning("expected Module type At Line ", self.FileName, self.CurrentLineNumber)
+ if self.__Token.upper() not in ("SEC", "PEI_CORE", "PEIM", "DXE_CORE", \
+ "DXE_DRIVER", "DXE_SAL_DRIVER", \
+ "DXE_SMM_DRIVER", "DXE_RUNTIME_DRIVER", \
+ "UEFI_DRIVER", "UEFI_APPLICATION", "USER_DEFINED", "DEFAULT", "BASE", \
+ "SECURITY_CORE", "COMBINED_PEIM_DRIVER", "PIC_PEIM", "RELOCATABLE_PEIM", \
+ "PE32_PEIM", "BS_DRIVER", "RT_DRIVER", "SAL_RT_DRIVER", "APPLICATION"):
+ raise Warning("Unknown Module type At line ", self.FileName, self.CurrentLineNumber)
+ return self.__Token
+
+    ## __GetFileExtension() method
+    #
+    # Return the file extension
+    #
+    # @param self The object pointer
+    # @retval string file name extension
+    #
+    def __GetFileExtension(self):
+        # The extension must be introduced by a '.' and consist of a letter
+        # followed by letters/digits; the returned value includes the dot.
+        if not self.__IsToken("."):
+                raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)
+
+        Ext = ""
+        if self.__GetNextToken():
+            Pattern = re.compile(r'([a-zA-Z][a-zA-Z0-9]*)')
+            if Pattern.match(self.__Token):
+                Ext = self.__Token
+                return '.' + Ext
+            else:
+                raise Warning("Unknown file extension At Line ", self.FileName, self.CurrentLineNumber)
+
+        else:
+            raise Warning("expected file extension At Line ", self.FileName, self.CurrentLineNumber)
+
+    ## __GetRuleFileStatements() method
+    #
+    # Get rule contents
+    #
+    # Parses "FILE <Type> = <NameGuid> [attributes] ..." and returns one of
+    # three rule flavors: a complex rule ("{ ... }" with nested sections),
+    # an extension rule ("| .ext"), or a simple leaf-section rule.
+    #
+    # @param self The object pointer
+    # @retval Rule Rule object
+    #
+    def __GetRuleFileStatements(self):
+
+        if not self.__IsKeyword("FILE"):
+            raise Warning("expected FILE At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextWord():
+            raise Warning("expected FV type At Line ", self.FileName, self.CurrentLineNumber)
+
+        Type = self.__Token.strip().upper()
+        if Type not in ("RAW", "FREEFORM", "SEC", "PEI_CORE", "PEIM",\
+                             "PEI_DXE_COMBO", "DRIVER", "DXE_CORE", "APPLICATION", "FV_IMAGE"):
+            raise Warning("Unknown FV type At line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        # The file GUID is either the literal $(NAMED_GUID) or a
+        # PCD(<TokenSpace>.<PcdName>) reference, normalized to
+        # "PCD(<TokenSpace>.<PcdName>)" text in self.__Token.
+        if not self.__IsKeyword("$(NAMED_GUID)"):
+            if not self.__GetNextWord():
+                raise Warning("expected $(NAMED_GUID)", self.FileName, self.CurrentLineNumber)
+            if self.__Token == 'PCD':
+                if not self.__IsToken( "("):
+                    raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
+                PcdPair = self.__GetNextPcdName()
+                if not self.__IsToken( ")"):
+                    raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
+                self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
+
+        NameGuid = self.__Token
+
+        # Optional relocation-strip flag; only meaningful for file types
+        # that may carry relocations.
+        KeepReloc = None
+        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
+            if self.__FileCouldHaveRelocFlag(Type):
+                if self.__Token == 'RELOCS_STRIPPED':
+                    KeepReloc = False
+                else:
+                    KeepReloc = True
+            else:
+                raise Warning("File type %s could not have reloc strip flag At Line %d" % (Type, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+        # Optional comma-separated list of "Target_Tag_Arch" key strings.
+        KeyStringList = []
+        if self.__GetNextToken():
+            Pattern = re.compile(r'([a-zA-Z0-9\-]+|\$\(TARGET\)|\*)_([a-zA-Z0-9\-]+|\$\(TOOL_CHAIN_TAG\)|\*)_([a-zA-Z0-9\-]+|\$\(ARCH\)|\*)')
+            if Pattern.match(self.__Token):
+                KeyStringList.append(self.__Token)
+                if self.__IsToken(","):
+                    while self.__GetNextToken():
+                        if not Pattern.match(self.__Token):
+                            raise Warning("expected KeyString \"Target_Tag_Arch\" At Line ", self.FileName, self.CurrentLineNumber)
+                        KeyStringList.append(self.__Token)
+
+                        if not self.__IsToken(","):
+                            break
+
+            else:
+                # Not a key string; give the token back to the stream.
+                self.__UndoToken()
+
+
+        Fixed = False
+        if self.__IsKeyword("Fixed", True):
+            Fixed = True
+
+        CheckSum = False
+        if self.__IsKeyword("CheckSum", True):
+            CheckSum = True
+
+        AlignValue = ""
+        if self.__GetAlignment():
+            if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
+                raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
+            AlignValue = self.__Token
+
+        if self.__IsToken("{"):
+            # Complex file rule expected: nested encapsulation/leaf sections
+            # until the closing brace.
+            Rule = RuleComplexFile.RuleComplexFile()
+            Rule.FvFileType = Type
+            Rule.NameGuid = NameGuid
+            Rule.Alignment = AlignValue
+            Rule.CheckSum = CheckSum
+            Rule.Fixed = Fixed
+            Rule.KeyStringList = KeyStringList
+            if KeepReloc != None:
+                Rule.KeepReloc = KeepReloc
+
+            while True:
+                IsEncapsulate = self.__GetRuleEncapsulationSection(Rule)
+                IsLeaf = self.__GetEfiSection(Rule)
+                if not IsEncapsulate and not IsLeaf:
+                    break
+
+            if not self.__IsToken("}"):
+                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+
+            return Rule
+
+        elif self.__IsToken("|"):
+            # Ext rule expected: the rule is identified by a file extension.
+            Ext = self.__GetFileExtension()
+
+            Rule = RuleSimpleFile.RuleSimpleFile()
+
+            Rule.FvFileType = Type
+            Rule.NameGuid = NameGuid
+            Rule.Alignment = AlignValue
+            Rule.CheckSum = CheckSum
+            Rule.Fixed = Fixed
+            Rule.FileExtension = Ext
+            Rule.KeyStringList = KeyStringList
+            if KeepReloc != None:
+                Rule.KeepReloc = KeepReloc
+
+            return Rule
+
+        else:
+            # Simple file rule expected: a single leaf section type plus a
+            # concrete file name.
+            if not self.__GetNextWord():
+                raise Warning("expected leaf section type At Line ", self.FileName, self.CurrentLineNumber)
+
+            SectionName = self.__Token
+
+            if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+                                    "UI", "PEI_DEPEX", "VERSION", "SUBTYPE_GUID", "SMM_DEPEX"):
+                raise Warning("Unknown leaf section name '%s'" % SectionName, self.FileName, self.CurrentLineNumber)
+
+
+            if self.__IsKeyword("Fixed", True):
+                Fixed = True
+
+            if self.__IsKeyword("CheckSum", True):
+                CheckSum = True
+
+            if self.__GetAlignment():
+                if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
+                    raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
+                AlignValue = self.__Token
+
+            if not self.__GetNextToken():
+                raise Warning("expected File name At Line ", self.FileName, self.CurrentLineNumber)
+
+            Rule = RuleSimpleFile.RuleSimpleFile()
+            Rule.SectionType = SectionName
+            Rule.FvFileType = Type
+            Rule.NameGuid = NameGuid
+            Rule.Alignment = AlignValue
+            Rule.CheckSum = CheckSum
+            Rule.Fixed = Fixed
+            Rule.FileName = self.__Token
+            Rule.KeyStringList = KeyStringList
+            if KeepReloc != None:
+                Rule.KeepReloc = KeepReloc
+            return Rule
+
+    ## __GetEfiSection() method
+    #
+    # Get section list for Rule
+    #
+    # Parses one leaf section inside a rule.  FV_IMAGE sections get special
+    # handling (inline FV definition or FV file reference); all other types
+    # become an EfiSection with optional STRING/BUILD_NUM/Optional/alignment/
+    # relocation attributes and a file name or extension.
+    #
+    # @param self The object pointer
+    # @param Obj for whom section is got
+    # @retval True Successfully find section statement
+    # @retval False Not able to find section statement
+    #
+    def __GetEfiSection(self, Obj):
+
+        OldPos = self.GetFileBufferPos()
+        if not self.__GetNextWord():
+            return False
+        SectionName = self.__Token
+
+        if SectionName not in ("COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+                               "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
+            self.__UndoToken()
+            return False
+
+        if SectionName == "FV_IMAGE":
+            FvImageSectionObj = FvImageSection.FvImageSection()
+            # An optional second FV_IMAGE keyword may precede the body.
+            if self.__IsKeyword("FV_IMAGE"):
+                pass
+            if self.__IsToken( "{"):
+                # Inline FV definition: parse a full FV body here.
+                FvObj = Fv.FV()
+                self.__GetDefineStatements(FvObj)
+                self.__GetBlockStatement(FvObj)
+                self.__GetSetStatements(FvObj)
+                self.__GetFvAlignment(FvObj)
+                self.__GetFvAttributes(FvObj)
+                # NOTE(review): __GetAprioriSection is invoked twice here —
+                # presumably once per apriori kind; confirm against the FV
+                # section grammar.
+                self.__GetAprioriSection(FvObj)
+                self.__GetAprioriSection(FvObj)
+
+                while True:
+                    IsInf = self.__GetInfStatement(FvObj)
+                    IsFile = self.__GetFileStatement(FvObj)
+                    if not IsInf and not IsFile:
+                        break
+
+                if not self.__IsToken( "}"):
+                    raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+                FvImageSectionObj.Fv = FvObj
+                FvImageSectionObj.FvName = None
+
+            else:
+                # FV reference form: "FV_IMAGE FV [alignment] ..." possibly
+                # followed by a file extension or file name.
+                if not self.__IsKeyword("FV"):
+                    raise Warning("expected 'FV' At Line ", self.FileName, self.CurrentLineNumber)
+                FvImageSectionObj.FvFileType = self.__Token
+
+                if self.__GetAlignment():
+                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
+                        raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
+                    FvImageSectionObj.Alignment = self.__Token
+
+                if self.__IsKeyword("FV"):
+                    FvImageSectionObj.FvFileType = self.__Token
+
+                if self.__GetAlignment():
+                    if self.__Token not in ("8", "16", "32", "64", "128", "512", "1K", "4K", "32K" ,"64K"):
+                        raise Warning("Incorrect alignment At Line ", self.FileName, self.CurrentLineNumber)
+                    FvImageSectionObj.Alignment = self.__Token
+
+                if self.__IsToken('|'):
+                    FvImageSectionObj.FvFileExtension = self.__GetFileExtension()
+                elif self.__GetNextToken():
+                    # A token that is itself a section keyword (or '}') ends
+                    # this section rather than naming the FV file.
+                    if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+                               "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
+                        FvImageSectionObj.FvFileName = self.__Token
+                    else:
+                        self.__UndoToken()
+                else:
+                    raise Warning("expected FV file name At Line ", self.FileName, self.CurrentLineNumber)
+
+            Obj.SectionList.append(FvImageSectionObj)
+            return True
+
+        EfiSectionObj = EfiSection.EfiSection()
+        EfiSectionObj.SectionType = SectionName
+
+        if not self.__GetNextToken():
+            raise Warning("expected file type At Line ", self.FileName, self.CurrentLineNumber)
+
+        if self.__Token == "STRING":
+            # STRING = "..." form, only valid for UI/VERSION sections.
+            if not self.__RuleSectionCouldHaveString(EfiSectionObj.SectionType):
+                raise Warning("%s section could NOT have string data At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+            if not self.__IsToken('='):
+                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+            if not self.__GetNextToken():
+                raise Warning("expected Quoted String At Line ", self.FileName, self.CurrentLineNumber)
+
+            if self.__GetStringData():
+                EfiSectionObj.StringData = self.__Token
+
+            if self.__IsKeyword("BUILD_NUM"):
+                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
+                    raise Warning("%s section could NOT have BUILD_NUM At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+                if not self.__IsToken("="):
+                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+                if not self.__GetNextToken():
+                    raise Warning("expected Build number At Line ", self.FileName, self.CurrentLineNumber)
+                EfiSectionObj.BuildNum = self.__Token
+
+        else:
+            EfiSectionObj.FileType = self.__Token
+            self.__CheckRuleSectionFileType(EfiSectionObj.SectionType, EfiSectionObj.FileType)
+
+        if self.__IsKeyword("Optional"):
+            if not self.__RuleSectionCouldBeOptional(EfiSectionObj.SectionType):
+                raise Warning("%s section could NOT be optional At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+            EfiSectionObj.Optional = True
+
+            if self.__IsKeyword("BUILD_NUM"):
+                if not self.__RuleSectionCouldHaveBuildNum(EfiSectionObj.SectionType):
+                    raise Warning("%s section could NOT have BUILD_NUM At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+                if not self.__IsToken("="):
+                    raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+                if not self.__GetNextToken():
+                    raise Warning("expected Build number At Line ", self.FileName, self.CurrentLineNumber)
+                EfiSectionObj.BuildNum = self.__Token
+
+        if self.__GetAlignment():
+            EfiSectionObj.Alignment = self.__Token
+
+        # Per-section relocation flag must not contradict the rule-level one.
+        if self.__IsKeyword('RELOCS_STRIPPED') or self.__IsKeyword('RELOCS_RETAINED'):
+            if self.__SectionCouldHaveRelocFlag(EfiSectionObj.SectionType):
+                if self.__Token == 'RELOCS_STRIPPED':
+                    EfiSectionObj.KeepReloc = False
+                else:
+                    EfiSectionObj.KeepReloc = True
+                if Obj.KeepReloc != None and Obj.KeepReloc != EfiSectionObj.KeepReloc:
+                    raise Warning("Section type %s has reloc strip flag conflict with Rule At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+            else:
+                raise Warning("Section type %s could not have reloc strip flag At Line %d" % (EfiSectionObj.SectionType, self.CurrentLineNumber), self.FileName, self.CurrentLineNumber)
+
+
+        if self.__IsToken('|'):
+            EfiSectionObj.FileExtension = self.__GetFileExtension()
+        elif self.__GetNextToken():
+            if self.__Token not in ("}", "COMPAT16", "PE32", "PIC", "TE", "FV_IMAGE", "RAW", "DXE_DEPEX",\
+                       "UI", "VERSION", "PEI_DEPEX", "GUID", "SMM_DEPEX"):
+
+                # PCD(...) file names are re-read as a word and normalized
+                # to "PCD(<TokenSpace>.<PcdName>)" text.
+                if self.__Token.startswith('PCD'):
+                    self.__UndoToken()
+                    self.__GetNextWord()
+
+                    if self.__Token == 'PCD':
+                        if not self.__IsToken( "("):
+                            raise Warning("expected '('", self.FileName, self.CurrentLineNumber)
+                        PcdPair = self.__GetNextPcdName()
+                        if not self.__IsToken( ")"):
+                            raise Warning("expected ')'", self.FileName, self.CurrentLineNumber)
+                        self.__Token = 'PCD('+PcdPair[1]+'.'+PcdPair[0]+')'
+
+                EfiSectionObj.FileName = self.__Token
+
+            else:
+                self.__UndoToken()
+        else:
+            raise Warning("expected section file name At Line ", self.FileName, self.CurrentLineNumber)
+
+        Obj.SectionList.append(EfiSectionObj)
+        return True
+
+ ## __RuleSectionCouldBeOptional() method
+ #
+ # Get whether a section could be optional
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could be optional
+ # @retval False section never optional
+ #
+ def __RuleSectionCouldBeOptional(self, SectionType):
+ if SectionType in ("DXE_DEPEX", "UI", "VERSION", "PEI_DEPEX", "RAW", "SMM_DEPEX"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveBuildNum() method
+ #
+ # Get whether a section could have build number information
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have build number information
+ # @retval False section never have build number information
+ #
+ def __RuleSectionCouldHaveBuildNum(self, SectionType):
+ if SectionType in ("VERSION"):
+ return True
+ else:
+ return False
+
+ ## __RuleSectionCouldHaveString() method
+ #
+ # Get whether a section could have string
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @retval True section could have string
+ # @retval False section never have string
+ #
+ def __RuleSectionCouldHaveString(self, SectionType):
+ if SectionType in ("UI", "VERSION"):
+ return True
+ else:
+ return False
+
+ ## __CheckRuleSectionFileType() method
+ #
+ # Get whether a section matches a file type
+ #
+ # @param self The object pointer
+ # @param SectionType The section type to check
+ # @param FileType The file type to check
+ #
+ def __CheckRuleSectionFileType(self, SectionType, FileType):
+ if SectionType == "COMPAT16":
+ if FileType not in ("COMPAT16", "SEC_COMPAT16"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PE32":
+ if FileType not in ("PE32", "SEC_PE32"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PIC":
+ if FileType not in ("PIC", "PIC"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "TE":
+ if FileType not in ("TE", "SEC_TE"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "RAW":
+ if FileType not in ("BIN", "SEC_BIN", "RAW", "ASL", "ACPI"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "DXE_DEPEX":
+ if FileType not in ("DXE_DEPEX", "SEC_DXE_DEPEX"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "UI":
+ if FileType not in ("UI", "SEC_UI"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "VERSION":
+ if FileType not in ("VERSION", "SEC_VERSION"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "PEI_DEPEX":
+ if FileType not in ("PEI_DEPEX", "SEC_PEI_DEPEX"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+ elif SectionType == "GUID":
+ if FileType not in ("PE32", "SEC_GUID"):
+ raise Warning("Incorrect section file type At Line ", self.FileName, self.CurrentLineNumber)
+
+    ## __GetRuleEncapsulationSection() method
+    #
+    # Get encapsulation section for Rule
+    #
+    # Handles the two encapsulating section forms, COMPRESS and GUIDED,
+    # each of which recursively contains further encapsulation or leaf
+    # sections until its closing brace.
+    #
+    # @param self The object pointer
+    # @param Rule for whom section is got
+    # @retval True Successfully find section statement
+    # @retval False Not able to find section statement
+    #
+    def __GetRuleEncapsulationSection(self, Rule):
+
+        if self.__IsKeyword( "COMPRESS"):
+            # Compression type defaults to PI_STD when not stated.
+            Type = "PI_STD"
+            if self.__IsKeyword("PI_STD") or self.__IsKeyword("PI_NONE"):
+                Type = self.__Token
+
+            if not self.__IsToken("{"):
+                raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+
+            CompressSectionObj = CompressSection.CompressSection()
+
+            CompressSectionObj.CompType = Type
+            # Recursive sections...
+            while True:
+                IsEncapsulate = self.__GetRuleEncapsulationSection(CompressSectionObj)
+                IsLeaf = self.__GetEfiSection(CompressSectionObj)
+                if not IsEncapsulate and not IsLeaf:
+                    break
+
+            if not self.__IsToken( "}"):
+                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+            Rule.SectionList.append(CompressSectionObj)
+
+            return True
+
+        elif self.__IsKeyword( "GUIDED"):
+            # The GUID may be a literal GUID or the $(NAMED_GUID) macro.
+            GuidValue = None
+            if self.__GetNextGuid():
+                GuidValue = self.__Token
+
+            if self.__IsKeyword( "$(NAMED_GUID)"):
+                GuidValue = self.__Token
+
+            AttribDict = self.__GetGuidAttrib()
+
+            if not self.__IsToken("{"):
+                raise Warning("expected '{' At Line ", self.FileName, self.CurrentLineNumber)
+            GuidSectionObj = GuidSection.GuidSection()
+            GuidSectionObj.NameGuid = GuidValue
+            GuidSectionObj.SectionType = "GUIDED"
+            GuidSectionObj.ProcessRequired = AttribDict["PROCESSING_REQUIRED"]
+            GuidSectionObj.AuthStatusValid = AttribDict["AUTH_STATUS_VALID"]
+
+            # Efi sections...
+            while True:
+                IsEncapsulate = self.__GetRuleEncapsulationSection(GuidSectionObj)
+                IsLeaf = self.__GetEfiSection(GuidSectionObj)
+                if not IsEncapsulate and not IsLeaf:
+                    break
+
+            if not self.__IsToken( "}"):
+                raise Warning("expected '}' At Line ", self.FileName, self.CurrentLineNumber)
+            Rule.SectionList.append(GuidSectionObj)
+
+            return True
+
+        return False
+
+    ## __GetVtf() method
+    #
+    # Get VTF section contents and store its data into VTF list of self.Profile
+    #
+    # Parses "[VTF.<Arch>.<Name>[,<Arch>]]" plus the optional IA32_RST_BIN
+    # statement and the component statements that follow.
+    #
+    # @param self The object pointer
+    # @retval True Successfully find a VTF
+    # @retval False Not able to find a VTF
+    #
+    def __GetVtf(self):
+
+        if not self.__GetNextToken():
+            return False
+
+        S = self.__Token.upper()
+        if S.startswith("[") and not S.startswith("[VTF."):
+            # Only [Rule.] and [OptionRom.] may legally follow VTF sections.
+            if not S.startswith("[RULE.") and not S.startswith("[OPTIONROM."):
+                raise Warning("Unknown section or section appear sequence error (The correct sequence should be [FD.], [FV.], [Capsule.], [VTF.], [Rule.], [OptionRom.])", self.FileName, self.CurrentLineNumber)
+            self.__UndoToken()
+            return False
+
+        self.__UndoToken()
+        if not self.__IsToken("[VTF.", True):
+            # Dump the unparsed remainder of the current line as a diagnostic.
+            FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
+            print 'Parsing String: %s in File %s, At line: %d, Offset Within Line: %d' \
+                    % (self.Profile.FileLinesList[self.CurrentLineNumber - 1][self.CurrentOffsetWithinLine :], FileLineTuple[0], FileLineTuple[1], self.CurrentOffsetWithinLine)
+            raise Warning("expected [VTF.] At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__SkipToToken("."):
+            raise Warning("expected '.' At Line ", self.FileName, self.CurrentLineNumber)
+
+        Arch = self.__SkippedChars.rstrip(".").upper()
+        if Arch not in ("IA32", "X64", "IPF", "ARM"):
+            raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextWord():
+            raise Warning("expected VTF name At Line ", self.FileName, self.CurrentLineNumber)
+        Name = self.__Token.upper()
+
+        VtfObj = Vtf.Vtf()
+        VtfObj.UiName = Name
+        VtfObj.KeyArch = Arch
+
+        # An optional ",<Arch>" extends the VTF to a second architecture.
+        if self.__IsToken(","):
+            if not self.__GetNextWord():
+                raise Warning("expected Arch list At Line ", self.FileName, self.CurrentLineNumber)
+            if self.__Token.upper() not in ("IA32", "X64", "IPF", "ARM"):
+                raise Warning("Unknown Arch At line ", self.FileName, self.CurrentLineNumber)
+            VtfObj.ArchList = self.__Token.upper()
+
+        if not self.__IsToken( "]"):
+            raise Warning("expected ']' At Line ", self.FileName, self.CurrentLineNumber)
+
+        # Optional "IA32_RST_BIN = <file>" reset-binary statement.
+        if self.__IsKeyword("IA32_RST_BIN"):
+            if not self.__IsToken("="):
+                raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+            if not self.__GetNextToken():
+                raise Warning("expected Reset file At Line ", self.FileName, self.CurrentLineNumber)
+
+            VtfObj.ResetBin = self.__Token
+
+        while self.__GetComponentStatement(VtfObj):
+            pass
+
+        self.Profile.VtfList.append(VtfObj)
+        return True
+
+    ## __GetComponentStatement() method
+    #
+    # Get components in VTF
+    #
+    # Parses one COMP_NAME block: a fixed sequence of COMP_LOC, COMP_TYPE,
+    # COMP_VER, COMP_CS, COMP_BIN, COMP_SYM and COMP_SIZE statements, in
+    # that order, appended to VtfObj.ComponentStatementList.
+    #
+    # @param self The object pointer
+    # @param VtfObj for whom component is got
+    # @retval True Successfully find a component
+    # @retval False Not able to find a component
+    #
+    def __GetComponentStatement(self, VtfObj):
+
+        if not self.__IsKeyword("COMP_NAME"):
+            return False
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextWord():
+            raise Warning("expected Component Name At Line ", self.FileName, self.CurrentLineNumber)
+
+        CompStatementObj = ComponentStatement.ComponentStatement()
+        CompStatementObj.CompName = self.__Token
+
+        if not self.__IsKeyword("COMP_LOC"):
+            raise Warning("expected COMP_LOC At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        CompStatementObj.CompLoc = ""
+        if self.__GetNextWord():
+            CompStatementObj.CompLoc = self.__Token
+            # "<region>|<pos>" form: only F, N and S positions are accepted.
+            if self.__IsToken('|'):
+                if not self.__GetNextWord():
+                    raise Warning("Expected Region Name At Line ", self.FileName, self.CurrentLineNumber)
+
+                if self.__Token not in ("F", "N", "S"):    #, "H", "L", "PH", "PL"): not support
+                    raise Warning("Unknown location type At line ", self.FileName, self.CurrentLineNumber)
+
+                CompStatementObj.FilePos = self.__Token
+        else:
+            # Empty COMP_LOC value: skip the rest of the line.
+            self.CurrentLineNumber += 1
+            self.CurrentOffsetWithinLine = 0
+
+        if not self.__IsKeyword("COMP_TYPE"):
+            raise Warning("expected COMP_TYPE At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected Component type At Line ", self.FileName, self.CurrentLineNumber)
+        # Besides the named types, a 1- or 2-digit hex value (0xN / 0xNN)
+        # is accepted.
+        # NOTE(review): the message "Unknown location type" below appears
+        # copy-pasted from the COMP_LOC check; "Unknown component type"
+        # was likely intended.
+        if self.__Token not in ("FIT", "PAL_B", "PAL_A", "OEM"):
+            if not self.__Token.startswith("0x") or len(self.__Token) < 3 or len(self.__Token) > 4 or \
+                not self.__HexDigit(self.__Token[2]) or not self.__HexDigit(self.__Token[-1]):
+                raise Warning("Unknown location type At line ", self.FileName, self.CurrentLineNumber)
+        CompStatementObj.CompType = self.__Token
+
+        if not self.__IsKeyword("COMP_VER"):
+            raise Warning("expected COMP_VER At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected Component version At Line ", self.FileName, self.CurrentLineNumber)
+
+        # Version is "-" or a dotted decimal like "1.0" / "12.34".
+        Pattern = re.compile('-$|[0-9]{0,1}[0-9]{1}\.[0-9]{0,1}[0-9]{1}')
+        if Pattern.match(self.__Token) == None:
+            raise Warning("Unknown version format At line ", self.FileName, self.CurrentLineNumber)
+        CompStatementObj.CompVer = self.__Token
+
+        if not self.__IsKeyword("COMP_CS"):
+            raise Warning("expected COMP_CS At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected Component CS At Line ", self.FileName, self.CurrentLineNumber)
+        if self.__Token not in ("1", "0"):
+            raise Warning("Unknown  Component CS At line ", self.FileName, self.CurrentLineNumber)
+        CompStatementObj.CompCs = self.__Token
+
+
+        if not self.__IsKeyword("COMP_BIN"):
+            raise Warning("expected COMP_BIN At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected Component file At Line ", self.FileName, self.CurrentLineNumber)
+
+        CompStatementObj.CompBin = self.__Token
+
+        if not self.__IsKeyword("COMP_SYM"):
+            raise Warning("expected COMP_SYM At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__GetNextToken():
+            raise Warning("expected Component symbol file At Line ", self.FileName, self.CurrentLineNumber)
+
+        CompStatementObj.CompSym = self.__Token
+
+        if not self.__IsKeyword("COMP_SIZE"):
+            raise Warning("expected COMP_SIZE At Line ", self.FileName, self.CurrentLineNumber)
+
+        if not self.__IsToken("="):
+            raise Warning("expected '=' At Line ", self.FileName, self.CurrentLineNumber)
+
+        # Size is "-" (auto), a decimal number, or a hex number.
+        if self.__IsToken("-"):
+            CompStatementObj.CompSize = self.__Token
+        elif self.__GetNextDecimalNumber():
+            CompStatementObj.CompSize = self.__Token
+        elif self.__GetNextHexNumber():
+            CompStatementObj.CompSize = self.__Token
+        else:
+            raise Warning("Unknown size At line ", self.FileName, self.CurrentLineNumber)
+
+        VtfObj.ComponentStatementList.append(CompStatementObj)
+        return True
+
+ ## __GetFvInFd() method
+ #
+ # Get FV list contained in FD
+ #
+ # @param self The object pointer
+ # @param FdName FD name
+ # @retval FvList list of FV in FD
+ #
+ def __GetFvInFd (self, FdName):
+
+ FvList = []
+ if FdName.upper() in self.Profile.FdDict.keys():
+ FdObj = self.Profile.FdDict[FdName.upper()]
+ for elementRegion in FdObj.RegionList:
+ if elementRegion.RegionType == 'FV':
+ for elementRegionData in elementRegion.RegionDataList:
+ if elementRegionData != None and elementRegionData.upper() not in FvList:
+ FvList.append(elementRegionData.upper())
+ return FvList
+
+ ## __GetReferencedFdFvTuple() method
+ #
+ # Get FD and FV list referenced by a FFS file
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param RefFdList referenced FD by section
+ # @param RefFvList referenced FV by section
+ #
+ def __GetReferencedFdFvTuple(self, FvObj, RefFdList = [], RefFvList = []):
+
+ for FfsObj in FvObj.FfsList:
+ if isinstance(FfsObj, FfsFileStatement.FileStatement):
+ if FfsObj.FvName != None and FfsObj.FvName.upper() not in RefFvList:
+ RefFvList.append(FfsObj.FvName.upper())
+ elif FfsObj.FdName != None and FfsObj.FdName.upper() not in RefFdList:
+ RefFdList.append(FfsObj.FdName.upper())
+ else:
+ self.__GetReferencedFdFvTupleFromSection(FfsObj, RefFdList, RefFvList)
+
+ ## __GetReferencedFdFvTupleFromSection() method
+ #
+ # Get FD and FV list referenced by a FFS section
+ #
+ # @param self The object pointer
+ # @param FfsFile contains sections to be searched
+ # @param FdList referenced FD by section
+ # @param FvList referenced FV by section
+ #
+ def __GetReferencedFdFvTupleFromSection(self, FfsFile, FdList = [], FvList = []):
+
+ SectionStack = []
+ SectionStack.extend(FfsFile.SectionList)
+ while SectionStack != []:
+ SectionObj = SectionStack.pop()
+ if isinstance(SectionObj, FvImageSection.FvImageSection):
+ if SectionObj.FvName != None and SectionObj.FvName.upper() not in FvList:
+ FvList.append(SectionObj.FvName.upper())
+ if SectionObj.Fv != None and SectionObj.Fv.UiFvName != None and SectionObj.Fv.UiFvName.upper() not in FvList:
+ FvList.append(SectionObj.Fv.UiFvName.upper())
+ self.__GetReferencedFdFvTuple(SectionObj.Fv, FdList, FvList)
+
+ if isinstance(SectionObj, CompressSection.CompressSection) or isinstance(SectionObj, GuidSection.GuidSection):
+ SectionStack.extend(SectionObj.SectionList)
+
+    ## CycleReferenceCheck() method
+    #
+    # Check whether cycle reference exists in FDF
+    #
+    # For every FV, walk the FVs/FDs it references (transitively via FDs'
+    # contained FVs); if the starting FV reappears on the work stack, a
+    # cycle exists and the accumulated trace is printed.
+    #
+    # NOTE(review): the 'return' inside 'finally' suppresses any exception
+    # raised in the try body (including non-Warning errors); and the inner
+    # 'for FvObj in FvInFdList' rebinds the FvObj name used above it —
+    # both look accidental and deserve confirmation.
+    #
+    # @param self The object pointer
+    # @retval True cycle reference exists
+    # @retval False Not exists cycle reference
+    #
+    def CycleReferenceCheck(self):
+
+        CycleRefExists = False
+
+        try:
+            for FvName in self.Profile.FvDict.keys():
+                LogStr = "Cycle Reference Checking for FV: %s\n" % FvName
+                RefFvStack = []
+                RefFvStack.append(FvName)
+                FdAnalyzedList = []
+
+                while RefFvStack != []:
+                    FvNameFromStack = RefFvStack.pop()
+                    if FvNameFromStack.upper() in self.Profile.FvDict.keys():
+                        FvObj = self.Profile.FvDict[FvNameFromStack.upper()]
+                    else:
+                        continue
+
+                    RefFdList = []
+                    RefFvList = []
+                    self.__GetReferencedFdFvTuple(FvObj, RefFdList, RefFvList)
+
+                    for RefFdName in RefFdList:
+                        if RefFdName in FdAnalyzedList:
+                            continue
+
+                        LogStr += "FD %s is referenced by FV %s\n" % (RefFdName, FvNameFromStack)
+                        FvInFdList = self.__GetFvInFd(RefFdName)
+                        if FvInFdList != []:
+                            LogStr += "FD %s contains FV: " % RefFdName
+                            for FvObj in FvInFdList:
+                                LogStr += FvObj
+                                LogStr += ' \n'
+                                if FvObj not in RefFvStack:
+                                    RefFvStack.append(FvObj)
+
+                        if FvName in RefFvStack:
+                            CycleRefExists = True
+                            raise Warning(LogStr)
+                        FdAnalyzedList.append(RefFdName)
+
+                    for RefFvName in RefFvList:
+                        LogStr += "FV %s is referenced by FV %s\n" % (RefFvName, FvNameFromStack)
+                        if RefFvName not in RefFvStack:
+                            RefFvStack.append(RefFvName)
+
+                        if FvName in RefFvStack:
+                            CycleRefExists = True
+                            raise Warning(LogStr)
+
+        except Warning:
+            # A detected cycle is reported via the accumulated trace.
+            print LogStr
+
+        finally:
+            return CycleRefExists
+
+if __name__ == "__main__":
+    # Ad-hoc self-test: parse a hard-coded development FDF file and run the
+    # cycle-reference check, reporting any Warning raised by the parser.
+    parser = FdfParser("..\LakeportX64Pkg.fdf")
+    try:
+        parser.ParseFile()
+        parser.CycleReferenceCheck()
+    except Warning, X:
+            print X.message
+    else:
+        print "Success!"
+
diff --git a/BaseTools/Source/Python/Common/GlobalData.py b/BaseTools/Source/Python/Common/GlobalData.py
new file mode 100644
index 0000000000..d56152ec8a
--- /dev/null
+++ b/BaseTools/Source/Python/Common/GlobalData.py
@@ -0,0 +1,37 @@
+## @file
+# This file is used to define common static strings used by INF/DEC/DSC files
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+import re
+
+# Whether the build host is Windows; None until detected by the build system.
+gIsWindows = None
+
+# Default locations used when resolving EDK compatibility package paths.
+gEdkCompatibilityPkg = "EdkCompatibilityPkg"
+gWorkspace = "."
+gEdkSource = "EdkCompatibilityPkg"
+gEfiSource = "."
+gEcpSource = "EdkCompatibilityPkg"
+
+# Presumably the parsed command-line options object — set by the driver. TODO confirm.
+gOptions = None
+# True when file lookups should be treated case-insensitively.
+gCaseInsensitive = False
+# Globally supplied macro definitions (name -> value).
+gGlobalDefines = {}
+# Cache of workspace files; None until populated by the caller.
+gAllFiles = None
+
+gEdkGlobal = {}
+gOverrideDir = {}
+
+# for debug trace purpose when problem occurs
+gProcessingFile = ''
+gBuildingModule = ''
+
+## Regular expression for matching macro used in DSC/DEC/INF file inclusion,
+#  e.g. "$(WORKSPACE)"; group 1 captures the macro name.
+gMacroPattern = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
+
diff --git a/BaseTools/Source/Python/Common/Identification.py b/BaseTools/Source/Python/Common/Identification.py
new file mode 100644
index 0000000000..a9b2f33d55
--- /dev/null
+++ b/BaseTools/Source/Python/Common/Identification.py
@@ -0,0 +1,58 @@
+## @file
+# This file is used to define the identification of INF/DEC/DSC files
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+## Identification
+#
+# This class defined basic Identification information structure which is used by INF/DEC/DSC files
+#
+# @param object: Inherited from object class
+#
+# @var FileName: To store data for Filename
+# @var FileFullPath: To store data for full path of the file
+# @var FileRelativePath: To store data for relative path of the file
+# @var RunStatus: Status of build system running
+#
+class Identification(object):
+    """Basic identification of an INF/DEC/DSC file: its name, full path,
+    relative path and owning package path.  All fields start empty and are
+    filled in by the parser that owns the instance."""
+    def __init__(self):
+        self.FileName = ''            # file name without directory
+        self.FileFullPath = ''        # absolute path of the file
+        self.FileRelativePath = ''    # directory part relative to the workspace
+        self.PackagePath = ''         # path of the package containing the file
+
+    ## GetFileName
+    #
+    # Reserved (stub, not implemented)
+    #
+    def GetFileName(self, FileFullPath, FileRelativePath):
+        pass
+
+    ## GetFileFullPath
+    #
+    # Reserved (stub, not implemented)
+    #
+    def GetFileFullPath(self, FileName, FileRelativePath):
+        pass
+
+    ## GetFileRelativePath
+    #
+    # Reserved (stub, not implemented)
+    #
+    def GetFileRelativePath(self, FileName, FileFullPath):
+        pass
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    # Smoke test: construct an empty Identification instance.
+    # NOTE(review): the name 'id' shadows the builtin id().
+    id = Identification()
diff --git a/BaseTools/Source/Python/Common/InfClassObject.py b/BaseTools/Source/Python/Common/InfClassObject.py
new file mode 100644
index 0000000000..a772840227
--- /dev/null
+++ b/BaseTools/Source/Python/Common/InfClassObject.py
@@ -0,0 +1,1116 @@
+## @file
+# This file is used to define each component of INF file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+from CommonDataClass.CommonClass import LibraryClassClass
+from CommonDataClass.ModuleClass import *
+from String import *
+from DataType import *
+from Identification import *
+from Dictionary import *
+from BuildToolError import *
+from Misc import sdict
+import GlobalData
+from Table.TableInf import TableInf
+import Database
+from Parsing import *
+
+#
+# Global variable
+#
+#
+# Global variable
+#
+# Map of INF section name (upper-cased) to the MODEL_* identifier used both as
+# the RecordSet key and as the record's model type in the database.
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+           TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+           TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+           TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+           TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+           TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+           TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+           TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+           TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+           TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+           TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+           TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+           TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+           TAB_GUIDS.upper() : MODEL_EFI_GUID,
+           TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+           TAB_PPIS.upper() : MODEL_EFI_PPI,
+           TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+           TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+           }
+
+# Translation of legacy (R8) COMPONENT_TYPE values to R9 MODULE_TYPE values;
+# used by GenModuleHeader when an INF has no INF_VERSION.
+gComponentType2ModuleType = {
+    "LIBRARY"               :   "BASE",
+    "SECURITY_CORE"         :   "SEC",
+    "PEI_CORE"              :   "PEI_CORE",
+    "COMBINED_PEIM_DRIVER"  :   "PEIM",
+    "PIC_PEIM"              :   "PEIM",
+    "RELOCATABLE_PEIM"      :   "PEIM",
+    "PE32_PEIM"             :   "PEIM",
+    "BS_DRIVER"             :   "DXE_DRIVER",
+    "RT_DRIVER"             :   "DXE_RUNTIME_DRIVER",
+    "SAL_RT_DRIVER"         :   "DXE_SAL_DRIVER",
+#    "BS_DRIVER"             :   "DXE_SMM_DRIVER",
+#    "BS_DRIVER"             :   "UEFI_DRIVER",
+    "APPLICATION"           :   "UEFI_APPLICATION",
+    "LOGO"                  :   "BASE",
+}
+
+# Matches NMAKE macro names like "C_FLAGS", "EBC_LINK_FLAGS", "LIB_STD_FLAGS_DLL";
+# group 1 captures the tool name (C, LIB, LINK, ...).
+gNmakeFlagPattern = re.compile("(?:EBC_)?([A-Z]+)_(?:STD_|PROJ_|ARCH_)?FLAGS(?:_DLL|_ASL|_EXE)?", re.UNICODE)
+# Translation of NMAKE tool names to build-tool codes used in BuildOptions.
+gNmakeFlagName2ToolCode = {
+    "C"         :   "CC",
+    "LIB"       :   "SLINK",
+    "LINK"      :   "DLINK",
+}
+
+class InfHeader(ModuleHeaderClass):
+    """[Defines] section header of an INF file.
+
+    Extends ModuleHeaderClass with dict-style access: _Mapping_ translates a
+    [Defines] key (e.g. BASE_NAME) into the attribute name that stores its
+    value (e.g. Name), so parsed key/value pairs can be assigned directly.
+    """
+    _Mapping_ = {
+        #
+        # Required Fields
+        #
+        TAB_INF_DEFINES_BASE_NAME                   : "Name",
+        TAB_INF_DEFINES_FILE_GUID                   : "Guid",
+        TAB_INF_DEFINES_MODULE_TYPE                 : "ModuleType",
+        TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION   : "EfiSpecificationVersion",
+        TAB_INF_DEFINES_EDK_RELEASE_VERSION         : "EdkReleaseVersion",
+        #
+        # Optional Fields
+        #
+        TAB_INF_DEFINES_INF_VERSION                 : "InfVersion",
+        TAB_INF_DEFINES_BINARY_MODULE               : "BinaryModule",
+        TAB_INF_DEFINES_COMPONENT_TYPE              : "ComponentType",
+        TAB_INF_DEFINES_MAKEFILE_NAME               : "MakefileName",
+        TAB_INF_DEFINES_BUILD_NUMBER                : "BuildNumber",
+        TAB_INF_DEFINES_BUILD_TYPE                  : "BuildType",
+        TAB_INF_DEFINES_FFS_EXT                     : "FfsExt",
+        TAB_INF_DEFINES_FV_EXT                      : "FvExt",
+        TAB_INF_DEFINES_SOURCE_FV                   : "SourceFv",
+        TAB_INF_DEFINES_VERSION_NUMBER              : "VersionNumber",
+        TAB_INF_DEFINES_VERSION_STRING              : "VersionString",
+        TAB_INF_DEFINES_VERSION                     : "Version",
+        TAB_INF_DEFINES_PCD_IS_DRIVER               : "PcdIsDriver",
+        TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H         : "TianoR8FlashMap_h",
+        TAB_INF_DEFINES_SHADOW                      : "Shadow",
+#       TAB_INF_DEFINES_LIBRARY_CLASS               : "LibraryClass",
+#        TAB_INF_DEFINES_ENTRY_POINT                 : "ExternImages",
+#        TAB_INF_DEFINES_UNLOAD_IMAGE                : "ExternImages",
+#        TAB_INF_DEFINES_CONSTRUCTOR                 : ,
+#        TAB_INF_DEFINES_DESTRUCTOR                  : ,
+#        TAB_INF_DEFINES_DEFINE                      : "Define",
+#        TAB_INF_DEFINES_SPEC                        : "Specification",
+#        TAB_INF_DEFINES_CUSTOM_MAKEFILE             : "CustomMakefile",
+#        TAB_INF_DEFINES_MACRO                       :
+    }
+
+    def __init__(self):
+        ModuleHeaderClass.__init__(self)
+        self.VersionNumber = ''
+        self.VersionString = ''
+
+    ## Store a [Defines] value under the attribute mapped from 'key'.
+    #  Raises KeyError when 'key' is not in _Mapping_.
+    def __setitem__(self, key, value):
+        self.__dict__[self._Mapping_[key]] = value
+
+    ## Fetch the value stored for a [Defines] key.
+    def __getitem__(self, key):
+        return self.__dict__[self._Mapping_[key]]
+
+    ## "in" test support
+    def __contains__(self, key):
+        return key in self._Mapping_
+
+## InfObject
+#
+# This class defined basic Inf object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
+class InfObject(object):
+    """Common base class for INF parser objects."""
+    def __init__(self):
+        # NOTE(review): object.__init__() is called unbound without 'self';
+        # executing it would raise TypeError in Python 2.  The Inf subclass
+        # below defines its own __init__ without calling this one, so the
+        # defect is latent rather than triggered here.
+        object.__init__()
+
+## Inf
+#
+# This class defined the structure used in Inf object
+#
+# @param InfObject: Inherited from InfObject class
+# @param Ffilename: Input value for Ffilename of Inf file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# Fales is not to merge all arches
+# default is False
+# @param IsToModule: Input value for IsToModule
+# True is to transfer to ModuleObject automatically
+# False is not to transfer to ModuleObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var UserExtensions: To store value for UserExtensions
+# @var Module: To store value for Module, it is a structure as ModuleClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var KeyList: To store value for KeyList, a list for all Keys used in Inf
+#
+class Inf(InfObject):
+    def __init__(self, Filename = None, IsToDatabase = False, IsToModule = False, WorkspaceDir = None, Database = None, SupArchList = DataType.ARCH_LIST):
+        """Create an INF parser bound to the given workspace database.
+
+        NOTE(review): the 'Database' parameter shadows the imported Database
+        module, and the default SupArchList = DataType.ARCH_LIST is a shared
+        mutable list — callers should not mutate it in place.
+        """
+        self.Identification = Identification()
+        self.Module = ModuleClass()
+        self.UserExtensions = ''
+        self.WorkspaceDir = WorkspaceDir
+        self.SupArchList = SupArchList
+        self.IsToDatabase = IsToDatabase
+
+        # Database handles: cursor plus the File and Inf tables.
+        self.Cur = Database.Cur
+        self.TblFile = Database.TblFile
+        self.TblInf = Database.TblInf
+        self.FileID = -1          # set by LoadInfFile once the file record exists
+        #self.TblInf = TableInf(Database.Cur)
+
+        # All section names this parser understands.
+        self.KeyList = [
+            TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS,
+            TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_LIBRARIES,
+            TAB_INF_FIXED_PCD, TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD,
+            TAB_INF_PCD_EX, TAB_DEPEX, TAB_NMAKE, TAB_INF_DEFINES
+        ]
+        #
+        # Upper all KEYs to ignore case sensitive when parsing
+        #
+        self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+        #
+        # Init RecordSet: one record list per section model.
+        #
+        self.RecordSet = {}
+        for Key in self.KeyList:
+            self.RecordSet[Section[Key]] = []
+
+        #
+        # Load Inf file if filename is not None
+        #
+        if Filename != None:
+            self.LoadInfFile(Filename)
+
+        #
+        # Transfer to Module Object if IsToModule is True
+        #
+        if IsToModule:
+            self.InfToModule()
+
+ ## Transfer to Module Object
+ #
+ # Transfer all contents of an Inf file to a standard Module Object
+ #
+    def InfToModule(self):
+        """Transfer all parsed INF records into the standard ModuleClass object.
+
+        Each Gen* call below consumes self.RecordSet entries for one section
+        type and appends the converted objects to self.Module.  GenModuleHeader
+        must run first: later steps read self.Module.Header.
+        """
+        #
+        # Init global information for the file
+        #
+        ContainerFile = self.Identification.FileFullPath
+
+        # [Defines] -> Module.Header
+        self.GenModuleHeader(ContainerFile)
+
+        # [BuildOptions]
+        self.GenBuildOptions(ContainerFile)
+
+        # [Includes]
+        self.GenIncludes(ContainerFile)
+
+        # [Libraries]
+        self.GenLibraries(ContainerFile)
+
+        # [LibraryClasses]
+        self.GenLibraryClasses(ContainerFile)
+
+        # [Packages]
+        self.GenPackages(ContainerFile)
+
+        # [Nmake] (legacy R8 sections)
+        self.GenNmakes(ContainerFile)
+
+        # All Pcd sections (FixedPcd/PatchPcd/FeaturePcd/Pcd/PcdEx)
+        self.GenPcds(ContainerFile)
+
+        # [Sources]
+        self.GenSources(ContainerFile)
+
+        # [UserExtensions]
+        self.GenUserExtensions(ContainerFile)
+
+        # [Guids], [Protocols], [Ppis] share one converter.
+        self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+        self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+        self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+        # [Depex]
+        self.GenDepexes(ContainerFile)
+
+        # [Binaries]
+        self.GenBinaries(ContainerFile)
+
+ ## Parse [Defines] section
+ #
+ # Parse [Defines] section into InfDefines object
+ #
+ # @param InfFile The path of the INF file
+ # @param Section The title of "Defines" section
+ # @param Lines The content of "Defines" section
+ #
+    def ParseDefines(self, InfFile, Section, Lines):
+        """Parse a [Defines] (or [Defines.<arch>]) section title and content.
+
+        NOTE(review): this method references self.Defines and InfDefines,
+        neither of which is created by __init__ or imported in this file view —
+        it appears to be dead/legacy code; verify before calling.
+
+        @param InfFile   The path of the INF file
+        @param Section   The title of the "Defines" section
+        @param Lines     The content of the "Defines" section
+        """
+        TokenList = Section.split(TAB_SPLIT)
+        # [Defines.arch.platform] (three dotted parts) is not supported.
+        if len(TokenList) == 3:
+            RaiseParserError(Section, "Defines", InfFile, "[xx.yy.%s] format (with platform) is not supported")
+        if len(TokenList) == 2:
+            Arch = TokenList[1].upper()
+        else:
+            Arch = TAB_ARCH_COMMON
+
+        if Arch not in self.Defines:
+            self.Defines[Arch] = InfDefines()
+        GetSingleValueOfKeyFromLines(Lines, self.Defines[Arch].DefinesDictionary,
+                                     TAB_COMMENT_SPLIT, TAB_EQUAL_SPLIT, False, None)
+
+ ## Load Inf file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Inf file
+ #
+    def LoadInfFile(self, Filename):
+        """Parse the INF file line by line, inserting each section's items into
+        the database and self.RecordSet, then expand DEFINE macros.
+
+        @param Filename: path of the INF file to load
+        """
+        #
+        # Insert a record for file
+        #
+        Filename = NormPath(Filename)
+        self.Identification.FileFullPath = Filename
+        (self.Identification.FileRelativePath, self.Identification.FileName) = os.path.split(Filename)
+        self.FileID = self.TblFile.InsertFile(Filename, MODEL_FILE_INF)
+
+        #
+        # Init InfTable
+        #
+        #self.TblInf.Table = "Inf%s" % self.FileID
+        #self.TblInf.Create()
+
+        #
+        # Init common datas
+        #
+        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+        [], [], TAB_UNKNOWN, [], [], []
+        LineNo = 0
+
+        #
+        # Parse file content
+        #
+        IsFindBlockComment = False
+        ReservedLine = ''
+        for Line in open(Filename, 'r'):
+            LineNo = LineNo + 1
+            #
+            # Strip R8-style block comments; text before the opener is kept in
+            # ReservedLine and rejoined with the text after the closer.
+            #
+            if Line.find(TAB_COMMENT_R8_START) > -1:
+                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+                IsFindBlockComment = True
+            if Line.find(TAB_COMMENT_R8_END) > -1:
+                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+                ReservedLine = ''
+                IsFindBlockComment = False
+            if IsFindBlockComment:
+                continue
+
+            #
+            # Remove comments at tail and remove spaces again
+            #
+            Line = CleanString(Line)
+            if Line == '':
+                continue
+
+            #
+            # Find a new section tab
+            # First insert previous section items
+            # And then parse the content of the new section
+            #
+            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+                # Skip divider lines of the form "[--...]".
+                if Line[1:3] == "--":
+                    continue
+                Model = Section[CurrentSection.upper()]
+                #
+                # Insert items data of previous section
+                #
+                InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+                #
+                # Parse the new section header, e.g. [Sources.IA32, Sources.X64]
+                #
+                SectionItemList = []
+                ArchList = []
+                ThirdList = []
+
+                CurrentSection = ''
+                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+                for Item in LineList:
+                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
+                    if CurrentSection == '':
+                        CurrentSection = ItemList[0]
+                    else:
+                        # All comma-separated entries must name the same section.
+                        if CurrentSection != ItemList[0]:
+                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+                    if CurrentSection.upper() not in self.KeyList:
+                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+                        CurrentSection = TAB_UNKNOWN
+                        continue
+                    # Pad to at least [name, arch, third] before validating.
+                    ItemList.append('')
+                    ItemList.append('')
+                    if len(ItemList) > 5:
+                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+                    else:
+                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+                        ArchList.append(ItemList[1].upper())
+                        ThirdList.append(ItemList[2])
+
+                continue
+
+            #
+            # Not in any defined section
+            #
+            if CurrentSection == TAB_UNKNOWN:
+                ErrorMsg = "%s is not in any defined section" % Line
+                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+            #
+            # Add a section item
+            #
+            SectionItemList.append([Line, LineNo])
+            # End of parse
+        #End of For
+
+        #
+        # Insert items data of last section
+        #
+        Model = Section[CurrentSection.upper()]
+        InsertSectionItemsIntoDatabase(self.TblInf, self.FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, self.RecordSet)
+
+        #
+        # Replace all DEFINE macros with its actual values
+        #
+        ParseDefineMacro2(self.TblInf, self.RecordSet, GlobalData.gGlobalDefines)
+
+ ## Show detailed information of Module
+ #
+ # Print all members and their values of Module class
+ #
+    def ShowModule(self):
+        """Debug helper: print every member of the converted Module object,
+        per-arch header first, then each section list."""
+        M = self.Module
+        # Per-arch [Defines] header values.
+        for Arch in M.Header.keys():
+            print '\nArch =', Arch
+            print 'Filename =', M.Header[Arch].FileName
+            print 'FullPath =', M.Header[Arch].FullPath
+            print 'BaseName =', M.Header[Arch].Name
+            print 'Guid =', M.Header[Arch].Guid
+            print 'Version =', M.Header[Arch].Version
+            print 'InfVersion =', M.Header[Arch].InfVersion
+            print 'EfiSpecificationVersion =', M.Header[Arch].EfiSpecificationVersion
+            print 'EdkReleaseVersion =', M.Header[Arch].EdkReleaseVersion
+            print 'ModuleType =', M.Header[Arch].ModuleType
+            print 'BinaryModule =', M.Header[Arch].BinaryModule
+            print 'ComponentType =', M.Header[Arch].ComponentType
+            print 'MakefileName =', M.Header[Arch].MakefileName
+            print 'BuildNumber =', M.Header[Arch].BuildNumber
+            print 'BuildType =', M.Header[Arch].BuildType
+            print 'FfsExt =', M.Header[Arch].FfsExt
+            print 'FvExt =', M.Header[Arch].FvExt
+            print 'SourceFv =', M.Header[Arch].SourceFv
+            print 'PcdIsDriver =', M.Header[Arch].PcdIsDriver
+            print 'TianoR8FlashMap_h =', M.Header[Arch].TianoR8FlashMap_h
+            print 'Shadow =', M.Header[Arch].Shadow
+            print 'LibraryClass =', M.Header[Arch].LibraryClass
+            for Item in M.Header[Arch].LibraryClass:
+                print Item.LibraryClass, DataType.TAB_VALUE_SPLIT.join(Item.SupModuleList)
+            print 'CustomMakefile =', M.Header[Arch].CustomMakefile
+            print 'Define =', M.Header[Arch].Define
+            print 'Specification =', M.Header[Arch].Specification
+        # Extern entry points / constructors collected from R8-style defines.
+        for Item in self.Module.ExternImages:
+            print '\nEntry_Point = %s, UnloadImage = %s' % (Item.ModuleEntryPoint, Item.ModuleUnloadImage)
+        for Item in self.Module.ExternLibraries:
+            print 'Constructor = %s, Destructor = %s' % (Item.Constructor, Item.Destructor)
+        # Section lists.
+        print '\nBuildOptions =', M.BuildOptions
+        for Item in M.BuildOptions:
+            print Item.ToolChainFamily, Item.ToolChain, Item.Option, Item.SupArchList
+        print '\nIncludes =', M.Includes
+        for Item in M.Includes:
+            print Item.FilePath, Item.SupArchList
+        print '\nLibraries =', M.Libraries
+        for Item in M.Libraries:
+            print Item.Library, Item.SupArchList
+        print '\nLibraryClasses =', M.LibraryClasses
+        for Item in M.LibraryClasses:
+            print Item.LibraryClass, Item.RecommendedInstance, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define
+        print '\nPackageDependencies =', M.PackageDependencies
+        for Item in M.PackageDependencies:
+            print Item.FilePath, Item.SupArchList, Item.FeatureFlag
+        print '\nNmake =', M.Nmake
+        for Item in M.Nmake:
+            print Item.Name, Item.Value, Item.SupArchList
+        print '\nPcds =', M.PcdCodes
+        for Item in M.PcdCodes:
+            print '\tCName=',Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList
+        print '\nSources =', M.Sources
+        for Source in M.Sources:
+            print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList
+        print '\nUserExtensions =', M.UserExtensions
+        for UserExtension in M.UserExtensions:
+            print UserExtension.UserID, UserExtension.Identifier,UserExtension.Content
+        print '\nGuids =', M.Guids
+        for Item in M.Guids:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nProtocols =', M.Protocols
+        for Item in M.Protocols:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nPpis =', M.Ppis
+        for Item in M.Ppis:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nDepex =', M.Depex
+        for Item in M.Depex:
+            print Item.Depex, Item.SupArchList, Item.Define
+        print '\nBinaries =', M.Binaries
+        for Binary in M.Binaries:
+            print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList
+
+ ## Convert [Defines] section content to ModuleHeaderClass
+ #
+ # Convert [Defines] section content to ModuleHeaderClass
+ #
+ # @param Defines The content under [Defines] section
+ # @param ModuleHeader An object of ModuleHeaderClass
+ # @param Arch The supported ARCH
+ #
+    def GenModuleHeader(self, ContainerFile):
+        """Convert the [Defines] records into one InfHeader per arch and store
+        them in self.Module.Header; also push split key/value pairs back into
+        the database.
+
+        @param ContainerFile  The INF file full path (used in error messages)
+        """
+        EdkLogger.debug(2, "Generate ModuleHeader ...")
+        File = self.Identification.FileFullPath
+        #
+        # Update all defines item in database: split "<Key> = <Value>".
+        #
+        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
+        for Record in RecordSet:
+            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
+            if len(ValueList) != 2:
+                RaiseParserError(Record[0], 'Defines', ContainerFile, '<Key> = <Value>', Record[2])
+            ID, Value1, Value2, Arch, LineNo = Record[3], ValueList[0], ValueList[1], Record[1], Record[2]
+            SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+                            where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Value1), ConvertToSqlString2(Value2), ID)
+            self.TblInf.Exec(SqlCommand)
+
+        # NOTE(review): iterates DataType.ARCH_LIST, not self.SupArchList as the
+        # other Gen* methods do — confirm this asymmetry is intended.
+        for Arch in DataType.ARCH_LIST:
+            ModuleHeader = InfHeader()
+            ModuleHeader.FileName = self.Identification.FileName
+            ModuleHeader.FullPath = self.Identification.FileFullPath
+            DefineList = QueryDefinesItem2(self.TblInf, Arch, self.FileID)
+
+            # Keys with a direct attribute mapping go straight into the header;
+            # the rest are handled specially below.
+            NotProcessedDefineList = []
+            for D in DefineList:
+                if D[0] in ModuleHeader:
+                    ModuleHeader[D[0]] = GetSplitValueList(D[1])[0]
+                else:
+                    NotProcessedDefineList.append(D)
+
+            # R8 LIBRARY components implicitly export a class named after the module.
+            if ModuleHeader.ComponentType == "LIBRARY":
+                Lib = LibraryClassClass()
+                Lib.LibraryClass = ModuleHeader.Name
+                Lib.SupModuleList = DataType.SUP_MODULE_LIST
+                ModuleHeader.LibraryClass.append(Lib)
+
+            # we need to make some key defines resolved first
+            for D in NotProcessedDefineList:
+                if D[0] == TAB_INF_DEFINES_LIBRARY_CLASS:
+                    List = GetSplitValueList(D[1], DataType.TAB_VALUE_SPLIT, 1)
+                    Lib = LibraryClassClass()
+                    Lib.LibraryClass = CleanString(List[0])
+                    if len(List) == 1:
+                        Lib.SupModuleList = DataType.SUP_MODULE_LIST
+                    elif len(List) == 2:
+                        Lib.SupModuleList = GetSplitValueList(CleanString(List[1]), ' ')
+                    ModuleHeader.LibraryClass.append(Lib)
+                elif D[0] == TAB_INF_DEFINES_CUSTOM_MAKEFILE:
+                    List = D[1].split(DataType.TAB_VALUE_SPLIT)
+                    if len(List) == 2:
+                        ModuleHeader.CustomMakefile[CleanString(List[0])] = CleanString(List[1])
+                    else:
+                        RaiseParserError(D[1], 'CUSTOM_MAKEFILE of Defines', File, 'CUSTOM_MAKEFILE=<Family>|<Filename>', D[2])
+                elif D[0] == TAB_INF_DEFINES_ENTRY_POINT:
+                    Image = ModuleExternImageClass()
+                    Image.ModuleEntryPoint = CleanString(D[1])
+                    self.Module.ExternImages.append(Image)
+                elif D[0] == TAB_INF_DEFINES_UNLOAD_IMAGE:
+                    Image = ModuleExternImageClass()
+                    Image.ModuleUnloadImage = CleanString(D[1])
+                    self.Module.ExternImages.append(Image)
+                elif D[0] == TAB_INF_DEFINES_CONSTRUCTOR:
+                    LibraryClass = ModuleExternLibraryClass()
+                    LibraryClass.Constructor = CleanString(D[1])
+                    self.Module.ExternLibraries.append(LibraryClass)
+                elif D[0] == TAB_INF_DEFINES_DESTRUCTOR:
+                    LibraryClass = ModuleExternLibraryClass()
+                    LibraryClass.Destructor = CleanString(D[1])
+                    self.Module.ExternLibraries.append(LibraryClass)
+                elif D[0] == TAB_INF_DEFINES_DEFINE:
+                    List = D[1].split(DataType.TAB_EQUAL_SPLIT)
+                    if len(List) != 2:
+                        # NOTE(review): 'Item' is not bound in this scope — this
+                        # call would raise NameError; likely meant D[1].
+                        RaiseParserError(Item, 'DEFINE of Defines', File, 'DEFINE <Word> = <Word>', D[2])
+                    else:
+                        ModuleHeader.Define[CleanString(List[0])] = CleanString(List[1])
+                elif D[0] == TAB_INF_DEFINES_SPEC:
+                    List = D[1].split(DataType.TAB_EQUAL_SPLIT)
+                    if len(List) != 2:
+                        # NOTE(review): same unbound 'Item' as above.
+                        RaiseParserError(Item, 'SPEC of Defines', File, 'SPEC <Word> = <Version>', D[2])
+                    else:
+                        ModuleHeader.Specification[CleanString(List[0])] = CleanString(List[1])
+
+            #
+            # Get version of INF: INF_VERSION present => R9 INF, else R8 INF.
+            #
+            if ModuleHeader.InfVersion != "":
+                # R9 inf: VERSION_STRING wins over deprecated VERSION_NUMBER.
+                VersionNumber = ModuleHeader.VersionNumber
+                VersionString = ModuleHeader.VersionString
+                if len(VersionNumber) > 0 and len(VersionString) == 0:
+                    EdkLogger.warn(2000, 'VERSION_NUMBER depricated; INF file %s should be modified to use VERSION_STRING instead.' % self.Identification.FileFullPath)
+                    ModuleHeader.Version = VersionNumber
+                if len(VersionString) > 0:
+                    if len(VersionNumber) > 0:
+                        EdkLogger.warn(2001, 'INF file %s defines both VERSION_NUMBER and VERSION_STRING, using VERSION_STRING' % self.Identification.FileFullPath)
+                    ModuleHeader.Version = VersionString
+            else:
+                # R8 inf: synthesize an INF_VERSION and map COMPONENT_TYPE to MODULE_TYPE.
+                ModuleHeader.InfVersion = "0x00010000"
+                if ModuleHeader.ComponentType in gComponentType2ModuleType:
+                    ModuleHeader.ModuleType = gComponentType2ModuleType[ModuleHeader.ComponentType]
+                elif ModuleHeader.ComponentType != '':
+                    EdkLogger.error("Parser", PARSER_ERROR, "Unsupported R8 component type [%s]" % ModuleHeader.ComponentType, ExtraData=File, RaiseError = EdkLogger.IsRaiseError)
+
+            self.Module.Header[Arch] = ModuleHeader
+
+
+ ## GenBuildOptions
+ #
+ # Gen BuildOptions of Inf
+ # [<Family>:]<ToolFlag>=Flag
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenBuildOptions(self, ContainerFile):
+        """Convert [BuildOptions] records ([<Family>:]<ToolFlag>=Flag) into
+        BuildOptionClass objects on self.Module, merging identical options
+        across arches via MergeArches.
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
+        BuildOptions = {}
+        #
+        # Get all BuildOptions
+        #
+        RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (Family, ToolChain, Flag) = GetBuildOption(Record[0], ContainerFile, Record[2])
+                    MergeArches(BuildOptions, (Family, ToolChain, Flag), Arch)
+                    #
+                    # Update to Database with the parsed triple.
+                    #
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Family), ConvertToSqlString2(ToolChain), ConvertToSqlString2(Flag), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        for Key in BuildOptions.keys():
+            BuildOption = BuildOptionClass(Key[0], Key[1], Key[2])
+            BuildOption.SupArchList = BuildOptions[Key]
+            self.Module.BuildOptions.append(BuildOption)
+
+ ## GenIncludes
+ #
+ # Gen Includes of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenIncludes(self, ContainerFile):
+        """Convert [Includes] records into IncludeClass objects on self.Module,
+        merging identical paths across arches.
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+        # sdict keeps insertion order of include paths.
+        Includes = sdict()
+        #
+        # Get all Includes
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    MergeArches(Includes, Record[0], Arch)
+
+        for Key in Includes.keys():
+            Include = IncludeClass()
+            Include.FilePath = NormPath(Key)
+            Include.SupArchList = Includes[Key]
+            self.Module.Includes.append(Include)
+
+ ## GenLibraries
+ #
+ # Gen Libraries of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenLibraries(self, ContainerFile):
+        """Convert [Libraries] records into ModuleLibraryClass objects on
+        self.Module, merging identical entries across arches.
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARIES)
+        Libraries = sdict()
+        #
+        # Get all library records
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_INSTANCE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    MergeArches(Libraries, Record[0], Arch)
+
+        for Key in Libraries.keys():
+            Library = ModuleLibraryClass()
+            # replace macro and remove file extension
+            Library.Library = Key.rsplit('.', 1)[0]
+            Library.SupArchList = Libraries[Key]
+            self.Module.Libraries.append(Library)
+
+ ## GenLibraryClasses
+ #
+ # Get LibraryClass of Inf
+ # <LibraryClassKeyWord>|<LibraryInstance>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenLibraryClasses(self, ContainerFile):
+        """Convert [LibraryClasses] records
+        (<LibraryClassKeyWord>|<LibraryInstance>) into LibraryClassClass
+        objects on self.Module, merging identical tuples across arches.
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+        LibraryClasses = {}
+        #
+        # Get all LibraryClasses
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
+                    MergeArches(LibraryClasses, (LibClassName, LibClassIns, Pcd, SupModelList), Arch)
+                    #
+                    # Update to Database
+                    #
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(LibClassName), ConvertToSqlString2(LibClassIns), ConvertToSqlString2(SupModelList), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        for Key in LibraryClasses.keys():
+            KeyList = Key[0].split(DataType.TAB_VALUE_SPLIT)  # NOTE(review): unused
+            LibraryClass = LibraryClassClass()
+            LibraryClass.LibraryClass = Key[0]
+            LibraryClass.RecommendedInstance = NormPath(Key[1])
+            LibraryClass.FeatureFlag = Key[2]
+            LibraryClass.SupArchList = LibraryClasses[Key]
+            LibraryClass.SupModuleList = GetSplitValueList(Key[3])
+            self.Module.LibraryClasses.append(LibraryClass)
+
+ ## GenPackages
+ #
+ # Gen Packages of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenPackages(self, ContainerFile):
+        """Convert [Packages] records into ModulePackageDependencyClass objects
+        on self.Module, merging identical (package, pcd) pairs across arches.
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES)
+        Packages = {}
+        #
+        # Get all Packages
+        #
+        RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (Package, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2])
+                    MergeArches(Packages, (Package, Pcd), Arch)
+                    # Write the split (package, pcd) pair back to the database.
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Package), ConvertToSqlString2(Pcd), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        for Key in Packages.keys():
+            Package = ModulePackageDependencyClass()
+            Package.FilePath = NormPath(Key[0])
+            Package.SupArchList = Packages[Key]
+            Package.FeatureFlag = Key[1]
+            self.Module.PackageDependencies.append(Package)
+
+ ## GenNmakes
+ #
+ # Gen Nmakes of Inf
+ #
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenNmakes(self, ContainerFile):
+        """Convert legacy [Nmake] records into ModuleNmakeClass objects and,
+        for recognized macro names, into the equivalent R9 constructs
+        (entry point, DPX source, or BuildOptions).
+
+        @param ContainerFile: The Inf file full path
+        """
+        EdkLogger.debug(2, "Generate %s ..." % TAB_NMAKE)
+        Nmakes = sdict()
+        #
+        # Get all Nmakes
+        #
+        RecordSet = self.RecordSet[MODEL_META_DATA_NMAKE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    MergeArches(Nmakes, Record[0], Arch)
+
+        for Key in Nmakes.keys():
+            List = GetSplitValueList(Key, DataType.TAB_EQUAL_SPLIT, MaxSplit=1)
+            if len(List) != 2:
+                RaiseParserError(Key, 'Nmake', ContainerFile, '<MacroName> = <Value>')
+                # NOTE(review): unreachable if RaiseParserError raises, as its
+                # name suggests — confirm whether it can return.
+                continue
+            Nmake = ModuleNmakeClass()
+            Nmake.Name = List[0]
+            Nmake.Value = List[1]
+            Nmake.SupArchList = Nmakes[Key]
+            self.Module.Nmake.append(Nmake)
+
+            # convert R8 format to R9 format
+            if Nmake.Name == "IMAGE_ENTRY_POINT":
+                Image = ModuleExternImageClass()
+                Image.ModuleEntryPoint = Nmake.Value
+                self.Module.ExternImages.append(Image)
+            elif Nmake.Name == "DPX_SOURCE":
+                Source = ModuleSourceFileClass(NormPath(Nmake.Value), "", "", "", "", Nmake.SupArchList)
+                self.Module.Sources.append(Source)
+            else:
+                # Tool-flag macros (e.g. C_FLAGS) become MSFT build options.
+                ToolList = gNmakeFlagPattern.findall(Nmake.Name)
+                if len(ToolList) == 0 or len(ToolList) != 1:
+                    EdkLogger.warn("\nParser", "Don't know how to do with MACRO: %s" % Nmake.Name,
+                                   ExtraData=ContainerFile)
+                else:
+                    if ToolList[0] in gNmakeFlagName2ToolCode:
+                        Tool = gNmakeFlagName2ToolCode[ToolList[0]]
+                    else:
+                        Tool = ToolList[0]
+                    BuildOption = BuildOptionClass("MSFT", "*_*_*_%s_FLAGS" % Tool, Nmake.Value)
+                    BuildOption.SupArchList = Nmake.SupArchList
+                    self.Module.BuildOptions.append(BuildOption)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Inf
+ # <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+    def GenPcds(self, ContainerFile):
+        # Collect PCD references of all five item types from the parsed
+        # record sets, merge their supported-arch lists, write the resolved
+        # <TokenSpaceGuidCName>.<PcdCName> back to the database when enabled,
+        # and append one PcdClass per unique entry to self.Module.PcdCodes.
+        #
+        # @param ContainerFile: full path of the INF file (for error reports)
+        EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+        Pcds = {}
+        PcdToken = {}
+
+        #
+        # Get all Pcd records, one record set per PCD item type
+        #
+        RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+        RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+        RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+        RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+        RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet1:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    # NOTE(review): the following if/pass is a no-op -- looks
+                    # like leftover debug scaffolding; confirm before removing.
+                    if self.Module.Header[Arch].LibraryClass != {}:
+                        pass
+                    (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+                    MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+                    PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+            for Record in RecordSet2:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+                    MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+                    PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+            for Record in RecordSet3:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+                    MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+                    PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+            for Record in RecordSet4:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+                    MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+                    PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+            for Record in RecordSet5:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    # Dynamic PCDs pass an empty item-type string to GetPcdOfInf
+                    (TokenGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], "", ContainerFile, Record[2])
+                    MergeArches(Pcds, (TokenGuidCName, TokenName, Value, Type), Arch)
+                    PcdToken[Record[3]] = (TokenGuidCName, TokenName)
+        #
+        # Update to database: store "<guid>.<name>" in Value2 for each record ID
+        #
+        if self.IsToDatabase:
+            for Key in PcdToken.keys():
+                SqlCommand = """update %s set Value2 = '%s' where ID = %s""" % (self.TblInf.Table, ".".join((PcdToken[Key][0], PcdToken[Key][1])), Key)
+                self.TblInf.Exec(SqlCommand)
+
+        for Key in Pcds.keys():
+            Pcd = PcdClass()
+            Pcd.CName = Key[1]
+            Pcd.TokenSpaceGuidCName = Key[0]
+            Pcd.DefaultValue = Key[2]
+            Pcd.ItemType = Key[3]
+            Pcd.SupArchList = Pcds[Key]
+            self.Module.PcdCodes.append(Pcd)
+
+ ## GenSources
+ #
+ # Gen Sources of Inf
+ # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+    def GenSources(self, ContainerFile):
+        # Build ModuleSourceFileClass entries from the [Sources] records.
+        # Each record is <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]].
+        #
+        # @param ContainerFile: full path of the INF file (for error reports)
+        EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES)
+        Sources = {}
+
+        #
+        # Get all Sources
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2])
+                    MergeArches(Sources, (Filename, Family, TagName, ToolCode, Pcd), Arch)
+                    # Store the split source fields back into Value1..Value5
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s', Value5 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Filename), ConvertToSqlString2(Family), ConvertToSqlString2(TagName), ConvertToSqlString2(ToolCode), ConvertToSqlString2(Pcd), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        # Note the argument order: ModuleSourceFileClass(Filename, TagName, ToolCode, Family, Pcd, ArchList)
+        for Key in Sources.keys():
+            Source = ModuleSourceFileClass(Key[0], Key[2], Key[3], Key[1], Key[4], Sources[Key])
+            self.Module.Sources.append(Source)
+
+ ## GenUserExtensions
+ #
+ # Gen UserExtensions of Inf
+ #
+    def GenUserExtensions(self, ContainerFile):
+        # NOTE(review): parsing of [UserExtensions] is currently disabled;
+        # the commented block below is the previous implementation, kept for
+        # reference.  This method is intentionally a no-op.
+#        #
+#        # UserExtensions
+#        #
+#        if self.UserExtensions != '':
+#            UserExtension = UserExtensionsClass()
+#            Lines = self.UserExtensions.splitlines()
+#            List = GetSplitValueList(Lines[0], DataType.TAB_SPLIT, 2)
+#            if len(List) != 3:
+#                RaiseParserError(Lines[0], 'UserExtensions', File, "UserExtensions.UserId.'Identifier'")
+#            else:
+#                UserExtension.UserID = List[1]
+#                UserExtension.Identifier = List[2][0:-1].replace("'", '').replace('\"', '')
+#                for Line in Lines[1:]:
+#                    UserExtension.Content = UserExtension.Content + CleanString(Line) + '\n'
+#            self.Module.UserExtensions.append(UserExtension)
+        pass
+
+ ## GenDepexes
+ #
+ # Gen Depex of Inf
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenDepexes(self, ContainerFile):
+        # Concatenate all [Depex] record tokens (space separated) per arch,
+        # merge identical expressions across arches, and append one
+        # ModuleDepexClass per unique expression to self.Module.Depex.
+        #
+        # @param ContainerFile: full path of the INF file (for error reports)
+        EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX)
+        Depex = {}
+        #
+        # Get all Depexes
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_DEPEX]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            Line = ''
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    Line = Line + Record[0] + ' '
+            if Line != '':
+                MergeArches(Depex, Line, Arch)
+
+        for Key in Depex.keys():
+            Dep = ModuleDepexClass()
+            Dep.Depex = Key
+            Dep.SupArchList = Depex[Key]
+            self.Module.Depex.append(Dep)
+
+ ## GenBinaries
+ #
+ # Gen Binary of Inf
+ # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+ #
+ # @param ContainerFile: The Dec file full path
+ #
+    def GenBinaries(self, ContainerFile):
+        # Build ModuleBinaryFileClass entries from the [Binaries] records.
+        # Each record is <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>].
+        #
+        # @param ContainerFile: full path of the INF file (for error reports)
+        EdkLogger.debug(2, "Generate %s ..." % TAB_BINARIES)
+        Binaries = {}
+
+        #
+        # Get all Binaries
+        #
+        RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.FileRelativePath, Record[2])
+                    MergeArches(Binaries, (FileType, Filename, Target, Pcd), Arch)
+                    # Store the split binary fields back into Value1..Value4
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s', Value3 = '%s', Value4 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(FileType), ConvertToSqlString2(Filename), ConvertToSqlString2(Target), ConvertToSqlString2(Pcd), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        # Filename (Key[1]) is normalized to the host path convention here
+        for Key in Binaries.keys():
+            Binary = ModuleBinaryFileClass(NormPath(Key[1]), Key[0], Key[2], Key[3], Binaries[Key])
+            self.Module.Binaries.append(Binary)
+
+ ## GenGuids
+ #
+ # Gen Guids of Inf
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenGuidProtocolPpis(self, Type, ContainerFile):
+        # Shared generator for the [Guids], [Protocols] and [Ppis] sections:
+        # Type selects which record set to read and which Module list
+        # (Guids/Protocols/Ppis) receives the resulting
+        # GuidProtocolPpiCommonClass entries.
+        #
+        # @param Type:          one of TAB_GUIDS / TAB_PROTOCOLS / TAB_PPIS
+        # @param ContainerFile: full path of the INF file (for error reports)
+        EdkLogger.debug(2, "Generate %s ..." % Type)
+        Lists = {}
+        #
+        # Get all Items
+        #
+        RecordSet = self.RecordSet[Section[Type.upper()]]
+
+        #
+        # Go through each arch
+        #
+        for Arch in self.SupArchList:
+            for Record in RecordSet:
+                if Record[1] == Arch or Record[1] == TAB_ARCH_COMMON:
+                    (Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2])
+                    MergeArches(Lists, (Name, Value), Arch)
+                    if self.IsToDatabase:
+                        SqlCommand = """update %s set Value1 = '%s', Value2 = '%s'
+                                        where ID = %s""" % (self.TblInf.Table, ConvertToSqlString2(Name), ConvertToSqlString2(Value), Record[3])
+                        self.TblInf.Exec(SqlCommand)
+
+        # Pick the destination list on the module object according to Type
+        ListMember = None
+        if Type == TAB_GUIDS:
+            ListMember = self.Module.Guids
+        elif Type == TAB_PROTOCOLS:
+            ListMember = self.Module.Protocols
+        elif Type == TAB_PPIS:
+            ListMember = self.Module.Ppis
+
+        # Key[1] (the second split field) is stored as the FeatureFlag
+        for Key in Lists.keys():
+            ListClass = GuidProtocolPpiCommonClass()
+            ListClass.CName = Key[0]
+            ListClass.SupArchList = Lists[Key]
+            ListClass.FeatureFlag = Key[1]
+            ListMember.append(ListClass)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+    # Manual smoke test: parse HelloWorld.inf from the current WORKSPACE
+    # into a local 'Inf.db' database and dump the resulting module.
+    # Requires the WORKSPACE environment variable to be set.
+    EdkLogger.Initialize()
+    EdkLogger.SetLevel(EdkLogger.DEBUG_0)
+
+    W = os.getenv('WORKSPACE')
+    F = os.path.join(W, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf')
+
+    Db = Database.Database('Inf.db')
+    Db.InitDatabase()
+
+    P = Inf(os.path.normpath(F), True, True, W, Db)
+    P.ShowModule()
+
+    Db.Close()
diff --git a/BaseTools/Source/Python/Common/InfClassObjectLight.py b/BaseTools/Source/Python/Common/InfClassObjectLight.py
new file mode 100644
index 0000000000..a655828e6a
--- /dev/null
+++ b/BaseTools/Source/Python/Common/InfClassObjectLight.py
@@ -0,0 +1,876 @@
+## @file
+# This file is used to define each component of INF file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+
+from CommonDataClass.ModuleClass import *
+from CommonDataClass import CommonClass
+from String import *
+from DataType import *
+from BuildToolError import *
+from Misc import sdict
+from Misc import GetFiles
+from Parsing import *
+
+# Global variable
+# Maps each INF section name (upper-cased) to its database MODEL_* constant;
+# used both to index self.RecordSet and to validate section headers.
+Section = {TAB_UNKNOWN.upper() : MODEL_UNKNOWN,
+           TAB_INF_DEFINES.upper() : MODEL_META_DATA_HEADER,
+           TAB_BUILD_OPTIONS.upper() : MODEL_META_DATA_BUILD_OPTION,
+           TAB_INCLUDES.upper() : MODEL_EFI_INCLUDE,
+           TAB_LIBRARIES.upper() : MODEL_EFI_LIBRARY_INSTANCE,
+           TAB_LIBRARY_CLASSES.upper() : MODEL_EFI_LIBRARY_CLASS,
+           TAB_PACKAGES.upper() : MODEL_META_DATA_PACKAGE,
+           TAB_NMAKE.upper() : MODEL_META_DATA_NMAKE,
+           TAB_INF_FIXED_PCD.upper() : MODEL_PCD_FIXED_AT_BUILD,
+           TAB_INF_PATCH_PCD.upper() : MODEL_PCD_PATCHABLE_IN_MODULE,
+           TAB_INF_FEATURE_PCD.upper() : MODEL_PCD_FEATURE_FLAG,
+           TAB_INF_PCD_EX.upper() : MODEL_PCD_DYNAMIC_EX,
+           TAB_INF_PCD.upper() : MODEL_PCD_DYNAMIC,
+           TAB_SOURCES.upper() : MODEL_EFI_SOURCE_FILE,
+           TAB_GUIDS.upper() : MODEL_EFI_GUID,
+           TAB_PROTOCOLS.upper() : MODEL_EFI_PROTOCOL,
+           TAB_PPIS.upper() : MODEL_EFI_PPI,
+           TAB_DEPEX.upper() : MODEL_EFI_DEPEX,
+           TAB_BINARIES.upper() : MODEL_EFI_BINARY_FILE,
+           TAB_USER_EXTENSIONS.upper() : MODEL_META_DATA_USER_EXTENSION
+           }
+
+# Maps legacy (R8/EDK) COMPONENT_TYPE values to the closest EDK II MODULE_TYPE.
+gComponentType2ModuleType = {
+    "LIBRARY"               :   "BASE",
+    "SECURITY_CORE"         :   "SEC",
+    "PEI_CORE"              :   "PEI_CORE",
+    "COMBINED_PEIM_DRIVER"  :   "PEIM",
+    "PIC_PEIM"              :   "PEIM",
+    "RELOCATABLE_PEIM"      :   "PEIM",
+    "PE32_PEIM"             :   "PEIM",
+    "BS_DRIVER"             :   "DXE_DRIVER",
+    "RT_DRIVER"             :   "DXE_RUNTIME_DRIVER",
+    "SAL_RT_DRIVER"         :   "DXE_SAL_DRIVER",
+    "APPLICATION"           :   "UEFI_APPLICATION",
+    "LOGO"                  :   "BASE",
+}
+
+class InfHeader(ModuleHeaderClass):
+    # Dictionary-style view over the module header: _Mapping_ translates the
+    # literal [Defines] key names found in an INF file to ModuleHeaderClass
+    # attribute names, so parsed key/value pairs can be assigned with
+    # header[Key] = Value and membership-tested with "Key in header".
+    _Mapping_ = {
+        # Required Fields
+        TAB_INF_DEFINES_BASE_NAME                   : "Name",
+        TAB_INF_DEFINES_FILE_GUID                   : "Guid",
+        TAB_INF_DEFINES_MODULE_TYPE                 : "ModuleType",
+        TAB_INF_DEFINES_EFI_SPECIFICATION_VERSION   : "EfiSpecificationVersion",
+        TAB_INF_DEFINES_EDK_RELEASE_VERSION         : "EdkReleaseVersion",
+
+        # Optional Fields
+        TAB_INF_DEFINES_INF_VERSION                 : "InfVersion",
+        TAB_INF_DEFINES_BINARY_MODULE               : "BinaryModule",
+        TAB_INF_DEFINES_COMPONENT_TYPE              : "ComponentType",
+        TAB_INF_DEFINES_MAKEFILE_NAME               : "MakefileName",
+        TAB_INF_DEFINES_BUILD_NUMBER                : "BuildNumber",
+        TAB_INF_DEFINES_BUILD_TYPE                  : "BuildType",
+        TAB_INF_DEFINES_FFS_EXT                     : "FfsExt",
+        TAB_INF_DEFINES_FV_EXT                      : "FvExt",
+        TAB_INF_DEFINES_SOURCE_FV                   : "SourceFv",
+        TAB_INF_DEFINES_VERSION_NUMBER              : "VersionNumber",
+        TAB_INF_DEFINES_VERSION_STRING              : "VersionString",
+        TAB_INF_DEFINES_VERSION                     : "Version",
+        TAB_INF_DEFINES_PCD_IS_DRIVER               : "PcdIsDriver",
+        TAB_INF_DEFINES_TIANO_R8_FLASHMAP_H         : "TianoR8FlashMap_h",
+        TAB_INF_DEFINES_SHADOW                      : "Shadow",
+    }
+
+    def __init__(self):
+        ModuleHeaderClass.__init__(self)
+        self.VersionNumber = ''
+        self.VersionString = ''
+        #print self.__dict__
+    ## Assign by INF [Defines] key name; raises KeyError for unknown keys
+    def __setitem__(self, key, value):
+        self.__dict__[self._Mapping_[key]] = value
+    ## Look up by INF [Defines] key name
+    def __getitem__(self, key):
+        return self.__dict__[self._Mapping_[key]]
+    ## "in" test support
+    def __contains__(self, key):
+        return key in self._Mapping_
+
+## InfObject
+#
+# This class defined basic Inf object which is used by inheriting
+#
+# @param object: Inherited from object class
+#
+class InfObject(object):
+ def __init__(self):
+ object.__init__()
+
+## Inf
+#
+# This class defined the structure used in Inf object
+#
+# @param InfObject: Inherited from InfObject class
+# @param Ffilename: Input value for Ffilename of Inf file, default is None
+# @param IsMergeAllArches: Input value for IsMergeAllArches
+# True is to merge all arches
+# Fales is not to merge all arches
+# default is False
+# @param IsToModule: Input value for IsToModule
+# True is to transfer to ModuleObject automatically
+# False is not to transfer to ModuleObject automatically
+# default is False
+# @param WorkspaceDir: Input value for current workspace directory, default is None
+#
+# @var Identification: To store value for Identification, it is a structure as Identification
+# @var UserExtensions: To store value for UserExtensions
+# @var Module: To store value for Module, it is a structure as ModuleClass
+# @var WorkspaceDir: To store value for WorkspaceDir
+# @var KeyList: To store value for KeyList, a list for all Keys used in Inf
+#
+class Inf(InfObject):
+    def __init__(self, Filename = None, IsToModule = False, WorkspaceDir = None, PackageDir = None, SupArchList = DataType.ARCH_LIST):
+        # NOTE(review): SupArchList defaults to the shared DataType.ARCH_LIST
+        # object; callers must not mutate it, or every Inf instance created
+        # with the default will be affected.
+        self.Identification = IdentificationClass()
+        self.Module = ModuleClass()
+        self.WorkspaceDir = WorkspaceDir
+        self.PackageDir = PackageDir
+        self.SupArchList = SupArchList
+
+        self.KeyList = [
+            TAB_SOURCES, TAB_BUILD_OPTIONS, TAB_BINARIES, TAB_INCLUDES, TAB_GUIDS,
+            TAB_PROTOCOLS, TAB_PPIS, TAB_LIBRARY_CLASSES, TAB_PACKAGES, TAB_INF_FIXED_PCD,
+            TAB_INF_PATCH_PCD, TAB_INF_FEATURE_PCD, TAB_INF_PCD, TAB_INF_PCD_EX,
+            TAB_DEPEX, TAB_INF_DEFINES
+        ]
+        # Upper all KEYs to ignore case sensitive when parsing
+        self.KeyList = map(lambda c: c.upper(), self.KeyList)
+
+        # Init RecordSet: one empty list per section model constant
+        self.RecordSet = {}
+        for Key in self.KeyList:
+            self.RecordSet[Section[Key]] = []
+
+        # Init Comment
+        self.SectionHeaderCommentDict = {}
+
+        # Load Inf file if filename is not None
+        if Filename != None:
+            self.LoadInfFile(Filename)
+
+        # Transfer to Module Object if IsToModule is True
+        if IsToModule:
+            self.InfToModule()
+
+ ## Module Object to INF file
+    def ModuleToInf(self, Module):
+        # Serialize a ModuleClass object back into INF file text.  Sections
+        # are accumulated into an ordered dict (InfList) keyed by
+        # '<SectionName>.<ArchList>' and rendered at the end, with any stored
+        # section header comments emitted above the section tag.
+        #
+        # @param Module: the ModuleClass object to serialize; None yields ''
+        # @retval string: the full INF file content
+        Inf = ''
+        InfList = sdict()
+        SectionHeaderCommentDict = {}
+        if Module == None:
+            return Inf
+
+        ModuleHeader = Module.ModuleHeader
+        TmpList = []
+        # Common define items
+        if ModuleHeader.Name:
+            TmpList.append(TAB_INF_DEFINES_BASE_NAME + ' = ' + ModuleHeader.Name)
+        if ModuleHeader.Guid:
+            TmpList.append(TAB_INF_DEFINES_FILE_GUID + ' = ' + ModuleHeader.Guid)
+        if ModuleHeader.Version:
+            TmpList.append(TAB_INF_DEFINES_VERSION_STRING + ' = ' + ModuleHeader.Version)
+        if ModuleHeader.ModuleType:
+            TmpList.append(TAB_INF_DEFINES_MODULE_TYPE + ' = ' + ModuleHeader.ModuleType)
+        if ModuleHeader.PcdIsDriver:
+            TmpList.append(TAB_INF_DEFINES_PCD_IS_DRIVER + ' = ' + ModuleHeader.PcdIsDriver)
+        # Externs
+        for Item in Module.Externs:
+            if Item.EntryPoint:
+                TmpList.append(TAB_INF_DEFINES_ENTRY_POINT + ' = ' + Item.EntryPoint)
+            if Item.UnloadImage:
+                TmpList.append(TAB_INF_DEFINES_UNLOAD_IMAGE + ' = ' + Item.UnloadImage)
+            if Item.Constructor:
+                TmpList.append(TAB_INF_DEFINES_CONSTRUCTOR + ' = ' + Item.Constructor)
+            if Item.Destructor:
+                TmpList.append(TAB_INF_DEFINES_DESTRUCTOR + ' = ' + Item.Destructor)
+        # Other define items
+        if Module.UserExtensions != None:
+            for Item in Module.UserExtensions.Defines:
+                TmpList.append(Item)
+        InfList['Defines'] = TmpList
+        if ModuleHeader.Description != '':
+            SectionHeaderCommentDict['Defines'] = ModuleHeader.Description
+
+        if Module.UserExtensions != None:
+            InfList['BuildOptions'] = Module.UserExtensions.BuildOptions
+
+        for Item in Module.Includes:
+            Key = 'Includes.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            Value.append(Item.FilePath)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        # LibraryClass entries render as <Class>[|<Instance>[|<FeatureFlag>]]
+        for Item in Module.LibraryClasses:
+            Key = 'LibraryClasses.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            NewValue = Item.LibraryClass
+            if Item.RecommendedInstance:
+                NewValue = NewValue + '|' + Item.RecommendedInstance
+            if Item.FeatureFlag:
+                NewValue = NewValue + '|' + Item.FeatureFlag
+            Value.append(NewValue)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        for Item in Module.PackageDependencies:
+            Key = 'Packages.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            Value.append(Item.FilePath)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        # Pcd entries render as <TokenSpaceGuid>.<CName>[|<DefaultValue>]
+        for Item in Module.PcdCodes:
+            Key = 'Pcds' + Item.ItemType + '.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            NewValue = Item.TokenSpaceGuidCName + '.' + Item.CName
+            if Item.DefaultValue != '':
+                NewValue = NewValue + '|' + Item.DefaultValue
+            Value.append(NewValue)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        # Source entries render as <File>[|<Family>[|<TagName>[|<ToolCode>[|<FeatureFlag>]]]]
+        for Item in Module.Sources:
+            Key = 'Sources.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            NewValue = Item.SourceFile
+            if Item.ToolChainFamily != '':
+                NewValue = NewValue + '|' + Item.ToolChainFamily
+            if Item.TagName != '':
+                NewValue = NewValue + '|' + Item.TagName
+            if Item.ToolCode != '':
+                NewValue = NewValue + '|' + Item.ToolCode
+            if Item.FeatureFlag != '':
+                NewValue = NewValue + '|' + Item.FeatureFlag
+            Value.append(NewValue)
+            if Item.HelpText != '':
+                SectionHeaderCommentDict[Key] = Item.HelpText
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        for Item in Module.Guids:
+            Key = 'Guids.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            Value.append(Item.CName)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        for Item in Module.Protocols:
+            Key = 'Protocols.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            Value.append(Item.CName)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        for Item in Module.Ppis:
+            Key = 'Ppis.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            Value.append(Item.CName)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        # All three depex kinds share the single 'Depex' section key
+        if Module.PeiDepex:
+            Key = 'Depex'
+            Value = Module.PeiDepex.Depex
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        if Module.DxeDepex:
+            Key = 'Depex'
+            Value = Module.DxeDepex.Depex
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        if Module.SmmDepex:
+            Key = 'Depex'
+            Value = Module.SmmDepex.Depex
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        for Item in Module.Binaries:
+            Key = 'Binaries.' + GetStringOfList(Item.SupArchList)
+            Value = GetHelpTextList(Item.HelpTextList)
+            NewValue = Item.FileType + '|' + Item.BinaryFile + '|' + Item.Target
+            if Item.FeatureFlag != '':
+                NewValue = NewValue + '|' + Item.FeatureFlag
+            Value.append(NewValue)
+            GenMetaDatSectionItem(Key, Value, InfList)
+
+        # Transfer Module to Inf: render each collected section as text
+        for Key in InfList:
+            if Key in SectionHeaderCommentDict:
+                List = SectionHeaderCommentDict[Key].split('\r')
+                for Item in List:
+                    Inf = Inf + Item + '\n'
+            Inf = Inf + '[' + Key + ']' + '\n'
+            for Value in InfList[Key]:
+                if type(Value) == type([]):
+                    for SubValue in Value:
+                        Inf = Inf + '  ' + SubValue + '\n'
+                else:
+                    Inf = Inf + '  ' + Value + '\n'
+            Inf = Inf + '\n'
+
+        return Inf
+
+
+ ## Transfer to Module Object
+ #
+ # Transfer all contents of an Inf file to a standard Module Object
+ #
+    def InfToModule(self):
+        # Drive the full conversion of the parsed record sets into the
+        # ModuleClass object, one section at a time.
+        # Init global information for the file
+        ContainerFile = self.Identification.FullPath
+
+        # Generate Module Header
+        self.GenModuleHeader(ContainerFile)
+
+        # Generate BuildOptions
+        self.GenBuildOptions(ContainerFile)
+
+        # Generate Includes
+        self.GenIncludes(ContainerFile)
+
+        # Generate LibraryClasses
+        self.GenLibraryClasses(ContainerFile)
+
+        # Generate Packages
+        self.GenPackages(ContainerFile)
+
+        # Generate Pcds
+        self.GenPcds(ContainerFile)
+
+        # Generate Sources
+        self.GenSources(ContainerFile)
+
+        # Generate Guids
+        self.GenGuidProtocolPpis(DataType.TAB_GUIDS, ContainerFile)
+
+        # Generate Protocols
+        self.GenGuidProtocolPpis(DataType.TAB_PROTOCOLS, ContainerFile)
+
+        # Generate Ppis
+        self.GenGuidProtocolPpis(DataType.TAB_PPIS, ContainerFile)
+
+        # Generate Depexes
+        self.GenDepexes(ContainerFile)
+
+        # Generate Binaries
+        self.GenBinaries(ContainerFile)
+
+        # Init MiscFiles
+        self.GenMiscFiles(ContainerFile)
+
+ ## GenMiscFiles
+ #
+    def GenMiscFiles(self, ContainerFile):
+        # Record every file found in the module's directory (skipping CVS and
+        # .svn entries) as a FileClass under a MiscFileClass named
+        # 'ModuleFiles'.
+        # NOTE(review): the third GetFiles argument is False here -- presumably
+        # a non-recursive/relative-path flag; confirm against GetFiles in Misc.
+        MiscFiles = MiscFileClass()
+        MiscFiles.Name = 'ModuleFiles'
+        for Item in GetFiles(os.path.dirname(ContainerFile), ['CVS', '.svn'], False):
+            File = CommonClass.FileClass()
+            File.Filename = Item
+            MiscFiles.Files.append(File)
+        self.Module.MiscFiles = MiscFiles
+
+ ## Load Inf file
+ #
+ # Load the file if it exists
+ #
+ # @param Filename: Input value for filename of Inf file
+ #
+    def LoadInfFile(self, Filename):
+        # Parse the INF file line by line: strip R8-style block comments,
+        # accumulate leading comments, detect [Section.Arch] headers, and feed
+        # each section's items into self.RecordSet via InsertSectionItems.
+        # NOTE(review): the file object from open() below is never explicitly
+        # closed; it is only released by the interpreter's garbage collector.
+        # Insert a record for file
+        Filename = NormPath(Filename)
+
+        self.Identification.FullPath = Filename
+        (self.Identification.RelaPath, self.Identification.FileName) = os.path.split(Filename)
+        if self.Identification.FullPath.find(self.WorkspaceDir) > -1:
+            self.Identification.ModulePath = os.path.dirname(self.Identification.FullPath[len(self.WorkspaceDir) + 1:])
+        if self.PackageDir:
+            self.Identification.PackagePath = self.PackageDir
+            if self.Identification.ModulePath.find(self.PackageDir) == 0:
+                self.Identification.ModulePath = self.Identification.ModulePath[len(self.PackageDir) + 1:]
+
+        # Init common datas
+        IfDefList, SectionItemList, CurrentSection, ArchList, ThirdList, IncludeFiles = \
+        [], [], TAB_UNKNOWN, [], [], []
+        LineNo = 0
+
+        # Parse file content
+        IsFindBlockComment = False
+        ReservedLine = ''
+        Comment = ''
+        for Line in open(Filename, 'r'):
+            LineNo = LineNo + 1
+            # Remove comment block
+            if Line.find(TAB_COMMENT_R8_START) > -1:
+                ReservedLine = GetSplitValueList(Line, TAB_COMMENT_R8_START, 1)[0]
+                if ReservedLine.strip().startswith(TAB_COMMENT_SPLIT):
+                    Comment = Comment + Line.strip() + '\n'
+                    ReservedLine = ''
+                else:
+                    Comment = Comment + Line[len(ReservedLine):] + '\n'
+                IsFindBlockComment = True
+                if not ReservedLine:
+                    continue
+            if Line.find(TAB_COMMENT_R8_END) > -1:
+                Comment = Comment + Line[:Line.find(TAB_COMMENT_R8_END) + len(TAB_COMMENT_R8_END)] + '\n'
+                Line = ReservedLine + GetSplitValueList(Line, TAB_COMMENT_R8_END, 1)[1]
+                ReservedLine = ''
+                IsFindBlockComment = False
+            if IsFindBlockComment:
+                Comment = Comment + Line.strip() + '\n'
+                continue
+
+            # Remove comments at tail and remove spaces again
+            if Line.strip().startswith(TAB_COMMENT_SPLIT) or Line.strip().startswith('--/'):
+                Comment = Comment + Line.strip() + '\n'
+            Line = CleanString(Line)
+            if Line == '':
+                continue
+
+            ## Find a new section tab
+            # First insert previous section items
+            # And then parse the content of the new section
+            if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
+                if Line[1:3] == "--":
+                    continue
+                Model = Section[CurrentSection.upper()]
+                # Insert items data of previous section
+                InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
+
+                # Parse the new section
+                SectionItemList = []
+                ArchList = []
+                ThirdList = []
+
+                CurrentSection = ''
+                LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
+                for Item in LineList:
+                    ItemList = GetSplitValueList(Item, TAB_SPLIT)
+                    if CurrentSection == '':
+                        CurrentSection = ItemList[0]
+                    else:
+                        if CurrentSection != ItemList[0]:
+                            EdkLogger.error("Parser", PARSER_ERROR, "Different section names '%s' and '%s' are found in one section definition, this is not allowed." % (CurrentSection, ItemList[0]), File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+                    if CurrentSection.upper() not in self.KeyList:
+                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+                    # Pad to at least 3 fields so ItemList[1]/[2] always exist
+                    ItemList.append('')
+                    ItemList.append('')
+                    if len(ItemList) > 5:
+                        RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
+                    else:
+                        if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
+                            EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+                        ArchList.append(ItemList[1].upper())
+                        ThirdList.append(ItemList[2])
+
+                # Attach the accumulated comment to this section header
+                if Comment:
+                    if Comment.endswith('\n'):
+                        Comment = Comment[:len(Comment) - len('\n')]
+                    self.SectionHeaderCommentDict[Section[CurrentSection.upper()]] = Comment
+                    Comment = ''
+                continue
+
+            # Not in any defined section
+            if CurrentSection == TAB_UNKNOWN:
+                ErrorMsg = "%s is not in any defined section" % Line
+                EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo, RaiseError = EdkLogger.IsRaiseError)
+
+            # Add a section item
+            SectionItemList.append([Line, LineNo, Comment])
+            Comment = ''
+            # End of parse
+        #End of For
+
+        # Insert items data of last section
+        Model = Section[CurrentSection.upper()]
+        InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, self.RecordSet)
+        if Comment != '':
+            self.SectionHeaderCommentDict[Model] = Comment
+            Comment = ''
+
+ ## Show detailed information of Module
+ #
+ # Print all members and their values of Module class
+ #
+    def ShowModule(self):
+        # Debug dump: print every populated member of self.Module to stdout.
+        M = self.Module
+        print 'Filename =', M.ModuleHeader.FileName
+        print 'FullPath =', M.ModuleHeader.FullPath
+        print 'RelaPath =', M.ModuleHeader.RelaPath
+        print 'PackagePath =', M.ModuleHeader.PackagePath
+        print 'ModulePath =', M.ModuleHeader.ModulePath
+        print 'CombinePath =', M.ModuleHeader.CombinePath
+
+        print 'BaseName =', M.ModuleHeader.Name
+        print 'Guid =', M.ModuleHeader.Guid
+        print 'Version =', M.ModuleHeader.Version
+
+        print '\nIncludes ='
+        for Item in M.Includes:
+            print Item.FilePath, Item.SupArchList
+        print '\nLibraryClasses ='
+        for Item in M.LibraryClasses:
+            print Item.LibraryClass, Item.RecommendedInstance, Item.RecommendedInstanceGuid, Item.RecommendedInstanceVersion, Item.FeatureFlag, Item.SupModuleList, Item.SupArchList, Item.Define
+        print '\nPackageDependencies ='
+        for Item in M.PackageDependencies:
+            print Item.FilePath, Item.SupArchList, Item.FeatureFlag
+        print '\nPcds ='
+        for Item in M.PcdCodes:
+            print '\tCName=',Item.CName, 'TokenSpaceGuidCName=', Item.TokenSpaceGuidCName, 'DefaultValue=', Item.DefaultValue, 'ItemType=', Item.ItemType, Item.SupArchList
+        print '\nSources ='
+        for Source in M.Sources:
+            print Source.SourceFile, 'Fam=', Source.ToolChainFamily, 'Pcd=', Source.FeatureFlag, 'Tag=', Source.TagName, 'ToolCode=', Source.ToolCode, Source.SupArchList
+        print '\nGuids ='
+        for Item in M.Guids:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nProtocols ='
+        for Item in M.Protocols:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nPpis ='
+        for Item in M.Ppis:
+            print Item.CName, Item.SupArchList, Item.FeatureFlag
+        print '\nDepex ='
+        for Item in M.Depex:
+            print Item.Depex, Item.SupArchList, Item.Define
+        print '\nBinaries ='
+        for Binary in M.Binaries:
+            print 'Type=', Binary.FileType, 'Target=', Binary.Target, 'Name=', Binary.BinaryFile, 'FeatureFlag=', Binary.FeatureFlag, 'SupArchList=', Binary.SupArchList
+        print '\n*** FileList ***'
+        for Item in M.MiscFiles.Files:
+            print Item.Filename
+        print '****************\n'
+
+ ## Convert [Defines] section content to ModuleHeaderClass
+ #
+ # Convert [Defines] section content to ModuleHeaderClass
+ #
+ # @param Defines The content under [Defines] section
+ # @param ModuleHeader An object of ModuleHeaderClass
+ # @param Arch The supported ARCH
+ #
+    def GenModuleHeader(self, ContainerFile):
+        # Convert the [Defines] records into a ModuleHeaderClass plus a
+        # ModuleExternClass; any line that is not a recognized
+        # 'Name = Value' pair is preserved verbatim in UserExtensions.Defines.
+        EdkLogger.debug(2, "Generate ModuleHeader ...")
+        # Update all defines item in database
+        RecordSet = self.RecordSet[MODEL_META_DATA_HEADER]
+
+        ModuleHeader = ModuleHeaderClass()
+        ModuleExtern = ModuleExternClass()
+        OtherDefines = []
+        for Record in RecordSet:
+            # NOTE(review): the split has no MaxSplit, so a value containing
+            # '=' yields more than 2 fields and the whole line is demoted to
+            # OtherDefines -- confirm this is the intended behavior.
+            ValueList = GetSplitValueList(Record[0], TAB_EQUAL_SPLIT)
+            if len(ValueList) != 2:
+                OtherDefines.append(Record[0])
+            else:
+                Name = ValueList[0]
+                Value = ValueList[1]
+                if Name == TAB_INF_DEFINES_BASE_NAME:
+                    ModuleHeader.Name = Value
+                    ModuleHeader.BaseName = Value
+                elif Name == TAB_INF_DEFINES_FILE_GUID:
+                    ModuleHeader.Guid = Value
+                elif Name == TAB_INF_DEFINES_VERSION_STRING:
+                    ModuleHeader.Version = Value
+                elif Name == TAB_INF_DEFINES_PCD_IS_DRIVER:
+                    ModuleHeader.PcdIsDriver = Value
+                elif Name == TAB_INF_DEFINES_MODULE_TYPE:
+                    ModuleHeader.ModuleType = Value
+                elif Name == TAB_INF_DEFINES_UEFI_SPECIFICATION_VERSION:
+                    ModuleHeader.UefiSpecificationVersion = Value
+                elif Name == TAB_INF_DEFINES_PI_SPECIFICATION_VERSION:
+                    ModuleHeader.PiSpecificationVersion = Value
+                elif Name == TAB_INF_DEFINES_ENTRY_POINT:
+                    ModuleExtern.EntryPoint = Value
+                elif Name == TAB_INF_DEFINES_UNLOAD_IMAGE:
+                    ModuleExtern.UnloadImage = Value
+                elif Name == TAB_INF_DEFINES_CONSTRUCTOR:
+                    ModuleExtern.Constructor = Value
+                elif Name == TAB_INF_DEFINES_DESTRUCTOR:
+                    ModuleExtern.Destructor = Value
+                else:
+                    OtherDefines.append(Record[0])
+        ModuleHeader.FileName = self.Identification.FileName
+        ModuleHeader.FullPath = self.Identification.FullPath
+        ModuleHeader.RelaPath = self.Identification.RelaPath
+        ModuleHeader.PackagePath = self.Identification.PackagePath
+        ModuleHeader.ModulePath = self.Identification.ModulePath
+        ModuleHeader.CombinePath = os.path.normpath(os.path.join(ModuleHeader.PackagePath, ModuleHeader.ModulePath, ModuleHeader.FileName))
+
+        # The comment collected above the [Defines] header becomes the description
+        if MODEL_META_DATA_HEADER in self.SectionHeaderCommentDict:
+            ModuleHeader.Description = self.SectionHeaderCommentDict[MODEL_META_DATA_HEADER]
+        self.Module.ModuleHeader = ModuleHeader
+        self.Module.Externs.append(ModuleExtern)
+        UE = self.Module.UserExtensions
+        if UE == None:
+            UE = UserExtensionsClass()
+        UE.Defines = OtherDefines
+        self.Module.UserExtensions = UE
+
+ ## GenBuildOptions
+ #
+ # Gen BuildOptions of Inf
+ # [<Family>:]<ToolFlag>=Flag
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenBuildOptions(self, ContainerFile):
+        # Copy every raw [BuildOptions] record string into
+        # self.Module.UserExtensions.BuildOptions (no parsing of
+        # [<Family>:]<ToolFlag>=Flag is done here).
+        EdkLogger.debug(2, "Generate %s ..." % TAB_BUILD_OPTIONS)
+        # NOTE(review): BuildOptions below is never used -- dead local.
+        BuildOptions = {}
+        # Get all BuildOptions
+        RecordSet = self.RecordSet[MODEL_META_DATA_BUILD_OPTION]
+        UE = self.Module.UserExtensions
+        if UE == None:
+            UE = UserExtensionsClass()
+        for Record in RecordSet:
+            UE.BuildOptions.append(Record[0])
+        self.Module.UserExtensions = UE
+
+ ## GenIncludes
+ #
+ # Gen Includes of Inf
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenIncludes(self, ContainerFile):
+        # Build one IncludeClass per [Includes] record; Record[1] carries the
+        # arch list and Record[5] the collected help text.
+        EdkLogger.debug(2, "Generate %s ..." % TAB_INCLUDES)
+        # NOTE(review): Includes below is never used -- dead local.
+        Includes = sdict()
+        # Get all Includes
+        RecordSet = self.RecordSet[MODEL_EFI_INCLUDE]
+        for Record in RecordSet:
+            Include = IncludeClass()
+            Include.FilePath = Record[0]
+            Include.SupArchList = Record[1]
+            if GenerateHelpText(Record[5], ''):
+                Include.HelpTextList.append(GenerateHelpText(Record[5], ''))
+            self.Module.Includes.append(Include)
+            #self.Module.FileList.extend(GetFiles(os.path.normpath(os.path.join(self.Identification.FileRelativePath, Include.FilePath)), ['CVS', '.svn']))
+
+ ## GenLibraryClasses
+ #
+ # Get LibraryClass of Inf
+ # <LibraryClassKeyWord>|<LibraryInstance>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenLibraryClasses(self, ContainerFile):
+        # Build one LibraryClassClass per [LibraryClasses] record, parsing
+        # <LibraryClassKeyWord>|<LibraryInstance> (plus optional feature flag)
+        # via GetLibraryClassOfInf.
+        EdkLogger.debug(2, "Generate %s ..." % TAB_LIBRARY_CLASSES)
+        # NOTE(review): LibraryClasses below is never used -- dead local.
+        LibraryClasses = {}
+        # Get all LibraryClasses
+        RecordSet = self.RecordSet[MODEL_EFI_LIBRARY_CLASS]
+        for Record in RecordSet:
+            (LibClassName, LibClassIns, Pcd, SupModelList) = GetLibraryClassOfInf([Record[0], Record[4]], ContainerFile, self.WorkspaceDir, Record[2])
+            LibraryClass = CommonClass.LibraryClassClass()
+            LibraryClass.LibraryClass = LibClassName
+            LibraryClass.RecommendedInstance = LibClassIns
+            LibraryClass.FeatureFlag = Pcd
+            LibraryClass.SupArchList = Record[1]
+            LibraryClass.SupModuleList = Record[4]
+            if GenerateHelpText(Record[5], ''):
+                LibraryClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
+            self.Module.LibraryClasses.append(LibraryClass)
+
+ ## GenPackages
+ #
+ # Gen Packages of Inf
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+    def GenPackages(self, ContainerFile):
+        # Build one ModulePackageDependencyClass per [Packages] record,
+        # resolving the package path via GetPackage.
+        EdkLogger.debug(2, "Generate %s ..." % TAB_PACKAGES)
+        # NOTE(review): Packages below is never used -- dead local.
+        Packages = {}
+        # Get all Packages
+        RecordSet = self.RecordSet[MODEL_META_DATA_PACKAGE]
+        for Record in RecordSet:
+            (PackagePath, Pcd) = GetPackage(Record[0], ContainerFile, self.WorkspaceDir, Record[2])
+            Package = ModulePackageDependencyClass()
+            Package.FilePath = NormPath(PackagePath)
+            Package.SupArchList = Record[1]
+            Package.FeatureFlag = Pcd
+            if GenerateHelpText(Record[5], ''):
+                Package.HelpTextList.append(GenerateHelpText(Record[5], ''))
+            self.Module.PackageDependencies.append(Package)
+
+ def AddPcd(self, CName, TokenSpaceGuidCName, DefaultValue, ItemType, Arch, HelpTextList):
+ Pcd = PcdClass()
+ Pcd.CName = CName
+ Pcd.TokenSpaceGuidCName = TokenSpaceGuidCName
+ Pcd.DefaultValue = DefaultValue
+ Pcd.ItemType = ItemType
+ Pcd.SupArchList = Arch
+ if GenerateHelpText(HelpTextList, ''):
+ Pcd.HelpTextList.append(GenerateHelpText(HelpTextList, ''))
+ self.Module.PcdCodes.append(Pcd)
+
+ ## GenPcds
+ #
+ # Gen Pcds of Inf
+ # <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+ #
+    # @param ContainerFile: The Inf file full path
+ #
+ def GenPcds(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_PCDS)
+ Pcds = {}
+ PcdToken = {}
+
+ # Get all Pcds
+ RecordSet1 = self.RecordSet[MODEL_PCD_FIXED_AT_BUILD]
+ RecordSet2 = self.RecordSet[MODEL_PCD_PATCHABLE_IN_MODULE]
+ RecordSet3 = self.RecordSet[MODEL_PCD_FEATURE_FLAG]
+ RecordSet4 = self.RecordSet[MODEL_PCD_DYNAMIC_EX]
+ RecordSet5 = self.RecordSet[MODEL_PCD_DYNAMIC]
+
+ # Go through each arch
+ for Record in RecordSet1:
+ (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FIXED_AT_BUILD, ContainerFile, Record[2])
+ self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
+ for Record in RecordSet2:
+ (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_PATCHABLE_IN_MODULE, ContainerFile, Record[2])
+ self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
+ for Record in RecordSet3:
+ (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_FEATURE_FLAG, ContainerFile, Record[2])
+ self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
+ for Record in RecordSet4:
+ (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], TAB_PCDS_DYNAMIC_EX, ContainerFile, Record[2])
+ self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
+ for Record in RecordSet5:
+ (TokenSpaceGuidCName, TokenName, Value, Type) = GetPcdOfInf(Record[0], '', ContainerFile, Record[2])
+ self.AddPcd(TokenName, TokenSpaceGuidCName, Value, Type, Record[1], Record[5])
+
+ ## GenSources
+ #
+ # Gen Sources of Inf
+ # <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+ #
+    # @param ContainerFile: The Inf file full path
+ #
+ def GenSources(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_SOURCES)
+ Sources = {}
+
+ # Get all Sources
+ RecordSet = self.RecordSet[MODEL_EFI_SOURCE_FILE]
+ for Record in RecordSet:
+ (Filename, Family, TagName, ToolCode, Pcd) = GetSource(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
+ Source = ModuleSourceFileClass(Filename, TagName, ToolCode, Family, Pcd, Record[1])
+ if GenerateHelpText(Record[5], ''):
+ Source.HelpTextList.append(GenerateHelpText(Record[5], ''))
+ if MODEL_EFI_SOURCE_FILE in self.SectionHeaderCommentDict:
+ Source.HelpText = self.SectionHeaderCommentDict[MODEL_EFI_SOURCE_FILE]
+ self.Module.Sources.append(Source)
+ #self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))
+
+ ## GenDepexes
+ #
+ # Gen Depex of Inf
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenDepexes(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_DEPEX)
+ Depex = {}
+ # Get all Depexes
+ RecordSet = self.RecordSet[MODEL_EFI_DEPEX]
+ DepexString = ''
+ for Record in RecordSet:
+ DepexString = DepexString + Record[0] + '\n'
+ Dep = ModuleDepexClass()
+ if DepexString.endswith('\n'):
+ DepexString = DepexString[:len(DepexString) - len('\n')]
+ Dep.Depex = DepexString
+ if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
+ self.Module.SmmDepex = Dep
+ elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
+ self.Module.PeiDepex = Dep
+ else:
+ self.Module.DxeDepex = Dep
+# for Record in RecordSet:
+#
+# Dep = ModuleDepexClass()
+# Dep.Depex = Record[0]
+# Dep.SupArchList = Record[1]
+# if GenerateHelpText(Record[5], ''):
+# Dep.HelpTextList.append(GenerateHelpText(Record[5], ''))
+# DepexString = DepexString + Dep
+# List.append(Dep)
+# self.Module.Depex = List
+# if self.Module.ModuleHeader.ModuleType in ['DXE_SMM_DRIVER']:
+# self.Module.SmmDepex = List
+# elif self.Module.ModuleHeader.ModuleType in ['PEI_CORE', 'PEIM']:
+# self.Module.PeiDepex = List
+# else:
+# self.Module.DxeDepex = List
+
+ ## GenBinaries
+ #
+ # Gen Binary of Inf
+ # <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
+ #
+    # @param ContainerFile: The Inf file full path
+ #
+ def GenBinaries(self, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % TAB_BINARIES)
+ Binaries = {}
+
+        # Get all Binaries
+ RecordSet = self.RecordSet[MODEL_EFI_BINARY_FILE]
+ for Record in RecordSet:
+ (FileType, Filename, Target, Pcd) = GetBinary(Record[0], ContainerFile, self.Identification.RelaPath, Record[2])
+ Binary = ModuleBinaryFileClass(Filename, FileType, Target, Pcd, Record[1])
+ if GenerateHelpText(Record[5], ''):
+ Binary.HelpTextList.append(GenerateHelpText(Record[5], ''))
+ self.Module.Binaries.append(Binary)
+ #self.Module.FileList.append(os.path.normpath(os.path.join(self.Identification.RelaPath, Filename)))
+
+ ## GenGuids
+ #
+ # Gen Guids of Inf
+ # <CName>=<GuidValue>
+ #
+ # @param ContainerFile: The Inf file full path
+ #
+ def GenGuidProtocolPpis(self, Type, ContainerFile):
+ EdkLogger.debug(2, "Generate %s ..." % Type)
+ Lists = {}
+ # Get all Items
+ if Type == TAB_GUIDS:
+ ListMember = self.Module.Guids
+ elif Type == TAB_PROTOCOLS:
+ ListMember = self.Module.Protocols
+ elif Type == TAB_PPIS:
+ ListMember = self.Module.Ppis
+
+ RecordSet = self.RecordSet[Section[Type.upper()]]
+ for Record in RecordSet:
+ (Name, Value) = GetGuidsProtocolsPpisOfInf(Record[0], Type, ContainerFile, Record[2])
+ ListClass = GuidProtocolPpiCommonClass()
+ ListClass.CName = Name
+ ListClass.SupArchList = Record[1]
+ ListClass.FeatureFlag = Value
+ if GenerateHelpText(Record[5], ''):
+ ListClass.HelpTextList.append(GenerateHelpText(Record[5], ''))
+ ListMember.append(ListClass)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ EdkLogger.Initialize()
+ EdkLogger.SetLevel(EdkLogger.QUIET)
+
+ W = os.getenv('WORKSPACE')
+ F = os.path.join(W, 'MdeModulePkg/Application/HelloWorld/HelloWorld.inf')
+
+ P = Inf(os.path.normpath(F), True, W, 'MdeModulePkg')
+ P.ShowModule()
+ print P.ModuleToInf(P.Module)
diff --git a/BaseTools/Source/Python/Common/MigrationUtilities.py b/BaseTools/Source/Python/Common/MigrationUtilities.py
new file mode 100644
index 0000000000..8573f0b692
--- /dev/null
+++ b/BaseTools/Source/Python/Common/MigrationUtilities.py
@@ -0,0 +1,567 @@
+## @file
+# Contains several utilities shared by migration tools.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+from optparse import OptionParser
+from Common.BuildToolError import *
+from XmlRoutines import *
+from CommonDataClass.CommonClass import *
+
+## Set all fields of CommonClass object.
+#
+# Set all attributes of CommonClass object from XML Dom object of XmlCommon.
+#
+# @param Common The destination CommonClass object.
+# @param XmlCommon The source XML Dom object.
+#
+def SetCommon(Common, XmlCommon):
+ XmlTag = "Usage"
+ Common.Usage = XmlAttribute(XmlCommon, XmlTag).split()
+
+ XmlTag = "FeatureFlag"
+ Common.FeatureFlag = XmlAttribute(XmlCommon, XmlTag)
+
+ XmlTag = "SupArchList"
+ Common.SupArchList = XmlAttribute(XmlCommon, XmlTag).split()
+
+ XmlTag = XmlNodeName(XmlCommon) + "/" + "HelpText"
+ Common.HelpText = XmlElement(XmlCommon, XmlTag)
+
+
+## Set some fields of CommonHeaderClass object.
+#
+# Set Name, Guid, FileName and FullPath fields of CommonHeaderClass object from
+# XML Dom object of XmlCommonHeader, NameTag and FileName.
+#
+# @param CommonHeader The destination CommonClass object.
+# @param XmlCommonHeader The source XML Dom object.
+# @param NameTag The name tag in XML Dom object.
+# @param FileName The file name of the XML file.
+#
+def SetIdentification(CommonHeader, XmlCommonHeader, NameTag, FileName):
+ XmlParentTag = XmlNodeName(XmlCommonHeader)
+
+ XmlTag = XmlParentTag + "/" + NameTag
+ CommonHeader.Name = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParentTag + "/" + "GuidValue"
+ CommonHeader.Guid = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParentTag + "/" + "Version"
+ CommonHeader.Version = XmlElement(XmlCommonHeader, XmlTag)
+
+ CommonHeader.FileName = os.path.basename(FileName)
+ CommonHeader.FullPath = os.path.abspath(FileName)
+
+
+## Regular expression to match specification and value.
+mReSpecification = re.compile(r"(?P<Specification>\w+)\s+(?P<Value>\w*)")
+
+## Add specification to specification dictionary.
+#
+# Abstract specification name, value pair from Specification String and add them
+# to specification dictionary.
+#
+# @param SpecificationDict The destination Specification dictionary.
+# @param SpecificationString The source Specification String from which the
+# specification name and value pair is abstracted.
+#
+def AddToSpecificationDict(SpecificationDict, SpecificationString):
+ """Abstract specification name, value pair from Specification String"""
+ for SpecificationMatch in mReSpecification.finditer(SpecificationString):
+ Specification = SpecificationMatch.group("Specification")
+ Value = SpecificationMatch.group("Value")
+ SpecificationDict[Specification] = Value
+
+## Set all fields of CommonHeaderClass object.
+#
+# Set all attributes of CommonHeaderClass object from XML Dom object of
+# XmlCommonHeader, NameTag and FileName.
+#
+# @param CommonHeader The destination CommonClass object.
+# @param XmlCommonHeader The source XML Dom object.
+# @param NameTag The name tag in XML Dom object.
+# @param FileName The file name of the XML file.
+#
+def SetCommonHeader(CommonHeader, XmlCommonHeader):
+ """Set all attributes of CommonHeaderClass object from XmlCommonHeader"""
+ XmlParent = XmlNodeName(XmlCommonHeader)
+
+ XmlTag = XmlParent + "/" + "Abstract"
+ CommonHeader.Abstract = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParent + "/" + "Description"
+ CommonHeader.Description = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParent + "/" + "Copyright"
+ CommonHeader.Copyright = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParent + "/" + "License"
+ CommonHeader.License = XmlElement(XmlCommonHeader, XmlTag)
+
+ XmlTag = XmlParent + "/" + "Specification"
+ Specification = XmlElement(XmlCommonHeader, XmlTag)
+
+ AddToSpecificationDict(CommonHeader.Specification, Specification)
+
+ XmlTag = XmlParent + "/" + "ModuleType"
+ CommonHeader.ModuleType = XmlElement(XmlCommonHeader, XmlTag)
+
+
+## Load a new Cloned Record class object.
+#
+# Read an input XML ClonedRecord DOM object and return an object of Cloned Record
+# contained in the DOM object.
+#
+# @param XmlCloned A child XML DOM object in a Common XML DOM.
+#
+# @retval ClonedRecord A new Cloned Record object created by XmlCloned.
+#
+def LoadClonedRecord(XmlCloned):
+ ClonedRecord = ClonedRecordClass()
+
+ XmlTag = "Id"
+ ClonedRecord.Id = int(XmlAttribute(XmlCloned, XmlTag))
+
+ XmlTag = "FarGuid"
+ ClonedRecord.FarGuid = XmlAttribute(XmlCloned, XmlTag)
+
+ XmlTag = "Cloned/PackageGuid"
+ ClonedRecord.PackageGuid = XmlElement(XmlCloned, XmlTag)
+
+ XmlTag = "Cloned/PackageVersion"
+ ClonedRecord.PackageVersion = XmlElement(XmlCloned, XmlTag)
+
+ XmlTag = "Cloned/ModuleGuid"
+ ClonedRecord.ModuleGuid = XmlElement(XmlCloned, XmlTag)
+
+ XmlTag = "Cloned/ModuleVersion"
+ ClonedRecord.ModuleVersion = XmlElement(XmlCloned, XmlTag)
+
+ return ClonedRecord
+
+
+## Load a new Guid/Protocol/Ppi common class object.
+#
+# Read an input XML Guid/Protocol/Ppi DOM object and return an object of
+# Guid/Protocol/Ppi contained in the DOM object.
+#
+# @param XmlGuidProtocolPpiCommon A child XML DOM object in a Common XML DOM.
+#
+# @retval GuidProtocolPpiCommon A new GuidProtocolPpiCommon class object
+# created by XmlGuidProtocolPpiCommon.
+#
+def LoadGuidProtocolPpiCommon(XmlGuidProtocolPpiCommon):
+ GuidProtocolPpiCommon = GuidProtocolPpiCommonClass()
+
+ XmlTag = "Name"
+ GuidProtocolPpiCommon.Name = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
+
+ XmlParent = XmlNodeName(XmlGuidProtocolPpiCommon)
+ if XmlParent == "Entry":
+ XmlTag = "%s/C_Name" % XmlParent
+ elif XmlParent == "GuidCNames":
+ XmlTag = "%s/GuidCName" % XmlParent
+ else:
+ XmlTag = "%s/%sCName" % (XmlParent, XmlParent)
+
+ GuidProtocolPpiCommon.CName = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
+
+ XmlTag = XmlParent + "/" + "GuidValue"
+ GuidProtocolPpiCommon.Guid = XmlElement(XmlGuidProtocolPpiCommon, XmlTag)
+
+ if XmlParent.endswith("Notify"):
+ GuidProtocolPpiCommon.Notify = True
+
+ XmlTag = "GuidTypeList"
+ GuidTypes = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
+ GuidProtocolPpiCommon.GuidTypeList = GuidTypes.split()
+
+ XmlTag = "SupModuleList"
+ SupModules = XmlAttribute(XmlGuidProtocolPpiCommon, XmlTag)
+ GuidProtocolPpiCommon.SupModuleList = SupModules.split()
+
+ SetCommon(GuidProtocolPpiCommon, XmlGuidProtocolPpiCommon)
+
+ return GuidProtocolPpiCommon
+
+
+## Load a new Pcd class object.
+#
+# Read an input XML Pcd DOM object and return an object of Pcd
+# contained in the DOM object.
+#
+# @param XmlPcd A child XML DOM object in a Common XML DOM.
+#
+# @retval Pcd A new Pcd object created by XmlPcd.
+#
+def LoadPcd(XmlPcd):
+ """Return a new PcdClass object equivalent to XmlPcd"""
+ Pcd = PcdClass()
+
+ XmlTag = "PcdEntry/C_Name"
+ Pcd.CName = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/Token"
+ Pcd.Token = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/TokenSpaceGuidCName"
+ Pcd.TokenSpaceGuidCName = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/DatumType"
+ Pcd.DatumType = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/MaxDatumSize"
+ Pcd.MaxDatumSize = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/DefaultValue"
+ Pcd.DefaultValue = XmlElement(XmlPcd, XmlTag)
+
+ XmlTag = "PcdItemType"
+ Pcd.ItemType = XmlAttribute(XmlPcd, XmlTag)
+
+ XmlTag = "PcdEntry/ValidUsage"
+ Pcd.ValidUsage = XmlElement(XmlPcd, XmlTag).split()
+
+ XmlTag = "SupModuleList"
+ Pcd.SupModuleList = XmlAttribute(XmlPcd, XmlTag).split()
+
+ SetCommon(Pcd, XmlPcd)
+
+ return Pcd
+
+
+## Load a new LibraryClass class object.
+#
+# Read an input XML LibraryClass DOM object and return an object of LibraryClass
+# contained in the DOM object.
+#
+# @param XmlLibraryClass A child XML DOM object in a Common XML DOM.
+#
+# @retval LibraryClass A new LibraryClass object created by XmlLibraryClass.
+#
+def LoadLibraryClass(XmlLibraryClass):
+ LibraryClass = LibraryClassClass()
+
+ XmlTag = "LibraryClass/Keyword"
+ LibraryClass.LibraryClass = XmlElement(XmlLibraryClass, XmlTag)
+ if LibraryClass.LibraryClass == "":
+ XmlTag = "Name"
+ LibraryClass.LibraryClass = XmlAttribute(XmlLibraryClass, XmlTag)
+
+ XmlTag = "LibraryClass/IncludeHeader"
+ LibraryClass.IncludeHeader = XmlElement(XmlLibraryClass, XmlTag)
+
+ XmlTag = "RecommendedInstanceVersion"
+ RecommendedInstanceVersion = XmlAttribute(XmlLibraryClass, XmlTag)
+ LibraryClass.RecommendedInstanceVersion = RecommendedInstanceVersion
+
+ XmlTag = "RecommendedInstanceGuid"
+ RecommendedInstanceGuid = XmlAttribute(XmlLibraryClass, XmlTag)
+ LibraryClass.RecommendedInstanceGuid = RecommendedInstanceGuid
+
+ XmlTag = "SupModuleList"
+ SupModules = XmlAttribute(XmlLibraryClass, XmlTag)
+ LibraryClass.SupModuleList = SupModules.split()
+
+ SetCommon(LibraryClass, XmlLibraryClass)
+
+ return LibraryClass
+
+
+## Load a new Build Option class object.
+#
+# Read an input XML BuildOption DOM object and return an object of Build Option
+# contained in the DOM object.
+#
+# @param XmlBuildOption A child XML DOM object in a Common XML DOM.
+#
+# @retval BuildOption A new Build Option object created by XmlBuildOption.
+#
+def LoadBuildOption(XmlBuildOption):
+ """Return a new BuildOptionClass object equivalent to XmlBuildOption"""
+ BuildOption = BuildOptionClass()
+
+ BuildOption.Option = XmlElementData(XmlBuildOption)
+
+ XmlTag = "BuildTargets"
+ BuildOption.BuildTargetList = XmlAttribute(XmlBuildOption, XmlTag).split()
+
+ XmlTag = "ToolChainFamily"
+ BuildOption.ToolChainFamily = XmlAttribute(XmlBuildOption, XmlTag)
+
+ XmlTag = "TagName"
+ BuildOption.TagName = XmlAttribute(XmlBuildOption, XmlTag)
+
+ XmlTag = "ToolCode"
+ BuildOption.ToolCode = XmlAttribute(XmlBuildOption, XmlTag)
+
+ XmlTag = "SupArchList"
+ BuildOption.SupArchList = XmlAttribute(XmlBuildOption, XmlTag).split()
+
+ return BuildOption
+
+
+## Load a new User Extensions class object.
+#
+# Read an input XML UserExtensions DOM object and return an object of User
+# Extensions contained in the DOM object.
+#
+# @param XmlUserExtensions A child XML DOM object in a Common XML DOM.
+#
+# @retval UserExtensions A new User Extensions object created by
+# XmlUserExtensions.
+#
+def LoadUserExtensions(XmlUserExtensions):
+ UserExtensions = UserExtensionsClass()
+
+ XmlTag = "UserID"
+ UserExtensions.UserID = XmlAttribute(XmlUserExtensions, XmlTag)
+
+ XmlTag = "Identifier"
+ UserExtensions.Identifier = XmlAttribute(XmlUserExtensions, XmlTag)
+
+ UserExtensions.Content = XmlElementData(XmlUserExtensions)
+
+ return UserExtensions
+
+
+## Store content to a text file object.
+#
+# Write some text file content to a text file object. The contents may echo
+# in screen in a verbose way.
+#
+# @param TextFile The text file object.
+# @param Content The string object to be written to a text file.
+#
+def StoreTextFile(TextFile, Content):
+ EdkLogger.verbose(Content)
+ TextFile.write(Content)
+
+
+## Add item to a section.
+#
+# Add an Item with specific CPU architecture to section dictionary.
+# Duplicate items are not added twice.
+#
+# @param Section Section dictionary indexed by CPU architecture.
+# @param Arch CPU architecture: Ia32, X64, Ipf, ARM, Ebc or Common.
+# @param Item The Item to be added to section dictionary.
+#
+def AddToSection(Section, Arch, Item):
+ SectionArch = Section.get(Arch, [])
+ if Item not in SectionArch:
+ SectionArch.append(Item)
+ Section[Arch] = SectionArch
+
+
+## Get section contents.
+#
+# Return the content of section named SectionName.
+# The content is generated by applying Method to each object in ObjectList.
+#
+# @param SectionName The name of the section.
+# @param Method A function returning a string item of an object.
+# @param ObjectList The list of object.
+#
+# @retval Section The string content of a section.
+#
+def GetSection(SectionName, Method, ObjectList):
+ SupportedArches = ["common", "Ia32", "X64", "Ipf", "Ebc", "ARM"]
+ SectionDict = {}
+ for Object in ObjectList:
+ Item = Method(Object)
+ if Item == "":
+ continue
+ Item = " %s" % Item
+ Arches = Object.SupArchList
+ if len(Arches) == 0:
+ AddToSection(SectionDict, "common", Item)
+ else:
+ for Arch in SupportedArches:
+ if Arch.upper() in Arches:
+ AddToSection(SectionDict, Arch, Item)
+
+ Section = ""
+ for Arch in SupportedArches:
+ SectionArch = "\n".join(SectionDict.get(Arch, []))
+ if SectionArch != "":
+ Section += "[%s.%s]\n%s\n" % (SectionName, Arch, SectionArch)
+ Section += "\n"
+ if Section != "":
+ Section += "\n"
+ return Section
+
+
+## Store file header to a text file.
+#
+# Write standard file header to a text file. The content includes copyright,
+# abstract, description and license extracted from CommonHeader class object.
+#
+# @param TextFile The text file object.
+# @param CommonHeader The source CommonHeader class object.
+#
+def StoreHeader(TextFile, CommonHeader):
+ CopyRight = CommonHeader.Copyright
+ Abstract = CommonHeader.Abstract
+ Description = CommonHeader.Description
+ License = CommonHeader.License
+
+ Header = "#/** @file\n#\n"
+ Header += "# " + Abstract + "\n#\n"
+ Header += "# " + Description.strip().replace("\n", "\n# ") + "\n"
+ Header += "# " + CopyRight + "\n#\n"
+ Header += "# " + License.replace("\n", "\n# ").replace(" ", " ")
+ Header += "\n#\n#**/\n\n"
+
+ StoreTextFile(TextFile, Header)
+
+## Store file header to a text file.
+#
+# Write Defines section to a text file. DefinesTupleList determines the content.
+#
+# @param TextFile The text file object.
+# @param DefinesTupleList The list of (Tag, Value) to be added as one item.
+#
+def StoreDefinesSection(TextFile, DefinesTupleList):
+ Section = "[Defines]\n"
+ for DefineItem in DefinesTupleList:
+ Section += " %-30s = %s\n" % DefineItem
+
+ Section += "\n\n"
+ StoreTextFile(TextFile, Section)
+
+
+## Return one User Extension section.
+#
+# Read the input UserExtensions class object and return one section.
+#
+# @param UserExtensions An input UserExtensions class object.
+#
+# @retval UserExtensionSection A section representing UserExtensions object.
+#
+def GetUserExtensions(UserExtensions):
+ UserId = UserExtensions.UserID
+ Identifier = UserExtensions.Identifier
+ Content = UserExtensions.Content
+
+ return "[UserExtensions.%s.%s]\n %s\n\n" % (UserId, Identifier, Content)
+
+## Regular expression to match an equation.
+mReEquation = re.compile(r"\s*(\S+)\s*=\s*(\S*)\s*")
+
+## Return a value tuple matching information in a text file.
+#
+# Parse the text file and return a value tuple corresponding to an input tag
+# tuple. In case of any error, a tuple of empty strings is returned.
+#
+# @param FileName The file name of the text file.
+# @param TagTuple A tuple of tags as the key to the value.
+#
+# @retval ValueTuple The returned tuple corresponding to the tag tuple.
+#
+def GetTextFileInfo(FileName, TagTuple):
+ ValueTuple = [""] * len(TagTuple)
+ try:
+ for Line in open(FileName):
+ Line = Line.split("#", 1)[0]
+ MatchEquation = mReEquation.match(Line)
+ if MatchEquation:
+ Tag = MatchEquation.group(1).upper()
+ Value = MatchEquation.group(2)
+ for Index in range(len(TagTuple)):
+ if TagTuple[Index] == Tag:
+ ValueTuple[Index] = Value
+ except:
+ EdkLogger.info("IO Error in reading file %s" % FileName)
+
+ return ValueTuple
+
+
+## Return a value tuple matching information in an XML file.
+#
+# Parse the XML file and return a value tuple corresponding to an input tag
+# tuple. In case of any error, a tuple of empty strings is returned.
+#
+# @param FileName The file name of the XML file.
+# @param TagTuple A tuple of tags as the key to the value.
+#
+# @retval ValueTuple The returned tuple corresponding to the tag tuple.
+#
+def GetXmlFileInfo(FileName, TagTuple):
+ XmlDom = XmlParseFile(FileName)
+ return tuple([XmlElement(XmlDom, XmlTag) for XmlTag in TagTuple])
+
+
+## Parse migration command line options
+#
+# Use standard Python module optparse to parse command line option of this tool.
+#
+# @param Source The source file type.
+# @param Destinate The destination file type.
+#
+# @retval Options A optparse object containing the parsed options.
+# @retval InputFile Path of an source file to be migrated.
+#
+def MigrationOptionParser(Source, Destinate, ToolName, VersionNumber = 1.0):
+ # use clearer usage to override default usage message
+ UsageString = "%s [-a] [-v|-q] [-o <output_file>] <input_file>" % ToolName
+ Version = "%s Version %.2f" % (ToolName, VersionNumber)
+ Copyright = "Copyright (c) 2007, Intel Corporation. All rights reserved."
+
+ Parser = OptionParser(description=Copyright, version=Version, usage=UsageString)
+ Parser.add_option("-o", "--output", dest="OutputFile", help="The name of the %s file to be created." % Destinate)
+ Parser.add_option("-a", "--auto", dest="AutoWrite", action="store_true", default=False, help="Automatically create the %s file using the name of the %s file and replacing file extension" % (Source, Destinate))
+ Parser.add_option("-q", "--quiet", action="store_true", type=None, help="Disable all messages except FATAL ERRORS.")
+ Parser.add_option("-v", "--verbose", action="store_true", type=None, help="Turn on verbose output with informational messages printed.")
+
+ Options, Args = Parser.parse_args()
+
+ # Set logging level
+ if Options.verbose:
+ EdkLogger.setLevel(EdkLogger.VERBOSE)
+ elif Options.quiet:
+ EdkLogger.setLevel(EdkLogger.QUIET)
+ else:
+ EdkLogger.setLevel(EdkLogger.INFO)
+
+ # error check
+ if len(Args) == 0:
+ raise MigrationError(PARAMETER_MISSING, name="Input file", usage=Parser.get_usage())
+ if len(Args) > 1:
+ raise MigrationError(PARAMETER_INVALID, name="Too many input files", usage=Parser.get_usage())
+
+ InputFile = Args[0]
+ if not os.path.exists(InputFile):
+ raise MigrationError(FILE_NOT_FOUND, name=InputFile)
+
+ if Options.OutputFile:
+ if Options.AutoWrite:
+ raise MigrationError(OPTION_CONFLICT, arg1="-o", arg2="-a", usage=Parser.get_usage())
+ else:
+ if Options.AutoWrite:
+ Options.OutputFile = os.path.splitext(InputFile)[0] + "." + Destinate.lower()
+ else:
+ raise MigrationError(OPTION_MISSING, name="-o", usage=Parser.get_usage())
+
+ return Options, InputFile
+
+# This acts like the main() function for the script, unless it is 'import'ed
+# into another script.
+if __name__ == '__main__':
+ pass
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
new file mode 100644
index 0000000000..14f6550f29
--- /dev/null
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -0,0 +1,1327 @@
+## @file
+# Common routines used by all tools
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import sys
+import string
+import thread
+import threading
+import time
+import re
+import cPickle
+from UserDict import IterableUserDict
+from UserList import UserList
+
+from Common import EdkLogger as EdkLogger
+from Common import GlobalData as GlobalData
+
+from BuildToolError import *
+
## Regular expression used to find out place holders in string template
#
# Matches ${name} markers consumed by the TemplateString class below; the
# single group captures the bare placeholder name (no '$', '{' or '}').
gPlaceholderPattern = re.compile("\$\{([^$()\s]+)\}", re.MULTILINE|re.UNICODE)

## Dictionary used to store file time stamp for quick re-access
# Filled lazily by IsChanged().
gFileTimeStampCache = {} # {file path : file time stamp}

## Dictionary used to store dependencies of files
gDependencyDatabase = {} # arch : {file path : [dependent files list]}
+
## callback routine for processing variable option
#
# This function can be used to process a variable number of option values.
# The typical usage is specifying an architecture list on the command line,
# e.g. <tool> -a IA32 X64 IPF
#
# @param Option        Standard optparse callback parameter (the Option object)
# @param OptionString  Standard optparse callback parameter (the option text seen)
# @param Value         Standard optparse callback parameter (always None here)
# @param Parser        Standard optparse callback parameter (the OptionParser)
#
def ProcessVariableArgument(Option, OptionString, Value, Parser):
    assert Value is None
    Collected = []
    Remaining = Parser.rargs
    while Remaining:
        Next = Remaining[0]
        # Stop collecting as soon as the next token looks like another option:
        # "--xxx" (but not bare "--") or "-x" (but not bare "-" or "--xxx").
        IsLongOption = Next[:2] == "--" and len(Next) > 2
        IsShortOption = Next[:1] == "-" and len(Next) > 1 and Next[1] != "-"
        if IsLongOption or IsShortOption:
            break
        Collected.append(Remaining.pop(0))
    setattr(Parser.values, Option.dest, Collected)
+
## Convert GUID string in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx style to C structure style
#
# @param Guid The registry-format GUID string (five '-' separated fields)
#
# @retval string The GUID string in C structure style, e.g.
#                {0x12345678, 0x1234, 0x1234, {0x12, 0x34, ...}}
#
def GuidStringToGuidStructureString(Guid):
    Fields = Guid.split('-')
    # The first three fields map directly to the 32/16/16-bit members.
    Parts = ['0x' + Fields[Index] for Index in range(3)]
    # Field 4 supplies the first two bytes of the 8-byte array ...
    Bytes = ['0x' + Fields[3][0:2], '0x' + Fields[3][2:4]]
    # ... and field 5 supplies the remaining six bytes, two hex digits each.
    Bytes += ['0x' + Fields[4][Index:Index + 2] for Index in range(0, 12, 2)]
    return '{' + ', '.join(Parts) + ', {' + ', '.join(Bytes) + '}}'
+
## Convert GUID structure in byte array to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue The GUID value as a C byte-array initializer string,
#                  e.g. "{0x78, 0x56, 0x34, 0x12, ...}" (16 byte values)
#
# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
# @retval ''     If the input does not contain exactly 16 parseable byte values
#
def GuidStructureByteArrayToGuidString(GuidValue):
    guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "")
    guidValueList = guidValueString.split(",")
    if len(guidValueList) != 16:
        return ''
    try:
        # The first 8 bytes are stored little-endian per GUID field (4-2-2),
        # so their order is swapped when printed; the last 8 are in order.
        return "%02x%02x%02x%02x-%02x%02x-%02x%02x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
                int(guidValueList[3], 16),
                int(guidValueList[2], 16),
                int(guidValueList[1], 16),
                int(guidValueList[0], 16),
                int(guidValueList[5], 16),
                int(guidValueList[4], 16),
                int(guidValueList[7], 16),
                int(guidValueList[6], 16),
                int(guidValueList[8], 16),
                int(guidValueList[9], 16),
                int(guidValueList[10], 16),
                int(guidValueList[11], 16),
                int(guidValueList[12], 16),
                int(guidValueList[13], 16),
                int(guidValueList[14], 16),
                int(guidValueList[15], 16)
                )
    except ValueError:
        # BUGFIX: was a bare "except:", which also swallowed unrelated
        # errors (e.g. KeyboardInterrupt).  Only a non-hex byte value
        # makes the GUID invalid.
        return ''
+
## Convert GUID string in C structure style to xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
#
# @param GuidValue The GUID value in C structure format, i.e. 11 comma
#                  separated numbers: 32-bit, 16-bit, 16-bit, 8 bytes
#
# @retval string The GUID value in xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
# @retval ''     If the input does not contain exactly 11 parseable numbers
#
def GuidStructureStringToGuidString(GuidValue):
    guidValueString = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "").replace(";", "")
    guidValueList = guidValueString.split(",")
    if len(guidValueList) != 11:
        return ''
    try:
        return "%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x" % (
                int(guidValueList[0], 16),
                int(guidValueList[1], 16),
                int(guidValueList[2], 16),
                int(guidValueList[3], 16),
                int(guidValueList[4], 16),
                int(guidValueList[5], 16),
                int(guidValueList[6], 16),
                int(guidValueList[7], 16),
                int(guidValueList[8], 16),
                int(guidValueList[9], 16),
                int(guidValueList[10], 16)
                )
    except ValueError:
        # BUGFIX: was a bare "except:", which also swallowed unrelated
        # errors.  Only a non-hex field value makes the GUID invalid.
        return ''
+
## Convert GUID string in C structure style to xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx
#
# Like GuidStructureStringToGuidString(), but joins the fields with '_' (for
# use as a C identifier) and reports a malformed input through EdkLogger
# instead of returning ''.
#
# @param GuidValue The GUID value in C structure format
#
# @retval string The GUID value in xxxxxxxx_xxxx_xxxx_xxxx_xxxxxxxxxxxx format
#
def GuidStructureStringToGuidValueName(GuidValue):
    Stripped = GuidValue.lower().replace("{", "").replace("}", "").replace(" ", "")
    Fields = Stripped.split(",")
    if len(Fields) != 11:
        EdkLogger.error(None, None, "Invalid GUID value string %s" % GuidValue)
    Numbers = tuple(int(Fields[Index], 16) for Index in range(11))
    return "%08x_%04x_%04x_%02x%02x_%02x%02x%02x%02x%02x%02x" % Numbers
+
## Create directories
#
# Creates the directory and any missing intermediate directories.  An empty
# or None path is treated as "nothing to do" and reported as success.
#
# @param Directory The directory path to create (may be None or empty)
#
# @retval True  The directory exists (created now or already present)
# @retval False The directory could not be created
#
def CreateDirectory(Directory):
    if Directory is None or Directory.strip() == "":
        return True
    try:
        if not os.access(Directory, os.F_OK):
            os.makedirs(Directory)
    except (OSError, IOError):
        # BUGFIX: was a bare "except:".  Creation can fail because of
        # permissions, a file in the way, or a concurrent creator; report
        # failure instead of raising (or hiding unrelated errors).
        return False
    return True
+
## Remove directories, including files and sub-directories in it
#
# @param Directory   The directory to remove
# @param Recursively If True, remove the directory's contents first;
#                    otherwise the directory must already be empty
#
def RemoveDirectory(Directory, Recursively=False):
    if Directory is None or Directory.strip() == "" or not os.path.exists(Directory):
        return
    if Recursively:
        # BUGFIX: the original chdir'ed into each directory and only
        # restored the CWD on success, leaving the process in a removed
        # directory if any removal raised.  Walking with joined paths
        # never touches the CWD.
        for Name in os.listdir(Directory):
            Path = os.path.join(Directory, Name)
            if os.path.isdir(Path):
                RemoveDirectory(Path, Recursively)
            else:
                os.remove(Path)
    os.rmdir(Directory)
+
## Check if given file is changed or not
#
# This method is used to check if a file is changed or not between two build
# actions. It makes use of a module-level cache (gFileTimeStampCache) to
# store file timestamps across calls.
#
# @param File The path of file
#
# @retval True  If the file doesn't exist, or its timestamp differs from the
#               cached one (the cache is refreshed as a side effect)
# @retval False If the file is unchanged since it was last seen
#
def IsChanged(File):
    if not os.path.exists(File):
        return True

    # Index -2 of the os.stat() result tuple is the modification time.
    CurrentStamp = os.stat(File)[-2]
    if File in gFileTimeStampCache and gFileTimeStampCache[File] == CurrentStamp:
        return False

    # New or modified file: remember the latest timestamp for next time.
    gFileTimeStampCache[File] = CurrentStamp
    return True
+
## Store content in file
#
# This method is used to save file only when its content is changed. This is
# quite useful for "make" system to decide what will be re-built and what won't.
#
# @param File         The path of file
# @param Content      The new content of the file
# @param IsBinaryFile The flag indicating if the file is binary file or not
#
# @retval True  If the file content is changed and the file is renewed
# @retval False If the file content is the same
#
def SaveFileOnChange(File, Content, IsBinaryFile=True):
    if not IsBinaryFile:
        Content = Content.replace("\n", os.linesep)

    if os.path.exists(File):
        try:
            # BUGFIX: the original never closed this handle; close it
            # explicitly instead of waiting for garbage collection.
            Fd = open(File, "rb")
            try:
                if Content == Fd.read():
                    return False
            finally:
                Fd.close()
        except (IOError, OSError):
            EdkLogger.error(None, FILE_OPEN_FAILURE, ExtraData=File)

    CreateDirectory(os.path.dirname(File))
    try:
        if GlobalData.gIsWindows:
            try:
                # Native helper for Windows; fall back to a plain write if
                # it is unavailable or fails for any reason.
                from PyUtility import SaveFileToDisk
                if not SaveFileToDisk(File, Content):
                    EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File)
            except:
                Fd = open(File, "wb")
                Fd.write(Content)
                Fd.close()
        else:
            Fd = open(File, "wb")
            Fd.write(Content)
            Fd.close()
    except (IOError, OSError):
        EdkLogger.error(None, FILE_CREATE_FAILURE, ExtraData=File)

    return True
+
## Make a Python object persistent on file system
#
# Serializes Data with cPickle; failures are logged (non-fatally) rather
# than raised.
#
# @param Data The object to be stored in file
# @param File The path of file to store the object
#
def DataDump(Data, File):
    Fd = None
    try:
        Fd = open(File, 'wb')
        cPickle.dump(Data, Fd, cPickle.HIGHEST_PROTOCOL)
    except Exception:
        # BUGFIX: was a bare "except:", which in Python 2 also swallowed
        # SystemExit and KeyboardInterrupt.
        EdkLogger.error("", FILE_OPEN_FAILURE, ExtraData=File, RaiseError=False)
    finally:
        if Fd is not None:
            Fd.close()
+
## Restore a Python object from a file
#
# Counterpart of DataDump(): unpickles an object previously written with
# cPickle.  Any failure is logged at verbose level and reported as None
# rather than raised.
#
# @param File The path of file stored the object
#
# @retval object A python object
# @retval None   If failure in file operation
#
def DataRestore(File):
    Data = None
    Fd = None
    try:
        Fd = open(File, 'rb')
        Data = cPickle.load(Fd)
    except Exception, e:
        # Deliberately non-fatal: callers treat None as "no cached data".
        EdkLogger.verbose("Failed to load [%s]\n\t%s" % (File, str(e)))
        Data = None
    finally:
        if Fd != None:
            Fd.close()
    return Data
+
## Retrieve and cache the real path name in file system
#
# Maps upper-cased (case-insensitive) paths to their actual on-disk
# spelling.  The cache is seeded with the entries directly under Root and
# expanded lazily, one directory level at a time, on lookup misses.
#
# NOTE(review): _CACHE_ is a class-level dict, so it is shared by every
# DirCache instance; presumably only one instance (one root) is created per
# process -- verify before instantiating caches for multiple roots.
#
# @param Root The root directory of path relative to
#
# @retval str  The path string if the path exists
# @retval None If path doesn't exist
#
class DirCache:
    _CACHE_ = {}

    def __init__(self, Root):
        self._Root = Root
        # Seed the cache with the entries directly under Root.
        for F in os.listdir(Root):
            self._CACHE_[F.upper()] = F

    # =[] operator
    def __getitem__(self, Path):
        # Strip the shared prefix with Root so Path becomes Root-relative.
        Path = Path[len(os.path.commonprefix([Path, self._Root])):]
        if not Path:
            return self._Root
        if Path and Path[0] == os.path.sep:
            Path = Path[1:]
        Path = Path.upper()
        if Path in self._CACHE_:
            return os.path.join(self._Root, self._CACHE_[Path])

        # Cache miss: locate the deepest ancestor of Path already cached.
        IndexList = []
        LastSepIndex = -1
        SepIndex = Path.find(os.path.sep)
        while SepIndex > -1:
            Parent = Path[:SepIndex]
            if Parent not in self._CACHE_:
                break
            LastSepIndex = SepIndex
            SepIndex = Path.find(os.path.sep, LastSepIndex + 1)

        if LastSepIndex == -1:
            # Not even the first component is known: the path doesn't exist.
            return None

        # Walk down from the deepest known ancestor, listing each directory
        # (paths are Root-relative, hence the chdir) to extend the cache
        # level by level until Path is found or proven absent.
        Cwd = os.getcwd()
        os.chdir(self._Root)
        SepIndex = LastSepIndex
        while SepIndex > -1:
            ParentKey = Path[:SepIndex]
            if ParentKey not in self._CACHE_:
                os.chdir(Cwd)
                return None

            ParentDir = self._CACHE_[ParentKey]
            for F in os.listdir(ParentDir):
                Dir = os.path.join(ParentDir, F)
                self._CACHE_[Dir.upper()] = Dir

            SepIndex = Path.find(os.path.sep, SepIndex + 1)

        os.chdir(Cwd)
        if Path not in self._CACHE_:
            return None
        return os.path.join(self._Root, self._CACHE_[Path])
+
## Get all files of a directory
#
# @param Root     Root dir
# @param SkipList Directory names to prune from the walk
# @param FullPath If True, return paths including Root; otherwise paths
#                 relative to Root
#
# @retval A list of all files found under Root
#
def GetFiles(Root, SkipList=None, FullPath = True):
    OriPath = Root
    FileList = []
    for Top, SubDirs, Names in os.walk(Root):
        # Prune skipped directories in place so os.walk won't descend.
        if SkipList:
            for Skip in SkipList:
                if Skip in SubDirs:
                    SubDirs.remove(Skip)
        for Name in Names:
            FullName = os.path.normpath(os.path.join(Top, Name))
            if FullPath:
                FileList.append(FullName)
            else:
                FileList.append(FullName[len(OriPath) + 1:])
    return FileList
+
## Check if given file exists and optionally has the expected extension
#
# @param File File name or path to be checked
# @param Ext  Expected file extension, e.g. ".inf" (case-insensitive);
#             None skips the extension check
#
# @retval True  if file exists (and matches Ext when one is given)
# @retval False if file doesn't exist or the extension differs
#
def ValidFile(File, Ext=None):
    if Ext is not None:
        if os.path.splitext(File)[1].lower() != Ext.lower():
            return False
    return os.path.exists(File)
+
## Look up the case-correct on-disk form of a file path
#
# Resolution goes through the global file cache (GlobalData.gAllFiles):
# first relative to Dir, then relative to OverrideDir.
#
# @param File        File name, possibly relative
# @param Dir         Directory the file is primarily relative to
# @param OverrideDir Fallback directory searched if not found under Dir
#
# @retval The cached real path, or the cache's miss value when absent
#
def RealPath(File, Dir='', OverrideDir=''):
    Found = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
    if not Found and OverrideDir:
        Found = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
    return Found
+
## Split the case-corrected path of File into (relative part, base directory)
#
# @param File        File name, possibly relative
# @param Dir         Directory the file is primarily relative to
# @param OverrideDir Fallback directory searched if not found under Dir
#
# @retval (str, str)   (path relative to the matched dir, the matched dir)
# @retval (None, None) if the file is found in neither location
#
def RealPath2(File, Dir='', OverrideDir=''):
    Found = GlobalData.gAllFiles[os.path.normpath(os.path.join(Dir, File))]
    if Found:
        if not Dir:
            return Found, ''
        # Skip the separator too, unless Dir already ends with one.
        Cut = len(Dir) if Dir[-1] == os.path.sep else len(Dir) + 1
        return Found[Cut:], Found[0:len(Dir)]

    if OverrideDir:
        Found = GlobalData.gAllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
        if Found:
            return Found[len(OverrideDir) + 1:], Found[0:len(OverrideDir)]
    return None, None
+
## Check whether File (with optional R8 macros) exists in the workspace
#
# Resolution order: the file's own $(EFI_SOURCE)/$(EDK_SOURCE) expansion,
# then OverrideDir, then Dir.  AllFiles is expected to map a normalized
# path to its on-disk spelling and yield None on a miss (like DirCache).
#
# @param AllFiles  Path lookup cache (DirCache-like)
# @param File      The file name to check, possibly containing R8 macros
# @param Ext       Expected extension (case-insensitive); None skips check
# @param Workspace Absolute workspace root path
# @param EfiSource Replacement text for $(EFI_SOURCE)
# @param EdkSource Replacement text for $(EDK_SOURCE)
# @param Dir       Base dir, workspace-relative ('.' means current dir)
# @param OverrideDir Optional override directory
#
# @retval (True, path)  the resolved on-disk path on success
# @retval (False, path) on failure; path is the last candidate tried
#
def ValidFile2(AllFiles, File, Ext=None, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
    NewFile = File
    if Ext is not None:
        if os.path.splitext(File)[1].lower() != Ext.lower():
            return False, File

    # Expand the R8 macros in the override directory, if any
    if OverrideDir:
        OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
        OverrideDir = OverrideDir.replace('$(EDK_SOURCE)', EdkSource)

    # The default dir means the current dir, expressed workspace-relative
    if Dir == '.':
        Dir = os.getcwd()
        Dir = Dir[len(Workspace) + 1:]

    # 1) the file's own macro expansion
    if '$(EFI_SOURCE)' in File or '$(EDK_SOURCE)' in File:
        NewFile = File.replace('$(EFI_SOURCE)', EfiSource)
        NewFile = NewFile.replace('$(EDK_SOURCE)', EdkSource)
        NewFile = AllFiles[os.path.normpath(NewFile)]
        if NewFile is not None:
            return True, NewFile

    # 2) the override directory
    if OverrideDir:
        NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
        if NewFile is not None:
            return True, NewFile

    # 3) the normal directory; note File keeps the joined form on failure
    File = os.path.join(Dir, File)
    NewFile = AllFiles[os.path.normpath(File)]
    if NewFile is not None:
        return True, NewFile

    return False, File
+
## Resolve File (with optional R8 macros) into workspace-relative pieces
#
# Tries, in order: the file's own $(EFI_SOURCE)/$(EDK_SOURCE) expansion,
# the override directory, then the module directory.  The while(True) is
# only a structured "goto" past the remaining candidates once one matches:
# every path through the body breaks on the first iteration.
#
# @param AllFiles    Path cache mapping a normalized path to its on-disk
#                    spelling, None on a miss (like DirCache)
# @param File        The file name to resolve (may contain R8 macros)
# @param Workspace   Absolute workspace root path
# @param EfiSource   Replacement text for $(EFI_SOURCE)
# @param EdkSource   Replacement text for $(EDK_SOURCE)
# @param Dir         Module dir, workspace-relative ('.' = current dir)
# @param OverrideDir Optional override directory
#
# @retval (NewRelaPath, RelaPath, File) -- the directory the file was found
#         under, the module dir's cached path, and the (possibly rewritten)
#         file name; File may be returned unresolved if nothing matched
#
def ValidFile3(AllFiles, File, Workspace='', EfiSource='', EdkSource='', Dir='.', OverrideDir=''):
    # Replace the R8 macros
    if OverrideDir != '' and OverrideDir != None:
        if OverrideDir.find('$(EFI_SOURCE)') > -1:
            OverrideDir = OverrideDir.replace('$(EFI_SOURCE)', EfiSource)
        if OverrideDir.find('$(EDK_SOURCE)') > -1:
            OverrideDir = OverrideDir.replace('$(EDK_SOURCE)', EdkSource)

    # Replace the default dir to current dir
    # Dir is current module dir related to workspace
    if Dir == '.':
        Dir = os.getcwd()
        Dir = Dir[len(Workspace)+1:]

    NewFile = File
    RelaPath = AllFiles[os.path.normpath(Dir)]
    NewRelaPath = RelaPath

    while(True):
        # First check if File has R8 definition itself
        if File.find('$(EFI_SOURCE)') > -1 or File.find('$(EDK_SOURCE)') > -1:
            File = File.replace('$(EFI_SOURCE)', EfiSource)
            File = File.replace('$(EDK_SOURCE)', EdkSource)
            NewFile = AllFiles[os.path.normpath(File)]
            if NewFile != None:
                # Found via macro expansion: split into dir + bare name
                NewRelaPath = os.path.dirname(NewFile)
                File = os.path.basename(NewFile)
                #NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
                break

        # Second check the path with override value
        if OverrideDir != '' and OverrideDir != None:
            NewFile = AllFiles[os.path.normpath(os.path.join(OverrideDir, File))]
            if NewFile != None:
                #NewRelaPath = os.path.dirname(NewFile)
                # Strip the (possibly ".."-prefixed) File part off the match
                NewRelaPath = NewFile[:len(NewFile) - len(File.replace("..\\", '').replace("../", '')) - 1]
                break

        # Last check the path with normal definitions
        NewFile = AllFiles[os.path.normpath(os.path.join(Dir, File))]
        if NewFile != None:
            break

        # No file found
        break

    return NewRelaPath, RelaPath, File
+
+
## Compute the relative path from directory Path1 to Path2
#
# @param Path1 The directory to start from
# @param Path2 The target file or directory path
#
# @retval str The relative path, normalized with os.path.normpath
#
def GetRelPath(Path1, Path2):
    FileName = os.path.basename(Path2)
    Sep = os.path.normpath('/')
    L1 = os.path.normpath(Path1).split(Sep)
    L2 = os.path.normpath(Path2).split(Sep)
    # BUGFIX: only compare up to the shorter path.  The original always
    # indexed L2 with L1's indices and raised IndexError whenever Path2
    # was an ancestor of Path1.
    for Index in range(0, min(len(L1), len(L2))):
        if L1[Index] != L2[Index]:
            # Paths diverge here: climb out of the rest of L1, then walk
            # down the rest of L2.
            FileName = '../' * (len(L1) - Index)
            for Index2 in range(Index, len(L2)):
                FileName = os.path.join(FileName, L2[Index2])
            break
    else:
        if len(L1) > len(L2):
            # Path2 is an ancestor of Path1: climb up only.
            FileName = '../' * (len(L1) - len(L2))
    return os.path.normpath(FileName)
+
+
## Get GUID value from given packages
#
# @param CName       The CName of the GUID
# @param PackageList List of packages looking-up in
#
# @retval GuidValue if the CName is found in any given package
# @retval None      if the CName is not found in all given packages
#
def GuidValue(CName, PackageList):
    # Return the first match, honoring package order.
    for Package in PackageList:
        if CName in Package.Guids:
            return Package.Guids[CName]
    return None
+
## Get Protocol value from given packages
#
# @param CName       The CName of the Protocol
# @param PackageList List of packages looking-up in
#
# @retval GuidValue if the CName is found in any given package
# @retval None      if the CName is not found in all given packages
#
def ProtocolValue(CName, PackageList):
    # Return the first match, honoring package order.
    for Package in PackageList:
        if CName in Package.Protocols:
            return Package.Protocols[CName]
    return None
+
## Get PPI value from given packages
#
# @param CName       The CName of the PPI
# @param PackageList List of packages looking-up in
#
# @retval GuidValue if the CName is found in any given package
# @retval None      if the CName is not found in all given packages
#
def PpiValue(CName, PackageList):
    # Return the first match, honoring package order.
    for Package in PackageList:
        if CName in Package.Ppis:
            return Package.Ppis[CName]
    return None
+
## A string template class
#
# This class implements a template for string replacement. A string template
# looks like following
#
#       ${BEGIN} other_string ${placeholder_name} other_string ${END}
#
# The string between ${BEGIN} and ${END} will be repeated as many times as the
# length of "placeholder_name", which is a list passed through a dict. The
# "placeholder_name" is the key name of the dict. The ${BEGIN} and ${END} can
# be not used and, in this case, the "placeholder_name" must not be a list and
# it will just be replaced once.
#
class TemplateString(object):
    _REPEAT_START_FLAG = "BEGIN"
    _REPEAT_END_FLAG = "END"

    ## One fragment of a template: the text between two repeat flags
    #
    # The section text is pre-split into alternating literal and "${name}"
    # sub-strings so instantiation is a simple substitute-and-join.
    class Section(object):
        # Values of these types trigger the ${BEGIN}...${END} repeat logic.
        _LIST_TYPES = [type([]), type(set()), type((0,))]

        def __init__(self, TemplateSection, PlaceHolderList):
            self._Template = TemplateSection
            self._PlaceHolderList = []

            # Split the section into sub-sections according to the position of placeholders
            if PlaceHolderList:
                self._SubSectionList = []
                SubSectionStart = 0
                #
                # The placeholders passed in must be in the format of
                #
                #   PlaceHolderName, PlaceHolderStartPoint, PlaceHolderEndPoint
                #
                for PlaceHolder,Start,End in PlaceHolderList:
                    # Literal text before the placeholder, then the "${name}"
                    # text itself as its own sub-section.
                    self._SubSectionList.append(TemplateSection[SubSectionStart:Start])
                    self._SubSectionList.append(TemplateSection[Start:End])
                    self._PlaceHolderList.append(PlaceHolder)
                    SubSectionStart = End
                if SubSectionStart < len(TemplateSection):
                    self._SubSectionList.append(TemplateSection[SubSectionStart:])
            else:
                self._SubSectionList = [TemplateSection]

        def __str__(self):
            return self._Template + " : " + str(self._PlaceHolderList)

        ## Replace the placeholders in this section with their values
        #
        # List/tuple/set values repeat the whole section element-wise; all
        # list-valued placeholders in one section must have the same length.
        def Instantiate(self, PlaceHolderValues):
            RepeatTime = -1
            RepeatPlaceHolders = {}
            NonRepeatPlaceHolders = {}

            for PlaceHolder in self._PlaceHolderList:
                if PlaceHolder not in PlaceHolderValues:
                    continue
                Value = PlaceHolderValues[PlaceHolder]
                if type(Value) in self._LIST_TYPES:
                    if RepeatTime < 0:
                        RepeatTime = len(Value)
                    elif RepeatTime != len(Value):
                        EdkLogger.error(
                                    "TemplateString",
                                    PARAMETER_INVALID,
                                    "${%s} has different repeat time from others!" % PlaceHolder,
                                    ExtraData=str(self._Template)
                                    )
                    RepeatPlaceHolders["${%s}" % PlaceHolder] = Value
                else:
                    NonRepeatPlaceHolders["${%s}" % PlaceHolder] = Value

            # First pass: substitute scalar placeholders.  The dict keys are
            # the literal "${name}" sub-section strings built above.
            if NonRepeatPlaceHolders:
                StringList = []
                for S in self._SubSectionList:
                    if S not in NonRepeatPlaceHolders:
                        StringList.append(S)
                    else:
                        StringList.append(str(NonRepeatPlaceHolders[S]))
            else:
                StringList = self._SubSectionList

            # Second pass: expand the (partially substituted) section once
            # per repeat index.
            if RepeatPlaceHolders:
                TempStringList = []
                for Index in range(RepeatTime):
                    for S in StringList:
                        if S not in RepeatPlaceHolders:
                            TempStringList.append(S)
                        else:
                            TempStringList.append(str(RepeatPlaceHolders[S][Index]))
                StringList = TempStringList

            return "".join(StringList)

    ## Constructor
    #
    # @param Template The template text to parse (may be None or empty)
    def __init__(self, Template=None):
        self.String = ''
        self._Template = Template
        self._TemplateSectionList = self._Parse(Template)

    ## str() operator
    #
    # @retval string The string replaced
    #
    def __str__(self):
        return self.String

    ## Split the template string into fragments per the ${BEGIN} and ${END} flags
    #
    # @retval list A list of TemplateString.Section objects
    #
    def _Parse(self, Template):
        SectionStart = 0
        SearchFrom = 0
        MatchEnd = 0
        PlaceHolderList = []
        TemplateSectionList = []
        while Template:
            MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
            if not MatchObj:
                if MatchEnd < len(Template):
                    # Trailing literal text after the last placeholder
                    TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
                    TemplateSectionList.append(TemplateSection)
                break

            MatchString = MatchObj.group(1)
            MatchStart = MatchObj.start()
            MatchEnd = MatchObj.end()

            if MatchString == self._REPEAT_START_FLAG:
                # ${BEGIN}: close the current (non-repeated) section
                if MatchStart > SectionStart:
                    TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
                    TemplateSectionList.append(TemplateSection)
                SectionStart = MatchEnd
                PlaceHolderList = []
            elif MatchString == self._REPEAT_END_FLAG:
                # ${END}: close the repeated section
                TemplateSection = TemplateString.Section(Template[SectionStart:MatchStart], PlaceHolderList)
                TemplateSectionList.append(TemplateSection)
                SectionStart = MatchEnd
                PlaceHolderList = []
            else:
                # Ordinary placeholder: record its span relative to the
                # start of the current section.
                PlaceHolderList.append((MatchString, MatchStart - SectionStart, MatchEnd - SectionStart))
            SearchFrom = MatchEnd
        return TemplateSectionList

    ## Replace the string template with dictionary of placeholders and append it to previous one
    #
    # @param AppendString The string template to append
    # @param Dictionary   The placeholder dictionaries
    #
    def Append(self, AppendString, Dictionary=None):
        if Dictionary:
            SectionList = self._Parse(AppendString)
            self.String += "".join([S.Instantiate(Dictionary) for S in SectionList])
        else:
            self.String += AppendString

    ## Replace the string template with dictionary of placeholders
    #
    # @param Dictionary The placeholder dictionaries
    #
    # @retval str The string replaced with placeholder values
    #
    def Replace(self, Dictionary=None):
        return "".join([S.Instantiate(Dictionary) for S in self._TemplateSectionList])
+
## Progress indicator class
#
# Prints progress characters on the console from a background thread so a
# long-running foreground task can show that it is alive.
#
class Progressor:
    # Shared stop event for all instances (also avoids dead loops)
    _StopFlag = None
    # The single background printer thread for the whole process
    _ProgressThread = None
    # How often (seconds) the thread polls the stop flag
    _CheckInterval = 0.25

    ## Constructor
    #
    # @param OpenMessage  The string printed before progress characters
    # @param CloseMessage The string printed after progress characters
    # @param ProgressChar The character used to indicate the progress
    # @param Interval     The interval in seconds between two progress characters
    #
    def __init__(self, OpenMessage="", CloseMessage="", ProgressChar='.', Interval=1.0):
        self.PromptMessage = OpenMessage
        self.CodaMessage = CloseMessage
        self.ProgressChar = ProgressChar
        self.Interval = Interval
        if Progressor._StopFlag is None:
            Progressor._StopFlag = threading.Event()

    ## Start to print progress characters
    #
    # @param OpenMessage The string printed before progress characters
    #
    def Start(self, OpenMessage=None):
        if OpenMessage is not None:
            self.PromptMessage = OpenMessage
        Progressor._StopFlag.clear()
        if Progressor._ProgressThread is None:
            Progressor._ProgressThread = threading.Thread(target=self._ProgressThreadEntry)
            Progressor._ProgressThread.daemon = False
            Progressor._ProgressThread.start()

    ## Stop printing progress characters
    #
    # @param CloseMessage The string printed after progress characters
    #
    def Stop(self, CloseMessage=None):
        # Temporarily override the coda for this stop only
        SavedCoda = self.CodaMessage
        if CloseMessage is not None:
            self.CodaMessage = CloseMessage
        self.Abort()
        self.CodaMessage = SavedCoda

    ## Thread entry method: one ProgressChar per Interval until stopped
    def _ProgressThreadEntry(self):
        sys.stdout.write(self.PromptMessage + " ")
        sys.stdout.flush()
        TimeUp = 0.0
        while not Progressor._StopFlag.is_set():
            if TimeUp <= 0.0:
                sys.stdout.write(self.ProgressChar)
                sys.stdout.flush()
                TimeUp = self.Interval
            # Poll faster than Interval so Stop() is honored promptly
            time.sleep(self._CheckInterval)
            TimeUp -= self._CheckInterval
        sys.stdout.write(" " + self.CodaMessage + "\n")
        sys.stdout.flush()

    ## Abort the progress display and wait for the printer thread to exit
    @staticmethod
    def Abort():
        if Progressor._StopFlag is not None:
            Progressor._StopFlag.set()
        if Progressor._ProgressThread is not None:
            Progressor._ProgressThread.join()
            Progressor._ProgressThread = None
+
## A dict which can access its keys and/or values orderly
#
# The class implements a new kind of dict which its keys or values can be
# accessed in the order they are added into the dict. It guarantees the order
# by making use of an internal list to keep a copy of keys.
#
class sdict(IterableUserDict):
    ## Constructor
    def __init__(self):
        IterableUserDict.__init__(self)
        self._key_list = []     # keys in insertion order

    ## [] operator
    def __setitem__(self, key, value):
        if key not in self._key_list:
            self._key_list.append(key)
        IterableUserDict.__setitem__(self, key, value)

    ## del operator
    def __delitem__(self, key):
        self._key_list.remove(key)
        IterableUserDict.__delitem__(self, key)

    ## used in "for k in dict" loop to ensure the correct order
    def __iter__(self):
        return self.iterkeys()

    ## len() support
    def __len__(self):
        return len(self._key_list)

    ## "in" test support
    def __contains__(self, key):
        return key in self._key_list

    ## indexof support
    def index(self, key):
        return self._key_list.index(key)

    ## insert newkey:newvalue immediately BEFORE or AFTER an existing key
    def insert(self, key, newkey, newvalue, order):
        index = self._key_list.index(key)
        if order == 'BEFORE':
            self._key_list.insert(index, newkey)
            IterableUserDict.__setitem__(self, newkey, newvalue)
        elif order == 'AFTER':
            self._key_list.insert(index + 1, newkey)
            IterableUserDict.__setitem__(self, newkey, newvalue)

    ## append another sdict's entries, keeping first-seen key order
    def append(self, sdict):
        for key in sdict:
            if key not in self._key_list:
                self._key_list.append(key)
            IterableUserDict.__setitem__(self, key, sdict[key])

    def has_key(self, key):
        return key in self._key_list

    ## Empty the dict
    def clear(self):
        self._key_list = []
        IterableUserDict.clear(self)

    ## Return a copy of keys
    def keys(self):
        keys = []
        for key in self._key_list:
            keys.append(key)
        return keys

    ## Return a copy of values
    def values(self):
        values = []
        for key in self._key_list:
            values.append(self[key])
        return values

    ## Return a copy of (key, value) list
    def items(self):
        items = []
        for key in self._key_list:
            items.append((key, self[key]))
        return items

    ## Iteration support
    def iteritems(self):
        return iter(self.items())

    ## Keys interation support
    def iterkeys(self):
        return iter(self.keys())

    ## Values interation support
    def itervalues(self):
        return iter(self.values())

    ## Return value related to a key, and remove the (key, value) from the dict
    #
    # @param key The key to remove
    # @param dv  Optional single default value returned when key is absent
    def pop(self, key, *dv):
        value = None
        if key in self._key_list:
            value = self[key]
            self.__delitem__(key)
        elif len(dv) != 0 :
            # BUGFIX: this read "kv[0]" (an undefined name), so supplying a
            # default for a missing key raised NameError instead of
            # returning the default.
            value = dv[0]
        return value

    ## Return (key, value) pair, and remove the (key, value) from the dict
    def popitem(self):
        key = self._key_list[-1]
        value = self[key]
        self.__delitem__(key)
        return key, value

    ## Merge entries from another mapping and/or keyword arguments
    def update(self, dict=None, **kwargs):
        if dict is not None:
            for k, v in dict.items():
                self[k] = v
        if len(kwargs):
            for k, v in kwargs.items():
                self[k] = v
+
## Dictionary with restricted keys
#
# Only keys supplied at construction time may ever be assigned; all of them
# are pre-initialized to "".  Deleting or popping entries is forbidden and
# reported through EdkLogger.
#
class rdict(dict):
    ## Constructor
    def __init__(self, KeyList):
        for Key in KeyList:
            dict.__setitem__(self, Key, "")

    ## []= operator: reject keys outside the initial key set
    def __setitem__(self, key, value):
        if key not in self:
            EdkLogger.error("RestrictedDict", ATTRIBUTE_SET_FAILURE, "Key [%s] is not allowed" % key,
                            ExtraData=", ".join(dict.keys(self)))
        dict.__setitem__(self, key, value)

    ## =[] operator: unknown keys read as the empty string
    def __getitem__(self, key):
        if key in self:
            return dict.__getitem__(self, key)
        return ""

    ## del operator is not supported
    def __delitem__(self, key):
        EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="del")

    ## Reset every value to the empty string (keys are kept)
    def clear(self):
        for Key in self:
            self.__setitem__(Key, "")

    ## pop is not supported
    def pop(self, key, *dv):
        EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="pop")

    ## popitem is not supported
    def popitem(self):
        EdkLogger.error("RestrictedDict", ATTRIBUTE_ACCESS_DENIED, ExtraData="popitem")
+
## Dictionary using prioritized list as key
#
# A nested dictionary of fixed depth (_Level_).  A key is a list/tuple of
# per-level keys; missing levels and the "wildcard" spellings (COMMON,
# DEFAULT, ALL, *, PLATFORM) are normalized to the internal COMMON wildcard.
# Lookup returns either a single best-match value (_Single_ mode) or the
# list of all matching values (greedy mode).
#
class tdict:
    _ListType = type([])
    _TupleType = type(())
    _Wildcard = 'COMMON'
    _ValidWildcardList = ['COMMON', 'DEFAULT', 'ALL', '*', 'PLATFORM']

    def __init__(self, _Single_=False, _Level_=2):
        self._Level_ = _Level_
        self.data = {}      # per-level storage: key -> value or sub-tdict
        self._Single_ = _Single_

    # =[] operator
    def __getitem__(self, key):
        KeyType = type(key)
        RestKeys = None
        if KeyType == self._ListType or KeyType == self._TupleType:
            FirstKey = key[0]
            if len(key) > 1:
                RestKeys = key[1:]
            elif self._Level_ > 1:
                # Key chain shorter than the depth: pad with wildcards
                RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
        else:
            FirstKey = key
            if self._Level_ > 1:
                RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]

        # None or any wildcard spelling matches everything at this level
        if FirstKey == None or str(FirstKey).upper() in self._ValidWildcardList:
            FirstKey = self._Wildcard

        if self._Single_:
            return self._GetSingleValue(FirstKey, RestKeys)
        else:
            return self._GetAllValues(FirstKey, RestKeys)

    ## Return the first value satisfying the key chain, or None
    def _GetSingleValue(self, FirstKey, RestKeys):
        Value = None
        #print "%s-%s" % (FirstKey, self._Level_) ,
        if self._Level_ > 1:
            if FirstKey == self._Wildcard:
                # Prefer the wildcard entry, then fall back to any entry
                # whose sub-dict can satisfy the rest of the key chain.
                if FirstKey in self.data:
                    Value = self.data[FirstKey][RestKeys]
                if Value == None:
                    for Key in self.data:
                        Value = self.data[Key][RestKeys]
                        if Value != None: break
            else:
                # Exact key first, wildcard entry as the fallback
                if FirstKey in self.data:
                    Value = self.data[FirstKey][RestKeys]
                if Value == None and self._Wildcard in self.data:
                    #print "Value=None"
                    Value = self.data[self._Wildcard][RestKeys]
        else:
            # Leaf level: same strategy, but entries are plain values
            if FirstKey == self._Wildcard:
                if FirstKey in self.data:
                    Value = self.data[FirstKey]
                if Value == None:
                    for Key in self.data:
                        Value = self.data[Key]
                        if Value != None: break
            else:
                if FirstKey in self.data:
                    Value = self.data[FirstKey]
                elif self._Wildcard in self.data:
                    Value = self.data[self._Wildcard]
        return Value

    ## Return the list of all values matching the key chain
    def _GetAllValues(self, FirstKey, RestKeys):
        Value = []
        if self._Level_ > 1:
            if FirstKey == self._Wildcard:
                for Key in self.data:
                    Value += self.data[Key][RestKeys]
            else:
                if FirstKey in self.data:
                    Value += self.data[FirstKey][RestKeys]
                if self._Wildcard in self.data:
                    Value += self.data[self._Wildcard][RestKeys]
        else:
            if FirstKey == self._Wildcard:
                for Key in self.data:
                    Value.append(self.data[Key])
            else:
                if FirstKey in self.data:
                    Value.append(self.data[FirstKey])
                if self._Wildcard in self.data:
                    Value.append(self.data[self._Wildcard])
        return Value

    ## []= operator
    def __setitem__(self, key, value):
        KeyType = type(key)
        RestKeys = None
        if KeyType == self._ListType or KeyType == self._TupleType:
            FirstKey = key[0]
            if len(key) > 1:
                RestKeys = key[1:]
            else:
                RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]
        else:
            FirstKey = key
            if self._Level_ > 1:
                RestKeys = [self._Wildcard for i in range(0, self._Level_-1)]

        if FirstKey in self._ValidWildcardList:
            FirstKey = self._Wildcard

        if FirstKey not in self.data and self._Level_ > 0:
            # Grow the nested structure on demand
            self.data[FirstKey] = tdict(self._Single_, self._Level_ - 1)

        if self._Level_ > 1:
            self.data[FirstKey][RestKeys] = value
        else:
            self.data[FirstKey] = value

    ## Switch the whole tree to "return all matches" mode
    def SetGreedyMode(self):
        self._Single_ = False
        if self._Level_ > 1:
            for Key in self.data:
                self.data[Key].SetGreedyMode()

    ## Switch the whole tree to "return first match" mode
    def SetSingleMode(self):
        self._Single_ = True
        if self._Level_ > 1:
            for Key in self.data:
                self.data[Key].SetSingleMode()
+
## Boolean chain list
#
# List whose Result property is the AND of all stored items.  Items
# assigned through [] that do not compare equal to True or False are
# normalized: anything == 0 becomes False, everything else True.
class Blist(UserList):
    def __init__(self, initlist=None):
        UserList.__init__(self, initlist)

    def __setitem__(self, i, item):
        # Values equal to True/False (including 0 and 1) pass through
        # unchanged; anything else is forced to a boolean.
        if item not in (True, False):
            item = False if item == 0 else True
        self.data[i] = item

    def _GetResult(self):
        Verdict = True
        for Flag in self.data:
            Verdict &= Flag
        return Verdict

    Result = property(_GetResult)
+
def ParseConsoleLog(Filename):
    """Extract EFI image base names from a build console log.

    For every line of Filename that mentions '.efi', the token between
    the last space and the last '.efi' is written, one per line, to a
    new sibling file named '<Filename>.New'.

    @param Filename:  Path of the console log to scan
    """
    Opr = open(os.path.normpath(Filename), 'r')
    try:
        Opw = open(os.path.normpath(Filename + '.New'), 'w+')
        try:
            # Iterate the file object directly instead of readlines():
            # same lines, without loading the whole log into memory.
            for Line in Opr:
                if Line.find('.efi') > -1:
                    Line = Line[Line.rfind(' ') : Line.rfind('.efi')].strip()
                    Opw.write('%s\n' % Line)
        finally:
            # try/finally guarantees the handles are closed even when an
            # I/O error occurs mid-way (the original leaked them then).
            Opw.close()
    finally:
        Opr.close()
+
## check format of PCD value against its datum type
#
# For PCD value setting
#
def CheckPcdDatum(Type, Value):
    """Validate a PCD value string against its datum type.

    @param Type:   Datum type string, e.g. "VOID*", "BOOLEAN", "UINT32"
    @param Value:  The value exactly as written in the meta file

    @retval (True, "")            when the value is well formed
    @retval (False, ErrorString)  when it is not
    """
    if Type == "VOID*":
        # Accept "..." or L"..." quoted strings, or a {...} byte array.
        # BUGFIX: the original condition was mis-parenthesized so that
        # any value starting with 'L"' was accepted without a closing
        # quote; endswith('"') now applies to both quoted forms.
        if not (((Value.startswith('L"') or Value.startswith('"')) and Value.endswith('"'))
                or (Value.startswith('{') and Value.endswith('}'))
               ):
            return False, "Invalid value [%s] of type [%s]; must be in the form of {...} for array"\
                          ", or \"...\" for string, or L\"...\" for unicode string" % (Value, Type)
    elif Type == 'BOOLEAN':
        if Value not in ['TRUE', 'FALSE']:
            return False, "Invalid value [%s] of type [%s]; must be TRUE or FALSE" % (Value, Type)
    elif type(Value) == type(""):
        try:
            # int(..., 0) honors 0x/octal prefixes like C; on Python 2 it
            # auto-promotes to long, so the explicit long() was redundant.
            # Only ValueError is caught so real bugs are not swallowed.
            int(Value, 0)
        except ValueError:
            return False, "Invalid value [%s] of type [%s];"\
                          " must be a hexadecimal, decimal or octal in C language format."\
                          % (Value, Type)

    return True, ""
+
## Split command line option string to list
#
# subprocess.Popen needs the args to be a sequence. Otherwise there's problem
# in non-windows platform to launch command
#
def SplitOption(OptionString):
    """Split one command-line string into a list of option tokens.

    A new token starts at a '/' or '-' that follows whitespace; quoted
    regions (single or double quotes) are never split.
    """
    Options = []
    PrevChar = " "
    TokenStart = 0
    ActiveQuote = ""
    for Pos, Char in enumerate(OptionString):
        if Char in ('"', "'"):
            # Track entry into / exit from a quoted region.
            if ActiveQuote == Char:
                ActiveQuote = ""
            elif not ActiveQuote:
                ActiveQuote = Char
            continue
        if ActiveQuote:
            # Inside quotes: never treat anything as an option start.
            continue

        if Char in ("/", "-") and PrevChar in (" ", "\t", "\r", "\n"):
            if Pos > TokenStart:
                # Emit the previous token, dropping the one separator
                # character right before this option.
                Options.append(OptionString[TokenStart:Pos - 1])
            TokenStart = Pos
        PrevChar = Char
    Options.append(OptionString[TokenStart:])
    return Options
+
def CommonPath(PathList):
    """Return the longest common leading path of all paths in PathList.

    Comparing only min() and max() of the list is sufficient: any path
    component shared by the two lexicographic extremes is shared by
    every path in between.

    @param PathList:  Non-empty list of path strings

    @retval string  Common prefix joined with os.path.sep ('' if none)
    """
    P1 = min(PathList).split(os.path.sep)
    P2 = max(PathList).split(os.path.sep)
    Common = []
    # zip stops at the shorter path, replacing the Python-2-only
    # xrange index loop with an idiomatic pairwise walk.
    for Part1, Part2 in zip(P1, P2):
        if Part1 != Part2:
            break
        Common.append(Part1)
    return os.path.sep.join(Common)
+
class PathClass(object):
    """One file path split into a workspace Root plus a Root-relative
    File part, with the build attributes (arch, tool chain, target,
    tag, tool code) attached to that file.

    Equality and hashing are based on the full Path string, so
    PathClass objects can be used directly as dictionary keys.
    """
    def __init__(self, File='', Root='', AlterRoot='', Type='', IsBinary=False,
                 Arch='COMMON', ToolChainFamily='', Target='', TagName='', ToolCode=''):
        self.Arch = Arch
        self.File = str(File)
        # An absolute File overrides any supplied root directories.
        if os.path.isabs(self.File):
            self.Root = ''
            self.AlterRoot = ''
        else:
            self.Root = str(Root)
            self.AlterRoot = str(AlterRoot)

        # Remove any '.' and '..' in path
        if self.Root:
            self.Path = os.path.normpath(os.path.join(self.Root, self.File))
            # Shrink Root to the part it actually shares with Path, then
            # recompute File relative to that root.
            self.Root = os.path.normpath(CommonPath([self.Root, self.Path]))
            # eliminate the side-effect of 'C:'
            if self.Root[-1] == ':':
                self.Root += os.path.sep
            # file path should not start with path separator
            if self.Root[-1] == os.path.sep:
                self.File = self.Path[len(self.Root):]
            else:
                self.File = self.Path[len(self.Root)+1:]
        else:
            self.Path = os.path.normpath(self.File)

        # SubDir/Name/BaseName/Ext are all derived from the relative File.
        self.SubDir, self.Name = os.path.split(self.File)
        self.BaseName, self.Ext = os.path.splitext(self.Name)

        if self.Root:
            if self.SubDir:
                self.Dir = os.path.join(self.Root, self.SubDir)
            else:
                self.Dir = self.Root
        else:
            self.Dir = self.SubDir

        # Binaries carry an explicit Type; for sources the lower-cased
        # file extension doubles as the type.
        if IsBinary:
            self.Type = Type
        else:
            self.Type = self.Ext.lower()

        self.IsBinary = IsBinary
        self.Target = Target
        self.TagName = TagName
        self.ToolCode = ToolCode
        self.ToolChainFamily = ToolChainFamily

        # Lazily computed upper-cased key, see _GetFileKey().
        self._Key = None

    ## Convert the object of this class to a string
    #
    # Convert member Path of the class to a string
    #
    # @retval string Formatted String
    #
    def __str__(self):
        return self.Path

    ## Override __eq__ function
    #
    # Check whether PathClass are the same
    #
    # @retval False The two PathClass are different
    # @retval True The two PathClass are the same
    #
    def __eq__(self, Other):
        if type(Other) == type(self):
            return self.Path == Other.Path
        else:
            # Anything else (e.g. a plain string) is compared by its
            # string form.
            return self.Path == str(Other)

    ## Override __hash__ function
    #
    # Use Path as key in hash table
    #
    # @retval string Key for hash table
    #
    def __hash__(self):
        return hash(self.Path)

    def _GetFileKey(self):
        # Case-insensitive key; computed once, cached in _Key.
        if self._Key == None:
            self._Key = self.Path.upper()   # + self.ToolChainFamily + self.TagName + self.ToolCode + self.Target
        return self._Key

    def Validate(self, Type='', CaseSensitive=True):
        """Check that the file exists under Root or AlterRoot and, if
        Type is given, that it matches self.Type; on success the path
        members are rewritten with the real on-disk spelling.

        @retval (0, '') on success, or (ErrorCode, ErrorInfo) where
                ErrorCode is FILE_TYPE_MISMATCH, FILE_NOT_FOUND or
                FILE_CASE_MISMATCH.
        """
        if GlobalData.gCaseInsensitive:
            CaseSensitive = False
        if Type and Type.lower() != self.Type:
            return FILE_TYPE_MISMATCH, '%s (expect %s but got %s)' % (self.File, Type, self.Type)

        # RealPath2 resolves the on-disk spelling of File under the roots.
        RealFile, RealRoot = RealPath2(self.File, self.Root, self.AlterRoot)
        if not RealRoot and not RealFile:
            return FILE_NOT_FOUND, self.File

        ErrorCode = 0
        ErrorInfo = ''
        if RealRoot != self.Root or RealFile != self.File:
            # A spelling difference is only an error when case matters.
            if CaseSensitive and (RealFile != self.File or (RealRoot != self.Root and RealRoot != self.AlterRoot)):
                ErrorCode = FILE_CASE_MISMATCH
                ErrorInfo = self.File + '\n\t' + RealFile + " [in file system]"

            # Re-derive all path members from the real spelling.
            self.SubDir, self.Name = os.path.split(RealFile)
            self.BaseName, self.Ext = os.path.splitext(self.Name)
            if self.SubDir:
                self.Dir = os.path.join(RealRoot, self.SubDir)
            else:
                self.Dir = RealRoot
            self.File = RealFile
            self.Root = RealRoot
            self.Path = os.path.join(RealRoot, RealFile)
        return ErrorCode, ErrorInfo

    Key = property(_GetFileKey)
+
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
# script.
#
if __name__ == '__main__':
    # Pure library module: nothing to do when executed directly.
    pass
+
diff --git a/BaseTools/Source/Python/Common/Parsing.py b/BaseTools/Source/Python/Common/Parsing.py
new file mode 100644
index 0000000000..755f7901b5
--- /dev/null
+++ b/BaseTools/Source/Python/Common/Parsing.py
@@ -0,0 +1,935 @@
+## @file
+# This file is used to define common parsing related functions used in parsing INF/DEC/DSC process
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from String import *
+from CommonDataClass.DataClass import *
+from DataType import *
+
## ParseContent
#
# Parse content of a DSC/INF/DEC file
#
# NOTE(review): this module-level function looks like an abandoned
# extraction of a class method -- it reads LineNo before any assignment
# and references names that are never defined in this scope (self,
# FileID, Filename, CurrentSection, SectionItemList, ArchList,
# ThirdList, IfDefList), so it would raise at runtime if called.
# Kept byte-identical; confirm it is dead code before removing.
#
def ParseContent(Lines, ):
    for Line in Lines:
        LineNo = LineNo + 1
        #
        # Remove comments at tail and remove spaces again
        #
        Line = CleanString(Line)
        if Line == '':
            continue

        #
        # Find a new section tab
        # First insert previous section items
        # And then parse the content of the new section
        #
        if Line.startswith(TAB_SECTION_START) and Line.endswith(TAB_SECTION_END):
            #
            # Insert items data of previous section
            #
            self.InsertSectionItemsIntoDatabase(FileID, Filename, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList)
            #
            # Parse the new section
            #
            SectionItemList = []
            ArchList = []
            ThirdList = []

            LineList = GetSplitValueList(Line[len(TAB_SECTION_START):len(Line) - len(TAB_SECTION_END)], TAB_COMMA_SPLIT)
            for Item in LineList:
                ItemList = GetSplitValueList(Item, TAB_SPLIT)
                CurrentSection = ItemList[0]
                if CurrentSection.upper() not in self.KeyList:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                ItemList.append('')
                ItemList.append('')
                if len(ItemList) > 5:
                    RaiseParserError(Line, CurrentSection, Filename, '', LineNo)
                else:
                    if ItemList[1] != '' and ItemList[1].upper() not in ARCH_LIST_FULL:
                        EdkLogger.error("Parser", PARSER_ERROR, "Invalid Arch definition '%s' found" % ItemList[1], File=Filename, Line=LineNo)
                    ArchList.append(ItemList[1].upper())
                    ThirdList.append(ItemList[2])

            continue

        #
        # Not in any defined section
        #
        if CurrentSection == TAB_UNKNOWN:
            ErrorMsg = "%s is not in any defined section" % Line
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=Filename, Line=LineNo)

        #
        # Add a section item
        #
        SectionItemList.append([Line, LineNo])
        # End of parse
    #End of For
+
+
## ParseDefineMacro
#
# Search whole table to find all defined Macro and replaced them with the real values
#
def ParseDefineMacro2(Table, RecordSets, GlobalMacro):
    """Resolve DEFINE macros for in-memory record sets.

    Macros found in the database table are overridden by GlobalMacro,
    then substituted into the first value of every record in every list
    of RecordSets (in place).
    """
    #
    # Find all DEFINE macros in section [Header] and its section
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
                    where Model = %s
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = dict((Record[0], Record[1]) for Record in Table.Exec(SqlCommand))

    # Global macros take precedence over file-local DEFINEs.
    Macros.update(GlobalMacro)

    # Substitute into the first value of every record.
    for Records in RecordSets.values():
        for Item in Records:
            Item[0] = ReplaceMacro(Item[0], Macros)
+
## ParseDefineMacro
#
# Search whole table to find all defined Macro and replaced them with the real values
#
def ParseDefineMacro(Table, GlobalMacro):
    """Resolve DEFINE macros directly inside the database table.

    NOTE: an UPDATE ... replace() statement would do the substitution in
    SQL, but replace() is missing from the SQLite 3.3.4 bundled with
    Python 2.5, so macro values are collected and substituted in Python.
    """
    #
    # Find all DEFINE macros
    #
    SqlCommand = """select Value1, Value2, BelongsToItem, StartLine, Arch from %s
                    where Model = %s
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    Macros = {}
    for Record in Table.Exec(SqlCommand):
        Macros[Record[0]] = Record[1]

    # Global macros take precedence over the DEFINEs found above.
    Macros.update(GlobalMacro)

    #
    # Rewrite every enabled record whose first value contains $(...)
    #
    SqlCommand = """select ID, Value1 from %s
                    where Model != %s
                    and Value1 like '%%$(%%' and Value1 like '%%)%%'
                    and Enabled > -1""" % (Table.Table, MODEL_META_DATA_DEFINE)
    for FoundRecord in Table.Exec(SqlCommand):
        NewValue = ReplaceMacro(FoundRecord[1], Macros)
        SqlCommand = """update %s set Value1 = '%s'
                        where ID = %s""" % (Table.Table, ConvertToSqlString2(NewValue), FoundRecord[0])
        Table.Exec(SqlCommand)
+
##QueryDefinesItem
#
# Search item of section [Defines] by name, return its values
#
# @param Table: The Table to be executed
# @param Name: The Name of item of section [Defines]
# @param Arch: The Arch of item of section [Defines]
#
# @retval RecordSet: A list of all matched records
#
def QueryDefinesItem(Table, Name, Arch, BelongsToFile):
    # One query template, formatted for the requested arch first and for
    # the COMMON arch as a fallback.
    SqlTemplate = """select Value2 from %s
                 where Model = %s
                 and Value1 = '%s'
                 and Arch = '%s'
                 and BelongsToFile = %s
                 and Enabled > -1"""
    RecordSet = Table.Exec(SqlTemplate % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(Arch), BelongsToFile))
    if len(RecordSet) < 1:
        RecordSet = Table.Exec(SqlTemplate % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Name), ConvertToSqlString2(TAB_ARCH_COMMON.upper()), BelongsToFile))
    if len(RecordSet) < 1:
        return ['']
    # LibraryClass values are returned whole; everything else is split
    # on the default separator and flattened.
    RetVal = []
    for Record in RecordSet:
        if Name == TAB_INF_DEFINES_LIBRARY_CLASS:
            RetVal.append(Record[0])
        else:
            RetVal.extend(GetSplitValueList(Record[0]))
    return RetVal
+
##QueryDefinesItem2
#
# Fetch all items of section [Defines] for one file and arch
#
# @param Table: The Table to be executed
# @param Arch: The Arch of the items of section [Defines]
#
# @retval RecordSet: A list of all matched records
#
def QueryDefinesItem2(Table, Arch, BelongsToFile):
    # Query the requested arch; fall back to the COMMON arch when it
    # yields nothing.
    SqlTemplate = """select Value1, Value2, StartLine from %s
                 where Model = %s
                 and Arch = '%s'
                 and BelongsToFile = %s
                 and Enabled > -1"""
    RecordSet = Table.Exec(SqlTemplate % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(Arch), BelongsToFile))
    if len(RecordSet) < 1:
        RecordSet = Table.Exec(SqlTemplate % (Table.Table, MODEL_META_DATA_HEADER, ConvertToSqlString2(TAB_ARCH_COMMON), BelongsToFile))

    return RecordSet
+
##QueryDscItem
#
# Search all dsc items of a specific section (Model) under one parent
# item of one file
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryDscItem(Table, Model, BelongsToItem, BelongsToFile):
    return Table.Exec("""select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and BelongsToFile = %s
                    and Enabled > -1""" % (Table.Table, Model, BelongsToItem, BelongsToFile))
+
##QueryDecItem
#
# Search all dec items of a specific section (Model) under one parent item
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryDecItem(Table, Model, BelongsToItem):
    return Table.Exec("""select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and Enabled > -1""" % (Table.Table, Model, BelongsToItem))
+
##QueryInfItem
#
# Search all inf items of a specific section (Model) under one parent item
#
# @param Table: The Table to be executed
# @param Model: The type of section
#
# @retval RecordSet: A list of all matched records
#
def QueryInfItem(Table, Model, BelongsToItem):
    return Table.Exec("""select Value1, Arch, StartLine, ID, Value2 from %s
                    where Model = %s
                    and BelongsToItem = %s
                    and Enabled > -1""" % (Table.Table, Model, BelongsToItem))
+
## GetBuildOption
#
# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
# Return (Family, ToolFlag, Flag)
#
# @param String: String with BuildOption statement
# @param File: The file which defines build option, used in error report
#
# @retval truple() A truple structure as (Family, ToolChain, Flag)
#
def GetBuildOption(String, File, LineNo = -1):
    if String.find(TAB_EQUAL_SPLIT) < 0:
        RaiseParserError(String, 'BuildOptions', File, '[<Family>:]<ToolFlag>=Flag', LineNo)
    Family = ''
    # Split only on the first '=' so the flag itself may contain '='.
    Key, Value = GetSplitValueList(String, TAB_EQUAL_SPLIT, MaxSplit = 1)
    Colon = Key.find(':')
    if Colon > -1:
        Family = Key[:Colon].strip()
        ToolChain = Key[Colon + 1:].strip()
    else:
        ToolChain = Key.strip()
    Flag = Value.strip()

    return (Family, ToolChain, Flag)
+
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for error report
#
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
#
def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    Parts = GetSplitValueList(Item[0])
    SupMod = SUP_MODULE_LIST_STRING
    if len(Parts) != 2:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>|<LibraryInstance>')
    else:
        # The instance must be an existing .inf file under the workspace.
        CheckFileType(Parts[1], '.Inf', ContainerFile, 'library class instance', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Parts[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
        if Item[1] != '':
            SupMod = Item[1]

    return (Parts[0], Parts[1], SupMod)
+
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]
#
# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
# @param ContainerFile: The file which describes the library class, used for error report
#
# @retval (LibraryClassKeyWord, LibraryInstance, [SUP_MODULE_LIST]) Formatted Library Item
#
def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo = -1):
    # Pad with two empty fields so the optional parts always exist.
    Parts = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
    SupMod = SUP_MODULE_LIST_STRING

    if len(Parts) > 5:
        RaiseParserError(Item[0], 'LibraryClasses', ContainerFile, '<LibraryClassKeyWord>[|<LibraryInstance>][|<TokenSpaceGuidCName>.<PcdCName>]')
    else:
        CheckFileType(Parts[1], '.Inf', ContainerFile, 'LibraryClasses', Item[0], LineNo)
        CheckFileExist(WorkspaceDir, Parts[1], ContainerFile, 'LibraryClasses', Item[0], LineNo)
        if Parts[2] != '':
            CheckPcdTokenInfo(Parts[2], 'LibraryClasses', ContainerFile, LineNo)
        if Item[1] != '':
            SupMod = Item[1]

    return (Parts[0], Parts[1], Parts[2], SupMod)
+
## CheckPcdTokenInfo
#
# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
#
# @param TokenInfoString: String to be checked
# @param Section: Used for error report
# @param File: Used for error report
#
# @retval True PcdTokenInfo is in correct format
#
def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo = -1):
    Format = '<TokenSpaceGuidCName>.<PcdCName>'
    # Exactly one '.' separator is required; anything else (including an
    # empty or missing string) is reported as a parser error.
    if TokenInfoString not in ('', None):
        if len(GetSplitValueList(TokenInfoString, TAB_SPLIT)) == 2:
            return True

    RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
+
## Get Pcd
#
# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
#
def GetPcd(Item, Type, ContainerFile, LineNo = -1):
    TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
    # Pad with two empty fields so the optional parts always exist.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)

    if len(Fields) < 4 or len(Fields) > 6:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<Type>|<MaximumDatumSize>]', LineNo)
    else:
        Value, MaximumDatumSize, Token = Fields[1], Fields[2], Fields[3]
        if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
            (TokenGuid, TokenName) = GetSplitValueList(Fields[0], TAB_SPLIT)

    return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
+
## Get FeatureFlagPcd
#
# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], Type)
#
def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo = -1):
    TokenGuid, TokenName, Value = '', '', ''
    Fields = GetSplitValueList(Item)
    if len(Fields) != 2:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', LineNo)
    else:
        Value = Fields[1]
        if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
            (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, Value, Type)
+
## Get DynamicDefaultPcd
#
# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
#
# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], Type)
#
def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo = -1):
    TokenGuid, TokenName, Value, DatumTyp, MaxDatumSize = '', '', '', '', ''
    # Pad with two empty fields so the optional parts always exist.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if len(Fields) < 4 or len(Fields) > 8:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>[|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
    else:
        Value, DatumTyp, MaxDatumSize = Fields[1], Fields[2], Fields[3]
        if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
            (TokenGuid, TokenName) = GetSplitValueList(Fields[0], TAB_SPLIT)

    return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
+
## Get DynamicHiiPcd
#
# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
#
# @param Item: String as above
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], List[3], List[4], List[5], Type)
#
def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo = -1):
    TokenGuid, TokenName, V1, V2, V3, V4, V5 = '', '', '', '', '', '', ''
    # Pad with two empty fields so the optional parts always exist.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT * 2)
    if len(Fields) < 6 or len(Fields) > 8:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|<VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]', LineNo)
    else:
        V1, V2, V3, V4, V5 = Fields[1], Fields[2], Fields[3], Fields[4], Fields[5]
        if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
            (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, V1, V2, V3, V4, V5, Type)
+
## Get DynamicVpdPcd
#
# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]
#
# @param Item: String as above
# @param ContainerFile: The file which describes the pcd, used for error report
#
# @retval (TokenInfo[1], TokenInfo[0], List[1], List[2], Type)
#
def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo = -1):
    TokenGuid, TokenName, V1, V2 = '', '', '', ''
    # Pad with one empty field so the optional part always exists.
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    if len(Fields) < 3 or len(Fields) > 4:
        RaiseParserError(Item, 'Pcds' + Type, ContainerFile, '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>[|<MaximumDatumSize>]', LineNo)
    else:
        V1, V2 = Fields[1], Fields[2]
        if CheckPcdTokenInfo(Fields[0], 'Pcds' + Type, ContainerFile, LineNo):
            (TokenGuid, TokenName) = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)

    return (TokenName, TokenGuid, V1, V2, Type)
+
## GetComponent
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
# @param KeyValues: To store data after parsing
#
# @retval True Get component successfully
#
def GetComponent(Lines, KeyValues):
    """Flag-machine parser for [Components] blocks.

    Lines is a list of [LineText, ...] records; each component is either
    a single line, or a '{'...'}' block containing <LibraryClasses>,
    <BuildOptions> and <Pcds*> sub-sections.  Results are appended to
    KeyValues in place.
    """
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None
    LibraryClassItem = []
    BuildOption = []
    Pcd = []

    for Line in Lines:
        Line = Line[0]

        #
        # Ignore !include statement
        #
        if Line.upper().find(TAB_INCLUDE.upper() + ' ') > -1 or Line.upper().find(TAB_DEFINE + ' ') > -1:
            continue

        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], DataType.TAB_COMMENT_SPLIT)

        #
        # Parse a block content
        #
        if findBlock:
            # Each sub-section tag switches exactly one of the flags on.
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail
                #
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue

        if findBlock:
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
        else:
            # Single-line component (no '{' block): recorded with empty
            # library/build-option/pcd lists.
            KeyValues.append([ListItem, [], [], []])

    return True
+
## GetExec
#
# Parse a string with format "InfFilename [EXEC = ExecFilename]"
# Return (InfFilename, ExecFilename)
#
# @param String: String with EXEC statement
#
# @retval truple() A pair as (InfFilename, ExecFilename)
#
def GetExec(String):
    # Everything after the literal 'EXEC' keyword (including any '='
    # that follows it) is returned as ExecFilename; without the keyword
    # the whole string is the INF name and ExecFilename stays empty.
    Marker = String.find('EXEC')
    if Marker > -1:
        InfFilename = String[:Marker].strip()
        ExecFilename = String[Marker + len('EXEC'):].strip()
    else:
        InfFilename = String.strip()
        ExecFilename = ''

    return (InfFilename, ExecFilename)
+
## GetComponents
#
# Parse block of the components defined in dsc file
# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
#
# @param Lines: The content to be parsed
# @param Key: Reserved
# @param KeyValues: To store data after parsing
# @param CommentCharacter: Comment char, used to ignore comment content
#
# @retval True Get component successfully
#
def GetComponents(Lines, Key, KeyValues, CommentCharacter):
    """Flag-machine parser for a [Components] section given as one raw
    string (Lines); same output shape as GetComponent(), but cleaning
    each line with CommentCharacter first."""
    # Drop the section header if it is still attached to the content.
    if Lines.find(DataType.TAB_SECTION_END) > -1:
        Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
    ListItem = None
    LibraryClassItem = []
    BuildOption = []
    Pcd = []

    LineList = Lines.split('\n')
    for Line in LineList:
        Line = CleanString(Line, CommentCharacter)
        if Line == None or Line == '':
            continue

        if findBlock == False:
            ListItem = Line
            #
            # find '{' at line tail
            #
            if Line.endswith('{'):
                findBlock = True
                ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)

        #
        # Parse a block content
        #
        if findBlock:
            # Each sub-section tag switches exactly one of the flags on.
            if Line.find('<LibraryClasses>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (True, False, False, False, False, False, False)
                continue
            if Line.find('<BuildOptions>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, True, False, False, False, False, False)
                continue
            if Line.find('<PcdsFeatureFlag>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, True, False, False, False, False)
                continue
            if Line.find('<PcdsPatchableInModule>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, True, False, False, False)
                continue
            if Line.find('<PcdsFixedAtBuild>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, True, False, False)
                continue
            if Line.find('<PcdsDynamic>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, True, False)
                continue
            if Line.find('<PcdsDynamicEx>') != -1:
                (findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, True)
                continue
            if Line.endswith('}'):
                #
                # find '}' at line tail
                #
                KeyValues.append([ListItem, LibraryClassItem, BuildOption, Pcd])
                (findBlock, findLibraryClass, findBuildOption, findPcdsFeatureFlag, findPcdsPatchableInModule, findPcdsFixedAtBuild, findPcdsDynamic, findPcdsDynamicEx) = (False, False, False, False, False, False, False, False)
                LibraryClassItem, BuildOption, Pcd = [], [], []
                continue

        if findBlock:
            if findLibraryClass:
                LibraryClassItem.append(Line)
            elif findBuildOption:
                BuildOption.append(Line)
            elif findPcdsFeatureFlag:
                Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
            elif findPcdsPatchableInModule:
                Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
            elif findPcdsFixedAtBuild:
                Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
            elif findPcdsDynamic:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
            elif findPcdsDynamicEx:
                Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
        else:
            # Single-line component (no '{' block): recorded with empty
            # library/build-option/pcd lists.
            KeyValues.append([ListItem, [], [], []])

    return True
+
+## Get Source
+#
+# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3], List[4])
+#
def GetSource(Item, ContainerFile, FileRelativePath, LineNo = -1):
    """Parse one [Sources] entry of an INF file.

    Entry format: <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
    Returns the five fields as a tuple; missing optional fields are ''.
    """
    # Pad with four separators so the optional fields always exist
    Padded = Item + DataType.TAB_VALUE_SPLIT * 4
    Fields = GetSplitValueList(Padded)
    # A well-formed entry yields between 5 and 9 fields after padding
    if not 5 <= len(Fields) <= 9:
        RaiseParserError(Item, 'Sources', ContainerFile, '<Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]', LineNo)
    Fields[0] = NormPath(Fields[0])
    CheckFileExist(FileRelativePath, Fields[0], ContainerFile, 'Sources', Item, LineNo)
    if Fields[4] != '':
        CheckPcdTokenInfo(Fields[4], 'Sources', ContainerFile, LineNo)

    return (Fields[0], Fields[1], Fields[2], Fields[3], Fields[4])
+
+## Get Binary
+#
+# Get Binary of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+#
+# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>[|<PcdFeatureFlag>]]]]
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1], List[2], List[3])
+#
def GetBinary(Item, ContainerFile, FileRelativePath, LineNo = -1):
    """Parse one [Binaries] entry of an INF file.

    Entry format: <FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]
    Returns the first four fields as a tuple.
    """
    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
    if len(Fields) not in (4, 5):
        RaiseParserError(Item, 'Binaries', ContainerFile, "<FileType>|<Filename>|<Target>[|<TokenSpaceGuidCName>.<PcdCName>]", LineNo)
    elif Fields[3] != '':
        # Optional Pcd feature flag present: validate its token info
        CheckPcdTokenInfo(Fields[3], 'Binaries', ContainerFile, LineNo)

    return (Fields[0], Fields[1], Fields[2], Fields[3])
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetGuidsProtocolsPpisOfInf(Item, Type, ContainerFile, LineNo = -1):
    """Parse an INF Guid/Protocol/Ppi entry: <GuidCName>[|<PcdFeatureFlag>].

    Returns (CName, FeatureFlag); FeatureFlag is '' when absent.
    """
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    FeatureFlag = Fields[1]
    if FeatureFlag != '':
        CheckPcdTokenInfo(FeatureFlag, Type, ContainerFile, LineNo)

    return (Fields[0], FeatureFlag)
+
+## Get Guids/Protocols/Ppis
+#
+# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
+#
+# @param Item: String as <GuidCName>=<GuidValue>
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo = -1):
    """Parse a DEC Guid/Protocol/Ppi entry: <CName>=<GuidValue>.

    Returns (CName, GuidValue); raises a parser error on any other shape.
    """
    Pair = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
    if len(Pair) != 2:
        RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', LineNo)

    return (Pair[0], Pair[1])
+
+## GetPackage
+#
+# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
+#
+# @param Item: String as <PackagePath>[|<PcdFeatureFlag>]
+# @param Type: Type of parsing string
+# @param ContainerFile: The file which describes the library class, used for error report
+#
+# @retval (List[0], List[1])
+#
def GetPackage(Item, ContainerFile, FileRelativePath, LineNo = -1):
    """Parse a [Packages] entry: <PackagePath>[|<PcdFeatureFlag>].

    Verifies the referenced .dec file exists and returns (PackagePath, FeatureFlag).
    """
    Fields = GetSplitValueList(Item + TAB_VALUE_SPLIT)
    PackagePath, FeatureFlag = Fields[0], Fields[1]
    CheckFileType(PackagePath, '.Dec', ContainerFile, 'package', PackagePath, LineNo)
    CheckFileExist(FileRelativePath, PackagePath, ContainerFile, 'Packages', PackagePath, LineNo)

    if FeatureFlag != '':
        CheckPcdTokenInfo(FeatureFlag, 'Packages', ContainerFile, LineNo)

    return (PackagePath, FeatureFlag)
+
+## Get Pcd Values of Inf
+#
+# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
+#
+# @param Item: The string describes pcd
+# @param Type: The type of Pcd
+# @param File: The file which describes the pcd, used for error report
+#
+# @retval (TokenSpcCName, TokenCName, Value, ItemType) Formatted Pcd Item
+#
def GetPcdOfInf(Item, Type, File, LineNo):
    """Parse an INF Pcd entry: <TokenSpaceGuidCName>.<PcdCName>[|<Value>].

    Returns (TokenGuid, TokenName, Value, Type); the fields stay '' when the
    entry is malformed and the parser error does not raise.
    """
    Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
    TokenGuid, TokenName, Value = '', '', ''
    # Section type -> INF item type used only in the error message
    TypeMap = {
        TAB_PCDS_FIXED_AT_BUILD: TAB_INF_FIXED_PCD,
        TAB_PCDS_PATCHABLE_IN_MODULE: TAB_INF_PATCH_PCD,
        TAB_PCDS_FEATURE_FLAG: TAB_INF_FEATURE_PCD,
        TAB_PCDS_DYNAMIC_EX: TAB_INF_PCD_EX,
        TAB_PCDS_DYNAMIC: TAB_INF_PCD,
    }
    InfType = TypeMap.get(Type, '')

    Fields = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
    if len(Fields) < 2 or len(Fields) > 3:
        RaiseParserError(Item, InfType, File, Format, LineNo)
    else:
        Value = Fields[1]
        TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
        if len(TokenInfo) == 2:
            TokenGuid = TokenInfo[0]
            TokenName = TokenInfo[1]
        else:
            RaiseParserError(Item, InfType, File, Format, LineNo)

    return (TokenGuid, TokenName, Value, Type)
+
+
+## Get Pcd Values of Dec
+#
+# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
+# @retval (TokenSpcCName, TokenCName, Value, DatumType, Token, ItemType) Formatted Pcd Item
+#
def GetPcdOfDec(Item, Type, File, LineNo = -1):
    """Parse a DEC Pcd entry: <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>.

    Returns (TokenGuid, TokenName, Value, DatumType, Token, Type).
    """
    Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
    TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
    Fields = GetSplitValueList(Item)
    if len(Fields) != 4:
        RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
    else:
        Value, DatumType, Token = Fields[1], Fields[2], Fields[3]
        TokenInfo = GetSplitValueList(Fields[0], DataType.TAB_SPLIT)
        if len(TokenInfo) == 2:
            TokenGuid = TokenInfo[0]
            TokenName = TokenInfo[1]
        else:
            RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)

    return (TokenGuid, TokenName, Value, DatumType, Token, Type)
+
+## Parse DEFINE statement
+#
+# Get DEFINE macros
+#
+# 1. Insert a record into TblDec
+# Value1: Macro Name
+# Value2: Macro Value
+#
def ParseDefine(LineValue, StartLine, Table, FileID, Filename, SectionName, SectionModel, Arch):
    """Parse a DEFINE statement and insert its (name, value) pair into Table."""
    EdkLogger.debug(EdkLogger.DEBUG_2, "DEFINE statement '%s' found in section %s" % (LineValue, SectionName))
    # Text following the (case-insensitive) DEFINE keyword: "<Name> = <Value>"
    Keyword = DataType.TAB_DEFINE.upper() + ' '
    Body = LineValue[LineValue.upper().find(Keyword) + len(DataType.TAB_DEFINE + ' '):]
    Define = GetSplitValueList(CleanString(Body), TAB_EQUAL_SPLIT, 1)
    Table.Insert(MODEL_META_DATA_DEFINE, Define[0], Define[1], '', '', '', Arch, SectionModel, FileID, StartLine, -1, StartLine, -1, 0)
+
+## InsertSectionItems
+#
+# Insert item data of a section to a dict
+#
def InsertSectionItems(Model, CurrentSection, SectionItemList, ArchList, ThirdList, RecordSet):
    ## Collect the items of one section into RecordSet[Model], once per arch.
    #
    # @param Model:           Key into RecordSet selecting the record list to extend
    # @param CurrentSection:  Name of the current section (not used by this routine)
    # @param SectionItemList: Section items as [LineValue, StartLine, Comment] triples
    # @param ArchList:        Arches of the section; '' is treated as the common arch
    # @param ThirdList:       Per-arch third value (parallel to ArchList)
    # @param RecordSet:       Dict of Model -> record list, updated in place
    #
    # Insert each item data of a section
    for Index in range(0, len(ArchList)):
        Arch = ArchList[Index]
        Third = ThirdList[Index]
        if Arch == '':
            Arch = TAB_ARCH_COMMON

        # Records aliases RecordSet[Model], so appends accumulate across arches
        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            BelongsToItem, EndLine, EndColumn = -1, -1, -1
            LineValue, StartLine, EndLine, Comment = SectionItem[0], SectionItem[1], SectionItem[1], SectionItem[2]

            EdkLogger.debug(4, "Parsing %s ..." %LineValue)
            # And then parse DEFINE statement
            # DEFINE lines are skipped here and not recorded
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                continue

            # At last parse other sections
            # ID stays -1: this variant records items without a database insert
            ID = -1
            Records.append([LineValue, Arch, StartLine, ID, Third, Comment])

    # NOTE(review): if ArchList is empty, Records is unbound here — presumably
    # callers always pass at least one arch; confirm
    if RecordSet != {}:
        RecordSet[Model] = Records
+
+## Insert records to database
+#
+# Insert item data of a section to database
+# @param Table: The Table to be inserted
+# @param FileID: The ID of belonging file
+# @param Filename: The name of belonging file
+# @param CurrentSection: The name of currect section
+# @param SectionItemList: A list of items of the section
+# @param ArchList: A list of arches
+# @param ThirdList: A list of third parameters, ModuleType for LibraryClass and SkuId for Dynamic Pcds
+# @param IfDefList: A list of all conditional statements
+# @param RecordSet: A dict of all parsed records
+#
def InsertSectionItemsIntoDatabase(Table, FileID, Filename, Model, CurrentSection, SectionItemList, ArchList, ThirdList, IfDefList, RecordSet):
    ## Insert the items of one section into the database table and RecordSet.
    #
    # @param Table:           Target database table (provides Insert)
    # @param FileID:          ID of the file the items belong to
    # @param Filename:        Name of that file (passed to ParseDefine)
    # @param Model:           Key into RecordSet selecting the record list to extend
    # @param CurrentSection:  Name of the current section
    # @param SectionItemList: Section items as [LineValue, StartLine, ...] entries
    # @param ArchList:        Arches of the section; '' is treated as the common arch
    # @param ThirdList:       Per-arch third value (parallel to ArchList)
    # @param IfDefList:       Conditional statements (not used by this routine)
    # @param RecordSet:       Dict of Model -> record list, updated in place
    #
    #
    # Insert each item data of a section
    #
    for Index in range(0, len(ArchList)):
        Arch = ArchList[Index]
        Third = ThirdList[Index]
        if Arch == '':
            Arch = TAB_ARCH_COMMON

        # Records aliases RecordSet[Model], so appends accumulate across arches
        Records = RecordSet[Model]
        for SectionItem in SectionItemList:
            BelongsToItem, EndLine, EndColumn = -1, -1, -1
            LineValue, StartLine, EndLine = SectionItem[0], SectionItem[1], SectionItem[1]

            EdkLogger.debug(4, "Parsing %s ..." %LineValue)
            #
            # And then parse DEFINE statement
            #
            if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
                # DEFINE lines go through ParseDefine (own table insert) only
                ParseDefine(LineValue, StartLine, Table, FileID, Filename, CurrentSection, Model, Arch)
                continue

            #
            # At last parse other sections
            #
            ID = Table.Insert(Model, LineValue, Third, Third, '', '', Arch, -1, FileID, StartLine, -1, StartLine, -1, 0)
            Records.append([LineValue, Arch, StartLine, ID, Third])

    # NOTE(review): if ArchList is empty, Records is unbound here — presumably
    # callers always pass at least one arch; confirm
    if RecordSet != {}:
        RecordSet[Model] = Records
+
+## GenMetaDatSectionItem
def GenMetaDatSectionItem(Key, Value, List):
    """Append Value to the list stored under Key in the dict List, creating the list on first use."""
    List.setdefault(Key, []).append(Value)
diff --git a/BaseTools/Source/Python/Common/PyUtility.pyd b/BaseTools/Source/Python/Common/PyUtility.pyd
new file mode 100644
index 0000000000..5bb57d91e0
--- /dev/null
+++ b/BaseTools/Source/Python/Common/PyUtility.pyd
Binary files differ
diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py
new file mode 100644
index 0000000000..5da0cacfb0
--- /dev/null
+++ b/BaseTools/Source/Python/Common/String.py
@@ -0,0 +1,703 @@
+## @file
+# This file is used to define common string related functions used in parsing process
+#
+# Copyright (c) 2007 ~ 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import re
+import DataType
+import os.path
+import string
+import EdkLogger as EdkLogger
+
+from GlobalData import *
+from BuildToolError import *
+
+## GetSplitValueList
+#
+# Get a value list from a string with multiple values splited with SplitTag
+# The default SplitTag is DataType.TAB_VALUE_SPLIT
+# 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
+#
+# @param String: The input string to be splitted
+# @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
+# @param MaxSplit: The max number of split values, default is -1
+#
+# @retval list() A list for splitted string
+#
def GetSplitValueList(String, SplitTag = DataType.TAB_VALUE_SPLIT, MaxSplit = -1):
    ## Split String on SplitTag and strip whitespace from each part.
    #
    # 'AAA|BBB|CCC' -> ['AAA', 'BBB', 'CCC']
    #
    # @param String:   The input string to be split
    # @param SplitTag: The split key, default is DataType.TAB_VALUE_SPLIT
    # @param MaxSplit: The max number of splits, default is -1 (unlimited)
    #
    # @retval list  A list of stripped substrings
    #
    # A list comprehension (not map()) is used so the documented list return
    # type also holds on Python 3, where map() returns a lazy iterator and
    # would break callers that index the result.
    return [Part.strip() for Part in String.split(SplitTag, MaxSplit)]
+
+## MergeArches
+#
+# Find a key's all arches in dict, add the new arch to the list
+# If not exist any arch, set the arch directly
+#
+# @param Dict: The input value for Dict
+# @param Key: The input value for Key
+# @param Arch: The Arch to be added or merged
+#
def MergeArches(Dict, Key, Arch):
    """Record Arch under Key in Dict.

    Appends to the existing arch list, or creates a new list on first use
    (split() also tolerates a space-separated arch string on creation).
    """
    if Key not in Dict:
        Dict[Key] = Arch.split()
    else:
        Dict[Key].append(Arch)
+
+## GenDefines
+#
+# Parse a string with format "DEFINE <VarName> = <PATH>"
+# Generate a map Defines[VarName] = PATH
+# Return False if invalid format
+#
+# @param String: String with DEFINE statement
+# @param Arch: Supportted Arch
+# @param Defines: DEFINE statement to be parsed
+#
+# @retval 0 DEFINE statement found, and valid
+# @retval -1 DEFINE statement found, but not valid
+# @retval 1 DEFINE statement not found
+#
def GenDefines(String, Arch, Defines):
    """Parse a 'DEFINE <name> = <value>' statement into Defines[(name, Arch)].

    Returns 0 when a valid DEFINE was parsed, -1 when a DEFINE keyword was
    found but the statement is malformed, and 1 when no DEFINE is present.
    """
    if String.find(DataType.TAB_DEFINE + ' ') < 0:
        # No DEFINE keyword on this line at all
        return 1
    Pair = String.replace(DataType.TAB_DEFINE + ' ', '').split(DataType.TAB_EQUAL_SPLIT)
    if len(Pair) != 2:
        # DEFINE present but not of the '<name> = <value>' shape
        return -1
    Defines[(CleanString(Pair[0]), Arch)] = CleanString(Pair[1])
    return 0
+
+## GenInclude
+#
+# Parse a string with format "!include <Filename>"
+# Return the file path
+# Return False if invalid format or NOT FOUND
+#
+# @param String: String with INCLUDE statement
+# @param IncludeFiles: INCLUDE statement to be parsed
+# @param Arch: Supportted Arch
+#
+# @retval True
+# @retval False
+#
def GenInclude(String, IncludeFiles, Arch):
    """Parse an '!include <Filename>' statement and record the file under Arch.

    Returns True when an include statement was found, False otherwise.
    """
    Keyword = DataType.TAB_INCLUDE.upper() + ' '
    Pos = String.upper().find(Keyword)
    if Pos < 0:
        return False
    IncludeFile = CleanString(String[Pos + len(DataType.TAB_INCLUDE + ' '):])
    MergeArches(IncludeFiles, IncludeFile, Arch)
    return True
+
+## GetLibraryClassesWithModuleType
+#
+# Get Library Class definition when no module type defined
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get library classes successfully
+#
def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
    """Collect library-class lines of a section, pairing each with the
    module-type list extracted from the section key.

    Always returns True.
    """
    ModuleTypes = SplitModuleType(Key)[1]
    Content = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Content.splitlines():
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned != '' and Cleaned[0] != CommentCharacter:
            KeyValues.append([CleanString(Cleaned, CommentCharacter), ModuleTypes])

    return True
+
+## GetDynamics
+#
+# Get Dynamic Pcds
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Get Dynamic Pcds successfully
+#
def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
    """Collect dynamic Pcd lines of a section, pairing each with the SkuId
    name list extracted from the section key.

    Always returns True.
    """
    # SplitModuleType returns [prefix, name-list]; only the list is needed
    SkuIdNames = SplitModuleType(Key)[1]
    Content = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Content.splitlines():
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned != '' and Cleaned[0] != CommentCharacter:
            KeyValues.append([CleanString(Cleaned, CommentCharacter), SkuIdNames])

    return True
+
+## SplitModuleType
+#
+# Split ModuleType out of section define to get key
+# [LibraryClass.Arch.ModuleType|ModuleType|ModuleType] -> [ 'LibraryClass.Arch', ['ModuleType', 'ModuleType', 'ModuleType'] ]
+#
+# @param Key: String to be parsed
+#
+# @retval ReturnValue A list for module types
+#
def SplitModuleType(Key):
    """Split a section key like 'LibraryClass.Arch.Type1|Type2' into
    ['LibraryClass.Arch', ['Type1', 'Type2']].

    Missing Arch/ModuleType parts are tolerated via padding.
    """
    KeyList = Key.split(DataType.TAB_SPLIT)
    # Pad so that the arch (index 1) and module-type (index 2) parts always exist
    KeyList += ['', '']
    Prefix = KeyList[0]
    if KeyList[1] != '':
        Prefix = Prefix + DataType.TAB_SPLIT + KeyList[1]

    return [Prefix, GetSplitValueList(KeyList[2])]
+
+## Replace macro in strings list
+#
+# This method replace macros used in a given string list. The macros are
+# given in a dictionary.
+#
+# @param StringList StringList to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement To decide whether replace un-defined macro to ''
+#
+# @retval NewList A new string list whose macros are replaced
+#
def ReplaceMacros(StringList, MacroDefinitions={}, SelfReplacement = False):
    """Return a copy of StringList with macros replaced in every plain-string
    item; non-string items are passed through unchanged."""
    return [ReplaceMacro(Item, MacroDefinitions, SelfReplacement)
            if type(Item) == type('') else Item
            for Item in StringList]
+
+## Replace macro in string
+#
+# This method replace macros used in given string. The macros are given in a
+# dictionary.
+#
+# @param String String to be processed
+# @param MacroDefinitions The macro definitions in the form of dictionary
+# @param SelfReplacement To decide whether replace un-defined macro to ''
+#
+# @retval string The string whose macros are replaced
+#
def ReplaceMacro(String, MacroDefinitions={}, SelfReplacement = False):
    ## Expand $(MACRO) references in String using MacroDefinitions.
    #
    # @param String:           String to be processed
    # @param MacroDefinitions: Macro name -> replacement text
    # @param SelfReplacement:  If True, undefined macros are replaced with ''
    #
    # @retval string  The string with all resolvable macros expanded
    #
    LastString = String
    # The loop condition only tests that MacroDefinitions is non-empty; the
    # loop terminates via the break statements below once no progress is made
    while MacroDefinitions:
        # gMacroPattern is imported from GlobalData via '*'; assumed to match
        # $(NAME) occurrences — the replace() calls below rely on that
        MacroUsed = gMacroPattern.findall(String)
        # no macro found in String, stop replacing
        if len(MacroUsed) == 0:
            break

        for Macro in MacroUsed:
            if Macro not in MacroDefinitions:
                if SelfReplacement:
                    String = String.replace("$(%s)" % Macro, '')
                continue
            String = String.replace("$(%s)" % Macro, MacroDefinitions[Macro])
        # in case there's macro not defined
        if String == LastString:
            break
        LastString = String

    return String
+
+## NormPath
+#
+# Create a normalized path
+# And replace DEFINE macros in the path
+#
+# @param Path: The input value for Path to be converted
+# @param Defines: A set for DEFINE statement
+#
+# @retval Path Formatted path
+#
def NormPath(Path, Defines = {}):
    """Normalize a path, expanding DEFINE macros first.

    A path that explicitly started with '.' keeps an explicit './' prefix
    even after normalization.
    """
    if not Path:
        return Path

    WasExplicitlyRelative = Path.startswith('.')
    # Expand $(DEFINE) macros before normalizing
    if Defines:
        Path = ReplaceMacro(Path, Defines)
    # Convert to the local path format
    Path = os.path.normpath(Path)
    # normpath drops a leading './'; restore it for callers that rely on it
    if WasExplicitlyRelative and Path[0] != '.':
        Path = os.path.join('.', Path)

    return Path
+
+## CleanString
+#
+# Remove comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
+#
+# @retval Path Formatted path
+#
def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
    """Strip whitespace and trailing comments from a single line."""
    Line = Line.strip()
    if AllowCppStyleComment:
        # Map R8-style comment markers onto the standard comment character first
        Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
    # Drop everything from the first comment character onward, then trim again
    return Line.split(CommentCharacter, 1)[0].strip()
+
+## GetMultipleValuesOfKeyFromLines
+#
+# Parse multiple strings to clean comment and spaces
+# The result is saved to KeyValues
+#
+# @param Lines: The content to be parsed
+# @param Key: Reserved
+# @param KeyValues: To store data after parsing
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval True Successfully executed
+#
def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
    """Append every non-empty, non-comment line after the section header to
    KeyValues. Always returns True."""
    Content = Lines.split(DataType.TAB_SECTION_END, 1)[1]
    for RawLine in Content.split('\n'):
        Cleaned = CleanString(RawLine, CommentCharacter)
        if Cleaned != '' and Cleaned[0] != CommentCharacter:
            KeyValues.append(Cleaned)

    return True
+
+## GetDefineValue
+#
+# Parse a DEFINE statement to get defined value
+# DEFINE Key Value
+#
+# @param String: The content to be parsed
+# @param Key: The key of DEFINE statement
+# @param CommentCharacter: Comment char, used to ignore comment content
+#
+# @retval string The defined value
+#
def GetDefineValue(String, Key, CommentCharacter):
    """Return the text following 'Key ' in a cleaned 'DEFINE Key Value' statement."""
    Cleaned = CleanString(String)
    Marker = Key + ' '
    return Cleaned[Cleaned.find(Marker) + len(Marker):]
+
+## GetSingleValueOfKeyFromLines
+#
+# Parse multiple strings as below to get value of each definition line
+# Key1 = Value1
+# Key2 = Value2
+# The result is saved to Dictionary
+#
+# @param Lines: The content to be parsed
+# @param Dictionary: To store data after parsing
+# @param CommentCharacter: Comment char, be used to ignore comment content
+# @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+# @param ValueSplitFlag: Value split flag, be used to decide if has multiple values
+# @param ValueSplitCharacter: Value split char, be used to split multiple values. Key1 = Value1|Value2, '|' is the value split char
+#
+# @retval True Successfully executed
+#
def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, ValueSplitFlag, ValueSplitCharacter):
    ## Parse 'Key = Value' definition lines into Dictionary.
    #
    # DEFINE and SPEC statements are accumulated into dedicated list entries;
    # other keys are stored in Dictionary (first hit replaces, later hits
    # extend). Keys not already present in Dictionary are stored under the
    # MACRO sub-dictionary instead.
    #
    # @param Lines:               The content to be parsed
    # @param Dictionary:          To store data after parsing (updated in place)
    # @param CommentCharacter:    Comment char used to ignore comment content
    # @param KeySplitCharacter:   Char between key name and key value (e.g. '=')
    # @param ValueSplitFlag:      Whether a value may hold multiple sub-values
    # @param ValueSplitCharacter: Char splitting multiple sub-values (e.g. '|')
    #
    # @retval True  Successfully executed
    #
    Lines = Lines.split('\n')
    Keys = []
    Value = ''
    DefineValues = ['']
    SpecValues = ['']

    for Line in Lines:
        #
        # Handle DEFINE and SPEC
        #
        if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
            # Drop the '' placeholder once a real value arrives
            if '' in DefineValues:
                DefineValues.remove('')
            DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
            continue
        if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
            if '' in SpecValues:
                SpecValues.remove('')
            SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
            continue

        #
        # Handle Others
        #
        LineList = Line.split(KeySplitCharacter, 1)
        if len(LineList) >= 2:
            Key = LineList[0].split()
            # Only a single-word, non-comment key is accepted
            if len(Key) == 1 and Key[0][0] != CommentCharacter:
                #
                # Remove comments and white spaces
                #
                LineList[1] = CleanString(LineList[1], CommentCharacter)
                if ValueSplitFlag:
                    # NOTE(review): string.strip with map() is Python 2 idiom
                    Value = map(string.strip, LineList[1].split(ValueSplitCharacter))
                else:
                    Value = CleanString(LineList[1], CommentCharacter).splitlines()

                if Key[0] in Dictionary:
                    # First occurrence replaces the placeholder; later ones extend
                    if Key[0] not in Keys:
                        Dictionary[Key[0]] = Value
                        Keys.append(Key[0])
                    else:
                        Dictionary[Key[0]].extend(Value)
                else:
                    # NOTE(review): assumes Dictionary already contains the
                    # MACRO sub-dictionary — confirm with callers
                    Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]

    if DefineValues == []:
        DefineValues = ['']
    if SpecValues == []:
        SpecValues = ['']
    Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
    Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues

    return True
+
+## PreCheck
+#
+# Do pre-check for a file before it is parsed
+# Check $()
+# Check []
+#
+# @param FileName: Used for error report
+# @param FileContent: File content to be parsed
+# @param SupSectionTag: Used for error report
+#
def PreCheck(FileName, FileContent, SupSectionTag):
    ## Pre-check a file's content before it is parsed.
    #
    # Verifies per line that '$' only appears as part of a '$(...)' macro and
    # that '[' and ']' always appear together; returns the cleaned content.
    #
    # @param FileName:      Used for error report
    # @param FileContent:   File content to be parsed
    # @param SupSectionTag: Used for error report
    #
    # @retval NewFileContent  Cleaned content, one '\r\n'-terminated line each
    #
    LineNo = 0
    IsFailed = False
    NewFileContent = ''
    for Line in FileContent.splitlines():
        LineNo = LineNo + 1
        #
        # Clean current line
        #
        Line = CleanString(Line)

        #
        # Remove commented line
        # BUGFIX: this used TAB_COMMA_SPLIT (','); a commented line starts with
        # the comment character, i.e. TAB_COMMENT_SPLIT
        #
        if Line.find(DataType.TAB_COMMENT_SPLIT) == 0:
            Line = ''
        #
        # Check $()
        #
        if Line.find('$') > -1:
            if Line.find('$(') < 0 or Line.find(')') < 0:
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

        #
        # Check []
        #
        if Line.find('[') > -1 or Line.find(']') > -1:
            #
            # Only get one '[' or one ']'
            #
            if not (Line.find('[') > -1 and Line.find(']') > -1):
                EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

        #
        # Regenerate FileContent
        #
        NewFileContent = NewFileContent + Line + '\r\n'

    # NOTE(review): IsFailed is never set above, so this branch is currently
    # dead; kept for fidelity with the original control flow
    if IsFailed:
        EdkLogger.error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError = EdkLogger.IsRaiseError)

    return NewFileContent
+
+## CheckFileType
+#
+# Check if the Filename is including ExtName
+# Return True if it exists
+# Raise a error message if it not exists
+#
+# @param CheckFilename: Name of the file to be checked
+# @param ExtName: Ext name of the file to be checked
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval True The file type is correct
+#
def CheckFileType(CheckFilename, ExtName, ContainerFilename, SectionName, Line, LineNo = -1):
    ## Check that CheckFilename carries the extension ExtName.
    #
    # Reports a parser error (with the offending line located in the container
    # file) when the extension does not match.
    #
    # @param CheckFilename:     Name of the file to be checked
    # @param ExtName:           Expected extension (case-insensitive)
    # @param ContainerFilename: File which references CheckFilename, for error report
    # @param SectionName:       Used for error report
    # @param Line:              The referencing line, used to locate the line number
    #
    # @retval True  The file type is correct
    #
    if CheckFilename != '' and CheckFilename is not None:
        (Root, Ext) = os.path.splitext(CheckFilename)
        if Ext.upper() != ExtName.upper():
            # Context manager closes the container file even if error() raises
            # (the original leaked the handle from open(...).read())
            with open(ContainerFilename, 'r') as ContainerFd:
                ContainerFile = ContainerFd.read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Invalid %s. '%s' is found, but '%s' file is needed" % (SectionName, CheckFilename, ExtName)
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, Line=LineNo,
                            File=ContainerFilename, RaiseError = EdkLogger.IsRaiseError)

    return True
+
+## CheckFileExist
+#
+# Check if the file exists
+# Return True if it exists
+# Raise a error message if it not exists
+#
+# @param CheckFilename: Name of the file to be checked
+# @param WorkspaceDir: Current workspace dir
+# @param ContainerFilename: The container file which describes the file to be checked, used for error report
+# @param SectionName: Used for error report
+# @param Line: The line in container file which defines the file to be checked
+#
+# @retval The file full path if the file exists
+#
def CheckFileExist(WorkspaceDir, CheckFilename, ContainerFilename, SectionName, Line, LineNo = -1):
    ## Check that CheckFilename exists inside the workspace.
    #
    # Reports a parser error (with the offending line located in the container
    # file) when the file is missing.
    #
    # @param WorkspaceDir:      Current workspace dir
    # @param CheckFilename:     Name of the file to be checked
    # @param ContainerFilename: File which references CheckFilename, for error report
    # @param SectionName:       Used for error report
    # @param Line:              The referencing line, used to locate the line number
    #
    # @retval CheckFile  The file's full path ('' when no filename was given)
    #
    CheckFile = ''
    if CheckFilename != '' and CheckFilename is not None:
        CheckFile = WorkspaceFile(WorkspaceDir, CheckFilename)
        if not os.path.isfile(CheckFile):
            # Context manager closes the container file even if error() raises
            # (the original leaked the handle from open(...).read())
            with open(ContainerFilename, 'r') as ContainerFd:
                ContainerFile = ContainerFd.read()
            if LineNo == -1:
                LineNo = GetLineNo(ContainerFile, Line)
            ErrorMsg = "Can't find file '%s' defined in section '%s'" % (CheckFile, SectionName)
            EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg,
                            File=ContainerFilename, Line = LineNo, RaiseError = EdkLogger.IsRaiseError)

    return CheckFile
+
+## GetLineNo
+#
+# Find the index of a line in a file
+#
+# @param FileContent: Search scope
+# @param Line: Search key
+#
+# @retval int Index of the line
+# @retval -1 The line is not found
+#
def GetLineNo(FileContent, Line, IsIgnoreComment = True):
    ## Find the 1-based number of the first line containing the substring Line.
    #
    # @param FileContent:     Text to search
    # @param Line:            Substring to look for
    # @param IsIgnoreComment: Skip matches on lines whose first non-blank
    #                         character is the comment character
    #
    # @retval int  1-based index of the matching line
    # @retval -1   The line is not found
    #
    LineList = FileContent.splitlines()
    for Index in range(len(LineList)):
        if LineList[Index].find(Line) > -1:
            #
            # Ignore statement in comment
            #
            if IsIgnoreComment:
                # startswith() instead of strip()[0] so an all-whitespace line
                # (matched e.g. when Line is '') cannot raise IndexError
                if LineList[Index].strip().startswith(DataType.TAB_COMMENT_SPLIT):
                    continue
            return Index + 1

    return -1
+
+## RaiseParserError
+#
+# Raise a parser error
+#
+# @param Line: String which has error
+# @param Section: Used for error report
+# @param File: File which has the string
+# @param Format: Correct format
+#
def RaiseParserError(Line, Section, File, Format = '', LineNo = -1):
    ## Report a parser error for an invalid statement.
    #
    # @param Line:    String which has the error
    # @param Section: Used for error report
    # @param File:    File which has the string
    # @param Format:  Correct format, appended to the error as extra data
    # @param LineNo:  Known line number, or -1 to locate it by searching File
    #
    if LineNo == -1:
        # Locate the offending line on demand; the context manager closes the
        # handle (the original leaked it from open(...).read())
        with open(os.path.normpath(File), 'r') as Fd:
            LineNo = GetLineNo(Fd.read(), Line)
    ErrorMsg = "Invalid statement '%s' is found in section '%s'" % (Line, Section)
    if Format != '':
        Format = "Correct format is " + Format
    EdkLogger.error("Parser", PARSER_ERROR, ErrorMsg, File=File, Line=LineNo, ExtraData=Format, RaiseError = EdkLogger.IsRaiseError)
+
+## WorkspaceFile
+#
+# Return a full path with workspace dir
+#
+# @param WorkspaceDir: Workspace dir
+# @param Filename: Relative file name
+#
+# @retval string A full path
+#
def WorkspaceFile(WorkspaceDir, Filename):
    """Return the full path of Filename inside the (normalized) workspace dir."""
    Workspace = NormPath(WorkspaceDir)
    Relative = NormPath(Filename)
    return os.path.join(Workspace, Relative)
+
+## Split string
+#
+# Remove a '"' that the string starts with and/or ends with
+#
+# @param String: The string to be split
+#
+# @retval String: The string with the surrounding '"' removed
+#
def SplitString(String):
    """Remove a single leading and/or trailing '"' from String."""
    Begin = 1 if String.startswith('"') else 0
    End = -1 if String.endswith('"') else len(String)
    return String[Begin:End]
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in each item of StringList
+#
+# @param StringList: A list for strings to be converted
+#
def ConvertToSqlString(StringList):
    ## Escape single quotes for SQL in every item of StringList.
    #
    # @param StringList: A list of strings to be converted
    #
    # @retval list  New list with each "'" doubled to "''"
    #
    # A list comprehension (not map()) so a real list is returned on
    # Python 3 as well, where map() yields a lazy iterator.
    return [Item.replace("'", "''") for Item in StringList]
+
+## Convert To Sql String
+#
+# 1. Replace "'" with "''" in the String
+#
+# @param String: A String to be converted
+#
def ConvertToSqlString2(String):
    """Escape single quotes in String for SQL by doubling them."""
    Escaped = String.replace("'", "''")
    return Escaped
+
+#
+# Remove comment block
+#
def RemoveBlockComment(Lines):
    ## Blank out R8-style block comments from a list of source lines.
    #
    # Lines inside a block comment are replaced with '' so line numbering is
    # preserved; text before the opener is re-attached to the text after the
    # closer. (The unused local IsFindBlockCode from the original was removed.)
    #
    # @param Lines: List of source lines
    #
    # @retval NewLines  Same number of lines, with comment content removed
    #
    IsFindBlockComment = False
    ReservedLine = ''
    NewLines = []

    for Line in Lines:
        Line = Line.strip()
        #
        # Remove comment block
        #
        if Line.find(DataType.TAB_COMMENT_R8_START) > -1:
            # Keep the text before the opener until the comment closes
            ReservedLine = GetSplitValueList(Line, DataType.TAB_COMMENT_R8_START, 1)[0]
            IsFindBlockComment = True
        if Line.find(DataType.TAB_COMMENT_R8_END) > -1:
            # Re-attach the saved prefix to the text after the closer
            Line = ReservedLine + GetSplitValueList(Line, DataType.TAB_COMMENT_R8_END, 1)[1]
            ReservedLine = ''
            IsFindBlockComment = False
        if IsFindBlockComment:
            NewLines.append('')
            continue

        NewLines.append(Line)
    return NewLines
+
+#
+# Get String of a List
+#
def GetStringOfList(List, Split = ' '):
    """Concatenate list items, each followed by Split, then strip surrounding
    whitespace; non-list input is returned unchanged.

    Note: only whitespace is stripped, so a non-whitespace Split leaves a
    trailing separator (original behavior, preserved).
    """
    if type(List) != type([]):
        return List
    Parts = []
    for Item in List:
        Parts.append(Item)
        Parts.append(Split)
    return ''.join(Parts).strip()
+
+#
+# Get HelpTextList from HelpTextClassList
+#
def GetHelpTextList(HelpTextClassList):
    """Flatten HelpText objects into a list of text lines.

    A single trailing newline is trimmed from each object's String attribute
    (the object itself is updated, mirroring the original side effect).
    """
    Result = []
    if HelpTextClassList:
        for HelpText in HelpTextClassList:
            Text = HelpText.String
            if Text.endswith('\n'):
                Text = Text[0: len(Text) - len('\n')]
                HelpText.String = Text
            Result.extend(Text.split('\n'))

    return Result
+
def StringToArray(String):
    ## Convert a string literal into a C byte-array initializer.
    #
    # Unicode objects and L"..." literals emit two bytes per character with a
    # double-NUL terminator; "..." literals emit one byte per character with a
    # single NUL; any other input is treated as a space-separated value list.
    #
    # @param String: unicode object or source-level string literal
    #
    # @retval string  C array initializer, e.g. '{0x41, 0x00, 0x00, 0x00}'
    #
    if isinstance(String, unicode):
        # BUGFIX: was len(unicode) — a TypeError on the type object — which
        # made the empty-unicode case unreachable; must measure String
        if len(String) == 0:
            return "{0x00, 0x00}"
        return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String])
    elif String.startswith('L"'):
        if String == "L\"\"":
            return "{0x00, 0x00}"
        else:
            return "{%s, 0x00, 0x00}" % ", ".join(["0x%02x, 0x00" % ord(C) for C in String[2:-1]])
    elif String.startswith('"'):
        if String == "\"\"":
            return "{0x00}"
        else:
            return "{%s, 0x00}" % ", ".join(["0x%02x" % ord(C) for C in String[1:-1]])
    else:
        return '{%s, 0}' % ', '.join(String.split())
+
def StringArrayLength(String):
    """Return the element count of the C array StringToArray would emit for String."""
    if isinstance(String, unicode):
        # NOTE(review): the trailing '+ 1' makes this odd for two-byte chars;
        # preserved as-is from the original
        return (len(String) + 1) * 2 + 1
    if String.startswith('L"'):
        # Quoted payload is len - 3 chars; add the terminator, two bytes each
        return (len(String) - 3 + 1) * 2
    if String.startswith('"'):
        # Quoted payload is len - 2 chars plus one NUL
        return (len(String) - 2 + 1)
    # Space-separated value list plus one terminator element
    return len(String.split()) + 1
+
def RemoveDupOption(OptionString, Which="/I", Against=None):
    """Blank out repeated occurrences of options starting with Which.

    The first occurrence of each option value is kept; values listed in
    Against are treated as already seen. Non-matching tokens pass through.
    """
    Options = OptionString.split()
    Seen = list(Against) if Against else []
    for Index, Opt in enumerate(Options):
        if not Opt.startswith(Which):
            continue
        # Empty when the token is exactly the prefix
        Val = Opt[len(Which):]
        if Val in Seen:
            Options[Index] = ""
        else:
            Seen.append(Val)
    return " ".join(Options)
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
+if __name__ == '__main__':
+ pass
+
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
new file mode 100644
index 0000000000..70178f54ce
--- /dev/null
+++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py
@@ -0,0 +1,174 @@
+## @file
+# This file is used to define each component of Target.txt file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import EdkLogger
+import DataType
+from BuildToolError import *
+import GlobalData
+
+gDefaultTargetTxtFile = "Conf/target.txt"
+
+## TargetTxtClassObject
+#
+# This class defines the content parsed from the target.txt file
+#
+# @param object: Inherited from object class
+# @param Filename: Input value for full path of target.txt
+#
+# @var TargetTxtDictionary: To store keys and values defined in target.txt
+#
+class TargetTxtClassObject(object):
+ def __init__(self, Filename = None):
+ self.TargetTxtDictionary = {
+ DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM : '',
+ DataType.TAB_TAT_DEFINES_ACTIVE_MODULE : '',
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF : '',
+ DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD : '',
+ DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER : '',
+ DataType.TAB_TAT_DEFINES_TARGET : [],
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG : [],
+ DataType.TAB_TAT_DEFINES_TARGET_ARCH : [],
+ DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF : '',
+ }
+ if Filename != None:
+ self.LoadTargetTxtFile(Filename)
+
+ ## LoadTargetTxtFile
+ #
+ # Load target.txt file and parse it, return a set structure to store keys and values
+ #
+ # @param Filename: Input value for full path of target.txt
+ #
+ # @retval set() A set structure to store keys and values
+ # @retval 1 Error happenes in parsing
+ #
+ def LoadTargetTxtFile(self, Filename):
+ if os.path.exists(Filename) and os.path.isfile(Filename):
+ return self.ConvertTextFileToDict(Filename, '#', '=')
+ else:
+ EdkLogger.error("Target.txt Parser", FILE_NOT_FOUND, ExtraData=Filename)
+ return 1
+
+ ## ConvertTextFileToDict
+ #
+ # Convert a text file to a dictionary of (name:value) pairs.
+ # The data is saved to self.TargetTxtDictionary
+ #
+ # @param FileName: Text filename
+ # @param CommentCharacter: Comment char, be used to ignore comment content
+ # @param KeySplitCharacter: Key split char, between key name and key value. Key1 = Value1, '=' is the key split char
+ #
+ # @retval 0 Convert successfully
+ # @retval 1 Open file failed
+ #
+ def ConvertTextFileToDict(self, FileName, CommentCharacter, KeySplitCharacter):
+ F = None
+ try:
+ F = open(FileName,'r')
+ except:
+ EdkLogger.error("build", FILE_OPEN_FAILURE, ExtraData=FileName)
+ if F != None:
+ F.close()
+
+ for Line in F:
+ Line = Line.strip()
+ if Line.startswith(CommentCharacter) or Line == '':
+ continue
+
+ LineList = Line.split(KeySplitCharacter, 1)
+ Key = LineList[0].strip()
+ if len(LineList) == 2:
+ Value = LineList[1].strip()
+ else:
+ Value = ""
+
+ if Key in [DataType.TAB_TAT_DEFINES_ACTIVE_PLATFORM, DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF, \
+ DataType.TAB_TAT_DEFINES_ACTIVE_MODULE, DataType.TAB_TAT_DEFINES_BUILD_RULE_CONF]:
+ self.TargetTxtDictionary[Key] = Value.replace('\\', '/')
+ elif Key in [DataType.TAB_TAT_DEFINES_TARGET, DataType.TAB_TAT_DEFINES_TARGET_ARCH, \
+ DataType.TAB_TAT_DEFINES_TOOL_CHAIN_TAG]:
+ self.TargetTxtDictionary[Key] = Value.split()
+ elif Key == DataType.TAB_TAT_DEFINES_MULTIPLE_THREAD:
+ if Value not in ["Enable", "Disable"]:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid setting of [%s]: %s." % (Key, Value),
+ ExtraData="\tSetting must be one of [Enable, Disable]",
+ File=FileName)
+ self.TargetTxtDictionary[Key] = Value
+ elif Key == DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER:
+ try:
+ V = int(Value, 0)
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid number of [%s]: %s." % (Key, Value),
+ File=FileName)
+ self.TargetTxtDictionary[Key] = Value
+ #elif Key not in GlobalData.gGlobalDefines:
+ # GlobalData.gGlobalDefines[Key] = Value
+
+ F.close()
+ return 0
+
+ ## Print the dictionary
+ #
+ # Print all items of dictionary one by one
+ #
+ # @param Dict: The dictionary to be printed
+ #
+ def printDict(Dict):
+ if Dict != None:
+ KeyList = Dict.keys()
+ for Key in KeyList:
+ if Dict[Key] != '':
+ print Key + ' = ' + str(Dict[Key])
+
+ ## Print the dictionary
+ #
+ # Print the items of dictionary which matched with input key
+ #
+ # @param list: The dictionary to be printed
+ # @param key: The key of the item to be printed
+ #
+ def printList(Key, List):
+ if type(List) == type([]):
+ if len(List) > 0:
+ if Key.find(TAB_SPLIT) != -1:
+ print "\n" + Key
+ for Item in List:
+ print Item
+## TargetTxtDict
+#
+# Load target.txt in input workspace dir
+#
+# @param WorkSpace: Workspace dir
+#
+# @retval Target An instance of TargetTxtClassObject() with loaded target.txt
+#
def TargetTxtDict(WorkSpace):
    # Resolve Conf/target.txt relative to the workspace, parse it, and
    # return the populated TargetTxtClassObject.
    TargetTxtPath = os.path.normpath(os.path.join(WorkSpace, gDefaultTargetTxtFile))
    Parsed = TargetTxtClassObject()
    Parsed.LoadTargetTxtFile(TargetTxtPath)
    return Parsed
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    pass    # NOTE(review): leftover no-op; the statements below still execute
    # Smoke test: parse target.txt found under the WORKSPACE environment
    # variable and dump a few parsed settings.
    Target = TargetTxtDict(os.getenv("WORKSPACE"))
    print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_MAX_CONCURRENT_THREAD_NUMBER]
    print Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TARGET]
    print Target.TargetTxtDictionary
diff --git a/BaseTools/Source/Python/Common/ToolDefClassObject.py b/BaseTools/Source/Python/Common/ToolDefClassObject.py
new file mode 100644
index 0000000000..5a9a3096bb
--- /dev/null
+++ b/BaseTools/Source/Python/Common/ToolDefClassObject.py
@@ -0,0 +1,217 @@
+## @file
+# This file is used to define each component of tools_def.txt file
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import os
+import re
+import EdkLogger
+
+from Dictionary import *
+from BuildToolError import *
+from TargetTxtClassObject import *
+
+##
+# Static variables used for regular expression patterns
+#
gMacroRefPattern = re.compile('(DEF\([^\(\)]+\))')    # DEF(NAME) macro reference
gEnvRefPattern = re.compile('(ENV\([^\(\)]+\))')      # ENV(NAME) environment reference
gMacroDefPattern = re.compile("DEFINE\s+([^\s]+)")    # "DEFINE NAME" statement
gDefaultToolsDefFile = "Conf/tools_def.txt"           # fallback path relative to workspace
+
+## ToolDefClassObject
+#
+# This class defines the content parsed from the tools_def.txt file
+#
+# @param object:   Inherited from object class
+# @param Filename: Input value for full path of tools_def.txt
+#
+# @var ToolsDefTxtDictionary: To store keys and values defined in tools_def.txt
+# @var MacroDictionary: To store keys and values defined in DEFINE statement
+#
class ToolDefClassObject(object):
    ## Constructor
    #
    # @param FileName: Optional full path of tools_def.txt; when supplied
    #                  the file is loaded and parsed immediately.
    #
    def __init__(self, FileName = None):
        # Flattened settings: "TARGET_TOOLCHAIN_ARCH_COMMAND_ATTR" -> value
        self.ToolsDefTxtDictionary = {}
        # Macro expansion table, keyed "DEF(NAME)" / "ENV(NAME)"; seeded
        # with every current environment variable.
        self.MacroDictionary = {}
        for Env in os.environ:
            self.MacroDictionary["ENV(%s)" % Env] = os.environ[Env]

        if FileName != None:
            self.LoadToolDefFile(FileName)

    ## LoadToolDefFile
    #
    # Load tools_def.txt and parse it; results are stored in
    # self.ToolsDefTxtDictionary (flattened key -> value) and
    # self.ToolsDefTxtDatabase (distinct values per key field).
    #
    # @param FileName: Input value for full path of tools_def.txt
    #
    def LoadToolDefFile(self, FileName):
        FileContent = []
        if os.path.isfile(FileName):
            try:
                F = open(FileName,'r')
                FileContent = F.readlines()
            except:
                EdkLogger.error("tools_def.txt parser", FILE_OPEN_FAILURE, ExtraData=FileName)
        else:
            EdkLogger.error("tools_def.txt parser", FILE_NOT_FOUND, ExtraData=FileName)

        # Distinct values seen for each of the first four key fields.
        self.ToolsDefTxtDatabase = {
            TAB_TOD_DEFINES_TARGET : [],
            TAB_TOD_DEFINES_TOOL_CHAIN_TAG : [],
            TAB_TOD_DEFINES_TARGET_ARCH : [],
            TAB_TOD_DEFINES_COMMAND_TYPE : []
        }

        for Index in range(len(FileContent)):
            Line = FileContent[Index].strip()
            if Line == "" or Line[0] == '#':
                continue
            NameValuePair = Line.split("=", 1)
            if len(NameValuePair) != 2:
                EdkLogger.warn("tools_def.txt parser", "Line %d: not correct assignment statement, skipped" % (Index + 1))
                continue

            Name = NameValuePair[0].strip()
            Value = NameValuePair[1].strip()

            if Name == "IDENTIFIER":
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found identifier statement, skipped: %s" % ((Index + 1), Value))
                continue

            # "DEFINE NAME = value" lines only populate the macro table.
            MacroDefinition = gMacroDefPattern.findall(Name)
            if MacroDefinition != []:
                Done, Value = self.ExpandMacros(Value)
                if not Done:
                    # On failure ExpandMacros returns the unresolved
                    # "DEF(...)"/"ENV(...)" reference; [4:-1] extracts NAME.
                    EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                    "Macro or Environment has not been defined",
                                    ExtraData=Value[4:-1], File=FileName, Line=Index+1)

                MacroName = MacroDefinition[0].strip()
                self.MacroDictionary["DEF(%s)" % MacroName] = Value
                EdkLogger.debug(EdkLogger.DEBUG_8, "Line %d: Found macro: %s = %s" % ((Index + 1), MacroName, Value))
                continue

            Done, Value = self.ExpandMacros(Value)
            if not Done:
                EdkLogger.error("tools_def.txt parser", ATTRIBUTE_NOT_AVAILABLE,
                                "Macro or Environment has not been defined",
                                ExtraData=Value[4:-1], File=FileName, Line=Index+1)

            # Key format: TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE;
            # '*' wildcards are allowed in all but the last field.
            List = Name.split('_')
            if len(List) != 5:
                EdkLogger.verbose("Line %d: Not a valid name of definition: %s" % ((Index + 1), Name))
                continue
            elif List[4] == '*':
                EdkLogger.verbose("Line %d: '*' is not allowed in last field: %s" % ((Index + 1), Name))
                continue
            else:
                self.ToolsDefTxtDictionary[Name] = Value
                if List[0] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] += [List[0]]
                if List[1] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] += [List[1]]
                if List[2] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] += [List[2]]
                if List[3] != '*':
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] += [List[3]]
                if List[4] == TAB_TOD_DEFINES_FAMILY and List[2] == '*' and List[3] == '*':
                    if TAB_TOD_DEFINES_FAMILY not in self.ToolsDefTxtDatabase:
                        # First FAMILY entry creates both per-toolchain maps.
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY] = {}
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] = Value
                        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value
                    elif self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY][List[1]] != Value:
                        EdkLogger.verbose("Line %d: No override allowed for the family of a tool chain: %s" % ((Index + 1), Name))
                if List[4] == TAB_TOD_DEFINES_BUILDRULEFAMILY and List[2] == '*' and List[3] == '*':
                    # NOTE(review): if no FAMILY entry has been seen yet,
                    # neither table exists and both the membership test on
                    # FAMILY and the assignment below raise KeyError --
                    # confirm whether that input is possible in practice.
                    if TAB_TOD_DEFINES_BUILDRULEFAMILY not in self.ToolsDefTxtDatabase \
                       or List[1] not in self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_FAMILY]:
                        EdkLogger.verbose("Line %d: The family is not specified, but BuildRuleFamily is specified for the tool chain: %s" % ((Index + 1), Name))
                    self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_BUILDRULEFAMILY][List[1]] = Value

        # De-duplicate and sort the collected field values.
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH]))
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE] = list(set(self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE]))

        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TOOL_CHAIN_TAG].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_TARGET_ARCH].sort()
        self.ToolsDefTxtDatabase[TAB_TOD_DEFINES_COMMAND_TYPE].sort()

        # Expand '*' wildcards: working from the COMMAND field back to the
        # TARGET field, replace each wildcard key with one concrete key per
        # known field value, then drop the wildcard key.  Keys whose field
        # value was never seen are dropped as invalid.
        KeyList = [TAB_TOD_DEFINES_TARGET, TAB_TOD_DEFINES_TOOL_CHAIN_TAG, TAB_TOD_DEFINES_TARGET_ARCH, TAB_TOD_DEFINES_COMMAND_TYPE]
        for Index in range(3,-1,-1):
            # Iterate over a snapshot since the dictionary is mutated.
            for Key in dict(self.ToolsDefTxtDictionary):
                List = Key.split('_')
                if List[Index] == '*':
                    for String in self.ToolsDefTxtDatabase[KeyList[Index]]:
                        List[Index] = String
                        NewKey = '%s_%s_%s_%s_%s' % tuple(List)
                        if NewKey not in self.ToolsDefTxtDictionary:
                            # Explicit settings win over expanded wildcards.
                            self.ToolsDefTxtDictionary[NewKey] = self.ToolsDefTxtDictionary[Key]
                        continue
                    del self.ToolsDefTxtDictionary[Key]
                elif List[Index] not in self.ToolsDefTxtDatabase[KeyList[Index]]:
                    del self.ToolsDefTxtDictionary[Key]

    ## ExpandMacros
    #
    # Replace defined macros with real value
    #
    # @param Value: The string with unreplaced macros
    #
    # @retval Done:  False when a referenced macro/environment is undefined
    # @retval Value: The fully expanded string, or the unresolved reference
    #                when Done is False
    #
    def ExpandMacros(self, Value):
        # Environment references: ENV(NAME)
        EnvReference = gEnvRefPattern.findall(Value)
        for Ref in EnvReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        # Macro references: DEF(NAME)
        MacroReference = gMacroRefPattern.findall(Value)
        for Ref in MacroReference:
            if Ref not in self.MacroDictionary:
                return False, Ref
            Value = Value.replace(Ref, self.MacroDictionary[Ref])

        return True, Value
+
+## ToolDefDict
+#
+# Load tools_def.txt in input workspace dir
+#
+# @param WorkSpace: Workspace dir
+#
+# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
+#
## ToolDefDict
#
# Load tools_def.txt from the input workspace dir.  The path comes from the
# TOOL_CHAIN_CONF setting in target.txt when one is set, otherwise from the
# default Conf/tools_def.txt.
#
# @param WorkSpace: Workspace dir
#
# @retval ToolDef An instance of ToolDefClassObject() with loaded tools_def.txt
#
def ToolDefDict(WorkSpace):
    Target = TargetTxtDict(WorkSpace)
    ToolDef = ToolDefClassObject()
    # TargetTxtClassObject pre-seeds TOOL_CHAIN_CONF to '', so the key is
    # always present; test the value, not just membership, otherwise an
    # unset setting silently replaces the default path with ''.
    ToolsDefFile = gDefaultToolsDefFile
    if DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF in Target.TargetTxtDictionary:
        ConfiguredPath = Target.TargetTxtDictionary[DataType.TAB_TAT_DEFINES_TOOL_CHAIN_CONF]
        if ConfiguredPath:
            ToolsDefFile = ConfiguredPath
    ToolDef.LoadToolDefFile(os.path.normpath(os.path.join(WorkSpace, ToolsDefFile)))
    return ToolDef
+
+##
+#
+# This acts like the main() function for the script, unless it is 'import'ed into another
+# script.
+#
if __name__ == '__main__':
    # Smoke test: load tools_def.txt relative to the WORKSPACE env variable.
    ToolDef = ToolDefDict(os.getenv("WORKSPACE"))
    pass
diff --git a/BaseTools/Source/Python/Common/XmlParser.py b/BaseTools/Source/Python/Common/XmlParser.py
new file mode 100644
index 0000000000..4d60115925
--- /dev/null
+++ b/BaseTools/Source/Python/Common/XmlParser.py
@@ -0,0 +1,1754 @@
+## @file
+# This file is used to parse a xml file of .PKG file
+#
+# Copyright (c) 2008, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+from xml.dom import minidom
+from XmlRoutines import *
+from CommonDataClass.DistributionPackageClass import *
+from CommonDataClass.PackageClass import *
+from CommonDataClass.ModuleClass import *
+from Common.String import GetStringOfList
+
+#
+# Get Help Text
+#
def GetHelpTextList(HelpText):
    # Convert parsed HelpTextXml entries into HelpTextClass objects,
    # carrying over the language tag and the text body.
    Result = []
    for Entry in HelpText:
        Converted = HelpTextClass()
        Converted.Lang = Entry.Lang
        Converted.String = Entry.HelpText
        Result.append(Converted)
    return Result
+
+# HeaderXml
class HeaderXml(object):
    ## Parse/serialize the common header fields (name, GUID, version,
    #  copyright, license, abstract, description) shared by several sections.
    def __init__(self):
        self.Name = ''
        self.BaseName = ''
        self.GUID = ''
        self.Version = ''
        self.Copyright = ''
        self.License = ''
        self.Abstract = ''
        self.Description = ''

    ## Read the header below Item/<Key> and return it as a ModuleHeaderClass.
    def FromXml(self, Item, Key):
        self.Name = XmlElement(Item, '%s/Name' % Key)
        # BaseName and Version are attributes of the Name/GUID elements.
        self.BaseName = XmlAttribute(XmlNode(Item, '%s/Name' % Key), 'BaseName')
        self.GUID = XmlElement(Item, '%s/GUID' % Key)
        self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
        self.Copyright = XmlElement(Item, '%s/Copyright' % Key)
        self.License = XmlElement(Item, '%s/License' % Key)
        self.Abstract = XmlElement(Item, '%s/Abstract' % Key)
        self.Description = XmlElement(Item, '%s/Description' % Key)

        ModuleHeader = ModuleHeaderClass()
        ModuleHeader.Name = self.Name
        ModuleHeader.BaseName = self.BaseName
        ModuleHeader.Guid = self.GUID
        ModuleHeader.Version = self.Version
        ModuleHeader.Copyright = self.Copyright
        ModuleHeader.License = self.License
        ModuleHeader.Abstract = self.Abstract
        ModuleHeader.Description = self.Description

        return ModuleHeader

    ## Build and return a <Key> DOM element from a header object.
    def ToXml(self, Header, Key):
        Element1 = CreateXmlElement('Name', Header.Name, [], [['BaseName', Header.BaseName]])
        Element2 = CreateXmlElement('GUID', Header.Guid, [], [['Version', Header.Version]])
        AttributeList = []
        NodeList = [Element1,
                    Element2,
                    ['Abstract', Header.Abstract],
                    ['Copyright', Header.Copyright],
                    ['License', Header.License],
                    ['Description', Header.Description],
                    ]
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        return "Name = %s BaseName = %s GUID = %s Version = %s Copyright = %s License = %s Abstract = %s Description = %s" \
               % (self.Name, self.BaseName, self.GUID, self.Version, self.Copyright, self.License, self.Abstract, self.Description)
+
+# DistributionPackageHeaderXml
class DistributionPackageHeaderXml(object):
    ## Parse/serialize the distribution package header (common header plus
    #  vendor/date/signature/spec and the ReadOnly/RePackage flags).
    def __init__(self):
        self.Header = HeaderXml()
        self.ReadOnly = False
        self.RePackage = True
        self.Vendor = ''
        self.Date = ''
        self.Signature = ''
        self.XmlSpecification = ''

    ## Read the header and return a DistributionPackageHeaderClass.
    def FromXml(self, Item, Key):
        # NOTE(review): XmlAttribute yields strings, so ReadOnly/RePackage
        # lose their boolean defaults here -- confirm downstream handling.
        self.ReadOnly = XmlAttribute(XmlNode(Item, '%s' % Key), 'ReadOnly')
        self.RePackage = XmlAttribute(XmlNode(Item, '%s' % Key), 'RePackage')
        self.Vendor = XmlElement(Item, '%s/Vendor' % Key)
        self.Date = XmlElement(Item, '%s/Date' % Key)
        self.Signature = XmlElement(Item, '%s/Signature' % Key)
        self.XmlSpecification = XmlElement(Item, '%s/XmlSpecification' % Key)
        self.Header.FromXml(Item, Key)

        DistributionPackageHeader = DistributionPackageHeaderClass()
        DistributionPackageHeader.ReadOnly = self.ReadOnly
        DistributionPackageHeader.RePackage = self.RePackage
        DistributionPackageHeader.Name = self.Header.Name
        DistributionPackageHeader.BaseName = self.Header.BaseName
        DistributionPackageHeader.Guid = self.Header.GUID
        DistributionPackageHeader.Version = self.Header.Version
        DistributionPackageHeader.Vendor = self.Vendor
        DistributionPackageHeader.Date = self.Date
        DistributionPackageHeader.Copyright = self.Header.Copyright
        DistributionPackageHeader.License = self.Header.License
        DistributionPackageHeader.Abstract = self.Header.Abstract
        DistributionPackageHeader.Description = self.Header.Description
        DistributionPackageHeader.Signature = self.Signature
        DistributionPackageHeader.XmlSpecification = self.XmlSpecification

        return DistributionPackageHeader

    ## Build and return a <Key> DOM element from a header object.
    def ToXml(self, DistributionPackageHeader, Key):
        Element1 = CreateXmlElement('Name', DistributionPackageHeader.Name, [], [['BaseName', DistributionPackageHeader.BaseName]])
        Element2 = CreateXmlElement('GUID', DistributionPackageHeader.Guid, [], [['Version', DistributionPackageHeader.Version]])
        AttributeList = [['ReadOnly', str(DistributionPackageHeader.ReadOnly)], ['RePackage', str(DistributionPackageHeader.RePackage)]]
        NodeList = [Element1,
                    Element2,
                    ['Vendor', DistributionPackageHeader.Vendor],
                    ['Date', DistributionPackageHeader.Date],
                    ['Copyright', DistributionPackageHeader.Copyright],
                    ['License', DistributionPackageHeader.License],
                    ['Abstract', DistributionPackageHeader.Abstract],
                    ['Description', DistributionPackageHeader.Description],
                    ['Signature', DistributionPackageHeader.Signature],
                    ['XmlSpecification', DistributionPackageHeader.XmlSpecification],
                    ]
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        return "ReadOnly = %s RePackage = %s Vendor = %s Date = %s Signature = %s XmlSpecification = %s %s" \
               % (self.ReadOnly, self.RePackage, self.Vendor, self.Date, self.Signature, self.XmlSpecification, self.Header)
+
+# PackageHeaderXml
class PackageHeaderXml(object):
    ## Parse/serialize a package header (common header plus PackagePath).
    def __init__(self):
        self.Header = HeaderXml()
        self.PackagePath = ''

    ## Read the package header and return a PackageHeaderClass.
    def FromXml(self, Item, Key):
        self.PackagePath = XmlElement(Item, '%s/PackagePath' % Key)
        self.Header.FromXml(Item, Key)

        PackageHeader = PackageHeaderClass()
        PackageHeader.Name = self.Header.Name
        PackageHeader.BaseName = self.Header.BaseName
        PackageHeader.Guid = self.Header.GUID
        PackageHeader.Version = self.Header.Version
        PackageHeader.Copyright = self.Header.Copyright
        PackageHeader.License = self.Header.License
        PackageHeader.Abstract = self.Header.Abstract
        PackageHeader.Description = self.Header.Description
        # PackagePath maps onto the CombinePath field of the data class.
        PackageHeader.CombinePath = self.PackagePath

        return PackageHeader

    ## Build and return a <Key> DOM element from a header object.
    def ToXml(self, PackageHeader, Key):
        Element1 = CreateXmlElement('Name', PackageHeader.Name, [], [['BaseName', PackageHeader.BaseName]])
        Element2 = CreateXmlElement('GUID', PackageHeader.Guid, [], [['Version', PackageHeader.Version]])
        AttributeList = []
        NodeList = [Element1,
                    Element2,
                    ['Copyright', PackageHeader.Copyright],
                    ['License', PackageHeader.License],
                    ['Abstract', PackageHeader.Abstract],
                    ['Description', PackageHeader.Description],
                    ['PackagePath', PackageHeader.CombinePath],
                    ]
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        return "PackagePath = %s %s" \
               % (self.PackagePath, self.Header)
+
+# ClonedFromXml
class ClonedFromXml(object):
    ## Parse/serialize a ClonedFrom record (source package GUID + version).
    def __init__(self):
        self.GUID = ''
        self.Version = ''

    ## Read GUID/Version below Item/<Key>; return a ClonedRecordClass, or
    #  None when neither field is present.
    def FromXml(self, Item, Key):
        self.GUID = XmlElement(Item, '%s/GUID' % Key)
        self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')

        if self.GUID == '' and self.Version == '':
            return None

        ClonedFrom = ClonedRecordClass()
        ClonedFrom.PackageGuid = self.GUID
        ClonedFrom.PackageVersion = self.Version

        return ClonedFrom

    ## Build and return a <Key> DOM element from a ClonedRecord object.
    def ToXml(self, ClonedFrom, Key):
        # Dead "Root = minidom.Document()" removed: the value was discarded
        # by the CreateXmlElement assignment below.
        Element1 = CreateXmlElement('GUID', ClonedFrom.PackageGuid, [], [['Version', ClonedFrom.PackageVersion]])
        AttributeList = []
        NodeList = [Element1]
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        return "GUID = %s Version = %s" % (self.GUID, self.Version)
+
+# CommonDefinesXml
class CommonDefinesXml(object):
    ## Shared Usage/SupArchList/SupModList/FeatureFlag attribute bundle
    #  carried by many element types.
    def __init__(self):
        self.Usage = ''
        self.SupArchList = ''
        self.SupModList = ''
        self.FeatureFlag = ''

    def FromXml(self, Item, Key):
        # All four values are plain XML attributes of the given node.
        for Attr in ('Usage', 'SupArchList', 'SupModList', 'FeatureFlag'):
            setattr(self, Attr, XmlAttribute(Item, Attr))

    def ToXml(self):
        # Serialization is handled by the owning element types.
        pass

    def __str__(self):
        return "Usage = %s SupArchList = %s SupModList = %s FeatureFlag = %s" % (self.Usage, self.SupArchList, self.SupModList, self.FeatureFlag)
+
+# HelpTextXml
class HelpTextXml(object):
    ## One localized HelpText entry: text body plus its Lang attribute.
    def __init__(self):
        self.HelpText = ''
        self.Lang = ''

    def FromXml(self, Item, Key):
        # NOTE(review): reads the fixed path 'HelpText' and ignores Key --
        # callers pass the HelpText node itself as Item, so this appears
        # intentional; confirm against XmlRoutines.XmlElement semantics.
        self.HelpText = XmlElement(Item, 'HelpText')
        self.Lang = XmlAttribute(Item, 'Lang')

    ## Build a <Key> element from a HelpText data object (String/Lang).
    def ToXml(self, HelpText, Key = 'HelpText'):
        return CreateXmlElement('%s' % Key, HelpText.String, [], [['Lang', HelpText.Lang]])

    def __str__(self):
        return "HelpText = %s Lang = %s" % (self.HelpText, self.Lang)
+
+# LibraryClassXml
class LibraryClassXml(object):
    ## Parse/serialize one LibraryClass declaration.
    def __init__(self):
        self.Keyword = ''
        self.HeaderFile = ''
        self.RecommendedInstanceGuid = ''
        self.RecommendedInstanceVersion = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    ## Read a library class declaration and return a LibraryClassClass.
    def FromXml(self, Item, Key):
        self.Keyword = XmlAttribute(XmlNode(Item, '%s' % Key), 'Keyword')
        if self.Keyword == '':
            # Fall back to a child <Keyword> element when the attribute
            # form is not used.
            self.Keyword = XmlElement(Item, '%s/Keyword' % Key)
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        self.RecommendedInstanceGuid = XmlElement(Item, '%s/RecommendedInstance/GUID' % Key)
        self.RecommendedInstanceVersion = XmlAttribute(XmlNode(Item, '%s/RecommendedInstance/GUID' % Key), 'Version')
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        LibraryClass = LibraryClassClass()
        LibraryClass.LibraryClass = self.Keyword
        LibraryClass.IncludeHeader = self.HeaderFile
        LibraryClass.SupArchList = self.CommonDefines.SupArchList
        LibraryClass.SupModuleList = self.CommonDefines.SupModList
        LibraryClass.RecommendedInstanceGuid = self.RecommendedInstanceGuid
        LibraryClass.RecommendedInstanceVersion = self.RecommendedInstanceVersion
        LibraryClass.HelpTextList = GetHelpTextList(self.HelpText)

        return LibraryClass

    ## Build and return a <Key> DOM element from a LibraryClass object.
    def ToXml(self, LibraryClass, Key):
        Element1 = CreateXmlElement('GUID', LibraryClass.RecommendedInstanceGuid, [], [['Version', LibraryClass.RecommendedInstanceVersion]])
        Element2 = CreateXmlElement('RecommendedInstance', '', [Element1], [])
        AttributeList = [['Keyword', LibraryClass.LibraryClass],
                         ['SupArchList', GetStringOfList(LibraryClass.SupArchList)],
                         ['SupModList', GetStringOfList(LibraryClass.SupModuleList)]
                         ]
        NodeList = [['HeaderFile', LibraryClass.IncludeHeader],
                    Element2
                    ]
        for Item in LibraryClass.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item))
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "Keyword = %s HeaderFile = %s RecommendedInstanceGuid = %s RecommendedInstanceVersion = %s %s" \
              % (self.Keyword, self.HeaderFile, self.RecommendedInstanceGuid, self.RecommendedInstanceVersion, \
                 self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        return Str
+
+# IndustryStandardHeaderXml
class IndustryStandardHeaderXml(object):
    ## Parse/serialize one industry-standard include header entry.
    def __init__(self):
        self.HeaderFile = ''
        self.HelpText = []

    ## Read the header entry and return an IncludeClass.
    def FromXml(self, Item, Key):
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Include = IncludeClass()
        Include.FilePath = self.HeaderFile
        Include.HelpTextList = GetHelpTextList(self.HelpText)

        return Include

    ## Build and return a <Key> DOM element from an Include object.
    def ToXml(self, IndustryStandardHeader, Key):
        AttributeList = []
        NodeList = [['HeaderFile', IndustryStandardHeader.FilePath]]
        for Item in IndustryStandardHeader.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item))
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "HeaderFile = %s" % (self.HeaderFile)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        return Str
+
+# PackageIncludeHeaderXml
class PackageIncludeHeaderXml(object):
    ## Parse/serialize one package include header entry (file path plus the
    #  shared arch/module-type qualifiers).
    def __init__(self):
        self.HeaderFile = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    ## Read the include entry and return an IncludeClass.
    def FromXml(self, Item, Key):
        self.HeaderFile = XmlElement(Item, '%s/HeaderFile' % Key)
        # Qualifier attributes live on the HeaderFile element itself.
        self.CommonDefines.FromXml(XmlNode(Item, '%s/HeaderFile' % Key), Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Include = IncludeClass()
        Include.FilePath = self.HeaderFile
        Include.SupArchList = self.CommonDefines.SupArchList
        Include.SupModuleList = self.CommonDefines.SupModList
        Include.HelpTextList = GetHelpTextList(self.HelpText)

        return Include

    ## Build and return a <Key> DOM element from an Include object.
    def ToXml(self, PackageIncludeHeader, Key):
        AttributeList = [['SupArchList', PackageIncludeHeader.SupArchList],
                         ['SupModList', PackageIncludeHeader.SupModuleList]
                         ]
        NodeList = [['HeaderFile', PackageIncludeHeader.FilePath]]
        for Item in PackageIncludeHeader.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item))
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "HeaderFile = %s\n\t%s" % (self.HeaderFile, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        return Str
+
+#GUID/Protocol/Ppi
class GuidProtocolPpiXml(object):
    ## Parse/serialize one GUID/Protocol/Ppi declaration.
    def __init__(self):
        self.UiName = ''
        self.GuidTypes = ''
        # GuidType and VariableName were previously set only in FromXml,
        # leaving fresh instances without the attributes; initialize them
        # here for consistency with every other field.
        self.GuidType = ''
        self.Notify = ''
        self.CName = ''
        self.GuidValue = ''
        self.VariableName = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    ## Read the declaration and return a GuidProtocolPpiCommonClass.
    def FromXml(self, Item, Key):
        self.UiName = XmlAttribute(XmlNode(Item, '%s' % Key), 'UiName')
        self.GuidTypes = XmlAttribute(XmlNode(Item, '%s' % Key), 'GuidTypes')
        self.GuidType = XmlAttribute(XmlNode(Item, '%s' % Key), 'GuidType')
        self.Notify = XmlAttribute(XmlNode(Item, '%s' % Key), 'Notify')
        self.CName = XmlElement(Item, '%s/CName' % Key)
        self.GuidValue = XmlElement(Item, '%s/GuidValue' % Key)
        self.VariableName = XmlElement(Item, '%s/VariableName' % Key)
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        GuidProtocolPpi = GuidProtocolPpiCommonClass()
        GuidProtocolPpi.Name = self.UiName
        GuidProtocolPpi.CName = self.CName
        GuidProtocolPpi.Guid = self.GuidValue
        GuidProtocolPpi.VariableName = self.VariableName
        GuidProtocolPpi.Notify = self.Notify
        GuidProtocolPpi.Usage = self.CommonDefines.Usage
        GuidProtocolPpi.FeatureFlag = self.CommonDefines.FeatureFlag
        GuidProtocolPpi.SupArchList = self.CommonDefines.SupArchList
        GuidProtocolPpi.SupModuleList = self.CommonDefines.SupModList
        GuidProtocolPpi.GuidTypeLists = self.GuidTypes
        GuidProtocolPpi.GuidTypeList = self.GuidType
        GuidProtocolPpi.HelpTextList = GetHelpTextList(self.HelpText)

        return GuidProtocolPpi

    ## Build and return a <Key> DOM element from a declaration object.
    def ToXml(self, GuidProtocolPpi, Key):
        AttributeList = [['Usage', GetStringOfList(GuidProtocolPpi.Usage)],
                         ['UiName', GuidProtocolPpi.Name],
                         ['GuidTypes', GetStringOfList(GuidProtocolPpi.GuidTypeLists)],
                         ['GuidType', GetStringOfList(GuidProtocolPpi.GuidTypeList)],
                         ['Notify', str(GuidProtocolPpi.Notify)],
                         ['SupArchList', GetStringOfList(GuidProtocolPpi.SupArchList)],
                         ['SupModList', GetStringOfList(GuidProtocolPpi.SupModuleList)],
                         ['FeatureFlag', GuidProtocolPpi.FeatureFlag]
                         ]
        NodeList = [['CName', GuidProtocolPpi.CName],
                    ['GuidValue', GuidProtocolPpi.Guid],
                    ['VariableName', GuidProtocolPpi.VariableName]
                    ]
        for Item in GuidProtocolPpi.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item))
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "UiName = %s Notify = %s GuidTypes = %s CName = %s GuidValue = %s %s" \
              % (self.UiName, self.Notify, self.GuidTypes, self.CName, self.GuidValue, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        return Str
+
+# PcdErrorXml
class PcdErrorXml(object):
    ## Parse/serialize one PcdError record (valid-value constraints,
    #  expression, error number and localized messages).
    def __init__(self):
        self.ValidValueList = ''
        self.ValidValueListLang = ''
        self.ValidValueRange = ''
        self.Expression = ''
        self.ErrorNumber = ''
        # List of (lang, message) tuples.
        self.ErrorMessage = []

    ## Read the error record and return a PcdErrorClass.
    def FromXml(self, Item, Key):
        self.ValidValueList = XmlElement(Item, '%s/ValidValueList' % Key)
        self.ValidValueListLang = XmlAttribute(XmlNode(Item, '%s/ValidValueList' % Key), 'Lang')
        self.ValidValueRange = XmlElement(Item, '%s/ValidValueRange' % Key)
        self.Expression = XmlElement(Item, '%s/Expression' % Key)
        self.ErrorNumber = XmlElement(Item, '%s/ErrorNumber' % Key)
        for ErrMsg in XmlList(Item, '%s/ErrorMessage' % Key):
            ErrorMessageString = XmlElement(ErrMsg, 'ErrorMessage')
            ErrorMessageLang = XmlAttribute(XmlNode(ErrMsg, 'ErrorMessage'), 'Lang')
            self.ErrorMessage.append((ErrorMessageLang, ErrorMessageString))

        Error = PcdErrorClass()
        Error.ValidValueList = self.ValidValueList
        Error.ValidValueListLang = self.ValidValueListLang
        Error.ValidValueRange = self.ValidValueRange
        Error.Expression = self.Expression
        Error.ErrorNumber = self.ErrorNumber
        Error.ErrorMessage = self.ErrorMessage

        return Error

    ## Build and return a <Key> DOM element from a PcdError object.
    def ToXml(self, PcdError, Key):
        AttributeList = []
        Element1 = CreateXmlElement('ValidValueList', PcdError.ValidValueList, [], [['Lang', PcdError.ValidValueListLang]])
        NodeList = [Element1,
                    ['ValidValueRange', PcdError.ValidValueRange],
                    ['Expression', PcdError.Expression],
                    ['ErrorNumber', PcdError.ErrorNumber],
                    ]
        for Item in PcdError.ErrorMessage:
            # Item is a (lang, message) tuple.
            Element = CreateXmlElement('ErrorMessage', Item[1], [], [['Lang', Item[0]]])
            NodeList.append(Element)
        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        return "ValidValueList = %s ValidValueListLang = %s ValidValueRange = %s Expression = %s ErrorNumber = %s %s" \
               % (self.ValidValueList, self.ValidValueListLang, self.ValidValueRange, self.Expression, self.ErrorNumber, self.ErrorMessage)
+
# PcdEntryXml
class PcdEntryXml(object):
    """XML adapter for a PcdEntry node: parses/emits a PCD's identification,
    type, value and size fields plus nested HelpText and PcdError children.
    """
    def __init__(self):
        # All scalar fields default to empty strings; the two lists collect
        # parsed HelpTextXml / PcdErrorXml child objects.
        self.PcdItemType = ''
        self.PcdUsage = ''
        self.TokenSpaceGuidCName = ''
        self.TokenSpaceGuidValue = ''
        self.Token = ''
        self.CName = ''
        self.PcdCName = ''
        self.DatumType = ''
        self.ValidUsage = ''
        self.DefaultValue = ''
        self.MaxDatumSize = ''
        self.Value = ''
        self.Offset = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []
        self.PcdError = []

    def FromXml(self, Item, Key):
        """Parse the PCD entry node Item (located by Key) and return an
        equivalent PcdClass object.
        """
        # PcdItemType/PcdUsage live as attributes on the entry node itself.
        self.PcdItemType = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdItemType')
        self.PcdUsage = XmlAttribute(XmlNode(Item, '%s' % Key), 'PcdUsage')
        self.TokenSpaceGuidCName = XmlElement(Item, '%s/TokenSpaceGuidCName' % Key)
        self.TokenSpaceGuidValue = XmlElement(Item, '%s/TokenSpaceGuidValue' % Key)
        self.Token = XmlElement(Item, '%s/Token' % Key)
        self.CName = XmlElement(Item, '%s/CName' % Key)
        self.PcdCName = XmlElement(Item, '%s/PcdCName' % Key)
        self.DatumType = XmlElement(Item, '%s/DatumType' % Key)
        self.ValidUsage = XmlElement(Item, '%s/ValidUsage' % Key)
        self.DefaultValue = XmlElement(Item, '%s/DefaultValue' % Key)
        self.MaxDatumSize = XmlElement(Item, '%s/MaxDatumSize' % Key)
        self.Value = XmlElement(Item, '%s/Value' % Key)
        self.Offset = XmlElement(Item, '%s/Offset' % Key)
        # Common defines (arch/module lists, usage, feature flag) are read
        # from attributes of the entry node.
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)
        for PcdErrorItem in XmlList(Item, '%s/PcdError' % Key):
            PcdErrorObj = PcdErrorXml()
            PcdErrorObj.FromXml(PcdErrorItem, 'PcdError')
            self.PcdError.append(PcdErrorObj)

        # Copy everything into the data-model object consumed by the tools.
        PcdEntry = PcdClass()
        PcdEntry.SupArchList = self.CommonDefines.SupArchList
        PcdEntry.SupModuleList = self.CommonDefines.SupModList
        PcdEntry.TokenSpaceGuidCName = self.TokenSpaceGuidCName
        PcdEntry.TokenSpaceGuidValue = self.TokenSpaceGuidValue
        PcdEntry.Token = self.Token
        PcdEntry.CName = self.CName
        PcdEntry.PcdCName = self.PcdCName
        PcdEntry.DatumType = self.DatumType
        PcdEntry.ValidUsage = self.ValidUsage
        PcdEntry.PcdUsage = self.PcdUsage
        PcdEntry.Usage = self.CommonDefines.Usage
        PcdEntry.DefaultValue = self.DefaultValue
        PcdEntry.Value = self.Value
        PcdEntry.Offset = self.Offset
        PcdEntry.MaxDatumSize = self.MaxDatumSize
        PcdEntry.FeatureFlag = self.CommonDefines.FeatureFlag
        PcdEntry.PcdItemType = self.PcdItemType
        PcdEntry.HelpTextList = GetHelpTextList(self.HelpText)
        PcdEntry.PcdErrors = self.PcdError

        return PcdEntry

    def ToXml(self, PcdEntry, Key):
        """Serialize a PcdClass-like object into an XML element named Key."""
        AttributeList = [['SupArchList', GetStringOfList(PcdEntry.SupArchList)],
                         ['PcdUsage', PcdEntry.PcdUsage],
                         ['PcdItemType', PcdEntry.PcdItemType],
                         ['FeatureFlag', PcdEntry.FeatureFlag],
                         ['SupModList', GetStringOfList(PcdEntry.SupModuleList)]
                        ]
        NodeList = [['TokenSpaceGuidCName', PcdEntry.TokenSpaceGuidCName],
                    ['TokenSpaceGuidValue', PcdEntry.TokenSpaceGuidValue],
                    ['Token', PcdEntry.Token],
                    ['CName', PcdEntry.CName],
                    ['PcdCName', PcdEntry.PcdCName],
                    ['DatumType', PcdEntry.DatumType],
                    ['ValidUsage', GetStringOfList(PcdEntry.ValidUsage)],
                    ['DefaultValue', PcdEntry.DefaultValue],
                    ['Value', PcdEntry.Value],
                    ['Offset', PcdEntry.Offset],
                    ['MaxDatumSize', PcdEntry.MaxDatumSize],
                   ]
        for Item in PcdEntry.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item, 'HelpText'))
        for Item in PcdEntry.PcdErrors:
            Tmp = PcdErrorXml()
            NodeList.append(Tmp.ToXml(Item, 'PcdError'))

        Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

        return Root

    def __str__(self):
        """Debug dump of all parsed fields plus help texts and PCD errors."""
        Str = "PcdItemType = %s PcdUsage = %s TokenSpaceGuidCName = %s TokenSpaceGuidValue = %s Token = %s CName = %s PcdCName = %s DatumType = %s ValidUsage = %s DefaultValue = %s MaxDatumSize = %s Value = %s Offset = %s %s" \
              % (self.PcdItemType, self.PcdUsage, self.TokenSpaceGuidCName, self.TokenSpaceGuidValue, self.Token, self.CName, self.PcdCName, self.DatumType, self.ValidUsage, self.DefaultValue, self.MaxDatumSize, self.Value, self.Offset, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + "\n\t" + str(Item)
        for Item in self.PcdError:
            Str = Str + "\n\tPcdError:" + str(Item)
        return Str
+
# PcdCheckXml
class PcdCheckXml(object):
    """XML adapter for a single PcdCheck (PCD relationship check) expression."""

    def __init__(self):
        self.PcdCheck = ''

    def FromXml(self, Item, Key):
        """Read the PcdCheck text from node Item and return it."""
        self.PcdCheck = XmlElement(Item, 'PcdCheck')
        return self.PcdCheck

    def ToXml(self, PcdCheck, Key):
        """Wrap the PcdCheck text in an element named Key."""
        return CreateXmlElement('%s' % Key, PcdCheck, [], [])

    def __str__(self):
        return "PcdCheck = %s" % (self.PcdCheck)
+
# MiscellaneousFileXml
class MiscellaneousFileXml(object):
    """XML adapter for a MiscellaneousFiles section: header info plus a list
    of [Filename, Executable] entries.

    FromXml/ToXml handle the package-level form (no Name); FromXml2/ToXml2
    handle the distribution-level form with a nested <Header> and a Name.
    """

    def __init__(self):
        self.Header = HeaderXml()
        self.Files = []

    def _ParseFiles(self, Item, Key):
        # Gather [Filename, Executable] pairs from the <Filename> children.
        for SubItem in XmlList(Item, '%s/Filename' % Key):
            Filename = XmlElement(SubItem, '%s/Filename' % Key)
            Executable = XmlAttribute(XmlNode(SubItem, '%s/Filename' % Key), 'Executable')
            self.Files.append([Filename, Executable])

    def _BuildMiscFile(self):
        # Copy parsed header fields and files into a new MiscFileClass.
        MiscFile = MiscFileClass()
        MiscFile.Copyright = self.Header.Copyright
        MiscFile.License = self.Header.License
        MiscFile.Abstract = self.Header.Abstract
        MiscFile.Description = self.Header.Description
        for File in self.Files:
            FileObj = FileClass()
            FileObj.Filename = File[0]
            FileObj.Executable = File[1]
            MiscFile.Files.append(FileObj)
        return MiscFile

    def FromXml(self, Item, Key):
        """Parse a MiscellaneousFiles node and return a MiscFileClass (no Name)."""
        # NOTE(review): this first call parses the section node itself as a
        # header before the real <Header> child is parsed below; FromXml2
        # omits it.  Kept as-is for behavioral compatibility -- confirm
        # whether it is actually needed.
        self.Header.FromXml(Item, Key)
        NewItem = XmlNode(Item, '%s/Header' % Key)
        self.Header.FromXml(NewItem, 'Header')
        self._ParseFiles(Item, Key)
        return self._BuildMiscFile()

    def FromXml2(self, Item, Key):
        """Parse a MiscellaneousFiles node and return a MiscFileClass, including Name."""
        NewItem = XmlNode(Item, '%s/Header' % Key)
        self.Header.FromXml(NewItem, 'Header')
        self._ParseFiles(Item, Key)
        MiscFile = self._BuildMiscFile()
        MiscFile.Name = self.Header.Name
        return MiscFile

    def ToXml(self, MiscFile, Key):
        """Serialize MiscFile into an element named Key.

        Returns None when MiscFile is empty/None.  (Fix: the original fell
        through and raised NameError on the unbound local 'Root' in that case.)
        """
        if not MiscFile:
            return None
        NodeList = [['Copyright', MiscFile.Copyright],
                    ['License', MiscFile.License],
                    ['Abstract', MiscFile.Abstract],
                    ['Description', MiscFile.Description],
                   ]
        for File in MiscFile.Files:
            NodeList.append(CreateXmlElement('Filename', File.Filename, [], [['Executable', File.Executable]]))
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def ToXml2(self, MiscFile, Key):
        """Serialize MiscFile (with a nested <Header> carrying Name) into an
        element named Key; returns None when MiscFile is empty/None (same fix
        as ToXml).
        """
        if not MiscFile:
            return None
        HeaderChildren = [['Name', MiscFile.Name],
                          ['Copyright', MiscFile.Copyright],
                          ['License', MiscFile.License],
                          ['Abstract', MiscFile.Abstract],
                          ['Description', MiscFile.Description],
                         ]
        NodeList = [CreateXmlElement('Header', '', HeaderChildren, [])]
        for File in MiscFile.Files:
            NodeList.append(CreateXmlElement('Filename', File.Filename, [], [['Executable', File.Executable]]))
        return CreateXmlElement('%s' % Key, '', NodeList, [])

    def __str__(self):
        Str = str(self.Header)
        for Item in self.Files:
            Str = Str + '\n\tFilename:' + str(Item)
        return Str
+
# UserExtensionsXml
class UserExtensionsXml(object):
    """XML adapter for a UserExtensions section: UserId/Identifier attributes
    plus Define and BuildOption child elements."""

    def __init__(self):
        self.UserId = ''
        self.Identifier = ''
        self.Defines = []
        self.BuildOptions = []

    def FromXml(self, Item, Key):
        """Parse a UserExtensions node and return a UserExtensionsClass object."""
        Node = XmlNode(Item, '%s' % Key)
        self.UserId = XmlAttribute(Node, 'UserId')
        self.Identifier = XmlAttribute(Node, 'Identifier')
        self.Defines.extend(XmlElement(SubItem, '%s/Define' % Key)
                            for SubItem in XmlList(Item, '%s/Define' % Key))
        self.BuildOptions.extend(XmlElement(SubItem, '%s/BuildOption' % Key)
                                 for SubItem in XmlList(Item, '%s/BuildOption' % Key))

        UserExtension = UserExtensionsClass()
        UserExtension.UserID = self.UserId
        UserExtension.Identifier = self.Identifier
        UserExtension.Defines = self.Defines
        UserExtension.BuildOptions = self.BuildOptions
        return UserExtension

    def ToXml(self, UserExtension, Key):
        """Serialize UserExtension into an element named Key (Content as text)."""
        Attributes = [['UserId', str(UserExtension.UserID)],
                      ['Identifier', str(UserExtension.Identifier)]
                     ]
        Children = [['Define', Item] for Item in UserExtension.Defines]
        Children += [['BuildOption', Item] for Item in UserExtension.BuildOptions]
        return CreateXmlElement('%s' % Key, UserExtension.Content, Children, Attributes)

    def __str__(self):
        return ("UserId = %s Identifier = %s" % (self.UserId, self.Identifier)
                + '\n\tDefines:' + str(self.Defines)
                + '\n\tBuildOptions:' + str(self.BuildOptions))
+
# BootModeXml
class BootModeXml(object):
    """XML adapter for a module BootMode entry."""

    def __init__(self):
        self.SupportedBootModes = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse a BootMode node and return a ModuleBootModeClass object."""
        self.SupportedBootModes = XmlElement(Item, '%s/SupportedBootModes' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for HelpNode in XmlList(Item, '%s/HelpText' % Key):
            Help = HelpTextXml()
            Help.FromXml(HelpNode, '%s/HelpText' % Key)
            self.HelpText.append(Help)

        BootMode = ModuleBootModeClass()
        BootMode.Name = self.SupportedBootModes
        BootMode.SupArchList = self.CommonDefines.SupArchList
        BootMode.Usage = self.CommonDefines.Usage
        BootMode.FeatureFlag = self.CommonDefines.FeatureFlag
        BootMode.HelpTextList = GetHelpTextList(self.HelpText)
        return BootMode

    def ToXml(self, BootMode, Key):
        """Serialize BootMode into an element named Key."""
        Attributes = [['Usage', BootMode.Usage],
                      ['SupArchList', GetStringOfList(BootMode.SupArchList)],
                      ['FeatureFlag', BootMode.FeatureFlag],
                     ]
        Children = [['SupportedBootModes', BootMode.Name]]
        Children.extend(HelpTextXml().ToXml(Item, 'HelpText') for Item in BootMode.HelpTextList)
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        Parts = ["SupportedBootModes = %s %s" % (self.SupportedBootModes, self.CommonDefines)]
        Parts.extend(str(Item) for Item in self.HelpText)
        return '\n\t'.join(Parts)
+
# EventXml
class EventXml(object):
    """XML adapter for a module Event entry (EventType attribute, guid C name as text)."""

    def __init__(self):
        self.EventType = ''
        self.Name = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse an Event node and return a ModuleEventClass object."""
        self.EventType = XmlAttribute(XmlNode(Item, '%s' % Key), 'EventType')
        self.Name = XmlElement(Item, '%s' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for HelpNode in XmlList(Item, '%s/HelpText' % Key):
            Help = HelpTextXml()
            Help.FromXml(HelpNode, '%s/HelpText' % Key)
            self.HelpText.append(Help)

        Event = ModuleEventClass()
        Event.Type = self.EventType
        Event.GuidCName = self.Name
        Event.SupArchList = self.CommonDefines.SupArchList
        Event.Usage = self.CommonDefines.Usage
        Event.FeatureFlag = self.CommonDefines.FeatureFlag
        Event.HelpTextList = GetHelpTextList(self.HelpText)
        return Event

    def ToXml(self, Event, Key):
        """Serialize Event into an element named Key (guid C name as text)."""
        Attributes = [['EventType', Event.Type],
                      ['Usage', Event.Usage],
                      ['SupArchList', GetStringOfList(Event.SupArchList)],
                      ['FeatureFlag', Event.FeatureFlag],
                     ]
        Children = [HelpTextXml().ToXml(Item, 'HelpText') for Item in Event.HelpTextList]
        return CreateXmlElement('%s' % Key, Event.GuidCName, Children, Attributes)

    def __str__(self):
        Parts = ["EventType = %s %s" % (self.EventType, self.CommonDefines)]
        Parts.extend(str(Item) for Item in self.HelpText)
        return '\n\t'.join(Parts)
+
# HobXml
class HobXml(object):
    """XML adapter for a module HOB entry (HobType attribute, guid C name as text)."""

    def __init__(self):
        self.HobType = ''
        self.Name = ''
        self.CommonDefines = CommonDefinesXml()
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse a HOB node and return a ModuleHobClass object."""
        self.HobType = XmlAttribute(XmlNode(Item, '%s' % Key), 'HobType')
        self.Name = XmlElement(Item, '%s' % Key)
        self.CommonDefines.FromXml(Item, Key)
        for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
            HelpTextObj = HelpTextXml()
            HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
            self.HelpText.append(HelpTextObj)

        Hob = ModuleHobClass()
        Hob.Type = self.HobType
        Hob.GuidCName = self.Name
        Hob.SupArchList = self.CommonDefines.SupArchList
        Hob.Usage = self.CommonDefines.Usage
        Hob.FeatureFlag = self.CommonDefines.FeatureFlag
        Hob.HelpTextList = GetHelpTextList(self.HelpText)

        return Hob

    def ToXml(self, Hob, Key):
        """Serialize Hob into an element named Key (guid C name as text)."""
        # Fix: the attribute was written as 'EventType' (copy/paste from
        # EventXml), but FromXml reads 'HobType' -- so the hob type never
        # survived a serialize/parse round trip.  Write 'HobType' instead.
        AttributeList = [['HobType', Hob.Type],
                         ['Usage', Hob.Usage],
                         ['SupArchList', GetStringOfList(Hob.SupArchList)],
                         ['FeatureFlag', Hob.FeatureFlag],
                        ]
        NodeList = []
        for Item in Hob.HelpTextList:
            Tmp = HelpTextXml()
            NodeList.append(Tmp.ToXml(Item, 'HelpText'))
        Root = CreateXmlElement('%s' % Key, Hob.GuidCName, NodeList, AttributeList)

        return Root

    def __str__(self):
        Str = "HobType = %s %s" % (self.HobType, self.CommonDefines)
        for Item in self.HelpText:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# ModulePropertyXml
class ModulePropertyXml(object):
    """XML adapter for a ModuleProperties section, including the nested
    BootMode, Event and HOB entries."""

    def __init__(self):
        self.CommonDefines = CommonDefinesXml()
        self.ModuleType = ''
        self.Path = ''
        self.PcdIsDriver = ''
        self.UefiSpecificationVersion = ''
        self.PiSpecificationVersion = ''
        self.Specification = ''
        self.SpecificationVersion = ''
        self.BootModes = []
        self.Events = []
        self.HOBs = []

    def FromXml(self, Item, Key, Header = None):
        """Parse a ModuleProperties node.

        Fills the given module Header (a new ModuleHeaderClass when None) and
        returns the tuple (Header, BootModes, Events, HOBs).
        """
        self.CommonDefines.FromXml(Item, Key)
        self.ModuleType = XmlElement(Item, '%s/ModuleType' % Key)
        self.Path = XmlElement(Item, '%s/Path' % Key)
        self.PcdIsDriver = XmlElement(Item, '%s/PcdIsDriver' % Key)
        self.UefiSpecificationVersion = XmlElement(Item, '%s/UefiSpecificationVersion' % Key)
        self.PiSpecificationVersion = XmlElement(Item, '%s/PiSpecificationVersion' % Key)
        self.Specification = XmlElement(Item, '%s/Specification' % Key)
        self.SpecificationVersion = XmlAttribute(XmlNode(Item, '%s/Specification' % Key), 'Version')
        for SubItem in XmlList(Item, '%s/BootMode' % Key):
            self.BootModes.append(BootModeXml().FromXml(SubItem, 'BootMode'))
        for SubItem in XmlList(Item, '%s/Event' % Key):
            self.Events.append(EventXml().FromXml(SubItem, 'Event'))
        for SubItem in XmlList(Item, '%s/HOB' % Key):
            self.HOBs.append(HobXml().FromXml(SubItem, 'HOB'))

        if Header is None:
            Header = ModuleHeaderClass()

        Header.ModuleType = self.ModuleType
        Header.SupArchList = self.CommonDefines.SupArchList
        Header.SupModuleList = self.CommonDefines.SupModList
        Header.CombinePath = self.Path
        Header.PcdIsDriver = self.PcdIsDriver
        Header.UefiSpecificationVersion = self.UefiSpecificationVersion
        Header.PiSpecificationVersion = self.PiSpecificationVersion

        return Header, self.BootModes, self.Events, self.HOBs

    def ToXml(self, Header, BootModes, Events, Hobs, Key):
        """Serialize module properties into an element named Key.

        NOTE(review): Specification/SpecificationVersion are parsed by FromXml
        but never emitted here -- confirm whether that is intentional.
        """
        AttributeList = [['SupArchList', GetStringOfList(Header.SupArchList)],
                         ['SupModList', GetStringOfList(Header.SupModuleList)],
                        ]
        NodeList = [['ModuleType', Header.ModuleType],
                    ['Path', Header.CombinePath],
                    ['PcdIsDriver', Header.PcdIsDriver],
                    ['UefiSpecificationVersion', Header.UefiSpecificationVersion],
                    ['PiSpecificationVersion', Header.PiSpecificationVersion],
                   ]
        for Item in BootModes:
            NodeList.append(BootModeXml().ToXml(Item, 'BootMode'))
        for Item in Events:
            NodeList.append(EventXml().ToXml(Item, 'Event'))
        for Item in Hobs:
            # Fix: HOB children were emitted under the key 'Hob', but FromXml
            # looks them up as '%s/HOB' -- serialized HOBs were silently lost
            # on re-parse.  Emit 'HOB' so the round trip is symmetric.
            NodeList.append(HobXml().ToXml(Item, 'HOB'))
        return CreateXmlElement('%s' % Key, '', NodeList, AttributeList)

    def __str__(self):
        Str = "ModuleType = %s Path = %s PcdIsDriver = %s UefiSpecificationVersion = %s PiSpecificationVersion = %s Specification = %s SpecificationVersion = %s %s" \
              % (self.ModuleType, self.Path, self.PcdIsDriver, self.UefiSpecificationVersion, self.PiSpecificationVersion, \
                 self.Specification, self.SpecificationVersion, self.CommonDefines)
        for Item in self.BootModes:
            Str = Str + '\n\t' + str(Item)
        for Item in self.Events:
            Str = Str + '\n\t' + str(Item)
        for Item in self.HOBs:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# SourceFileXml
class SourceFileXml(object):
    """XML adapter for a module source file entry."""

    def __init__(self):
        self.SourceFile = ''
        self.ToolChainFamily = ''
        self.FileType = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse a source Filename node and return a ModuleSourceFileClass object."""
        self.ToolChainFamily = XmlAttribute(Item, 'Family')
        self.FileType = XmlAttribute(Item, 'FileType')
        self.SourceFile = XmlElement(Item, 'Filename')
        self.CommonDefines.FromXml(Item, Key)

        Result = ModuleSourceFileClass()
        Result.SourceFile = self.SourceFile
        Result.FileType = self.FileType
        Result.ToolChainFamily = self.ToolChainFamily
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.FeatureFlag = self.CommonDefines.FeatureFlag
        return Result

    def ToXml(self, SourceFile, Key):
        """Serialize SourceFile into an element named Key (file path as text)."""
        Attributes = [['SupArchList', GetStringOfList(SourceFile.SupArchList)],
                      ['Family', SourceFile.ToolChainFamily],
                      ['FileType', SourceFile.FileType],
                      ['FeatureFlag', SourceFile.FeatureFlag],
                     ]
        return CreateXmlElement('%s' % Key, SourceFile.SourceFile, [], Attributes)
+
# FilenameXml
class FilenameXml(object):
    """XML adapter for a generic Filename entry (OS/Family/FileType attributes,
    Filename and Executable child elements)."""

    def __init__(self):
        self.OS = ''
        self.Family = ''
        self.FileType = ''
        self.Filename = ''
        self.Executable = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse a Filename node and return a FileClass object.

        NOTE(review): self.OS is read from the XML but never copied onto the
        returned FileClass -- confirm whether that is intentional.
        """
        self.OS = XmlAttribute(Item, 'OS')
        self.Family = XmlAttribute(Item, 'Family')
        self.FileType = XmlAttribute(Item, 'FileType')
        self.Filename = XmlElement(Item, 'Filename')
        self.Executable = XmlElement(Item, 'Executable')
        self.CommonDefines.FromXml(Item, Key)

        Result = FileClass()
        Result.Family = self.Family
        Result.FileType = self.FileType
        Result.Filename = self.Filename
        Result.Executable = self.Executable
        Result.SupArchList = self.CommonDefines.SupArchList
        Result.FeatureFlag = self.CommonDefines.FeatureFlag
        return Result

    def ToXml(self, Filename, Key):
        """Serialize a FileClass-like object into an element named Key."""
        Attributes = [['SupArchList', GetStringOfList(Filename.SupArchList)],
                      ['Family', Filename.Family],
                      ['FileType', Filename.FileType],
                      ['Executable', Filename.Executable],
                      ['FeatureFlag', Filename.FeatureFlag],
                     ]
        Children = [['Filename', Filename.Filename],
                   ]
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        return "OS = %s Family = %s FileType = %s Filename = %s Executable = %s %s" \
               % (self.OS, self.Family, self.FileType, self.Filename, self.Executable, self.CommonDefines)
+
class BinaryFileXml(object):
    """XML adapter for a module binary file entry, including the AsBuilt
    sub-section (patchable/dynamic-ex PCD values, library instances, build
    flags)."""

    def __init__(self):
        self.Filenames = []
        self.PatchPcdValues = []
        self.PcdExValues = []
        self.LibraryInstances = []
        self.BuildFlags = []

    def FromXml(self, Item, Key):
        """Parse a BinaryFile node and return a ModuleBinaryFileClass object."""
        BinaryFile = ModuleBinaryFileClass()
        for SubItem in XmlList(Item, '%s/Filename' % Key):
            A = FilenameXml()
            B = A.FromXml(SubItem, 'Filename')
            BinaryFile.Filenames.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/PatchPcdValue' % Key):
            A = PcdEntryXml()
            B = A.FromXml(SubItem, 'PatchPcdValue')
            BinaryFile.PatchPcdValues.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/PcdExValue' % Key):
            A = PcdEntryXml()
            B = A.FromXml(SubItem, 'PcdExValue')
            # Fix: PcdEx entries were appended to PatchPcdValues, leaving
            # PcdExValues permanently empty (so ToXml dropped them and patch
            # PCDs were polluted with dynamic-ex entries).
            BinaryFile.PcdExValues.append(B)
        for SubItem in XmlList(Item, '%s/AsBuilt/LibraryInstances/GUID' % Key):
            GUID = XmlElement(SubItem, 'GUID')
            Version = XmlAttribute(XmlNode(SubItem, 'GUID'), 'Version')
            BinaryFile.LibraryInstances.append([GUID, Version])
        for SubItem in XmlList(Item, '%s/AsBuilt/BuildFlags' % Key):
            BinaryFile.BuildFlags.append(XmlElement(SubItem, 'BuildFlags'))

        return BinaryFile

    def ToXml(self, BinaryFile, Key):
        """Serialize BinaryFile into an element named Key, nesting the AsBuilt data."""
        NodeList = []
        for Item in BinaryFile.Filenames:
            Tmp = FilenameXml()
            NodeList.append(Tmp.ToXml(Item, 'Filename'))
        AsBuiltNodeList = []
        for Item in BinaryFile.PatchPcdValues:
            Tmp = PcdEntryXml()
            AsBuiltNodeList.append(Tmp.ToXml(Item, 'PatchPcdValue'))
        for Item in BinaryFile.PcdExValues:
            Tmp = PcdEntryXml()
            AsBuiltNodeList.append(Tmp.ToXml(Item, 'PcdExValue'))
        LibNodeList = []
        for Item in BinaryFile.LibraryInstances:
            # Item is a [GUID, Version] pair as built by FromXml.
            LibNode = CreateXmlElement('GUID', Item[0], [], [['Version', Item[1]]])
            LibNodeList.append(LibNode)
        if LibNodeList:
            AsBuiltNodeList.append(CreateXmlElement('LibraryInstances', '', LibNodeList, []))
        for Item in BinaryFile.BuildFlags:
            AsBuiltNodeList.append(CreateXmlElement('BuildFlags', Item, [], []))
        Element = CreateXmlElement('AsBuilt', '', AsBuiltNodeList, [])
        NodeList.append(Element)

        Root = CreateXmlElement('%s' % Key, '', NodeList, [])

        return Root

    def __str__(self):
        Str = "BinaryFiles:"
        for Item in self.Filenames:
            Str = Str + '\n\t' + str(Item)
        for Item in self.PatchPcdValues:
            Str = Str + '\n\t' + str(Item)
        for Item in self.PcdExValues:
            Str = Str + '\n\t' + str(Item)
        for Item in self.LibraryInstances:
            Str = Str + '\n\t' + str(Item)
        for Item in self.BuildFlags:
            Str = Str + '\n\t' + str(Item)
        return Str
+
# PackageXml
class PackageXml(object):
    """XML adapter for a module's package-dependency entry (description plus
    versioned GUID)."""

    def __init__(self):
        self.Description = ''
        self.Guid = ''
        self.Version = ''
        self.CommonDefines = CommonDefinesXml()

    def FromXml(self, Item, Key):
        """Parse a Package node and return a ModulePackageDependencyClass object."""
        self.Description = XmlElement(Item, '%s/Description' % Key)
        self.Guid = XmlElement(Item, '%s/GUID' % Key)
        self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
        self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)

        Dependency = ModulePackageDependencyClass()
        Dependency.FilePath = self.Description
        Dependency.PackageGuid = self.Guid
        Dependency.PackageVersion = self.Version
        Dependency.FeatureFlag = self.CommonDefines.FeatureFlag
        Dependency.SupArchList = self.CommonDefines.SupArchList
        return Dependency

    def ToXml(self, PackageDependency, Key):
        """Serialize PackageDependency into an element named Key."""
        Attributes = [['SupArchList', GetStringOfList(PackageDependency.SupArchList)],
                      ['FeatureFlag', PackageDependency.FeatureFlag],
                     ]
        GuidNode = CreateXmlElement('GUID', PackageDependency.PackageGuid, [],
                                    [['Version', PackageDependency.PackageVersion]])
        Children = [['Description', PackageDependency.FilePath],
                    GuidNode,
                   ]
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        return "Description = %s Guid = %s Version = %s %s" \
               % (self.Description, self.Guid, self.Version, self.CommonDefines)
+
# ExternXml
class ExternXml(object):
    """XML adapter for a module Extern entry (entry point / unload image /
    constructor / destructor)."""

    def __init__(self):
        self.CommonDefines = CommonDefinesXml()
        self.EntryPoint = ''
        self.UnloadImage = ''
        self.Constructor = ''
        self.Destructor = ''
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse an Extern node and return a ModuleExternClass object."""
        self.CommonDefines.FromXml(Item, Key)
        self.EntryPoint = XmlElement(Item, '%s/EntryPoint' % Key)
        self.UnloadImage = XmlElement(Item, '%s/UnloadImage' % Key)
        self.Constructor = XmlElement(Item, '%s/Constructor' % Key)
        self.Destructor = XmlElement(Item, '%s/Destructor' % Key)
        for HelpNode in XmlList(Item, '%s/HelpText' % Key):
            Help = HelpTextXml()
            Help.FromXml(HelpNode, '%s/HelpText' % Key)
            self.HelpText.append(Help)

        Extern = ModuleExternClass()
        Extern.EntryPoint = self.EntryPoint
        Extern.UnloadImage = self.UnloadImage
        Extern.Constructor = self.Constructor
        Extern.Destructor = self.Destructor
        Extern.SupArchList = self.CommonDefines.SupArchList
        Extern.FeatureFlag = self.CommonDefines.FeatureFlag
        Extern.HelpTextList = GetHelpTextList(self.HelpText)
        return Extern

    def ToXml(self, Extern, Key):
        """Serialize Extern into an element named Key."""
        Attributes = [['SupArchList', GetStringOfList(Extern.SupArchList)],
                      ['FeatureFlag', Extern.FeatureFlag],
                     ]
        Children = [['EntryPoint', Extern.EntryPoint],
                    ['UnloadImage', Extern.UnloadImage],
                    ['Constructor', Extern.Constructor],
                    ['Destructor', Extern.Destructor],
                   ]
        Children.extend(HelpTextXml().ToXml(Item, 'HelpText') for Item in Extern.HelpTextList)
        return CreateXmlElement('%s' % Key, '', Children, Attributes)

    def __str__(self):
        Parts = ["EntryPoint = %s UnloadImage = %s Constructor = %s Destructor = %s %s"
                 % (self.EntryPoint, self.UnloadImage, self.Constructor, self.Destructor, self.CommonDefines)]
        Parts.extend(str(Item) for Item in self.HelpText)
        return '\n\t'.join(Parts)
# DepexXml
class DepexXml(object):
    """XML adapter for a module Depex (dependency expression) section."""

    def __init__(self):
        self.Expression = ''
        self.HelpText = []

    def FromXml(self, Item, Key):
        """Parse a Depex node and return a ModuleDepexClass object."""
        self.Expression = XmlElement(Item, '%s/Expression' % Key)
        for HelpNode in XmlList(Item, '%s/HelpText' % Key):
            Help = HelpTextXml()
            Help.FromXml(HelpNode, '%s/HelpText' % Key)
            self.HelpText.append(Help)

        Depex = ModuleDepexClass()
        Depex.Depex = self.Expression
        Depex.HelpTextList = GetHelpTextList(self.HelpText)
        return Depex

    def ToXml(self, Depex, Key):
        """Serialize Depex into an element named Key."""
        Children = [['Expression', Depex.Depex],
                   ]
        Children.extend(HelpTextXml().ToXml(Item, 'HelpText') for Item in Depex.HelpTextList)
        return CreateXmlElement('%s' % Key, '', Children, [])

    def __str__(self):
        Parts = ["Expression = %s" % (self.Expression)]
        Parts.extend(str(Item) for Item in self.HelpText)
        return '\n\t'.join(Parts)
+
# PackageSurfaceAreaXml
class PackageSurfaceAreaXml(object):
    """Top-level XML adapter for a PackageSurfaceArea document.

    FromXml walks the whole document tree and assembles a PackageClass;
    ToXml does the reverse, emitting a 'PackageSurfaceArea' DOM element.
    """
    def __init__(self):
        # Set by FromXml; holds the most recently parsed PackageClass.
        self.Package = None

    def FromXml(self, Item, Key):
        """Parse a PackageSurfaceArea document node and return a PackageClass."""
        # Create a package object
        Package = PackageClass()

        # Header
        Tmp = PackageHeaderXml()
        PackageHeader = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/Header'), 'Header')
        Package.PackageHeader = PackageHeader

        # ClonedFrom (optional; only appended when present)
        Tmp = ClonedFromXml()
        ClonedFrom = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/ClonedFrom'), 'ClonedFrom')
        if ClonedFrom:
            Package.PackageHeader.ClonedFrom.append(ClonedFrom)

        # LibraryClass
        for SubItem in XmlList(Item, '/PackageSurfaceArea/LibraryClassDeclarations/LibraryClass'):
            Tmp = LibraryClassXml()
            LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
            Package.LibraryClassDeclarations.append(LibraryClass)

        # IndustryStandardHeader
        for SubItem in XmlList(Item, '/PackageSurfaceArea/IndustryStandardIncludes/IndustryStandardHeader'):
            Tmp = IndustryStandardHeaderXml()
            Include = Tmp.FromXml(SubItem, 'IndustryStandardHeader')
            Package.IndustryStdHeaders.append(Include)

        # PackageHeader
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PackageIncludes/PackageHeader'):
            Tmp = PackageIncludeHeaderXml()
            Include = Tmp.FromXml(SubItem, 'PackageHeader')
            Package.PackageIncludePkgHeaders.append(Include)

        # Guid declarations (GuidProtocolPpiXml is shared by the next three)
        for SubItem in XmlList(Item, '/PackageSurfaceArea/GuidDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.GuidDeclarations.append(GuidProtocolPpi)

        # Protocol
        for SubItem in XmlList(Item, '/PackageSurfaceArea/ProtocolDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.ProtocolDeclarations.append(GuidProtocolPpi)

        # Ppi
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PpiDeclarations/Entry'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Entry')
            Package.PpiDeclarations.append(GuidProtocolPpi)

        # PcdEntry
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdDeclarations/PcdEntry'):
            Tmp = PcdEntryXml()
            PcdEntry = Tmp.FromXml(SubItem, 'PcdEntry')
            Package.PcdDeclarations.append(PcdEntry)

        # PcdCheck
        for SubItem in XmlList(Item, '/PackageSurfaceArea/PcdRelationshipChecks/PcdCheck'):
            Tmp = PcdCheckXml()
            PcdCheck = Tmp.FromXml(SubItem, 'PcdCheck')
            Package.PcdChecks.append(PcdCheck)

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        Package.MiscFiles = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')

        # UserExtensions
        Tmp = UserExtensionsXml()
        Package.UserExtensions = Tmp.FromXml(XmlNode(Item, '/PackageSurfaceArea/UserExtensions'), 'UserExtensions')

        # Modules, keyed by (guid, version, combined path)
        for SubItem in XmlList(Item, '/PackageSurfaceArea/Modules/ModuleSurfaceArea'):
            Tmp = ModuleSurfaceAreaXml()
            Module = Tmp.FromXml(SubItem, 'ModuleSurfaceArea')
            Package.Modules[(Module.ModuleHeader.Guid, Module.ModuleHeader.Version, Module.ModuleHeader.CombinePath)] = Module

        self.Package = Package
        return self.Package

    def ToXml(self, Package):
        """Serialize a PackageClass into a 'PackageSurfaceArea' DOM element.

        Sections are emitted in the same order FromXml reads them; empty
        section containers are still emitted (as empty elements).
        """
        # Create PackageSurfaceArea node
        DomPackage = minidom.Document().createElement('PackageSurfaceArea')

        # Header
        Tmp = PackageHeaderXml()
        DomPackage.appendChild(Tmp.ToXml(Package.PackageHeader, 'Header'))

        # ClonedFrom (only the first entry is emitted)
        Tmp = ClonedFromXml()
        if Package.PackageHeader.ClonedFrom != []:
            DomPackage.appendChild(Tmp.ToXml(Package.PackageHeader.ClonedFrom[0], 'ClonedFrom'))

        # LibraryClass
        LibraryClassNode = CreateXmlElement('LibraryClassDeclarations', '', [], [])
        for LibraryClass in Package.LibraryClassDeclarations:
            Tmp = LibraryClassXml()
            LibraryClassNode.appendChild(Tmp.ToXml(LibraryClass, 'LibraryClass'))
        DomPackage.appendChild(LibraryClassNode)

        # IndustryStandardHeader
        IndustryStandardHeaderNode = CreateXmlElement('IndustryStandardIncludes', '', [], [])
        for Include in Package.IndustryStdHeaders:
            Tmp = IndustryStandardHeaderXml()
            IndustryStandardHeaderNode.appendChild(Tmp.ToXml(Include, 'IndustryStandardHeader'))
        DomPackage.appendChild(IndustryStandardHeaderNode)

        # PackageHeader
        PackageIncludeHeaderNode = CreateXmlElement('PackageIncludes', '', [], [])
        for Include in Package.PackageIncludePkgHeaders:
            Tmp = PackageIncludeHeaderXml()
            PackageIncludeHeaderNode.appendChild(Tmp.ToXml(Include, 'PackageHeader'))
        DomPackage.appendChild(PackageIncludeHeaderNode)

        # Guid
        GuidProtocolPpiNode = CreateXmlElement('GuidDeclarations', '', [], [])
        for GuidProtocolPpi in Package.GuidDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # Protocol
        GuidProtocolPpiNode = CreateXmlElement('ProtocolDeclarations', '', [], [])
        for GuidProtocolPpi in Package.ProtocolDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # Ppi
        GuidProtocolPpiNode = CreateXmlElement('PpiDeclarations', '', [], [])
        for GuidProtocolPpi in Package.PpiDeclarations:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Entry'))
        DomPackage.appendChild(GuidProtocolPpiNode)

        # PcdEntry
        PcdEntryNode = CreateXmlElement('PcdDeclarations', '', [], [])
        for PcdEntry in Package.PcdDeclarations:
            Tmp = PcdEntryXml()
            PcdEntryNode.appendChild(Tmp.ToXml(PcdEntry, 'PcdEntry'))
        DomPackage.appendChild(PcdEntryNode)

        # PcdCheck
        PcdCheckNode = CreateXmlElement('PcdRelationshipChecks', '', [], [])
        for PcdCheck in Package.PcdChecks:
            Tmp = PcdCheckXml()
            PcdCheckNode.appendChild(Tmp.ToXml(PcdCheck, 'PcdCheck'))
        DomPackage.appendChild(PcdCheckNode)

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        DomPackage.appendChild(Tmp.ToXml(Package.MiscFiles, 'MiscellaneousFiles'))

        # UserExtensions
        Tmp = UserExtensionsXml()
        DomPackage.appendChild(Tmp.ToXml(Package.UserExtensions, 'UserExtensions'))

        # Modules
        ModuleNode = CreateXmlElement('Modules', '', [], [])
        for Module in Package.Modules.values():
            Tmp = ModuleSurfaceAreaXml()
            ModuleNode.appendChild(Tmp.ToXml(Module))
        DomPackage.appendChild(ModuleNode)

        return DomPackage
+
# ModuleXml
class ModuleSurfaceAreaXml(object):
    """Convert a module surface area between XML and a ModuleClass object.

    FromXml() parses a <ModuleSurfaceArea> DOM node into a ModuleClass;
    ToXml() builds the corresponding DOM element back from a ModuleClass.
    """

    def __init__(self):
        # Most recently parsed module object (set by FromXml)
        self.Module = None

    def FromXml(self, Item, Key):
        """Parse the <ModuleSurfaceArea> DOM node Item into a ModuleClass.

        Item is the DOM node to parse; Key is kept for interface symmetry
        with the other *Xml classes.  Returns the new ModuleClass object,
        which is also stored in self.Module.
        """
        # Create a module object to fill in
        Module = ModuleClass()

        # Header
        Tmp = HeaderXml()
        ModuleHeader = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/Header'), 'Header')
        Module.ModuleHeader = ModuleHeader

        # ModuleProperties - refines the header and yields the boot modes,
        # events and HOBs declared by the module
        Tmp = ModulePropertyXml()
        (Header, BootModes, Events, HOBs) = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ModuleProperties'), 'ModuleProperties', ModuleHeader)
        Module.ModuleHeader = Header
        Module.BootModes = BootModes
        Module.Events = Events
        Module.Hobs = HOBs

        # ClonedFrom (optional)
        Tmp = ClonedFromXml()
        ClonedFrom = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ClonedFrom'), 'ClonedFrom')
        if ClonedFrom:
            Module.ModuleHeader.ClonedFrom.append(ClonedFrom)

        # LibraryClass
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
            Tmp = LibraryClassXml()
            LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
            Module.LibraryClasses.append(LibraryClass)

        # SourceFile
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename'):
            Tmp = SourceFileXml()
            SourceFile = Tmp.FromXml(SubItem, 'Filename')
            Module.Sources.append(SourceFile)

        # BinaryFile
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile'):
            Tmp = BinaryFileXml()
            BinaryFile = Tmp.FromXml(SubItem, 'BinaryFile')
            Module.Binaries.append(BinaryFile)

        # PackageDependencies
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
            Tmp = PackageXml()
            PackageDependency = Tmp.FromXml(SubItem, 'Package')
            Module.PackageDependencies.append(PackageDependency)

        # Guid
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'GuidCName')
            Module.Guids.append(GuidProtocolPpi)

        # Protocol
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Protocol')
            Module.Protocols.append(GuidProtocolPpi)

        # Ppi
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpi = Tmp.FromXml(SubItem, 'Ppi')
            Module.Ppis.append(GuidProtocolPpi)

        # Extern
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
            Tmp = ExternXml()
            Extern = Tmp.FromXml(SubItem, 'Extern')
            Module.Externs.append(Extern)

        # PcdCoded
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
            Tmp = PcdEntryXml()
            PcdEntry = Tmp.FromXml(SubItem, 'PcdEntry')
            Module.PcdCodes.append(PcdEntry)

        # PeiDepex
        Tmp = DepexXml()
        Module.PeiDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/PeiDepex'), 'PeiDepex')

        # DxeDepex
        Tmp = DepexXml()
        Module.DxeDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/DxeDepex'), 'DxeDepex')

        # SmmDepex - the original read '/ModuleSurfaceArea/DxeDepex' here, a
        # copy/paste slip that silently dropped the SMM depex section
        Tmp = DepexXml()
        Module.SmmDepex = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/SmmDepex'), 'SmmDepex')

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        Module.MiscFiles = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')

        # UserExtensions
        Tmp = UserExtensionsXml()
        Module.UserExtensions = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/UserExtensions'), 'UserExtensions')

        # Remember and return the module object
        self.Module = Module
        return self.Module

    def ToXml(self, Module):
        """Build and return a <ModuleSurfaceArea> DOM element from Module."""
        # Create root node of module surface area
        DomModule = minidom.Document().createElement('ModuleSurfaceArea')

        # Header
        Tmp = HeaderXml()
        DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader, 'Header'))

        # ModuleProperties
        Tmp = ModulePropertyXml()
        DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader, Module.BootModes, Module.Events, Module.Hobs, 'ModuleProperties'))

        # ClonedFrom - only the first record is serialized
        Tmp = ClonedFromXml()
        if Module.ModuleHeader.ClonedFrom != []:
            DomModule.appendChild(Tmp.ToXml(Module.ModuleHeader.ClonedFrom[0], 'ClonedFrom'))

        # LibraryClass
        LibraryClassNode = CreateXmlElement('LibraryClassDefinitions', '', [], [])
        for LibraryClass in Module.LibraryClasses:
            Tmp = LibraryClassXml()
            LibraryClassNode.appendChild(Tmp.ToXml(LibraryClass, 'LibraryClass'))
        DomModule.appendChild(LibraryClassNode)

        # SourceFile
        SourceFileNode = CreateXmlElement('SourceFiles', '', [], [])
        for SourceFile in Module.Sources:
            Tmp = SourceFileXml()
            SourceFileNode.appendChild(Tmp.ToXml(SourceFile, 'Filename'))
        DomModule.appendChild(SourceFileNode)

        # BinaryFile
        BinaryFileNode = CreateXmlElement('BinaryFiles', '', [], [])
        for BinaryFile in Module.Binaries:
            Tmp = BinaryFileXml()
            BinaryFileNode.appendChild(Tmp.ToXml(BinaryFile, 'BinaryFile'))
        DomModule.appendChild(BinaryFileNode)

        # PackageDependencies
        PackageDependencyNode = CreateXmlElement('PackageDependencies', '', [], [])
        for PackageDependency in Module.PackageDependencies:
            Tmp = PackageXml()
            PackageDependencyNode.appendChild(Tmp.ToXml(PackageDependency, 'Package'))
        DomModule.appendChild(PackageDependencyNode)

        # Guid
        GuidProtocolPpiNode = CreateXmlElement('Guids', '', [], [])
        for GuidProtocolPpi in Module.Guids:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'GuidCName'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Protocol
        GuidProtocolPpiNode = CreateXmlElement('Protocols', '', [], [])
        for GuidProtocolPpi in Module.Protocols:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Protocol'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Ppi
        GuidProtocolPpiNode = CreateXmlElement('PPIs', '', [], [])
        for GuidProtocolPpi in Module.Ppis:
            Tmp = GuidProtocolPpiXml()
            GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Ppi'))
        DomModule.appendChild(GuidProtocolPpiNode)

        # Extern
        ExternNode = CreateXmlElement('Externs', '', [], [])
        for Extern in Module.Externs:
            Tmp = ExternXml()
            ExternNode.appendChild(Tmp.ToXml(Extern, 'Extern'))
        DomModule.appendChild(ExternNode)

        # PcdCoded
        PcdEntryNode = CreateXmlElement('PcdCoded', '', [], [])
        for PcdEntry in Module.PcdCodes:
            Tmp = PcdEntryXml()
            PcdEntryNode.appendChild(Tmp.ToXml(PcdEntry, 'PcdEntry'))
        DomModule.appendChild(PcdEntryNode)

        # PeiDepex (optional)
        if Module.PeiDepex:
            Tmp = DepexXml()
            DomModule.appendChild(Tmp.ToXml(Module.PeiDepex, 'PeiDepex'))

        # DxeDepex (optional)
        if Module.DxeDepex:
            Tmp = DepexXml()
            DomModule.appendChild(Tmp.ToXml(Module.DxeDepex, 'DxeDepex'))

        # SmmDepex (optional)
        if Module.SmmDepex:
            Tmp = DepexXml()
            DomModule.appendChild(Tmp.ToXml(Module.SmmDepex, 'SmmDepex'))

        # MiscellaneousFile
        Tmp = MiscellaneousFileXml()
        DomModule.appendChild(Tmp.ToXml(Module.MiscFiles, 'MiscellaneousFiles'))

        # UserExtensions
        Tmp = UserExtensionsXml()
        DomModule.appendChild(Tmp.ToXml(Module.UserExtensions, 'UserExtensions'))

        return DomModule
+
# DistributionPackageXml
class DistributionPackageXml(object):
    """Convert a whole distribution package between its XML file form and a
    DistributionPackageClass object.
    """

    def __init__(self):
        self.Dp = DistributionPackageClass()

    def FromXml(self, Filename = None):
        """Parse distribution package XML file Filename into self.Dp.

        Returns the populated DistributionPackageClass object (unchanged
        when Filename is None).
        """
        if Filename is not None:
            self.Dp = DistributionPackageClass()

            # Load the file into a DOM tree
            self.Pkg = XmlParseFile(Filename)

            # Parse Header information
            Tmp = DistributionPackageHeaderXml()
            DistributionPackageHeader = Tmp.FromXml(XmlNode(self.Pkg, '/DistributionPackage/DistributionHeader'), 'DistributionHeader')
            self.Dp.Header = DistributionPackageHeader

            # Parse each PackageSurfaceArea, keyed by (Guid, Version, CombinePath)
            for Item in XmlList(self.Pkg, '/DistributionPackage/PackageSurfaceArea'):
                Psa = PackageSurfaceAreaXml()
                Package = Psa.FromXml(Item, 'PackageSurfaceArea')
                self.Dp.PackageSurfaceArea[(Package.PackageHeader.Guid, Package.PackageHeader.Version, Package.PackageHeader.CombinePath)] = Package

            # Parse each ModuleSurfaceArea, keyed by (Guid, Version, CombinePath)
            for Item in XmlList(self.Pkg, '/DistributionPackage/ModuleSurfaceArea'):
                Msa = ModuleSurfaceAreaXml()
                Module = Msa.FromXml(Item, 'ModuleSurfaceArea')
                self.Dp.ModuleSurfaceArea[(Module.ModuleHeader.Guid, Module.ModuleHeader.Version, Module.ModuleHeader.CombinePath)] = Module

            # Parse Tools
            Tmp = MiscellaneousFileXml()
            self.Dp.Tools = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/Tools'), 'Tools')

            # Parse MiscFiles
            Tmp = MiscellaneousFileXml()
            self.Dp.MiscellaneousFiles = Tmp.FromXml2(XmlNode(self.Pkg, '/DistributionPackage/MiscellaneousFiles'), 'MiscellaneousFiles')

        return self.Dp

    def ToXml(self, Dp):
        """Serialize Dp to a pretty-printed XML string ('' when Dp is None)."""
        if Dp is not None:
            # Root element carrying the UEFI 2008/2.1 distribution namespace.
            # The xsi URI previously read 'http:/www.w3.org/...' (missing
            # slash), producing an invalid XMLSchema-instance namespace.
            Attrs = [['xmlns', 'http://www.uefi.org/2008/2.1'],
                     ['xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance'],
                    ]
            Root = CreateXmlElement('DistributionPackage', '', [], Attrs)

            # DistributionPackageHeader
            Tmp = DistributionPackageHeaderXml()
            Root.appendChild(Tmp.ToXml(Dp.Header, 'DistributionHeader'))

            # Serialize each PackageSurfaceArea
            for Package in Dp.PackageSurfaceArea.values():
                Psa = PackageSurfaceAreaXml()
                Root.appendChild(Psa.ToXml(Package))

            # Serialize each ModuleSurfaceArea
            for Module in Dp.ModuleSurfaceArea.values():
                Msa = ModuleSurfaceAreaXml()
                Root.appendChild(Msa.ToXml(Module))

            # Serialize Tools
            Tmp = MiscellaneousFileXml()
            Root.appendChild(Tmp.ToXml2(Dp.Tools, 'Tools'))

            # Serialize MiscFiles
            Tmp = MiscellaneousFileXml()
            Root.appendChild(Tmp.ToXml2(Dp.MiscellaneousFiles, 'MiscellaneousFiles'))

            return Root.toprettyxml(indent = ' ')

        return ''
+
if __name__ == '__main__':
    # Simple manual smoke test: load a distribution package and echo it back.
    M = DistributionPackageXml()
    # Raw string: avoids backslash-escape surprises in the Windows path.
    M.FromXml(r'C:\Test.xml')
    print(M.ToXml(M.Dp))
+ \ No newline at end of file
diff --git a/BaseTools/Source/Python/Common/XmlRoutines.py b/BaseTools/Source/Python/Common/XmlRoutines.py
new file mode 100644
index 0000000000..e5fedae83d
--- /dev/null
+++ b/BaseTools/Source/Python/Common/XmlRoutines.py
@@ -0,0 +1,228 @@
+## @file
+# This is an XML API that uses a syntax similar to XPath, but it is written in
+# standard python so that no extra python packages are required to use it.
+#
+# Copyright (c) 2007, Intel Corporation
+# All rights reserved. This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+##
+# Import Modules
+#
+import xml.dom.minidom
+
## Create an element of XML
#
# Build a new DOM element named Name.  String becomes its text content,
# NodeList supplies children (either [TagName, Text] pairs or ready-made DOM
# nodes), and AttributeList supplies [Name, Value] attribute pairs.
#
# @param Name           Tag name of the new element.
# @param String         Optional text content; skipped when empty or None.
# @param NodeList       Child definitions: [Key, Value] pairs or DOM nodes.
# @param AttributeList  [Key, Value] attribute pairs.
#
# @retval Element       The newly created DOM element.
#
def CreateXmlElement(Name, String, NodeList, AttributeList):
    Doc = xml.dom.minidom.Document()
    Element = Doc.createElement(Name)
    # Only add a text node for a real, non-empty string
    if String != '' and String is not None:
        Element.appendChild(Doc.createTextNode(String))

    for Item in NodeList:
        if isinstance(Item, list):
            # [TagName, Text] pair: create a simple child element
            Key = Item[0]
            Value = Item[1]
            if Key not in ('', None) and Value not in ('', None):
                Node = Doc.createElement(Key)
                Node.appendChild(Doc.createTextNode(Value))
                Element.appendChild(Node)
        else:
            # Already a DOM node: attach it directly
            Element.appendChild(Item)
    for Item in AttributeList:
        Key = Item[0]
        Value = Item[1]
        if Key not in ('', None) and Value not in ('', None):
            Element.setAttribute(Key, Value)

    return Element
+
## Get a list of XML nodes using XPath style syntax.
#
# Walk the DOM level by level, keeping only element nodes whose tag names
# match each successive segment of the XPath style String.  Invalid input
# (empty path or missing DOM) yields an empty list.
#
# @param Dom     The root XML DOM node.
# @param String  A XPath style path.
#
# @retval Nodes  A list of XML nodes matching the XPath style String.
#
def XmlList(Dom, String):
    # Nothing to search for, or nothing to search in
    if not String or Dom is None or Dom == "":
        return []
    # Start from the document element when handed a whole document
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    # Drop a single leading '/' so absolute paths match the root tag
    if String.startswith("/"):
        String = String[1:]
    Tags = String.split('/')
    LastIndex = len(Tags) - 1
    Matches = [Dom]
    for Position, Tag in enumerate(Tags):
        NextLevel = []
        for Candidate in Matches:
            if Candidate.nodeType == Candidate.ELEMENT_NODE and Candidate.tagName == Tag:
                if Position == LastIndex:
                    # Path fully matched: keep this node
                    NextLevel.append(Candidate)
                else:
                    # Descend into all children for the next segment
                    NextLevel.extend(Candidate.childNodes)
        Matches = NextLevel

    return Matches
+
+
## Get a single XML node using XPath style syntax.
#
# At each path level, descend through the first element child whose tag name
# matches the corresponding segment of the XPath style String.  Invalid
# input or an unmatched path yields an empty string.
#
# @param Dom     The root XML DOM node.
# @param String  A XPath style path.
#
# @retval Node   The single XML node matching the XPath style String, or "".
#
def XmlNode(Dom, String):
    # Nothing to search for, or nothing to search in
    if not String or Dom is None or Dom == "":
        return ""
    # Start from the document element when handed a whole document
    if Dom.nodeType == Dom.DOCUMENT_NODE:
        Dom = Dom.documentElement
    # Drop a single leading '/' so absolute paths match the root tag
    if String.startswith("/"):
        String = String[1:]
    Tags = String.split('/')
    LastIndex = len(Tags) - 1
    Candidates = [Dom]
    for Position, Tag in enumerate(Tags):
        for Node in Candidates:
            if Node.nodeType == Node.ELEMENT_NODE and Node.tagName == Tag:
                if Position == LastIndex:
                    return Node
                # First match wins: descend into its children
                Candidates = Node.childNodes
                break
    return ""
+
+
## Get a single XML element using XPath style syntax.
#
# Return the stripped text content of the single element addressed by the
# XPath style String.  If the path does not resolve to an element with a
# text child, an empty string is returned.
#
# @param Dom     The root XML DOM object.
# @param String  A XPath style path.
#
# @retval Element  The stripped text of the matching element, or "".
#
def XmlElement(Dom, String):
    try:
        return XmlNode(Dom, String).firstChild.data.strip()
    except AttributeError:
        # No match ("" from XmlNode), no first child, or a non-text child
        return ""
+
+
## Get a single XML element of the current node.
#
# Return the stripped text content of the given DOM node.  If the node has
# no text child (or is not a node at all), an empty string is returned.
#
# @param Dom  The root XML DOM object.
#
# @retval Element  The stripped text of Dom's first child, or "".
#
def XmlElementData(Dom):
    try:
        return Dom.firstChild.data.strip()
    except AttributeError:
        # Dom is None/"" or has no text child: treat as empty
        return ""
+
+
## Get a list of XML elements using XPath style syntax.
#
# Return the stripped text content of every node matching the XPath style
# String.  Invalid input yields an empty list.
#
# @param Dom     The root XML DOM object.
# @param String  A XPath style path.
#
# @retval Elements  A list of text strings for the matching elements.
#
def XmlElementList(Dom, String):
    # List comprehension rather than map() so the result is a real list on
    # both Python 2 and Python 3 (map() returns an iterator on Python 3)
    return [XmlElementData(Node) for Node in XmlList(Dom, String)]
+
+
## Get the XML attribute of the current node.
#
# Return the stripped value of attribute named Attribute on the given DOM
# node.  A missing attribute or an invalid node yields an empty string.
#
# @param Dom        The root XML DOM object.
# @param Attribute  The name of the attribute.
#
# @retval Value  The stripped attribute value, or ''.
#
def XmlAttribute(Dom, Attribute):
    try:
        return Dom.getAttribute(Attribute).strip()
    except AttributeError:
        # Dom is not an element node: treat the attribute as absent
        return ''
+
+
## Get the XML node name of the current node.
#
# Return the stripped node name of the given DOM node; an invalid node
# yields an empty string.
#
# @param Dom  The root XML DOM object.
#
# @retval Name  The stripped node name, or ''.
#
def XmlNodeName(Dom):
    try:
        return Dom.nodeName.strip()
    except AttributeError:
        # Dom is None/"" or otherwise has no nodeName
        return ''
+
## Parse an XML file.
#
# Parse the input XML file named FileName and return the XML DOM it stands
# for.  If the file cannot be opened or is not valid XML, the error is
# printed and an empty string is returned.
#
# @param FileName  The XML file name.
#
# @retval Dom  The DOM object parsed from the XML file, or "" on failure.
#
def XmlParseFile(FileName):
    try:
        XmlFile = open(FileName)
        try:
            return xml.dom.minidom.parse(XmlFile)
        finally:
            # Close the handle even when parsing raises
            XmlFile.close()
    except Exception as X:
        print(X)
        return ""
+
# This acts like the main() function for the script, unless it is 'import'ed
# into another script.
if __name__ == '__main__':
    # Minimal smoke test of CreateXmlElement.
    A = CreateXmlElement('AAA', 'CCC', [['AAA', '111'], ['BBB', '222']], [['A', '1'], ['B', '2']])
    B = CreateXmlElement('ZZZ', 'CCC', [['XXX', '111'], ['YYY', '222']], [['A', '1'], ['B', '2']])
    # The original called the nonexistent CreateXmlList() here (NameError)
    # and passed bare strings instead of [Name, Value] attribute pairs.
    C = CreateXmlElement('DDD', 'EEE', [A, B], [['FFF', '1'], ['GGG', '2']])
    print(C.toprettyxml(indent = " "))
diff --git a/BaseTools/Source/Python/Common/__init__.py b/BaseTools/Source/Python/Common/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/BaseTools/Source/Python/Common/__init__.py