author     qhuang8 <qhuang8@6f19259b-4bc3-4df7-8a09-765794883524>  2010-09-06 01:58:00 +0000
committer  qhuang8 <qhuang8@6f19259b-4bc3-4df7-8a09-765794883524>  2010-09-06 01:58:00 +0000
commit     e56468c072e0d53834787f4ad0e292b33cc6be08 (patch)
tree       9b101cd782db879a969041b7bd389ad2b8453b10 /BaseTools
parent     034ffda8b2ec8575a9a6f42b1dc9ff6db1621a97 (diff)
download   edk2-platforms-e56468c072e0d53834787f4ad0e292b33cc6be08.tar.xz
Sync EDKII BaseTools to BaseTools project r2042.
git-svn-id: https://edk2.svn.sourceforge.net/svnroot/edk2/trunk/edk2@10850 6f19259b-4bc3-4df7-8a09-765794883524
Diffstat (limited to 'BaseTools')
-rw-r--r--  BaseTools/Bin/Win32/BPDG.exe                              bin 0 -> 577452 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/BootSectImage.exe                     bin 393216 -> 393216 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/EfiLdrImage.exe                       bin 421888 -> 421888 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/EfiRom.exe                            bin 446464 -> 446464 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/Fpd2Dsc.exe                           bin 1434119 -> 1434295 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenBootSector.exe                     bin 425984 -> 425984 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenCrc32.exe                          bin 425984 -> 425984 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenDepex.exe                          bin 615750 -> 615750 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenFds.exe                            bin 1474967 -> 1556029 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenFfs.exe                            bin 430080 -> 430080 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenFv.exe                             bin 479232 -> 479232 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenFw.exe                             bin 495616 -> 495616 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenPage.exe                           bin 421888 -> 421888 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenPatchPcdTable.exe                  bin 609747 -> 609747 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenSec.exe                            bin 446464 -> 446464 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/GenVtf.exe                            bin 446464 -> 446464 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/LzmaCompress.exe                      bin 397312 -> 397312 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/MigrationMsa2Inf.exe                  bin 1379556 -> 1379732 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/Spd2Dec.exe                           bin 1389784 -> 1389784 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/Split.exe                             bin 425984 -> 425984 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/TargetTool.exe                        bin 582580 -> 582756 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/TianoCompress.exe                     bin 434176 -> 434176 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/Trim.exe                              bin 678782 -> 678782 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/VfrCompile.exe                        bin 1187840 -> 1187840 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/VolInfo.exe                           bin 471040 -> 471040 bytes
-rwxr-xr-x  BaseTools/Bin/Win32/build.exe                             bin 2617426 -> 2635681 bytes
l---------  BaseTools/BinWrappers/PosixLike/BPDG                      1
-rw-r--r--  BaseTools/Conf/build_rule.template                        24
-rw-r--r--  BaseTools/Conf/target.template                            4
-rw-r--r--  BaseTools/Conf/tools_def.template                         19
-rw-r--r--  BaseTools/Source/C/Makefiles/header.makefile              111
-rw-r--r--  BaseTools/Source/Python/AutoGen/AutoGen.py                159
-rw-r--r--  BaseTools/Source/Python/AutoGen/GenC.py                   20
-rw-r--r--  BaseTools/Source/Python/Common/DataType.py                3
-rw-r--r--  BaseTools/Source/Python/Common/Misc.py                    2
-rw-r--r--  BaseTools/Source/Python/Common/String.py                  44
-rw-r--r--  BaseTools/Source/Python/Common/VpdInfoFile.py             259
-rw-r--r--  BaseTools/Source/Python/CommonDataClass/DataClass.py      30
-rw-r--r--  BaseTools/Source/Python/Ecc/Check.py                      292
-rw-r--r--  BaseTools/Source/Python/Ecc/Configuration.py              63
-rw-r--r--  BaseTools/Source/Python/Ecc/Database.py                   109
-rw-r--r--  BaseTools/Source/Python/Ecc/Ecc.py                        4
-rw-r--r--  BaseTools/Source/Python/Ecc/EccGlobalData.py              2
-rw-r--r--  BaseTools/Source/Python/Ecc/EccToolError.py               2
-rw-r--r--  BaseTools/Source/Python/Ecc/MetaDataParser.py             21
-rw-r--r--  BaseTools/Source/Python/Ecc/c.py                          48
-rw-r--r--  BaseTools/Source/Python/Ecc/config.ini                    15
-rw-r--r--  BaseTools/Source/Python/GNUmakefile                       2
-rw-r--r--  BaseTools/Source/Python/GenFds/FdfParser.py               2
-rw-r--r--  BaseTools/Source/Python/GenFds/FfsInfStatement.py         4
-rw-r--r--  BaseTools/Source/Python/GenFds/GenFds.py                  1
-rw-r--r--  BaseTools/Source/Python/Makefile                          8
-rw-r--r--  BaseTools/Source/Python/Workspace/BuildClassObject.py     10
-rw-r--r--  BaseTools/Source/Python/Workspace/MetaFileParser.py       78
-rw-r--r--  BaseTools/Source/Python/Workspace/WorkspaceDatabase.py    73
-rw-r--r--  BaseTools/Source/Python/build/BuildReport.py              98
-rw-r--r--  BaseTools/Source/Python/build/build.py                    7
57 files changed, 1148 insertions, 367 deletions
diff --git a/BaseTools/Bin/Win32/BPDG.exe b/BaseTools/Bin/Win32/BPDG.exe
new file mode 100644
index 0000000000..d0b2c95c30
--- /dev/null
+++ b/BaseTools/Bin/Win32/BPDG.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/BootSectImage.exe b/BaseTools/Bin/Win32/BootSectImage.exe
index 0ddaa55734..0e11dd8a5c 100755
--- a/BaseTools/Bin/Win32/BootSectImage.exe
+++ b/BaseTools/Bin/Win32/BootSectImage.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/EfiLdrImage.exe b/BaseTools/Bin/Win32/EfiLdrImage.exe
index d18067e721..d63752e1c1 100755
--- a/BaseTools/Bin/Win32/EfiLdrImage.exe
+++ b/BaseTools/Bin/Win32/EfiLdrImage.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/EfiRom.exe b/BaseTools/Bin/Win32/EfiRom.exe
index 700e3ba72b..f92ae9bf30 100755
--- a/BaseTools/Bin/Win32/EfiRom.exe
+++ b/BaseTools/Bin/Win32/EfiRom.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/Fpd2Dsc.exe b/BaseTools/Bin/Win32/Fpd2Dsc.exe
index e282d9207d..688dd210e4 100755
--- a/BaseTools/Bin/Win32/Fpd2Dsc.exe
+++ b/BaseTools/Bin/Win32/Fpd2Dsc.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenBootSector.exe b/BaseTools/Bin/Win32/GenBootSector.exe
index 0dac396e6e..bc2bdebd91 100755
--- a/BaseTools/Bin/Win32/GenBootSector.exe
+++ b/BaseTools/Bin/Win32/GenBootSector.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenCrc32.exe b/BaseTools/Bin/Win32/GenCrc32.exe
index 6bac0d00cd..9e86442a73 100755
--- a/BaseTools/Bin/Win32/GenCrc32.exe
+++ b/BaseTools/Bin/Win32/GenCrc32.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenDepex.exe b/BaseTools/Bin/Win32/GenDepex.exe
index 6f6c73028b..698eebf30c 100755
--- a/BaseTools/Bin/Win32/GenDepex.exe
+++ b/BaseTools/Bin/Win32/GenDepex.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenFds.exe b/BaseTools/Bin/Win32/GenFds.exe
index b4e55aed8b..adb7da94bc 100755
--- a/BaseTools/Bin/Win32/GenFds.exe
+++ b/BaseTools/Bin/Win32/GenFds.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenFfs.exe b/BaseTools/Bin/Win32/GenFfs.exe
index e7dae7a7b0..2f51ce6825 100755
--- a/BaseTools/Bin/Win32/GenFfs.exe
+++ b/BaseTools/Bin/Win32/GenFfs.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenFv.exe b/BaseTools/Bin/Win32/GenFv.exe
index 240101306c..66c61fee76 100755
--- a/BaseTools/Bin/Win32/GenFv.exe
+++ b/BaseTools/Bin/Win32/GenFv.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenFw.exe b/BaseTools/Bin/Win32/GenFw.exe
index 6ec43f4728..10cb4a33f7 100755
--- a/BaseTools/Bin/Win32/GenFw.exe
+++ b/BaseTools/Bin/Win32/GenFw.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenPage.exe b/BaseTools/Bin/Win32/GenPage.exe
index 8a03a51e97..9ce843fb87 100755
--- a/BaseTools/Bin/Win32/GenPage.exe
+++ b/BaseTools/Bin/Win32/GenPage.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenPatchPcdTable.exe b/BaseTools/Bin/Win32/GenPatchPcdTable.exe
index ca85eb1d24..9306426f75 100755
--- a/BaseTools/Bin/Win32/GenPatchPcdTable.exe
+++ b/BaseTools/Bin/Win32/GenPatchPcdTable.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenSec.exe b/BaseTools/Bin/Win32/GenSec.exe
index 9cd22fe9e4..7806717c82 100755
--- a/BaseTools/Bin/Win32/GenSec.exe
+++ b/BaseTools/Bin/Win32/GenSec.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/GenVtf.exe b/BaseTools/Bin/Win32/GenVtf.exe
index ab2b374d28..9066545715 100755
--- a/BaseTools/Bin/Win32/GenVtf.exe
+++ b/BaseTools/Bin/Win32/GenVtf.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/LzmaCompress.exe b/BaseTools/Bin/Win32/LzmaCompress.exe
index edca459e3f..f9108fa8f5 100755
--- a/BaseTools/Bin/Win32/LzmaCompress.exe
+++ b/BaseTools/Bin/Win32/LzmaCompress.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/MigrationMsa2Inf.exe b/BaseTools/Bin/Win32/MigrationMsa2Inf.exe
index 400ca79dc1..117cad3ddb 100755
--- a/BaseTools/Bin/Win32/MigrationMsa2Inf.exe
+++ b/BaseTools/Bin/Win32/MigrationMsa2Inf.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/Spd2Dec.exe b/BaseTools/Bin/Win32/Spd2Dec.exe
index 9069b966a0..049766b682 100755
--- a/BaseTools/Bin/Win32/Spd2Dec.exe
+++ b/BaseTools/Bin/Win32/Spd2Dec.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/Split.exe b/BaseTools/Bin/Win32/Split.exe
index 0ff13016c3..561aa69bd1 100755
--- a/BaseTools/Bin/Win32/Split.exe
+++ b/BaseTools/Bin/Win32/Split.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/TargetTool.exe b/BaseTools/Bin/Win32/TargetTool.exe
index 984d37edb0..bfe546188e 100755
--- a/BaseTools/Bin/Win32/TargetTool.exe
+++ b/BaseTools/Bin/Win32/TargetTool.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/TianoCompress.exe b/BaseTools/Bin/Win32/TianoCompress.exe
index f106424ec1..4c1d44ff5d 100755
--- a/BaseTools/Bin/Win32/TianoCompress.exe
+++ b/BaseTools/Bin/Win32/TianoCompress.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/Trim.exe b/BaseTools/Bin/Win32/Trim.exe
index 6cbbd5f57a..2c0b8738f7 100755
--- a/BaseTools/Bin/Win32/Trim.exe
+++ b/BaseTools/Bin/Win32/Trim.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/VfrCompile.exe b/BaseTools/Bin/Win32/VfrCompile.exe
index daf66f23c5..da1b0a9a99 100755
--- a/BaseTools/Bin/Win32/VfrCompile.exe
+++ b/BaseTools/Bin/Win32/VfrCompile.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/VolInfo.exe b/BaseTools/Bin/Win32/VolInfo.exe
index a470075e48..2cf76569a4 100755
--- a/BaseTools/Bin/Win32/VolInfo.exe
+++ b/BaseTools/Bin/Win32/VolInfo.exe
Binary files differ
diff --git a/BaseTools/Bin/Win32/build.exe b/BaseTools/Bin/Win32/build.exe
index 54a323996d..170a55cdbe 100755
--- a/BaseTools/Bin/Win32/build.exe
+++ b/BaseTools/Bin/Win32/build.exe
Binary files differ
diff --git a/BaseTools/BinWrappers/PosixLike/BPDG b/BaseTools/BinWrappers/PosixLike/BPDG
new file mode 120000
index 0000000000..af6c748b1a
--- /dev/null
+++ b/BaseTools/BinWrappers/PosixLike/BPDG
@@ -0,0 +1 @@
+RunToolFromSource
\ No newline at end of file
diff --git a/BaseTools/Conf/build_rule.template b/BaseTools/Conf/build_rule.template
index 70f510aed7..d240e9694d 100644
--- a/BaseTools/Conf/build_rule.template
+++ b/BaseTools/Conf/build_rule.template
@@ -295,7 +295,7 @@
$(DEBUG_DIR)(+)$(MODULE_NAME).efi
<Command.MSFT, Command.INTEL, Command.RVCT, Command.ARMGCC>
- GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@@ -304,7 +304,7 @@
$(OBJCOPY) --only-keep-debug ${src} $(BIN_DIR)(+)$(MODULE_NAME).debug
$(OBJCOPY) --strip-unneeded ${src}
$(OBJCOPY) --add-gnu-debuglink=$(BIN_DIR)(+)$(MODULE_NAME).debug ${src}
- GenFw -e $(MODULE_TYPE) -o ${dst} ${src}
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} ${src} $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@@ -314,7 +314,7 @@
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) ${src} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
# create symbol file for GDB debug
-$(DSYMUTIL) ${src}
- GenFw -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff
+ "$(GENFW)" -e $(MODULE_TYPE) -o ${dst} $(DEBUG_DIR)(+)$(MODULE_NAME).pecoff $(GENFW_FLAGS)
$(CP) ${dst} $(OUTPUT_DIR)
$(CP) ${dst} $(BIN_DIR)
-$(CP) $(DEBUG_DIR)(+)*.map $(OUTPUT_DIR)
@@ -367,12 +367,12 @@
<Command.MSFT, Command.INTEL>
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.GCC>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
[Acpi-Table-Code-File]
<InputFile>
@@ -387,18 +387,18 @@
<Command.MSFT, Command.INTEL>
"$(ASLCC)" /Fo$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" /OUT:$(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.GCC>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(CC_FLAGS) $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(GENFW_FLAGS)
<Command.XCODE>
"$(ASLCC)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj $(ASLCC_FLAGS) $(INC) ${src}
"$(ASLDLINK)" -o $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(ASLDLINK_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.obj
"$(MTOC)" -subsystem $(MODULE_TYPE) $(MTOC_FLAGS) $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.dll $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
- GenFw -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi
+ "$(GENFW)" -o ${dst} -c $(OUTPUT_DIR)(+)${s_dir}(+)${s_base}.efi $(GENFW_FLAGS)
[Masm16-Code-File]
@@ -438,7 +438,7 @@
$(OUTPUT_DIR)(+)${s_base}.mcb
<Command>
- GenFw -o ${dst} -m ${src}
+ "$(GENFW)" -o ${dst} -m ${src} $(GENFW_FLAGS)
[Microcode-Binary-File]
<InputFile>
@@ -451,7 +451,7 @@
$(OUTPUT_DIR)(+)$(MODULE_NAME).bin
<Command>
- GenFw -o ${dst} -j $(MICROCODE_BINARY_FILES)
+ "$(GENFW)" -o ${dst} -j $(MICROCODE_BINARY_FILES) $(GENFW_FLAGS)
-$(CP) ${dst} $(BIN_DIR)
[EFI-Image-File]
@@ -518,9 +518,9 @@
$(OUTPUT_DIR)(+)$(MODULE_NAME)hii.lib
<Command.MSFT, Command.INTEL>
- GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES)
+ "$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiipackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
"$(RC)" /Fo${dst} $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc
<Command.GCC>
- GenFw -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES)
+ "$(GENFW)" -o $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc -g $(MODULE_GUID) --hiibinpackage $(HII_BINARY_PACKAGES) $(GENFW_FLAGS)
"$(RC)" $(RC_FLAGS) $(OUTPUT_DIR)(+)$(MODULE_NAME)hii.rc ${dst}
diff --git a/BaseTools/Conf/target.template b/BaseTools/Conf/target.template
index 7f0771ec55..a05e7af3af 100644
--- a/BaseTools/Conf/target.template
+++ b/BaseTools/Conf/target.template
@@ -63,10 +63,6 @@ TOOL_CHAIN_TAG = MYTOOLS
# cores or CPUs. Less than 2 means disable multithread build.
MAX_CONCURRENT_THREAD_NUMBER = 1
-# MULTIPLE_THREAD BOOLEAN Optional If "Enable", multi-thread is enable for bulding.
-# If "Disable", multi-thread is disable for building.
-MULTIPLE_THREAD = Disable
-
# Build rules definition
#
#
diff --git a/BaseTools/Conf/tools_def.template b/BaseTools/Conf/tools_def.template
index d776ab22d7..c873d93738 100644
--- a/BaseTools/Conf/tools_def.template
+++ b/BaseTools/Conf/tools_def.template
@@ -86,8 +86,8 @@ DEFINE ICC11_ASM32x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERS
DEFINE ICC11_BINX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
DEFINE ICC11_ASMX64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
-DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
-DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_intel64
+DEFINE ICC11_BINX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
+DEFINE ICC11_ASMX64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\intel64
DEFINE ICC11_BIN64 = C:\Program Files\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
DEFINE ICC11_BIN64x86 = C:\Program Files (x86)\Intel\Compiler\DEF(ICC11_VERSION)\DEF(ICC11_BUILD)\bin\ia32_ia64
@@ -3308,8 +3308,8 @@ RELEASE_XCODE32_ARM_ASM_FLAGS = $(ARCHASM_FLAGS)
*_XCODE32_ARM_PP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -E -x assembler-with-cpp -include $(DEST_DIR_DEBUG)/AutoGen.h
*_XCODE32_ARM_VFRPP_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -x c -E -P -DVFRCOMPILE --include $(DEST_DIR_DEBUG)/$(MODULE_NAME)StrDefs.h
- DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
-RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -mdynamic-no-pic -fno-stack-protector
+ DEBUG_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -g -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
+RELEASE_XCODE32_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mthumb-interwork -Oz -mabi=aapcs -mapcs -fno-short-enums -save-temps -combine -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-missing-braces -fomit-frame-pointer -c -include AutoGen.h -fno-stack-protector
####################################################################################
@@ -3482,6 +3482,12 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
*_*_*_OPTROM_FLAGS = -e
##################
+# GenFw tool definitions
+##################
+*_*_*_GENFW_PATH = GenFw
+*_*_*_GENFW_FLAGS =
+
+##################
# Asl Compiler definitions
##################
*_*_*_ASLCC_FLAGS = /GL- /Y- /TC /Dmain=ReferenceAcpiTable
@@ -3501,3 +3507,8 @@ RELEASE_ARMGCC_ARM_CC_FLAGS = $(ARCHCC_FLAGS) $(PLATFORM_FLAGS) -mfpu=fpa -mlitt
*_*_*_TIANO_PATH = TianoCompress
*_*_*_TIANO_GUID = A31280AD-481E-41B6-95E8-127F4C984779
+##################
+# BPDG tool definitions
+##################
+*_*_*_VPDTOOL_PATH = BPDG
+*_*_*_VPDTOOL_GUID = 8C3D856A-9BE6-468E-850A-24F7A8D38E08
diff --git a/BaseTools/Source/C/Makefiles/header.makefile b/BaseTools/Source/C/Makefiles/header.makefile
index 96b3d15e8a..841803e51d 100644
--- a/BaseTools/Source/C/Makefiles/header.makefile
+++ b/BaseTools/Source/C/Makefiles/header.makefile
@@ -1,10 +1,10 @@
## @file
#
-# The makefile can be invoked with
-# ARCH = x86_64 or x64 for EM64T build
-# ARCH = ia32 or IA32 for IA32 build
-# ARCH = ia64 or IA64 for IA64 build
-#
+# The makefile can be invoked with
+# ARCH = x86_64 or x64 for EM64T build
+# ARCH = ia32 or IA32 for IA32 build
+# ARCH = ia64 or IA64 for IA64 build
+#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
@@ -12,55 +12,52 @@
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
-# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
-
-ARCH ?= IA32
-
-CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
-LINUX:=$(findstring Linux, $(shell uname -s))
-DARWIN:=$(findstring Darwin, $(shell uname -s))
-
-CC = gcc
-CXX = g++
-AS = gcc
-AR = ar
-LD = ld
-LINKER ?= $(CC)
-ifeq ($(ARCH), IA32)
-ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
-endif
-
-ifeq ($(ARCH), X64)
-ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
-endif
-
-INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
-CPPFLAGS = $(INCLUDE)
-CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
-LFLAGS =
-
-#
-# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
-# to x86_64. So make sure tools match uname -m
-#
-uname_s = $(shell uname -s)
-uname_m = $(shell uname -m)
-ifeq ($(uname_s),Darwin)
-ifeq ($(uname_m),i386)
- CFLAGS += -arch i386
- CPPFLAGS += -arch i386
- LFLAGS += -arch i386
-endif
-endif
-
-.PHONY: all
-.PHONY: install
-.PHONY: clean
-
-all:
-
-$(MAKEROOT)/libs:
- mkdir $(MAKEROOT)/libs
-
-$(MAKEROOT)/bin:
- mkdir $(MAKEROOT)/bin
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+
+ARCH ?= IA32
+
+CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
+LINUX:=$(findstring Linux, $(shell uname -s))
+DARWIN:=$(findstring Darwin, $(shell uname -s))
+
+CC = gcc
+CXX = g++
+AS = gcc
+AR = ar
+LD = ld
+LINKER ?= $(CC)
+ifeq ($(ARCH), IA32)
+ARCH_INCLUDE = -I $(MAKEROOT)/Include/Ia32/
+endif
+
+ifeq ($(ARCH), X64)
+ARCH_INCLUDE = -I $(MAKEROOT)/Include/X64/
+endif
+
+INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE)
+CPPFLAGS = $(INCLUDE)
+CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -fno-merge-constants -nostdlib -Wall -Werror -c -g
+LFLAGS =
+
+#
+# Snow Leopard is a 32-bit and 64-bit environment. uname -m returns -i386, but gcc defaults
+# to x86_64. So make sure tools match uname -m
+#
+uname_s = $(shell uname -s)
+ifeq ($(uname_s),Darwin)
+ CFLAGS += -arch i386
+ CPPFLAGS += -arch i386
+ LFLAGS += -arch i386
+endif
+
+.PHONY: all
+.PHONY: install
+.PHONY: clean
+
+all:
+
+$(MAKEROOT)/libs:
+ mkdir $(MAKEROOT)/libs
+
+$(MAKEROOT)/bin:
+ mkdir $(MAKEROOT)/bin
diff --git a/BaseTools/Source/Python/AutoGen/AutoGen.py b/BaseTools/Source/Python/AutoGen/AutoGen.py
index 3b8024c199..d95f40bf82 100644
--- a/BaseTools/Source/Python/AutoGen/AutoGen.py
+++ b/BaseTools/Source/Python/AutoGen/AutoGen.py
@@ -34,6 +34,7 @@ import Common.GlobalData as GlobalData
from GenFds.FdfParser import *
from CommonDataClass.CommonClass import SkuInfoClass
from Workspace.BuildClassObject import *
+import Common.VpdInfoFile as VpdInfoFile
## Regular expression for splitting Dependency Expression stirng into tokens
gDepexTokenPattern = re.compile("(\(|\)|\w+| \S+\.inf)")
@@ -244,7 +245,7 @@ class WorkspaceAutoGen(AutoGen):
self._BuildCommand = self.AutoGenObjectList[0].BuildCommand
return self._BuildCommand
- ## Create makefile for the platform and mdoules in it
+ ## Create makefile for the platform and modules in it
#
# @param CreateDepsMakeFile Flag indicating if the makefile for
# modules will be created as well
@@ -476,19 +477,141 @@ class PlatformAutoGen(AutoGen):
UnicodePcdArray = []
HiiPcdArray = []
OtherPcdArray = []
- for Pcd in self._DynamicPcdList:
- # just pick the a value to determine whether is unicode string type
- Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
- PcdValue = Sku.DefaultValue
- if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
- # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
- UnicodePcdArray.append(Pcd)
- elif len(Sku.VariableName) > 0:
- # if found HII type PCD then insert to right of UnicodeIndex
- HiiPcdArray.append(Pcd)
- else:
- OtherPcdArray.append(Pcd)
- del self._DynamicPcdList[:]
+ VpdFile = VpdInfoFile.VpdInfoFile()
+ NeedProcessVpdMapFile = False
+
+ if (self.Workspace.ArchList[-1] == self.Arch):
+ for Pcd in self._DynamicPcdList:
+
+ # just pick the a value to determine whether is unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ PcdValue = Sku.DefaultValue
+ if Pcd.DatumType == 'VOID*' and PcdValue.startswith("L"):
+ # if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
+ UnicodePcdArray.append(Pcd)
+ elif len(Sku.VariableName) > 0:
+ # if found HII type PCD then insert to right of UnicodeIndex
+ HiiPcdArray.append(Pcd)
+ else:
+ OtherPcdArray.append(Pcd)
+
+ if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ #
+ # Fix the optional data of VPD PCD.
+ #
+ if (Pcd.DatumType.strip() != "VOID*"):
+ if Sku.DefaultValue == '':
+ Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue = Pcd.MaxDatumSize
+ Pcd.MaxDatumSize = None
+ else:
+ EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
+ File=self.MetaFile,
+ ExtraData="\n\tPCD: %s.%s format incorrect in DSC: %s\n\t\t\n"
+ % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, self.Platform.MetaFile.Path))
+
+ VpdFile.Add(Pcd, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+
+ #
+ # Fix the PCDs define in VPD PCD section that never referenced by module.
+ # An example is PCD for signature usage.
+ #
+ for DscPcd in self.Platform.Pcds:
+ DscPcdEntry = self.Platform.Pcds[DscPcd]
+ if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
+ if not (self.Platform.VpdToolGuid == None or self.Platform.VpdToolGuid == ''):
+ FoundFlag = False
+ for VpdPcd in VpdFile._VpdArray.keys():
+ # This PCD has been referenced by module
+ if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (VpdPcd.TokenCName == DscPcdEntry.TokenCName):
+ FoundFlag = True
+
+ # Not found, it should be signature
+ if not FoundFlag :
+ # just pick the a value to determine whether is unicode string type
+ Sku = DscPcdEntry.SkuInfoList[DscPcdEntry.SkuInfoList.keys()[0]]
+ Sku.VpdOffset = Sku.VpdOffset.strip()
+
+ # Need to iterate DEC pcd information to get the value & datumtype
+ for eachDec in self.PackageList:
+ for DecPcd in eachDec.Pcds:
+ DecPcdEntry = eachDec.Pcds[DecPcd]
+ if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
+ (DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
+ DscPcdEntry.DatumType = DecPcdEntry.DatumType
+ DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
+ Sku.DefaultValue = DecPcdEntry.DefaultValue
+
+ VpdFile.Add(DscPcdEntry, Sku.VpdOffset)
+ # if the offset of a VPD is *, then it need to be fixed up by third party tool.
+ if not NeedProcessVpdMapFile and Sku.VpdOffset == "*":
+ NeedProcessVpdMapFile = True
+
+
+ if (self.Platform.FlashDefinition == None or self.Platform.FlashDefinition == '') and \
+ VpdFile.GetCount() != 0:
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
+ "Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
+
+ if VpdFile.GetCount() != 0:
+ WorkspaceDb = self.BuildDatabase.WorkspaceDb
+ DscTimeStamp = WorkspaceDb.GetTimeStamp(WorkspaceDb.GetFileId(str(self.Platform.MetaFile)))
+ FvPath = os.path.join(self.BuildDir, "FV")
+ if not os.path.exists(FvPath):
+ try:
+ os.makedirs(FvPath)
+ except:
+ EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
+
+ VpdFileName = self.Platform.VpdFileName
+ if VpdFileName == None or VpdFileName == "" :
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
+ else :
+ VpdFilePath = os.path.join(FvPath, "%s.txt" % VpdFileName)
+
+ if not os.path.exists(VpdFilePath) or os.path.getmtime(VpdFilePath) < DscTimeStamp:
+ VpdFile.Write(VpdFilePath)
+
+ # retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
+ BPDGToolName = None
+ for ToolDef in self.ToolDefinition.values():
+ if ToolDef.has_key("GUID") and ToolDef["GUID"] == self.Platform.VpdToolGuid:
+ if not ToolDef.has_key("PATH"):
+ EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
+ BPDGToolName = ToolDef["PATH"]
+ break
+ # Call third party GUID BPDG tool.
+ if BPDGToolName != None:
+ VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath, VpdFileName)
+ else:
+ EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
+
+ # Process VPD map file generated by third party BPDG tool
+ if NeedProcessVpdMapFile:
+ if VpdFileName == None or VpdFileName == "" :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % self.Platform.VpdToolGuid)
+ else :
+ VpdMapFilePath = os.path.join(self.BuildDir, "FV", "%s.map" % VpdFileName)
+ if os.path.exists(VpdMapFilePath):
+ VpdFile.Read(VpdMapFilePath)
+
+ # Fixup "*" offset
+ for Pcd in self._DynamicPcdList:
+ # just pick the a value to determine whether is unicode string type
+ Sku = Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]]
+ if Sku.VpdOffset == "*":
+ Sku.VpdOffset = VpdFile.GetOffset(Pcd)[0]
+ else:
+ EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
+
+ # Delete the DynamicPcdList At the last time enter into this function
+ del self._DynamicPcdList[:]
self._DynamicPcdList.extend(UnicodePcdArray)
self._DynamicPcdList.extend(HiiPcdArray)
self._DynamicPcdList.extend(OtherPcdArray)
@@ -709,10 +832,14 @@ class PlatformAutoGen(AutoGen):
## Get list of non-dynamic PCDs
def _GetNonDynamicPcdList(self):
+ if self._NonDynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
def _GetDynamicPcdList(self):
+ if self._DynamicPcdList == None:
+ self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
@@ -952,6 +1079,10 @@ class PlatformAutoGen(AutoGen):
if FromPcd != None:
if ToPcd.Pending and FromPcd.Type not in [None, '']:
ToPcd.Type = FromPcd.Type
+ elif (ToPcd.Type not in [None, '']) and (FromPcd.Type not in [None, ''])\
+ and (ToPcd.Type != FromPcd.Type) and (ToPcd.Type in FromPcd.Type):
+ if ToPcd.Type.strip() == "DynamicEx":
+ ToPcd.Type = FromPcd.Type
elif ToPcd.Type not in [None, ''] and FromPcd.Type not in [None, ''] \
and ToPcd.Type != FromPcd.Type:
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py
index 1db68ebec5..2a133d3812 100644
--- a/BaseTools/Source/Python/AutoGen/GenC.py
+++ b/BaseTools/Source/Python/AutoGen/GenC.py
@@ -1028,7 +1028,9 @@ def CreateModulePcdCode(Info, AutoGenC, AutoGenH, Pcd):
ArraySize = ArraySize / 2;
if ArraySize < (len(Value) + 1):
- ArraySize = len(Value) + 1
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Info))
Value = NewValue + '0 }'
Array = '[%d]' % ArraySize
#
@@ -1262,10 +1264,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
VariableHeadValueList = []
Pcd.InitString = 'UNINIT'
- if Pcd.Type in ["DynamicVpd", "DynamicExVpd"]:
- Pcd.TokenTypeList = ['PCD_TYPE_VPD']
- elif Pcd.DatumType == 'VOID*':
- Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ if Pcd.DatumType == 'VOID*':
+ if Pcd.Type not in ["DynamicVpd", "DynamicExVpd"]:
+ Pcd.TokenTypeList = ['PCD_TYPE_STRING']
+ else:
+ Pcd.TokenTypeList = []
elif Pcd.DatumType == 'BOOLEAN':
Pcd.TokenTypeList = ['PCD_DATUM_TYPE_UINT8']
else:
@@ -1364,8 +1367,11 @@ def CreatePcdDatabasePhaseSpecificAutoGen (Platform, Phase):
Dict['SIZE_TABLE_MAXIMUM_LENGTH'].append(Pcd.MaxDatumSize)
if Pcd.MaxDatumSize != '':
MaxDatumSize = int(Pcd.MaxDatumSize, 0)
- if MaxDatumSize > Size:
- Size = MaxDatumSize
+ if MaxDatumSize < Size:
+ EdkLogger.error("build", AUTOGEN_ERROR,
+ "The maximum size of VOID* type PCD '%s.%s' is less than its actual size occupied." % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName),
+ ExtraData="[%s]" % str(Platform))
+ Size = MaxDatumSize
Dict['STRING_TABLE_LENGTH'].append(Size)
StringTableIndex += 1
StringTableSize += (Size)
diff --git a/BaseTools/Source/Python/Common/DataType.py b/BaseTools/Source/Python/Common/DataType.py
index 982d710ce5..62a23ea773 100644
--- a/BaseTools/Source/Python/Common/DataType.py
+++ b/BaseTools/Source/Python/Common/DataType.py
@@ -334,6 +334,7 @@ TAB_DEC_DEFINES_DEC_SPECIFICATION = 'DEC_SPECIFICATION'
TAB_DEC_DEFINES_PACKAGE_NAME = 'PACKAGE_NAME'
TAB_DEC_DEFINES_PACKAGE_GUID = 'PACKAGE_GUID'
TAB_DEC_DEFINES_PACKAGE_VERSION = 'PACKAGE_VERSION'
+TAB_DEC_DEFINES_PKG_UNI_FILE = 'PKG_UNI_FILE'
#
# Dsc Definitions
@@ -353,6 +354,8 @@ TAB_DSC_DEFINES_MAKEFILE_NAME = 'MAKEFILE_NAME'
TAB_DSC_DEFINES_BS_BASE_ADDRESS = 'BsBaseAddress'
TAB_DSC_DEFINES_RT_BASE_ADDRESS = 'RtBaseAddress'
TAB_DSC_DEFINES_DEFINE = 'DEFINE'
+TAB_DSC_DEFINES_VPD_TOOL_GUID = 'VPD_TOOL_GUID'
+TAB_DSC_DEFINES_VPD_FILENAME = 'VPD_FILENAME'
TAB_FIX_LOAD_TOP_MEMORY_ADDRESS = 'FIX_LOAD_TOP_MEMORY_ADDRESS'
#
diff --git a/BaseTools/Source/Python/Common/Misc.py b/BaseTools/Source/Python/Common/Misc.py
index 126985fdac..311749285f 100644
--- a/BaseTools/Source/Python/Common/Misc.py
+++ b/BaseTools/Source/Python/Common/Misc.py
@@ -719,7 +719,7 @@ class TemplateString(object):
while Template:
MatchObj = gPlaceholderPattern.search(Template, SearchFrom)
if not MatchObj:
- if MatchEnd < len(Template):
+ if MatchEnd <= len(Template):
TemplateSection = TemplateString.Section(Template[SectionStart:], PlaceHolderList)
TemplateSectionList.append(TemplateSection)
break
diff --git a/BaseTools/Source/Python/Common/String.py b/BaseTools/Source/Python/Common/String.py
index 896fb7da0f..283e913b3b 100644
--- a/BaseTools/Source/Python/Common/String.py
+++ b/BaseTools/Source/Python/Common/String.py
@@ -296,6 +296,50 @@ def CleanString(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppSty
return Line
+## CleanString2
+#
+# Split comments in a string
+# Remove spaces
+#
+# @param Line: The string to be cleaned
+# @param CommentCharacter: Comment char, used to ignore comment content, default is DataType.TAB_COMMENT_SPLIT
+#
+# @retval Path Formatted path
+#
+def CleanString2(Line, CommentCharacter = DataType.TAB_COMMENT_SPLIT, AllowCppStyleComment=False):
+ #
+ # remove whitespace
+ #
+ Line = Line.strip();
+ #
+ # Replace R8's comment character
+ #
+ if AllowCppStyleComment:
+ Line = Line.replace(DataType.TAB_COMMENT_R8_SPLIT, CommentCharacter)
+ #
+ # separate comments and statements
+ #
+ LineParts = Line.split(CommentCharacter, 1);
+ #
+ # remove whitespace again
+ #
+ Line = LineParts[0].strip();
+ if len(LineParts) > 1:
+ Comment = LineParts[1].strip()
+ # Remove prefixed and trailing comment characters
+ Start = 0
+ End = len(Comment)
+ while Start < End and Comment.startswith(CommentCharacter, Start, End):
+ Start += 1
+ while End >= 0 and Comment.endswith(CommentCharacter, Start, End):
+ End -= 1
+ Comment = Comment[Start:End]
+ Comment = Comment.strip()
+ else:
+ Comment = ''
+
+ return Line, Comment
+
## GetMultipleValuesOfKeyFromLines
#
# Parse multiple strings to clean comment and spaces
diff --git a/BaseTools/Source/Python/Common/VpdInfoFile.py b/BaseTools/Source/Python/Common/VpdInfoFile.py
new file mode 100644
index 0000000000..0111744cc0
--- /dev/null
+++ b/BaseTools/Source/Python/Common/VpdInfoFile.py
@@ -0,0 +1,259 @@
+## @file
+#
+# This package manage the VPD PCD information file which will be generated
+# by build tool's autogen.
+# The VPD PCD information file will be input for third-party BPDG tool which
+# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
+#
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+import os
+import re
+import Common.EdkLogger as EdkLogger
+import Common.BuildToolError as BuildToolError
+import subprocess
+
+FILE_COMMENT_TEMPLATE = \
+"""
+## @file
+#
+# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
+#
+# This file lists all VPD informations for a platform collected by build.exe.
+#
+# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
+# This program and the accompanying materials
+# are licensed and made available under the terms and conditions of the BSD License
+# which accompanies this distribution. The full text of the license may be found at
+# http://opensource.org/licenses/bsd-license.php
+#
+# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
+# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
+#
+
+"""
+
+## The class manage VpdInfoFile.
+#
+# This file contains an ordered (based on position in the DSC file) list of the PCDs specified in the platform description file (DSC). The Value field that will be assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC file) or the DEC file (if not defined in the INF file). This file is used as an input to the BPDG tool.
+# Format for this file (using EBNF notation) is:
+# <File> :: = [<CommentBlock>]
+# [<PcdEntry>]*
+# <CommentBlock> ::= ["#" <String> <EOL>]*
+# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
+# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>
+# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID
+# <PcdCName> ::= C Variable Name of the PCD
+# <Offset> ::= {"*"} {<HexNumber>}
+# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}
+# <Size> ::= <HexNumber>
+# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
+# <NonNegativeInt> ::= (0-9)+
+# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>
+# <DblQuote> ::= 0x22
+# <Array> ::= {<CArray>} {<NList>}
+# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"
+# <NList> ::= <HexNumber> ["," <HexNumber>]*
+#
+class VpdInfoFile:
+
+ ## The mapping dictionary from datum type to size string.
+ _MAX_SIZE_TYPE = {"BOOLEAN":"1", "UINT8":"1", "UINT16":"2", "UINT32":"4", "UINT64":"8"}
+ _rVpdPcdLine = None
+ ## Constructor
+ def __init__(self):
+ ## Dictionary for VPD in following format
+ #
+ # Key : PcdClassObject instance.
+ # @see BuildClassObject.PcdClassObject
+ # Value : offset in different SKU such as [sku1_offset, sku2_offset]
+ self._VpdArray = {}
+
+ ## Add a VPD PCD collected from platform's autogen when building.
+ #
+ # @param vpds The list of VPD PCD collected for a platform.
+ # @see BuildClassObject.PcdClassObject
+ #
+ # @param offset integer value for VPD's offset in specific SKU.
+ #
+ def Add(self, Vpd, Offset):
+ if (Vpd == None):
+ EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
+
+ if not (Offset >= 0 or Offset == "*"):
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
+
+ if Vpd.DatumType == "VOID*":
+ if Vpd.MaxDatumSize <= 0:
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
+ elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
+ if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
+ Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
+ else:
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
+
+ if Vpd not in self._VpdArray.keys():
+ #
+ # If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
+ #
+ self._VpdArray[Vpd] = [Offset]
+ else:
+ #
+ # If there is an offset for a specific SKU in dict, then append this offset for other sku to array.
+ #
+ self._VpdArray[Vpd].append(Offset)
+
+
+ ## Generate VPD PCD information into a text file
+ #
+ # If parameter FilePath is invalid, then assert.
+ # If
+ # @param FilePath The given file path which would hold VPD information
+ def Write(self, FilePath):
+ if not (FilePath != None or len(FilePath) != 0):
+ EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
+ "Invalid parameter FilePath: %s." % FilePath)
+ try:
+ fd = open(FilePath, "w")
+ except:
+ EdkLogger.error("VpdInfoFile",
+ BuildToolError.FILE_OPEN_FAILURE,
+ "Fail to open file %s for written." % FilePath)
+
+ try:
+ # write file header
+ fd.write(FILE_COMMENT_TEMPLATE)
+
+ # write each of PCD in VPD type
+ for Pcd in self._VpdArray.keys():
+ for Offset in self._VpdArray[Pcd]:
+ PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
+ if PcdValue == "" :
+ PcdValue = Pcd.DefaultValue
+
+ fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
+ except:
+ EdkLogger.error("VpdInfoFile",
+ BuildToolError.FILE_WRITE_FAILURE,
+ "Fail to write file %s" % FilePath)
+ fd.close()
+
+ ## Read an existing VPD PCD info file.
+ #
+ # This routine will read VPD PCD information from existing file and construct
+ # internal PcdClassObject array.
+ # This routine could be used by third-party tool to parse VPD info file content.
+ #
+ # @param FilePath The full path string for existing VPD PCD info file.
+ def Read(self, FilePath):
+ try:
+ fd = open(FilePath, "r")
+ except:
+ EdkLogger.error("VpdInfoFile",
+ BuildToolError.FILE_OPEN_FAILURE,
+ "Fail to open file %s for written." % FilePath)
+ Lines = fd.readlines()
+ for Line in Lines:
+ Line = Line.strip()
+ if len(Line) == 0 or Line.startswith("#"):
+ continue
+
+ #
+ # the line must follow output format defined in BPDG spec.
+ #
+ try:
+ PcdName, Offset, Size, Value = Line.split("#")[0].split("|")
+ TokenSpaceName, PcdTokenName = PcdName.split(".")
+ except:
+ EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
+
+ Found = False
+ for VpdObject in self._VpdArray.keys():
+ if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip():
+ if self._VpdArray[VpdObject][0] == "*":
+ if Offset == "*":
+ EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
+
+ self._VpdArray[VpdObject][0] = Offset
+ Found = True
+ break
+ if not Found:
+ EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
+
+ ## Get count of VPD PCD collected from platform's autogen when building.
+ #
+ # @return The integer count value
+ def GetCount(self):
+ Count = 0
+ for OffsetList in self._VpdArray.values():
+ Count += len(OffsetList)
+
+ return Count
+
+ ## Get an offset value for a given VPD PCD
+ #
+ # Because BPDG only support one Sku, so only return offset for SKU default.
+ #
+ # @param vpd A given VPD PCD
+ def GetOffset(self, vpd):
+ if not self._VpdArray.has_key(vpd):
+ return None
+
+ if len(self._VpdArray[vpd]) == 0:
+ return None
+
+ return self._VpdArray[vpd]
+
+## Call external BPDG tool to process VPD file
+#
+# @param ToolPath The string path name for BPDG tool
+# @param VpdFileName The string path name for VPD information guid.txt
+#
+def CallExtenalBPDGTool(ToolPath, VpdFilePath, VpdFileName):
+ assert ToolPath != None, "Invalid parameter ToolPath"
+ assert VpdFilePath != None and os.path.exists(VpdFilePath), "Invalid parameter VpdFileName"
+
+ OutputDir = os.path.dirname(VpdFilePath)
+ if (VpdFileName == None or VpdFileName == "") :
+ FileName = os.path.basename(VpdFilePath)
+ BaseName, ext = os.path.splitext(FileName)
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
+ else :
+ OutputMapFileName = os.path.join(OutputDir, "%s.map" % VpdFileName)
+ OutputBinFileName = os.path.join(OutputDir, "%s.bin" % VpdFileName)
+
+ try:
+ PopenObject = subprocess.Popen([ToolPath,
+ '-o', OutputBinFileName,
+ '-m', OutputMapFileName,
+ '-s',
+ '-f',
+ '-v',
+ VpdFilePath],
+ stdout=subprocess.PIPE,
+ stderr= subprocess.PIPE)
+ except Exception, X:
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
+ (out, error) = PopenObject.communicate()
+ print out
+ while PopenObject.returncode == None :
+ PopenObject.wait()
+
+ if PopenObject.returncode != 0:
+ if PopenObject.returncode != 0:
+ EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
+ EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
+ (PopenObject.returncode, str(error)))
+
+ return PopenObject.returncode
diff --git a/BaseTools/Source/Python/CommonDataClass/DataClass.py b/BaseTools/Source/Python/CommonDataClass/DataClass.py
index 2615bd17b0..ebb0358b0b 100644
--- a/BaseTools/Source/Python/CommonDataClass/DataClass.py
+++ b/BaseTools/Source/Python/CommonDataClass/DataClass.py
@@ -29,6 +29,7 @@ MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
+MODEL_FILE_OTHERS = 1099
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
@@ -91,6 +92,8 @@ MODEL_META_DATA_NMAKE = 5012
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 50013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
+MODEL_META_DATA_COMMENT = 5016
+MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_EXTERNAL_DEPENDENCY = 10000
@@ -103,6 +106,8 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
('MODEL_FILE_DSC', MODEL_FILE_DSC),
('MODEL_FILE_FDF', MODEL_FILE_FDF),
('MODEL_FILE_INC', MODEL_FILE_INC),
+ ('MODEL_FILE_CIF', MODEL_FILE_CIF),
+ ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
@@ -159,16 +164,17 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
- ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE)
+ ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
+ ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
]
## FunctionClass
#
# This class defines a structure of a function
-#
+#
# @param ID: ID of a Function
# @param Header: Header of a Function
-# @param Modifier: Modifier of a Function
+# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Funciont
# @param StartLine: StartLine of a Function
@@ -183,7 +189,7 @@ MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
#
# @var ID: ID of a Function
# @var Header: Header of a Function
-# @var Modifier: Modifier of a Function
+# @var Modifier: Modifier of a Function
# @var Name: Name of a Function
# @var ReturnStatement: ReturnStatement of a Funciont
# @var StartLine: StartLine of a Function
@@ -204,7 +210,7 @@ class FunctionClass(object):
FunNameStartLine = -1, FunNameStartColumn = -1):
self.ID = ID
self.Header = Header
- self.Modifier = Modifier
+ self.Modifier = Modifier
self.Name = Name
self.ReturnStatement = ReturnStatement
self.StartLine = StartLine
@@ -216,14 +222,14 @@ class FunctionClass(object):
self.BelongsToFile = BelongsToFile
self.FunNameStartLine = FunNameStartLine
self.FunNameStartColumn = FunNameStartColumn
-
+
self.IdentifierList = IdentifierList
self.PcdList = PcdList
## IdentifierClass
#
# This class defines a structure of a variable
-#
+#
# @param ID: ID of a Identifier
# @param Modifier: Modifier of a Identifier
# @param Type: Type of a Identifier
@@ -269,7 +275,7 @@ class IdentifierClass(object):
## PcdClass
#
# This class defines a structure of a Pcd
-#
+#
# @param ID: ID of a Pcd
# @param CName: CName of a Pcd
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
@@ -314,7 +320,7 @@ class PcdDataClass(object):
## FileClass
#
# This class defines a structure of a file
-#
+#
# @param ID: ID of a File
# @param Name: Name of a File
# @param ExtName: ExtName of a File
@@ -340,14 +346,14 @@ class PcdDataClass(object):
class FileClass(object):
def __init__(self, ID = -1, Name = '', ExtName = '', Path = '', FullPath = '', Model = MODEL_UNKNOWN, TimeStamp = '', \
FunctionList = [], IdentifierList = [], PcdList = []):
- self.ID = ID
+ self.ID = ID
self.Name = Name
- self.ExtName = ExtName
+ self.ExtName = ExtName
self.Path = Path
self.FullPath = FullPath
self.Model = Model
self.TimeStamp = TimeStamp
-
+
self.FunctionList = FunctionList
self.IdentifierList = IdentifierList
self.PcdList = PcdList
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py
index dbfedb514b..1e9ce34f8b 100644
--- a/BaseTools/Source/Python/Ecc/Check.py
+++ b/BaseTools/Source/Python/Ecc/Check.py
@@ -30,6 +30,7 @@ class Check(object):
# Check all required checkpoints
def Check(self):
+ self.GeneralCheck()
self.MetaDataFileCheck()
self.DoxygenCheck()
self.IncludeFileCheck()
@@ -38,6 +39,29 @@ class Check(object):
self.FunctionLayoutCheck()
self.NamingConventionCheck()
+ # General Checking
+ def GeneralCheck(self):
+ self.GeneralCheckNonAcsii()
+
+ # Check whether file has non ACSII char
+ def GeneralCheckNonAcsii(self):
+ if EccGlobalData.gConfig.GeneralCheckNonAcsii == '1' or EccGlobalData.gConfig.GeneralCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
+ EdkLogger.quiet("Checking Non-ACSII char in file ...")
+ SqlCommand = """select ID, FullPath, ExtName from File"""
+ RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
+ for Record in RecordSet:
+ if Record[2].upper() not in EccGlobalData.gConfig.BinaryExtList:
+ op = open(Record[1]).readlines()
+ IndexOfLine = 0
+ for Line in op:
+ IndexOfLine += 1
+ IndexOfChar = 0
+ for Char in Line:
+ IndexOfChar += 1
+ if ord(Char) > 126:
+ OtherMsg = "File %s has Non-ASCII char at line %s column %s" %(Record[1], IndexOfLine, IndexOfChar)
+ EccGlobalData.gDb.TblReport.Insert(ERROR_GENERAL_CHECK_NON_ACSII, OtherMsg = OtherMsg, BelongsToTable = 'File', BelongsToItem = Record[0])
+
# C Function Layout Checking
def FunctionLayoutCheck(self):
self.FunctionLayoutCheckReturnType()
@@ -67,22 +91,26 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckReturnType == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout return type ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c', '.h'):
- FullName = os.path.join(Dirpath, F)
- c.CheckFuncLayoutReturnType(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutReturnType(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutReturnType(FullName)
# Check whether any optional functional modifiers exist and next to the return type
def FunctionLayoutCheckModifier(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckOptionalFunctionalModifier == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout modifier ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c', '.h'):
- FullName = os.path.join(Dirpath, F)
- c.CheckFuncLayoutModifier(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutModifier(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutModifier(FullName)
# Check whether the next line contains the function name, left justified, followed by the beginning of the parameter list
# Check whether the closing parenthesis is on its own line and also indented two spaces
@@ -90,33 +118,41 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionName == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function name ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c', '.h'):
- FullName = os.path.join(Dirpath, F)
- c.CheckFuncLayoutName(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c', '.h'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutName(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckFuncLayoutName(FullName)
+
# Check whether the function prototypes in include files have the same form as function definitions
def FunctionLayoutCheckPrototype(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionPrototype == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function prototype ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[PROTOTYPE]" + FullName)
- c.CheckFuncLayoutPrototype(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[PROTOTYPE]" + FullName)
+# c.CheckFuncLayoutPrototype(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[PROTOTYPE]" + FullName)
+ c.CheckFuncLayoutPrototype(FullName)
# Check whether the body of a function is contained by open and close braces that must be in the first column
def FunctionLayoutCheckBody(self):
if EccGlobalData.gConfig.CFunctionLayoutCheckFunctionBody == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout function body ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- c.CheckFuncLayoutBody(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutBody(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ c.CheckFuncLayoutBody(FullName)
# Check whether the data declarations is the first code in a module.
# self.CFunctionLayoutCheckDataDeclaration = 1
@@ -125,11 +161,14 @@ class Check(object):
if EccGlobalData.gConfig.CFunctionLayoutCheckNoInitOfVariable == '1' or EccGlobalData.gConfig.CFunctionLayoutCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking function layout local variables ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- c.CheckFuncLayoutLocalVariable(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckFuncLayoutLocalVariable(FullName)
+
+ for FullName in EccGlobalData.gCFileList:
+ c.CheckFuncLayoutLocalVariable(FullName)
# Check whether no use of STATIC for functions
# self.CFunctionLayoutCheckNoStatic = 1
@@ -150,22 +189,26 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckNoUseCType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration No use C type ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- c.CheckDeclNoUseCType(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckDeclNoUseCType(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckDeclNoUseCType(FullName)
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
def DeclCheckInOutModifier(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckInOutModifier == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration argument modifier ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- c.CheckDeclArgModifier(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# c.CheckDeclArgModifier(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ c.CheckDeclArgModifier(FullName)
# Check whether the EFIAPI modifier should be used at the entry of drivers, events, and member functions of protocols
def DeclCheckEFIAPIModifier(self):
@@ -177,24 +220,30 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckEnumeratedType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration enum typedef ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[ENUM]" + FullName)
- c.CheckDeclEnumTypedef(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[ENUM]" + FullName)
+# c.CheckDeclEnumTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[ENUM]" + FullName)
+ c.CheckDeclEnumTypedef(FullName)
# Check whether Structure Type has a 'typedef' and the name is capital
def DeclCheckStructureDeclaration(self):
if EccGlobalData.gConfig.DeclarationDataTypeCheckStructureDeclaration == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration struct typedef ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[STRUCT]" + FullName)
- c.CheckDeclStructTypedef(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[STRUCT]" + FullName)
+# c.CheckDeclStructTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[STRUCT]" + FullName)
+ c.CheckDeclStructTypedef(FullName)
# Check whether having same Structure
def DeclCheckSameStructure(self):
@@ -223,12 +272,15 @@ class Check(object):
if EccGlobalData.gConfig.DeclarationDataTypeCheckUnionType == '1' or EccGlobalData.gConfig.DeclarationDataTypeCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Declaration union typedef ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[UNION]" + FullName)
- c.CheckDeclUnionTypedef(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[UNION]" + FullName)
+# c.CheckDeclUnionTypedef(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ EdkLogger.quiet("[UNION]" + FullName)
+ c.CheckDeclUnionTypedef(FullName)
# Predicate Expression Checking
def PredicateExpressionCheck(self):
@@ -241,35 +293,46 @@ class Check(object):
if EccGlobalData.gConfig.PredicateExpressionCheckBooleanValue == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Boolean value ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[BOOLEAN]" + FullName)
- c.CheckBooleanValueComparison(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[BOOLEAN]" + FullName)
+# c.CheckBooleanValueComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[BOOLEAN]" + FullName)
+ c.CheckBooleanValueComparison(FullName)
# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
def PredicateExpressionCheckNonBooleanOperator(self):
if EccGlobalData.gConfig.PredicateExpressionCheckNonBooleanOperator == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression Non-Boolean variable...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
- c.CheckNonBooleanValueComparison(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+# c.CheckNonBooleanValueComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[NON-BOOLEAN]" + FullName)
+ c.CheckNonBooleanValueComparison(FullName)
+
# Check whether a comparison of any pointer to zero must be done via the NULL type
def PredicateExpressionCheckComparisonNullType(self):
if EccGlobalData.gConfig.PredicateExpressionCheckComparisonNullType == '1' or EccGlobalData.gConfig.PredicateExpressionCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking predicate expression NULL pointer ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.c'):
- FullName = os.path.join(Dirpath, F)
- EdkLogger.quiet("[POINTER]" + FullName)
- c.CheckPointerNullComparison(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.c'):
+# FullName = os.path.join(Dirpath, F)
+# EdkLogger.quiet("[POINTER]" + FullName)
+# c.CheckPointerNullComparison(FullName)
+ for FullName in EccGlobalData.gCFileList:
+ EdkLogger.quiet("[POINTER]" + FullName)
+ c.CheckPointerNullComparison(FullName)
+
# Include file checking
def IncludeFileCheck(self):
self.IncludeFileCheckIfndef()
@@ -309,22 +372,26 @@ class Check(object):
if EccGlobalData.gConfig.IncludeFileCheckIfndefStatement == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file ifndef ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h'):
- FullName = os.path.join(Dirpath, F)
- MsgList = c.CheckHeaderFileIfndef(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckHeaderFileIfndef(FullName)
+ for FullName in EccGlobalData.gHFileList:
+ MsgList = c.CheckHeaderFileIfndef(FullName)
# Check whether include files NOT contain code or define data variables
def IncludeFileCheckData(self):
if EccGlobalData.gConfig.IncludeFileCheckData == '1' or EccGlobalData.gConfig.IncludeFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking header file data ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h'):
- FullName = os.path.join(Dirpath, F)
- MsgList = c.CheckHeaderFileData(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckHeaderFileData(FullName)
+ for FullName in EccGlobalData.gHFileList:
+ MsgList = c.CheckHeaderFileData(FullName)
# Doxygen document checking
def DoxygenCheck(self):
@@ -347,24 +414,28 @@ class Check(object):
MsgList = c.CheckFileHeaderDoxygenComments(FullName)
elif Ext in ('.inf', '.dec', '.dsc', '.fdf'):
FullName = os.path.join(Dirpath, F)
- if not open(FullName).read().startswith('## @file'):
+ op = open(FullName).readlines()
+ if not op[0].startswith('## @file') and op[6].startswith('## @file') and op[7].startswith('## @file'):
SqlStatement = """ select ID from File where FullPath like '%s'""" % FullName
ResultSet = EccGlobalData.gDb.TblFile.Exec(SqlStatement)
for Result in ResultSet:
Msg = 'INF/DEC/DSC/FDF file header comment should begin with ""## @file""'
EccGlobalData.gDb.TblReport.Insert(ERROR_DOXYGEN_CHECK_FILE_HEADER, Msg, "File", Result[0])
-
+
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
def DoxygenCheckFunctionHeader(self):
if EccGlobalData.gConfig.DoxygenCheckFunctionHeader == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen function header ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckFuncHeaderDoxygenComments(FullName)
+
# Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
@@ -377,22 +448,26 @@ class Check(object):
if EccGlobalData.gConfig.DoxygenCheckCommentFormat == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen comment ///< ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckDoxygenTripleForwardSlash(FullName)
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
def DoxygenCheckCommand(self):
if EccGlobalData.gConfig.DoxygenCheckCommand == '1' or EccGlobalData.gConfig.DoxygenCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
EdkLogger.quiet("Checking Doxygen command ...")
- for Dirpath, Dirnames, Filenames in self.WalkTree():
- for F in Filenames:
- if os.path.splitext(F)[1] in ('.h', '.c'):
- FullName = os.path.join(Dirpath, F)
- MsgList = c.CheckDoxygenCommand(FullName)
+# for Dirpath, Dirnames, Filenames in self.WalkTree():
+# for F in Filenames:
+# if os.path.splitext(F)[1] in ('.h', '.c'):
+# FullName = os.path.join(Dirpath, F)
+# MsgList = c.CheckDoxygenCommand(FullName)
+ for FullName in EccGlobalData.gCFileList + EccGlobalData.gHFileList:
+ MsgList = c.CheckDoxygenCommand(FullName)
# Meta-Data File Processing Checking
def MetaDataFileCheck(self):
@@ -556,7 +631,6 @@ class Check(object):
SqlCommand2 = """select Name from File where ID = %s""" %Record[5]
DscFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand1)[0][0])[0]
FdfFileName = os.path.splitext(EccGlobalData.gDb.TblDsc.Exec(SqlCommand2)[0][0])[0]
- print DscFileName, 111, FdfFileName
if DscFileName != FdfFileName:
continue
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_PCD_DUPLICATE, Record[1]):
@@ -680,8 +754,8 @@ class Check(object):
SqlCommand = """
select ID from File where FullPath in
(select B.Path || '\\' || A.Value1 from INF as A, File as B where A.Model = %s and A.BelongsToFile = %s
- and B.ID = %s)
- """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile)
+ and B.ID = %s and (B.Model = %s or B.Model = %s))
+ """ %(MODEL_EFI_SOURCE_FILE, BelongsToFile, BelongsToFile, MODEL_FILE_C, MODEL_FILE_H)
TableSet = EccGlobalData.gDb.TblFile.Exec(SqlCommand)
for Tbl in TableSet:
TblName = 'Identifier' + str(Tbl[0])
@@ -714,7 +788,7 @@ class Check(object):
if Path.startswith('\\') or Path.startswith('/'):
Path = Path[1:]
return Path
-
+
# Check whether two module INFs under one workspace has the same FILE_GUID value
def MetaDataFileCheckModuleFileGuidDuplication(self):
if EccGlobalData.gConfig.MetaDataFileCheckModuleFileGuidDuplication == '1' or EccGlobalData.gConfig.MetaDataFileCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
@@ -733,7 +807,7 @@ class Check(object):
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, InfPath1):
Msg = "The FILE_GUID of INF file [%s] is duplicated with that of %s" % (InfPath1, InfPath2)
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_GUID_DUPLICATION, OtherMsg = Msg, BelongsToTable = Table.Table, BelongsToItem = Record[0])
-
+
# Check whether these is duplicate Guid/Ppi/Protocol name
def CheckGuidProtocolPpi(self, ErrorID, Model, Table):
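The Check.py hunks above all make the same change: each check stops re-walking the workspace tree and filtering on file extension, and instead iterates the C and H file lists collected once up front (EccGlobalData.gCFileList and gHFileList). A minimal sketch of that pattern follows; collect_source_files and run_check are illustrative helpers, not part of the patch. Incidentally, the old filter `os.path.splitext(F)[1] in ('.c')` is a substring test against the string '.c' rather than tuple membership, since ('.c') is just '.c'; the precomputed lists avoid relying on that construct.

    # Sketch only: the real tool fills gCFileList/gHFileList from its database.
    import os

    def collect_source_files(root):
        # One-time walk instead of one walk per check.
        c_files, h_files = [], []
        for dirpath, dirnames, filenames in os.walk(root):
            for name in filenames:
                ext = os.path.splitext(name)[1]
                if ext == '.c':
                    c_files.append(os.path.join(dirpath, name))
                elif ext == '.h':
                    h_files.append(os.path.join(dirpath, name))
        return c_files, h_files

    def run_check(check, file_list, tag=None):
        # Each check now just loops over the precomputed list.
        for full_name in file_list:
            if tag:
                print('[%s]%s' % (tag, full_name))
            check(full_name)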
diff --git a/BaseTools/Source/Python/Ecc/Configuration.py b/BaseTools/Source/Python/Ecc/Configuration.py
index 310cb5716b..1478ee6351 100644
--- a/BaseTools/Source/Python/Ecc/Configuration.py
+++ b/BaseTools/Source/Python/Ecc/Configuration.py
@@ -28,7 +28,7 @@ from Common.String import *
class Configuration(object):
def __init__(self, Filename):
self.Filename = Filename
-
+
self.Version = 0.1
## Identify to if check all items
@@ -49,14 +49,14 @@ class Configuration(object):
# SpaceCheckAll
#
self.AutoCorrect = 0
-
+
# List customized Modifer here, split with ','
# Defaultly use the definition in class DataType
self.ModifierList = MODIFIER_LIST
-
+
## General Checking
self.GeneralCheckAll = 0
-
+
# Check whether NO Tab is used, replaced with spaces
self.GeneralCheckNoTab = 1
# The width of Tab
@@ -77,31 +77,33 @@ class Configuration(object):
self.GeneralCheckCarriageReturn = 1
# Check whether the file exists
self.GeneralCheckFileExistence = 1
-
+ # Check whether file has non ACSII char
+ self.GeneralCheckNonAcsii = 1
+
## Space Checking
self.SpaceCheckAll = 1
-
+
## Predicate Expression Checking
self.PredicateExpressionCheckAll = 0
-
+
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
self.PredicateExpressionCheckBooleanValue = 1
- # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+ # Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
self.PredicateExpressionCheckNonBooleanOperator = 1
# Check whether a comparison of any pointer to zero must be done via the NULL type
self.PredicateExpressionCheckComparisonNullType = 1
-
+
## Headers Checking
self.HeaderCheckAll = 0
-
+
# Check whether File header exists
self.HeaderCheckFile = 1
# Check whether Function header exists
self.HeaderCheckFunction = 1
-
+
## C Function Layout Checking
self.CFunctionLayoutCheckAll = 0
-
+
# Check whether return type exists and in the first line
self.CFunctionLayoutCheckReturnType = 1
# Check whether any optional functional modifiers exist and next to the return type
@@ -119,10 +121,10 @@ class Configuration(object):
self.CFunctionLayoutCheckNoInitOfVariable = 1
# Check whether no use of STATIC for functions
self.CFunctionLayoutCheckNoStatic = 1
-
+
## Include Files Checking
self.IncludeFileCheckAll = 0
-
+
#Check whether having include files with same name
self.IncludeFileCheckSameName = 1
# Check whether all include file contents is guarded by a #ifndef statement.
@@ -132,10 +134,10 @@ class Configuration(object):
# Check whether include files contain only public or only private data
# Check whether include files NOT contain code or define data variables
self.IncludeFileCheckData = 1
-
+
## Declarations and Data Types Checking
self.DeclarationDataTypeCheckAll = 0
-
+
# Check whether no use of int, unsigned, char, void, static, long in any .c, .h or .asl files.
self.DeclarationDataTypeCheckNoUseCType = 1
# Check whether the modifiers IN, OUT, OPTIONAL, and UNALIGNED are used only to qualify arguments to a function and should not appear in a data type declaration
@@ -150,10 +152,10 @@ class Configuration(object):
self.DeclarationDataTypeCheckSameStructure = 1
# Check whether Union Type has a 'typedef' and the name is capital
self.DeclarationDataTypeCheckUnionType = 1
-
+
## Naming Conventions Checking
self.NamingConventionCheckAll = 0
-
+
# Check whether only capital letters are used for #define declarations
self.NamingConventionCheckDefineStatement = 1
# Check whether only capital letters are used for typedef declarations
@@ -172,33 +174,33 @@ class Configuration(object):
self.NamingConventionCheckFunctionName = 1
# Check whether NO use short variable name with single character
self.NamingConventionCheckSingleCharacterVariable = 1
-
+
## Doxygen Checking
self.DoxygenCheckAll = 0
-
+
# Check whether the file headers are followed Doxygen special documentation blocks in section 2.3.5
self.DoxygenCheckFileHeader = 1
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
self.DoxygenCheckFunctionHeader = 1
- # Check whether the first line of text in a comment block is a brief description of the element being documented.
+ # Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
self.DoxygenCheckCommentDescription = 1
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
self.DoxygenCheckCommentFormat = 1
# Check whether only Doxygen commands allowed to mark the code are @bug and @todo.
self.DoxygenCheckCommand = 1
-
+
## Meta-Data File Processing Checking
self.MetaDataFileCheckAll = 0
-
+
# Check whether each file defined in meta-data exists
self.MetaDataFileCheckPathName = 1
# Generate a list for all files defined in meta-data files
self.MetaDataFileCheckGenerateFileList = 1
# The path of log file
self.MetaDataFileCheckPathOfGenerateFileList = 'File.log'
- # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
- # Each Library Instance must specify the Supported Module Types in its INF file,
+ # Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+ # Each Library Instance must specify the Supported Module Types in its INF file,
# and any module specifying the library instance must be one of the supported types.
self.MetaDataFileCheckLibraryInstance = 1
# Check whether a Library Instance has been defined for all dependent library classes
@@ -235,14 +237,17 @@ class Configuration(object):
# The directory listed here will not be parsed, split with ','
self.SkipDirList = []
+ # A list for binary file ext name
+ self.BinaryExtList = []
+
self.ParseConfig()
-
+
def ParseConfig(self):
Filepath = os.path.normpath(self.Filename)
if not os.path.isfile(Filepath):
ErrorMsg = "Can't find configuration file '%s'" % Filepath
EdkLogger.error("Ecc", EdkLogger.ECC_ERROR, ErrorMsg, File = Filepath)
-
+
LineNo = 0
for Line in open(Filepath, 'r'):
LineNo = LineNo + 1
@@ -258,8 +263,10 @@ class Configuration(object):
continue
if List[0] == 'SkipDirList':
List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
+ if List[0] == 'BinaryExtList':
+ List[1] = GetSplitValueList(List[1], TAB_COMMA_SPLIT)
self.__dict__[List[0]] = List[1]
-
+
def ShowMe(self):
print self.Filename
for Key in self.__dict__.keys():
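The Configuration.py hunks above add the GeneralCheckNonAcsii option, a BinaryExtList option, and teach ParseConfig to split list-valued options on commas. A small sketch of that parsing, assuming TAB_COMMA_SPLIT is ','; parse_config_line stands in for the real GetSplitValueList-based code and is not part of the patch.

    def parse_config_line(line, list_valued=('SkipDirList', 'BinaryExtList')):
        # Returns (key, value); list-valued options become Python lists.
        line = line.strip()
        if not line or line.startswith('#'):
            return None
        key, _, value = line.partition('=')
        key, value = key.strip(), value.strip()
        if key in list_valued:
            value = [item.strip() for item in value.split(',') if item.strip()]
        return key, value

    # Example: parse_config_line('BinaryExtList = EXE, EFI, FV')
    # -> ('BinaryExtList', ['EXE', 'EFI', 'FV'])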
diff --git a/BaseTools/Source/Python/Ecc/Database.py b/BaseTools/Source/Python/Ecc/Database.py
index 9520be4345..4b79cb708f 100644
--- a/BaseTools/Source/Python/Ecc/Database.py
+++ b/BaseTools/Source/Python/Ecc/Database.py
@@ -41,7 +41,7 @@ DATABASE_PATH = "Ecc.db"
# This class defined the ECC databse
# During the phase of initialization, the database will create all tables and
# insert all records of table DataModel
-#
+#
# @param object: Inherited from object class
# @param DbPath: A string for the path of the ECC database
#
@@ -64,7 +64,7 @@ class Database(object):
self.TblDec = None
self.TblDsc = None
self.TblFdf = None
-
+
## Initialize ECC database
#
# 1. Delete all old existing tables
@@ -85,7 +85,7 @@ class Database(object):
# to avoid non-ascii charater conversion error
self.Conn.text_factory = str
self.Cur = self.Conn.cursor()
-
+
self.TblDataModel = TableDataModel(self.Cur)
self.TblFile = TableFile(self.Cur)
self.TblFunction = TableFunction(self.Cur)
@@ -96,7 +96,7 @@ class Database(object):
self.TblDec = TableDec(self.Cur)
self.TblDsc = TableDsc(self.Cur)
self.TblFdf = TableFdf(self.Cur)
-
+
#
# Create new tables
#
@@ -110,7 +110,7 @@ class Database(object):
self.TblDec.Create()
self.TblDsc.Create()
self.TblFdf.Create()
-
+
#
# Init each table's ID
#
@@ -123,13 +123,13 @@ class Database(object):
self.TblDec.InitID()
self.TblDsc.InitID()
self.TblFdf.InitID()
-
+
#
# Initialize table DataModel
#
if NewDatabase:
self.TblDataModel.InitTable()
-
+
EdkLogger.verbose("Initialize ECC database ... DONE!")
## Query a table
@@ -138,7 +138,7 @@ class Database(object):
#
def QueryTable(self, Table):
Table.Query()
-
+
## Close entire database
#
# Commit all first
@@ -147,15 +147,15 @@ class Database(object):
def Close(self):
#
# Commit to file
- #
+ #
self.Conn.commit()
-
+
#
# Close connection and cursor
#
self.Cur.close()
self.Conn.close()
-
+
## Insert one file information
#
# Insert one file's information to the database
@@ -171,43 +171,44 @@ class Database(object):
# Insert a record for file
#
FileID = self.TblFile.Insert(File.Name, File.ExtName, File.Path, File.FullPath, Model = File.Model, TimeStamp = File.TimeStamp)
- IdTable = TableIdentifier(self.Cur)
- IdTable.Table = "Identifier%s" % FileID
- IdTable.Create()
- #
- # Insert function of file
- #
- for Function in File.FunctionList:
- FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
- Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
- Function.BodyStartLine, Function.BodyStartColumn, FileID, \
- Function.FunNameStartLine, Function.FunNameStartColumn)
+ if File.Model == DataClass.MODEL_FILE_C or File.Model == DataClass.MODEL_FILE_H:
+ IdTable = TableIdentifier(self.Cur)
+ IdTable.Table = "Identifier%s" % FileID
+ IdTable.Create()
+ #
+ # Insert function of file
+ #
+ for Function in File.FunctionList:
+ FunctionID = self.TblFunction.Insert(Function.Header, Function.Modifier, Function.Name, Function.ReturnStatement, \
+ Function.StartLine, Function.StartColumn, Function.EndLine, Function.EndColumn, \
+ Function.BodyStartLine, Function.BodyStartColumn, FileID, \
+ Function.FunNameStartLine, Function.FunNameStartColumn)
+ #
+ # Insert Identifier of function
+ #
+ for Identifier in Function.IdentifierList:
+ IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
+ FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+ #
+ # Insert Pcd of function
+ #
+ for Pcd in Function.PcdList:
+ PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
+ FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
#
- # Insert Identifier of function
+ # Insert Identifier of file
#
- for Identifier in Function.IdentifierList:
+ for Identifier in File.IdentifierList:
IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
- FileID, FunctionID, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
+ FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
#
- # Insert Pcd of function
+ # Insert Pcd of file
#
- for Pcd in Function.PcdList:
+ for Pcd in File.PcdList:
PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
- FileID, FunctionID, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
- #
- # Insert Identifier of file
- #
- for Identifier in File.IdentifierList:
- IdentifierID = IdTable.Insert(Identifier.Modifier, Identifier.Type, Identifier.Name, Identifier.Value, Identifier.Model, \
- FileID, -1, Identifier.StartLine, Identifier.StartColumn, Identifier.EndLine, Identifier.EndColumn)
- #
- # Insert Pcd of file
- #
- for Pcd in File.PcdList:
- PcdID = self.TblPcd.Insert(Pcd.CName, Pcd.TokenSpaceGuidCName, Pcd.Token, Pcd.DatumType, Pcd.Model, \
- FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
-
+ FileID, -1, Pcd.StartLine, Pcd.StartColumn, Pcd.EndLine, Pcd.EndColumn)
+
EdkLogger.verbose("Insert information from file %s ... DONE!" % File.FullPath)
## UpdateIdentifierBelongsToFunction
@@ -217,7 +218,7 @@ class Database(object):
#
def UpdateIdentifierBelongsToFunction_disabled(self):
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
-
+
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
self.Cur.execute(SqlCommand)
@@ -233,7 +234,7 @@ class Database(object):
# Check whether an identifier belongs to a function
#
EdkLogger.debug(4, "For common identifiers ... ")
- SqlCommand = """select ID from Function
+ SqlCommand = """select ID from Function
where StartLine < %s and EndLine > %s
and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
@@ -243,13 +244,13 @@ class Database(object):
SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
self.Cur.execute(SqlCommand)
-
+
#
# Check whether the identifier is a function header
#
- EdkLogger.debug(4, "For function headers ... ")
+ EdkLogger.debug(4, "For function headers ... ")
if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
- SqlCommand = """select ID from Function
+ SqlCommand = """select ID from Function
where StartLine = %s + 1
and BelongsToFile = %s""" % (EndLine, BelongsToFile)
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
@@ -259,7 +260,7 @@ class Database(object):
SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
self.Cur.execute(SqlCommand)
-
+
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
@@ -270,7 +271,7 @@ class Database(object):
#
def UpdateIdentifierBelongsToFunction(self):
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
-
+
SqlCommand = """select ID, BelongsToFile, StartLine, EndLine from Function"""
Records = self.TblFunction.Exec(SqlCommand)
Data1 = []
@@ -308,7 +309,7 @@ class Database(object):
# self.Cur.executemany(SqlCommand, Data2)
#
# EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
-
+
##
#
@@ -320,11 +321,11 @@ if __name__ == '__main__':
#EdkLogger.SetLevel(EdkLogger.VERBOSE)
EdkLogger.SetLevel(EdkLogger.DEBUG_0)
EdkLogger.verbose("Start at " + time.strftime('%H:%M:%S', time.localtime()))
-
+
Db = Database(DATABASE_PATH)
Db.InitDatabase()
Db.QueryTable(Db.TblDataModel)
-
+
identifier1 = DataClass.IdentifierClass(-1, '', '', "i''1", 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 32, 43, 54, 43)
identifier2 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 15, 43, 20, 43)
identifier3 = DataClass.IdentifierClass(-1, '', '', 'i1', 'aaa', DataClass.MODEL_IDENTIFIER_COMMENT, 1, -1, 55, 43, 58, 43)
@@ -333,12 +334,12 @@ if __name__ == '__main__':
file = DataClass.FileClass(-1, 'F1', 'c', 'C:\\', 'C:\\F1.exe', DataClass.MODEL_FILE_C, '2007-12-28', [fun1], [identifier1, identifier2, identifier3, identifier4], [])
Db.InsertOneFile(file)
Db.UpdateIdentifierBelongsToFunction()
-
+
Db.QueryTable(Db.TblFile)
Db.QueryTable(Db.TblFunction)
Db.QueryTable(Db.TblPcd)
Db.QueryTable(Db.TblIdentifier)
-
+
Db.Close()
EdkLogger.verbose("End at " + time.strftime('%H:%M:%S', time.localtime()))
-
+
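The Database.py hunk above guards per-file work so that the "Identifier<ID>" table and its function, identifier, and PCD rows are only created for C and H files; other file models get just the File row. A compressed sketch of that control flow, with the dependencies passed in explicitly; insert_file itself is illustrative, while TblFile.Insert and the DataClass constants mirror the real module.

    def insert_file(db, file_obj, DataClass, TableIdentifier):
        file_id = db.TblFile.Insert(file_obj.Name, file_obj.ExtName, file_obj.Path,
                                    file_obj.FullPath, Model=file_obj.Model,
                                    TimeStamp=file_obj.TimeStamp)
        if file_obj.Model not in (DataClass.MODEL_FILE_C, DataClass.MODEL_FILE_H):
            return file_id                  # metadata and binary files stop here
        id_table = TableIdentifier(db.Cur)
        id_table.Table = "Identifier%s" % file_id
        id_table.Create()
        # ... function, identifier and PCD rows are inserted as in the hunk above
        return file_id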
diff --git a/BaseTools/Source/Python/Ecc/Ecc.py b/BaseTools/Source/Python/Ecc/Ecc.py
index 62b265bbaa..e9a1c2a890 100644
--- a/BaseTools/Source/Python/Ecc/Ecc.py
+++ b/BaseTools/Source/Python/Ecc/Ecc.py
@@ -106,6 +106,8 @@ class Ecc(object):
self.BuildMetaDataFileDatabase()
EccGlobalData.gIdentifierTableList = GetTableList((MODEL_FILE_C, MODEL_FILE_H), 'Identifier', EccGlobalData.gDb)
+ EccGlobalData.gCFileList = GetFileList(MODEL_FILE_C, EccGlobalData.gDb)
+ EccGlobalData.gHFileList = GetFileList(MODEL_FILE_H, EccGlobalData.gDb)
## BuildMetaDataFileDatabase
#
@@ -227,7 +229,7 @@ class Ecc(object):
if Options.Workspace:
os.environ["WORKSPACE"] = Options.Workspace
-
+
# Check workspace envirnoment
if "WORKSPACE" not in os.environ:
EdkLogger.error("ECC", BuildToolError.ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
diff --git a/BaseTools/Source/Python/Ecc/EccGlobalData.py b/BaseTools/Source/Python/Ecc/EccGlobalData.py
index 29e4aca0d0..5226e4a7d1 100644
--- a/BaseTools/Source/Python/Ecc/EccGlobalData.py
+++ b/BaseTools/Source/Python/Ecc/EccGlobalData.py
@@ -21,4 +21,6 @@ gTarget = ''
gConfig = None
gDb = None
gIdentifierTableList = []
+gCFileList = []
+gHFileList = []
gException = None \ No newline at end of file
diff --git a/BaseTools/Source/Python/Ecc/EccToolError.py b/BaseTools/Source/Python/Ecc/EccToolError.py
index 985737f192..48810e466c 100644
--- a/BaseTools/Source/Python/Ecc/EccToolError.py
+++ b/BaseTools/Source/Python/Ecc/EccToolError.py
@@ -19,6 +19,7 @@ ERROR_GENERAL_CHECK_NO_ASM = 1004
ERROR_GENERAL_CHECK_NO_PROGMA = 1005
ERROR_GENERAL_CHECK_CARRIAGE_RETURN = 1006
ERROR_GENERAL_CHECK_FILE_EXISTENCE = 1007
+ERROR_GENERAL_CHECK_NON_ACSII = 1008
ERROR_SPACE_CHECK_ALL = 2000
@@ -105,6 +106,7 @@ gEccErrorMessage = {
ERROR_GENERAL_CHECK_NO_PROGMA : """There should be no use of "#progma" in source file except "#pragma pack(#)\"""",
ERROR_GENERAL_CHECK_CARRIAGE_RETURN : "There should be a carriage return at the end of the file",
ERROR_GENERAL_CHECK_FILE_EXISTENCE : "File not found",
+ ERROR_GENERAL_CHECK_NON_ACSII : "File has invalid Non-ACSII char",
ERROR_SPACE_CHECK_ALL : "",
diff --git a/BaseTools/Source/Python/Ecc/MetaDataParser.py b/BaseTools/Source/Python/Ecc/MetaDataParser.py
index 36ad6e492f..4dda2e5360 100644
--- a/BaseTools/Source/Python/Ecc/MetaDataParser.py
+++ b/BaseTools/Source/Python/Ecc/MetaDataParser.py
@@ -26,7 +26,7 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
Filepath = os.path.normpath(Filepath)
SqlCommand = """
select Value1, FullPath from Inf, File where Inf.Model = %s and Inf.BelongsToFile in(
- select distinct B.BelongsToFile from File as A left join Inf as B
+ select distinct B.BelongsToFile from File as A left join Inf as B
where A.ID = B.BelongsToFile and B.Model = %s and (A.Path || '%s' || B.Value1) = '%s')
and Inf.BelongsToFile = File.ID""" \
% (MODEL_META_DATA_PACKAGE, MODEL_EFI_SOURCE_FILE, '\\', Filepath)
@@ -36,7 +36,7 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
InfFullPath = os.path.normpath(os.path.join(WorkSpace, Record[1]))
(DecPath, DecName) = os.path.split(DecFullPath)
(InfPath, InfName) = os.path.split(InfFullPath)
- SqlCommand = """select Value1 from Dec where BelongsToFile =
+ SqlCommand = """select Value1 from Dec where BelongsToFile =
(select ID from File where FullPath = '%s') and Model = %s""" \
% (DecFullPath, MODEL_EFI_INCLUDE)
NewRecordSet = Db.TblDec.Exec(SqlCommand)
@@ -46,9 +46,22 @@ def GetIncludeListOfFile(WorkSpace, Filepath, Db):
IncludePath = os.path.normpath(os.path.join(DecPath, NewRecord[0]))
if IncludePath not in IncludeList:
IncludeList.append(IncludePath)
-
+
return IncludeList
+## Get the file list
+#
+# Search table file and find all specific type files
+#
+def GetFileList(FileModel, Db):
+ FileList = []
+ SqlCommand = """select FullPath from File where Model = %s""" % str(FileModel)
+ RecordSet = Db.TblFile.Exec(SqlCommand)
+ for Record in RecordSet:
+ FileList.append(Record[0])
+
+ return FileList
+
## Get the table list
#
# Search table file and find all small tables
@@ -60,6 +73,6 @@ def GetTableList(FileModelList, Table, Db):
for Record in RecordSet:
TableName = Table + str(Record[0])
TableList.append(TableName)
-
+
return TableList
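GetFileList, added above, simply selects FullPath from the File table for a given Model. A standalone equivalent using sqlite3 directly is sketched below; the tool itself goes through its Db.TblFile.Exec wrapper rather than opening the database by path, so the helper and the 'Ecc.db' path here are assumptions for illustration.

    import sqlite3

    def get_file_list(db_path, file_model):
        # Assumes the ECC schema: a File table with FullPath and Model columns.
        conn = sqlite3.connect(db_path)
        try:
            cur = conn.execute("select FullPath from File where Model = ?",
                               (file_model,))
            return [row[0] for row in cur.fetchall()]
        finally:
            conn.close()

    # The results are cached once at startup, e.g.
    #   gCFileList = get_file_list('Ecc.db', MODEL_FILE_C)
    #   gHFileList = get_file_list('Ecc.db', MODEL_FILE_H)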
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py
index 941392be0f..5a8c1d13e7 100644
--- a/BaseTools/Source/Python/Ecc/c.py
+++ b/BaseTools/Source/Python/Ecc/c.py
@@ -514,7 +514,9 @@ def CollectSourceCodeDataIntoDB(RootDir):
dirnames.append(Dirname)
for f in filenames:
+ collector = None
FullName = os.path.normpath(os.path.join(dirpath, f))
+ model = DataClass.MODEL_FILE_OTHERS
if os.path.splitext(f)[1] in ('.h', '.c'):
EdkLogger.info("Parsing " + FullName)
model = f.endswith('c') and DataClass.MODEL_FILE_C or DataClass.MODEL_FILE_H
@@ -526,12 +528,13 @@ def CollectSourceCodeDataIntoDB(RootDir):
collector.CleanFileProfileBuffer()
collector.ParseFileWithClearedPPDirective()
# collector.PrintFragments()
- BaseName = os.path.basename(f)
- DirName = os.path.dirname(FullName)
- Ext = os.path.splitext(f)[1].lstrip('.')
- ModifiedTime = os.path.getmtime(FullName)
- FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
- FileObjList.append(FileObj)
+ BaseName = os.path.basename(f)
+ DirName = os.path.dirname(FullName)
+ Ext = os.path.splitext(f)[1].lstrip('.')
+ ModifiedTime = os.path.getmtime(FullName)
+ FileObj = DataClass.FileClass(-1, BaseName, Ext, DirName, FullName, model, ModifiedTime, GetFunctionList(), GetIdentifierList(), [])
+ FileObjList.append(FileObj)
+ if collector:
collector.CleanFileProfileBuffer()
if len(ParseErrorFileList) > 0:
@@ -539,7 +542,8 @@ def CollectSourceCodeDataIntoDB(RootDir):
Db = GetDB()
for file in FileObjList:
- Db.InsertOneFile(file)
+ if file.ExtName.upper() not in ['INF', 'DEC', 'DSC', 'FDF']:
+ Db.InsertOneFile(file)
Db.UpdateIdentifierBelongsToFunction()
@@ -552,7 +556,6 @@ def GetTableID(FullFileName, ErrorMsgList = None):
from File
where FullPath like '%s'
""" % FullFileName
-
ResultSet = Db.TblFile.Exec(SqlStatement)
FileID = -1
@@ -567,6 +570,8 @@ def GetTableID(FullFileName, ErrorMsgList = None):
return FileID
def GetIncludeFileList(FullFileName):
+ if os.path.splitext(FullFileName)[1].upper() not in ('.H'):
+ return []
IFList = IncludeFileListDict.get(FullFileName)
if IFList != None:
return IFList
@@ -2301,21 +2306,32 @@ def CheckFileHeaderDoxygenComments(FullFileName):
FileTable = 'Identifier' + str(FileID)
SqlStatement = """ select Value, ID
from %s
- where Model = %d and StartLine = 1 and StartColumn = 0
+ where Model = %d and (StartLine = 1 or StartLine = 7 or StartLine = 8) and StartColumn = 0
""" % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT)
ResultSet = Db.TblFile.Exec(SqlStatement)
if len(ResultSet) == 0:
PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'No Comment appear at the very beginning of file.', 'File', FileID)
return ErrorMsgList
+ IsFoundError1 = True
+ IsFoundError2 = True
+ IsFoundError3 = True
for Result in ResultSet:
- CommentStr = Result[0]
- if not CommentStr.startswith('/** @file'):
- PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, Result[1])
- if not CommentStr.endswith('**/'):
- PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with **/', FileTable, Result[1])
- if CommentStr.find('.') == -1:
- PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period \'.\'', FileTable, Result[1])
+ CommentStr = Result[0].strip()
+ ID = Result[1]
+ if CommentStr.startswith('/** @file'):
+ IsFoundError1 = False
+ if CommentStr.endswith('**/'):
+ IsFoundError2 = False
+ if CommentStr.find('.') != -1:
+ IsFoundError3 = False
+
+ if IsFoundError1:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_FILE_HEADER, 'File header comment should begin with ""/** @file""', FileTable, ID)
+ if IsFoundError2:
+ PrintErrorMsg(ERROR_HEADER_CHECK_FILE, 'File header comment should end with ""**/""', FileTable, ID)
+ if IsFoundError3:
+ PrintErrorMsg(ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION, 'Comment description should end with period "".""', FileTable, ID)
def CheckFuncHeaderDoxygenComments(FullFileName):
ErrorMsgList = []
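The CheckFileHeaderDoxygenComments change above widens the query to comments starting at line 1, 7, or 8 and reports each rule only if no candidate comment satisfies it, instead of flagging every comment individually. A sketch of that aggregated-flag approach; check_file_header and its error labels are illustrative, and the comments argument stands in for the (Value, ID) rows fetched from the database.

    def check_file_header(comments):
        missing_file_tag = True
        missing_terminator = True
        missing_period = True
        last_id = None
        for text, comment_id in comments:
            text = text.strip()
            last_id = comment_id
            if text.startswith('/** @file'):
                missing_file_tag = False
            if text.endswith('**/'):
                missing_terminator = False
            if '.' in text:
                missing_period = False
        errors = []
        if missing_file_tag:
            errors.append(('FILE_HEADER', last_id))
        if missing_terminator:
            errors.append(('HEADER_END', last_id))
        if missing_period:
            errors.append(('DESCRIPTION_PERIOD', last_id))
        return errors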
diff --git a/BaseTools/Source/Python/Ecc/config.ini b/BaseTools/Source/Python/Ecc/config.ini
index 973bc68b38..357c52ced0 100644
--- a/BaseTools/Source/Python/Ecc/config.ini
+++ b/BaseTools/Source/Python/Ecc/config.ini
@@ -21,7 +21,7 @@ Version = 0.1
# Identify to if check all items
# 1 - Check all items and ignore all other detailed items
# 0 - Not check all items, the tool will go through all other detailed items to decide to check or not
-#
+#
CheckAll = 0
#
@@ -68,6 +68,8 @@ GeneralCheckNoProgma = 1
GeneralCheckCarriageReturn = 1
# Check whether the file exists
GeneralCheckFileExistence = 1
+# Check whether file has non ACSII char
+GeneralCheckNonAcsii = 1
#
# Space Checking
@@ -81,7 +83,7 @@ PredicateExpressionCheckAll = 0
# Check whether Boolean values, variable type BOOLEAN not use explicit comparisons to TRUE or FALSE
PredicateExpressionCheckBooleanValue = 1
-# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
+# Check whether Non-Boolean comparisons use a compare operator (==, !=, >, < >=, <=).
PredicateExpressionCheckNonBooleanOperator = 1
# Check whether a comparison of any pointer to zero must be done via the NULL type
PredicateExpressionCheckComparisonNullType = 1
@@ -189,7 +191,7 @@ DoxygenCheckAll = 0
DoxygenCheckFileHeader = 1
# Check whether the function headers are followed Doxygen special documentation blocks in section 2.3.5
DoxygenCheckFunctionHeader = 1
-# Check whether the first line of text in a comment block is a brief description of the element being documented.
+# Check whether the first line of text in a comment block is a brief description of the element being documented.
# The brief description must end with a period.
DoxygenCheckCommentDescription = 1
# Check whether comment lines with '///< ... text ...' format, if it is used, it should be after the code section.
@@ -208,8 +210,8 @@ MetaDataFileCheckPathName = 1
MetaDataFileCheckGenerateFileList = 1
# The path of log file
MetaDataFileCheckPathOfGenerateFileList = File.log
-# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
-# Each Library Instance must specify the Supported Module Types in its INF file,
+# Check whether all Library Instances defined for a given module (or dependent library instance) match the module's type.
+# Each Library Instance must specify the Supported Module Types in its INF file,
# and any module specifying the library instance must be one of the supported types.
MetaDataFileCheckLibraryInstance = 1
# Check whether a Library Instance has been defined for all dependent library classes
@@ -242,3 +244,6 @@ MetaDataFileCheckModuleFileGuidDuplication = 1
# GotoStatementCheckAll = 0
# SpellingCheckAll = 0
#
+
+# A list for binary file ext name
+BinaryExtList = EXE, EFI, FV, ROM, DLL, COM, BMP, GIF, PYD, CMP, BIN, JPG, UNI, RAW, COM2, LIB, DEPEX, SYS, DB
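The config.ini addition above lists the extensions the tool should treat as binary. One plausible consumer of that list is sketched below; the exact call site inside ECC is not shown in this diff, so is_binary is purely illustrative.

    import os

    def is_binary(path, binary_ext_list):
        # Compare the file extension, case-insensitively, against the configured list.
        ext = os.path.splitext(path)[1].lstrip('.').upper()
        return ext in [e.strip().upper() for e in binary_ext_list]

    # is_binary('Bin/Win32/GenFv.exe', ['EXE', 'EFI', 'FV'])  -> True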
diff --git a/BaseTools/Source/Python/GNUmakefile b/BaseTools/Source/Python/GNUmakefile
index e489c10753..9fe46f6edf 100644
--- a/BaseTools/Source/Python/GNUmakefile
+++ b/BaseTools/Source/Python/GNUmakefile
@@ -1,5 +1,5 @@
## @file
-# Windows makefile for Python tools build.
+# Linux makefile for Python tools build.
#
# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py
index 83b58bcd46..d11af6c134 100644
--- a/BaseTools/Source/Python/GenFds/FdfParser.py
+++ b/BaseTools/Source/Python/GenFds/FdfParser.py
@@ -751,7 +751,7 @@ class FdfParser:
raise Warning("Value %s is not a number", self.FileName, Line)
for Profile in AllMacroList:
- if Profile.FileName == FileLineTuple[0] and Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
+ if Profile.MacroName == Name and Profile.DefinedAtLine <= FileLineTuple[1]:
if Op == None:
if Value == 'Bool' and Profile.MacroValue == None or Profile.MacroValue.upper() == 'FALSE':
return False
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
index d95af34dfc..47521e08c5 100644
--- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py
+++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py
@@ -80,7 +80,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
#
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
- ErrorCode, ErrorInfo = PathClassObj.Validate()
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
@@ -343,7 +343,7 @@ class FfsInfStatement(FfsInfStatementClassObject):
if len(PlatformArchList) == 0:
self.InDsc = False
PathClassObj = PathClass(self.InfFileName, GenFdsGlobalVariable.WorkSpaceDir)
- ErrorCode, ErrorInfo = PathClassObj.Validate()
+ ErrorCode, ErrorInfo = PathClassObj.Validate(".inf")
if ErrorCode != 0:
EdkLogger.error("GenFds", ErrorCode, ExtraData=ErrorInfo)
if len(ArchList) == 1:
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py
index cb82c717dd..48fe4983f8 100644
--- a/BaseTools/Source/Python/GenFds/GenFds.py
+++ b/BaseTools/Source/Python/GenFds/GenFds.py
@@ -172,6 +172,7 @@ def main():
"""call Workspace build create database"""
os.environ["WORKSPACE"] = Workspace
+ FdfParser.InputMacroDict["WORKSPACE"] = Workspace
BuildWorkSpace = WorkspaceDatabase(':memory:', FdfParser.InputMacroDict)
BuildWorkSpace.InitDatabase()
diff --git a/BaseTools/Source/Python/Makefile b/BaseTools/Source/Python/Makefile
index 26aacf947f..b3efe538d9 100644
--- a/BaseTools/Source/Python/Makefile
+++ b/BaseTools/Source/Python/Makefile
@@ -22,7 +22,7 @@ MODULES=encodings.cp437,encodings.gbk,encodings.utf_16,encodings.utf_8,encodings
BIN_DIR=$(EDK_TOOLS_PATH)\Bin\Win32
-APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe
+APPLICATIONS=$(BIN_DIR)\build.exe $(BIN_DIR)\GenFds.exe $(BIN_DIR)\Trim.exe $(BIN_DIR)\MigrationMsa2Inf.exe $(BIN_DIR)\Fpd2Dsc.exe $(BIN_DIR)\TargetTool.exe $(BIN_DIR)\spd2dec.exe $(BIN_DIR)\GenDepex.exe $(BIN_DIR)\GenPatchPcdTable.exe $(BIN_DIR)\PatchPcdValue.exe $(BIN_DIR)\BPDG.exe
COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\Database.py \
@@ -46,6 +46,7 @@ COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\String.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\TargetTxtClassObject.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\ToolDefClassObject.py \
+ $(BASE_TOOLS_PATH)\Source\Python\Common\VpdInfoFile.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlParser.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\XmlRoutines.py \
$(BASE_TOOLS_PATH)\Source\Python\Common\__init__.py \
@@ -62,7 +63,7 @@ COMMON_PYTHON=$(BASE_TOOLS_PATH)\Source\Python\Common\BuildToolError.py \
$(BASE_TOOLS_PATH)\Source\Python\Autogen\GenMake.py \
$(BASE_TOOLS_PATH)\Source\Python\Autogen\StrGather.py \
$(BASE_TOOLS_PATH)\Source\Python\Autogen\UniClassObject.py \
- $(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py
+ $(BASE_TOOLS_PATH)\Source\Python\Autogen\__init__.py
all: SetPythonPath $(APPLICATIONS)
@@ -100,6 +101,9 @@ $(BIN_DIR)\GenPatchPcdTable.exe: $(BASE_TOOLS_PATH)\Source\Python\GenPatchPcdTab
$(BIN_DIR)\PatchPcdValue.exe: $(BASE_TOOLS_PATH)\Source\Python\PatchPcdValue\PatchPcdValue.py $(COMMON_PYTHON)
@pushd . & @cd PatchPcdValue & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) PatchPcdValue.py & @popd
+$(BIN_DIR)\BPDG.exe: $(BASE_TOOLS_PATH)\Source\Python\BPDG\BPDG.py $(COMMON_PYTHON)
+ @pushd . & @cd BPDG & @$(FREEZE) --include-modules=$(MODULES) --install-dir=$(BIN_DIR) BPDG.py & @popd
+
clean:
cleanall:
@del /f /q $(BIN_DIR)\*.pyd $(BIN_DIR)\*.dll
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py
index f0fef4dda3..7d709d3f0b 100644
--- a/BaseTools/Source/Python/Workspace/BuildClassObject.py
+++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py
@@ -1,7 +1,7 @@
## @file
# This file is used to define each component of the build database
#
-# Copyright (c) 2007 - 2008, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
@@ -31,6 +31,7 @@ from Common.BuildToolError import *
# @param MaxDatumSize: Input value for MaxDatumSize of Pcd, default is None
# @param SkuInfoList: Input value for SkuInfoList of Pcd, default is {}
# @param IsOverrided: Input value for IsOverrided of Pcd, default is False
+# @param GuidValue: Input value for TokenSpaceGuidValue of Pcd, default is None
#
# @var TokenCName: To store value for TokenCName
# @var TokenSpaceGuidCName: To store value for TokenSpaceGuidCName
@@ -43,7 +44,7 @@ from Common.BuildToolError import *
# @var Phase: To store value for Phase, default is "DXE"
#
class PcdClassObject(object):
- def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, GuidValue = None):
+ def __init__(self, Name = None, Guid = None, Type = None, DatumType = None, Value = None, Token = None, MaxDatumSize = None, SkuInfoList = {}, IsOverrided = False, GuidValue = None):
self.TokenCName = Name
self.TokenSpaceGuidCName = Guid
self.TokenSpaceGuidValue = GuidValue
@@ -55,7 +56,8 @@ class PcdClassObject(object):
self.SkuInfoList = SkuInfoList
self.Phase = "DXE"
self.Pending = False
-
+ self.IsOverrided = IsOverrided
+
## Convert the class to a string
#
# Convert each member of the class to string
@@ -73,7 +75,7 @@ class PcdClassObject(object):
'MaxDatumSize=' + str(self.MaxDatumSize) + ', '
for Item in self.SkuInfoList.values():
Rtn = Rtn + 'SkuId=' + Item.SkuId + ', ' + 'SkuIdName=' + Item.SkuIdName
- Rtn = Rtn + str(self.IsOverrided)
+ Rtn = Rtn + ', IsOverrided=' + str(self.IsOverrided)
return Rtn
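The BuildClassObject.py hunk above inserts IsOverrided as a positional parameter ahead of GuidValue, which is why the WorkspaceDatabase.py hunks further below add an extra "False," to every PcdClassObject call. A trimmed sketch of the widened constructor and a matching call; the PCD and GUID names in the example are hypothetical.

    class PcdClassObject(object):
        def __init__(self, Name=None, Guid=None, Type=None, DatumType=None,
                     Value=None, Token=None, MaxDatumSize=None, SkuInfoList={},
                     IsOverrided=False, GuidValue=None):
            self.TokenCName = Name
            self.TokenSpaceGuidCName = Guid
            self.TokenSpaceGuidValue = GuidValue
            self.Type = Type
            self.DatumType = DatumType
            self.DefaultValue = Value
            self.TokenValue = Token
            self.MaxDatumSize = MaxDatumSize
            self.SkuInfoList = SkuInfoList
            self.IsOverrided = IsOverrided

    # Call sites now pass IsOverrided explicitly, just before GuidValue:
    Pcd = PcdClassObject('PcdExample', 'gExampleTokenSpaceGuid', 'FixedAtBuild',
                         'UINT32', '0', '0x0001', '', {}, False, None)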
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py
index 7a6152c8fb..fb66e41fb5 100644
--- a/BaseTools/Source/Python/Workspace/MetaFileParser.py
+++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py
@@ -82,6 +82,7 @@ class MetaFileParser(object):
self.MetaFile = FilePath
self._FileDir = os.path.dirname(self.MetaFile)
self._Macros = copy.copy(Macros)
+ self._Macros["WORKSPACE"] = os.environ["WORKSPACE"]
# for recursive parsing
self._Owner = Owner
@@ -490,7 +491,12 @@ class InfParser(MetaFileParser):
## [FixedPcd], [FeaturePcd], [PatchPcd], [Pcd] and [PcdEx] sections parser
def _PcdParser(self):
TokenList = GetSplitValueList(self._CurrentLine, TAB_VALUE_SPLIT, 1)
- self._ValueList[0:1] = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ ValueList = GetSplitValueList(TokenList[0], TAB_SPLIT)
+ if len(ValueList) != 2:
+ EdkLogger.error('Parser', FORMAT_INVALID, "Illegal token space GUID and PCD name format",
+ ExtraData=self._CurrentLine + " (<TokenSpaceGuidCName>.<PcdCName>)",
+ File=self.MetaFile, Line=self._LineIndex+1)
+ self._ValueList[0:1] = ValueList
if len(TokenList) > 1:
self._ValueList[2] = TokenList[1]
if self._ValueList[0] == '' or self._ValueList[1] == '':
@@ -564,6 +570,7 @@ class DscParser(MetaFileParser):
# sections which allow "!include" directive
_IncludeAllowedSection = [
+ TAB_COMMON_DEFINES.upper(),
TAB_LIBRARIES.upper(),
TAB_LIBRARY_CLASSES.upper(),
TAB_SKUIDS.upper(),
@@ -648,7 +655,25 @@ class DscParser(MetaFileParser):
continue
# file private macros
elif Line.upper().startswith('DEFINE '):
- self._MacroParser()
+ (Name, Value) = self._MacroParser()
+ # Make the defined macro in DSC [Defines] section also
+ # available for FDF file.
+ if self._SectionName == TAB_COMMON_DEFINES.upper():
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ Name,
+ Value,
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
continue
elif Line.upper().startswith('EDK_GLOBAL '):
(Name, Value) = self._MacroParser()
@@ -715,6 +740,22 @@ class DscParser(MetaFileParser):
if TokenList[0] in ['FLASH_DEFINITION', 'OUTPUT_DIRECTORY']:
TokenList[1] = NormPath(TokenList[1], self._Macros)
self._ValueList[0:len(TokenList)] = TokenList
+ # Treat elements in the [defines] section as global macros for FDF file.
+ self._LastItem = self._Store(
+ MODEL_META_DATA_GLOBAL_DEFINE,
+ TokenList[0],
+ TokenList[1],
+ '',
+ 'COMMON',
+ 'COMMON',
+ self._Owner,
+ self._From,
+ self._LineIndex+1,
+ -1,
+ self._LineIndex+1,
+ -1,
+ self._Enabled
+ )
## <subsection_header> parser
def _SubsectionHeaderParser(self):
@@ -762,7 +803,7 @@ class DscParser(MetaFileParser):
EdkLogger.error("Parser", FORMAT_INVALID, File=self.MetaFile, Line=self._LineIndex+1,
ExtraData="'!include' is not allowed under section [%s]" % self._SectionName)
# the included file must be relative to the parsing file
- IncludedFile = os.path.join(self._FileDir, self._ValueList[1])
+ IncludedFile = os.path.join(self._FileDir, NormPath(self._ValueList[1], self._Macros))
Parser = DscParser(IncludedFile, self._FileType, self._Table, self._Macros, From=self._LastItem)
# set the parser status with current status
Parser._SectionName = self._SectionName
@@ -781,6 +822,7 @@ class DscParser(MetaFileParser):
self._SectionType = Parser._SectionType
self._Scope = Parser._Scope
self._Enabled = Parser._Enabled
+ self._Macros.update(Parser._Macros)
else:
if DirectiveName in ["!IF", "!IFDEF", "!IFNDEF"]:
# evaluate the expression
@@ -965,6 +1007,7 @@ class DecParser(MetaFileParser):
#
def __init__(self, FilePath, FileType, Table, Macro=None):
MetaFileParser.__init__(self, FilePath, FileType, Table, Macro, -1)
+ self._Comments = []
## Parser starter
def Start(self):
@@ -975,27 +1018,34 @@ class DecParser(MetaFileParser):
EdkLogger.error("Parser", FILE_READ_FAILURE, ExtraData=self.MetaFile)
for Index in range(0, len(self._Content)):
- Line = CleanString(self._Content[Index])
+ Line, Comment = CleanString2(self._Content[Index])
+ self._CurrentLine = Line
+ self._LineIndex = Index
+
+ # save comment for later use
+ if Comment:
+ self._Comments.append((Comment, self._LineIndex+1))
# skip empty line
if Line == '':
continue
- self._CurrentLine = Line
- self._LineIndex = Index
# section header
if Line[0] == TAB_SECTION_START and Line[-1] == TAB_SECTION_END:
self._SectionHeaderParser()
+ self._Comments = []
continue
elif Line.startswith('DEFINE '):
self._MacroParser()
continue
elif len(self._SectionType) == 0:
+ self._Comments = []
continue
# section content
self._ValueList = ['','','']
self._SectionParser[self._SectionType[0]](self)
if self._ValueList == None:
+ self._Comments = []
continue
#
@@ -1017,6 +1067,22 @@ class DecParser(MetaFileParser):
-1,
0
)
+ for Comment, LineNo in self._Comments:
+ self._Store(
+ MODEL_META_DATA_COMMENT,
+ Comment,
+ self._ValueList[0],
+ self._ValueList[1],
+ Arch,
+ ModuleType,
+ self._LastItem,
+ LineNo,
+ -1,
+ LineNo,
+ -1,
+ 0
+ )
+ self._Comments = []
self._Done()
## Section header parser
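The DecParser changes above buffer the comment portion of each line (via CleanString2) and attach the pending comments to the record stored next, clearing the buffer at section headers, empty sections, and failed value lines. A compact sketch of that buffering pattern; split_comment stands in for CleanString2 and the record layout is illustrative.

    def split_comment(line, comment_char='#'):
        code, _, comment = line.partition(comment_char)
        return code.strip(), comment.strip()

    def parse_with_comments(lines):
        pending, records = [], []
        for index, raw in enumerate(lines):
            code, comment = split_comment(raw)
            if comment:
                pending.append((comment, index + 1))   # keep 1-based line numbers
            if not code:
                continue
            if code.startswith('[') and code.endswith(']'):
                pending = []        # new section: pending comments no longer apply
                continue
            records.append({'value': code, 'comments': pending})
            pending = []
        return records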
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
index 3aabd545d5..dad6ecd49a 100644
--- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
+++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py
@@ -18,6 +18,7 @@ import sqlite3
import os
import os.path
import pickle
+import uuid
import Common.EdkLogger as EdkLogger
import Common.GlobalData as GlobalData
@@ -99,6 +100,10 @@ class DscBuildData(PlatformBuildClassObject):
RecordList = self._RawData[MODEL_META_DATA_DEFINE, self._Arch]
for Record in RecordList:
GlobalData.gEdkGlobal[Record[0]] = Record[1]
+
+ RecordList = self._RawData[MODEL_META_DATA_GLOBAL_DEFINE, self._Arch]
+ for Record in RecordList:
+ GlobalData.gGlobalDefines[Record[0]] = Record[1]
## XXX[key] = value
def __setitem__(self, key, value):
@@ -135,6 +140,8 @@ class DscBuildData(PlatformBuildClassObject):
self._Pcds = None
self._BuildOptions = None
self._LoadFixAddress = None
+ self._VpdToolGuid = None
+ self._VpdFileName = None
## Get architecture
def _GetArch(self):
@@ -188,6 +195,18 @@ class DscBuildData(PlatformBuildClassObject):
self._SkuName = Record[1]
elif Name == TAB_FIX_LOAD_TOP_MEMORY_ADDRESS:
self._LoadFixAddress = Record[1]
+ elif Name == TAB_DSC_DEFINES_VPD_TOOL_GUID:
+ #
+ # try to convert GUID to a real UUID value to see whether the GUID is format
+ # for VPD_TOOL_GUID is correct.
+ #
+ try:
+ uuid.UUID(Record[1])
+ except:
+ EdkLogger.error("build", FORMAT_INVALID, "Invalid GUID format for VPD_TOOL_GUID", File=self.MetaFile)
+ self._VpdToolGuid = Record[1]
+ elif Name == TAB_DSC_DEFINES_VPD_FILENAME:
+ self._VpdFileName = Record[1]
# set _Header to non-None in order to avoid database re-querying
self._Header = 'DUMMY'
@@ -267,6 +286,8 @@ class DscBuildData(PlatformBuildClassObject):
def _SetSkuName(self, Value):
if Value in self.SkuIds:
self._SkuName = Value
+ # Needs to re-retrieve the PCD information
+ self._Pcds = None
def _GetFdfFile(self):
if self._FlashDefinition == None:
@@ -321,6 +342,24 @@ class DscBuildData(PlatformBuildClassObject):
self._LoadFixAddress = ''
return self._LoadFixAddress
+ ## Retrieve the GUID string for VPD tool
+ def _GetVpdToolGuid(self):
+ if self._VpdToolGuid == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._VpdToolGuid == None:
+ self._VpdToolGuid = ''
+ return self._VpdToolGuid
+
+ ## Retrieve the VPD file Name, this is optional in DSC file
+ def _GetVpdFileName(self):
+ if self._VpdFileName == None:
+ if self._Header == None:
+ self._GetHeaderInfo()
+ if self._VpdFileName == None:
+ self._VpdFileName = ''
+ return self._VpdFileName
+
## Retrieve [SkuIds] section information
def _GetSkuIds(self):
if self._SkuIds == None:
@@ -418,6 +457,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{},
+ False,
None
)
Module.Pcds[PcdCName, TokenSpaceGuid] = Pcd
@@ -576,6 +616,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{},
+ False,
None
)
return Pcds
@@ -619,6 +660,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{self.SkuName : SkuInfo},
+ False,
None
)
return Pcds
@@ -661,6 +703,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
'',
{self.SkuName : SkuInfo},
+ False,
None
)
return Pcds
@@ -686,15 +729,21 @@ class DscBuildData(PlatformBuildClassObject):
PcdDict[Arch, SkuName, PcdCName, TokenSpaceGuid] = Setting
# Remove redundant PCD candidates, per the ARCH and SKU
for PcdCName, TokenSpaceGuid in PcdSet:
- ValueList = ['', '']
+ ValueList = ['', '', '']
Setting = PcdDict[self._Arch, self.SkuName, PcdCName, TokenSpaceGuid]
if Setting == None:
continue
TokenList = Setting.split(TAB_VALUE_SPLIT)
ValueList[0:len(TokenList)] = TokenList
- VpdOffset, MaxDatumSize = ValueList
-
- SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset)
+ #
+ # For the VOID* type, it can have optional data of MaxDatumSize and InitialValue
+ # For the Integer & Boolean type, the optional data can only be InitialValue.
+ # At this point, we put all the data into the PcdClssObject for we don't know the PCD's datumtype
+ # until the DEC parser has been called.
+ #
+ VpdOffset, MaxDatumSize, InitialValue = ValueList
+
+ SkuInfo = SkuInfoClass(self.SkuName, self.SkuIds[self.SkuName], '', '', '', '', VpdOffset, InitialValue)
Pcds[PcdCName, TokenSpaceGuid] = PcdClassObject(
PcdCName,
TokenSpaceGuid,
@@ -704,6 +753,7 @@ class DscBuildData(PlatformBuildClassObject):
'',
MaxDatumSize,
{self.SkuName : SkuInfo},
+ False,
None
)
return Pcds
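
A rough illustration (not the BaseTools implementation itself) of the positional split performed above: the VPD PCD's value field is split on '|' into at most three parts, and fields that are not supplied keep their '' defaults. How the second and third fields are interpreted depends on the PCD's datum type, which is only known once the DEC declarations are parsed, hence the deferral noted in the comment.

    TAB_VALUE_SPLIT = '|'  # separator used for PCD value fields in DSC files

    def split_vpd_setting(setting):
        # Positional split into [VpdOffset, MaxDatumSize, InitialValue];
        # missing trailing fields simply remain ''.
        value_list = ['', '', '']
        tokens = [token.strip() for token in setting.split(TAB_VALUE_SPLIT)]
        value_list[0:len(tokens)] = tokens
        return value_list

    print(split_vpd_setting('0x100 | 0x20 | "Default"'))  # ['0x100', '0x20', '"Default"']
    print(split_vpd_setting('0x200'))                     # ['0x200', '', '']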
@@ -733,7 +783,7 @@ class DscBuildData(PlatformBuildClassObject):
#
def AddPcd(self, Name, Guid, Value):
if (Name, Guid) not in self.Pcds:
- self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, None)
+ self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
self.Pcds[Name, Guid].DefaultValue = Value
Arch = property(_GetArch, _SetArch)
@@ -752,7 +802,8 @@ class DscBuildData(PlatformBuildClassObject):
BsBaseAddress = property(_GetBsBaseAddress)
RtBaseAddress = property(_GetRtBaseAddress)
LoadFixAddress = property(_GetLoadFixAddress)
-
+ VpdToolGuid = property(_GetVpdToolGuid)
+ VpdFileName = property(_GetVpdFileName)
SkuIds = property(_GetSkuIds)
Modules = property(_GetModules)
LibraryInstances = property(_GetLibraryInstances)
@@ -760,7 +811,7 @@ class DscBuildData(PlatformBuildClassObject):
Pcds = property(_GetPcds)
BuildOptions = property(_GetBuildOptions)
-## Platform build information from DSC file
+## Platform build information from DEC file
#
# This class is used to retrieve information stored in database and convert them
# into PackageBuildClassObject form for easier use for AutoGen.
@@ -789,6 +840,7 @@ class DecBuildData(PackageBuildClassObject):
TAB_DEC_DEFINES_PACKAGE_NAME : "_PackageName",
TAB_DEC_DEFINES_PACKAGE_GUID : "_Guid",
TAB_DEC_DEFINES_PACKAGE_VERSION : "_Version",
+ TAB_DEC_DEFINES_PKG_UNI_FILE : "_PkgUniFile",
}
@@ -830,6 +882,7 @@ class DecBuildData(PackageBuildClassObject):
self._PackageName = None
self._Guid = None
self._Version = None
+ self._PkgUniFile = None
self._Protocols = None
self._Ppis = None
self._Guids = None
@@ -1063,6 +1116,7 @@ class DecBuildData(PackageBuildClassObject):
TokenNumber,
'',
{},
+ False,
None
)
return Pcds
@@ -1914,6 +1968,7 @@ class InfBuildData(ModuleBuildClassObject):
'',
'',
{},
+ False,
self.Guids[TokenSpaceGuid]
)
@@ -1927,7 +1982,7 @@ class InfBuildData(ModuleBuildClassObject):
# "FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"
#
PcdType = self._PCD_TYPE_STRING_[Type]
- if Type in [MODEL_PCD_DYNAMIC, MODEL_PCD_DYNAMIC_EX]:
+ if Type == MODEL_PCD_DYNAMIC:
Pcd.Pending = True
for T in ["FixedAtBuild", "PatchableInModule", "FeatureFlag", "Dynamic", "DynamicEx"]:
if (PcdCName, TokenSpaceGuid, T) in Package.Pcds:
@@ -1994,7 +2049,7 @@ class InfBuildData(ModuleBuildClassObject):
## Database
#
-# This class defined the build databse for all modules, packages and platform.
+# This class defines the build database for all modules, packages and platforms.
# It will call corresponding parser for the given file if it cannot find it in
# the database.
#
diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py
index af03e1f982..c5793ad057 100644
--- a/BaseTools/Source/Python/build/BuildReport.py
+++ b/BaseTools/Source/Python/build/BuildReport.py
@@ -23,6 +23,7 @@ import textwrap
import traceback
import sys
import time
+import struct
from datetime import datetime
from StringIO import StringIO
from Common import EdkLogger
@@ -101,6 +102,9 @@ gDriverTypeMap = {
'SMM_DRIVER' : '0xA (SMM)', # Extension of module type to support PI 1.1 SMM drivers
}
+## The lookup table of the opcodes supported in dependency expression binaries
+gOpCodeList = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT", "TRUE", "FALSE", "END", "SOR"]
+
##
# Writes a string to the file object.
#
@@ -163,6 +167,60 @@ def FindIncludeFiles(Source, IncludePathList, IncludeFiles):
break
##
+# Parse binary dependency expression section
+#
+# This utility class parses the dependency expression section and translates
+# GUID values into readable GUID names.
+#
+class DepexParser(object):
+ ##
+ # Constructor function for class DepexParser
+ #
+ # This constructor collects GUID values so that readable
+ # GUID names can be translated.
+ #
+ # @param self The object pointer
+ # @param Wa Workspace context information
+ #
+ def __init__(self, Wa):
+ self._GuidDb = {}
+ for Package in Wa.BuildDatabase.WorkspaceDb.PackageList:
+ for Protocol in Package.Protocols:
+ GuidValue = GuidStructureStringToGuidString(Package.Protocols[Protocol])
+ self._GuidDb[GuidValue.upper()] = Protocol
+ for Ppi in Package.Ppis:
+ GuidValue = GuidStructureStringToGuidString(Package.Ppis[Ppi])
+ self._GuidDb[GuidValue.upper()] = Ppi
+ for Guid in Package.Guids:
+ GuidValue = GuidStructureStringToGuidString(Package.Guids[Guid])
+ self._GuidDb[GuidValue.upper()] = Guid
+
+ ##
+ # Parse the binary dependency expression files.
+ #
+ # This function parses the binary dependency expression file and translates it
+ # into an instruction list.
+ #
+ # @param self The object pointer
+ # @param DepexFileName The file name of binary dependency expression file.
+ #
+ def ParseDepexFile(self, DepexFileName):
+ DepexFile = open(DepexFileName, "rb")
+ DepexStatement = []
+ OpCode = DepexFile.read(1)
+ while OpCode:
+ Statement = gOpCodeList[struct.unpack("B", OpCode)[0]]
+ if Statement in ["BEFORE", "AFTER", "PUSH"]:
+ GuidValue = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % \
+ struct.unpack("LHHBBBBBBBB", DepexFile.read(16))
+ GuidString = self._GuidDb.get(GuidValue, GuidValue)
+ Statement = "%s %s" % (Statement, GuidString)
+ DepexStatement.append(Statement)
+ OpCode = DepexFile.read(1)
+
+ return DepexStatement
+
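
A standalone sketch of the byte stream ParseDepexFile walks: each statement begins with a one-byte opcode, and BEFORE/AFTER/PUSH are followed by a 16-byte GUID in the usual little-endian EFI layout. The snippet uses the same opcode ordering as gOpCodeList, is written for Python 3 (unlike the Python 2 code in this patch), and builds its own tiny depex blob rather than reading a real .depex file.

    import struct
    import uuid

    OPCODES = ["BEFORE", "AFTER", "PUSH", "AND", "OR", "NOT",
               "TRUE", "FALSE", "END", "SOR"]

    def parse_depex_bytes(data):
        statements = []
        offset = 0
        while offset < len(data):
            opcode = OPCODES[data[offset]]
            offset += 1
            if opcode in ("BEFORE", "AFTER", "PUSH"):
                # 16 GUID bytes: three little-endian fields plus 8 raw bytes.
                fields = struct.unpack_from("<IHH8B", data, offset)
                offset += 16
                guid = "%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X" % fields
                opcode = "%s %s" % (opcode, guid)
            statements.append(opcode)
        return statements

    guid = uuid.UUID("1E93E633-D65A-459E-AB84-93D9EC266D18")
    blob = bytes([OPCODES.index("PUSH")]) + guid.bytes_le + bytes([OPCODES.index("END")])
    print(parse_depex_bytes(blob))
    # ['PUSH 1E93E633-D65A-459E-AB84-93D9EC266D18', 'END']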
+##
# Reports library information
#
# This class reports the module library subsection in the build report file.
@@ -254,6 +312,7 @@ class DepexReport(object):
#
def __init__(self, M):
self.Depex = ""
+ self._DepexFileName = os.path.join(M.BuildDir, "OUTPUT", M.Module.BaseName + ".depex")
ModuleType = M.ModuleType
if not ModuleType:
ModuleType = gComponentType2ModuleType.get(M.ComponentType, "")
@@ -289,14 +348,25 @@ class DepexReport(object):
#
# This function generates report for the module dependency expression.
#
- # @param self The object pointer
- # @param File The file object for report
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param GlobalDepexParser The platform global Dependency expression parser object
#
- def GenerateReport(self, File):
+ def GenerateReport(self, File, GlobalDepexParser):
if not self.Depex:
return
-
+
FileWrite(File, gSubSectionStart)
+ if os.path.isfile(self._DepexFileName):
+ try:
+ DepexStatements = GlobalDepexParser.ParseDepexFile(self._DepexFileName)
+ FileWrite(File, "Final Dependency Expression (DEPEX) Instructions")
+ for DepexStatement in DepexStatements:
+ FileWrite(File, " %s" % DepexStatement)
+ FileWrite(File, gSubSectionSep)
+ except:
+ EdkLogger.warn(None, "Dependency expression file is corrupted", self._DepexFileName)
+
FileWrite(File, "Dependency Expression (DEPEX) from %s" % self.Source)
if self.Source == "INF":
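
As a sketch of the control flow added above, under the assumption that a parse failure should only warn and never abort report generation (hypothetical helper names, not the BuildReport API):

    import os

    def write_final_depex(file_write, report_file, depex_file_name, depex_parser, logger):
        # Decode the binary depex only if the build actually produced one;
        # a corrupted file downgrades to a warning so the rest of the
        # report is still written.
        if not os.path.isfile(depex_file_name):
            return
        try:
            statements = depex_parser.ParseDepexFile(depex_file_name)
        except Exception:
            logger.warn(None, "Dependency expression file is corrupted", depex_file_name)
            return
        file_write(report_file, "Final Dependency Expression (DEPEX) Instructions")
        for statement in statements:
            file_write(report_file, "  %s" % statement)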
@@ -453,12 +523,14 @@ class ModuleReport(object):
# This function generates report for separate module expression
# in a platform build.
#
- # @param self The object pointer
- # @param File The file object for report
- # @param GlobalPcdReport The platform global PCD class object
- # @param ReportType The kind of report items in the final report file
+ # @param self The object pointer
+ # @param File The file object for report
+ # @param GlobalPcdReport The platform global PCD report object
+ # @param GlobalPredictionReport The platform global Prediction report object
+ # @param GlobalDepexParser The platform global Dependency expression parser object
+ # @param ReportType The kind of report items in the final report file
#
- def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, ReportType):
+ def GenerateReport(self, File, GlobalPcdReport, GlobalPredictionReport, GlobalDepexParser, ReportType):
FileWrite(File, gSectionStart)
FwReportFileName = os.path.join(self._BuildDir, "DEBUG", self.ModuleName + ".txt")
@@ -505,7 +577,7 @@ class ModuleReport(object):
self.LibraryReport.GenerateReport(File)
if "DEPEX" in ReportType:
- self.DepexReport.GenerateReport(File)
+ self.DepexReport.GenerateReport(File, GlobalDepexParser)
if "BUILD_FLAGS" in ReportType:
self.BuildFlagsReport.GenerateReport(File)
@@ -1325,6 +1397,10 @@ class PlatformReport(object):
if "FIXED_ADDRESS" in ReportType or "EXECUTION_ORDER" in ReportType:
self.PredictionReport = PredictionReport(Wa)
+ self.DepexParser = None
+ if "DEPEX" in ReportType:
+ self.DepexParser = DepexParser(Wa)
+
self.ModuleReportList = []
if MaList != None:
self._IsModuleBuild = True
@@ -1371,7 +1447,7 @@ class PlatformReport(object):
FdReportListItem.GenerateReport(File)
for ModuleReportItem in self.ModuleReportList:
- ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, ReportType)
+ ModuleReportItem.GenerateReport(File, self.PcdReport, self.PredictionReport, self.DepexParser, ReportType)
if not self._IsModuleBuild:
if "EXECUTION_ORDER" in ReportType:
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py
index 545ffa39aa..af9d3d5f5a 100644
--- a/BaseTools/Source/Python/build/build.py
+++ b/BaseTools/Source/Python/build/build.py
@@ -23,6 +23,7 @@ import glob
import time
import platform
import traceback
+import encodings.ascii
from struct import *
from threading import *
@@ -735,7 +736,7 @@ class Build():
self.LoadFixAddress = 0
self.UniFlag = UniFlag
- # print dot charater during doing some time-consuming work
+ # print a dot character while doing some time-consuming work
self.Progress = Utils.Progressor()
# parse target.txt, tools_def.txt, and platform file
@@ -1267,9 +1268,9 @@ class Build():
if len (SmmModuleList) > 0:
MapBuffer.write('SMM_CODE_PAGE_NUMBER = 0x%x\n' % (SmmSize/0x1000))
- PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
+ PeiBaseAddr = TopMemoryAddress - RtSize - BtSize
BtBaseAddr = TopMemoryAddress - RtSize
- RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
+ RtBaseAddr = TopMemoryAddress - ReservedRuntimeMemorySize
self._RebaseModule (MapBuffer, PeiBaseAddr, PeiModuleList, TopMemoryAddress == 0)
self._RebaseModule (MapBuffer, BtBaseAddr, BtModuleList, TopMemoryAddress == 0)