diff options
Diffstat (limited to 'BaseTools/Source')
123 files changed, 1086 insertions, 2876 deletions
diff --git a/BaseTools/Source/C/BrotliCompress/BrotliCompress.c b/BaseTools/Source/C/BrotliCompress/BrotliCompress.c index 62a6aed..c6b0189 100644 --- a/BaseTools/Source/C/BrotliCompress/BrotliCompress.c +++ b/BaseTools/Source/C/BrotliCompress/BrotliCompress.c @@ -2,6 +2,7 @@ BrotliCompress Compress/Decompress tool (BrotliCompress)
Copyright (c) 2020, ByoSoft Corporation. All rights reserved.<BR>
+ Copyright (c) 2025, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
@@ -52,12 +53,14 @@ static FILE* ms_fopen(const char* FileName, const char* Mode) { return Result;
}
+#if !defined(__MINGW32__)
static int ms_open(const char* FileName, int Oflag, int Pmode) {
int Result;
Result = -1;
_sopen_s(&Result, FileName, Oflag | O_BINARY, _SH_DENYNO, Pmode);
return Result;
}
+#endif
#endif /* WIN32 */
diff --git a/BaseTools/Source/C/Common/CommonLib.c b/BaseTools/Source/C/Common/CommonLib.c index b2cde6d..5830cc1 100644 --- a/BaseTools/Source/C/Common/CommonLib.c +++ b/BaseTools/Source/C/Common/CommonLib.c @@ -1,7 +1,7 @@ /** @file
Common basic Library Functions
-Copyright (c) 2004 - 2018, Intel Corporation. All rights reserved.<BR>
+Copyright (c) 2004 - 2025, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
@@ -469,6 +469,7 @@ PrintGuidToBuffer ( #ifdef __GNUC__
+#ifndef _WIN32
size_t _filelength(int fd)
{
struct stat stat_buf;
@@ -487,6 +488,7 @@ char *strlwr(char *s) }
#endif
#endif
+#endif
#define WINDOWS_EXTENSION_PATH "\\\\?\\"
#define WINDOWS_UNC_EXTENSION_PATH "\\\\?\\UNC"
diff --git a/BaseTools/Source/C/Common/CommonLib.h b/BaseTools/Source/C/Common/CommonLib.h index a841029..f554de1 100644 --- a/BaseTools/Source/C/Common/CommonLib.h +++ b/BaseTools/Source/C/Common/CommonLib.h @@ -1,7 +1,7 @@ /** @file
Common library assistance routines.
-Copyright (c) 2004 - 2018, Intel Corporation. All rights reserved.<BR>
+Copyright (c) 2004 - 2025, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
@@ -450,12 +450,17 @@ Returns: #define _stricmp strcasecmp
#define strnicmp strncasecmp
#define strcmpi strcasecmp
-size_t _filelength(int fd);
#ifndef __CYGWIN__
char *strlwr(char *s);
#endif
#endif
+#ifdef _WIN32
+#include <io.h> // io.h provides the declaration of _filelength on Windows
+#else
+size_t _filelength(int fd); // Only declare this on non-Windows systems
+#endif
+
//
// On windows, mkdir only has one parameter.
// On unix, it has two parameters
diff --git a/BaseTools/Source/C/Common/Decompress.c b/BaseTools/Source/C/Common/Decompress.c index 0f2bdbf..0cf0c4a 100644 --- a/BaseTools/Source/C/Common/Decompress.c +++ b/BaseTools/Source/C/Common/Decompress.c @@ -15,7 +15,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent //
// Decompression algorithm begins here
//
+#ifndef UINT8_MAX
#define UINT8_MAX 0xff
+#endif
#define BITBUFSIZ 32
#define MAXMATCH 256
#define THRESHOLD 3
diff --git a/BaseTools/Source/C/DevicePath/GNUmakefile b/BaseTools/Source/C/DevicePath/GNUmakefile index f61b1b2..40cf277 100644 --- a/BaseTools/Source/C/DevicePath/GNUmakefile +++ b/BaseTools/Source/C/DevicePath/GNUmakefile @@ -1,7 +1,7 @@ ## @file
# GNU/Linux makefile for 'DevicePath' module build.
#
-# Copyright (c) 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2017 - 2025, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
ARCH ?= IA32
@@ -13,8 +13,13 @@ OBJECTS = DevicePath.o UefiDevicePathLib.o DevicePathFromText.o DevicePathUtili include $(MAKEROOT)/Makefiles/app.makefile
-GCCVERSION = $(shell $(CC) -dumpversion | awk -F'.' '{print $$1}')
-CLANG := $(shell $(CC) --version | grep clang)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+GCCVERSION := $(shell for /f "tokens=1 delims=." %%a in ("$(shell $(CC) -dumpversion)") do echo %%a)
+else
+GCCVERSION := $(shell $(CC) -dumpversion | awk -F'.' '{print $$1}')
+endif
+CLANG := $(findstring clang,$(shell $(CC) --version))
+
ifneq ("$(GCCVERSION)", "5")
ifeq ($(CLANG),)
ifneq ($(DARWIN),Darwin)
@@ -30,6 +35,10 @@ ifeq ($(CYGWIN), CYGWIN) endif
ifeq ($(LINUX), Linux)
+ifndef CROSS_LIB_UUID
LIBS += -luuid
+else
+ LIBS += -L$(CROSS_LIB_UUID)
+endif
endif
diff --git a/BaseTools/Source/C/GNUmakefile b/BaseTools/Source/C/GNUmakefile index 5275f65..0ea314e 100644 --- a/BaseTools/Source/C/GNUmakefile +++ b/BaseTools/Source/C/GNUmakefile @@ -1,50 +1,23 @@ ## @file
# GNU/Linux makefile for C tools build.
#
-# Copyright (c) 2007 - 2017, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2025, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
-ifndef HOST_ARCH
- #
- # If HOST_ARCH is not defined, then we use 'uname -m' to attempt
- # try to figure out the appropriate HOST_ARCH.
- #
- uname_m = $(shell uname -m)
- $(info Attempting to detect HOST_ARCH from 'uname -m': $(uname_m))
- ifneq (,$(strip $(filter $(uname_m), x86_64 amd64)))
- HOST_ARCH=X64
- endif
- ifeq ($(patsubst i%86,IA32,$(uname_m)),IA32)
- HOST_ARCH=IA32
- endif
- ifneq (,$(findstring aarch64,$(uname_m)))
- HOST_ARCH=AARCH64
- else ifneq (,$(findstring arm64,$(uname_m)))
- HOST_ARCH=AARCH64
- else ifneq (,$(findstring arm,$(uname_m)))
- HOST_ARCH=ARM
- endif
- ifneq (,$(findstring riscv64,$(uname_m)))
- HOST_ARCH=RISCV64
- endif
- ifneq (,$(findstring loongarch64,$(uname_m)))
- HOST_ARCH=LOONGARCH64
- endif
- ifndef HOST_ARCH
- $(info Could not detected HOST_ARCH from uname results)
- $(error HOST_ARCH is not defined!)
- endif
- $(info Detected HOST_ARCH of $(HOST_ARCH) using uname.)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ SHELL := cmd.exe
+ MAKEROOT := $(shell echo %CD%)
+else
+ MAKEROOT := .
endif
-export HOST_ARCH
-
-MAKEROOT = .
-
include Makefiles/header.makefile
+export PYTHON_COMMAND
+export HOST_ARCH
+
all: makerootdir subdirs
@echo Finished building BaseTools C Tools with HOST_ARCH=$(HOST_ARCH)
@@ -71,7 +44,7 @@ $(APPLICATIONS): $(LIBRARIES) $(MAKEROOT)/bin $(VFRAUTOGEN) .PHONY: outputdirs
makerootdir:
- -mkdir -p $(MAKEROOT)
+ -$(MD) $(MAKEROOT)
.PHONY: subdirs $(SUBDIRS)
subdirs: $(SUBDIRS)
@@ -90,7 +63,7 @@ clean: $(patsubst %,%-clean,$(sort $(SUBDIRS))) clean: localClean
localClean:
- rm -f $(MAKEROOT)/bin/*
- -rmdir $(MAKEROOT)/libs $(MAKEROOT)/bin
+ $(RM) $(MAKEROOT)/bin/*
+ -$(RD) $(MAKEROOT)/libs $(MAKEROOT)/bin
include Makefiles/footer.makefile
diff --git a/BaseTools/Source/C/GenFfs/GenFfs.c b/BaseTools/Source/C/GenFfs/GenFfs.c index d78d62a..4c2e93e 100644 --- a/BaseTools/Source/C/GenFfs/GenFfs.c +++ b/BaseTools/Source/C/GenFfs/GenFfs.c @@ -7,7 +7,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#include <sys/types.h>
#include <sys/stat.h>
diff --git a/BaseTools/Source/C/GenFv/GNUmakefile b/BaseTools/Source/C/GenFv/GNUmakefile index 872b981..843eb1b 100644 --- a/BaseTools/Source/C/GenFv/GNUmakefile +++ b/BaseTools/Source/C/GenFv/GNUmakefile @@ -14,6 +14,15 @@ include $(MAKEROOT)/Makefiles/app.makefile LIBS = -lCommon
ifeq ($(CYGWIN), CYGWIN)
- LIBS += -L/lib/e2fsprogs
+ LIBS += -L/lib/e2fsprogs -luuid
+endif
+
+ifeq ($(LINUX), Linux)
+ifndef CROSS_LIB_UUID
+ LIBS += -luuid
+else
+ LIBS += -L$(CROSS_LIB_UUID)
+ BUILD_CFLAGS += -D__CROSS_LIB_UUID__ -I $(CROSS_LIB_UUID_INC)
+endif
endif
diff --git a/BaseTools/Source/C/GenFv/GenFvInternalLib.c b/BaseTools/Source/C/GenFv/GenFvInternalLib.c index 29c3363..e4f4090 100644 --- a/BaseTools/Source/C/GenFv/GenFvInternalLib.c +++ b/BaseTools/Source/C/GenFv/GenFvInternalLib.c @@ -1,7 +1,7 @@ /** @file
This file contains the internal functions required to generate a Firmware Volume.
-Copyright (c) 2004 - 2018, Intel Corporation. All rights reserved.<BR>
+Copyright (c) 2004 - 2025, Intel Corporation. All rights reserved.<BR>
Portions Copyright (c) 2011 - 2013, ARM Ltd. All rights reserved.<BR>
Portions Copyright (c) 2016 HP Development Company, L.P.<BR>
Portions Copyright (c) 2020, Hewlett Packard Enterprise Development LP. All rights reserved.<BR>
@@ -14,6 +14,15 @@ SPDX-License-Identifier: BSD-2-Clause-Patent // Include files
//
+#if defined(__FreeBSD__)
+#include <uuid.h>
+#elif defined(__GNUC__) && !defined(_WIN32)
+#if !defined(__CROSS_LIB_UUID__)
+#include <uuid/uuid.h>
+#else
+#include <uuid.h>
+#endif
+#endif
#ifdef __GNUC__
#include <sys/stat.h>
#endif
@@ -3144,7 +3153,6 @@ Returns: --*/
{
UINTN CurrentOffset;
- UINTN OrigOffset;
UINTN Index;
FILE *fpin;
UINTN FfsFileSize;
@@ -3153,11 +3161,11 @@ Returns: UINT32 FfsHeaderSize;
EFI_FFS_FILE_HEADER FfsHeader;
UINTN VtfFileSize;
- UINTN MaxPadFileSize;
+ UINTN VtfPadSize;
FvExtendHeaderSize = 0;
- MaxPadFileSize = 0;
VtfFileSize = 0;
+ VtfPadSize = 0;
fpin = NULL;
Index = 0;
@@ -3265,12 +3273,8 @@ Returns: //
// Only EFI_FFS_FILE_HEADER is needed for a pad section.
//
- OrigOffset = CurrentOffset;
CurrentOffset = (CurrentOffset + FfsHeaderSize + sizeof(EFI_FFS_FILE_HEADER) + FfsAlignment - 1) & ~(FfsAlignment - 1);
CurrentOffset -= FfsHeaderSize;
- if ((CurrentOffset - OrigOffset) > MaxPadFileSize) {
- MaxPadFileSize = CurrentOffset - OrigOffset;
- }
}
}
@@ -3295,9 +3299,18 @@ Returns: if (FvInfoPtr->Size == 0) {
//
+ // Vtf file should be bottom aligned at end of block.
+ // If it is not aligned, insert EFI_FFS_FILE_HEADER to ensure the minimum pad file size for left space.
+ //
+ if ((VtfFileSize > 0) && (CurrentOffset % FvInfoPtr->FvBlocks[0].Length)) {
+ VtfPadSize = sizeof (EFI_FFS_FILE_HEADER);
+ }
+
+ //
// Update FvInfo data
//
- FvInfoPtr->FvBlocks[0].NumBlocks = CurrentOffset / FvInfoPtr->FvBlocks[0].Length + ((CurrentOffset % FvInfoPtr->FvBlocks[0].Length)?1:0);
+ FvInfoPtr->FvBlocks[0].NumBlocks = ((CurrentOffset + VtfPadSize) / FvInfoPtr->FvBlocks[0].Length) +
+ (((CurrentOffset + VtfPadSize) % FvInfoPtr->FvBlocks[0].Length) ? 1 : 0);
FvInfoPtr->Size = FvInfoPtr->FvBlocks[0].NumBlocks * FvInfoPtr->FvBlocks[0].Length;
FvInfoPtr->FvBlocks[1].NumBlocks = 0;
FvInfoPtr->FvBlocks[1].Length = 0;
@@ -3307,6 +3320,23 @@ Returns: //
Error (NULL, 0, 3000, "Invalid", "the required fv image size 0x%x exceeds the set fv image size 0x%x", (unsigned) CurrentOffset, (unsigned) FvInfoPtr->Size);
return EFI_INVALID_PARAMETER;
+ } else if ((VtfFileSize > 0) &&
+ (FvInfoPtr->Size > CurrentOffset) &&
+ ((FvInfoPtr->Size - CurrentOffset) < sizeof (EFI_FFS_FILE_HEADER)))
+ {
+ //
+ // Not invalid
+ //
+ Error (
+ NULL,
+ 0,
+ 3000,
+ "Invalid",
+ "the required fv image size = 0x%x. the set fv image size = 0x%x. Free space left is not enough to add a pad file (0x18)",
+ (unsigned)CurrentOffset,
+ (unsigned)FvInfoPtr->Size
+ );
+ return EFI_INVALID_PARAMETER;
}
//
@@ -3314,12 +3344,6 @@ Returns: //
mFvTotalSize = FvInfoPtr->Size;
mFvTakenSize = CurrentOffset;
- if ((mFvTakenSize == mFvTotalSize) && (MaxPadFileSize > 0)) {
- //
- // This FV means TOP FFS has been taken. Then, check whether there is padding data for use.
- //
- mFvTakenSize = mFvTakenSize - MaxPadFileSize;
- }
return EFI_SUCCESS;
}
diff --git a/BaseTools/Source/C/GenFw/Elf32Convert.c b/BaseTools/Source/C/GenFw/Elf32Convert.c index de198e5..246497a 100644 --- a/BaseTools/Source/C/GenFw/Elf32Convert.c +++ b/BaseTools/Source/C/GenFw/Elf32Convert.c @@ -10,7 +10,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#endif
#include <assert.h>
diff --git a/BaseTools/Source/C/GenFw/Elf64Convert.c b/BaseTools/Source/C/GenFw/Elf64Convert.c index 9d04fc6..1859412 100644 --- a/BaseTools/Source/C/GenFw/Elf64Convert.c +++ b/BaseTools/Source/C/GenFw/Elf64Convert.c @@ -11,7 +11,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#endif
#include <assert.h>
@@ -1396,6 +1398,18 @@ WriteSections64 ( SymName = (const UINT8 *)"<unknown>";
}
+ if (mEhdr->e_machine == EM_X86_64) {
+ //
+ // For x86_64, we can ignore R_X86_64_NONE relocations.
+ // They are used to indicate that the symbol is not defined
+ // in the current module, but in a shared library that may be
+ // used when building modules for inclusion in host-based unit tests.
+ //
+ if (ELF_R_TYPE(Rel->r_info) == R_X86_64_NONE) {
+ continue;
+ }
+ }
+
//
// Skip error on EM_RISCV64 and EM_LOONGARCH because no symbol name is built
// from RISC-V and LoongArch toolchain.
@@ -1482,9 +1496,18 @@ WriteSections64 ( - (SecOffset - SecShdr->sh_addr));
VerboseMsg ("Relocation: 0x%08X", *(UINT32 *)Targ);
break;
+ case R_X86_64_REX_GOTPCRELX:
+ //
+ // This is a relaxable GOTPCREL relocation, and the linker may have
+ // applied this relaxation without updating the relocation type.
+ // In the position independent code model, only transformations
+ // from MOV to LEA are possible for REX-prefixed instructions.
+ //
+ if (Targ[-2] == 0x8d) { // LEA
+ break;
+ }
case R_X86_64_GOTPCREL:
case R_X86_64_GOTPCRELX:
- case R_X86_64_REX_GOTPCRELX:
VerboseMsg ("R_X86_64_GOTPCREL family");
VerboseMsg ("Offset: 0x%08X, Addend: 0x%08X",
(UINT32)(SecOffset + (Rel->r_offset - SecShdr->sh_addr)),
diff --git a/BaseTools/Source/C/GenFw/ElfConvert.c b/BaseTools/Source/C/GenFw/ElfConvert.c index 3205f61..d6d9feb 100644 --- a/BaseTools/Source/C/GenFw/ElfConvert.c +++ b/BaseTools/Source/C/GenFw/ElfConvert.c @@ -8,7 +8,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#endif
#include <stdio.h>
diff --git a/BaseTools/Source/C/GenFw/GNUmakefile b/BaseTools/Source/C/GenFw/GNUmakefile index 76cda7e..2079835 100644 --- a/BaseTools/Source/C/GenFw/GNUmakefile +++ b/BaseTools/Source/C/GenFw/GNUmakefile @@ -18,6 +18,10 @@ ifeq ($(CYGWIN), CYGWIN) endif
ifeq ($(LINUX), Linux)
+ifndef CROSS_LIB_UUID
LIBS += -luuid
+else
+ LIBS += -L$(CROSS_LIB_UUID)
+endif
endif
diff --git a/BaseTools/Source/C/GenFw/GenFw.c b/BaseTools/Source/C/GenFw/GenFw.c index bd635b3..daf840c 100644 --- a/BaseTools/Source/C/GenFw/GenFw.c +++ b/BaseTools/Source/C/GenFw/GenFw.c @@ -7,7 +7,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#include <sys/types.h>
#include <sys/stat.h>
diff --git a/BaseTools/Source/C/GenSec/GNUmakefile b/BaseTools/Source/C/GenSec/GNUmakefile index 9f0844c..db634e4 100644 --- a/BaseTools/Source/C/GenSec/GNUmakefile +++ b/BaseTools/Source/C/GenSec/GNUmakefile @@ -18,6 +18,10 @@ ifeq ($(CYGWIN), CYGWIN) endif
ifeq ($(LINUX), Linux)
+ifndef CROSS_LIB_UUID
LIBS += -luuid
+else
+ LIBS += -L$(CROSS_LIB_UUID)
+endif
endif
diff --git a/BaseTools/Source/C/GenSec/GenSec.c b/BaseTools/Source/C/GenSec/GenSec.c index cf24d82..d9d2efe 100644 --- a/BaseTools/Source/C/GenSec/GenSec.c +++ b/BaseTools/Source/C/GenSec/GenSec.c @@ -6,7 +6,9 @@ SPDX-License-Identifier: BSD-2-Clause-Patent **/
#ifndef __GNUC__
+#define RUNTIME_FUNCTION _WINNT_DUP_RUNTIME_FUNCTION
#include <windows.h>
+#undef RUNTIME_FUNCTION
#include <io.h>
#include <sys/types.h>
#include <sys/stat.h>
diff --git a/BaseTools/Source/C/Include/Common/BaseTypes.h b/BaseTools/Source/C/Include/Common/BaseTypes.h index e669da8..5093a2f 100644 --- a/BaseTools/Source/C/Include/Common/BaseTypes.h +++ b/BaseTools/Source/C/Include/Common/BaseTypes.h @@ -4,6 +4,8 @@ This file is stand alone self consistent set of definitions.
Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
+ Copyright (C) 2024 Advanced Micro Devices, Inc. All rights reserved.
+
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
@@ -202,7 +204,7 @@ typedef UINTN RETURN_STATUS; #define ENCODE_ERROR(a) ((RETURN_STATUS)(MAX_BIT | (a)))
#define ENCODE_WARNING(a) ((RETURN_STATUS)(a))
-#define RETURN_ERROR(a) (((INTN)(RETURN_STATUS)(a)) < 0)
+#define RETURN_ERROR(a) (((RETURN_STATUS)(a)) >= MAX_BIT)
#define RETURN_SUCCESS 0
#define RETURN_LOAD_ERROR ENCODE_ERROR (1)
diff --git a/BaseTools/Source/C/Include/Common/UefiMultiPhase.h b/BaseTools/Source/C/Include/Common/UefiMultiPhase.h index b889508..5b5af8b 100644 --- a/BaseTools/Source/C/Include/Common/UefiMultiPhase.h +++ b/BaseTools/Source/C/Include/Common/UefiMultiPhase.h @@ -14,6 +14,15 @@ //
// Enumeration of memory types introduced in UEFI.
+// +---------------------------------------------------+
+// | 0..(EfiMaxMemoryType - 1) - Normal memory type |
+// +---------------------------------------------------+
+// | EfiMaxMemoryType..0x6FFFFFFF - Invalid |
+// +---------------------------------------------------+
+// | 0x70000000..0x7FFFFFFF - OEM reserved |
+// +---------------------------------------------------+
+// | 0x80000000..0xFFFFFFFF - OS reserved |
+// +---------------------------------------------------+
//
typedef enum {
EfiReservedMemoryType,
@@ -31,7 +40,11 @@ typedef enum { EfiMemoryMappedIOPortSpace,
EfiPalCode,
EfiPersistentMemory,
- EfiMaxMemoryType
+ EfiMaxMemoryType,
+ MEMORY_TYPE_OEM_RESERVED_MIN = 0x70000000,
+ MEMORY_TYPE_OEM_RESERVED_MAX = 0x7FFFFFFF,
+ MEMORY_TYPE_OS_RESERVED_MIN = 0x80000000,
+ MEMORY_TYPE_OS_RESERVED_MAX = 0xFFFFFFFF
} EFI_MEMORY_TYPE;
diff --git a/BaseTools/Source/C/Include/Protocol/HiiFramework.h b/BaseTools/Source/C/Include/Protocol/HiiFramework.h index 4483509..21abdf0 100644 --- a/BaseTools/Source/C/Include/Protocol/HiiFramework.h +++ b/BaseTools/Source/C/Include/Protocol/HiiFramework.h @@ -28,19 +28,6 @@ 0xd7ad636e, 0xb997, 0x459b, {0xbf, 0x3f, 0x88, 0x46, 0x89, 0x79, 0x80, 0xe1} \
}
-// BugBug:
-//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-// If UGA goes away we need to put this some place. I'm not sure where?
-//
-//typedef struct {
-// UINT8 Blue;
-// UINT8 Green;
-// UINT8 Red;
-// UINT8 Reserved;
-//} EFI_UGA_PIXEL;
-
-//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
-//
typedef struct _EFI_HII_PROTOCOL EFI_HII_PROTOCOL;
@@ -576,39 +563,6 @@ EFI_STATUS );
/**
- Translates a glyph into the format required for input to the Universal
- Graphics Adapter (UGA) Block Transfer (BLT) routines.
-
- @param This A pointer to the EFI_HII_PROTOCOL instance.
- @param GlyphBuffer A pointer to the buffer that contains glyph data.
- @param Foreground The foreground setting requested to be used for the
- generated BltBuffer data.
- @param Background The background setting requested to be used for the
- generated BltBuffer data.
- @param Count The entry in the BltBuffer upon which to act.
- @param Width The width in bits of the glyph being converted.
- @param Height The height in bits of the glyph being converted
- @param BltBuffer A pointer to the buffer that contains the data that is
- ready to be used by the UGA BLT routines.
-
- @retval EFI_SUCCESS It worked.
- @retval EFI_NOT_FOUND A glyph for a character was not found.
-
-**/
-typedef
-EFI_STATUS
-(EFIAPI *EFI_HII_GLYPH_TO_BLT) (
- IN EFI_HII_PROTOCOL *This,
- IN UINT8 *GlyphBuffer,
- IN EFI_GRAPHICS_OUTPUT_BLT_PIXEL Foreground,
- IN EFI_GRAPHICS_OUTPUT_BLT_PIXEL Background,
- IN UINTN Count,
- IN UINTN Width,
- IN UINTN Height,
- IN OUT EFI_GRAPHICS_OUTPUT_BLT_PIXEL *BltBuffer
- );
-
-/**
Allows a new string to be added to an already existing string package.
@param This A pointer to the EFI_HII_PROTOCOL instance.
@@ -878,9 +832,6 @@ EFI_STATUS @param GetGlyph
Translates a Unicode character into the corresponding font glyph.
- @param GlyphToBlt
- Converts a glyph value into a format that is ready for a UGA BLT command.
-
@param NewString
Allows a new string to be added to an already existing string package.
@@ -924,7 +875,6 @@ struct _EFI_HII_PROTOCOL { EFI_HII_TEST_STRING TestString;
EFI_HII_GET_GLYPH GetGlyph;
- EFI_HII_GLYPH_TO_BLT GlyphToBlt;
EFI_HII_NEW_STRING NewString;
EFI_HII_GET_PRI_LANGUAGES GetPrimaryLanguages;
diff --git a/BaseTools/Source/C/Include/Protocol/UgaDraw.h b/BaseTools/Source/C/Include/Protocol/UgaDraw.h deleted file mode 100644 index 412b000..0000000 --- a/BaseTools/Source/C/Include/Protocol/UgaDraw.h +++ /dev/null @@ -1,161 +0,0 @@ -/** @file
- UGA Draw protocol from the EFI 1.1 specification.
-
- Abstraction of a very simple graphics device.
-
- Copyright (c) 2006 - 2018, Intel Corporation. All rights reserved.<BR>
-
- SPDX-License-Identifier: BSD-2-Clause-Patent
-
-**/
-
-#ifndef __UGA_DRAW_H__
-#define __UGA_DRAW_H__
-
-#define EFI_UGA_DRAW_PROTOCOL_GUID \
- { \
- 0x982c298b, 0xf4fa, 0x41cb, {0xb8, 0x38, 0x77, 0xaa, 0x68, 0x8f, 0xb8, 0x39 } \
- }
-
-typedef struct _EFI_UGA_DRAW_PROTOCOL EFI_UGA_DRAW_PROTOCOL;
-
-/**
- Return the current video mode information.
-
- @param This Protocol instance pointer.
- @param HorizontalResolution Current video horizontal resolution in pixels
- @param VerticalResolution Current video vertical resolution in pixels
- @param ColorDepth Current video color depth in bits per pixel
- @param RefreshRate Current video refresh rate in Hz.
-
- @retval EFI_SUCCESS Mode information returned.
- @retval EFI_NOT_STARTED Video display is not initialized. Call SetMode ()
- @retval EFI_INVALID_PARAMETER One of the input args was NULL.
-
-**/
-typedef
-EFI_STATUS
-(EFIAPI *EFI_UGA_DRAW_PROTOCOL_GET_MODE) (
- IN EFI_UGA_DRAW_PROTOCOL *This,
- OUT UINT32 *HorizontalResolution,
- OUT UINT32 *VerticalResolution,
- OUT UINT32 *ColorDepth,
- OUT UINT32 *RefreshRate
- )
-;
-
-/**
- Return the current video mode information.
-
- @param This Protocol instance pointer.
- @param HorizontalResolution Current video horizontal resolution in pixels
- @param VerticalResolution Current video vertical resolution in pixels
- @param ColorDepth Current video color depth in bits per pixel
- @param RefreshRate Current video refresh rate in Hz.
-
- @retval EFI_SUCCESS Mode information returned.
- @retval EFI_NOT_STARTED Video display is not initialized. Call SetMode ()
-
-**/
-typedef
-EFI_STATUS
-(EFIAPI *EFI_UGA_DRAW_PROTOCOL_SET_MODE) (
- IN EFI_UGA_DRAW_PROTOCOL *This,
- IN UINT32 HorizontalResolution,
- IN UINT32 VerticalResolution,
- IN UINT32 ColorDepth,
- IN UINT32 RefreshRate
- )
-;
-
-typedef struct {
- UINT8 Blue;
- UINT8 Green;
- UINT8 Red;
- UINT8 Reserved;
-} EFI_UGA_PIXEL;
-
-typedef union {
- EFI_UGA_PIXEL Pixel;
- UINT32 Raw;
-} EFI_UGA_PIXEL_UNION;
-
-typedef enum {
- EfiUgaVideoFill,
- EfiUgaVideoToBltBuffer,
- EfiUgaBltBufferToVideo,
- EfiUgaVideoToVideo,
- EfiUgaBltMax
-} EFI_UGA_BLT_OPERATION;
-
-/**
- Type specifying a pointer to a function to perform an UGA Blt operation.
-
- The following table defines actions for BltOperations:
-
- <B>EfiUgaVideoFill</B> - Write data from the BltBuffer pixel (SourceX, SourceY)
- directly to every pixel of the video display rectangle
- (DestinationX, DestinationY) (DestinationX + Width, DestinationY + Height).
- Only one pixel will be used from the BltBuffer. Delta is NOT used.
-
- <B>EfiUgaVideoToBltBuffer</B> - Read data from the video display rectangle
- (SourceX, SourceY) (SourceX + Width, SourceY + Height) and place it in
- the BltBuffer rectangle (DestinationX, DestinationY )
- (DestinationX + Width, DestinationY + Height). If DestinationX or
- DestinationY is not zero then Delta must be set to the length in bytes
- of a row in the BltBuffer.
-
- <B>EfiUgaBltBufferToVideo</B> - Write data from the BltBuffer rectangle
- (SourceX, SourceY) (SourceX + Width, SourceY + Height) directly to the
- video display rectangle (DestinationX, DestinationY)
- (DestinationX + Width, DestinationY + Height). If SourceX or SourceY is
- not zero then Delta must be set to the length in bytes of a row in the
- BltBuffer.
-
- <B>EfiUgaVideoToVideo</B> - Copy from the video display rectangle (SourceX, SourceY)
- (SourceX + Width, SourceY + Height) .to the video display rectangle
- (DestinationX, DestinationY) (DestinationX + Width, DestinationY + Height).
- The BltBuffer and Delta are not used in this mode.
-
-
- @param[in] This - Protocol instance pointer.
- @param[in] BltBuffer - Buffer containing data to blit into video buffer. This
- buffer has a size of Width*Height*sizeof(EFI_UGA_PIXEL)
- @param[in] BltOperation - Operation to perform on BlitBuffer and video memory
- @param[in] SourceX - X coordinate of source for the BltBuffer.
- @param[in] SourceY - Y coordinate of source for the BltBuffer.
- @param[in] DestinationX - X coordinate of destination for the BltBuffer.
- @param[in] DestinationY - Y coordinate of destination for the BltBuffer.
- @param[in] Width - Width of rectangle in BltBuffer in pixels.
- @param[in] Height - Height of rectangle in BltBuffer in pixels.
- @param[in] Delta - OPTIONAL
-
- @retval EFI_SUCCESS - The Blt operation completed.
- @retval EFI_INVALID_PARAMETER - BltOperation is not valid.
- @retval EFI_DEVICE_ERROR - A hardware error occurred writing to the video buffer.
-
---*/
-typedef
-EFI_STATUS
-(EFIAPI *EFI_UGA_DRAW_PROTOCOL_BLT) (
- IN EFI_UGA_DRAW_PROTOCOL * This,
- IN EFI_UGA_PIXEL * BltBuffer, OPTIONAL
- IN EFI_UGA_BLT_OPERATION BltOperation,
- IN UINTN SourceX,
- IN UINTN SourceY,
- IN UINTN DestinationX,
- IN UINTN DestinationY,
- IN UINTN Width,
- IN UINTN Height,
- IN UINTN Delta OPTIONAL
- );
-
-struct _EFI_UGA_DRAW_PROTOCOL {
- EFI_UGA_DRAW_PROTOCOL_GET_MODE GetMode;
- EFI_UGA_DRAW_PROTOCOL_SET_MODE SetMode;
- EFI_UGA_DRAW_PROTOCOL_BLT Blt;
-};
-
-extern EFI_GUID gEfiUgaDrawProtocolGuid;
-
-#endif
diff --git a/BaseTools/Source/C/Makefile b/BaseTools/Source/C/Makefile index a376d32..0f26de0 100644 --- a/BaseTools/Source/C/Makefile +++ b/BaseTools/Source/C/Makefile @@ -4,7 +4,9 @@ # Copyright (c) 2009 - 2018, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
+!IFNDEF HOST_ARCH
HOST_ARCH = IA32
+!ENDIF
!INCLUDE Makefiles\ms.common
diff --git a/BaseTools/Source/C/Makefiles/GnuMakeUtils.py b/BaseTools/Source/C/Makefiles/GnuMakeUtils.py new file mode 100644 index 0000000..3924311 --- /dev/null +++ b/BaseTools/Source/C/Makefiles/GnuMakeUtils.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python3
+#
+## @file GnuMakeUtils.py
+#
+#
+# Copyright (c) 2025, Intel Corporation. All rights reserved.<BR>
+# SPDX-License-Identifier: BSD-2-Clause-Patent
+#
+from collections import namedtuple
+import glob
+import os
+import re
+import shutil
+import subprocess
+import sys
+import traceback
+if sys.platform == 'win32':
+ from ctypes import windll, POINTER, byref, GetLastError, Structure, WinError
+ from ctypes import c_void_p, c_ushort, c_int, c_long, c_ulong, c_wchar, sizeof
+
+ARCH_UNKNOWN = 'Unknown'
+ARCH_IA32 = 'IA32'
+ARCH_X64 = 'X64'
+ARCH_AARCH64 = 'AARCH64'
+ARCH_ARM = 'ARM'
+ARCH_RISCV64 = 'RISCV64'
+ARCH_LOONGARCH64 = 'LOONGARCH64'
+_Process = namedtuple('Process', ['process_id', 'parent_process_id', 'exe_filename'])
+
+def _get_win32_process_architecture(pid):
+ IMAGE_FILE_MACHINE_I386 = 0x014c
+ IMAGE_FILE_MACHINE_AMD64 = 0x8664
+ IMAGE_FILE_MACHINE_ARM64 = 0xAA64
+ def _get_machine_type(machine_id):
+ if machine_id == IMAGE_FILE_MACHINE_I386:
+ return ARCH_IA32
+ elif machine_id == IMAGE_FILE_MACHINE_AMD64:
+ return ARCH_X64
+ elif machine_id == IMAGE_FILE_MACHINE_ARM64:
+ return ARCH_AARCH64
+ return ARCH_UNKNOWN
+ PROCESS_QUERY_LIMITED_INFORMATION = 0x1000
+ kernel32 = windll.kernel32
+ OpenProcess = kernel32.OpenProcess
+ OpenProcess.argtypes = [c_ulong, c_int, c_ulong]
+ OpenProcess.restype = c_void_p
+ CloseHandle = kernel32.CloseHandle
+ CloseHandle.argtypes = [c_void_p]
+ CloseHandle.restype = c_int
+
+ IsWow64Process2 = None
+ IMAGE_FILE_MACHINE_UNKNOWN = 0
+ try:
+ #IsWow64Process2() is only available on Win10 TH2 or later
+ IsWow64Process2 = kernel32.IsWow64Process2
+ except AttributeError:
+ IsWow64Process2 = None
+ if IsWow64Process2 is not None:
+ IsWow64Process2.argtypes = [c_void_p, POINTER(c_ushort), POINTER(c_ushort)]
+ IsWow64Process2.restype = c_int
+ ProcessMachine = c_ushort(1)
+ NativeMachine = c_ushort(1)
+ hProcess = OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
+ if hProcess == c_void_p(0):
+ raise WinError(GetLastError())
+ if IsWow64Process2(hProcess, byref(ProcessMachine), byref(NativeMachine)) != 0:
+ CloseHandle(hProcess)
+ if ProcessMachine.value == IMAGE_FILE_MACHINE_UNKNOWN:
+ return _get_machine_type(NativeMachine.value)
+ else:
+ return _get_machine_type(ProcessMachine.value)
+ else:
+ CloseHandle(hProcess)
+ raise WinError(GetLastError())
+ else:
+ #Graceful fallback for older OSes
+ PROCESSOR_ARCHITECTURE_INTEL = 0
+ PROCESSOR_ARCHITECTURE_AMD64 = 9
+ class _SYSTEM_INFO(Structure):
+ _fields_ = [('wProcessorArchitecture', c_ushort),
+ ('wReserved', c_ushort),
+ ('dwPageSize', c_ulong),
+ ('lpMinimumApplicationAddress', c_void_p),
+ ('lpMaximumApplicationAddress', c_void_p),
+ ('dwActiveProcessorMask', c_void_p),
+ ('dwNumberOfProcessors', c_ulong),
+ ('dwProcessorType', c_ulong),
+ ('dwAllocationGranularity', c_ulong),
+ ('wProcessorLevel', c_ushort),
+ ('wProcessorRevision', c_ushort)]
+ GetNativeSystemInfo = kernel32.GetNativeSystemInfo
+ GetNativeSystemInfo.argtypes = [POINTER(_SYSTEM_INFO)]
+ systemInfo = _SYSTEM_INFO()
+ GetNativeSystemInfo(byref(systemInfo))
+ if systemInfo.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_AMD64:
+ hProcess = OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid)
+ if hProcess == c_void_p(0):
+ raise WinError(GetLastError())
+ IsWow64Process = kernel32.IsWow64Process
+ IsWow64Process.argtypes = [c_void_p, POINTER(c_int)]
+ IsWow64Process.restype = c_int
+ is_wow64 = c_int(0)
+ if IsWow64Process(hProcess, byref(is_wow64)) != 0:
+ CloseHandle(hProcess)
+ if is_wow64.value != 0:
+ return ARCH_IA32
+ else:
+ return ARCH_X64
+ else:
+ CloseHandle(hProcess)
+ raise WinError(GetLastError())
+ elif systemInfo.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_INTEL:
+ return ARCH_IA32
+ return ARCH_UNKNOWN
+
+def _get_win32_process_list():
+ class _PROCESSENTRY32W(Structure):
+ _fields_ = [('dwSize', c_ulong),
+ ('cntUsage', c_ulong),
+ ('th32ProcessID', c_ulong),
+ ('th32DefaultHeapID', c_void_p),
+ ('th32ModuleID', c_ulong),
+ ('cntThreads', c_ulong),
+ ('th32ParentProcessID', c_ulong),
+ ('pcPriClassBase', c_long),
+ ('dwFlags', c_ulong),
+ ('szExeFile', (c_wchar * 260))]
+ INVALID_HANDLE_VALUE = c_void_p(-1)
+ TH32CS_SNAPPROCESS = 2
+ ERROR_NO_MORE_FILES = 18
+ kernel32 = windll.kernel32
+ CreateToolhelp32Snapshot = kernel32.CreateToolhelp32Snapshot
+ CreateToolhelp32Snapshot.argtypes = [c_ulong, c_ulong]
+ CreateToolhelp32Snapshot.restype = c_void_p
+ Process32First = kernel32.Process32FirstW
+ Process32First.argtypes = [c_void_p, POINTER(_PROCESSENTRY32W)]
+ Process32First.restype = c_int
+ Process32Next = kernel32.Process32NextW
+ Process32Next.argtypes = [c_void_p, POINTER(_PROCESSENTRY32W)]
+ Process32Next.restype = c_int
+ CloseHandle = kernel32.CloseHandle
+ CloseHandle.argtypes = [c_void_p]
+ CloseHandle.restype = c_int
+
+ hSnapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0)
+ if hSnapshot == INVALID_HANDLE_VALUE.value:
+ raise WinError(GetLastError())
+ process_list = []
+ processEntry = _PROCESSENTRY32W()
+ processEntry.dwSize = sizeof(processEntry)
+ more_processes = True
+ if Process32First(hSnapshot, byref(processEntry)) == 0:
+ raise WinError(GetLastError())
+ while more_processes:
+ process_list.append(_Process(processEntry.th32ProcessID, processEntry.th32ParentProcessID, processEntry.szExeFile))
+ if Process32Next(hSnapshot, byref(processEntry)) == 0:
+ status = GetLastError()
+ if status == ERROR_NO_MORE_FILES:
+ more_processes = False
+ else:
+ raise WinError(status)
+ CloseHandle(hSnapshot)
+ return process_list
+
+def _get_win32_parent_processes():
+ kernel32 = windll.kernel32
+ GetCurrentProcessId = kernel32.GetCurrentProcessId
+ GetCurrentProcessId.argtypes = []
+ GetCurrentProcessId.restype = c_ulong
+
+ process_list = _get_win32_process_list()
+ pid = GetCurrentProcessId()
+ parent_processes = []
+ found_parent = True
+ while found_parent:
+ found_parent = False
+ for process in process_list:
+ if process.process_id == pid:
+ found_parent = True
+ parent_processes.append(process)
+ pid = process.parent_process_id
+ break
+ return parent_processes
+
+def _get_mingw_target_architecture():
+ parent_processes = _get_win32_parent_processes()
+ for process in parent_processes:
+ if 'make' in process.exe_filename.lower():
+ return _get_win32_process_architecture(process.process_id)
+ return ARCH_UNKNOWN
+
+def get_host_arch():
+ if sys.platform == 'win32':
+ host_arch = _get_mingw_target_architecture()
+ else:
+ result = subprocess.run('uname -m', universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, check=True)
+ uname_m = result.stdout.strip()
+ ia32_regex = re.compile(r".*i[8]?[0-9]86.*")
+ ia32_match = ia32_regex.match(uname_m)
+ if 'x86_64' in uname_m or 'amd64' in uname_m:
+ host_arch = ARCH_X64
+ elif ia32_match:
+ host_arch = ARCH_IA32
+ elif 'aarch64' in uname_m or 'arm64' in uname_m:
+ host_arch = ARCH_AARCH64
+ elif 'arm' in uname_m:
+ host_arch = ARCH_ARM
+ elif 'riscv64' in uname_m:
+ host_arch = ARCH_RISCV64
+ elif 'loongarch64' in uname_m:
+ host_arch = ARCH_LOONGARCH64
+ # There is a corner case for the Raspberry Pi. Sometimes it has a 64-bit
+ # kernel paired with an exclusively 32-bit user mode. Check for this case.
+ if shutil.which("lsb_release") is not None:
+ res = subprocess.run(["lsb_release", "-i"], universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+ check=True)
+ distributor = res.stdout.strip()
+ if distributor == "Distributor ID:\tRaspbian":
+ host_arch = ARCH_ARM
+ print(host_arch)
+ return 0
+
+def main():
+ if sys.argv[1] == 'get_host_arch':
+ return get_host_arch()
+ elif sys.argv[1] == 'cp':
+ shutil.copy(os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]))
+ elif sys.argv[1] == 'mv':
+ shutil.move(os.path.normpath(sys.argv[2]), os.path.normpath(sys.argv[3]))
+ elif sys.argv[1] == 'rm':
+ paths = [os.path.normpath(x) for x in sys.argv[2:]]
+ files = []
+ for path in paths:
+ if '*' in path:
+ files.extend(glob.glob(path))
+ else:
+ files.append(path)
+ for file in files:
+ if os.path.exists(file):
+ if os.path.isfile(file):
+ os.remove(file)
+ else:
+ sys.stderr.writelines(['{} is not a file.'.format(file)])
+ else:
+ sys.stderr.writelines(['File {} does not exist.'.format(file)])
+ elif sys.argv[1] == 'md':
+ path = os.path.normpath(sys.argv[2])
+ if not os.path.exists(path):
+ os.makedirs(path)
+ else:
+ if os.path.isdir(path):
+ sys.stderr.writelines(['Directory {} already exists.'.format(path)])
+ else:
+ sys.stderr.writelines(['{} is a file.'.format(path)])
+ return 1
+ elif sys.argv[1] == 'rd':
+ paths = [os.path.normpath(x) for x in sys.argv[2:]]
+ for path in paths:
+ if os.path.exists(path):
+ if os.path.isdir(path):
+ shutil.rmtree(path)
+ else:
+ sys.stderr.writelines(['{} is not a directory.'.format(path)])
+ else:
+ sys.stderr.writelines(['Directory {} does not exist.'.format(path)])
+ elif sys.argv[1] == 'rm_pyc_files':
+ path = os.path.normpath(sys.argv[2])
+ files = glob.glob(os.path.join(path, '*.pyc'))
+ for file in files:
+ if os.path.exists(file):
+ if os.path.isfile(file):
+ os.remove(file)
+ else:
+ sys.stderr.writelines(['{} is not a file.'.format(file)])
+ else:
+ sys.stderr.writelines(['File {} does not exist.'.format(file)])
+ py_cache = os.path.join(path, '__pycache__')
+ if os.path.isdir(py_cache):
+ shutil.rmtree(py_cache)
+ else:
+ sys.stderr.writelines(['Unsupported command.'])
+ return 1
+ return 0
+
+if __name__ == '__main__':
+ try:
+ sys.exit(main())
+ except Exception as e:
+ traceback.print_exc()
+ sys.exit(1)
diff --git a/BaseTools/Source/C/Makefiles/app.makefile b/BaseTools/Source/C/Makefiles/app.makefile index 506343a..0ad1378 100644 --- a/BaseTools/Source/C/Makefiles/app.makefile +++ b/BaseTools/Source/C/Makefiles/app.makefile @@ -1,7 +1,7 @@ ## @file
# Makefiles
#
-# Copyright (c) 2007 - 2014, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2025, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
@@ -16,7 +16,17 @@ all: $(MAKEROOT)/bin $(APPLICATION) $(APPLICATION): $(OBJECTS)
$(LINKER) -o $(APPLICATION) $(LDFLAGS) $(OBJECTS) -L$(MAKEROOT)/libs $(LIBS)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ $(CP) $(APPLICATION).exe $(BIN_PATH)
+endif
$(OBJECTS): $(MAKEROOT)/Include/Common/BuildVersion.h
+clean: appClean
+
+appClean:
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ $(RM) $(BIN_PATH)/$(APPNAME).exe
+endif
+
include $(MAKEROOT)/Makefiles/footer.makefile
diff --git a/BaseTools/Source/C/Makefiles/footer.makefile b/BaseTools/Source/C/Makefiles/footer.makefile index 7546da8..9a3dfcd 100644 --- a/BaseTools/Source/C/Makefiles/footer.makefile +++ b/BaseTools/Source/C/Makefiles/footer.makefile @@ -1,18 +1,18 @@ ## @file
# Makefile
#
-# Copyright (c) 2007 - 2016, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2025, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
DEPFILES = $(OBJECTS:%.o=%.d)
$(MAKEROOT)/libs-$(HOST_ARCH):
- mkdir -p $(MAKEROOT)/libs-$(HOST_ARCH)
+ $(MD) $(MAKEROOT)/libs-$(HOST_ARCH)
.PHONY: install
install: $(MAKEROOT)/libs-$(HOST_ARCH) $(LIBRARY)
- cp $(LIBRARY) $(MAKEROOT)/libs-$(HOST_ARCH)
+ $(CP) $(LIBRARY) $(MAKEROOT)/libs-$(HOST_ARCH)
$(LIBRARY): $(OBJECTS)
$(AR) crs $@ $^
@@ -25,6 +25,6 @@ $(LIBRARY): $(OBJECTS) .PHONY: clean
clean:
- @rm -f $(OBJECTS) $(LIBRARY) $(DEPFILES)
+ $(RM) $(OBJECTS) $(LIBRARY) $(DEPFILES)
-include $(DEPFILES)
diff --git a/BaseTools/Source/C/Makefiles/header.makefile b/BaseTools/Source/C/Makefiles/header.makefile index d369908..55a7307 100644 --- a/BaseTools/Source/C/Makefiles/header.makefile +++ b/BaseTools/Source/C/Makefiles/header.makefile @@ -5,48 +5,117 @@ # HOST_ARCH = ia32 or IA32 for IA32 build
# HOST_ARCH = Arm or ARM for ARM build
#
-# Copyright (c) 2007 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2007 - 2025, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
+# Set SEP to the platform specific path separator
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ SHELL := cmd.exe
+ SEP:=$(shell echo \)
+else
+ SEP:=/
+endif
+
EDK2_PATH ?= $(MAKEROOT)/../../..
+ifndef PYTHON_COMMAND
+ ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ #
+ # Try using the Python Launcher for Windows to find an interpreter.
+ #
+ CHECK_PY := $(shell where py.exe || echo NotFound)
+ ifeq ($(CHECK_PY),NotFound)
+ #
+ # PYTHON_HOME is the old method of specifying a Python interpreter on Windows.
+ # Check if an interpreter can be found using PYTHON_HOME.
+ #
+ ifdef PYTHON_HOME
+ ifndef (,$(wildcard $(PYTHON_HOME)$(SEP)python.exe)) # Make sure the file exists
+ PYTHON_COMMAND := $(PYTHON_HOME)$(SEP)python.exe
+ else
+ $(error Unable to find a Python interperter, if one is installed, set the PYTHON_COMMAND environment variable!)
+ endif
+ endif
+ else
+ PYTHON_COMMAND := $(shell py -3 -c "import sys; print(sys.executable)")
+ ifdef (,$(wildcard $(PYTHON_COMMAND))) # Make sure the file exists
+ $(error Unable to find a Python interperter, if one is installed, set the PYTHON_COMMAND environment variable!)
+ endif
+ endif
+ undefine CHECK_PY
+ else # UNIX
+ PYTHON_COMMAND := $(shell /usr/bin/env python3 -c "import sys; print(sys.executable)")
+ ifdef (,$(wildcard $(PYTHON_COMMAND))) # Make sure the file exists
+ PYTHON_COMMAND := $(shell /usr/bin/env python -c "import sys; print(sys.executable)")
+ ifdef (,$(wildcard $(PYTHON_COMMAND))) # Make sure the file exists
+ undefine PYTHON_COMMAND
+ endif
+ endif
+ ifndef PYTHON_COMMAND
+ $(error Unable to find a Python interpreter, if one is installed, set the PYTHON_COMMAND environment variable!)
+ endif
+ endif
+ export PYTHON_COMMAND
+endif
+
+# GnuMakeUtils.py is able to handle forward slashes in file paths on Windows systems
+GNU_MAKE_UTILS_PY := $(PYTHON_COMMAND) $(MAKEROOT)$(SEP)Makefiles$(SEP)GnuMakeUtils.py
+CP := $(GNU_MAKE_UTILS_PY) cp
+MV := $(GNU_MAKE_UTILS_PY) mv
+RM := $(GNU_MAKE_UTILS_PY) rm
+MD := $(GNU_MAKE_UTILS_PY) md
+RD := $(GNU_MAKE_UTILS_PY) rd
ifndef HOST_ARCH
#
- # If HOST_ARCH is not defined, then we use 'uname -m' to attempt
+ # If HOST_ARCH is not defined, then we use 'GnuMakeUtils.py' to
# try to figure out the appropriate HOST_ARCH.
#
- uname_m = $(shell uname -m)
- $(info Attempting to detect HOST_ARCH from 'uname -m': $(uname_m))
- ifneq (,$(strip $(filter $(uname_m), x86_64 amd64)))
- HOST_ARCH=X64
- endif
- ifeq ($(patsubst i%86,IA32,$(uname_m)),IA32)
- HOST_ARCH=IA32
- endif
- ifneq (,$(findstring aarch64,$(uname_m)))
- HOST_ARCH=AARCH64
- else ifneq (,$(findstring arm64,$(uname_m)))
- HOST_ARCH=AARCH64
- else ifneq (,$(findstring arm,$(uname_m)))
- HOST_ARCH=ARM
+ GET_GNU_HOST_ARCH_PY:=$(MAKEROOT)$(SEP)Makefiles$(SEP)GnuMakeUtils.py get_host_arch
+ ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ HOST_ARCH:=$(shell if defined PYTHON_COMMAND $(PYTHON_COMMAND) $(GET_GNU_HOST_ARCH_PY))
+ else
+ HOST_ARCH:=$(shell if command -v $(PYTHON_COMMAND) >/dev/null 1; then $(PYTHON_COMMAND) $(GET_GNU_HOST_ARCH_PY); else python $(GET_GNU_HOST_ARCH_PY); fi)
endif
- ifneq (,$(findstring riscv64,$(uname_m)))
- HOST_ARCH=RISCV64
+ ifeq ($(HOST_ARCH),)
+ $(info HOST_ARCH detection failed.)
+ undefine HOST_ARCH
endif
- ifneq (,$(findstring loongarch64,$(uname_m)))
- HOST_ARCH=LOONGARCH64
+ ifeq ($(HOST_ARCH),Unknown)
+ $(info HOST_ARCH detection failed.)
+ undefine HOST_ARCH
endif
- ifndef HOST_ARCH
- $(info Could not detected HOST_ARCH from uname results)
- $(error HOST_ARCH is not defined!)
+endif
+ifndef HOST_ARCH
+ $(error HOST_ARCH is not defined!)
+endif
+
+#Set up BaseTools binary path for Windows builds
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ ifndef BIN_PATH
+ BIN_PATH_BASE=$(MAKEROOT)/../../Bin
+ ifeq ($(HOST_ARCH),X64)
+ BIN_PATH=$(BIN_PATH_BASE)/Win64
+ else
+ ifeq ($(HOST_ARCH),AARCH64)
+ BIN_PATH=$(BIN_PATH_BASE)/Win64
+ else
+ BIN_PATH=$(BIN_PATH_BASE)/Win32
+ endif
+ endif
endif
- $(info Detected HOST_ARCH of $(HOST_ARCH) using uname.)
endif
-CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
-LINUX:=$(findstring Linux, $(shell uname -s))
-DARWIN:=$(findstring Darwin, $(shell uname -s))
-CLANG:=$(shell $(CC) --version | grep clang)
+ifneq ($(findstring cmd,$(SHELL)),cmd)
+ CYGWIN:=$(findstring CYGWIN, $(shell uname -s))
+ LINUX:=$(findstring Linux, $(shell uname -s))
+ DARWIN:=$(findstring Darwin, $(shell uname -s))
+else
+ #Don't use uname on Windows
+ CYGWIN:=
+ LINUX:=
+ DARWIN:=
+endif
+CLANG := $(findstring clang,$(shell $(CC) --version))
ifneq ($(CLANG),)
CC ?= $(CLANG_BIN)clang
CXX ?= $(CLANG_BIN)clang++
@@ -54,11 +123,11 @@ AS ?= $(CLANG_BIN)clang AR ?= $(CLANG_BIN)llvm-ar
LD ?= $(CLANG_BIN)llvm-ld
else ifeq ($(origin CC),default)
-CC = gcc
-CXX = g++
-AS = gcc
-AR = ar
-LD = ld
+CC = $(GCC_PREFIX)gcc
+CXX = $(GCC_PREFIX)g++
+AS = $(GCC_PREFIX)gcc
+AR = $(GCC_PREFIX)ar
+LD = $(GCC_PREFIX)ld
endif
LINKER ?= $(CC)
ifeq ($(HOST_ARCH), IA32)
@@ -141,7 +210,10 @@ LDFLAGS += $(EXTRA_LDFLAGS) all:
$(MAKEROOT)/libs:
- mkdir $(MAKEROOT)/libs
+ $(MD) $(MAKEROOT)/libs
$(MAKEROOT)/bin:
- mkdir $(MAKEROOT)/bin
+ $(MD) $(MAKEROOT)/bin
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ $(MD) $(BIN_PATH)
+endif
diff --git a/BaseTools/Source/C/Makefiles/ms.common b/BaseTools/Source/C/Makefiles/ms.common index fe7a59c..2aafe58 100644 --- a/BaseTools/Source/C/Makefiles/ms.common +++ b/BaseTools/Source/C/Makefiles/ms.common @@ -44,6 +44,8 @@ BIN_PATH = $(BASE_TOOLS_PATH)\Bin\Win32 LIB_PATH = $(BASE_TOOLS_PATH)\Lib\Win32
SYS_BIN_PATH = $(EDK_TOOLS_PATH)\Bin\Win32
SYS_LIB_PATH = $(EDK_TOOLS_PATH)\Lib\Win32
+# Note: Disable flexible array member warnings
+CFLAGS = $(CFLAGS) /wd4200
!ELSEIF "$(HOST_ARCH)"=="X64"
ARCH_INCLUDE = $(EDK2_PATH)\MdePkg\Include\X64
@@ -51,7 +53,29 @@ BIN_PATH = $(BASE_TOOLS_PATH)\Bin\Win64 LIB_PATH = $(BASE_TOOLS_PATH)\Lib\Win64
SYS_BIN_PATH = $(EDK_TOOLS_PATH)\Bin\Win64
SYS_LIB_PATH = $(EDK_TOOLS_PATH)\Lib\Win64
+CFLAGS = $(CFLAGS) /wd4267 /wd4244 /wd4334
+# Note: Disable flexible array member warnings
+CFLAGS = $(CFLAGS) /wd4200
+!ELSEIF "$(HOST_ARCH)"=="ARM"
+ARCH_INCLUDE = $(EDK2_PATH)\MdePkg\Include\Arm
+BIN_PATH = $(BASE_TOOLS_PATH)\Bin\Win32
+LIB_PATH = $(BASE_TOOLS_PATH)\Lib\Win32
+SYS_BIN_PATH = $(EDK_TOOLS_PATH)\Bin\Win32
+SYS_LIB_PATH = $(EDK_TOOLS_PATH)\Lib\Win32
+# Note: Disable flexible array member warnings
+CFLAGS = $(CFLAGS) /wd4200
+
+!ELSEIF "$(HOST_ARCH)"=="AARCH64"
+ARCH_INCLUDE = $(EDK2_PATH)\MdePkg\Include\AArch64
+BIN_PATH = $(BASE_TOOLS_PATH)\Bin\Win64
+LIB_PATH = $(BASE_TOOLS_PATH)\Lib\Win64
+SYS_BIN_PATH = $(EDK_TOOLS_PATH)\Bin\Win64
+SYS_LIB_PATH = $(EDK_TOOLS_PATH)\Lib\Win64
+# Note: These are bit-width conversion related warning suppressions.
+CFLAGS = $(CFLAGS) /wd4267 /wd4244 /wd4334
+# Note: Disable flexible array member warnings
+CFLAGS = $(CFLAGS) /wd4200
!ELSE
!ERROR "Bad HOST_ARCH"
!ENDIF
diff --git a/BaseTools/Source/C/VfrCompile/GNUmakefile b/BaseTools/Source/C/VfrCompile/GNUmakefile index 7d59766..ad6c350 100644 --- a/BaseTools/Source/C/VfrCompile/GNUmakefile +++ b/BaseTools/Source/C/VfrCompile/GNUmakefile @@ -1,7 +1,7 @@ ## @file
# GNU/Linux makefile for 'VfrCompile' module build.
#
-# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2025, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
@@ -16,7 +16,7 @@ TOOL_INCLUDE = -I Pccts/h #OBJECTS = VfrSyntax.o VfrServices.o DLGLexer.o EfiVfrParser.o ATokenBuffer.o DLexerBase.o AParser.o
OBJECTS = AParser.o DLexerBase.o ATokenBuffer.o EfiVfrParser.o VfrLexer.o VfrSyntax.o \
VfrFormPkg.o VfrError.o VfrUtilityLib.o VfrCompiler.o
-CLANG:=$(shell $(CC) --version | grep clang)
+CLANG := $(findstring clang,$(shell $(CC) --version))
ifneq ($(CLANG),)
VFR_CPPFLAGS = -Wno-deprecated-register -std=c++14 -DPCCTS_USE_NAMESPACE_STD $(CPPFLAGS)
else
@@ -38,11 +38,17 @@ include $(MAKEROOT)/Makefiles/header.makefile APPLICATION = $(MAKEROOT)/bin/$(APPNAME)
+BIN_DIR=.
+export BIN_DIR
+
.PHONY:all
all: $(MAKEROOT)/bin $(APPLICATION)
$(APPLICATION): $(OBJECTS)
$(LINKER) -o $(APPLICATION) $(VFR_LFLAGS) $(OBJECTS) -L$(MAKEROOT)/libs $(LIBS)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ $(CP) $(APPLICATION).exe $(BIN_PATH)
+endif
VfrCompiler.o: ../Include/Common/BuildVersion.h
@@ -55,10 +61,10 @@ VfrLexer.cpp VfrLexer.h: Pccts/dlg/dlg VfrParser.dlg Pccts/dlg/dlg -C2 -i -CC -cl VfrLexer -o . VfrParser.dlg
Pccts/antlr/antlr:
- BIN_DIR='.' $(MAKE) -C Pccts/antlr
+ $(MAKE) -C Pccts/antlr
Pccts/dlg/dlg:
- BIN_DIR='.' $(MAKE) -C Pccts/dlg
+ $(MAKE) -C Pccts/dlg
ATokenBuffer.o: Pccts/h/ATokenBuffer.cpp
$(CXX) -c $(VFR_CPPFLAGS) $(INC) $(VFR_CXXFLAGS) $? -o $@
@@ -75,7 +81,9 @@ VfrSyntax.o: VfrSyntax.cpp clean: localClean
localClean:
- BIN_DIR='.' $(MAKE) -C Pccts/antlr clean
- BIN_DIR='.' $(MAKE) -C Pccts/dlg clean
- rm -f $(EXTRA_CLEAN_OBJECTS)
-
+ $(MAKE) -C Pccts/antlr clean
+ $(MAKE) -C Pccts/dlg clean
+ $(RM) $(EXTRA_CLEAN_OBJECTS)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ $(RM) $(BIN_PATH)/$(APPNAME).exe
+endif
diff --git a/BaseTools/Source/C/VfrCompile/Pccts/antlr/AntlrMS.mak b/BaseTools/Source/C/VfrCompile/Pccts/antlr/AntlrMS.mak index 6fc4d5c..26c590a 100644 --- a/BaseTools/Source/C/VfrCompile/Pccts/antlr/AntlrMS.mak +++ b/BaseTools/Source/C/VfrCompile/Pccts/antlr/AntlrMS.mak @@ -8,6 +8,15 @@ PCCTS_HOME=$(BASE_TOOLS_PATH)\Source\C\VfrCompile\Pccts ANTLR_SRC=$(PCCTS_HOME)\antlr
PCCTS_H=$(PCCTS_HOME)\h
+!IFNDEF HOST_ARCH
+HOST_ARCH = IA32
+!ENDIF
+
+!IF "$(HOST_ARCH)"=="IA32" || "$(HOST_ARCH)"=="ARM"
+SYS_BIN_PATH=$(EDK_TOOLS_PATH)\Bin\Win32
+!ELSE
+SYS_BIN_PATH=$(EDK_TOOLS_PATH)\Bin\Win64
+!ENDIF
# Support directories
SET=$(PCCTS_HOME)\support\set
@@ -27,10 +36,10 @@ SUPPORT_OBJS = set.obj # Dependencies
-$(EDK_TOOLS_PATH)\Bin\Win32\antlr.exe: $(ANTLR_OBJS) $(SUPPORT_OBJS)
+$(SYS_BIN_PATH)\antlr.exe: $(ANTLR_OBJS) $(SUPPORT_OBJS)
$(CC) $(CFLAGS) -Feantlr.exe $(ANTLR_OBJS) $(SUPPORT_OBJS)
- -@if not exist $(EDK_TOOLS_PATH)\Bin\Win32 mkdir $(EDK_TOOLS_PATH)\Bin\Win32
- copy antlr.exe $(EDK_TOOLS_PATH)\Bin\Win32
+ -@if not exist $(SYS_BIN_PATH) mkdir $(SYS_BIN_PATH)
+ copy antlr.exe $(SYS_BIN_PATH)
antlr.obj: $(ANTLR_SRC)\antlr.c \
diff --git a/BaseTools/Source/C/VfrCompile/Pccts/antlr/makefile b/BaseTools/Source/C/VfrCompile/Pccts/antlr/makefile index 746d58b..87eedf3 100644 --- a/BaseTools/Source/C/VfrCompile/Pccts/antlr/makefile +++ b/BaseTools/Source/C/VfrCompile/Pccts/antlr/makefile @@ -159,7 +159,14 @@ PCCTS_H=../h #set.$(OBJ_EXT): $(SET)/set.c
# $(CC) $(CFLAGS) -c $(OUT_OBJ)set.$(OBJ_EXT) $(SET)/set.c
-
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ SHELL := cmd.exe
+ SEP := $(shell echo \)
+ RM := del /f /q
+else
+ SEP :=/
+ RM := rm -f
+endif
#
# UNIX (default)
@@ -169,7 +176,7 @@ ANTLR=${BIN_DIR}/antlr DLG=${BIN_DIR}/dlg
OBJ_EXT=o
OUT_OBJ = -o
-CFLAGS= $(COPT) -I. -I$(SET) -I$(PCCTS_H) -DUSER_ZZSYN $(COTHER) -DZZLEXBUFSIZE=65536
+CFLAGS= $(COPT) -I. -I$(SET) -I$(PCCTS_H) -DUSER_ZZSYN $(COTHER) -DZZLEXBUFSIZE=65536 -std=gnu11
CPPFLAGS=
#
# SGI Users, use this CFLAGS
@@ -213,8 +220,8 @@ set.o : $(SET)/set.c #clean up all the intermediate files
clean:
- rm -f $(BIN_DIR)/antlr *.$(OBJ_EXT) core
+ $(RM) $(BIN_DIR)$(SEP)antlr *.$(OBJ_EXT) core
#remove everything in clean plus the PCCTS files generated
scrub:
- rm -f $(PCCTS_GEN) *.$(OBJ_EXT) core
+ $(RM) $(PCCTS_GEN) *.$(OBJ_EXT) core
diff --git a/BaseTools/Source/C/VfrCompile/Pccts/dlg/DlgMS.mak b/BaseTools/Source/C/VfrCompile/Pccts/dlg/DlgMS.mak index c2cac00..f2a31a7 100644 --- a/BaseTools/Source/C/VfrCompile/Pccts/dlg/DlgMS.mak +++ b/BaseTools/Source/C/VfrCompile/Pccts/dlg/DlgMS.mak @@ -12,6 +12,15 @@ PCCTS_H=$(PCCTS_HOME)\h # Support directories
SET=$(PCCTS_HOME)\support\set
+!IFNDEF HOST_ARCH
+HOST_ARCH = IA32
+!ENDIF
+
+!IF "$(HOST_ARCH)"=="IA32" || "$(HOST_ARCH)"=="ARM"
+SYS_BIN_PATH=$(EDK_TOOLS_PATH)\Bin\Win32
+!ELSE
+SYS_BIN_PATH=$(EDK_TOOLS_PATH)\Bin\Win64
+!ENDIF
# Compiler stuff
CC = cl
@@ -26,10 +35,10 @@ SUPPORT_OBJS = set.obj # Dependencies
-$(EDK_TOOLS_PATH)\Bin\Win32\dlg.exe: $(DLG_OBJS) $(SUPPORT_OBJS)
+$(SYS_BIN_PATH)\dlg.exe: $(DLG_OBJS) $(SUPPORT_OBJS)
$(CC) $(CFLAGS) -Fedlg.exe $(DLG_OBJS) $(SUPPORT_OBJS)
- -@if not exist $(EDK_TOOLS_PATH)\Bin\Win32 mkdir $(EDK_TOOLS_PATH)\Bin\Win32
- copy dlg.exe $(EDK_TOOLS_PATH)\Bin\Win32
+ -@if not exist $(SYS_BIN_PATH) mkdir $(SYS_BIN_PATH)
+ copy dlg.exe $(SYS_BIN_PATH)
dlg_p.obj: $(DLG_SRC)\dlg_p.c \
$(PCCTS_H)\antlr.h \
diff --git a/BaseTools/Source/C/VfrCompile/Pccts/dlg/makefile b/BaseTools/Source/C/VfrCompile/Pccts/dlg/makefile index e45ac98..e56b690 100644 --- a/BaseTools/Source/C/VfrCompile/Pccts/dlg/makefile +++ b/BaseTools/Source/C/VfrCompile/Pccts/dlg/makefile @@ -114,7 +114,15 @@ PCCTS_H=../h #
# UNIX
#
-CLANG:=$(shell $(CC) --version | grep clang)
+ifeq (Windows, $(findstring Windows,$(MAKE_HOST)))
+ SHELL := cmd.exe
+ SEP := $(shell echo \)
+ RM := del /f /q
+else
+ SEP :=/
+ RM := rm -f
+endif
+CLANG := $(findstring clang,$(shell $(CC) --version))
ifneq ($(CLANG),)
CC?=$(CLANG_BIN)clang
else ifeq ($(origin CC),default)
@@ -123,7 +131,7 @@ endif COPT=-O
ANTLR=${BIN_DIR}/antlr
DLG=${BIN_DIR}/dlg
-CFLAGS= $(COPT) -I. -I$(SET) -I$(PCCTS_H) -DUSER_ZZSYN -DZZLEXBUFSIZE=65536
+CFLAGS= $(COPT) -I. -I$(SET) -I$(PCCTS_H) -DUSER_ZZSYN -DZZLEXBUFSIZE=65536 -std=gnu11
CPPFLAGS=
OBJ_EXT=o
OUT_OBJ = -o
@@ -162,4 +170,4 @@ lint: #clean up all the intermediate files
clean:
- rm -f $(BIN_DIR)/dlg *.$(OBJ_EXT) core
+ $(RM) $(BIN_DIR)$(SEP)dlg *.$(OBJ_EXT) core
diff --git a/BaseTools/Source/C/VfrCompile/VfrSyntax.g b/BaseTools/Source/C/VfrCompile/VfrSyntax.g index 55fd067..942e0ed 100644 --- a/BaseTools/Source/C/VfrCompile/VfrSyntax.g +++ b/BaseTools/Source/C/VfrCompile/VfrSyntax.g @@ -1,7 +1,7 @@ /*++ @file
Vfr Syntax
-Copyright (c) 2004 - 2019, Intel Corporation. All rights reserved.<BR>
+Copyright (c) 2004 - 2025, Intel Corporation. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
--*/
@@ -111,6 +111,7 @@ VfrParserStart ( #token CloseBracket("]") "\]"
#token LineDefinition "#line\ [0-9]+\ \"~[\"]+\"[\ \t]*\n" << gCVfrErrorHandle.ParseFileScopeRecord (begexpr (), line ()); skip (); newline (); >>
+#token GccLineDefinition "#\ [0-9]+\ \"~[\"]+\"[\ \t]*([1234][\ \t]*)*\n" << gCVfrErrorHandle.ParseFileScopeRecord (begexpr (), line ()); skip (); newline (); >>
#token DevicePath("devicepath") "devicepath"
#token FormSet("formset") "formset"
#token FormSetId("formsetid") "formsetid"
diff --git a/BaseTools/Source/Python/AutoGen/BuildEngine.py b/BaseTools/Source/Python/AutoGen/BuildEngine.py index 45b39d7..b829a25 100644 --- a/BaseTools/Source/Python/AutoGen/BuildEngine.py +++ b/BaseTools/Source/Python/AutoGen/BuildEngine.py @@ -330,7 +330,6 @@ class BuildRule: else:
EdkLogger.error("build", PARAMETER_MISSING, ExtraData="No rule file or string given")
- self.SupportedToolChainFamilyList = SupportedFamily
self.RuleDatabase = tdict(True, 4) # {FileExt, ModuleType, Arch, Family : FileBuildRule object}
self.Ext2FileType = {} # {ext : file-type}
self.FileTypeList = set()
@@ -343,7 +342,6 @@ class BuildRule: self._ArchList = set()
self._FamilyList = []
self._TotalToolChainFamilySet = set()
- self._RuleObjectList = [] # FileBuildRule object list
self._FileVersion = ""
self.Parse()
diff --git a/BaseTools/Source/Python/AutoGen/GenC.py b/BaseTools/Source/Python/AutoGen/GenC.py index 5ad10ce..86991e7 100755 --- a/BaseTools/Source/Python/AutoGen/GenC.py +++ b/BaseTools/Source/Python/AutoGen/GenC.py @@ -21,6 +21,9 @@ from .StrGather import * from .GenPcdDb import CreatePcdDatabaseCode
from .IdfClassObject import *
+import json
+import secrets
+
## PCD type string
gItemTypeStringDatabase = {
TAB_PCDS_FEATURE_FLAG : TAB_PCDS_FIXED_AT_BUILD,
@@ -2039,6 +2042,34 @@ def CreateFooterCode(Info, AutoGenC, AutoGenH): def CreateCode(Info, AutoGenC, AutoGenH, StringH, UniGenCFlag, UniGenBinBuffer, StringIdf, IdfGenCFlag, IdfGenBinBuffer):
CreateHeaderCode(Info, AutoGenC, AutoGenH)
+ # The only 32 bit archs we have are IA32 and ARM, everything else is 64 bit
+ Bitwidth = 32 if Info.Arch == 'IA32' or Info.Arch == 'ARM' else 64
+
+ if GlobalData.gStackCookieValues64 == [] and os.path.exists(os.path.join(Info.PlatformInfo.BuildDir, "StackCookieValues64.json")):
+ with open (os.path.join(Info.PlatformInfo.BuildDir, "StackCookieValues64.json"), "r") as file:
+ GlobalData.gStackCookieValues64 = json.load(file)
+ if GlobalData.gStackCookieValues32 == [] and os.path.exists(os.path.join(Info.PlatformInfo.BuildDir, "StackCookieValues32.json")):
+ with open (os.path.join(Info.PlatformInfo.BuildDir, "StackCookieValues32.json"), "r") as file:
+ GlobalData.gStackCookieValues32 = json.load(file)
+
+ try:
+ if Bitwidth == 32:
+ CookieValue = int(GlobalData.gStackCookieValues32[hash(Info.Guid) % len(GlobalData.gStackCookieValues32)])
+ else:
+ CookieValue = int(GlobalData.gStackCookieValues64[hash(Info.Guid) % len(GlobalData.gStackCookieValues64)])
+ except:
+ EdkLogger.warn("build", "Failed to get Stack Cookie Value List! Generating random value.", ExtraData="[%s]" % str(Info))
+ if Bitwidth == 32:
+ CookieValue = secrets.randbelow (0xFFFFFFFF)
+ else:
+ CookieValue = secrets.randbelow (0xFFFFFFFFFFFFFFFF)
+
+ AutoGenH.Append((
+ '#define STACK_COOKIE_VALUE 0x%XULL\n' % CookieValue
+ if Bitwidth == 64 else
+ '#define STACK_COOKIE_VALUE 0x%X\n' % CookieValue
+ ))
+
CreateGuidDefinitionCode(Info, AutoGenC, AutoGenH)
CreateProtocolDefinitionCode(Info, AutoGenC, AutoGenH)
CreatePpiDefinitionCode(Info, AutoGenC, AutoGenH)
diff --git a/BaseTools/Source/Python/AutoGen/GenMake.py b/BaseTools/Source/Python/AutoGen/GenMake.py index fbd35d4..e5f282c 100755 --- a/BaseTools/Source/Python/AutoGen/GenMake.py +++ b/BaseTools/Source/Python/AutoGen/GenMake.py @@ -14,6 +14,7 @@ import sys import string
import re
import os.path as path
+from Common import EdkLogger
from Common.LongFilePathSupport import OpenLongFilePath as open
from Common.MultipleWorkspace import MultipleWorkspace as mws
from Common.BuildToolError import *
@@ -446,19 +447,14 @@ cleanlib: self.ResultFileList = []
self.IntermediateDirectoryList = ["$(DEBUG_DIR)", "$(OUTPUT_DIR)"]
- self.FileBuildTargetList = [] # [(src, target string)]
self.BuildTargetList = [] # [target string]
- self.PendingBuildTargetList = [] # [FileBuildRule objects]
self.CommonFileDependency = []
self.FileListMacros = {}
self.ListFileMacros = {}
self.ObjTargetDict = OrderedDict()
self.FileCache = {}
- self.LibraryBuildCommandList = []
- self.LibraryFileList = []
self.LibraryMakefileList = []
self.LibraryBuildDirectoryList = []
- self.SystemLibraryList = []
self.Macros = OrderedDict()
self.Macros["OUTPUT_DIR" ] = self._AutoGenObject.Macros["OUTPUT_DIR"]
self.Macros["DEBUG_DIR" ] = self._AutoGenObject.Macros["DEBUG_DIR"]
@@ -898,9 +894,20 @@ cleanlib: break
if self._AutoGenObject.ToolChainFamily == 'GCC':
- RespDict[Key] = Value.replace('\\', '/')
- else:
- RespDict[Key] = Value
+ #
+ # Replace '\' with '/' in the response file.
+ # Skip content within "" or \"\"
+ #
+ ValueList = re.split(r'("|\\"|\s+)', Value)
+ Skip = False
+ for i, v in enumerate(ValueList):
+ if v in ('"', '\\"'):
+ Skip = not Skip
+ elif not Skip:
+ ValueList[i] = v.replace('\\', '/')
+ Value = ''.join(ValueList)
+ RespDict[Key] = Value
+
for Target in BuildTargets:
for i, SingleCommand in enumerate(BuildTargets[Target].Commands):
if FlagDict[Flag]['Macro'] in SingleCommand:
@@ -1145,21 +1152,6 @@ cleanlib: if not LibraryAutoGen.IsBinaryModule:
self.LibraryBuildDirectoryList.append(self.PlaceMacro(LibraryAutoGen.BuildDir, self.Macros))
- ## Return a list containing source file's dependencies
- #
- # @param FileList The list of source files
- # @param ForceInculeList The list of files which will be included forcely
- # @param SearchPathList The list of search path
- #
- # @retval dict The mapping between source file path and its dependencies
- #
- def GetFileDependency(self, FileList, ForceInculeList, SearchPathList):
- Dependency = {}
- for F in FileList:
- Dependency[F] = GetDependencyList(self._AutoGenObject, self.FileCache, F, ForceInculeList, SearchPathList)
- return Dependency
-
-
## CustomMakefile class
#
# This class encapsules makefie and its generation for module. It uses template to generate
@@ -1451,7 +1443,6 @@ cleanlib: #
def __init__(self, PlatformAutoGen):
BuildFile.__init__(self, PlatformAutoGen)
- self.ModuleBuildCommandList = []
self.ModuleMakefileList = []
self.IntermediateDirectoryList = []
self.ModuleBuildDirectoryList = []
diff --git a/BaseTools/Source/Python/AutoGen/GenPcdDb.py b/BaseTools/Source/Python/AutoGen/GenPcdDb.py index ad5dae0..28df647 100644 --- a/BaseTools/Source/Python/AutoGen/GenPcdDb.py +++ b/BaseTools/Source/Python/AutoGen/GenPcdDb.py @@ -435,23 +435,6 @@ class DbStringHeadTableItemList(DbItemList): self.ListSize += self.ItemSize
return self.ListSize
-## DbSkuHeadTableItemList
-#
-# The class holds the Sku header value table
-#
-class DbSkuHeadTableItemList (DbItemList):
- def __init__(self, ItemSize, DataList=None, RawDataList=None):
- DbItemList.__init__(self, ItemSize, DataList, RawDataList)
-
- def PackData(self):
- PackStr = "=LL"
- Buffer = bytearray()
- for Data in self.RawDataList:
- Buffer += pack(PackStr,
- GetIntegerValue(Data[0]),
- GetIntegerValue(Data[1]))
- return Buffer
-
## DbSizeTableItemList
#
# The class holds the size table
diff --git a/BaseTools/Source/Python/AutoGen/GenVar.py b/BaseTools/Source/Python/AutoGen/GenVar.py index f2ad54b..54d7864 100644 --- a/BaseTools/Source/Python/AutoGen/GenVar.py +++ b/BaseTools/Source/Python/AutoGen/GenVar.py @@ -28,7 +28,6 @@ class VariableMgr(object): self.DefaultStoreMap = DefaultStoreMap
self.SkuIdMap = SkuIdMap
self.VpdRegionSize = 0
- self.VpdRegionOffset = 0
self.NVHeaderBuff = None
self.VarDefaultBuff = None
self.VarDeltaBuff = None
@@ -39,9 +38,6 @@ class VariableMgr(object): def SetVpdRegionMaxSize(self, maxsize):
self.VpdRegionSize = maxsize
- def SetVpdRegionOffset(self, vpdoffset):
- self.VpdRegionOffset = vpdoffset
-
def PatchNVStoreDefaultMaxSize(self, maxsize):
if not self.NVHeaderBuff:
return ""
diff --git a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py index 65a2176..aa0b716 100755 --- a/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py +++ b/BaseTools/Source/Python/AutoGen/ModuleAutoGen.py @@ -254,7 +254,6 @@ class ModuleAutoGen(AutoGen): self.AutoGenDepSet = set()
self.ReferenceModules = []
self.ConstPcd = {}
- self.FileDependCache = {}
def __init_platform_info__(self):
pinfo = self.DataPipe.Get("P_Info")
@@ -677,50 +676,6 @@ class ModuleAutoGen(AutoGen): self.BuildRuleOrder = ['.%s' % Ext for Ext in self.BuildRuleOrder.split()]
return RetVal
- ## Get include path list from tool option for the module build
- #
- # @retval list The include path list
- #
- @cached_property
- def BuildOptionIncPathList(self):
- #
- # Regular expression for finding Include Directories, the difference between MSFT and INTEL/GCC
- # is the former use /I , the Latter used -I to specify include directories
- #
- if self.PlatformInfo.ToolChainFamily in (TAB_COMPILER_MSFT):
- BuildOptIncludeRegEx = gBuildOptIncludePatternMsft
- elif self.PlatformInfo.ToolChainFamily in ('INTEL', 'GCC'):
- BuildOptIncludeRegEx = gBuildOptIncludePatternOther
- else:
- #
- # New ToolChainFamily, don't known whether there is option to specify include directories
- #
- return []
-
- RetVal = []
- for Tool in ('CC', 'PP', 'VFRPP', 'ASLPP', 'ASLCC', 'APP', 'ASM'):
- try:
- FlagOption = self.BuildOption[Tool]['FLAGS']
- except KeyError:
- FlagOption = ''
-
- IncPathList = [NormPath(Path, self.Macros) for Path in BuildOptIncludeRegEx.findall(FlagOption)]
-
- #
- # EDK II modules must not reference header files outside of the packages they depend on or
- # within the module's directory tree. Report error if violation.
- #
- if GlobalData.gDisableIncludePathCheck == False:
- for Path in IncPathList:
- if (Path not in self.IncludePathList) and (CommonPath([Path, self.MetaFile.Dir]) != self.MetaFile.Dir):
- ErrMsg = "The include directory for the EDK II module in this line is invalid %s specified in %s FLAGS '%s'" % (Path, Tool, FlagOption)
- EdkLogger.error("build",
- PARAMETER_INVALID,
- ExtraData=ErrMsg,
- File=str(self.MetaFile))
- RetVal += IncPathList
- return RetVal
-
## Return a list of files which can be built from source
#
# What kind of files can be built is determined by build rules in
diff --git a/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py b/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py index dac8145..68b9d89 100644 --- a/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py +++ b/BaseTools/Source/Python/AutoGen/PlatformAutoGen.py @@ -190,9 +190,9 @@ class PlatformAutoGen(AutoGen): Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
else:
Ma.CreateMakeFile(CreateModuleMakeFile)
- self.CreateLibModuelDirs()
+ self.CreateLibModuleDirs()
- def CreateLibModuelDirs(self):
+ def CreateLibModuleDirs(self):
# No need to create makefile for the platform more than once.
if self.MakeFileName:
return
@@ -251,7 +251,6 @@ class PlatformAutoGen(AutoGen): VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
- VariableInfo.SetVpdRegionOffset(VpdRegionBase)
Index = 0
for Pcd in sorted(DynamicPcdSet):
pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
@@ -990,11 +989,6 @@ class PlatformAutoGen(AutoGen): def _BuildOptionWithToolDef(self, ToolDef):
return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
- ## Return the build options specific for EDK modules in this platform
- @cached_property
- def EdkBuildOption(self):
- return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
-
## Return the build options specific for EDKII modules in this platform
@cached_property
def EdkIIBuildOption(self):
diff --git a/BaseTools/Source/Python/AutoGen/UniClassObject.py b/BaseTools/Source/Python/AutoGen/UniClassObject.py index b16330e..6d62fa0 100644 --- a/BaseTools/Source/Python/AutoGen/UniClassObject.py +++ b/BaseTools/Source/Python/AutoGen/UniClassObject.py @@ -168,7 +168,6 @@ codecs.register(Ucs2Search) class StringDefClassObject(object):
def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
self.StringName = ''
- self.StringNameByteList = []
self.StringValue = ''
self.StringValueByteList = ''
self.Token = 0
@@ -178,7 +177,6 @@ class StringDefClassObject(object): if Name is not None:
self.StringName = Name
- self.StringNameByteList = UniToHexList(Name)
if Value is not None:
self.StringValue = Value + u'\x00' # Add a NULL at string tail
self.StringValueByteList = UniToHexList(self.StringValue)
@@ -361,13 +359,6 @@ class UniFileClassObject(object): self.AddStringToList(Name, Language, Value)
#
- # Get include file list and load them
- #
- def GetIncludeFile(self, Item, Dir):
- FileName = Item[Item.find(u'#include ') + len(u'#include ') :Item.find(u' ', len(u'#include '))][1:-1]
- self.LoadUniFile(FileName)
-
- #
# Pre-process before parse .uni file
#
def PreProcess(self, File):
@@ -601,26 +592,6 @@ class UniFileClassObject(object): Item.Referenced = True
#
- # Search the string in language definition by Name
- #
- def FindStringValue(self, Name, Lang):
- if Name in self.OrderedStringDict[Lang]:
- ItemIndexInList = self.OrderedStringDict[Lang][Name]
- return self.OrderedStringList[Lang][ItemIndexInList]
-
- return None
-
- #
- # Search the string in language definition by Token
- #
- def FindByToken(self, Token, Lang):
- for Item in self.OrderedStringList[Lang]:
- if Item.Token == Token:
- return Item
-
- return None
-
- #
# Re-order strings and re-generate tokens
#
def ReToken(self):
diff --git a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py index ad8c9b5..d4fc31a 100644 --- a/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py +++ b/BaseTools/Source/Python/AutoGen/ValidCheckingInfoObject.py @@ -225,10 +225,8 @@ class VAR_CHECK_PCD_VALID_OBJ(object): self.data = set()
try:
self.StorageWidth = MAX_SIZE_TYPE[self.PcdDataType]
- self.ValidData = True
except:
self.StorageWidth = 0
- self.ValidData = False
def __eq__(self, validObj):
return validObj and self.VarOffset == validObj.VarOffset
diff --git a/BaseTools/Source/Python/BPDG/BPDG.py b/BaseTools/Source/Python/BPDG/BPDG.py index 283e08a..86f773c 100644 --- a/BaseTools/Source/Python/BPDG/BPDG.py +++ b/BaseTools/Source/Python/BPDG/BPDG.py @@ -18,7 +18,6 @@ from __future__ import print_function from __future__ import absolute_import
import Common.LongFilePathOs as os
import sys
-import encodings.ascii
from optparse import OptionParser
from Common import EdkLogger
diff --git a/BaseTools/Source/Python/Capsule/GenerateCapsule.py b/BaseTools/Source/Python/Capsule/GenerateCapsule.py index a773cfb..fd3ee4a 100644 --- a/BaseTools/Source/Python/Capsule/GenerateCapsule.py +++ b/BaseTools/Source/Python/Capsule/GenerateCapsule.py @@ -10,7 +10,7 @@ # keep the tool as simple as possible, it has the following limitations:
# * Do not support vendor code bytes in a capsule.
#
-# Copyright (c) 2018 - 2022, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2018 - 2024, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
@@ -38,11 +38,68 @@ from Common.Edk2.Capsule.FmpPayloadHeader import FmpPayloadHeaderClass # Globals for help information
#
__prog__ = 'GenerateCapsule'
-__version__ = '0.10'
-__copyright__ = 'Copyright (c) 2022, Intel Corporation. All rights reserved.'
+__version__ = '0.11'
+__copyright__ = 'Copyright (c) 2024, Intel Corporation. All rights reserved.'
__description__ = 'Generate a capsule.\n'
-def SignPayloadSignTool (Payload, ToolPath, PfxFile, SubjectName, Verbose = False):
+#
+# Globals definitions
+#
+HASH_ALG_MD5 = 'md5'
+HASH_ALG_SHA1 = 'sha1'
+HASH_ALG_SHA256 = 'sha256'
+HASH_ALG_SHA384 = 'sha384'
+HASH_ALG_SHA512 = 'sha512'
+DEFAULT_HASH_ALGORITHM = HASH_ALG_SHA256
+
+TOOL_SIGN_TOOL = 0x0
+TOOL_OPENSSL = 0x1
+
+SIGN_TOOL_HASH_ALG_EOL_LIST = [
+ HASH_ALG_MD5,
+ HASH_ALG_SHA1,
+ ]
+
+SIGN_TOOL_HASH_ALG_SUPPORT_LIST = [
+ HASH_ALG_SHA256,
+ HASH_ALG_SHA384,
+ HASH_ALG_SHA512,
+ ]
+
+OPENSSL_HASH_ALG_EOL_LIST = [
+ HASH_ALG_MD5,
+ HASH_ALG_SHA1,
+ ]
+
+OPENSSL_HASH_ALG_SUPPORT_LIST = [
+ HASH_ALG_SHA256,
+ HASH_ALG_SHA384,
+ HASH_ALG_SHA512,
+ ]
+
+def CheckHashAlgorithmSupported (ToolType, HashAlgorithm):
+ if ToolType == TOOL_SIGN_TOOL:
+ EolList = SIGN_TOOL_HASH_ALG_EOL_LIST
+ SupportList = SIGN_TOOL_HASH_ALG_SUPPORT_LIST
+ elif ToolType == TOOL_OPENSSL:
+ EolList = OPENSSL_HASH_ALG_EOL_LIST
+ SupportList = OPENSSL_HASH_ALG_SUPPORT_LIST
+ else:
+ raise ValueError ('GenerateCapsule: error: unsupported type of tool.')
+
+ if HashAlgorithm.lower () in EolList:
+ raise ValueError ('GenerateCapsule: error: hash algorithm [{HashAlgorithm}] had been EOL.'.format (HashAlgorithm = HashAlgorithm))
+ elif HashAlgorithm.lower () not in SupportList:
+ raise ValueError ('GenerateCapsule: error: hash algorithm [{HashAlgorithm}] is not supported.'.format (HashAlgorithm = HashAlgorithm))
+
+ return
+
+def SignPayloadSignTool (Payload, ToolPath, PfxFile, SubjectName, HashAlgorithm = DEFAULT_HASH_ALGORITHM, Verbose = False):
+ #
+ # Check the hash algorithm is supported
+ #
+ CheckHashAlgorithmSupported (TOOL_SIGN_TOOL, HashAlgorithm)
+
#
# Create a temporary directory
#
@@ -70,12 +127,12 @@ def SignPayloadSignTool (Payload, ToolPath, PfxFile, SubjectName, Verbose = Fals ToolPath = ''
Command = ''
Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'signtool.exe'))
- Command = Command + 'sign /fd sha256 /p7ce DetachedSignedData /p7co 1.2.840.113549.1.7.2 '
+ Command = Command + 'sign /fd {HashAlgorithm} /p7ce DetachedSignedData /p7co 1.2.840.113549.1.7.2 '.format (HashAlgorithm = HashAlgorithm)
Command = Command + '/p7 {TempDir} '.format (TempDir = TempDirectoryName)
if PfxFile is not None:
Command = Command + '/f {PfxFile} '.format (PfxFile = PfxFile)
if SubjectName is not None:
- Command = Command + '/n {SubjectName} '.format (SubjectName = SubjectName)
+ Command = Command + '/n "{SubjectName}" '.format (SubjectName = SubjectName)
Command = Command + TempFileName
if Verbose:
print (Command)
@@ -108,11 +165,16 @@ def SignPayloadSignTool (Payload, ToolPath, PfxFile, SubjectName, Verbose = Fals shutil.rmtree (TempDirectoryName)
return Signature
-def VerifyPayloadSignTool (Payload, CertData, ToolPath, PfxFile, SubjectName, Verbose = False):
+def VerifyPayloadSignTool (Payload, CertData, ToolPath, PfxFile, SubjectName, HashAlgorithm = DEFAULT_HASH_ALGORITHM, Verbose = False):
print ('signtool verify is not supported.')
raise ValueError ('GenerateCapsule: error: signtool verify is not supported.')
-def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
+def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, HashAlgorithm = DEFAULT_HASH_ALGORITHM, Verbose = False):
+ #
+ # Check the hash algorithm is supported
+ #
+ CheckHashAlgorithmSupported (TOOL_OPENSSL, HashAlgorithm)
+
#
# Build openssl command
#
@@ -120,7 +182,7 @@ def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCer ToolPath = ''
Command = ''
Command = Command + '"{Path}" '.format (Path = os.path.join (ToolPath, 'openssl'))
- Command = Command + 'smime -sign -binary -outform DER -md sha256 '
+ Command = Command + 'smime -sign -binary -outform DER -md {HashAlgorithm} '.format (HashAlgorithm = HashAlgorithm)
Command = Command + '-signer "{Private}" -certfile "{Public}"'.format (Private = SignerPrivateCertFile, Public = OtherPublicCertFile)
if Verbose:
print (Command)
@@ -141,7 +203,7 @@ def SignPayloadOpenSsl (Payload, ToolPath, SignerPrivateCertFile, OtherPublicCer return Signature
-def VerifyPayloadOpenSsl (Payload, CertData, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, Verbose = False):
+def VerifyPayloadOpenSsl (Payload, CertData, ToolPath, SignerPrivateCertFile, OtherPublicCertFile, TrustedPublicCertFile, HashAlgorithm = DEFAULT_HASH_ALGORITHM, Verbose = False):
#
# Create a temporary directory
#
@@ -251,8 +313,9 @@ if __name__ == '__main__': LowestSupportedVersion = ConvertJsonValue (Config, 'LowestSupportedVersion', ValidateUnsignedInteger, Required = False)
HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
+ HashAlgorithm = ConvertJsonValue (Config, 'HashAlgorithm', str, Required = False, Default = DEFAULT_HASH_ALGORITHM)
SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
- SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', os.path.expandvars, Required = False, Default = None, Open = True)
+ SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', str, Required = False, Default = None, Open = False)
OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
@@ -267,6 +330,7 @@ if __name__ == '__main__': MonotonicCount,
HardwareInstance,
UpdateImageIndex,
+ HashAlgorithm,
SignToolPfxFile,
SignToolSubjectName,
OpenSslSignerPrivateCertFile,
@@ -307,8 +371,9 @@ if __name__ == '__main__': HardwareInstance = ConvertJsonValue (Config, 'HardwareInstance', ValidateUnsignedInteger, Required = False, Default = 0)
UpdateImageIndex = ConvertJsonValue (Config, 'UpdateImageIndex', ValidateUnsignedInteger, Required = False, Default = 1)
MonotonicCount = ConvertJsonValue (Config, 'MonotonicCount', ValidateUnsignedInteger, Required = False, Default = 0)
+ HashAlgorithm = ConvertJsonValue (Config, 'HashAlgorithm', str, Required = False, Default = DEFAULT_HASH_ALGORITHM)
SignToolPfxFile = ConvertJsonValue (Config, 'SignToolPfxFile', os.path.expandvars, Required = False, Default = None, Open = True)
- SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', os.path.expandvars, Required = False, Default = None, Open = True)
+ SignToolSubjectName = ConvertJsonValue (Config, 'SignToolSubjectName', str, Required = False, Default = None, Open = False)
OpenSslSignerPrivateCertFile = ConvertJsonValue (Config, 'OpenSslSignerPrivateCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
OpenSslOtherPublicCertFile = ConvertJsonValue (Config, 'OpenSslOtherPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
OpenSslTrustedPublicCertFile = ConvertJsonValue (Config, 'OpenSslTrustedPublicCertFile', os.path.expandvars, Required = False, Default = None, Open = True)
@@ -334,6 +399,7 @@ if __name__ == '__main__': MonotonicCount,
HardwareInstance,
UpdateImageIndex,
+ HashAlgorithm,
SignToolPfxFile,
SignToolSubjectName,
OpenSslSignerPrivateCertFile,
@@ -354,6 +420,7 @@ if __name__ == '__main__': "Payload": PayloadDescriptor.Payload,
"HardwareInstance": str(PayloadDescriptor.HardwareInstance),
"UpdateImageIndex": str(PayloadDescriptor.UpdateImageIndex),
+ "HashAlgorithm": str(PayloadDescriptor.HashAlgorithm),
"SignToolPfxFile": str(PayloadDescriptor.SignToolPfxFile),
"SignToolSubjectName": str(PayloadDescriptor.SignToolSubjectName),
"OpenSslSignerPrivateCertFile": str(PayloadDescriptor.OpenSslSignerPrivateCertFile),
@@ -409,11 +476,14 @@ if __name__ == '__main__': if args.HardwareInstance:
print ('GenerateCapsule: error: Argument --hardware-instance conflicts with Argument -j')
sys.exit (1)
+ if args.HashAlgorithm:
+ print ('GenerateCapsule: error: Argument --hash-algorithm conflicts with Argument -j')
+ sys.exit (1)
if args.SignToolPfxFile:
print ('GenerateCapsule: error: Argument --pfx-file conflicts with Argument -j')
sys.exit (1)
if args.SignToolSubjectName:
- print ('GenerateCapsule: error: Argument --SubjectName conflicts with Argument -j')
+ print ('GenerateCapsule: error: Argument --subject-name conflicts with Argument -j')
sys.exit (1)
if args.OpenSslSignerPrivateCertFile:
print ('GenerateCapsule: error: Argument --signer-private-cert conflicts with Argument -j')
@@ -437,6 +507,7 @@ if __name__ == '__main__': MonotonicCount = 0,
HardwareInstance = 0,
UpdateImageIndex = 1,
+ HashAlgorithm = None,
SignToolPfxFile = None,
SignToolSubjectName = None,
OpenSslSignerPrivateCertFile = None,
@@ -452,6 +523,7 @@ if __name__ == '__main__': self.MonotonicCount = MonotonicCount
self.HardwareInstance = HardwareInstance
self.UpdateImageIndex = UpdateImageIndex
+ self.HashAlgorithm = HashAlgorithm
self.SignToolPfxFile = SignToolPfxFile
self.SignToolSubjectName = SignToolSubjectName
self.OpenSslSignerPrivateCertFile = OpenSslSignerPrivateCertFile
@@ -523,6 +595,9 @@ if __name__ == '__main__': if args.OutputFile is None:
raise argparse.ArgumentTypeError ('--decode requires --output')
+ if self.HashAlgorithm is None:
+ self.HashAlgorithm = DEFAULT_HASH_ALGORITHM
+
if self.UseSignTool:
if self.SignToolPfxFile is not None:
self.SignToolPfxFile.close()
@@ -568,6 +643,7 @@ if __name__ == '__main__': args.MonotonicCount,
args.HardwareInstance,
args.UpdateImageIndex,
+ args.HashAlgorithm,
args.SignToolPfxFile,
args.SignToolSubjectName,
args.OpenSslSignerPrivateCertFile,
@@ -613,6 +689,7 @@ if __name__ == '__main__': SinglePayloadDescriptor.SigningToolPath,
SinglePayloadDescriptor.SignToolPfxFile,
SinglePayloadDescriptor.SignToolSubjectName,
+ HashAlgorithm = SinglePayloadDescriptor.HashAlgorithm,
Verbose = args.Verbose
)
else:
@@ -622,6 +699,7 @@ if __name__ == '__main__': SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
+ HashAlgorithm = SinglePayloadDescriptor.HashAlgorithm,
Verbose = args.Verbose
)
except Exception as Msg:
@@ -693,6 +771,7 @@ if __name__ == '__main__': args.MonotonicCount,
args.HardwareInstance,
args.UpdateImageIndex,
+ args.HashAlgorithm,
args.SignToolPfxFile,
args.SignToolSubjectName,
args.OpenSslSignerPrivateCertFile,
@@ -738,6 +817,7 @@ if __name__ == '__main__': None,
HardwareInstance,
UpdateImageIndex,
+ PayloadDescriptorList[Index].HashAlgorithm,
PayloadDescriptorList[Index].SignToolPfxFile,
PayloadDescriptorList[Index].SignToolSubjectName,
PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
@@ -751,7 +831,10 @@ if __name__ == '__main__': for Index in range (0, FmpCapsuleHeader.PayloadItemCount):
if Index > 0:
PayloadDecodeFile = FmpCapsuleHeader.GetFmpCapsuleImageHeader (Index).Payload
- PayloadDescriptorList.append (PayloadDescriptor (PayloadDecodeFile,
+ PayloadDescriptorList.append (PayloadDescriptor (
+ PayloadDecodeFile,
+ None,
+ None,
None,
None,
None,
@@ -777,6 +860,7 @@ if __name__ == '__main__': None,
HardwareInstance,
UpdateImageIndex,
+ PayloadDescriptorList[Index].HashAlgorithm,
PayloadDescriptorList[Index].SignToolPfxFile,
PayloadDescriptorList[Index].SignToolSubjectName,
PayloadDescriptorList[Index].OpenSslSignerPrivateCertFile,
@@ -812,6 +896,7 @@ if __name__ == '__main__': SinglePayloadDescriptor.SigningToolPath,
SinglePayloadDescriptor.SignToolPfxFile,
SinglePayloadDescriptor.SignToolSubjectName,
+ HashAlgorithm = SinglePayloadDescriptor.HashAlgorithm,
Verbose = args.Verbose
)
else:
@@ -822,6 +907,7 @@ if __name__ == '__main__': SinglePayloadDescriptor.OpenSslSignerPrivateCertFile,
SinglePayloadDescriptor.OpenSslOtherPublicCertFile,
SinglePayloadDescriptor.OpenSslTrustedPublicCertFile,
+ HashAlgorithm = SinglePayloadDescriptor.HashAlgorithm,
Verbose = args.Verbose
)
except Exception as Msg:
@@ -993,6 +1079,9 @@ if __name__ == '__main__': parser.add_argument ("--lsv", dest = 'LowestSupportedVersion', type = ValidateUnsignedInteger,
help = "The 32-bit lowest supported version of the binary payload (e.g. 0x11223344 or 5678). Required for encode operations.")
+ parser.add_argument ("--hash-algorithm", dest = 'HashAlgorithm', type = str,
+ help = "Hash algorithm for the payload digest.")
+
parser.add_argument ("--pfx-file", dest='SignToolPfxFile', type=argparse.FileType('rb'),
help="signtool PFX certificate filename.")
parser.add_argument ("--subject-name", dest='SignToolSubjectName',
diff --git a/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py b/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py index a29ac21..b82c518 100644 --- a/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py +++ b/BaseTools/Source/Python/Capsule/WindowsCapsuleSupportHelper.py @@ -23,11 +23,6 @@ from edk2toollib.windows.locate_tools import FindToolInWinSdk class WindowsCapsuleSupportHelper(object):
- def RegisterHelpers(self, obj):
- fp = os.path.abspath(__file__)
- obj.Register("PackageWindowsCapsuleFiles", WindowsCapsuleSupportHelper.PackageWindowsCapsuleFiles, fp)
-
-
@staticmethod
def PackageWindowsCapsuleFiles(OutputFolder, ProductName, ProductFmpGuid, CapsuleVersion_DotString,
CapsuleVersion_HexString, ProductFwProvider, ProductFwMfgName, ProductFwDesc, CapsuleFileName, PfxFile=None, PfxPass=None,
diff --git a/BaseTools/Source/Python/Common/Expression.py b/BaseTools/Source/Python/Common/Expression.py index 9d9cb0c..f8b34aa 100644 --- a/BaseTools/Source/Python/Common/Expression.py +++ b/BaseTools/Source/Python/Common/Expression.py @@ -917,7 +917,7 @@ class ValueExpressionEx(ValueExpression): TmpList.append('0x%02x' % ((TmpValue >> I * 8) & 0xff))
PcdValue = '{' + ', '.join(TmpList) + '}'
except:
- if PcdValue.strip().startswith('{'):
+ if PcdValue.strip().startswith('{') and PcdValue.strip().endswith('}'):
PcdValueList = SplitPcdValueString(PcdValue.strip()[1:-1])
LabelDict = {}
NewPcdValueList = []
@@ -969,8 +969,8 @@ class ValueExpressionEx(ValueExpression): NewPcdValueList.append(Item)
AllPcdValueList = []
+ Size = 0
for Item in NewPcdValueList:
- Size = 0
ValueStr = ''
TokenSpaceGuidName = ''
if Item.startswith(TAB_GUID) and Item.endswith(')'):
@@ -1027,6 +1027,8 @@ class ValueExpressionEx(ValueExpression): if Size > 0:
PcdValue = '{' + ', '.join(AllPcdValueList) + '}'
+ else:
+ raise BadExpression("PCD with value '%s' cannot be used. Please provide a valid value of at least one byte." % (self.PcdValue))
else:
raise BadExpression("Type: %s, Value: %s, %s"%(self.PcdType, PcdValue, Value))
diff --git a/BaseTools/Source/Python/Common/GlobalData.py b/BaseTools/Source/Python/Common/GlobalData.py index 11849e8..dd5316d 100755 --- a/BaseTools/Source/Python/Common/GlobalData.py +++ b/BaseTools/Source/Python/Common/GlobalData.py @@ -122,4 +122,5 @@ gEnableGenfdsMultiThread = True gSikpAutoGenCache = set()
# Common lock for the file access in multiple process AutoGens
file_lock = None
-
+gStackCookieValues32 = []
+gStackCookieValues64 = []
diff --git a/BaseTools/Source/Python/Common/MultipleWorkspace.py b/BaseTools/Source/Python/Common/MultipleWorkspace.py index ad5d485..0ddb14f 100644 --- a/BaseTools/Source/Python/Common/MultipleWorkspace.py +++ b/BaseTools/Source/Python/Common/MultipleWorkspace.py @@ -34,7 +34,7 @@ class MultipleWorkspace(object): #
@classmethod
def convertPackagePath(cls, Ws, Path):
- if str(os.path.normcase (Path)).startswith(Ws):
+ if str(os.path.normcase (os.path.normpath(Path))).startswith(os.path.normcase(os.path.normpath(Ws))):
return os.path.join(Ws, os.path.relpath(Path, Ws))
return Path
diff --git a/BaseTools/Source/Python/Common/TargetTxtClassObject.py b/BaseTools/Source/Python/Common/TargetTxtClassObject.py index 363c383..e741d03 100644 --- a/BaseTools/Source/Python/Common/TargetTxtClassObject.py +++ b/BaseTools/Source/Python/Common/TargetTxtClassObject.py @@ -176,7 +176,7 @@ class TargetTxtDict(): ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], ConfDirectoryPath)
else:
if "CONF_PATH" in os.environ:
- ConfDirectoryPath = os.path.normcase(os.path.normpath(os.environ["CONF_PATH"]))
+ ConfDirectoryPath = os.path.normpath(os.environ["CONF_PATH"])
else:
# Get standard WORKSPACE/Conf use the absolute path to the WORKSPACE/Conf
ConfDirectoryPath = mws.join(os.environ["WORKSPACE"], 'Conf')
diff --git a/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py b/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py index 0e59028..7211714 100644 --- a/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py +++ b/BaseTools/Source/Python/Common/Uefi/Capsule/UefiCapsuleHeader.py @@ -91,9 +91,9 @@ class UefiCapsuleHeaderClass (object): Buffer[0:self._StructSize]
)
if HeaderSize < self._StructSize:
- raise ValueError
+ raise ValueError("HeaderSize of {0} doesn't match _StructSize of {1}".format(HeaderSize, self._StructSize))
if CapsuleImageSize != len (Buffer):
- raise ValueError
+ raise ValueError("CapsuleImageSize of {0} doesn't match buffer length of {1}".format(CapsuleImageSize, len(Buffer)))
self.CapsuleGuid = uuid.UUID (bytes_le = CapsuleGuid)
self.HeaderSize = HeaderSize
self.OemFlags = Flags & 0xffff
diff --git a/BaseTools/Source/Python/Ecc/CParser4/CLexer.py b/BaseTools/Source/Python/Ecc/CParser4/CLexer.py index a2cc5bf..f0c6e66 100644 --- a/BaseTools/Source/Python/Ecc/CParser4/CLexer.py +++ b/BaseTools/Source/Python/Ecc/CParser4/CLexer.py @@ -1,7 +1,6 @@ # Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
from io import StringIO
-from typing.io import TextIO
import sys
diff --git a/BaseTools/Source/Python/Ecc/CParser4/CParser.py b/BaseTools/Source/Python/Ecc/CParser4/CParser.py index 31d23d5..3946497f8 100644 --- a/BaseTools/Source/Python/Ecc/CParser4/CParser.py +++ b/BaseTools/Source/Python/Ecc/CParser4/CParser.py @@ -2,7 +2,6 @@ # encoding: utf-8
from antlr4 import *
from io import StringIO
-from typing.io import TextIO
import sys
diff --git a/BaseTools/Source/Python/Ecc/Check.py b/BaseTools/Source/Python/Ecc/Check.py index 4561961..c31ee70 100644 --- a/BaseTools/Source/Python/Ecc/Check.py +++ b/BaseTools/Source/Python/Ecc/Check.py @@ -1092,7 +1092,7 @@ class Check(object): RecordSet = EccGlobalData.gDb.TblInf.Exec(SqlCommand)
for Record in RecordSet:
Path = Record[1]
- Path = Path.upper().replace('\X64', '').replace('\IA32', '').replace('\EBC', '').replace('\IPF', '').replace('\ARM', '')
+ Path = Path.upper().replace(r'\X64', '').replace(r'\IA32', '').replace(r'\EBC', '').replace(r'\IPF', '').replace(r'\ARM', '')
if Path in InfPathList:
if not EccGlobalData.gException.IsException(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, Record[2]):
EccGlobalData.gDb.TblReport.Insert(ERROR_META_DATA_FILE_CHECK_MODULE_FILE_NO_USE, OtherMsg="The source file [%s] is existing in module directory but it is not described in INF file." % (Record[2]), BelongsToTable='File', BelongsToItem=Record[0])
@@ -1501,7 +1501,6 @@ def FindPara(FilePath, Para, CallingLine): if Line.startswith('%s = ' % Para):
Line = Line.strip()
return Line
- break
return ''
diff --git a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py index d8d6aff..5bd3eec 100644 --- a/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py +++ b/BaseTools/Source/Python/Ecc/CodeFragmentCollector.py @@ -74,8 +74,6 @@ class CodeFragmentCollector: self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
self.TokenReleaceList = []
- self.__Token = ""
- self.__SkippedChars = ""
## __EndOfFile() method
#
@@ -98,21 +96,6 @@ class CodeFragmentCollector: else:
return False
- ## __EndOfLine() method
- #
- # Judge current buffer pos is at line end
- #
- # @param self The object pointer
- # @retval True Current File buffer position is at line end
- # @retval False Current File buffer position is NOT at line end
- #
- def __EndOfLine(self):
- SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
- if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
- return True
- else:
- return False
-
## Rewind() method
#
# Reset file data buffer to the initial state
@@ -123,25 +106,6 @@ class CodeFragmentCollector: self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
- ## __UndoOneChar() method
- #
- # Go back one char in the file buffer
- #
- # @param self The object pointer
- # @retval True Successfully go back one char
- # @retval False Not able to go back one char as file beginning reached
- #
- def __UndoOneChar(self):
-
- if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
- return False
- elif self.CurrentOffsetWithinLine == 0:
- self.CurrentLineNumber -= 1
- self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
- else:
- self.CurrentOffsetWithinLine -= 1
- return True
-
## __GetOneChar() method
#
# Move forward one char in the file buffer
@@ -211,32 +175,6 @@ class CodeFragmentCollector: def __CurrentLine(self):
return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
- ## __InsertComma() method
- #
- # Insert ',' to replace PP
- #
- # @param self The object pointer
- # @retval List current line contents
- #
- def __InsertComma(self, Line):
-
-
- if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
- BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
- if BeforeHashPart.rstrip().endswith(T_CHAR_COMMA) or BeforeHashPart.rstrip().endswith(';'):
- return
-
- if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
- return
-
- if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
- return
-
- if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
- return
-
- self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
-
## PreprocessFile() method
#
# Preprocess file contents, replace comments with spaces.
diff --git a/BaseTools/Source/Python/Ecc/Configuration.py b/BaseTools/Source/Python/Ecc/Configuration.py index d4aab1d..9a9ca49 100644 --- a/BaseTools/Source/Python/Ecc/Configuration.py +++ b/BaseTools/Source/Python/Ecc/Configuration.py @@ -432,7 +432,7 @@ class Configuration(object): # test that our dict and out class still match in contents.
#
if __name__ == '__main__':
- myconfig = Configuration("BaseTools\Source\Python\Ecc\config.ini")
+ myconfig = Configuration(r"BaseTools\Source\Python\Ecc\config.ini")
for each in myconfig.__dict__:
if each == "Filename":
continue
diff --git a/BaseTools/Source/Python/Ecc/Database.py b/BaseTools/Source/Python/Ecc/Database.py index a5b70c5..f31dd93 100644 --- a/BaseTools/Source/Python/Ecc/Database.py +++ b/BaseTools/Source/Python/Ecc/Database.py @@ -78,7 +78,6 @@ class Database(object): self.Conn.execute("PRAGMA page_size=4096")
self.Conn.execute("PRAGMA synchronous=OFF")
# to avoid non-ascii character conversion error
- self.Conn.text_factory = str
self.Cur = self.Conn.cursor()
self.TblDataModel = TableDataModel(self.Cur)
@@ -211,59 +210,6 @@ class Database(object): # Update the field "BelongsToFunction" for each Identifier
#
#
- def UpdateIdentifierBelongsToFunction_disabled(self):
- EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
-
- SqlCommand = """select ID, BelongsToFile, StartLine, EndLine, Model from Identifier"""
- EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
- self.Cur.execute(SqlCommand)
- Records = self.Cur.fetchall()
- for Record in Records:
- IdentifierID = Record[0]
- BelongsToFile = Record[1]
- StartLine = Record[2]
- EndLine = Record[3]
- Model = Record[4]
-
- #
- # Check whether an identifier belongs to a function
- #
- EdkLogger.debug(4, "For common identifiers ... ")
- SqlCommand = """select ID from Function
- where StartLine < %s and EndLine > %s
- and BelongsToFile = %s""" % (StartLine, EndLine, BelongsToFile)
- EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
- self.Cur.execute(SqlCommand)
- IDs = self.Cur.fetchall()
- for ID in IDs:
- SqlCommand = """Update Identifier set BelongsToFunction = %s where ID = %s""" % (ID[0], IdentifierID)
- EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
- self.Cur.execute(SqlCommand)
-
- #
- # Check whether the identifier is a function header
- #
- EdkLogger.debug(4, "For function headers ... ")
- if Model == DataClass.MODEL_IDENTIFIER_COMMENT:
- SqlCommand = """select ID from Function
- where StartLine = %s + 1
- and BelongsToFile = %s""" % (EndLine, BelongsToFile)
- EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
- self.Cur.execute(SqlCommand)
- IDs = self.Cur.fetchall()
- for ID in IDs:
- SqlCommand = """Update Identifier set BelongsToFunction = %s, Model = %s where ID = %s""" % (ID[0], DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER, IdentifierID)
- EdkLogger.debug(4, "SqlCommand: %s" %SqlCommand)
- self.Cur.execute(SqlCommand)
-
- EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers ... DONE")
-
-
- ## UpdateIdentifierBelongsToFunction
- #
- # Update the field "BelongsToFunction" for each Identifier
- #
- #
def UpdateIdentifierBelongsToFunction(self):
EdkLogger.verbose("Update 'BelongsToFunction' for Identifiers started ...")
diff --git a/BaseTools/Source/Python/Ecc/EccMain.py b/BaseTools/Source/Python/Ecc/EccMain.py index a349cd8..5211d09 100644 --- a/BaseTools/Source/Python/Ecc/EccMain.py +++ b/BaseTools/Source/Python/Ecc/EccMain.py @@ -63,7 +63,7 @@ class Ecc(object): self.ParseOption()
EdkLogger.info(time.strftime("%H:%M:%S, %b.%d %Y ", time.localtime()) + "[00:00]" + "\n")
- WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
+ WorkspaceDir = os.path.normpath(os.environ["WORKSPACE"])
os.environ["WORKSPACE"] = WorkspaceDir
# set multiple workspace
diff --git a/BaseTools/Source/Python/Ecc/EccToolError.py b/BaseTools/Source/Python/Ecc/EccToolError.py index 734a2b8..ba0bcff 100644 --- a/BaseTools/Source/Python/Ecc/EccToolError.py +++ b/BaseTools/Source/Python/Ecc/EccToolError.py @@ -171,7 +171,7 @@ gEccErrorMessage = { ERROR_DOXYGEN_CHECK_FUNCTION_HEADER : "The function headers should follow Doxygen special documentation blocks in section 2.3.5",
ERROR_DOXYGEN_CHECK_COMMENT_DESCRIPTION : """The first line of text in a comment block should be a brief description of the element being documented and the brief description must end with a period.""",
ERROR_DOXYGEN_CHECK_COMMENT_FORMAT : "For comment line with '///< ... text ...' format, if it is used, it should be after the code section",
- ERROR_DOXYGEN_CHECK_COMMAND : "Only Doxygen commands '@bug', '@todo', '@example', '@file', '@attention', '@param', '@post', '@pre', '@retval', '@return', '@sa', '@since', '@test', '@note', '@par', '@endcode', '@code', '@{', '@}' are allowed to mark the code",
+ ERROR_DOXYGEN_CHECK_COMMAND : "Only Doxygen commands '@bug', '@todo', '@example', '@file', '@attention', '@param', '@post', '@pre', '@retval', '@return', '@sa', '@since', '@test', '@note', '@par', '@endcode', '@code', '@endverbatim', '@verbatim', '@{', '@}' are allowed to mark the code",
ERROR_META_DATA_FILE_CHECK_ALL : "",
ERROR_META_DATA_FILE_CHECK_PATH_NAME : "The file defined in meta-data does not exist",
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py index 1d7f6eb..552344e 100644 --- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py +++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaDataTable.py @@ -139,12 +139,6 @@ class Table(object): def SetEndFlag(self):
pass
- def IsIntegral(self):
- Result = self.Exec("select min(ID) from %s" % (self.Table))
- if Result[0][0] != -1:
- return False
- return True
-
def GetAll(self):
return self.Exec("select * from %s where ID > 0 order by ID" % (self.Table))
@@ -195,19 +189,3 @@ class TableDataModel(Table): self.Insert(CrossIndex, Name, Description)
EdkLogger.verbose("Initialize table DataModel ... DONE!")
- ## Get CrossIndex
- #
- # Get a model's cross index from its name
- #
- # @param ModelName: Name of the model
- # @retval CrossIndex: CrossIndex of the model
- #
- def GetCrossIndex(self, ModelName):
- CrossIndex = -1
- SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
- self.Cur.execute(SqlCommand)
- for Item in self.Cur:
- CrossIndex = Item[0]
-
- return CrossIndex
-
diff --git a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py index 2d98ac5..2ef2847 100644 --- a/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py +++ b/BaseTools/Source/Python/Ecc/MetaFileWorkspace/MetaFileParser.py @@ -1841,14 +1841,14 @@ class DecParser(MetaFileParser): if EccGlobalData.gConfig.UniCheckPCDInfo == '1' or EccGlobalData.gConfig.UniCheckAll == '1' or EccGlobalData.gConfig.CheckAll == '1':
# check Description, Prompt information
- PatternDesc = re.compile('##\s*([\x21-\x7E\s]*)', re.S)
- PatternPrompt = re.compile('#\s+@Prompt\s+([\x21-\x7E\s]*)', re.S)
+ PatternDesc = re.compile(r'##\s*([\x21-\x7E\s]*)', re.S)
+ PatternPrompt = re.compile(r'#\s+@Prompt\s+([\x21-\x7E\s]*)', re.S)
Description = None
Prompt = None
# check @ValidRange, @ValidList and @Expression format valid
ErrorCodeValid = '0x0 <= %s <= 0xFFFFFFFF'
- PatternValidRangeIn = '(NOT)?\s*(\d+\s*-\s*\d+|0[xX][a-fA-F0-9]+\s*-\s*0[xX][a-fA-F0-9]+|LT\s*\d+|LT\s*0[xX][a-fA-F0-9]+|GT\s*\d+|GT\s*0[xX][a-fA-F0-9]+|LE\s*\d+|LE\s*0[xX][a-fA-F0-9]+|GE\s*\d+|GE\s*0[xX][a-fA-F0-9]+|XOR\s*\d+|XOR\s*0[xX][a-fA-F0-9]+|EQ\s*\d+|EQ\s*0[xX][a-fA-F0-9]+)'
- PatternValidRng = re.compile('^' + '(NOT)?\s*' + PatternValidRangeIn + '$')
+ PatternValidRangeIn = r'(NOT)?\s*(\d+\s*-\s*\d+|0[xX][a-fA-F0-9]+\s*-\s*0[xX][a-fA-F0-9]+|LT\s*\d+|LT\s*0[xX][a-fA-F0-9]+|GT\s*\d+|GT\s*0[xX][a-fA-F0-9]+|LE\s*\d+|LE\s*0[xX][a-fA-F0-9]+|GE\s*\d+|GE\s*0[xX][a-fA-F0-9]+|XOR\s*\d+|XOR\s*0[xX][a-fA-F0-9]+|EQ\s*\d+|EQ\s*0[xX][a-fA-F0-9]+)'
+ PatternValidRng = re.compile('^' + r'(NOT)?\s*' + PatternValidRangeIn + '$')
for Comment in self._Comments:
Comm = Comment[0].strip()
if not Comm:
@@ -2071,7 +2071,7 @@ class UniParser(object): def CheckKeyValid(self, Key, Contents=None):
if not Contents:
Contents = self.FileIn
- KeyPattern = re.compile('#string\s+%s\s+.*?#language.*?".*?"' % Key, re.S)
+ KeyPattern = re.compile(r'#string\s+%s\s+.*?#language.*?".*?"' % Key, re.S)
if KeyPattern.search(Contents):
return True
return False
diff --git a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py index b02f663..1428afe 100644 --- a/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py +++ b/BaseTools/Source/Python/Ecc/Xml/XmlRoutines.py @@ -132,37 +132,6 @@ def XmlElement(Dom, String): except:
return ""
-
-## Get a single XML element of the current node.
-#
-# Return a single XML element specified by the current root Dom.
-# If the input Dom is not valid, then an empty string is returned.
-#
-# @param Dom The root XML DOM object.
-#
-# @revel Element An XML element in current root Dom.
-#
-def XmlElementData(Dom):
- try:
- return Dom.firstChild.data.strip()
- except:
- return ""
-
-
-## Get a list of XML elements using XPath style syntax.
-#
-# Return a list of XML elements from the root Dom specified by XPath String.
-# If the input Dom or String is not valid, then an empty list is returned.
-#
-# @param Dom The root XML DOM object.
-# @param String A XPath style path.
-#
-# @revel Elements A list of XML elements matching XPath style Sting.
-#
-def XmlElementList(Dom, String):
- return map(XmlElementData, XmlList(Dom, String))
-
-
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
@@ -179,22 +148,6 @@ def XmlAttribute(Dom, Attribute): except:
return ''
-
-## Get the XML node name of the current node.
-#
-# Return a single XML node name from the current root Dom.
-# If the input Dom is not valid, then an empty string is returned.
-#
-# @param Dom The root XML DOM object.
-#
-# @revel Element A single XML element matching XPath style Sting.
-#
-def XmlNodeName(Dom):
- try:
- return Dom.nodeName.strip()
- except:
- return ''
-
## Parse an XML file.
#
# Parse the input XML file named FileName and return a XML DOM it stands for.
diff --git a/BaseTools/Source/Python/Ecc/c.py b/BaseTools/Source/Python/Ecc/c.py index 61ad084..a6b9076 100644 --- a/BaseTools/Source/Python/Ecc/c.py +++ b/BaseTools/Source/Python/Ecc/c.py @@ -43,15 +43,12 @@ def GetArrayPattern(): return p
def GetTypedefFuncPointerPattern():
- p = re.compile('[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
+ p = re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
return p
def GetDB():
return EccGlobalData.gDb
-def GetConfig():
- return EccGlobalData.gConfig
-
def PrintErrorMsg(ErrorType, Msg, TableName, ItemId):
Msg = Msg.replace('\n', '').replace('\r', '')
MsgPartList = Msg.split()
@@ -481,18 +478,6 @@ def GetFunctionList(): return FuncObjList
-def GetFileModificationTimeFromDB(FullFileName):
- TimeValue = 0.0
- Db = GetDB()
- SqlStatement = """ select TimeStamp
- from File
- where FullPath = \'%s\'
- """ % (FullFileName)
- ResultSet = Db.TblFile.Exec(SqlStatement)
- for Result in ResultSet:
- TimeValue = Result[0]
- return TimeValue
-
def CollectSourceCodeDataIntoDB(RootDir):
FileObjList = []
tuple = os.walk(RootDir)
@@ -2235,7 +2220,7 @@ def CheckDoxygenCommand(FullFileName): """ % (FileTable, DataClass.MODEL_IDENTIFIER_COMMENT, DataClass.MODEL_IDENTIFIER_FUNCTION_HEADER)
ResultSet = Db.TblFile.Exec(SqlStatement)
DoxygenCommandList = ['bug', 'todo', 'example', 'file', 'attention', 'param', 'post', 'pre', 'retval',
- 'return', 'sa', 'since', 'test', 'note', 'par', 'endcode', 'code']
+ 'return', 'sa', 'since', 'test', 'note', 'par', 'endcode', 'code', 'endverbatim', 'verbatim']
for Result in ResultSet:
CommentStr = Result[0]
CommentPartList = CommentStr.split()
diff --git a/BaseTools/Source/Python/Eot/CParser4/CLexer.py b/BaseTools/Source/Python/Eot/CParser4/CLexer.py index 54374fd..6666664 100644 --- a/BaseTools/Source/Python/Eot/CParser4/CLexer.py +++ b/BaseTools/Source/Python/Eot/CParser4/CLexer.py @@ -1,7 +1,6 @@ # Generated from C.g4 by ANTLR 4.7.1
from antlr4 import *
from io import StringIO
-from typing.io import TextIO
import sys
diff --git a/BaseTools/Source/Python/Eot/CParser4/CParser.py b/BaseTools/Source/Python/Eot/CParser4/CParser.py index 31d23d5..3946497f8 100644 --- a/BaseTools/Source/Python/Eot/CParser4/CParser.py +++ b/BaseTools/Source/Python/Eot/CParser4/CParser.py @@ -2,7 +2,6 @@ # encoding: utf-8
from antlr4 import *
from io import StringIO
-from typing.io import TextIO
import sys
diff --git a/BaseTools/Source/Python/Eot/CodeFragment.py b/BaseTools/Source/Python/Eot/CodeFragment.py index 94c3f52..ec73fc4 100644 --- a/BaseTools/Source/Python/Eot/CodeFragment.py +++ b/BaseTools/Source/Python/Eot/CodeFragment.py @@ -41,24 +41,6 @@ class PP_Directive : self.StartPos = Begin
self.EndPos = End
-## The description of assignment expression and start & end position
-#
-#
-class AssignmentExpression :
- ## The constructor
- #
- # @param self The object pointer
- # @param Str The message to record
- # @param Begin The start position tuple.
- # @param End The end position tuple.
- #
- def __init__(self, Lvalue, Op, Exp, Begin, End):
- self.Name = Lvalue
- self.Operator = Op
- self.Value = Exp
- self.StartPos = Begin
- self.EndPos = End
-
## The description of predicate expression and start & end position
#
#
diff --git a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py index a5c1cee..a6827c0 100644 --- a/BaseTools/Source/Python/Eot/CodeFragmentCollector.py +++ b/BaseTools/Source/Python/Eot/CodeFragmentCollector.py @@ -72,9 +72,6 @@ class CodeFragmentCollector: self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
- self.__Token = ""
- self.__SkippedChars = ""
-
## __EndOfFile() method
#
# Judge current buffer pos is at file end
@@ -93,21 +90,6 @@ class CodeFragmentCollector: else:
return False
- ## __EndOfLine() method
- #
- # Judge current buffer pos is at line end
- #
- # @param self The object pointer
- # @retval True Current File buffer position is at line end
- # @retval False Current File buffer position is NOT at line end
- #
- def __EndOfLine(self):
- SizeOfCurrentLine = len(self.Profile.FileLinesList[self.CurrentLineNumber - 1])
- if self.CurrentOffsetWithinLine >= SizeOfCurrentLine - 1:
- return True
- else:
- return False
-
## Rewind() method
#
# Reset file data buffer to the initial state
@@ -118,25 +100,6 @@ class CodeFragmentCollector: self.CurrentLineNumber = 1
self.CurrentOffsetWithinLine = 0
- ## __UndoOneChar() method
- #
- # Go back one char in the file buffer
- #
- # @param self The object pointer
- # @retval True Successfully go back one char
- # @retval False Not able to go back one char as file beginning reached
- #
- def __UndoOneChar(self):
-
- if self.CurrentLineNumber == 1 and self.CurrentOffsetWithinLine == 0:
- return False
- elif self.CurrentOffsetWithinLine == 0:
- self.CurrentLineNumber -= 1
- self.CurrentOffsetWithinLine = len(self.__CurrentLine()) - 1
- else:
- self.CurrentOffsetWithinLine -= 1
- return True
-
## __GetOneChar() method
#
# Move forward one char in the file buffer
@@ -205,32 +168,6 @@ class CodeFragmentCollector: def __CurrentLine(self):
return self.Profile.FileLinesList[self.CurrentLineNumber - 1]
- ## __InsertComma() method
- #
- # Insert ',' to replace PP
- #
- # @param self The object pointer
- # @retval List current line contents
- #
- def __InsertComma(self, Line):
-
-
- if self.Profile.FileLinesList[Line - 1][0] != T_CHAR_HASH:
- BeforeHashPart = str(self.Profile.FileLinesList[Line - 1]).split(T_CHAR_HASH)[0]
- if BeforeHashPart.rstrip().endswith(T_CHAR_COMMA) or BeforeHashPart.rstrip().endswith(';'):
- return
-
- if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(','):
- return
-
- if Line - 2 >= 0 and str(self.Profile.FileLinesList[Line - 2]).rstrip().endswith(';'):
- return
-
- if str(self.Profile.FileLinesList[Line]).lstrip().startswith(',') or str(self.Profile.FileLinesList[Line]).lstrip().startswith(';'):
- return
-
- self.Profile.FileLinesList[Line - 1].insert(self.CurrentOffsetWithinLine, ',')
-
## PreprocessFileWithClear() method
#
# Run a preprocess for the file to clean all comments
diff --git a/BaseTools/Source/Python/Eot/Database.py b/BaseTools/Source/Python/Eot/Database.py index fca08b9..64a7228 100644 --- a/BaseTools/Source/Python/Eot/Database.py +++ b/BaseTools/Source/Python/Eot/Database.py @@ -78,7 +78,6 @@ class Database(object): self.Conn.execute("PRAGMA page_size=8192")
self.Conn.execute("PRAGMA synchronous=OFF")
# to avoid non-ascii character conversion error
- self.Conn.text_factory = str
self.Cur = self.Conn.cursor()
self.TblDataModel = TableDataModel(self.Cur)
diff --git a/BaseTools/Source/Python/Eot/EotGlobalData.py b/BaseTools/Source/Python/Eot/EotGlobalData.py index 3218f86..887a7a2 100644 --- a/BaseTools/Source/Python/Eot/EotGlobalData.py +++ b/BaseTools/Source/Python/Eot/EotGlobalData.py @@ -11,7 +11,7 @@ from Common.LongFilePathSupport import OpenLongFilePath as open gEFI_SOURCE = ''
gEDK_SOURCE = ''
gWORKSPACE = ''
-gSHELL_INF = 'Application\Shell'
+gSHELL_INF = r'Application\Shell'
gMAKE_FILE = ''
gDSC_FILE = ''
gFV_FILE = []
diff --git a/BaseTools/Source/Python/Eot/EotMain.py b/BaseTools/Source/Python/Eot/EotMain.py index 791fcdf..e0b2ed2 100644 --- a/BaseTools/Source/Python/Eot/EotMain.py +++ b/BaseTools/Source/Python/Eot/EotMain.py @@ -25,7 +25,6 @@ from Eot import Database from array import array
from Eot.Report import Report
from Common.BuildVersion import gBUILD_VERSION
-from Eot.Parser import ConvertGuid
from Common.LongFilePathSupport import OpenLongFilePath as open
import struct
import uuid
@@ -1010,9 +1009,6 @@ class Ffs(Image): def Pack(self):
pass
- def SetFreeSpace(self, Size):
- self.FreeSpace = Size
-
def _GetGuid(self):
return gGuidStringFormat % self.Name
diff --git a/BaseTools/Source/Python/Eot/Identification.py b/BaseTools/Source/Python/Eot/Identification.py index 31d4760..168a886 100644 --- a/BaseTools/Source/Python/Eot/Identification.py +++ b/BaseTools/Source/Python/Eot/Identification.py @@ -29,20 +29,6 @@ class Identification(object): def GetFileName(self, FileFullPath, FileRelativePath):
pass
- ## GetFileName
- #
- # Reserved
- #
- def GetFileFullPath(self, FileName, FileRelativePath):
- pass
-
- ## GetFileName
- #
- # Reserved
- #
- def GetFileRelativePath(self, FileName, FileFullPath):
- pass
-
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
diff --git a/BaseTools/Source/Python/Eot/Parser.py b/BaseTools/Source/Python/Eot/Parser.py index f204051..b0b3d0b 100644 --- a/BaseTools/Source/Python/Eot/Parser.py +++ b/BaseTools/Source/Python/Eot/Parser.py @@ -109,17 +109,6 @@ def PreProcess(Filename, MergeMultipleLines = True, LineNo = -1): return Lines
-## AddToGlobalMacro() method
-#
-# Add a macro to EotGlobalData.gMACRO
-#
-# @param Name: Name of the macro
-# @param Value: Value of the macro
-#
-def AddToGlobalMacro(Name, Value):
- Value = ReplaceMacro(Value, EotGlobalData.gMACRO, True)
- EotGlobalData.gMACRO[Name] = Value
-
## AddToSelfMacro() method
#
# Parse a line of macro definition and add it to a macro set
@@ -238,139 +227,6 @@ def GetAllIncludeFiles(Db): return IncludeFileList
-## GetAllSourceFiles() method
-#
-# Find all source files
-#
-# @param Db: Eot database
-#
-# @return SourceFileList: A list of source files
-#
-def GetAllSourceFiles(Db):
- SourceFileList = []
- SqlCommand = """select distinct Value1 from Inf where Model = %s order by Value1""" % MODEL_EFI_SOURCE_FILE
- RecordSet = Db.TblInf.Exec(SqlCommand)
-
- for Record in RecordSet:
- SourceFileList.append(Record[0])
-
- return SourceFileList
-
-## GetAllFiles() method
-#
-# Find all files, both source files and include files
-#
-# @param Db: Eot database
-#
-# @return FileList: A list of files
-#
-def GetAllFiles(Db):
- FileList = []
- IncludeFileList = GetAllIncludeFiles(Db)
- SourceFileList = GetAllSourceFiles(Db)
- for Item in IncludeFileList:
- if os.path.isfile(Item) and Item not in FileList:
- FileList.append(Item)
- for Item in SourceFileList:
- if os.path.isfile(Item) and Item not in FileList:
- FileList.append(Item)
-
- return FileList
-
-## ParseConditionalStatement() method
-#
-# Parse conditional statement
-#
-# @param Line: One line to be parsed
-# @param Macros: A set of all macro
-# @param StatusSet: A set of all status
-#
-# @retval True: Find keyword of conditional statement
-# @retval False: Not find keyword of conditional statement
-#
-def ParseConditionalStatement(Line, Macros, StatusSet):
- NewLine = Line.upper()
- if NewLine.find(TAB_IF_EXIST.upper()) > -1:
- IfLine = Line[NewLine.find(TAB_IF_EXIST) + len(TAB_IF_EXIST) + 1:].strip()
- IfLine = ReplaceMacro(IfLine, EotGlobalData.gMACRO, True)
- IfLine = ReplaceMacro(IfLine, Macros, True)
- IfLine = IfLine.replace("\"", '')
- IfLine = IfLine.replace("(", '')
- IfLine = IfLine.replace(")", '')
- Status = os.path.exists(os.path.normpath(IfLine))
- StatusSet.append([Status])
- return True
- if NewLine.find(TAB_IF_DEF.upper()) > -1:
- IfLine = Line[NewLine.find(TAB_IF_DEF) + len(TAB_IF_DEF) + 1:].strip()
- Status = False
- if IfLine in Macros or IfLine in EotGlobalData.gMACRO:
- Status = True
- StatusSet.append([Status])
- return True
- if NewLine.find(TAB_IF_N_DEF.upper()) > -1:
- IfLine = Line[NewLine.find(TAB_IF_N_DEF) + len(TAB_IF_N_DEF) + 1:].strip()
- Status = False
- if IfLine not in Macros and IfLine not in EotGlobalData.gMACRO:
- Status = True
- StatusSet.append([Status])
- return True
- if NewLine.find(TAB_IF.upper()) > -1:
- IfLine = Line[NewLine.find(TAB_IF) + len(TAB_IF) + 1:].strip()
- Status = ParseConditionalStatementMacros(IfLine, Macros)
- StatusSet.append([Status])
- return True
- if NewLine.find(TAB_ELSE_IF.upper()) > -1:
- IfLine = Line[NewLine.find(TAB_ELSE_IF) + len(TAB_ELSE_IF) + 1:].strip()
- Status = ParseConditionalStatementMacros(IfLine, Macros)
- StatusSet[-1].append(Status)
- return True
- if NewLine.find(TAB_ELSE.upper()) > -1:
- Status = False
- for Item in StatusSet[-1]:
- Status = Status or Item
- StatusSet[-1].append(not Status)
- return True
- if NewLine.find(TAB_END_IF.upper()) > -1:
- StatusSet.pop()
- return True
-
- return False
-
-## ParseConditionalStatement() method
-#
-# Parse conditional statement with Macros
-#
-# @param Line: One line to be parsed
-# @param Macros: A set of macros
-#
-# @return Line: New line after replacing macros
-#
-def ParseConditionalStatementMacros(Line, Macros):
- if Line.upper().find('DEFINED(') > -1 or Line.upper().find('EXIST') > -1:
- return False
- Line = ReplaceMacro(Line, EotGlobalData.gMACRO, True)
- Line = ReplaceMacro(Line, Macros, True)
- Line = Line.replace("&&", "and")
- Line = Line.replace("||", "or")
- return eval(Line)
-
-## GetConditionalStatementStatus() method
-#
-# 1. Assume the latest status as True
-# 2. Pop the top status of status set, previous status
-# 3. Compare the latest one and the previous one and get new status
-#
-# @param StatusSet: A set of all status
-#
-# @return Status: The final status
-#
-def GetConditionalStatementStatus(StatusSet):
- Status = True
- for Item in StatusSet:
- Status = Status and Item[-1]
-
- return Status
-
## SearchBelongsToFunction() method
#
# Search all functions belong to the file
@@ -819,47 +675,6 @@ def ParseMapFile(Files): return AllMaps
-## ConvertGuid
-#
-# Convert a GUID to a GUID with all upper letters
-#
-# @param guid: The GUID to be converted
-#
-# @param newGuid: The GUID with all upper letters.
-#
-def ConvertGuid(guid):
- numList = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
- newGuid = ''
- if guid.startswith('g'):
- guid = guid[1:]
- for i in guid:
- if i.upper() == i and i not in numList:
- newGuid = newGuid + ('_' + i)
- else:
- newGuid = newGuid + i.upper()
- if newGuid.startswith('_'):
- newGuid = newGuid[1:]
- if newGuid.endswith('_'):
- newGuid = newGuid[:-1]
-
- return newGuid
-
-## ConvertGuid2() method
-#
-# Convert a GUID to a GUID with new string instead of old string
-#
-# @param guid: The GUID to be converted
-# @param old: Old string to be replaced
-# @param new: New string to replace the old one
-#
-# @param newGuid: The GUID after replacement
-#
-def ConvertGuid2(guid, old, new):
- newGuid = ConvertGuid(guid)
- newGuid = newGuid.replace(old, new)
-
- return newGuid
-
##
#
# This acts like the main() function for the script, unless it is 'import'ed into another
diff --git a/BaseTools/Source/Python/Eot/Report.py b/BaseTools/Source/Python/Eot/Report.py index 9d99fe2..32af504 100644 --- a/BaseTools/Source/Python/Eot/Report.py +++ b/BaseTools/Source/Python/Eot/Report.py @@ -149,30 +149,6 @@ class Report(object): </tr>""" % (DepexString)
self.WriteLn(Content)
- ## GeneratePpi() method
- #
- # Generate PPI information
- #
- # @param self: The object pointer
- # @param Name: CName of a GUID
- # @param Guid: Value of a GUID
- # @param Type: Type of a GUID
- #
- def GeneratePpi(self, Name, Guid, Type):
- self.GeneratePpiProtocol('Ppi', Name, Guid, Type, self.PpiIndex)
-
- ## GenerateProtocol() method
- #
- # Generate PROTOCOL information
- #
- # @param self: The object pointer
- # @param Name: CName of a GUID
- # @param Guid: Value of a GUID
- # @param Type: Type of a GUID
- #
- def GenerateProtocol(self, Name, Guid, Type):
- self.GeneratePpiProtocol('Protocol', Name, Guid, Type, self.ProtocolIndex)
-
## GeneratePpiProtocol() method
#
# Generate PPI/PROTOCOL information
diff --git a/BaseTools/Source/Python/Eot/c.py b/BaseTools/Source/Python/Eot/c.py index dd9530f..a85564d 100644 --- a/BaseTools/Source/Python/Eot/c.py +++ b/BaseTools/Source/Python/Eot/c.py @@ -54,7 +54,7 @@ def GetArrayPattern(): # @return p: the pattern of function pointer
#
def GetTypedefFuncPointerPattern():
- p = re.compile('[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
+ p = re.compile(r'[_\w\s]*\([\w\s]*\*+\s*[_\w]+\s*\)\s*\(.*\)', re.DOTALL)
return p
## GetDB() method
diff --git a/BaseTools/Source/Python/FMMT/FMMT.py b/BaseTools/Source/Python/FMMT/FMMT.py index 7505b6c..d4fa07a 100644 --- a/BaseTools/Source/Python/FMMT/FMMT.py +++ b/BaseTools/Source/Python/FMMT/FMMT.py @@ -49,7 +49,7 @@ def print_banner(): class FMMT():
def __init__(self) -> None:
- self.firmware_packet = {}
+ pass
def SetConfigFilePath(self, configfilepath:str) -> str:
os.environ['FmmtConfPath'] = os.path.abspath(configfilepath)
diff --git a/BaseTools/Source/Python/FMMT/core/BinaryFactoryProduct.py b/BaseTools/Source/Python/FMMT/core/BinaryFactoryProduct.py index de174f2..9dd717c 100644 --- a/BaseTools/Source/Python/FMMT/core/BinaryFactoryProduct.py +++ b/BaseTools/Source/Python/FMMT/core/BinaryFactoryProduct.py @@ -4,7 +4,6 @@ # Copyright (c) 2021-, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
-from re import T
import copy
import os
import sys
@@ -130,7 +129,6 @@ class SectionProduct(BinaryProduct): Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.HeaderLength: Rel_Offset+Section_Info.Size]
Section_Info.DOffset = Section_Offset + Section_Info.HeaderLength + Rel_Whole_Offset
Section_Info.HOffset = Section_Offset + Rel_Whole_Offset
- Section_Info.ROffset = Rel_Offset
if Section_Info.Header.Type == 0:
break
# The final Section in parent Section does not need to add padding, else must be 4-bytes align with parent Section start offset
@@ -174,7 +172,6 @@ class FfsProduct(BinaryProduct): Section_Info.Data = Whole_Data[Rel_Offset+Section_Info.HeaderLength: Rel_Offset+Section_Info.Size]
Section_Info.DOffset = Section_Offset + Section_Info.HeaderLength + Rel_Whole_Offset
Section_Info.HOffset = Section_Offset + Rel_Whole_Offset
- Section_Info.ROffset = Rel_Offset
if Section_Info.Header.Type == 0:
break
# The final Section in Ffs does not need to add padding, else must be 4-bytes align with Ffs start offset
@@ -227,7 +224,6 @@ class FvProduct(BinaryProduct): Ffs_Tree = BIOSTREE(Ffs_Info.Name)
Ffs_Info.HOffset = Ffs_Offset + Rel_Whole_Offset
Ffs_Info.DOffset = Ffs_Offset + Ffs_Info.Header.HeaderLength + Rel_Whole_Offset
- Ffs_Info.ROffset = Rel_Offset
if Ffs_Info.Name == PADVECTOR:
Ffs_Tree.type = FFS_PAD
Ffs_Info.Data = Whole_Data[Rel_Offset+Ffs_Info.Header.HeaderLength: Rel_Offset+Ffs_Info.Size]
@@ -363,15 +359,6 @@ class FdProduct(BinaryProduct): tmp_index += 1
return Fd_Struct
-class ElfSectionProduct(BinaryProduct):
- ## Decompress the compressed section.
- def ParserData(self, Section_Tree, whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
- pass
- def ParserSectionData(self, Section_Tree, whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
- pass
- def ParserProgramData(self, Section_Tree, whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
- pass
-
class ElfProduct(BinaryProduct):
def ParserData(self, ParTree, Whole_Data: bytes, Rel_Whole_Offset: int=0) -> None:
diff --git a/BaseTools/Source/Python/FMMT/core/BiosTree.py b/BaseTools/Source/Python/FMMT/core/BiosTree.py index c5a7b01..d99f964 100644 --- a/BaseTools/Source/Python/FMMT/core/BiosTree.py +++ b/BaseTools/Source/Python/FMMT/core/BiosTree.py @@ -72,36 +72,6 @@ class BIOSTREE: self.Child.insert(pos, newNode)
newNode.Parent = self
- # lastNode.insertRel(newNode)
- def insertRel(self, newNode) -> None:
- if self.Parent:
- parentTree = self.Parent
- new_index = parentTree.Child.index(self) + 1
- parentTree.Child.insert(new_index, newNode)
- self.NextRel = newNode
- newNode.LastRel = self
-
- def deleteNode(self, deletekey: str) -> None:
- FindStatus, DeleteTree = self.FindNode(deletekey)
- if FindStatus:
- parentTree = DeleteTree.Parent
- lastTree = DeleteTree.LastRel
- nextTree = DeleteTree.NextRel
- if parentTree:
- index = parentTree.Child.index(DeleteTree)
- del parentTree.Child[index]
- if lastTree and nextTree:
- lastTree.NextRel = nextTree
- nextTree.LastRel = lastTree
- elif lastTree:
- lastTree.NextRel = None
- elif nextTree:
- nextTree.LastRel = None
- return DeleteTree
- else:
- logger.error('Could not find the target tree')
- return None
-
def FindNode(self, key: str, Findlist: list) -> None:
if self.key == key or (self.Data and self.Data.Name == key) or (self.type == FFS_TREE and self.Data.UiName == key):
Findlist.append(self)
diff --git a/BaseTools/Source/Python/FMMT/core/BiosTreeNode.py b/BaseTools/Source/Python/FMMT/core/BiosTreeNode.py index 5ca4c20..92611f8 100644 --- a/BaseTools/Source/Python/FMMT/core/BiosTreeNode.py +++ b/BaseTools/Source/Python/FMMT/core/BiosTreeNode.py @@ -60,7 +60,6 @@ class ElfNode: self.HeaderLength = len(struct2stream(self.Header))
self.HOffset = 0
self.DOffset = 0
- self.ROffset = 0
self.Data = b''
self.PadData = b''
self.Upld_Info_Align = False
@@ -122,7 +121,6 @@ class FvNode: self.HeaderLength = self.Header.HeaderLength
self.HOffset = 0
self.DOffset = 0
- self.ROffset = 0
self.Data = b''
if self.Header.Signature != 1213613663:
logger.error('Invalid Fv Header! Fv {} signature {} is not "_FVH".'.format(struct2stream(self.Header), self.Header.Signature))
@@ -179,10 +177,8 @@ class FfsNode: self.HeaderLength = self.Header.HeaderLength
self.HOffset = 0
self.DOffset = 0
- self.ROffset = 0
self.Data = b''
self.PadData = b''
- self.SectionMaxAlignment = SECTION_COMMON_ALIGNMENT # 4-align
def ModCheckSum(self) -> None:
HeaderData = struct2stream(self.Header)
@@ -217,13 +213,10 @@ class SectionNode: self.Type = self.Header.Type
self.HOffset = 0
self.DOffset = 0
- self.ROffset = 0
self.Data = b''
self.OriData = b''
- self.OriHeader = b''
self.PadData = b''
self.IsPadSection = False
- self.SectionMaxAlignment = SECTION_COMMON_ALIGNMENT # 4-align
def GetExtHeader(self, Type: int, buffer: bytes, nums: int=0) -> None:
if Type == 0x01:
@@ -244,5 +237,4 @@ class FreeSpaceNode: self.Size = len(buffer)
self.HOffset = 0
self.DOffset = 0
- self.ROffset = 0
self.PadData = b''
diff --git a/BaseTools/Source/Python/FMMT/core/FMMTOperation.py b/BaseTools/Source/Python/FMMT/core/FMMTOperation.py index d4aa339..4ed976d 100644 --- a/BaseTools/Source/Python/FMMT/core/FMMTOperation.py +++ b/BaseTools/Source/Python/FMMT/core/FMMTOperation.py @@ -65,7 +65,10 @@ def DeleteFfs(inputfile: str, TargetFfs_name: str, outputfile: str, Fv_name: str if Fv_name:
FindNum = len(FmmtParser.WholeFvTree.Findlist)
for index in range(FindNum-1, -1, -1):
- if FmmtParser.WholeFvTree.Findlist[index].Parent.key != Fv_name and FmmtParser.WholeFvTree.Findlist[index].Parent.Data.Name != Fv_name:
+ parent = FmmtParser.WholeFvTree.Findlist[index].Parent
+ if parent is None or parent.Data is None:
+ continue
+ if parent.key != Fv_name and parent.Data.Name != Fv_name:
FmmtParser.WholeFvTree.Findlist.remove(FmmtParser.WholeFvTree.Findlist[index])
Status = False
if FmmtParser.WholeFvTree.Findlist != []:
@@ -152,7 +155,10 @@ def ReplaceFfs(inputfile: str, Ffs_name: str, newffsfile: str, outputfile: str, if Fv_name:
FindNum = len(FmmtParser.WholeFvTree.Findlist)
for index in range(FindNum-1, -1, -1):
- if FmmtParser.WholeFvTree.Findlist[index].Parent.key != Fv_name and FmmtParser.WholeFvTree.Findlist[index].Parent.Data.Name != Fv_name:
+ parent = FmmtParser.WholeFvTree.Findlist[index].Parent
+ if parent is None or parent.Data is None:
+ continue
+ if parent.key != Fv_name and parent.Data.Name != Fv_name:
FmmtParser.WholeFvTree.Findlist.remove(FmmtParser.WholeFvTree.Findlist[index])
if FmmtParser.WholeFvTree.Findlist != []:
for TargetFfs in FmmtParser.WholeFvTree.Findlist:
@@ -184,7 +190,10 @@ def ExtractFfs(inputfile: str, Ffs_name: str, outputfile: str, Fv_name: str=None if Fv_name:
FindNum = len(FmmtParser.WholeFvTree.Findlist)
for index in range(FindNum-1, -1, -1):
- if FmmtParser.WholeFvTree.Findlist[index].Parent.key != Fv_name and FmmtParser.WholeFvTree.Findlist[index].Parent.Data.Name != Fv_name:
+ parent = FmmtParser.WholeFvTree.Findlist[index].Parent
+ if parent is None or parent.Data is None:
+ continue
+ if parent.key != Fv_name and parent.Data.Name != Fv_name:
FmmtParser.WholeFvTree.Findlist.remove(FmmtParser.WholeFvTree.Findlist[index])
if FmmtParser.WholeFvTree.Findlist != []:
TargetNode = FmmtParser.WholeFvTree.Findlist[0]
diff --git a/BaseTools/Source/Python/FMMT/core/FvHandler.py b/BaseTools/Source/Python/FMMT/core/FvHandler.py index 7a60760..12d52c1 100644 --- a/BaseTools/Source/Python/FMMT/core/FvHandler.py +++ b/BaseTools/Source/Python/FMMT/core/FvHandler.py @@ -237,19 +237,28 @@ class FvHandler: Size_delta = len(CompressedData) - len(TargetTree.Data.OriData)
ChangeSize(TargetTree, -Size_delta)
if TargetTree.NextRel:
+ Original_Pad_Size = len(TargetTree.Data.PadData)
TargetTree.Data.PadData = b'\x00' * New_Pad_Size
- self.Remain_New_Free_Space = len(TargetTree.Data.OriData) + len(TargetTree.Data.PadData) - len(CompressedData) - New_Pad_Size
+ self.Remain_New_Free_Space = (
+ len(TargetTree.Data.OriData) +
+ Original_Pad_Size -
+ len(CompressedData) -
+ New_Pad_Size
+ )
else:
TargetTree.Data.PadData = b''
- self.Remain_New_Free_Space = len(TargetTree.Data.OriData) - len(CompressedData)
+ self.Remain_New_Free_Space = (
+ len(TargetTree.Data.OriData) -
+ len(CompressedData)
+ )
TargetTree.Data.OriData = CompressedData
elif len(CompressedData) == len(TargetTree.Data.OriData):
TargetTree.Data.OriData = CompressedData
elif len(CompressedData) > len(TargetTree.Data.OriData):
New_Pad_Size = GetPadSize(len(CompressedData), SECTION_COMMON_ALIGNMENT)
self.Remain_New_Free_Space = len(CompressedData) + New_Pad_Size - len(TargetTree.Data.OriData) - len(TargetTree.Data.PadData)
- self.ModifyTest(TargetTree, self.Remain_New_Free_Space)
self.Status = True
+ self.ModifyTest(TargetTree, self.Remain_New_Free_Space)
def ModifyTest(self, ParTree, Needed_Space: int) -> None:
# If have needed space, will find if there have free space in parent tree, meanwhile update the node data.
@@ -347,15 +356,24 @@ class FvHandler: ModifySectionType(ParTree)
Needed_Space += ParTree.Data.HeaderLength - OriHeaderLen
# Update needed space with Delta_Pad_Size
+ Original_Pad_Size = len(ParTree.Data.PadData)
if ParTree.NextRel:
New_Pad_Size = GetPadSize(ParTree.Data.Size, SECTION_COMMON_ALIGNMENT)
- Delta_Pad_Size = New_Pad_Size - len(ParTree.Data.PadData)
+ Delta_Pad_Size = New_Pad_Size - Original_Pad_Size
ParTree.Data.PadData = b'\x00' * New_Pad_Size
Needed_Space += Delta_Pad_Size
else:
ParTree.Data.PadData = b''
if Needed_Space < 0:
- self.Remain_New_Free_Space = len(ParTree.Data.OriData) - len(CompressedData)
+ if ParTree.NextRel:
+ self.Remain_New_Free_Space = (
+ len(ParTree.Data.OriData) + Original_Pad_Size -
+ len(CompressedData) - New_Pad_Size
+ )
+ else:
+ self.Remain_New_Free_Space = (
+ len(ParTree.Data.OriData) - len(CompressedData)
+ )
# If current section is not guided section
elif Needed_Space:
ChangeSize(ParTree, -Needed_Space)
diff --git a/BaseTools/Source/Python/FMMT/core/GuidTools.py b/BaseTools/Source/Python/FMMT/core/GuidTools.py index f6bdeff..d413fe3 100644 --- a/BaseTools/Source/Python/FMMT/core/GuidTools.py +++ b/BaseTools/Source/Python/FMMT/core/GuidTools.py @@ -15,8 +15,9 @@ from FirmwareStorageFormat.Common import * from utils.FmmtLogger import FmmtLogger as logger
import subprocess
-def ExecuteCommand(cmd: list) -> None:
- subprocess.run(cmd,stdout=subprocess.DEVNULL)
+def ExecuteCommand(cmd_list: list) -> None:
+ cmd = ' '.join(cmd_list)
+ subprocess.run(cmd, stdout=subprocess.DEVNULL, shell=True)
class GUIDTool:
def __init__(self, guid: str, short_name: str, command: str) -> None:
@@ -176,4 +177,3 @@ class GUIDTools: raise Exception("Process Failed: is not defined!")
guidtools = GUIDTools()
-
diff --git a/BaseTools/Source/Python/GenFds/EfiSection.py b/BaseTools/Source/Python/GenFds/EfiSection.py index fd58391..ca7a447 100644 --- a/BaseTools/Source/Python/GenFds/EfiSection.py +++ b/BaseTools/Source/Python/GenFds/EfiSection.py @@ -155,7 +155,7 @@ class EfiSection (EfiSectionClassObject): #if VerString == '' and
if BuildNumString == '':
if self.Optional == True :
- GenFdsGlobalVariable.VerboseLogger( "Optional Section don't exist!")
+ GenFdsGlobalVariable.VerboseLogger( "Optional Section doesn't exist!")
return [], None
else:
EdkLogger.error("GenFds", GENFDS_ERROR, "File: %s miss Version Section value" %InfFileName)
diff --git a/BaseTools/Source/Python/GenFds/Fd.py b/BaseTools/Source/Python/GenFds/Fd.py index 973936b..c21453a 100644 --- a/BaseTools/Source/Python/GenFds/Fd.py +++ b/BaseTools/Source/Python/GenFds/Fd.py @@ -139,17 +139,4 @@ class FD(FDClassObject): GenFdsGlobalVariable.ImageBinDict[self.FdUiName.upper() + 'fd'] = FdFileName
return FdFileName
- ## generate flash map file
- #
- # @param self The object pointer
- #
- def GenFlashMap (self):
- pass
-
-
-
-
-
-
-
diff --git a/BaseTools/Source/Python/GenFds/FdfParser.py b/BaseTools/Source/Python/GenFds/FdfParser.py index feb4c72..63e73a7 100644 --- a/BaseTools/Source/Python/GenFds/FdfParser.py +++ b/BaseTools/Source/Python/GenFds/FdfParser.py @@ -13,7 +13,6 @@ from __future__ import print_function
from __future__ import absolute_import
from re import compile, DOTALL
-from string import hexdigits
from uuid import UUID
from Common.BuildToolError import *
@@ -1537,7 +1536,6 @@ class FdfParser: if self._IsToken(TAB_VALUE_SPLIT):
pcdPair = self._GetNextPcdSettings()
- Obj.BaseAddressPcd = pcdPair
self.Profile.PcdDict[pcdPair] = Obj.BaseAddress
self.SetPcdLocalation(pcdPair)
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
@@ -1554,7 +1552,6 @@ class FdfParser: Size = self._Token
if self._IsToken(TAB_VALUE_SPLIT):
pcdPair = self._GetNextPcdSettings()
- Obj.SizePcd = pcdPair
self.Profile.PcdDict[pcdPair] = Size
self.SetPcdLocalation(pcdPair)
FileLineTuple = GetRealFileLine(self.FileName, self.CurrentLineNumber)
@@ -2392,9 +2389,6 @@ class FdfParser: if not ffsInf.InfFileName.endswith('.inf'):
raise Warning.Expected(".inf file path", self.FileName, self.CurrentLineNumber)
- ffsInf.CurrentLineNum = self.CurrentLineNumber
- ffsInf.CurrentLineContent = self._CurrentLine()
-
#Replace $(SAPCE) with real space
ffsInf.InfFileName = ffsInf.InfFileName.replace('$(SPACE)', ' ')
@@ -2651,8 +2645,6 @@ class FdfParser: self._GetRAWData(FfsFileObj)
else:
- FfsFileObj.CurrentLineNum = self.CurrentLineNumber
- FfsFileObj.CurrentLineContent = self._CurrentLine()
FfsFileObj.FileName = self._Token.replace('$(SPACE)', ' ')
self._VerifyFile(FfsFileObj.FileName)
@@ -3227,8 +3219,6 @@ class FdfParser: if not self._GetNextToken():
raise Warning.Expected("file name", self.FileName, self.CurrentLineNumber)
- CapsuleObj.CreateFile = self._Token
-
self._GetCapsuleStatements(CapsuleObj)
self.Profile.CapsuleDict[CapsuleObj.UiCapsuleName] = CapsuleObj
return True
diff --git a/BaseTools/Source/Python/GenFds/FfsFileStatement.py b/BaseTools/Source/Python/GenFds/FfsFileStatement.py index 1c6e59b..e1eb75d 100644 --- a/BaseTools/Source/Python/GenFds/FfsFileStatement.py +++ b/BaseTools/Source/Python/GenFds/FfsFileStatement.py @@ -33,8 +33,6 @@ class FileStatement (FileStatementClassObject): #
def __init__(self):
FileStatementClassObject.__init__(self)
- self.CurrentLineNum = None
- self.CurrentLineContent = None
self.FileName = None
self.InfFileName = None
self.SubAlignment = None
diff --git a/BaseTools/Source/Python/GenFds/FfsInfStatement.py b/BaseTools/Source/Python/GenFds/FfsInfStatement.py index ec97134..6c837ac 100644 --- a/BaseTools/Source/Python/GenFds/FfsInfStatement.py +++ b/BaseTools/Source/Python/GenFds/FfsInfStatement.py @@ -65,8 +65,6 @@ class FfsInfStatement(FfsInfStatementClassObject): self.PiSpecVersion = '0x00000000'
self.InfModule = None
self.FinalTargetSuffixMap = {}
- self.CurrentLineNum = None
- self.CurrentLineContent = None
self.FileName = None
self.InfFileName = None
self.OverrideGuid = None
@@ -151,7 +149,7 @@ class FfsInfStatement(FfsInfStatementClassObject): #
def __InfParse__(self, Dict = None, IsGenFfs=False):
- GenFdsGlobalVariable.VerboseLogger( " Begine parsing INf file : %s" %self.InfFileName)
+ GenFdsGlobalVariable.VerboseLogger( " Begin parsing INF file : %s" %self.InfFileName)
self.InfFileName = self.InfFileName.replace('$(WORKSPACE)', '')
if len(self.InfFileName) > 1 and self.InfFileName[0] == '\\' and self.InfFileName[1] == '\\':
@@ -169,7 +167,7 @@ class FfsInfStatement(FfsInfStatementClassObject): if not os.path.exists(InfPath):
InfPath = GenFdsGlobalVariable.ReplaceWorkspaceMacro(InfPath)
if not os.path.exists(InfPath):
- EdkLogger.error("GenFds", GENFDS_ERROR, "Non-existant Module %s !" % (self.InfFileName))
+ EdkLogger.error("GenFds", GENFDS_ERROR, "Non-existent Module %s !" % (self.InfFileName))
self.CurrentArch = self.GetCurrentArch()
#
@@ -365,7 +363,7 @@ class FfsInfStatement(FfsInfStatementClassObject): os.makedirs(self.OutputPath)
self.EfiOutputPath, self.EfiDebugPath = self.__GetEFIOutPutPath__()
- GenFdsGlobalVariable.VerboseLogger( "ModuelEFIPath: " + self.EfiOutputPath)
+ GenFdsGlobalVariable.VerboseLogger( "ModuleEFIPath: " + self.EfiOutputPath)
## PatchEfiFile
#
@@ -1130,5 +1128,3 @@ class FfsInfStatement(FfsInfStatementClassObject): EdkLogger.error("GenFds", FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." %UniVfrOffsetFileName, None)
fStringIO.close ()
-
-
diff --git a/BaseTools/Source/Python/GenFds/GenFds.py b/BaseTools/Source/Python/GenFds/GenFds.py index b48fe76..fe53d6b 100644 --- a/BaseTools/Source/Python/GenFds/GenFds.py +++ b/BaseTools/Source/Python/GenFds/GenFds.py @@ -65,7 +65,6 @@ def resetFdsGlobalVariable(): # will be FvDir + os.sep + 'Ffs'
GenFdsGlobalVariable.FfsDir = ''
GenFdsGlobalVariable.FdfParser = None
- GenFdsGlobalVariable.LibDir = ''
GenFdsGlobalVariable.WorkSpace = None
GenFdsGlobalVariable.WorkSpaceDir = ''
GenFdsGlobalVariable.ConfDir = ''
@@ -134,7 +133,7 @@ def GenFdsApi(FdsCommandDict, WorkSpaceDataBase=None): EdkLogger.error("GenFds", PARAMETER_INVALID, "WORKSPACE is invalid",
ExtraData="Please use '-w' switch to pass it or set the WORKSPACE environment variable.")
else:
- Workspace = os.path.normcase(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')))
+ Workspace = os.path.normpath(FdsCommandDict.get("Workspace",os.environ.get('WORKSPACE')))
GenFdsGlobalVariable.WorkSpaceDir = Workspace
if FdsCommandDict.get("debug"):
GenFdsGlobalVariable.VerboseLogger("Using Workspace:" + Workspace)
@@ -200,7 +199,7 @@ def GenFdsApi(FdsCommandDict, WorkSpaceDataBase=None): ConfDirectoryPath = os.path.join(GenFdsGlobalVariable.WorkSpaceDir, ConfDirectoryPath)
else:
if "CONF_PATH" in os.environ:
- ConfDirectoryPath = os.path.normcase(os.environ["CONF_PATH"])
+ ConfDirectoryPath = os.path.normpath(os.environ["CONF_PATH"])
else:
# Get standard WORKSPACE/Conf, use the absolute path to the WORKSPACE/Conf
ConfDirectoryPath = mws.join(GenFdsGlobalVariable.WorkSpaceDir, 'Conf')
@@ -549,41 +548,6 @@ class GenFds(object): return GenFdsGlobalVariable.FfsCmdDict
- ## GetFvBlockSize()
- #
- # @param FvObj Whose block size to get
- # @retval int Block size value
- #
- @staticmethod
- def GetFvBlockSize(FvObj):
- DefaultBlockSize = 0x1
- FdObj = None
- if GenFds.OnlyGenerateThisFd is not None and GenFds.OnlyGenerateThisFd.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict:
- FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict[GenFds.OnlyGenerateThisFd.upper()]
- if FdObj is None:
- for ElementFd in GenFdsGlobalVariable.FdfParser.Profile.FdDict.values():
- for ElementRegion in ElementFd.RegionList:
- if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
- for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
- if FvObj.BlockSizeList != []:
- return FvObj.BlockSizeList[0][0]
- else:
- return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList)
- if FvObj.BlockSizeList != []:
- return FvObj.BlockSizeList[0][0]
- return DefaultBlockSize
- else:
- for ElementRegion in FdObj.RegionList:
- if ElementRegion.RegionType == BINARY_FILE_TYPE_FV:
- for ElementRegionData in ElementRegion.RegionDataList:
- if ElementRegionData is not None and ElementRegionData.upper() == FvObj.UiFvName:
- if FvObj.BlockSizeList != []:
- return FvObj.BlockSizeList[0][0]
- else:
- return ElementRegion.BlockSizeOfRegion(ElementFd.BlockSizeList)
- return DefaultBlockSize
-
## DisplayFvSpaceInfo()
#
# @param FvObj Whose block size to get
diff --git a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py index d7668ba..4e014bf 100644 --- a/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py +++ b/BaseTools/Source/Python/GenFds/GenFdsGlobalVariable.py @@ -44,7 +44,6 @@ class GenFdsGlobalVariable: # will be FvDir + os.sep + 'Ffs'
FfsDir = ''
FdfParser = None
- LibDir = ''
WorkSpace = None
WorkSpaceDir = ''
ConfDir = ''
diff --git a/BaseTools/Source/Python/README.md b/BaseTools/Source/Python/README.md deleted file mode 100644 index 8c4d9e7..0000000 --- a/BaseTools/Source/Python/README.md +++ /dev/null @@ -1,29 +0,0 @@ -# Edk2 Basetools
-
-This folder has traditionally held the source of Python based tools used by EDK2.
-The official repo this source has moved to https://github.com/tianocore/edk2-basetools.
-This folder will remain in the tree until the next stable release (expected 202102).
-There is a new folder under Basetools `BinPipWrappers` that uses the pip module rather than this tree for Basetools.
-By adding the scope `pipbuild-win` or `pipbuild-unix` (depending on your host system), the SDE will use the
-`BinPipWrappers` instead of the regular `BinWrappers`.
-
-## Why Move It?
-
-The discussion is on the mailing list. The RFC is here: https://edk2.groups.io/g/rfc/topic/74009714#270
-The benefits allow for the Basetools project to be used separately from EDK2 itself as well as offering it in a
-globally accessible manner.
-This makes it much easier to build a module using Basetools.
-Separating the Basetools into their own repo allows for easier CI and contribution process.
-Additional pros, cons, and process can be found on the mailing list.
-
-## How Do I Install It?
-
-By default, EDK2 is tied to and tested with a specific version of the Basetools through `pip-requirements.txt`.
-You can simply run:
-
-```bash
-pip install -r pip-requirements.txt
-```
-
-This will install the required module, thought we strongly suggest setting up a virtual environment.
-Additionally, you can also install a local clone of the Basetools as well as a specific git commit.
diff --git a/BaseTools/Source/Python/Table/Table.py b/BaseTools/Source/Python/Table/Table.py index 7a60313..47ebea6 100644 --- a/BaseTools/Source/Python/Table/Table.py +++ b/BaseTools/Source/Python/Table/Table.py @@ -77,20 +77,6 @@ class Table(object): for Item in self.Cur:
return Item[0]
- ## Generate ID
- #
- # Generate an ID if input ID is -1
- #
- # @param ID: Input ID
- #
- # @retval ID: New generated ID
- #
- def GenerateID(self, ID):
- if ID == -1:
- self.ID = self.ID + 1
-
- return self.ID
-
## Init the ID of the table
#
# Init the ID of the table
diff --git a/BaseTools/Source/Python/Table/TableDataModel.py b/BaseTools/Source/Python/Table/TableDataModel.py index 3855807..7d1d7ed 100644 --- a/BaseTools/Source/Python/Table/TableDataModel.py +++ b/BaseTools/Source/Python/Table/TableDataModel.py @@ -72,19 +72,3 @@ class TableDataModel(Table): Description = Item[0]
self.Insert(CrossIndex, Name, Description)
EdkLogger.verbose("Initialize table DataModel ... DONE!")
-
- ## Get CrossIndex
- #
- # Get a model's cross index from its name
- #
- # @param ModelName: Name of the model
- # @retval CrossIndex: CrossIndex of the model
- #
- def GetCrossIndex(self, ModelName):
- CrossIndex = -1
- SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
- self.Cur.execute(SqlCommand)
- for Item in self.Cur:
- CrossIndex = Item[0]
-
- return CrossIndex
diff --git a/BaseTools/Source/Python/Table/TableEotReport.py b/BaseTools/Source/Python/Table/TableEotReport.py index 72bc11f..db6660f 100644 --- a/BaseTools/Source/Python/Table/TableEotReport.py +++ b/BaseTools/Source/Python/Table/TableEotReport.py @@ -13,7 +13,6 @@ import Common.EdkLogger as EdkLogger import Common.LongFilePathOs as os, time
from Table.Table import Table
from Common.StringUtils import ConvertToSqlString2
-import Eot.EotToolError as EotToolError
import Eot.EotGlobalData as EotGlobalData
## TableReport
@@ -63,9 +62,3 @@ class TableEotReport(Table): % (self.Table, self.ID, ModuleID, ModuleName, ModuleGuid, SourceFileID, SourceFileFullPath, \
ItemName, ItemType, ItemMode, GuidName, GuidMacro, GuidValue, BelongsToFunction, Enabled)
Table.Insert(self, SqlCommand)
-
- def GetMaxID(self):
- SqlCommand = """select max(ID) from %s""" % self.Table
- self.Cur.execute(SqlCommand)
- for Item in self.Cur:
- return Item[0]
diff --git a/BaseTools/Source/Python/TargetTool/TargetTool.py b/BaseTools/Source/Python/TargetTool/TargetTool.py index 7f2479f..8b34425 100644 --- a/BaseTools/Source/Python/TargetTool/TargetTool.py +++ b/BaseTools/Source/Python/TargetTool/TargetTool.py @@ -46,7 +46,6 @@ class TargetTool(): return self.ConvertTextFileToDict(filename, '#', '=')
else:
raise ParseError('LoadTargetTxtFile() : No Target.txt file exists.')
- return 1
#
# Convert a text file to a dictionary
diff --git a/BaseTools/Source/Python/Trim/Trim.py b/BaseTools/Source/Python/Trim/Trim.py index 6d7bc05..ea9d1a0 100644 --- a/BaseTools/Source/Python/Trim/Trim.py +++ b/BaseTools/Source/Python/Trim/Trim.py @@ -248,6 +248,23 @@ def TrimPreprocessedVfr(Source, Target): except:
EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Target)
+# Create a banner to indicate the start and
+# end of the included ASL file. Banner looks like:-
+#
+# /*************************************
+# * @param *
+# *************************************/
+#
+# @param Pathname File pathname to be included in the banner
+#
+def AddIncludeHeader(Pathname):
+ StartLine = "/*" + '*' * (len(Pathname) + 4)
+ EndLine = '*' * (len(Pathname) + 4) + "*/"
+ Banner = '\n' + StartLine
+ Banner += '\n' + ('{0} {1} {0}'.format('*', Pathname))
+ Banner += '\n' + EndLine + '\n'
+ return Banner
+
## Read the content ASL file, including ASL included, recursively
#
# @param Source File to be read
@@ -276,16 +293,18 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None, Inclu try:
with open(IncludeFile, "r") as File:
F = File.readlines()
- except:
+ except Exception:
with codecs.open(IncludeFile, "r", encoding='utf-8') as File:
F = File.readlines()
break
else:
- EdkLogger.error("Trim", "Failed to find include file %s" % Source)
+ EdkLogger.error("Trim", FILE_NOT_FOUND, ExtraData="Failed to find include file %s" % Source)
return []
- except:
- EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
- return []
+ except Exception as e:
+ if str(e) == str(FILE_NOT_FOUND):
+ raise
+ else:
+ EdkLogger.error("Trim", FILE_OPEN_FAILURE, ExtraData=Source)
# avoid A "include" B and B "include" A
@@ -312,7 +331,9 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None, Inclu LocalSearchPath = os.path.dirname(IncludeFile)
CurrentIndent = Indent + Result[0][0]
IncludedFile = Result[0][1]
+ NewFileContent.append(AddIncludeHeader(IncludedFile+" --START"))
NewFileContent.extend(DoInclude(IncludedFile, CurrentIndent, IncludePathList, LocalSearchPath,IncludeFileList,filetype))
+ NewFileContent.append(AddIncludeHeader(IncludedFile+" --END"))
NewFileContent.append("\n")
elif filetype == "ASM":
Result = gIncludePattern.findall(Line)
@@ -324,7 +345,9 @@ def DoInclude(Source, Indent='', IncludePathList=[], LocalSearchPath=None, Inclu IncludedFile = IncludedFile.strip()
IncludedFile = os.path.normpath(IncludedFile)
+ NewFileContent.append(AddIncludeHeader(IncludedFile+" --START"))
NewFileContent.extend(DoInclude(IncludedFile, '', IncludePathList, LocalSearchPath,IncludeFileList,filetype))
+ NewFileContent.append(AddIncludeHeader(IncludedFile+" --END"))
NewFileContent.append("\n")
gIncludedAslFile.pop()
diff --git a/BaseTools/Source/Python/UPT/Library/CommentGenerating.py b/BaseTools/Source/Python/UPT/Library/CommentGenerating.py index bded508..ac09ab8 100644 --- a/BaseTools/Source/Python/UPT/Library/CommentGenerating.py +++ b/BaseTools/Source/Python/UPT/Library/CommentGenerating.py @@ -16,7 +16,6 @@ CommentGenerating from Library.StringUtils import GetSplitValueList
from Library.DataType import TAB_SPACE_SPLIT
from Library.DataType import TAB_INF_GUIDTYPE_VAR
-from Library.DataType import USAGE_ITEM_NOTIFY
from Library.DataType import ITEM_UNDEFINED
from Library.DataType import TAB_HEADER_COMMENT
from Library.DataType import TAB_BINARY_HEADER_COMMENT
@@ -44,18 +43,6 @@ def GenTailCommentLines (TailCommentLines, LeadingSpaceNum = 0): return CommentStr
-## GenGenericComment
-#
-# @param CommentLines: Generic comment Text, maybe Multiple Lines
-#
-def GenGenericComment (CommentLines):
- if not CommentLines:
- return ''
- CommentLines = CommentLines.rstrip(END_OF_LINE)
- CommentStr = TAB_SPECIAL_COMMENT + TAB_SPACE_SPLIT + (END_OF_LINE + TAB_COMMENT_SPLIT + TAB_SPACE_SPLIT).join\
- (GetSplitValueList(CommentLines, END_OF_LINE)) + END_OF_LINE
- return CommentStr
-
## GenGenericCommentF
#
# similar to GenGenericComment but will remove <EOL> at end of comment once,
@@ -163,57 +150,6 @@ def GenHeaderCommentSection(Abstract, Description, Copyright, License, IsBinaryH return Content
-
-## GenInfPcdTailComment
-# Generate Pcd tail comment for Inf, this would be one line comment
-#
-# @param Usage: Usage type
-# @param TailCommentText: Comment text for tail comment
-#
-def GenInfPcdTailComment (Usage, TailCommentText):
- if (Usage == ITEM_UNDEFINED) and (not TailCommentText):
- return ''
-
- CommentLine = TAB_SPACE_SPLIT.join([Usage, TailCommentText])
- return GenTailCommentLines(CommentLine)
-
-## GenInfProtocolPPITailComment
-# Generate Protocol/PPI tail comment for Inf
-#
-# @param Usage: Usage type
-# @param TailCommentText: Comment text for tail comment
-#
-def GenInfProtocolPPITailComment (Usage, Notify, TailCommentText):
- if (not Notify) and (Usage == ITEM_UNDEFINED) and (not TailCommentText):
- return ''
-
- if Notify:
- CommentLine = USAGE_ITEM_NOTIFY + " ## "
- else:
- CommentLine = ''
-
- CommentLine += TAB_SPACE_SPLIT.join([Usage, TailCommentText])
- return GenTailCommentLines(CommentLine)
-
-## GenInfGuidTailComment
-# Generate Guid tail comment for Inf
-#
-# @param Usage: Usage type
-# @param TailCommentText: Comment text for tail comment
-#
-def GenInfGuidTailComment (Usage, GuidTypeList, VariableName, TailCommentText):
- GuidType = GuidTypeList[0]
- if (Usage == ITEM_UNDEFINED) and (GuidType == ITEM_UNDEFINED) and \
- (not TailCommentText):
- return ''
-
- FirstLine = Usage + " ## " + GuidType
- if GuidType == TAB_INF_GUIDTYPE_VAR:
- FirstLine += ":" + VariableName
-
- CommentLine = TAB_SPACE_SPLIT.join([FirstLine, TailCommentText])
- return GenTailCommentLines(CommentLine)
-
## GenDecGuidTailComment
#
# @param SupModuleList: Supported module type list
diff --git a/BaseTools/Source/Python/UPT/Library/CommentParsing.py b/BaseTools/Source/Python/UPT/Library/CommentParsing.py index 7ba9830..7b1ce05 100644 --- a/BaseTools/Source/Python/UPT/Library/CommentParsing.py +++ b/BaseTools/Source/Python/UPT/Library/CommentParsing.py @@ -238,7 +238,7 @@ def ParseDecPcdGenericComment (GenericComment, ContainerFile, TokenSpaceGuidCNam #
# To replace Macro
#
- MACRO_PATTERN = '[\t\s]*\$\([A-Z][_A-Z0-9]*\)'
+ MACRO_PATTERN = r'[\t\s]*\$\([A-Z][_A-Z0-9]*\)'
MatchedStrs = re.findall(MACRO_PATTERN, Comment)
for MatchedStr in MatchedStrs:
if MatchedStr:
diff --git a/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py b/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py index 7718ca1..2c0750e 100644 --- a/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py +++ b/BaseTools/Source/Python/UPT/Library/ExpressionValidate.py @@ -66,13 +66,13 @@ class _ExprError(Exception): ## _ExprBase
#
class _ExprBase:
- HEX_PATTERN = '[\t\s]*0[xX][a-fA-F0-9]+'
- INT_PATTERN = '[\t\s]*[0-9]+'
- MACRO_PATTERN = '[\t\s]*\$\(([A-Z][_A-Z0-9]*)\)'
+ HEX_PATTERN = r'[\t\s]*0[xX][a-fA-F0-9]+'
+ INT_PATTERN = r'[\t\s]*[0-9]+'
+ MACRO_PATTERN = r'[\t\s]*\$\(([A-Z][_A-Z0-9]*)\)'
PCD_PATTERN = \
- '[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*'
- QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
- BOOL_PATTERN = '[\t\s]*(true|True|TRUE|false|False|FALSE)'
+ r'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*'
+ QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
+ BOOL_PATTERN = r'[\t\s]*(true|True|TRUE|false|False|FALSE)'
def __init__(self, Token):
self.Token = Token
self.Index = 0
@@ -303,9 +303,9 @@ class _LogicalExpressionParser(_ExprBase): ## _ValidRangeExpressionParser
#
class _ValidRangeExpressionParser(_ExprBase):
- INT_RANGE_PATTERN = '[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
+ INT_RANGE_PATTERN = r'[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
HEX_RANGE_PATTERN = \
- '[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
+ r'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.Parens = 0
@@ -407,7 +407,7 @@ class _ValidRangeExpressionParser(_ExprBase): ## _ValidListExpressionParser
#
class _ValidListExpressionParser(_ExprBase):
- VALID_LIST_PATTERN = '(0[xX][0-9a-fA-F]+|[0-9]+)([\t\s]*,[\t\s]*(0[xX][0-9a-fA-F]+|[0-9]+))*'
+ VALID_LIST_PATTERN = r'(0[xX][0-9a-fA-F]+|[0-9]+)([\t\s]*,[\t\s]*(0[xX][0-9a-fA-F]+|[0-9]+))*'
def __init__(self, Token):
_ExprBase.__init__(self, Token)
self.NUM = 1
diff --git a/BaseTools/Source/Python/UPT/Library/Misc.py b/BaseTools/Source/Python/UPT/Library/Misc.py index 77ba358..554d1ec 100644 --- a/BaseTools/Source/Python/UPT/Library/Misc.py +++ b/BaseTools/Source/Python/UPT/Library/Misc.py @@ -69,11 +69,11 @@ def GuidStringToGuidStructureString(Guid): def CheckGuidRegFormat(GuidValue):
## Regular expression used to find out register format of GUID
#
- RegFormatGuidPattern = re.compile("^\s*([0-9a-fA-F]){8}-"
+ RegFormatGuidPattern = re.compile(r"^\s*([0-9a-fA-F]){8}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
- "([0-9a-fA-F]){12}\s*$")
+ r"([0-9a-fA-F]){12}\s*$")
if RegFormatGuidPattern.match(GuidValue):
return True
@@ -495,41 +495,6 @@ def IsAllModuleList(ModuleList): else:
return True
-## Dictionary that use comment(GenericComment, TailComment) as value,
-# if a new comment which key already in the dic is inserted, then the
-# comment will be merged.
-# Key is (Statement, SupArch), when TailComment is added, it will ident
-# according to Statement
-#
-class MergeCommentDict(dict):
- ## []= operator
- #
- def __setitem__(self, Key, CommentVal):
- GenericComment, TailComment = CommentVal
- if Key in self:
- OrigVal1, OrigVal2 = dict.__getitem__(self, Key)
- Statement = Key[0]
- dict.__setitem__(self, Key, (OrigVal1 + GenericComment, OrigVal2 \
- + len(Statement) * ' ' + TailComment))
- else:
- dict.__setitem__(self, Key, (GenericComment, TailComment))
-
- ## =[] operator
- #
- def __getitem__(self, Key):
- return dict.__getitem__(self, Key)
-
-
-## GenDummyHelpTextObj
-#
-# @retval HelpTxt: Generated dummy help text object
-#
-def GenDummyHelpTextObj():
- HelpTxt = TextObject()
- HelpTxt.SetLang(TAB_LANGUAGE_EN_US)
- HelpTxt.SetString(' ')
- return HelpTxt
-
## ConvertVersionToDecimal, the minor version should be within 0 - 99
# <HexVersion> ::= "0x" <Major> <Minor>
# <Major> ::= (a-fA-F0-9){4}
@@ -837,8 +802,8 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo): ST.ERR_FILE_OPEN_FAILURE,
File=FullFileName)
- ReFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
- ReVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
+ ReFileGuidPattern = re.compile(r"^\s*FILE_GUID\s*=.*$")
+ ReVerStringPattern = re.compile(r"^\s*VERSION_STRING\s*=.*$")
FileLinesList = ProcessLineExtender(FileLinesList)
@@ -978,7 +943,7 @@ def ValidateUNIFilePath(Path): #
# Check if the file name is valid according to the DEC and INF specification
#
- Pattern = '[a-zA-Z0-9_][a-zA-Z0-9_\-\.]*'
+ Pattern = r'[a-zA-Z0-9_][a-zA-Z0-9_\-\.]*'
FileName = Path.replace(Suffix, '')
InvalidCh = re.sub(Pattern, '', FileName)
if InvalidCh:
diff --git a/BaseTools/Source/Python/UPT/Library/ParserValidate.py b/BaseTools/Source/Python/UPT/Library/ParserValidate.py index 62f4061..cfa51e7 100644 --- a/BaseTools/Source/Python/UPT/Library/ParserValidate.py +++ b/BaseTools/Source/Python/UPT/Library/ParserValidate.py @@ -15,7 +15,6 @@ import re import platform
from Library.DataType import MODULE_LIST
-from Library.DataType import COMPONENT_TYPE_LIST
from Library.DataType import PCD_USAGE_TYPE_LIST_OF_MODULE
from Library.DataType import TAB_SPACE_SPLIT
from Library.StringUtils import GetSplitValueList
@@ -104,43 +103,6 @@ def IsValidInfMoudleType(ModuleType): else:
return False
-## Is Valid Component Type or not
-#
-# @param ComponentType: A string contain ComponentType need to be judged.
-#
-def IsValidInfComponentType(ComponentType):
- if ComponentType.upper() in COMPONENT_TYPE_LIST:
- return True
- else:
- return False
-
-
-## Is valid Tool Family or not
-#
-# @param ToolFamily: A string contain Tool Family need to be judged.
-# Family := [A-Z]([a-zA-Z0-9])*
-#
-def IsValidToolFamily(ToolFamily):
- ReIsValidFamily = re.compile(r"^[A-Z]+[A-Za-z0-9]{0,}$", re.DOTALL)
- if ReIsValidFamily.match(ToolFamily) is None:
- return False
- return True
-
-## Is valid Tool TagName or not
-#
-# The TagName sample is MYTOOLS and VS2005.
-#
-# @param TagName: A string contain Tool TagName need to be judged.
-#
-def IsValidToolTagName(TagName):
- if TagName.strip() == '':
- return True
- if TagName.strip() == '*':
- return True
- if not IsValidWord(TagName):
- return False
- return True
-
## Is valid arch or not
#
# @param Arch The arch string need to be validated
@@ -456,37 +418,6 @@ def IsValidHexVersion(Word): return True
-## IsValidBuildNumber
-#
-# Check whether the BUILD_NUMBER is valid.
-# ["BUILD_NUMBER" "=" <Integer>{1,4} <EOL>]
-#
-# @param Word: The BUILD_NUMBER string need to be checked.
-#
-def IsValidBuildNumber(Word):
- ReIsValieBuildNumber = re.compile(r"[0-9]{1,4}$", re.DOTALL)
- if ReIsValieBuildNumber.match(Word) is None:
- return False
-
- return True
-
-## IsValidDepex
-#
-# Check whether the Depex is valid.
-#
-# @param Word: The Depex string need to be checked.
-#
-def IsValidDepex(Word):
- Index = Word.upper().find("PUSH")
- if Index > -1:
- return IsValidCFormatGuid(Word[Index+4:].strip())
-
- ReIsValidCName = re.compile(r"^[A-Za-z_][0-9A-Za-z_\s\.]*$", re.DOTALL)
- if ReIsValidCName.match(Word) is None:
- return False
-
- return True
-
## IsValidNormalizedString
#
# Check
@@ -533,25 +464,6 @@ def IsValidIdString(String): return False
-## IsValidVersionString
-#
-# Check whether the VersionString is valid.
-# <AsciiString> ::= [ [<WhiteSpace>]{0,} [<AsciiChars>]{0,} ] {0,}
-# <WhiteSpace> ::= {<Tab>} {<Space>}
-# <Tab> ::= 0x09
-# <Space> ::= 0x20
-# <AsciiChars> ::= (0x21 - 0x7E)
-#
-# @param VersionString: The VersionString need to be checked.
-#
-def IsValidVersionString(VersionString):
- VersionString = VersionString.strip()
- for Char in VersionString:
- if not (Char >= 0x21 and Char <= 0x7E):
- return False
-
- return True
-
## IsValidPcdValue
#
# Check whether the PcdValue is valid.
@@ -715,13 +627,3 @@ def IsValidUserId(UserId): if Char == '.' and not Quoted:
return False
return True
-
-#
-# Check if a UTF16-LE file has a BOM header
-#
-def CheckUTF16FileHeader(File):
- FileIn = open(File, 'rb').read(2)
- if FileIn != b'\xff\xfe':
- return False
-
- return True
diff --git a/BaseTools/Source/Python/UPT/Library/Parsing.py b/BaseTools/Source/Python/UPT/Library/Parsing.py index 6fb1337..5f2a448 100644 --- a/BaseTools/Source/Python/UPT/Library/Parsing.py +++ b/BaseTools/Source/Python/UPT/Library/Parsing.py @@ -41,29 +41,6 @@ from . import GlobalData gPKG_INFO_DICT = {}
-## GetBuildOption
-#
-# Parse a string with format "[<Family>:]<ToolFlag>=Flag"
-# Return (Family, ToolFlag, Flag)
-#
-# @param String: String with BuildOption statement
-# @param File: The file which defines build option, used in error report
-#
-def GetBuildOption(String, File, LineNo= -1):
- (Family, ToolChain, Flag) = ('', '', '')
- if String.find(DataType.TAB_EQUAL_SPLIT) < 0:
- RaiseParserError(String, 'BuildOptions', File, \
- '[<Family>:]<ToolFlag>=Flag', LineNo)
- else:
- List = GetSplitValueList(String, DataType.TAB_EQUAL_SPLIT, MaxSplit=1)
- if List[0].find(':') > -1:
- Family = List[0][ : List[0].find(':')].strip()
- ToolChain = List[0][List[0].find(':') + 1 : ].strip()
- else:
- ToolChain = List[0].strip()
- Flag = List[1].strip()
- return (Family, ToolChain, Flag)
-
## Get Library Class
#
# Get Library of Dsc as <LibraryClassKeyWord>|<LibraryInstance>
@@ -88,37 +65,6 @@ def GetLibraryClass(Item, ContainerFile, WorkspaceDir, LineNo= -1): return (List[0], List[1], SupMod)
-## Get Library Class
-#
-# Get Library of Dsc as <LibraryClassKeyWord>[|<LibraryInstance>]
-# [|<TokenSpaceGuidCName>.<PcdCName>]
-#
-# @param Item: String as <LibraryClassKeyWord>|<LibraryInstance>
-# @param ContainerFile: The file which describes the library class, used for
-# error report
-#
-def GetLibraryClassOfInf(Item, ContainerFile, WorkspaceDir, LineNo= -1):
- ItemList = GetSplitValueList((Item[0] + DataType.TAB_VALUE_SPLIT * 2))
- SupMod = DataType.SUP_MODULE_LIST_STRING
-
- if len(ItemList) > 5:
- RaiseParserError\
- (Item[0], 'LibraryClasses', ContainerFile, \
- '<LibraryClassKeyWord>[|<LibraryInstance>]\
- [|<TokenSpaceGuidCName>.<PcdCName>]')
- else:
- CheckFileType(ItemList[1], '.Inf', ContainerFile, 'LibraryClasses', \
- Item[0], LineNo)
- CheckFileExist(WorkspaceDir, ItemList[1], ContainerFile, \
- 'LibraryClasses', Item[0], LineNo)
- if ItemList[2] != '':
- CheckPcdTokenInfo(ItemList[2], 'LibraryClasses', \
- ContainerFile, LineNo)
- if Item[1] != '':
- SupMod = Item[1]
-
- return (ItemList[0], ItemList[1], ItemList[2], SupMod)
-
## CheckPcdTokenInfo
#
# Check if PcdTokenInfo is following <TokenSpaceGuidCName>.<PcdCName>
@@ -136,414 +82,6 @@ def CheckPcdTokenInfo(TokenInfoString, Section, File, LineNo= -1): RaiseParserError(TokenInfoString, Section, File, Format, LineNo)
-## Get Pcd
-#
-# Get Pcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<Value>
-# [|<Type>|<MaximumDatumSize>]
-#
-# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
-# <Value>[|<Type>|<MaximumDatumSize>]
-# @param ContainerFile: The file which describes the pcd, used for error
-# report
-
-#
-def GetPcd(Item, Type, ContainerFile, LineNo= -1):
- TokenGuid, TokenName, Value, MaximumDatumSize, Token = '', '', '', '', ''
- List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
-
- if len(List) < 4 or len(List) > 6:
- RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
- '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
- [|<Type>|<MaximumDatumSize>]', LineNo)
- else:
- Value = List[1]
- MaximumDatumSize = List[2]
- Token = List[3]
-
- if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
- (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
-
- return (TokenName, TokenGuid, Value, MaximumDatumSize, Token, Type)
-
-## Get FeatureFlagPcd
-#
-# Get FeatureFlagPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE
-#
-# @param Item: String as <PcdTokenSpaceGuidCName>
-# .<TokenCName>|TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
-# report
-#
-def GetFeatureFlagPcd(Item, Type, ContainerFile, LineNo= -1):
- TokenGuid, TokenName, Value = '', '', ''
- List = GetSplitValueList(Item)
- if len(List) != 2:
- RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
- '<PcdTokenSpaceGuidCName>.<TokenCName>|TRUE/FALSE', \
- LineNo)
- else:
- Value = List[1]
- if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
- (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
-
- return (TokenName, TokenGuid, Value, Type)
-
-## Get DynamicDefaultPcd
-#
-# Get DynamicDefaultPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>
-# |<Value>[|<DatumTyp>[|<MaxDatumSize>]]
-#
-# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
-# TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
-# report
-#
-def GetDynamicDefaultPcd(Item, Type, ContainerFile, LineNo= -1):
- TokenGuid, TokenName, Value, DatumTyp, MaxDatumSize = '', '', '', '', ''
- List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
- if len(List) < 4 or len(List) > 8:
- RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
- '<PcdTokenSpaceGuidCName>.<TokenCName>|<Value>\
- [|<DatumTyp>[|<MaxDatumSize>]]', LineNo)
- else:
- Value = List[1]
- DatumTyp = List[2]
- MaxDatumSize = List[3]
- if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
- (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
-
- return (TokenName, TokenGuid, Value, DatumTyp, MaxDatumSize, Type)
-
-## Get DynamicHiiPcd
-#
-# Get DynamicHiiPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|<String>|
-# <VariableGuidCName>|<VariableOffset>[|<DefaultValue>[|<MaximumDatumSize>]]
-#
-# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>|
-# TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
-# report
-#
-def GetDynamicHiiPcd(Item, Type, ContainerFile, LineNo= -1):
- TokenGuid, TokenName, List1, List2, List3, List4, List5 = \
- '', '', '', '', '', '', ''
- List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT * 2)
- if len(List) < 6 or len(List) > 8:
- RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
- '<PcdTokenSpaceGuidCName>.<TokenCName>|<String>|\
- <VariableGuidCName>|<VariableOffset>[|<DefaultValue>\
- [|<MaximumDatumSize>]]', LineNo)
- else:
- List1, List2, List3, List4, List5 = \
- List[1], List[2], List[3], List[4], List[5]
- if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
- (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
-
- return (TokenName, TokenGuid, List1, List2, List3, List4, List5, Type)
-
-## Get DynamicVpdPcd
-#
-# Get DynamicVpdPcd of Dsc as <PcdTokenSpaceGuidCName>.<TokenCName>|
-# <VpdOffset>[|<MaximumDatumSize>]
-#
-# @param Item: String as <PcdTokenSpaceGuidCName>.<TokenCName>
-# |TRUE/FALSE
-# @param ContainerFile: The file which describes the pcd, used for error
-# report
-#
-def GetDynamicVpdPcd(Item, Type, ContainerFile, LineNo= -1):
- TokenGuid, TokenName, List1, List2 = '', '', '', ''
- List = GetSplitValueList(Item + DataType.TAB_VALUE_SPLIT)
- if len(List) < 3 or len(List) > 4:
- RaiseParserError(Item, 'Pcds' + Type, ContainerFile, \
- '<PcdTokenSpaceGuidCName>.<TokenCName>|<VpdOffset>\
- [|<MaximumDatumSize>]', LineNo)
- else:
- List1, List2 = List[1], List[2]
- if CheckPcdTokenInfo(List[0], 'Pcds' + Type, ContainerFile, LineNo):
- (TokenGuid, TokenName) = GetSplitValueList(List[0], DataType.TAB_SPLIT)
-
- return (TokenName, TokenGuid, List1, List2, Type)
-
-## GetComponent
-#
-# Parse block of the components defined in dsc file
-# Set KeyValues as [ ['component name', [lib1, lib2, lib3],
-# [bo1, bo2, bo3], [pcd1, pcd2, pcd3]], ...]
-#
-# @param Lines: The content to be parsed
-# @param KeyValues: To store data after parsing
-#
-def GetComponent(Lines, KeyValues):
- (FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
- FindPcdsDynamicEx) = (False, False, False, False, False, False, False, \
- False)
- ListItem = None
- LibraryClassItem = []
- BuildOption = []
- Pcd = []
-
- for Line in Lines:
- Line = Line[0]
- #
- # Ignore !include statement
- #
- if Line.upper().find(DataType.TAB_INCLUDE.upper() + ' ') > -1 or \
- Line.upper().find(DataType.TAB_DEFINE + ' ') > -1:
- continue
-
- if FindBlock == False:
- ListItem = Line
- #
- # find '{' at line tail
- #
- if Line.endswith('{'):
- FindBlock = True
- ListItem = CleanString(Line.rsplit('{', 1)[0], \
- DataType.TAB_COMMENT_SPLIT)
-
- #
- # Parse a block content
- #
- if FindBlock:
- if Line.find('<LibraryClasses>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (True, False, False, False, False, False, False)
- continue
- if Line.find('<BuildOptions>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, True, False, False, False, False, False)
- continue
- if Line.find('<PcdsFeatureFlag>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, True, False, False, False, False)
- continue
- if Line.find('<PcdsPatchableInModule>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, True, False, False, False)
- continue
- if Line.find('<PcdsFixedAtBuild>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, True, False, False)
- continue
- if Line.find('<PcdsDynamic>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, True, False)
- continue
- if Line.find('<PcdsDynamicEx>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, False, True)
- continue
- if Line.endswith('}'):
- #
- # find '}' at line tail
- #
- KeyValues.append([ListItem, LibraryClassItem, \
- BuildOption, Pcd])
- (FindBlock, FindLibraryClass, FindBuildOption, \
- FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
- FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, False, False, False)
- LibraryClassItem, BuildOption, Pcd = [], [], []
- continue
-
- if FindBlock:
- if FindLibraryClass:
- LibraryClassItem.append(Line)
- elif FindBuildOption:
- BuildOption.append(Line)
- elif FindPcdsFeatureFlag:
- Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG_NULL, Line))
- elif FindPcdsPatchableInModule:
- Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE_NULL, Line))
- elif FindPcdsFixedAtBuild:
- Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD_NULL, Line))
- elif FindPcdsDynamic:
- Pcd.append((DataType.TAB_PCDS_DYNAMIC_DEFAULT_NULL, Line))
- elif FindPcdsDynamicEx:
- Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX_DEFAULT_NULL, Line))
- else:
- KeyValues.append([ListItem, [], [], []])
-
- return True
-
-## GetExec
-#
-# Parse a string with format "InfFilename [EXEC = ExecFilename]"
-# Return (InfFilename, ExecFilename)
-#
-# @param String: String with EXEC statement
-#
-def GetExec(String):
- InfFilename = ''
- ExecFilename = ''
- if String.find('EXEC') > -1:
- InfFilename = String[ : String.find('EXEC')].strip()
- ExecFilename = String[String.find('EXEC') + len('EXEC') : ].strip()
- else:
- InfFilename = String.strip()
-
- return (InfFilename, ExecFilename)
-
-## GetComponents
-#
-# Parse block of the components defined in dsc file
-# Set KeyValues as [ ['component name', [lib1, lib2, lib3], [bo1, bo2, bo3],
-# [pcd1, pcd2, pcd3]], ...]
-#
-# @param Lines: The content to be parsed
-# @param Key: Reserved
-# @param KeyValues: To store data after parsing
-# @param CommentCharacter: Comment char, used to ignore comment content
-#
-# @retval True Get component successfully
-#
-def GetComponents(Lines, KeyValues, CommentCharacter):
- if Lines.find(DataType.TAB_SECTION_END) > -1:
- Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
- (FindBlock, FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, FindPcdsDynamic, \
- FindPcdsDynamicEx) = \
- (False, False, False, False, False, False, False, False)
- ListItem = None
- LibraryClassItem = []
- BuildOption = []
- Pcd = []
-
- LineList = Lines.split('\n')
- for Line in LineList:
- Line = CleanString(Line, CommentCharacter)
- if Line is None or Line == '':
- continue
-
- if FindBlock == False:
- ListItem = Line
- #
- # find '{' at line tail
- #
- if Line.endswith('{'):
- FindBlock = True
- ListItem = CleanString(Line.rsplit('{', 1)[0], CommentCharacter)
-
- #
- # Parse a block content
- #
- if FindBlock:
- if Line.find('<LibraryClasses>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (True, False, False, False, False, False, False)
- continue
- if Line.find('<BuildOptions>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, True, False, False, False, False, False)
- continue
- if Line.find('<PcdsFeatureFlag>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, True, False, False, False, False)
- continue
- if Line.find('<PcdsPatchableInModule>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, True, False, False, False)
- continue
- if Line.find('<PcdsFixedAtBuild>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, True, False, False)
- continue
- if Line.find('<PcdsDynamic>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, True, False)
- continue
- if Line.find('<PcdsDynamicEx>') != -1:
- (FindLibraryClass, FindBuildOption, FindPcdsFeatureFlag, \
- FindPcdsPatchableInModule, FindPcdsFixedAtBuild, \
- FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, False, True)
- continue
- if Line.endswith('}'):
- #
- # find '}' at line tail
- #
- KeyValues.append([ListItem, LibraryClassItem, BuildOption, \
- Pcd])
- (FindBlock, FindLibraryClass, FindBuildOption, \
- FindPcdsFeatureFlag, FindPcdsPatchableInModule, \
- FindPcdsFixedAtBuild, FindPcdsDynamic, FindPcdsDynamicEx) = \
- (False, False, False, False, False, False, False, False)
- LibraryClassItem, BuildOption, Pcd = [], [], []
- continue
-
- if FindBlock:
- if FindLibraryClass:
- LibraryClassItem.append(Line)
- elif FindBuildOption:
- BuildOption.append(Line)
- elif FindPcdsFeatureFlag:
- Pcd.append((DataType.TAB_PCDS_FEATURE_FLAG, Line))
- elif FindPcdsPatchableInModule:
- Pcd.append((DataType.TAB_PCDS_PATCHABLE_IN_MODULE, Line))
- elif FindPcdsFixedAtBuild:
- Pcd.append((DataType.TAB_PCDS_FIXED_AT_BUILD, Line))
- elif FindPcdsDynamic:
- Pcd.append((DataType.TAB_PCDS_DYNAMIC, Line))
- elif FindPcdsDynamicEx:
- Pcd.append((DataType.TAB_PCDS_DYNAMIC_EX, Line))
- else:
- KeyValues.append([ListItem, [], [], []])
-
- return True
-
-## Get Source
-#
-# Get Source of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>
-# [|<PcdFeatureFlag>]]]]
-#
-# @param Item: String as <Filename>[|<Family>[|<TagName>[|<ToolCode>
-# [|<PcdFeatureFlag>]]]]
-# @param ContainerFile: The file which describes the library class, used
-# for error report
-#
-def GetSource(Item, ContainerFile, FileRelativePath, LineNo= -1):
- ItemNew = Item + DataType.TAB_VALUE_SPLIT * 4
- List = GetSplitValueList(ItemNew)
- if len(List) < 5 or len(List) > 9:
- RaiseParserError(Item, 'Sources', ContainerFile, \
- '<Filename>[|<Family>[|<TagName>[|<ToolCode>\
- [|<PcdFeatureFlag>]]]]', LineNo)
- List[0] = NormPath(List[0])
- CheckFileExist(FileRelativePath, List[0], ContainerFile, 'Sources', \
- Item, LineNo)
- if List[4] != '':
- CheckPcdTokenInfo(List[4], 'Sources', ContainerFile, LineNo)
-
- return (List[0], List[1], List[2], List[3], List[4])
-
## Get Binary
#
# Get Binary of Inf as <Filename>[|<Family>[|<TagName>[|<ToolCode>
@@ -569,51 +107,6 @@ def GetBinary(Item, ContainerFile, LineNo= -1): elif len(List) == 3:
return (List[0], List[1], List[2], '')
-## Get Guids/Protocols/Ppis
-#
-# Get Guids/Protocols/Ppis of Inf as <GuidCName>[|<PcdFeatureFlag>]
-#
-# @param Item: String as <GuidCName>[|<PcdFeatureFlag>]
-# @param Type: Type of parsing string
-# @param ContainerFile: The file which describes the library class,
-# used for error report
-#
-def GetGuidsProtocolsPpisOfInf(Item):
- ItemNew = Item + DataType.TAB_VALUE_SPLIT
- List = GetSplitValueList(ItemNew)
- return (List[0], List[1])
-
-## Get Guids/Protocols/Ppis
-#
-# Get Guids/Protocols/Ppis of Dec as <GuidCName>=<GuidValue>
-#
-# @param Item: String as <GuidCName>=<GuidValue>
-# @param Type: Type of parsing string
-# @param ContainerFile: The file which describes the library class,
-# used for error report
-#
-def GetGuidsProtocolsPpisOfDec(Item, Type, ContainerFile, LineNo= -1):
- List = GetSplitValueList(Item, DataType.TAB_EQUAL_SPLIT)
- if len(List) != 2:
- RaiseParserError(Item, Type, ContainerFile, '<CName>=<GuidValue>', \
- LineNo)
- #
- #convert C-Format Guid to Register Format
- #
- if List[1][0] == '{' and List[1][-1] == '}':
- RegisterFormatGuid = GuidStructureStringToGuidString(List[1])
- if RegisterFormatGuid == '':
- RaiseParserError(Item, Type, ContainerFile, \
- 'CFormat or RegisterFormat', LineNo)
- else:
- if CheckGuidRegFormat(List[1]):
- RegisterFormatGuid = List[1]
- else:
- RaiseParserError(Item, Type, ContainerFile, \
- 'CFormat or RegisterFormat', LineNo)
-
- return (List[0], RegisterFormatGuid)
-
## GetPackage
#
# Get Package of Inf as <PackagePath>[|<PcdFeatureFlag>]
@@ -634,70 +127,6 @@ def GetPackage(Item, ContainerFile, FileRelativePath, LineNo= -1): return (List[0], List[1])
-## Get Pcd Values of Inf
-#
-# Get Pcd of Inf as <TokenSpaceGuidCName>.<PcdCName>[|<Value>]
-#
-# @param Item: The string describes pcd
-# @param Type: The type of Pcd
-# @param File: The file which describes the pcd, used for error report
-#
-def GetPcdOfInf(Item, Type, File, LineNo):
- Format = '<TokenSpaceGuidCName>.<PcdCName>[|<Value>]'
- TokenGuid, TokenName, Value, InfType = '', '', '', ''
-
- if Type == DataType.TAB_PCDS_FIXED_AT_BUILD:
- InfType = DataType.TAB_INF_FIXED_PCD
- elif Type == DataType.TAB_PCDS_PATCHABLE_IN_MODULE:
- InfType = DataType.TAB_INF_PATCH_PCD
- elif Type == DataType.TAB_PCDS_FEATURE_FLAG:
- InfType = DataType.TAB_INF_FEATURE_PCD
- elif Type == DataType.TAB_PCDS_DYNAMIC_EX:
- InfType = DataType.TAB_INF_PCD_EX
- elif Type == DataType.TAB_PCDS_DYNAMIC:
- InfType = DataType.TAB_INF_PCD
- List = GetSplitValueList(Item, DataType.TAB_VALUE_SPLIT, 1)
- TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
- if len(TokenInfo) != 2:
- RaiseParserError(Item, InfType, File, Format, LineNo)
- else:
- TokenGuid = TokenInfo[0]
- TokenName = TokenInfo[1]
-
- if len(List) > 1:
- Value = List[1]
- else:
- Value = None
- return (TokenGuid, TokenName, Value, InfType)
-
-
-## Get Pcd Values of Dec
-#
-# Get Pcd of Dec as <TokenSpcCName>.<TokenCName>|<Value>|<DatumType>|<Token>
-# @param Item: Pcd item
-# @param Type: Pcd type
-# @param File: Dec file
-# @param LineNo: Line number
-#
-def GetPcdOfDec(Item, Type, File, LineNo= -1):
- Format = '<TokenSpaceGuidCName>.<PcdCName>|<Value>|<DatumType>|<Token>'
- TokenGuid, TokenName, Value, DatumType, Token = '', '', '', '', ''
- List = GetSplitValueList(Item)
- if len(List) != 4:
- RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
- else:
- Value = List[1]
- DatumType = List[2]
- Token = List[3]
- TokenInfo = GetSplitValueList(List[0], DataType.TAB_SPLIT)
- if len(TokenInfo) != 2:
- RaiseParserError(Item, 'Pcds' + Type, File, Format, LineNo)
- else:
- TokenGuid = TokenInfo[0]
- TokenName = TokenInfo[1]
-
- return (TokenGuid, TokenName, Value, DatumType, Token, Type)
-
## Parse DEFINE statement
#
# Get DEFINE macros
@@ -725,60 +154,6 @@ def ParseDefine(LineValue, StartLine, Table, FileID, SectionName, \ '', '', Arch, SectionModel, FileID, StartLine, -1, \
StartLine, -1, 0)
-## InsertSectionItems
-#
-# Insert item data of a section to a dict
-#
-# @param Model: A model
-# @param CurrentSection: Current section
-# @param SectionItemList: Section item list
-# @param ArchList: Arch list
-# @param ThirdList: Third list
-# @param RecordSet: Record set
-#
-def InsertSectionItems(Model, SectionItemList, ArchList, \
- ThirdList, RecordSet):
- #
- # Insert each item data of a section
- #
- for Index in range(0, len(ArchList)):
- Arch = ArchList[Index]
- Third = ThirdList[Index]
- if Arch == '':
- Arch = DataType.TAB_ARCH_COMMON
-
- Records = RecordSet[Model]
- for SectionItem in SectionItemList:
- LineValue, StartLine, Comment = SectionItem[0], \
- SectionItem[1], SectionItem[2]
-
- Logger.Debug(4, ST.MSG_PARSING % LineValue)
- #
- # And then parse DEFINE statement
- #
- if LineValue.upper().find(DataType.TAB_DEFINE.upper() + ' ') > -1:
- continue
- #
- # At last parse other sections
- #
- IdNum = -1
- Records.append([LineValue, Arch, StartLine, IdNum, Third, Comment])
-
- if RecordSet != {}:
- RecordSet[Model] = Records
-
-## GenMetaDatSectionItem
-#
-# @param Key: A key
-# @param Value: A value
-# @param List: A list
-#
-def GenMetaDatSectionItem(Key, Value, List):
- if Key not in List:
- List[Key] = [Value]
- else:
- List[Key].append(Value)
-
## GetPkgInfoFromDec
#
# get package name, guid, version info from dec files
diff --git a/BaseTools/Source/Python/UPT/Library/StringUtils.py b/BaseTools/Source/Python/UPT/Library/StringUtils.py index fbc5177..fa6c121 100644 --- a/BaseTools/Source/Python/UPT/Library/StringUtils.py +++ b/BaseTools/Source/Python/UPT/Library/StringUtils.py @@ -23,7 +23,7 @@ from Logger import StringTable as ST #
# Regular expression for matching macro used in DSC/DEC/INF file inclusion
#
-gMACRO_PATTERN = re.compile("\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
+gMACRO_PATTERN = re.compile(r"\$\(([_A-Z][_A-Z0-9]*)\)", re.UNICODE)
## GetSplitValueList
#
@@ -75,50 +75,6 @@ def GenDefines(String, Arch, Defines): return -1
return 1
-## GetLibraryClassesWithModuleType
-#
-# Get Library Class definition when no module type defined
-#
-# @param Lines: The content to be parsed
-# @param Key: Reserved
-# @param KeyValues: To store data after parsing
-# @param CommentCharacter: Comment char, used to ignore comment content
-#
-def GetLibraryClassesWithModuleType(Lines, Key, KeyValues, CommentCharacter):
- NewKey = SplitModuleType(Key)
- Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
- LineList = Lines.splitlines()
- for Line in LineList:
- Line = CleanString(Line, CommentCharacter)
- if Line != '' and Line[0] != CommentCharacter:
- KeyValues.append([CleanString(Line, CommentCharacter), NewKey[1]])
-
- return True
-
-## GetDynamics
-#
-# Get Dynamic Pcds
-#
-# @param Lines: The content to be parsed
-# @param Key: Reserved
-# @param KeyValues: To store data after parsing
-# @param CommentCharacter: Comment char, used to ignore comment content
-#
-def GetDynamics(Lines, Key, KeyValues, CommentCharacter):
- #
- # Get SkuId Name List
- #
- SkuIdNameList = SplitModuleType(Key)
-
- Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
- LineList = Lines.splitlines()
- for Line in LineList:
- Line = CleanString(Line, CommentCharacter)
- if Line != '' and Line[0] != CommentCharacter:
- KeyValues.append([CleanString(Line, CommentCharacter), SkuIdNameList[1]])
-
- return True
-
## SplitModuleType
#
# Split ModuleType out of section defien to get key
@@ -337,29 +293,6 @@ def CleanString2(Line, CommentCharacter=DataType.TAB_COMMENT_SPLIT, AllowCppStyl return Line, Comment
-## GetMultipleValuesOfKeyFromLines
-#
-# Parse multiple strings to clean comment and spaces
-# The result is saved to KeyValues
-#
-# @param Lines: The content to be parsed
-# @param Key: Reserved
-# @param KeyValues: To store data after parsing
-# @param CommentCharacter: Comment char, used to ignore comment content
-#
-def GetMultipleValuesOfKeyFromLines(Lines, Key, KeyValues, CommentCharacter):
- if Key:
- pass
- if KeyValues:
- pass
- Lines = Lines.split(DataType.TAB_SECTION_END, 1)[1]
- LineList = Lines.split('\n')
- for Line in LineList:
- Line = CleanString(Line, CommentCharacter)
- if Line != '' and Line[0] != CommentCharacter:
- KeyValues += [Line]
- return True
-
## GetDefineValue
#
# Parse a DEFINE statement to get defined value
@@ -375,133 +308,6 @@ def GetDefineValue(String, Key, CommentCharacter): String = CleanString(String)
return String[String.find(Key + ' ') + len(Key + ' ') : ]
-## GetSingleValueOfKeyFromLines
-#
-# Parse multiple strings as below to get value of each definition line
-# Key1 = Value1
-# Key2 = Value2
-# The result is saved to Dictionary
-#
-# @param Lines: The content to be parsed
-# @param Dictionary: To store data after parsing
-# @param CommentCharacter: Comment char, be used to ignore comment content
-# @param KeySplitCharacter: Key split char, between key name and key value.
-# Key1 = Value1, '=' is the key split char
-# @param ValueSplitFlag: Value split flag, be used to decide if has
-# multiple values
-# @param ValueSplitCharacter: Value split char, be used to split multiple
-# values. Key1 = Value1|Value2, '|' is the value
-# split char
-#
-def GetSingleValueOfKeyFromLines(Lines, Dictionary, CommentCharacter, KeySplitCharacter, \
- ValueSplitFlag, ValueSplitCharacter):
- Lines = Lines.split('\n')
- Keys = []
- Value = ''
- DefineValues = ['']
- SpecValues = ['']
-
- for Line in Lines:
- #
- # Handle DEFINE and SPEC
- #
- if Line.find(DataType.TAB_INF_DEFINES_DEFINE + ' ') > -1:
- if '' in DefineValues:
- DefineValues.remove('')
- DefineValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_DEFINE, CommentCharacter))
- continue
- if Line.find(DataType.TAB_INF_DEFINES_SPEC + ' ') > -1:
- if '' in SpecValues:
- SpecValues.remove('')
- SpecValues.append(GetDefineValue(Line, DataType.TAB_INF_DEFINES_SPEC, CommentCharacter))
- continue
-
- #
- # Handle Others
- #
- LineList = Line.split(KeySplitCharacter, 1)
- if len(LineList) >= 2:
- Key = LineList[0].split()
- if len(Key) == 1 and Key[0][0] != CommentCharacter:
- #
- # Remove comments and white spaces
- #
- LineList[1] = CleanString(LineList[1], CommentCharacter)
- if ValueSplitFlag:
- Value = list(map(lambda x: x.strip(), LineList[1].split(ValueSplitCharacter)))
- else:
- Value = CleanString(LineList[1], CommentCharacter).splitlines()
-
- if Key[0] in Dictionary:
- if Key[0] not in Keys:
- Dictionary[Key[0]] = Value
- Keys.append(Key[0])
- else:
- Dictionary[Key[0]].extend(Value)
- else:
- Dictionary[DataType.TAB_INF_DEFINES_MACRO][Key[0]] = Value[0]
-
- if DefineValues == []:
- DefineValues = ['']
- if SpecValues == []:
- SpecValues = ['']
- Dictionary[DataType.TAB_INF_DEFINES_DEFINE] = DefineValues
- Dictionary[DataType.TAB_INF_DEFINES_SPEC] = SpecValues
-
- return True
-
-## The content to be parsed
-#
-# Do pre-check for a file before it is parsed
-# Check $()
-# Check []
-#
-# @param FileName: Used for error report
-# @param FileContent: File content to be parsed
-# @param SupSectionTag: Used for error report
-#
-def PreCheck(FileName, FileContent, SupSectionTag):
- if SupSectionTag:
- pass
- LineNo = 0
- IsFailed = False
- NewFileContent = ''
- for Line in FileContent.splitlines():
- LineNo = LineNo + 1
- #
- # Clean current line
- #
- Line = CleanString(Line)
- #
- # Remove commented line
- #
- if Line.find(DataType.TAB_COMMA_SPLIT) == 0:
- Line = ''
- #
- # Check $()
- #
- if Line.find('$') > -1:
- if Line.find('$(') < 0 or Line.find(')') < 0:
- Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
- #
- # Check []
- #
- if Line.find('[') > -1 or Line.find(']') > -1:
- #
- # Only get one '[' or one ']'
- #
- if not (Line.find('[') > -1 and Line.find(']') > -1):
- Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
- #
- # Regenerate FileContent
- #
- NewFileContent = NewFileContent + Line + '\r\n'
-
- if IsFailed:
- Logger.Error("Parser", FORMAT_INVALID, Line=LineNo, File=FileName, RaiseError=Logger.IS_RAISE_ERROR)
-
- return NewFileContent
-
## CheckFileType
#
# Check if the Filename is including ExtName
@@ -666,20 +472,6 @@ def GetHelpTextList(HelpTextClassList): List.extend(HelpText.String.split('\n'))
return List
-## Get String Array Length
-#
-# Get String Array Length
-#
-# @param String: the source string
-#
-def StringArrayLength(String):
- if String.startswith('L"'):
- return (len(String) - 3 + 1) * 2
- elif String.startswith('"'):
- return (len(String) - 2 + 1)
- else:
- return len(String.split()) + 1
-
## RemoveDupOption
#
# Remove Dup Option
@@ -707,27 +499,6 @@ def RemoveDupOption(OptionString, Which="/I", Against=None): ValueList.append(Val)
return " ".join(OptionList)
-## Check if the string is HexDgit
-#
-# Return true if all characters in the string are digits and there is at
-# least one character
-# or valid Hexs (started with 0x, following by hexdigit letters)
-# , false otherwise.
-# @param string: input string
-#
-def IsHexDigit(Str):
- try:
- int(Str, 10)
- return True
- except ValueError:
- if len(Str) > 2 and Str.upper().startswith('0X'):
- try:
- int(Str, 16)
- return True
- except ValueError:
- return False
- return False
-
## Check if the string is HexDgit and its integer value within limit of UINT32
#
# Return true if all characters in the string are digits and there is at
diff --git a/BaseTools/Source/Python/UPT/Library/UniClassObject.py b/BaseTools/Source/Python/UPT/Library/UniClassObject.py index 8c44dc2..1e7ca70 100644 --- a/BaseTools/Source/Python/UPT/Library/UniClassObject.py +++ b/BaseTools/Source/Python/UPT/Library/UniClassObject.py @@ -22,7 +22,6 @@ from Library.StringUtils import GetLineNo from Library.Misc import PathClass
from Library.Misc import GetCharIndexOutStr
from Library import DataType as DT
-from Library.ParserValidate import CheckUTF16FileHeader
##
# Static definitions
@@ -75,18 +74,6 @@ gLANG_CONV_TABLE = {'eng':'en', 'fra':'fr', \ 'wln':'wa', 'wol':'wo', 'xho':'xh', 'yid':'yi', 'yor':'yo', 'zha':'za', \
'zho':'zh', 'zul':'zu'}
-## Convert a python unicode string to a normal string
-#
-# Convert a python unicode string to a normal string
-# UniToStr(u'I am a string') is 'I am a string'
-#
-# @param Uni: The python unicode string
-#
-# @retval: The formatted normal string
-#
-def UniToStr(Uni):
- return repr(Uni)[2:-1]
-
## Convert a unicode string to a Hex list
#
# Convert a unicode string to a Hex list
@@ -134,40 +121,6 @@ def ConvertSpecialUnicodes(Uni): def GetLanguageCode1766(LangName, File=None):
return LangName
- length = len(LangName)
- if length == 2:
- if LangName.isalpha():
- for Key in gLANG_CONV_TABLE.keys():
- if gLANG_CONV_TABLE.get(Key) == LangName.lower():
- return Key
- elif length == 3:
- if LangName.isalpha() and gLANG_CONV_TABLE.get(LangName.lower()):
- return LangName
- else:
- EdkLogger.Error("Unicode File Parser",
- ToolError.FORMAT_INVALID,
- "Invalid RFC 1766 language code : %s" % LangName,
- File)
- elif length == 5:
- if LangName[0:2].isalpha() and LangName[2] == '-':
- for Key in gLANG_CONV_TABLE.keys():
- if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
- return Key
- elif length >= 6:
- if LangName[0:2].isalpha() and LangName[2] == '-':
- for Key in gLANG_CONV_TABLE.keys():
- if gLANG_CONV_TABLE.get(Key) == LangName[0:2].lower():
- return Key
- if LangName[0:3].isalpha() and gLANG_CONV_TABLE.get(LangName.lower()) is None and LangName[3] == '-':
- for Key in gLANG_CONV_TABLE.keys():
- if Key == LangName[0:3].lower():
- return Key
-
- EdkLogger.Error("Unicode File Parser",
- ToolError.FORMAT_INVALID,
- "Invalid RFC 4646 language code : %s" % LangName,
- File)
-
## GetLanguageCode
#
# Check the language code read from .UNI file and convert RFC 1766 codes to RFC 4646 codes if appropriate
@@ -259,7 +212,6 @@ def FormatUniEntry(StrTokenName, TokenValueList, ContainerFile): class StringDefClassObject(object):
def __init__(self, Name = None, Value = None, Referenced = False, Token = None, UseOtherLangDef = ''):
self.StringName = ''
- self.StringNameByteList = []
self.StringValue = ''
self.StringValueByteList = ''
self.Token = 0
@@ -269,7 +221,6 @@ class StringDefClassObject(object): if Name is not None:
self.StringName = Name
- self.StringNameByteList = UniToHexList(Name)
if Value is not None:
self.StringValue = Value
self.StringValueByteList = UniToHexList(self.StringValue)
@@ -407,15 +358,6 @@ class UniFileClassObject(object): self.AddStringToList(Name, Language, Value)
#
- # Get include file list and load them
- #
- def GetIncludeFile(self, Item, Dir = None):
- if Dir:
- pass
- FileName = Item[Item.find(u'!include ') + len(u'!include ') :Item.find(u' ', len(u'!include '))][1:-1]
- self.LoadUniFile(FileName)
-
- #
# Pre-process before parse .uni file
#
def PreProcess(self, File, IsIncludeFile=False):
@@ -977,26 +919,6 @@ class UniFileClassObject(object): Item.Referenced = True
#
- # Search the string in language definition by Name
- #
- def FindStringValue(self, Name, Lang):
- if Name in self.OrderedStringDict[Lang]:
- ItemIndexInList = self.OrderedStringDict[Lang][Name]
- return self.OrderedStringList[Lang][ItemIndexInList]
-
- return None
-
- #
- # Search the string in language definition by Token
- #
- def FindByToken(self, Token, Lang):
- for Item in self.OrderedStringList[Lang]:
- if Item.Token == Token:
- return Item
-
- return None
-
- #
# Re-order strings and re-generate tokens
#
def ReToken(self):
@@ -1050,25 +972,3 @@ class UniFileClassObject(object): print(Item)
for Member in self.OrderedStringList[Item]:
print(str(Member))
-
- #
- # Read content from '!include' UNI file
- #
- def ReadIncludeUNIfile(self, FilaPath):
- if self.File:
- pass
-
- if not os.path.exists(FilaPath) or not os.path.isfile(FilaPath):
- EdkLogger.Error("Unicode File Parser",
- ToolError.FILE_NOT_FOUND,
- ExtraData=FilaPath)
- try:
- FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_8').readlines()
- except UnicodeError as Xstr:
- FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_16').readlines()
- except UnicodeError:
- FileIn = codecs.open(FilaPath, mode='rb', encoding='utf_16_le').readlines()
- except:
- EdkLogger.Error("Unicode File Parser", ToolError.FILE_OPEN_FAILURE, ExtraData=FilaPath)
- return FileIn
-
diff --git a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py index 94e97fa..b12999a 100644 --- a/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py +++ b/BaseTools/Source/Python/UPT/Library/Xml/XmlRoutines.py @@ -150,33 +150,6 @@ def XmlElement2(Dom, String): except BaseException:
return ""
-
-## Get a single XML element of the current node.
-#
-# Return a single XML element specified by the current root Dom.
-# If the input Dom is not valid, then an empty string is returned.
-#
-# @param Dom The root XML DOM object.
-#
-def XmlElementData(Dom):
- try:
- return Dom.firstChild.data.strip()
- except BaseException:
- return ""
-
-
-## Get a list of XML elements using XPath style syntax.
-#
-# Return a list of XML elements from the root Dom specified by XPath String.
-# If the input Dom or String is not valid, then an empty list is returned.
-#
-# @param Dom The root XML DOM object.
-# @param String A XPath style path.
-#
-def XmlElementList(Dom, String):
- return list(map(XmlElementData, XmlList(Dom, String)))
-
-
## Get the XML attribute of the current node.
#
# Return a single XML attribute named Attribute from the current root Dom.
@@ -191,20 +164,6 @@ def XmlAttribute(Dom, Attribute): except BaseException:
return ''
-
-## Get the XML node name of the current node.
-#
-# Return a single XML node name from the current root Dom.
-# If the input Dom is not valid, then an empty string is returned.
-#
-# @param Dom The root XML DOM object.
-#
-def XmlNodeName(Dom):
- try:
- return Dom.nodeName.strip()
- except BaseException:
- return ''
-
## Parse an XML file.
#
# Parse the input XML file named FileName and return a XML DOM it stands for.
diff --git a/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py b/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py index 6e515a2..895f3b6 100644 --- a/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py +++ b/BaseTools/Source/Python/UPT/Object/POM/ModuleObject.py @@ -424,7 +424,7 @@ class ExternObject(CommonPropertiesObject): class DepexObject(CommonPropertiesObject):
def __init__(self):
self.Depex = ''
- self.ModuelType = ''
+ self.ModuleType = ''
CommonPropertiesObject.__init__(self)
def SetDepex(self, Depex):
@@ -434,10 +434,10 @@ class DepexObject(CommonPropertiesObject): return self.Depex
def SetModuleType(self, ModuleType):
- self.ModuelType = ModuleType
+ self.ModuleType = ModuleType
def GetModuleType(self):
- return self.ModuelType
+ return self.ModuleType
##
# PackageDependencyObject
diff --git a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py index a1b691f..fda1fc9 100644 --- a/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py +++ b/BaseTools/Source/Python/UPT/Object/Parser/InfDefineObject.py @@ -792,7 +792,6 @@ class InfDefSection(InfDefSectionOptionRomInfo): ErrorInInf(ST.ERR_INF_PARSER_DEFINE_FROMAT_INVALID%(Name),
LineInfo=self.CurrentLine)
return False
- return True
def GetSpecification(self):
return self.Specification
diff --git a/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py b/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py index 2799046..92dbcaa 100644 --- a/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py +++ b/BaseTools/Source/Python/UPT/Parser/DecParserMisc.py @@ -25,7 +25,7 @@ from Library.ExpressionValidate import IsValidStringTest from Library.Misc import CheckGuidRegFormat
TOOL_NAME = 'DecParser'
-VERSION_PATTERN = '[0-9]+(\.[0-9]+)?'
+VERSION_PATTERN = r'[0-9]+(\.[0-9]+)?'
CVAR_PATTERN = '[_a-zA-Z][a-zA-Z0-9_]*'
PCD_TOKEN_PATTERN = '(0[xX]0*[a-fA-F0-9]{1,8})|([0-9]+)'
MACRO_PATTERN = '[A-Z][_A-Z0-9]*'
diff --git a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py index 992b609..fd0795e 100644 --- a/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py +++ b/BaseTools/Source/Python/UPT/Parser/InfAsBuiltProcess.py @@ -53,12 +53,12 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName): #
# To deal with library instance specified by GUID and version
#
- RegFormatGuidPattern = re.compile("\s*([0-9a-fA-F]){8}-"
+ RegFormatGuidPattern = re.compile(r"\s*([0-9a-fA-F]){8}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
"([0-9a-fA-F]){4}-"
- "([0-9a-fA-F]){12}\s*")
- VersionPattern = re.compile('[\t\s]*\d+(\.\d+)?[\t\s]*')
+ r"([0-9a-fA-F]){12}\s*")
+ VersionPattern = re.compile(r'[\t\s]*\d+(\.\d+)?[\t\s]*')
GuidMatchedObj = RegFormatGuidPattern.search(String)
if String.upper().startswith('GUID') and GuidMatchedObj and 'Version' in String:
@@ -75,8 +75,8 @@ def GetLibInstanceInfo(String, WorkSpace, LineNo, CurrentInfFileName): FileLinesList = GetFileLineContent(String, WorkSpace, LineNo, OriginalString)
- ReFindFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
- ReFindVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
+ ReFindFileGuidPattern = re.compile(r"^\s*FILE_GUID\s*=.*$")
+ ReFindVerStringPattern = re.compile(r"^\s*VERSION_STRING\s*=.*$")
for Line in FileLinesList:
if ReFindFileGuidPattern.match(Line):
@@ -106,8 +106,8 @@ def GetPackageListInfo(FileNameString, WorkSpace, LineNo): FileLinesList = GetFileLineContent(FileNameString, WorkSpace, LineNo, '')
- RePackageHeader = re.compile('^\s*\[Packages.*\].*$')
- ReDefineHeader = re.compile('^\s*\[Defines].*$')
+ RePackageHeader = re.compile(r'^\s*\[Packages.*\].*$')
+ ReDefineHeader = re.compile(r'^\s*\[Defines].*$')
PackageHederFlag = False
DefineHeaderFlag = False
@@ -215,69 +215,3 @@ def GetFileLineContent(FileName, WorkSpace, LineNo, OriginalString): FileLinesList = ProcessLineExtender(FileLinesList)
return FileLinesList
-
-##
-# Get all INF files from current workspace
-#
-#
-def GetInfsFromWorkSpace(WorkSpace):
- InfFiles = []
- for top, dirs, files in os.walk(WorkSpace):
- dirs = dirs # just for pylint
- for File in files:
- if File.upper().endswith(".INF"):
- InfFiles.append(os.path.join(top, File))
-
- return InfFiles
-
-##
-# Get GUID and version from library instance file
-#
-#
-def GetGuidVerFormLibInstance(Guid, Version, WorkSpace, CurrentInfFileName):
- for InfFile in GetInfsFromWorkSpace(WorkSpace):
- try:
- if InfFile.strip().upper() == CurrentInfFileName.strip().upper():
- continue
- InfFile = InfFile.replace('\\', '/')
- if InfFile not in GlobalData.gLIBINSTANCEDICT:
- InfFileObj = open(InfFile, "r")
- GlobalData.gLIBINSTANCEDICT[InfFile] = InfFileObj
- else:
- InfFileObj = GlobalData.gLIBINSTANCEDICT[InfFile]
-
- except BaseException:
- Logger.Error("InfParser",
- ToolError.FILE_READ_FAILURE,
- ST.ERR_FILE_OPEN_FAILURE,
- File=InfFile)
- try:
- FileLinesList = InfFileObj.readlines()
- FileLinesList = ProcessLineExtender(FileLinesList)
-
- ReFindFileGuidPattern = re.compile("^\s*FILE_GUID\s*=.*$")
- ReFindVerStringPattern = re.compile("^\s*VERSION_STRING\s*=.*$")
-
- for Line in FileLinesList:
- if ReFindFileGuidPattern.match(Line):
- FileGuidString = Line
- if ReFindVerStringPattern.match(Line):
- VerString = Line
-
- if FileGuidString:
- FileGuidString = GetSplitValueList(FileGuidString, '=', 1)[1]
- if VerString:
- VerString = GetSplitValueList(VerString, '=', 1)[1]
-
- if FileGuidString.strip().upper() == Guid.upper() and \
- VerString.strip().upper() == Version.upper():
- return Guid, Version
-
- except BaseException:
- Logger.Error("InfParser", ToolError.FILE_READ_FAILURE, ST.ERR_FILE_OPEN_FAILURE, File=InfFile)
- finally:
- InfFileObj.close()
-
- return '', ''
-
-
diff --git a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py index a63e40e..9edcc2c 100644 --- a/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py +++ b/BaseTools/Source/Python/UPT/Parser/InfDefineSectionParser.py @@ -40,7 +40,7 @@ def GetValidateArchList(LineContent): TempArch = GetSplitValueList(TempArch, '(', 1)[0]
- ArchList = re.split('\s+', TempArch)
+ ArchList = re.split(r'\s+', TempArch)
NewArchList = []
for Arch in ArchList:
if IsValidArch(Arch):
diff --git a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py index d01ae9a..fd0f819 100644 --- a/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py +++ b/BaseTools/Source/Python/UPT/Parser/InfParserMisc.py @@ -109,7 +109,7 @@ def InfExpandMacro(Content, LineInfo, GlobalMacros=None, SectionMacros=None, Fla return Content
else:
for Macro in MacroUsed:
- gQuotedMacro = re.compile(".*\".*\$\(%s\).*\".*"%(Macro))
+ gQuotedMacro = re.compile(r".*\".*\$\(%s\).*\".*"%(Macro))
if not gQuotedMacro.match(Content):
#
# Still have MACROs can't be expanded.
@@ -130,8 +130,8 @@ def IsBinaryInf(FileLineList): if not FileLineList:
return False
- ReIsSourcesSection = re.compile("^\s*\[Sources.*\]\s.*$", re.IGNORECASE)
- ReIsBinarySection = re.compile("^\s*\[Binaries.*\]\s.*$", re.IGNORECASE)
+ ReIsSourcesSection = re.compile(r"^\s*\[Sources.*\]\s.*$", re.IGNORECASE)
+ ReIsBinarySection = re.compile(r"^\s*\[Binaries.*\]\s.*$", re.IGNORECASE)
BinarySectionFoundFlag = False
for Line in FileLineList:
@@ -155,7 +155,7 @@ def IsBinaryInf(FileLineList): # @return Flag
#
def IsLibInstanceInfo(String):
- ReIsLibInstance = re.compile("^\s*##\s*@LIB_INSTANCES\s*$")
+ ReIsLibInstance = re.compile(r"^\s*##\s*@LIB_INSTANCES\s*$")
if ReIsLibInstance.match(String):
return True
else:
@@ -171,7 +171,7 @@ def IsLibInstanceInfo(String): # @return Flag
#
def IsAsBuildOptionInfo(String):
- ReIsAsBuildInstance = re.compile("^\s*##\s*@AsBuilt\s*$")
+ ReIsAsBuildInstance = re.compile(r"^\s*##\s*@AsBuilt\s*$")
if ReIsAsBuildInstance.match(String):
return True
else:
@@ -208,9 +208,6 @@ class InfParserSectionRoot(object): self.InfPpiSection = None
self.InfGuidSection = None
self.InfDepexSection = None
- self.InfPeiDepexSection = None
- self.InfDxeDepexSection = None
- self.InfSmmDepexSection = None
self.InfBinariesSection = None
self.InfHeader = None
self.InfSpecialCommentSection = None
diff --git a/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py b/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py index 474d373..4899e7d 100644 --- a/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py +++ b/BaseTools/Source/Python/UPT/Parser/InfSectionParser.py @@ -228,9 +228,6 @@ class InfSectionParser(InfDefinSectionParser, self.InfPpiSection = InfPpiObject()
self.InfGuidSection = InfGuidObject()
self.InfDepexSection = InfDepexObject()
- self.InfPeiDepexSection = InfDepexObject()
- self.InfDxeDepexSection = InfDepexObject()
- self.InfSmmDepexSection = InfDepexObject()
self.InfBinariesSection = InfBinariesObject()
self.InfHeader = InfHeaderObject()
self.InfBinaryHeader = InfHeaderObject()
diff --git a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py index da92fe5..944fd2f 100644 --- a/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py +++ b/BaseTools/Source/Python/UPT/PomAdapter/DecPomAlignment.py @@ -747,12 +747,12 @@ class DecPomAlignment(PackageObject): #
# deal with "NOT EQ", "NOT LT", "NOT GT", "NOT LE", "NOT GE", "NOT NOT"
#
- NOTNOT_Pattern = '[\t\s]*NOT[\t\s]+NOT[\t\s]*'
- NOTGE_Pattern = '[\t\s]*NOT[\t\s]+GE[\t\s]*'
- NOTLE_Pattern = '[\t\s]*NOT[\t\s]+LE[\t\s]*'
- NOTGT_Pattern = '[\t\s]*NOT[\t\s]+GT[\t\s]*'
- NOTLT_Pattern = '[\t\s]*NOT[\t\s]+LT[\t\s]*'
- NOTEQ_Pattern = '[\t\s]*NOT[\t\s]+EQ[\t\s]*'
+ NOTNOT_Pattern = r'[\t\s]*NOT[\t\s]+NOT[\t\s]*'
+ NOTGE_Pattern = r'[\t\s]*NOT[\t\s]+GE[\t\s]*'
+ NOTLE_Pattern = r'[\t\s]*NOT[\t\s]+LE[\t\s]*'
+ NOTGT_Pattern = r'[\t\s]*NOT[\t\s]+GT[\t\s]*'
+ NOTLT_Pattern = r'[\t\s]*NOT[\t\s]+LT[\t\s]*'
+ NOTEQ_Pattern = r'[\t\s]*NOT[\t\s]+EQ[\t\s]*'
ReplaceValue = re.compile(NOTNOT_Pattern).sub('', ReplaceValue)
ReplaceValue = re.compile(NOTLT_Pattern).sub('x >= ', ReplaceValue)
ReplaceValue = re.compile(NOTGT_Pattern).sub('x <= ', ReplaceValue)
@@ -785,7 +785,7 @@ class DecPomAlignment(PackageObject): if ReplaceValue.find('!') >= 0 and ReplaceValue[ReplaceValue.index('!') + 1] != '=':
ReplaceValue = ReplaceValue.replace('!', ' not ')
if '.' in ReplaceValue:
- Pattern = '[a-zA-Z0-9]{1,}\.[a-zA-Z0-9]{1,}'
+ Pattern = r'[a-zA-Z0-9]{1,}\.[a-zA-Z0-9]{1,}'
MatchedList = re.findall(Pattern, ReplaceValue)
for MatchedItem in MatchedList:
if MatchedItem not in self.PcdDefaultValueDict:
@@ -814,7 +814,7 @@ class DecPomAlignment(PackageObject): #
# Delete the 'L' prefix of a quoted string, this operation is for eval()
#
- QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
+ QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
QuotedMatchedObj = re.search(QUOTED_PATTERN, Expression)
if QuotedMatchedObj:
MatchedStr = QuotedMatchedObj.group().strip()
@@ -847,7 +847,7 @@ class DecPomAlignment(PackageObject): #
# Delete the 'L' prefix of a quoted string, this operation is for eval()
#
- QUOTED_PATTERN = '[\t\s]*L?"[^"]*"'
+ QUOTED_PATTERN = r'[\t\s]*L?"[^"]*"'
QuotedMatchedObj = re.search(QUOTED_PATTERN, DefaultValue)
if QuotedMatchedObj:
MatchedStr = QuotedMatchedObj.group().strip()
@@ -891,53 +891,6 @@ class DecPomAlignment(PackageObject): self.SetModuleFileList(ModuleFileList)
- ## Show detailed information of Package
- #
- # Print all members and their values of Package class
- #
- def ShowPackage(self):
- print('\nName =', self.GetName())
- print('\nBaseName =', self.GetBaseName())
- print('\nVersion =', self.GetVersion())
- print('\nGuid =', self.GetGuid())
-
- print('\nStandardIncludes = %d ' \
- % len(self.GetStandardIncludeFileList()), end=' ')
- for Item in self.GetStandardIncludeFileList():
- print(Item.GetFilePath(), ' ', Item.GetSupArchList())
- print('\nPackageIncludes = %d \n' \
- % len(self.GetPackageIncludeFileList()), end=' ')
- for Item in self.GetPackageIncludeFileList():
- print(Item.GetFilePath(), ' ', Item.GetSupArchList())
-
- print('\nGuids =', self.GetGuidList())
- for Item in self.GetGuidList():
- print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
- print('\nProtocols =', self.GetProtocolList())
- for Item in self.GetProtocolList():
- print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
- print('\nPpis =', self.GetPpiList())
- for Item in self.GetPpiList():
- print(Item.GetCName(), Item.GetGuid(), Item.GetSupArchList())
- print('\nLibraryClasses =', self.GetLibraryClassList())
- for Item in self.GetLibraryClassList():
- print(Item.GetLibraryClass(), Item.GetRecommendedInstance(), \
- Item.GetSupArchList())
- print('\nPcds =', self.GetPcdList())
- for Item in self.GetPcdList():
- print('CName=', Item.GetCName(), 'TokenSpaceGuidCName=', \
- Item.GetTokenSpaceGuidCName(), \
- 'DefaultValue=', Item.GetDefaultValue(), \
- 'ValidUsage=', Item.GetValidUsage(), \
- 'SupArchList', Item.GetSupArchList(), \
- 'Token=', Item.GetToken(), 'DatumType=', Item.GetDatumType())
-
- for Item in self.GetMiscFileList():
- print(Item.GetName())
- for FileObjectItem in Item.GetFileList():
- print(FileObjectItem.GetURI())
- print('****************\n')
-
## GenPcdDeclaration
#
# @param ContainerFile: File name of the DEC file
diff --git a/BaseTools/Source/Python/UPT/Xml/CommonXml.py b/BaseTools/Source/Python/UPT/Xml/CommonXml.py index cfadacf..1c18d7f 100644 --- a/BaseTools/Source/Python/UPT/Xml/CommonXml.py +++ b/BaseTools/Source/Python/UPT/Xml/CommonXml.py @@ -582,7 +582,6 @@ class UserExtensionsXml(object): self.BinaryDescriptionList = []
self.BinaryCopyrightList = []
self.BinaryLicenseList = []
- self.LangDefsList = []
self.DefineDict = {}
self.BuildOptionDict = {}
self.IncludesDict = {}
diff --git a/BaseTools/Source/Python/UPT/Xml/IniToXml.py b/BaseTools/Source/Python/UPT/Xml/IniToXml.py index 3dc4001..2c01c97 100644 --- a/BaseTools/Source/Python/UPT/Xml/IniToXml.py +++ b/BaseTools/Source/Python/UPT/Xml/IniToXml.py @@ -200,9 +200,9 @@ def ValidateRegValues(Key, Value): ('[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}'
'-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}',
ST.ERR_GUID_VALUE % Value),
- 'Version' : ('[0-9]+(\.[0-9]+)?', ST.ERR_VERSION_VALUE % \
+ 'Version' : (r'[0-9]+(\.[0-9]+)?', ST.ERR_VERSION_VALUE % \
(Key, Value)),
- 'XmlSpecification' : ('1\.1', ST.ERR_VERSION_XMLSPEC % Value)
+ 'XmlSpecification' : (r'1\.1', ST.ERR_VERSION_XMLSPEC % Value)
}
if Key not in ValidateMap:
return True, ''
diff --git a/BaseTools/Source/Python/UPT/Xml/PcdXml.py b/BaseTools/Source/Python/UPT/Xml/PcdXml.py index bbcee45..ca95c82 100644 --- a/BaseTools/Source/Python/UPT/Xml/PcdXml.py +++ b/BaseTools/Source/Python/UPT/Xml/PcdXml.py @@ -100,11 +100,11 @@ class PcdErrorXml(object): def TransferValidRange2Expr(self, TokenSpaceGuidCName, CName, ValidRange):
if self.Expression:
pass
- INT_RANGE_PATTERN1 = '[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
- INT_RANGE_PATTERN2 = '[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
+ INT_RANGE_PATTERN1 = r'[\t\s]*[0-9]+[\t\s]*-[\t\s]*[0-9]+'
+ INT_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
HEX_RANGE_PATTERN1 = \
- '[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
- HEX_RANGE_PATTERN2 = '[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][a-fA-F0-9]+[\t\s]*'
+ r'[\t\s]*0[xX][a-fA-F0-9]+[\t\s]*-[\t\s]*0[xX][a-fA-F0-9]+'
+ HEX_RANGE_PATTERN2 = r'[\t\s]*(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][a-fA-F0-9]+[\t\s]*'
IntMatch1 = re.compile(INT_RANGE_PATTERN1)
IntMatch2 = re.compile(INT_RANGE_PATTERN2)
HexMatch1 = re.compile(HEX_RANGE_PATTERN1)
@@ -158,18 +158,18 @@ class PcdErrorXml(object): pass
PCD_PATTERN = \
- '[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*'
+ r'[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*\.[\t\s]*[_a-zA-Z][a-zA-Z0-9_]*[\t\s]*'
IntPattern1 = \
- '[\t\s]*\([\t\s]*'+PCD_PATTERN+'[\t\s]+GE[\t\s]+\d+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
- PCD_PATTERN+'[\t\s]+LE[\t\s]+\d+[\t\s]*\)'
+ r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+\d+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
+ PCD_PATTERN+r'[\t\s]+LE[\t\s]+\d+[\t\s]*\)'
IntPattern1 = IntPattern1.replace(' ', '')
- IntPattern2 = '[\t\s]*'+PCD_PATTERN+'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
+ IntPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+\d+[\t\s]*'
HexPattern1 = \
- '[\t\s]*\([\t\s]*'+PCD_PATTERN+'[\t\s]+GE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
- PCD_PATTERN+'[\t\s]+LE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)'
+ r'[\t\s]*\([\t\s]*'+PCD_PATTERN+r'[\t\s]+GE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)[\t\s]+AND[\t\s]+\([\t\s]*'+\
+ PCD_PATTERN+r'[\t\s]+LE[\t\s]+0[xX][0-9a-fA-F]+[\t\s]*\)'
HexPattern1 = HexPattern1.replace(' ', '')
- HexPattern2 = '[\t\s]*'+PCD_PATTERN+'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][0-9a-zA-Z]+[\t\s]*'
+ HexPattern2 = r'[\t\s]*'+PCD_PATTERN+r'[\t\s]+(LT|GT|LE|GE|XOR|EQ)[\t\s]+0[xX][0-9a-zA-Z]+[\t\s]*'
#
# Do the Hex1 conversion
@@ -180,7 +180,7 @@ class PcdErrorXml(object): #
# To match items on both sides of '-'
#
- RangeItemList = re.compile('[\t\s]*0[xX][0-9a-fA-F]+[\t\s]*').findall(HexMatchedItem)
+ RangeItemList = re.compile(r'[\t\s]*0[xX][0-9a-fA-F]+[\t\s]*').findall(HexMatchedItem)
if RangeItemList and len(RangeItemList) == 2:
HexRangeDict[HexMatchedItem] = RangeItemList
@@ -204,7 +204,7 @@ class PcdErrorXml(object): #
# To match items on both sides of '-'
#
- RangeItemList = re.compile('[\t\s]*\d+[\t\s]*').findall(MatchedItem)
+ RangeItemList = re.compile(r'[\t\s]*\d+[\t\s]*').findall(MatchedItem)
if RangeItemList and len(RangeItemList) == 2:
IntRangeDict[MatchedItem] = RangeItemList
diff --git a/BaseTools/Source/Python/UPT/Xml/XmlParser.py b/BaseTools/Source/Python/UPT/Xml/XmlParser.py index 8e22a28..f239588 100644 --- a/BaseTools/Source/Python/UPT/Xml/XmlParser.py +++ b/BaseTools/Source/Python/UPT/Xml/XmlParser.py @@ -281,33 +281,33 @@ class DistributionPackageXml(object): #
XmlContent = \
re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
- '[\s\r\n]*"', '', XmlContent)
+ r'[\s\r\n]*"', '', XmlContent)
XmlContent = \
re.sub(r'[\s\r\n]*SupArchList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
- '[\s\r\n]*"', '', XmlContent)
+ r'[\s\r\n]*"', '', XmlContent)
#
# Remove <SupArchList> COMMON </SupArchList>
#
XmlContent = \
re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*COMMON[\s\r\n]*'
- '</SupArchList>[\s\r\n]*', '', XmlContent)
+ r'</SupArchList>[\s\r\n]*', '', XmlContent)
#
# Remove <SupArchList> common </SupArchList>
#
XmlContent = \
re.sub(r'[\s\r\n]*<SupArchList>[\s\r\n]*'
- 'common[\s\r\n]*</SupArchList>[\s\r\n]*', '', XmlContent)
+ r'common[\s\r\n]*</SupArchList>[\s\r\n]*', '', XmlContent)
#
# Remove SupModList="COMMON" or "common"
#
XmlContent = \
re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*COMMON'
- '[\s\r\n]*"', '', XmlContent)
+ r'[\s\r\n]*"', '', XmlContent)
XmlContent = \
re.sub(r'[\s\r\n]*SupModList[\s\r\n]*=[\s\r\n]*"[\s\r\n]*common'
- '[\s\r\n]*"', '', XmlContent)
+ r'[\s\r\n]*"', '', XmlContent)
return XmlContent
diff --git a/BaseTools/Source/Python/Workspace/BuildClassObject.py b/BaseTools/Source/Python/Workspace/BuildClassObject.py index ef87372..631e019 100644 --- a/BaseTools/Source/Python/Workspace/BuildClassObject.py +++ b/BaseTools/Source/Python/Workspace/BuildClassObject.py @@ -145,11 +145,6 @@ class PcdClassObject(object): return True
return False
- def IsSimpleTypeArray(self):
- if self.IsArray() and self.BaseDatumType in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, "BOOLEAN"]:
- return True
- return False
-
@staticmethod
def GetPcdMaxSizeWorker(PcdString, MaxSize):
if PcdString.startswith("{") and PcdString.endswith("}"):
@@ -290,7 +285,6 @@ class StructurePcd(PcdClassObject): self.PackageDecs = Packages
self.DefaultStoreName = [default_store]
self.DefaultValues = OrderedDict()
- self.PcdMode = None
self.SkuOverrideValues = OrderedDict()
self.StructName = None
self.PcdDefineLineNo = 0
@@ -334,9 +328,6 @@ class StructurePcd(PcdClassObject): self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName] = [Value.strip(), FileName, LineNo]
return self.PcdFiledValueFromDscComponent[ModuleGuid][DimensionAttr][FieldName]
- def SetPcdMode (self, PcdMode):
- self.PcdMode = PcdMode
-
def copy(self, PcdObject):
self.TokenCName = PcdObject.TokenCName if PcdObject.TokenCName else self.TokenCName
self.TokenSpaceGuidCName = PcdObject.TokenSpaceGuidCName if PcdObject.TokenSpaceGuidCName else PcdObject.TokenSpaceGuidCName
@@ -365,7 +356,6 @@ class StructurePcd(PcdClassObject): self.StructuredPcdIncludeFile = PcdObject.StructuredPcdIncludeFile if PcdObject.StructuredPcdIncludeFile else self.StructuredPcdIncludeFile
self.PackageDecs = PcdObject.PackageDecs if PcdObject.PackageDecs else self.PackageDecs
self.DefaultValues = PcdObject.DefaultValues if PcdObject.DefaultValues else self.DefaultValues
- self.PcdMode = PcdObject.PcdMode if PcdObject.PcdMode else self.PcdMode
self.DefaultValueFromDec = PcdObject.DefaultValueFromDec if PcdObject.DefaultValueFromDec else self.DefaultValueFromDec
self.DefaultValueFromDecInfo = PcdObject.DefaultValueFromDecInfo if PcdObject.DefaultValueFromDecInfo else self.DefaultValueFromDecInfo
self.SkuOverrideValues = PcdObject.SkuOverrideValues if PcdObject.SkuOverrideValues else self.SkuOverrideValues
@@ -383,7 +373,6 @@ class StructurePcd(PcdClassObject): new_pcd.DefaultValueFromDec = self.DefaultValueFromDec
new_pcd.DefaultValueFromDecInfo = self.DefaultValueFromDecInfo
- new_pcd.PcdMode = self.PcdMode
new_pcd.StructName = self.DatumType
new_pcd.PcdDefineLineNo = self.PcdDefineLineNo
new_pcd.PkgPath = self.PkgPath
@@ -586,7 +575,6 @@ class PackageBuildClassObject(BuildData): # @var PlatformName: To store value for PlatformName
# @var Guid: To store value for Guid
# @var Version: To store value for Version
-# @var DscSpecification: To store value for DscSpecification
# @var OutputDirectory: To store value for OutputDirectory
# @var FlashDefinition: To store value for FlashDefinition
# @var BuildNumber: To store value for BuildNumber
@@ -609,7 +597,6 @@ class PlatformBuildClassObject(BuildData): self.PlatformName = ''
self.Guid = ''
self.Version = ''
- self.DscSpecification = ''
self.OutputDirectory = ''
self.FlashDefinition = ''
self.BuildNumber = ''
diff --git a/BaseTools/Source/Python/Workspace/DscBuildData.py b/BaseTools/Source/Python/Workspace/DscBuildData.py index 5df184f..248c562 100644 --- a/BaseTools/Source/Python/Workspace/DscBuildData.py +++ b/BaseTools/Source/Python/Workspace/DscBuildData.py @@ -1,8 +1,9 @@ ## @file
# This file is used to create a database used by build tool
#
-# Copyright (c) 2008 - 2020, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2025, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2016 Hewlett Packard Enterprise Development LP<BR>
+# Copyright (C) 2025 Advanced Micro Devices, Inc. All rights reserved.
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
@@ -38,7 +39,9 @@ from Common.Misc import SaveFileOnChange from Workspace.BuildClassObject import PlatformBuildClassObject, StructurePcd, PcdClassObject, ModuleBuildClassObject
from collections import OrderedDict, defaultdict
import json
+import os
import shutil
+import sys
def _IsFieldValueAnArray (Value):
Value = Value.strip()
@@ -106,9 +109,9 @@ $(APPFILE): $(APPLICATION) '''
PcdGccMakefile = '''
-MAKEROOT ?= $(EDK_TOOLS_PATH)/Source/C
+MAKEROOT ?= $(EDK_TOOLS_PATH)%sSource%sC
LIBS = -lCommon
-'''
+'''%(os.sep, os.sep)
variablePattern = re.compile(r'[\t\s]*0[xX][a-fA-F0-9]+$')
SkuIdPattern = re.compile(r'^[a-zA-Z_][a-zA-Z0-9_]*$')
@@ -231,6 +234,21 @@ class DscBuildData(PlatformBuildClassObject): self._Clear()
self.WorkspaceDir = os.getenv("WORKSPACE") if os.getenv("WORKSPACE") else ""
self.DefaultStores = None
+ MingwBaseToolsBuild = None
+ if sys.platform == 'win32':
+ MingwBaseToolsBuild = os.getenv("BASETOOLS_MINGW_BUILD")
+ if MingwBaseToolsBuild is not None:
+ try:
+ MingwBaseToolsBuild = int(MingwBaseToolsBuild)
+ except:
+ pass
+ try:
+ MingwBaseToolsBuild = bool(MingwBaseToolsBuild)
+ except:
+ pass
+ if not isinstance(MingwBaseToolsBuild, bool):
+ MingwBaseToolsBuild = False
+ self._MingwBaseToolsBuild = MingwBaseToolsBuild
self.SkuIdMgr = SkuClass(self.SkuName, self.SkuIds)
self.UpdatePcdTypeDict()
@property
@@ -447,16 +465,6 @@ class DscBuildData(PlatformBuildClassObject): EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No PLATFORM_VERSION", File=self.MetaFile)
return self._Version
- ## Retrieve platform description file version
- @property
- def DscSpecification(self):
- if self._DscSpecification is None:
- if self._Header is None:
- self._GetHeaderInfo()
- if self._DscSpecification is None:
- EdkLogger.error('build', ATTRIBUTE_NOT_AVAILABLE, "No DSC_SPECIFICATION", File=self.MetaFile)
- return self._DscSpecification
-
## Retrieve OUTPUT_DIRECTORY
@property
def OutputDirectory(self):
@@ -1250,27 +1258,6 @@ class DscBuildData(PlatformBuildClassObject): if ' ' + Option not in self._BuildOptions[CurKey]:
self._BuildOptions[CurKey] += ' ' + Option
return self._BuildOptions
- def GetBuildOptionsByPkg(self, Module, ModuleType):
-
- local_pkg = os.path.split(Module.LocalPkg())[0]
- if self._ModuleTypeOptions is None:
- self._ModuleTypeOptions = OrderedDict()
- if ModuleType not in self._ModuleTypeOptions:
- options = OrderedDict()
- self._ModuleTypeOptions[ ModuleType] = options
- RecordList = self._RawData[MODEL_META_DATA_BUILD_OPTION, self._Arch]
- for ToolChainFamily, ToolChain, Option, Dummy1, Dummy2, Dummy3, Dummy4, Dummy5 in RecordList:
- if Dummy2 not in (TAB_COMMON,local_pkg.upper(),"EDKII"):
- continue
- Type = Dummy3
- if Type.upper() == ModuleType.upper():
- Key = (ToolChainFamily, ToolChain)
- if Key not in options or not ToolChain.endswith('_FLAGS') or Option.startswith('='):
- options[Key] = Option
- else:
- if ' ' + Option not in options[Key]:
- options[Key] += ' ' + Option
- return self._ModuleTypeOptions[ModuleType]
def GetBuildOptionsByModuleType(self, Edk, ModuleType):
if self._ModuleTypeOptions is None:
self._ModuleTypeOptions = OrderedDict()
@@ -1684,7 +1671,7 @@ class DscBuildData(PlatformBuildClassObject): AllModulePcds = AllModulePcds | ModuleData.PcdsName
return AllModulePcds
- #Filter the StrucutrePcd that is not used by any module in dsc file and fdf file.
+ #Filter the StructurePcd that is not used by any module in dsc file and fdf file.
def FilterStrcturePcd(self, S_pcd_set):
UnusedStruPcds = set(S_pcd_set.keys()) - self.PlatformUsedPcds
for (Token, TokenSpaceGuid) in UnusedStruPcds:
@@ -2061,13 +2048,6 @@ class DscBuildData(PlatformBuildClassObject): indicator += "->" + FieldName
return indicator
- def GetStarNum(self,Pcd):
- if not Pcd.IsArray():
- return 1
- elif Pcd.IsSimpleTypeArray():
- return len(Pcd.Capacity)
- else:
- return len(Pcd.Capacity) + 1
def GenerateDefaultValueAssignFunction(self, Pcd):
CApp = "// Default value in Dec \n"
CApp = CApp + "void Assign_%s_%s_Default_Value(%s *Pcd){\n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Pcd.BaseDatumType)
@@ -2772,7 +2752,7 @@ class DscBuildData(PlatformBuildClassObject): def GetBuildOptionsValueList(self):
CC_FLAGS = LinuxCFLAGS
- if sys.platform == "win32":
+ if sys.platform == "win32" and not self._MingwBaseToolsBuild:
CC_FLAGS = WindowsCFLAGS
BuildOptions = OrderedDict()
for Options in self.BuildOptions:
@@ -2957,12 +2937,13 @@ class DscBuildData(PlatformBuildClassObject): # start generating makefile
MakeApp = PcdMakefileHeader
- if sys.platform == "win32":
+ if sys.platform == "win32" and not self._MingwBaseToolsBuild:
MakeApp = MakeApp + 'APPFILE = %s\\%s.exe\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s\\%s.obj %s.obj\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + 'INC = '
else:
+ AppSuffix = '.exe' if sys.platform == "win32" else ''
MakeApp = MakeApp + PcdGccMakefile
- MakeApp = MakeApp + 'APPFILE = %s/%s\n' % (self.OutputPath, PcdValueInitName) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s/%s.o %s.o\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + \
- 'include $(MAKEROOT)/Makefiles/app.makefile\n' + 'TOOL_INCLUDE +='
+ MakeApp = MakeApp + 'APPFILE = %s%s%s%s\n' % (self.OutputPath, os.sep, PcdValueInitName, AppSuffix) + 'APPNAME = %s\n' % (PcdValueInitName) + 'OBJECTS = %s/%s.o %s.o\n' % (self.OutputPath, PcdValueInitName, os.path.join(self.OutputPath, PcdValueCommonName)) + \
+ 'include $(MAKEROOT)/Makefiles/app.makefile\n' + 'TOOL_INCLUDE +='
IncSearchList = []
PlatformInc = OrderedDict()
@@ -3010,11 +2991,12 @@ class DscBuildData(PlatformBuildClassObject): MakeApp += CC_FLAGS
- if sys.platform == "win32":
+ if sys.platform == "win32" and not self._MingwBaseToolsBuild:
MakeApp = MakeApp + PcdMakefileEnd
MakeApp = MakeApp + AppTarget % ("""\tcopy $(APPLICATION) $(APPFILE) /y """)
else:
- MakeApp = MakeApp + AppTarget % ("""\tcp -p $(APPLICATION) $(APPFILE) """)
+ AppSuffix = '.exe' if sys.platform == "win32" else ''
+ MakeApp = MakeApp + AppTarget % ("""\t$(CP) $(APPLICATION)%s $(APPFILE)"""%(AppSuffix))
MakeApp = MakeApp + '\n'
IncludeFileFullPaths = []
for includefile in IncludeFiles:
@@ -3057,12 +3039,21 @@ class DscBuildData(PlatformBuildClassObject): #start building the structure pcd value tool
Messages = ''
- if sys.platform == "win32":
+ if sys.platform == "win32" and not self._MingwBaseToolsBuild:
MakeCommand = 'nmake -f %s' % (MakeFileName)
returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
Messages = StdOut
else:
- MakeCommand = 'make -f %s' % (MakeFileName)
+ if sys.platform == "win32" and self._MingwBaseToolsBuild:
+ if shutil.which('mingw32-make.exe') is not None:
+ MakeCommand = 'mingw32-make -f %s' % (MakeFileName)
+ else:
+ if self._Toolchain in ('CLANGPDB', 'CLANGDWARF') and 'CLANG_HOST_BIN' in os.environ:
+ MakeCommand = '%smake -f %s' % (os.environ.get('CLANG_HOST_BIN'), MakeFileName)
+ else:
+ MakeCommand = 'make -f %s' % (MakeFileName)
+ else:
+ MakeCommand = 'make -f %s' % (MakeFileName)
returncode, StdOut, StdErr = DscBuildData.ExecuteCommand (MakeCommand)
Messages = StdErr
@@ -3603,20 +3594,6 @@ class DscBuildData(PlatformBuildClassObject): list(map(self.FilterSkuSettings, Pcds.values()))
return Pcds
- ## Add external modules
- #
- # The external modules are mostly those listed in FDF file, which don't
- # need "build".
- #
- # @param FilePath The path of module description file
- #
- def AddModule(self, FilePath):
- FilePath = NormPath(FilePath)
- if FilePath not in self.Modules:
- Module = ModuleBuildClassObject()
- Module.MetaFile = FilePath
- self.Modules.append(Module)
-
@property
def ToolChainFamily(self):
self._ToolChainFamily = TAB_COMPILER_MSFT
@@ -3638,20 +3615,6 @@ class DscBuildData(PlatformBuildClassObject): self._ToolChainFamily = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self._Toolchain]
return self._ToolChainFamily
- ## Add external PCDs
- #
- # The external PCDs are mostly those listed in FDF file to specify address
- # or offset information.
- #
- # @param Name Name of the PCD
- # @param Guid Token space guid of the PCD
- # @param Value Value of the PCD
- #
- def AddPcd(self, Name, Guid, Value):
- if (Name, Guid) not in self.Pcds:
- self.Pcds[Name, Guid] = PcdClassObject(Name, Guid, '', '', '', '', '', {}, False, None)
- self.Pcds[Name, Guid].DefaultValue = Value
-
@property
def DecPcds(self):
if self._DecPcds is None:
diff --git a/BaseTools/Source/Python/Workspace/InfBuildData.py b/BaseTools/Source/Python/Workspace/InfBuildData.py index 6339e49..fa047a7 100644 --- a/BaseTools/Source/Python/Workspace/InfBuildData.py +++ b/BaseTools/Source/Python/Workspace/InfBuildData.py @@ -114,7 +114,6 @@ class InfBuildData(ModuleBuildClassObject): self._Target = Target
self._Toolchain = Toolchain
self._Platform = TAB_COMMON
- self._TailComments = None
self._BaseName = None
self._DxsFile = None
self._ModuleType = None
@@ -123,7 +122,6 @@ class InfBuildData(ModuleBuildClassObject): self._Guid = None
self._Version = None
self._PcdIsDriver = None
- self._BinaryModule = None
self._Shadow = None
self._MakefileName = None
self._CustomMakefile = None
diff --git a/BaseTools/Source/Python/Workspace/MetaDataTable.py b/BaseTools/Source/Python/Workspace/MetaDataTable.py index a20bd14..325d01a 100644 --- a/BaseTools/Source/Python/Workspace/MetaDataTable.py +++ b/BaseTools/Source/Python/Workspace/MetaDataTable.py @@ -110,14 +110,6 @@ class Table(object): Tab = self.Db.GetTable(self.Table)
Tab.append(self._DUMMY_)
-
- def IsIntegral(self):
- tab = self.Db.GetTable(self.Table)
- Id = min([int(item[0]) for item in tab])
- if Id != -1:
- return False
- return True
-
def GetAll(self):
tab = self.Db.GetTable(self.Table)
return tab
@@ -209,27 +201,6 @@ class TableFile(Table): return None
return RecordList[0][0]
- ## Get file timestamp of a given file
- #
- # @param FileId ID of file
- #
- # @retval timestamp TimeStamp value of given file in the table
- #
- def GetFileTimeStamp(self, FileId):
- QueryScript = "select TimeStamp from %s where ID = '%s'" % (self.Table, FileId)
- RecordList = self.Exec(QueryScript)
- if len(RecordList) == 0:
- return None
- return RecordList[0][0]
-
- ## Update the timestamp of a given file
- #
- # @param FileId ID of file
- # @param TimeStamp Time stamp of file
- #
- def SetFileTimeStamp(self, FileId, TimeStamp):
- self.Exec("update %s set TimeStamp=%s where ID='%s'" % (self.Table, TimeStamp, FileId))
-
## Get list of file with given type
#
# @param FileType Type value of file
@@ -287,20 +258,3 @@ class TableDataModel(Table): Description = Item[0]
self.Insert(CrossIndex, Name, Description)
EdkLogger.verbose("Initialize table DataModel ... DONE!")
-
- ## Get CrossIndex
- #
- # Get a model's cross index from its name
- #
- # @param ModelName: Name of the model
- # @retval CrossIndex: CrossIndex of the model
- #
- def GetCrossIndex(self, ModelName):
- CrossIndex = -1
- SqlCommand = """select CrossIndex from DataModel where name = '""" + ModelName + """'"""
- self.Db.execute(SqlCommand)
- for Item in self.Db:
- CrossIndex = Item[0]
-
- return CrossIndex
-
diff --git a/BaseTools/Source/Python/Workspace/MetaFileParser.py b/BaseTools/Source/Python/Workspace/MetaFileParser.py index 73a1654..ed1ccb1 100644 --- a/BaseTools/Source/Python/Workspace/MetaFileParser.py +++ b/BaseTools/Source/Python/Workspace/MetaFileParser.py @@ -1,7 +1,7 @@ ## @file
# This file is used to parse meta files
#
-# Copyright (c) 2008 - 2018, Intel Corporation. All rights reserved.<BR>
+# Copyright (c) 2008 - 2025, Intel Corporation. All rights reserved.<BR>
# (C) Copyright 2015-2018 Hewlett Packard Enterprise Development LP<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
@@ -158,7 +158,6 @@ class MetaFileParser(object): self._Arch = Arch
self._FileType = FileType
self.MetaFile = FilePath
- self._FileDir = self.MetaFile.Dir
self._Defines = {}
self._Packages = []
self._FileLocalMacros = {}
@@ -1183,9 +1182,6 @@ class DscParser(MetaFileParser): def _LibraryInstanceParser(self):
self._ValueList[0] = self._CurrentLine
-
- def _DecodeCODEData(self):
- pass
## PCD sections parser
#
# [PcdsFixedAtBuild]
@@ -1388,7 +1384,7 @@ class DscParser(MetaFileParser): self._SectionsMacroDict.clear()
GlobalData.gPlatformDefines = {}
- # Get all macro and PCD which has straitforward value
+ # Get all macro and PCD which has straightforward value
self.__RetrievePcdValue()
self._Content = self._RawTable.GetAll()
self._ContentIndex = 0
@@ -1464,7 +1460,7 @@ class DscParser(MetaFileParser): self._ValueList[0],
self._ValueList[1],
self._ValueList[2],
- S1,
+ self._Scope[0][0],
S2,
S3,
NewOwner,
@@ -1711,6 +1707,7 @@ class DscParser(MetaFileParser): def __ProcessComponent(self):
self._ValueList[0] = ReplaceMacro(self._ValueList[0], self._Macros)
+ self._Scope[0][0] = ReplaceMacro(self._Scope[0][0], self._Macros)
def __ProcessBuildOption(self):
self._ValueList = [ReplaceMacro(Value, self._Macros, RaiseError=False)
@@ -1793,8 +1790,6 @@ class DecParser(MetaFileParser): self._include_flag = False
self._package_flag = False
- self._RestofValue = ""
-
## Parser starter
def Start(self):
Content = ''
diff --git a/BaseTools/Source/Python/Workspace/MetaFileTable.py b/BaseTools/Source/Python/Workspace/MetaFileTable.py index bebf906..7ff5f20 100644 --- a/BaseTools/Source/Python/Workspace/MetaFileTable.py +++ b/BaseTools/Source/Python/Workspace/MetaFileTable.py @@ -28,7 +28,6 @@ class MetaFileTable(): self.MetaFile = MetaFile
self.TableName = ""
self.DB = DB
- self._NumpyTab = None
self.CurrentContent = []
DB.TblFile.append([MetaFile.Name,
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py index 6ad7a3b..1bfd137 100644 --- a/BaseTools/Source/Python/Workspace/WorkspaceCommon.py +++ b/BaseTools/Source/Python/Workspace/WorkspaceCommon.py @@ -42,7 +42,7 @@ def GetPackageList(Platform, BuildDatabase, Arch, Target, Toolchain): for ModuleFile in Platform.Modules:
Data = BuildDatabase[ModuleFile, Arch, Target, Toolchain]
PkgSet.update(Data.Packages)
- for Lib in GetLiabraryInstances(Data, Platform, BuildDatabase, Arch, Target, Toolchain):
+ for Lib in GetLibraryInstances(Data, Platform, BuildDatabase, Arch, Target, Toolchain):
PkgSet.update(Lib.Packages)
return list(PkgSet)
@@ -87,9 +87,19 @@ def GetDeclaredPcd(Platform, BuildDatabase, Arch, Target, Toolchain, additionalP # @param Toolchain: Current toolchain
# @retval: List of dependent libraries which are InfBuildData instances
#
-def GetLiabraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
+def GetLibraryInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain):
return GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain,Platform.MetaFile,EdkLogger)
def GenerateDependencyDump(ConsumedByList, M, Level, Visited):
    ## Build a tab-indented textual dump of the consumer chain rooted at M.
    #
    # ConsumedByList maps a module to the list of modules that consume it.
    # Each nesting level adds one tab of indentation; Visited guards against
    # dependency cycles so any module is emitted at most once.
    #
    # @param ConsumedByList  Mapping: module -> list of consuming modules
    # @param M               Module whose consumer chain is dumped
    # @param Level           Current indentation depth (tabs)
    # @param Visited         Set of modules already emitted (mutated in place)
    # @retval                List of formatted "consumed by" lines
    if M in Visited:
        # Already printed somewhere above — stop to avoid infinite recursion.
        return []
    Visited.add(M)
    Dump = ["%sconsumed by %s" % ("\t" * Level, M)]
    for Consumer in ConsumedByList[M]:
        Dump += GenerateDependencyDump(ConsumedByList, Consumer, Level + 1, Visited)
    return Dump
def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolchain, FileName = '', EdkLogger = None):
if Module.LibInstances:
return Module.LibInstances
@@ -133,9 +143,11 @@ def GetModuleLibInstances(Module, Platform, BuildDatabase, Arch, Target, Toolcha if LibraryPath is None:
if not Module.LibraryClass:
EdkLogger.error("build", RESOURCE_NOT_AVAILABLE,
- "Instance of library class [%s] is not found" % LibraryClassName,
+ f"Instance of library class [{LibraryClassName}] is not found for"
+ f" module [{Module}], [{LibraryClassName}] is:",
File=FileName,
- ExtraData="in [%s] [%s]\n\tconsumed by module [%s]" % (str(M), Arch, str(Module)))
+ ExtraData="\n\t".join(GenerateDependencyDump(ConsumedByList, M, 0, set()))
+ )
else:
return []
diff --git a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py index d955c78..553b149 100644 --- a/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py +++ b/BaseTools/Source/Python/Workspace/WorkspaceDatabase.py @@ -132,15 +132,6 @@ class WorkspaceDatabase(object): )
return BuildObject
- # placeholder for file format conversion
- class TransformObjectFactory:
- def __init__(self, WorkspaceDb):
- self.WorkspaceDb = WorkspaceDb
-
- # key = FilePath, Arch
- def __getitem__(self, Key):
- pass
-
## Constructor of WorkspaceDatabase
#
# @param DbPath Path of database file
@@ -156,7 +147,6 @@ class WorkspaceDatabase(object): # conversion object for build or file format conversion purpose
self.BuildObject = WorkspaceDatabase.BuildObjectFactory(self)
- self.TransformObject = WorkspaceDatabase.TransformObjectFactory(self)
## Summarize all packages in the database
diff --git a/BaseTools/Source/Python/build/BuildReport.py b/BaseTools/Source/Python/build/BuildReport.py index 497bbbd..2d31be4 100644 --- a/BaseTools/Source/Python/build/BuildReport.py +++ b/BaseTools/Source/Python/build/BuildReport.py @@ -1494,36 +1494,6 @@ class PcdReport(object): else:
FileWrite(File, ' %-*s = %s' % (self.MaxLen + 4, '.' + Key, Value[0]))
- def StrtoHex(self, value):
- try:
- value = hex(int(value))
- return value
- except:
- if value.startswith("L\"") and value.endswith("\""):
- valuelist = []
- for ch in value[2:-1]:
- valuelist.append(hex(ord(ch)))
- valuelist.append('0x00')
- return valuelist
- elif value.startswith("\"") and value.endswith("\""):
- return hex(ord(value[1:-1]))
- elif value.startswith("{") and value.endswith("}"):
- valuelist = []
- if ',' not in value:
- return value[1:-1]
- for ch in value[1:-1].split(','):
- ch = ch.strip()
- if ch.startswith('0x') or ch.startswith('0X'):
- valuelist.append(ch)
- continue
- try:
- valuelist.append(hex(int(ch.strip())))
- except:
- pass
- return valuelist
- else:
- return value
-
def IsStructurePcd(self, PcdToken, PcdTokenSpaceGuid):
if GlobalData.gStructurePcd and (self.Arch in GlobalData.gStructurePcd) and ((PcdToken, PcdTokenSpaceGuid) in GlobalData.gStructurePcd[self.Arch]):
return True
@@ -1844,13 +1814,21 @@ class FdRegionReport(object): for Ffs in Wa.FdfProfile.FvDict[FvName.upper()].FfsList:
for Section in Ffs.SectionList:
try:
- for FvSection in Section.SectionList:
- if FvSection.FvName in self.FvList:
- continue
- self._GuidsDb[Ffs.NameGuid.upper()] = FvSection.FvName
- self.FvList.append(FvSection.FvName)
- self.FvInfo[FvSection.FvName] = ("Nested FV", 0, 0)
- self._DiscoverNestedFvList(FvSection.FvName, Wa)
+ # Handle the case where an entire FFS is a FV, and not
+ # a sub-section of the FFS.
+ if getattr(Section, 'FvFileName', None) is None:
+ for FvSection in Section.SectionList:
+ if FvSection.FvName in self.FvList:
+ continue
+ self._GuidsDb[Ffs.NameGuid.upper()] = FvSection.FvName
+ self.FvList.append(FvSection.FvName)
+ self.FvInfo[FvSection.FvName] = ("Nested FV", 0, 0)
+ self._DiscoverNestedFvList(FvSection.FvName, Wa)
+ else:
+ self._GuidsDb[Ffs.NameGuid.upper()] = Section.FvFileName
+ self.FvList.append(Section.FvName)
+ self.FvInfo[Section.FvName] = ("Nested FV", 0, 0)
+ self._DiscoverNestedFvList(Section.FvName, Wa)
except AttributeError:
pass
@@ -2379,16 +2357,21 @@ class BuildReport(object): # PPI's in module
module_report_data["PPI"] = []
for data_ppi in module.PpiList.keys():
- module_report_data["PPI"].append({"Name": data_ppi, "Guid": module.PpiList[data_ppi]})
+ module_report_data["PPI"].append({"Name": data_ppi, "Guid": GuidStructureStringToGuidString(module.PpiList[data_ppi])})
+
+ # GUID's in module
+ module_report_data["GUID"] = []
+ for data_ppi in module.GuidList.keys():
+ module_report_data["GUID"].append({"Name": data_ppi, "Guid": GuidStructureStringToGuidString(module.GuidList[data_ppi])})
# Protocol's in module
module_report_data["Protocol"] = []
for data_protocol in module.ProtocolList.keys():
- module_report_data["Protocol"].append({"Name": data_protocol, "Guid": module.ProtocolList[data_protocol]})
+ module_report_data["Protocol"].append({"Name": data_protocol, "Guid": GuidStructureStringToGuidString(module.ProtocolList[data_protocol])})
# PCD's in module
module_report_data["Pcd"] = []
- for data_pcd in module.LibraryPcdList:
+ for data_pcd in module.ModulePcdList + module.LibraryPcdList:
module_report_data["Pcd"].append({"Space": data_pcd.TokenSpaceGuidCName,
"Name": data_pcd.TokenCName,
"Value": data_pcd.TokenValue,
diff --git a/BaseTools/Source/Python/build/build.py b/BaseTools/Source/Python/build/build.py index 51fb1f4..0ca71e5 100755 --- a/BaseTools/Source/Python/build/build.py +++ b/BaseTools/Source/Python/build/build.py @@ -28,6 +28,8 @@ import threading from linecache import getlines
from subprocess import Popen,PIPE, STDOUT
from collections import OrderedDict, defaultdict
+import json
+import secrets
from AutoGen.PlatformAutoGen import PlatformAutoGen
from AutoGen.ModuleAutoGen import ModuleAutoGen
@@ -56,7 +58,6 @@ from PatchPcdValue.PatchPcdValue import PatchBinaryFile import Common.GlobalData as GlobalData
from GenFds.GenFds import GenFds, GenFdsApi
import multiprocessing as mp
-from multiprocessing import Manager
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGenHelper import WorkSpaceInfo, PlatformInfo
from GenFds.FdfParser import FdfParser
@@ -70,22 +71,6 @@ gSupportedTarget = ['all', 'genc', 'genmake', 'modules', 'libraries', 'fds', 'cl TemporaryTablePattern = re.compile(r'^_\d+_\d+_[a-fA-F0-9]+$')
TmpTableDict = {}
-## Check environment PATH variable to make sure the specified tool is found
-#
-# If the tool is found in the PATH, then True is returned
-# Otherwise, False is returned
-#
-def IsToolInPath(tool):
- if 'PATHEXT' in os.environ:
- extns = os.environ['PATHEXT'].split(os.path.pathsep)
- else:
- extns = ('',)
- for pathDir in os.environ['PATH'].split(os.path.pathsep):
- for ext in extns:
- if os.path.exists(os.path.join(pathDir, tool + ext)):
- return True
- return False
-
## Check environment variables
#
# Check environment variables that must be set for build. Currently they are
@@ -103,7 +88,7 @@ def CheckEnvVariable(): EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "Environment variable not found",
ExtraData="WORKSPACE")
- WorkspaceDir = os.path.normcase(os.path.normpath(os.environ["WORKSPACE"]))
+ WorkspaceDir = os.path.normpath(os.environ["WORKSPACE"])
if not os.path.exists(WorkspaceDir):
EdkLogger.error("build", FILE_NOT_FOUND, "WORKSPACE doesn't exist", ExtraData=WorkspaceDir)
elif ' ' in WorkspaceDir:
@@ -122,7 +107,7 @@ def CheckEnvVariable(): EdkLogger.error("build", FORMAT_NOT_SUPPORTED, "No space is allowed in PACKAGES_PATH", ExtraData=Path)
- os.environ["EDK_TOOLS_PATH"] = os.path.normcase(os.environ["EDK_TOOLS_PATH"])
+ os.environ["EDK_TOOLS_PATH"] = os.path.normpath(os.environ["EDK_TOOLS_PATH"])
# check EDK_TOOLS_PATH
if "EDK_TOOLS_PATH" not in os.environ:
@@ -282,6 +267,22 @@ def LaunchCommand(Command, WorkingDir,ModuleAuto = None): iau.CreateDepsTarget()
return "%dms" % (int(round((time.time() - BeginTime) * 1000)))
def GenerateStackCookieValues():
    ## Generate the per-build random stack cookie value files.
    #
    # Writes StackCookieValues32.json / StackCookieValues64.json into the
    # build directory, each holding 100 cryptographically secure random
    # integers. Existing files are left untouched so incremental rebuilds
    # reuse the same cookie values.
    if GlobalData.gBuildDirectory == "":
        # Build directory not established yet — nothing to write.
        return

    # File name -> exclusive upper bound for secrets.randbelow(). Using
    # 1 << 32 / 1 << 64 makes the full 32/64-bit range reachable; the
    # previous 0xFFFFFFFF / 0xFFFFFFFFFFFFFFFF bounds excluded the maximum
    # value because randbelow(n) returns values in [0, n).
    for FileName, Bound in (("StackCookieValues32.json", 1 << 32),
                            ("StackCookieValues64.json", 1 << 64)):
        FilePath = os.path.join(GlobalData.gBuildDirectory, FileName)
        if not os.path.exists(FilePath):
            Values = [secrets.randbelow(Bound) for _ in range(100)]
            with open(FilePath, "w") as File:
                json.dump(Values, File)
## The smallest unit that can be built in multi-thread build mode
#
# This is the base class of build unit. The "Obj" parameter must provide
@@ -368,26 +369,6 @@ class ModuleMakeUnit(BuildUnit): if Target in [None, "", "all"]:
self.Target = "tbuild"
-## The smallest platform unit that can be built by nmake/make command in multi-thread build mode
-#
-# This class is for platform build by nmake/make build system. The "Obj" parameter
-# must provide __str__(), __eq__() and __hash__() methods. Otherwise there could
-# be make units missing build.
-#
-# Currently the "Obj" should be only PlatformAutoGen object.
-#
-class PlatformMakeUnit(BuildUnit):
- ## The constructor
- #
- # @param self The object pointer
- # @param Obj The PlatformAutoGen object the build is working on
- # @param Target The build target name, one of gSupportedTarget
- #
- def __init__(self, Obj, BuildCommand, Target):
- Dependency = [ModuleMakeUnit(Lib, BuildCommand, Target) for Lib in self.BuildObject.LibraryAutoGenList]
- Dependency.extend([ModuleMakeUnit(Mod, BuildCommand,Target) for Mod in self.BuildObject.ModuleAutoGenList])
- BuildUnit.__init__(self, Obj, BuildCommand, Target, Dependency, Obj.MakeFileDir)
-
## The class representing the task of a module build or platform build
#
# This class manages the build tasks in multi-thread build mode. Its jobs include
@@ -545,15 +526,6 @@ class BuildTask: def HasError():
return BuildTask._ErrorFlag.is_set()
- ## Get error message in running thread
- #
- # Since the main thread cannot catch exceptions in other thread, we have to
- # use a static variable to communicate this message to main thread.
- #
- @staticmethod
- def GetErrorMessage():
- return BuildTask._ErrorMessage
-
## Factory method to create a BuildTask object
#
# This method will check if a module is building or has been built. And if
@@ -786,8 +758,6 @@ class Build(): self.LoadFixAddress = 0
self.UniFlag = BuildOptions.Flag
self.BuildModules = []
- self.HashSkipModules = []
- self.Db_Flag = False
self.LaunchPrebuildFlag = False
self.PlatformBuildPath = os.path.join(GlobalData.gConfDirectory, '.cache', '.PlatformBuild')
if BuildOptions.CommandLength:
@@ -799,11 +769,11 @@ class Build(): EdkLogger.quiet("%-16s = %s" % ("WORKSPACE", os.environ["WORKSPACE"]))
if "PACKAGES_PATH" in os.environ:
# WORKSPACE env has been converted before. Print the same path style with WORKSPACE env.
- EdkLogger.quiet("%-16s = %s" % ("PACKAGES_PATH", os.path.normcase(os.path.normpath(os.environ["PACKAGES_PATH"]))))
+ EdkLogger.quiet("%-16s = %s" % ("PACKAGES_PATH", os.path.normpath(os.environ["PACKAGES_PATH"])))
EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_PATH", os.environ["EDK_TOOLS_PATH"]))
if "EDK_TOOLS_BIN" in os.environ:
# Print the same path style with WORKSPACE env.
- EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_BIN", os.path.normcase(os.path.normpath(os.environ["EDK_TOOLS_BIN"]))))
+ EdkLogger.quiet("%-16s = %s" % ("EDK_TOOLS_BIN", os.path.normpath(os.environ["EDK_TOOLS_BIN"])))
EdkLogger.quiet("%-16s = %s" % ("CONF_PATH", GlobalData.gConfDirectory))
if "PYTHON3_ENABLE" in os.environ:
PYTHON3_ENABLE = os.environ["PYTHON3_ENABLE"]
@@ -1028,7 +998,6 @@ class Build(): if 'PREBUILD' in GlobalData.gCommandLineDefines:
self.Prebuild = GlobalData.gCommandLineDefines.get('PREBUILD')
else:
- self.Db_Flag = True
Platform = self.Db.MapPlatform(str(self.PlatformFile))
self.Prebuild = str(Platform.Prebuild)
if self.Prebuild:
@@ -1263,7 +1232,7 @@ class Build(): mqueue.put((None,None,None,None,None,None,None))
AutoGenObject.DataPipe.DataContainer = {"CommandTarget": self.Target}
AutoGenObject.DataPipe.DataContainer = {"Workspace_timestamp": AutoGenObject.Workspace._SrcTimeStamp}
- AutoGenObject.CreateLibModuelDirs()
+ AutoGenObject.CreateLibModuleDirs()
AutoGenObject.DataPipe.DataContainer = {"LibraryBuildDirectoryList":AutoGenObject.LibraryBuildDirectoryList}
AutoGenObject.DataPipe.DataContainer = {"ModuleBuildDirectoryList":AutoGenObject.ModuleBuildDirectoryList}
AutoGenObject.DataPipe.DataContainer = {"FdsCommandDict": AutoGenObject.Workspace.GenFdsCommandDict}
@@ -1794,6 +1763,7 @@ class Build(): self.UniFlag,
self.Progress
)
+ GenerateStackCookieValues()
self.Fdf = Wa.FdfFile
self.LoadFixAddress = Wa.Platform.LoadFixAddress
self.BuildReport.AddPlatformReport(Wa)
@@ -1897,6 +1867,7 @@ class Build(): self.Progress,
self.ModuleFile
)
+ GenerateStackCookieValues()
self.Fdf = Wa.FdfFile
self.LoadFixAddress = Wa.Platform.LoadFixAddress
Wa.CreateMakeFile(False)
@@ -2147,6 +2118,7 @@ class Build(): self.UniFlag,
self.Progress
)
+ GenerateStackCookieValues()
self.Fdf = Wa.FdfFile
self.LoadFixAddress = Wa.Platform.LoadFixAddress
self.BuildReport.AddPlatformReport(Wa)
@@ -2179,7 +2151,7 @@ class Build(): Pa.DataPipe.DataContainer = {"FfsCommand":CmdListDict}
Pa.DataPipe.DataContainer = {"Workspace_timestamp": Wa._SrcTimeStamp}
Pa.DataPipe.DataContainer = {"CommandTarget": self.Target}
- Pa.CreateLibModuelDirs()
+ Pa.CreateLibModuleDirs()
# Fetch the MakeFileName.
self.MakeFileName = Pa.MakeFileName
@@ -2410,9 +2382,18 @@ class Build(): if len(NameValue) == 2 and NameValue[0].strip() == 'EFI_FV_SPACE_SIZE':
FreeSizeValue = int(NameValue[1].strip(), 0)
if FreeSizeValue < Threshold:
- EdkLogger.error("build", FV_FREESIZE_ERROR,
- '%s FV free space %d is not enough to meet with the required spare space %d set by -D FV_SPARE_SPACE_THRESHOLD option.' % (
- FvName, FreeSizeValue, Threshold))
+ if FreeSizeValue == 0:
+ # A free size of 0 means the FV is exactly 100% full which usually indicates a special
+ # FV for a region that contains a fixed size image with special alignment requirements
+                            # with potentially a fixed address. Log a warning for review, but do not generate an
+ # error.
+ EdkLogger.warn("build", FV_FREESIZE_ERROR,
+ '%s FV free space %d is not enough to meet with the required spare space %d set by -D FV_SPARE_SPACE_THRESHOLD option.' % (
+ FvName, FreeSizeValue, Threshold))
+ else:
+ EdkLogger.error("build", FV_FREESIZE_ERROR,
+ '%s FV free space %d is not enough to meet with the required spare space %d set by -D FV_SPARE_SPACE_THRESHOLD option.' % (
+ FvName, FreeSizeValue, Threshold))
break
## Generate GuidedSectionTools.txt in the FV directories.
@@ -2466,13 +2447,6 @@ class Build(): print(' '.join(guidedSectionTool), file=toolsFile)
toolsFile.close()
- ## Returns the real path of the tool.
- #
- def GetRealPathOfTool (self, tool):
- if os.path.exists(tool):
- return os.path.realpath(tool)
- return tool
-
## Launch the module or platform build
#
def Launch(self):
@@ -2664,7 +2638,7 @@ def Main(): if Option.ModuleFile:
if os.path.isabs (Option.ModuleFile):
- if os.path.normcase (os.path.normpath(Option.ModuleFile)).find (Workspace) == 0:
+ if os.path.normcase (os.path.normpath(Option.ModuleFile)).find (os.path.normcase(Workspace)) == 0:
Option.ModuleFile = NormFile(os.path.normpath(Option.ModuleFile), Workspace)
Option.ModuleFile = PathClass(Option.ModuleFile, Workspace)
ErrorCode, ErrorInfo = Option.ModuleFile.Validate(".inf", False)
@@ -2673,13 +2647,13 @@ def Main(): if Option.PlatformFile is not None:
if os.path.isabs (Option.PlatformFile):
- if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (Workspace) == 0:
+ if os.path.normcase (os.path.normpath(Option.PlatformFile)).find (os.path.normcase(Workspace)) == 0:
Option.PlatformFile = NormFile(os.path.normpath(Option.PlatformFile), Workspace)
Option.PlatformFile = PathClass(Option.PlatformFile, Workspace)
if Option.FdfFile is not None:
if os.path.isabs (Option.FdfFile):
- if os.path.normcase (os.path.normpath(Option.FdfFile)).find (Workspace) == 0:
+ if os.path.normcase (os.path.normpath(Option.FdfFile)).find (os.path.normcase(Workspace)) == 0:
Option.FdfFile = NormFile(os.path.normpath(Option.FdfFile), Workspace)
Option.FdfFile = PathClass(Option.FdfFile, Workspace)
ErrorCode, ErrorInfo = Option.FdfFile.Validate(".fdf", False)
|