diff options
author | Jiaxin Wu <jiaxin.wu@intel.com> | 2024-06-26 15:28:36 +0800 |
---|---|---|
committer | mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> | 2024-08-28 15:25:27 +0000 |
commit | 268397a8923173330b473b8a1d6b21000bc3b7d1 (patch) | |
tree | 4f05f2831a9a436aec777b6b879f83a7f86f1b7a /UefiCpuPkg | |
parent | 1c19ccd5103b918efef9c4076e92d175cfba8d81 (diff) | |
download | edk2-268397a8923173330b473b8a1d6b21000bc3b7d1.tar.gz |
UefiCpuPkg/PiSmmCpuDxeSmm: Enable CodeAccessCheck in MM Entry Point
For MM:
CodeAccessCheck is designed to be enabled in the MM CPU Driver
Entry Point.
For SMM:
CodeAccessCheck is still enabled in the first SMI after the SMM
Ready To Lock event happens.
This patch enables the CodeAccessCheck in MM CPU Driver Entry
Point for MM support.
Signed-off-by: Jiaxin Wu <jiaxin.wu@intel.com>
Cc: Ray Ni <ray.ni@intel.com>
Cc: Rahul Kumar <rahul1.kumar@intel.com>
Cc: Gerd Hoffmann <kraxel@redhat.com>
Cc: Star Zeng <star.zeng@intel.com>
Cc: Dun Tan <dun.tan@intel.com>
Cc: Hongbin1 Zhang <hongbin1.zhang@intel.com>
Cc: Wei6 Xu <wei6.xu@intel.com>
Cc: Yuanhao Xie <yuanhao.xie@intel.com>
Diffstat (limited to 'UefiCpuPkg')
-rw-r--r-- | UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.c | 40 | ||||
-rw-r--r-- | UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.h | 13 | ||||
-rw-r--r-- | UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c | 17 | ||||
-rw-r--r-- | UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.h | 3 |
4 files changed, 50 insertions, 23 deletions
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.c b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.c index ba559afae6..cd43619d17 100644 --- a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.c +++ b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.c @@ -362,7 +362,20 @@ InitializeSmm ( //
// Check XD and BTS features on each processor on normal boot
//
- CheckFeatureSupported ();
+ CheckFeatureSupported (Index);
+
+ if (mIsStandaloneMm) {
+ AcquireSpinLock (mConfigSmmCodeAccessCheckLock);
+
+ //
+ // Standalone MM does not allow call out to DXE at anytime.
+ // Code Access check can be enabled in the first SMI.
+ // While SMM needs to defer the enabling to EndOfDxe.
+ //
+ // Enable SMM Code Access Check feature.
+ //
+ ConfigSmmCodeAccessCheckOnCurrentProcessor (&Index);
+ }
} else if (IsBsp) {
//
// BSP rebase is already done above.
@@ -411,6 +424,11 @@ ExecuteFirstSmiInit ( ZeroMem ((VOID *)mSmmInitialized, sizeof (BOOLEAN) * mMaxNumberOfCpus);
//
+ // Initialize the lock used to serialize the MSR programming in BSP and all APs
+ //
+ InitializeSpinLock (mConfigSmmCodeAccessCheckLock);
+
+ //
// Get the BSP ApicId.
//
mBspApicId = GetApicId ();
@@ -1427,26 +1445,6 @@ ConfigSmmCodeAccessCheck ( // Check to see if the Feature Control MSR is supported on this CPU
//
Index = gSmmCpuPrivate->SmmCoreEntryContext.CurrentlyExecutingCpu;
- if (!SmmCpuFeaturesIsSmmRegisterSupported (Index, SmmRegFeatureControl)) {
- mSmmCodeAccessCheckEnable = FALSE;
- PERF_FUNCTION_END ();
- return;
- }
-
- //
- // Check to see if the CPU supports the SMM Code Access Check feature
- // Do not access this MSR unless the CPU supports the SmmRegFeatureControl
- //
- if ((AsmReadMsr64 (EFI_MSR_SMM_MCA_CAP) & SMM_CODE_ACCESS_CHK_BIT) == 0) {
- mSmmCodeAccessCheckEnable = FALSE;
- PERF_FUNCTION_END ();
- return;
- }
-
- //
- // Initialize the lock used to serialize the MSR programming in BSP and all APs
- //
- InitializeSpinLock (mConfigSmmCodeAccessCheckLock);
//
// Acquire Config SMM Code Access Check spin lock. The BSP will release the
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.h b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.h index 5b2202490e..a0b5e6fafb 100644 --- a/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.h +++ b/UefiCpuPkg/PiSmmCpuDxeSmm/PiSmmCpuCommon.h @@ -474,6 +474,7 @@ extern EFI_SMRAM_DESCRIPTOR *mSmmCpuSmramRanges; extern UINTN mSmmCpuSmramRangeCount;
extern UINT8 mPhysicalAddressBits;
extern BOOLEAN mSmmDebugAgentSupport;
+extern BOOLEAN mSmmCodeAccessCheckEnable;
//
// Copy of the PcdPteMemoryEncryptionAddressOrMask
@@ -848,6 +849,18 @@ InitMsrSpinLockByIndex ( );
/**
+Configure SMM Code Access Check feature on an AP.
+SMM Feature Control MSR will be locked after configuration.
+
+@param[in,out] Buffer Pointer to private data buffer.
+**/
+VOID
+EFIAPI
+ConfigSmmCodeAccessCheckOnCurrentProcessor (
+ IN OUT VOID *Buffer
+ );
+
+/**
Configure SMM Code Access Check feature for all processors.
SMM Feature Control MSR will be locked after configuration.
**/
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c index ba83c37cbf..bbac6064e2 100644 --- a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c +++ b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.c @@ -859,10 +859,11 @@ InitSmmProfileInternal ( /**
Check if feature is supported by a processor.
+ @param CpuIndex The index of the CPU.
**/
VOID
CheckFeatureSupported (
- VOID
+ IN UINTN CpuIndex
)
{
UINT32 RegEax;
@@ -904,6 +905,20 @@ CheckFeatureSupported ( }
}
}
+
+ if (mSmmCodeAccessCheckEnable) {
+ if (!SmmCpuFeaturesIsSmmRegisterSupported (CpuIndex, SmmRegFeatureControl)) {
+ mSmmCodeAccessCheckEnable = FALSE;
+ }
+
+ //
+ // Check to see if the CPU supports the SMM Code Access Check feature
+ // Do not access this MSR unless the CPU supports the SmmRegFeatureControl
+ //
+ if ((AsmReadMsr64 (EFI_MSR_SMM_MCA_CAP) & SMM_CODE_ACCESS_CHK_BIT) == 0) {
+ mSmmCodeAccessCheckEnable = FALSE;
+ }
+ }
}
/**
diff --git a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.h b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.h index 7a0f006d77..b34aab41ef 100644 --- a/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.h +++ b/UefiCpuPkg/PiSmmCpuDxeSmm/SmmProfile.h @@ -83,10 +83,11 @@ PageFaultIdtHandlerSmmProfile ( /**
Check if feature is supported by a processor.
+ @param CpuIndex The index of the CPU.
**/
VOID
CheckFeatureSupported (
- VOID
+ IN UINTN CpuIndex
);
/**
|