UefiCpuPkg: Split the path in RelocateApLoop into two.
Add the union RELOCATE_AP_LOOP_ENTRY and split the path in RelocateApLoop into two:
1. 64-bit AMD processors with SEV-ES
2. Intel processors (32-bit or 64-bit), 32-bit AMD processors, or 64-bit AMD processors without SEV-ES

Cc: Guo Dong <guo.dong@intel.com>
Cc: Ray Ni <ray.ni@intel.com>
Cc: Sean Rhodes <sean@starlabs.systems>
Cc: James Lu <james.lu@intel.com>
Cc: Gua Guo <gua.guo@intel.com>
Signed-off-by: Yuanhao Xie <yuanhao.xie@intel.com>
Acked-by: Gerd Hoffmann <kraxel@redhat.com>
Tested-by: Gerd Hoffmann <kraxel@redhat.com>
Reviewed-by: Ray Ni <ray.ni@intel.com>
commit a6f799e7fd (parent 0c3f8766b0)
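At its core, the patch replaces the raw mReservedApLoopFunc pointer with a union, so each path invokes the relocated AP loop through a member named for the processors it serves. The standalone C sketch below is not EDK2 code; AP_LOOP_ENTRY, AP_LOOP_FN, DemoLoop, and UseSevEs are illustrative stand-ins. It only shows the union-dispatch pattern that the diff introduces.

#include <stdio.h>

typedef void (*AP_LOOP_FN)(unsigned long StackTop);

/* Mirrors the idea of RELOCATE_AP_LOOP_ENTRY: one raw pointer view for
   relocation, plus one typed entry point per execution path.              */
typedef union {
  void        *Data;          /* raw view, used when copying the loop code */
  AP_LOOP_FN   AmdSevEntry;   /* path 1: 64-bit AMD with SEV-ES            */
  AP_LOOP_FN   GenericEntry;  /* path 2: Intel, 32-bit AMD, AMD w/o SEV-ES */
} AP_LOOP_ENTRY;

static void DemoLoop (unsigned long StackTop)
{
  printf ("AP loop running, stack top = 0x%lx\n", StackTop);
}

int main (void)
{
  AP_LOOP_ENTRY  Entry;
  int            UseSevEs = 0;  /* stands in for CpuMpData->UseSevEsAPMethod */

  /* The real patch copies the relocated loop into a reserved buffer through
     the .Data member; here we simply point the union at a local function.   */
  Entry.GenericEntry = DemoLoop;

  if (UseSevEs) {
    Entry.AmdSevEntry (0x1000);   /* would use the SEV-ES reset stack        */
  } else {
    Entry.GenericEntry (0x2000);  /* uses the reserved top of the AP stack   */
  }

  return 0;
}

In the patch itself, mReservedApLoop.Data receives the reserved buffer, CopyMem relocates the AP loop into it, and RelocateApLoop calls either AmdSevEntry or GenericEntry depending on CpuMpData->UseSevEsAPMethod, as the diff below shows.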
--- a/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c
+++ b/UefiCpuPkg/Library/MpInitLib/DxeMpLib.c
@@ -1,7 +1,7 @@
 /** @file
   MP initialize support functions for DXE phase.
 
-  Copyright (c) 2016 - 2020, Intel Corporation. All rights reserved.<BR>
+  Copyright (c) 2016 - 2023, Intel Corporation. All rights reserved.<BR>
   SPDX-License-Identifier: BSD-2-Clause-Patent
 
 **/
@@ -25,7 +25,7 @@ EFI_EVENT mCheckAllApsEvent = NULL;
 EFI_EVENT                 mMpInitExitBootServicesEvent = NULL;
 EFI_EVENT                 mLegacyBootEvent             = NULL;
 volatile BOOLEAN          mStopCheckAllApsStatus       = TRUE;
-VOID                      *mReservedApLoopFunc         = NULL;
+RELOCATE_AP_LOOP_ENTRY    mReservedApLoop;
 UINTN                     mReservedTopOfApStack;
 volatile UINT32           mNumberToFinish = 0;
 
@@ -380,7 +380,6 @@ RelocateApLoop (
 {
   CPU_MP_DATA           *CpuMpData;
   BOOLEAN               MwaitSupport;
-  ASM_RELOCATE_AP_LOOP  AsmRelocateApLoopFunc;
   UINTN                 ProcessorNumber;
   UINTN                 StackStart;
 
@@ -388,13 +387,11 @@ RelocateApLoop (
   CpuMpData    = GetCpuMpData ();
   MwaitSupport = IsMwaitSupport ();
   if (CpuMpData->UseSevEsAPMethod) {
+    //
+    // 64-bit AMD processors with SEV-ES
+    //
     StackStart = CpuMpData->SevEsAPResetStackStart;
-  } else {
-    StackStart = mReservedTopOfApStack;
-  }
-
-  AsmRelocateApLoopFunc = (ASM_RELOCATE_AP_LOOP)(UINTN)mReservedApLoopFunc;
-  AsmRelocateApLoopFunc (
+    mReservedApLoop.AmdSevEntry (
       MwaitSupport,
       CpuMpData->ApTargetCState,
       CpuMpData->PmCodeSegment,
@@ -404,6 +401,23 @@ RelocateApLoop (
       CpuMpData->SevEsAPBuffer,
       CpuMpData->WakeupBuffer
       );
+  } else {
+    //
+    // Intel processors (32-bit or 64-bit), 32-bit AMD processors, or 64-bit AMD processors without SEV-ES
+    //
+    StackStart = mReservedTopOfApStack;
+    mReservedApLoop.GenericEntry (
+      MwaitSupport,
+      CpuMpData->ApTargetCState,
+      CpuMpData->PmCodeSegment,
+      StackStart - ProcessorNumber * AP_SAFE_STACK_SIZE,
+      (UINTN)&mNumberToFinish,
+      CpuMpData->Pm16CodeSegment,
+      CpuMpData->SevEsAPBuffer,
+      CpuMpData->WakeupBuffer
+      );
+  }
 
   //
   // It should never reach here
   //
@@ -547,8 +561,8 @@ InitMpGlobalData (
                  );
   ASSERT_EFI_ERROR (Status);
 
-  mReservedApLoopFunc = (VOID *)(UINTN)Address;
-  ASSERT (mReservedApLoopFunc != NULL);
+  mReservedApLoop.Data = (VOID *)(UINTN)Address;
+  ASSERT (mReservedApLoop.Data != NULL);
 
   //
   // Make sure that the buffer memory is executable if NX protection is enabled
@@ -583,7 +597,7 @@ InitMpGlobalData (
   mReservedTopOfApStack = (UINTN)Address + ApSafeBufferSize;
   ASSERT ((mReservedTopOfApStack & (UINTN)(CPU_STACK_ALIGNMENT - 1)) == 0);
   CopyMem (
-    mReservedApLoopFunc,
+    mReservedApLoop.Data,
     CpuMpData->AddressMap.RelocateApLoopFuncAddress,
     CpuMpData->AddressMap.RelocateApLoopFuncSize
     );
--- a/UefiCpuPkg/Library/MpInitLib/MpLib.h
+++ b/UefiCpuPkg/Library/MpInitLib/MpLib.h
@@ -402,6 +402,12 @@ AsmExchangeRole (
   IN CPU_EXCHANGE_ROLE_INFO  *OthersInfo
   );
 
+typedef union {
+  VOID                    *Data;
+  ASM_RELOCATE_AP_LOOP    AmdSevEntry;  // 64-bit AMD Sev processors
+  ASM_RELOCATE_AP_LOOP    GenericEntry; // Intel processors (32-bit or 64-bit), 32-bit AMD processors, or AMD non-Sev processors
+} RELOCATE_AP_LOOP_ENTRY;
+
 /**
   Get the pointer to CPU MP Data structure.
 