TianoCore EDK2 master
IntelSmmCpuFeaturesLib.c
#include "CpuFeaturesLib.h"

#include <Library/MtrrLib.h>

//
// Machine Specific Registers (MSRs)
//
#define SMM_FEATURES_LIB_IA32_MTRR_CAP            0x0FE
#define SMM_FEATURES_LIB_IA32_FEATURE_CONTROL     0x03A
#define SMM_FEATURES_LIB_IA32_SMRR_PHYSBASE       0x1F2
#define SMM_FEATURES_LIB_IA32_SMRR_PHYSMASK       0x1F3
#define SMM_FEATURES_LIB_IA32_CORE_SMRR_PHYSBASE  0x0A0
#define SMM_FEATURES_LIB_IA32_CORE_SMRR_PHYSMASK  0x0A1
#define EFI_MSR_SMRR_MASK                         0xFFFFF000
#define EFI_MSR_SMRR_PHYS_MASK_VALID              BIT11
#define SMM_FEATURES_LIB_SMM_FEATURE_CONTROL      0x4E0

//
// MSRs required for configuration of SMM Code Access Check
//
#define SMM_FEATURES_LIB_IA32_MCA_CAP  0x17D
#define SMM_CODE_ACCESS_CHK_BIT        BIT58

//
// Set default value to assume IA-32 Architectural MSRs are used
//
UINT32  mSmrrPhysBaseMsr = SMM_FEATURES_LIB_IA32_SMRR_PHYSBASE;
UINT32  mSmrrPhysMaskMsr = SMM_FEATURES_LIB_IA32_SMRR_PHYSMASK;

//
// Indicates whether the SmBase for each processor has already been relocated. If TRUE,
// there is no need to do the relocation in SmmCpuFeaturesInitializeProcessor().
//
BOOLEAN  mSmmCpuFeaturesSmmRelocated;

//
// Set default value to assume MTRRs need to be configured on each SMI
//
BOOLEAN  mNeedConfigureMtrrs = TRUE;

//
// Array for state of SMRR enable on all CPUs
//
BOOLEAN  *mSmrrEnabled;

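/**
  Library initialization worker for this Intel SmmCpuFeaturesLib instance.

  Detects the CPU family/model, selects the SMRR base/mask MSR pair to use,
  checks SMRR support against the PcdSmrrEnable feature flag, determines
  whether MTRRs must be configured on each SMI (IA-32 processors without
  Intel 64 support), allocates the per-CPU SMRR enable state array, and
  records whether SmBase relocation has already been performed
  (gSmmBaseHobGuid present).
**/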
VOID
CpuFeaturesLibInitialization (
  VOID
  )
{
  UINT32  RegEax;
  UINT32  RegEdx;
  UINTN   FamilyId;
  UINTN   ModelId;

  //
  // Retrieve CPU Family and Model
  //
  AsmCpuid (CPUID_VERSION_INFO, &RegEax, NULL, NULL, &RegEdx);
  FamilyId = (RegEax >> 8) & 0xf;
  ModelId  = (RegEax >> 4) & 0xf;
  if ((FamilyId == 0x06) || (FamilyId == 0x0f)) {
    ModelId = ModelId | ((RegEax >> 12) & 0xf0);
  }

  //
  // Check CPUID(CPUID_VERSION_INFO).EDX[12] for MTRR capability
  //
  if ((RegEdx & BIT12) != 0) {
    //
    // Check MTRR_CAP MSR bit 11 for SMRR support
    //
    if ((AsmReadMsr64 (SMM_FEATURES_LIB_IA32_MTRR_CAP) & BIT11) != 0) {
      ASSERT (FeaturePcdGet (PcdSmrrEnable));
    }
  }

  //
  // Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  // Volume 3C, Section 35.3 MSRs in the Intel(R) Atom(TM) Processor Family
  //
  // If CPU Family/Model is 06_1CH, 06_26H, 06_27H, 06_35H or 06_36H, then
  // SMRR Physical Base and SMM Physical Mask MSRs are not available.
  //
  if (FamilyId == 0x06) {
    if ((ModelId == 0x1C) || (ModelId == 0x26) || (ModelId == 0x27) || (ModelId == 0x35) || (ModelId == 0x36)) {
      ASSERT (!FeaturePcdGet (PcdSmrrEnable));
    }
  }

  //
  // Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  // Volume 3C, Section 35.2 MSRs in the Intel(R) Core(TM) 2 Processor Family
  //
  // If CPU Family/Model is 06_0F or 06_17, then use Intel(R) Core(TM) 2
  // Processor Family MSRs
  //
  if (FamilyId == 0x06) {
    if ((ModelId == 0x17) || (ModelId == 0x0f)) {
      mSmrrPhysBaseMsr = SMM_FEATURES_LIB_IA32_CORE_SMRR_PHYSBASE;
      mSmrrPhysMaskMsr = SMM_FEATURES_LIB_IA32_CORE_SMRR_PHYSMASK;
    }
  }

  //
  // Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  // Volume 3C, Section 34.4.2 SMRAM Caching
  //   An IA-32 processor does not automatically write back and invalidate its
  //   caches before entering SMM or before exiting SMM. Because of this behavior,
  //   care must be taken in the placement of the SMRAM in system memory and in
  //   the caching of the SMRAM to prevent cache incoherence when switching back
  //   and forth between SMM and protected mode operation.
  //
  // An IA-32 processor is a processor that does not support the Intel 64
  // Architecture. Support for the Intel 64 Architecture can be detected from
  // CPUID(CPUID_EXTENDED_CPU_SIG).EDX[29]
  //
  // If an IA-32 processor is detected, then set mNeedConfigureMtrrs to TRUE,
  // so caches are flushed on SMI entry and SMI exit, the interrupted code
  // MTRRs are saved/restored, and MTRRs for SMM are loaded.
  //
  AsmCpuid (CPUID_EXTENDED_FUNCTION, &RegEax, NULL, NULL, NULL);
  if (RegEax >= CPUID_EXTENDED_CPU_SIG) {
    AsmCpuid (CPUID_EXTENDED_CPU_SIG, NULL, NULL, NULL, &RegEdx);
    if ((RegEdx & BIT29) != 0) {
      mNeedConfigureMtrrs = FALSE;
    }
  }

  //
  // Allocate array for state of SMRR enable on all CPUs
  //
  mSmrrEnabled = (BOOLEAN *)AllocatePool (sizeof (BOOLEAN) * GetCpuMaxLogicalProcessorNumber ());
  ASSERT (mSmrrEnabled != NULL);

  //
  // If gSmmBaseHobGuid is found, the SmBase info has already been relocated and
  // recorded in the SmBase array.
  //
  mSmmCpuFeaturesSmmRelocated = (BOOLEAN)(GetFirstGuidHob (&gSmmBaseHobGuid) != NULL);
}

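/**
  Called during the initialization of each CPU to configure processor specific
  SMM features: programs SMBASE in the SMRAM save state (unless SmBase
  relocation was already done), programs and validates the SMRR base/mask MSRs
  when PcdSmrrEnable is set, and checks for SMM Code Access Check support.

  @param[in] CpuIndex        The index of the CPU being initialized.
  @param[in] IsMonarch       TRUE if CpuIndex is the index of the monarch CPU.
  @param[in] ProcessorInfo   Processor information for the CPU specified by CpuIndex.
  @param[in] CpuHotPlugData  Provides the SMBASE array and the SMRR base/size.
**/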
VOID
EFIAPI
SmmCpuFeaturesInitializeProcessor (
  IN UINTN                      CpuIndex,
  IN BOOLEAN                    IsMonarch,
  IN EFI_PROCESSOR_INFORMATION  *ProcessorInfo,
  IN CPU_HOT_PLUG_DATA          *CpuHotPlugData
  )
{
  SMRAM_SAVE_STATE_MAP  *CpuState;
  UINT64                FeatureControl;
  UINT32                RegEax;
  UINT32                RegEdx;
  UINTN                 FamilyId;
  UINTN                 ModelId;

  //
  // No need to configure SMBASE if SmBase relocation has been done.
  //
  if (!mSmmCpuFeaturesSmmRelocated) {
    //
    // Configure SMBASE.
    //
    CpuState             = (SMRAM_SAVE_STATE_MAP *)(UINTN)(SMM_DEFAULT_SMBASE + SMRAM_SAVE_STATE_MAP_OFFSET);
    CpuState->x86.SMBASE = (UINT32)CpuHotPlugData->SmBase[CpuIndex];
  }

  //
  // Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  // Volume 3C, Section 35.2 MSRs in the Intel(R) Core(TM) 2 Processor Family
  //
  // If Intel(R) Core(TM) 2 Processor Family MSRs are being used, then
  // make sure SMRR Enable(BIT3) of MSR_FEATURE_CONTROL MSR(0x3A) is set before
  // accessing SMRR base/mask MSRs. If Lock(BIT0) of MSR_FEATURE_CONTROL MSR(0x3A)
  // is set, then the MSR is locked and can not be modified.
  //
  if ((FeaturePcdGet (PcdSmrrEnable)) && (mSmrrPhysBaseMsr == SMM_FEATURES_LIB_IA32_CORE_SMRR_PHYSBASE)) {
    FeatureControl = AsmReadMsr64 (SMM_FEATURES_LIB_IA32_FEATURE_CONTROL);
    if ((FeatureControl & BIT3) == 0) {
      ASSERT ((FeatureControl & BIT0) == 0);
      if ((FeatureControl & BIT0) == 0) {
        AsmWriteMsr64 (SMM_FEATURES_LIB_IA32_FEATURE_CONTROL, FeatureControl | BIT3);
      }
    }
  }

  //
  // If SMRR is supported, then program SMRR base/mask MSRs.
  // The EFI_MSR_SMRR_PHYS_MASK_VALID bit is not set until the first normal SMI.
  // The code that initializes the SMM environment is running in normal mode
  // from the SMRAM region. If SMRR is enabled here, then the SMRAM region
  // is protected and the normal mode code execution will fail.
  //
  if (FeaturePcdGet (PcdSmrrEnable)) {
    //
    // SMRR size cannot be less than 4-KBytes
    // SMRR size must be of length 2^n
    // SMRR base alignment cannot be less than SMRR length
    //
    if ((CpuHotPlugData->SmrrSize < SIZE_4KB) ||
        (CpuHotPlugData->SmrrSize != GetPowerOfTwo32 (CpuHotPlugData->SmrrSize)) ||
        ((CpuHotPlugData->SmrrBase & ~(CpuHotPlugData->SmrrSize - 1)) != CpuHotPlugData->SmrrBase))
    {
      //
      // Print message and halt if CPU is Monarch
      //
      if (IsMonarch) {
        DEBUG ((DEBUG_ERROR, "SMM Base/Size does not meet alignment/size requirement!\n"));
        CpuDeadLoop ();
      }
    } else {
      AsmWriteMsr64 (mSmrrPhysBaseMsr, CpuHotPlugData->SmrrBase | MTRR_CACHE_WRITE_BACK);
      AsmWriteMsr64 (mSmrrPhysMaskMsr, (~(CpuHotPlugData->SmrrSize - 1) & EFI_MSR_SMRR_MASK));
      mSmrrEnabled[CpuIndex] = FALSE;
    }
  }

  //
  // Retrieve CPU Family and Model
  //
  AsmCpuid (CPUID_VERSION_INFO, &RegEax, NULL, NULL, &RegEdx);
  FamilyId = (RegEax >> 8) & 0xf;
  ModelId  = (RegEax >> 4) & 0xf;
  if ((FamilyId == 0x06) || (FamilyId == 0x0f)) {
    ModelId = ModelId | ((RegEax >> 12) & 0xf0);
  }

  //
  // Intel(R) 64 and IA-32 Architectures Software Developer's Manual
  // Volume 3C, Section 35.10.1 MSRs in 4th Generation Intel(R) Core(TM)
  // Processor Family.
  //
  // If CPU Family/Model is 06_3C, 06_45, or 06_46 then use 4th Generation
  // Intel(R) Core(TM) Processor Family MSRs.
  //
  if (FamilyId == 0x06) {
    if ((ModelId == 0x3C) || (ModelId == 0x45) || (ModelId == 0x46) ||
        (ModelId == 0x3D) || (ModelId == 0x47) || (ModelId == 0x4E) || (ModelId == 0x4F) ||
        (ModelId == 0x3F) || (ModelId == 0x56) || (ModelId == 0x57) || (ModelId == 0x5C) ||
        (ModelId == 0x8C))
    {
      //
      // Check to see if the CPU supports the SMM Code Access Check feature.
      // Do not access this MSR unless the CPU supports SmmRegFeatureControl.
      //
      if ((AsmReadMsr64 (SMM_FEATURES_LIB_IA32_MCA_CAP) & SMM_CODE_ACCESS_CHK_BIT) != 0) {
        ASSERT (FeaturePcdGet (PcdSmmFeatureControlEnable));
      }
    }
  }

  //
  // Call internal worker function that completes the CPU initialization.
  //
  FinishSmmCpuFeaturesInitializeProcessor ();
}

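/**
  Returns TRUE if MTRRs must be saved/restored and SMRAM cacheability
  reconfigured on each SMI; this is the case on IA-32 processors that do not
  support the Intel 64 architecture.

  @retval TRUE   MTRRs must be configured on each SMI.
  @retval FALSE  MTRRs do not need to be configured on each SMI.
**/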
BOOLEAN
EFIAPI
SmmCpuFeaturesNeedConfigureMtrrs (
  VOID
  )
{
  return mNeedConfigureMtrrs;
}

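/**
  Disables SMRR protection by clearing the SMRR physical mask valid bit.
  Only performed when PcdSmrrEnable is set and MTRRs must be configured
  on each SMI.
**/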
VOID
EFIAPI
SmmCpuFeaturesDisableSmrr (
  VOID
  )
{
  if (FeaturePcdGet (PcdSmrrEnable) && mNeedConfigureMtrrs) {
    AsmWriteMsr64 (mSmrrPhysMaskMsr, AsmReadMsr64 (mSmrrPhysMaskMsr) & ~EFI_MSR_SMRR_PHYS_MASK_VALID);
  }
}

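/**
  Re-enables SMRR protection by setting the SMRR physical mask valid bit.
  Only performed when PcdSmrrEnable is set and MTRRs must be configured
  on each SMI.
**/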
VOID
EFIAPI
SmmCpuFeaturesReenableSmrr (
  VOID
  )
{
  if (FeaturePcdGet (PcdSmrrEnable) && mNeedConfigureMtrrs) {
    AsmWriteMsr64 (mSmrrPhysMaskMsr, AsmReadMsr64 (mSmrrPhysMaskMsr) | EFI_MSR_SMRR_PHYS_MASK_VALID);
  }
}

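/**
  Hook invoked for each CPU when it enters SMM. On the first normal SMI after
  initialization, enables SMRR by setting the physical mask valid bit and
  records the state in mSmrrEnabled.

  @param[in] CpuIndex  The index of the CPU that entered SMM.
**/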
VOID
EFIAPI
SmmCpuFeaturesRendezvousEntry (
  IN UINTN  CpuIndex
  )
{
  //
  // If SMRR is supported and this is the first normal SMI, then enable SMRR
  //
  if (FeaturePcdGet (PcdSmrrEnable) && !mSmrrEnabled[CpuIndex]) {
    AsmWriteMsr64 (mSmrrPhysMaskMsr, AsmReadMsr64 (mSmrrPhysMaskMsr) | EFI_MSR_SMRR_PHYS_MASK_VALID);
    mSmrrEnabled[CpuIndex] = TRUE;
  }
}

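/**
  Reads the value of an SMM register. Only the SmmRegFeatureControl register
  (MSR 0x4E0) is handled, and only when PcdSmmFeatureControlEnable is set;
  all other requests return 0.

  @param[in] CpuIndex  The index of the CPU to read the SMM register on.
  @param[in] RegName   Identifies the SMM register to read.

  @return  The value of the requested register, or 0 if it is not supported.
**/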
UINT64
EFIAPI
SmmCpuFeaturesGetSmmRegister (
  IN UINTN         CpuIndex,
  IN SMM_REG_NAME  RegName
  )
{
  if (FeaturePcdGet (PcdSmmFeatureControlEnable) && (RegName == SmmRegFeatureControl)) {
    return AsmReadMsr64 (SMM_FEATURES_LIB_SMM_FEATURE_CONTROL);
  }

  return 0;
}

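/**
  Writes the value of an SMM register. Only the SmmRegFeatureControl register
  (MSR 0x4E0) is handled, and only when PcdSmmFeatureControlEnable is set;
  all other requests are ignored.

  @param[in] CpuIndex  The index of the CPU to write the SMM register on.
  @param[in] RegName   Identifies the SMM register to write.
  @param[in] Value     The value to write.
**/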
VOID
EFIAPI
SmmCpuFeaturesSetSmmRegister (
  IN UINTN         CpuIndex,
  IN SMM_REG_NAME  RegName,
  IN UINT64        Value
  )
{
  if (FeaturePcdGet (PcdSmmFeatureControlEnable) && (RegName == SmmRegFeatureControl)) {
    AsmWriteMsr64 (SMM_FEATURES_LIB_SMM_FEATURE_CONTROL, Value);
  }
}

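/**
  Hook point that allows a library instance to override the instruction pointer
  used when a CPU returns from SMM during SMM initialization. This instance
  does not customize the return flow and always returns 0, leaving the caller's
  default behavior in place.

  @param[in] CpuIndex                 The index of the CPU.
  @param[in] CpuState                 Pointer to the SMRAM save state map for the CPU.
  @param[in] NewInstructionPointer32  Instruction pointer to use for 32-bit resume.
  @param[in] NewInstructionPointer    Instruction pointer to use for 64-bit resume.

  @return  0, indicating that this instance does not modify the return flow.
**/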
UINT64
EFIAPI
SmmCpuFeaturesHookReturnFromSmm (
  IN UINTN                 CpuIndex,
  IN SMRAM_SAVE_STATE_MAP  *CpuState,
  IN UINT64                NewInstructionPointer32,
  IN UINT64                NewInstructionPointer
  )
{
  return 0;
}

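/**
  Reports whether an SMM register is supported. Only the SmmRegFeatureControl
  register is reported as supported, and only when PcdSmmFeatureControlEnable
  is set.

  @param[in] CpuIndex  The index of the CPU to check.
  @param[in] RegName   Identifies the SMM register to check.

  @retval TRUE   The SMM register is supported.
  @retval FALSE  The SMM register is not supported.
**/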
BOOLEAN
EFIAPI
SmmCpuFeaturesIsSmmRegisterSupported (
  IN UINTN         CpuIndex,
  IN SMM_REG_NAME  RegName
  )
{
  if (FeaturePcdGet (PcdSmmFeatureControlEnable) && (RegName == SmmRegFeatureControl)) {
    return TRUE;
  }

  return FALSE;
}