/** @file
  MTRR setting library (TianoCore EDK2, UefiCpuPkg/Library/MtrrLib/MtrrLib.c).

  Most services in this library instance are intended to be invoked by the BSP
  only, except for the worker that synchronizes the BSP's MTRR settings to APs.

  NOTE(review): this file was recovered from a doxygen HTML export; navigation
  chrome removed and dropped lines restored from the upstream source.
**/
13#include <Uefi.h>
15#include <Register/Intel/Msr.h>
16
17#include <Library/MtrrLib.h>
18#include <Library/BaseLib.h>
19#include <Library/CpuLib.h>
21#include <Library/DebugLib.h>
22
23#define OR_SEED 0x0101010101010101ull
24#define CLEAR_SEED 0xFFFFFFFFFFFFFFFFull
25#define MAX_WEIGHT MAX_UINT8
26#define SCRATCH_BUFFER_SIZE (4 * SIZE_4KB)
27#define MTRR_LIB_ASSERT_ALIGNED(B, L) ASSERT ((B & ~(L - 1)) == B);
28
29#define M(x, y) ((x) * VertexCount + (y))
30#define O(x, y) ((y) * VertexCount + (x))
31
32//
33// Context to save and restore when MTRRs are programmed
34//
35typedef struct {
36 UINTN Cr4;
37 BOOLEAN InterruptState;
40
41typedef struct {
42 UINT64 Address;
43 UINT64 Alignment;
44 UINT64 Length;
45 MTRR_MEMORY_CACHE_TYPE Type : 7;
46
47 //
48 // Temprary use for calculating the best MTRR settings.
49 //
50 BOOLEAN Visited : 1;
51 UINT8 Weight;
52 UINT16 Previous;
54
55//
56// This table defines the offset, base and length of the fixed MTRRs
57//
58CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {
59 {
61 0,
62 SIZE_64KB
63 },
64 {
66 0x80000,
67 SIZE_16KB
68 },
69 {
71 0xA0000,
72 SIZE_16KB
73 },
74 {
76 0xC0000,
77 SIZE_4KB
78 },
79 {
81 0xC8000,
82 SIZE_4KB
83 },
84 {
86 0xD0000,
87 SIZE_4KB
88 },
89 {
91 0xD8000,
92 SIZE_4KB
93 },
94 {
96 0xE0000,
97 SIZE_4KB
98 },
99 {
101 0xE8000,
102 SIZE_4KB
103 },
104 {
106 0xF0000,
107 SIZE_4KB
108 },
109 {
111 0xF8000,
112 SIZE_4KB
113 }
114};
115
116//
117// Lookup table used to print MTRRs
118//
119GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {
120 "UC", // CacheUncacheable
121 "WC", // CacheWriteCombining
122 "R*", // Invalid
123 "R*", // Invalid
124 "WT", // CacheWriteThrough
125 "WP", // CacheWriteProtected
126 "WB", // CacheWriteBack
127 "R*" // Invalid
128};
129
139VOID
141 IN MTRR_SETTINGS *MtrrSetting
142 );
143
155BOOLEAN
157 OUT BOOLEAN *FixedMtrrSupported OPTIONAL,
158 OUT UINT32 *VariableMtrrCount OPTIONAL
159 )
160{
163
164 //
165 // MTRR is not supported in TD-Guest.
166 //
167 if (TdIsEnabled ()) {
168 return FALSE;
169 }
170
171 //
172 // Check CPUID(1).EDX[12] for MTRR capability
173 //
174 AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &Edx.Uint32);
175 if (Edx.Bits.MTRR == 0) {
176 if (FixedMtrrSupported != NULL) {
177 *FixedMtrrSupported = FALSE;
178 }
179
180 if (VariableMtrrCount != NULL) {
181 *VariableMtrrCount = 0;
182 }
183
184 return FALSE;
185 }
186
187 //
188 // Check the number of variable MTRRs and determine whether fixed MTRRs exist.
189 // If the count of variable MTRRs is zero and there are no fixed MTRRs,
190 // then return false
191 //
193 ASSERT (MtrrCap.Bits.VCNT <= ARRAY_SIZE (((MTRR_VARIABLE_SETTINGS *)0)->Mtrr));
194 if (FixedMtrrSupported != NULL) {
195 *FixedMtrrSupported = (BOOLEAN)(MtrrCap.Bits.FIX == 1);
196 }
197
198 if (VariableMtrrCount != NULL) {
199 *VariableMtrrCount = MtrrCap.Bits.VCNT;
200 }
201
202 if ((MtrrCap.Bits.VCNT == 0) && (MtrrCap.Bits.FIX == 0)) {
203 return FALSE;
204 }
205
206 return TRUE;
207}
208
215UINT32
217 VOID
218 )
219{
221
223 ASSERT (MtrrCap.Bits.VCNT <= ARRAY_SIZE (((MTRR_VARIABLE_SETTINGS *)0)->Mtrr));
224 return MtrrCap.Bits.VCNT;
225}
226
233UINT32
234EFIAPI
236 VOID
237 )
238{
239 if (!IsMtrrSupported ()) {
240 return 0;
241 }
242
244}
245
252UINT32
254 VOID
255 )
256{
257 UINT32 VariableMtrrCount;
258 UINT32 ReservedMtrrNumber;
259
260 VariableMtrrCount = GetVariableMtrrCountWorker ();
261 ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
262 if (VariableMtrrCount < ReservedMtrrNumber) {
263 return 0;
264 }
265
266 return VariableMtrrCount - ReservedMtrrNumber;
267}
268
275UINT32
276EFIAPI
278 VOID
279 )
280{
281 if (!IsMtrrSupported ()) {
282 return 0;
283 }
284
286}
287
300MTRR_MEMORY_CACHE_TYPE
302 IN CONST MTRR_SETTINGS *MtrrSetting
303 )
304{
306
307 if (MtrrSetting == NULL) {
309 } else {
310 DefType.Uint64 = MtrrSetting->MtrrDefType;
311 }
312
313 return (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
314}
315
322MTRR_MEMORY_CACHE_TYPE
323EFIAPI
325 VOID
326 )
327{
328 if (!IsMtrrSupported ()) {
329 return CacheUncacheable;
330 }
331
333}
334
344VOID
346 OUT MTRR_CONTEXT *MtrrContext
347 )
348{
350
351 //
352 // Disable interrupts and save current interrupt state
353 //
354 MtrrContext->InterruptState = SaveAndDisableInterrupts ();
355
356 //
357 // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
358 //
360
361 //
362 // Save original CR4 value and clear PGE flag (Bit 7)
363 //
364 MtrrContext->Cr4 = AsmReadCr4 ();
365 AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));
366
367 //
368 // Flush all TLBs
369 //
370 CpuFlushTlb ();
371
372 //
373 // Save current MTRR default type and disable MTRRs
374 //
375 MtrrContext->DefType.Uint64 = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
376 DefType.Uint64 = MtrrContext->DefType.Uint64;
377 DefType.Bits.E = 0;
379}
380
390VOID
392 IN MTRR_CONTEXT *MtrrContext
393 )
394{
395 //
396 // Flush all TLBs
397 //
398 CpuFlushTlb ();
399
400 //
401 // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
402 //
404
405 //
406 // Restore original CR4 value
407 //
408 AsmWriteCr4 (MtrrContext->Cr4);
409
410 //
411 // Restore original interrupt state
412 //
413 SetInterruptState (MtrrContext->InterruptState);
414}
415
425VOID
427 IN MTRR_CONTEXT *MtrrContext
428 )
429{
430 //
431 // Enable Cache MTRR
432 // Note: It's possible that MTRR was not enabled earlier.
433 // But it will be enabled here unconditionally.
434 //
435 MtrrContext->DefType.Bits.E = 1;
436 AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, MtrrContext->DefType.Uint64);
437
439}
440
451 OUT MTRR_FIXED_SETTINGS *FixedSettings
452 )
453{
454 UINT32 Index;
455
456 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
457 FixedSettings->Mtrr[Index] =
458 AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
459 }
460
461 return FixedSettings;
462}
463
473EFIAPI
475 OUT MTRR_FIXED_SETTINGS *FixedSettings
476 )
477{
478 BOOLEAN FixedMtrrSupported;
479
480 MtrrLibIsMtrrSupported (&FixedMtrrSupported, NULL);
481
482 if (!FixedMtrrSupported) {
483 return FixedSettings;
484 }
485
486 return MtrrGetFixedMtrrWorker (FixedSettings);
487}
488
505 IN MTRR_SETTINGS *MtrrSetting,
506 IN UINT32 VariableMtrrCount,
507 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
508 )
509{
510 UINT32 Index;
511
512 ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));
513
514 for (Index = 0; Index < VariableMtrrCount; Index++) {
515 if (MtrrSetting == NULL) {
516 VariableSettings->Mtrr[Index].Base =
517 AsmReadMsr64 (MSR_IA32_MTRR_PHYSBASE0 + (Index << 1));
518 VariableSettings->Mtrr[Index].Mask =
519 AsmReadMsr64 (MSR_IA32_MTRR_PHYSMASK0 + (Index << 1));
520 } else {
521 VariableSettings->Mtrr[Index].Base = MtrrSetting->Variables.Mtrr[Index].Base;
522 VariableSettings->Mtrr[Index].Mask = MtrrSetting->Variables.Mtrr[Index].Mask;
523 }
524 }
525
526 return VariableSettings;
527}
528
545RETURN_STATUS
547 IN MTRR_MEMORY_CACHE_TYPE Type,
548 IN OUT UINT64 *Base,
549 IN OUT UINT64 *Length,
550 IN OUT UINT32 *LastMsrIndex,
551 OUT UINT64 *ClearMask,
552 OUT UINT64 *OrMask
553 )
554{
555 UINT32 MsrIndex;
556 UINT32 LeftByteShift;
557 UINT32 RightByteShift;
558 UINT64 SubLength;
559
560 //
561 // Find the fixed MTRR index to be programmed
562 //
563 for (MsrIndex = *LastMsrIndex + 1; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
564 if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&
565 (*Base <
566 (
567 mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +
568 (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)
569 )
570 )
571 )
572 {
573 break;
574 }
575 }
576
577 ASSERT (MsrIndex != ARRAY_SIZE (mMtrrLibFixedMtrrTable));
578
579 //
580 // Find the begin offset in fixed MTRR and calculate byte offset of left shift
581 //
582 if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
583 //
584 // Base address should be aligned to the begin of a certain Fixed MTRR range.
585 //
586 return RETURN_UNSUPPORTED;
587 }
588
589 LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
590 ASSERT (LeftByteShift < 8);
591
592 //
593 // Find the end offset in fixed MTRR and calculate byte offset of right shift
594 //
595 SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);
596 if (*Length >= SubLength) {
597 RightByteShift = 0;
598 } else {
599 if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
600 //
601 // Length should be aligned to the end of a certain Fixed MTRR range.
602 //
603 return RETURN_UNSUPPORTED;
604 }
605
606 RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
607 //
608 // Update SubLength by actual length
609 //
610 SubLength = *Length;
611 }
612
613 *ClearMask = CLEAR_SEED;
614 *OrMask = MultU64x32 (OR_SEED, (UINT32)Type);
615
616 if (LeftByteShift != 0) {
617 //
618 // Clear the low bits by LeftByteShift
619 //
620 *ClearMask &= LShiftU64 (*ClearMask, LeftByteShift * 8);
621 *OrMask &= LShiftU64 (*OrMask, LeftByteShift * 8);
622 }
623
624 if (RightByteShift != 0) {
625 //
626 // Clear the high bits by RightByteShift
627 //
628 *ClearMask &= RShiftU64 (*ClearMask, RightByteShift * 8);
629 *OrMask &= RShiftU64 (*OrMask, RightByteShift * 8);
630 }
631
632 *Length -= SubLength;
633 *Base += SubLength;
634
635 *LastMsrIndex = MsrIndex;
636
637 return RETURN_SUCCESS;
638}
639
655UINT32
657 IN MTRR_VARIABLE_SETTINGS *VariableSettings,
658 IN UINTN VariableMtrrCount,
659 IN UINT64 MtrrValidBitsMask,
660 IN UINT64 MtrrValidAddressMask,
661 OUT VARIABLE_MTRR *VariableMtrr
662 )
663{
664 UINTN Index;
665 UINT32 UsedMtrr;
666
667 ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * ARRAY_SIZE (VariableSettings->Mtrr));
668 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
669 if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {
670 VariableMtrr[Index].Msr = (UINT32)Index;
671 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
672 VariableMtrr[Index].Length =
673 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
674 VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);
675 VariableMtrr[Index].Valid = TRUE;
676 VariableMtrr[Index].Used = TRUE;
677 UsedMtrr++;
678 }
679 }
680
681 return UsedMtrr;
682}
683
698UINT32
700 IN CONST MTRR_VARIABLE_SETTINGS *VariableSettings,
701 IN UINTN VariableMtrrCount,
702 IN UINT64 MtrrValidBitsMask,
703 IN UINT64 MtrrValidAddressMask,
704 OUT MTRR_MEMORY_RANGE *VariableMtrr
705 )
706{
707 UINTN Index;
708 UINT32 UsedMtrr;
709
710 ZeroMem (VariableMtrr, sizeof (MTRR_MEMORY_RANGE) * ARRAY_SIZE (VariableSettings->Mtrr));
711 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
712 if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {
713 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
714 VariableMtrr[Index].Length =
715 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
716 VariableMtrr[Index].Type = (MTRR_MEMORY_CACHE_TYPE)(VariableSettings->Mtrr[Index].Base & 0x0ff);
717 UsedMtrr++;
718 }
719 }
720
721 return UsedMtrr;
722}
723
738UINT32
739EFIAPI
741 IN UINT64 MtrrValidBitsMask,
742 IN UINT64 MtrrValidAddressMask,
743 OUT VARIABLE_MTRR *VariableMtrr
744 )
745{
746 MTRR_VARIABLE_SETTINGS VariableSettings;
747
748 if (!IsMtrrSupported ()) {
749 return 0;
750 }
751
753 NULL,
755 &VariableSettings
756 );
757
759 &VariableSettings,
761 MtrrValidBitsMask,
762 MtrrValidAddressMask,
763 VariableMtrr
764 );
765}
766
776UINT64
778 UINT64 Address,
779 UINT64 Alignment0
780 )
781{
782 if (Address == 0) {
783 return Alignment0;
784 }
785
786 return Address & ((~Address) + 1);
787}
788
804BOOLEAN
806 IN MTRR_MEMORY_CACHE_TYPE Left,
807 IN MTRR_MEMORY_CACHE_TYPE Right
808 )
809{
810 return (BOOLEAN)(Left == CacheUncacheable || (Left == CacheWriteThrough && Right == CacheWriteBack));
811}
812
822VOID
824 OUT UINT64 *MtrrValidBitsMask,
825 OUT UINT64 *MtrrValidAddressMask
826 )
827{
828 UINT32 MaxExtendedFunction;
829 CPUID_VIR_PHY_ADDRESS_SIZE_EAX VirPhyAddressSize;
830 UINT32 MaxFunction;
831 CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS_ECX ExtendedFeatureFlagsEcx;
833
834 AsmCpuid (CPUID_EXTENDED_FUNCTION, &MaxExtendedFunction, NULL, NULL, NULL);
835
836 if (MaxExtendedFunction >= CPUID_VIR_PHY_ADDRESS_SIZE) {
837 AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &VirPhyAddressSize.Uint32, NULL, NULL, NULL);
838 } else {
839 VirPhyAddressSize.Bits.PhysicalAddressBits = 36;
840 }
841
842 //
843 // CPUID enumeration of MAX_PA is unaffected by TME-MK activation and will continue
844 // to report the maximum physical address bits available for software to use,
845 // irrespective of the number of KeyID bits.
846 // So, we need to check if TME is enabled and adjust the PA size accordingly.
847 //
848 AsmCpuid (CPUID_SIGNATURE, &MaxFunction, NULL, NULL, NULL);
849 if (MaxFunction >= CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS) {
850 AsmCpuidEx (CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS, 0, NULL, NULL, &ExtendedFeatureFlagsEcx.Uint32, NULL);
851 if (ExtendedFeatureFlagsEcx.Bits.TME_EN == 1) {
853 if (TmeActivate.Bits.TmeEnable == 1) {
854 VirPhyAddressSize.Bits.PhysicalAddressBits -= TmeActivate.Bits.MkTmeKeyidBits;
855 }
856 }
857 }
858
859 *MtrrValidBitsMask = LShiftU64 (1, VirPhyAddressSize.Bits.PhysicalAddressBits) - 1;
860 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
861}
862
875MTRR_MEMORY_CACHE_TYPE
877 IN MTRR_MEMORY_CACHE_TYPE MtrrType1,
878 IN MTRR_MEMORY_CACHE_TYPE MtrrType2
879 )
880{
881 if (MtrrType1 == MtrrType2) {
882 return MtrrType1;
883 }
884
885 ASSERT (
886 MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2) ||
887 MtrrLibTypeLeftPrecedeRight (MtrrType2, MtrrType1)
888 );
889
890 if (MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2)) {
891 return MtrrType1;
892 } else {
893 return MtrrType2;
894 }
895}
896
910MTRR_MEMORY_CACHE_TYPE
912 IN MTRR_SETTINGS *MtrrSetting,
913 IN PHYSICAL_ADDRESS Address
914 )
915{
917 UINT64 FixedMtrr;
918 UINTN Index;
919 UINTN SubIndex;
920 MTRR_MEMORY_CACHE_TYPE MtrrType;
921 MTRR_MEMORY_RANGE VariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
922 UINT64 MtrrValidBitsMask;
923 UINT64 MtrrValidAddressMask;
924 UINT32 VariableMtrrCount;
925 MTRR_VARIABLE_SETTINGS VariableSettings;
926
927 //
928 // Check if MTRR is enabled, if not, return UC as attribute
929 //
930 if (MtrrSetting == NULL) {
932 } else {
933 DefType.Uint64 = MtrrSetting->MtrrDefType;
934 }
935
936 if (DefType.Bits.E == 0) {
937 return CacheUncacheable;
938 }
939
940 //
941 // If address is less than 1M, then try to go through the fixed MTRR
942 //
943 if (Address < BASE_1MB) {
944 if (DefType.Bits.FE != 0) {
945 //
946 // Go through the fixed MTRR
947 //
948 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
949 if ((Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress) &&
950 (Address < mMtrrLibFixedMtrrTable[Index].BaseAddress +
951 (mMtrrLibFixedMtrrTable[Index].Length * 8)))
952 {
953 SubIndex =
954 ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
955 mMtrrLibFixedMtrrTable[Index].Length;
956 if (MtrrSetting == NULL) {
957 FixedMtrr = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
958 } else {
959 FixedMtrr = MtrrSetting->Fixed.Mtrr[Index];
960 }
961
962 return (MTRR_MEMORY_CACHE_TYPE)(RShiftU64 (FixedMtrr, SubIndex * 8) & 0xFF);
963 }
964 }
965 }
966 }
967
968 VariableMtrrCount = GetVariableMtrrCountWorker ();
969 ASSERT (VariableMtrrCount <= ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
970 MtrrGetVariableMtrrWorker (MtrrSetting, VariableMtrrCount, &VariableSettings);
971
972 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
974 &VariableSettings,
975 VariableMtrrCount,
976 MtrrValidBitsMask,
977 MtrrValidAddressMask,
978 VariableMtrr
979 );
980
981 //
982 // Go through the variable MTRR
983 //
984 MtrrType = CacheInvalid;
985 for (Index = 0; Index < VariableMtrrCount; Index++) {
986 if (VariableMtrr[Index].Length != 0) {
987 if ((Address >= VariableMtrr[Index].BaseAddress) &&
988 (Address < VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length))
989 {
990 if (MtrrType == CacheInvalid) {
991 MtrrType = (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type;
992 } else {
993 MtrrType = MtrrLibPrecedence (MtrrType, (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type);
994 }
995 }
996 }
997 }
998
999 //
1000 // If there is no MTRR which covers the Address, use the default MTRR type.
1001 //
1002 if (MtrrType == CacheInvalid) {
1003 MtrrType = (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
1004 }
1005
1006 return MtrrType;
1007}
1008
1019MTRR_MEMORY_CACHE_TYPE
1020EFIAPI
1022 IN PHYSICAL_ADDRESS Address
1023 )
1024{
1025 if (!IsMtrrSupported ()) {
1026 return CacheUncacheable;
1027 }
1028
1030}
1031
1050RETURN_STATUS
1052 IN MTRR_MEMORY_RANGE *Ranges,
1053 IN UINTN Capacity,
1054 IN OUT UINTN *Count,
1055 IN UINT64 BaseAddress,
1056 IN UINT64 Length,
1057 IN MTRR_MEMORY_CACHE_TYPE Type
1058 )
1059{
1060 UINTN Index;
1061 UINT64 Limit;
1062 UINT64 LengthLeft;
1063 UINT64 LengthRight;
1064 UINTN StartIndex;
1065 UINTN EndIndex;
1066 UINTN DeltaCount;
1067
1068 ASSERT (Length != 0);
1069
1070 LengthRight = 0;
1071 LengthLeft = 0;
1072 Limit = BaseAddress + Length;
1073 StartIndex = *Count;
1074 EndIndex = *Count;
1075 for (Index = 0; Index < *Count; Index++) {
1076 if ((StartIndex == *Count) &&
1077 (Ranges[Index].BaseAddress <= BaseAddress) &&
1078 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length))
1079 {
1080 StartIndex = Index;
1081 LengthLeft = BaseAddress - Ranges[Index].BaseAddress;
1082 }
1083
1084 if ((EndIndex == *Count) &&
1085 (Ranges[Index].BaseAddress < Limit) &&
1086 (Limit <= Ranges[Index].BaseAddress + Ranges[Index].Length))
1087 {
1088 EndIndex = Index;
1089 LengthRight = Ranges[Index].BaseAddress + Ranges[Index].Length - Limit;
1090 break;
1091 }
1092 }
1093
1094 ASSERT (StartIndex != *Count && EndIndex != *Count);
1095 if ((StartIndex == EndIndex) && (Ranges[StartIndex].Type == Type)) {
1097 }
1098
1099 //
1100 // The type change may cause merging with previous range or next range.
1101 // Update the StartIndex, EndIndex, BaseAddress, Length so that following
1102 // logic doesn't need to consider merging.
1103 //
1104 if (StartIndex != 0) {
1105 if ((LengthLeft == 0) && (Ranges[StartIndex - 1].Type == Type)) {
1106 StartIndex--;
1107 Length += Ranges[StartIndex].Length;
1108 BaseAddress -= Ranges[StartIndex].Length;
1109 }
1110 }
1111
1112 if (EndIndex != (*Count) - 1) {
1113 if ((LengthRight == 0) && (Ranges[EndIndex + 1].Type == Type)) {
1114 EndIndex++;
1115 Length += Ranges[EndIndex].Length;
1116 }
1117 }
1118
1119 //
1120 // |- 0 -|- 1 -|- 2 -|- 3 -| StartIndex EndIndex DeltaCount Count (Count = 4)
1121 // |++++++++++++++++++| 0 3 1=3-0-2 3
1122 // |+++++++| 0 1 -1=1-0-2 5
1123 // |+| 0 0 -2=0-0-2 6
1124 // |+++| 0 0 -1=0-0-2+1 5
1125 //
1126 //
1127 DeltaCount = EndIndex - StartIndex - 2;
1128 if (LengthLeft == 0) {
1129 DeltaCount++;
1130 }
1131
1132 if (LengthRight == 0) {
1133 DeltaCount++;
1134 }
1135
1136 if (*Count - DeltaCount > Capacity) {
1138 }
1139
1140 //
1141 // Reserve (-DeltaCount) space
1142 //
1143 CopyMem (&Ranges[EndIndex + 1 - DeltaCount], &Ranges[EndIndex + 1], (*Count - EndIndex - 1) * sizeof (Ranges[0]));
1144 *Count -= DeltaCount;
1145
1146 if (LengthLeft != 0) {
1147 Ranges[StartIndex].Length = LengthLeft;
1148 StartIndex++;
1149 }
1150
1151 if (LengthRight != 0) {
1152 Ranges[EndIndex - DeltaCount].BaseAddress = BaseAddress + Length;
1153 Ranges[EndIndex - DeltaCount].Length = LengthRight;
1154 Ranges[EndIndex - DeltaCount].Type = Ranges[EndIndex].Type;
1155 }
1156
1157 Ranges[StartIndex].BaseAddress = BaseAddress;
1158 Ranges[StartIndex].Length = Length;
1159 Ranges[StartIndex].Type = Type;
1160 return RETURN_SUCCESS;
1161}
1162
1174UINT8
1176 IN CONST MTRR_MEMORY_RANGE *Ranges,
1177 IN UINTN RangeCount,
1178 IN UINT64 BaseAddress,
1179 IN UINT64 Length,
1180 IN OUT UINT8 *Types OPTIONAL
1181 )
1182{
1183 UINTN Index;
1184 UINT8 TypeCount;
1185 UINT8 LocalTypes;
1186
1187 TypeCount = 0;
1188 LocalTypes = 0;
1189 for (Index = 0; Index < RangeCount; Index++) {
1190 if ((Ranges[Index].BaseAddress <= BaseAddress) &&
1191 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length)
1192 )
1193 {
1194 if ((LocalTypes & (1 << Ranges[Index].Type)) == 0) {
1195 LocalTypes |= (UINT8)(1 << Ranges[Index].Type);
1196 TypeCount++;
1197 }
1198
1199 if (BaseAddress + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1200 Length -= Ranges[Index].BaseAddress + Ranges[Index].Length - BaseAddress;
1201 BaseAddress = Ranges[Index].BaseAddress + Ranges[Index].Length;
1202 } else {
1203 break;
1204 }
1205 }
1206 }
1207
1208 if (Types != NULL) {
1209 *Types = LocalTypes;
1210 }
1211
1212 return TypeCount;
1213}
1214
1227VOID
1229 IN UINT16 VertexCount,
1230 IN MTRR_LIB_ADDRESS *Vertices,
1231 IN OUT CONST UINT8 *Weight,
1232 IN UINT16 Start,
1233 IN UINT16 Stop,
1234 IN BOOLEAN IncludeOptional
1235 )
1236{
1237 UINT16 Index;
1238 UINT8 MinWeight;
1239 UINT16 MinI;
1240 UINT8 Mandatory;
1241 UINT8 Optional;
1242
1243 for (Index = Start; Index <= Stop; Index++) {
1244 Vertices[Index].Visited = FALSE;
1245 Mandatory = Weight[M (Start, Index)];
1246 Vertices[Index].Weight = Mandatory;
1247 if (Mandatory != MAX_WEIGHT) {
1248 Optional = IncludeOptional ? Weight[O (Start, Index)] : 0;
1249 Vertices[Index].Weight += Optional;
1250 ASSERT (Vertices[Index].Weight >= Optional);
1251 }
1252 }
1253
1254 MinI = Start;
1255 MinWeight = 0;
1256 while (!Vertices[Stop].Visited) {
1257 //
1258 // Update the weight from the shortest vertex to other unvisited vertices
1259 //
1260 for (Index = Start + 1; Index <= Stop; Index++) {
1261 if (!Vertices[Index].Visited) {
1262 Mandatory = Weight[M (MinI, Index)];
1263 if (Mandatory != MAX_WEIGHT) {
1264 Optional = IncludeOptional ? Weight[O (MinI, Index)] : 0;
1265 if (MinWeight + Mandatory + Optional <= Vertices[Index].Weight) {
1266 Vertices[Index].Weight = MinWeight + Mandatory + Optional;
1267 Vertices[Index].Previous = MinI; // Previous is Start based.
1268 }
1269 }
1270 }
1271 }
1272
1273 //
1274 // Find the shortest vertex from Start
1275 //
1276 MinI = VertexCount;
1277 MinWeight = MAX_WEIGHT;
1278 for (Index = Start + 1; Index <= Stop; Index++) {
1279 if (!Vertices[Index].Visited && (MinWeight > Vertices[Index].Weight)) {
1280 MinI = Index;
1281 MinWeight = Vertices[Index].Weight;
1282 }
1283 }
1284
1285 //
1286 // Mark the shortest vertex from Start as visited
1287 //
1288 Vertices[MinI].Visited = TRUE;
1289 }
1290}
1291
1305RETURN_STATUS
1307 IN OUT MTRR_MEMORY_RANGE *Mtrrs,
1308 IN UINT32 MtrrCapacity,
1309 IN OUT UINT32 *MtrrCount,
1310 IN UINT64 BaseAddress,
1311 IN UINT64 Length,
1312 IN MTRR_MEMORY_CACHE_TYPE Type
1313 )
1314{
1315 if (*MtrrCount == MtrrCapacity) {
1317 }
1318
1319 Mtrrs[*MtrrCount].BaseAddress = BaseAddress;
1320 Mtrrs[*MtrrCount].Length = Length;
1321 Mtrrs[*MtrrCount].Type = Type;
1322 (*MtrrCount)++;
1323 return RETURN_SUCCESS;
1324}
1325
1333MTRR_MEMORY_CACHE_TYPE
1335 IN UINT8 TypeBits
1336 )
1337{
1338 INT8 Type;
1339
1340 ASSERT (TypeBits != 0);
1341 for (Type = 7; (INT8)TypeBits > 0; Type--, TypeBits <<= 1) {
1342 }
1343
1344 return (MTRR_MEMORY_CACHE_TYPE)Type;
1345}
1346
1369RETURN_STATUS
1371 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1372 IN UINT64 A0,
1373 IN CONST MTRR_MEMORY_RANGE *Ranges,
1374 IN UINTN RangeCount,
1375 IN UINT16 VertexCount,
1376 IN MTRR_LIB_ADDRESS *Vertices,
1377 IN OUT UINT8 *Weight,
1378 IN UINT16 Start,
1379 IN UINT16 Stop,
1380 IN UINT8 Types,
1381 IN UINT8 TypeCount,
1382 IN OUT MTRR_MEMORY_RANGE *Mtrrs OPTIONAL,
1383 IN UINT32 MtrrCapacity OPTIONAL,
1384 IN OUT UINT32 *MtrrCount OPTIONAL
1385 )
1386{
1387 RETURN_STATUS Status;
1388 UINT64 Base;
1389 UINT64 Length;
1390 UINT8 PrecedentTypes;
1391 UINTN Index;
1392 UINT64 HBase;
1393 UINT64 HLength;
1394 UINT64 SubLength;
1395 UINT16 SubStart;
1396 UINT16 SubStop;
1397 UINT16 Cur;
1398 UINT16 Pre;
1399 MTRR_MEMORY_CACHE_TYPE LowestType;
1400 MTRR_MEMORY_CACHE_TYPE LowestPrecedentType;
1401
1402 Base = Vertices[Start].Address;
1403 Length = Vertices[Stop].Address - Base;
1404
1405 LowestType = MtrrLibLowestType (Types);
1406
1407 //
1408 // Clear the lowest type (highest bit) to get the precedent types
1409 //
1410 PrecedentTypes = ~(1 << LowestType) & Types;
1411 LowestPrecedentType = MtrrLibLowestType (PrecedentTypes);
1412
1413 if (Mtrrs == NULL) {
1414 Weight[M (Start, Stop)] = ((LowestType == DefaultType) ? 0 : 1);
1415 Weight[O (Start, Stop)] = ((LowestType == DefaultType) ? 1 : 0);
1416 }
1417
1418 // Add all high level ranges
1419 HBase = MAX_UINT64;
1420 HLength = 0;
1421 for (Index = 0; Index < RangeCount; Index++) {
1422 if (Length == 0) {
1423 break;
1424 }
1425
1426 if ((Base < Ranges[Index].BaseAddress) || (Ranges[Index].BaseAddress + Ranges[Index].Length <= Base)) {
1427 continue;
1428 }
1429
1430 //
1431 // Base is in the Range[Index]
1432 //
1433 if (Base + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1434 SubLength = Ranges[Index].BaseAddress + Ranges[Index].Length - Base;
1435 } else {
1436 SubLength = Length;
1437 }
1438
1439 if (((1 << Ranges[Index].Type) & PrecedentTypes) != 0) {
1440 //
1441 // Meet a range whose types take precedence.
1442 // Update the [HBase, HBase + HLength) to include the range,
1443 // [HBase, HBase + HLength) may contain sub ranges with 2 different types, and both take precedence.
1444 //
1445 if (HBase == MAX_UINT64) {
1446 HBase = Base;
1447 }
1448
1449 HLength += SubLength;
1450 }
1451
1452 Base += SubLength;
1453 Length -= SubLength;
1454
1455 if (HLength == 0) {
1456 continue;
1457 }
1458
1459 if ((Ranges[Index].Type == LowestType) || (Length == 0)) {
1460 // meet low type or end
1461
1462 //
1463 // Add the MTRRs for each high priority type range
1464 // the range[HBase, HBase + HLength) contains only two types.
1465 // We might use positive or subtractive, depending on which way uses less MTRR
1466 //
1467 for (SubStart = Start; SubStart <= Stop; SubStart++) {
1468 if (Vertices[SubStart].Address == HBase) {
1469 break;
1470 }
1471 }
1472
1473 for (SubStop = SubStart; SubStop <= Stop; SubStop++) {
1474 if (Vertices[SubStop].Address == HBase + HLength) {
1475 break;
1476 }
1477 }
1478
1479 ASSERT (Vertices[SubStart].Address == HBase);
1480 ASSERT (Vertices[SubStop].Address == HBase + HLength);
1481
1482 if ((TypeCount == 2) || (SubStart == SubStop - 1)) {
1483 //
1484 // add subtractive MTRRs for [HBase, HBase + HLength)
1485 // [HBase, HBase + HLength) contains only one type.
1486 // while - loop is to split the range to MTRR - compliant aligned range.
1487 //
1488 if (Mtrrs == NULL) {
1489 Weight[M (Start, Stop)] += (UINT8)(SubStop - SubStart);
1490 } else {
1491 while (SubStart != SubStop) {
1492 Status = MtrrLibAppendVariableMtrr (
1493 Mtrrs,
1494 MtrrCapacity,
1495 MtrrCount,
1496 Vertices[SubStart].Address,
1497 Vertices[SubStart].Length,
1498 Vertices[SubStart].Type
1499 );
1500 if (RETURN_ERROR (Status)) {
1501 return Status;
1502 }
1503
1504 SubStart++;
1505 }
1506 }
1507 } else {
1508 ASSERT (TypeCount == 3);
1509 MtrrLibCalculateLeastMtrrs (VertexCount, Vertices, Weight, SubStart, SubStop, TRUE);
1510
1511 if (Mtrrs == NULL) {
1512 Weight[M (Start, Stop)] += Vertices[SubStop].Weight;
1513 } else {
1514 // When we need to collect the optimal path from SubStart to SubStop
1515 while (SubStop != SubStart) {
1516 Cur = SubStop;
1517 Pre = Vertices[Cur].Previous;
1518 SubStop = Pre;
1519
1520 if (Weight[M (Pre, Cur)] + Weight[O (Pre, Cur)] != 0) {
1521 Status = MtrrLibAppendVariableMtrr (
1522 Mtrrs,
1523 MtrrCapacity,
1524 MtrrCount,
1525 Vertices[Pre].Address,
1526 Vertices[Cur].Address - Vertices[Pre].Address,
1527 (Pre != Cur - 1) ? LowestPrecedentType : Vertices[Pre].Type
1528 );
1529 if (RETURN_ERROR (Status)) {
1530 return Status;
1531 }
1532 }
1533
1534 if (Pre != Cur - 1) {
1536 DefaultType,
1537 A0,
1538 Ranges,
1539 RangeCount,
1540 VertexCount,
1541 Vertices,
1542 Weight,
1543 Pre,
1544 Cur,
1545 PrecedentTypes,
1546 2,
1547 Mtrrs,
1548 MtrrCapacity,
1549 MtrrCount
1550 );
1551 if (RETURN_ERROR (Status)) {
1552 return Status;
1553 }
1554 }
1555 }
1556 }
1557 }
1558
1559 //
1560 // Reset HBase, HLength
1561 //
1562 HBase = MAX_UINT64;
1563 HLength = 0;
1564 }
1565 }
1566
1567 return RETURN_SUCCESS;
1568}
1569
1591RETURN_STATUS
1593 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1594 IN UINT64 A0,
1595 IN CONST MTRR_MEMORY_RANGE *Ranges,
1596 IN UINTN RangeCount,
1597 IN VOID *Scratch,
1598 IN OUT UINTN *ScratchSize,
1599 IN OUT MTRR_MEMORY_RANGE *Mtrrs,
1600 IN UINT32 MtrrCapacity,
1601 IN OUT UINT32 *MtrrCount
1602 )
1603{
1604 UINT64 Base0;
1605 UINT64 Base1;
1606 UINTN Index;
1607 UINT64 Base;
1608 UINT64 Length;
1609 UINT64 Alignment;
1610 UINT64 SubLength;
1611 MTRR_LIB_ADDRESS *Vertices;
1612 UINT8 *Weight;
1613 UINT32 VertexIndex;
1614 UINT32 VertexCount;
1615 UINTN RequiredScratchSize;
1616 UINT8 TypeCount;
1617 UINT16 Start;
1618 UINT16 Stop;
1619 UINT8 Type;
1620 RETURN_STATUS Status;
1621
1622 Base0 = Ranges[0].BaseAddress;
1623 Base1 = Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length;
1624 MTRR_LIB_ASSERT_ALIGNED (Base0, Base1 - Base0);
1625
1626 //
1627 // Count the number of vertices.
1628 //
1629 Vertices = (MTRR_LIB_ADDRESS *)Scratch;
1630 for (VertexIndex = 0, Index = 0; Index < RangeCount; Index++) {
1631 Base = Ranges[Index].BaseAddress;
1632 Length = Ranges[Index].Length;
1633 while (Length != 0) {
1634 Alignment = MtrrLibBiggestAlignment (Base, A0);
1635 SubLength = Alignment;
1636 if (SubLength > Length) {
1637 SubLength = GetPowerOfTwo64 (Length);
1638 }
1639
1640 if (VertexIndex < *ScratchSize / sizeof (*Vertices)) {
1641 Vertices[VertexIndex].Address = Base;
1642 Vertices[VertexIndex].Alignment = Alignment;
1643 Vertices[VertexIndex].Type = Ranges[Index].Type;
1644 Vertices[VertexIndex].Length = SubLength;
1645 }
1646
1647 Base += SubLength;
1648 Length -= SubLength;
1649 VertexIndex++;
1650 }
1651 }
1652
1653 //
1654 // Vertices[VertexIndex] = Base1, so whole vertex count is (VertexIndex + 1).
1655 //
1656 VertexCount = VertexIndex + 1;
1657 DEBUG ((
1658 DEBUG_CACHE,
1659 " Count of vertices (%016llx - %016llx) = %d\n",
1660 Ranges[0].BaseAddress,
1661 Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length,
1662 VertexCount
1663 ));
1664 ASSERT (VertexCount < MAX_UINT16);
1665
1666 RequiredScratchSize = VertexCount * sizeof (*Vertices) + VertexCount * VertexCount * sizeof (*Weight);
1667 if (*ScratchSize < RequiredScratchSize) {
1668 *ScratchSize = RequiredScratchSize;
1670 }
1671
1672 Vertices[VertexCount - 1].Address = Base1;
1673
1674 Weight = (UINT8 *)&Vertices[VertexCount];
1675 for (VertexIndex = 0; VertexIndex < VertexCount; VertexIndex++) {
1676 //
1677 // Set optional weight between vertices and self->self to 0
1678 //
1679 SetMem (&Weight[M (VertexIndex, 0)], VertexIndex + 1, 0);
1680 //
1681 // Set mandatory weight between vertices to MAX_WEIGHT
1682 //
1683 SetMem (&Weight[M (VertexIndex, VertexIndex + 1)], VertexCount - VertexIndex - 1, MAX_WEIGHT);
1684
1685 // Final result looks like:
1686 // 00 FF FF FF
1687 // 00 00 FF FF
1688 // 00 00 00 FF
1689 // 00 00 00 00
1690 }
1691
1692 //
1693 // Set mandatory weight and optional weight for adjacent vertices
1694 //
1695 for (VertexIndex = 0; VertexIndex < VertexCount - 1; VertexIndex++) {
1696 if (Vertices[VertexIndex].Type != DefaultType) {
1697 Weight[M (VertexIndex, VertexIndex + 1)] = 1;
1698 Weight[O (VertexIndex, VertexIndex + 1)] = 0;
1699 } else {
1700 Weight[M (VertexIndex, VertexIndex + 1)] = 0;
1701 Weight[O (VertexIndex, VertexIndex + 1)] = 1;
1702 }
1703 }
1704
1705 for (TypeCount = 2; TypeCount <= 3; TypeCount++) {
1706 for (Start = 0; (UINT32)Start < VertexCount; Start++) {
1707 for (Stop = Start + 2; (UINT32)Stop < VertexCount; Stop++) {
1708 ASSERT (Vertices[Stop].Address > Vertices[Start].Address);
1709 Length = Vertices[Stop].Address - Vertices[Start].Address;
1710 if (Length > Vertices[Start].Alignment) {
1711 //
1712 // Pickup a new Start when [Start, Stop) cannot be described by one MTRR.
1713 //
1714 break;
1715 }
1716
1717 if ((Weight[M (Start, Stop)] == MAX_WEIGHT) && IS_POW2 (Length)) {
1719 Ranges,
1720 RangeCount,
1721 Vertices[Start].Address,
1722 Vertices[Stop].Address - Vertices[Start].Address,
1723 &Type
1724 ) == TypeCount)
1725 {
1726 //
1727 // Update the Weight[Start, Stop] using subtractive path.
1728 //
1730 DefaultType,
1731 A0,
1732 Ranges,
1733 RangeCount,
1734 (UINT16)VertexCount,
1735 Vertices,
1736 Weight,
1737 Start,
1738 Stop,
1739 Type,
1740 TypeCount,
1741 NULL,
1742 0,
1743 NULL
1744 );
1745 } else if (TypeCount == 2) {
1746 //
1747 // Pick up a new Start when we expect 2-type range, but 3-type range is met.
1748 // Because no matter how Stop is increased, we always meet 3-type range.
1749 //
1750 break;
1751 }
1752 }
1753 }
1754 }
1755 }
1756
1757 Status = RETURN_SUCCESS;
1758 MtrrLibCalculateLeastMtrrs ((UINT16)VertexCount, Vertices, Weight, 0, (UINT16)VertexCount - 1, FALSE);
1759 Stop = (UINT16)VertexCount - 1;
1760 while (Stop != 0) {
1761 Start = Vertices[Stop].Previous;
1762 TypeCount = MAX_UINT8;
1763 Type = 0;
1764 if (Weight[M (Start, Stop)] != 0) {
1765 TypeCount = MtrrLibGetNumberOfTypes (Ranges, RangeCount, Vertices[Start].Address, Vertices[Stop].Address - Vertices[Start].Address, &Type);
1766 Status = MtrrLibAppendVariableMtrr (
1767 Mtrrs,
1768 MtrrCapacity,
1769 MtrrCount,
1770 Vertices[Start].Address,
1771 Vertices[Stop].Address - Vertices[Start].Address,
1772 MtrrLibLowestType (Type)
1773 );
1774 if (RETURN_ERROR (Status)) {
1775 break;
1776 }
1777 }
1778
1779 if (Start != Stop - 1) {
1780 //
1781 // substractive path
1782 //
1783 if (TypeCount == MAX_UINT8) {
1784 TypeCount = MtrrLibGetNumberOfTypes (
1785 Ranges,
1786 RangeCount,
1787 Vertices[Start].Address,
1788 Vertices[Stop].Address - Vertices[Start].Address,
1789 &Type
1790 );
1791 }
1792
1794 DefaultType,
1795 A0,
1796 Ranges,
1797 RangeCount,
1798 (UINT16)VertexCount,
1799 Vertices,
1800 Weight,
1801 Start,
1802 Stop,
1803 Type,
1804 TypeCount,
1805 Mtrrs,
1806 MtrrCapacity,
1807 MtrrCount
1808 );
1809 if (RETURN_ERROR (Status)) {
1810 break;
1811 }
1812 }
1813
1814 Stop = Start;
1815 }
1816
1817 return Status;
1818}
1819
1832RETURN_STATUS
1835 IN OUT MTRR_MEMORY_RANGE *Ranges,
1836 IN UINTN RangeCapacity,
1837 IN OUT UINTN *RangeCount
1838 )
1839{
1840 RETURN_STATUS Status;
1841 UINTN MsrIndex;
1842 UINTN Index;
1843 MTRR_MEMORY_CACHE_TYPE MemoryType;
1844 UINT64 Base;
1845
1846 Base = 0;
1847 for (MsrIndex = 0; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
1848 ASSERT (Base == mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress);
1849 for (Index = 0; Index < sizeof (UINT64); Index++) {
1850 MemoryType = (MTRR_MEMORY_CACHE_TYPE)((UINT8 *)(&Fixed->Mtrr[MsrIndex]))[Index];
1851 Status = MtrrLibSetMemoryType (
1852 Ranges,
1853 RangeCapacity,
1854 RangeCount,
1855 Base,
1856 mMtrrLibFixedMtrrTable[MsrIndex].Length,
1857 MemoryType
1858 );
1859 if (Status == RETURN_OUT_OF_RESOURCES) {
1860 return Status;
1861 }
1862
1863 Base += mMtrrLibFixedMtrrTable[MsrIndex].Length;
1864 }
1865 }
1866
1867 ASSERT (Base == BASE_1MB);
1868 return RETURN_SUCCESS;
1869}
1870
1883RETURN_STATUS
1885 IN CONST MTRR_MEMORY_RANGE *VariableMtrr,
1886 IN UINT32 VariableMtrrCount,
1887 IN OUT MTRR_MEMORY_RANGE *Ranges,
1888 IN UINTN RangeCapacity,
1889 IN OUT UINTN *RangeCount
1890 )
1891{
1892 RETURN_STATUS Status;
1893 UINTN Index;
1894
1895 //
1896 // WT > WB
1897 // UC > *
1898 // UC > * (except WB, UC) > WB
1899 //
1900
1901 //
1902 // 1. Set WB
1903 //
1904 for (Index = 0; Index < VariableMtrrCount; Index++) {
1905 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheWriteBack)) {
1906 Status = MtrrLibSetMemoryType (
1907 Ranges,
1908 RangeCapacity,
1909 RangeCount,
1910 VariableMtrr[Index].BaseAddress,
1911 VariableMtrr[Index].Length,
1912 VariableMtrr[Index].Type
1913 );
1914 if (Status == RETURN_OUT_OF_RESOURCES) {
1915 return Status;
1916 }
1917 }
1918 }
1919
1920 //
1921 // 2. Set other types than WB or UC
1922 //
1923 for (Index = 0; Index < VariableMtrrCount; Index++) {
1924 if ((VariableMtrr[Index].Length != 0) &&
1925 (VariableMtrr[Index].Type != CacheWriteBack) && (VariableMtrr[Index].Type != CacheUncacheable))
1926 {
1927 Status = MtrrLibSetMemoryType (
1928 Ranges,
1929 RangeCapacity,
1930 RangeCount,
1931 VariableMtrr[Index].BaseAddress,
1932 VariableMtrr[Index].Length,
1933 VariableMtrr[Index].Type
1934 );
1935 if (Status == RETURN_OUT_OF_RESOURCES) {
1936 return Status;
1937 }
1938 }
1939 }
1940
1941 //
1942 // 3. Set UC
1943 //
1944 for (Index = 0; Index < VariableMtrrCount; Index++) {
1945 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheUncacheable)) {
1946 Status = MtrrLibSetMemoryType (
1947 Ranges,
1948 RangeCapacity,
1949 RangeCount,
1950 VariableMtrr[Index].BaseAddress,
1951 VariableMtrr[Index].Length,
1952 VariableMtrr[Index].Type
1953 );
1954 if (Status == RETURN_OUT_OF_RESOURCES) {
1955 return Status;
1956 }
1957 }
1958 }
1959
1960 return RETURN_SUCCESS;
1961}
1962
1972UINT8
1974 IN CONST MTRR_MEMORY_RANGE *Ranges,
1975 IN UINTN RangeCount
1976 )
1977{
1978 ASSERT (RangeCount != 0);
1979
1980 switch (Ranges[0].Type) {
1981 case CacheWriteBack:
1982 case CacheWriteThrough:
1983 return (1 << CacheWriteBack) | (1 << CacheWriteThrough) | (1 << CacheUncacheable);
1984 break;
1985
1986 case CacheWriteCombining:
1987 case CacheWriteProtected:
1988 return (1 << Ranges[0].Type) | (1 << CacheUncacheable);
1989 break;
1990
1991 case CacheUncacheable:
1992 if (RangeCount == 1) {
1993 return (1 << CacheUncacheable);
1994 }
1995
1996 return MtrrLibGetCompatibleTypes (&Ranges[1], RangeCount - 1);
1997 break;
1998
1999 case CacheInvalid:
2000 default:
2001 ASSERT (FALSE);
2002 break;
2003 }
2004
2005 return 0;
2006}
2007
2019VOID
2021 MTRR_MEMORY_RANGE *DstMtrrs,
2022 UINT32 DstMtrrCount,
2023 MTRR_MEMORY_RANGE *SrcMtrrs,
2024 UINT32 SrcMtrrCount,
2025 BOOLEAN *Modified
2026 )
2027{
2028 UINT32 DstIndex;
2029 UINT32 SrcIndex;
2030
2031 ASSERT (SrcMtrrCount <= DstMtrrCount);
2032
2033 for (DstIndex = 0; DstIndex < DstMtrrCount; DstIndex++) {
2034 Modified[DstIndex] = FALSE;
2035
2036 if (DstMtrrs[DstIndex].Length == 0) {
2037 continue;
2038 }
2039
2040 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
2041 if ((DstMtrrs[DstIndex].BaseAddress == SrcMtrrs[SrcIndex].BaseAddress) &&
2042 (DstMtrrs[DstIndex].Length == SrcMtrrs[SrcIndex].Length) &&
2043 (DstMtrrs[DstIndex].Type == SrcMtrrs[SrcIndex].Type))
2044 {
2045 break;
2046 }
2047 }
2048
2049 if (SrcIndex == SrcMtrrCount) {
2050 //
2051 // Remove the one from DstMtrrs which is not in SrcMtrrs
2052 //
2053 DstMtrrs[DstIndex].Length = 0;
2054 Modified[DstIndex] = TRUE;
2055 } else {
2056 //
2057 // Remove the one from SrcMtrrs which is also in DstMtrrs
2058 //
2059 SrcMtrrs[SrcIndex].Length = 0;
2060 }
2061 }
2062
2063 //
2064 // Now valid MTRR only exists in either DstMtrrs or SrcMtrrs.
2065 // Merge MTRRs from SrcMtrrs to DstMtrrs
2066 //
2067 DstIndex = 0;
2068 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
2069 if (SrcMtrrs[SrcIndex].Length != 0) {
2070 //
2071 // Find the empty slot in DstMtrrs
2072 //
2073 while (DstIndex < DstMtrrCount) {
2074 if (DstMtrrs[DstIndex].Length == 0) {
2075 break;
2076 }
2077
2078 DstIndex++;
2079 }
2080
2081 ASSERT (DstIndex < DstMtrrCount);
2082 CopyMem (&DstMtrrs[DstIndex], &SrcMtrrs[SrcIndex], sizeof (SrcMtrrs[0]));
2083 Modified[DstIndex] = TRUE;
2084 }
2085 }
2086}
2087
2107RETURN_STATUS
2109 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
2110 IN UINT64 A0,
2111 IN MTRR_MEMORY_RANGE *Ranges,
2112 IN UINTN RangeCount,
2113 IN VOID *Scratch,
2114 IN OUT UINTN *ScratchSize,
2115 OUT MTRR_MEMORY_RANGE *VariableMtrr,
2116 IN UINT32 VariableMtrrCapacity,
2117 OUT UINT32 *VariableMtrrCount
2118 )
2119{
2120 RETURN_STATUS Status;
2121 UINT32 Index;
2122 UINT64 Base0;
2123 UINT64 Base1;
2124 UINT64 Alignment;
2125 UINT8 CompatibleTypes;
2126 UINT64 Length;
2127 UINT32 End;
2128 UINTN ActualScratchSize;
2129 UINTN BiggestScratchSize;
2130
2131 *VariableMtrrCount = 0;
2132
2133 //
2134 // Since the whole ranges need multiple calls of MtrrLibCalculateMtrrs().
2135 // Each call needs different scratch buffer size.
2136 // When the provided scratch buffer size is not sufficient in any call,
2137 // set the GetActualScratchSize to TRUE, and following calls will only
2138 // calculate the actual scratch size for the caller.
2139 //
2140 BiggestScratchSize = 0;
2141
2142 for (Index = 0; (UINTN)Index < RangeCount;) {
2143 Base0 = Ranges[Index].BaseAddress;
2144
2145 //
2146 // Full step is optimal
2147 //
2148 while ((UINTN)Index < RangeCount) {
2149 ASSERT (Ranges[Index].BaseAddress == Base0);
2150 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2151 while (Base0 + Alignment <= Ranges[Index].BaseAddress + Ranges[Index].Length) {
2152 if ((BiggestScratchSize <= *ScratchSize) && (Ranges[Index].Type != DefaultType)) {
2153 Status = MtrrLibAppendVariableMtrr (
2154 VariableMtrr,
2155 VariableMtrrCapacity,
2156 VariableMtrrCount,
2157 Base0,
2158 Alignment,
2159 Ranges[Index].Type
2160 );
2161 if (RETURN_ERROR (Status)) {
2162 return Status;
2163 }
2164 }
2165
2166 Base0 += Alignment;
2167 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2168 }
2169
2170 //
2171 // Remove the above range from Ranges[Index]
2172 //
2173 Ranges[Index].Length -= Base0 - Ranges[Index].BaseAddress;
2174 Ranges[Index].BaseAddress = Base0;
2175 if (Ranges[Index].Length != 0) {
2176 break;
2177 } else {
2178 Index++;
2179 }
2180 }
2181
2182 if (Index == RangeCount) {
2183 break;
2184 }
2185
2186 //
2187 // Find continous ranges [Base0, Base1) which could be combined by MTRR.
2188 // Per SDM, the compatible types between[B0, B1) are:
2189 // UC, *
2190 // WB, WT
2191 // UC, WB, WT
2192 //
2193 CompatibleTypes = MtrrLibGetCompatibleTypes (&Ranges[Index], RangeCount - Index);
2194
2195 End = Index; // End points to last one that matches the CompatibleTypes.
2196 while ((UINTN)(End + 1) < RangeCount) {
2197 if (((1 << Ranges[End + 1].Type) & CompatibleTypes) == 0) {
2198 break;
2199 }
2200
2201 End++;
2202 }
2203
2204 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2205 Length = GetPowerOfTwo64 (Ranges[End].BaseAddress + Ranges[End].Length - Base0);
2206 Base1 = Base0 + MIN (Alignment, Length);
2207
2208 //
2209 // Base1 may not in Ranges[End]. Update End to the range Base1 belongs to.
2210 //
2211 End = Index;
2212 while ((UINTN)(End + 1) < RangeCount) {
2213 if (Base1 <= Ranges[End + 1].BaseAddress) {
2214 break;
2215 }
2216
2217 End++;
2218 }
2219
2220 Length = Ranges[End].Length;
2221 Ranges[End].Length = Base1 - Ranges[End].BaseAddress;
2222 ActualScratchSize = *ScratchSize;
2223 Status = MtrrLibCalculateMtrrs (
2224 DefaultType,
2225 A0,
2226 &Ranges[Index],
2227 End + 1 - Index,
2228 Scratch,
2229 &ActualScratchSize,
2230 VariableMtrr,
2231 VariableMtrrCapacity,
2232 VariableMtrrCount
2233 );
2234 if (Status == RETURN_BUFFER_TOO_SMALL) {
2235 BiggestScratchSize = MAX (BiggestScratchSize, ActualScratchSize);
2236 //
2237 // Ignore this error, because we need to calculate the biggest
2238 // scratch buffer size.
2239 //
2240 Status = RETURN_SUCCESS;
2241 }
2242
2243 if (RETURN_ERROR (Status)) {
2244 return Status;
2245 }
2246
2247 if (Length != Ranges[End].Length) {
2248 Ranges[End].BaseAddress = Base1;
2249 Ranges[End].Length = Length - Ranges[End].Length;
2250 Index = End;
2251 } else {
2252 Index = End + 1;
2253 }
2254 }
2255
2256 if (*ScratchSize < BiggestScratchSize) {
2257 *ScratchSize = BiggestScratchSize;
2259 }
2260
2261 return RETURN_SUCCESS;
2262}
2263
2278RETURN_STATUS
2280 IN OUT UINT64 *ClearMasks,
2281 IN OUT UINT64 *OrMasks,
2282 IN PHYSICAL_ADDRESS BaseAddress,
2283 IN UINT64 Length,
2284 IN MTRR_MEMORY_CACHE_TYPE Type
2285 )
2286{
2287 RETURN_STATUS Status;
2288 UINT32 MsrIndex;
2289 UINT64 ClearMask;
2290 UINT64 OrMask;
2291
2292 ASSERT (BaseAddress < BASE_1MB);
2293
2294 MsrIndex = (UINT32)-1;
2295 while ((BaseAddress < BASE_1MB) && (Length != 0)) {
2296 Status = MtrrLibProgramFixedMtrr (Type, &BaseAddress, &Length, &MsrIndex, &ClearMask, &OrMask);
2297 if (RETURN_ERROR (Status)) {
2298 return Status;
2299 }
2300
2301 ClearMasks[MsrIndex] = ClearMasks[MsrIndex] | ClearMask;
2302 OrMasks[MsrIndex] = (OrMasks[MsrIndex] & ~ClearMask) | OrMask;
2303 }
2304
2305 return RETURN_SUCCESS;
2306}
2307
2334RETURN_STATUS
2335EFIAPI
2337 IN OUT MTRR_SETTINGS *MtrrSetting,
2338 IN VOID *Scratch,
2339 IN OUT UINTN *ScratchSize,
2340 IN CONST MTRR_MEMORY_RANGE *Ranges,
2341 IN UINTN RangeCount
2342 )
2343{
2344 RETURN_STATUS Status;
2345 UINT32 Index;
2346 UINT64 BaseAddress;
2347 UINT64 Length;
2348 BOOLEAN VariableMtrrNeeded;
2349
2350 UINT64 MtrrValidBitsMask;
2351 UINT64 MtrrValidAddressMask;
2352 MTRR_MEMORY_CACHE_TYPE DefaultType;
2353 MTRR_VARIABLE_SETTINGS VariableSettings;
2354 MTRR_MEMORY_RANGE WorkingRanges[2 * ARRAY_SIZE (MtrrSetting->Variables.Mtrr) + 2];
2355 UINTN WorkingRangeCount;
2356 BOOLEAN Modified;
2357 MTRR_VARIABLE_SETTING VariableSetting;
2358 UINT32 OriginalVariableMtrrCount;
2359 UINT32 FirmwareVariableMtrrCount;
2360 UINT32 WorkingVariableMtrrCount;
2361 MTRR_MEMORY_RANGE OriginalVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2362 MTRR_MEMORY_RANGE WorkingVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2363 BOOLEAN VariableSettingModified[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2364
2365 UINT64 FixedMtrrMemoryLimit;
2366 BOOLEAN FixedMtrrSupported;
2367 UINT64 ClearMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2368 UINT64 OrMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2369
2370 MTRR_CONTEXT MtrrContext;
2371 BOOLEAN MtrrContextValid;
2372
2373 Status = RETURN_SUCCESS;
2374 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
2375
2376 //
2377 // TRUE indicating the accordingly Variable setting needs modificaiton in OriginalVariableMtrr.
2378 //
2379 SetMem (VariableSettingModified, ARRAY_SIZE (VariableSettingModified), FALSE);
2380
2381 //
2382 // TRUE indicating the caller requests to set variable MTRRs.
2383 //
2384 VariableMtrrNeeded = FALSE;
2385 OriginalVariableMtrrCount = 0;
2386
2387 //
2388 // 0. Dump the requests.
2389 //
2391 DEBUG ((
2392 DEBUG_CACHE,
2393 "Mtrr: Set Mem Attribute to %a, ScratchSize = %x%a",
2394 (MtrrSetting == NULL) ? "Hardware" : "Buffer",
2395 *ScratchSize,
2396 (RangeCount <= 1) ? "," : "\n"
2397 ));
2398 for (Index = 0; Index < RangeCount; Index++) {
2399 DEBUG ((
2400 DEBUG_CACHE,
2401 " %a: [%016lx, %016lx)\n",
2402 mMtrrMemoryCacheTypeShortName[MIN (Ranges[Index].Type, CacheInvalid)],
2403 Ranges[Index].BaseAddress,
2404 Ranges[Index].BaseAddress + Ranges[Index].Length
2405 ));
2406 }
2407
2408 DEBUG_CODE_END ();
2409
2410 //
2411 // 1. Validate the parameters.
2412 //
2413 if (!MtrrLibIsMtrrSupported (&FixedMtrrSupported, &OriginalVariableMtrrCount)) {
2414 Status = RETURN_UNSUPPORTED;
2415 goto Exit;
2416 }
2417
2418 FixedMtrrMemoryLimit = FixedMtrrSupported ? BASE_1MB : 0;
2419
2420 for (Index = 0; Index < RangeCount; Index++) {
2421 if (Ranges[Index].Length == 0) {
2422 Status = RETURN_INVALID_PARAMETER;
2423 goto Exit;
2424 }
2425
2426 if (((Ranges[Index].BaseAddress & ~MtrrValidAddressMask) != 0) ||
2427 ((((Ranges[Index].BaseAddress + Ranges[Index].Length) & ~MtrrValidAddressMask) != 0) &&
2428 ((Ranges[Index].BaseAddress + Ranges[Index].Length) != MtrrValidBitsMask + 1))
2429 )
2430 {
2431 //
2432 // Either the BaseAddress or the Limit doesn't follow the alignment requirement.
2433 // Note: It's still valid if Limit doesn't follow the alignment requirement but equals to MAX Address.
2434 //
2435 Status = RETURN_UNSUPPORTED;
2436 goto Exit;
2437 }
2438
2439 if ((Ranges[Index].Type != CacheUncacheable) &&
2440 (Ranges[Index].Type != CacheWriteCombining) &&
2441 (Ranges[Index].Type != CacheWriteThrough) &&
2442 (Ranges[Index].Type != CacheWriteProtected) &&
2443 (Ranges[Index].Type != CacheWriteBack))
2444 {
2445 Status = RETURN_INVALID_PARAMETER;
2446 goto Exit;
2447 }
2448
2449 if (Ranges[Index].BaseAddress + Ranges[Index].Length > FixedMtrrMemoryLimit) {
2450 VariableMtrrNeeded = TRUE;
2451 }
2452 }
2453
2454 //
2455 // 2. Apply the above-1MB memory attribute settings.
2456 //
2457 if (VariableMtrrNeeded) {
2458 //
2459 // 2.1. Read all variable MTRRs and convert to Ranges.
2460 //
2461 MtrrGetVariableMtrrWorker (MtrrSetting, OriginalVariableMtrrCount, &VariableSettings);
2463 &VariableSettings,
2464 OriginalVariableMtrrCount,
2465 MtrrValidBitsMask,
2466 MtrrValidAddressMask,
2467 OriginalVariableMtrr
2468 );
2469
2470 DefaultType = MtrrGetDefaultMemoryTypeWorker (MtrrSetting);
2471 WorkingRangeCount = 1;
2472 WorkingRanges[0].BaseAddress = 0;
2473 WorkingRanges[0].Length = MtrrValidBitsMask + 1;
2474 WorkingRanges[0].Type = DefaultType;
2475
2476 Status = MtrrLibApplyVariableMtrrs (
2477 OriginalVariableMtrr,
2478 OriginalVariableMtrrCount,
2479 WorkingRanges,
2480 ARRAY_SIZE (WorkingRanges),
2481 &WorkingRangeCount
2482 );
2483 ASSERT_RETURN_ERROR (Status);
2484
2485 ASSERT (OriginalVariableMtrrCount >= PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs));
2486 FirmwareVariableMtrrCount = OriginalVariableMtrrCount - PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
2487 ASSERT (WorkingRangeCount <= 2 * FirmwareVariableMtrrCount + 1);
2488
2489 //
2490 // 2.2. Force [0, 1M) to UC, so that it doesn't impact subtraction algorithm.
2491 //
2492 if (FixedMtrrMemoryLimit != 0) {
2493 Status = MtrrLibSetMemoryType (
2494 WorkingRanges,
2495 ARRAY_SIZE (WorkingRanges),
2496 &WorkingRangeCount,
2497 0,
2498 FixedMtrrMemoryLimit,
2499 CacheUncacheable
2500 );
2501 ASSERT (Status != RETURN_OUT_OF_RESOURCES);
2502 }
2503
2504 //
2505 // 2.3. Apply the new memory attribute settings to Ranges.
2506 //
2507 Modified = FALSE;
2508 for (Index = 0; Index < RangeCount; Index++) {
2509 BaseAddress = Ranges[Index].BaseAddress;
2510 Length = Ranges[Index].Length;
2511 if (BaseAddress < FixedMtrrMemoryLimit) {
2512 if (Length <= FixedMtrrMemoryLimit - BaseAddress) {
2513 continue;
2514 }
2515
2516 Length -= FixedMtrrMemoryLimit - BaseAddress;
2517 BaseAddress = FixedMtrrMemoryLimit;
2518 }
2519
2520 Status = MtrrLibSetMemoryType (
2521 WorkingRanges,
2522 ARRAY_SIZE (WorkingRanges),
2523 &WorkingRangeCount,
2524 BaseAddress,
2525 Length,
2526 Ranges[Index].Type
2527 );
2528 if (Status == RETURN_ALREADY_STARTED) {
2529 Status = RETURN_SUCCESS;
2530 } else if (Status == RETURN_OUT_OF_RESOURCES) {
2531 goto Exit;
2532 } else {
2533 ASSERT_RETURN_ERROR (Status);
2534 Modified = TRUE;
2535 }
2536 }
2537
2538 if (Modified) {
2539 //
2540 // 2.4. Calculate the Variable MTRR settings based on the Ranges.
2541 // Buffer Too Small may be returned if the scratch buffer size is insufficient.
2542 //
2543 Status = MtrrLibSetMemoryRanges (
2544 DefaultType,
2545 LShiftU64 (1, (UINTN)HighBitSet64 (MtrrValidBitsMask)),
2546 WorkingRanges,
2547 WorkingRangeCount,
2548 Scratch,
2549 ScratchSize,
2550 WorkingVariableMtrr,
2551 FirmwareVariableMtrrCount + 1,
2552 &WorkingVariableMtrrCount
2553 );
2554 if (RETURN_ERROR (Status)) {
2555 goto Exit;
2556 }
2557
2558 //
2559 // 2.5. Remove the [0, 1MB) MTRR if it still exists (not merged with other range)
2560 //
2561 for (Index = 0; Index < WorkingVariableMtrrCount; Index++) {
2562 if ((WorkingVariableMtrr[Index].BaseAddress == 0) && (WorkingVariableMtrr[Index].Length == FixedMtrrMemoryLimit)) {
2563 ASSERT (WorkingVariableMtrr[Index].Type == CacheUncacheable);
2564 WorkingVariableMtrrCount--;
2565 CopyMem (
2566 &WorkingVariableMtrr[Index],
2567 &WorkingVariableMtrr[Index + 1],
2568 (WorkingVariableMtrrCount - Index) * sizeof (WorkingVariableMtrr[0])
2569 );
2570 break;
2571 }
2572 }
2573
2574 if (WorkingVariableMtrrCount > FirmwareVariableMtrrCount) {
2575 Status = RETURN_OUT_OF_RESOURCES;
2576 goto Exit;
2577 }
2578
2579 //
2580 // 2.6. Merge the WorkingVariableMtrr to OriginalVariableMtrr
2581 // Make sure least modification is made to OriginalVariableMtrr.
2582 //
2584 OriginalVariableMtrr,
2585 OriginalVariableMtrrCount,
2586 WorkingVariableMtrr,
2587 WorkingVariableMtrrCount,
2588 VariableSettingModified
2589 );
2590 }
2591 }
2592
2593 //
2594 // 3. Apply the below-1MB memory attribute settings.
2595 //
2596 // (Value & ~0 | 0) still equals to (Value)
2597 //
2598 ZeroMem (ClearMasks, sizeof (ClearMasks));
2599 ZeroMem (OrMasks, sizeof (OrMasks));
2600 for (Index = 0; Index < RangeCount; Index++) {
2601 if (Ranges[Index].BaseAddress >= FixedMtrrMemoryLimit) {
2602 continue;
2603 }
2604
2606 ClearMasks,
2607 OrMasks,
2608 Ranges[Index].BaseAddress,
2609 Ranges[Index].Length,
2610 Ranges[Index].Type
2611 );
2612 if (RETURN_ERROR (Status)) {
2613 goto Exit;
2614 }
2615 }
2616
2617 MtrrContextValid = FALSE;
2618 //
2619 // 4. Write fixed MTRRs that have been modified
2620 //
2621 for (Index = 0; Index < ARRAY_SIZE (ClearMasks); Index++) {
2622 if (ClearMasks[Index] != 0) {
2623 if (MtrrSetting != NULL) {
2624 //
2625 // Fixed MTRR is modified indicating fixed MTRR should be enabled in the end of MTRR programming.
2626 //
2627 ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.FE = 1;
2628 MtrrSetting->Fixed.Mtrr[Index] = (MtrrSetting->Fixed.Mtrr[Index] & ~ClearMasks[Index]) | OrMasks[Index];
2629 } else {
2630 if (!MtrrContextValid) {
2631 MtrrLibPreMtrrChange (&MtrrContext);
2632 //
2633 // Fixed MTRR is modified indicating fixed MTRR should be enabled in the end of MTRR programming.
2634 //
2635 MtrrContext.DefType.Bits.FE = 1;
2636 MtrrContextValid = TRUE;
2637 }
2638
2639 AsmMsrAndThenOr64 (mMtrrLibFixedMtrrTable[Index].Msr, ~ClearMasks[Index], OrMasks[Index]);
2640 }
2641 }
2642 }
2643
2644 //
2645 // 5. Write variable MTRRs that have been modified
2646 //
2647 for (Index = 0; Index < OriginalVariableMtrrCount; Index++) {
2648 if (VariableSettingModified[Index]) {
2649 if (OriginalVariableMtrr[Index].Length != 0) {
2650 VariableSetting.Base = (OriginalVariableMtrr[Index].BaseAddress & MtrrValidAddressMask)
2651 | (UINT8)OriginalVariableMtrr[Index].Type;
2652 VariableSetting.Mask = ((~(OriginalVariableMtrr[Index].Length - 1)) & MtrrValidAddressMask) | BIT11;
2653 } else {
2654 VariableSetting.Base = 0;
2655 VariableSetting.Mask = 0;
2656 }
2657
2658 if (MtrrSetting != NULL) {
2659 CopyMem (&MtrrSetting->Variables.Mtrr[Index], &VariableSetting, sizeof (VariableSetting));
2660 } else {
2661 if (!MtrrContextValid) {
2662 MtrrLibPreMtrrChange (&MtrrContext);
2663 MtrrContextValid = TRUE;
2664 }
2665
2667 MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),
2668 VariableSetting.Base
2669 );
2671 MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),
2672 VariableSetting.Mask
2673 );
2674 }
2675 }
2676 }
2677
2678 if (MtrrSetting != NULL) {
2679 //
2680 // Enable MTRR unconditionally
2681 //
2682 ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.E = 1;
2683 } else {
2684 if (MtrrContextValid) {
2685 MtrrLibPostMtrrChange (&MtrrContext);
2686 }
2687 }
2688
2689Exit:
2690 DEBUG ((DEBUG_CACHE, " Result = %r\n", Status));
2691 if (!RETURN_ERROR (Status)) {
2692 MtrrDebugPrintAllMtrrsWorker (MtrrSetting);
2693 }
2694
2695 return Status;
2696}
2697
2727RETURN_STATUS
2728EFIAPI
2730 IN OUT MTRR_SETTINGS *MtrrSetting,
2731 IN PHYSICAL_ADDRESS BaseAddress,
2732 IN UINT64 Length,
2733 IN MTRR_MEMORY_CACHE_TYPE Attribute
2734 )
2735{
2736 UINT8 Scratch[SCRATCH_BUFFER_SIZE];
2737 UINTN ScratchSize;
2738 MTRR_MEMORY_RANGE Range;
2739
2740 Range.BaseAddress = BaseAddress;
2741 Range.Length = Length;
2742 Range.Type = Attribute;
2743 ScratchSize = sizeof (Scratch);
2744 return MtrrSetMemoryAttributesInMtrrSettings (MtrrSetting, Scratch, &ScratchSize, &Range, 1);
2745}
2746
2780RETURN_STATUS
2781EFIAPI
2783 IN PHYSICAL_ADDRESS BaseAddress,
2784 IN UINT64 Length,
2785 IN MTRR_MEMORY_CACHE_TYPE Attribute
2786 )
2787{
2788 return MtrrSetMemoryAttributeInMtrrSettings (NULL, BaseAddress, Length, Attribute);
2789}
2790
2797VOID
2799 IN MTRR_VARIABLE_SETTINGS *VariableSettings
2800 )
2801{
2802 UINT32 Index;
2803 UINT32 VariableMtrrCount;
2804
2805 VariableMtrrCount = GetVariableMtrrCountWorker ();
2806 ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));
2807
2808 for (Index = 0; Index < VariableMtrrCount; Index++) {
2810 MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),
2811 VariableSettings->Mtrr[Index].Base
2812 );
2814 MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),
2815 VariableSettings->Mtrr[Index].Mask
2816 );
2817 }
2818}
2819
2826VOID
2828 IN MTRR_FIXED_SETTINGS *FixedSettings
2829 )
2830{
2831 UINT32 Index;
2832
2833 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
2835 mMtrrLibFixedMtrrTable[Index].Msr,
2836 FixedSettings->Mtrr[Index]
2837 );
2838 }
2839}
2840
2850EFIAPI
2852 OUT MTRR_SETTINGS *MtrrSetting
2853 )
2854{
2855 BOOLEAN FixedMtrrSupported;
2856 UINT32 VariableMtrrCount;
2858
2859 ZeroMem (MtrrSetting, sizeof (*MtrrSetting));
2860
2861 MtrrDefType = (MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType;
2862 if (!MtrrLibIsMtrrSupported (&FixedMtrrSupported, &VariableMtrrCount)) {
2863 return MtrrSetting;
2864 }
2865
2866 //
2867 // Get MTRR_DEF_TYPE value
2868 //
2870
2871 //
2872 // Enabling the Fixed MTRR bit when unsupported is not allowed.
2873 //
2874 ASSERT (FixedMtrrSupported || (MtrrDefType->Bits.FE == 0));
2875
2876 //
2877 // Get fixed MTRRs
2878 //
2879 if (MtrrDefType->Bits.FE == 1) {
2880 MtrrGetFixedMtrrWorker (&MtrrSetting->Fixed);
2881 }
2882
2883 //
2884 // Get variable MTRRs
2885 //
2887 NULL,
2888 VariableMtrrCount,
2889 &MtrrSetting->Variables
2890 );
2891
2892 return MtrrSetting;
2893}
2894
2907EFIAPI
2909 IN MTRR_SETTINGS *MtrrSetting
2910 )
2911{
2912 BOOLEAN FixedMtrrSupported;
2914 MTRR_CONTEXT MtrrContext;
2915
2916 MtrrDefType = (MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType;
2917 if (!MtrrLibIsMtrrSupported (&FixedMtrrSupported, NULL)) {
2918 return MtrrSetting;
2919 }
2920
2921 MtrrLibPreMtrrChange (&MtrrContext);
2922
2923 //
2924 // Enabling the Fixed MTRR bit when unsupported is not allowed.
2925 //
2926 ASSERT (FixedMtrrSupported || (MtrrDefType->Bits.FE == 0));
2927
2928 //
2929 // If the hardware supports Fixed MTRR, it is sufficient
2930 // to set MTRRs regardless of whether Fixed MTRR bit is enabled.
2931 //
2932 if (FixedMtrrSupported) {
2933 MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);
2934 }
2935
2936 //
2937 // Set Variable MTRRs
2938 //
2939 MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);
2940
2941 //
2942 // Set MTRR_DEF_TYPE value
2943 //
2944 AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);
2945
2946 MtrrLibPostMtrrChangeEnableCache (&MtrrContext);
2947
2948 return MtrrSetting;
2949}
2950
2958BOOLEAN
2959EFIAPI
2961 VOID
2962 )
2963{
2965}
2966
2982RETURN_STATUS
2983EFIAPI
2985 IN CONST MTRR_SETTINGS *MtrrSetting OPTIONAL,
2986 OUT MTRR_MEMORY_RANGE *Ranges,
2987 IN OUT UINTN *RangeCount
2988 )
2989{
2990 RETURN_STATUS Status;
2991 MTRR_SETTINGS LocalMtrrs;
2992 CONST MTRR_SETTINGS *Mtrrs;
2994 UINTN LocalRangeCount;
2995 UINT64 MtrrValidBitsMask;
2996 UINT64 MtrrValidAddressMask;
2997 UINT32 VariableMtrrCount;
2998 MTRR_MEMORY_RANGE RawVariableRanges[ARRAY_SIZE (Mtrrs->Variables.Mtrr)];
2999 MTRR_MEMORY_RANGE LocalRanges[
3000 ARRAY_SIZE (mMtrrLibFixedMtrrTable) * sizeof (UINT64) + 2 * ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
3001 ];
3002
3003 if (RangeCount == NULL) {
3005 }
3006
3007 if ((*RangeCount != 0) && (Ranges == NULL)) {
3009 }
3010
3011 if (MtrrSetting != NULL) {
3012 Mtrrs = MtrrSetting;
3013 } else {
3014 MtrrGetAllMtrrs (&LocalMtrrs);
3015 Mtrrs = &LocalMtrrs;
3016 }
3017
3018 MtrrDefType = (MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&Mtrrs->MtrrDefType;
3019
3020 LocalRangeCount = 1;
3021 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
3022 LocalRanges[0].BaseAddress = 0;
3023 LocalRanges[0].Length = MtrrValidBitsMask + 1;
3024
3025 if (MtrrDefType->Bits.E == 0) {
3026 LocalRanges[0].Type = CacheUncacheable;
3027 } else {
3028 LocalRanges[0].Type = MtrrGetDefaultMemoryTypeWorker (Mtrrs);
3029
3030 VariableMtrrCount = GetVariableMtrrCountWorker ();
3031 ASSERT (VariableMtrrCount <= ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
3032
3034 &Mtrrs->Variables,
3035 VariableMtrrCount,
3036 MtrrValidBitsMask,
3037 MtrrValidAddressMask,
3038 RawVariableRanges
3039 );
3040 Status = MtrrLibApplyVariableMtrrs (
3041 RawVariableRanges,
3042 VariableMtrrCount,
3043 LocalRanges,
3044 ARRAY_SIZE (LocalRanges),
3045 &LocalRangeCount
3046 );
3047 ASSERT_RETURN_ERROR (Status);
3048
3049 if (MtrrDefType->Bits.FE == 1) {
3050 MtrrLibApplyFixedMtrrs (&Mtrrs->Fixed, LocalRanges, ARRAY_SIZE (LocalRanges), &LocalRangeCount);
3051 }
3052 }
3053
3054 if (*RangeCount < LocalRangeCount) {
3055 *RangeCount = LocalRangeCount;
3057 }
3058
3059 CopyMem (Ranges, LocalRanges, LocalRangeCount * sizeof (LocalRanges[0]));
3060 *RangeCount = LocalRangeCount;
3061 return RETURN_SUCCESS;
3062}
3063
3073VOID
3075 IN MTRR_SETTINGS *MtrrSetting
3076 )
3077{
3079 UINT32 Index;
3080 MTRR_SETTINGS LocalMtrrs;
3081 MTRR_SETTINGS *Mtrrs;
3082 RETURN_STATUS Status;
3083 UINTN RangeCount;
3084 BOOLEAN ContainVariableMtrr;
3085 MTRR_MEMORY_RANGE Ranges[
3086 ARRAY_SIZE (mMtrrLibFixedMtrrTable) * sizeof (UINT64) + 2 * ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
3087 ];
3088
3089 if (MtrrSetting != NULL) {
3090 Mtrrs = MtrrSetting;
3091 } else {
3092 MtrrGetAllMtrrs (&LocalMtrrs);
3093 Mtrrs = &LocalMtrrs;
3094 }
3095
3096 RangeCount = ARRAY_SIZE (Ranges);
3097 Status = MtrrGetMemoryAttributesInMtrrSettings (Mtrrs, Ranges, &RangeCount);
3098 if (RETURN_ERROR (Status)) {
3099 DEBUG ((DEBUG_CACHE, "MTRR is not enabled.\n"));
3100 return;
3101 }
3102
3103 //
3104 // Dump RAW MTRR contents
3105 //
3106 DEBUG ((DEBUG_CACHE, "MTRR Settings:\n"));
3107 DEBUG ((DEBUG_CACHE, "=============\n"));
3108 DEBUG ((DEBUG_CACHE, "MTRR Default Type: %016lx\n", Mtrrs->MtrrDefType));
3109 for (Index = 0; Index < ARRAY_SIZE (mMtrrLibFixedMtrrTable); Index++) {
3110 DEBUG ((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, Mtrrs->Fixed.Mtrr[Index]));
3111 }
3112
3113 ContainVariableMtrr = FALSE;
3114 for (Index = 0; Index < ARRAY_SIZE (Mtrrs->Variables.Mtrr); Index++) {
3115 if ((Mtrrs->Variables.Mtrr[Index].Mask & BIT11) == 0) {
3116 //
3117 // If mask is not valid, then do not display range
3118 //
3119 continue;
3120 }
3121
3122 ContainVariableMtrr = TRUE;
3123 DEBUG ((
3124 DEBUG_CACHE,
3125 "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
3126 Index,
3127 Mtrrs->Variables.Mtrr[Index].Base,
3128 Mtrrs->Variables.Mtrr[Index].Mask
3129 ));
3130 }
3131
3132 if (!ContainVariableMtrr) {
3133 DEBUG ((DEBUG_CACHE, "Variable MTRR : None.\n"));
3134 }
3135
3136 DEBUG ((DEBUG_CACHE, "\n"));
3137
3138 //
3139 // Dump MTRR setting in ranges
3140 //
3141 DEBUG ((DEBUG_CACHE, "Memory Ranges:\n"));
3142 DEBUG ((DEBUG_CACHE, "====================================\n"));
3143 for (Index = 0; Index < RangeCount; Index++) {
3144 DEBUG ((
3145 DEBUG_CACHE,
3146 "%a:%016lx-%016lx\n",
3147 mMtrrMemoryCacheTypeShortName[Ranges[Index].Type],
3148 Ranges[Index].BaseAddress,
3149 Ranges[Index].BaseAddress + Ranges[Index].Length - 1
3150 ));
3151 }
3152
3153 DEBUG_CODE_END ();
3154}
3155
3159VOID
3160EFIAPI
3162 VOID
3163 )
3164{
3166}
UINT64 UINTN
BOOLEAN EFIAPI SetInterruptState(IN BOOLEAN InterruptState)
Definition: Cpu.c:48
BOOLEAN EFIAPI SaveAndDisableInterrupts(VOID)
Definition: Cpu.c:21
UINT64 EFIAPI RShiftU64(IN UINT64 Operand, IN UINTN Count)
Definition: RShiftU64.c:28
UINT64 EFIAPI MultU64x32(IN UINT64 Multiplicand, IN UINT32 Multiplier)
Definition: MultU64x32.c:27
UINT64 EFIAPI GetPowerOfTwo64(IN UINT64 Operand)
UINT64 EFIAPI LShiftU64(IN UINT64 Operand, IN UINTN Count)
Definition: LShiftU64.c:28
INTN EFIAPI HighBitSet64(IN UINT64 Operand)
Definition: HighBitSet64.c:27
VOID *EFIAPI CopyMem(OUT VOID *DestinationBuffer, IN CONST VOID *SourceBuffer, IN UINTN Length)
VOID *EFIAPI SetMem(OUT VOID *Buffer, IN UINTN Length, IN UINT8 Value)
Definition: SetMemWrapper.c:38
VOID *EFIAPI ZeroMem(OUT VOID *Buffer, IN UINTN Length)
UINT32 EFIAPI AsmCpuidEx(IN UINT32 Index, IN UINT32 SubIndex, OUT UINT32 *RegisterEax OPTIONAL, OUT UINT32 *RegisterEbx OPTIONAL, OUT UINT32 *RegisterEcx OPTIONAL, OUT UINT32 *RegisterEdx OPTIONAL)
Definition: CpuIdEx.c:43
VOID EFIAPI CpuFlushTlb(VOID)
VOID EFIAPI AsmDisableCache(VOID)
Definition: DisableCache.c:18
VOID EFIAPI AsmEnableCache(VOID)
Definition: EnableCache.c:18
UINT64 EFIAPI AsmReadMsr64(IN UINT32 Index)
Definition: GccInlinePriv.c:60
UINTN EFIAPI AsmWriteCr4(UINTN Cr4)
UINT64 EFIAPI AsmWriteMsr64(IN UINT32 Index, IN UINT64 Value)
UINTN EFIAPI AsmReadCr4(VOID)
#define NULL
Definition: Base.h:319
#define CONST
Definition: Base.h:259
#define RETURN_BUFFER_TOO_SMALL
Definition: Base.h:1093
#define RETURN_ERROR(StatusCode)
Definition: Base.h:1061
#define MIN(a, b)
Definition: Base.h:1007
#define RETURN_UNSUPPORTED
Definition: Base.h:1081
#define RETURN_OUT_OF_RESOURCES
Definition: Base.h:1114
#define RETURN_SUCCESS
Definition: Base.h:1066
#define TRUE
Definition: Base.h:301
#define FALSE
Definition: Base.h:307
#define RETURN_ALREADY_STARTED
Definition: Base.h:1172
#define ARRAY_SIZE(Array)
Definition: Base.h:1393
#define IN
Definition: Base.h:279
#define OUT
Definition: Base.h:284
#define RETURN_INVALID_PARAMETER
Definition: Base.h:1076
#define GLOBAL_REMOVE_IF_UNREFERENCED
Definition: Base.h:48
#define IS_POW2(Value)
Definition: Base.h:901
#define MAX(a, b)
Definition: Base.h:992
#define ASSERT_RETURN_ERROR(StatusParameter)
Definition: DebugLib.h:493
#define DEBUG_CODE_BEGIN()
Definition: DebugLib.h:564
#define DEBUG(Expression)
Definition: DebugLib.h:434
#define DEBUG_CODE_END()
Definition: DebugLib.h:578
#define MSR_IA32_MTRR_PHYSBASE0
#define MSR_IA32_MTRR_FIX4K_E0000
#define MSR_IA32_MTRR_DEF_TYPE
#define MSR_IA32_MTRR_FIX4K_C8000
#define MSR_IA32_MTRR_FIX4K_E8000
#define MSR_IA32_MTRR_FIX4K_F8000
#define MSR_IA32_MTRRCAP
#define MSR_IA32_MTRR_FIX16K_80000
#define MSR_IA32_MTRR_FIX16K_A0000
#define MSR_IA32_MTRR_FIX4K_D0000
#define MSR_IA32_MTRR_PHYSMASK0
#define MSR_IA32_MTRR_FIX64K_00000
#define MSR_IA32_TME_ACTIVATE
#define MSR_IA32_MTRR_FIX4K_D8000
#define MSR_IA32_MTRR_FIX4K_C0000
#define MSR_IA32_MTRR_FIX4K_F0000
#define CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS
Definition: Cpuid.h:1301
#define CPUID_SIGNATURE
Definition: Cpuid.h:45
#define CPUID_VERSION_INFO
Definition: Cpuid.h:81
UINT32 EFIAPI AsmCpuid(IN UINT32 Index, OUT UINT32 *RegisterEax OPTIONAL, OUT UINT32 *RegisterEbx OPTIONAL, OUT UINT32 *RegisterEcx OPTIONAL, OUT UINT32 *RegisterEdx OPTIONAL)
Definition: CpuId.c:36
BOOLEAN EFIAPI TdIsEnabled()
Definition: IntelTdxNull.c:79
MTRR_MEMORY_CACHE_TYPE MtrrGetDefaultMemoryTypeWorker(IN CONST MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:301
RETURN_STATUS MtrrLibApplyFixedMtrrs(IN CONST MTRR_FIXED_SETTINGS *Fixed, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
Definition: MtrrLib.c:1833
VOID MtrrLibCalculateLeastMtrrs(IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT CONST UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN BOOLEAN IncludeOptional)
Definition: MtrrLib.c:1228
RETURN_STATUS MtrrLibCalculateSubtractivePath(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN UINT8 Types, IN UINT8 TypeCount, IN OUT MTRR_MEMORY_RANGE *Mtrrs OPTIONAL, IN UINT32 MtrrCapacity OPTIONAL, IN OUT UINT32 *MtrrCount OPTIONAL)
Definition: MtrrLib.c:1370
UINT32 EFIAPI MtrrGetMemoryAttributeInVariableMtrr(IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
Definition: MtrrLib.c:740
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetMemoryAttribute(IN PHYSICAL_ADDRESS Address)
Definition: MtrrLib.c:1021
VOID MtrrLibInitializeMtrrMask(OUT UINT64 *MtrrValidBitsMask, OUT UINT64 *MtrrValidAddressMask)
Definition: MtrrLib.c:823
UINT32 MtrrGetMemoryAttributeInVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
Definition: MtrrLib.c:656
UINT32 GetVariableMtrrCountWorker(VOID)
Definition: MtrrLib.c:216
RETURN_STATUS MtrrLibApplyVariableMtrrs(IN CONST MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCount, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
Definition: MtrrLib.c:1884
VOID MtrrSetFixedMtrrWorker(IN MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:2827
VOID EFIAPI MtrrDebugPrintAllMtrrs(VOID)
Definition: MtrrLib.c:3161
MTRR_SETTINGS *EFIAPI MtrrSetAllMtrrs(IN MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:2908
RETURN_STATUS EFIAPI MtrrGetMemoryAttributesInMtrrSettings(IN CONST MTRR_SETTINGS *MtrrSetting OPTIONAL, OUT MTRR_MEMORY_RANGE *Ranges, IN OUT UINTN *RangeCount)
Definition: MtrrLib.c:2984
RETURN_STATUS MtrrLibAppendVariableMtrr(IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:1306
MTRR_MEMORY_CACHE_TYPE MtrrLibLowestType(IN UINT8 TypeBits)
Definition: MtrrLib.c:1334
VOID MtrrLibPreMtrrChange(OUT MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:345
VOID MtrrLibMergeVariableMtrr(MTRR_MEMORY_RANGE *DstMtrrs, UINT32 DstMtrrCount, MTRR_MEMORY_RANGE *SrcMtrrs, UINT32 SrcMtrrCount, BOOLEAN *Modified)
Definition: MtrrLib.c:2020
UINT64 MtrrLibBiggestAlignment(UINT64 Address, UINT64 Alignment0)
Definition: MtrrLib.c:777
RETURN_STATUS MtrrLibCalculateMtrrs(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount)
Definition: MtrrLib.c:1592
UINT8 MtrrLibGetNumberOfTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT64 BaseAddress, IN UINT64 Length, IN OUT UINT8 *Types OPTIONAL)
Definition: MtrrLib.c:1175
RETURN_STATUS EFIAPI MtrrSetMemoryAttributeInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
Definition: MtrrLib.c:2729
UINT32 EFIAPI GetVariableMtrrCount(VOID)
Definition: MtrrLib.c:235
UINT32 EFIAPI GetFirmwareVariableMtrrCount(VOID)
Definition: MtrrLib.c:277
VOID MtrrLibPostMtrrChangeEnableCache(IN MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:391
BOOLEAN EFIAPI IsMtrrSupported(VOID)
Definition: MtrrLib.c:2960
MTRR_MEMORY_CACHE_TYPE MtrrGetMemoryAttributeByAddressWorker(IN MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS Address)
Definition: MtrrLib.c:911
RETURN_STATUS EFIAPI MtrrSetMemoryAttribute(IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
Definition: MtrrLib.c:2782
VOID MtrrSetVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings)
Definition: MtrrLib.c:2798
UINT32 GetFirmwareVariableMtrrCountWorker(VOID)
Definition: MtrrLib.c:253
MTRR_FIXED_SETTINGS *EFIAPI MtrrGetFixedMtrr(OUT MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:474
UINT8 MtrrLibGetCompatibleTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
Definition: MtrrLib.c:1973
RETURN_STATUS MtrrLibSetBelow1MBMemoryAttribute(IN OUT UINT64 *ClearMasks, IN OUT UINT64 *OrMasks, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:2279
RETURN_STATUS MtrrLibProgramFixedMtrr(IN MTRR_MEMORY_CACHE_TYPE Type, IN OUT UINT64 *Base, IN OUT UINT64 *Length, IN OUT UINT32 *LastMsrIndex, OUT UINT64 *ClearMask, OUT UINT64 *OrMask)
Definition: MtrrLib.c:546
MTRR_VARIABLE_SETTINGS * MtrrGetVariableMtrrWorker(IN MTRR_SETTINGS *MtrrSetting, IN UINT32 VariableMtrrCount, OUT MTRR_VARIABLE_SETTINGS *VariableSettings)
Definition: MtrrLib.c:504
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetDefaultMemoryType(VOID)
Definition: MtrrLib.c:324
RETURN_STATUS EFIAPI MtrrSetMemoryAttributesInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
Definition: MtrrLib.c:2336
UINT32 MtrrLibGetRawVariableRanges(IN CONST MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT MTRR_MEMORY_RANGE *VariableMtrr)
Definition: MtrrLib.c:699
MTRR_MEMORY_CACHE_TYPE MtrrLibPrecedence(IN MTRR_MEMORY_CACHE_TYPE MtrrType1, IN MTRR_MEMORY_CACHE_TYPE MtrrType2)
Definition: MtrrLib.c:876
BOOLEAN MtrrLibIsMtrrSupported(OUT BOOLEAN *FixedMtrrSupported OPTIONAL, OUT UINT32 *VariableMtrrCount OPTIONAL)
Definition: MtrrLib.c:156
MTRR_SETTINGS *EFIAPI MtrrGetAllMtrrs(OUT MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:2851
BOOLEAN MtrrLibTypeLeftPrecedeRight(IN MTRR_MEMORY_CACHE_TYPE Left, IN MTRR_MEMORY_CACHE_TYPE Right)
Definition: MtrrLib.c:805
RETURN_STATUS MtrrLibSetMemoryType(IN MTRR_MEMORY_RANGE *Ranges, IN UINTN Capacity, IN OUT UINTN *Count, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:1051
RETURN_STATUS MtrrLibSetMemoryRanges(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, OUT MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCapacity, OUT UINT32 *VariableMtrrCount)
Definition: MtrrLib.c:2108
MTRR_FIXED_SETTINGS * MtrrGetFixedMtrrWorker(OUT MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:450
VOID MtrrLibPostMtrrChange(IN MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:426
VOID MtrrDebugPrintAllMtrrsWorker(IN MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:3074
#define PcdGet32(TokenName)
Definition: PcdLib.h:362
VOID EFIAPI Exit(IN EFI_STATUS Status)
UINT64 EFIAPI AsmMsrAndThenOr64(IN UINT32 Index, IN UINT64 AndData, IN UINT64 OrData)
Definition: X86Msr.c:437
struct CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS_ECX::@709 Bits
struct CPUID_VIR_PHY_ADDRESS_SIZE_EAX::@753 Bits
struct MSR_IA32_MTRR_DEF_TYPE_REGISTER::@653 Bits
struct MSR_IA32_MTRRCAP_REGISTER::@631 Bits
struct MSR_IA32_TME_ACTIVATE_REGISTER::@680 Bits