/** @file
  MtrrLib.c — MTRR (Memory Type Range Register) setting library.
  Part of TianoCore EDK2 (master branch); this copy was extracted from a
  generated documentation page.
**/
13#include <Uefi.h>
15#include <Register/Intel/Msr.h>
16
17#include <Library/MtrrLib.h>
18#include <Library/BaseLib.h>
19#include <Library/CpuLib.h>
21#include <Library/DebugLib.h>
22
//
// OR_SEED replicates a one-byte memory type into all 8 bytes of a fixed-MTRR
// MSR value; CLEAR_SEED is the all-ones mask the byte-shift logic carves down.
//
#define OR_SEED              0x0101010101010101ull
#define CLEAR_SEED           0xFFFFFFFFFFFFFFFFull
#define MAX_WEIGHT           MAX_UINT8
#define SCRATCH_BUFFER_SIZE  (4 * SIZE_4KB)
#define MTRR_LIB_ASSERT_ALIGNED(B, L)  ASSERT ((B & ~(L - 1)) == B);

//
// M(x,y): mandatory weight of edge x->y; O(x,y): optional weight (stored in
// the transposed half of the same VertexCount x VertexCount matrix).
//
#define M(x, y)  ((x) * VertexCount + (y))
#define O(x, y)  ((y) * VertexCount + (x))
31
32//
33// Context to save and restore when MTRRs are programmed
34//
35typedef struct {
36 UINTN Cr4;
37 BOOLEAN InterruptState;
39
40typedef struct {
41 UINT64 Address;
42 UINT64 Alignment;
43 UINT64 Length;
44 MTRR_MEMORY_CACHE_TYPE Type : 7;
45
46 //
47 // Temprary use for calculating the best MTRR settings.
48 //
49 BOOLEAN Visited : 1;
50 UINT8 Weight;
51 UINT16 Previous;
53
54//
55// This table defines the offset, base and length of the fixed MTRRs
56//
57CONST FIXED_MTRR mMtrrLibFixedMtrrTable[] = {
58 {
60 0,
61 SIZE_64KB
62 },
63 {
65 0x80000,
66 SIZE_16KB
67 },
68 {
70 0xA0000,
71 SIZE_16KB
72 },
73 {
75 0xC0000,
76 SIZE_4KB
77 },
78 {
80 0xC8000,
81 SIZE_4KB
82 },
83 {
85 0xD0000,
86 SIZE_4KB
87 },
88 {
90 0xD8000,
91 SIZE_4KB
92 },
93 {
95 0xE0000,
96 SIZE_4KB
97 },
98 {
100 0xE8000,
101 SIZE_4KB
102 },
103 {
105 0xF0000,
106 SIZE_4KB
107 },
108 {
110 0xF8000,
111 SIZE_4KB
112 }
113};
114
115//
116// Lookup table used to print MTRRs
117//
118GLOBAL_REMOVE_IF_UNREFERENCED CONST CHAR8 *mMtrrMemoryCacheTypeShortName[] = {
119 "UC", // CacheUncacheable
120 "WC", // CacheWriteCombining
121 "R*", // Invalid
122 "R*", // Invalid
123 "WT", // CacheWriteThrough
124 "WP", // CacheWriteProtected
125 "WB", // CacheWriteBack
126 "R*" // Invalid
127};
128
138VOID
140 IN MTRR_SETTINGS *MtrrSetting
141 );
142
149UINT32
151 VOID
152 )
153{
155
157 ASSERT (MtrrCap.Bits.VCNT <= ARRAY_SIZE (((MTRR_VARIABLE_SETTINGS *)0)->Mtrr));
158 return MtrrCap.Bits.VCNT;
159}
160
167UINT32
168EFIAPI
170 VOID
171 )
172{
173 if (!IsMtrrSupported ()) {
174 return 0;
175 }
176
178}
179
186UINT32
188 VOID
189 )
190{
191 UINT32 VariableMtrrCount;
192 UINT32 ReservedMtrrNumber;
193
194 VariableMtrrCount = GetVariableMtrrCountWorker ();
195 ReservedMtrrNumber = PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
196 if (VariableMtrrCount < ReservedMtrrNumber) {
197 return 0;
198 }
199
200 return VariableMtrrCount - ReservedMtrrNumber;
201}
202
209UINT32
210EFIAPI
212 VOID
213 )
214{
215 if (!IsMtrrSupported ()) {
216 return 0;
217 }
218
220}
221
234MTRR_MEMORY_CACHE_TYPE
236 IN MTRR_SETTINGS *MtrrSetting
237 )
238{
240
241 if (MtrrSetting == NULL) {
243 } else {
244 DefType.Uint64 = MtrrSetting->MtrrDefType;
245 }
246
247 return (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
248}
249
256MTRR_MEMORY_CACHE_TYPE
257EFIAPI
259 VOID
260 )
261{
262 if (!IsMtrrSupported ()) {
263 return CacheUncacheable;
264 }
265
267}
268
278VOID
280 OUT MTRR_CONTEXT *MtrrContext
281 )
282{
284
285 //
286 // Disable interrupts and save current interrupt state
287 //
288 MtrrContext->InterruptState = SaveAndDisableInterrupts ();
289
290 //
291 // Enter no fill cache mode, CD=1(Bit30), NW=0 (Bit29)
292 //
294
295 //
296 // Save original CR4 value and clear PGE flag (Bit 7)
297 //
298 MtrrContext->Cr4 = AsmReadCr4 ();
299 AsmWriteCr4 (MtrrContext->Cr4 & (~BIT7));
300
301 //
302 // Flush all TLBs
303 //
304 CpuFlushTlb ();
305
306 //
307 // Disable MTRRs
308 //
310 DefType.Bits.E = 0;
312}
313
323VOID
325 IN MTRR_CONTEXT *MtrrContext
326 )
327{
328 //
329 // Flush all TLBs
330 //
331 CpuFlushTlb ();
332
333 //
334 // Enable Normal Mode caching CD=NW=0, CD(Bit30), NW(Bit29)
335 //
337
338 //
339 // Restore original CR4 value
340 //
341 AsmWriteCr4 (MtrrContext->Cr4);
342
343 //
344 // Restore original interrupt state
345 //
346 SetInterruptState (MtrrContext->InterruptState);
347}
348
358VOID
360 IN MTRR_CONTEXT *MtrrContext
361 )
362{
364
365 //
366 // Enable Cache MTRR
367 //
369 DefType.Bits.E = 1;
370 DefType.Bits.FE = 1;
372
374}
375
386 OUT MTRR_FIXED_SETTINGS *FixedSettings
387 )
388{
389 UINT32 Index;
390
391 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
392 FixedSettings->Mtrr[Index] =
393 AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
394 }
395
396 return FixedSettings;
397}
398
408EFIAPI
410 OUT MTRR_FIXED_SETTINGS *FixedSettings
411 )
412{
413 if (!IsMtrrSupported ()) {
414 return FixedSettings;
415 }
416
417 return MtrrGetFixedMtrrWorker (FixedSettings);
418}
419
436 IN MTRR_SETTINGS *MtrrSetting,
437 IN UINT32 VariableMtrrCount,
438 OUT MTRR_VARIABLE_SETTINGS *VariableSettings
439 )
440{
441 UINT32 Index;
442
443 ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));
444
445 for (Index = 0; Index < VariableMtrrCount; Index++) {
446 if (MtrrSetting == NULL) {
447 VariableSettings->Mtrr[Index].Base =
448 AsmReadMsr64 (MSR_IA32_MTRR_PHYSBASE0 + (Index << 1));
449 VariableSettings->Mtrr[Index].Mask =
450 AsmReadMsr64 (MSR_IA32_MTRR_PHYSMASK0 + (Index << 1));
451 } else {
452 VariableSettings->Mtrr[Index].Base = MtrrSetting->Variables.Mtrr[Index].Base;
453 VariableSettings->Mtrr[Index].Mask = MtrrSetting->Variables.Mtrr[Index].Mask;
454 }
455 }
456
457 return VariableSettings;
458}
459
476RETURN_STATUS
478 IN MTRR_MEMORY_CACHE_TYPE Type,
479 IN OUT UINT64 *Base,
480 IN OUT UINT64 *Length,
481 IN OUT UINT32 *LastMsrIndex,
482 OUT UINT64 *ClearMask,
483 OUT UINT64 *OrMask
484 )
485{
486 UINT32 MsrIndex;
487 UINT32 LeftByteShift;
488 UINT32 RightByteShift;
489 UINT64 SubLength;
490
491 //
492 // Find the fixed MTRR index to be programmed
493 //
494 for (MsrIndex = *LastMsrIndex + 1; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
495 if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&
496 (*Base <
497 (
498 mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +
499 (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)
500 )
501 )
502 )
503 {
504 break;
505 }
506 }
507
508 ASSERT (MsrIndex != ARRAY_SIZE (mMtrrLibFixedMtrrTable));
509
510 //
511 // Find the begin offset in fixed MTRR and calculate byte offset of left shift
512 //
513 if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
514 //
515 // Base address should be aligned to the begin of a certain Fixed MTRR range.
516 //
517 return RETURN_UNSUPPORTED;
518 }
519
520 LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
521 ASSERT (LeftByteShift < 8);
522
523 //
524 // Find the end offset in fixed MTRR and calculate byte offset of right shift
525 //
526 SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);
527 if (*Length >= SubLength) {
528 RightByteShift = 0;
529 } else {
530 if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
531 //
532 // Length should be aligned to the end of a certain Fixed MTRR range.
533 //
534 return RETURN_UNSUPPORTED;
535 }
536
537 RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
538 //
539 // Update SubLength by actual length
540 //
541 SubLength = *Length;
542 }
543
544 *ClearMask = CLEAR_SEED;
545 *OrMask = MultU64x32 (OR_SEED, (UINT32)Type);
546
547 if (LeftByteShift != 0) {
548 //
549 // Clear the low bits by LeftByteShift
550 //
551 *ClearMask &= LShiftU64 (*ClearMask, LeftByteShift * 8);
552 *OrMask &= LShiftU64 (*OrMask, LeftByteShift * 8);
553 }
554
555 if (RightByteShift != 0) {
556 //
557 // Clear the high bits by RightByteShift
558 //
559 *ClearMask &= RShiftU64 (*ClearMask, RightByteShift * 8);
560 *OrMask &= RShiftU64 (*OrMask, RightByteShift * 8);
561 }
562
563 *Length -= SubLength;
564 *Base += SubLength;
565
566 *LastMsrIndex = MsrIndex;
567
568 return RETURN_SUCCESS;
569}
570
586UINT32
588 IN MTRR_VARIABLE_SETTINGS *VariableSettings,
589 IN UINTN VariableMtrrCount,
590 IN UINT64 MtrrValidBitsMask,
591 IN UINT64 MtrrValidAddressMask,
592 OUT VARIABLE_MTRR *VariableMtrr
593 )
594{
595 UINTN Index;
596 UINT32 UsedMtrr;
597
598 ZeroMem (VariableMtrr, sizeof (VARIABLE_MTRR) * ARRAY_SIZE (VariableSettings->Mtrr));
599 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
600 if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {
601 VariableMtrr[Index].Msr = (UINT32)Index;
602 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
603 VariableMtrr[Index].Length =
604 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
605 VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);
606 VariableMtrr[Index].Valid = TRUE;
607 VariableMtrr[Index].Used = TRUE;
608 UsedMtrr++;
609 }
610 }
611
612 return UsedMtrr;
613}
614
629UINT32
631 IN MTRR_VARIABLE_SETTINGS *VariableSettings,
632 IN UINTN VariableMtrrCount,
633 IN UINT64 MtrrValidBitsMask,
634 IN UINT64 MtrrValidAddressMask,
635 OUT MTRR_MEMORY_RANGE *VariableMtrr
636 )
637{
638 UINTN Index;
639 UINT32 UsedMtrr;
640
641 ZeroMem (VariableMtrr, sizeof (MTRR_MEMORY_RANGE) * ARRAY_SIZE (VariableSettings->Mtrr));
642 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
643 if (((MSR_IA32_MTRR_PHYSMASK_REGISTER *)&VariableSettings->Mtrr[Index].Mask)->Bits.V != 0) {
644 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
645 VariableMtrr[Index].Length =
646 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
647 VariableMtrr[Index].Type = (MTRR_MEMORY_CACHE_TYPE)(VariableSettings->Mtrr[Index].Base & 0x0ff);
648 UsedMtrr++;
649 }
650 }
651
652 return UsedMtrr;
653}
654
669UINT32
670EFIAPI
672 IN UINT64 MtrrValidBitsMask,
673 IN UINT64 MtrrValidAddressMask,
674 OUT VARIABLE_MTRR *VariableMtrr
675 )
676{
677 MTRR_VARIABLE_SETTINGS VariableSettings;
678
679 if (!IsMtrrSupported ()) {
680 return 0;
681 }
682
684 NULL,
686 &VariableSettings
687 );
688
690 &VariableSettings,
692 MtrrValidBitsMask,
693 MtrrValidAddressMask,
694 VariableMtrr
695 );
696}
697
707UINT64
709 UINT64 Address,
710 UINT64 Alignment0
711 )
712{
713 if (Address == 0) {
714 return Alignment0;
715 }
716
717 return Address & ((~Address) + 1);
718}
719
735BOOLEAN
737 IN MTRR_MEMORY_CACHE_TYPE Left,
738 IN MTRR_MEMORY_CACHE_TYPE Right
739 )
740{
741 return (BOOLEAN)(Left == CacheUncacheable || (Left == CacheWriteThrough && Right == CacheWriteBack));
742}
743
753VOID
755 OUT UINT64 *MtrrValidBitsMask,
756 OUT UINT64 *MtrrValidAddressMask
757 )
758{
759 UINT32 MaxExtendedFunction;
760 CPUID_VIR_PHY_ADDRESS_SIZE_EAX VirPhyAddressSize;
761
762 AsmCpuid (CPUID_EXTENDED_FUNCTION, &MaxExtendedFunction, NULL, NULL, NULL);
763
764 if (MaxExtendedFunction >= CPUID_VIR_PHY_ADDRESS_SIZE) {
765 AsmCpuid (CPUID_VIR_PHY_ADDRESS_SIZE, &VirPhyAddressSize.Uint32, NULL, NULL, NULL);
766 } else {
767 VirPhyAddressSize.Bits.PhysicalAddressBits = 36;
768 }
769
770 *MtrrValidBitsMask = LShiftU64 (1, VirPhyAddressSize.Bits.PhysicalAddressBits) - 1;
771 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
772}
773
786MTRR_MEMORY_CACHE_TYPE
788 IN MTRR_MEMORY_CACHE_TYPE MtrrType1,
789 IN MTRR_MEMORY_CACHE_TYPE MtrrType2
790 )
791{
792 if (MtrrType1 == MtrrType2) {
793 return MtrrType1;
794 }
795
796 ASSERT (
797 MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2) ||
798 MtrrLibTypeLeftPrecedeRight (MtrrType2, MtrrType1)
799 );
800
801 if (MtrrLibTypeLeftPrecedeRight (MtrrType1, MtrrType2)) {
802 return MtrrType1;
803 } else {
804 return MtrrType2;
805 }
806}
807
821MTRR_MEMORY_CACHE_TYPE
823 IN MTRR_SETTINGS *MtrrSetting,
824 IN PHYSICAL_ADDRESS Address
825 )
826{
828 UINT64 FixedMtrr;
829 UINTN Index;
830 UINTN SubIndex;
831 MTRR_MEMORY_CACHE_TYPE MtrrType;
832 MTRR_MEMORY_RANGE VariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
833 UINT64 MtrrValidBitsMask;
834 UINT64 MtrrValidAddressMask;
835 UINT32 VariableMtrrCount;
836 MTRR_VARIABLE_SETTINGS VariableSettings;
837
838 //
839 // Check if MTRR is enabled, if not, return UC as attribute
840 //
841 if (MtrrSetting == NULL) {
843 } else {
844 DefType.Uint64 = MtrrSetting->MtrrDefType;
845 }
846
847 if (DefType.Bits.E == 0) {
848 return CacheUncacheable;
849 }
850
851 //
852 // If address is less than 1M, then try to go through the fixed MTRR
853 //
854 if (Address < BASE_1MB) {
855 if (DefType.Bits.FE != 0) {
856 //
857 // Go through the fixed MTRR
858 //
859 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
860 if ((Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress) &&
861 (Address < mMtrrLibFixedMtrrTable[Index].BaseAddress +
862 (mMtrrLibFixedMtrrTable[Index].Length * 8)))
863 {
864 SubIndex =
865 ((UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
866 mMtrrLibFixedMtrrTable[Index].Length;
867 if (MtrrSetting == NULL) {
868 FixedMtrr = AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
869 } else {
870 FixedMtrr = MtrrSetting->Fixed.Mtrr[Index];
871 }
872
873 return (MTRR_MEMORY_CACHE_TYPE)(RShiftU64 (FixedMtrr, SubIndex * 8) & 0xFF);
874 }
875 }
876 }
877 }
878
879 VariableMtrrCount = GetVariableMtrrCountWorker ();
880 ASSERT (VariableMtrrCount <= ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
881 MtrrGetVariableMtrrWorker (MtrrSetting, VariableMtrrCount, &VariableSettings);
882
883 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
885 &VariableSettings,
886 VariableMtrrCount,
887 MtrrValidBitsMask,
888 MtrrValidAddressMask,
889 VariableMtrr
890 );
891
892 //
893 // Go through the variable MTRR
894 //
895 MtrrType = CacheInvalid;
896 for (Index = 0; Index < VariableMtrrCount; Index++) {
897 if (VariableMtrr[Index].Length != 0) {
898 if ((Address >= VariableMtrr[Index].BaseAddress) &&
899 (Address < VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length))
900 {
901 if (MtrrType == CacheInvalid) {
902 MtrrType = (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type;
903 } else {
904 MtrrType = MtrrLibPrecedence (MtrrType, (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type);
905 }
906 }
907 }
908 }
909
910 //
911 // If there is no MTRR which covers the Address, use the default MTRR type.
912 //
913 if (MtrrType == CacheInvalid) {
914 MtrrType = (MTRR_MEMORY_CACHE_TYPE)DefType.Bits.Type;
915 }
916
917 return MtrrType;
918}
919
930MTRR_MEMORY_CACHE_TYPE
931EFIAPI
933 IN PHYSICAL_ADDRESS Address
934 )
935{
936 if (!IsMtrrSupported ()) {
937 return CacheUncacheable;
938 }
939
941}
942
961RETURN_STATUS
963 IN MTRR_MEMORY_RANGE *Ranges,
964 IN UINTN Capacity,
965 IN OUT UINTN *Count,
966 IN UINT64 BaseAddress,
967 IN UINT64 Length,
968 IN MTRR_MEMORY_CACHE_TYPE Type
969 )
970{
971 UINTN Index;
972 UINT64 Limit;
973 UINT64 LengthLeft;
974 UINT64 LengthRight;
975 UINTN StartIndex;
976 UINTN EndIndex;
977 UINTN DeltaCount;
978
979 LengthRight = 0;
980 LengthLeft = 0;
981 Limit = BaseAddress + Length;
982 StartIndex = *Count;
983 EndIndex = *Count;
984 for (Index = 0; Index < *Count; Index++) {
985 if ((StartIndex == *Count) &&
986 (Ranges[Index].BaseAddress <= BaseAddress) &&
987 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length))
988 {
989 StartIndex = Index;
990 LengthLeft = BaseAddress - Ranges[Index].BaseAddress;
991 }
992
993 if ((EndIndex == *Count) &&
994 (Ranges[Index].BaseAddress < Limit) &&
995 (Limit <= Ranges[Index].BaseAddress + Ranges[Index].Length))
996 {
997 EndIndex = Index;
998 LengthRight = Ranges[Index].BaseAddress + Ranges[Index].Length - Limit;
999 break;
1000 }
1001 }
1002
1003 ASSERT (StartIndex != *Count && EndIndex != *Count);
1004 if ((StartIndex == EndIndex) && (Ranges[StartIndex].Type == Type)) {
1006 }
1007
1008 //
1009 // The type change may cause merging with previous range or next range.
1010 // Update the StartIndex, EndIndex, BaseAddress, Length so that following
1011 // logic doesn't need to consider merging.
1012 //
1013 if (StartIndex != 0) {
1014 if ((LengthLeft == 0) && (Ranges[StartIndex - 1].Type == Type)) {
1015 StartIndex--;
1016 Length += Ranges[StartIndex].Length;
1017 BaseAddress -= Ranges[StartIndex].Length;
1018 }
1019 }
1020
1021 if (EndIndex != (*Count) - 1) {
1022 if ((LengthRight == 0) && (Ranges[EndIndex + 1].Type == Type)) {
1023 EndIndex++;
1024 Length += Ranges[EndIndex].Length;
1025 }
1026 }
1027
1028 //
1029 // |- 0 -|- 1 -|- 2 -|- 3 -| StartIndex EndIndex DeltaCount Count (Count = 4)
1030 // |++++++++++++++++++| 0 3 1=3-0-2 3
1031 // |+++++++| 0 1 -1=1-0-2 5
1032 // |+| 0 0 -2=0-0-2 6
1033 // |+++| 0 0 -1=0-0-2+1 5
1034 //
1035 //
1036 DeltaCount = EndIndex - StartIndex - 2;
1037 if (LengthLeft == 0) {
1038 DeltaCount++;
1039 }
1040
1041 if (LengthRight == 0) {
1042 DeltaCount++;
1043 }
1044
1045 if (*Count - DeltaCount > Capacity) {
1047 }
1048
1049 //
1050 // Reserve (-DeltaCount) space
1051 //
1052 CopyMem (&Ranges[EndIndex + 1 - DeltaCount], &Ranges[EndIndex + 1], (*Count - EndIndex - 1) * sizeof (Ranges[0]));
1053 *Count -= DeltaCount;
1054
1055 if (LengthLeft != 0) {
1056 Ranges[StartIndex].Length = LengthLeft;
1057 StartIndex++;
1058 }
1059
1060 if (LengthRight != 0) {
1061 Ranges[EndIndex - DeltaCount].BaseAddress = BaseAddress + Length;
1062 Ranges[EndIndex - DeltaCount].Length = LengthRight;
1063 Ranges[EndIndex - DeltaCount].Type = Ranges[EndIndex].Type;
1064 }
1065
1066 Ranges[StartIndex].BaseAddress = BaseAddress;
1067 Ranges[StartIndex].Length = Length;
1068 Ranges[StartIndex].Type = Type;
1069 return RETURN_SUCCESS;
1070}
1071
1083UINT8
1085 IN CONST MTRR_MEMORY_RANGE *Ranges,
1086 IN UINTN RangeCount,
1087 IN UINT64 BaseAddress,
1088 IN UINT64 Length,
1089 IN OUT UINT8 *Types OPTIONAL
1090 )
1091{
1092 UINTN Index;
1093 UINT8 TypeCount;
1094 UINT8 LocalTypes;
1095
1096 TypeCount = 0;
1097 LocalTypes = 0;
1098 for (Index = 0; Index < RangeCount; Index++) {
1099 if ((Ranges[Index].BaseAddress <= BaseAddress) &&
1100 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length)
1101 )
1102 {
1103 if ((LocalTypes & (1 << Ranges[Index].Type)) == 0) {
1104 LocalTypes |= (UINT8)(1 << Ranges[Index].Type);
1105 TypeCount++;
1106 }
1107
1108 if (BaseAddress + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1109 Length -= Ranges[Index].BaseAddress + Ranges[Index].Length - BaseAddress;
1110 BaseAddress = Ranges[Index].BaseAddress + Ranges[Index].Length;
1111 } else {
1112 break;
1113 }
1114 }
1115 }
1116
1117 if (Types != NULL) {
1118 *Types = LocalTypes;
1119 }
1120
1121 return TypeCount;
1122}
1123
1136VOID
1138 IN UINT16 VertexCount,
1139 IN MTRR_LIB_ADDRESS *Vertices,
1140 IN OUT CONST UINT8 *Weight,
1141 IN UINT16 Start,
1142 IN UINT16 Stop,
1143 IN BOOLEAN IncludeOptional
1144 )
1145{
1146 UINT16 Index;
1147 UINT8 MinWeight;
1148 UINT16 MinI;
1149 UINT8 Mandatory;
1150 UINT8 Optional;
1151
1152 for (Index = Start; Index <= Stop; Index++) {
1153 Vertices[Index].Visited = FALSE;
1154 Mandatory = Weight[M (Start, Index)];
1155 Vertices[Index].Weight = Mandatory;
1156 if (Mandatory != MAX_WEIGHT) {
1157 Optional = IncludeOptional ? Weight[O (Start, Index)] : 0;
1158 Vertices[Index].Weight += Optional;
1159 ASSERT (Vertices[Index].Weight >= Optional);
1160 }
1161 }
1162
1163 MinI = Start;
1164 MinWeight = 0;
1165 while (!Vertices[Stop].Visited) {
1166 //
1167 // Update the weight from the shortest vertex to other unvisited vertices
1168 //
1169 for (Index = Start + 1; Index <= Stop; Index++) {
1170 if (!Vertices[Index].Visited) {
1171 Mandatory = Weight[M (MinI, Index)];
1172 if (Mandatory != MAX_WEIGHT) {
1173 Optional = IncludeOptional ? Weight[O (MinI, Index)] : 0;
1174 if (MinWeight + Mandatory + Optional <= Vertices[Index].Weight) {
1175 Vertices[Index].Weight = MinWeight + Mandatory + Optional;
1176 Vertices[Index].Previous = MinI; // Previous is Start based.
1177 }
1178 }
1179 }
1180 }
1181
1182 //
1183 // Find the shortest vertex from Start
1184 //
1185 MinI = VertexCount;
1186 MinWeight = MAX_WEIGHT;
1187 for (Index = Start + 1; Index <= Stop; Index++) {
1188 if (!Vertices[Index].Visited && (MinWeight > Vertices[Index].Weight)) {
1189 MinI = Index;
1190 MinWeight = Vertices[Index].Weight;
1191 }
1192 }
1193
1194 //
1195 // Mark the shortest vertex from Start as visited
1196 //
1197 Vertices[MinI].Visited = TRUE;
1198 }
1199}
1200
1214RETURN_STATUS
1216 IN OUT MTRR_MEMORY_RANGE *Mtrrs,
1217 IN UINT32 MtrrCapacity,
1218 IN OUT UINT32 *MtrrCount,
1219 IN UINT64 BaseAddress,
1220 IN UINT64 Length,
1221 IN MTRR_MEMORY_CACHE_TYPE Type
1222 )
1223{
1224 if (*MtrrCount == MtrrCapacity) {
1226 }
1227
1228 Mtrrs[*MtrrCount].BaseAddress = BaseAddress;
1229 Mtrrs[*MtrrCount].Length = Length;
1230 Mtrrs[*MtrrCount].Type = Type;
1231 (*MtrrCount)++;
1232 return RETURN_SUCCESS;
1233}
1234
1242MTRR_MEMORY_CACHE_TYPE
1244 IN UINT8 TypeBits
1245 )
1246{
1247 INT8 Type;
1248
1249 ASSERT (TypeBits != 0);
1250 for (Type = 7; (INT8)TypeBits > 0; Type--, TypeBits <<= 1) {
1251 }
1252
1253 return (MTRR_MEMORY_CACHE_TYPE)Type;
1254}
1255
1262BOOLEAN
1264 IN UINT64 Operand
1265 )
1266{
1267 ASSERT (Operand != 0);
1268 return (BOOLEAN)((Operand & (Operand - 1)) == 0);
1269}
1270
1293RETURN_STATUS
1295 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1296 IN UINT64 A0,
1297 IN CONST MTRR_MEMORY_RANGE *Ranges,
1298 IN UINTN RangeCount,
1299 IN UINT16 VertexCount,
1300 IN MTRR_LIB_ADDRESS *Vertices,
1301 IN OUT UINT8 *Weight,
1302 IN UINT16 Start,
1303 IN UINT16 Stop,
1304 IN UINT8 Types,
1305 IN UINT8 TypeCount,
1306 IN OUT MTRR_MEMORY_RANGE *Mtrrs OPTIONAL,
1307 IN UINT32 MtrrCapacity OPTIONAL,
1308 IN OUT UINT32 *MtrrCount OPTIONAL
1309 )
1310{
1311 RETURN_STATUS Status;
1312 UINT64 Base;
1313 UINT64 Length;
1314 UINT8 PrecedentTypes;
1315 UINTN Index;
1316 UINT64 HBase;
1317 UINT64 HLength;
1318 UINT64 SubLength;
1319 UINT16 SubStart;
1320 UINT16 SubStop;
1321 UINT16 Cur;
1322 UINT16 Pre;
1323 MTRR_MEMORY_CACHE_TYPE LowestType;
1324 MTRR_MEMORY_CACHE_TYPE LowestPrecedentType;
1325
1326 Base = Vertices[Start].Address;
1327 Length = Vertices[Stop].Address - Base;
1328
1329 LowestType = MtrrLibLowestType (Types);
1330
1331 //
1332 // Clear the lowest type (highest bit) to get the precedent types
1333 //
1334 PrecedentTypes = ~(1 << LowestType) & Types;
1335 LowestPrecedentType = MtrrLibLowestType (PrecedentTypes);
1336
1337 if (Mtrrs == NULL) {
1338 Weight[M (Start, Stop)] = ((LowestType == DefaultType) ? 0 : 1);
1339 Weight[O (Start, Stop)] = ((LowestType == DefaultType) ? 1 : 0);
1340 }
1341
1342 // Add all high level ranges
1343 HBase = MAX_UINT64;
1344 HLength = 0;
1345 for (Index = 0; Index < RangeCount; Index++) {
1346 if (Length == 0) {
1347 break;
1348 }
1349
1350 if ((Base < Ranges[Index].BaseAddress) || (Ranges[Index].BaseAddress + Ranges[Index].Length <= Base)) {
1351 continue;
1352 }
1353
1354 //
1355 // Base is in the Range[Index]
1356 //
1357 if (Base + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1358 SubLength = Ranges[Index].BaseAddress + Ranges[Index].Length - Base;
1359 } else {
1360 SubLength = Length;
1361 }
1362
1363 if (((1 << Ranges[Index].Type) & PrecedentTypes) != 0) {
1364 //
1365 // Meet a range whose types take precedence.
1366 // Update the [HBase, HBase + HLength) to include the range,
1367 // [HBase, HBase + HLength) may contain sub ranges with 2 different types, and both take precedence.
1368 //
1369 if (HBase == MAX_UINT64) {
1370 HBase = Base;
1371 }
1372
1373 HLength += SubLength;
1374 }
1375
1376 Base += SubLength;
1377 Length -= SubLength;
1378
1379 if (HLength == 0) {
1380 continue;
1381 }
1382
1383 if ((Ranges[Index].Type == LowestType) || (Length == 0)) {
1384 // meet low type or end
1385
1386 //
1387 // Add the MTRRs for each high priority type range
1388 // the range[HBase, HBase + HLength) contains only two types.
1389 // We might use positive or subtractive, depending on which way uses less MTRR
1390 //
1391 for (SubStart = Start; SubStart <= Stop; SubStart++) {
1392 if (Vertices[SubStart].Address == HBase) {
1393 break;
1394 }
1395 }
1396
1397 for (SubStop = SubStart; SubStop <= Stop; SubStop++) {
1398 if (Vertices[SubStop].Address == HBase + HLength) {
1399 break;
1400 }
1401 }
1402
1403 ASSERT (Vertices[SubStart].Address == HBase);
1404 ASSERT (Vertices[SubStop].Address == HBase + HLength);
1405
1406 if ((TypeCount == 2) || (SubStart == SubStop - 1)) {
1407 //
1408 // add subtractive MTRRs for [HBase, HBase + HLength)
1409 // [HBase, HBase + HLength) contains only one type.
1410 // while - loop is to split the range to MTRR - compliant aligned range.
1411 //
1412 if (Mtrrs == NULL) {
1413 Weight[M (Start, Stop)] += (UINT8)(SubStop - SubStart);
1414 } else {
1415 while (SubStart != SubStop) {
1416 Status = MtrrLibAppendVariableMtrr (
1417 Mtrrs,
1418 MtrrCapacity,
1419 MtrrCount,
1420 Vertices[SubStart].Address,
1421 Vertices[SubStart].Length,
1422 Vertices[SubStart].Type
1423 );
1424 if (RETURN_ERROR (Status)) {
1425 return Status;
1426 }
1427
1428 SubStart++;
1429 }
1430 }
1431 } else {
1432 ASSERT (TypeCount == 3);
1433 MtrrLibCalculateLeastMtrrs (VertexCount, Vertices, Weight, SubStart, SubStop, TRUE);
1434
1435 if (Mtrrs == NULL) {
1436 Weight[M (Start, Stop)] += Vertices[SubStop].Weight;
1437 } else {
1438 // When we need to collect the optimal path from SubStart to SubStop
1439 while (SubStop != SubStart) {
1440 Cur = SubStop;
1441 Pre = Vertices[Cur].Previous;
1442 SubStop = Pre;
1443
1444 if (Weight[M (Pre, Cur)] + Weight[O (Pre, Cur)] != 0) {
1445 Status = MtrrLibAppendVariableMtrr (
1446 Mtrrs,
1447 MtrrCapacity,
1448 MtrrCount,
1449 Vertices[Pre].Address,
1450 Vertices[Cur].Address - Vertices[Pre].Address,
1451 (Pre != Cur - 1) ? LowestPrecedentType : Vertices[Pre].Type
1452 );
1453 if (RETURN_ERROR (Status)) {
1454 return Status;
1455 }
1456 }
1457
1458 if (Pre != Cur - 1) {
1460 DefaultType,
1461 A0,
1462 Ranges,
1463 RangeCount,
1464 VertexCount,
1465 Vertices,
1466 Weight,
1467 Pre,
1468 Cur,
1469 PrecedentTypes,
1470 2,
1471 Mtrrs,
1472 MtrrCapacity,
1473 MtrrCount
1474 );
1475 if (RETURN_ERROR (Status)) {
1476 return Status;
1477 }
1478 }
1479 }
1480 }
1481 }
1482
1483 //
1484 // Reset HBase, HLength
1485 //
1486 HBase = MAX_UINT64;
1487 HLength = 0;
1488 }
1489 }
1490
1491 return RETURN_SUCCESS;
1492}
1493
1515RETURN_STATUS
1517 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1518 IN UINT64 A0,
1519 IN CONST MTRR_MEMORY_RANGE *Ranges,
1520 IN UINTN RangeCount,
1521 IN VOID *Scratch,
1522 IN OUT UINTN *ScratchSize,
1523 IN OUT MTRR_MEMORY_RANGE *Mtrrs,
1524 IN UINT32 MtrrCapacity,
1525 IN OUT UINT32 *MtrrCount
1526 )
1527{
1528 UINT64 Base0;
1529 UINT64 Base1;
1530 UINTN Index;
1531 UINT64 Base;
1532 UINT64 Length;
1533 UINT64 Alignment;
1534 UINT64 SubLength;
1535 MTRR_LIB_ADDRESS *Vertices;
1536 UINT8 *Weight;
1537 UINT32 VertexIndex;
1538 UINT32 VertexCount;
1539 UINTN RequiredScratchSize;
1540 UINT8 TypeCount;
1541 UINT16 Start;
1542 UINT16 Stop;
1543 UINT8 Type;
1544 RETURN_STATUS Status;
1545
1546 Base0 = Ranges[0].BaseAddress;
1547 Base1 = Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length;
1548 MTRR_LIB_ASSERT_ALIGNED (Base0, Base1 - Base0);
1549
1550 //
1551 // Count the number of vertices.
1552 //
1553 Vertices = (MTRR_LIB_ADDRESS *)Scratch;
1554 for (VertexIndex = 0, Index = 0; Index < RangeCount; Index++) {
1555 Base = Ranges[Index].BaseAddress;
1556 Length = Ranges[Index].Length;
1557 while (Length != 0) {
1558 Alignment = MtrrLibBiggestAlignment (Base, A0);
1559 SubLength = Alignment;
1560 if (SubLength > Length) {
1561 SubLength = GetPowerOfTwo64 (Length);
1562 }
1563
1564 if (VertexIndex < *ScratchSize / sizeof (*Vertices)) {
1565 Vertices[VertexIndex].Address = Base;
1566 Vertices[VertexIndex].Alignment = Alignment;
1567 Vertices[VertexIndex].Type = Ranges[Index].Type;
1568 Vertices[VertexIndex].Length = SubLength;
1569 }
1570
1571 Base += SubLength;
1572 Length -= SubLength;
1573 VertexIndex++;
1574 }
1575 }
1576
1577 //
1578 // Vertices[VertexIndex] = Base1, so whole vertex count is (VertexIndex + 1).
1579 //
1580 VertexCount = VertexIndex + 1;
1581 DEBUG ((
1582 DEBUG_CACHE,
1583 " Count of vertices (%016llx - %016llx) = %d\n",
1584 Ranges[0].BaseAddress,
1585 Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length,
1586 VertexCount
1587 ));
1588 ASSERT (VertexCount < MAX_UINT16);
1589
1590 RequiredScratchSize = VertexCount * sizeof (*Vertices) + VertexCount * VertexCount * sizeof (*Weight);
1591 if (*ScratchSize < RequiredScratchSize) {
1592 *ScratchSize = RequiredScratchSize;
1594 }
1595
1596 Vertices[VertexCount - 1].Address = Base1;
1597
1598 Weight = (UINT8 *)&Vertices[VertexCount];
1599 for (VertexIndex = 0; VertexIndex < VertexCount; VertexIndex++) {
1600 //
1601 // Set optional weight between vertices and self->self to 0
1602 //
1603 SetMem (&Weight[M (VertexIndex, 0)], VertexIndex + 1, 0);
1604 //
1605 // Set mandatory weight between vertices to MAX_WEIGHT
1606 //
1607 SetMem (&Weight[M (VertexIndex, VertexIndex + 1)], VertexCount - VertexIndex - 1, MAX_WEIGHT);
1608
1609 // Final result looks like:
1610 // 00 FF FF FF
1611 // 00 00 FF FF
1612 // 00 00 00 FF
1613 // 00 00 00 00
1614 }
1615
1616 //
1617 // Set mandatory weight and optional weight for adjacent vertices
1618 //
1619 for (VertexIndex = 0; VertexIndex < VertexCount - 1; VertexIndex++) {
1620 if (Vertices[VertexIndex].Type != DefaultType) {
1621 Weight[M (VertexIndex, VertexIndex + 1)] = 1;
1622 Weight[O (VertexIndex, VertexIndex + 1)] = 0;
1623 } else {
1624 Weight[M (VertexIndex, VertexIndex + 1)] = 0;
1625 Weight[O (VertexIndex, VertexIndex + 1)] = 1;
1626 }
1627 }
1628
1629 for (TypeCount = 2; TypeCount <= 3; TypeCount++) {
1630 for (Start = 0; Start < VertexCount; Start++) {
1631 for (Stop = Start + 2; Stop < VertexCount; Stop++) {
1632 ASSERT (Vertices[Stop].Address > Vertices[Start].Address);
1633 Length = Vertices[Stop].Address - Vertices[Start].Address;
1634 if (Length > Vertices[Start].Alignment) {
1635 //
1636 // Pickup a new Start when [Start, Stop) cannot be described by one MTRR.
1637 //
1638 break;
1639 }
1640
1641 if ((Weight[M (Start, Stop)] == MAX_WEIGHT) && MtrrLibIsPowerOfTwo (Length)) {
1643 Ranges,
1644 RangeCount,
1645 Vertices[Start].Address,
1646 Vertices[Stop].Address - Vertices[Start].Address,
1647 &Type
1648 ) == TypeCount)
1649 {
1650 //
1651 // Update the Weight[Start, Stop] using subtractive path.
1652 //
1654 DefaultType,
1655 A0,
1656 Ranges,
1657 RangeCount,
1658 (UINT16)VertexCount,
1659 Vertices,
1660 Weight,
1661 Start,
1662 Stop,
1663 Type,
1664 TypeCount,
1665 NULL,
1666 0,
1667 NULL
1668 );
1669 } else if (TypeCount == 2) {
1670 //
1671 // Pick up a new Start when we expect 2-type range, but 3-type range is met.
1672 // Because no matter how Stop is increased, we always meet 3-type range.
1673 //
1674 break;
1675 }
1676 }
1677 }
1678 }
1679 }
1680
1681 Status = RETURN_SUCCESS;
1682 MtrrLibCalculateLeastMtrrs ((UINT16)VertexCount, Vertices, Weight, 0, (UINT16)VertexCount - 1, FALSE);
1683 Stop = (UINT16)VertexCount - 1;
1684 while (Stop != 0) {
1685 Start = Vertices[Stop].Previous;
1686 TypeCount = MAX_UINT8;
1687 Type = 0;
1688 if (Weight[M (Start, Stop)] != 0) {
1689 TypeCount = MtrrLibGetNumberOfTypes (Ranges, RangeCount, Vertices[Start].Address, Vertices[Stop].Address - Vertices[Start].Address, &Type);
1690 Status = MtrrLibAppendVariableMtrr (
1691 Mtrrs,
1692 MtrrCapacity,
1693 MtrrCount,
1694 Vertices[Start].Address,
1695 Vertices[Stop].Address - Vertices[Start].Address,
1696 MtrrLibLowestType (Type)
1697 );
1698 if (RETURN_ERROR (Status)) {
1699 break;
1700 }
1701 }
1702
1703 if (Start != Stop - 1) {
1704 //
1705 // substractive path
1706 //
1707 if (TypeCount == MAX_UINT8) {
1708 TypeCount = MtrrLibGetNumberOfTypes (
1709 Ranges,
1710 RangeCount,
1711 Vertices[Start].Address,
1712 Vertices[Stop].Address - Vertices[Start].Address,
1713 &Type
1714 );
1715 }
1716
1718 DefaultType,
1719 A0,
1720 Ranges,
1721 RangeCount,
1722 (UINT16)VertexCount,
1723 Vertices,
1724 Weight,
1725 Start,
1726 Stop,
1727 Type,
1728 TypeCount,
1729 Mtrrs,
1730 MtrrCapacity,
1731 MtrrCount
1732 );
1733 if (RETURN_ERROR (Status)) {
1734 break;
1735 }
1736 }
1737
1738 Stop = Start;
1739 }
1740
1741 return Status;
1742}
1743
1756RETURN_STATUS
1758 IN MTRR_FIXED_SETTINGS *Fixed,
1759 IN OUT MTRR_MEMORY_RANGE *Ranges,
1760 IN UINTN RangeCapacity,
1761 IN OUT UINTN *RangeCount
1762 )
1763{
1764 RETURN_STATUS Status;
1765 UINTN MsrIndex;
1766 UINTN Index;
1767 MTRR_MEMORY_CACHE_TYPE MemoryType;
1768 UINT64 Base;
1769
1770 Base = 0;
1771 for (MsrIndex = 0; MsrIndex < ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
1772 ASSERT (Base == mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress);
1773 for (Index = 0; Index < sizeof (UINT64); Index++) {
1774 MemoryType = (MTRR_MEMORY_CACHE_TYPE)((UINT8 *)(&Fixed->Mtrr[MsrIndex]))[Index];
1775 Status = MtrrLibSetMemoryType (
1776 Ranges,
1777 RangeCapacity,
1778 RangeCount,
1779 Base,
1780 mMtrrLibFixedMtrrTable[MsrIndex].Length,
1781 MemoryType
1782 );
1783 if (Status == RETURN_OUT_OF_RESOURCES) {
1784 return Status;
1785 }
1786
1787 Base += mMtrrLibFixedMtrrTable[MsrIndex].Length;
1788 }
1789 }
1790
1791 ASSERT (Base == BASE_1MB);
1792 return RETURN_SUCCESS;
1793}
1794
1807RETURN_STATUS
1809 IN CONST MTRR_MEMORY_RANGE *VariableMtrr,
1810 IN UINT32 VariableMtrrCount,
1811 IN OUT MTRR_MEMORY_RANGE *Ranges,
1812 IN UINTN RangeCapacity,
1813 IN OUT UINTN *RangeCount
1814 )
1815{
1816 RETURN_STATUS Status;
1817 UINTN Index;
1818
1819 //
1820 // WT > WB
1821 // UC > *
1822 // UC > * (except WB, UC) > WB
1823 //
1824
1825 //
1826 // 1. Set WB
1827 //
1828 for (Index = 0; Index < VariableMtrrCount; Index++) {
1829 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheWriteBack)) {
1830 Status = MtrrLibSetMemoryType (
1831 Ranges,
1832 RangeCapacity,
1833 RangeCount,
1834 VariableMtrr[Index].BaseAddress,
1835 VariableMtrr[Index].Length,
1836 VariableMtrr[Index].Type
1837 );
1838 if (Status == RETURN_OUT_OF_RESOURCES) {
1839 return Status;
1840 }
1841 }
1842 }
1843
1844 //
1845 // 2. Set other types than WB or UC
1846 //
1847 for (Index = 0; Index < VariableMtrrCount; Index++) {
1848 if ((VariableMtrr[Index].Length != 0) &&
1849 (VariableMtrr[Index].Type != CacheWriteBack) && (VariableMtrr[Index].Type != CacheUncacheable))
1850 {
1851 Status = MtrrLibSetMemoryType (
1852 Ranges,
1853 RangeCapacity,
1854 RangeCount,
1855 VariableMtrr[Index].BaseAddress,
1856 VariableMtrr[Index].Length,
1857 VariableMtrr[Index].Type
1858 );
1859 if (Status == RETURN_OUT_OF_RESOURCES) {
1860 return Status;
1861 }
1862 }
1863 }
1864
1865 //
1866 // 3. Set UC
1867 //
1868 for (Index = 0; Index < VariableMtrrCount; Index++) {
1869 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheUncacheable)) {
1870 Status = MtrrLibSetMemoryType (
1871 Ranges,
1872 RangeCapacity,
1873 RangeCount,
1874 VariableMtrr[Index].BaseAddress,
1875 VariableMtrr[Index].Length,
1876 VariableMtrr[Index].Type
1877 );
1878 if (Status == RETURN_OUT_OF_RESOURCES) {
1879 return Status;
1880 }
1881 }
1882 }
1883
1884 return RETURN_SUCCESS;
1885}
1886
1896UINT8
1898 IN CONST MTRR_MEMORY_RANGE *Ranges,
1899 IN UINTN RangeCount
1900 )
1901{
1902 ASSERT (RangeCount != 0);
1903
1904 switch (Ranges[0].Type) {
1905 case CacheWriteBack:
1906 case CacheWriteThrough:
1907 return (1 << CacheWriteBack) | (1 << CacheWriteThrough) | (1 << CacheUncacheable);
1908 break;
1909
1910 case CacheWriteCombining:
1911 case CacheWriteProtected:
1912 return (1 << Ranges[0].Type) | (1 << CacheUncacheable);
1913 break;
1914
1915 case CacheUncacheable:
1916 if (RangeCount == 1) {
1917 return (1 << CacheUncacheable);
1918 }
1919
1920 return MtrrLibGetCompatibleTypes (&Ranges[1], RangeCount - 1);
1921 break;
1922
1923 case CacheInvalid:
1924 default:
1925 ASSERT (FALSE);
1926 break;
1927 }
1928
1929 return 0;
1930}
1931
1943VOID
1945 MTRR_MEMORY_RANGE *DstMtrrs,
1946 UINT32 DstMtrrCount,
1947 MTRR_MEMORY_RANGE *SrcMtrrs,
1948 UINT32 SrcMtrrCount,
1949 BOOLEAN *Modified
1950 )
1951{
1952 UINT32 DstIndex;
1953 UINT32 SrcIndex;
1954
1955 ASSERT (SrcMtrrCount <= DstMtrrCount);
1956
1957 for (DstIndex = 0; DstIndex < DstMtrrCount; DstIndex++) {
1958 Modified[DstIndex] = FALSE;
1959
1960 if (DstMtrrs[DstIndex].Length == 0) {
1961 continue;
1962 }
1963
1964 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
1965 if ((DstMtrrs[DstIndex].BaseAddress == SrcMtrrs[SrcIndex].BaseAddress) &&
1966 (DstMtrrs[DstIndex].Length == SrcMtrrs[SrcIndex].Length) &&
1967 (DstMtrrs[DstIndex].Type == SrcMtrrs[SrcIndex].Type))
1968 {
1969 break;
1970 }
1971 }
1972
1973 if (SrcIndex == SrcMtrrCount) {
1974 //
1975 // Remove the one from DstMtrrs which is not in SrcMtrrs
1976 //
1977 DstMtrrs[DstIndex].Length = 0;
1978 Modified[DstIndex] = TRUE;
1979 } else {
1980 //
1981 // Remove the one from SrcMtrrs which is also in DstMtrrs
1982 //
1983 SrcMtrrs[SrcIndex].Length = 0;
1984 }
1985 }
1986
1987 //
1988 // Now valid MTRR only exists in either DstMtrrs or SrcMtrrs.
1989 // Merge MTRRs from SrcMtrrs to DstMtrrs
1990 //
1991 DstIndex = 0;
1992 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
1993 if (SrcMtrrs[SrcIndex].Length != 0) {
1994 //
1995 // Find the empty slot in DstMtrrs
1996 //
1997 while (DstIndex < DstMtrrCount) {
1998 if (DstMtrrs[DstIndex].Length == 0) {
1999 break;
2000 }
2001
2002 DstIndex++;
2003 }
2004
2005 ASSERT (DstIndex < DstMtrrCount);
2006 CopyMem (&DstMtrrs[DstIndex], &SrcMtrrs[SrcIndex], sizeof (SrcMtrrs[0]));
2007 Modified[DstIndex] = TRUE;
2008 }
2009 }
2010}
2011
2031RETURN_STATUS
2033 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
2034 IN UINT64 A0,
2035 IN MTRR_MEMORY_RANGE *Ranges,
2036 IN UINTN RangeCount,
2037 IN VOID *Scratch,
2038 IN OUT UINTN *ScratchSize,
2039 OUT MTRR_MEMORY_RANGE *VariableMtrr,
2040 IN UINT32 VariableMtrrCapacity,
2041 OUT UINT32 *VariableMtrrCount
2042 )
2043{
2044 RETURN_STATUS Status;
2045 UINT32 Index;
2046 UINT64 Base0;
2047 UINT64 Base1;
2048 UINT64 Alignment;
2049 UINT8 CompatibleTypes;
2050 UINT64 Length;
2051 UINT32 End;
2052 UINTN ActualScratchSize;
2053 UINTN BiggestScratchSize;
2054
2055 *VariableMtrrCount = 0;
2056
2057 //
2058 // Since the whole ranges need multiple calls of MtrrLibCalculateMtrrs().
2059 // Each call needs different scratch buffer size.
2060 // When the provided scratch buffer size is not sufficient in any call,
2061 // set the GetActualScratchSize to TRUE, and following calls will only
2062 // calculate the actual scratch size for the caller.
2063 //
2064 BiggestScratchSize = 0;
2065
2066 for (Index = 0; Index < RangeCount;) {
2067 Base0 = Ranges[Index].BaseAddress;
2068
2069 //
2070 // Full step is optimal
2071 //
2072 while (Index < RangeCount) {
2073 ASSERT (Ranges[Index].BaseAddress == Base0);
2074 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2075 while (Base0 + Alignment <= Ranges[Index].BaseAddress + Ranges[Index].Length) {
2076 if ((BiggestScratchSize <= *ScratchSize) && (Ranges[Index].Type != DefaultType)) {
2077 Status = MtrrLibAppendVariableMtrr (
2078 VariableMtrr,
2079 VariableMtrrCapacity,
2080 VariableMtrrCount,
2081 Base0,
2082 Alignment,
2083 Ranges[Index].Type
2084 );
2085 if (RETURN_ERROR (Status)) {
2086 return Status;
2087 }
2088 }
2089
2090 Base0 += Alignment;
2091 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2092 }
2093
2094 //
2095 // Remove the above range from Ranges[Index]
2096 //
2097 Ranges[Index].Length -= Base0 - Ranges[Index].BaseAddress;
2098 Ranges[Index].BaseAddress = Base0;
2099 if (Ranges[Index].Length != 0) {
2100 break;
2101 } else {
2102 Index++;
2103 }
2104 }
2105
2106 if (Index == RangeCount) {
2107 break;
2108 }
2109
2110 //
2111 // Find continous ranges [Base0, Base1) which could be combined by MTRR.
2112 // Per SDM, the compatible types between[B0, B1) are:
2113 // UC, *
2114 // WB, WT
2115 // UC, WB, WT
2116 //
2117 CompatibleTypes = MtrrLibGetCompatibleTypes (&Ranges[Index], RangeCount - Index);
2118
2119 End = Index; // End points to last one that matches the CompatibleTypes.
2120 while (End + 1 < RangeCount) {
2121 if (((1 << Ranges[End + 1].Type) & CompatibleTypes) == 0) {
2122 break;
2123 }
2124
2125 End++;
2126 }
2127
2128 Alignment = MtrrLibBiggestAlignment (Base0, A0);
2129 Length = GetPowerOfTwo64 (Ranges[End].BaseAddress + Ranges[End].Length - Base0);
2130 Base1 = Base0 + MIN (Alignment, Length);
2131
2132 //
2133 // Base1 may not in Ranges[End]. Update End to the range Base1 belongs to.
2134 //
2135 End = Index;
2136 while (End + 1 < RangeCount) {
2137 if (Base1 <= Ranges[End + 1].BaseAddress) {
2138 break;
2139 }
2140
2141 End++;
2142 }
2143
2144 Length = Ranges[End].Length;
2145 Ranges[End].Length = Base1 - Ranges[End].BaseAddress;
2146 ActualScratchSize = *ScratchSize;
2147 Status = MtrrLibCalculateMtrrs (
2148 DefaultType,
2149 A0,
2150 &Ranges[Index],
2151 End + 1 - Index,
2152 Scratch,
2153 &ActualScratchSize,
2154 VariableMtrr,
2155 VariableMtrrCapacity,
2156 VariableMtrrCount
2157 );
2158 if (Status == RETURN_BUFFER_TOO_SMALL) {
2159 BiggestScratchSize = MAX (BiggestScratchSize, ActualScratchSize);
2160 //
2161 // Ignore this error, because we need to calculate the biggest
2162 // scratch buffer size.
2163 //
2164 Status = RETURN_SUCCESS;
2165 }
2166
2167 if (RETURN_ERROR (Status)) {
2168 return Status;
2169 }
2170
2171 if (Length != Ranges[End].Length) {
2172 Ranges[End].BaseAddress = Base1;
2173 Ranges[End].Length = Length - Ranges[End].Length;
2174 Index = End;
2175 } else {
2176 Index = End + 1;
2177 }
2178 }
2179
2180 if (*ScratchSize < BiggestScratchSize) {
2181 *ScratchSize = BiggestScratchSize;
2183 }
2184
2185 return RETURN_SUCCESS;
2186}
2187
2202RETURN_STATUS
2204 IN OUT UINT64 *ClearMasks,
2205 IN OUT UINT64 *OrMasks,
2206 IN PHYSICAL_ADDRESS BaseAddress,
2207 IN UINT64 Length,
2208 IN MTRR_MEMORY_CACHE_TYPE Type
2209 )
2210{
2211 RETURN_STATUS Status;
2212 UINT32 MsrIndex;
2213 UINT64 ClearMask;
2214 UINT64 OrMask;
2215
2216 ASSERT (BaseAddress < BASE_1MB);
2217
2218 MsrIndex = (UINT32)-1;
2219 while ((BaseAddress < BASE_1MB) && (Length != 0)) {
2220 Status = MtrrLibProgramFixedMtrr (Type, &BaseAddress, &Length, &MsrIndex, &ClearMask, &OrMask);
2221 if (RETURN_ERROR (Status)) {
2222 return Status;
2223 }
2224
2225 ClearMasks[MsrIndex] = ClearMasks[MsrIndex] | ClearMask;
2226 OrMasks[MsrIndex] = (OrMasks[MsrIndex] & ~ClearMask) | OrMask;
2227 }
2228
2229 return RETURN_SUCCESS;
2230}
2231
2258RETURN_STATUS
2259EFIAPI
2261 IN OUT MTRR_SETTINGS *MtrrSetting,
2262 IN VOID *Scratch,
2263 IN OUT UINTN *ScratchSize,
2264 IN CONST MTRR_MEMORY_RANGE *Ranges,
2265 IN UINTN RangeCount
2266 )
2267{
2268 RETURN_STATUS Status;
2269 UINT32 Index;
2270 UINT64 BaseAddress;
2271 UINT64 Length;
2272 BOOLEAN Above1MbExist;
2273
2274 UINT64 MtrrValidBitsMask;
2275 UINT64 MtrrValidAddressMask;
2276 MTRR_MEMORY_CACHE_TYPE DefaultType;
2277 MTRR_VARIABLE_SETTINGS VariableSettings;
2278 MTRR_MEMORY_RANGE WorkingRanges[2 * ARRAY_SIZE (MtrrSetting->Variables.Mtrr) + 2];
2279 UINTN WorkingRangeCount;
2280 BOOLEAN Modified;
2281 MTRR_VARIABLE_SETTING VariableSetting;
2282 UINT32 OriginalVariableMtrrCount;
2283 UINT32 FirmwareVariableMtrrCount;
2284 UINT32 WorkingVariableMtrrCount;
2285 MTRR_MEMORY_RANGE OriginalVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2286 MTRR_MEMORY_RANGE WorkingVariableMtrr[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2287 BOOLEAN VariableSettingModified[ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2288
2289 UINT64 ClearMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2290 UINT64 OrMasks[ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2291
2292 MTRR_CONTEXT MtrrContext;
2293 BOOLEAN MtrrContextValid;
2294
2295 Status = RETURN_SUCCESS;
2296 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
2297
2298 //
2299 // TRUE indicating the accordingly Variable setting needs modificaiton in OriginalVariableMtrr.
2300 //
2301 SetMem (VariableSettingModified, ARRAY_SIZE (VariableSettingModified), FALSE);
2302
2303 //
2304 // TRUE indicating the caller requests to set variable MTRRs.
2305 //
2306 Above1MbExist = FALSE;
2307 OriginalVariableMtrrCount = 0;
2308
2309 //
2310 // 0. Dump the requests.
2311 //
2313 DEBUG ((
2314 DEBUG_CACHE,
2315 "Mtrr: Set Mem Attribute to %a, ScratchSize = %x%a",
2316 (MtrrSetting == NULL) ? "Hardware" : "Buffer",
2317 *ScratchSize,
2318 (RangeCount <= 1) ? "," : "\n"
2319 ));
2320 for (Index = 0; Index < RangeCount; Index++) {
2321 DEBUG ((
2322 DEBUG_CACHE,
2323 " %a: [%016lx, %016lx)\n",
2324 mMtrrMemoryCacheTypeShortName[MIN (Ranges[Index].Type, CacheInvalid)],
2325 Ranges[Index].BaseAddress,
2326 Ranges[Index].BaseAddress + Ranges[Index].Length
2327 ));
2328 }
2329
2330 DEBUG_CODE_END ();
2331
2332 //
2333 // 1. Validate the parameters.
2334 //
2335 if (!IsMtrrSupported ()) {
2336 Status = RETURN_UNSUPPORTED;
2337 goto Exit;
2338 }
2339
2340 for (Index = 0; Index < RangeCount; Index++) {
2341 if (Ranges[Index].Length == 0) {
2342 Status = RETURN_INVALID_PARAMETER;
2343 goto Exit;
2344 }
2345
2346 if (((Ranges[Index].BaseAddress & ~MtrrValidAddressMask) != 0) ||
2347 ((((Ranges[Index].BaseAddress + Ranges[Index].Length) & ~MtrrValidAddressMask) != 0) &&
2348 ((Ranges[Index].BaseAddress + Ranges[Index].Length) != MtrrValidBitsMask + 1))
2349 )
2350 {
2351 //
2352 // Either the BaseAddress or the Limit doesn't follow the alignment requirement.
2353 // Note: It's still valid if Limit doesn't follow the alignment requirement but equals to MAX Address.
2354 //
2355 Status = RETURN_UNSUPPORTED;
2356 goto Exit;
2357 }
2358
2359 if ((Ranges[Index].Type != CacheUncacheable) &&
2360 (Ranges[Index].Type != CacheWriteCombining) &&
2361 (Ranges[Index].Type != CacheWriteThrough) &&
2362 (Ranges[Index].Type != CacheWriteProtected) &&
2363 (Ranges[Index].Type != CacheWriteBack))
2364 {
2365 Status = RETURN_INVALID_PARAMETER;
2366 goto Exit;
2367 }
2368
2369 if (Ranges[Index].BaseAddress + Ranges[Index].Length > BASE_1MB) {
2370 Above1MbExist = TRUE;
2371 }
2372 }
2373
2374 //
2375 // 2. Apply the above-1MB memory attribute settings.
2376 //
2377 if (Above1MbExist) {
2378 //
2379 // 2.1. Read all variable MTRRs and convert to Ranges.
2380 //
2381 OriginalVariableMtrrCount = GetVariableMtrrCountWorker ();
2382 MtrrGetVariableMtrrWorker (MtrrSetting, OriginalVariableMtrrCount, &VariableSettings);
2384 &VariableSettings,
2385 OriginalVariableMtrrCount,
2386 MtrrValidBitsMask,
2387 MtrrValidAddressMask,
2388 OriginalVariableMtrr
2389 );
2390
2391 DefaultType = MtrrGetDefaultMemoryTypeWorker (MtrrSetting);
2392 WorkingRangeCount = 1;
2393 WorkingRanges[0].BaseAddress = 0;
2394 WorkingRanges[0].Length = MtrrValidBitsMask + 1;
2395 WorkingRanges[0].Type = DefaultType;
2396
2397 Status = MtrrLibApplyVariableMtrrs (
2398 OriginalVariableMtrr,
2399 OriginalVariableMtrrCount,
2400 WorkingRanges,
2401 ARRAY_SIZE (WorkingRanges),
2402 &WorkingRangeCount
2403 );
2404 ASSERT_RETURN_ERROR (Status);
2405
2406 ASSERT (OriginalVariableMtrrCount >= PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs));
2407 FirmwareVariableMtrrCount = OriginalVariableMtrrCount - PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
2408 ASSERT (WorkingRangeCount <= 2 * FirmwareVariableMtrrCount + 1);
2409
2410 //
2411 // 2.2. Force [0, 1M) to UC, so that it doesn't impact subtraction algorithm.
2412 //
2413 Status = MtrrLibSetMemoryType (
2414 WorkingRanges,
2415 ARRAY_SIZE (WorkingRanges),
2416 &WorkingRangeCount,
2417 0,
2418 SIZE_1MB,
2419 CacheUncacheable
2420 );
2421 ASSERT (Status != RETURN_OUT_OF_RESOURCES);
2422
2423 //
2424 // 2.3. Apply the new memory attribute settings to Ranges.
2425 //
2426 Modified = FALSE;
2427 for (Index = 0; Index < RangeCount; Index++) {
2428 BaseAddress = Ranges[Index].BaseAddress;
2429 Length = Ranges[Index].Length;
2430 if (BaseAddress < BASE_1MB) {
2431 if (Length <= BASE_1MB - BaseAddress) {
2432 continue;
2433 }
2434
2435 Length -= BASE_1MB - BaseAddress;
2436 BaseAddress = BASE_1MB;
2437 }
2438
2439 Status = MtrrLibSetMemoryType (
2440 WorkingRanges,
2441 ARRAY_SIZE (WorkingRanges),
2442 &WorkingRangeCount,
2443 BaseAddress,
2444 Length,
2445 Ranges[Index].Type
2446 );
2447 if (Status == RETURN_ALREADY_STARTED) {
2448 Status = RETURN_SUCCESS;
2449 } else if (Status == RETURN_OUT_OF_RESOURCES) {
2450 goto Exit;
2451 } else {
2452 ASSERT_RETURN_ERROR (Status);
2453 Modified = TRUE;
2454 }
2455 }
2456
2457 if (Modified) {
2458 //
2459 // 2.4. Calculate the Variable MTRR settings based on the Ranges.
2460 // Buffer Too Small may be returned if the scratch buffer size is insufficient.
2461 //
2462 Status = MtrrLibSetMemoryRanges (
2463 DefaultType,
2464 LShiftU64 (1, (UINTN)HighBitSet64 (MtrrValidBitsMask)),
2465 WorkingRanges,
2466 WorkingRangeCount,
2467 Scratch,
2468 ScratchSize,
2469 WorkingVariableMtrr,
2470 FirmwareVariableMtrrCount + 1,
2471 &WorkingVariableMtrrCount
2472 );
2473 if (RETURN_ERROR (Status)) {
2474 goto Exit;
2475 }
2476
2477 //
2478 // 2.5. Remove the [0, 1MB) MTRR if it still exists (not merged with other range)
2479 //
2480 for (Index = 0; Index < WorkingVariableMtrrCount; Index++) {
2481 if ((WorkingVariableMtrr[Index].BaseAddress == 0) && (WorkingVariableMtrr[Index].Length == SIZE_1MB)) {
2482 ASSERT (WorkingVariableMtrr[Index].Type == CacheUncacheable);
2483 WorkingVariableMtrrCount--;
2484 CopyMem (
2485 &WorkingVariableMtrr[Index],
2486 &WorkingVariableMtrr[Index + 1],
2487 (WorkingVariableMtrrCount - Index) * sizeof (WorkingVariableMtrr[0])
2488 );
2489 break;
2490 }
2491 }
2492
2493 if (WorkingVariableMtrrCount > FirmwareVariableMtrrCount) {
2494 Status = RETURN_OUT_OF_RESOURCES;
2495 goto Exit;
2496 }
2497
2498 //
2499 // 2.6. Merge the WorkingVariableMtrr to OriginalVariableMtrr
2500 // Make sure least modification is made to OriginalVariableMtrr.
2501 //
2503 OriginalVariableMtrr,
2504 OriginalVariableMtrrCount,
2505 WorkingVariableMtrr,
2506 WorkingVariableMtrrCount,
2507 VariableSettingModified
2508 );
2509 }
2510 }
2511
2512 //
2513 // 3. Apply the below-1MB memory attribute settings.
2514 //
2515 // (Value & ~0 | 0) still equals to (Value)
2516 //
2517 ZeroMem (ClearMasks, sizeof (ClearMasks));
2518 ZeroMem (OrMasks, sizeof (OrMasks));
2519 for (Index = 0; Index < RangeCount; Index++) {
2520 if (Ranges[Index].BaseAddress >= BASE_1MB) {
2521 continue;
2522 }
2523
2525 ClearMasks,
2526 OrMasks,
2527 Ranges[Index].BaseAddress,
2528 Ranges[Index].Length,
2529 Ranges[Index].Type
2530 );
2531 if (RETURN_ERROR (Status)) {
2532 goto Exit;
2533 }
2534 }
2535
2536 MtrrContextValid = FALSE;
2537 //
2538 // 4. Write fixed MTRRs that have been modified
2539 //
2540 for (Index = 0; Index < ARRAY_SIZE (ClearMasks); Index++) {
2541 if (ClearMasks[Index] != 0) {
2542 if (MtrrSetting != NULL) {
2543 MtrrSetting->Fixed.Mtrr[Index] = (MtrrSetting->Fixed.Mtrr[Index] & ~ClearMasks[Index]) | OrMasks[Index];
2544 } else {
2545 if (!MtrrContextValid) {
2546 MtrrLibPreMtrrChange (&MtrrContext);
2547 MtrrContextValid = TRUE;
2548 }
2549
2550 AsmMsrAndThenOr64 (mMtrrLibFixedMtrrTable[Index].Msr, ~ClearMasks[Index], OrMasks[Index]);
2551 }
2552 }
2553 }
2554
2555 //
2556 // 5. Write variable MTRRs that have been modified
2557 //
2558 for (Index = 0; Index < OriginalVariableMtrrCount; Index++) {
2559 if (VariableSettingModified[Index]) {
2560 if (OriginalVariableMtrr[Index].Length != 0) {
2561 VariableSetting.Base = (OriginalVariableMtrr[Index].BaseAddress & MtrrValidAddressMask)
2562 | (UINT8)OriginalVariableMtrr[Index].Type;
2563 VariableSetting.Mask = ((~(OriginalVariableMtrr[Index].Length - 1)) & MtrrValidAddressMask) | BIT11;
2564 } else {
2565 VariableSetting.Base = 0;
2566 VariableSetting.Mask = 0;
2567 }
2568
2569 if (MtrrSetting != NULL) {
2570 CopyMem (&MtrrSetting->Variables.Mtrr[Index], &VariableSetting, sizeof (VariableSetting));
2571 } else {
2572 if (!MtrrContextValid) {
2573 MtrrLibPreMtrrChange (&MtrrContext);
2574 MtrrContextValid = TRUE;
2575 }
2576
2578 MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),
2579 VariableSetting.Base
2580 );
2582 MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),
2583 VariableSetting.Mask
2584 );
2585 }
2586 }
2587 }
2588
2589 if (MtrrSetting != NULL) {
2590 ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.E = 1;
2591 ((MSR_IA32_MTRR_DEF_TYPE_REGISTER *)&MtrrSetting->MtrrDefType)->Bits.FE = 1;
2592 } else {
2593 if (MtrrContextValid) {
2594 MtrrLibPostMtrrChange (&MtrrContext);
2595 }
2596 }
2597
2598Exit:
2599 DEBUG ((DEBUG_CACHE, " Result = %r\n", Status));
2600 if (!RETURN_ERROR (Status)) {
2601 MtrrDebugPrintAllMtrrsWorker (MtrrSetting);
2602 }
2603
2604 return Status;
2605}
2606
2636RETURN_STATUS
2637EFIAPI
2639 IN OUT MTRR_SETTINGS *MtrrSetting,
2640 IN PHYSICAL_ADDRESS BaseAddress,
2641 IN UINT64 Length,
2642 IN MTRR_MEMORY_CACHE_TYPE Attribute
2643 )
2644{
2645 UINT8 Scratch[SCRATCH_BUFFER_SIZE];
2646 UINTN ScratchSize;
2647 MTRR_MEMORY_RANGE Range;
2648
2649 Range.BaseAddress = BaseAddress;
2650 Range.Length = Length;
2651 Range.Type = Attribute;
2652 ScratchSize = sizeof (Scratch);
2653 return MtrrSetMemoryAttributesInMtrrSettings (MtrrSetting, Scratch, &ScratchSize, &Range, 1);
2654}
2655
2689RETURN_STATUS
2690EFIAPI
2692 IN PHYSICAL_ADDRESS BaseAddress,
2693 IN UINT64 Length,
2694 IN MTRR_MEMORY_CACHE_TYPE Attribute
2695 )
2696{
2697 return MtrrSetMemoryAttributeInMtrrSettings (NULL, BaseAddress, Length, Attribute);
2698}
2699
2706VOID
2708 IN MTRR_VARIABLE_SETTINGS *VariableSettings
2709 )
2710{
2711 UINT32 Index;
2712 UINT32 VariableMtrrCount;
2713
2714 VariableMtrrCount = GetVariableMtrrCountWorker ();
2715 ASSERT (VariableMtrrCount <= ARRAY_SIZE (VariableSettings->Mtrr));
2716
2717 for (Index = 0; Index < VariableMtrrCount; Index++) {
2719 MSR_IA32_MTRR_PHYSBASE0 + (Index << 1),
2720 VariableSettings->Mtrr[Index].Base
2721 );
2723 MSR_IA32_MTRR_PHYSMASK0 + (Index << 1),
2724 VariableSettings->Mtrr[Index].Mask
2725 );
2726 }
2727}
2728
2735VOID
2737 IN MTRR_FIXED_SETTINGS *FixedSettings
2738 )
2739{
2740 UINT32 Index;
2741
2742 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
2744 mMtrrLibFixedMtrrTable[Index].Msr,
2745 FixedSettings->Mtrr[Index]
2746 );
2747 }
2748}
2749
2759EFIAPI
2761 OUT MTRR_SETTINGS *MtrrSetting
2762 )
2763{
2764 if (!IsMtrrSupported ()) {
2765 return MtrrSetting;
2766 }
2767
2768 //
2769 // Get fixed MTRRs
2770 //
2771 MtrrGetFixedMtrrWorker (&MtrrSetting->Fixed);
2772
2773 //
2774 // Get variable MTRRs
2775 //
2777 NULL,
2779 &MtrrSetting->Variables
2780 );
2781
2782 //
2783 // Get MTRR_DEF_TYPE value
2784 //
2785 MtrrSetting->MtrrDefType = AsmReadMsr64 (MSR_IA32_MTRR_DEF_TYPE);
2786
2787 return MtrrSetting;
2788}
2789
2799EFIAPI
2801 IN MTRR_SETTINGS *MtrrSetting
2802 )
2803{
2804 MTRR_CONTEXT MtrrContext;
2805
2806 if (!IsMtrrSupported ()) {
2807 return MtrrSetting;
2808 }
2809
2810 MtrrLibPreMtrrChange (&MtrrContext);
2811
2812 //
2813 // Set fixed MTRRs
2814 //
2815 MtrrSetFixedMtrrWorker (&MtrrSetting->Fixed);
2816
2817 //
2818 // Set variable MTRRs
2819 //
2820 MtrrSetVariableMtrrWorker (&MtrrSetting->Variables);
2821
2822 //
2823 // Set MTRR_DEF_TYPE value
2824 //
2825 AsmWriteMsr64 (MSR_IA32_MTRR_DEF_TYPE, MtrrSetting->MtrrDefType);
2826
2827 MtrrLibPostMtrrChangeEnableCache (&MtrrContext);
2828
2829 return MtrrSetting;
2830}
2831
2839BOOLEAN
2840EFIAPI
2842 VOID
2843 )
2844{
2847
2848 //
2849 // Check CPUID(1).EDX[12] for MTRR capability
2850 //
2851 AsmCpuid (CPUID_VERSION_INFO, NULL, NULL, NULL, &Edx.Uint32);
2852 if (Edx.Bits.MTRR == 0) {
2853 return FALSE;
2854 }
2855
2856 //
2857 // Check number of variable MTRRs and fixed MTRRs existence.
2858 // If number of variable MTRRs is zero, or fixed MTRRs do not
2859 // exist, return false.
2860 //
2862 if ((MtrrCap.Bits.VCNT == 0) || (MtrrCap.Bits.FIX == 0)) {
2863 return FALSE;
2864 }
2865
2866 return TRUE;
2867}
2868
2878VOID
2880 IN MTRR_SETTINGS *MtrrSetting
2881 )
2882{
2884 MTRR_SETTINGS LocalMtrrs;
2885 MTRR_SETTINGS *Mtrrs;
2886 UINTN Index;
2887 UINTN RangeCount;
2888 UINT64 MtrrValidBitsMask;
2889 UINT64 MtrrValidAddressMask;
2890 UINT32 VariableMtrrCount;
2891 BOOLEAN ContainVariableMtrr;
2892 MTRR_MEMORY_RANGE Ranges[
2893 ARRAY_SIZE (mMtrrLibFixedMtrrTable) * sizeof (UINT64) + 2 * ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
2894 ];
2895 MTRR_MEMORY_RANGE RawVariableRanges[ARRAY_SIZE (Mtrrs->Variables.Mtrr)];
2896
2897 if (!IsMtrrSupported ()) {
2898 return;
2899 }
2900
2901 VariableMtrrCount = GetVariableMtrrCountWorker ();
2902
2903 if (MtrrSetting != NULL) {
2904 Mtrrs = MtrrSetting;
2905 } else {
2906 MtrrGetAllMtrrs (&LocalMtrrs);
2907 Mtrrs = &LocalMtrrs;
2908 }
2909
2910 //
2911 // Dump RAW MTRR contents
2912 //
2913 DEBUG ((DEBUG_CACHE, "MTRR Settings:\n"));
2914 DEBUG ((DEBUG_CACHE, "=============\n"));
2915 DEBUG ((DEBUG_CACHE, "MTRR Default Type: %016lx\n", Mtrrs->MtrrDefType));
2916 for (Index = 0; Index < ARRAY_SIZE (mMtrrLibFixedMtrrTable); Index++) {
2917 DEBUG ((DEBUG_CACHE, "Fixed MTRR[%02d] : %016lx\n", Index, Mtrrs->Fixed.Mtrr[Index]));
2918 }
2919
2920 ContainVariableMtrr = FALSE;
2921 for (Index = 0; Index < VariableMtrrCount; Index++) {
2922 if ((Mtrrs->Variables.Mtrr[Index].Mask & BIT11) == 0) {
2923 //
2924 // If mask is not valid, then do not display range
2925 //
2926 continue;
2927 }
2928
2929 ContainVariableMtrr = TRUE;
2930 DEBUG ((
2931 DEBUG_CACHE,
2932 "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
2933 Index,
2934 Mtrrs->Variables.Mtrr[Index].Base,
2935 Mtrrs->Variables.Mtrr[Index].Mask
2936 ));
2937 }
2938
2939 if (!ContainVariableMtrr) {
2940 DEBUG ((DEBUG_CACHE, "Variable MTRR : None.\n"));
2941 }
2942
2943 DEBUG ((DEBUG_CACHE, "\n"));
2944
2945 //
2946 // Dump MTRR setting in ranges
2947 //
2948 DEBUG ((DEBUG_CACHE, "Memory Ranges:\n"));
2949 DEBUG ((DEBUG_CACHE, "====================================\n"));
2950 MtrrLibInitializeMtrrMask (&MtrrValidBitsMask, &MtrrValidAddressMask);
2951 Ranges[0].BaseAddress = 0;
2952 Ranges[0].Length = MtrrValidBitsMask + 1;
2953 Ranges[0].Type = MtrrGetDefaultMemoryTypeWorker (Mtrrs);
2954 RangeCount = 1;
2955
2957 &Mtrrs->Variables,
2958 VariableMtrrCount,
2959 MtrrValidBitsMask,
2960 MtrrValidAddressMask,
2961 RawVariableRanges
2962 );
2964 RawVariableRanges,
2965 VariableMtrrCount,
2966 Ranges,
2967 ARRAY_SIZE (Ranges),
2968 &RangeCount
2969 );
2970
2971 MtrrLibApplyFixedMtrrs (&Mtrrs->Fixed, Ranges, ARRAY_SIZE (Ranges), &RangeCount);
2972
2973 for (Index = 0; Index < RangeCount; Index++) {
2974 DEBUG ((
2975 DEBUG_CACHE,
2976 "%a:%016lx-%016lx\n",
2977 mMtrrMemoryCacheTypeShortName[Ranges[Index].Type],
2978 Ranges[Index].BaseAddress,
2979 Ranges[Index].BaseAddress + Ranges[Index].Length - 1
2980 ));
2981 }
2982
2983 DEBUG_CODE_END ();
2984}
2985
2989VOID
2990EFIAPI
2992 VOID
2993 )
2994{
2996}
UINT64 UINTN
#define NULL
Definition: Base.h:312
#define CONST
Definition: Base.h:259
#define RETURN_BUFFER_TOO_SMALL
Definition: Base.h:989
#define RETURN_ERROR(StatusCode)
Definition: Base.h:957
#define MIN(a, b)
Definition: Base.h:903
#define RETURN_UNSUPPORTED
Definition: Base.h:977
#define RETURN_OUT_OF_RESOURCES
Definition: Base.h:1010
#define RETURN_SUCCESS
Definition: Base.h:962
#define TRUE
Definition: Base.h:301
#define FALSE
Definition: Base.h:307
#define RETURN_ALREADY_STARTED
Definition: Base.h:1068
#define ARRAY_SIZE(Array)
Definition: Base.h:1279
#define IN
Definition: Base.h:279
#define OUT
Definition: Base.h:284
#define RETURN_INVALID_PARAMETER
Definition: Base.h:972
#define GLOBAL_REMOVE_IF_UNREFERENCED
Definition: Base.h:48
#define MAX(a, b)
Definition: Base.h:888
BOOLEAN EFIAPI SetInterruptState(IN BOOLEAN InterruptState)
Definition: Cpu.c:48
BOOLEAN EFIAPI SaveAndDisableInterrupts(VOID)
Definition: Cpu.c:21
UINT64 EFIAPI RShiftU64(IN UINT64 Operand, IN UINTN Count)
Definition: RShiftU64.c:28
UINT64 EFIAPI MultU64x32(IN UINT64 Multiplicand, IN UINT32 Multiplier)
Definition: MultU64x32.c:27
UINT64 EFIAPI GetPowerOfTwo64(IN UINT64 Operand)
UINT64 EFIAPI LShiftU64(IN UINT64 Operand, IN UINTN Count)
Definition: LShiftU64.c:28
INTN EFIAPI HighBitSet64(IN UINT64 Operand)
Definition: HighBitSet64.c:27
VOID *EFIAPI CopyMem(OUT VOID *DestinationBuffer, IN CONST VOID *SourceBuffer, IN UINTN Length)
VOID *EFIAPI SetMem(OUT VOID *Buffer, IN UINTN Length, IN UINT8 Value)
Definition: SetMemWrapper.c:38
VOID *EFIAPI ZeroMem(OUT VOID *Buffer, IN UINTN Length)
VOID EFIAPI CpuFlushTlb(VOID)
#define ASSERT_RETURN_ERROR(StatusParameter)
Definition: DebugLib.h:466
#define DEBUG_CODE_BEGIN()
Definition: DebugLib.h:532
#define DEBUG(Expression)
Definition: DebugLib.h:417
#define ASSERT(Expression)
Definition: DebugLib.h:391
#define DEBUG_CODE_END()
Definition: DebugLib.h:543
VOID EFIAPI AsmDisableCache(VOID)
Definition: DisableCache.c:18
VOID EFIAPI AsmEnableCache(VOID)
Definition: EnableCache.c:18
UINT64 EFIAPI AsmReadMsr64(IN UINT32 Index)
Definition: GccInlinePriv.c:60
UINTN EFIAPI AsmWriteCr4(UINTN Cr4)
UINT64 EFIAPI AsmWriteMsr64(IN UINT32 Index, IN UINT64 Value)
UINTN EFIAPI AsmReadCr4(VOID)
#define MSR_IA32_MTRR_PHYSBASE0
#define MSR_IA32_MTRR_FIX4K_E0000
#define MSR_IA32_MTRR_DEF_TYPE
#define MSR_IA32_MTRR_FIX4K_C8000
#define MSR_IA32_MTRR_FIX4K_E8000
#define MSR_IA32_MTRR_FIX4K_F8000
#define MSR_IA32_MTRRCAP
#define MSR_IA32_MTRR_FIX16K_80000
#define MSR_IA32_MTRR_FIX16K_A0000
#define MSR_IA32_MTRR_FIX4K_D0000
#define MSR_IA32_MTRR_PHYSMASK0
#define MSR_IA32_MTRR_FIX64K_00000
#define MSR_IA32_MTRR_FIX4K_D8000
#define MSR_IA32_MTRR_FIX4K_C0000
#define MSR_IA32_MTRR_FIX4K_F0000
#define CPUID_EXTENDED_FUNCTION
Definition: Cpuid.h:3740
#define CPUID_VIR_PHY_ADDRESS_SIZE
Definition: Cpuid.h:4047
#define CPUID_VERSION_INFO
Definition: Cpuid.h:81
UINT32 EFIAPI AsmCpuid(IN UINT32 Index, OUT UINT32 *RegisterEax OPTIONAL, OUT UINT32 *RegisterEbx OPTIONAL, OUT UINT32 *RegisterEcx OPTIONAL, OUT UINT32 *RegisterEdx OPTIONAL)
Definition: CpuId.c:36
RETURN_STATUS MtrrLibApplyFixedMtrrs(IN MTRR_FIXED_SETTINGS *Fixed, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
Definition: MtrrLib.c:1757
VOID MtrrLibCalculateLeastMtrrs(IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT CONST UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN BOOLEAN IncludeOptional)
Definition: MtrrLib.c:1137
RETURN_STATUS MtrrLibCalculateSubtractivePath(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN UINT8 Types, IN UINT8 TypeCount, IN OUT MTRR_MEMORY_RANGE *Mtrrs OPTIONAL, IN UINT32 MtrrCapacity OPTIONAL, IN OUT UINT32 *MtrrCount OPTIONAL)
Definition: MtrrLib.c:1294
BOOLEAN MtrrLibIsPowerOfTwo(IN UINT64 Operand)
Definition: MtrrLib.c:1263
UINT32 EFIAPI MtrrGetMemoryAttributeInVariableMtrr(IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
Definition: MtrrLib.c:671
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetMemoryAttribute(IN PHYSICAL_ADDRESS Address)
Definition: MtrrLib.c:932
VOID MtrrLibInitializeMtrrMask(OUT UINT64 *MtrrValidBitsMask, OUT UINT64 *MtrrValidAddressMask)
Definition: MtrrLib.c:754
UINT32 MtrrGetMemoryAttributeInVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
Definition: MtrrLib.c:587
UINT32 GetVariableMtrrCountWorker(VOID)
Definition: MtrrLib.c:150
MTRR_MEMORY_CACHE_TYPE MtrrGetDefaultMemoryTypeWorker(IN MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:235
RETURN_STATUS MtrrLibApplyVariableMtrrs(IN CONST MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCount, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
Definition: MtrrLib.c:1808
VOID MtrrSetFixedMtrrWorker(IN MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:2736
VOID EFIAPI MtrrDebugPrintAllMtrrs(VOID)
Definition: MtrrLib.c:2991
MTRR_SETTINGS *EFIAPI MtrrSetAllMtrrs(IN MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:2800
RETURN_STATUS MtrrLibAppendVariableMtrr(IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:1215
MTRR_MEMORY_CACHE_TYPE MtrrLibLowestType(IN UINT8 TypeBits)
Definition: MtrrLib.c:1243
VOID MtrrLibPreMtrrChange(OUT MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:279
VOID MtrrLibMergeVariableMtrr(MTRR_MEMORY_RANGE *DstMtrrs, UINT32 DstMtrrCount, MTRR_MEMORY_RANGE *SrcMtrrs, UINT32 SrcMtrrCount, BOOLEAN *Modified)
Definition: MtrrLib.c:1944
UINT64 MtrrLibBiggestAlignment(UINT64 Address, UINT64 Alignment0)
Definition: MtrrLib.c:708
RETURN_STATUS MtrrLibCalculateMtrrs(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount)
Definition: MtrrLib.c:1516
UINT8 MtrrLibGetNumberOfTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT64 BaseAddress, IN UINT64 Length, IN OUT UINT8 *Types OPTIONAL)
Definition: MtrrLib.c:1084
RETURN_STATUS EFIAPI MtrrSetMemoryAttributeInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
Definition: MtrrLib.c:2638
UINT32 EFIAPI GetVariableMtrrCount(VOID)
Definition: MtrrLib.c:169
UINT32 EFIAPI GetFirmwareVariableMtrrCount(VOID)
Definition: MtrrLib.c:211
VOID MtrrLibPostMtrrChangeEnableCache(IN MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:324
BOOLEAN EFIAPI IsMtrrSupported(VOID)
Definition: MtrrLib.c:2841
MTRR_MEMORY_CACHE_TYPE MtrrGetMemoryAttributeByAddressWorker(IN MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS Address)
Definition: MtrrLib.c:822
RETURN_STATUS EFIAPI MtrrSetMemoryAttribute(IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
Definition: MtrrLib.c:2691
UINT32 MtrrLibGetRawVariableRanges(IN MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT MTRR_MEMORY_RANGE *VariableMtrr)
Definition: MtrrLib.c:630
VOID MtrrSetVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings)
Definition: MtrrLib.c:2707
UINT32 GetFirmwareVariableMtrrCountWorker(VOID)
Definition: MtrrLib.c:187
MTRR_FIXED_SETTINGS *EFIAPI MtrrGetFixedMtrr(OUT MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:409
UINT8 MtrrLibGetCompatibleTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
Definition: MtrrLib.c:1897
RETURN_STATUS MtrrLibSetBelow1MBMemoryAttribute(IN OUT UINT64 *ClearMasks, IN OUT UINT64 *OrMasks, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:2203
RETURN_STATUS MtrrLibProgramFixedMtrr(IN MTRR_MEMORY_CACHE_TYPE Type, IN OUT UINT64 *Base, IN OUT UINT64 *Length, IN OUT UINT32 *LastMsrIndex, OUT UINT64 *ClearMask, OUT UINT64 *OrMask)
Definition: MtrrLib.c:477
MTRR_VARIABLE_SETTINGS * MtrrGetVariableMtrrWorker(IN MTRR_SETTINGS *MtrrSetting, IN UINT32 VariableMtrrCount, OUT MTRR_VARIABLE_SETTINGS *VariableSettings)
Definition: MtrrLib.c:435
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetDefaultMemoryType(VOID)
Definition: MtrrLib.c:258
RETURN_STATUS EFIAPI MtrrSetMemoryAttributesInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
Definition: MtrrLib.c:2260
MTRR_MEMORY_CACHE_TYPE MtrrLibPrecedence(IN MTRR_MEMORY_CACHE_TYPE MtrrType1, IN MTRR_MEMORY_CACHE_TYPE MtrrType2)
Definition: MtrrLib.c:787
MTRR_SETTINGS *EFIAPI MtrrGetAllMtrrs(OUT MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:2760
BOOLEAN MtrrLibTypeLeftPrecedeRight(IN MTRR_MEMORY_CACHE_TYPE Left, IN MTRR_MEMORY_CACHE_TYPE Right)
Definition: MtrrLib.c:736
RETURN_STATUS MtrrLibSetMemoryType(IN MTRR_MEMORY_RANGE *Ranges, IN UINTN Capacity, IN OUT UINTN *Count, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
Definition: MtrrLib.c:962
RETURN_STATUS MtrrLibSetMemoryRanges(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, OUT MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCapacity, OUT UINT32 *VariableMtrrCount)
Definition: MtrrLib.c:2032
MTRR_FIXED_SETTINGS * MtrrGetFixedMtrrWorker(OUT MTRR_FIXED_SETTINGS *FixedSettings)
Definition: MtrrLib.c:385
VOID MtrrLibPostMtrrChange(IN MTRR_CONTEXT *MtrrContext)
Definition: MtrrLib.c:359
VOID MtrrDebugPrintAllMtrrsWorker(IN MTRR_SETTINGS *MtrrSetting)
Definition: MtrrLib.c:2879
#define PcdGet32(TokenName)
Definition: PcdLib.h:362
VOID EFIAPI Exit(IN EFI_STATUS Status)
UINT64 EFIAPI AsmMsrAndThenOr64(IN UINT32 Index, IN UINT64 AndData, IN UINT64 OrData)
Definition: X86Msr.c:437
struct CPUID_VIR_PHY_ADDRESS_SIZE_EAX::@664 Bits
struct MSR_IA32_MTRR_DEF_TYPE_REGISTER::@565 Bits
struct MSR_IA32_MTRRCAP_REGISTER::@543 Bits