// NOTE(review): every line in this extraction carries its original file line
// number fused onto the front (e.g. "23#define"); the text after that prefix
// is the real source. Only comments are added here; all tokens are unchanged.
//
// Seed with one bit set per byte lane: each fixed-MTRR MSR packs eight
// one-byte cache-type fields, so per-byte masks are built by multiplying/
// shifting this seed (see the LeftByteShift/RightByteShift logic later in
// the file where OR_SEED/CLEAR_SEED are shifted by LeftByteShift * 8).
23#define OR_SEED 0x0101010101010101ull
// All-ones seed for the clear mask; byte lanes outside the requested range
// are stripped off by the same left/right byte shifts.
24#define CLEAR_SEED 0xFFFFFFFFFFFFFFFFull
// Sentinel edge weight for the shortest-path (least-MTRR) search: an edge
// whose mandatory weight equals MAX_WEIGHT is treated as absent.
25#define MAX_WEIGHT MAX_UINT8
// Size of the caller-independent scratch buffer used by the one-range
// convenience wrapper (16 KiB) — presumably sized to hold the vertex +
// weight arrays for typical range counts; TODO confirm against the
// RequiredScratchSize computation.
26#define SCRATCH_BUFFER_SIZE (4 * SIZE_4KB)
// Asserts that base B is aligned on length L (L assumed to be a power of
// two so that ~(L - 1) is the alignment mask).
// NOTE(review): macro-hygiene issues — B and L are not parenthesized in the
// expansion and the definition ends with a semicolon, so an expression
// argument or use in an if/else could misparse. Left as-is here since the
// line is corrupted by the fused number prefix and cannot be compile-checked;
// consider ASSERT ((((B)) & ~((L) - 1)) == (B)) without the trailing ';'.
27#define MTRR_LIB_ASSERT_ALIGNED(B, L) ASSERT ((B & ~(L - 1)) == B);
// Flattened 2-D indexing into the VertexCount x VertexCount weight matrix:
// M(x,y) addresses the "mandatory" weight of edge x->y (row-major), while
// O(x,y) addresses the "optional" weight stored in the transposed cell —
// the two triangles of one square array hold the two weight kinds.
29#define M(x, y) ((x) * VertexCount + (y))
30#define O(x, y) ((y) * VertexCount + (x))
37 BOOLEAN InterruptState;
45 MTRR_MEMORY_CACHE_TYPE Type : 7;
157 OUT BOOLEAN *FixedMtrrSupported OPTIONAL,
158 OUT UINT32 *VariableMtrrCount OPTIONAL
175 if (Edx.Bits.MTRR == 0) {
176 if (FixedMtrrSupported !=
NULL) {
177 *FixedMtrrSupported =
FALSE;
180 if (VariableMtrrCount !=
NULL) {
181 *VariableMtrrCount = 0;
194 if (FixedMtrrSupported !=
NULL) {
195 *FixedMtrrSupported = (BOOLEAN)(MtrrCap.
Bits.
FIX == 1);
198 if (VariableMtrrCount !=
NULL) {
199 *VariableMtrrCount = MtrrCap.
Bits.
VCNT;
257 UINT32 VariableMtrrCount;
258 UINT32 ReservedMtrrNumber;
261 ReservedMtrrNumber =
PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
262 if (VariableMtrrCount < ReservedMtrrNumber) {
266 return VariableMtrrCount - ReservedMtrrNumber;
300MTRR_MEMORY_CACHE_TYPE
307 if (MtrrSetting ==
NULL) {
310 DefType.
Uint64 = MtrrSetting->MtrrDefType;
313 return (MTRR_MEMORY_CACHE_TYPE)DefType.
Bits.
Type;
322MTRR_MEMORY_CACHE_TYPE
329 return CacheUncacheable;
376 DefType.
Uint64 = MtrrContext->DefType.Uint64;
435 MtrrContext->DefType.Bits.E = 1;
456 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
457 FixedSettings->Mtrr[Index] =
461 return FixedSettings;
478 BOOLEAN FixedMtrrSupported;
482 if (!FixedMtrrSupported) {
483 return FixedSettings;
506 IN UINT32 VariableMtrrCount,
512 ASSERT (VariableMtrrCount <=
ARRAY_SIZE (VariableSettings->Mtrr));
514 for (Index = 0; Index < VariableMtrrCount; Index++) {
515 if (MtrrSetting ==
NULL) {
516 VariableSettings->Mtrr[Index].Base =
518 VariableSettings->Mtrr[Index].Mask =
521 VariableSettings->Mtrr[Index].Base = MtrrSetting->Variables.Mtrr[Index].Base;
522 VariableSettings->Mtrr[Index].Mask = MtrrSetting->Variables.Mtrr[Index].Mask;
526 return VariableSettings;
547 IN MTRR_MEMORY_CACHE_TYPE Type,
549 IN OUT UINT64 *Length,
550 IN OUT UINT32 *LastMsrIndex,
551 OUT UINT64 *ClearMask,
556 UINT32 LeftByteShift;
557 UINT32 RightByteShift;
563 for (MsrIndex = *LastMsrIndex + 1; MsrIndex <
ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
564 if ((*Base >= mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) &&
567 mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress +
568 (8 * mMtrrLibFixedMtrrTable[MsrIndex].Length)
577 ASSERT (MsrIndex !=
ARRAY_SIZE (mMtrrLibFixedMtrrTable));
582 if ((((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
589 LeftByteShift = ((UINT32)*Base - mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
590 ASSERT (LeftByteShift < 8);
595 SubLength = mMtrrLibFixedMtrrTable[MsrIndex].Length * (8 - LeftByteShift);
596 if (*Length >= SubLength) {
599 if (((UINT32)(*Length) % mMtrrLibFixedMtrrTable[MsrIndex].Length) != 0) {
606 RightByteShift = 8 - LeftByteShift - (UINT32)(*Length) / mMtrrLibFixedMtrrTable[MsrIndex].Length;
613 *ClearMask = CLEAR_SEED;
616 if (LeftByteShift != 0) {
620 *ClearMask &=
LShiftU64 (*ClearMask, LeftByteShift * 8);
621 *OrMask &=
LShiftU64 (*OrMask, LeftByteShift * 8);
624 if (RightByteShift != 0) {
628 *ClearMask &=
RShiftU64 (*ClearMask, RightByteShift * 8);
629 *OrMask &=
RShiftU64 (*OrMask, RightByteShift * 8);
632 *Length -= SubLength;
635 *LastMsrIndex = MsrIndex;
659 IN UINT64 MtrrValidBitsMask,
660 IN UINT64 MtrrValidAddressMask,
668 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
670 VariableMtrr[Index].Msr = (UINT32)Index;
671 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
672 VariableMtrr[Index].Length =
673 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
674 VariableMtrr[Index].Type = (VariableSettings->Mtrr[Index].Base & 0x0ff);
675 VariableMtrr[Index].Valid =
TRUE;
676 VariableMtrr[Index].Used =
TRUE;
702 IN UINT64 MtrrValidBitsMask,
703 IN UINT64 MtrrValidAddressMask,
711 for (Index = 0, UsedMtrr = 0; Index < VariableMtrrCount; Index++) {
713 VariableMtrr[Index].BaseAddress = (VariableSettings->Mtrr[Index].Base & MtrrValidAddressMask);
714 VariableMtrr[Index].Length =
715 ((~(VariableSettings->Mtrr[Index].Mask & MtrrValidAddressMask)) & MtrrValidBitsMask) + 1;
716 VariableMtrr[Index].Type = (MTRR_MEMORY_CACHE_TYPE)(VariableSettings->Mtrr[Index].Base & 0x0ff);
741 IN UINT64 MtrrValidBitsMask,
742 IN UINT64 MtrrValidAddressMask,
762 MtrrValidAddressMask,
786 return Address & ((~Address) + 1);
806 IN MTRR_MEMORY_CACHE_TYPE Left,
807 IN MTRR_MEMORY_CACHE_TYPE Right
810 return (BOOLEAN)(Left == CacheUncacheable || (Left == CacheWriteThrough && Right == CacheWriteBack));
824 OUT UINT64 *MtrrValidBitsMask,
825 OUT UINT64 *MtrrValidAddressMask
828 UINT32 MaxExtendedFunction;
836 if (MaxExtendedFunction >= CPUID_VIR_PHY_ADDRESS_SIZE) {
851 if (ExtendedFeatureFlagsEcx.
Bits.
TME_EN == 1) {
860 *MtrrValidAddressMask = *MtrrValidBitsMask & 0xfffffffffffff000ULL;
875MTRR_MEMORY_CACHE_TYPE
877 IN MTRR_MEMORY_CACHE_TYPE MtrrType1,
878 IN MTRR_MEMORY_CACHE_TYPE MtrrType2
881 if (MtrrType1 == MtrrType2) {
910MTRR_MEMORY_CACHE_TYPE
913 IN PHYSICAL_ADDRESS Address
920 MTRR_MEMORY_CACHE_TYPE MtrrType;
922 UINT64 MtrrValidBitsMask;
923 UINT64 MtrrValidAddressMask;
924 UINT32 VariableMtrrCount;
930 if (MtrrSetting ==
NULL) {
933 DefType.
Uint64 = MtrrSetting->MtrrDefType;
936 if (DefType.
Bits.
E == 0) {
937 return CacheUncacheable;
943 if (Address < BASE_1MB) {
944 if (DefType.
Bits.
FE != 0) {
948 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
949 if ((Address >= mMtrrLibFixedMtrrTable[Index].BaseAddress) &&
950 (Address < mMtrrLibFixedMtrrTable[Index].BaseAddress +
951 (mMtrrLibFixedMtrrTable[Index].Length * 8)))
954 ((
UINTN)Address - mMtrrLibFixedMtrrTable[Index].BaseAddress) /
955 mMtrrLibFixedMtrrTable[Index].Length;
956 if (MtrrSetting ==
NULL) {
957 FixedMtrr =
AsmReadMsr64 (mMtrrLibFixedMtrrTable[Index].Msr);
959 FixedMtrr = MtrrSetting->Fixed.Mtrr[Index];
962 return (MTRR_MEMORY_CACHE_TYPE)(
RShiftU64 (FixedMtrr, SubIndex * 8) & 0xFF);
969 ASSERT (VariableMtrrCount <=
ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
977 MtrrValidAddressMask,
984 MtrrType = CacheInvalid;
985 for (Index = 0; Index < VariableMtrrCount; Index++) {
986 if (VariableMtrr[Index].Length != 0) {
987 if ((Address >= VariableMtrr[Index].BaseAddress) &&
988 (Address < VariableMtrr[Index].BaseAddress + VariableMtrr[Index].Length))
990 if (MtrrType == CacheInvalid) {
991 MtrrType = (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type;
993 MtrrType =
MtrrLibPrecedence (MtrrType, (MTRR_MEMORY_CACHE_TYPE)VariableMtrr[Index].Type);
1002 if (MtrrType == CacheInvalid) {
1003 MtrrType = (MTRR_MEMORY_CACHE_TYPE)DefType.
Bits.
Type;
1019MTRR_MEMORY_CACHE_TYPE
1022 IN PHYSICAL_ADDRESS Address
1026 return CacheUncacheable;
1055 IN UINT64 BaseAddress,
1057 IN MTRR_MEMORY_CACHE_TYPE Type
1068 ASSERT (Length != 0);
1072 Limit = BaseAddress + Length;
1073 StartIndex = *Count;
1075 for (Index = 0; Index < *Count; Index++) {
1076 if ((StartIndex == *Count) &&
1077 (Ranges[Index].BaseAddress <= BaseAddress) &&
1078 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length))
1081 LengthLeft = BaseAddress - Ranges[Index].BaseAddress;
1084 if ((EndIndex == *Count) &&
1085 (Ranges[Index].BaseAddress < Limit) &&
1086 (Limit <= Ranges[Index].BaseAddress + Ranges[Index].Length))
1089 LengthRight = Ranges[Index].BaseAddress + Ranges[Index].Length - Limit;
1094 ASSERT (StartIndex != *Count && EndIndex != *Count);
1095 if ((StartIndex == EndIndex) && (Ranges[StartIndex].Type == Type)) {
1104 if (StartIndex != 0) {
1105 if ((LengthLeft == 0) && (Ranges[StartIndex - 1].Type == Type)) {
1107 Length += Ranges[StartIndex].Length;
1108 BaseAddress -= Ranges[StartIndex].Length;
1112 if (EndIndex != (*Count) - 1) {
1113 if ((LengthRight == 0) && (Ranges[EndIndex + 1].Type == Type)) {
1115 Length += Ranges[EndIndex].Length;
1127 DeltaCount = EndIndex - StartIndex - 2;
1128 if (LengthLeft == 0) {
1132 if (LengthRight == 0) {
1136 if (*Count - DeltaCount > Capacity) {
1143 CopyMem (&Ranges[EndIndex + 1 - DeltaCount], &Ranges[EndIndex + 1], (*Count - EndIndex - 1) *
sizeof (Ranges[0]));
1144 *Count -= DeltaCount;
1146 if (LengthLeft != 0) {
1147 Ranges[StartIndex].Length = LengthLeft;
1151 if (LengthRight != 0) {
1152 Ranges[EndIndex - DeltaCount].BaseAddress = BaseAddress + Length;
1153 Ranges[EndIndex - DeltaCount].Length = LengthRight;
1154 Ranges[EndIndex - DeltaCount].Type = Ranges[EndIndex].Type;
1157 Ranges[StartIndex].BaseAddress = BaseAddress;
1158 Ranges[StartIndex].Length = Length;
1159 Ranges[StartIndex].Type = Type;
1178 IN UINT64 BaseAddress,
1180 IN OUT UINT8 *Types OPTIONAL
1189 for (Index = 0; Index < RangeCount; Index++) {
1190 if ((Ranges[Index].BaseAddress <= BaseAddress) &&
1191 (BaseAddress < Ranges[Index].BaseAddress + Ranges[Index].Length)
1194 if ((LocalTypes & (1 << Ranges[Index].Type)) == 0) {
1195 LocalTypes |= (UINT8)(1 << Ranges[Index].Type);
1199 if (BaseAddress + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1200 Length -= Ranges[Index].BaseAddress + Ranges[Index].Length - BaseAddress;
1201 BaseAddress = Ranges[Index].BaseAddress + Ranges[Index].Length;
1208 if (Types !=
NULL) {
1209 *Types = LocalTypes;
1229 IN UINT16 VertexCount,
1234 IN BOOLEAN IncludeOptional
1243 for (Index = Start; Index <= Stop; Index++) {
1244 Vertices[Index].Visited =
FALSE;
1245 Mandatory = Weight[M (Start, Index)];
1246 Vertices[Index].Weight = Mandatory;
1247 if (Mandatory != MAX_WEIGHT) {
1248 Optional = IncludeOptional ? Weight[O (Start, Index)] : 0;
1249 Vertices[Index].Weight += Optional;
1250 ASSERT (Vertices[Index].Weight >= Optional);
1256 while (!Vertices[Stop].Visited) {
1260 for (Index = Start + 1; Index <= Stop; Index++) {
1261 if (!Vertices[Index].Visited) {
1262 Mandatory = Weight[M (MinI, Index)];
1263 if (Mandatory != MAX_WEIGHT) {
1264 Optional = IncludeOptional ? Weight[O (MinI, Index)] : 0;
1265 if (MinWeight + Mandatory + Optional <= Vertices[Index].Weight) {
1266 Vertices[Index].Weight = MinWeight + Mandatory + Optional;
1267 Vertices[Index].Previous = MinI;
1277 MinWeight = MAX_WEIGHT;
1278 for (Index = Start + 1; Index <= Stop; Index++) {
1279 if (!Vertices[Index].Visited && (MinWeight > Vertices[Index].Weight)) {
1281 MinWeight = Vertices[Index].Weight;
1288 Vertices[MinI].Visited =
TRUE;
1308 IN UINT32 MtrrCapacity,
1309 IN OUT UINT32 *MtrrCount,
1310 IN UINT64 BaseAddress,
1312 IN MTRR_MEMORY_CACHE_TYPE Type
1315 if (*MtrrCount == MtrrCapacity) {
1319 Mtrrs[*MtrrCount].BaseAddress = BaseAddress;
1320 Mtrrs[*MtrrCount].Length = Length;
1321 Mtrrs[*MtrrCount].Type = Type;
1333MTRR_MEMORY_CACHE_TYPE
1340 ASSERT (TypeBits != 0);
1341 for (Type = 7; (INT8)TypeBits > 0; Type--, TypeBits <<= 1) {
1344 return (MTRR_MEMORY_CACHE_TYPE)Type;
1371 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1375 IN UINT16 VertexCount,
1377 IN OUT UINT8 *Weight,
1383 IN UINT32 MtrrCapacity OPTIONAL,
1384 IN OUT UINT32 *MtrrCount OPTIONAL
1387 RETURN_STATUS Status;
1390 UINT8 PrecedentTypes;
1399 MTRR_MEMORY_CACHE_TYPE LowestType;
1400 MTRR_MEMORY_CACHE_TYPE LowestPrecedentType;
1402 Base = Vertices[Start].Address;
1403 Length = Vertices[Stop].Address - Base;
1410 PrecedentTypes = ~(1 << LowestType) & Types;
1413 if (Mtrrs ==
NULL) {
1414 Weight[M (Start, Stop)] = ((LowestType == DefaultType) ? 0 : 1);
1415 Weight[O (Start, Stop)] = ((LowestType == DefaultType) ? 1 : 0);
1421 for (Index = 0; Index < RangeCount; Index++) {
1426 if ((Base < Ranges[Index].BaseAddress) || (Ranges[Index].BaseAddress + Ranges[Index].Length <= Base)) {
1433 if (Base + Length > Ranges[Index].BaseAddress + Ranges[Index].Length) {
1434 SubLength = Ranges[Index].BaseAddress + Ranges[Index].Length - Base;
1439 if (((1 << Ranges[Index].Type) & PrecedentTypes) != 0) {
1445 if (HBase == MAX_UINT64) {
1449 HLength += SubLength;
1453 Length -= SubLength;
1459 if ((Ranges[Index].Type == LowestType) || (Length == 0)) {
1467 for (SubStart = Start; SubStart <= Stop; SubStart++) {
1468 if (Vertices[SubStart].Address == HBase) {
1473 for (SubStop = SubStart; SubStop <= Stop; SubStop++) {
1474 if (Vertices[SubStop].Address == HBase + HLength) {
1479 ASSERT (Vertices[SubStart].Address == HBase);
1480 ASSERT (Vertices[SubStop].Address == HBase + HLength);
1482 if ((TypeCount == 2) || (SubStart == SubStop - 1)) {
1488 if (Mtrrs ==
NULL) {
1489 Weight[M (Start, Stop)] += (UINT8)(SubStop - SubStart);
1491 while (SubStart != SubStop) {
1496 Vertices[SubStart].Address,
1497 Vertices[SubStart].Length,
1498 Vertices[SubStart].Type
1508 ASSERT (TypeCount == 3);
1511 if (Mtrrs ==
NULL) {
1512 Weight[M (Start, Stop)] += Vertices[SubStop].Weight;
1515 while (SubStop != SubStart) {
1517 Pre = Vertices[Cur].Previous;
1520 if (Weight[M (Pre, Cur)] + Weight[O (Pre, Cur)] != 0) {
1525 Vertices[Pre].Address,
1526 Vertices[Cur].Address - Vertices[Pre].Address,
1527 (Pre != Cur - 1) ? LowestPrecedentType : Vertices[Pre].Type
1534 if (Pre != Cur - 1) {
1593 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
1600 IN UINT32 MtrrCapacity,
1601 IN OUT UINT32 *MtrrCount
1615 UINTN RequiredScratchSize;
1620 RETURN_STATUS Status;
1622 Base0 = Ranges[0].BaseAddress;
1623 Base1 = Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length;
1624 MTRR_LIB_ASSERT_ALIGNED (Base0, Base1 - Base0);
1630 for (VertexIndex = 0, Index = 0; Index < RangeCount; Index++) {
1631 Base = Ranges[Index].BaseAddress;
1632 Length = Ranges[Index].Length;
1633 while (Length != 0) {
1635 SubLength = Alignment;
1636 if (SubLength > Length) {
1640 if (VertexIndex < *ScratchSize /
sizeof (*Vertices)) {
1641 Vertices[VertexIndex].Address = Base;
1642 Vertices[VertexIndex].Alignment = Alignment;
1643 Vertices[VertexIndex].Type = Ranges[Index].Type;
1644 Vertices[VertexIndex].Length = SubLength;
1648 Length -= SubLength;
1656 VertexCount = VertexIndex + 1;
1659 " Count of vertices (%016llx - %016llx) = %d\n",
1660 Ranges[0].BaseAddress,
1661 Ranges[RangeCount - 1].BaseAddress + Ranges[RangeCount - 1].Length,
1664 ASSERT (VertexCount < MAX_UINT16);
1666 RequiredScratchSize = VertexCount *
sizeof (*Vertices) + VertexCount * VertexCount *
sizeof (*Weight);
1667 if (*ScratchSize < RequiredScratchSize) {
1668 *ScratchSize = RequiredScratchSize;
1672 Vertices[VertexCount - 1].Address = Base1;
1674 Weight = (UINT8 *)&Vertices[VertexCount];
1675 for (VertexIndex = 0; VertexIndex < VertexCount; VertexIndex++) {
1679 SetMem (&Weight[M (VertexIndex, 0)], VertexIndex + 1, 0);
1683 SetMem (&Weight[M (VertexIndex, VertexIndex + 1)], VertexCount - VertexIndex - 1, MAX_WEIGHT);
1695 for (VertexIndex = 0; VertexIndex < VertexCount - 1; VertexIndex++) {
1696 if (Vertices[VertexIndex].Type != DefaultType) {
1697 Weight[M (VertexIndex, VertexIndex + 1)] = 1;
1698 Weight[O (VertexIndex, VertexIndex + 1)] = 0;
1700 Weight[M (VertexIndex, VertexIndex + 1)] = 0;
1701 Weight[O (VertexIndex, VertexIndex + 1)] = 1;
1705 for (TypeCount = 2; TypeCount <= 3; TypeCount++) {
1706 for (Start = 0; (UINT32)Start < VertexCount; Start++) {
1707 for (Stop = Start + 2; (UINT32)Stop < VertexCount; Stop++) {
1708 ASSERT (Vertices[Stop].Address > Vertices[Start].Address);
1709 Length = Vertices[Stop].Address - Vertices[Start].Address;
1710 if (Length > Vertices[Start].Alignment) {
1717 if ((Weight[M (Start, Stop)] == MAX_WEIGHT) &&
IS_POW2 (Length)) {
1721 Vertices[Start].Address,
1722 Vertices[Stop].Address - Vertices[Start].Address,
1734 (UINT16)VertexCount,
1745 }
else if (TypeCount == 2) {
1759 Stop = (UINT16)VertexCount - 1;
1761 Start = Vertices[Stop].Previous;
1762 TypeCount = MAX_UINT8;
1764 if (Weight[M (Start, Stop)] != 0) {
1765 TypeCount =
MtrrLibGetNumberOfTypes (Ranges, RangeCount, Vertices[Start].Address, Vertices[Stop].Address - Vertices[Start].Address, &Type);
1770 Vertices[Start].Address,
1771 Vertices[Stop].Address - Vertices[Start].Address,
1779 if (Start != Stop - 1) {
1783 if (TypeCount == MAX_UINT8) {
1787 Vertices[Start].Address,
1788 Vertices[Stop].Address - Vertices[Start].Address,
1798 (UINT16)VertexCount,
1840 RETURN_STATUS Status;
1843 MTRR_MEMORY_CACHE_TYPE MemoryType;
1847 for (MsrIndex = 0; MsrIndex <
ARRAY_SIZE (mMtrrLibFixedMtrrTable); MsrIndex++) {
1848 ASSERT (Base == mMtrrLibFixedMtrrTable[MsrIndex].BaseAddress);
1849 for (Index = 0; Index <
sizeof (UINT64); Index++) {
1850 MemoryType = (MTRR_MEMORY_CACHE_TYPE)((UINT8 *)(&Fixed->Mtrr[MsrIndex]))[Index];
1856 mMtrrLibFixedMtrrTable[MsrIndex].Length,
1863 Base += mMtrrLibFixedMtrrTable[MsrIndex].Length;
1867 ASSERT (Base == BASE_1MB);
1886 IN UINT32 VariableMtrrCount,
1892 RETURN_STATUS Status;
1904 for (Index = 0; Index < VariableMtrrCount; Index++) {
1905 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheWriteBack)) {
1910 VariableMtrr[Index].BaseAddress,
1911 VariableMtrr[Index].Length,
1912 VariableMtrr[Index].Type
1923 for (Index = 0; Index < VariableMtrrCount; Index++) {
1924 if ((VariableMtrr[Index].Length != 0) &&
1925 (VariableMtrr[Index].Type != CacheWriteBack) && (VariableMtrr[Index].Type != CacheUncacheable))
1931 VariableMtrr[Index].BaseAddress,
1932 VariableMtrr[Index].Length,
1933 VariableMtrr[Index].Type
1944 for (Index = 0; Index < VariableMtrrCount; Index++) {
1945 if ((VariableMtrr[Index].Length != 0) && (VariableMtrr[Index].Type == CacheUncacheable)) {
1950 VariableMtrr[Index].BaseAddress,
1951 VariableMtrr[Index].Length,
1952 VariableMtrr[Index].Type
1978 ASSERT (RangeCount != 0);
1980 switch (Ranges[0].Type) {
1981 case CacheWriteBack:
1982 case CacheWriteThrough:
1983 return (1 << CacheWriteBack) | (1 << CacheWriteThrough) | (1 << CacheUncacheable);
1986 case CacheWriteCombining:
1987 case CacheWriteProtected:
1988 return (1 << Ranges[0].Type) | (1 << CacheUncacheable);
1991 case CacheUncacheable:
1992 if (RangeCount == 1) {
1993 return (1 << CacheUncacheable);
2022 UINT32 DstMtrrCount,
2024 UINT32 SrcMtrrCount,
2031 ASSERT (SrcMtrrCount <= DstMtrrCount);
2033 for (DstIndex = 0; DstIndex < DstMtrrCount; DstIndex++) {
2034 Modified[DstIndex] =
FALSE;
2036 if (DstMtrrs[DstIndex].Length == 0) {
2040 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
2041 if ((DstMtrrs[DstIndex].BaseAddress == SrcMtrrs[SrcIndex].BaseAddress) &&
2042 (DstMtrrs[DstIndex].Length == SrcMtrrs[SrcIndex].Length) &&
2043 (DstMtrrs[DstIndex].Type == SrcMtrrs[SrcIndex].Type))
2049 if (SrcIndex == SrcMtrrCount) {
2053 DstMtrrs[DstIndex].Length = 0;
2054 Modified[DstIndex] =
TRUE;
2059 SrcMtrrs[SrcIndex].Length = 0;
2068 for (SrcIndex = 0; SrcIndex < SrcMtrrCount; SrcIndex++) {
2069 if (SrcMtrrs[SrcIndex].Length != 0) {
2073 while (DstIndex < DstMtrrCount) {
2074 if (DstMtrrs[DstIndex].Length == 0) {
2081 ASSERT (DstIndex < DstMtrrCount);
2082 CopyMem (&DstMtrrs[DstIndex], &SrcMtrrs[SrcIndex],
sizeof (SrcMtrrs[0]));
2083 Modified[DstIndex] =
TRUE;
2109 IN MTRR_MEMORY_CACHE_TYPE DefaultType,
2116 IN UINT32 VariableMtrrCapacity,
2117 OUT UINT32 *VariableMtrrCount
2120 RETURN_STATUS Status;
2125 UINT8 CompatibleTypes;
2128 UINTN ActualScratchSize;
2129 UINTN BiggestScratchSize;
2131 *VariableMtrrCount = 0;
2140 BiggestScratchSize = 0;
2142 for (Index = 0; (
UINTN)Index < RangeCount;) {
2143 Base0 = Ranges[Index].BaseAddress;
2148 while ((
UINTN)Index < RangeCount) {
2149 ASSERT (Ranges[Index].BaseAddress == Base0);
2151 while (Base0 + Alignment <= Ranges[Index].BaseAddress + Ranges[Index].Length) {
2152 if ((BiggestScratchSize <= *ScratchSize) && (Ranges[Index].Type != DefaultType)) {
2155 VariableMtrrCapacity,
2173 Ranges[Index].Length -= Base0 - Ranges[Index].BaseAddress;
2174 Ranges[Index].BaseAddress = Base0;
2175 if (Ranges[Index].Length != 0) {
2182 if (Index == RangeCount) {
2196 while ((
UINTN)(End + 1) < RangeCount) {
2197 if (((1 << Ranges[End + 1].Type) & CompatibleTypes) == 0) {
2205 Length =
GetPowerOfTwo64 (Ranges[End].BaseAddress + Ranges[End].Length - Base0);
2206 Base1 = Base0 +
MIN (Alignment, Length);
2212 while ((
UINTN)(End + 1) < RangeCount) {
2213 if (Base1 <= Ranges[End + 1].BaseAddress) {
2220 Length = Ranges[End].Length;
2221 Ranges[End].Length = Base1 - Ranges[End].BaseAddress;
2222 ActualScratchSize = *ScratchSize;
2231 VariableMtrrCapacity,
2235 BiggestScratchSize =
MAX (BiggestScratchSize, ActualScratchSize);
2247 if (Length != Ranges[End].Length) {
2248 Ranges[End].BaseAddress = Base1;
2249 Ranges[End].Length = Length - Ranges[End].Length;
2256 if (*ScratchSize < BiggestScratchSize) {
2257 *ScratchSize = BiggestScratchSize;
2280 IN OUT UINT64 *ClearMasks,
2281 IN OUT UINT64 *OrMasks,
2282 IN PHYSICAL_ADDRESS BaseAddress,
2284 IN MTRR_MEMORY_CACHE_TYPE Type
2287 RETURN_STATUS Status;
2292 ASSERT (BaseAddress < BASE_1MB);
2294 MsrIndex = (UINT32)-1;
2295 while ((BaseAddress < BASE_1MB) && (Length != 0)) {
2301 ClearMasks[MsrIndex] = ClearMasks[MsrIndex] | ClearMask;
2302 OrMasks[MsrIndex] = (OrMasks[MsrIndex] & ~ClearMask) | OrMask;
2344 RETURN_STATUS Status;
2348 BOOLEAN VariableMtrrNeeded;
2350 UINT64 MtrrValidBitsMask;
2351 UINT64 MtrrValidAddressMask;
2352 MTRR_MEMORY_CACHE_TYPE DefaultType;
2355 UINTN WorkingRangeCount;
2358 UINT32 OriginalVariableMtrrCount;
2359 UINT32 FirmwareVariableMtrrCount;
2360 UINT32 WorkingVariableMtrrCount;
2363 BOOLEAN VariableSettingModified[
ARRAY_SIZE (MtrrSetting->Variables.Mtrr)];
2365 UINT64 FixedMtrrMemoryLimit;
2366 BOOLEAN FixedMtrrSupported;
2367 UINT64 ClearMasks[
ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2368 UINT64 OrMasks[
ARRAY_SIZE (mMtrrLibFixedMtrrTable)];
2371 BOOLEAN MtrrContextValid;
2384 VariableMtrrNeeded =
FALSE;
2385 OriginalVariableMtrrCount = 0;
2393 "Mtrr: Set Mem Attribute to %a, ScratchSize = %x%a",
2394 (MtrrSetting ==
NULL) ?
"Hardware" :
"Buffer",
2396 (RangeCount <= 1) ?
"," :
"\n"
2398 for (Index = 0; Index < RangeCount; Index++) {
2401 " %a: [%016lx, %016lx)\n",
2402 mMtrrMemoryCacheTypeShortName[
MIN (Ranges[Index].Type, CacheInvalid)],
2403 Ranges[Index].BaseAddress,
2404 Ranges[Index].BaseAddress + Ranges[Index].Length
2418 FixedMtrrMemoryLimit = FixedMtrrSupported ? BASE_1MB : 0;
2420 for (Index = 0; Index < RangeCount; Index++) {
2421 if (Ranges[Index].Length == 0) {
2426 if (((Ranges[Index].BaseAddress & ~MtrrValidAddressMask) != 0) ||
2427 ((((Ranges[Index].BaseAddress + Ranges[Index].Length) & ~MtrrValidAddressMask) != 0) &&
2428 ((Ranges[Index].BaseAddress + Ranges[Index].Length) != MtrrValidBitsMask + 1))
2439 if ((Ranges[Index].Type != CacheUncacheable) &&
2440 (Ranges[Index].Type != CacheWriteCombining) &&
2441 (Ranges[Index].Type != CacheWriteThrough) &&
2442 (Ranges[Index].Type != CacheWriteProtected) &&
2443 (Ranges[Index].Type != CacheWriteBack))
2449 if (Ranges[Index].BaseAddress + Ranges[Index].Length > FixedMtrrMemoryLimit) {
2450 VariableMtrrNeeded =
TRUE;
2457 if (VariableMtrrNeeded) {
2464 OriginalVariableMtrrCount,
2466 MtrrValidAddressMask,
2467 OriginalVariableMtrr
2471 WorkingRangeCount = 1;
2472 WorkingRanges[0].BaseAddress = 0;
2473 WorkingRanges[0].Length = MtrrValidBitsMask + 1;
2474 WorkingRanges[0].Type = DefaultType;
2477 OriginalVariableMtrr,
2478 OriginalVariableMtrrCount,
2485 ASSERT (OriginalVariableMtrrCount >=
PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs));
2486 FirmwareVariableMtrrCount = OriginalVariableMtrrCount -
PcdGet32 (PcdCpuNumberOfReservedVariableMtrrs);
2487 ASSERT (WorkingRangeCount <= 2 * FirmwareVariableMtrrCount + 1);
2492 if (FixedMtrrMemoryLimit != 0) {
2498 FixedMtrrMemoryLimit,
2508 for (Index = 0; Index < RangeCount; Index++) {
2509 BaseAddress = Ranges[Index].BaseAddress;
2510 Length = Ranges[Index].Length;
2511 if (BaseAddress < FixedMtrrMemoryLimit) {
2512 if (Length <= FixedMtrrMemoryLimit - BaseAddress) {
2516 Length -= FixedMtrrMemoryLimit - BaseAddress;
2517 BaseAddress = FixedMtrrMemoryLimit;
2550 WorkingVariableMtrr,
2551 FirmwareVariableMtrrCount + 1,
2552 &WorkingVariableMtrrCount
2561 for (Index = 0; Index < WorkingVariableMtrrCount; Index++) {
2562 if ((WorkingVariableMtrr[Index].BaseAddress == 0) && (WorkingVariableMtrr[Index].Length == FixedMtrrMemoryLimit)) {
2563 ASSERT (WorkingVariableMtrr[Index].Type == CacheUncacheable);
2564 WorkingVariableMtrrCount--;
2566 &WorkingVariableMtrr[Index],
2567 &WorkingVariableMtrr[Index + 1],
2568 (WorkingVariableMtrrCount - Index) *
sizeof (WorkingVariableMtrr[0])
2574 if (WorkingVariableMtrrCount > FirmwareVariableMtrrCount) {
2584 OriginalVariableMtrr,
2585 OriginalVariableMtrrCount,
2586 WorkingVariableMtrr,
2587 WorkingVariableMtrrCount,
2588 VariableSettingModified
2598 ZeroMem (ClearMasks,
sizeof (ClearMasks));
2599 ZeroMem (OrMasks,
sizeof (OrMasks));
2600 for (Index = 0; Index < RangeCount; Index++) {
2601 if (Ranges[Index].BaseAddress >= FixedMtrrMemoryLimit) {
2608 Ranges[Index].BaseAddress,
2609 Ranges[Index].Length,
2617 MtrrContextValid =
FALSE;
2621 for (Index = 0; Index <
ARRAY_SIZE (ClearMasks); Index++) {
2622 if (ClearMasks[Index] != 0) {
2623 if (MtrrSetting !=
NULL) {
2628 MtrrSetting->Fixed.Mtrr[Index] = (MtrrSetting->Fixed.Mtrr[Index] & ~ClearMasks[Index]) | OrMasks[Index];
2630 if (!MtrrContextValid) {
2635 MtrrContext.DefType.
Bits.
FE = 1;
2636 MtrrContextValid =
TRUE;
2639 AsmMsrAndThenOr64 (mMtrrLibFixedMtrrTable[Index].Msr, ~ClearMasks[Index], OrMasks[Index]);
2647 for (Index = 0; Index < OriginalVariableMtrrCount; Index++) {
2648 if (VariableSettingModified[Index]) {
2649 if (OriginalVariableMtrr[Index].Length != 0) {
2650 VariableSetting.Base = (OriginalVariableMtrr[Index].BaseAddress & MtrrValidAddressMask)
2651 | (UINT8)OriginalVariableMtrr[Index].Type;
2652 VariableSetting.Mask = ((~(OriginalVariableMtrr[Index].Length - 1)) & MtrrValidAddressMask) | BIT11;
2654 VariableSetting.Base = 0;
2655 VariableSetting.Mask = 0;
2658 if (MtrrSetting !=
NULL) {
2659 CopyMem (&MtrrSetting->Variables.Mtrr[Index], &VariableSetting, sizeof (VariableSetting));
2661 if (!MtrrContextValid) {
2663 MtrrContextValid =
TRUE;
2668 VariableSetting.Base
2672 VariableSetting.Mask
2678 if (MtrrSetting !=
NULL) {
2684 if (MtrrContextValid) {
2690 DEBUG ((DEBUG_CACHE,
" Result = %r\n", Status));
2731 IN PHYSICAL_ADDRESS BaseAddress,
2733 IN MTRR_MEMORY_CACHE_TYPE Attribute
2736 UINT8 Scratch[SCRATCH_BUFFER_SIZE];
2740 Range.BaseAddress = BaseAddress;
2741 Range.Length = Length;
2742 Range.Type = Attribute;
2743 ScratchSize =
sizeof (Scratch);
2783 IN PHYSICAL_ADDRESS BaseAddress,
2785 IN MTRR_MEMORY_CACHE_TYPE Attribute
2803 UINT32 VariableMtrrCount;
2806 ASSERT (VariableMtrrCount <=
ARRAY_SIZE (VariableSettings->Mtrr));
2808 for (Index = 0; Index < VariableMtrrCount; Index++) {
2811 VariableSettings->Mtrr[Index].Base
2815 VariableSettings->Mtrr[Index].Mask
2833 for (Index = 0; Index < MTRR_NUMBER_OF_FIXED_MTRR; Index++) {
2835 mMtrrLibFixedMtrrTable[Index].Msr,
2836 FixedSettings->Mtrr[Index]
2855 BOOLEAN FixedMtrrSupported;
2856 UINT32 VariableMtrrCount;
2859 ZeroMem (MtrrSetting,
sizeof (*MtrrSetting));
2874 ASSERT (FixedMtrrSupported || (MtrrDefType->
Bits.
FE == 0));
2879 if (MtrrDefType->
Bits.
FE == 1) {
2889 &MtrrSetting->Variables
2912 BOOLEAN FixedMtrrSupported;
2926 ASSERT (FixedMtrrSupported || (MtrrDefType->
Bits.
FE == 0));
2932 if (FixedMtrrSupported) {
2990 RETURN_STATUS Status;
2994 UINTN LocalRangeCount;
2995 UINT64 MtrrValidBitsMask;
2996 UINT64 MtrrValidAddressMask;
2997 UINT32 VariableMtrrCount;
3000 ARRAY_SIZE (mMtrrLibFixedMtrrTable) *
sizeof (UINT64) + 2 *
ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
3003 if (RangeCount ==
NULL) {
3007 if ((*RangeCount != 0) && (Ranges ==
NULL)) {
3011 if (MtrrSetting !=
NULL) {
3012 Mtrrs = MtrrSetting;
3015 Mtrrs = &LocalMtrrs;
3020 LocalRangeCount = 1;
3022 LocalRanges[0].BaseAddress = 0;
3023 LocalRanges[0].Length = MtrrValidBitsMask + 1;
3025 if (MtrrDefType->
Bits.
E == 0) {
3026 LocalRanges[0].Type = CacheUncacheable;
3031 ASSERT (VariableMtrrCount <=
ARRAY_SIZE (MtrrSetting->Variables.Mtrr));
3037 MtrrValidAddressMask,
3049 if (MtrrDefType->
Bits.
FE == 1) {
3054 if (*RangeCount < LocalRangeCount) {
3055 *RangeCount = LocalRangeCount;
3059 CopyMem (Ranges, LocalRanges, LocalRangeCount *
sizeof (LocalRanges[0]));
3060 *RangeCount = LocalRangeCount;
3082 RETURN_STATUS Status;
3084 BOOLEAN ContainVariableMtrr;
3086 ARRAY_SIZE (mMtrrLibFixedMtrrTable) *
sizeof (UINT64) + 2 *
ARRAY_SIZE (Mtrrs->Variables.Mtrr) + 1
3089 if (MtrrSetting !=
NULL) {
3090 Mtrrs = MtrrSetting;
3093 Mtrrs = &LocalMtrrs;
3099 DEBUG ((DEBUG_CACHE,
"MTRR is not enabled.\n"));
3106 DEBUG ((DEBUG_CACHE,
"MTRR Settings:\n"));
3107 DEBUG ((DEBUG_CACHE,
"=============\n"));
3108 DEBUG ((DEBUG_CACHE,
"MTRR Default Type: %016lx\n", Mtrrs->MtrrDefType));
3109 for (Index = 0; Index <
ARRAY_SIZE (mMtrrLibFixedMtrrTable); Index++) {
3110 DEBUG ((DEBUG_CACHE,
"Fixed MTRR[%02d] : %016lx\n", Index, Mtrrs->Fixed.Mtrr[Index]));
3113 ContainVariableMtrr =
FALSE;
3114 for (Index = 0; Index <
ARRAY_SIZE (Mtrrs->Variables.Mtrr); Index++) {
3115 if ((Mtrrs->Variables.Mtrr[Index].Mask & BIT11) == 0) {
3122 ContainVariableMtrr =
TRUE;
3125 "Variable MTRR[%02d]: Base=%016lx Mask=%016lx\n",
3127 Mtrrs->Variables.Mtrr[Index].Base,
3128 Mtrrs->Variables.Mtrr[Index].Mask
3132 if (!ContainVariableMtrr) {
3133 DEBUG ((DEBUG_CACHE,
"Variable MTRR : None.\n"));
3136 DEBUG ((DEBUG_CACHE,
"\n"));
3141 DEBUG ((DEBUG_CACHE,
"Memory Ranges:\n"));
3142 DEBUG ((DEBUG_CACHE,
"====================================\n"));
3143 for (Index = 0; Index < RangeCount; Index++) {
3146 "%a:%016lx-%016lx\n",
3147 mMtrrMemoryCacheTypeShortName[Ranges[Index].Type],
3148 Ranges[Index].BaseAddress,
3149 Ranges[Index].BaseAddress + Ranges[Index].Length - 1
BOOLEAN EFIAPI SetInterruptState(IN BOOLEAN InterruptState)
BOOLEAN EFIAPI SaveAndDisableInterrupts(VOID)
UINT64 EFIAPI RShiftU64(IN UINT64 Operand, IN UINTN Count)
UINT64 EFIAPI MultU64x32(IN UINT64 Multiplicand, IN UINT32 Multiplier)
UINT64 EFIAPI GetPowerOfTwo64(IN UINT64 Operand)
UINT64 EFIAPI LShiftU64(IN UINT64 Operand, IN UINTN Count)
INTN EFIAPI HighBitSet64(IN UINT64 Operand)
VOID *EFIAPI CopyMem(OUT VOID *DestinationBuffer, IN CONST VOID *SourceBuffer, IN UINTN Length)
VOID *EFIAPI SetMem(OUT VOID *Buffer, IN UINTN Length, IN UINT8 Value)
VOID *EFIAPI ZeroMem(OUT VOID *Buffer, IN UINTN Length)
UINT32 EFIAPI AsmCpuidEx(IN UINT32 Index, IN UINT32 SubIndex, OUT UINT32 *RegisterEax OPTIONAL, OUT UINT32 *RegisterEbx OPTIONAL, OUT UINT32 *RegisterEcx OPTIONAL, OUT UINT32 *RegisterEdx OPTIONAL)
VOID EFIAPI CpuFlushTlb(VOID)
VOID EFIAPI AsmDisableCache(VOID)
VOID EFIAPI AsmEnableCache(VOID)
UINT64 EFIAPI AsmReadMsr64(IN UINT32 Index)
UINTN EFIAPI AsmWriteCr4(UINTN Cr4)
UINT64 EFIAPI AsmWriteMsr64(IN UINT32 Index, IN UINT64 Value)
UINTN EFIAPI AsmReadCr4(VOID)
#define RETURN_BUFFER_TOO_SMALL
#define RETURN_ERROR(StatusCode)
#define RETURN_UNSUPPORTED
#define RETURN_OUT_OF_RESOURCES
#define RETURN_ALREADY_STARTED
#define ARRAY_SIZE(Array)
#define RETURN_INVALID_PARAMETER
#define GLOBAL_REMOVE_IF_UNREFERENCED
#define ASSERT_RETURN_ERROR(StatusParameter)
#define DEBUG_CODE_BEGIN()
#define DEBUG(Expression)
#define MSR_IA32_MTRR_PHYSBASE0
#define MSR_IA32_MTRR_FIX4K_E0000
#define MSR_IA32_MTRR_DEF_TYPE
#define MSR_IA32_MTRR_FIX4K_C8000
#define MSR_IA32_MTRR_FIX4K_E8000
#define MSR_IA32_MTRR_FIX4K_F8000
#define MSR_IA32_MTRR_FIX16K_80000
#define MSR_IA32_MTRR_FIX16K_A0000
#define MSR_IA32_MTRR_FIX4K_D0000
#define MSR_IA32_MTRR_PHYSMASK0
#define MSR_IA32_MTRR_FIX64K_00000
#define MSR_IA32_TME_ACTIVATE
#define MSR_IA32_MTRR_FIX4K_D8000
#define MSR_IA32_MTRR_FIX4K_C0000
#define MSR_IA32_MTRR_FIX4K_F0000
#define CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS
#define CPUID_VERSION_INFO
UINT32 EFIAPI AsmCpuid(IN UINT32 Index, OUT UINT32 *RegisterEax OPTIONAL, OUT UINT32 *RegisterEbx OPTIONAL, OUT UINT32 *RegisterEcx OPTIONAL, OUT UINT32 *RegisterEdx OPTIONAL)
BOOLEAN EFIAPI TdIsEnabled()
MTRR_MEMORY_CACHE_TYPE MtrrGetDefaultMemoryTypeWorker(IN CONST MTRR_SETTINGS *MtrrSetting)
RETURN_STATUS MtrrLibApplyFixedMtrrs(IN CONST MTRR_FIXED_SETTINGS *Fixed, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
VOID MtrrLibCalculateLeastMtrrs(IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT CONST UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN BOOLEAN IncludeOptional)
RETURN_STATUS MtrrLibCalculateSubtractivePath(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT16 VertexCount, IN MTRR_LIB_ADDRESS *Vertices, IN OUT UINT8 *Weight, IN UINT16 Start, IN UINT16 Stop, IN UINT8 Types, IN UINT8 TypeCount, IN OUT MTRR_MEMORY_RANGE *Mtrrs OPTIONAL, IN UINT32 MtrrCapacity OPTIONAL, IN OUT UINT32 *MtrrCount OPTIONAL)
UINT32 EFIAPI MtrrGetMemoryAttributeInVariableMtrr(IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetMemoryAttribute(IN PHYSICAL_ADDRESS Address)
VOID MtrrLibInitializeMtrrMask(OUT UINT64 *MtrrValidBitsMask, OUT UINT64 *MtrrValidAddressMask)
UINT32 MtrrGetMemoryAttributeInVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT VARIABLE_MTRR *VariableMtrr)
UINT32 GetVariableMtrrCountWorker(VOID)
RETURN_STATUS MtrrLibApplyVariableMtrrs(IN CONST MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCount, IN OUT MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCapacity, IN OUT UINTN *RangeCount)
VOID MtrrSetFixedMtrrWorker(IN MTRR_FIXED_SETTINGS *FixedSettings)
VOID EFIAPI MtrrDebugPrintAllMtrrs(VOID)
MTRR_SETTINGS *EFIAPI MtrrSetAllMtrrs(IN MTRR_SETTINGS *MtrrSetting)
RETURN_STATUS EFIAPI MtrrGetMemoryAttributesInMtrrSettings(IN CONST MTRR_SETTINGS *MtrrSetting OPTIONAL, OUT MTRR_MEMORY_RANGE *Ranges, IN OUT UINTN *RangeCount)
RETURN_STATUS MtrrLibAppendVariableMtrr(IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
MTRR_MEMORY_CACHE_TYPE MtrrLibLowestType(IN UINT8 TypeBits)
VOID MtrrLibPreMtrrChange(OUT MTRR_CONTEXT *MtrrContext)
VOID MtrrLibMergeVariableMtrr(MTRR_MEMORY_RANGE *DstMtrrs, UINT32 DstMtrrCount, MTRR_MEMORY_RANGE *SrcMtrrs, UINT32 SrcMtrrCount, BOOLEAN *Modified)
UINT64 MtrrLibBiggestAlignment(UINT64 Address, UINT64 Alignment0)
RETURN_STATUS MtrrLibCalculateMtrrs(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN OUT MTRR_MEMORY_RANGE *Mtrrs, IN UINT32 MtrrCapacity, IN OUT UINT32 *MtrrCount)
UINT8 MtrrLibGetNumberOfTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN UINT64 BaseAddress, IN UINT64 Length, IN OUT UINT8 *Types OPTIONAL)
RETURN_STATUS EFIAPI MtrrSetMemoryAttributeInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
UINT32 EFIAPI GetVariableMtrrCount(VOID)
UINT32 EFIAPI GetFirmwareVariableMtrrCount(VOID)
VOID MtrrLibPostMtrrChangeEnableCache(IN MTRR_CONTEXT *MtrrContext)
BOOLEAN EFIAPI IsMtrrSupported(VOID)
MTRR_MEMORY_CACHE_TYPE MtrrGetMemoryAttributeByAddressWorker(IN MTRR_SETTINGS *MtrrSetting, IN PHYSICAL_ADDRESS Address)
RETURN_STATUS EFIAPI MtrrSetMemoryAttribute(IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Attribute)
VOID MtrrSetVariableMtrrWorker(IN MTRR_VARIABLE_SETTINGS *VariableSettings)
UINT32 GetFirmwareVariableMtrrCountWorker(VOID)
MTRR_FIXED_SETTINGS *EFIAPI MtrrGetFixedMtrr(OUT MTRR_FIXED_SETTINGS *FixedSettings)
UINT8 MtrrLibGetCompatibleTypes(IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
RETURN_STATUS MtrrLibSetBelow1MBMemoryAttribute(IN OUT UINT64 *ClearMasks, IN OUT UINT64 *OrMasks, IN PHYSICAL_ADDRESS BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
RETURN_STATUS MtrrLibProgramFixedMtrr(IN MTRR_MEMORY_CACHE_TYPE Type, IN OUT UINT64 *Base, IN OUT UINT64 *Length, IN OUT UINT32 *LastMsrIndex, OUT UINT64 *ClearMask, OUT UINT64 *OrMask)
MTRR_VARIABLE_SETTINGS * MtrrGetVariableMtrrWorker(IN MTRR_SETTINGS *MtrrSetting, IN UINT32 VariableMtrrCount, OUT MTRR_VARIABLE_SETTINGS *VariableSettings)
MTRR_MEMORY_CACHE_TYPE EFIAPI MtrrGetDefaultMemoryType(VOID)
RETURN_STATUS EFIAPI MtrrSetMemoryAttributesInMtrrSettings(IN OUT MTRR_SETTINGS *MtrrSetting, IN VOID *Scratch, IN OUT UINTN *ScratchSize, IN CONST MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount)
UINT32 MtrrLibGetRawVariableRanges(IN CONST MTRR_VARIABLE_SETTINGS *VariableSettings, IN UINTN VariableMtrrCount, IN UINT64 MtrrValidBitsMask, IN UINT64 MtrrValidAddressMask, OUT MTRR_MEMORY_RANGE *VariableMtrr)
MTRR_MEMORY_CACHE_TYPE MtrrLibPrecedence(IN MTRR_MEMORY_CACHE_TYPE MtrrType1, IN MTRR_MEMORY_CACHE_TYPE MtrrType2)
BOOLEAN MtrrLibIsMtrrSupported(OUT BOOLEAN *FixedMtrrSupported OPTIONAL, OUT UINT32 *VariableMtrrCount OPTIONAL)
MTRR_SETTINGS *EFIAPI MtrrGetAllMtrrs(OUT MTRR_SETTINGS *MtrrSetting)
BOOLEAN MtrrLibTypeLeftPrecedeRight(IN MTRR_MEMORY_CACHE_TYPE Left, IN MTRR_MEMORY_CACHE_TYPE Right)
RETURN_STATUS MtrrLibSetMemoryType(IN MTRR_MEMORY_RANGE *Ranges, IN UINTN Capacity, IN OUT UINTN *Count, IN UINT64 BaseAddress, IN UINT64 Length, IN MTRR_MEMORY_CACHE_TYPE Type)
RETURN_STATUS MtrrLibSetMemoryRanges(IN MTRR_MEMORY_CACHE_TYPE DefaultType, IN UINT64 A0, IN MTRR_MEMORY_RANGE *Ranges, IN UINTN RangeCount, IN VOID *Scratch, IN OUT UINTN *ScratchSize, OUT MTRR_MEMORY_RANGE *VariableMtrr, IN UINT32 VariableMtrrCapacity, OUT UINT32 *VariableMtrrCount)
MTRR_FIXED_SETTINGS * MtrrGetFixedMtrrWorker(OUT MTRR_FIXED_SETTINGS *FixedSettings)
VOID MtrrLibPostMtrrChange(IN MTRR_CONTEXT *MtrrContext)
VOID MtrrDebugPrintAllMtrrsWorker(IN MTRR_SETTINGS *MtrrSetting)
#define PcdGet32(TokenName)
VOID EFIAPI Exit(IN EFI_STATUS Status)
UINT64 EFIAPI AsmMsrAndThenOr64(IN UINT32 Index, IN UINT64 AndData, IN UINT64 OrData)
struct CPUID_STRUCTURED_EXTENDED_FEATURE_FLAGS_ECX::@709 Bits
struct CPUID_VIR_PHY_ADDRESS_SIZE_EAX::@753 Bits
UINT32 PhysicalAddressBits
struct MSR_IA32_MTRR_DEF_TYPE_REGISTER::@653 Bits
struct MSR_IA32_MTRRCAP_REGISTER::@631 Bits
struct MSR_IA32_TME_ACTIVATE_REGISTER::@680 Bits