CpuPageTableMap.c
#include "CpuPageTable.h"

VOID
PageTableLibSetPte4K (
  IN OUT volatile IA32_PTE_4K  *Pte4K,
  IN UINT64                    Offset,
  IN IA32_MAP_ATTRIBUTE        *Attribute,
  IN IA32_MAP_ATTRIBUTE        *Mask
  )
{
  IA32_PTE_4K  LocalPte4K;

  LocalPte4K.Uint64 = Pte4K->Uint64;
  if (Mask->Bits.PageTableBaseAddressLow || Mask->Bits.PageTableBaseAddressHigh) {
    LocalPte4K.Uint64 = (IA32_MAP_ATTRIBUTE_PAGE_TABLE_BASE_ADDRESS (Attribute) + Offset) | (LocalPte4K.Uint64 & ~IA32_PE_BASE_ADDRESS_MASK_40);
  }

  if (Mask->Bits.Present) {
    LocalPte4K.Bits.Present = Attribute->Bits.Present;
  }

  if (Mask->Bits.ReadWrite) {
    LocalPte4K.Bits.ReadWrite = Attribute->Bits.ReadWrite;
  }

  if (Mask->Bits.UserSupervisor) {
    LocalPte4K.Bits.UserSupervisor = Attribute->Bits.UserSupervisor;
  }

  if (Mask->Bits.WriteThrough) {
    LocalPte4K.Bits.WriteThrough = Attribute->Bits.WriteThrough;
  }

  if (Mask->Bits.CacheDisabled) {
    LocalPte4K.Bits.CacheDisabled = Attribute->Bits.CacheDisabled;
  }

  if (Mask->Bits.Accessed) {
    LocalPte4K.Bits.Accessed = Attribute->Bits.Accessed;
  }

  if (Mask->Bits.Dirty) {
    LocalPte4K.Bits.Dirty = Attribute->Bits.Dirty;
  }

  if (Mask->Bits.Pat) {
    LocalPte4K.Bits.Pat = Attribute->Bits.Pat;
  }

  if (Mask->Bits.Global) {
    LocalPte4K.Bits.Global = Attribute->Bits.Global;
  }

  if (Mask->Bits.ProtectionKey) {
    LocalPte4K.Bits.ProtectionKey = Attribute->Bits.ProtectionKey;
  }

  if (Mask->Bits.Nx) {
    LocalPte4K.Bits.Nx = Attribute->Bits.Nx;
  }

  if (Pte4K->Uint64 != LocalPte4K.Uint64) {
    Pte4K->Uint64 = LocalPte4K.Uint64;
  }
}

VOID
PageTableLibSetPleB (
  IN OUT volatile IA32_PAGE_LEAF_ENTRY_BIG_PAGESIZE  *PleB,
  IN UINT64                                          Offset,
  IN IA32_MAP_ATTRIBUTE                              *Attribute,
  IN IA32_MAP_ATTRIBUTE                              *Mask
  )
{
  IA32_PAGE_LEAF_ENTRY_BIG_PAGESIZE  LocalPleB;

  LocalPleB.Uint64 = PleB->Uint64;
  if (Mask->Bits.PageTableBaseAddressLow || Mask->Bits.PageTableBaseAddressHigh) {
    LocalPleB.Uint64 = (IA32_MAP_ATTRIBUTE_PAGE_TABLE_BASE_ADDRESS (Attribute) + Offset) | (LocalPleB.Uint64 & ~IA32_PE_BASE_ADDRESS_MASK_39);
  }

  LocalPleB.Bits.MustBeOne = 1;

  if (Mask->Bits.Present) {
    LocalPleB.Bits.Present = Attribute->Bits.Present;
  }

  if (Mask->Bits.ReadWrite) {
    LocalPleB.Bits.ReadWrite = Attribute->Bits.ReadWrite;
  }

  if (Mask->Bits.UserSupervisor) {
    LocalPleB.Bits.UserSupervisor = Attribute->Bits.UserSupervisor;
  }

  if (Mask->Bits.WriteThrough) {
    LocalPleB.Bits.WriteThrough = Attribute->Bits.WriteThrough;
  }

  if (Mask->Bits.CacheDisabled) {
    LocalPleB.Bits.CacheDisabled = Attribute->Bits.CacheDisabled;
  }

  if (Mask->Bits.Accessed) {
    LocalPleB.Bits.Accessed = Attribute->Bits.Accessed;
  }

  if (Mask->Bits.Dirty) {
    LocalPleB.Bits.Dirty = Attribute->Bits.Dirty;
  }

  if (Mask->Bits.Pat) {
    LocalPleB.Bits.Pat = Attribute->Bits.Pat;
  }

  if (Mask->Bits.Global) {
    LocalPleB.Bits.Global = Attribute->Bits.Global;
  }

  if (Mask->Bits.ProtectionKey) {
    LocalPleB.Bits.ProtectionKey = Attribute->Bits.ProtectionKey;
  }

  if (Mask->Bits.Nx) {
    LocalPleB.Bits.Nx = Attribute->Bits.Nx;
  }

  if (PleB->Uint64 != LocalPleB.Uint64) {
    PleB->Uint64 = LocalPleB.Uint64;
  }
}

VOID
PageTableLibSetPle (
  IN UINTN                           Level,
  IN OUT volatile IA32_PAGING_ENTRY  *Ple,
  IN UINT64                          Offset,
  IN IA32_MAP_ATTRIBUTE              *Attribute,
  IN IA32_MAP_ATTRIBUTE              *Mask
  )
{
  if (Level == 1) {
    PageTableLibSetPte4K (&Ple->Pte4K, Offset, Attribute, Mask);
  } else {
    ASSERT (Level == 2 || Level == 3);
    PageTableLibSetPleB (&Ple->PleB, Offset, Attribute, Mask);
  }
}

VOID
PageTableLibSetPnle (
  IN OUT volatile IA32_PAGE_NON_LEAF_ENTRY  *Pnle,
  IN IA32_MAP_ATTRIBUTE                     *Attribute,
  IN IA32_MAP_ATTRIBUTE                     *Mask
  )
{
  IA32_PAGE_NON_LEAF_ENTRY  LocalPnle;

  LocalPnle.Uint64 = Pnle->Uint64;
  if (Mask->Bits.Present) {
    LocalPnle.Bits.Present = Attribute->Bits.Present;
  }

  if (Mask->Bits.ReadWrite) {
    LocalPnle.Bits.ReadWrite = Attribute->Bits.ReadWrite;
  }

  if (Mask->Bits.UserSupervisor) {
    LocalPnle.Bits.UserSupervisor = Attribute->Bits.UserSupervisor;
  }

  if (Mask->Bits.Nx) {
    LocalPnle.Bits.Nx = Attribute->Bits.Nx;
  }

  LocalPnle.Bits.Accessed   = 0;
  LocalPnle.Bits.MustBeZero = 0;

  //
  // Set the attributes (WT, CD, A) to 0.
  // WT and CD determine the memory type used to access the 4K page directory referenced by this entry.
  // So, it implicitly requires that PAT[0] is Write Back.
  // Create a new parameter if the caller requires a different memory type for accessing page directories.
  //
  LocalPnle.Bits.WriteThrough  = 0;
  LocalPnle.Bits.CacheDisabled = 0;
  if (Pnle->Uint64 != LocalPnle.Uint64) {
    Pnle->Uint64 = LocalPnle.Uint64;
  }
}

RETURN_STATUS
IsAttributesAndMaskValidForNonPresentEntry (
  IN IA32_MAP_ATTRIBUTE  *Attribute,
  IN IA32_MAP_ATTRIBUTE  *Mask
  )
{
  if ((Mask->Bits.Present == 1) && (Attribute->Bits.Present == 1)) {
    //
    // Creating a new page table or remapping a non-present range to present.
    //
    if ((Mask->Bits.ReadWrite == 0) || (Mask->Bits.UserSupervisor == 0) || (Mask->Bits.WriteThrough == 0) || (Mask->Bits.CacheDisabled == 0) ||
        (Mask->Bits.Accessed == 0) || (Mask->Bits.Dirty == 0) || (Mask->Bits.Pat == 0) || (Mask->Bits.Global == 0) ||
        ((Mask->Bits.PageTableBaseAddressLow == 0) && (Mask->Bits.PageTableBaseAddressHigh == 0)) || (Mask->Bits.ProtectionKey == 0) || (Mask->Bits.Nx == 0))
    {
      return RETURN_INVALID_PARAMETER;
    }
  } else if ((Mask->Bits.Present == 0) && (Mask->Uint64 > 1)) {
    //
    // Changing attributes other than Present for a non-present range is not permitted.
    //
    return RETURN_INVALID_PARAMETER;
  }

  return RETURN_SUCCESS;
}
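
//
// Illustrative sketch only (not part of the upstream file, not referenced by the library):
// two Attribute/Mask combinations that pass the validation above.
// Marking a range non-present uses a mask whose only set bit is Present; creating a present
// mapping supplies every attribute field, so an all-ones mask is used. For a real mapping the
// physical base address would additionally be placed in the PageTableBaseAddress fields of Attribute.
//
STATIC
VOID
PageTableLibAttributeMaskSketch (
  VOID
  )
{
  IA32_MAP_ATTRIBUTE  Attribute;
  IA32_MAP_ATTRIBUTE  Mask;

  //
  // Case 1: map a range as non-present. Only the Present bit may be selected by the mask.
  //
  Attribute.Uint64  = 0;
  Mask.Uint64       = 0;
  Mask.Bits.Present = 1;
  ASSERT (!RETURN_ERROR (IsAttributesAndMaskValidForNonPresentEntry (&Attribute, &Mask)));

  //
  // Case 2: create a new present mapping. Every attribute and the page table base address
  // must be provided, so the mask covers all fields.
  //
  Attribute.Uint64         = 0;
  Attribute.Bits.Present   = 1;
  Attribute.Bits.ReadWrite = 1;
  Mask.Uint64              = MAX_UINT64;
  ASSERT (!RETURN_ERROR (IsAttributesAndMaskValidForNonPresentEntry (&Attribute, &Mask)));
}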

RETURN_STATUS
PageTableLibMapInLevel (
  IN IA32_PAGING_ENTRY   *ParentPagingEntry,
  IN IA32_MAP_ATTRIBUTE  *ParentAttribute,
  IN BOOLEAN             Modify,
  IN VOID                *Buffer,
  IN OUT INTN            *BufferSize,
  IN IA32_PAGE_LEVEL     Level,
  IN IA32_PAGE_LEVEL     MaxLeafLevel,
  IN UINT64              LinearAddress,
  IN UINT64              Length,
  IN UINT64              Offset,
  IN IA32_MAP_ATTRIBUTE  *Attribute,
  IN IA32_MAP_ATTRIBUTE  *Mask,
  IN OUT BOOLEAN         *IsModified
  )
{
  RETURN_STATUS       Status;
  UINTN               BitStart;
  UINTN               Index;
  IA32_PAGING_ENTRY   *PagingEntry;
  UINTN               PagingEntryIndex;
  UINTN               PagingEntryIndexEnd;
  IA32_PAGING_ENTRY   *CurrentPagingEntry;
  UINT64              RegionLength;
  UINT64              SubLength;
  UINT64              SubOffset;
  UINT64              RegionMask;
  UINT64              RegionStart;
  IA32_MAP_ATTRIBUTE  AllOneMask;
  IA32_MAP_ATTRIBUTE  PleBAttribute;
  IA32_MAP_ATTRIBUTE  NopAttribute;
  BOOLEAN             CreateNew;
  IA32_PAGING_ENTRY   OneOfPagingEntry;
  IA32_MAP_ATTRIBUTE  ChildAttribute;
  IA32_MAP_ATTRIBUTE  ChildMask;
  IA32_MAP_ATTRIBUTE  CurrentMask;
  IA32_MAP_ATTRIBUTE  LocalParentAttribute;
  UINT64              PhysicalAddrInEntry;
  UINT64              PhysicalAddrInAttr;
  IA32_PAGING_ENTRY   OriginalParentPagingEntry;
  IA32_PAGING_ENTRY   OriginalCurrentPagingEntry;
  IA32_PAGING_ENTRY   TempPagingEntry;

  ASSERT (Level != 0);
  ASSERT ((Attribute != NULL) && (Mask != NULL));

  CreateNew         = FALSE;
  AllOneMask.Uint64 = ~0ull;

  NopAttribute.Uint64              = 0;
  NopAttribute.Bits.Present        = 1;
  NopAttribute.Bits.ReadWrite      = 1;
  NopAttribute.Bits.UserSupervisor = 1;

  LocalParentAttribute.Uint64 = ParentAttribute->Uint64;
  ParentAttribute             = &LocalParentAttribute;

  OriginalParentPagingEntry.Uint64 = ParentPagingEntry->Uint64;
  OneOfPagingEntry.Uint64          = 0;
  TempPagingEntry.Uint64           = 0;

  //
  // RegionLength: 256T (1 << 48), 512G (1 << 39), 1G (1 << 30), 2M (1 << 21) or 4K (1 << 12).
  //
  BitStart         = 12 + (Level - 1) * 9;
  PagingEntryIndex = (UINTN)BitFieldRead64 (LinearAddress + Offset, BitStart, BitStart + 9 - 1);
  RegionLength     = REGION_LENGTH (Level);
  RegionMask       = RegionLength - 1;

  //
  // ParentPagingEntry is ONLY dereferenced to check the Present and MustBeOne bits
  // when Modify is FALSE.
  //
  if ((ParentPagingEntry->Pce.Present == 0) || IsPle (ParentPagingEntry, Level + 1)) {
    //
    // When ParentPagingEntry is non-present, the parent entry is CR3 or PML5E/PML4E/PDPTE/PDE.
    // It does NOT point to an existing page directory.
    // When ParentPagingEntry is present, the parent entry is a leaf PDPTE_1G or PDE_2M. Split it to 2M or 4K pages.
    // Note: it's impossible for the parent entry to be a PTE_4K.
    //
    PleBAttribute.Uint64 = PageTableLibGetPleBMapAttribute (&ParentPagingEntry->PleB, ParentAttribute);
    if (ParentPagingEntry->Pce.Present == 0) {
      //
      // [LinearAddress, LinearAddress + Length] contains a non-present range.
      //
      Status = IsAttributesAndMaskValidForNonPresentEntry (Attribute, Mask);
      if (RETURN_ERROR (Status)) {
        return Status;
      }
    } else {
      PageTableLibSetPle (Level, &OneOfPagingEntry, 0, &PleBAttribute, &AllOneMask);
    }

    //
    // Check if the attribute and the physical address calculated from ParentPagingEntry are equal to
    // the attribute and the physical address calculated from the input Attribute and Mask.
    //
    if ((IA32_MAP_ATTRIBUTE_ATTRIBUTES (&PleBAttribute) & IA32_MAP_ATTRIBUTE_ATTRIBUTES (Mask))
        == (IA32_MAP_ATTRIBUTE_ATTRIBUTES (Attribute) & IA32_MAP_ATTRIBUTE_ATTRIBUTES (Mask)))
    {
      if ((Mask->Bits.PageTableBaseAddressLow == 0) && (Mask->Bits.PageTableBaseAddressHigh == 0)) {
        return RETURN_SUCCESS;
      }

      //
      // A non-present entry won't reach here since:
      // 1. When mapping a non-present entry to present, the attribute must be different.
      // 2. When keeping a non-present entry non-present, PageTableBaseAddressLow and High in Mask must be 0.
      //
      ASSERT (ParentPagingEntry->Pce.Present == 1);
      PhysicalAddrInEntry = IA32_MAP_ATTRIBUTE_PAGE_TABLE_BASE_ADDRESS (&PleBAttribute) + MultU64x32 (RegionLength, (UINT32)PagingEntryIndex);
      PhysicalAddrInAttr  = (IA32_MAP_ATTRIBUTE_PAGE_TABLE_BASE_ADDRESS (Attribute) + Offset) & (~RegionMask);
      if (PhysicalAddrInEntry == PhysicalAddrInAttr) {
        return RETURN_SUCCESS;
      }
    }

    ASSERT (Buffer == NULL || *BufferSize >= SIZE_4KB);
    CreateNew    = TRUE;
    *BufferSize -= SIZE_4KB;

    if (Modify) {
      PagingEntry = (IA32_PAGING_ENTRY *)((UINTN)Buffer + *BufferSize);
      ZeroMem (PagingEntry, SIZE_4KB);

      if (ParentPagingEntry->Pce.Present) {
        //
        // Create 512 child-level entries that map to 2M/4K.
        //
        for (SubOffset = 0, Index = 0; Index < 512; Index++) {
          PagingEntry[Index].Uint64 = OneOfPagingEntry.Uint64 + SubOffset;
          SubOffset                += RegionLength;
        }
      }

      //
      // Set NOP attributes.
      // Note: Should NOT inherit the attributes from the original entry because a zero RW bit
      //       would make the entire region read-only even if the child entries set the RW bit.
      //
      // A non-leaf entry doesn't have a PAT bit. So ~IA32_PE_BASE_ADDRESS_MASK_40 is used to make sure the PAT bit
      // (bit 12) in the original big-leaf entry is not assigned to the PageTableBaseAddress field of the non-leaf entry.
      //
      TempPagingEntry.Uint64 = ParentPagingEntry->Uint64;
      PageTableLibSetPnle (&TempPagingEntry.Pnle, &NopAttribute, &AllOneMask);
      TempPagingEntry.Uint64 = ((UINTN)(VOID *)PagingEntry) | (TempPagingEntry.Uint64 & (~IA32_PE_BASE_ADDRESS_MASK_40));
      *(volatile UINT64 *)&(ParentPagingEntry->Uint64) = TempPagingEntry.Uint64;
    }
  } else {
    //
    // If (LinearAddress + Length - 1) is not in the same ParentPagingEntry as (LinearAddress + Offset), then the remaining child PagingEntries
    // starting from PagingEntryIndex of ParentPagingEntry are all covered by [LinearAddress + Offset, LinearAddress + Length - 1].
    //
    PagingEntryIndexEnd = (BitFieldRead64 (LinearAddress + Length - 1, BitStart + 9, 63) != BitFieldRead64 (LinearAddress + Offset, BitStart + 9, 63)) ? 511 :
                          (UINTN)BitFieldRead64 (LinearAddress + Length - 1, BitStart, BitStart + 9 - 1);
    PagingEntry = (IA32_PAGING_ENTRY *)(UINTN)IA32_PNLE_PAGE_TABLE_BASE_ADDRESS (&ParentPagingEntry->Pnle);
    for (Index = PagingEntryIndex; Index <= PagingEntryIndexEnd; Index++) {
      if (PagingEntry[Index].Pce.Present == 0) {
        //
        // [LinearAddress, LinearAddress + Length] contains a non-present range.
        //
        Status = IsAttributesAndMaskValidForNonPresentEntry (Attribute, Mask);
        if (RETURN_ERROR (Status)) {
          return Status;
        }

        break;
      }
    }

    //
    // It's a non-leaf entry.
    //
    ChildAttribute.Uint64 = 0;
    ChildMask.Uint64      = 0;

    //
    // If the inheritable attributes in the parent entry conflict with the requested attributes,
    // let the child entries take the parent attributes and
    // loosen the attribute in the parent entry.
    // E.g.: when PDPTE[0].ReadWrite = 0 but the caller wants to map [0-2MB] as ReadWrite = 1 (PDE[0].ReadWrite = 1),
    //       we need to change PDPTE[0].ReadWrite = 1 and let all PDE[0-511].ReadWrite = 0 in this step.
    //       when PDPTE[0].Nx = 1 but the caller wants to map [0-2MB] as Nx = 0 (PDE[0].Nx = 0),
    //       we need to change PDPTE[0].Nx = 0 and let all PDE[0-511].Nx = 1 in this step.
    if ((ParentPagingEntry->Pnle.Bits.ReadWrite == 0) && (Mask->Bits.ReadWrite == 1) && (Attribute->Bits.ReadWrite == 1)) {
      if (Modify) {
        ParentPagingEntry->Pnle.Bits.ReadWrite = 1;
      }

      ChildAttribute.Bits.ReadWrite = 0;
      ChildMask.Bits.ReadWrite      = 1;
    }

    if ((ParentPagingEntry->Pnle.Bits.UserSupervisor == 0) && (Mask->Bits.UserSupervisor == 1) && (Attribute->Bits.UserSupervisor == 1)) {
      if (Modify) {
        ParentPagingEntry->Pnle.Bits.UserSupervisor = 1;
      }

      ChildAttribute.Bits.UserSupervisor = 0;
      ChildMask.Bits.UserSupervisor      = 1;
    }

    if ((ParentPagingEntry->Pnle.Bits.Nx == 1) && (Mask->Bits.Nx == 1) && (Attribute->Bits.Nx == 0)) {
      if (Modify) {
        ParentPagingEntry->Pnle.Bits.Nx = 0;
      }

      ChildAttribute.Bits.Nx = 1;
      ChildMask.Bits.Nx      = 1;
    }

    if (ChildMask.Uint64 != 0) {
      if (Modify) {
        //
        // Update child entries to use the restrictive attribute inherited from the parent.
        // e.g.: Set PDE[0-511].ReadWrite = 0
        //
        for (Index = 0; Index < 512; Index++) {
          if (PagingEntry[Index].Pce.Present == 0) {
            continue;
          }

          if (IsPle (&PagingEntry[Index], Level)) {
            PageTableLibSetPle (Level, &PagingEntry[Index], 0, &ChildAttribute, &ChildMask);
          } else {
            PageTableLibSetPnle (&PagingEntry[Index].Pnle, &ChildAttribute, &ChildMask);
          }
        }
      }
    }
  }

  //
  // RegionStart: points to the linear address that's aligned on RegionLength and not higher than (LinearAddress + Offset).
  //
  Index       = PagingEntryIndex;
  RegionStart = (LinearAddress + Offset) & ~RegionMask;
  ParentAttribute->Uint64 = PageTableLibGetPnleMapAttribute (&ParentPagingEntry->Pnle, ParentAttribute);

  //
  // Apply the attribute.
  //
  PagingEntry = (IA32_PAGING_ENTRY *)(UINTN)IA32_PNLE_PAGE_TABLE_BASE_ADDRESS (&ParentPagingEntry->Pnle);
  while (Offset < Length && Index < 512) {
    CurrentPagingEntry = (!Modify && CreateNew) ? &OneOfPagingEntry : &PagingEntry[Index];
    SubLength          = MIN (Length - Offset, RegionStart + RegionLength - (LinearAddress + Offset));
    if ((Level <= MaxLeafLevel) &&
        (((LinearAddress + Offset) & RegionMask) == 0) &&
        (((IA32_MAP_ATTRIBUTE_PAGE_TABLE_BASE_ADDRESS (Attribute) + Offset) & RegionMask) == 0) &&
        (SubLength == RegionLength) &&
        ((CurrentPagingEntry->Pce.Present == 0) || IsPle (CurrentPagingEntry, Level))
        )
    {
      //
      // Create one entry mapping the entire region (1G, 2M or 4K).
      //
      if (Modify) {
        //
        // When the inheritable attributes in the parent entry could override the child attributes,
        // e.g.: Present/ReadWrite/UserSupervisor is 0 in the parent entry, or
        //       Nx is 1 in the parent entry,
        // we just skip setting any value to these attributes in the child.
        // We add assertions to make sure the requested settings don't conflict with the parent attributes in this case.
        //
        CurrentMask.Uint64 = Mask->Uint64;
        if (ParentAttribute->Bits.Present == 0) {
          CurrentMask.Bits.Present = 0;
          ASSERT (CreateNew || (Mask->Bits.Present == 0) || (Attribute->Bits.Present == 0));
        }

        if (ParentAttribute->Bits.ReadWrite == 0) {
          CurrentMask.Bits.ReadWrite = 0;
          ASSERT (CreateNew || (Mask->Bits.ReadWrite == 0) || (Attribute->Bits.ReadWrite == 0));
        }

        if (ParentAttribute->Bits.UserSupervisor == 0) {
          CurrentMask.Bits.UserSupervisor = 0;
          ASSERT (CreateNew || (Mask->Bits.UserSupervisor == 0) || (Attribute->Bits.UserSupervisor == 0));
        }

        if (ParentAttribute->Bits.Nx == 1) {
          CurrentMask.Bits.Nx = 0;
          ASSERT (CreateNew || (Mask->Bits.Nx == 0) || (Attribute->Bits.Nx == 1));
        }

        //
        // Check if any leaf PagingEntry is modified.
        //
        OriginalCurrentPagingEntry.Uint64 = CurrentPagingEntry->Uint64;
        PageTableLibSetPle (Level, CurrentPagingEntry, Offset, Attribute, &CurrentMask);

        if (Modify && (OriginalCurrentPagingEntry.Uint64 != CurrentPagingEntry->Uint64)) {
          //
          // The page table entry can be changed by this function only when Modify is TRUE.
          //
          *IsModified = TRUE;
        }
      }
    } else {
      //
      // Recursively call to create the page table.
      // There are 3 cases:
      //   a. Level cannot be a leaf entry which points to physical memory.
      //   b. Level can be a leaf entry but (LinearAddress + Offset) is NOT aligned on the RegionStart.
      //   c. Level can be a leaf entry and (LinearAddress + Offset) is aligned on RegionStart,
      //      but the length is SMALLER than the RegionLength.
      //
      Status = PageTableLibMapInLevel (
                 CurrentPagingEntry,
                 ParentAttribute,
                 Modify,
                 Buffer,
                 BufferSize,
                 Level - 1,
                 MaxLeafLevel,
                 LinearAddress,
                 Length,
                 Offset,
                 Attribute,
                 Mask,
                 IsModified
                 );
      if (RETURN_ERROR (Status)) {
        return Status;
      }
    }

    Offset      += SubLength;
    RegionStart += RegionLength;
    Index++;
  }

  //
  // Checking here whether the ParentPagingEntry is modified is enough. Except for the changes to leaf PagingEntries during
  // the while loop, if any other change happens in the page table, the ParentPagingEntry must have been modified.
  //
  if (Modify && (OriginalParentPagingEntry.Uint64 != ParentPagingEntry->Uint64)) {
    //
    // The page table entry can be changed by this function only when Modify is TRUE.
    //
    *IsModified = TRUE;
  }

  return RETURN_SUCCESS;
}
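
//
// Illustrative sketch only (not part of the upstream file, not referenced by the library):
// the BitStart/index arithmetic used by PageTableLibMapInLevel, worked as a standalone helper.
// For Level 2 (PDE), BitStart = 12 + (2 - 1) * 9 = 21, so bits [21..29] of the linear address
// select one of the 512 entries and each entry covers RegionLength = 1 << 21 = 2M.
//
STATIC
UINTN
PageTableLibEntryIndexSketch (
  IN UINT64  LinearAddress,
  IN UINTN   Level
  )
{
  UINTN  BitStart;

  //
  // Same formula as in PageTableLibMapInLevel: 4K regions at Level 1, 2M at Level 2,
  // 1G at Level 3, 512G at Level 4, 256T at Level 5.
  //
  BitStart = 12 + (Level - 1) * 9;
  return (UINTN)BitFieldRead64 (LinearAddress, BitStart, BitStart + 9 - 1);
}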

RETURN_STATUS
EFIAPI
PageTableMap (
  IN OUT UINTN           *PageTable OPTIONAL,
  IN PAGING_MODE         PagingMode,
  IN VOID                *Buffer,
  IN OUT UINTN           *BufferSize,
  IN UINT64              LinearAddress,
  IN UINT64              Length,
  IN IA32_MAP_ATTRIBUTE  *Attribute,
  IN IA32_MAP_ATTRIBUTE  *Mask,
  OUT BOOLEAN            *IsModified OPTIONAL
  )
{
  RETURN_STATUS       Status;
  IA32_PAGING_ENTRY   TopPagingEntry;
  INTN                RequiredSize;
  UINT64              MaxLinearAddress;
  IA32_PAGE_LEVEL     MaxLevel;
  IA32_PAGE_LEVEL     MaxLeafLevel;
  IA32_MAP_ATTRIBUTE  ParentAttribute;
  BOOLEAN             LocalIsModified;
  UINTN               Index;
  IA32_PAGING_ENTRY   *PagingEntry;
  UINT8               BufferInStack[SIZE_4KB - 1 + MAX_PAE_PDPTE_NUM * sizeof (IA32_PAGING_ENTRY)];

  if (Length == 0) {
    return RETURN_SUCCESS;
  }

  if ((PagingMode == Paging32bit) || (PagingMode >= PagingModeMax)) {
    //
    // 32bit paging is never supported.
    //
    return RETURN_UNSUPPORTED;
  }

  if ((PageTable == NULL) || (BufferSize == NULL) || (Attribute == NULL) || (Mask == NULL)) {
    return RETURN_INVALID_PARAMETER;
  }

  if (*BufferSize % SIZE_4KB != 0) {
    //
    // BufferSize should be a multiple of 4K.
    //
    return RETURN_INVALID_PARAMETER;
  }

  if (((UINTN)LinearAddress % SIZE_4KB != 0) || ((UINTN)Length % SIZE_4KB != 0)) {
    //
    // LinearAddress and Length should be multiples of 4K.
    //
    return RETURN_INVALID_PARAMETER;
  }

  if ((*BufferSize != 0) && (Buffer == NULL)) {
    return RETURN_INVALID_PARAMETER;
  }

  //
  // To map [LinearAddress, LinearAddress + Length] as non-present,
  // no attribute other than Present may be provided.
  //
  if ((Attribute->Bits.Present == 0) && (Mask->Bits.Present == 1) && (Mask->Uint64 > 1)) {
    return RETURN_INVALID_PARAMETER;
  }

  MaxLeafLevel     = (IA32_PAGE_LEVEL)(UINT8)PagingMode;
  MaxLevel         = (IA32_PAGE_LEVEL)(UINT8)(PagingMode >> 8);
  MaxLinearAddress = (PagingMode == PagingPae) ? LShiftU64 (1, 32) : LShiftU64 (1, 12 + MaxLevel * 9);

  if ((LinearAddress > MaxLinearAddress) || (Length > MaxLinearAddress - LinearAddress)) {
    //
    // Maximum linear address is (1 << 32), (1 << 48) or (1 << 57).
    //
    return RETURN_INVALID_PARAMETER;
  }

  TopPagingEntry.Uintn = *PageTable;
  if (TopPagingEntry.Uintn != 0) {
    if (PagingMode == PagingPae) {
      //
      // Create 4 temporary PDPTEs at a 4K-aligned address.
      // Copy the original PDPTE content and set ReadWrite, UserSupervisor to 1, set Nx to 0.
      //
      TopPagingEntry.Uintn = ALIGN_VALUE ((UINTN)BufferInStack, BASE_4KB);
      PagingEntry          = (IA32_PAGING_ENTRY *)(TopPagingEntry.Uintn);
      CopyMem (PagingEntry, (VOID *)(*PageTable), MAX_PAE_PDPTE_NUM * sizeof (IA32_PAGING_ENTRY));
      for (Index = 0; Index < MAX_PAE_PDPTE_NUM; Index++) {
        PagingEntry[Index].Pnle.Bits.ReadWrite      = 1;
        PagingEntry[Index].Pnle.Bits.UserSupervisor = 1;
        PagingEntry[Index].Pnle.Bits.Nx             = 0;
      }
    }

    TopPagingEntry.Pce.Present        = 1;
    TopPagingEntry.Pce.ReadWrite      = 1;
    TopPagingEntry.Pce.UserSupervisor = 1;
    TopPagingEntry.Pce.Nx             = 0;
  }

  if (IsModified == NULL) {
    IsModified = &LocalIsModified;
  }

  *IsModified = FALSE;

  ParentAttribute.Uint64                       = 0;
  ParentAttribute.Bits.PageTableBaseAddressLow = 1;
  ParentAttribute.Bits.Present                 = 1;
  ParentAttribute.Bits.ReadWrite               = 1;
  ParentAttribute.Bits.UserSupervisor          = 1;
  ParentAttribute.Bits.Nx                      = 0;

  //
  // Query the required buffer size without modifying the page table.
  //
  RequiredSize = 0;
  Status       = PageTableLibMapInLevel (
                   &TopPagingEntry,
                   &ParentAttribute,
                   FALSE,
                   NULL,
                   &RequiredSize,
                   MaxLevel,
                   MaxLeafLevel,
                   LinearAddress,
                   Length,
                   0,
                   Attribute,
                   Mask,
                   IsModified
                   );
  ASSERT (*IsModified == FALSE);
  if (RETURN_ERROR (Status)) {
    return Status;
  }

  RequiredSize = -RequiredSize;

  if ((UINTN)RequiredSize > *BufferSize) {
    *BufferSize = RequiredSize;
    return RETURN_BUFFER_TOO_SMALL;
  }

  if ((RequiredSize != 0) && (Buffer == NULL)) {
    return RETURN_INVALID_PARAMETER;
  }

  //
  // Update the page table when the supplied buffer is sufficient.
  //
  Status = PageTableLibMapInLevel (
             &TopPagingEntry,
             &ParentAttribute,
             TRUE,
             Buffer,
             (INTN *)BufferSize,
             MaxLevel,
             MaxLeafLevel,
             LinearAddress,
             Length,
             0,
             Attribute,
             Mask,
             IsModified
             );

  if (!RETURN_ERROR (Status)) {
    PagingEntry = (IA32_PAGING_ENTRY *)(UINTN)(TopPagingEntry.Uintn & IA32_PE_BASE_ADDRESS_MASK_40);

    if (PagingMode == PagingPae) {
      //
      // These MustBeZero fields are treated as RW and other attributes by the common map logic. So they might be set to 1.
      //
      for (Index = 0; Index < MAX_PAE_PDPTE_NUM; Index++) {
        PagingEntry[Index].PdptePae.Bits.MustBeZero  = 0;
        PagingEntry[Index].PdptePae.Bits.MustBeZero2 = 0;
        PagingEntry[Index].PdptePae.Bits.MustBeZero3 = 0;
      }

      if (*PageTable != 0) {
        //
        // Copy temp PDPTE to original PDPTE.
        //
        CopyMem ((VOID *)(*PageTable), PagingEntry, MAX_PAE_PDPTE_NUM * sizeof (IA32_PAGING_ENTRY));
      }
    }

    if (*PageTable == 0) {
      //
      // Do not assign the *PageTable when it's an existing page table.
      // If it's an existing PAE page table, PagingEntry is the temp buffer in stack.
      //
      *PageTable = (UINTN)PagingEntry;
    }
  }

  return Status;
}
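
//
// Illustrative usage sketch only (not part of the upstream file, not referenced by the library):
// the typical two-pass call pattern for PageTableMap(). The first call with a zero-sized buffer
// only reports the required size via RETURN_BUFFER_TOO_SMALL; the second call performs the update.
// AllocatePages() from MemoryAllocationLib is assumed to be available for the 4K-aligned buffer;
// this file itself does not include that library header. BaseAddress and Length are assumed to be
// 4K-aligned, as PageTableMap() requires.
//
STATIC
RETURN_STATUS
PageTableMapUsageSketch (
  IN OUT UINTN  *PageTable,
  IN UINT64     BaseAddress,
  IN UINT64     Length
  )
{
  RETURN_STATUS       Status;
  VOID                *Buffer;
  UINTN               BufferSize;
  IA32_MAP_ATTRIBUTE  MapAttribute;
  IA32_MAP_ATTRIBUTE  MapMask;

  //
  // Identity-map [BaseAddress, BaseAddress + Length) as present and writable,
  // providing every attribute field via an all-ones mask.
  //
  MapAttribute.Uint64         = BaseAddress;
  MapAttribute.Bits.Present   = 1;
  MapAttribute.Bits.ReadWrite = 1;
  MapMask.Uint64              = MAX_UINT64;

  //
  // Pass 1: query the required buffer size (Buffer = NULL, *BufferSize = 0).
  //
  BufferSize = 0;
  Status     = PageTableMap (PageTable, Paging4Level, NULL, &BufferSize, BaseAddress, Length, &MapAttribute, &MapMask, NULL);
  if (Status == RETURN_BUFFER_TOO_SMALL) {
    //
    // BufferSize is always a multiple of SIZE_4KB, so it converts directly to a page count.
    //
    Buffer = AllocatePages (BufferSize / SIZE_4KB);
    if (Buffer == NULL) {
      return RETURN_OUT_OF_RESOURCES;
    }

    //
    // Pass 2: perform the actual mapping with a sufficient buffer.
    //
    Status = PageTableMap (PageTable, Paging4Level, Buffer, &BufferSize, BaseAddress, Length, &MapAttribute, &MapMask, NULL);
  }

  return Status;
}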