// Xenon/src/paging/pml4.zig

const std = @import("std");
const assert = std.debug.assert;
const PageDirectoryPointer = @import("pdp.zig").PageDirectoryPointer;

pub const PageMapLevel4 = packed struct {
    const Self = @This();

    entries: [512]Entry_,

    /// Interpret the page-aligned physical address held in CR3 as the PML4 table.
    /// Bits 12-51 of CR3 carry the table's physical base address.
    pub fn fromCr3(cr3: u64) *Self {
        return @intToPtr(*Self, cr3 & 0xffffffffff000);
    }

    /// Return the entry mapping `ptr`; bits 39-47 of the virtual address
    /// select one of the 512 PML4 entries.
    pub fn getEntry(self: *Self, ptr: var) *Entry_ {
        const address = @ptrToInt(ptr);
        return &self.entries[address >> 39 & 0x1ff];
    }

    /// Const variant of `getEntry`.
    pub fn getEntryConst(self: *const Self, ptr: var) *const Entry_ {
        const address = @ptrToInt(ptr);
        return &self.entries[address >> 39 & 0x1ff];
    }

    /// Translate `ptr` to its physical address by walking the lower paging
    /// levels, or return null if the PML4 entry is not present.
    pub fn toPhysical(self: *const Self, ptr: var) ?@TypeOf(ptr) {
        const entry = self.getEntryConst(ptr);
        if (!entry.present()) return null;
        return entry.ptr().toPhysical(ptr);
    }

    /// Raw 64-bit entry. Workaround for zig #2627: `Entry`'s packed bitfield
    /// layout cannot be relied on yet, so flag accessors such as `present()`
    /// come from the shared PageEntryMixin instead.
    pub const Entry_ = packed struct {
        comptime {
            // Fires once zig #2627 is fixed, as a reminder to switch to Entry below.
            assert(@sizeOf(Entry) != 8);
        }

        value: u64,

        usingnamespace @import("mixin.zig").PageEntryMixin();

        const ptr_mask: u64 = 0xffffffffff000;

        /// Physical address of the page directory pointer table this entry references.
        pub inline fn ptr(self: @This()) *PageDirectoryPointer {
            const address = self.value & ptr_mask;
            return @intToPtr(*PageDirectoryPointer, address);
        }

        /// Point this entry at a new page directory pointer table, preserving the flag bits.
        pub inline fn ptrSet(self: *@This(), new_ptr: *PageDirectoryPointer) void {
            self.value = (self.value & ~ptr_mask) | @ptrToInt(new_ptr);
        }
    };

    /// Bitfield layout of a PML4 entry (unused in the table until the zig #2627
    /// workaround above can be dropped).
    pub const Entry = packed struct {
        /// Table is loaded into physical memory
        present: bool,
        /// Allow writes to lower levels
        read_write: bool,
        /// Allow usermode access
        user_supervisor: bool,
        /// Table is writeback when false, writethrough when true
        pwt: bool,
        /// Caching of the table is disabled when set
        pcd: bool,
        /// Table has been accessed
        accessed: bool,
        _ign6: u1,
        _mbz7: u1 = 0,
        _mbz8: u1 = 0,
        avl: u3,
        _ptr: u40,
        available: u11,
        /// Forbid execution of lower levels
        no_execute: bool,

        /// Physical address of the page directory pointer table this entry references.
        pub inline fn ptr(self: Entry) *PageDirectoryPointer {
            const address = @bitCast(u64, self) & 0xffffffffff000;
            return @intToPtr(*PageDirectoryPointer, address);
        }
    };
};
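
// Illustrative usage sketch (not part of the original module): translating a
// kernel pointer through the PML4. `readCr3()` stands in for however Xenon
// reads the CR3 register, and `PageDirectoryPointer.toPhysical` is assumed to
// continue the walk through the lower paging levels, as `toPhysical` above expects.
//
//     const pml4 = PageMapLevel4.fromCr3(readCr3());
//     const candidate: *u32 = ...;
//     if (pml4.toPhysical(candidate)) |phys| {
//         // `phys` has the same pointer type as `candidate`; null would have
//         // meant the PML4 entry was not present.
//     }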