use core::{
    mem::{align_of, size_of},
    ops::{Deref, DerefMut},
};

use alloc::alloc::{dealloc, handle_alloc_error, Layout, LayoutError};

use crate::mmu::{L1Table, L1_PAGE_SIZE};

pub struct UncachedSlice<T: 'static> {
    layout: Layout,
    slice: &'static mut [T],
}

impl<T: 'static> UncachedSlice<T> {
    /// Allocates in chunks of 1 MiB (one L1 section) and marks the backing
    /// pages as uncached before handing out the slice.
    pub fn new<F: Fn() -> T>(len: usize, default: F) -> Result<Self, LayoutError> {
        // Round the allocation up to whole L1 pages; `max(1)` avoids an
        // underflow for zero-sized requests.
        let size = (((len * size_of::<T>()).max(1) - 1) | (L1_PAGE_SIZE - 1)) + 1;
        let align = align_of::<T>().max(L1_PAGE_SIZE);
        let layout = Layout::from_size_align(size, align)?;

        let ptr = unsafe { alloc::alloc::alloc(layout) };
        if ptr.is_null() {
            handle_alloc_error(layout);
        }
        let ptr = ptr.cast::<T>();
        let start = ptr as usize;
        assert_eq!(start & (L1_PAGE_SIZE - 1), 0);

        for page_start in (start..(start + size)).step_by(L1_PAGE_SIZE) {
            // Non-shareable device memory: TEX = 0b010, C = 0, B = 0.
            L1Table::get().update(page_start as *const (), |l1_section| {
                l1_section.tex = 0b10;
                l1_section.cacheable = false;
                l1_section.bufferable = false;
            });
        }

        let slice = unsafe { core::slice::from_raw_parts_mut(ptr, len) };

        // Verify that the slice fits into the rounded-up allocation.
        assert!(unsafe { ptr.add(len) } as usize <= start + size);

        // Initialize every element.
        for e in slice.iter_mut() {
            *e = default();
        }

        Ok(UncachedSlice { layout, slice })
    }
}

/// Does not yet mark the pages cacheable again.
impl<T: 'static> Drop for UncachedSlice<T> {
    fn drop(&mut self) {
        unsafe {
            dealloc(self.slice.as_mut_ptr().cast::<u8>(), self.layout);
        }
    }
}

impl<T: 'static> Deref for UncachedSlice<T> {
    type Target = [T];

    fn deref(&self) -> &Self::Target {
        self.slice
    }
}

impl<T: 'static> DerefMut for UncachedSlice<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.slice
    }
}
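
// A minimal usage sketch (not part of the original module): it assumes the heap
// allocator and the MMU's L1 table have already been set up by the platform
// startup code, and the buffer length of 1024 is arbitrary. Because
// `UncachedSlice<T>` derefs to `[T]`, the result can be indexed and iterated
// like an ordinary slice.
#[allow(dead_code)]
fn uncached_slice_usage() -> Result<(), LayoutError> {
    // 1024 zero-initialized words backed by uncached (device) memory,
    // e.g. for a buffer shared with a DMA engine.
    let mut dma_buf: UncachedSlice<u32> = UncachedSlice::new(1024, || 0)?;
    dma_buf[0] = 0xdead_beef; // the write goes straight to memory, bypassing the data cache
    Ok(())
}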