// This file is part of libfringe, a low-level green threading library.
// Copyright (c) whitequark <whitequark@whitequark.org>
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
#![feature(alloc, allocator_api)]
extern crate core;
extern crate alloc;
extern crate fringe;
use alloc::alloc::alloc;
use core::alloc::Layout;
use alloc::boxed::Box;
use std::slice;
use fringe::{STACK_ALIGNMENT, Stack, SliceStack, OwnedStack, OsStack};
/// Allocates `size` bytes with alignment `align` from the global heap and
/// returns the raw pointer (null on allocator failure).
///
/// # Safety
/// The caller is responsible for eventually freeing the memory with a
/// matching layout (same size and alignment), or intentionally leaking it.
unsafe fn heap_allocate(size: usize, align: usize) -> *mut u8 {
  // Validate the layout instead of using `from_size_align_unchecked`: an
  // invalid size/align combination is a bug in the test itself, and
  // checking here turns silent undefined behavior into a clear panic.
  let layout = Layout::from_size_align(size, align)
    .expect("invalid size/align for test allocation");
  alloc(layout)
}
#[test]
fn slice_aligned() {
  // A stack carved out of an aligned subrange of an aligned allocation
  // must report aligned base and limit pointers.
  unsafe {
    let ptr = heap_allocate(16384, STACK_ALIGNMENT);
    assert!(!ptr.is_null());
    // Use the raw slice directly rather than `Box::from_raw`: a `Box<[u8]>`
    // deallocates with alignment 1 on drop, which does not match the
    // STACK_ALIGNMENT this memory was allocated with — a layout mismatch
    // that is UB under the GlobalAlloc contract. Leaking 16 KiB in a test
    // is harmless.
    let slice = slice::from_raw_parts_mut(ptr, 16384);
    let stack = SliceStack::new(&mut slice[4096..8192]);
    assert_eq!(stack.base() as usize & (STACK_ALIGNMENT - 1), 0);
    assert_eq!(stack.limit() as usize & (STACK_ALIGNMENT - 1), 0);
  }
}
#[test]
fn slice_unaligned() {
  // A stack carved out of an UNaligned subrange must be rounded inward so
  // that base and limit still come out aligned.
  unsafe {
    let ptr = heap_allocate(16384, STACK_ALIGNMENT);
    assert!(!ptr.is_null());
    // Use the raw slice directly rather than `Box::from_raw`: a `Box<[u8]>`
    // deallocates with alignment 1 on drop, which does not match the
    // STACK_ALIGNMENT this memory was allocated with — a layout mismatch
    // that is UB under the GlobalAlloc contract. Leaking 16 KiB in a test
    // is harmless.
    let slice = slice::from_raw_parts_mut(ptr, 16384);
    // Offsets deliberately off by one from the aligned test.
    let stack = SliceStack::new(&mut slice[4097..8193]);
    assert_eq!(stack.base() as usize & (STACK_ALIGNMENT - 1), 0);
    assert_eq!(stack.limit() as usize & (STACK_ALIGNMENT - 1), 0);
  }
}
#[test]
fn slice_too_small() {
  // A one-byte slice that starts on an aligned address yields a (possibly
  // zero-size) stack whose base and limit are still aligned.
  unsafe {
    let ptr = heap_allocate(STACK_ALIGNMENT, STACK_ALIGNMENT);
    assert!(!ptr.is_null());
    // Use the raw slice directly rather than `Box::from_raw`: a `Box<[u8]>`
    // deallocates with alignment 1 on drop, which does not match the
    // STACK_ALIGNMENT this memory was allocated with — a layout mismatch
    // that is UB under the GlobalAlloc contract. Leaking a few bytes in a
    // test is harmless.
    let slice = slice::from_raw_parts_mut(ptr, STACK_ALIGNMENT);
    let stack = SliceStack::new(&mut slice[0..1]);
    assert_eq!(stack.base() as usize & (STACK_ALIGNMENT - 1), 0);
    assert_eq!(stack.limit() as usize & (STACK_ALIGNMENT - 1), 0);
  }
}
#[test]
#[should_panic(expected = "SliceStack too small")]
fn slice_too_small_unaligned() {
  // A one-byte slice at an UNaligned address cannot hold any aligned stack
  // at all, so construction must panic with the expected message.
  unsafe {
    let ptr = heap_allocate(STACK_ALIGNMENT, STACK_ALIGNMENT);
    assert!(!ptr.is_null());
    // Use the raw slice directly rather than `Box::from_raw`: a `Box<[u8]>`
    // deallocates with alignment 1 on drop, which does not match the
    // STACK_ALIGNMENT this memory was allocated with — a layout mismatch
    // that is UB under the GlobalAlloc contract. The buffer leaks on the
    // expected panic, which is fine for a test.
    let slice = slice::from_raw_parts_mut(ptr, STACK_ALIGNMENT);
    SliceStack::new(&mut slice[1..2]);
  }
}
#[test]
fn slice_stack() {
  // Build a stack on top of a plain fixed-size buffer on the (test) stack.
  let mut buffer = [0; 1024];
  let stack = SliceStack::new(&mut buffer);

  let mask = STACK_ALIGNMENT - 1;
  let base = stack.base() as usize;
  let limit = stack.limit() as usize;

  // Both ends of the usable region must be aligned.
  assert_eq!(base & mask, 0);
  assert_eq!(limit & mask, 0);

  // Aligning may shave off bytes at either end, so the usable size can be
  // slightly smaller than the buffer — but never by a full alignment unit
  // on both sides.
  assert!(base - limit > 1024 - STACK_ALIGNMENT * 2);
}
#[test]
fn owned_stack() {
  // A heap-owned stack of an exact size should come back aligned at both
  // ends and exactly as large as requested.
  let stack = OwnedStack::new(1024);

  let mask = STACK_ALIGNMENT - 1;
  let base = stack.base() as usize;
  let limit = stack.limit() as usize;

  assert_eq!(base & mask, 0);
  assert_eq!(limit & mask, 0);
  assert_eq!(base - limit, 1024);
}
#[test]
fn default_os_stack() {
  // A zero size requests the platform's default stack size.
  let stack = OsStack::new(0).unwrap();

  let mask = STACK_ALIGNMENT - 1;
  assert_eq!(stack.base() as usize & mask, 0);
  assert_eq!(stack.limit() as usize & mask, 0);

  // Write to the byte just below the base to confirm that at least the
  // topmost page of the stack is mapped and writable.
  unsafe {
    let top_byte = stack.base().offset(-1);
    *top_byte = 0;
  }
}
#[test]
fn one_page_os_stack() {
  // Request the smallest interesting OS stack: a single 4 KiB page.
  let stack = OsStack::new(4096).unwrap();

  let mask = STACK_ALIGNMENT - 1;
  assert_eq!(stack.base() as usize & mask, 0);
  assert_eq!(stack.limit() as usize & mask, 0);

  // Write to the byte just below the base to confirm that at least the
  // topmost page of the stack is mapped and writable.
  unsafe {
    let top_byte = stack.base().offset(-1);
    *top_byte = 0;
  }
}