Compare commits
5 Commits
2c9b1f5330
...
be55e2ac80
Author | SHA1 | Date |
---|---|---|
David Mak | be55e2ac80 | |
David Mak | 79c8b759ad | |
David Mak | 4798c53a21 | |
David Mak | 23974feae7 | |
David Mak | 40a3bded36 | |
@@ -1 +1 @@
-doc-valid-idents = ["NumPy", ".."]
+doc-valid-idents = ["CPython", "NumPy", ".."]
@@ -0,0 +1,24 @@
+# See https://pre-commit.com for more information
+# See https://pre-commit.com/hooks.html for more hooks
+
+default_stages: [commit]
+
+repos:
+  - repo: local
+    hooks:
+      - id: nac3-cargo-fmt
+        name: nac3 cargo format
+        entry: cargo
+        language: system
+        types: [file, rust]
+        pass_filenames: false
+        description: Runs cargo fmt on the codebase.
+        args: [fmt]
+      - id: nac3-cargo-clippy
+        name: nac3 cargo clippy
+        entry: cargo
+        language: system
+        types: [file, rust]
+        pass_filenames: false
+        description: Runs cargo clippy on the codebase.
+        args: [clippy]
@@ -51,3 +51,12 @@ Use ``nix develop`` in this repository to enter a development shell.
 If you are using a different shell than bash you can use e.g. ``nix develop --command fish``.
 
 Build NAC3 with ``cargo build --release``. See the demonstrations in ``nac3artiq`` and ``nac3standalone``.
+
+### Pre-Commit Hooks
+
+You are strongly recommended to use the provided pre-commit hooks to automatically reformat files and check for non-optimal Rust practices using Clippy. Run `pre-commit install` to install the hook and `pre-commit` will automatically run `cargo fmt` and `cargo clippy` for you.
+
+Several things to note:
+
+- If `cargo fmt` or `cargo clippy` returns an error, the pre-commit hook will fail. You should fix all errors before trying to commit again.
+- If `cargo fmt` reformats some files, the pre-commit hook will also fail. You should review the changes and, if satisfied, try to commit again.
@@ -159,6 +159,7 @@
         # development tools
         cargo-insta
         clippy
+        pre-commit
         rustfmt
       ];
     };
@@ -260,7 +260,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
         let start_expr = Located {
             // location does not matter at this point
             location: stmt.location,
-            node: ExprKind::Name { id: start, ctx: name_ctx.clone() },
+            node: ExprKind::Name { id: start, ctx: *name_ctx },
             custom: Some(ctx.primitives.int64),
         };
         let start = self

@@ -275,7 +275,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
         let end_expr = Located {
             // location does not matter at this point
             location: stmt.location,
-            node: ExprKind::Name { id: end, ctx: name_ctx.clone() },
+            node: ExprKind::Name { id: end, ctx: *name_ctx },
             custom: Some(ctx.primitives.int64),
         };
         let end = self.gen_store_target(ctx, &end_expr, Some("end.addr"))?.unwrap();

@@ -442,7 +442,7 @@ fn rpc_codegen_callback_fn<'ctx>(
         format!("tagptr{}", fun.1 .0).as_str(),
     );
     tag_arr_ptr.set_initializer(&int8.const_array(
-        &tag.iter().map(|v| int8.const_int(*v as u64, false)).collect::<Vec<_>>(),
+        &tag.iter().map(|v| int8.const_int(u64::from(*v), false)).collect::<Vec<_>>(),
     ));
     tag_arr_ptr.set_linkage(Linkage::Private);
     let tag_ptr = ctx.module.add_global(tag_ptr_type, None, &hash);
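The two recurring edits above — `name_ctx.clone()` → `*name_ctx` and `x as u64` → `u64::from(x)` — follow from the `Copy` derive on `ExprContext` (see the AST changes further down) and from Clippy's lossless-conversion lint. A minimal standalone sketch of the cast change (illustrative only, not taken from this patch):

    // Hypothetical example: `as` compiles even when it can truncate or
    // change sign, while `u64::from` exists only for lossless conversions,
    // so clippy::cast_lossless prefers it.
    fn widen(v: u8) -> u64 {
        u64::from(v)
    }

    fn main() {
        assert_eq!(widen(255), 255_u64);
    }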
@@ -1,3 +1,21 @@
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(
+    unsafe_op_in_unsafe_fn,
+    clippy::cast_possible_truncation,
+    clippy::cast_sign_loss,
+    clippy::enum_glob_use,
+    clippy::similar_names,
+    clippy::too_many_lines,
+    clippy::wildcard_imports
+)]
+
 use std::collections::{HashMap, HashSet};
 use std::fs;
 use std::io::Write;
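Crate-level attributes like these apply to every module in the crate: `deny` turns the listed lint groups into hard errors, `warn(clippy::pedantic)` opts into the stricter Clippy group, and the `allow` block carves out specific pedantic lints the codebase does not want to fix yet. A shortened illustrative sketch (not the exact NAC3 lint list):

    // Hypothetical minimal crate root showing crate-wide lint levels.
    #![deny(nonstandard_style)]        // violations become compile errors
    #![warn(clippy::pedantic)]         // stricter group, warnings only
    #![allow(clippy::too_many_lines)]  // opt out of one pedantic lint

    fn main() {
        println!("lint levels are configured once, at the crate root");
    }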
@@ -125,7 +125,7 @@ impl StaticValue for PythonValue {
             );
             global.set_constant(true);
             global.set_initializer(&ctx.ctx.const_struct(
-                &[ctx.ctx.i32_type().const_int(id as u64, false).into()],
+                &[ctx.ctx.i32_type().const_int(u64::from(id), false).into()],
                 false,
             ));
             Ok(global.as_pointer_value().into())

@@ -146,10 +146,14 @@ impl StaticValue for PythonValue {
             return Ok(match val {
                 PrimitiveValue::I32(val) => ctx.ctx.i32_type().const_int(*val as u64, false).into(),
                 PrimitiveValue::I64(val) => ctx.ctx.i64_type().const_int(*val as u64, false).into(),
-                PrimitiveValue::U32(val) => ctx.ctx.i32_type().const_int(*val as u64, false).into(),
+                PrimitiveValue::U32(val) => {
+                    ctx.ctx.i32_type().const_int(u64::from(*val), false).into()
+                }
                 PrimitiveValue::U64(val) => ctx.ctx.i64_type().const_int(*val, false).into(),
                 PrimitiveValue::F64(val) => ctx.ctx.f64_type().const_float(*val).into(),
-                PrimitiveValue::Bool(val) => ctx.ctx.i8_type().const_int(*val as u64, false).into(),
+                PrimitiveValue::Bool(val) => {
+                    ctx.ctx.i8_type().const_int(u64::from(*val), false).into()
+                }
             });
         }
         if let Some(global) = ctx.module.get_global(&self.id.to_string()) {

@@ -864,7 +868,7 @@ impl InnerResolver {
         } else if ty_id == self.primitive_ids.uint32 {
             let val: u32 = obj.extract().unwrap();
             self.id_to_primitive.write().insert(id, PrimitiveValue::U32(val));
-            Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
+            Ok(Some(ctx.ctx.i32_type().const_int(u64::from(val), false).into()))
         } else if ty_id == self.primitive_ids.uint64 {
             let val: u64 = obj.extract().unwrap();
             self.id_to_primitive.write().insert(id, PrimitiveValue::U64(val));

@@ -872,7 +876,7 @@ impl InnerResolver {
         } else if ty_id == self.primitive_ids.bool {
             let val: bool = obj.extract().unwrap();
             self.id_to_primitive.write().insert(id, PrimitiveValue::Bool(val));
-            Ok(Some(ctx.ctx.i8_type().const_int(val as u64, false).into()))
+            Ok(Some(ctx.ctx.i8_type().const_int(u64::from(val), false).into()))
         } else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 {
             let val: f64 = obj.extract().unwrap();
             self.id_to_primitive.write().insert(id, PrimitiveValue::F64(val));
@@ -15,7 +15,7 @@ lazy_static! {
 }
 
 thread_local! {
-    static LOCAL_INTERNER: RefCell<HashMap<String, StrRef>> = Default::default();
+    static LOCAL_INTERNER: RefCell<HashMap<String, StrRef>> = RefCell::default();
 }
 
 #[derive(Eq, PartialEq, Copy, Clone, Hash)]

@@ -24,14 +24,14 @@ pub struct StrRef(SymbolU32);
 impl fmt::Debug for StrRef {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let s: String = (*self).into();
-        write!(f, "{:?}", s)
+        write!(f, "{s:?}")
     }
 }
 
 impl fmt::Display for StrRef {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let s: String = (*self).into();
-        write!(f, "{}", s)
+        write!(f, "{s}")
     }
 }
 

@@ -69,6 +69,7 @@ pub fn get_str_ref(lock: &mut MutexGuard<Interner>, str: &str) -> StrRef {
     StrRef(lock.get_or_intern(str))
 }
 
+#[must_use]
 pub fn get_str_from_ref<'a>(lock: &'a MutexGuard<Interner>, id: StrRef) -> &'a str {
     lock.resolve(id.0).unwrap()
 }
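The `write!(f, "{s:?}")` form uses inline format arguments, which Clippy's `uninlined_format_args` lint prefers over positional `"{:?}", s`. A small self-contained sketch of the same pattern, assuming a plain wrapper type rather than the interner-backed `StrRef`:

    use std::fmt;

    // Hypothetical wrapper type standing in for StrRef.
    struct Name(String);

    impl fmt::Display for Name {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            let s = &self.0;
            write!(f, "{s}") // inline argument instead of write!(f, "{}", s)
        }
    }

    fn main() {
        println!("{}", Name("nac3".to_string()));
    }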
@@ -359,20 +360,20 @@ pub enum ExprKind<U = ()> {
 }
 pub type Expr<U = ()> = Located<ExprKind<U>, U>;
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum ExprContext {
     Load,
     Store,
     Del,
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum Boolop {
     And,
     Or,
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum Operator {
     Add,
     Sub,

@@ -389,7 +390,7 @@ pub enum Operator {
     FloorDiv,
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum Unaryop {
     Invert,
     Not,

@@ -397,7 +398,7 @@ pub enum Unaryop {
     USub,
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum Cmpop {
     Eq,
     NotEq,

@@ -451,7 +452,7 @@ pub struct KeywordData<U = ()> {
 }
 pub type Keyword<U = ()> = Located<KeywordData<U>, U>;
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub struct Alias {
     pub name: Ident,
     pub asname: Option<Ident>,
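Deriving `Copy` on these field-less AST enums is what allows the later call sites to pass `*op` by value instead of `op.clone()`. A self-contained sketch of the pattern (hypothetical mini enum, not the real `Operator`):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Op {
        Add,
        Sub,
    }

    fn name(op: Op) -> &'static str {
        match op {
            Op::Add => "__add__",
            Op::Sub => "__sub__",
        }
    }

    fn main() {
        let op = &Op::Add;  // e.g. a reference obtained from an AST node
        let n = name(*op);  // Copy: dereference instead of clone()
        assert_eq!(n, "__add__");
    }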
@@ -28,12 +28,12 @@ impl From<bool> for Constant {
 }
 impl From<i32> for Constant {
     fn from(i: i32) -> Constant {
-        Self::Int(i as i128)
+        Self::Int(i128::from(i))
     }
 }
 impl From<i64> for Constant {
     fn from(i: i64) -> Constant {
-        Self::Int(i as i128)
+        Self::Int(i128::from(i))
     }
 }
 

@@ -50,6 +50,7 @@ pub enum ConversionFlag {
 }
 
 impl ConversionFlag {
+    #[must_use]
     pub fn try_from_byte(b: u8) -> Option<Self> {
         match b {
             b's' => Some(Self::Str),

@@ -69,6 +70,7 @@ pub struct ConstantOptimizer {
 #[cfg(feature = "constant-optimization")]
 impl ConstantOptimizer {
     #[inline]
+    #[must_use]
     pub fn new() -> Self {
         Self { _priv: () }
     }

@@ -2,6 +2,7 @@ use crate::{Constant, ExprKind};
 
 impl<U> ExprKind<U> {
     /// Returns a short name for the node suitable for use in error messages.
+    #[must_use]
     pub fn name(&self) -> &'static str {
         match self {
             ExprKind::BoolOp { .. } | ExprKind::BinOp { .. } | ExprKind::UnaryOp { .. } => {
@@ -1,3 +1,19 @@
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(
+    clippy::missing_errors_doc,
+    clippy::missing_panics_doc,
+    clippy::module_name_repetitions,
+    clippy::too_many_lines,
+    clippy::wildcard_imports
+)]
+
 #[macro_use]
 extern crate lazy_static;
 
@@ -81,14 +81,17 @@ impl Location {
 }
 
 impl Location {
+    #[must_use]
     pub fn new(row: usize, column: usize, file: FileName) -> Self {
         Location { row, column, file }
     }
 
+    #[must_use]
     pub fn row(&self) -> usize {
         self.row
     }
 
+    #[must_use]
     pub fn column(&self) -> usize {
         self.column
     }
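`#[must_use]` on side-effect-free getters like these makes the compiler warn whenever a caller discards the returned value. A brief sketch under that assumption (hypothetical struct; the real `Location` also carries a file name):

    struct Location {
        row: usize,
        column: usize,
    }

    impl Location {
        #[must_use]
        fn row(&self) -> usize {
            self.row
        }
    }

    fn main() {
        let loc = Location { row: 3, column: 7 };
        // `loc.row();` on its own would trigger an unused_must_use warning.
        let r = loc.row();
        assert_eq!(r, 3);
        let _ = loc.column;
    }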
@@ -164,7 +164,7 @@ pub trait UntypedArrayLikeAccessor<'ctx, Index = IntValue<'ctx>>:
         idx: &Index,
         name: Option<&str>,
     ) -> BasicValueEnum<'ctx> {
-        let ptr = self.ptr_offset_unchecked(ctx, generator, idx, name);
+        let ptr = unsafe { self.ptr_offset_unchecked(ctx, generator, idx, name) };
         ctx.builder.build_load(ptr, name.unwrap_or_default()).unwrap()
     }
 

@@ -195,7 +195,7 @@ pub trait UntypedArrayLikeMutator<'ctx, Index = IntValue<'ctx>>:
         idx: &Index,
         value: BasicValueEnum<'ctx>,
     ) {
-        let ptr = self.ptr_offset_unchecked(ctx, generator, idx, None);
+        let ptr = unsafe { self.ptr_offset_unchecked(ctx, generator, idx, None) };
         ctx.builder.build_store(ptr, value).unwrap();
     }
 

@@ -233,7 +233,7 @@ pub trait TypedArrayLikeAccessor<'ctx, T, Index = IntValue<'ctx>>:
         idx: &Index,
         name: Option<&str>,
     ) -> T {
-        let value = self.get_unchecked(ctx, generator, idx, name);
+        let value = unsafe { self.get_unchecked(ctx, generator, idx, name) };
         self.downcast_to_type(ctx, value)
     }
 

@@ -272,7 +272,7 @@ pub trait TypedArrayLikeMutator<'ctx, T, Index = IntValue<'ctx>>:
         value: T,
     ) {
         let value = self.upcast_from_type(ctx, value);
-        self.set_unchecked(ctx, generator, idx, value);
+        unsafe { self.set_unchecked(ctx, generator, idx, value) }
     }
 
     /// Sets the data at the `idx`-th index.

@@ -360,7 +360,7 @@ where
         idx: &Index,
         name: Option<&str>,
     ) -> PointerValue<'ctx> {
-        self.adapted.ptr_offset_unchecked(ctx, generator, idx, name)
+        unsafe { self.adapted.ptr_offset_unchecked(ctx, generator, idx, name) }
     }
 
     fn ptr_offset<G: CodeGenerator + ?Sized>(
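These edits follow from denying `unsafe_op_in_unsafe_fn` (part of `rust_2024_compatibility`): the body of an `unsafe fn` no longer acts as an implicit unsafe block, so every unsafe call must be wrapped explicitly. A standalone sketch, assuming a toy unsafe helper in place of `ptr_offset_unchecked`:

    #![deny(unsafe_op_in_unsafe_fn)]

    // Hypothetical unsafe primitive.
    unsafe fn read_at(ptr: *const u8, idx: usize) -> u8 {
        unsafe { *ptr.add(idx) }
    }

    // Even inside an unsafe fn, the unsafe call needs its own block.
    unsafe fn second_byte(ptr: *const u8) -> u8 {
        unsafe { read_at(ptr, 1) }
    }

    fn main() {
        let data = [10_u8, 20, 30];
        let b = unsafe { second_byte(data.as_ptr()) };
        assert_eq!(b, 20);
    }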
@@ -474,10 +474,12 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for ArraySliceValue<'ctx> {
     ) -> PointerValue<'ctx> {
         let var_name = name.map(|v| format!("{v}.addr")).unwrap_or_default();
 
+        unsafe {
             ctx.builder
                 .build_in_bounds_gep(self.base_ptr(ctx, generator), &[*idx], var_name.as_str())
                 .unwrap()
         }
+    }
 
     fn ptr_offset<G: CodeGenerator + ?Sized>(
         &self,

@@ -830,10 +832,12 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for ListDataProxy<'ctx, '_> {
     ) -> PointerValue<'ctx> {
         let var_name = name.map(|v| format!("{v}.addr")).unwrap_or_default();
 
+        unsafe {
             ctx.builder
                 .build_in_bounds_gep(self.base_ptr(ctx, generator), &[*idx], var_name.as_str())
                 .unwrap()
         }
+    }
 
     fn ptr_offset<G: CodeGenerator + ?Sized>(
         &self,

@@ -1501,10 +1505,12 @@ impl<'ctx> ArrayLikeIndexer<'ctx, IntValue<'ctx>> for NDArrayDimsProxy<'ctx, '_>
     ) -> PointerValue<'ctx> {
         let var_name = name.map(|v| format!("{v}.addr")).unwrap_or_default();
 
+        unsafe {
             ctx.builder
                 .build_in_bounds_gep(self.base_ptr(ctx, generator), &[*idx], var_name.as_str())
                 .unwrap()
         }
+    }
 
     fn ptr_offset<G: CodeGenerator + ?Sized>(
         &self,
@@ -1594,10 +1600,16 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for NDArrayDataProxy<'ctx, '_> {
         idx: &IntValue<'ctx>,
         name: Option<&str>,
     ) -> PointerValue<'ctx> {
+        unsafe {
             ctx.builder
-                .build_in_bounds_gep(self.base_ptr(ctx, generator), &[*idx], name.unwrap_or_default())
+                .build_in_bounds_gep(
+                    self.base_ptr(ctx, generator),
+                    &[*idx],
+                    name.unwrap_or_default(),
+                )
                 .unwrap()
         }
+    }
 
     fn ptr_offset<G: CodeGenerator + ?Sized>(
         &self,
@@ -110,9 +110,9 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
         match val {
             SymbolValue::I32(v) => self.ctx.i32_type().const_int(*v as u64, true).into(),
             SymbolValue::I64(v) => self.ctx.i64_type().const_int(*v as u64, true).into(),
-            SymbolValue::U32(v) => self.ctx.i32_type().const_int(*v as u64, false).into(),
+            SymbolValue::U32(v) => self.ctx.i32_type().const_int(u64::from(*v), false).into(),
             SymbolValue::U64(v) => self.ctx.i64_type().const_int(*v, false).into(),
-            SymbolValue::Bool(v) => self.ctx.i8_type().const_int(*v as u64, true).into(),
+            SymbolValue::Bool(v) => self.ctx.i8_type().const_int(u64::from(*v), true).into(),
             SymbolValue::Double(v) => self.ctx.f64_type().const_float(*v).into(),
             SymbolValue::Str(v) => {
                 let str_ptr = self

@@ -299,7 +299,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
     pub fn gen_int_ops<G: CodeGenerator + ?Sized>(
         &mut self,
         generator: &mut G,
-        op: &Operator,
+        op: Operator,
         lhs: BasicValueEnum<'ctx>,
         rhs: BasicValueEnum<'ctx>,
         signed: bool,

@@ -371,7 +371,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
             self.current_loc,
         );
 
-        match *op {
+        match op {
             Operator::LShift => {
                 self.builder.build_left_shift(lhs, rhs, "lshift").map(Into::into).unwrap()
             }

@@ -399,7 +399,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
     /// Generates a binary operation `op` between two floating-point operands `lhs` and `rhs`.
     pub fn gen_float_ops(
         &mut self,
-        op: &Operator,
+        op: Operator,
         lhs: BasicValueEnum<'ctx>,
         rhs: BasicValueEnum<'ctx>,
     ) -> BasicValueEnum<'ctx> {
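Changing `op: &Operator` to `op: Operator` throughout is the natural follow-up to the `Copy` derive: a small copyable enum is cheaper to pass by value than by reference, and comparisons no longer need `*op`. A sketch under that assumption (hypothetical enum, not the real one):

    #[derive(Clone, Copy, PartialEq)]
    enum Operator {
        Add,
        Pow,
    }

    // By-value parameter: callers write f(op) or f(*op_ref); no clone needed.
    fn is_pow(op: Operator) -> bool {
        op == Operator::Pow // no dereference, unlike `*op == Operator::Pow`
    }

    fn main() {
        let op_ref = &Operator::Pow; // as it might appear inside an AST node
        assert!(is_pow(*op_ref));
        assert!(!is_pow(Operator::Add));
    }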
@@ -1148,7 +1148,7 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
     generator: &mut G,
     ctx: &mut CodeGenContext<'ctx, '_>,
     left: (&Option<Type>, BasicValueEnum<'ctx>),
-    op: &Operator,
+    op: Operator,
     right: (&Option<Type>, BasicValueEnum<'ctx>),
     loc: Location,
     is_aug_assign: bool,

@@ -1166,14 +1166,14 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
         Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, true).into()))
     } else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) {
         Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, false).into()))
-    } else if [Operator::LShift, Operator::RShift].contains(op) {
+    } else if [Operator::LShift, Operator::RShift].contains(&op) {
         let signed = [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1);
         Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, signed).into()))
     } else if ty1 == ty2 && ctx.primitives.float == ty1 {
         Ok(Some(ctx.gen_float_ops(op, left_val, right_val).into()))
     } else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 {
         // Pow is the only operator that would pass typecheck between float and int
-        assert_eq!(*op, Operator::Pow);
+        assert_eq!(op, Operator::Pow);
         let res = call_float_powi(
             ctx,
             left_val.into_float_value(),
@@ -1200,7 +1200,7 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
         let right_val =
             NDArrayValue::from_ptr_val(right_val.into_pointer_value(), llvm_usize, None);
 
-        let res = if *op == Operator::MatMult {
+        let res = if op == Operator::MatMult {
             // MatMult is the only binop which is not an elementwise op
             numpy::ndarray_matmul_2d(
                 generator,

@@ -1330,7 +1330,7 @@ pub fn gen_binop_expr<'ctx, G: CodeGenerator>(
     generator: &mut G,
     ctx: &mut CodeGenContext<'ctx, '_>,
     left: &Expr<Option<Type>>,
-    op: &Operator,
+    op: Operator,
     right: &Expr<Option<Type>>,
     loc: Location,
     is_aug_assign: bool,
@@ -1362,7 +1362,7 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
 pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
     generator: &mut G,
     ctx: &mut CodeGenContext<'ctx, '_>,
-    op: &ast::Unaryop,
+    op: ast::Unaryop,
     operand: (&Option<Type>, BasicValueEnum<'ctx>),
 ) -> Result<Option<ValueEnum<'ctx>>, String> {
     let (ty, val) = operand;

@@ -1370,7 +1370,7 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
 
     Ok(Some(if ty == ctx.primitives.bool {
         let val = val.into_int_value();
-        if *op == ast::Unaryop::Not {
+        if op == ast::Unaryop::Not {
             let not = ctx.builder.build_not(val, "not").unwrap();
             let not_bool =
                 ctx.builder.build_and(not, not.get_type().const_int(1, false), "").unwrap();

@@ -1434,8 +1434,8 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
     // ndarray uses `~` rather than `not` to perform elementwise inversion, convert it before
     // passing it to the elementwise codegen function
     let op = if ndarray_dtype.obj_id(&ctx.unifier).is_some_and(|id| id == PrimDef::Bool.id()) {
-        if *op == ast::Unaryop::Invert {
-            &ast::Unaryop::Not
+        if op == ast::Unaryop::Invert {
+            ast::Unaryop::Not
         } else {
             unreachable!("ufunc {} not supported for ndarray[bool, N]", unaryop_name(op))
         }
@@ -1469,7 +1469,7 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
 pub fn gen_unaryop_expr<'ctx, G: CodeGenerator>(
     generator: &mut G,
     ctx: &mut CodeGenContext<'ctx, '_>,
-    op: &ast::Unaryop,
+    op: ast::Unaryop,
     operand: &Expr<Option<Type>>,
 ) -> Result<Option<ValueEnum<'ctx>>, String> {
     let val = if let Some(v) = generator.gen_expr(ctx, operand)? {

@@ -1503,7 +1503,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
 
     let (Some(left_ty), lhs) = left else { unreachable!() };
     let (Some(right_ty), rhs) = comparators[0] else { unreachable!() };
-    let op = ops[0].clone();
+    let op = ops[0];
 
     let is_ndarray1 =
         left_ty.obj_id(&ctx.unifier).is_some_and(|id| id == PrimDef::NDArray.id());

@@ -1530,7 +1530,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
                 generator,
                 ctx,
                 (Some(ndarray_dtype1), lhs),
-                &[op.clone()],
+                &[op],
                 &[(Some(ndarray_dtype2), rhs)],
             )?
             .unwrap()

@@ -1562,7 +1562,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
                 generator,
                 ctx,
                 (Some(ndarray_dtype), lhs),
-                &[op.clone()],
+                &[op],
                 &[(Some(ndarray_dtype), rhs)],
             )?
             .unwrap()

@@ -1743,7 +1743,7 @@ fn gen_ndarray_subscript_expr<'ctx, G: CodeGenerator>(
             .iter()
             .map(|ndim| match *ndim {
                 SymbolValue::U64(v) => Ok(v),
-                SymbolValue::U32(v) => Ok(v as u64),
+                SymbolValue::U32(v) => Ok(u64::from(v)),
                 SymbolValue::I32(v) => u64::try_from(v)
                     .map_err(|_| format!("Expected non-negative literal for ndarray.ndims, got {v}")),
                 SymbolValue::I64(v) => u64::try_from(v)
@@ -2202,9 +2202,9 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
                 }
             }
             ExprKind::BinOp { op, left, right } => {
-                return gen_binop_expr(generator, ctx, left, op, right, expr.location, false);
+                return gen_binop_expr(generator, ctx, left, *op, right, expr.location, false);
             }
-            ExprKind::UnaryOp { op, operand } => return gen_unaryop_expr(generator, ctx, op, operand),
+            ExprKind::UnaryOp { op, operand } => return gen_unaryop_expr(generator, ctx, *op, operand),
             ExprKind::Compare { left, ops, comparators } => {
                 return gen_cmpop_expr(generator, ctx, left, ops, comparators)
             }

@@ -2367,7 +2367,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
                         ))
                     }
                     Some(v) => Ok(Some(v)),
-                }
+                };
             }
             ValueEnum::Dynamic(BasicValueEnum::PointerValue(ptr)) => {
                 let not_null =
@@ -1629,7 +1629,7 @@ pub fn ndarray_matmul_2d<'ctx, G: CodeGenerator>(
                 generator,
                 ctx,
                 (&Some(elem_ty), a),
-                &Operator::Mult,
+                Operator::Mult,
                 (&Some(elem_ty), b),
                 ctx.current_loc,
                 false,

@@ -1642,7 +1642,7 @@ pub fn ndarray_matmul_2d<'ctx, G: CodeGenerator>(
                 generator,
                 ctx,
                 (&Some(elem_ty), result),
-                &Operator::Add,
+                Operator::Add,
                 (&Some(elem_ty), a_mul_b),
                 ctx.current_loc,
                 false,
@@ -1270,7 +1270,7 @@ pub fn gen_try<'ctx, 'a, G: CodeGenerator>(
     // run end_catch before continue/break/return
     let mut final_proxy_lambda =
         |ctx: &mut CodeGenContext<'ctx, 'a>, target: BasicBlock<'ctx>, block: BasicBlock<'ctx>| {
-            final_proxy(ctx, target, block, final_data.as_mut().unwrap())
+            final_proxy(ctx, target, block, final_data.as_mut().unwrap());
         };
     let mut redirect_lambda =
         |ctx: &mut CodeGenContext<'ctx, 'a>, target: BasicBlock<'ctx>, block: BasicBlock<'ctx>| {

@@ -1574,7 +1574,7 @@ pub fn gen_stmt<G: CodeGenerator>(
         StmtKind::For { .. } => generator.gen_for(ctx, stmt)?,
         StmtKind::With { .. } => generator.gen_with(ctx, stmt)?,
         StmtKind::AugAssign { target, op, value, .. } => {
-            let value = gen_binop_expr(generator, ctx, target, op, value, stmt.location, true)?;
+            let value = gen_binop_expr(generator, ctx, target, *op, value, stmt.location, true)?;
             generator.gen_assign(ctx, target, value.unwrap())?;
         }
         StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?,
@@ -1,5 +1,23 @@
-#![warn(clippy::all)]
-#![allow(dead_code)]
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(
+    dead_code,
+    clippy::cast_possible_truncation,
+    clippy::cast_sign_loss,
+    clippy::enum_glob_use,
+    clippy::missing_errors_doc,
+    clippy::missing_panics_doc,
+    clippy::module_name_repetitions,
+    clippy::similar_names,
+    clippy::too_many_lines,
+    clippy::wildcard_imports
+)]
 
 pub mod codegen;
 pub mod symbol_resolver;
@@ -241,7 +241,7 @@ impl TryFrom<SymbolValue> for u64 {
         match value {
             SymbolValue::I32(v) => u64::try_from(v).map_err(|_| ()),
             SymbolValue::I64(v) => u64::try_from(v).map_err(|_| ()),
-            SymbolValue::U32(v) => Ok(v as u64),
+            SymbolValue::U32(v) => Ok(u64::from(v)),
             SymbolValue::U64(v) => Ok(v),
             _ => Err(()),
         }

@@ -255,10 +255,10 @@ impl TryFrom<SymbolValue> for i128 {
     /// numeric.
     fn try_from(value: SymbolValue) -> Result<Self, Self::Error> {
         match value {
-            SymbolValue::I32(v) => Ok(v as i128),
-            SymbolValue::I64(v) => Ok(v as i128),
-            SymbolValue::U32(v) => Ok(v as i128),
-            SymbolValue::U64(v) => Ok(v as i128),
+            SymbolValue::I32(v) => Ok(i128::from(v)),
+            SymbolValue::I64(v) => Ok(i128::from(v)),
+            SymbolValue::U32(v) => Ok(i128::from(v)),
+            SymbolValue::U64(v) => Ok(i128::from(v)),
             _ => Err(()),
         }
     }
@@ -561,7 +561,9 @@ impl TopLevelComposer {
                 unifier,
                 &primitive_types,
                 b,
-                vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(),
+                vec![(*class_def_id, class_type_vars.clone())]
+                    .into_iter()
+                    .collect::<HashMap<_, _>>(),
             )?;
 
             if let TypeAnnotation::CustomClass { .. } = &base_ty {

@@ -1154,7 +1156,7 @@ impl TopLevelComposer {
                     annotation_expr,
                     vec![(class_id, class_type_vars_def.clone())]
                         .into_iter()
-                        .collect(),
+                        .collect::<HashMap<_, _>>(),
                 )?
             };
             // find type vars within this method parameter type annotation

@@ -1219,7 +1221,9 @@ impl TopLevelComposer {
                 unifier,
                 primitives,
                 result,
-                vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
+                vec![(class_id, class_type_vars_def.clone())]
+                    .into_iter()
+                    .collect::<HashMap<_, _>>(),
             )?;
             // find type vars within this return type annotation
             let type_vars_within =

@@ -1313,7 +1317,9 @@ impl TopLevelComposer {
                 unifier,
                 primitives,
                 annotation.as_ref(),
-                vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
+                vec![(class_id, class_type_vars_def.clone())]
+                    .into_iter()
+                    .collect::<HashMap<_, _>>(),
             )?;
             // find type vars within this return type annotation
             let type_vars_within =
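The `.collect::<HashMap<_, _>>()` turbofish becomes necessary once `parse_ast_to_type_annotation_kinds` takes a generic hasher parameter (next hunk): a bare `.collect()` can no longer infer which concrete map type to build. An illustrative sketch of the same situation (hypothetical helper, not the real API):

    use std::collections::HashMap;

    // Consumer generic over the map's hasher, mirroring the new S parameter.
    fn takes_map<S: std::hash::BuildHasher>(m: HashMap<u32, Vec<u32>, S>) -> usize {
        m.len()
    }

    fn main() {
        // With the target generic over S, the collector must be spelled out;
        // HashMap<_, _> picks the default RandomState hasher.
        let m = vec![(1_u32, vec![2_u32, 3])]
            .into_iter()
            .collect::<HashMap<_, _>>();
        assert_eq!(takes_map(m), 1);
    }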
@@ -68,18 +68,18 @@ impl TypeAnnotation {
 /// generic variables associated with the definition.
 /// * `type_var` - The type variable associated with the type argument currently being parsed. Pass
 /// [`None`] when this function is invoked externally.
-pub fn parse_ast_to_type_annotation_kinds<T>(
+pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
     resolver: &(dyn SymbolResolver + Send + Sync),
     top_level_defs: &[Arc<RwLock<TopLevelDef>>],
     unifier: &mut Unifier,
     primitives: &PrimitiveStore,
     expr: &ast::Expr<T>,
     // the key stores the type_var of this topleveldef::class, we only need this field here
-    locked: HashMap<DefinitionId, Vec<Type>>,
+    locked: HashMap<DefinitionId, Vec<Type>, S>,
 ) -> Result<TypeAnnotation, HashSet<String>> {
     let name_handle = |id: &StrRef,
                        unifier: &mut Unifier,
-                       locked: HashMap<DefinitionId, Vec<Type>>| {
+                       locked: HashMap<DefinitionId, Vec<Type>, S>| {
         if id == &"int32".into() {
             Ok(TypeAnnotation::Primitive(primitives.int32))
         } else if id == &"int64".into() {

@@ -144,7 +144,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
         |id: &StrRef,
          slice: &ast::Expr<T>,
          unifier: &mut Unifier,
-         mut locked: HashMap<DefinitionId, Vec<Type>>| {
+         mut locked: HashMap<DefinitionId, Vec<Type>, S>| {
             if ["virtual".into(), "Generic".into(), "list".into(), "tuple".into(), "Option".into()]
                 .contains(id)
             {
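Accepting `HashMap<DefinitionId, Vec<Type>, S>` with `S: BuildHasher` lets callers hand in maps built with any hasher instead of only the default one. A reduced sketch of the same signature change (hypothetical function):

    use std::collections::HashMap;
    use std::hash::BuildHasher;

    // Lookup helper generic over the map's hasher, like the `locked` parameter.
    fn count_entries<S: BuildHasher>(locked: &HashMap<u32, Vec<u32>, S>) -> usize {
        locked.values().map(Vec::len).sum()
    }

    fn main() {
        let mut m: HashMap<u32, Vec<u32>> = HashMap::new(); // default RandomState
        m.insert(7, vec![1, 2, 3]);
        assert_eq!(count_entries(&m), 3);
    }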
@@ -14,7 +14,7 @@ use std::rc::Rc;
 use strum::IntoEnumIterator;
 
 #[must_use]
-pub fn binop_name(op: &Operator) -> &'static str {
+pub fn binop_name(op: Operator) -> &'static str {
     match op {
         Operator::Add => "__add__",
         Operator::Sub => "__sub__",

@@ -33,7 +33,7 @@ pub fn binop_name(op: &Operator) -> &'static str {
 }
 
 #[must_use]
-pub fn binop_assign_name(op: &Operator) -> &'static str {
+pub fn binop_assign_name(op: Operator) -> &'static str {
     match op {
         Operator::Add => "__iadd__",
         Operator::Sub => "__isub__",

@@ -52,7 +52,7 @@ pub fn binop_assign_name(op: &Operator) -> &'static str {
 }
 
 #[must_use]
-pub fn unaryop_name(op: &Unaryop) -> &'static str {
+pub fn unaryop_name(op: Unaryop) -> &'static str {
     match op {
         Unaryop::UAdd => "__pos__",
         Unaryop::USub => "__neg__",

@@ -62,7 +62,7 @@ pub fn unaryop_name(op: &Unaryop) -> &'static str {
 }
 
 #[must_use]
-pub fn comparison_name(op: &Cmpop) -> Option<&'static str> {
+pub fn comparison_name(op: Cmpop) -> Option<&'static str> {
     match op {
         Cmpop::Lt => Some("__lt__"),
         Cmpop::LtE => Some("__le__"),

@@ -116,7 +116,7 @@ pub fn impl_binop(
     let ret_ty = ret_ty.unwrap_or_else(|| unifier.get_fresh_var(None, None).0);
 
     for op in ops {
-        fields.insert(binop_name(op).into(), {
+        fields.insert(binop_name(*op).into(), {
             (
                 unifier.add_ty(TypeEnum::TFunc(FunSignature {
                     ret: ret_ty,

@@ -131,7 +131,7 @@ pub fn impl_binop(
             )
         });
 
-        fields.insert(binop_assign_name(op).into(), {
+        fields.insert(binop_assign_name(*op).into(), {
             (
                 unifier.add_ty(TypeEnum::TFunc(FunSignature {
                     ret: ret_ty,

@@ -155,7 +155,7 @@ pub fn impl_unaryop(unifier: &mut Unifier, ty: Type, ret_ty: Option<Type>, ops:
 
     for op in ops {
         fields.insert(
-            unaryop_name(op).into(),
+            unaryop_name(*op).into(),
             (
                 unifier.add_ty(TypeEnum::TFunc(FunSignature {
                     ret: ret_ty,

@@ -195,7 +195,7 @@ pub fn impl_cmpop(
 
     for op in ops {
         fields.insert(
-            comparison_name(op).unwrap().into(),
+            comparison_name(*op).unwrap().into(),
             (
                 unifier.add_ty(TypeEnum::TFunc(FunSignature {
                     ret: ret_ty,
@@ -425,7 +425,7 @@ pub fn typeof_ndarray_broadcast(
 pub fn typeof_binop(
     unifier: &mut Unifier,
     primitives: &PrimitiveStore,
-    op: &Operator,
+    op: Operator,
     lhs: Type,
     rhs: Type,
 ) -> Result<Option<Type>, String> {

@@ -466,7 +466,7 @@ pub fn typeof_binop(
             (lhs, rhs) if lhs == 0 || rhs == 0 => {
                 return Err(format!(
                     "Input operand {} does not have enough dimensions (has {lhs}, requires {rhs})",
-                    (rhs == 0) as u8
+                    u8::from(rhs == 0)
                 ))
             }
             (lhs, rhs) => {

@@ -520,12 +520,12 @@ pub fn typeof_binop(
 pub fn typeof_unaryop(
     unifier: &mut Unifier,
     primitives: &PrimitiveStore,
-    op: &Unaryop,
+    op: Unaryop,
     operand: Type,
 ) -> Result<Option<Type>, String> {
     let operand_obj_id = operand.obj_id(unifier);
 
-    if *op == Unaryop::Not
+    if op == Unaryop::Not
         && operand_obj_id.is_some_and(|id| id == primitives.ndarray.obj_id(unifier).unwrap())
     {
         return Err(

@@ -533,7 +533,7 @@ pub fn typeof_unaryop(
         );
     }
 
-    Ok(match *op {
+    Ok(match op {
         Unaryop::Not => match operand_obj_id {
             Some(v) if v == PrimDef::NDArray.id() => Some(operand),
             Some(_) => Some(primitives.bool),

@@ -554,7 +554,7 @@ pub fn typeof_unaryop(
             if operand_obj_id.is_some_and(|id| id == PrimDef::NDArray.id()) {
                 let (dtype, _) = unpack_ndarray_var_tys(unifier, operand);
                 if dtype.obj_id(unifier).is_some_and(|id| id == PrimDef::Bool.id()) {
-                    return Err(if *op == Unaryop::UAdd {
+                    return Err(if op == Unaryop::UAdd {
                         "The ufunc 'positive' cannot be applied to ndarray[bool, N]".to_string()
                     } else {
                         "The numpy boolean negative, the `-` operator, is not supported, use the `~` operator function instead.".to_string()

@@ -577,7 +577,7 @@ pub fn typeof_unaryop(
 pub fn typeof_cmpop(
     unifier: &mut Unifier,
     primitives: &PrimitiveStore,
-    _op: &Cmpop,
+    _op: Cmpop,
     lhs: Type,
     rhs: Type,
 ) -> Result<Option<Type>, String> {
@@ -465,7 +465,7 @@ impl<'a> Fold<()> for Inferencer<'a> {
                 (None, None) => {}
             },
             ast::StmtKind::AugAssign { target, op, value, .. } => {
-                let res_ty = self.infer_bin_ops(stmt.location, target, op, value, true)?;
+                let res_ty = self.infer_bin_ops(stmt.location, target, *op, value, true)?;
                 self.unify(res_ty, target.custom.unwrap(), &stmt.location)?;
             }
             ast::StmtKind::Assert { test, msg, .. } => {

@@ -543,20 +543,20 @@ impl<'a> Fold<()> for Inferencer<'a> {
             ExprKind::List { elts, .. } => Some(self.infer_list(elts)?),
             ExprKind::Tuple { elts, .. } => Some(self.infer_tuple(elts)?),
             ExprKind::Attribute { value, attr, ctx } => {
-                Some(self.infer_attribute(value, *attr, ctx)?)
+                Some(self.infer_attribute(value, *attr, *ctx)?)
             }
             ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
             ExprKind::BinOp { left, op, right } => {
-                Some(self.infer_bin_ops(expr.location, left, op, right, false)?)
+                Some(self.infer_bin_ops(expr.location, left, *op, right, false)?)
             }
             ExprKind::UnaryOp { op, operand } => {
-                Some(self.infer_unary_ops(expr.location, op, operand)?)
+                Some(self.infer_unary_ops(expr.location, *op, operand)?)
             }
             ExprKind::Compare { left, ops, comparators } => {
                 Some(self.infer_compare(expr.location, left, ops, comparators)?)
             }
             ExprKind::Subscript { value, slice, ctx, .. } => {
-                Some(self.infer_subscript(value.as_ref(), slice.as_ref(), ctx)?)
+                Some(self.infer_subscript(value.as_ref(), slice.as_ref(), *ctx)?)
             }
             ExprKind::IfExp { test, body, orelse } => {
                 Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?)
@ -860,7 +860,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: None,
|
custom: None,
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -918,7 +918,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -956,7 +956,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -1058,7 +1058,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0, arg1],
|
args: vec![arg0, arg1],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -1137,7 +1137,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -1188,7 +1188,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -1237,7 +1237,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0, arg1],
|
args: vec![arg0, arg1],
|
||||||
keywords: vec![],
|
keywords: vec![],
|
||||||
|
@ -1301,7 +1301,7 @@ impl<'a> Inferencer<'a> {
|
||||||
func: Box::new(Located {
|
func: Box::new(Located {
|
||||||
custom: Some(custom),
|
custom: Some(custom),
|
||||||
location: func.location,
|
location: func.location,
|
||||||
node: ExprKind::Name { id: *id, ctx: ctx.clone() },
|
node: ExprKind::Name { id: *id, ctx: *ctx },
|
||||||
}),
|
}),
|
||||||
args: vec![arg0],
|
args: vec![arg0],
|
||||||
keywords,
|
keywords,
|
||||||
|
@@ -1443,12 +1443,12 @@ impl<'a> Inferencer<'a> {
 &mut self,
 value: &ast::Expr<Option<Type>>,
 attr: StrRef,
-ctx: &ExprContext,
+ctx: ExprContext,
 ) -> InferenceResult {
 let ty = value.custom.unwrap();
 if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
 // just a fast path
-match (fields.get(&attr), ctx == &ExprContext::Store) {
+match (fields.get(&attr), ctx == ExprContext::Store) {
 (Some((ty, true)), _) | (Some((ty, false)), false) => Ok(*ty),
 (Some((_, false)), true) => {
 report_error(&format!("Field `{attr}` is immutable"), value.location)
@@ -1465,7 +1465,7 @@ impl<'a> Inferencer<'a> {
 let attr_ty = self.unifier.get_dummy_var().0;
 let fields = once((
 attr.into(),
-RecordField::new(attr_ty, ctx == &ExprContext::Store, Some(value.location)),
+RecordField::new(attr_ty, ctx == ExprContext::Store, Some(value.location)),
 ))
 .collect();
 let record = self.unifier.add_record(fields);
@@ -1486,7 +1486,7 @@ impl<'a> Inferencer<'a> {
 &mut self,
 location: Location,
 left: &ast::Expr<Option<Type>>,
-op: &ast::Operator,
+op: ast::Operator,
 right: &ast::Expr<Option<Type>>,
 is_aug_assign: bool,
 ) -> InferenceResult {
@@ -1522,7 +1522,7 @@ impl<'a> Inferencer<'a> {
 fn infer_unary_ops(
 &mut self,
 location: Location,
-op: &ast::Unaryop,
+op: ast::Unaryop,
 operand: &ast::Expr<Option<Type>>,
 ) -> InferenceResult {
 let method = unaryop_name(op).into();
@@ -1555,14 +1555,14 @@ impl<'a> Inferencer<'a> {

 let mut res = None;
 for (a, b, c) in izip!(once(left).chain(comparators), comparators, ops) {
-let method = comparison_name(c)
+let method = comparison_name(*c)
 .ok_or_else(|| HashSet::from(["unsupported comparator".to_string()]))?
 .into();

 let ret = typeof_cmpop(
 self.unifier,
 self.primitives,
-c,
+*c,
 a.custom.unwrap(),
 b.custom.unwrap(),
 )
@@ -1604,7 +1604,7 @@ impl<'a> Inferencer<'a> {
 .iter()
 .map(|ndim| match *ndim {
 SymbolValue::U64(v) => Ok(v),
-SymbolValue::U32(v) => Ok(v as u64),
+SymbolValue::U32(v) => Ok(u64::from(v)),
 SymbolValue::I32(v) => u64::try_from(v).map_err(|_| {
 HashSet::from([format!(
 "Expected non-negative literal for ndarray.ndims, got {v}"
@@ -1653,7 +1653,7 @@ impl<'a> Inferencer<'a> {
 &mut self,
 value: &ast::Expr<Option<Type>>,
 slice: &ast::Expr<Option<Type>>,
-ctx: &ExprContext,
+ctx: ExprContext,
 ) -> InferenceResult {
 let ty = self.unifier.get_dummy_var().0;
 match &slice.node {
@@ -1689,7 +1689,7 @@ impl<'a> Inferencer<'a> {
 ind.ok_or_else(|| HashSet::from(["Index must be int32".to_string()]))?;
 let map = once((
 ind.into(),
-RecordField::new(ty, ctx == &ExprContext::Store, Some(value.location)),
+RecordField::new(ty, ctx == ExprContext::Store, Some(value.location)),
 ))
 .collect();
 let seq = self.unifier.add_record(map);
@@ -823,19 +823,9 @@ impl Unifier {
 (TLiteral { values: val1, .. }, TLiteral { values: val2, .. }) => {
 for (v1, v2) in zip(val1, val2) {
 if v1 != v2 {
-let symbol_value_to_int = |value: &SymbolValue| -> Option<i128> {
-match value {
-SymbolValue::I32(v) => Some(*v as i128),
-SymbolValue::I64(v) => Some(*v as i128),
-SymbolValue::U32(v) => Some(*v as i128),
-SymbolValue::U64(v) => Some(*v as i128),
-_ => None,
-}
-};
-
 // Try performing integer promotion on literals
-let v1i = symbol_value_to_int(v1);
-let v2i = symbol_value_to_int(v2);
+let v1i = i128::try_from(v1.clone()).ok();
+let v2i = i128::try_from(v2.clone()).ok();

 if v1i != v2i {
 return Self::incompatible_types(a, b);
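The new `i128::try_from(v1.clone()).ok()` calls above imply a `TryFrom<SymbolValue>` conversion defined elsewhere in nac3core; its exact shape is not shown in this diff. A rough standalone sketch of what such an impl could look like, with `SymbolValue` re-declared here in simplified form purely for illustration:

```rust
// Hypothetical, simplified stand-in for nac3core's SymbolValue; the real enum
// has more variants and a different error type on its TryFrom impl.
#[derive(Clone)]
enum SymbolValue {
    I32(i32),
    I64(i64),
    U32(u32),
    U64(u64),
    Bool(bool),
}

impl TryFrom<SymbolValue> for i128 {
    type Error = ();

    fn try_from(value: SymbolValue) -> Result<Self, Self::Error> {
        // Only integer-valued literals can be promoted; everything else is rejected.
        match value {
            SymbolValue::I32(v) => Ok(i128::from(v)),
            SymbolValue::I64(v) => Ok(i128::from(v)),
            SymbolValue::U32(v) => Ok(i128::from(v)),
            SymbolValue::U64(v) => Ok(i128::from(v)),
            SymbolValue::Bool(_) => Err(()),
        }
    }
}
```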
@@ -59,7 +59,7 @@ impl<'a> DwarfReader<'a> {
 let mut byte: u8;
 loop {
 byte = self.read_u8();
-result |= ((byte & 0x7F) as u64) << shift;
+result |= u64::from(byte & 0x7F) << shift;
 shift += 7;
 if byte & 0x80 == 0 {
 break;
@@ -74,7 +74,7 @@ impl<'a> DwarfReader<'a> {
 let mut byte: u8;
 loop {
 byte = self.read_u8();
-result |= ((byte & 0x7F) as u64) << shift;
+result |= u64::from(byte & 0x7F) << shift;
 shift += 7;
 if byte & 0x80 == 0 {
 break;
@@ -156,10 +156,9 @@ fn read_encoded_pointer(reader: &mut DwarfReader, encoding: u8) -> Result<usize,
 }

 match encoding & 0x0F {
-DW_EH_PE_absptr => Ok(reader.read_u32() as usize),
+DW_EH_PE_absptr | DW_EH_PE_udata4 => Ok(reader.read_u32() as usize),
 DW_EH_PE_uleb128 => Ok(reader.read_uleb128() as usize),
 DW_EH_PE_udata2 => Ok(reader.read_u16() as usize),
-DW_EH_PE_udata4 => Ok(reader.read_u32() as usize),
 DW_EH_PE_udata8 => Ok(reader.read_u64() as usize),
 DW_EH_PE_sleb128 => Ok(reader.read_sleb128() as usize),
 DW_EH_PE_sdata2 => Ok(reader.read_i16() as usize),
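For reference, the loop touched in the two hunks above is standard unsigned LEB128 decoding as used throughout `.eh_frame`. A self-contained sketch of the unsigned case; the slice-based signature is an assumption for illustration only, since the real `DwarfReader` pulls bytes one at a time via `read_u8`:

```rust
// Minimal ULEB128 decoder: each byte contributes its low 7 bits, least
// significant group first; a clear high bit marks the final byte.
fn decode_uleb128(bytes: &[u8]) -> (u64, usize) {
    let mut result: u64 = 0;
    let mut shift = 0;
    let mut consumed = 0;
    for &byte in bytes {
        consumed += 1;
        // u64::from avoids the `as` cast flagged by clippy in the diff above.
        result |= u64::from(byte & 0x7F) << shift;
        shift += 7;
        if byte & 0x80 == 0 {
            break;
        }
    }
    (result, consumed)
}

fn main() {
    // 624485 encodes as E5 8E 26 in ULEB128.
    assert_eq!(decode_uleb128(&[0xE5, 0x8E, 0x26]), (624_485, 3));
}
```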
@@ -221,8 +220,8 @@ pub struct EH_Frame<'a> {
 impl<'a> EH_Frame<'a> {
 /// Creates an [EH_Frame] using the bytes in the `.eh_frame` section and its address in the ELF
 /// file.
-pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> Result<EH_Frame, ()> {
-Ok(EH_Frame { reader: DwarfReader::new(eh_frame_slice, eh_frame_addr) })
+pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> EH_Frame {
+EH_Frame { reader: DwarfReader::new(eh_frame_slice, eh_frame_addr) }
 }

 /// Returns an [Iterator] over all Call Frame Information (CFI) records.
@@ -255,7 +254,7 @@ impl<'a> CFI_Record<'a> {

 // length == u32::MAX means that the length is only representable with 64 bits,
 // which does not make sense in a system with 32-bit address.
-0xFFFFFFFF => unimplemented!(),
+0xFFFF_FFFF => unimplemented!(),

 _ => {
 let mut fde_reader = DwarfReader::from_reader(cie_reader, false);
@@ -355,7 +354,7 @@ impl<'a> Iterator for CFI_Records<'a> {
 let length = match length {
 // eh_frame with 0-length means the CIE is terminated
 0 => return None,
-0xFFFFFFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
+0xFFFF_FFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
 other => other,
 } as usize;

@@ -401,7 +400,7 @@ impl<'a> Iterator for FDE_Records<'a> {
 let length = match self.reader.read_u32() {
 // eh_frame with 0-length means the CIE is terminated
 0 => return None,
-0xFFFFFFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
+0xFFFF_FFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
 other => other,
 } as usize;

@@ -491,7 +490,7 @@ impl<'a> EH_Frame_Hdr<'a> {
 // The original length field should be able to hold the entire value.
 // The device memory space is limited to 32-bits addresses anyway.
 let entry_length = reader.read_u32();
-if entry_length == 0 || entry_length == 0xFFFFFFFF {
+if entry_length == 0 || entry_length == 0xFFFF_FFFF {
 unimplemented!()
 }

@@ -502,7 +501,7 @@ impl<'a> EH_Frame_Hdr<'a> {
 fde_count += 1;
 }

-reader.offset(entry_length - mem::size_of::<u32>() as u32)
+reader.offset(entry_length - mem::size_of::<u32>() as u32);
 }

 12 + fde_count * 8
@@ -1,5 +1,5 @@
 /* generated from elf.h with rust-bindgen and then manually altered */
-#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, dead_code)]
+#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, dead_code, clippy::pedantic)]

 pub const EI_NIDENT: usize = 16;
 pub const EI_MAG0: usize = 0;
@@ -1,3 +1,26 @@
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(
+clippy::cast_possible_truncation,
+clippy::cast_possible_wrap,
+clippy::cast_sign_loss,
+clippy::doc_markdown,
+clippy::enum_glob_use,
+clippy::missing_errors_doc,
+clippy::missing_panics_doc,
+clippy::module_name_repetitions,
+clippy::similar_names,
+clippy::struct_field_names,
+clippy::too_many_lines,
+clippy::wildcard_imports
+)]
+
 use dwarf::*;
 use elf::*;
 use std::collections::HashMap;
@@ -70,45 +93,45 @@ struct SectionRecord<'a> {
 data: Vec<u8>,
 }

-fn read_unaligned<T: Copy>(data: &[u8], offset: usize) -> Result<T, ()> {
+fn read_unaligned<T: Copy>(data: &[u8], offset: usize) -> Option<T> {
 if data.len() < offset + mem::size_of::<T>() {
-Err(())
+None
 } else {
-let ptr = data.as_ptr().wrapping_add(offset) as *const T;
-Ok(unsafe { ptr::read_unaligned(ptr) })
+let ptr = data.as_ptr().wrapping_add(offset).cast();
+Some(unsafe { ptr::read_unaligned(ptr) })
 }
 }

-pub fn get_ref_slice<T: Copy>(data: &[u8], offset: usize, len: usize) -> Result<&[T], ()> {
+#[must_use]
+pub fn get_ref_slice<T: Copy>(data: &[u8], offset: usize, len: usize) -> Option<&[T]> {
 if data.len() < offset + mem::size_of::<T>() * len {
-Err(())
+None
 } else {
-let ptr = data.as_ptr().wrapping_add(offset) as *const T;
-Ok(unsafe { slice::from_raw_parts(ptr, len) })
+let ptr = data.as_ptr().wrapping_add(offset).cast();
+Some(unsafe { slice::from_raw_parts(ptr, len) })
 }
 }

-fn from_struct_vec<T>(struct_vec: Vec<T>) -> Vec<u8> {
+fn from_struct_slice<T>(struct_vec: &[T]) -> Vec<u8> {
 let ptr = struct_vec.as_ptr();
-unsafe { slice::from_raw_parts(ptr as *const u8, struct_vec.len() * mem::size_of::<T>()) }
-.to_vec()
+unsafe { slice::from_raw_parts(ptr.cast(), mem::size_of_val(struct_vec)) }.to_vec()
 }

 fn to_struct_slice<T>(bytes: &[u8]) -> &[T] {
-unsafe { slice::from_raw_parts(bytes.as_ptr() as *const T, bytes.len() / mem::size_of::<T>()) }
+unsafe { slice::from_raw_parts(bytes.as_ptr().cast(), bytes.len() / mem::size_of::<T>()) }
 }

 fn to_struct_mut_slice<T>(bytes: &mut [u8]) -> &mut [T] {
 unsafe {
-slice::from_raw_parts_mut(bytes.as_mut_ptr() as *mut T, bytes.len() / mem::size_of::<T>())
+slice::from_raw_parts_mut(bytes.as_mut_ptr().cast(), bytes.len() / mem::size_of::<T>())
 }
 }

 fn elf_hash(name: &[u8]) -> u32 {
 let mut h: u32 = 0;
 for c in name {
-h = (h << 4) + *c as u32;
-let g = h & 0xf0000000;
+h = (h << 4) + u32::from(*c);
+let g = h & 0xf000_0000;
 if g != 0 {
 h ^= g >> 24;
 h &= !g;
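Switching `read_unaligned` and `get_ref_slice` from `Result<_, ()>` to `Option` is what lets the later hunks replace `.map_err(|()| ...)` with `let ... else` at the call sites. A minimal sketch of that pattern, with a made-up `Header` type standing in for the real ELF structs:

```rust
use std::mem;
use std::ptr;

// Option-based unaligned read, as in the diff above: bounds-check first, then
// read through an unaligned pointer. `Header` below is illustrative only.
fn read_unaligned<T: Copy>(data: &[u8], offset: usize) -> Option<T> {
    if data.len() < offset + mem::size_of::<T>() {
        None
    } else {
        let ptr = data.as_ptr().wrapping_add(offset).cast();
        // Safety: the length check above guarantees size_of::<T>() readable bytes.
        Some(unsafe { ptr::read_unaligned(ptr) })
    }
}

#[derive(Clone, Copy, Debug)]
struct Header {
    magic: u32,
}

fn parse(data: &[u8]) -> Result<Header, &'static str> {
    // Callers can now attach a readable error with let-else instead of map_err.
    let Some(header) = read_unaligned::<Header>(data, 0) else {
        return Err("cannot read header");
    };
    // 0x464c_457f is the little-endian u32 view of the ELF magic 0x7F 'E' 'L' 'F'.
    if header.magic != 0x464c_457f {
        return Err("bad magic");
    }
    Ok(header)
}
```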
@@ -202,6 +225,15 @@ impl<'a> Linker<'a> {
 relocs: &[R],
 target_section: Elf32_Word,
 ) -> Result<(), Error> {
+type RelocateFn = dyn Fn(&mut [u8], Elf32_Word);
+
+struct RelocInfo<'a, R> {
+pub defined_val: bool,
+pub indirect_reloc: Option<&'a R>,
+pub pc_relative: bool,
+pub relocate: Option<Box<RelocateFn>>,
+}
+
 for reloc in relocs {
 let sym = match reloc.sym_info() as usize {
 STN_UNDEF => None,
@@ -212,10 +244,7 @@ impl<'a> Linker<'a> {

 let resolve_symbol_addr =
 |sym_option: Option<&Elf32_Sym>| -> Result<Elf32_Word, Error> {
-let sym = match sym_option {
-Some(sym) => sym,
-None => return Ok(0),
-};
+let Some(sym) = sym_option else { return Ok(0) };

 match sym.st_shndx {
 SHN_UNDEF => Err(Error::Lookup("undefined symbol")),
@ -242,13 +271,6 @@ impl<'a> Linker<'a> {
|
||||||
.ok_or(Error::Parsing("Cannot find section with matching sh_index"))
|
.ok_or(Error::Parsing("Cannot find section with matching sh_index"))
|
||||||
};
|
};
|
||||||
|
|
||||||
struct RelocInfo<'a, R> {
|
|
||||||
pub defined_val: bool,
|
|
||||||
pub indirect_reloc: Option<&'a R>,
|
|
||||||
pub pc_relative: bool,
|
|
||||||
pub relocate: Option<Box<dyn Fn(&mut [u8], Elf32_Word)>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
let classify = |reloc: &R, sym_option: Option<&Elf32_Sym>| -> Option<RelocInfo<R>> {
|
let classify = |reloc: &R, sym_option: Option<&Elf32_Sym>| -> Option<RelocInfo<R>> {
|
||||||
let defined_val = sym_option.map_or(true, |sym| {
|
let defined_val = sym_option.map_or(true, |sym| {
|
||||||
sym.st_shndx != SHN_UNDEF || ELF32_ST_BIND(sym.st_info) == STB_LOCAL
|
sym.st_shndx != SHN_UNDEF || ELF32_ST_BIND(sym.st_info) == STB_LOCAL
|
||||||
|
@ -260,7 +282,7 @@ impl<'a> Linker<'a> {
|
||||||
indirect_reloc: None,
|
indirect_reloc: None,
|
||||||
pc_relative: true,
|
pc_relative: true,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
LittleEndian::write_u32(target_word, value)
|
LittleEndian::write_u32(target_word, value);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -271,9 +293,9 @@ impl<'a> Linker<'a> {
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
LittleEndian::write_u32(
|
LittleEndian::write_u32(
|
||||||
target_word,
|
target_word,
|
||||||
(LittleEndian::read_u32(target_word) & 0x80000000)
|
(LittleEndian::read_u32(target_word) & 0x8000_0000)
|
||||||
| value & 0x7FFFFFFF,
|
| value & 0x7FFF_FFFF,
|
||||||
)
|
);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -295,8 +317,8 @@ impl<'a> Linker<'a> {
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
let auipc_raw = LittleEndian::read_u32(target_word);
|
let auipc_raw = LittleEndian::read_u32(target_word);
|
||||||
let auipc_insn =
|
let auipc_insn =
|
||||||
(auipc_raw & 0xFFF) | ((value + 0x800) & 0xFFFFF000);
|
(auipc_raw & 0xFFF) | ((value + 0x800) & 0xFFFF_F000);
|
||||||
LittleEndian::write_u32(target_word, auipc_insn)
|
LittleEndian::write_u32(target_word, auipc_insn);
|
||||||
})),
|
})),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -306,7 +328,7 @@ impl<'a> Linker<'a> {
|
||||||
indirect_reloc: None,
|
indirect_reloc: None,
|
||||||
pc_relative: true,
|
pc_relative: true,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
LittleEndian::write_u32(target_word, value)
|
LittleEndian::write_u32(target_word, value);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -327,14 +349,14 @@ impl<'a> Linker<'a> {
|
||||||
// Here, we convert to direct addressing
|
// Here, we convert to direct addressing
|
||||||
// GOT reloc (indirect) -> lw + addi
|
// GOT reloc (indirect) -> lw + addi
|
||||||
// PCREL reloc (direct) -> addi
|
// PCREL reloc (direct) -> addi
|
||||||
let (lo_opcode, lo_funct3) = (0b0010011, 0b000);
|
let (lo_opcode, lo_funct3) = (0b001_0011, 0b000);
|
||||||
let addi_lw_raw = LittleEndian::read_u32(target_word);
|
let addi_lw_raw = LittleEndian::read_u32(target_word);
|
||||||
let addi_insn = lo_opcode
|
let addi_insn = lo_opcode
|
||||||
| (addi_lw_raw & 0xF8F80)
|
| (addi_lw_raw & 0xF8F80)
|
||||||
| (lo_funct3 << 12)
|
| (lo_funct3 << 12)
|
||||||
| ((value & 0xFFF) << 20);
|
| ((value & 0xFFF) << 20);
|
||||||
|
|
||||||
LittleEndian::write_u32(target_word, addi_insn)
|
LittleEndian::write_u32(target_word, addi_insn);
|
||||||
})),
|
})),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -351,7 +373,7 @@ impl<'a> Linker<'a> {
|
||||||
indirect_reloc: None,
|
indirect_reloc: None,
|
||||||
pc_relative: false,
|
pc_relative: false,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
LittleEndian::write_u32(target_word, value)
|
LittleEndian::write_u32(target_word, value);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -361,7 +383,7 @@ impl<'a> Linker<'a> {
|
||||||
pc_relative: false,
|
pc_relative: false,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
let old_value = LittleEndian::read_u32(target_word);
|
let old_value = LittleEndian::read_u32(target_word);
|
||||||
LittleEndian::write_u32(target_word, old_value.wrapping_add(value))
|
LittleEndian::write_u32(target_word, old_value.wrapping_add(value));
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -371,7 +393,7 @@ impl<'a> Linker<'a> {
|
||||||
pc_relative: false,
|
pc_relative: false,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
let old_value = LittleEndian::read_u32(target_word);
|
let old_value = LittleEndian::read_u32(target_word);
|
||||||
LittleEndian::write_u32(target_word, old_value.wrapping_sub(value))
|
LittleEndian::write_u32(target_word, old_value.wrapping_sub(value));
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -380,7 +402,7 @@ impl<'a> Linker<'a> {
|
||||||
indirect_reloc: None,
|
indirect_reloc: None,
|
||||||
pc_relative: false,
|
pc_relative: false,
|
||||||
relocate: Some(Box::new(|target_word, value| {
|
relocate: Some(Box::new(|target_word, value| {
|
||||||
LittleEndian::write_u16(target_word, value as u16)
|
LittleEndian::write_u16(target_word, value as u16);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -393,7 +415,7 @@ impl<'a> Linker<'a> {
|
||||||
LittleEndian::write_u16(
|
LittleEndian::write_u16(
|
||||||
target_word,
|
target_word,
|
||||||
old_value.wrapping_add(value as u16),
|
old_value.wrapping_add(value as u16),
|
||||||
)
|
);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -406,7 +428,7 @@ impl<'a> Linker<'a> {
|
||||||
LittleEndian::write_u16(
|
LittleEndian::write_u16(
|
||||||
target_word,
|
target_word,
|
||||||
old_value.wrapping_sub(value as u16),
|
old_value.wrapping_sub(value as u16),
|
||||||
)
|
);
|
||||||
})),
|
})),
|
||||||
}),
|
}),
|
||||||
|
|
||||||
|
@ -488,7 +510,7 @@ impl<'a> Linker<'a> {
|
||||||
|
|
||||||
if let Some(relocate) = reloc_info.relocate {
|
if let Some(relocate) = reloc_info.relocate {
|
||||||
let target_word = &mut target_sec_image[reloc.offset() as usize..];
|
let target_word = &mut target_sec_image[reloc.offset() as usize..];
|
||||||
relocate(target_word, value)
|
relocate(target_word, value);
|
||||||
} else {
|
} else {
|
||||||
self.rela_dyn_relas.push(Elf32_Rela {
|
self.rela_dyn_relas.push(Elf32_Rela {
|
||||||
r_offset: rela_off,
|
r_offset: rela_off,
|
||||||
|
@ -536,8 +558,7 @@ impl<'a> Linker<'a> {
|
||||||
let eh_frame_slice = eh_frame_rec.data.as_slice();
|
let eh_frame_slice = eh_frame_rec.data.as_slice();
|
||||||
// Prepare a new buffer to dodge borrow check
|
// Prepare a new buffer to dodge borrow check
|
||||||
let mut eh_frame_hdr_vec: Vec<u8> = vec![0; eh_frame_hdr_rec.shdr.sh_size as usize];
|
let mut eh_frame_hdr_vec: Vec<u8> = vec![0; eh_frame_hdr_rec.shdr.sh_size as usize];
|
||||||
let eh_frame = EH_Frame::new(eh_frame_slice, eh_frame_rec.shdr.sh_offset)
|
let eh_frame = EH_Frame::new(eh_frame_slice, eh_frame_rec.shdr.sh_offset);
|
||||||
.map_err(|()| "cannot read EH frame")?;
|
|
||||||
let mut eh_frame_hdr = EH_Frame_Hdr::new(
|
let mut eh_frame_hdr = EH_Frame_Hdr::new(
|
||||||
eh_frame_hdr_vec.as_mut_slice(),
|
eh_frame_hdr_vec.as_mut_slice(),
|
||||||
eh_frame_hdr_rec.shdr.sh_offset,
|
eh_frame_hdr_rec.shdr.sh_offset,
|
||||||
|
@ -547,7 +568,7 @@ impl<'a> Linker<'a> {
|
||||||
init_pos,
|
init_pos,
|
||||||
virt_addr,
|
virt_addr,
|
||||||
)| {
|
)| {
|
||||||
eh_frame_hdr.add_fde(init_pos, virt_addr)
|
eh_frame_hdr.add_fde(init_pos, virt_addr);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Sort FDE entries in .eh_frame_hdr
|
// Sort FDE entries in .eh_frame_hdr
|
||||||
|
@ -562,33 +583,109 @@ impl<'a> Linker<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn ld(data: &'a [u8]) -> Result<Vec<u8>, Error> {
|
pub fn ld(data: &'a [u8]) -> Result<Vec<u8>, Error> {
|
||||||
let ehdr = read_unaligned::<Elf32_Ehdr>(data, 0).map_err(|()| "cannot read ELF header")?;
|
fn allocate_rela_dyn<R: Relocatable>(
|
||||||
|
linker: &Linker,
|
||||||
|
relocs: &[R],
|
||||||
|
) -> Result<(usize, Vec<u32>), Error> {
|
||||||
|
let mut alloc_size = 0;
|
||||||
|
let mut rela_dyn_sym_indices = Vec::new();
|
||||||
|
for reloc in relocs {
|
||||||
|
if reloc.sym_info() as usize == STN_UNDEF {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
let sym: &Elf32_Sym = linker
|
||||||
|
.symtab
|
||||||
|
.get(reloc.sym_info() as usize)
|
||||||
|
.ok_or("symbol out of bounds of symbol table")?;
|
||||||
|
|
||||||
|
match (linker.isa, reloc.type_info()) {
|
||||||
|
// Absolute address relocations
|
||||||
|
// A runtime relocation is needed to find the loading address
|
||||||
|
(Isa::CortexA9, R_ARM_ABS32) | (Isa::RiscV32, R_RISCV_32) => {
|
||||||
|
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
||||||
|
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
|
||||||
|
rela_dyn_sym_indices.push(reloc.sym_info());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Relative address relocations
|
||||||
|
// Relay the relocation to the runtime linker only if the symbol is not defined
|
||||||
|
(Isa::CortexA9, R_ARM_REL32 | R_ARM_PREL31 | R_ARM_TARGET2)
|
||||||
|
| (
|
||||||
|
Isa::RiscV32,
|
||||||
|
R_RISCV_CALL_PLT | R_RISCV_PCREL_HI20 | R_RISCV_GOT_HI20 | R_RISCV_32_PCREL
|
||||||
|
| R_RISCV_SET32 | R_RISCV_ADD32 | R_RISCV_SUB32 | R_RISCV_SET16
|
||||||
|
| R_RISCV_ADD16 | R_RISCV_SUB16 | R_RISCV_SET8 | R_RISCV_ADD8
|
||||||
|
| R_RISCV_SUB8 | R_RISCV_SET6 | R_RISCV_SUB6,
|
||||||
|
) => {
|
||||||
|
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
|
||||||
|
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
||||||
|
rela_dyn_sym_indices.push(reloc.sym_info());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// RISC-V: Lower 12-bits relocations
|
||||||
|
// If the upper 20-bits relocation cannot be resolved,
|
||||||
|
// this relocation will be relayed to the runtime linker.
|
||||||
|
(Isa::RiscV32, R_RISCV_PCREL_LO12_I) => {
|
||||||
|
// Find the HI20 relocation
|
||||||
|
let indirect_reloc = relocs
|
||||||
|
.iter()
|
||||||
|
.find(|reloc| reloc.offset() == sym.st_value)
|
||||||
|
.ok_or("malformatted LO12 relocation")?;
|
||||||
|
let indirect_sym = linker.symtab[indirect_reloc.sym_info() as usize];
|
||||||
|
if ELF32_ST_BIND(indirect_sym.st_info) == STB_GLOBAL
|
||||||
|
&& indirect_sym.st_shndx == SHN_UNDEF
|
||||||
|
{
|
||||||
|
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
||||||
|
rela_dyn_sym_indices.push(reloc.sym_info());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => {
|
||||||
|
println!("Relocation type 0x{:X?} is not supported", reloc.type_info());
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok((alloc_size, rela_dyn_sym_indices))
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(ehdr) = read_unaligned::<Elf32_Ehdr>(data, 0) else {
|
||||||
|
Err("cannot read ELF header")?
|
||||||
|
};
|
||||||
let isa = match ehdr.e_machine {
|
let isa = match ehdr.e_machine {
|
||||||
EM_ARM => Isa::CortexA9,
|
EM_ARM => Isa::CortexA9,
|
||||||
EM_RISCV => Isa::RiscV32,
|
EM_RISCV => Isa::RiscV32,
|
||||||
_ => return Err(Error::Parsing("unsupported architecture")),
|
_ => return Err(Error::Parsing("unsupported architecture")),
|
||||||
};
|
};
|
||||||
|
|
||||||
let shdrs = get_ref_slice::<Elf32_Shdr>(data, ehdr.e_shoff as usize, ehdr.e_shnum as usize)
|
let Some(shdrs) =
|
||||||
.map_err(|()| "cannot read section header table")?;
|
get_ref_slice::<Elf32_Shdr>(data, ehdr.e_shoff as usize, ehdr.e_shnum as usize)
|
||||||
|
else {
|
||||||
|
Err("cannot read section header table")?
|
||||||
|
};
|
||||||
|
|
||||||
// Read .strtab
|
// Read .strtab
|
||||||
let strtab_shdr = shdrs[ehdr.e_shstrndx as usize];
|
let strtab_shdr = shdrs[ehdr.e_shstrndx as usize];
|
||||||
let strtab =
|
let Some(strtab) =
|
||||||
get_ref_slice::<u8>(data, strtab_shdr.sh_offset as usize, strtab_shdr.sh_size as usize)
|
get_ref_slice::<u8>(data, strtab_shdr.sh_offset as usize, strtab_shdr.sh_size as usize)
|
||||||
.map_err(|()| "cannot read the string table from data")?;
|
else {
|
||||||
|
Err("cannot read the string table from data")?
|
||||||
|
};
|
||||||
|
|
||||||
// Read .symtab
|
// Read .symtab
|
||||||
let symtab_shdr = shdrs
|
let symtab_shdr = shdrs
|
||||||
.iter()
|
.iter()
|
||||||
.find(|shdr| shdr.sh_type as usize == SHT_SYMTAB)
|
.find(|shdr| shdr.sh_type as usize == SHT_SYMTAB)
|
||||||
.ok_or(Error::Parsing("cannot find the symbol table"))?;
|
.ok_or(Error::Parsing("cannot find the symbol table"))?;
|
||||||
let symtab = get_ref_slice::<Elf32_Sym>(
|
let Some(symtab) = get_ref_slice::<Elf32_Sym>(
|
||||||
data,
|
data,
|
||||||
symtab_shdr.sh_offset as usize,
|
symtab_shdr.sh_offset as usize,
|
||||||
symtab_shdr.sh_size as usize / mem::size_of::<Elf32_Sym>(),
|
symtab_shdr.sh_size as usize / mem::size_of::<Elf32_Sym>(),
|
||||||
)
|
) else {
|
||||||
.map_err(|()| "cannot read the symbol table from data")?;
|
Err("cannot read the symbol table from data")?
|
||||||
|
};
|
||||||
|
|
||||||
// Section table for the .elf paired with the section name
|
// Section table for the .elf paired with the section name
|
||||||
// To be formalized incrementally
|
// To be formalized incrementally
|
||||||
|
@ -744,21 +841,27 @@ impl<'a> Linker<'a> {
|
||||||
($shdr: expr, $stmt: expr) => {
|
($shdr: expr, $stmt: expr) => {
|
||||||
match $shdr.sh_type as usize {
|
match $shdr.sh_type as usize {
|
||||||
SHT_RELA => {
|
SHT_RELA => {
|
||||||
let relocs = get_ref_slice::<Elf32_Rela>(
|
let Some(relocs) = get_ref_slice::<Elf32_Rela>(
|
||||||
data,
|
data,
|
||||||
$shdr.sh_offset as usize,
|
$shdr.sh_offset as usize,
|
||||||
$shdr.sh_size as usize / mem::size_of::<Elf32_Rela>(),
|
$shdr.sh_size as usize / mem::size_of::<Elf32_Rela>(),
|
||||||
)
|
) else {
|
||||||
.map_err(|()| "cannot parse relocations")?;
|
Err("cannot parse relocations")?
|
||||||
|
};
|
||||||
|
|
||||||
|
#[allow(clippy::redundant_closure_call)]
|
||||||
$stmt(relocs)
|
$stmt(relocs)
|
||||||
}
|
}
|
||||||
SHT_REL => {
|
SHT_REL => {
|
||||||
let relocs = get_ref_slice::<Elf32_Rel>(
|
let Some(relocs) = get_ref_slice::<Elf32_Rel>(
|
||||||
data,
|
data,
|
||||||
$shdr.sh_offset as usize,
|
$shdr.sh_offset as usize,
|
||||||
$shdr.sh_size as usize / mem::size_of::<Elf32_Rel>(),
|
$shdr.sh_size as usize / mem::size_of::<Elf32_Rel>(),
|
||||||
)
|
) else {
|
||||||
.map_err(|()| "cannot parse relocations")?;
|
Err("cannot parse relocations")?
|
||||||
|
};
|
||||||
|
|
||||||
|
#[allow(clippy::redundant_closure_call)]
|
||||||
$stmt(relocs)
|
$stmt(relocs)
|
||||||
}
|
}
|
||||||
_ => unreachable!(),
|
_ => unreachable!(),
|
||||||
|
@ -766,84 +869,6 @@ impl<'a> Linker<'a> {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
fn allocate_rela_dyn<R: Relocatable>(
|
|
||||||
linker: &Linker,
|
|
||||||
relocs: &[R],
|
|
||||||
) -> Result<(usize, Vec<u32>), Error> {
|
|
||||||
let mut alloc_size = 0;
|
|
||||||
let mut rela_dyn_sym_indices = Vec::new();
|
|
||||||
for reloc in relocs {
|
|
||||||
if reloc.sym_info() as usize == STN_UNDEF {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
let sym: &Elf32_Sym = linker
|
|
||||||
.symtab
|
|
||||||
.get(reloc.sym_info() as usize)
|
|
||||||
.ok_or("symbol out of bounds of symbol table")?;
|
|
||||||
|
|
||||||
match (linker.isa, reloc.type_info()) {
|
|
||||||
// Absolute address relocations
|
|
||||||
// A runtime relocation is needed to find the loading address
|
|
||||||
(Isa::CortexA9, R_ARM_ABS32) | (Isa::RiscV32, R_RISCV_32) => {
|
|
||||||
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
|
||||||
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
|
|
||||||
rela_dyn_sym_indices.push(reloc.sym_info());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Relative address relocations
|
|
||||||
// Relay the relocation to the runtime linker only if the symbol is not defined
|
|
||||||
(Isa::CortexA9, R_ARM_REL32)
|
|
||||||
| (Isa::CortexA9, R_ARM_PREL31)
|
|
||||||
| (Isa::CortexA9, R_ARM_TARGET2)
|
|
||||||
| (Isa::RiscV32, R_RISCV_CALL_PLT)
|
|
||||||
| (Isa::RiscV32, R_RISCV_PCREL_HI20)
|
|
||||||
| (Isa::RiscV32, R_RISCV_GOT_HI20)
|
|
||||||
| (Isa::RiscV32, R_RISCV_32_PCREL)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SET32)
|
|
||||||
| (Isa::RiscV32, R_RISCV_ADD32)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SUB32)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SET16)
|
|
||||||
| (Isa::RiscV32, R_RISCV_ADD16)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SUB16)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SET8)
|
|
||||||
| (Isa::RiscV32, R_RISCV_ADD8)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SUB8)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SET6)
|
|
||||||
| (Isa::RiscV32, R_RISCV_SUB6) => {
|
|
||||||
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
|
|
||||||
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
|
||||||
rela_dyn_sym_indices.push(reloc.sym_info());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// RISC-V: Lower 12-bits relocations
|
|
||||||
// If the upper 20-bits relocation cannot be resolved,
|
|
||||||
// this relocation will be relayed to the runtime linker.
|
|
||||||
(Isa::RiscV32, R_RISCV_PCREL_LO12_I) => {
|
|
||||||
// Find the HI20 relocation
|
|
||||||
let indirect_reloc = relocs
|
|
||||||
.iter()
|
|
||||||
.find(|reloc| reloc.offset() == sym.st_value)
|
|
||||||
.ok_or("malformatted LO12 relocation")?;
|
|
||||||
let indirect_sym = linker.symtab[indirect_reloc.sym_info() as usize];
|
|
||||||
if ELF32_ST_BIND(indirect_sym.st_info) == STB_GLOBAL
|
|
||||||
&& indirect_sym.st_shndx == SHN_UNDEF
|
|
||||||
{
|
|
||||||
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
|
|
||||||
rela_dyn_sym_indices.push(reloc.sym_info());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
_ => {
|
|
||||||
println!("Relocation type 0x{:X?} is not supported", reloc.type_info());
|
|
||||||
unimplemented!()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok((alloc_size, rela_dyn_sym_indices))
|
|
||||||
}
|
|
||||||
|
|
||||||
for shdr in shdrs
|
for shdr in shdrs
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|shdr| shdr.sh_type as usize == SHT_REL || shdr.sh_type as usize == SHT_RELA)
|
.filter(|shdr| shdr.sh_type as usize == SHT_REL || shdr.sh_type as usize == SHT_RELA)
|
||||||
|
@ -871,7 +896,7 @@ impl<'a> Linker<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Avoid symbol duplication
|
// Avoid symbol duplication
|
||||||
rela_dyn_sym_indices.sort();
|
rela_dyn_sym_indices.sort_unstable();
|
||||||
rela_dyn_sym_indices.dedup();
|
rela_dyn_sym_indices.dedup();
|
||||||
|
|
||||||
if rela_dyn_size != 0 {
|
if rela_dyn_size != 0 {
|
||||||
|
@ -1056,7 +1081,7 @@ impl<'a> Linker<'a> {
|
||||||
sh_entsize: mem::size_of::<Elf32_Sym>() as Elf32_Word,
|
sh_entsize: mem::size_of::<Elf32_Sym>() as Elf32_Word,
|
||||||
},
|
},
|
||||||
".dynsym",
|
".dynsym",
|
||||||
from_struct_vec(dynsym),
|
from_struct_slice(&dynsym),
|
||||||
);
|
);
|
||||||
let hash_elf_index = linker.load_section(
|
let hash_elf_index = linker.load_section(
|
||||||
&Elf32_Shdr {
|
&Elf32_Shdr {
|
||||||
|
@ -1072,7 +1097,7 @@ impl<'a> Linker<'a> {
|
||||||
sh_entsize: 4,
|
sh_entsize: 4,
|
||||||
},
|
},
|
||||||
".hash",
|
".hash",
|
||||||
from_struct_vec(hash),
|
from_struct_slice(&hash),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Link .rela.dyn header to the .dynsym header
|
// Link .rela.dyn header to the .dynsym header
|
||||||
|
@ -1171,7 +1196,7 @@ impl<'a> Linker<'a> {
|
||||||
};
|
};
|
||||||
|
|
||||||
let dynamic_elf_index =
|
let dynamic_elf_index =
|
||||||
linker.load_section(&dynamic_shdr, ".dynamic", from_struct_vec(dyn_entries));
|
linker.load_section(&dynamic_shdr, ".dynamic", from_struct_slice(&dyn_entries));
|
||||||
|
|
||||||
let last_w_sec_elf_index = linker.elf_shdrs.len() - 1;
|
let last_w_sec_elf_index = linker.elf_shdrs.len() - 1;
|
||||||
|
|
||||||
|
@ -1322,7 +1347,7 @@ impl<'a> Linker<'a> {
|
||||||
// Prepare a STRTAB to hold the names of section headers
|
// Prepare a STRTAB to hold the names of section headers
|
||||||
// Fix the sh_name field of the section headers
|
// Fix the sh_name field of the section headers
|
||||||
let mut shstrtab = Vec::new();
|
let mut shstrtab = Vec::new();
|
||||||
for shdr_rec in linker.elf_shdrs.iter_mut() {
|
for shdr_rec in &mut linker.elf_shdrs {
|
||||||
let shstrtab_index = shstrtab.len();
|
let shstrtab_index = shstrtab.len();
|
||||||
shstrtab.extend(shdr_rec.name.as_bytes());
|
shstrtab.extend(shdr_rec.name.as_bytes());
|
||||||
shstrtab.push(0);
|
shstrtab.push(0);
|
||||||
|
@ -1363,20 +1388,17 @@ impl<'a> Linker<'a> {
|
||||||
let alignment = (4 - (linker.image.len() % 4)) % 4;
|
let alignment = (4 - (linker.image.len() % 4)) % 4;
|
||||||
let sec_headers_offset = linker.image.len() + alignment;
|
let sec_headers_offset = linker.image.len() + alignment;
|
||||||
linker.image.extend(vec![0; alignment]);
|
linker.image.extend(vec![0; alignment]);
|
||||||
for rec in linker.elf_shdrs.iter() {
|
for rec in &linker.elf_shdrs {
|
||||||
let shdr = rec.shdr;
|
let shdr = rec.shdr;
|
||||||
linker.image.extend(unsafe {
|
linker.image.extend(unsafe {
|
||||||
slice::from_raw_parts(
|
slice::from_raw_parts(ptr::addr_of!(shdr).cast(), mem::size_of::<Elf32_Shdr>())
|
||||||
&shdr as *const Elf32_Shdr as *const u8,
|
|
||||||
mem::size_of::<Elf32_Shdr>(),
|
|
||||||
)
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the PHDRs
|
// Update the PHDRs
|
||||||
let phdr_offset = mem::size_of::<Elf32_Ehdr>();
|
let phdr_offset = mem::size_of::<Elf32_Ehdr>();
|
||||||
unsafe {
|
unsafe {
|
||||||
let phdr_ptr = linker.image.as_mut_ptr().add(phdr_offset) as *mut Elf32_Phdr;
|
let phdr_ptr = linker.image.as_mut_ptr().add(phdr_offset).cast();
|
||||||
let phdr_slice = slice::from_raw_parts_mut(phdr_ptr, 5);
|
let phdr_slice = slice::from_raw_parts_mut(phdr_ptr, 5);
|
||||||
// List of program headers:
|
// List of program headers:
|
||||||
// 1. ELF headers & program headers
|
// 1. ELF headers & program headers
|
||||||
|
@ -1453,7 +1475,7 @@ impl<'a> Linker<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the EHDR
|
// Update the EHDR
|
||||||
let ehdr_ptr = linker.image.as_mut_ptr() as *mut Elf32_Ehdr;
|
let ehdr_ptr = linker.image.as_mut_ptr().cast();
|
||||||
unsafe {
|
unsafe {
|
||||||
*ehdr_ptr = Elf32_Ehdr {
|
*ehdr_ptr = Elf32_Ehdr {
|
||||||
e_ident: ehdr.e_ident,
|
e_ident: ehdr.e_ident,
|
||||||
|
|
|
@@ -17,9 +17,7 @@ pub fn make_config_comment(
 location: com_loc,
 error: LexicalErrorType::OtherError(
 format!(
-"config comment at top must have the same indentation with what it applies (comment at {}, statement at {})",
-com_loc,
-stmt_loc,
+"config comment at top must have the same indentation with what it applies (comment at {com_loc}, statement at {stmt_loc})",
 )
 )
 }
@@ -37,7 +37,7 @@ impl fmt::Display for LexicalErrorType {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 match self {
 LexicalErrorType::StringError => write!(f, "Got unexpected string"),
-LexicalErrorType::FStringError(error) => write!(f, "Got error in f-string: {}", error),
+LexicalErrorType::FStringError(error) => write!(f, "Got error in f-string: {error}"),
 LexicalErrorType::UnicodeError => write!(f, "Got unexpected unicode"),
 LexicalErrorType::NestingError => write!(f, "Got unexpected nesting"),
 LexicalErrorType::IndentationError => {
@@ -59,13 +59,13 @@ impl fmt::Display for LexicalErrorType {
 write!(f, "positional argument follows keyword argument")
 }
 LexicalErrorType::UnrecognizedToken { tok } => {
-write!(f, "Got unexpected token {}", tok)
+write!(f, "Got unexpected token {tok}")
 }
 LexicalErrorType::LineContinuationError => {
 write!(f, "unexpected character after line continuation character")
 }
 LexicalErrorType::Eof => write!(f, "unexpected EOF while parsing"),
-LexicalErrorType::OtherError(msg) => write!(f, "{}", msg),
+LexicalErrorType::OtherError(msg) => write!(f, "{msg}"),
 }
 }
 }
@@ -96,7 +96,7 @@ impl fmt::Display for FStringErrorType {
 FStringErrorType::UnopenedRbrace => write!(f, "Unopened '}}'"),
 FStringErrorType::ExpectedRbrace => write!(f, "Expected '}}' after conversion flag."),
 FStringErrorType::InvalidExpression(error) => {
-write!(f, "Invalid expression: {}", error)
+write!(f, "Invalid expression: {error}")
 }
 FStringErrorType::InvalidConversionFlag => write!(f, "Invalid conversion flag"),
 FStringErrorType::EmptyExpression => write!(f, "Empty expression"),
@@ -144,10 +144,6 @@ pub enum ParseErrorType {
 impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError {
 fn from(err: LalrpopError<Location, Tok, LexicalError>) -> Self {
 match err {
-// TODO: Are there cases where this isn't an EOF?
-LalrpopError::InvalidToken { location } => {
-ParseError { error: ParseErrorType::Eof, location }
-}
 LalrpopError::ExtraToken { token } => {
 ParseError { error: ParseErrorType::ExtraToken(token.1), location: token.0 }
 }
@@ -163,7 +159,10 @@ impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError {
 location: token.0,
 }
 }
-LalrpopError::UnrecognizedEof { location, .. } => {
+LalrpopError::UnrecognizedEof { location, .. }
+// TODO: Are there cases where this isn't an EOF?
+| LalrpopError::InvalidToken { location } => {
 ParseError { error: ParseErrorType::Eof, location }
 }
 }
@@ -180,7 +179,7 @@ impl fmt::Display for ParseErrorType {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 match *self {
 ParseErrorType::Eof => write!(f, "Got unexpected EOF"),
-ParseErrorType::ExtraToken(ref tok) => write!(f, "Got extraneous token: {:?}", tok),
+ParseErrorType::ExtraToken(ref tok) => write!(f, "Got extraneous token: {tok:?}"),
 ParseErrorType::InvalidToken => write!(f, "Got invalid token"),
 ParseErrorType::UnrecognizedToken(ref tok, ref expected) => {
 if *tok == Tok::Indent {
@@ -188,10 +187,10 @@ impl fmt::Display for ParseErrorType {
 } else if expected.as_deref() == Some("Indent") {
 write!(f, "expected an indented block")
 } else {
-write!(f, "Got unexpected token {}", tok)
+write!(f, "Got unexpected token {tok}")
 }
 }
-ParseErrorType::Lexical(ref error) => write!(f, "{}", error),
+ParseErrorType::Lexical(ref error) => write!(f, "{error}"),
 }
 }
 }
@@ -199,6 +198,7 @@ impl fmt::Display for ParseErrorType {
 impl Error for ParseErrorType {}

 impl ParseErrorType {
+#[must_use]
 pub fn is_indentation_error(&self) -> bool {
 match self {
 ParseErrorType::Lexical(LexicalErrorType::IndentationError) => true,
@@ -208,11 +208,11 @@ impl ParseErrorType {
 _ => false,
 }
 }
+#[must_use]
 pub fn is_tab_error(&self) -> bool {
 matches!(
 self,
-ParseErrorType::Lexical(LexicalErrorType::TabError)
-| ParseErrorType::Lexical(LexicalErrorType::TabsAfterSpaces)
+ParseErrorType::Lexical(LexicalErrorType::TabError | LexicalErrorType::TabsAfterSpaces)
 )
 }
 }
|
@ -130,10 +130,10 @@ impl<'a> FStringParser<'a> {
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
Box::new(self.expr(ExprKind::Constant {
|
Box::new(self.expr(ExprKind::Constant {
|
||||||
value: spec_expression.to_owned().into(),
|
value: spec_expression.clone().into(),
|
||||||
kind: None,
|
kind: None,
|
||||||
}))
|
}))
|
||||||
})
|
});
|
||||||
}
|
}
|
||||||
'(' | '{' | '[' => {
|
'(' | '{' | '[' => {
|
||||||
expression.push(ch);
|
expression.push(ch);
|
||||||
|
@ -248,7 +248,7 @@ impl<'a> FStringParser<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
if !content.is_empty() {
|
if !content.is_empty() {
|
||||||
values.push(self.expr(ExprKind::Constant { value: content.into(), kind: None }))
|
values.push(self.expr(ExprKind::Constant { value: content.into(), kind: None }));
|
||||||
}
|
}
|
||||||
|
|
||||||
let s = match values.len() {
|
let s = match values.len() {
|
||||||
|
@ -261,7 +261,7 @@ impl<'a> FStringParser<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_fstring_expr(source: &str) -> Result<Expr, ParseError> {
|
fn parse_fstring_expr(source: &str) -> Result<Expr, ParseError> {
|
||||||
let fstring_body = format!("({})", source);
|
let fstring_body = format!("({source})");
|
||||||
parse_expression(&fstring_body)
|
parse_expression(&fstring_body)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -54,8 +54,7 @@ pub fn parse_args(func_args: Vec<FunctionArgument>) -> Result<ArgumentList, Lexi
|
||||||
|
|
||||||
let mut keyword_names = HashSet::with_capacity_and_hasher(func_args.len(), RandomState::new());
|
let mut keyword_names = HashSet::with_capacity_and_hasher(func_args.len(), RandomState::new());
|
||||||
for (name, value) in func_args {
|
for (name, value) in func_args {
|
||||||
match name {
|
if let Some((location, name)) = name {
|
||||||
Some((location, name)) => {
|
|
||||||
if let Some(keyword_name) = &name {
|
if let Some(keyword_name) = &name {
|
||||||
if keyword_names.contains(keyword_name) {
|
if keyword_names.contains(keyword_name) {
|
||||||
return Err(LexicalError {
|
return Err(LexicalError {
|
||||||
|
@ -69,10 +68,9 @@ pub fn parse_args(func_args: Vec<FunctionArgument>) -> Result<ArgumentList, Lexi
|
||||||
|
|
||||||
keywords.push(ast::Keyword::new(
|
keywords.push(ast::Keyword::new(
|
||||||
location,
|
location,
|
||||||
ast::KeywordData { arg: name.map(|name| name.into()), value: Box::new(value) },
|
ast::KeywordData { arg: name.map(String::into), value: Box::new(value) },
|
||||||
));
|
));
|
||||||
}
|
} else {
|
||||||
None => {
|
|
||||||
// Allow starred args after keyword arguments.
|
// Allow starred args after keyword arguments.
|
||||||
if !keywords.is_empty() && !is_starred(&value) {
|
if !keywords.is_empty() && !is_starred(&value) {
|
||||||
return Err(LexicalError {
|
return Err(LexicalError {
|
||||||
|
@ -84,7 +82,6 @@ pub fn parse_args(func_args: Vec<FunctionArgument>) -> Result<ArgumentList, Lexi
|
||||||
args.push(value);
|
args.push(value);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
Ok(ArgumentList { args, keywords })
|
Ok(ArgumentList { args, keywords })
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -159,7 +159,7 @@ where
|
||||||
self.shift();
|
self.shift();
|
||||||
} else {
|
} else {
|
||||||
// Transform MAC EOL into \n
|
// Transform MAC EOL into \n
|
||||||
self.chr0 = Some('\n')
|
self.chr0 = Some('\n');
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
break;
|
break;
|
||||||
|
@ -179,7 +179,7 @@ where
|
||||||
chars: input,
|
chars: input,
|
||||||
at_begin_of_line: true,
|
at_begin_of_line: true,
|
||||||
nesting: 0,
|
nesting: 0,
|
||||||
indentation_stack: vec![Default::default()],
|
indentation_stack: vec![IndentationLevel::default()],
|
||||||
pending: Vec::new(),
|
pending: Vec::new(),
|
||||||
chr0: None,
|
chr0: None,
|
||||||
location: start,
|
location: start,
|
||||||
|
@ -207,11 +207,9 @@ where
|
||||||
let mut saw_f = false;
|
let mut saw_f = false;
|
||||||
loop {
|
loop {
|
||||||
// Detect r"", f"", b"" and u""
|
// Detect r"", f"", b"" and u""
|
||||||
if !(saw_b || saw_u || saw_f) && matches!(self.chr0, Some('b') | Some('B')) {
|
if !(saw_b || saw_u || saw_f) && matches!(self.chr0, Some('b' | 'B')) {
|
||||||
saw_b = true;
|
saw_b = true;
|
||||||
} else if !(saw_b || saw_r || saw_u || saw_f)
|
} else if !(saw_b || saw_r || saw_u || saw_f) && matches!(self.chr0, Some('u' | 'U')) {
|
||||||
&& matches!(self.chr0, Some('u') | Some('U'))
|
|
||||||
{
|
|
||||||
saw_u = true;
|
saw_u = true;
|
||||||
} else if !(saw_r || saw_u) && (self.chr0 == Some('r') || self.chr0 == Some('R')) {
|
} else if !(saw_r || saw_u) && (self.chr0 == Some('r') || self.chr0 == Some('R')) {
|
||||||
saw_r = true;
|
saw_r = true;
|
||||||
|
@ -281,7 +279,7 @@ where
|
||||||
IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX,
|
IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX,
|
||||||
_ => {
|
_ => {
|
||||||
return Err(LexicalError {
|
return Err(LexicalError {
|
||||||
error: LexicalErrorType::OtherError(format!("{:?}", e)),
|
error: LexicalErrorType::OtherError(format!("{e:?}")),
|
||||||
location: start_pos,
|
location: start_pos,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -362,7 +360,7 @@ where
|
||||||
|
|
||||||
/// Consume a sequence of numbers with the given radix,
|
/// Consume a sequence of numbers with the given radix,
|
||||||
/// the digits can be decorated with underscores
|
/// the digits can be decorated with underscores
|
||||||
/// like this: '1_2_3_4' == '1234'
|
/// like this: `'1_2_3_4'` == `'1234'`
|
||||||
fn radix_run(&mut self, radix: u32) -> String {
|
fn radix_run(&mut self, radix: u32) -> String {
|
||||||
let mut value_text = String::new();
|
let mut value_text = String::new();
|
||||||
|
|
||||||
|
@ -395,7 +393,7 @@ where
|
||||||
2 => matches!(c, Some('0'..='1')),
|
2 => matches!(c, Some('0'..='1')),
|
||||||
8 => matches!(c, Some('0'..='7')),
|
8 => matches!(c, Some('0'..='7')),
|
||||||
10 => matches!(c, Some('0'..='9')),
|
10 => matches!(c, Some('0'..='9')),
|
||||||
16 => matches!(c, Some('0'..='9') | Some('a'..='f') | Some('A'..='F')),
|
16 => matches!(c, Some('0'..='9' | 'a'..='f' | 'A'..='F')),
|
||||||
other => unimplemented!("Radix not implemented: {}", other),
|
other => unimplemented!("Radix not implemented: {}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -403,8 +401,8 @@ where
|
||||||
/// Test if we face '[eE][-+]?[0-9]+'
|
/// Test if we face '[eE][-+]?[0-9]+'
|
||||||
fn at_exponent(&self) -> bool {
|
fn at_exponent(&self) -> bool {
|
||||||
match self.chr0 {
|
match self.chr0 {
|
||||||
Some('e') | Some('E') => match self.chr1 {
|
Some('e' | 'E') => match self.chr1 {
|
||||||
Some('+') | Some('-') => matches!(self.chr2, Some('0'..='9')),
|
Some('+' | '-') => matches!(self.chr2, Some('0'..='9')),
|
||||||
Some('0'..='9') => true,
|
Some('0'..='9') => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
},
|
},
|
||||||
|
@ -423,11 +421,10 @@ where
|
||||||
start_loc.go_left();
|
start_loc.go_left();
|
||||||
loop {
|
loop {
|
||||||
match self.chr0 {
|
match self.chr0 {
|
||||||
Some('\n') => return None,
|
Some('\n') | None => return None,
|
||||||
None => return None,
|
|
||||||
Some(c) => {
|
Some(c) => {
|
||||||
if let (true, Some(p)) = (is_comment, prefix.next()) {
|
if let (true, Some(p)) = (is_comment, prefix.next()) {
|
||||||
is_comment = is_comment && c == p
|
is_comment = is_comment && c == p;
|
||||||
} else {
|
} else {
|
||||||
// done checking prefix, if is comment then return the spanned
|
// done checking prefix, if is comment then return the spanned
|
||||||
if is_comment {
|
if is_comment {
|
||||||
|
@ -476,7 +473,7 @@ where
|
||||||
octet_content.push(first);
|
octet_content.push(first);
|
||||||
while octet_content.len() < 3 {
|
while octet_content.len() < 3 {
|
||||||
if let Some('0'..='7') = self.chr0 {
|
if let Some('0'..='7') = self.chr0 {
|
||||||
octet_content.push(self.next_char().unwrap())
|
octet_content.push(self.next_char().unwrap());
|
||||||
} else {
|
} else {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@ -544,7 +541,7 @@ where
|
||||||
} else if is_raw {
|
} else if is_raw {
|
||||||
string_content.push('\\');
|
string_content.push('\\');
|
||||||
if let Some(c) = self.next_char() {
|
if let Some(c) = self.next_char() {
|
||||||
string_content.push(c)
|
string_content.push(c);
|
||||||
} else {
|
} else {
|
||||||
return Err(LexicalError {
|
return Err(LexicalError {
|
||||||
error: LexicalErrorType::StringError,
|
error: LexicalErrorType::StringError,
|
||||||
|
@ -577,7 +574,7 @@ where
|
||||||
Some('u') if !is_bytes => string_content.push(self.unicode_literal(4)?),
|
Some('u') if !is_bytes => string_content.push(self.unicode_literal(4)?),
|
||||||
Some('U') if !is_bytes => string_content.push(self.unicode_literal(8)?),
|
Some('U') if !is_bytes => string_content.push(self.unicode_literal(8)?),
|
||||||
Some('N') if !is_bytes => {
|
Some('N') if !is_bytes => {
|
||||||
string_content.push(self.parse_unicode_name()?)
|
string_content.push(self.parse_unicode_name()?);
|
||||||
}
|
}
|
||||||
Some(c) => {
|
Some(c) => {
|
||||||
string_content.push('\\');
|
string_content.push('\\');
|
||||||
|
@ -636,7 +633,7 @@ where
|
||||||
Ok((start_pos, tok, end_pos))
|
Ok((start_pos, tok, end_pos))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_identifier_start(&self, c: char) -> bool {
|
fn is_identifier_start(c: char) -> bool {
|
||||||
match c {
|
match c {
|
||||||
'_' | 'a'..='z' | 'A'..='Z' => true,
|
'_' | 'a'..='z' | 'A'..='Z' => true,
|
||||||
'+' | '-' | '*' | '/' | '=' | ' ' | '<' | '>' => false,
|
'+' | '-' | '*' | '/' | '=' | ' ' | '<' | '>' => false,
|
||||||
|
@ -808,7 +805,7 @@ where
|
||||||
// Check if we have some character:
|
// Check if we have some character:
|
||||||
if let Some(c) = self.chr0 {
|
if let Some(c) = self.chr0 {
|
||||||
// First check identifier:
|
// First check identifier:
|
||||||
if self.is_identifier_start(c) {
|
if Self::is_identifier_start(c) {
|
||||||
let identifier = self.lex_identifier()?;
|
let identifier = self.lex_identifier()?;
|
||||||
self.emit(identifier);
|
self.emit(identifier);
|
||||||
} else if is_emoji_presentation(c) {
|
} else if is_emoji_presentation(c) {
|
||||||
|
@@ -868,18 +865,15 @@ where
 '=' => {
 let tok_start = self.get_pos();
 self.next_char();
-match self.chr0 {
-Some('=') => {
+if let Some('=') = self.chr0 {
 self.next_char();
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::EqEqual, tok_end));
-}
-_ => {
+} else {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::Equal, tok_end));
 }
 }
-}
 '+' => {
 let tok_start = self.get_pos();
 self.next_char();
@@ -903,18 +897,15 @@ where
 }
 Some('*') => {
 self.next_char();
-match self.chr0 {
-Some('=') => {
+if let Some('=') = self.chr0 {
 self.next_char();
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::DoubleStarEqual, tok_end));
-}
-_ => {
+} else {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::DoubleStar, tok_end));
 }
 }
-}
 _ => {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::Star, tok_end));
@@ -932,18 +923,15 @@ where
 }
 Some('/') => {
 self.next_char();
-match self.chr0 {
-Some('=') => {
+if let Some('=') = self.chr0 {
 self.next_char();
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::DoubleSlashEqual, tok_end));
-}
-_ => {
+} else {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::DoubleSlash, tok_end));
 }
 }
-}
 _ => {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::Slash, tok_end));
@@ -1110,18 +1098,15 @@ where
 match self.chr0 {
 Some('<') => {
 self.next_char();
-match self.chr0 {
-Some('=') => {
+if let Some('=') = self.chr0 {
 self.next_char();
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::LeftShiftEqual, tok_end));
-}
-_ => {
+} else {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::LeftShift, tok_end));
 }
 }
-}
 Some('=') => {
 self.next_char();
 let tok_end = self.get_pos();
@@ -1139,18 +1124,15 @@ where
 match self.chr0 {
 Some('>') => {
 self.next_char();
-match self.chr0 {
-Some('=') => {
+if let Some('=') = self.chr0 {
 self.next_char();
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::RightShiftEqual, tok_end));
-}
-_ => {
+} else {
 let tok_end = self.get_pos();
 self.emit((tok_start, Tok::RightShift, tok_end));
 }
 }
-}
 Some('=') => {
 self.next_char();
 let tok_end = self.get_pos();
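The repeated lexer hunks above all apply the same refactor, likely prompted by Clippy's `single_match_else` pedantic lint: a two-arm `match` on `self.chr0` with a `_` fallback becomes `if let ... else`, and the now-redundant closing brace disappears. A minimal standalone sketch of the equivalence (function and strings are illustrative only):

```rust
// One interesting pattern plus a `_` fallback is exactly what `if let ... else` expresses.
fn classify(next: Option<char>) -> &'static str {
    // Before:
    // match next {
    //     Some('=') => "augmented",
    //     _ => "plain",
    // }
    // After:
    if let Some('=') = next {
        "augmented"
    } else {
        "plain"
    }
}

fn main() {
    assert_eq!(classify(Some('=')), "augmented");
    assert_eq!(classify(Some('+')), "plain");
    assert_eq!(classify(None), "plain");
}
```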
@@ -15,6 +15,24 @@
 //!
 //! ```
+
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(
+clippy::enum_glob_use,
+clippy::fn_params_excessive_bools,
+clippy::missing_errors_doc,
+clippy::missing_panics_doc,
+clippy::module_name_repetitions,
+clippy::too_many_lines,
+clippy::wildcard_imports
+)]

 #[macro_use]
 extern crate log;
 use lalrpop_util::lalrpop_mod;
@@ -27,8 +45,15 @@ pub mod lexer;
 pub mod mode;
 pub mod parser;
 lalrpop_mod!(
-#[allow(clippy::all)]
-#[allow(unused)]
+#[allow(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+unused,
+clippy::all,
+clippy::pedantic
+)]
 python
 );
 pub mod config_comment_helper;
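The hunks above (and the matching ones further down for the standalone binaries) add crate-wide lint configuration and relax it for the generated LALRPOP parser. A minimal sketch of how these inner attributes work, assuming a generic crate root rather than the exact NAC3 lint set:

```rust
// Crate-level (inner) attributes must sit at the top of the crate root
// (src/lib.rs or src/main.rs). The lint groups below are illustrative.
#![deny(nonstandard_style, clippy::all)] // promote these lint groups to hard errors
#![warn(clippy::pedantic)]               // surface pedantic lints as warnings
#![allow(clippy::too_many_lines)]        // opt back out of selected pedantic lints

fn main() {
    println!("lints configured at the crate root");
}
```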
@@ -5,6 +5,7 @@
 //! parse a whole program, a single statement, or a single
 //! expression.

+use nac3ast::Location;
 use std::iter;

 use crate::ast::{self, FileName};
@@ -63,7 +64,7 @@ pub fn parse_program(source: &str, file: FileName) -> Result<ast::Suite, ParseEr
 ///
 /// ```
 pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
-parse(source, Mode::Expression, Default::default()).map(|top| match top {
+parse(source, Mode::Expression, FileName::default()).map(|top| match top {
 ast::Mod::Expression { body } => *body,
 _ => unreachable!(),
 })
@@ -72,7 +73,7 @@ pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
 // Parse a given source code
 pub fn parse(source: &str, mode: Mode, file: FileName) -> Result<ast::Mod, ParseError> {
 let lxr = lexer::make_tokenizer(source, file);
-let marker_token = (Default::default(), mode.to_marker(), Default::default());
+let marker_token = (Location::default(), mode.to_marker(), Location::default());
 let tokenizer = iter::once(Ok(marker_token)).chain(lxr);

 python::TopParser::new().parse(tokenizer).map_err(ParseError::from)
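The two parser hunks above replace bare `Default::default()` calls with the concrete types (`FileName::default()`, `Location::default()`), which is what Clippy's `default_trait_access` pedantic lint asks for, and they add the `use nac3ast::Location;` import that the second change needs. A small self-contained illustration with a hypothetical type:

```rust
#[derive(Default, Debug)]
struct FileName(String); // stand-in type, not the real nac3ast definition

fn open(name: FileName) {
    println!("opening {name:?}");
}

fn main() {
    // Before: the concrete type is hidden behind the trait call.
    open(Default::default());
    // After: the concrete type is spelled out, as clippy::default_trait_access prefers.
    open(FileName::default());
}
```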
@@ -115,19 +115,19 @@ impl fmt::Display for Tok {
 write!(f, "'{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *name))
 }
 Int { value } => {
-if *value != i128::MAX {
-write!(f, "'{}'", value)
-} else {
+if *value == i128::MAX {
 write!(f, "'#OFL#'")
+} else {
+write!(f, "'{value}'")
 }
 }
-Float { value } => write!(f, "'{}'", value),
-Complex { real, imag } => write!(f, "{}j{}", real, imag),
+Float { value } => write!(f, "'{value}'"),
+Complex { real, imag } => write!(f, "{real}j{imag}"),
 String { value, is_fstring } => {
 if *is_fstring {
-write!(f, "f")?
+write!(f, "f")?;
 }
-write!(f, "{:?}", value)
+write!(f, "{value:?}")
 }
 Bytes { value } => {
 write!(f, "b\"")?;
@@ -137,7 +137,7 @@ impl fmt::Display for Tok {
 10 => f.write_str("\\n")?,
 13 => f.write_str("\\r")?,
 32..=126 => f.write_char(*i as char)?,
-_ => write!(f, "\\x{:02x}", i)?,
+_ => write!(f, "\\x{i:02x}")?,
 }
 }
 f.write_str("\"")
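Most of the `Display` changes above are mechanical migrations to Rust 2021 inline format arguments (Clippy's `uninlined_format_args`); the `Int` arm is also flipped so the overflow sentinel is the `if` branch. A compact sketch of the formatting change on its own:

```rust
fn main() {
    let value = 42i128;
    let real = 1.5;
    let imag = 2.5;

    // Positional arguments, as in the old code:
    println!("'{}'", value);
    println!("{}j{}", real, imag);

    // Inline (captured) arguments, as in the new code:
    println!("'{value}'");
    println!("{real}j{imag}");
    println!("{value:?}"); // format specs combine with captured names the same way
}
```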
@@ -1,3 +1,13 @@
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(clippy::too_many_lines, clippy::wildcard_imports)]
+
 use clap::Parser;
 use inkwell::{
 memory_buffer::MemoryBuffer, passes::PassBuilderOptions, support::is_multithreaded, targets::*,
@@ -5,6 +15,7 @@ use inkwell::{
 };
 use parking_lot::{Mutex, RwLock};
 use std::collections::HashSet;
+use std::num::NonZeroUsize;
 use std::{collections::HashMap, fs, path::Path, sync::Arc};

 use nac3core::{
@@ -100,7 +111,7 @@ fn handle_typevar_definition(
 unifier,
 primitives,
 x,
-HashMap::default(),
+HashMap::new(),
 )?;
 get_type_from_type_annotation_kinds(def_list, unifier, &ty, &mut None)
 })
@@ -138,7 +149,7 @@ fn handle_typevar_definition(
 unifier,
 primitives,
 &args[1],
-HashMap::default(),
+HashMap::new(),
 )?;
 let constraint =
 get_type_from_type_annotation_kinds(def_list, unifier, &ty, &mut None)?;
@@ -244,9 +255,9 @@ fn main() {
 let target_features = target_features.unwrap_or_default();
 let threads = if is_multithreaded() {
 if threads == 0 {
-std::thread::available_parallelism().map(|threads| threads.get() as u32).unwrap_or(1u32)
+std::thread::available_parallelism().map(NonZeroUsize::get).unwrap_or(1usize)
 } else {
-threads
+threads as usize
 }
 } else {
 if threads != 1 {
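The hunk above keeps the thread count as a `usize` instead of casting down to `u32`: `NonZeroUsize::get` replaces the shadowing closure, and the new `use std::num::NonZeroUsize;` import supports it. A minimal standalone sketch of the same selection logic, assuming the CLI flag is a `u32` as in the surrounding code:

```rust
use std::num::NonZeroUsize;

// `requested` mirrors the CLI flag: 0 means "use all available cores".
fn effective_threads(requested: u32) -> usize {
    if requested == 0 {
        // available_parallelism() returns io::Result<NonZeroUsize>;
        // NonZeroUsize::get turns it into a plain usize, with 1 as the fallback.
        std::thread::available_parallelism().map(NonZeroUsize::get).unwrap_or(1)
    } else {
        requested as usize
    }
}

fn main() {
    println!("threads = {}", effective_threads(0));
    println!("threads = {}", effective_threads(4));
}
```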
@@ -310,10 +321,7 @@ fn main() {
 StmtKind::ImportFrom { module, names, .. }
 if module == &Some("__future__".into())
 && names.len() == 1
-&& names[0].name == "annotations".into() =>
-{
-()
-}
+&& names[0].name == "annotations".into() => {}
 _ => {
 let (name, def_id, ty) = composer
 .register_top_level(stmt, Some(resolver.clone()), "__main__", true)
@@ -1,3 +1,13 @@
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all
+)]
+#![warn(clippy::pedantic)]
+#![allow(clippy::semicolon_if_nothing_returned, clippy::uninlined_format_args)]
+
 use std::env;

 static mut NOW: i64 = 0;
@@ -29,17 +39,17 @@ pub extern "C" fn rtio_get_counter() -> i64 {

 #[no_mangle]
 pub extern "C" fn rtio_output(target: i32, data: i32) {
-println!("rtio_output @{} target={:04x} data={}", unsafe { NOW }, target, data);
+println!("rtio_output @{} target={target:04x} data={data}", unsafe { NOW });
 }

 #[no_mangle]
 pub extern "C" fn print_int32(x: i32) {
-println!("print_int32: {}", x);
+println!("print_int32: {x}");
 }

 #[no_mangle]
 pub extern "C" fn print_int64(x: i64) {
-println!("print_int64: {}", x);
+println!("print_int64: {x}");
 }

 #[no_mangle]
|
@ -52,6 +62,6 @@ fn main() {
|
||||||
unsafe {
|
unsafe {
|
||||||
let lib = libloading::Library::new(filename).unwrap();
|
let lib = libloading::Library::new(filename).unwrap();
|
||||||
let func: libloading::Symbol<unsafe extern "C" fn()> = lib.get(b"__modinit__").unwrap();
|
let func: libloading::Symbol<unsafe extern "C" fn()> = lib.get(b"__modinit__").unwrap();
|
||||||
func()
|
func();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||