meta: Restrict number of allowed lints

David Mak 2024-06-12 15:13:09 +08:00
parent 52cc822a53
commit 4bdd8128da
33 changed files with 584 additions and 558 deletions

View File

@@ -1 +1 @@
-doc-valid-idents = ["NumPy", ".."]
+doc-valid-idents = ["CPython", "NumPy", ".."]
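The `doc-valid-idents` key in clippy.toml feeds clippy::doc_markdown: identifiers on the list may appear bare in doc comments, and the ".." entry keeps Clippy's built-in defaults. A minimal sketch of a doc comment this entry now accepts; the function itself is hypothetical and only illustrates the rule:

/// Reports whether `name` collides with a keyword in CPython and therefore cannot be
/// used as an identifier. Without the clippy.toml entry, doc_markdown would ask for
/// CPython to be wrapped in backticks.
pub fn is_python_keyword(name: &str) -> bool {
    matches!(name, "None" | "True" | "False" | "def" | "class")
}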

View File

@@ -259,7 +259,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
let start_expr = Located {
// location does not matter at this point
location: stmt.location,
-node: ExprKind::Name { id: start, ctx: name_ctx.clone() },
+node: ExprKind::Name { id: start, ctx: *name_ctx },
custom: Some(ctx.primitives.int64),
};
let start = self
@@ -274,7 +274,7 @@ impl<'b> CodeGenerator for ArtiqCodeGenerator<'b> {
let end_expr = Located {
// location does not matter at this point
location: stmt.location,
-node: ExprKind::Name { id: end, ctx: name_ctx.clone() },
+node: ExprKind::Name { id: end, ctx: *name_ctx },
custom: Some(ctx.primitives.int64),
};
let end = self
@@ -442,7 +442,7 @@ fn rpc_codegen_callback_fn<'ctx>(
format!("tagptr{}", fun.1 .0).as_str(),
);
tag_arr_ptr.set_initializer(&int8.const_array(
-&tag.iter().map(|v| int8.const_int(*v as u64, false)).collect::<Vec<_>>(),
+&tag.iter().map(|v| int8.const_int(u64::from(*v), false)).collect::<Vec<_>>(),
));
tag_arr_ptr.set_linkage(Linkage::Private);
let tag_ptr = ctx.module.add_global(tag_ptr_type, None, &hash);
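The tag-array initializer switches from `*v as u64` to `u64::from(*v)`. With clippy::cast_lossless no longer on the allow list, widening integer conversions go through `From`, which only exists where no information can be lost. A small sketch of the difference, assuming nothing beyond the standard library:

fn widen(tag: u8, id: u32) -> (u64, u64) {
    // `tag as u64` would still compile if `tag` were later changed to a wider type
    // and the cast became lossy; `u64::from` fails to compile in that case.
    (u64::from(tag), u64::from(id))
}

fn main() {
    assert_eq!(widen(0xFF, 7), (255, 7));
}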

View File

@@ -1,13 +1,19 @@
-#![deny(clippy::all)]
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all,
+)]
#![warn(clippy::pedantic)]
#![allow(
-clippy::cast_lossless,
+unsafe_op_in_unsafe_fn,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::enum_glob_use,
clippy::similar_names,
clippy::too_many_lines,
-clippy::wildcard_imports,
+clippy::wildcard_imports
)]
use std::collections::{HashMap, HashSet};
@@ -26,16 +32,16 @@ use inkwell::{
OptimizationLevel,
};
use itertools::Itertools;
-use nac3core::codegen::{CodeGenLLVMOptions, CodeGenTargetMachineOptions, gen_func_impl};
+use nac3core::codegen::{gen_func_impl, CodeGenLLVMOptions, CodeGenTargetMachineOptions};
use nac3core::toplevel::builtins::get_exn_constructor;
use nac3core::typecheck::typedef::{TypeEnum, Unifier, VarMap};
use nac3parser::{
ast::{ExprKind, Stmt, StmtKind, StrRef},
parser::parse_program,
};
+use pyo3::create_exception;
use pyo3::prelude::*;
use pyo3::{exceptions, types::PyBytes, types::PyDict, types::PySet};
-use pyo3::create_exception;
use parking_lot::{Mutex, RwLock};
@@ -58,7 +64,7 @@ use tempfile::{self, TempDir};
use crate::codegen::attributes_writeback;
use crate::{
codegen::{rpc_codegen_callback, ArtiqCodeGenerator},
-symbol_resolver::{InnerResolver, PythonHelper, Resolver, DeferredEvaluationStore},
+symbol_resolver::{DeferredEvaluationStore, InnerResolver, PythonHelper, Resolver},
};
mod codegen;
@ -150,9 +156,7 @@ impl Nac3 {
for mut stmt in parser_result { for mut stmt in parser_result {
let include = match stmt.node { let include = match stmt.node {
StmtKind::ClassDef { StmtKind::ClassDef { ref decorator_list, ref mut body, ref mut bases, .. } => {
ref decorator_list, ref mut body, ref mut bases, ..
} => {
let nac3_class = decorator_list.iter().any(|decorator| { let nac3_class = decorator_list.iter().any(|decorator| {
if let ExprKind::Name { id, .. } = decorator.node { if let ExprKind::Name { id, .. } = decorator.node {
id.to_string() == "nac3" id.to_string() == "nac3"
@ -172,7 +176,8 @@ impl Nac3 {
if *id == "Exception".into() { if *id == "Exception".into() {
Ok(true) Ok(true)
} else { } else {
let base_obj = module.getattr(py, id.to_string().as_str())?; let base_obj =
module.getattr(py, id.to_string().as_str())?;
let base_id = id_fn.call1((base_obj,))?.extract()?; let base_id = id_fn.call1((base_obj,))?.extract()?;
Ok(registered_class_ids.contains(&base_id)) Ok(registered_class_ids.contains(&base_id))
} }
@ -353,8 +358,9 @@ impl Nac3 {
let class_obj; let class_obj;
if let StmtKind::ClassDef { name, .. } = &stmt.node { if let StmtKind::ClassDef { name, .. } = &stmt.node {
let class = py_module.getattr(name.to_string().as_str()).unwrap(); let class = py_module.getattr(name.to_string().as_str()).unwrap();
if issubclass.call1((class, exn_class)).unwrap().extract().unwrap() && if issubclass.call1((class, exn_class)).unwrap().extract().unwrap()
class.getattr("artiq_builtin").is_err() { && class.getattr("artiq_builtin").is_err()
{
class_obj = Some(class); class_obj = Some(class);
} else { } else {
class_obj = None; class_obj = None;
@ -400,12 +406,12 @@ impl Nac3 {
let (name, def_id, ty) = composer let (name, def_id, ty) = composer
.register_top_level(stmt.clone(), Some(resolver.clone()), path, false) .register_top_level(stmt.clone(), Some(resolver.clone()), path, false)
.map_err(|e| { .map_err(|e| {
CompileError::new_err(format!( CompileError::new_err(format!("compilation failed\n----------\n{e}"))
"compilation failed\n----------\n{e}"
))
})?; })?;
if let Some(class_obj) = class_obj { if let Some(class_obj) = class_obj {
self.exception_ids.write().insert(def_id.0, store_obj.call1(py, (class_obj, ))?.extract(py)?); self.exception_ids
.write()
.insert(def_id.0, store_obj.call1(py, (class_obj,))?.extract(py)?);
} }
match &stmt.node { match &stmt.node {
@ -482,7 +488,8 @@ impl Nac3 {
exception_ids: self.exception_ids.clone(), exception_ids: self.exception_ids.clone(),
deferred_eval_store: self.deferred_eval_store.clone(), deferred_eval_store: self.deferred_eval_store.clone(),
}); });
let resolver = Arc::new(Resolver(inner_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>; let resolver =
Arc::new(Resolver(inner_resolver.clone())) as Arc<dyn SymbolResolver + Send + Sync>;
let (_, def_id, _) = composer let (_, def_id, _) = composer
.register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "", false) .register_top_level(synthesized.pop().unwrap(), Some(resolver.clone()), "", false)
.unwrap(); .unwrap();
@ -491,8 +498,12 @@ impl Nac3 {
FunSignature { args: vec![], ret: self.primitive.none, vars: VarMap::new() }; FunSignature { args: vec![], ret: self.primitive.none, vars: VarMap::new() };
let mut store = ConcreteTypeStore::new(); let mut store = ConcreteTypeStore::new();
let mut cache = HashMap::new(); let mut cache = HashMap::new();
let signature = let signature = store.from_signature(
store.from_signature(&mut composer.unifier, &self.primitive, &fun_signature, &mut cache); &mut composer.unifier,
&self.primitive,
&fun_signature,
&mut cache,
);
let signature = store.add_cty(signature); let signature = store.add_cty(signature);
if let Err(e) = composer.start_analysis(true) { if let Err(e) = composer.start_analysis(true) {
@ -511,13 +522,11 @@ impl Nac3 {
msg.unwrap_or(e.iter().sorted().join("\n----------\n")) msg.unwrap_or(e.iter().sorted().join("\n----------\n"))
))) )))
} else { } else {
Err(CompileError::new_err( Err(CompileError::new_err(format!(
format!( "compilation failed\n----------\n{}",
"compilation failed\n----------\n{}", e.iter().sorted().join("\n----------\n"),
e.iter().sorted().join("\n----------\n"), )))
), };
))
}
} }
let top_level = Arc::new(composer.make_top_level_context()); let top_level = Arc::new(composer.make_top_level_context());
@ -545,7 +554,9 @@ impl Nac3 {
py, py,
( (
id.0.into_py(py), id.0.into_py(py),
class_def.getattr(py, name.to_string().as_str()).unwrap(), class_def
.getattr(py, name.to_string().as_str())
.unwrap(),
), ),
) )
.unwrap(); .unwrap();
@ -560,7 +571,8 @@ impl Nac3 {
let defs = top_level.definitions.read(); let defs = top_level.definitions.read();
let mut definition = defs[def_id.0].write(); let mut definition = defs[def_id.0].write();
let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } = let TopLevelDef::Function { instance_to_stmt, instance_to_symbol, .. } =
&mut *definition else { &mut *definition
else {
unreachable!() unreachable!()
}; };
@ -582,8 +594,12 @@ impl Nac3 {
let mut store = ConcreteTypeStore::new(); let mut store = ConcreteTypeStore::new();
let mut cache = HashMap::new(); let mut cache = HashMap::new();
let signature = let signature = store.from_signature(
store.from_signature(&mut composer.unifier, &self.primitive, &fun_signature, &mut cache); &mut composer.unifier,
&self.primitive,
&fun_signature,
&mut cache,
);
let signature = store.add_cty(signature); let signature = store.add_cty(signature);
let attributes_writeback_task = CodeGenTask { let attributes_writeback_task = CodeGenTask {
subst: Vec::default(), subst: Vec::default(),
@ -616,23 +632,28 @@ impl Nac3 {
let membuffer = membuffers.clone(); let membuffer = membuffers.clone();
py.allow_threads(|| { py.allow_threads(|| {
let (registry, handles) = WorkerRegistry::create_workers( let (registry, handles) =
threads, WorkerRegistry::create_workers(threads, top_level.clone(), &self.llvm_options, &f);
top_level.clone(),
&self.llvm_options,
&f
);
registry.add_task(task); registry.add_task(task);
registry.wait_tasks_complete(handles); registry.wait_tasks_complete(handles);
let mut generator = ArtiqCodeGenerator::new("attributes_writeback".to_string(), size_t, self.time_fns); let mut generator =
ArtiqCodeGenerator::new("attributes_writeback".to_string(), size_t, self.time_fns);
let context = inkwell::context::Context::create(); let context = inkwell::context::Context::create();
let module = context.create_module("attributes_writeback"); let module = context.create_module("attributes_writeback");
let builder = context.create_builder(); let builder = context.create_builder();
let (_, module, _) = gen_func_impl(&context, &mut generator, &registry, builder, module, let (_, module, _) = gen_func_impl(
attributes_writeback_task, |generator, ctx| { &context,
&mut generator,
&registry,
builder,
module,
attributes_writeback_task,
|generator, ctx| {
attributes_writeback(ctx, generator, inner_resolver.as_ref(), &host_attributes) attributes_writeback(ctx, generator, inner_resolver.as_ref(), &host_attributes)
}).unwrap(); },
)
.unwrap();
let buffer = module.write_bitcode_to_memory(); let buffer = module.write_bitcode_to_memory();
let buffer = buffer.as_slice().into(); let buffer = buffer.as_slice().into();
membuffer.lock().push(buffer); membuffer.lock().push(buffer);
@ -648,11 +669,16 @@ impl Nac3 {
.create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main")) .create_module_from_ir(MemoryBuffer::create_from_memory_range(buffer, "main"))
.unwrap(); .unwrap();
main.link_in_module(other) main.link_in_module(other).map_err(|err| CompileError::new_err(err.to_string()))?;
.map_err(|err| CompileError::new_err(err.to_string()))?;
} }
let builder = context.create_builder(); let builder = context.create_builder();
let modinit_return = main.get_function("__modinit__").unwrap().get_last_basic_block().unwrap().get_terminator().unwrap(); let modinit_return = main
.get_function("__modinit__")
.unwrap()
.get_last_basic_block()
.unwrap()
.get_terminator()
.unwrap();
builder.position_before(&modinit_return); builder.position_before(&modinit_return);
builder builder
.build_call( .build_call(
@ -674,10 +700,7 @@ impl Nac3 {
} }
// Demote all global variables that will not be referenced in the kernel to private // Demote all global variables that will not be referenced in the kernel to private
let preserved_symbols: Vec<&'static [u8]> = vec![ let preserved_symbols: Vec<&'static [u8]> = vec![b"typeinfo", b"now"];
b"typeinfo",
b"now",
];
let mut global_option = main.get_first_global(); let mut global_option = main.get_first_global();
while let Some(global) = global_option { while let Some(global) = global_option {
if !preserved_symbols.contains(&(global.get_name().to_bytes())) { if !preserved_symbols.contains(&(global.get_name().to_bytes())) {
@ -686,7 +709,9 @@ impl Nac3 {
global_option = global.get_next_global(); global_option = global.get_next_global();
} }
let target_machine = self.llvm_options.target let target_machine = self
.llvm_options
.target
.create_target_machine(self.llvm_options.opt_level) .create_target_machine(self.llvm_options.opt_level)
.expect("couldn't create target machine"); .expect("couldn't create target machine");
@ -750,10 +775,7 @@ impl Nac3 {
} }
} }
fn link_with_lld( fn link_with_lld(elf_filename: String, obj_filename: String) -> PyResult<()> {
elf_filename: String,
obj_filename: String,
) -> PyResult<()>{
let linker_args = vec![ let linker_args = vec![
"-shared".to_string(), "-shared".to_string(),
"--eh-frame-hdr".to_string(), "--eh-frame-hdr".to_string(),
@ -772,9 +794,7 @@ fn link_with_lld(
return Err(CompileError::new_err("failed to start linker")); return Err(CompileError::new_err("failed to start linker"));
} }
} else { } else {
return Err(CompileError::new_err( return Err(CompileError::new_err("linker returned non-zero status code"));
"linker returned non-zero status code",
));
} }
Ok(()) Ok(())
@ -784,7 +804,7 @@ fn add_exceptions(
composer: &mut TopLevelComposer, composer: &mut TopLevelComposer,
builtin_def: &mut HashMap<StrRef, DefinitionId>, builtin_def: &mut HashMap<StrRef, DefinitionId>,
builtin_ty: &mut HashMap<StrRef, Type>, builtin_ty: &mut HashMap<StrRef, Type>,
error_names: &[&str] error_names: &[&str],
) -> Vec<Type> { ) -> Vec<Type> {
let mut types = Vec::new(); let mut types = Vec::new();
// note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}" // note: this is only for builtin exceptions, i.e. the exception name is "0:{exn}"
@ -797,7 +817,7 @@ fn add_exceptions(
// constructor id // constructor id
def_id + 1, def_id + 1,
&mut composer.unifier, &mut composer.unifier,
&composer.primitives_ty &composer.primitives_ty,
); );
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None)); composer.definition_ast_list.push((Arc::new(RwLock::new(exception_class)), None));
composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None)); composer.definition_ast_list.push((Arc::new(RwLock::new(exception_fn)), None));
@ -846,7 +866,8 @@ impl Nac3 {
}, },
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| { Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
let arg_ty = fun.0.args[0].ty; let arg_ty = fun.0.args[0].ty;
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap(); let arg =
args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
time_fns.emit_at_mu(ctx, arg); time_fns.emit_at_mu(ctx, arg);
Ok(None) Ok(None)
}))), }))),
@ -864,7 +885,8 @@ impl Nac3 {
}, },
Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| { Arc::new(GenCall::new(Box::new(move |ctx, _, fun, args, generator| {
let arg_ty = fun.0.args[0].ty; let arg_ty = fun.0.args[0].ty;
let arg = args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap(); let arg =
args[0].1.clone().to_basic_value_enum(ctx, generator, arg_ty).unwrap();
time_fns.emit_delay_mu(ctx, arg); time_fns.emit_delay_mu(ctx, arg);
Ok(None) Ok(None)
}))), }))),
@ -879,8 +901,9 @@ impl Nac3 {
let types_mod = PyModule::import(py, "types").unwrap(); let types_mod = PyModule::import(py, "types").unwrap();
let get_id = |x: &PyAny| id_fn.call1((x,)).and_then(PyAny::extract).unwrap(); let get_id = |x: &PyAny| id_fn.call1((x,)).and_then(PyAny::extract).unwrap();
let get_attr_id = |obj: &PyModule, attr| id_fn.call1((obj.getattr(attr).unwrap(),)) let get_attr_id = |obj: &PyModule, attr| {
.unwrap().extract().unwrap(); id_fn.call1((obj.getattr(attr).unwrap(),)).unwrap().extract().unwrap()
};
let primitive_ids = PrimitivePythonId { let primitive_ids = PrimitivePythonId {
virtual_id: get_id(artiq_builtins.get_item("virtual").ok().flatten().unwrap()), virtual_id: get_id(artiq_builtins.get_item("virtual").ok().flatten().unwrap()),
generic_alias: ( generic_alias: (
@ -889,7 +912,9 @@ impl Nac3 {
), ),
none: get_id(artiq_builtins.get_item("none").ok().flatten().unwrap()), none: get_id(artiq_builtins.get_item("none").ok().flatten().unwrap()),
typevar: get_attr_id(typing_mod, "TypeVar"), typevar: get_attr_id(typing_mod, "TypeVar"),
const_generic_marker: get_id(artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap()), const_generic_marker: get_id(
artiq_builtins.get_item("_ConstGenericMarker").ok().flatten().unwrap(),
),
int: get_attr_id(builtins_mod, "int"), int: get_attr_id(builtins_mod, "int"),
int32: get_attr_id(numpy_mod, "int32"), int32: get_attr_id(numpy_mod, "int32"),
int64: get_attr_id(numpy_mod, "int64"), int64: get_attr_id(numpy_mod, "int64"),
@ -923,7 +948,7 @@ impl Nac3 {
llvm_options: CodeGenLLVMOptions { llvm_options: CodeGenLLVMOptions {
opt_level: OptimizationLevel::Default, opt_level: OptimizationLevel::Default,
target: Nac3::get_llvm_target_options(isa), target: Nac3::get_llvm_target_options(isa),
} },
}) })
} }
@ -964,7 +989,7 @@ impl Nac3 {
py: Python, py: Python,
) -> PyResult<()> { ) -> PyResult<()> {
let target_machine = self.get_llvm_target_machine(); let target_machine = self.get_llvm_target_machine();
if self.isa == Isa::Host { if self.isa == Isa::Host {
let link_fn = |module: &Module| { let link_fn = |module: &Module| {
let working_directory = self.working_directory.path().to_owned(); let working_directory = self.working_directory.path().to_owned();
@ -973,7 +998,7 @@ impl Nac3 {
.expect("couldn't write module to file"); .expect("couldn't write module to file");
link_with_lld( link_with_lld(
filename.to_string(), filename.to_string(),
working_directory.join("module.o").to_string_lossy().to_string() working_directory.join("module.o").to_string_lossy().to_string(),
)?; )?;
Ok(()) Ok(())
}; };
@ -1009,7 +1034,7 @@ impl Nac3 {
py: Python, py: Python,
) -> PyResult<PyObject> { ) -> PyResult<PyObject> {
let target_machine = self.get_llvm_target_machine(); let target_machine = self.get_llvm_target_machine();
if self.isa == Isa::Host { if self.isa == Isa::Host {
let link_fn = |module: &Module| { let link_fn = |module: &Module| {
let working_directory = self.working_directory.path().to_owned(); let working_directory = self.working_directory.path().to_owned();
@ -1021,7 +1046,7 @@ impl Nac3 {
let filename = filename_path.to_str().unwrap(); let filename = filename_path.to_str().unwrap();
link_with_lld( link_with_lld(
filename.to_string(), filename.to_string(),
working_directory.join("module.o").to_string_lossy().to_string() working_directory.join("module.o").to_string_lossy().to_string(),
)?; )?;
Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into()) Ok(PyBytes::new(py, &fs::read(filename).unwrap()).into())

View File

@@ -127,7 +127,7 @@ impl StaticValue for PythonValue {
);
global.set_constant(true);
global.set_initializer(&ctx.ctx.const_struct(
-&[ctx.ctx.i32_type().const_int(id as u64, false).into()],
+&[ctx.ctx.i32_type().const_int(u64::from(id), false).into()],
false,
));
Ok(global.as_pointer_value().into())
@@ -147,10 +147,10 @@ impl StaticValue for PythonValue {
return Ok(match val {
PrimitiveValue::I32(val) => ctx.ctx.i32_type().const_int(*val as u64, false).into(),
PrimitiveValue::I64(val) => ctx.ctx.i64_type().const_int(*val as u64, false).into(),
-PrimitiveValue::U32(val) => ctx.ctx.i32_type().const_int(*val as u64, false).into(),
+PrimitiveValue::U32(val) => ctx.ctx.i32_type().const_int(u64::from(*val), false).into(),
PrimitiveValue::U64(val) => ctx.ctx.i64_type().const_int(*val, false).into(),
PrimitiveValue::F64(val) => ctx.ctx.f64_type().const_float(*val).into(),
-PrimitiveValue::Bool(val) => ctx.ctx.i8_type().const_int(*val as u64, false).into(),
+PrimitiveValue::Bool(val) => ctx.ctx.i8_type().const_int(u64::from(*val), false).into(),
});
}
if let Some(global) = ctx.module.get_global(&self.id.to_string()) {
@@ -872,7 +872,7 @@ impl InnerResolver {
} else if ty_id == self.primitive_ids.uint32 {
let val: u32 = obj.extract().unwrap();
self.id_to_primitive.write().insert(id, PrimitiveValue::U32(val));
-Ok(Some(ctx.ctx.i32_type().const_int(val as u64, false).into()))
+Ok(Some(ctx.ctx.i32_type().const_int(u64::from(val), false).into()))
} else if ty_id == self.primitive_ids.uint64 {
let val: u64 = obj.extract().unwrap();
self.id_to_primitive.write().insert(id, PrimitiveValue::U64(val));
@@ -880,7 +880,7 @@ impl InnerResolver {
} else if ty_id == self.primitive_ids.bool {
let val: bool = obj.extract().unwrap();
self.id_to_primitive.write().insert(id, PrimitiveValue::Bool(val));
-Ok(Some(ctx.ctx.i8_type().const_int(val as u64, false).into()))
+Ok(Some(ctx.ctx.i8_type().const_int(u64::from(val), false).into()))
} else if ty_id == self.primitive_ids.float || ty_id == self.primitive_ids.float64 {
let val: f64 = obj.extract().unwrap();
self.id_to_primitive.write().insert(id, PrimitiveValue::F64(val));

View File

@@ -14,7 +14,7 @@ lazy_static! {
}
thread_local! {
-static LOCAL_INTERNER: RefCell<HashMap<String, StrRef>> = Default::default();
+static LOCAL_INTERNER: RefCell<HashMap<String, StrRef>> = RefCell::default();
}
#[derive(Eq, PartialEq, Copy, Clone, Hash)]
@@ -23,14 +23,14 @@ pub struct StrRef(SymbolU32);
impl fmt::Debug for StrRef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s: String = (*self).into();
-write!(f, "{:?}", s)
+write!(f, "{s:?}")
}
}
impl fmt::Display for StrRef {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s: String = (*self).into();
-write!(f, "{}", s)
+write!(f, "{s}")
}
}
@@ -68,6 +68,7 @@ pub fn get_str_ref(lock: &mut MutexGuard<Interner>, str: &str) -> StrRef {
StrRef(lock.get_or_intern(str))
}
+#[must_use]
pub fn get_str_from_ref<'a>(lock: &'a MutexGuard<Interner>, id: StrRef) -> &'a str {
lock.resolve(id.0).unwrap()
}
@@ -368,20 +369,20 @@ pub enum ExprKind<U = ()> {
}
pub type Expr<U = ()> = Located<ExprKind<U>, U>;
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ExprContext {
Load,
Store,
Del,
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Boolop {
And,
Or,
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Operator {
Add,
Sub,
@@ -398,7 +399,7 @@ pub enum Operator {
FloorDiv,
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Unaryop {
Invert,
Not,
@@ -406,7 +407,7 @@ pub enum Unaryop {
USub,
}
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Cmpop {
Eq,
NotEq,
@@ -464,7 +465,7 @@ pub struct KeywordData<U = ()> {
}
pub type Keyword<U = ()> = Located<KeywordData<U>, U>;
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
pub struct Alias {
pub name: Ident,
pub asname: Option<Ident>,
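Deriving Copy on these small, fieldless AST enums is what lets the later hunks pass Operator, Unaryop, Cmpop and ExprContext by value (`*op` instead of `op.clone()` or `&op`). A self-contained sketch of the pattern, reusing the Operator and binop_name names from this commit:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Operator {
    Add,
    Sub,
}

// A one-byte Copy enum is cheaper to pass by value than by reference, and the
// caller keeps its own copy afterwards.
fn binop_name(op: Operator) -> &'static str {
    match op {
        Operator::Add => "__add__",
        Operator::Sub => "__sub__",
    }
}

fn main() {
    let op = Operator::Add;
    assert_eq!(binop_name(op), "__add__");
    assert_eq!(op, Operator::Add); // still usable: copied, not moved
}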

View File

@@ -28,12 +28,12 @@ impl From<bool> for Constant {
}
impl From<i32> for Constant {
fn from(i: i32) -> Constant {
-Self::Int(i as i128)
+Self::Int(i128::from(i))
}
}
impl From<i64> for Constant {
fn from(i: i64) -> Constant {
-Self::Int(i as i128)
+Self::Int(i128::from(i))
}
}
@@ -50,6 +50,7 @@ pub enum ConversionFlag {
}
impl ConversionFlag {
+#[must_use]
pub fn try_from_byte(b: u8) -> Option<Self> {
match b {
b's' => Some(Self::Str),
@@ -69,6 +70,7 @@ pub struct ConstantOptimizer {
#[cfg(feature = "constant-optimization")]
impl ConstantOptimizer {
#[inline]
+#[must_use]
pub fn new() -> Self {
Self { _priv: () }
}

View File

@@ -2,6 +2,7 @@ use crate::{Constant, ExprKind};
impl<U> ExprKind<U> {
/// Returns a short name for the node suitable for use in error messages.
+#[must_use]
pub fn name(&self) -> &'static str {
match self {
ExprKind::BoolOp { .. } | ExprKind::BinOp { .. } | ExprKind::UnaryOp { .. } => {

View File

@@ -1,15 +1,16 @@
-#![deny(clippy::all)]
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all,
+)]
#![warn(clippy::pedantic)]
#![allow(
-clippy::cast_lossless,
-clippy::default_trait_access,
clippy::missing_errors_doc,
clippy::missing_panics_doc,
clippy::module_name_repetitions,
-clippy::must_use_candidate,
-clippy::needless_pass_by_value,
clippy::too_many_lines,
-clippy::uninlined_format_args,
clippy::wildcard_imports,
)]
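Across all three crates the pattern is the same: a handful of lint groups move to deny, clippy::pedantic stays at warn, and the allow list shrinks to the exceptions that are still intentional. A minimal sketch of how these crate-root levels interact; the lint names here are only examples:

// lib.rs
#![deny(nonstandard_style, clippy::all)] // violations in these groups fail the build
#![warn(clippy::pedantic)]               // reported, but not fatal
#![allow(clippy::too_many_lines)]        // a named lint stays off even though its group is warned

pub fn add(a: u32, b: u32) -> u32 {
    a + b
}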

View File

@@ -85,14 +85,17 @@ impl Location {
}
impl Location {
+#[must_use]
pub fn new(row: usize, column: usize, file: FileName) -> Self {
Location { row, column, file }
}
+#[must_use]
pub fn row(&self) -> usize {
self.row
}
+#[must_use]
pub fn column(&self) -> usize {
self.column
}
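The #[must_use] attributes added throughout nac3ast are what allowed clippy::must_use_candidate to be dropped from the allow list: callers are now warned when they discard the result of a pure getter or constructor. A short sketch with a hypothetical Span type:

pub struct Span {
    start: usize,
    end: usize,
}

impl Span {
    #[must_use]
    pub fn len(&self) -> usize {
        self.end - self.start
    }
}

fn demo(s: &Span) {
    s.len(); // warned: unused return value of `len`, which is marked #[must_use]
    let _n = s.len(); // fine, the value is bound
}

fn main() {
    let s = Span { start: 0, end: 3 };
    demo(&s);
}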

View File

@@ -163,7 +163,7 @@ pub trait UntypedArrayLikeAccessor<'ctx, Index = IntValue<'ctx>>: ArrayLikeIndex
idx: &Index,
name: Option<&str>,
) -> BasicValueEnum<'ctx> {
-let ptr = self.ptr_offset_unchecked(ctx, generator, idx, name);
+let ptr = unsafe { self.ptr_offset_unchecked(ctx, generator, idx, name) };
ctx.builder.build_load(ptr, name.unwrap_or_default()).unwrap()
}
@@ -192,7 +192,7 @@ pub trait UntypedArrayLikeMutator<'ctx, Index = IntValue<'ctx>>: ArrayLikeIndexe
idx: &Index,
value: BasicValueEnum<'ctx>,
) {
-let ptr = self.ptr_offset_unchecked(ctx, generator, idx, None);
+let ptr = unsafe { self.ptr_offset_unchecked(ctx, generator, idx, None) };
ctx.builder.build_store(ptr, value).unwrap();
}
@@ -224,7 +224,7 @@ pub trait TypedArrayLikeAccessor<'ctx, T, Index = IntValue<'ctx>>: UntypedArrayL
idx: &Index,
name: Option<&str>,
) -> T {
-let value = self.get_unchecked(ctx, generator, idx, name);
+let value = unsafe { self.get_unchecked(ctx, generator, idx, name) };
self.downcast_to_type(ctx, value)
}
@@ -257,7 +257,7 @@ pub trait TypedArrayLikeMutator<'ctx, T, Index = IntValue<'ctx>>: UntypedArrayLi
value: T,
) {
let value = self.upcast_from_type(ctx, value);
-self.set_unchecked(ctx, generator, idx, value);
+unsafe { self.set_unchecked(ctx, generator, idx, value) }
}
/// Sets the data at the `idx`-th index.
@@ -337,7 +337,7 @@ impl<'ctx, T, Index, Adapted> ArrayLikeIndexer<'ctx, Index> for TypedArrayLikeAd
idx: &Index,
name: Option<&str>,
) -> PointerValue<'ctx> {
-self.adapted.ptr_offset_unchecked(ctx, generator, idx, name)
+unsafe { self.adapted.ptr_offset_unchecked(ctx, generator, idx, name) }
}
fn ptr_offset<G: CodeGenerator + ?Sized>(
@@ -431,11 +431,13 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for ArraySliceValue<'ctx> {
.map(|v| format!("{v}.addr"))
.unwrap_or_default();
-ctx.builder.build_in_bounds_gep(
-self.base_ptr(ctx, generator),
-&[*idx],
-var_name.as_str(),
-).unwrap()
+unsafe {
+ctx.builder.build_in_bounds_gep(
+self.base_ptr(ctx, generator),
+&[*idx],
+var_name.as_str(),
+).unwrap()
+}
}
fn ptr_offset<G: CodeGenerator + ?Sized>(
@@ -787,11 +789,13 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for ListDataProxy<'ctx, '_> {
.map(|v| format!("{v}.addr"))
.unwrap_or_default();
-ctx.builder.build_in_bounds_gep(
-self.base_ptr(ctx, generator),
-&[*idx],
-var_name.as_str(),
-).unwrap()
+unsafe {
+ctx.builder.build_in_bounds_gep(
+self.base_ptr(ctx, generator),
+&[*idx],
+var_name.as_str(),
+).unwrap()
+}
}
fn ptr_offset<G: CodeGenerator + ?Sized>(
@@ -1453,11 +1457,13 @@ impl<'ctx> ArrayLikeIndexer<'ctx, IntValue<'ctx>> for NDArrayDimsProxy<'ctx, '_>
.map(|v| format!("{v}.addr"))
.unwrap_or_default();
-ctx.builder.build_in_bounds_gep(
-self.base_ptr(ctx, generator),
-&[*idx],
-var_name.as_str(),
-).unwrap()
+unsafe {
+ctx.builder.build_in_bounds_gep(
+self.base_ptr(ctx, generator),
+&[*idx],
+var_name.as_str(),
+).unwrap()
+}
}
fn ptr_offset<G: CodeGenerator + ?Sized>(
@@ -1554,11 +1560,13 @@ impl<'ctx> ArrayLikeIndexer<'ctx> for NDArrayDataProxy<'ctx, '_> {
idx: &IntValue<'ctx>,
name: Option<&str>,
) -> PointerValue<'ctx> {
-ctx.builder.build_in_bounds_gep(
-self.base_ptr(ctx, generator),
-&[*idx],
-name.unwrap_or_default(),
-).unwrap()
+unsafe {
+ctx.builder.build_in_bounds_gep(
+self.base_ptr(ctx, generator),
+&[*idx],
+name.unwrap_or_default(),
+).unwrap()
+}
}
fn ptr_offset<G: CodeGenerator + ?Sized>(
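These hunks are driven by unsafe_op_in_unsafe_fn (pulled in via the rust_2024_compatibility group): the body of an unsafe fn is no longer treated as one big unsafe context, so each unsafe operation gets its own explicit block. A minimal sketch of the rule:

unsafe fn read_first(ptr: *const u32) -> u32 {
    // With unsafe_op_in_unsafe_fn enforced, the raw-pointer dereference must sit
    // in its own unsafe block even though the surrounding function is already unsafe.
    unsafe { *ptr }
}

fn main() {
    let x = 7u32;
    let v = unsafe { read_first(&x) }; // the call site still needs unsafe, as before
    assert_eq!(v, 7);
}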

View File

@ -123,9 +123,9 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
match val { match val {
SymbolValue::I32(v) => self.ctx.i32_type().const_int(*v as u64, true).into(), SymbolValue::I32(v) => self.ctx.i32_type().const_int(*v as u64, true).into(),
SymbolValue::I64(v) => self.ctx.i64_type().const_int(*v as u64, true).into(), SymbolValue::I64(v) => self.ctx.i64_type().const_int(*v as u64, true).into(),
SymbolValue::U32(v) => self.ctx.i32_type().const_int(*v as u64, false).into(), SymbolValue::U32(v) => self.ctx.i32_type().const_int(u64::from(*v), false).into(),
SymbolValue::U64(v) => self.ctx.i64_type().const_int(*v, false).into(), SymbolValue::U64(v) => self.ctx.i64_type().const_int(*v, false).into(),
SymbolValue::Bool(v) => self.ctx.i8_type().const_int(*v as u64, true).into(), SymbolValue::Bool(v) => self.ctx.i8_type().const_int(u64::from(*v), true).into(),
SymbolValue::Double(v) => self.ctx.f64_type().const_float(*v).into(), SymbolValue::Double(v) => self.ctx.f64_type().const_float(*v).into(),
SymbolValue::Str(v) => { SymbolValue::Str(v) => {
let str_ptr = self.builder let str_ptr = self.builder
@ -305,7 +305,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
pub fn gen_int_ops<G: CodeGenerator + ?Sized>( pub fn gen_int_ops<G: CodeGenerator + ?Sized>(
&mut self, &mut self,
generator: &mut G, generator: &mut G,
op: &Operator, op: Operator,
lhs: BasicValueEnum<'ctx>, lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>, rhs: BasicValueEnum<'ctx>,
signed: bool signed: bool
@ -362,7 +362,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
self.current_loc self.current_loc
); );
match *op { match op {
Operator::LShift => self.builder.build_left_shift(lhs, rhs, "lshift").map(Into::into).unwrap(), Operator::LShift => self.builder.build_left_shift(lhs, rhs, "lshift").map(Into::into).unwrap(),
Operator::RShift => self.builder.build_right_shift(lhs, rhs, signed, "rshift").map(Into::into).unwrap(), Operator::RShift => self.builder.build_right_shift(lhs, rhs, signed, "rshift").map(Into::into).unwrap(),
_ => unreachable!() _ => unreachable!()
@ -380,7 +380,7 @@ impl<'ctx, 'a> CodeGenContext<'ctx, 'a> {
/// Generates a binary operation `op` between two floating-point operands `lhs` and `rhs`. /// Generates a binary operation `op` between two floating-point operands `lhs` and `rhs`.
pub fn gen_float_ops( pub fn gen_float_ops(
&mut self, &mut self,
op: &Operator, op: Operator,
lhs: BasicValueEnum<'ctx>, lhs: BasicValueEnum<'ctx>,
rhs: BasicValueEnum<'ctx>, rhs: BasicValueEnum<'ctx>,
) -> BasicValueEnum<'ctx> { ) -> BasicValueEnum<'ctx> {
@ -1100,7 +1100,7 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
generator: &mut G, generator: &mut G,
ctx: &mut CodeGenContext<'ctx, '_>, ctx: &mut CodeGenContext<'ctx, '_>,
left: (&Option<Type>, BasicValueEnum<'ctx>), left: (&Option<Type>, BasicValueEnum<'ctx>),
op: &Operator, op: Operator,
right: (&Option<Type>, BasicValueEnum<'ctx>), right: (&Option<Type>, BasicValueEnum<'ctx>),
loc: Location, loc: Location,
is_aug_assign: bool, is_aug_assign: bool,
@ -1118,14 +1118,14 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, true).into())) Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, true).into()))
} else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) { } else if ty1 == ty2 && [ctx.primitives.uint32, ctx.primitives.uint64].contains(&ty1) {
Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, false).into())) Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, false).into()))
} else if [Operator::LShift, Operator::RShift].contains(op) { } else if [Operator::LShift, Operator::RShift].contains(&op) {
let signed = [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1); let signed = [ctx.primitives.int32, ctx.primitives.int64].contains(&ty1);
Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, signed).into())) Ok(Some(ctx.gen_int_ops(generator, op, left_val, right_val, signed).into()))
} else if ty1 == ty2 && ctx.primitives.float == ty1 { } else if ty1 == ty2 && ctx.primitives.float == ty1 {
Ok(Some(ctx.gen_float_ops(op, left_val, right_val).into())) Ok(Some(ctx.gen_float_ops(op, left_val, right_val).into()))
} else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 { } else if ty1 == ctx.primitives.float && ty2 == ctx.primitives.int32 {
// Pow is the only operator that would pass typecheck between float and int // Pow is the only operator that would pass typecheck between float and int
assert_eq!(*op, Operator::Pow); assert_eq!(op, Operator::Pow);
let res = call_float_powi( let res = call_float_powi(
ctx, ctx,
left_val.into_float_value(), left_val.into_float_value(),
@ -1158,7 +1158,7 @@ pub fn gen_binop_expr_with_values<'ctx, G: CodeGenerator>(
None, None,
); );
let res = if *op == Operator::MatMult { let res = if op == Operator::MatMult {
// MatMult is the only binop which is not an elementwise op // MatMult is the only binop which is not an elementwise op
numpy::ndarray_matmul_2d( numpy::ndarray_matmul_2d(
generator, generator,
@ -1287,7 +1287,7 @@ pub fn gen_binop_expr<'ctx, G: CodeGenerator>(
generator: &mut G, generator: &mut G,
ctx: &mut CodeGenContext<'ctx, '_>, ctx: &mut CodeGenContext<'ctx, '_>,
left: &Expr<Option<Type>>, left: &Expr<Option<Type>>,
op: &Operator, op: Operator,
right: &Expr<Option<Type>>, right: &Expr<Option<Type>>,
loc: Location, loc: Location,
is_aug_assign: bool, is_aug_assign: bool,
@ -1319,7 +1319,7 @@ pub fn gen_binop_expr<'ctx, G: CodeGenerator>(
pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>( pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
generator: &mut G, generator: &mut G,
ctx: &mut CodeGenContext<'ctx, '_>, ctx: &mut CodeGenContext<'ctx, '_>,
op: &ast::Unaryop, op: ast::Unaryop,
operand: (&Option<Type>, BasicValueEnum<'ctx>), operand: (&Option<Type>, BasicValueEnum<'ctx>),
) -> Result<Option<ValueEnum<'ctx>>, String> { ) -> Result<Option<ValueEnum<'ctx>>, String> {
let (ty, val) = operand; let (ty, val) = operand;
@ -1327,7 +1327,7 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
Ok(Some(if ty == ctx.primitives.bool { Ok(Some(if ty == ctx.primitives.bool {
let val = val.into_int_value(); let val = val.into_int_value();
if *op == ast::Unaryop::Not { if op == ast::Unaryop::Not {
let not = ctx.builder.build_not(val, "not").unwrap(); let not = ctx.builder.build_not(val, "not").unwrap();
let not_bool = ctx.builder.build_and( let not_bool = ctx.builder.build_and(
not, not,
@ -1386,8 +1386,8 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
// ndarray uses `~` rather than `not` to perform elementwise inversion, convert it before // ndarray uses `~` rather than `not` to perform elementwise inversion, convert it before
// passing it to the elementwise codegen function // passing it to the elementwise codegen function
let op = if ndarray_dtype.obj_id(&ctx.unifier).is_some_and(|id| id == PRIMITIVE_DEF_IDS.bool) { let op = if ndarray_dtype.obj_id(&ctx.unifier).is_some_and(|id| id == PRIMITIVE_DEF_IDS.bool) {
if *op == ast::Unaryop::Invert { if op == ast::Unaryop::Invert {
&ast::Unaryop::Not ast::Unaryop::Not
} else { } else {
unreachable!("ufunc {} not supported for ndarray[bool, N]", unaryop_name(op)) unreachable!("ufunc {} not supported for ndarray[bool, N]", unaryop_name(op))
} }
@ -1424,7 +1424,7 @@ pub fn gen_unaryop_expr_with_values<'ctx, G: CodeGenerator>(
pub fn gen_unaryop_expr<'ctx, G: CodeGenerator>( pub fn gen_unaryop_expr<'ctx, G: CodeGenerator>(
generator: &mut G, generator: &mut G,
ctx: &mut CodeGenContext<'ctx, '_>, ctx: &mut CodeGenContext<'ctx, '_>,
op: &ast::Unaryop, op: ast::Unaryop,
operand: &Expr<Option<Type>>, operand: &Expr<Option<Type>>,
) -> Result<Option<ValueEnum<'ctx>>, String> { ) -> Result<Option<ValueEnum<'ctx>>, String> {
let val = if let Some(v) = generator.gen_expr(ctx, operand)? { let val = if let Some(v) = generator.gen_expr(ctx, operand)? {
@ -1456,7 +1456,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
let (Some(left_ty), lhs) = left else { unreachable!() }; let (Some(left_ty), lhs) = left else { unreachable!() };
let (Some(right_ty), rhs) = comparators[0] else { unreachable!() }; let (Some(right_ty), rhs) = comparators[0] else { unreachable!() };
let op = ops[0].clone(); let op = ops[0];
let is_ndarray1 = left_ty.obj_id(&ctx.unifier) let is_ndarray1 = left_ty.obj_id(&ctx.unifier)
.is_some_and(|id| id == PRIMITIVE_DEF_IDS.ndarray); .is_some_and(|id| id == PRIMITIVE_DEF_IDS.ndarray);
@ -1486,7 +1486,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
generator, generator,
ctx, ctx,
(Some(ndarray_dtype1), lhs), (Some(ndarray_dtype1), lhs),
&[op.clone()], &[op],
&[(Some(ndarray_dtype2), rhs)], &[(Some(ndarray_dtype2), rhs)],
)?.unwrap().to_basic_value_enum(ctx, generator, ctx.primitives.bool)?; )?.unwrap().to_basic_value_enum(ctx, generator, ctx.primitives.bool)?;
@ -1512,7 +1512,7 @@ pub fn gen_cmpop_expr_with_values<'ctx, G: CodeGenerator>(
generator, generator,
ctx, ctx,
(Some(ndarray_dtype), lhs), (Some(ndarray_dtype), lhs),
&[op.clone()], &[op],
&[(Some(ndarray_dtype), rhs)], &[(Some(ndarray_dtype), rhs)],
)?.unwrap().to_basic_value_enum(ctx, generator, ctx.primitives.bool)?; )?.unwrap().to_basic_value_enum(ctx, generator, ctx.primitives.bool)?;
@ -1678,7 +1678,7 @@ fn gen_ndarray_subscript_expr<'ctx, G: CodeGenerator>(
let ndims = values.iter() let ndims = values.iter()
.map(|ndim| match *ndim { .map(|ndim| match *ndim {
SymbolValue::U64(v) => Ok(v), SymbolValue::U64(v) => Ok(v),
SymbolValue::U32(v) => Ok(v as u64), SymbolValue::U32(v) => Ok(u64::from(v)),
SymbolValue::I32(v) => u64::try_from(v) SymbolValue::I32(v) => u64::try_from(v)
.map_err(|_| format!("Expected non-negative literal for ndarray.ndims, got {v}")), .map_err(|_| format!("Expected non-negative literal for ndarray.ndims, got {v}")),
SymbolValue::I64(v) => u64::try_from(v) SymbolValue::I64(v) => u64::try_from(v)
@ -2145,10 +2145,10 @@ pub fn gen_expr<'ctx, G: CodeGenerator>(
} }
} }
ExprKind::BinOp { op, left, right } => { ExprKind::BinOp { op, left, right } => {
return gen_binop_expr(generator, ctx, left, op, right, expr.location, false); return gen_binop_expr(generator, ctx, left, *op, right, expr.location, false);
} }
ExprKind::UnaryOp { op, operand } => { ExprKind::UnaryOp { op, operand } => {
return gen_unaryop_expr(generator, ctx, op, operand) return gen_unaryop_expr(generator, ctx, *op, operand)
} }
ExprKind::Compare { left, ops, comparators } => { ExprKind::Compare { left, ops, comparators } => {
return gen_cmpop_expr(generator, ctx, left, ops, comparators) return gen_cmpop_expr(generator, ctx, left, ops, comparators)

View File

@@ -1723,7 +1723,7 @@ pub fn ndarray_matmul_2d<'ctx, G: CodeGenerator>(
generator,
ctx,
(&Some(elem_ty), a),
-&Operator::Mult,
+Operator::Mult,
(&Some(elem_ty), b),
ctx.current_loc,
false,
@@ -1734,7 +1734,7 @@ pub fn ndarray_matmul_2d<'ctx, G: CodeGenerator>(
generator,
ctx,
(&Some(elem_ty), result),
-&Operator::Add,
+Operator::Add,
(&Some(elem_ty), a_mul_b),
ctx.current_loc,
false,

View File

@@ -1617,7 +1617,7 @@ pub fn gen_stmt<G: CodeGenerator>(
StmtKind::For { .. } => generator.gen_for(ctx, stmt)?,
StmtKind::With { .. } => generator.gen_with(ctx, stmt)?,
StmtKind::AugAssign { target, op, value, .. } => {
-let value = gen_binop_expr(generator, ctx, target, op, value, stmt.location, true)?;
+let value = gen_binop_expr(generator, ctx, target, *op, value, stmt.location, true)?;
generator.gen_assign(ctx, target, value.unwrap())?;
}
StmtKind::Try { .. } => gen_try(generator, ctx, stmt)?,

View File

@@ -1,12 +1,16 @@
-#![deny(clippy::all)]
+#![deny(
+future_incompatible,
+let_underscore,
+nonstandard_style,
+rust_2024_compatibility,
+clippy::all,
+)]
#![warn(clippy::pedantic)]
#![allow(
dead_code,
-clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::enum_glob_use,
-clippy::implicit_hasher,
clippy::missing_errors_doc,
clippy::missing_panics_doc,
clippy::module_name_repetitions,

View File

@@ -239,7 +239,7 @@ impl TryFrom<SymbolValue> for u64 {
match value {
SymbolValue::I32(v) => u64::try_from(v).map_err(|_| ()),
SymbolValue::I64(v) => u64::try_from(v).map_err(|_| ()),
-SymbolValue::U32(v) => Ok(v as u64),
+SymbolValue::U32(v) => Ok(u64::from(v)),
SymbolValue::U64(v) => Ok(v),
_ => Err(()),
}
@@ -253,10 +253,10 @@ impl TryFrom<SymbolValue> for i128 {
/// numeric.
fn try_from(value: SymbolValue) -> Result<Self, Self::Error> {
match value {
-SymbolValue::I32(v) => Ok(v as i128),
-SymbolValue::I64(v) => Ok(v as i128),
-SymbolValue::U32(v) => Ok(v as i128),
-SymbolValue::U64(v) => Ok(v as i128),
+SymbolValue::I32(v) => Ok(i128::from(v)),
+SymbolValue::I64(v) => Ok(i128::from(v)),
+SymbolValue::U32(v) => Ok(i128::from(v)),
+SymbolValue::U64(v) => Ok(i128::from(v)),
_ => Err(()),
}
}

View File

@@ -561,7 +561,9 @@ impl TopLevelComposer {
unifier,
&primitive_types,
b,
-vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(),
+vec![(*class_def_id, class_type_vars.clone())]
+.into_iter()
+.collect::<HashMap<_, _>>(),
)?;
if let TypeAnnotation::CustomClass { .. } = &base_ty {
@@ -1153,7 +1155,7 @@ impl TopLevelComposer {
annotation_expr,
vec![(class_id, class_type_vars_def.clone())]
.into_iter()
-.collect(),
+.collect::<HashMap<_, _>>(),
)?
};
// find type vars within this method parameter type annotation
@@ -1218,7 +1220,9 @@ impl TopLevelComposer {
unifier,
primitives,
result,
-vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
+vec![(class_id, class_type_vars_def.clone())]
+.into_iter()
+.collect::<HashMap<_, _>>(),
)?;
// find type vars within this return type annotation
let type_vars_within =
@@ -1312,7 +1316,9 @@ impl TopLevelComposer {
unifier,
primitives,
annotation.as_ref(),
-vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
+vec![(class_id, class_type_vars_def.clone())]
+.into_iter()
+.collect::<HashMap<_, _>>(),
)?;
// find type vars within this return type annotation
let type_vars_within =

View File

@@ -68,18 +68,18 @@ impl TypeAnnotation {
/// generic variables associated with the definition.
/// * `type_var` - The type variable associated with the type argument currently being parsed. Pass
/// [`None`] when this function is invoked externally.
-pub fn parse_ast_to_type_annotation_kinds<T>(
+pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
resolver: &(dyn SymbolResolver + Send + Sync),
top_level_defs: &[Arc<RwLock<TopLevelDef>>],
unifier: &mut Unifier,
primitives: &PrimitiveStore,
expr: &ast::Expr<T>,
// the key stores the type_var of this topleveldef::class, we only need this field here
-locked: HashMap<DefinitionId, Vec<Type>>,
+locked: HashMap<DefinitionId, Vec<Type>, S>,
) -> Result<TypeAnnotation, HashSet<String>> {
let name_handle = |id: &StrRef,
unifier: &mut Unifier,
-locked: HashMap<DefinitionId, Vec<Type>>| {
+locked: HashMap<DefinitionId, Vec<Type>, S>| {
if id == &"int32".into() {
Ok(TypeAnnotation::Primitive(primitives.int32))
} else if id == &"int64".into() {
@@ -146,7 +146,7 @@ pub fn parse_ast_to_type_annotation_kinds<T>(
|id: &StrRef,
slice: &ast::Expr<T>,
unifier: &mut Unifier,
-mut locked: HashMap<DefinitionId, Vec<Type>>| {
+mut locked: HashMap<DefinitionId, Vec<Type>, S>| {
if ["virtual".into(), "Generic".into(), "list".into(), "tuple".into(), "Option".into()].contains(id)
{
return Err(HashSet::from([
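Making the function generic over S: BuildHasher is what clippy::implicit_hasher asks for, and it is why that allow could be removed from nac3core: callers may now pass a HashMap built with any hasher, not just the default RandomState. A small self-contained sketch of the same pattern:

use std::collections::HashMap;
use std::hash::BuildHasher;

// Accepts a HashMap with any BuildHasher (default SipHash, a faster third-party one, ...).
fn total<S: BuildHasher>(counts: &HashMap<String, u32, S>) -> u64 {
    counts.values().copied().map(u64::from).sum()
}

fn main() {
    let mut counts = HashMap::new(); // RandomState, the default hasher
    counts.insert("a".to_string(), 2);
    counts.insert("b".to_string(), 3);
    assert_eq!(total(&counts), 5);
}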

View File

@ -13,7 +13,7 @@ use std::rc::Rc;
use itertools::Itertools; use itertools::Itertools;
#[must_use] #[must_use]
pub fn binop_name(op: &Operator) -> &'static str { pub fn binop_name(op: Operator) -> &'static str {
match op { match op {
Operator::Add => "__add__", Operator::Add => "__add__",
Operator::Sub => "__sub__", Operator::Sub => "__sub__",
@ -32,7 +32,7 @@ pub fn binop_name(op: &Operator) -> &'static str {
} }
#[must_use] #[must_use]
pub fn binop_assign_name(op: &Operator) -> &'static str { pub fn binop_assign_name(op: Operator) -> &'static str {
match op { match op {
Operator::Add => "__iadd__", Operator::Add => "__iadd__",
Operator::Sub => "__isub__", Operator::Sub => "__isub__",
@ -51,7 +51,7 @@ pub fn binop_assign_name(op: &Operator) -> &'static str {
} }
#[must_use] #[must_use]
pub fn unaryop_name(op: &Unaryop) -> &'static str { pub fn unaryop_name(op: Unaryop) -> &'static str {
match op { match op {
Unaryop::UAdd => "__pos__", Unaryop::UAdd => "__pos__",
Unaryop::USub => "__neg__", Unaryop::USub => "__neg__",
@ -61,7 +61,7 @@ pub fn unaryop_name(op: &Unaryop) -> &'static str {
} }
#[must_use] #[must_use]
pub fn comparison_name(op: &Cmpop) -> Option<&'static str> { pub fn comparison_name(op: Cmpop) -> Option<&'static str> {
match op { match op {
Cmpop::Lt => Some("__lt__"), Cmpop::Lt => Some("__lt__"),
Cmpop::LtE => Some("__le__"), Cmpop::LtE => Some("__le__"),
@ -115,7 +115,7 @@ pub fn impl_binop(
let ret_ty = ret_ty.unwrap_or_else(|| unifier.get_fresh_var(None, None).0); let ret_ty = ret_ty.unwrap_or_else(|| unifier.get_fresh_var(None, None).0);
for op in ops { for op in ops {
fields.insert(binop_name(op).into(), { fields.insert(binop_name(*op).into(), {
( (
unifier.add_ty(TypeEnum::TFunc(FunSignature { unifier.add_ty(TypeEnum::TFunc(FunSignature {
ret: ret_ty, ret: ret_ty,
@ -130,7 +130,7 @@ pub fn impl_binop(
) )
}); });
fields.insert(binop_assign_name(op).into(), { fields.insert(binop_assign_name(*op).into(), {
( (
unifier.add_ty(TypeEnum::TFunc(FunSignature { unifier.add_ty(TypeEnum::TFunc(FunSignature {
ret: ret_ty, ret: ret_ty,
@ -154,7 +154,7 @@ pub fn impl_unaryop(unifier: &mut Unifier, ty: Type, ret_ty: Option<Type>, ops:
for op in ops { for op in ops {
fields.insert( fields.insert(
unaryop_name(op).into(), unaryop_name(*op).into(),
( (
unifier.add_ty(TypeEnum::TFunc(FunSignature { unifier.add_ty(TypeEnum::TFunc(FunSignature {
ret: ret_ty, ret: ret_ty,
@ -194,7 +194,7 @@ pub fn impl_cmpop(
for op in ops { for op in ops {
fields.insert( fields.insert(
comparison_name(op).unwrap().into(), comparison_name(*op).unwrap().into(),
( (
unifier.add_ty(TypeEnum::TFunc(FunSignature { unifier.add_ty(TypeEnum::TFunc(FunSignature {
ret: ret_ty, ret: ret_ty,
@ -420,7 +420,7 @@ pub fn typeof_ndarray_broadcast(
pub fn typeof_binop( pub fn typeof_binop(
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
op: &Operator, op: Operator,
lhs: Type, lhs: Type,
rhs: Type, rhs: Type,
) -> Result<Option<Type>, String> { ) -> Result<Option<Type>, String> {
@ -465,7 +465,7 @@ pub fn typeof_binop(
(lhs, rhs) if lhs == 0 || rhs == 0 => { (lhs, rhs) if lhs == 0 || rhs == 0 => {
return Err(format!( return Err(format!(
"Input operand {} does not have enough dimensions (has {lhs}, requires {rhs})", "Input operand {} does not have enough dimensions (has {lhs}, requires {rhs})",
(rhs == 0) as u8 u8::from(rhs == 0)
)) ))
} }
(lhs, rhs) => { (lhs, rhs) => {
@ -511,16 +511,16 @@ pub fn typeof_binop(
pub fn typeof_unaryop( pub fn typeof_unaryop(
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
op: &Unaryop, op: Unaryop,
operand: Type, operand: Type,
) -> Result<Option<Type>, String> { ) -> Result<Option<Type>, String> {
let operand_obj_id = operand.obj_id(unifier); let operand_obj_id = operand.obj_id(unifier);
if *op == Unaryop::Not && operand_obj_id.is_some_and(|id| id == primitives.ndarray.obj_id(unifier).unwrap()) { if op == Unaryop::Not && operand_obj_id.is_some_and(|id| id == primitives.ndarray.obj_id(unifier).unwrap()) {
return Err("The truth value of an array with more than one element is ambiguous".to_string()) return Err("The truth value of an array with more than one element is ambiguous".to_string())
} }
Ok(match *op { Ok(match op {
Unaryop::Not => { Unaryop::Not => {
match operand_obj_id { match operand_obj_id {
Some(v) if v == PRIMITIVE_DEF_IDS.ndarray => Some(operand), Some(v) if v == PRIMITIVE_DEF_IDS.ndarray => Some(operand),
@ -544,7 +544,7 @@ pub fn typeof_unaryop(
if operand_obj_id.is_some_and(|id| id == PRIMITIVE_DEF_IDS.ndarray) { if operand_obj_id.is_some_and(|id| id == PRIMITIVE_DEF_IDS.ndarray) {
let (dtype, _) = unpack_ndarray_var_tys(unifier, operand); let (dtype, _) = unpack_ndarray_var_tys(unifier, operand);
if dtype.obj_id(unifier).is_some_and(|id| id == PRIMITIVE_DEF_IDS.bool) { if dtype.obj_id(unifier).is_some_and(|id| id == PRIMITIVE_DEF_IDS.bool) {
return Err(if *op == Unaryop::UAdd { return Err(if op == Unaryop::UAdd {
"The ufunc 'positive' cannot be applied to ndarray[bool, N]".to_string() "The ufunc 'positive' cannot be applied to ndarray[bool, N]".to_string()
} else { } else {
"The numpy boolean negative, the `-` operator, is not supported, use the `~` operator function instead.".to_string() "The numpy boolean negative, the `-` operator, is not supported, use the `~` operator function instead.".to_string()
@ -567,7 +567,7 @@ pub fn typeof_unaryop(
pub fn typeof_cmpop( pub fn typeof_cmpop(
unifier: &mut Unifier, unifier: &mut Unifier,
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
_op: &Cmpop, _op: Cmpop,
lhs: Type, lhs: Type,
rhs: Type, rhs: Type,
) -> Result<Option<Type>, String> { ) -> Result<Option<Type>, String> {

View File

@ -456,7 +456,7 @@ impl<'a> Fold<()> for Inferencer<'a> {
(None, None) => {} (None, None) => {}
}, },
ast::StmtKind::AugAssign { target, op, value, .. } => { ast::StmtKind::AugAssign { target, op, value, .. } => {
let res_ty = self.infer_bin_ops(stmt.location, target, op, value, true)?; let res_ty = self.infer_bin_ops(stmt.location, target, *op, value, true)?;
self.unify(res_ty, target.custom.unwrap(), &stmt.location)?; self.unify(res_ty, target.custom.unwrap(), &stmt.location)?;
} }
ast::StmtKind::Assert { test, msg, .. } => { ast::StmtKind::Assert { test, msg, .. } => {
@ -534,20 +534,20 @@ impl<'a> Fold<()> for Inferencer<'a> {
ExprKind::List { elts, .. } => Some(self.infer_list(elts)?), ExprKind::List { elts, .. } => Some(self.infer_list(elts)?),
ExprKind::Tuple { elts, .. } => Some(self.infer_tuple(elts)?), ExprKind::Tuple { elts, .. } => Some(self.infer_tuple(elts)?),
ExprKind::Attribute { value, attr, ctx } => { ExprKind::Attribute { value, attr, ctx } => {
Some(self.infer_attribute(value, *attr, ctx)?) Some(self.infer_attribute(value, *attr, *ctx)?)
} }
ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?), ExprKind::BoolOp { values, .. } => Some(self.infer_bool_ops(values)?),
ExprKind::BinOp { left, op, right } => { ExprKind::BinOp { left, op, right } => {
Some(self.infer_bin_ops(expr.location, left, op, right, false)?) Some(self.infer_bin_ops(expr.location, left, *op, right, false)?)
} }
ExprKind::UnaryOp { op, operand } => { ExprKind::UnaryOp { op, operand } => {
Some(self.infer_unary_ops(expr.location, op, operand)?) Some(self.infer_unary_ops(expr.location, *op, operand)?)
} }
ExprKind::Compare { left, ops, comparators } => { ExprKind::Compare { left, ops, comparators } => {
Some(self.infer_compare(expr.location, left, ops, comparators)?) Some(self.infer_compare(expr.location, left, ops, comparators)?)
} }
ExprKind::Subscript { value, slice, ctx, .. } => { ExprKind::Subscript { value, slice, ctx, .. } => {
Some(self.infer_subscript(value.as_ref(), slice.as_ref(), ctx)?) Some(self.infer_subscript(value.as_ref(), slice.as_ref(), *ctx)?)
} }
ExprKind::IfExp { test, body, orelse } => { ExprKind::IfExp { test, body, orelse } => {
Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?) Some(self.infer_if_expr(test, body.as_ref(), orelse.as_ref())?)
@ -847,7 +847,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: None, custom: None,
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords: vec![], keywords: vec![],
@ -904,7 +904,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords: vec![], keywords: vec![],
@ -946,7 +946,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords: vec![], keywords: vec![],
@ -1054,7 +1054,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0, arg1], args: vec![arg0, arg1],
keywords: vec![], keywords: vec![],
@ -1135,7 +1135,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords: vec![], keywords: vec![],
@ -1189,7 +1189,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords: vec![], keywords: vec![],
@ -1246,7 +1246,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0, arg1], args: vec![arg0, arg1],
keywords: vec![], keywords: vec![],
@ -1317,7 +1317,7 @@ impl<'a> Inferencer<'a> {
func: Box::new(Located { func: Box::new(Located {
custom: Some(custom), custom: Some(custom),
location: func.location, location: func.location,
node: ExprKind::Name { id: *id, ctx: ctx.clone() }, node: ExprKind::Name { id: *id, ctx: *ctx },
}), }),
args: vec![arg0], args: vec![arg0],
keywords, keywords,
@ -1459,12 +1459,12 @@ impl<'a> Inferencer<'a> {
&mut self, &mut self,
value: &ast::Expr<Option<Type>>, value: &ast::Expr<Option<Type>>,
attr: StrRef, attr: StrRef,
ctx: &ExprContext, ctx: ExprContext,
) -> InferenceResult { ) -> InferenceResult {
let ty = value.custom.unwrap(); let ty = value.custom.unwrap();
if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) { if let TypeEnum::TObj { fields, .. } = &*self.unifier.get_ty(ty) {
// just a fast path // just a fast path
match (fields.get(&attr), ctx == &ExprContext::Store) { match (fields.get(&attr), ctx == ExprContext::Store) {
(Some((ty, true)), _) | (Some((ty, false)), false) => Ok(*ty), (Some((ty, true)), _) | (Some((ty, false)), false) => Ok(*ty),
(Some((_, false)), true) => { (Some((_, false)), true) => {
report_error(&format!("Field `{attr}` is immutable"), value.location) report_error(&format!("Field `{attr}` is immutable"), value.location)
@ -1478,7 +1478,7 @@ impl<'a> Inferencer<'a> {
let attr_ty = self.unifier.get_dummy_var().0; let attr_ty = self.unifier.get_dummy_var().0;
let fields = once(( let fields = once((
attr.into(), attr.into(),
RecordField::new(attr_ty, ctx == &ExprContext::Store, Some(value.location)), RecordField::new(attr_ty, ctx == ExprContext::Store, Some(value.location)),
)) ))
.collect(); .collect();
let record = self.unifier.add_record(fields); let record = self.unifier.add_record(fields);
@ -1499,7 +1499,7 @@ impl<'a> Inferencer<'a> {
&mut self, &mut self,
location: Location, location: Location,
left: &ast::Expr<Option<Type>>, left: &ast::Expr<Option<Type>>,
op: &ast::Operator, op: ast::Operator,
right: &ast::Expr<Option<Type>>, right: &ast::Expr<Option<Type>>,
is_aug_assign: bool, is_aug_assign: bool,
) -> InferenceResult { ) -> InferenceResult {
@ -1548,7 +1548,7 @@ impl<'a> Inferencer<'a> {
fn infer_unary_ops( fn infer_unary_ops(
&mut self, &mut self,
location: Location, location: Location,
op: &ast::Unaryop, op: ast::Unaryop,
operand: &ast::Expr<Option<Type>>, operand: &ast::Expr<Option<Type>>,
) -> InferenceResult { ) -> InferenceResult {
let method = unaryop_name(op).into(); let method = unaryop_name(op).into();
@ -1576,7 +1576,7 @@ impl<'a> Inferencer<'a> {
let mut res = None; let mut res = None;
for (a, b, c) in izip!(once(left).chain(comparators), comparators, ops) { for (a, b, c) in izip!(once(left).chain(comparators), comparators, ops) {
let method = comparison_name(c) let method = comparison_name(*c)
.ok_or_else(|| HashSet::from([ .ok_or_else(|| HashSet::from([
"unsupported comparator".to_string() "unsupported comparator".to_string()
]))? ]))?
@ -1585,7 +1585,7 @@ impl<'a> Inferencer<'a> {
let ret = typeof_cmpop( let ret = typeof_cmpop(
self.unifier, self.unifier,
self.primitives, self.primitives,
c, *c,
a.custom.unwrap(), a.custom.unwrap(),
b.custom.unwrap(), b.custom.unwrap(),
).map_err(|e| HashSet::from([format!("{e} (at {})", b.location)]))?; ).map_err(|e| HashSet::from([format!("{e} (at {})", b.location)]))?;
@ -1629,7 +1629,7 @@ impl<'a> Inferencer<'a> {
let ndims = values.iter() let ndims = values.iter()
.map(|ndim| match *ndim { .map(|ndim| match *ndim {
SymbolValue::U64(v) => Ok(v), SymbolValue::U64(v) => Ok(v),
SymbolValue::U32(v) => Ok(v as u64), SymbolValue::U32(v) => Ok(u64::from(v)),
SymbolValue::I32(v) => u64::try_from(v).map_err(|_| HashSet::from([ SymbolValue::I32(v) => u64::try_from(v).map_err(|_| HashSet::from([
format!("Expected non-negative literal for ndarray.ndims, got {v}"), format!("Expected non-negative literal for ndarray.ndims, got {v}"),
])), ])),
@ -1674,7 +1674,7 @@ impl<'a> Inferencer<'a> {
&mut self, &mut self,
value: &ast::Expr<Option<Type>>, value: &ast::Expr<Option<Type>>,
slice: &ast::Expr<Option<Type>>, slice: &ast::Expr<Option<Type>>,
ctx: &ExprContext, ctx: ExprContext,
) -> InferenceResult { ) -> InferenceResult {
let ty = self.unifier.get_dummy_var().0; let ty = self.unifier.get_dummy_var().0;
match &slice.node { match &slice.node {
@ -1707,7 +1707,7 @@ impl<'a> Inferencer<'a> {
let ind = ind.ok_or_else(|| HashSet::from(["Index must be int32".to_string()]))?; let ind = ind.ok_or_else(|| HashSet::from(["Index must be int32".to_string()]))?;
let map = once(( let map = once((
ind.into(), ind.into(),
RecordField::new(ty, ctx == &ExprContext::Store, Some(value.location)), RecordField::new(ty, ctx == ExprContext::Store, Some(value.location)),
)) ))
.collect(); .collect();
let seq = self.unifier.add_record(map); let seq = self.unifier.add_record(map);

View File

@ -801,19 +801,9 @@ impl Unifier {
(TLiteral { values: val1, .. }, TLiteral { values: val2, .. }) => { (TLiteral { values: val1, .. }, TLiteral { values: val2, .. }) => {
for (v1, v2) in zip(val1, val2) { for (v1, v2) in zip(val1, val2) {
if v1 != v2 { if v1 != v2 {
let symbol_value_to_int = |value: &SymbolValue| -> Option<i128> {
match value {
SymbolValue::I32(v) => Some(*v as i128),
SymbolValue::I64(v) => Some(*v as i128),
SymbolValue::U32(v) => Some(*v as i128),
SymbolValue::U64(v) => Some(*v as i128),
_ => None,
}
};
// Try performing integer promotion on literals // Try performing integer promotion on literals
let v1i = symbol_value_to_int(v1); let v1i = i128::try_from(v1.clone()).ok();
let v2i = symbol_value_to_int(v2); let v2i = i128::try_from(v2.clone()).ok();
if v1i != v2i { if v1i != v2i {
return Self::incompatible_types(a, b) return Self::incompatible_types(a, b)
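The deleted closure is replaced by `i128::try_from(v.clone())`, so the integer-promotion comparison now leans on a `TryFrom<SymbolValue>` impl that is not shown in this hunk. The following is only a sketch of what such a conversion could look like, with a pared-down, hypothetical `SymbolValue`; the real type in nac3core has more variants and its impl may differ:

    #[derive(Clone, Debug, PartialEq)]
    enum SymbolValue {
        I32(i32),
        I64(i64),
        U32(u32),
        U64(u64),
        Bool(bool),
    }

    impl TryFrom<SymbolValue> for i128 {
        type Error = ();

        // Integer variants widen losslessly into i128; anything else fails.
        fn try_from(value: SymbolValue) -> Result<Self, Self::Error> {
            match value {
                SymbolValue::I32(v) => Ok(i128::from(v)),
                SymbolValue::I64(v) => Ok(i128::from(v)),
                SymbolValue::U32(v) => Ok(i128::from(v)),
                SymbolValue::U64(v) => Ok(i128::from(v)),
                SymbolValue::Bool(_) => Err(()),
            }
        }
    }

    fn main() {
        assert_eq!(i128::try_from(SymbolValue::U64(42)), Ok(42));
        assert!(i128::try_from(SymbolValue::Bool(true)).is_err());
    }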

View File

@ -60,7 +60,7 @@ impl<'a> DwarfReader<'a> {
let mut byte: u8; let mut byte: u8;
loop { loop {
byte = self.read_u8(); byte = self.read_u8();
result |= ((byte & 0x7F) as u64) << shift; result |= u64::from(byte & 0x7F) << shift;
shift += 7; shift += 7;
if byte & 0x80 == 0 { if byte & 0x80 == 0 {
break; break;
@ -75,7 +75,7 @@ impl<'a> DwarfReader<'a> {
let mut byte: u8; let mut byte: u8;
loop { loop {
byte = self.read_u8(); byte = self.read_u8();
result |= ((byte & 0x7F) as u64) << shift; result |= u64::from(byte & 0x7F) << shift;
shift += 7; shift += 7;
if byte & 0x80 == 0 { if byte & 0x80 == 0 {
break; break;
@ -157,10 +157,9 @@ fn read_encoded_pointer(reader: &mut DwarfReader, encoding: u8) -> Result<usize,
} }
match encoding & 0x0F { match encoding & 0x0F {
DW_EH_PE_absptr => Ok(reader.read_u32() as usize), DW_EH_PE_absptr | DW_EH_PE_udata4 => Ok(reader.read_u32() as usize),
DW_EH_PE_uleb128 => Ok(reader.read_uleb128() as usize), DW_EH_PE_uleb128 => Ok(reader.read_uleb128() as usize),
DW_EH_PE_udata2 => Ok(reader.read_u16() as usize), DW_EH_PE_udata2 => Ok(reader.read_u16() as usize),
DW_EH_PE_udata4 => Ok(reader.read_u32() as usize),
DW_EH_PE_udata8 => Ok(reader.read_u64() as usize), DW_EH_PE_udata8 => Ok(reader.read_u64() as usize),
DW_EH_PE_sleb128 => Ok(reader.read_sleb128() as usize), DW_EH_PE_sleb128 => Ok(reader.read_sleb128() as usize),
DW_EH_PE_sdata2 => Ok(reader.read_i16() as usize), DW_EH_PE_sdata2 => Ok(reader.read_i16() as usize),
@ -226,8 +225,8 @@ impl<'a> EH_Frame<'a> {
/// Creates an [EH_Frame] using the bytes in the `.eh_frame` section and its address in the ELF /// Creates an [EH_Frame] using the bytes in the `.eh_frame` section and its address in the ELF
/// file. /// file.
pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> Result<EH_Frame, ()> { pub fn new(eh_frame_slice: &[u8], eh_frame_addr: u32) -> EH_Frame {
Ok(EH_Frame { reader: DwarfReader::new(eh_frame_slice, eh_frame_addr) }) EH_Frame { reader: DwarfReader::new(eh_frame_slice, eh_frame_addr) }
} }
/// Returns an [Iterator] over all Call Frame Information (CFI) records. /// Returns an [Iterator] over all Call Frame Information (CFI) records.
@ -264,7 +263,7 @@ impl<'a> CFI_Record<'a> {
// length == u32::MAX means that the length is only representable with 64 bits, // length == u32::MAX means that the length is only representable with 64 bits,
// which does not make sense in a system with 32-bit address. // which does not make sense in a system with 32-bit address.
0xFFFFFFFF => unimplemented!(), 0xFFFF_FFFF => unimplemented!(),
_ => { _ => {
let mut fde_reader = DwarfReader::from_reader(cie_reader, false); let mut fde_reader = DwarfReader::from_reader(cie_reader, false);
@ -371,7 +370,7 @@ impl<'a> Iterator for CFI_Records<'a> {
let length = match length { let length = match length {
// eh_frame with 0-length means the CIE is terminated // eh_frame with 0-length means the CIE is terminated
0 => return None, 0 => return None,
0xFFFFFFFF => unimplemented!("CIE entries larger than 4 bytes not supported"), 0xFFFF_FFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
other => other, other => other,
} as usize; } as usize;
@ -417,7 +416,7 @@ impl<'a> Iterator for FDE_Records<'a> {
let length = match self.reader.read_u32() { let length = match self.reader.read_u32() {
// eh_frame with 0-length means the CIE is terminated // eh_frame with 0-length means the CIE is terminated
0 => return None, 0 => return None,
0xFFFFFFFF => unimplemented!("CIE entries larger than 4 bytes not supported"), 0xFFFF_FFFF => unimplemented!("CIE entries larger than 4 bytes not supported"),
other => other, other => other,
} as usize; } as usize;
@ -504,7 +503,7 @@ impl<'a> EH_Frame_Hdr<'a> {
// The original length field should be able to hold the entire value. // The original length field should be able to hold the entire value.
// The device memory space is limited to 32-bits addresses anyway. // The device memory space is limited to 32-bits addresses anyway.
let entry_length = reader.read_u32(); let entry_length = reader.read_u32();
if entry_length == 0 || entry_length == 0xFFFFFFFF { if entry_length == 0 || entry_length == 0xFFFF_FFFF {
unimplemented!() unimplemented!()
} }
@ -515,7 +514,7 @@ impl<'a> EH_Frame_Hdr<'a> {
fde_count += 1; fde_count += 1;
} }
reader.offset(entry_length - mem::size_of::<u32>() as u32) reader.offset(entry_length - mem::size_of::<u32>() as u32);
} }
12 + fde_count * 8 12 + fde_count * 8
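The two loops patched above decode DWARF's unsigned LEB128 encoding: each byte contributes its low 7 bits, least-significant group first, and a clear high bit terminates the value. As a self-contained reference (the in-tree `DwarfReader` works on its own cursor and also handles the signed variant):

    // Minimal ULEB128 decoder over a byte slice; returns the value and the
    // number of bytes consumed.
    fn read_uleb128(bytes: &[u8]) -> (u64, usize) {
        let mut result: u64 = 0;
        let mut shift = 0;
        let mut consumed = 0;
        for &byte in bytes {
            consumed += 1;
            result |= u64::from(byte & 0x7F) << shift;
            if byte & 0x80 == 0 {
                break;
            }
            shift += 7;
        }
        (result, consumed)
    }

    fn main() {
        // 0xE5 0x8E 0x26 is the textbook encoding of 624485.
        assert_eq!(read_uleb128(&[0xE5, 0x8E, 0x26]), (624_485, 3));
    }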

View File

@ -1,5 +1,5 @@
/* generated from elf.h with rust-bindgen and then manually altered */ /* generated from elf.h with rust-bindgen and then manually altered */
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, dead_code)] #![allow(non_camel_case_types, non_snake_case, non_upper_case_globals, dead_code, clippy::pedantic)]
pub const EI_NIDENT: usize = 16; pub const EI_NIDENT: usize = 16;
pub const EI_MAG0: usize = 0; pub const EI_MAG0: usize = 0;

View File

@ -1,35 +1,23 @@
#![deny(clippy::all)] #![deny(
future_incompatible,
let_underscore,
nonstandard_style,
rust_2024_compatibility,
clippy::all,
)]
#![warn(clippy::pedantic)] #![warn(clippy::pedantic)]
#![allow( #![allow(
clippy::borrow_as_ptr,
clippy::cast_lossless,
clippy::cast_possible_truncation, clippy::cast_possible_truncation,
clippy::cast_possible_wrap, clippy::cast_possible_wrap,
clippy::cast_ptr_alignment,
clippy::cast_sign_loss, clippy::cast_sign_loss,
clippy::doc_markdown, clippy::doc_markdown,
clippy::enum_glob_use, clippy::enum_glob_use,
clippy::explicit_iter_loop,
clippy::expl_impl_clone_on_copy,
clippy::items_after_statements,
clippy::manual_let_else,
clippy::match_same_arms,
clippy::missing_errors_doc, clippy::missing_errors_doc,
clippy::missing_panics_doc, clippy::missing_panics_doc,
clippy::module_name_repetitions, clippy::module_name_repetitions,
clippy::needless_pass_by_value,
clippy::ptr_as_ptr,
clippy::redundant_closure_call,
clippy::result_unit_err,
clippy::semicolon_if_nothing_returned,
clippy::similar_names, clippy::similar_names,
clippy::stable_sort_primitive,
clippy::struct_field_names, clippy::struct_field_names,
clippy::too_many_lines, clippy::too_many_lines,
clippy::type_complexity,
clippy::unnecessary_wraps,
clippy::unnested_or_patterns,
clippy::unreadable_literal,
clippy::wildcard_imports, clippy::wildcard_imports,
)] )]
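The same three-layer lint setup recurs across the workspace in this commit: a hard `deny` baseline, `clippy::pedantic` promoted to warnings, and a deliberately shorter `allow` list. Lints that only a handful of functions trip can instead be silenced at the item level, which keeps the crate-wide list small; a sketch of that alternative:

    // A reviewed, item-level opt-out instead of a crate-wide allow.
    #[allow(clippy::cast_possible_truncation)]
    fn low_halfword(value: u32) -> u16 {
        // Deliberate truncation: only the low 16 bits are wanted here.
        value as u16
    }

    fn main() {
        assert_eq!(low_halfword(0x0001_ABCD), 0xABCD);
    }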
@ -105,45 +93,46 @@ struct SectionRecord<'a> {
data: Vec<u8>, data: Vec<u8>,
} }
fn read_unaligned<T: Copy>(data: &[u8], offset: usize) -> Result<T, ()> { fn read_unaligned<T: Copy>(data: &[u8], offset: usize) -> Option<T> {
if data.len() < offset + mem::size_of::<T>() { if data.len() < offset + mem::size_of::<T>() {
Err(()) None
} else { } else {
let ptr = data.as_ptr().wrapping_add(offset) as *const T; let ptr = data.as_ptr().wrapping_add(offset).cast();
Ok(unsafe { ptr::read_unaligned(ptr) }) Some(unsafe { ptr::read_unaligned(ptr) })
} }
} }
pub fn get_ref_slice<T: Copy>(data: &[u8], offset: usize, len: usize) -> Result<&[T], ()> { #[must_use]
pub fn get_ref_slice<T: Copy>(data: &[u8], offset: usize, len: usize) -> Option<&[T]> {
if data.len() < offset + mem::size_of::<T>() * len { if data.len() < offset + mem::size_of::<T>() * len {
Err(()) None
} else { } else {
let ptr = data.as_ptr().wrapping_add(offset) as *const T; let ptr = data.as_ptr().wrapping_add(offset).cast();
Ok(unsafe { slice::from_raw_parts(ptr, len) }) Some(unsafe { slice::from_raw_parts(ptr, len) })
} }
} }
fn from_struct_vec<T>(struct_vec: Vec<T>) -> Vec<u8> { fn from_struct_slice<T>(struct_vec: &[T]) -> Vec<u8> {
let ptr = struct_vec.as_ptr(); let ptr = struct_vec.as_ptr();
unsafe { slice::from_raw_parts(ptr as *const u8, struct_vec.len() * mem::size_of::<T>()) } unsafe { slice::from_raw_parts(ptr.cast(), mem::size_of_val(struct_vec)) }
.to_vec() .to_vec()
} }
fn to_struct_slice<T>(bytes: &[u8]) -> &[T] { fn to_struct_slice<T>(bytes: &[u8]) -> &[T] {
unsafe { slice::from_raw_parts(bytes.as_ptr() as *const T, bytes.len() / mem::size_of::<T>()) } unsafe { slice::from_raw_parts(bytes.as_ptr().cast(), bytes.len() / mem::size_of::<T>()) }
} }
fn to_struct_mut_slice<T>(bytes: &mut [u8]) -> &mut [T] { fn to_struct_mut_slice<T>(bytes: &mut [u8]) -> &mut [T] {
unsafe { unsafe {
slice::from_raw_parts_mut(bytes.as_mut_ptr() as *mut T, bytes.len() / mem::size_of::<T>()) slice::from_raw_parts_mut(bytes.as_mut_ptr().cast(), bytes.len() / mem::size_of::<T>())
} }
} }
fn elf_hash(name: &[u8]) -> u32 { fn elf_hash(name: &[u8]) -> u32 {
let mut h: u32 = 0; let mut h: u32 = 0;
for c in name { for c in name {
h = (h << 4) + *c as u32; h = (h << 4) + u32::from(*c);
let g = h & 0xf0000000; let g = h & 0xf000_0000;
if g != 0 { if g != 0 {
h ^= g >> 24; h ^= g >> 24;
h &= !g; h &= !g;
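The function above is the classic System V ELF hash, which later feeds the `.hash` section emitted by this linker. A standalone sketch mirroring it, for reference:

    fn elf_hash(name: &[u8]) -> u32 {
        let mut h: u32 = 0;
        for c in name {
            h = (h << 4) + u32::from(*c);
            let g = h & 0xf000_0000;
            if g != 0 {
                h ^= g >> 24;
            }
            // When g == 0 this is a no-op, so clearing the folded nibble
            // unconditionally matches the canonical formulation.
            h &= !g;
        }
        h
    }

    fn main() {
        println!("elf_hash(\"main\") = {:#x}", elf_hash(b"main"));
    }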
@ -237,6 +226,15 @@ impl<'a> Linker<'a> {
relocs: &[R], relocs: &[R],
target_section: Elf32_Word, target_section: Elf32_Word,
) -> Result<(), Error> { ) -> Result<(), Error> {
type RelocateFn = dyn Fn(&mut [u8], Elf32_Word);
struct RelocInfo<'a, R> {
pub defined_val: bool,
pub indirect_reloc: Option<&'a R>,
pub pc_relative: bool,
pub relocate: Option<Box<RelocateFn>>,
}
for reloc in relocs { for reloc in relocs {
let sym = match reloc.sym_info() as usize { let sym = match reloc.sym_info() as usize {
STN_UNDEF => None, STN_UNDEF => None,
@ -249,9 +247,8 @@ impl<'a> Linker<'a> {
let resolve_symbol_addr = let resolve_symbol_addr =
|sym_option: Option<&Elf32_Sym>| -> Result<Elf32_Word, Error> { |sym_option: Option<&Elf32_Sym>| -> Result<Elf32_Word, Error> {
let sym = match sym_option { let Some(sym) = sym_option else {
Some(sym) => sym, return Ok(0)
None => return Ok(0),
}; };
match sym.st_shndx { match sym.st_shndx {
@ -279,13 +276,6 @@ impl<'a> Linker<'a> {
.ok_or(Error::Parsing("Cannot find section with matching sh_index")) .ok_or(Error::Parsing("Cannot find section with matching sh_index"))
}; };
struct RelocInfo<'a, R> {
pub defined_val: bool,
pub indirect_reloc: Option<&'a R>,
pub pc_relative: bool,
pub relocate: Option<Box<dyn Fn(&mut [u8], Elf32_Word)>>,
}
let classify = |reloc: &R, sym_option: Option<&Elf32_Sym>| -> Option<RelocInfo<R>> { let classify = |reloc: &R, sym_option: Option<&Elf32_Sym>| -> Option<RelocInfo<R>> {
let defined_val = sym_option.map_or(true, |sym| { let defined_val = sym_option.map_or(true, |sym| {
sym.st_shndx != SHN_UNDEF || ELF32_ST_BIND(sym.st_info) == STB_LOCAL sym.st_shndx != SHN_UNDEF || ELF32_ST_BIND(sym.st_info) == STB_LOCAL
@ -297,7 +287,7 @@ impl<'a> Linker<'a> {
indirect_reloc: None, indirect_reloc: None,
pc_relative: true, pc_relative: true,
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
LittleEndian::write_u32(target_word, value) LittleEndian::write_u32(target_word, value);
})), })),
}), }),
@ -308,9 +298,9 @@ impl<'a> Linker<'a> {
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
LittleEndian::write_u32( LittleEndian::write_u32(
target_word, target_word,
(LittleEndian::read_u32(target_word) & 0x80000000) (LittleEndian::read_u32(target_word) & 0x8000_0000)
| value & 0x7FFFFFFF, | value & 0x7FFF_FFFF,
) );
})), })),
}), }),
@ -332,8 +322,8 @@ impl<'a> Linker<'a> {
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
let auipc_raw = LittleEndian::read_u32(target_word); let auipc_raw = LittleEndian::read_u32(target_word);
let auipc_insn = let auipc_insn =
(auipc_raw & 0xFFF) | ((value + 0x800) & 0xFFFFF000); (auipc_raw & 0xFFF) | ((value + 0x800) & 0xFFFF_F000);
LittleEndian::write_u32(target_word, auipc_insn) LittleEndian::write_u32(target_word, auipc_insn);
})), })),
}) })
} }
@ -343,7 +333,7 @@ impl<'a> Linker<'a> {
indirect_reloc: None, indirect_reloc: None,
pc_relative: true, pc_relative: true,
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
LittleEndian::write_u32(target_word, value) LittleEndian::write_u32(target_word, value);
})), })),
}), }),
@ -365,14 +355,14 @@ impl<'a> Linker<'a> {
// Here, we convert to direct addressing // Here, we convert to direct addressing
// GOT reloc (indirect) -> lw + addi // GOT reloc (indirect) -> lw + addi
// PCREL reloc (direct) -> addi // PCREL reloc (direct) -> addi
let (lo_opcode, lo_funct3) = (0b0010011, 0b000); let (lo_opcode, lo_funct3) = (0b001_0011, 0b000);
let addi_lw_raw = LittleEndian::read_u32(target_word); let addi_lw_raw = LittleEndian::read_u32(target_word);
let addi_insn = lo_opcode let addi_insn = lo_opcode
| (addi_lw_raw & 0xF8F80) | (addi_lw_raw & 0xF8F80)
| (lo_funct3 << 12) | (lo_funct3 << 12)
| ((value & 0xFFF) << 20); | ((value & 0xFFF) << 20);
LittleEndian::write_u32(target_word, addi_insn) LittleEndian::write_u32(target_word, addi_insn);
})), })),
}) })
} }
@ -392,7 +382,7 @@ impl<'a> Linker<'a> {
LittleEndian::write_u32( LittleEndian::write_u32(
target_word, target_word,
value, value,
) );
})), })),
}), }),
@ -402,7 +392,7 @@ impl<'a> Linker<'a> {
pc_relative: false, pc_relative: false,
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
let old_value = LittleEndian::read_u32(target_word); let old_value = LittleEndian::read_u32(target_word);
LittleEndian::write_u32(target_word, old_value.wrapping_add(value)) LittleEndian::write_u32(target_word, old_value.wrapping_add(value));
})), })),
}), }),
@ -412,7 +402,7 @@ impl<'a> Linker<'a> {
pc_relative: false, pc_relative: false,
relocate: Some(Box::new(|target_word, value| { relocate: Some(Box::new(|target_word, value| {
let old_value = LittleEndian::read_u32(target_word); let old_value = LittleEndian::read_u32(target_word);
LittleEndian::write_u32(target_word, old_value.wrapping_sub(value)) LittleEndian::write_u32(target_word, old_value.wrapping_sub(value));
})), })),
}), }),
@ -424,7 +414,7 @@ impl<'a> Linker<'a> {
LittleEndian::write_u16( LittleEndian::write_u16(
target_word, target_word,
value as u16, value as u16,
) );
})), })),
}), }),
@ -437,7 +427,7 @@ impl<'a> Linker<'a> {
LittleEndian::write_u16( LittleEndian::write_u16(
target_word, target_word,
old_value.wrapping_add(value as u16), old_value.wrapping_add(value as u16),
) );
})), })),
}), }),
@ -450,7 +440,7 @@ impl<'a> Linker<'a> {
LittleEndian::write_u16( LittleEndian::write_u16(
target_word, target_word,
old_value.wrapping_sub(value as u16), old_value.wrapping_sub(value as u16),
) );
})), })),
}), }),
@ -532,7 +522,7 @@ impl<'a> Linker<'a> {
if let Some(relocate) = reloc_info.relocate { if let Some(relocate) = reloc_info.relocate {
let target_word = &mut target_sec_image[reloc.offset() as usize..]; let target_word = &mut target_sec_image[reloc.offset() as usize..];
relocate(target_word, value) relocate(target_word, value);
} else { } else {
self.rela_dyn_relas.push(Elf32_Rela { self.rela_dyn_relas.push(Elf32_Rela {
r_offset: rela_off, r_offset: rela_off,
@ -580,8 +570,7 @@ impl<'a> Linker<'a> {
let eh_frame_slice = eh_frame_rec.data.as_slice(); let eh_frame_slice = eh_frame_rec.data.as_slice();
// Prepare a new buffer to dodge borrow check // Prepare a new buffer to dodge borrow check
let mut eh_frame_hdr_vec: Vec<u8> = vec![0; eh_frame_hdr_rec.shdr.sh_size as usize]; let mut eh_frame_hdr_vec: Vec<u8> = vec![0; eh_frame_hdr_rec.shdr.sh_size as usize];
let eh_frame = EH_Frame::new(eh_frame_slice, eh_frame_rec.shdr.sh_offset) let eh_frame = EH_Frame::new(eh_frame_slice, eh_frame_rec.shdr.sh_offset);
.map_err(|()| "cannot read EH frame")?;
let mut eh_frame_hdr = EH_Frame_Hdr::new( let mut eh_frame_hdr = EH_Frame_Hdr::new(
eh_frame_hdr_vec.as_mut_slice(), eh_frame_hdr_vec.as_mut_slice(),
eh_frame_hdr_rec.shdr.sh_offset, eh_frame_hdr_rec.shdr.sh_offset,
@ -603,55 +592,137 @@ impl<'a> Linker<'a> {
} }
pub fn ld(data: &'a [u8]) -> Result<Vec<u8>, Error> { pub fn ld(data: &'a [u8]) -> Result<Vec<u8>, Error> {
let ehdr = read_unaligned::<Elf32_Ehdr>(data, 0).map_err(|()| "cannot read ELF header")?; fn allocate_rela_dyn<R: Relocatable>(
linker: &Linker,
relocs: &[R],
) -> Result<(usize, Vec<u32>), Error> {
let mut alloc_size = 0;
let mut rela_dyn_sym_indices = Vec::new();
for reloc in relocs {
if reloc.sym_info() as usize == STN_UNDEF {
continue;
}
let sym: &Elf32_Sym = linker
.symtab
.get(reloc.sym_info() as usize)
.ok_or("symbol out of bounds of symbol table")?;
match (linker.isa, reloc.type_info()) {
// Absolute address relocations
// A runtime relocation is needed to find the loading address
(Isa::CortexA9, R_ARM_ABS32) | (Isa::RiscV32, R_RISCV_32) => {
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
// Relative address relocations
// Relay the relocation to the runtime linker only if the symbol is not defined
(Isa::CortexA9, R_ARM_REL32 | R_ARM_PREL31 | R_ARM_TARGET2)
| (Isa::RiscV32, R_RISCV_CALL_PLT
| R_RISCV_PCREL_HI20
| R_RISCV_GOT_HI20
| R_RISCV_32_PCREL
| R_RISCV_SET32
| R_RISCV_ADD32
| R_RISCV_SUB32
| R_RISCV_SET16
| R_RISCV_ADD16
| R_RISCV_SUB16
| R_RISCV_SET8
| R_RISCV_ADD8
| R_RISCV_SUB8
| R_RISCV_SET6
| R_RISCV_SUB6) => {
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
// RISC-V: Lower 12-bits relocations
// If the upper 20-bits relocation cannot be resolved,
// this relocation will be relayed to the runtime linker.
(Isa::RiscV32, R_RISCV_PCREL_LO12_I) => {
// Find the HI20 relocation
let indirect_reloc = relocs
.iter()
.find(|reloc| reloc.offset() == sym.st_value)
.ok_or("malformatted LO12 relocation")?;
let indirect_sym = linker.symtab[indirect_reloc.sym_info() as usize];
if ELF32_ST_BIND(indirect_sym.st_info) == STB_GLOBAL
&& indirect_sym.st_shndx == SHN_UNDEF
{
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
_ => {
println!("Relocation type 0x{:X?} is not supported", reloc.type_info());
unimplemented!()
}
}
}
Ok((alloc_size, rela_dyn_sym_indices))
}
let Some(ehdr) = read_unaligned::<Elf32_Ehdr>(data, 0) else {
Err("cannot read ELF header")?
};
let isa = match ehdr.e_machine { let isa = match ehdr.e_machine {
EM_ARM => Isa::CortexA9, EM_ARM => Isa::CortexA9,
EM_RISCV => Isa::RiscV32, EM_RISCV => Isa::RiscV32,
_ => return Err(Error::Parsing("unsupported architecture")), _ => return Err(Error::Parsing("unsupported architecture")),
}; };
let shdrs = get_ref_slice::<Elf32_Shdr>(data, ehdr.e_shoff as usize, ehdr.e_shnum as usize) let Some(shdrs) = get_ref_slice::<Elf32_Shdr>(
.map_err(|()| "cannot read section header table")?; data, ehdr.e_shoff as usize, ehdr.e_shnum as usize,
) else {
Err("cannot read section header table")?
};
// Read .strtab // Read .strtab
let strtab_shdr = shdrs[ehdr.e_shstrndx as usize]; let strtab_shdr = shdrs[ehdr.e_shstrndx as usize];
let strtab = let Some(strtab) = get_ref_slice::<u8>(
get_ref_slice::<u8>(data, strtab_shdr.sh_offset as usize, strtab_shdr.sh_size as usize) data, strtab_shdr.sh_offset as usize, strtab_shdr.sh_size as usize,
.map_err(|()| "cannot read the string table from data")?; ) else {
Err("cannot read the string table from data")?
};
// Read .symtab // Read .symtab
let symtab_shdr = shdrs let symtab_shdr = shdrs
.iter() .iter()
.find(|shdr| shdr.sh_type as usize == SHT_SYMTAB) .find(|shdr| shdr.sh_type as usize == SHT_SYMTAB)
.ok_or(Error::Parsing("cannot find the symbol table"))?; .ok_or(Error::Parsing("cannot find the symbol table"))?;
let symtab = get_ref_slice::<Elf32_Sym>( let Some(symtab) = get_ref_slice::<Elf32_Sym>(
data, data,
symtab_shdr.sh_offset as usize, symtab_shdr.sh_offset as usize,
symtab_shdr.sh_size as usize / mem::size_of::<Elf32_Sym>(), symtab_shdr.sh_size as usize / mem::size_of::<Elf32_Sym>(),
) ) else {
.map_err(|()| "cannot read the symbol table from data")?; Err("cannot read the symbol table from data")?
};
// Section table for the .elf paired with the section name // Section table for the .elf paired with the section name
// To be formalized incrementally // To be formalized incrementally
// Very hashmap-like structure, but the order matters, so it is a vector // Very hashmap-like structure, but the order matters, so it is a vector
let elf_shdrs = vec![ let elf_shdrs = vec![SectionRecord {
SectionRecord { shdr: Elf32_Shdr {
shdr: Elf32_Shdr { sh_name: 0,
sh_name: 0, sh_type: 0,
sh_type: 0, sh_flags: 0,
sh_flags: 0, sh_addr: 0,
sh_addr: 0, sh_offset: 0,
sh_offset: 0, sh_size: 0,
sh_size: 0, sh_link: 0,
sh_link: 0, sh_info: 0,
sh_info: 0, sh_addralign: 0,
sh_addralign: 0, sh_entsize: 0,
sh_entsize: 0,
},
name: "",
data: vec![0; 0],
}, },
]; name: "",
data: vec![0; 0],
}];
let elf_sh_data_off = mem::size_of::<Elf32_Ehdr>() + mem::size_of::<Elf32_Phdr>() * 5; let elf_sh_data_off = mem::size_of::<Elf32_Ehdr>() + mem::size_of::<Elf32_Phdr>() * 5;
// Image of the linked dynamic library, to be formalized incrementally // Image of the linked dynamic library, to be formalized incrementally
@ -787,21 +858,27 @@ impl<'a> Linker<'a> {
($shdr: expr, $stmt: expr) => { ($shdr: expr, $stmt: expr) => {
match $shdr.sh_type as usize { match $shdr.sh_type as usize {
SHT_RELA => { SHT_RELA => {
let relocs = get_ref_slice::<Elf32_Rela>( let Some(relocs) = get_ref_slice::<Elf32_Rela>(
data, data,
$shdr.sh_offset as usize, $shdr.sh_offset as usize,
$shdr.sh_size as usize / mem::size_of::<Elf32_Rela>(), $shdr.sh_size as usize / mem::size_of::<Elf32_Rela>(),
) ) else {
.map_err(|()| "cannot parse relocations")?; Err("cannot parse relocations")?
};
#[allow(clippy::redundant_closure_call)]
$stmt(relocs) $stmt(relocs)
} }
SHT_REL => { SHT_REL => {
let relocs = get_ref_slice::<Elf32_Rel>( let Some(relocs) = get_ref_slice::<Elf32_Rel>(
data, data,
$shdr.sh_offset as usize, $shdr.sh_offset as usize,
$shdr.sh_size as usize / mem::size_of::<Elf32_Rel>(), $shdr.sh_size as usize / mem::size_of::<Elf32_Rel>(),
) ) else {
.map_err(|()| "cannot parse relocations")?; Err("cannot parse relocations")?
};
#[allow(clippy::redundant_closure_call)]
$stmt(relocs) $stmt(relocs)
} }
_ => unreachable!(), _ => unreachable!(),
@ -809,84 +886,6 @@ impl<'a> Linker<'a> {
}; };
} }
fn allocate_rela_dyn<R: Relocatable>(
linker: &Linker,
relocs: &[R],
) -> Result<(usize, Vec<u32>), Error> {
let mut alloc_size = 0;
let mut rela_dyn_sym_indices = Vec::new();
for reloc in relocs {
if reloc.sym_info() as usize == STN_UNDEF {
continue;
}
let sym: &Elf32_Sym = linker
.symtab
.get(reloc.sym_info() as usize)
.ok_or("symbol out of bounds of symbol table")?;
match (linker.isa, reloc.type_info()) {
// Absolute address relocations
// A runtime relocation is needed to find the loading address
(Isa::CortexA9, R_ARM_ABS32) | (Isa::RiscV32, R_RISCV_32) => {
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
// Relative address relocations
// Relay the relocation to the runtime linker only if the symbol is not defined
(Isa::CortexA9, R_ARM_REL32)
| (Isa::CortexA9, R_ARM_PREL31)
| (Isa::CortexA9, R_ARM_TARGET2)
| (Isa::RiscV32, R_RISCV_CALL_PLT)
| (Isa::RiscV32, R_RISCV_PCREL_HI20)
| (Isa::RiscV32, R_RISCV_GOT_HI20)
| (Isa::RiscV32, R_RISCV_32_PCREL)
| (Isa::RiscV32, R_RISCV_SET32)
| (Isa::RiscV32, R_RISCV_ADD32)
| (Isa::RiscV32, R_RISCV_SUB32)
| (Isa::RiscV32, R_RISCV_SET16)
| (Isa::RiscV32, R_RISCV_ADD16)
| (Isa::RiscV32, R_RISCV_SUB16)
| (Isa::RiscV32, R_RISCV_SET8)
| (Isa::RiscV32, R_RISCV_ADD8)
| (Isa::RiscV32, R_RISCV_SUB8)
| (Isa::RiscV32, R_RISCV_SET6)
| (Isa::RiscV32, R_RISCV_SUB6) => {
if ELF32_ST_BIND(sym.st_info) == STB_GLOBAL && sym.st_shndx == SHN_UNDEF {
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
// RISC-V: Lower 12-bits relocations
// If the upper 20-bits relocation cannot be resolved,
// this relocation will be relayed to the runtime linker.
(Isa::RiscV32, R_RISCV_PCREL_LO12_I) => {
// Find the HI20 relocation
let indirect_reloc = relocs
.iter()
.find(|reloc| reloc.offset() == sym.st_value)
.ok_or("malformatted LO12 relocation")?;
let indirect_sym = linker.symtab[indirect_reloc.sym_info() as usize];
if ELF32_ST_BIND(indirect_sym.st_info) == STB_GLOBAL
&& indirect_sym.st_shndx == SHN_UNDEF
{
alloc_size += mem::size_of::<Elf32_Rela>(); // FIXME: RELA vs REL
rela_dyn_sym_indices.push(reloc.sym_info());
}
}
_ => {
println!("Relocation type 0x{:X?} is not supported", reloc.type_info());
unimplemented!()
}
}
}
Ok((alloc_size, rela_dyn_sym_indices))
}
for shdr in shdrs for shdr in shdrs
.iter() .iter()
.filter(|shdr| shdr.sh_type as usize == SHT_REL || shdr.sh_type as usize == SHT_RELA) .filter(|shdr| shdr.sh_type as usize == SHT_REL || shdr.sh_type as usize == SHT_RELA)
@ -914,7 +913,7 @@ impl<'a> Linker<'a> {
} }
// Avoid symbol duplication // Avoid symbol duplication
rela_dyn_sym_indices.sort(); rela_dyn_sym_indices.sort_unstable();
rela_dyn_sym_indices.dedup(); rela_dyn_sym_indices.dedup();
if rela_dyn_size != 0 { if rela_dyn_size != 0 {
@ -1097,7 +1096,7 @@ impl<'a> Linker<'a> {
sh_entsize: mem::size_of::<Elf32_Sym>() as Elf32_Word, sh_entsize: mem::size_of::<Elf32_Sym>() as Elf32_Word,
}, },
".dynsym", ".dynsym",
from_struct_vec(dynsym), from_struct_slice(&dynsym),
); );
let hash_elf_index = linker.load_section( let hash_elf_index = linker.load_section(
&Elf32_Shdr { &Elf32_Shdr {
@ -1113,7 +1112,7 @@ impl<'a> Linker<'a> {
sh_entsize: 4, sh_entsize: 4,
}, },
".hash", ".hash",
from_struct_vec(hash), from_struct_slice(&hash),
); );
// Link .rela.dyn header to the .dynsym header // Link .rela.dyn header to the .dynsym header
@ -1212,7 +1211,7 @@ impl<'a> Linker<'a> {
}; };
let dynamic_elf_index = let dynamic_elf_index =
linker.load_section(&dynamic_shdr, ".dynamic", from_struct_vec(dyn_entries)); linker.load_section(&dynamic_shdr, ".dynamic", from_struct_slice(&dyn_entries));
let last_w_sec_elf_index = linker.elf_shdrs.len() - 1; let last_w_sec_elf_index = linker.elf_shdrs.len() - 1;
@ -1361,7 +1360,7 @@ impl<'a> Linker<'a> {
// Prepare a STRTAB to hold the names of section headers // Prepare a STRTAB to hold the names of section headers
// Fix the sh_name field of the section headers // Fix the sh_name field of the section headers
let mut shstrtab = Vec::new(); let mut shstrtab = Vec::new();
for shdr_rec in linker.elf_shdrs.iter_mut() { for shdr_rec in &mut linker.elf_shdrs {
let shstrtab_index = shstrtab.len(); let shstrtab_index = shstrtab.len();
shstrtab.extend(shdr_rec.name.as_bytes()); shstrtab.extend(shdr_rec.name.as_bytes());
shstrtab.push(0); shstrtab.push(0);
@ -1402,11 +1401,11 @@ impl<'a> Linker<'a> {
let alignment = (4 - (linker.image.len() % 4)) % 4; let alignment = (4 - (linker.image.len() % 4)) % 4;
let sec_headers_offset = linker.image.len() + alignment; let sec_headers_offset = linker.image.len() + alignment;
linker.image.extend(vec![0; alignment]); linker.image.extend(vec![0; alignment]);
for rec in linker.elf_shdrs.iter() { for rec in &linker.elf_shdrs {
let shdr = rec.shdr; let shdr = rec.shdr;
linker.image.extend(unsafe { linker.image.extend(unsafe {
slice::from_raw_parts( slice::from_raw_parts(
&shdr as *const Elf32_Shdr as *const u8, ptr::addr_of!(shdr).cast(),
mem::size_of::<Elf32_Shdr>(), mem::size_of::<Elf32_Shdr>(),
) )
}); });
@ -1415,7 +1414,7 @@ impl<'a> Linker<'a> {
// Update the PHDRs // Update the PHDRs
let phdr_offset = mem::size_of::<Elf32_Ehdr>(); let phdr_offset = mem::size_of::<Elf32_Ehdr>();
unsafe { unsafe {
let phdr_ptr = linker.image.as_mut_ptr().add(phdr_offset) as *mut Elf32_Phdr; let phdr_ptr = linker.image.as_mut_ptr().add(phdr_offset).cast();
let phdr_slice = slice::from_raw_parts_mut(phdr_ptr, 5); let phdr_slice = slice::from_raw_parts_mut(phdr_ptr, 5);
// List of program headers: // List of program headers:
// 1. ELF headers & program headers // 1. ELF headers & program headers
@ -1492,7 +1491,7 @@ impl<'a> Linker<'a> {
} }
// Update the EHDR // Update the EHDR
let ehdr_ptr = linker.image.as_mut_ptr() as *mut Elf32_Ehdr; let ehdr_ptr = linker.image.as_mut_ptr().cast();
unsafe { unsafe {
*ehdr_ptr = Elf32_Ehdr { *ehdr_ptr = Elf32_Ehdr {
e_ident: ehdr.e_ident, e_ident: ehdr.e_ident,

View File

@ -17,9 +17,7 @@ pub fn make_config_comment(
location: com_loc, location: com_loc,
error: LexicalErrorType::OtherError( error: LexicalErrorType::OtherError(
format!( format!(
"config comment at top must have the same indentation with what it applies (comment at {}, statement at {})", "config comment at top must have the same indentation with what it applies (comment at {com_loc}, statement at {stmt_loc})",
com_loc,
stmt_loc,
) )
) )
} }

View File

@ -37,7 +37,7 @@ impl fmt::Display for LexicalErrorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self { match self {
LexicalErrorType::StringError => write!(f, "Got unexpected string"), LexicalErrorType::StringError => write!(f, "Got unexpected string"),
LexicalErrorType::FStringError(error) => write!(f, "Got error in f-string: {}", error), LexicalErrorType::FStringError(error) => write!(f, "Got error in f-string: {error}"),
LexicalErrorType::UnicodeError => write!(f, "Got unexpected unicode"), LexicalErrorType::UnicodeError => write!(f, "Got unexpected unicode"),
LexicalErrorType::NestingError => write!(f, "Got unexpected nesting"), LexicalErrorType::NestingError => write!(f, "Got unexpected nesting"),
LexicalErrorType::IndentationError => { LexicalErrorType::IndentationError => {
@ -59,13 +59,13 @@ impl fmt::Display for LexicalErrorType {
write!(f, "positional argument follows keyword argument") write!(f, "positional argument follows keyword argument")
} }
LexicalErrorType::UnrecognizedToken { tok } => { LexicalErrorType::UnrecognizedToken { tok } => {
write!(f, "Got unexpected token {}", tok) write!(f, "Got unexpected token {tok}")
} }
LexicalErrorType::LineContinuationError => { LexicalErrorType::LineContinuationError => {
write!(f, "unexpected character after line continuation character") write!(f, "unexpected character after line continuation character")
} }
LexicalErrorType::Eof => write!(f, "unexpected EOF while parsing"), LexicalErrorType::Eof => write!(f, "unexpected EOF while parsing"),
LexicalErrorType::OtherError(msg) => write!(f, "{}", msg), LexicalErrorType::OtherError(msg) => write!(f, "{msg}"),
} }
} }
} }
@ -96,7 +96,7 @@ impl fmt::Display for FStringErrorType {
FStringErrorType::UnopenedRbrace => write!(f, "Unopened '}}'"), FStringErrorType::UnopenedRbrace => write!(f, "Unopened '}}'"),
FStringErrorType::ExpectedRbrace => write!(f, "Expected '}}' after conversion flag."), FStringErrorType::ExpectedRbrace => write!(f, "Expected '}}' after conversion flag."),
FStringErrorType::InvalidExpression(error) => { FStringErrorType::InvalidExpression(error) => {
write!(f, "Invalid expression: {}", error) write!(f, "Invalid expression: {error}")
} }
FStringErrorType::InvalidConversionFlag => write!(f, "Invalid conversion flag"), FStringErrorType::InvalidConversionFlag => write!(f, "Invalid conversion flag"),
FStringErrorType::EmptyExpression => write!(f, "Empty expression"), FStringErrorType::EmptyExpression => write!(f, "Empty expression"),
@ -144,11 +144,6 @@ pub enum ParseErrorType {
impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError { impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError {
fn from(err: LalrpopError<Location, Tok, LexicalError>) -> Self { fn from(err: LalrpopError<Location, Tok, LexicalError>) -> Self {
match err { match err {
// TODO: Are there cases where this isn't an EOF?
LalrpopError::InvalidToken { location } => ParseError {
error: ParseErrorType::Eof,
location,
},
LalrpopError::ExtraToken { token } => ParseError { LalrpopError::ExtraToken { token } => ParseError {
error: ParseErrorType::ExtraToken(token.1), error: ParseErrorType::ExtraToken(token.1),
location: token.0, location: token.0,
@ -170,7 +165,10 @@ impl From<LalrpopError<Location, Tok, LexicalError>> for ParseError {
location: token.0, location: token.0,
} }
} }
LalrpopError::UnrecognizedEof { location, .. } => ParseError {
LalrpopError::UnrecognizedEof { location, .. }
// TODO: Are there cases where this isn't an EOF?
| LalrpopError::InvalidToken { location } => ParseError {
error: ParseErrorType::Eof, error: ParseErrorType::Eof,
location, location,
}, },
@ -188,7 +186,7 @@ impl fmt::Display for ParseErrorType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self { match *self {
ParseErrorType::Eof => write!(f, "Got unexpected EOF"), ParseErrorType::Eof => write!(f, "Got unexpected EOF"),
ParseErrorType::ExtraToken(ref tok) => write!(f, "Got extraneous token: {:?}", tok), ParseErrorType::ExtraToken(ref tok) => write!(f, "Got extraneous token: {tok:?}"),
ParseErrorType::InvalidToken => write!(f, "Got invalid token"), ParseErrorType::InvalidToken => write!(f, "Got invalid token"),
ParseErrorType::UnrecognizedToken(ref tok, ref expected) => { ParseErrorType::UnrecognizedToken(ref tok, ref expected) => {
if *tok == Tok::Indent { if *tok == Tok::Indent {
@ -196,10 +194,10 @@ impl fmt::Display for ParseErrorType {
} else if expected.as_deref() == Some("Indent") { } else if expected.as_deref() == Some("Indent") {
write!(f, "expected an indented block") write!(f, "expected an indented block")
} else { } else {
write!(f, "Got unexpected token {}", tok) write!(f, "Got unexpected token {tok}")
} }
} }
ParseErrorType::Lexical(ref error) => write!(f, "{}", error), ParseErrorType::Lexical(ref error) => write!(f, "{error}"),
} }
} }
} }
@ -207,6 +205,7 @@ impl fmt::Display for ParseErrorType {
impl Error for ParseErrorType {} impl Error for ParseErrorType {}
impl ParseErrorType { impl ParseErrorType {
#[must_use]
pub fn is_indentation_error(&self) -> bool { pub fn is_indentation_error(&self) -> bool {
match self { match self {
ParseErrorType::Lexical(LexicalErrorType::IndentationError) => true, ParseErrorType::Lexical(LexicalErrorType::IndentationError) => true,
@ -216,11 +215,11 @@ impl ParseErrorType {
_ => false, _ => false,
} }
} }
#[must_use]
pub fn is_tab_error(&self) -> bool { pub fn is_tab_error(&self) -> bool {
matches!( matches!(
self, self,
ParseErrorType::Lexical(LexicalErrorType::TabError) ParseErrorType::Lexical(LexicalErrorType::TabError | LexicalErrorType::TabsAfterSpaces)
| ParseErrorType::Lexical(LexicalErrorType::TabsAfterSpaces)
) )
} }
} }
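Most of the string-formatting hunks in this file switch to Rust 2021 inline format arguments (the `clippy::uninlined_format_args` lint, which this commit stops allowing). The rewrite is mechanical:

    fn describe(tok: &str) -> String {
        // Positional argument, flagged by clippy::uninlined_format_args:
        //     format!("Got unexpected token {}", tok)
        // Inline capture of the variable in the format string:
        format!("Got unexpected token {tok}")
    }

    fn main() {
        assert_eq!(describe("Dedent"), "Got unexpected token Dedent");
    }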

View File

@ -133,10 +133,10 @@ impl<'a> FStringParser<'a> {
) )
} else { } else {
Box::new(self.expr(ExprKind::Constant { Box::new(self.expr(ExprKind::Constant {
value: spec_expression.to_owned().into(), value: spec_expression.clone().into(),
kind: None, kind: None,
})) }))
}) });
} }
'(' | '{' | '[' => { '(' | '{' | '[' => {
expression.push(ch); expression.push(ch);
@ -254,7 +254,7 @@ impl<'a> FStringParser<'a> {
values.push(self.expr(ExprKind::Constant { values.push(self.expr(ExprKind::Constant {
value: content.into(), value: content.into(),
kind: None, kind: None,
})) }));
} }
let s = match values.len() { let s = match values.len() {
@ -270,7 +270,7 @@ impl<'a> FStringParser<'a> {
} }
fn parse_fstring_expr(source: &str) -> Result<Expr, ParseError> { fn parse_fstring_expr(source: &str) -> Result<Expr, ParseError> {
let fstring_body = format!("({})", source); let fstring_body = format!("({source})");
parse_expression(&fstring_body) parse_expression(&fstring_body)
} }

View File

@ -54,38 +54,35 @@ pub fn parse_args(func_args: Vec<FunctionArgument>) -> Result<ArgumentList, Lexi
let mut keyword_names = HashSet::with_capacity_and_hasher(func_args.len(), RandomState::new()); let mut keyword_names = HashSet::with_capacity_and_hasher(func_args.len(), RandomState::new());
for (name, value) in func_args { for (name, value) in func_args {
match name { if let Some((location, name)) = name {
Some((location, name)) => { if let Some(keyword_name) = &name {
if let Some(keyword_name) = &name { if keyword_names.contains(keyword_name) {
if keyword_names.contains(keyword_name) {
return Err(LexicalError {
error: LexicalErrorType::DuplicateKeywordArgumentError,
location,
});
}
keyword_names.insert(keyword_name.clone());
}
keywords.push(ast::Keyword::new(
location,
ast::KeywordData {
arg: name.map(|name| name.into()),
value: Box::new(value),
},
));
}
None => {
// Allow starred args after keyword arguments.
if !keywords.is_empty() && !is_starred(&value) {
return Err(LexicalError { return Err(LexicalError {
error: LexicalErrorType::PositionalArgumentError, error: LexicalErrorType::DuplicateKeywordArgumentError,
location: value.location, location,
}); });
} }
args.push(value); keyword_names.insert(keyword_name.clone());
} }
keywords.push(ast::Keyword::new(
location,
ast::KeywordData {
arg: name.map(String::into),
value: Box::new(value),
},
));
} else {
// Allow starred args after keyword arguments.
if !keywords.is_empty() && !is_starred(&value) {
return Err(LexicalError {
error: LexicalErrorType::PositionalArgumentError,
location: value.location,
});
}
args.push(value);
} }
} }
Ok(ArgumentList { args, keywords }) Ok(ArgumentList { args, keywords })

View File

@ -169,7 +169,7 @@ where
self.shift(); self.shift();
} else { } else {
// Transform MAC EOL into \n // Transform MAC EOL into \n
self.chr0 = Some('\n') self.chr0 = Some('\n');
} }
} else { } else {
break; break;
@ -189,7 +189,7 @@ where
chars: input, chars: input,
at_begin_of_line: true, at_begin_of_line: true,
nesting: 0, nesting: 0,
indentation_stack: vec![Default::default()], indentation_stack: vec![IndentationLevel::default()],
pending: Vec::new(), pending: Vec::new(),
chr0: None, chr0: None,
location: start, location: start,
@ -217,10 +217,10 @@ where
let mut saw_f = false; let mut saw_f = false;
loop { loop {
// Detect r"", f"", b"" and u"" // Detect r"", f"", b"" and u""
if !(saw_b || saw_u || saw_f) && matches!(self.chr0, Some('b') | Some('B')) { if !(saw_b || saw_u || saw_f) && matches!(self.chr0, Some('b' | 'B')) {
saw_b = true; saw_b = true;
} else if !(saw_b || saw_r || saw_u || saw_f) } else if !(saw_b || saw_r || saw_u || saw_f)
&& matches!(self.chr0, Some('u') | Some('U')) && matches!(self.chr0, Some('u' | 'U'))
{ {
saw_u = true; saw_u = true;
} else if !(saw_r || saw_u) && (self.chr0 == Some('r') || self.chr0 == Some('R')) { } else if !(saw_r || saw_u) && (self.chr0 == Some('r') || self.chr0 == Some('R')) {
@ -291,7 +291,7 @@ where
match e.kind() { match e.kind() {
IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX, IntErrorKind::PosOverflow | IntErrorKind::NegOverflow => i128::MAX,
_ => return Err(LexicalError { _ => return Err(LexicalError {
error: LexicalErrorType::OtherError(format!("{:?}", e)), error: LexicalErrorType::OtherError(format!("{e:?}")),
location: start_pos, location: start_pos,
}), }),
} }
@ -379,7 +379,7 @@ where
/// Consume a sequence of numbers with the given radix, /// Consume a sequence of numbers with the given radix,
/// the digits can be decorated with underscores /// the digits can be decorated with underscores
/// like this: '1_2_3_4' == '1234' /// like this: `'1_2_3_4'` == `'1234'`
fn radix_run(&mut self, radix: u32) -> String { fn radix_run(&mut self, radix: u32) -> String {
let mut value_text = String::new(); let mut value_text = String::new();
@ -412,7 +412,7 @@ where
2 => matches!(c, Some('0'..='1')), 2 => matches!(c, Some('0'..='1')),
8 => matches!(c, Some('0'..='7')), 8 => matches!(c, Some('0'..='7')),
10 => matches!(c, Some('0'..='9')), 10 => matches!(c, Some('0'..='9')),
16 => matches!(c, Some('0'..='9') | Some('a'..='f') | Some('A'..='F')), 16 => matches!(c, Some('0'..='9' | 'a'..='f' | 'A'..='F')),
other => unimplemented!("Radix not implemented: {}", other), other => unimplemented!("Radix not implemented: {}", other),
} }
} }
@ -420,8 +420,8 @@ where
/// Test if we face '[eE][-+]?[0-9]+' /// Test if we face '[eE][-+]?[0-9]+'
fn at_exponent(&self) -> bool { fn at_exponent(&self) -> bool {
match self.chr0 { match self.chr0 {
Some('e') | Some('E') => match self.chr1 { Some('e' | 'E') => match self.chr1 {
Some('+') | Some('-') => matches!(self.chr2, Some('0'..='9')), Some('+' | '-') => matches!(self.chr2, Some('0'..='9')),
Some('0'..='9') => true, Some('0'..='9') => true,
_ => false, _ => false,
}, },
@ -441,11 +441,10 @@ where
start_loc.go_left(); start_loc.go_left();
loop { loop {
match self.chr0 { match self.chr0 {
Some('\n') => return None, Some('\n') | None => return None,
None => return None,
Some(c) => { Some(c) => {
if let (true, Some(p)) = (is_comment, prefix.next()) { if let (true, Some(p)) = (is_comment, prefix.next()) {
is_comment = is_comment && c == p is_comment = is_comment && c == p;
} else { } else {
// done checking prefix, if is comment then return the spanned // done checking prefix, if is comment then return the spanned
if is_comment { if is_comment {
@ -496,7 +495,7 @@ where
octet_content.push(first); octet_content.push(first);
while octet_content.len() < 3 { while octet_content.len() < 3 {
if let Some('0'..='7') = self.chr0 { if let Some('0'..='7') = self.chr0 {
octet_content.push(self.next_char().unwrap()) octet_content.push(self.next_char().unwrap());
} else { } else {
break; break;
} }
@ -566,7 +565,7 @@ where
} else if is_raw { } else if is_raw {
string_content.push('\\'); string_content.push('\\');
if let Some(c) = self.next_char() { if let Some(c) = self.next_char() {
string_content.push(c) string_content.push(c);
} else { } else {
return Err(LexicalError { return Err(LexicalError {
error: LexicalErrorType::StringError, error: LexicalErrorType::StringError,
@ -599,7 +598,7 @@ where
Some('u') if !is_bytes => string_content.push(self.unicode_literal(4)?), Some('u') if !is_bytes => string_content.push(self.unicode_literal(4)?),
Some('U') if !is_bytes => string_content.push(self.unicode_literal(8)?), Some('U') if !is_bytes => string_content.push(self.unicode_literal(8)?),
Some('N') if !is_bytes => { Some('N') if !is_bytes => {
string_content.push(self.parse_unicode_name()?) string_content.push(self.parse_unicode_name()?);
} }
Some(c) => { Some(c) => {
string_content.push('\\'); string_content.push('\\');
@ -663,7 +662,7 @@ where
Ok((start_pos, tok, end_pos)) Ok((start_pos, tok, end_pos))
} }
fn is_identifier_start(&self, c: char) -> bool { fn is_identifier_start(c: char) -> bool {
match c { match c {
'_' | 'a'..='z' | 'A'..='Z' => true, '_' | 'a'..='z' | 'A'..='Z' => true,
'+' | '-' | '*' | '/' | '=' | ' ' | '<' | '>' => false, '+' | '-' | '*' | '/' | '=' | ' ' | '<' | '>' => false,
@ -835,7 +834,7 @@ where
// Check if we have some character: // Check if we have some character:
if let Some(c) = self.chr0 { if let Some(c) = self.chr0 {
// First check identifier: // First check identifier:
if self.is_identifier_start(c) { if Self::is_identifier_start(c) {
let identifier = self.lex_identifier()?; let identifier = self.lex_identifier()?;
self.emit(identifier); self.emit(identifier);
} else if is_emoji_presentation(c) { } else if is_emoji_presentation(c) {
@ -899,16 +898,13 @@ where
'=' => { '=' => {
let tok_start = self.get_pos(); let tok_start = self.get_pos();
self.next_char(); self.next_char();
match self.chr0 { if let Some('=') = self.chr0 {
Some('=') => { self.next_char();
self.next_char(); let tok_end = self.get_pos();
let tok_end = self.get_pos(); self.emit((tok_start, Tok::EqEqual, tok_end));
self.emit((tok_start, Tok::EqEqual, tok_end)); } else {
} let tok_end = self.get_pos();
_ => { self.emit((tok_start, Tok::Equal, tok_end));
let tok_end = self.get_pos();
self.emit((tok_start, Tok::Equal, tok_end));
}
} }
} }
'+' => { '+' => {
@ -934,16 +930,13 @@ where
} }
Some('*') => { Some('*') => {
self.next_char(); self.next_char();
match self.chr0 { if let Some('=') = self.chr0 {
Some('=') => { self.next_char();
self.next_char(); let tok_end = self.get_pos();
let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleStarEqual, tok_end));
self.emit((tok_start, Tok::DoubleStarEqual, tok_end)); } else {
} let tok_end = self.get_pos();
_ => { self.emit((tok_start, Tok::DoubleStar, tok_end));
let tok_end = self.get_pos();
self.emit((tok_start, Tok::DoubleStar, tok_end));
}
} }
} }
_ => { _ => {
@ -963,16 +956,13 @@ where
} }
Some('/') => { Some('/') => {
self.next_char(); self.next_char();
match self.chr0 { if let Some('=') = self.chr0 {
Some('=') => { self.next_char();
self.next_char(); let tok_end = self.get_pos();
let tok_end = self.get_pos(); self.emit((tok_start, Tok::DoubleSlashEqual, tok_end));
self.emit((tok_start, Tok::DoubleSlashEqual, tok_end)); } else {
} let tok_end = self.get_pos();
_ => { self.emit((tok_start, Tok::DoubleSlash, tok_end));
let tok_end = self.get_pos();
self.emit((tok_start, Tok::DoubleSlash, tok_end));
}
} }
} }
_ => { _ => {
@ -1141,16 +1131,13 @@ where
match self.chr0 { match self.chr0 {
Some('<') => { Some('<') => {
self.next_char(); self.next_char();
match self.chr0 { if let Some('=') = self.chr0 {
Some('=') => { self.next_char();
self.next_char(); let tok_end = self.get_pos();
let tok_end = self.get_pos(); self.emit((tok_start, Tok::LeftShiftEqual, tok_end));
self.emit((tok_start, Tok::LeftShiftEqual, tok_end)); } else {
} let tok_end = self.get_pos();
_ => { self.emit((tok_start, Tok::LeftShift, tok_end));
let tok_end = self.get_pos();
self.emit((tok_start, Tok::LeftShift, tok_end));
}
} }
} }
Some('=') => { Some('=') => {
@ -1170,16 +1157,13 @@ where
match self.chr0 { match self.chr0 {
Some('>') => { Some('>') => {
self.next_char(); self.next_char();
match self.chr0 { if let Some('=') = self.chr0 {
Some('=') => { self.next_char();
self.next_char(); let tok_end = self.get_pos();
let tok_end = self.get_pos(); self.emit((tok_start, Tok::RightShiftEqual, tok_end));
self.emit((tok_start, Tok::RightShiftEqual, tok_end)); } else {
} let tok_end = self.get_pos();
_ => { self.emit((tok_start, Tok::RightShift, tok_end));
let tok_end = self.get_pos();
self.emit((tok_start, Tok::RightShift, tok_end));
}
} }
} }
Some('=') => { Some('=') => {
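The repeated lexer rewrites above collapse a two-arm `match` on `self.chr0` into `if let ... else`, matching the removal of `clippy::single_match_else` from the allow list. In isolation, with a hypothetical helper for illustration only, the pattern is:

    fn classify(next: Option<char>) -> &'static str {
        if let Some('=') = next {
            "EqEqual"
        } else {
            "Equal"
        }
    }

    fn main() {
        assert_eq!(classify(Some('=')), "EqEqual");
        assert_eq!(classify(Some(' ')), "Equal");
    }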

View File

@@ -15,27 +15,21 @@
//!
//! ```
-#![deny(clippy::all)]
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all,
+)]
#![warn(clippy::pedantic)]
#![allow(
-    clippy::default_trait_access,
-    clippy::doc_markdown,
    clippy::enum_glob_use,
    clippy::fn_params_excessive_bools,
-    clippy::if_not_else,
-    clippy::implicit_clone,
-    clippy::match_same_arms,
    clippy::missing_errors_doc,
    clippy::missing_panics_doc,
    clippy::module_name_repetitions,
-    clippy::must_use_candidate,
-    clippy::redundant_closure_for_method_calls,
-    clippy::semicolon_if_nothing_returned,
-    clippy::single_match_else,
    clippy::too_many_lines,
-    clippy::uninlined_format_args,
-    clippy::unnested_or_patterns,
-    clippy::unused_self,
    clippy::wildcard_imports,
)]
@@ -51,8 +45,15 @@ pub mod lexer;
pub mod mode;
pub mod parser;
lalrpop_mod!(
-    #[allow(clippy::all, clippy::pedantic)]
-    #[allow(unused)]
+    #[allow(
+        future_incompatible,
+        let_underscore,
+        nonstandard_style,
+        rust_2024_compatibility,
+        unused,
+        clippy::all,
+        clippy::pedantic,
+    )]
    python
);
pub mod token;
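
The commit's theme is visible in this file: the crate-wide deny list grows beyond clippy::all while most of the previously allowed pedantic lints are dropped. A small sketch of how these three crate-level attributes layer; the lint names are chosen for illustration and are not the full set from the diff:

// Sketch of crate-level lint layering, assuming illustrative lint choices.
#![deny(nonstandard_style, clippy::all)] // violations of these groups fail the build
#![warn(clippy::pedantic)]               // the stricter group only warns
#![allow(clippy::too_many_lines)]        // naming a lint directly overrides its group level

fn main() {
    println!("deny/warn/allow compose per lint; the most specific attribute wins");
}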

View File

@@ -6,6 +6,7 @@
//! expression.
use std::iter;
+use nac3ast::Location;
use crate::ast::{self, FileName};
use crate::error::ParseError;
@@ -63,7 +64,7 @@ pub fn parse_program(source: &str, file: FileName) -> Result<ast::Suite, ParseEr
///
/// ```
pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
-    parse(source, Mode::Expression, Default::default()).map(|top| match top {
+    parse(source, Mode::Expression, FileName::default()).map(|top| match top {
        ast::Mod::Expression { body } => *body,
        _ => unreachable!(),
    })
@@ -72,7 +73,7 @@ pub fn parse_expression(source: &str) -> Result<ast::Expr, ParseError> {
// Parse a given source code
pub fn parse(source: &str, mode: Mode, file: FileName) -> Result<ast::Mod, ParseError> {
    let lxr = lexer::make_tokenizer(source, file);
-    let marker_token = (Default::default(), mode.to_marker(), Default::default());
+    let marker_token = (Location::default(), mode.to_marker(), Location::default());
    let tokenizer = iter::once(Ok(marker_token)).chain(lxr);
    python::TopParser::new()
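
Both hunks replace Default::default() with the concrete type's default(), the spelling clippy::default_trait_access asks for (that lint also disappears from the allow list above). A tiny illustration with a stand-in newtype; this FileName is not the actual nac3ast definition:

// Stand-in newtype for illustration only.
#[derive(Debug, Default, PartialEq)]
struct FileName(String);

fn main() {
    // Both forms construct the same value; naming the type keeps the call site explicit.
    let implicit: FileName = Default::default();
    let explicit = FileName::default();
    assert_eq!(implicit, explicit);
}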

View File

@@ -112,14 +112,14 @@ impl fmt::Display for Tok {
        use Tok::*;
        match self {
            Name { name } => write!(f, "'{}'", ast::get_str_from_ref(&ast::get_str_ref_lock(), *name)),
-           Int { value } => if *value != i128::MAX { write!(f, "'{}'", value) } else { write!(f, "'#OFL#'") },
-           Float { value } => write!(f, "'{}'", value),
-           Complex { real, imag } => write!(f, "{}j{}", real, imag),
+           Int { value } => if *value == i128::MAX { write!(f, "'#OFL#'") } else { write!(f, "'{value}'") },
+           Float { value } => write!(f, "'{value}'"),
+           Complex { real, imag } => write!(f, "{real}j{imag}"),
            String { value, is_fstring } => {
                if *is_fstring {
-                   write!(f, "f")?
+                   write!(f, "f")?;
                }
-               write!(f, "{:?}", value)
+               write!(f, "{value:?}")
            }
            Bytes { value } => {
                write!(f, "b\"")?;
@@ -129,7 +129,7 @@ impl fmt::Display for Tok {
                        10 => f.write_str("\\n")?,
                        13 => f.write_str("\\r")?,
                        32..=126 => f.write_char(*i as char)?,
-                       _ => write!(f, "\\x{:02x}", i)?,
+                       _ => write!(f, "\\x{i:02x}")?,
                    }
                }
                f.write_str("\"")

View File

@@ -1,7 +1,12 @@
-#![deny(clippy::all)]
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all,
+)]
#![warn(clippy::pedantic)]
#![allow(
-    clippy::cast_possible_truncation,
    clippy::too_many_lines,
    clippy::wildcard_imports,
)]
@@ -17,6 +22,7 @@ use inkwell::{
use parking_lot::{Mutex, RwLock};
use std::{collections::HashMap, fs, path::Path, sync::Arc};
use std::collections::HashSet;
+use std::num::NonZeroUsize;
use nac3core::{
    codegen::{
@@ -26,7 +32,7 @@ use nac3core::{
    symbol_resolver::SymbolResolver,
    toplevel::{
        composer::{ComposerConfig, TopLevelComposer},
        helper::parse_parameter_default_value,
        type_annotation::*,
        TopLevelDef,
    },
@@ -112,7 +118,7 @@ fn handle_typevar_definition(
        unifier,
        primitives,
        x,
-       HashMap::default(),
+       HashMap::new(),
    )?;
    get_type_from_type_annotation_kinds(
        def_list, unifier, &ty, &mut None
@@ -153,7 +159,7 @@ fn handle_typevar_definition(
        unifier,
        primitives,
        &args[1],
-       HashMap::default(),
+       HashMap::new(),
    )?;
    let constraint = get_type_from_type_annotation_kinds(
        def_list, unifier, &ty, &mut None
@@ -279,10 +285,10 @@ fn main() {
    let threads = if is_multithreaded() {
        if threads == 0 {
            std::thread::available_parallelism()
-               .map(|threads| threads.get() as u32)
-               .unwrap_or(1u32)
+               .map(NonZeroUsize::get)
+               .unwrap_or(1usize)
        } else {
-           threads
+           threads as usize
        }
    } else {
        if threads != 1 {
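
The last hunk keeps the thread count as usize throughout and lets std::thread::available_parallelism supply the default. A hedged sketch of that selection logic in isolation; multithreaded and requested stand in for the real nac3standalone configuration values:

// Sketch of the thread-count selection shown in the hunk above.
use std::num::NonZeroUsize;

fn effective_threads(requested: u32, multithreaded: bool) -> usize {
    if multithreaded {
        if requested == 0 {
            // Fall back to the detected parallelism, defaulting to 1 if detection fails.
            std::thread::available_parallelism().map(NonZeroUsize::get).unwrap_or(1)
        } else {
            requested as usize
        }
    } else {
        1
    }
}

fn main() {
    println!("using {} compiler threads", effective_threads(0, true));
}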

View File

@@ -1,9 +1,11 @@
-#![deny(clippy::all)]
-#![warn(clippy::pedantic)]
-#![allow(
-    clippy::semicolon_if_nothing_returned,
-    clippy::uninlined_format_args,
+#![deny(
+    future_incompatible,
+    let_underscore,
+    nonstandard_style,
+    rust_2024_compatibility,
+    clippy::all,
)]
+#![warn(clippy::pedantic)]
use std::env;
@@ -36,17 +38,17 @@ pub extern "C" fn rtio_get_counter() -> i64 {
#[no_mangle]
pub extern "C" fn rtio_output(target: i32, data: i32) {
-    println!("rtio_output @{} target={:04x} data={}", unsafe { NOW }, target, data);
+    println!("rtio_output @{} target={target:04x} data={data}", unsafe { NOW });
}
#[no_mangle]
pub extern "C" fn print_int32(x: i32) {
-    println!("print_int32: {}", x);
+    println!("print_int32: {x}");
}
#[no_mangle]
pub extern "C" fn print_int64(x: i64) {
-    println!("print_int64: {}", x);
+    println!("print_int64: {x}");
}
#[no_mangle]
@@ -54,12 +56,11 @@ pub extern "C" fn __nac3_personality(_state: u32, _exception_object: u32, _conte
    unimplemented!();
}
fn main() {
    let filename = env::args().nth(1).unwrap();
    unsafe {
        let lib = libloading::Library::new(filename).unwrap();
-       let func: libloading::Symbol<unsafe extern fn()> = lib.get(b"__modinit__").unwrap();
-       func()
+       let func: libloading::Symbol<unsafe extern "C" fn()> = lib.get(b"__modinit__").unwrap();
+       func();
    }
}