diff --git a/Cargo.lock b/Cargo.lock
index a06f5021..1f976cd2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -500,15 +500,6 @@ dependencies = [
  "either",
 ]
 
-[[package]]
-name = "itertools"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57"
-dependencies = [
- "either",
-]
-
 [[package]]
 name = "itertools"
 version = "0.12.0"
@@ -650,6 +641,7 @@ name = "nac3artiq"
 version = "0.1.0"
 dependencies = [
  "inkwell",
+ "itertools 0.12.0",
  "nac3core",
  "nac3ld",
  "nac3parser",
diff --git a/nac3artiq/Cargo.toml b/nac3artiq/Cargo.toml
index 1b4788f2..133ff283 100644
--- a/nac3artiq/Cargo.toml
+++ b/nac3artiq/Cargo.toml
@@ -9,6 +9,7 @@ name = "nac3artiq"
 crate-type = ["cdylib"]
 
 [dependencies]
+itertools = "0.12"
 pyo3 = { version = "0.20", features = ["extension-module"] }
 parking_lot = "0.12"
 tempfile = "3.8"
diff --git a/nac3artiq/src/lib.rs b/nac3artiq/src/lib.rs
index 59b3a91d..42e7b38d 100644
--- a/nac3artiq/src/lib.rs
+++ b/nac3artiq/src/lib.rs
@@ -13,6 +13,7 @@ use inkwell::{
     targets::*,
     OptimizationLevel,
 };
+use itertools::Itertools;
 use nac3core::codegen::{CodeGenLLVMOptions, CodeGenTargetMachineOptions, gen_func_impl};
 use nac3core::toplevel::builtins::get_exn_constructor;
 use nac3core::typecheck::typedef::{TypeEnum, Unifier};
@@ -470,7 +471,7 @@ impl Nac3 {
 
         if let Err(e) = composer.start_analysis(true) {
             // report error of __modinit__ separately
-            return if e.contains("") {
+            return if e.iter().any(|err| err.contains("")) {
                 let msg = Self::report_modinit(
                     &arg_names,
                     method_name,
                 );
                 Err(CompileError::new_err(format!(
                     "compilation failed\n----------\n{}",
-                    msg.unwrap_or(e)
+                    msg.unwrap_or(e.iter().sorted().join("\n----------\n"))
                 )))
             } else {
-                Err(CompileError::new_err(format!(
-                    "compilation failed\n----------\n{e}"
-                )))
+                Err(CompileError::new_err(
+                    format!(
+                        "compilation failed\n----------\n{}",
+                        e.iter().sorted().join("\n----------\n"),
+                    ),
+                ))
             }
         }
         let top_level = Arc::new(composer.make_top_level_context());
diff --git a/nac3artiq/src/symbol_resolver.rs b/nac3artiq/src/symbol_resolver.rs
index bb5f6b09..a5440a12 100644
--- a/nac3artiq/src/symbol_resolver.rs
+++ b/nac3artiq/src/symbol_resolver.rs
@@ -15,7 +15,7 @@ use pyo3::{
     PyAny, PyObject, PyResult, Python,
 };
 use std::{
-    collections::HashMap,
+    collections::{HashMap, HashSet},
     sync::{
         Arc,
         atomic::{AtomicBool, Ordering::Relaxed}
     }
@@ -1172,17 +1172,21 @@ impl SymbolResolver for Resolver {
         })
     }
 
-    fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, String> {
+    fn get_identifier_def(&self, id: StrRef) -> Result<DefinitionId, HashSet<String>> {
         {
             let id_to_def = self.0.id_to_def.read();
             id_to_def.get(&id).copied().ok_or_else(String::new)
         }
         .or_else(|_| {
-            let py_id =
-                self.0.name_to_pyid.get(&id).ok_or(format!("Undefined identifier `{id}`"))?;
-            let result = self.0.pyid_to_def.read().get(py_id).copied().ok_or(format!(
-                "`{id}` is not registered with NAC3 (@nac3 decorator missing?)"
-            ))?;
+            let py_id = self.0.name_to_pyid.get(&id)
+                .ok_or_else(|| HashSet::from([
+                    format!("Undefined identifier `{id}`"),
+                ]))?;
+            let result = self.0.pyid_to_def.read().get(py_id)
+                .copied()
+                .ok_or_else(|| HashSet::from([
+                    format!("`{id}` is not registered with NAC3 (@nac3 decorator missing?)"),
+                ]))?;
             self.0.id_to_def.write().insert(id, result);
             Ok(result)
         })
diff --git a/nac3core/src/codegen/expr.rs b/nac3core/src/codegen/expr.rs
index 6615a696..fc71c8e0 100644
--- a/nac3core/src/codegen/expr.rs
+++ b/nac3core/src/codegen/expr.rs
@@ -1649,7 
+1649,7 @@ pub fn gen_expr<'ctx, G: CodeGenerator>( let fun = ctx .resolver .get_identifier_def(*id) - .map_err(|e| format!("{} (at {})", e, func.location))?; + .map_err(|e| format!("{} (at {})", e.iter().next().unwrap(), func.location))?; return Ok(generator .gen_call(ctx, None, (&signature, fun), params)? .map(Into::into)); diff --git a/nac3core/src/codegen/test.rs b/nac3core/src/codegen/test.rs index de28ec4e..c3a8c719 100644 --- a/nac3core/src/codegen/test.rs +++ b/nac3core/src/codegen/test.rs @@ -63,12 +63,14 @@ impl SymbolResolver for Resolver { unimplemented!() } - fn get_identifier_def(&self, id: StrRef) -> Result { + fn get_identifier_def(&self, id: StrRef) -> Result> { self.id_to_def .read() .get(&id) .cloned() - .ok_or_else(|| format!("cannot find symbol `{}`", id)) + .ok_or_else(|| HashSet::from([ + format!("cannot find symbol `{}`", id), + ])) } fn get_string_id(&self, _: &str) -> i32 { diff --git a/nac3core/src/symbol_resolver.rs b/nac3core/src/symbol_resolver.rs index 5224659d..54a243a6 100644 --- a/nac3core/src/symbol_resolver.rs +++ b/nac3core/src/symbol_resolver.rs @@ -1,6 +1,6 @@ use std::fmt::Debug; use std::sync::Arc; -use std::{collections::HashMap, fmt::Display}; +use std::{collections::HashMap, collections::HashSet, fmt::Display}; use std::rc::Rc; use crate::typecheck::typedef::TypeEnum; @@ -296,7 +296,7 @@ pub trait SymbolResolver { ) -> Result; /// Get the top-level definition of identifiers. - fn get_identifier_def(&self, str: StrRef) -> Result; + fn get_identifier_def(&self, str: StrRef) -> Result>; fn get_symbol_value<'ctx>( &self, @@ -341,7 +341,7 @@ pub fn parse_type_annotation( unifier: &mut Unifier, primitives: &PrimitiveStore, expr: &Expr, -) -> Result { +) -> Result> { use nac3parser::ast::ExprKind::*; let ids = IDENTIFIER_ID.with(|ids| *ids); let int32_id = ids[0]; @@ -379,10 +379,12 @@ pub fn parse_type_annotation( let def = top_level_defs[obj_id.0].read(); if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def { if !type_vars.is_empty() { - return Err(format!( - "Unexpected number of type parameters: expected {} but got 0", - type_vars.len() - )); + return Err(HashSet::from([ + format!( + "Unexpected number of type parameters: expected {} but got 0", + type_vars.len() + ), + ])) } let fields = chain( fields.iter().map(|(k, v, m)| (*k, (*v, *m))), @@ -395,16 +397,22 @@ pub fn parse_type_annotation( params: HashMap::default(), })) } else { - Err(format!("Cannot use function name as type at {loc}")) + Err(HashSet::from([ + format!("Cannot use function name as type at {loc}"), + ])) } } else { let ty = resolver .get_symbol_type(unifier, top_level_defs, primitives, *id) - .map_err(|e| format!("Unknown type annotation at {loc}: {e}"))?; + .map_err(|e| HashSet::from([ + format!("Unknown type annotation at {loc}: {e}"), + ]))?; if let TypeEnum::TVar { .. } = &*unifier.get_ty(ty) { Ok(ty) } else { - Err(format!("Unknown type annotation {id} at {loc}")) + Err(HashSet::from([ + format!("Unknown type annotation {id} at {loc}"), + ])) } } } @@ -427,7 +435,9 @@ pub fn parse_type_annotation( .collect::, _>>()?; Ok(unifier.add_ty(TypeEnum::TTuple { ty })) } else { - Err("Expected multiple elements for tuple".into()) + Err(HashSet::from([ + "Expected multiple elements for tuple".into() + ])) } } else { let types = if let Tuple { elts, .. } = &slice.node { @@ -444,11 +454,13 @@ pub fn parse_type_annotation( let def = top_level_defs[obj_id.0].read(); if let TopLevelDef::Class { fields, methods, type_vars, .. 
} = &*def { if types.len() != type_vars.len() { - return Err(format!( - "Unexpected number of type parameters: expected {} but got {}", - type_vars.len(), - types.len() - )); + return Err(HashSet::from([ + format!( + "Unexpected number of type parameters: expected {} but got {}", + type_vars.len(), + types.len() + ), + ])) } let mut subst = HashMap::new(); for (var, ty) in izip!(type_vars.iter(), types.iter()) { @@ -472,7 +484,9 @@ pub fn parse_type_annotation( })); Ok(unifier.add_ty(TypeEnum::TObj { obj_id, fields, params: subst })) } else { - Err("Cannot use function name as type".into()) + Err(HashSet::from([ + "Cannot use function name as type".into(), + ])) } } }; @@ -483,10 +497,14 @@ pub fn parse_type_annotation( if let Name { id, .. } = &value.node { subscript_name_handle(id, slice, unifier) } else { - Err(format!("unsupported type expression at {}", expr.location)) + Err(HashSet::from([ + format!("unsupported type expression at {}", expr.location), + ])) } } - _ => Err(format!("unsupported type expression at {}", expr.location)), + _ => Err(HashSet::from([ + format!("unsupported type expression at {}", expr.location), + ])), } } @@ -497,7 +515,7 @@ impl dyn SymbolResolver + Send + Sync { unifier: &mut Unifier, primitives: &PrimitiveStore, expr: &Expr, - ) -> Result { + ) -> Result> { parse_type_annotation(self, top_level_defs, unifier, primitives, expr) } diff --git a/nac3core/src/toplevel/composer.rs b/nac3core/src/toplevel/composer.rs index d47be350..e3fb80b4 100644 --- a/nac3core/src/toplevel/composer.rs +++ b/nac3core/src/toplevel/composer.rs @@ -362,7 +362,7 @@ impl TopLevelComposer { } } - pub fn start_analysis(&mut self, inference: bool) -> Result<(), String> { + pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet> { self.analyze_top_level_class_type_var()?; self.analyze_top_level_class_bases()?; self.analyze_top_level_class_fields_methods()?; @@ -374,7 +374,7 @@ impl TopLevelComposer { } /// step 1, analyze the type vars associated with top level class - fn analyze_top_level_class_type_var(&mut self) -> Result<(), String> { + fn analyze_top_level_class_type_var(&mut self) -> Result<(), HashSet> { let def_list = &self.definition_ast_list; let temp_def_list = self.extract_def_list(); let unifier = self.unifier.borrow_mut(); @@ -417,10 +417,12 @@ impl TopLevelComposer { } => { if is_generic { - return Err(format!( - "only single Generic[...] is allowed (at {})", - b.location - )); + return Err(HashSet::from([ + format!( + "only single Generic[...] is allowed (at {})", + b.location + ), + ])) } is_generic = true; @@ -459,10 +461,12 @@ impl TopLevelComposer { }) }; if !all_unique_type_var { - return Err(format!( - "duplicate type variable occurs (at {})", - slice.location - )); + return Err(HashSet::from([ + format!( + "duplicate type variable occurs (at {})", + slice.location + ), + ])) } // add to TopLevelDef @@ -481,11 +485,11 @@ impl TopLevelComposer { continue; } if let Err(e) = analyze(class_def, class_ast) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } Ok(()) } @@ -493,7 +497,7 @@ impl TopLevelComposer { /// step 2, base classes. /// now that the type vars of all classes are done, handle base classes and /// put Self class into the ancestors list. 
We only allow single inheritance - fn analyze_top_level_class_bases(&mut self) -> Result<(), String> { + fn analyze_top_level_class_bases(&mut self) -> Result<(), HashSet> { if self.unifier.top_level.is_none() { let ctx = Arc::new(self.make_top_level_context()); self.unifier.top_level = Some(ctx); @@ -542,11 +546,13 @@ impl TopLevelComposer { } if has_base { - return Err(format!( - "a class definition can only have at most one base class \ - declaration and one generic declaration (at {})", - b.location - )); + return Err(HashSet::from([ + format!( + "a class definition can only have at most one base class \ + declaration and one generic declaration (at {})", + b.location + ), + ])) } has_base = true; @@ -565,10 +571,12 @@ impl TopLevelComposer { if let TypeAnnotation::CustomClass { .. } = &base_ty { class_ancestors.push(base_ty); } else { - return Err(format!( - "class base declaration can only be custom class (at {})", - b.location, - )); + return Err(HashSet::from([ + format!( + "class base declaration can only be custom class (at {})", + b.location, + ), + ])) } } Ok(()) @@ -581,16 +589,16 @@ impl TopLevelComposer { continue; } if let Err(e) = get_direct_parents(class_def, class_ast) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } // second, get all ancestors let mut ancestors_store: HashMap> = HashMap::default(); - let mut get_all_ancestors = |class_def: &Arc>| { + let mut get_all_ancestors = |class_def: &Arc>| -> Result<(), HashSet> { let class_def = class_def.read(); let (class_ancestors, class_id) = { if let TopLevelDef::Class { ancestors, object_id, .. } = &*class_def { @@ -615,11 +623,11 @@ impl TopLevelComposer { continue; } if let Err(e) = get_all_ancestors(class_def) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } // insert the ancestors to the def list @@ -657,7 +665,9 @@ impl TopLevelComposer { stmt.node, ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. 
} ) { - return Err("Classes inherited from exception should have no custom fields/methods".into()); + return Err(HashSet::from([ + "Classes inherited from exception should have no custom fields/methods".into() + ])) } } } else { @@ -680,7 +690,7 @@ impl TopLevelComposer { } /// step 3, class fields and methods - fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), String> { + fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), HashSet> { let temp_def_list = self.extract_def_list(); let primitives = &self.primitives_ty; let def_ast_list = &self.definition_ast_list; @@ -703,12 +713,12 @@ impl TopLevelComposer { &mut type_var_to_concrete_def, (&self.keyword_list, &self.core_config), ) { - errors.insert(e); + errors.extend(e); } } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } // handle the inherited methods and fields @@ -752,15 +762,16 @@ impl TopLevelComposer { let mut subst_list = Some(Vec::new()); // unification of previously assigned typevar - let mut unification_helper = |ty, def| { + let mut unification_helper = |ty, def| -> Result<(), HashSet> { let target_ty = get_type_from_type_annotation_kinds(&temp_def_list, unifier, &def, &mut subst_list)?; - unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?; + unifier.unify(ty, target_ty) + .map_err(|e| HashSet::from([e.to_display(unifier).to_string()]))?; Ok(()) }; for (ty, def) in type_var_to_concrete_def { if let Err(e) = unification_helper(ty, def) { - errors.insert(e); + errors.extend(e); } } for ty in subst_list.unwrap() { @@ -787,7 +798,7 @@ impl TopLevelComposer { } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } for (def, _) in def_ast_list.iter().skip(self.builtin_num) { @@ -806,7 +817,7 @@ impl TopLevelComposer { } /// step 4, after class methods are done, top level functions have nothing unknown - fn analyze_top_level_function(&mut self) -> Result<(), String> { + fn analyze_top_level_function(&mut self) -> Result<(), HashSet> { let def_list = &self.definition_ast_list; let keyword_list = &self.keyword_list; let temp_def_list = self.extract_def_list(); @@ -842,11 +853,13 @@ impl TopLevelComposer { if !defined_parameter_name.insert(x.node.arg) || keyword_list.contains(&x.node.arg) { - return Err(format!( - "top level function must have unique parameter names \ - and names should not be the same as the keywords (at {})", - x.location - )); + return Err(HashSet::from([ + format!( + "top level function must have unique parameter names \ + and names should not be the same as the keywords (at {})", + x.location + ), + ])) } } @@ -869,17 +882,17 @@ impl TopLevelComposer { arg_with_default .iter() .rev() - .map(|(x, default)| -> Result { + .map(|(x, default)| -> Result> { let annotation = x .node .annotation .as_ref() - .ok_or_else(|| { + .ok_or_else(|| HashSet::from([ format!( "function parameter `{}` needs type annotation at {}", x.node.arg, x.location - ) - })? + ), + ]))? 
.as_ref(); let type_annotation = parse_ast_to_type_annotation_kinds( @@ -897,7 +910,7 @@ impl TopLevelComposer { let type_vars_within = get_type_var_contained_in_type_annotation(&type_annotation) .into_iter() - .map(|x| -> Result<(u32, Type), String> { + .map(|x| -> Result<(u32, Type), HashSet> { if let TypeAnnotation::TypeVar(ty) = x { Ok((Self::get_var_id(ty, unifier)?, ty)) } else { @@ -934,9 +947,9 @@ impl TopLevelComposer { primitives_store, unifier, ) - .map_err( - |err| format!("{} (at {})", err, x.location), - )?; + .map_err(|err| HashSet::from([ + format!("{} (at {})", err, x.location), + ]))?; v }), }, @@ -965,7 +978,7 @@ impl TopLevelComposer { let type_vars_within = get_type_var_contained_in_type_annotation(&return_ty_annotation) .into_iter() - .map(|x| -> Result<(u32, Type), String> { + .map(|x| -> Result<(u32, Type), HashSet> { if let TypeAnnotation::TypeVar(ty) = x { Ok((Self::get_var_id(ty, unifier)?, ty)) } else { @@ -1007,9 +1020,9 @@ impl TopLevelComposer { ret: return_ty, vars: function_var_map, })); - unifier.unify(*dummy_ty, function_ty).map_err(|e| { - e.at(Some(function_ast.location)).to_display(unifier).to_string() - })?; + unifier.unify(*dummy_ty, function_ty).map_err(|e| HashSet::from([ + e.at(Some(function_ast.location)).to_display(unifier).to_string(), + ]))?; } else { unreachable!("must be both function"); } @@ -1024,11 +1037,11 @@ impl TopLevelComposer { continue; } if let Err(e) = analyze(function_def, function_ast) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } Ok(()) } @@ -1041,7 +1054,7 @@ impl TopLevelComposer { primitives: &PrimitiveStore, type_var_to_concrete_def: &mut HashMap, core_info: (&HashSet, &ComposerConfig), - ) -> Result<(), String> { + ) -> Result<(), HashSet> { let (keyword_list, core_config) = core_info; let mut class_def = class_def.write(); let ( @@ -1092,25 +1105,23 @@ impl TopLevelComposer { if !defined_parameter_name.insert(x.node.arg) || (keyword_list.contains(&x.node.arg) && x.node.arg != zelf) { - return Err(format!( - "top level function must have unique parameter names \ + return Err(HashSet::from([ + format!("top level function must have unique parameter names \ and names should not be the same as the keywords (at {})", - x.location - )); + x.location), + ])) } } if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) { - return Err(format!( - "__init__ method must have a `self` parameter (at {})", - b.location - )); + return Err(HashSet::from([ + format!("__init__ method must have a `self` parameter (at {})", b.location), + ])) } if !defined_parameter_name.contains(&zelf) { - return Err(format!( - "class method must have a `self` parameter (at {})", - b.location - )); + return Err(HashSet::from([ + format!("class method must have a `self` parameter (at {})", b.location), + ])) } let mut result = Vec::new(); @@ -1139,12 +1150,12 @@ impl TopLevelComposer { .node .annotation .as_ref() - .ok_or_else(|| { + .ok_or_else(|| HashSet::from([ format!( "type annotation needed for `{}` at {}", x.node.arg, x.location - ) - })? + ), + ]))? 
.as_ref(); parse_ast_to_type_annotation_kinds( class_resolver, @@ -1181,7 +1192,9 @@ impl TopLevelComposer { None => None, Some(default) => { if name == "self".into() { - return Err(format!("`self` parameter cannot take default value (at {})", x.location)); + return Err(HashSet::from([ + format!("`self` parameter cannot take default value (at {})", x.location), + ])); } Some({ let v = Self::parse_parameter_default_value( @@ -1191,9 +1204,9 @@ impl TopLevelComposer { Self::check_default_param_type( &v, &type_ann, primitives, unifier, ) - .map_err(|err| { - format!("{} (at {})", err, x.location) - })?; + .map_err(|err| HashSet::from([ + format!("{} (at {})", err, x.location), + ]))?; v }) } @@ -1279,7 +1292,7 @@ impl TopLevelComposer { // which should be fine since type within method_type will be subst later unifier .unify(method_dummy_ty, method_type) - .map_err(|e| e.to_display(unifier).to_string())?; + .map_err(|e| HashSet::from([e.to_display(unifier).to_string()]))?; } ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => { if let ast::ExprKind::Name { id: attr, .. } = &target.node { @@ -1325,11 +1338,13 @@ impl TopLevelComposer { for type_var_within in type_vars_within { if let TypeAnnotation::TypeVar(t) = type_var_within { if !class_type_vars_def.contains(&t) { - return Err(format!( - "class fields can only use type \ - vars over which the class is generic (at {})", - annotation.location - )); + return Err(HashSet::from([ + format!( + "class fields can only use type \ + vars over which the class is generic (at {})", + annotation.location + ), + ])) } } else { unreachable!("must be type var annotation"); @@ -1337,26 +1352,32 @@ impl TopLevelComposer { } type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation); } else { - return Err(format!( - "same class fields `{}` defined twice (at {})", - attr, target.location - )); + return Err(HashSet::from([ + format!( + "same class fields `{}` defined twice (at {})", + attr, target.location + ), + ])) } } else { - return Err(format!( - "unsupported statement type in class definition body (at {})", - target.location - )); + return Err(HashSet::from([ + format!( + "unsupported statement type in class definition body (at {})", + target.location + ), + ])) } } ast::StmtKind::Assign { .. } => {}, // we don't class attributes ast::StmtKind::Pass { .. } => {} ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior _ => { - return Err(format!( - "unsupported statement in class definition body (at {})", - b.location - )) + return Err(HashSet::from([ + format!( + "unsupported statement in class definition body (at {})", + b.location + ), + ])) } } } @@ -1369,7 +1390,7 @@ impl TopLevelComposer { unifier: &mut Unifier, _primitives: &PrimitiveStore, type_var_to_concrete_def: &mut HashMap, - ) -> Result<(), String> { + ) -> Result<(), HashSet> { let ( _class_id, class_ancestor_def, @@ -1420,9 +1441,11 @@ impl TopLevelComposer { type_var_to_concrete_def, ); if !ok { - return Err(format!( - "method {class_method_name} has same name as ancestors' method, but incompatible type" - )); + return Err(HashSet::from([ + format!( + "method {class_method_name} has same name as ancestors' method, but incompatible type" + ), + ])) } // mark it as added is_override.insert(*class_method_name); @@ -1457,9 +1480,11 @@ impl TopLevelComposer { // find if there is a fields with the same name in the child class for (class_field_name, ..) 
in &*class_fields_def { if class_field_name == anc_field_name { - return Err(format!( - "field `{class_field_name}` has already declared in the ancestor classes" - )); + return Err(HashSet::from([ + format!( + "field `{class_field_name}` has already declared in the ancestor classes" + ), + ])) } } new_child_fields.push(to_be_added); @@ -1483,7 +1508,7 @@ impl TopLevelComposer { /// step 5, analyze and call type inferencer to fill the `instance_to_stmt` of /// [`TopLevelDef::Function`] - fn analyze_function_instance(&mut self) -> Result<(), String> { + fn analyze_function_instance(&mut self) -> Result<(), HashSet> { // first get the class constructor type correct for the following type check in function body // also do class field instantiation check let init_str_id = "__init__".into(); @@ -1588,9 +1613,9 @@ impl TopLevelComposer { }; constructors.push((i, signature, definition_extension.len())); definition_extension.push((Arc::new(RwLock::new(cons_fun)), None)); - unifier.unify(constructor.unwrap(), signature).map_err(|e| { + unifier.unify(constructor.unwrap(), signature).map_err(|e| HashSet::from([ e.at(Some(ast.as_ref().unwrap().location)).to_display(unifier).to_string() - })?; + ]))?; return Ok(()); } let mut init_id: Option = None; @@ -1618,9 +1643,9 @@ impl TopLevelComposer { ret: self_type, vars: contor_type_vars, })); - unifier.unify(constructor.unwrap(), contor_type).map_err(|e| { + unifier.unify(constructor.unwrap(), contor_type).map_err(|e| HashSet::from([ e.at(Some(ast.as_ref().unwrap().location)).to_display(unifier).to_string() - })?; + ]))?; // class field instantiation check if let (Some(init_id), false) = (init_id, fields.is_empty()) { @@ -1632,12 +1657,14 @@ impl TopLevelComposer { let all_inited = Self::get_all_assigned_field(body.as_slice())?; for (f, _, _) in fields { if !all_inited.contains(f) { - return Err(format!( - "fields `{}` of class `{}` not fully initialized in the initializer (at {})", - f, - class_name, - body[0].location, - )); + return Err(HashSet::from([ + format!( + "fields `{}` of class `{}` not fully initialized in the initializer (at {})", + f, + class_name, + body[0].location, + ), + ])) } } } @@ -1650,11 +1677,11 @@ impl TopLevelComposer { continue; } if let Err(e) = analyze(i, def, ast) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n---------\n")); + return Err(errors) } for (i, signature, id) in constructors { @@ -1869,9 +1896,9 @@ impl TopLevelComposer { if let TypeEnum::TObj { obj_id, .. 
} = &*base { *obj_id } else { - return Err(format!( - "Base type should be a class (at {loc})" - )); + return Err(HashSet::from([ + format!("Base type should be a class (at {loc})"), + ])) } }; let subtype_id = { @@ -1881,9 +1908,11 @@ impl TopLevelComposer { } else { let base_repr = inferencer.unifier.stringify(*base); let subtype_repr = inferencer.unifier.stringify(*subtype); - return Err(format!( - "Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})" - )); + return Err(HashSet::from([ + format!( + "Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})" + ), + ])) } }; let subtype_entry = defs[subtype_id.0].read(); @@ -1893,9 +1922,11 @@ impl TopLevelComposer { if m.is_none() { let base_repr = inferencer.unifier.stringify(*base); let subtype_repr = inferencer.unifier.stringify(*subtype); - return Err(format!( - "Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})" - )); + return Err(HashSet::from([ + format!( + "Expected a subtype of {base_repr}, but got {subtype_repr} (at {loc})" + ), + ])) } } else { unreachable!(); @@ -1918,12 +1949,14 @@ impl TopLevelComposer { &mut |id| format!("typevar{id}"), &mut None, ); - return Err(format!( - "expected return type of `{}` in function `{}` (at {})", - ret_str, - name, - ast.as_ref().unwrap().location - )); + return Err(HashSet::from([ + format!( + "expected return type of `{}` in function `{}` (at {})", + ret_str, + name, + ast.as_ref().unwrap().location + ), + ])) } instance_to_stmt.insert( @@ -1947,11 +1980,11 @@ impl TopLevelComposer { continue; } if let Err(e) = analyze_2(id, def, ast) { - errors.insert(e); + errors.extend(e); } } if !errors.is_empty() { - return Err(errors.into_iter().sorted().join("\n----------\n")); + return Err(errors) } Ok(()) } diff --git a/nac3core/src/toplevel/helper.rs b/nac3core/src/toplevel/helper.rs index 6399f6da..1367543e 100644 --- a/nac3core/src/toplevel/helper.rs +++ b/nac3core/src/toplevel/helper.rs @@ -204,13 +204,13 @@ impl TopLevelComposer { pub fn get_class_method_def_info( class_methods_def: &[(StrRef, Type, DefinitionId)], method_name: StrRef, - ) -> Result<(Type, DefinitionId), String> { + ) -> Result<(Type, DefinitionId), HashSet> { for (name, ty, def_id) in class_methods_def { if name == &method_name { return Ok((*ty, *def_id)); } } - Err(format!("no method {method_name} in the current class")) + Err(HashSet::from([format!("no method {method_name} in the current class")])) } /// get all base class def id of a class, excluding itself. \ @@ -221,7 +221,7 @@ impl TopLevelComposer { pub fn get_all_ancestors_helper( child: &TypeAnnotation, temp_def_list: &[Arc>], - ) -> Result, String> { + ) -> Result, HashSet> { let mut result: Vec = Vec::new(); let mut parent = Self::get_parent(child, temp_def_list); while let Some(p) = parent { @@ -242,7 +242,7 @@ impl TopLevelComposer { if no_cycle { result.push(p); } else { - return Err("cyclic inheritance detected".into()); + return Err(HashSet::from(["cyclic inheritance detected".into()])); } } Ok(result) @@ -272,11 +272,13 @@ impl TopLevelComposer { } /// get the `var_id` of a given `TVar` type - pub fn get_var_id(var_ty: Type, unifier: &mut Unifier) -> Result { + pub fn get_var_id(var_ty: Type, unifier: &mut Unifier) -> Result> { if let TypeEnum::TVar { id, .. 
} = unifier.get_ty(var_ty).as_ref() { Ok(*id) } else { - Err("not type var".to_string()) + Err(HashSet::from([ + "not type var".to_string(), + ])) } } @@ -338,7 +340,7 @@ impl TopLevelComposer { ) } - pub fn get_all_assigned_field(stmts: &[Stmt<()>]) -> Result, String> { + pub fn get_all_assigned_field(stmts: &[Stmt<()>]) -> Result, HashSet> { let mut result = HashSet::new(); for s in stmts { match &s.node { @@ -355,10 +357,12 @@ impl TopLevelComposer { } } => { - return Err(format!( - "redundant type annotation for class fields at {}", - s.location - )) + return Err(HashSet::from([ + format!( + "redundant type annotation for class fields at {}", + s.location + ), + ])) } ast::StmtKind::Assign { targets, .. } => { for t in targets { @@ -410,7 +414,7 @@ impl TopLevelComposer { pub fn parse_parameter_default_value( default: &ast::Expr, resolver: &(dyn SymbolResolver + Send + Sync), - ) -> Result { + ) -> Result> { parse_parameter_default_value(default, resolver) } @@ -467,14 +471,14 @@ impl TopLevelComposer { pub fn parse_parameter_default_value( default: &ast::Expr, resolver: &(dyn SymbolResolver + Send + Sync), -) -> Result { - fn handle_constant(val: &Constant, loc: &Location) -> Result { +) -> Result> { + fn handle_constant(val: &Constant, loc: &Location) -> Result> { match val { Constant::Int(v) => { if let Ok(v) = (*v).try_into() { Ok(SymbolValue::I32(v)) } else { - Err(format!("integer value out of range at {loc}")) + Err(HashSet::from([format!("integer value out of range at {loc}")])) } } Constant::Float(v) => Ok(SymbolValue::Double(*v)), @@ -482,9 +486,11 @@ pub fn parse_parameter_default_value( Constant::Tuple(tuple) => Ok(SymbolValue::Tuple( tuple.iter().map(|x| handle_constant(x, loc)).collect::, _>>()?, )), - Constant::None => Err(format!( - "`None` is not supported, use `none` for option type instead ({loc})" - )), + Constant::None => Err(HashSet::from([ + format!( + "`None` is not supported, use `none` for option type instead ({loc})" + ), + ])), _ => unimplemented!("this constant is not supported at {}", loc), } } @@ -497,37 +503,51 @@ pub fn parse_parameter_default_value( let v: Result = (*v).try_into(); match v { Ok(v) => Ok(SymbolValue::I64(v)), - _ => Err(format!("default param value out of range at {}", default.location)), + _ => Err(HashSet::from([ + format!("default param value out of range at {}", default.location) + ])), } } - _ => Err(format!("only allow constant integer here at {}", default.location)) + _ => Err(HashSet::from([ + format!("only allow constant integer here at {}", default.location), + ])) } ast::ExprKind::Name { id, .. } if *id == "uint32".into() => match &args[0].node { ast::ExprKind::Constant { value: Constant::Int(v), .. } => { let v: Result = (*v).try_into(); match v { Ok(v) => Ok(SymbolValue::U32(v)), - _ => Err(format!("default param value out of range at {}", default.location)), + _ => Err(HashSet::from([ + format!("default param value out of range at {}", default.location), + ])), } } - _ => Err(format!("only allow constant integer here at {}", default.location)) + _ => Err(HashSet::from([ + format!("only allow constant integer here at {}", default.location), + ])) } ast::ExprKind::Name { id, .. } if *id == "uint64".into() => match &args[0].node { ast::ExprKind::Constant { value: Constant::Int(v), .. 
} => { let v: Result = (*v).try_into(); match v { Ok(v) => Ok(SymbolValue::U64(v)), - _ => Err(format!("default param value out of range at {}", default.location)), + _ => Err(HashSet::from([ + format!("default param value out of range at {}", default.location), + ])), } } - _ => Err(format!("only allow constant integer here at {}", default.location)) + _ => Err(HashSet::from([ + format!("only allow constant integer here at {}", default.location), + ])) } ast::ExprKind::Name { id, .. } if *id == "Some".into() => Ok( SymbolValue::OptionSome( Box::new(parse_parameter_default_value(&args[0], resolver)?) ) ), - _ => Err(format!("unsupported default parameter at {}", default.location)), + _ => Err(HashSet::from([ + format!("unsupported default parameter at {}", default.location), + ])), } } ast::ExprKind::Tuple { elts, .. } => Ok(SymbolValue::Tuple(elts @@ -538,17 +558,21 @@ pub fn parse_parameter_default_value( ast::ExprKind::Name { id, .. } if id == &"none".into() => Ok(SymbolValue::OptionNone), ast::ExprKind::Name { id, .. } => { resolver.get_default_param_value(default).ok_or_else( - || format!( - "`{}` cannot be used as a default parameter at {} \ - (not primitive type, option or tuple / not defined?)", - id, - default.location - ) + || HashSet::from([ + format!( + "`{}` cannot be used as a default parameter at {} \ + (not primitive type, option or tuple / not defined?)", + id, + default.location + ), + ]) ) } - _ => Err(format!( - "unsupported default parameter (not primitive type, option or tuple) at {}", - default.location - )) + _ => Err(HashSet::from([ + format!( + "unsupported default parameter (not primitive type, option or tuple) at {}", + default.location + ), + ])) } } diff --git a/nac3core/src/toplevel/test.rs b/nac3core/src/toplevel/test.rs index 2b8b03ad..19dc2165 100644 --- a/nac3core/src/toplevel/test.rs +++ b/nac3core/src/toplevel/test.rs @@ -64,8 +64,9 @@ impl SymbolResolver for Resolver { unimplemented!() } - fn get_identifier_def(&self, id: StrRef) -> Result { - self.0.id_to_def.lock().get(&id).cloned().ok_or_else(|| "Unknown identifier".to_string()) + fn get_identifier_def(&self, id: StrRef) -> Result> { + self.0.id_to_def.lock().get(&id).cloned() + .ok_or_else(|| HashSet::from(["Unknown identifier".to_string()])) } fn get_string_id(&self, _: &str) -> i32 { @@ -551,9 +552,9 @@ fn test_analyze(source: Vec<&str>, res: Vec<&str>) { if let Err(msg) = composer.start_analysis(false) { if print { - println!("{}", msg); + println!("{}", msg.iter().sorted().join("\n----------\n")); } else { - assert_eq!(res[0], msg); + assert_eq!(res[0], msg.iter().next().unwrap()); } } else { // skip 5 to skip primitives @@ -735,9 +736,9 @@ fn test_inference(source: Vec<&str>, res: Vec<&str>) { if let Err(msg) = composer.start_analysis(true) { if print { - println!("{}", msg); + println!("{}", msg.iter().sorted().join("\n----------\n")); } else { - assert_eq!(res[0], msg); + assert_eq!(res[0], msg.iter().next().unwrap()); } } else { // skip 5 to skip primitives diff --git a/nac3core/src/toplevel/type_annotation.rs b/nac3core/src/toplevel/type_annotation.rs index 54922776..82b5bf99 100644 --- a/nac3core/src/toplevel/type_annotation.rs +++ b/nac3core/src/toplevel/type_annotation.rs @@ -82,7 +82,7 @@ pub fn parse_ast_to_type_annotation_kinds( // the key stores the type_var of this topleveldef::class, we only need this field here locked: HashMap>, type_var: Option, -) -> Result { +) -> Result> { let name_handle = |id: &StrRef, unifier: &mut Unifier, locked: HashMap>| { @@ -109,10 +109,12 @@ pub fn 
parse_ast_to_type_annotation_kinds( if let TopLevelDef::Class { type_vars, .. } = &*def_read { type_vars.clone() } else { - return Err(format!( - "function cannot be used as a type (at {})", - expr.location - )); + return Err(HashSet::from([ + format!( + "function cannot be used as a type (at {})", + expr.location + ), + ])) } } else { locked.get(&obj_id).unwrap().clone() @@ -120,11 +122,13 @@ pub fn parse_ast_to_type_annotation_kinds( }; // check param number here if !type_vars.is_empty() { - return Err(format!( - "expect {} type variable parameter but got 0 (at {})", - type_vars.len(), - expr.location, - )); + return Err(HashSet::from([ + format!( + "expect {} type variable parameter but got 0 (at {})", + type_vars.len(), + expr.location, + ), + ])) } Ok(TypeAnnotation::CustomClass { id: obj_id, params: vec![] }) } else if let Ok(ty) = resolver.get_symbol_type(unifier, top_level_defs, primitives, *id) { @@ -133,10 +137,14 @@ pub fn parse_ast_to_type_annotation_kinds( unifier.unify(var, ty).unwrap(); Ok(TypeAnnotation::TypeVar(ty)) } else { - Err(format!("`{}` is not a valid type annotation (at {})", id, expr.location)) + Err(HashSet::from([ + format!("`{}` is not a valid type annotation (at {})", id, expr.location), + ])) } } else { - Err(format!("`{}` is not a valid type annotation (at {})", id, expr.location)) + Err(HashSet::from([ + format!("`{}` is not a valid type annotation (at {})", id, expr.location), + ])) } }; @@ -147,7 +155,9 @@ pub fn parse_ast_to_type_annotation_kinds( mut locked: HashMap>| { if ["virtual".into(), "Generic".into(), "list".into(), "tuple".into(), "Option".into()].contains(id) { - return Err(format!("keywords cannot be class name (at {})", expr.location)); + return Err(HashSet::from([ + format!("keywords cannot be class name (at {})", expr.location), + ])) } let obj_id = resolver.get_identifier_def(*id)?; let type_vars = { @@ -170,12 +180,14 @@ pub fn parse_ast_to_type_annotation_kinds( vec![slice] }; if type_vars.len() != params_ast.len() { - return Err(format!( - "expect {} type parameters but got {} (at {})", - type_vars.len(), - params_ast.len(), - params_ast[0].location, - )); + return Err(HashSet::from([ + format!( + "expect {} type parameters but got {} (at {})", + type_vars.len(), + params_ast.len(), + params_ast[0].location, + ), + ])) } let result = params_ast .iter() @@ -201,11 +213,12 @@ pub fn parse_ast_to_type_annotation_kinds( if no_type_var { result } else { - return Err(format!( - "application of type vars to generic class \ - is not currently supported (at {})", - params_ast[0].location - )); + return Err(HashSet::from([ + format!( + "application of type vars to generic class is not currently supported (at {})", + params_ast[0].location + ), + ])) } }; Ok(TypeAnnotation::CustomClass { id: obj_id, params: param_type_infos }) @@ -311,7 +324,9 @@ pub fn parse_ast_to_type_annotation_kinds( if let ast::ExprKind::Name { id, .. 
} = &value.node { class_name_handle(id, slice, unifier, locked) } else { - Err(format!("unsupported expression type for class name (at {})", value.location)) + Err(HashSet::from([ + format!("unsupported expression type for class name (at {})", value.location) + ])) } } @@ -324,13 +339,16 @@ pub fn parse_ast_to_type_annotation_kinds( }; let underlying_ty = underlying_ty[0]; - let value = SymbolValue::from_constant(value, underlying_ty, primitives, unifier)?; + let value = SymbolValue::from_constant(value, underlying_ty, primitives, unifier) + .map_err(|err| HashSet::from([err]))?; if matches!(value, SymbolValue::Str(_) | SymbolValue::Tuple(_) | SymbolValue::OptionSome(_)) { - return Err(format!( - "expression {value} is not allowed for constant type annotation (at {})", - expr.location - )) + return Err(HashSet::from([ + format!( + "expression {value} is not allowed for constant type annotation (at {})", + expr.location + ), + ])) } Ok(TypeAnnotation::Constant { @@ -339,7 +357,9 @@ pub fn parse_ast_to_type_annotation_kinds( }) } - _ => Err(format!("unsupported expression for type annotation (at {})", expr.location)), + _ => Err(HashSet::from([ + format!("unsupported expression for type annotation (at {})", expr.location), + ])), } } @@ -351,7 +371,7 @@ pub fn get_type_from_type_annotation_kinds( unifier: &mut Unifier, ann: &TypeAnnotation, subst_list: &mut Option> -) -> Result { +) -> Result> { match ann { TypeAnnotation::CustomClass { id: obj_id, params } => { let def_read = top_level_defs[obj_id.0].read(); @@ -361,11 +381,13 @@ pub fn get_type_from_type_annotation_kinds( }; if type_vars.len() != params.len() { - return Err(format!( - "unexpected number of type parameters: expected {} but got {}", - type_vars.len(), - params.len() - )) + return Err(HashSet::from([ + format!( + "unexpected number of type parameters: expected {} but got {}", + type_vars.len(), + params.len() + ), + ])) } let param_ty = params @@ -401,16 +423,18 @@ pub fn get_type_from_type_annotation_kinds( if ok { result.insert(*id, p); } else { - return Err(format!( - "cannot apply type {} to type variable with id {:?}", - unifier.internal_stringify( - p, - &mut |id| format!("class{id}"), - &mut |id| format!("typevar{id}"), - &mut None - ), - *id - )); + return Err(HashSet::from([ + format!( + "cannot apply type {} to type variable with id {:?}", + unifier.internal_stringify( + p, + &mut |id| format!("class{id}"), + &mut |id| format!("typevar{id}"), + &mut None + ), + *id + ) + ])) } } @@ -430,11 +454,13 @@ pub fn get_type_from_type_annotation_kinds( if ok { result.insert(*id, p); } else { - return Err(format!( - "cannot apply type {} to type variable {}", - unifier.stringify(p), - name.unwrap_or_else(|| format!("typevar{id}").into()), - )) + return Err(HashSet::from([ + format!( + "cannot apply type {} to type variable {}", + unifier.stringify(p), + name.unwrap_or_else(|| format!("typevar{id}").into()), + ), + ])) } } diff --git a/nac3core/src/typecheck/function_check.rs b/nac3core/src/typecheck/function_check.rs index 212646c3..a8461f52 100644 --- a/nac3core/src/typecheck/function_check.rs +++ b/nac3core/src/typecheck/function_check.rs @@ -6,9 +6,11 @@ use nac3parser::ast::{self, Constant, Expr, ExprKind, Operator::{LShift, RShift} use std::{collections::HashSet, iter::once}; impl<'a> Inferencer<'a> { - fn should_have_value(&mut self, expr: &Expr>) -> Result<(), String> { + fn should_have_value(&mut self, expr: &Expr>) -> Result<(), HashSet> { if matches!(expr.custom, Some(ty) if self.unifier.unioned(ty, 
self.primitives.none)) { - Err(format!("Error at {}: cannot have value none", expr.location)) + Err(HashSet::from([ + format!("Error at {}: cannot have value none", expr.location), + ])) } else { Ok(()) } @@ -18,10 +20,11 @@ impl<'a> Inferencer<'a> { &mut self, pattern: &Expr>, defined_identifiers: &mut HashSet, - ) -> Result<(), String> { + ) -> Result<(), HashSet> { match &pattern.node { - ExprKind::Name { id, .. } if id == &"none".into() => - Err(format!("cannot assign to a `none` (at {})", pattern.location)), + ExprKind::Name { id, .. } if id == &"none".into() => Err(HashSet::from([ + format!("cannot assign to a `none` (at {})", pattern.location), + ])), ExprKind::Name { id, .. } => { if !defined_identifiers.contains(id) { defined_identifiers.insert(*id); @@ -41,15 +44,19 @@ impl<'a> Inferencer<'a> { self.should_have_value(value)?; self.check_expr(slice, defined_identifiers)?; if let TypeEnum::TTuple { .. } = &*self.unifier.get_ty(value.custom.unwrap()) { - return Err(format!( - "Error at {}: cannot assign to tuple element", - value.location - )); + return Err(HashSet::from([ + format!( + "Error at {}: cannot assign to tuple element", + value.location + ), + ])) } Ok(()) } ExprKind::Constant { .. } => { - Err(format!("cannot assign to a constant (at {})", pattern.location)) + Err(HashSet::from([ + format!("cannot assign to a constant (at {})", pattern.location), + ])) } _ => self.check_expr(pattern, defined_identifiers), } @@ -59,15 +66,17 @@ impl<'a> Inferencer<'a> { &mut self, expr: &Expr>, defined_identifiers: &mut HashSet, - ) -> Result<(), String> { + ) -> Result<(), HashSet> { // there are some cases where the custom field is None if let Some(ty) = &expr.custom { if !matches!(&expr.node, ExprKind::Constant { value: Constant::Ellipsis, .. }) && !self.unifier.is_concrete(*ty, &self.function_data.bound_variables) { - return Err(format!( - "expected concrete type at {} but got {}", - expr.location, - self.unifier.get_ty(*ty).get_type_name() - )); + return Err(HashSet::from([ + format!( + "expected concrete type at {} but got {}", + expr.location, + self.unifier.get_ty(*ty).get_type_name() + ) + ])) } } match &expr.node { @@ -87,10 +96,12 @@ impl<'a> Inferencer<'a> { self.defined_identifiers.insert(*id); } Err(e) => { - return Err(format!( - "type error at identifier `{}` ({}) at {}", - id, e, expr.location - )); + return Err(HashSet::from([ + format!( + "type error at identifier `{}` ({}) at {}", + id, e, expr.location + ) + ])) } } } @@ -121,10 +132,12 @@ impl<'a> Inferencer<'a> { }; if *rhs_val < 0 { - return Err(format!( - "shift count is negative at {}", - right.location - )); + return Err(HashSet::from([ + format!( + "shift count is negative at {}", + right.location + ), + ])) } } } @@ -200,7 +213,7 @@ impl<'a> Inferencer<'a> { &mut self, stmt: &Stmt>, defined_identifiers: &mut HashSet, - ) -> Result { + ) -> Result> { match &stmt.node { StmtKind::For { target, iter, body, orelse, .. } => { self.check_expr(iter, defined_identifiers)?; @@ -307,11 +320,11 @@ impl<'a> Inferencer<'a> { &mut self, block: &[Stmt>], defined_identifiers: &mut HashSet, - ) -> Result { + ) -> Result> { let mut ret = false; for stmt in block { if ret { - println!("warning: dead code at {:?}\n", stmt.location); + eprintln!("warning: dead code at {:?}\n", stmt.location); } if self.check_stmt(stmt, defined_identifiers)? 
{ ret = true; diff --git a/nac3core/src/typecheck/type_inferencer/mod.rs b/nac3core/src/typecheck/type_inferencer/mod.rs index a7b73de2..7cc5dd9a 100644 --- a/nac3core/src/typecheck/type_inferencer/mod.rs +++ b/nac3core/src/typecheck/type_inferencer/mod.rs @@ -64,19 +64,19 @@ pub struct Inferencer<'a> { struct NaiveFolder(); impl Fold<()> for NaiveFolder { type TargetU = Option; - type Error = String; + type Error = HashSet; fn map_user(&mut self, (): ()) -> Result { Ok(None) } } -fn report_error(msg: &str, location: Location) -> Result { - Err(format!("{msg} at {location}")) +fn report_error(msg: &str, location: Location) -> Result> { + Err(HashSet::from([format!("{msg} at {location}")])) } impl<'a> Fold<()> for Inferencer<'a> { type TargetU = Option; - type Error = String; + type Error = HashSet; fn map_user(&mut self, (): ()) -> Result { Ok(None) @@ -159,9 +159,9 @@ impl<'a> Fold<()> for Inferencer<'a> { } if let Some(old_typ) = self.variable_mapping.insert(name, typ) { let loc = handler.location; - self.unifier.unify(old_typ, typ).map_err(|e| { - e.at(Some(loc)).to_display(self.unifier).to_string() - })?; + self.unifier.unify(old_typ, typ).map_err(|e| HashSet::from([ + e.at(Some(loc)).to_display(self.unifier).to_string(), + ]))?; } } let mut type_ = naive_folder.fold_expr(*type_)?; @@ -274,7 +274,7 @@ impl<'a> Fold<()> for Inferencer<'a> { .collect(); let loc = node.location; let targets = targets - .map_err(|e| e.at(Some(loc)).to_display(self.unifier).to_string())?; + .map_err(|e| HashSet::from([e.at(Some(loc)).to_display(self.unifier).to_string()]))?; return Ok(Located { location: node.location, node: ast::StmtKind::Assign { @@ -528,22 +528,24 @@ impl<'a> Fold<()> for Inferencer<'a> { } } -type InferenceResult = Result; +type InferenceResult = Result>; impl<'a> Inferencer<'a> { /// Constrain a <: b /// Currently implemented as unification - fn constrain(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> { + fn constrain(&mut self, a: Type, b: Type, location: &Location) -> Result<(), HashSet> { self.unify(a, b, location) } - fn unify(&mut self, a: Type, b: Type, location: &Location) -> Result<(), String> { + fn unify(&mut self, a: Type, b: Type, location: &Location) -> Result<(), HashSet> { self.unifier .unify(a, b) - .map_err(|e| e.at(Some(*location)).to_display(self.unifier).to_string()) + .map_err(|e| HashSet::from([ + e.at(Some(*location)).to_display(self.unifier).to_string(), + ])) } - fn infer_pattern(&mut self, pattern: &ast::Expr<()>) -> Result<(), String> { + fn infer_pattern(&mut self, pattern: &ast::Expr<()>) -> Result<(), HashSet> { match &pattern.node { ExprKind::Name { id, .. 
} => { if !self.defined_identifiers.contains(id) { @@ -592,9 +594,9 @@ impl<'a> Inferencer<'a> { .map(|v| v.name) .rev() .collect(); - self.unifier.unify_call(&call, ty, sign, &required).map_err(|e| { - e.at(Some(location)).to_display(self.unifier).to_string() - })?; + self.unifier.unify_call(&call, ty, sign, &required).map_err(|e| HashSet::from([ + e.at(Some(location)).to_display(self.unifier).to_string(), + ]))?; return Ok(sign.ret); } } @@ -623,7 +625,7 @@ impl<'a> Inferencer<'a> { location: Location, args: Arguments, body: ast::Expr<()>, - ) -> Result>, String> { + ) -> Result>, HashSet> { if !args.posonlyargs.is_empty() || args.vararg.is_some() || !args.kwonlyargs.is_empty() @@ -692,7 +694,7 @@ impl<'a> Inferencer<'a> { location: Location, elt: ast::Expr<()>, mut generators: Vec, - ) -> Result>, String> { + ) -> Result>, HashSet> { if generators.len() != 1 { return report_error( "Only 1 generator statement for list comprehension is supported", @@ -765,7 +767,7 @@ impl<'a> Inferencer<'a> { func: ast::Expr<()>, mut args: Vec>, keywords: Vec>, - ) -> Result>, String> { + ) -> Result>, HashSet> { let func = if let Located { location: func_location, custom, node: ExprKind::Name { id, ctx } } = func @@ -899,7 +901,9 @@ impl<'a> Inferencer<'a> { .collect(); self.unifier .unify_call(&call, func.custom.unwrap(), sign, &required) - .map_err(|e| e.at(Some(location)).to_display(self.unifier).to_string())?; + .map_err(|e| HashSet::from([ + e.at(Some(location)).to_display(self.unifier).to_string(), + ]))?; return Ok(Located { location, custom: Some(sign.ret), @@ -1073,8 +1077,11 @@ impl<'a> Inferencer<'a> { ) -> InferenceResult { let boolean = self.primitives.bool; for (a, b, c) in izip!(once(left).chain(comparators), comparators, ops) { - let method = - comparison_name(c).ok_or_else(|| "unsupported comparator".to_string())?.into(); + let method = comparison_name(c) + .ok_or_else(|| HashSet::from([ + "unsupported comparator".to_string() + ]))? + .into(); self.build_method_call( a.location, method, @@ -1105,7 +1112,7 @@ impl<'a> Inferencer<'a> { ExprKind::Constant { value: ast::Constant::Int(val), .. } => { // the index is a constant, so value can be a sequence. 
let ind: Option = (*val).try_into().ok(); - let ind = ind.ok_or_else(|| "Index must be int32".to_string())?; + let ind = ind.ok_or_else(|| HashSet::from(["Index must be int32".to_string()]))?; let map = once(( ind.into(), RecordField::new(ty, ctx == &ExprContext::Store, Some(value.location)), diff --git a/nac3core/src/typecheck/type_inferencer/test.rs b/nac3core/src/typecheck/type_inferencer/test.rs index cc18715c..4589c839 100644 --- a/nac3core/src/typecheck/type_inferencer/test.rs +++ b/nac3core/src/typecheck/type_inferencer/test.rs @@ -43,8 +43,9 @@ impl SymbolResolver for Resolver { unimplemented!() } - fn get_identifier_def(&self, id: StrRef) -> Result { - self.id_to_def.get(&id).cloned().ok_or_else(|| "Unknown identifier".to_string()) + fn get_identifier_def(&self, id: StrRef) -> Result> { + self.id_to_def.get(&id).cloned() + .ok_or_else(|| HashSet::from(["Unknown identifier".to_string()])) } fn get_string_id(&self, _: &str) -> i32 { diff --git a/nac3standalone/src/basic_symbol_resolver.rs b/nac3standalone/src/basic_symbol_resolver.rs index 3108847f..cdc3575c 100644 --- a/nac3standalone/src/basic_symbol_resolver.rs +++ b/nac3standalone/src/basic_symbol_resolver.rs @@ -10,6 +10,7 @@ use nac3core::{ use nac3parser::ast::{self, StrRef}; use parking_lot::{Mutex, RwLock}; use std::{collections::HashMap, sync::Arc}; +use std::collections::HashSet; pub struct ResolverInternal { pub id_to_type: Mutex>, @@ -61,8 +62,9 @@ impl SymbolResolver for Resolver { unimplemented!() } - fn get_identifier_def(&self, id: StrRef) -> Result { - self.0.id_to_def.lock().get(&id).copied().ok_or_else(|| "Undefined identifier".to_string()) + fn get_identifier_def(&self, id: StrRef) -> Result> { + self.0.id_to_def.lock().get(&id).copied() + .ok_or_else(|| HashSet::from(["Undefined identifier".to_string()])) } fn get_string_id(&self, s: &str) -> i32 { diff --git a/nac3standalone/src/main.rs b/nac3standalone/src/main.rs index d29a44a9..3ea7238d 100644 --- a/nac3standalone/src/main.rs +++ b/nac3standalone/src/main.rs @@ -8,6 +8,7 @@ use inkwell::{ }; use parking_lot::{Mutex, RwLock}; use std::{collections::HashMap, fs, path::Path, sync::Arc}; +use std::collections::HashSet; use nac3core::{ codegen::{ @@ -74,24 +75,28 @@ fn handle_typevar_definition( def_list: &[Arc>], unifier: &mut Unifier, primitives: &PrimitiveStore, -) -> Result { +) -> Result> { let ExprKind::Call { func, args, .. } = &var.node else { - return Err(format!( - "expression {var:?} cannot be handled as a generic parameter in global scope" - )) + return Err(HashSet::from([ + format!( + "expression {var:?} cannot be handled as a generic parameter in global scope" + ), + ])) }; match &func.node { ExprKind::Name { id, .. } if id == &"TypeVar".into() => { let ExprKind::Constant { value: Constant::Str(ty_name), .. 
} = &args[0].node else { - return Err(format!("Expected string constant for first parameter of `TypeVar`, got {:?}", &args[0].node)) + return Err(HashSet::from([ + format!("Expected string constant for first parameter of `TypeVar`, got {:?}", &args[0].node), + ])) }; let generic_name: StrRef = ty_name.to_string().into(); let constraints = args .iter() .skip(1) - .map(|x| -> Result { + .map(|x| -> Result> { let ty = parse_ast_to_type_annotation_kinds( resolver, def_list, @@ -109,7 +114,9 @@ fn handle_typevar_definition( let loc = func.location; if constraints.len() == 1 { - return Err(format!("A single constraint is not allowed (at {loc})")) + return Err(HashSet::from([ + format!("A single constraint is not allowed (at {loc})"), + ])) } Ok(unifier.get_fresh_var_with_range(&constraints, Some(generic_name), Some(loc)).0) @@ -117,14 +124,18 @@ fn handle_typevar_definition( ExprKind::Name { id, .. } if id == &"ConstGeneric".into() => { if args.len() != 2 { - return Err(format!("Expected 2 arguments for `ConstGeneric`, got {}", args.len())) + return Err(HashSet::from([ + format!("Expected 2 arguments for `ConstGeneric`, got {}", args.len()), + ])) } let ExprKind::Constant { value: Constant::Str(ty_name), .. } = &args[0].node else { - return Err(format!( - "Expected string constant for first parameter of `ConstGeneric`, got {:?}", - &args[0].node - )) + return Err(HashSet::from([ + format!( + "Expected string constant for first parameter of `ConstGeneric`, got {:?}", + &args[0].node + ), + ])) }; let generic_name: StrRef = ty_name.to_string().into(); @@ -145,9 +156,11 @@ fn handle_typevar_definition( Ok(unifier.get_fresh_const_generic_var(constraint, Some(generic_name), Some(loc)).0) } - _ => Err(format!( - "expression {var:?} cannot be handled as a generic parameter in global scope" - )) + _ => Err(HashSet::from([ + format!( + "expression {var:?} cannot be handled as a generic parameter in global scope" + ), + ])) } }
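
Note (illustrative sketch, not part of the patch above): throughout this diff, fallible analysis steps change their error type from String to HashSet<String> so that independent problems accumulate instead of stopping at the first one. Intermediate callers merge sets with errors.extend(e), and only the outermost reporting layer sorts the messages and joins them with a "----------" separator, as nac3artiq/src/lib.rs does. A minimal standalone sketch of that pattern, using only the standard library and made-up function names (check_item, check_all are hypothetical, not NAC3 APIs):

use std::collections::HashSet;

// One fallible analysis step: it reports every problem it finds as a set of messages.
fn check_item(item: &str) -> Result<(), HashSet<String>> {
    if item.is_empty() {
        Err(HashSet::from(["empty item name".to_string()]))
    } else {
        Ok(())
    }
}

// Driver: keep analysing after a failure and merge all error sets,
// mirroring the `errors.extend(e)` loops in composer.rs.
fn check_all(items: &[&str]) -> Result<(), HashSet<String>> {
    let mut errors = HashSet::new();
    for item in items {
        if let Err(e) = check_item(item) {
            errors.extend(e);
        }
    }
    if errors.is_empty() {
        Ok(())
    } else {
        Err(errors)
    }
}

fn main() {
    if let Err(e) = check_all(&["ok", "", "also ok"]) {
        // Render once, at the reporting boundary: sort for deterministic output, then join.
        let mut msgs: Vec<String> = e.into_iter().collect();
        msgs.sort();
        eprintln!("compilation failed\n----------\n{}", msgs.join("\n----------\n"));
    }
}

The patch itself reaches for itertools' sorted()/join() at those reporting boundaries (hence the new itertools dependency in nac3artiq); the sketch uses a plain Vec sort and join only to stay dependency-free.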