artiq, core: Return HashSet of error messages for analysis

This commit is contained in:
David Mak 2023-10-26 13:27:53 +08:00
parent 10538b5296
commit dacef0b7a4
8 changed files with 665 additions and 270 deletions

1
Cargo.lock generated
View File

@ -641,6 +641,7 @@ name = "nac3artiq"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"inkwell", "inkwell",
"itertools 0.11.0",
"nac3core", "nac3core",
"nac3ld", "nac3ld",
"nac3parser", "nac3parser",

View File

@ -9,6 +9,7 @@ name = "nac3artiq"
crate-type = ["cdylib"] crate-type = ["cdylib"]
[dependencies] [dependencies]
itertools = "0.11"
pyo3 = { version = "0.20", features = ["extension-module"] } pyo3 = { version = "0.20", features = ["extension-module"] }
parking_lot = "0.12" parking_lot = "0.12"
tempfile = "3.8" tempfile = "3.8"

View File

@ -13,6 +13,7 @@ use inkwell::{
targets::*, targets::*,
OptimizationLevel, OptimizationLevel,
}; };
use itertools::Itertools;
use nac3core::codegen::{CodeGenLLVMOptions, CodeGenTargetMachineOptions, gen_func_impl}; use nac3core::codegen::{CodeGenLLVMOptions, CodeGenTargetMachineOptions, gen_func_impl};
use nac3core::toplevel::builtins::get_exn_constructor; use nac3core::toplevel::builtins::get_exn_constructor;
use nac3core::typecheck::typedef::{TypeEnum, Unifier}; use nac3core::typecheck::typedef::{TypeEnum, Unifier};
@ -474,11 +475,11 @@ impl Nac3 {
if let Err(e) = composer.start_analysis(true) { if let Err(e) = composer.start_analysis(true) {
// report error of __modinit__ separately // report error of __modinit__ separately
if !e.contains("<nac3_synthesized_modinit>") { return if !e.iter().any(|err| err.contains("<nac3_synthesized_modinit>")) {
return Err(CompileError::new_err(format!( Err(CompileError::new_err(format!(
"compilation failed\n----------\n{}", "compilation failed\n----------\n{}",
e e.into_iter().sorted().join("\n----------\n")
))); )))
} else { } else {
let msg = Self::report_modinit( let msg = Self::report_modinit(
&arg_names, &arg_names,
@ -488,10 +489,10 @@ impl Nac3 {
&mut composer.unifier, &mut composer.unifier,
&self.primitive, &self.primitive,
); );
return Err(CompileError::new_err(format!( Err(CompileError::new_err(format!(
"compilation failed\n----------\n{}", "compilation failed\n----------\n{}",
msg.unwrap_or(e) msg.unwrap_or(e.into_iter().sorted().join("\n----------\n"))
))); )))
} }
} }
let top_level = Arc::new(composer.make_top_level_context()); let top_level = Arc::new(composer.make_top_level_context());

View File

@ -144,7 +144,9 @@ fn test_primitives() {
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.unwrap(); .unwrap();
inferencer.check_block(&statements, &mut identifiers).unwrap(); let (_, errs) = inferencer.check_block(&statements, &mut identifiers);
assert_eq!(errs.is_empty(), true);
let top_level = Arc::new(TopLevelContext { let top_level = Arc::new(TopLevelContext {
definitions: Arc::new(RwLock::new(std::mem::take(&mut *top_level.definitions.write()))), definitions: Arc::new(RwLock::new(std::mem::take(&mut *top_level.definitions.write()))),
unifiers: Arc::new(RwLock::new(vec![(unifier.get_shared_unifier(), primitives)])), unifiers: Arc::new(RwLock::new(vec![(unifier.get_shared_unifier(), primitives)])),
@ -346,7 +348,9 @@ fn test_simple_call() {
unreachable!() unreachable!()
} }
inferencer.check_block(&statements_1, &mut identifiers).unwrap(); let (_, errs) = inferencer.check_block(&statements_1, &mut identifiers);
assert_eq!(errs.is_empty(), true);
let top_level = Arc::new(TopLevelContext { let top_level = Arc::new(TopLevelContext {
definitions: Arc::new(RwLock::new(std::mem::take(&mut *top_level.definitions.write()))), definitions: Arc::new(RwLock::new(std::mem::take(&mut *top_level.definitions.write()))),
unifiers: Arc::new(RwLock::new(vec![(unifier.get_shared_unifier(), primitives)])), unifiers: Arc::new(RwLock::new(vec![(unifier.get_shared_unifier(), primitives)])),

View File

@ -22,19 +22,19 @@ impl Default for ComposerConfig {
type DefAst = (Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>); type DefAst = (Arc<RwLock<TopLevelDef>>, Option<ast::Stmt<()>>);
pub struct TopLevelComposer { pub struct TopLevelComposer {
// list of top level definitions, same as top level context /// List of top level definitions, same as top level context
pub definition_ast_list: Vec<DefAst>, pub definition_ast_list: Vec<DefAst>,
// start as a primitive unifier, will add more top_level defs inside /// Start as a primitive unifier, will add more top_level defs inside
pub unifier: Unifier, pub unifier: Unifier,
// primitive store /// Primitive store
pub primitives_ty: PrimitiveStore, pub primitives_ty: PrimitiveStore,
// keyword list to prevent same user-defined name /// Keyword list to prevent same user-defined name
pub keyword_list: HashSet<StrRef>, pub keyword_list: HashSet<StrRef>,
// to prevent duplicate definition /// To prevent duplicate definition
pub defined_names: HashSet<String>, pub defined_names: HashSet<String>,
// get the class def id of a class method /// Get the class def id of a class method
pub method_class: HashMap<DefinitionId, DefinitionId>, pub method_class: HashMap<DefinitionId, DefinitionId>,
// number of built-in function and classes in the definition list, later skip /// Number of built-in function and classes in the definition list, later skip
pub builtin_num: usize, pub builtin_num: usize,
pub core_config: ComposerConfig, pub core_config: ComposerConfig,
} }
@ -227,8 +227,8 @@ impl TopLevelComposer {
// TODO: Fix this hack. We will generate constructor for classes that inherit // TODO: Fix this hack. We will generate constructor for classes that inherit
// from Exception class (directly or indirectly), but this code cannot handle // from Exception class (directly or indirectly), but this code cannot handle
// subclass of other exception classes. // subclass of other exception classes.
let mut contains_constructor = bases let mut contains_constructor = bases.iter()
.iter().any(|base| matches!(base.node, ast::ExprKind::Name { id, .. } if id == exception_id)); .any(|base| matches!(base.node, ast::ExprKind::Name { id, .. } if id == exception_id));
for b in body { for b in body {
if let ast::StmtKind::FunctionDef { name: method_name, .. } = &b.node { if let ast::StmtKind::FunctionDef { name: method_name, .. } = &b.node {
if method_name == &init_id { if method_name == &init_id {
@ -348,25 +348,44 @@ impl TopLevelComposer {
} }
} }
pub fn start_analysis(&mut self, inference: bool) -> Result<(), String> { pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet<String>> {
self.analyze_top_level_class_type_var()?; let mut errors = HashSet::new();
self.analyze_top_level_class_bases()?;
self.analyze_top_level_class_fields_methods()?; if let Err(errs) = self.analyze_top_level_class_type_var() {
self.analyze_top_level_function()?; errors.extend(errs);
if inference { }
self.analyze_function_instance()?; if let Err(errs) = self.analyze_top_level_class_bases() {
errors.extend(errs);
}
if let Err(errs) = self.analyze_top_level_class_fields_methods() {
errors.extend(errs);
}
if let Err(errs) = self.analyze_top_level_function() {
errors.extend(errs);
}
if inference {
if let Err(errs) = self.analyze_function_instance() {
errors.extend(errs);
}
}
if !errors.is_empty() {
Err(errors)
} else {
Ok(())
} }
Ok(())
} }
/// step 1, analyze the type vars associated with top level class /// step 1, analyze the type vars associated with top level class
fn analyze_top_level_class_type_var(&mut self) -> Result<(), String> { fn analyze_top_level_class_type_var(&mut self) -> Result<(), HashSet<String>> {
let def_list = &self.definition_ast_list; let def_list = &self.definition_ast_list;
let temp_def_list = self.extract_def_list(); let temp_def_list = self.extract_def_list();
let unifier = self.unifier.borrow_mut(); let unifier = self.unifier.borrow_mut();
let primitives_store = &self.primitives_ty; let primitives_store = &self.primitives_ty;
let mut analyze = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| { let mut analyze = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
let mut errors = HashSet::new();
// only deal with class def here // only deal with class def here
let mut class_def = class_def.write(); let mut class_def = class_def.write();
let (class_bases_ast, class_def_type_vars, class_resolver) = { let (class_bases_ast, class_def_type_vars, class_resolver) = {
@ -405,7 +424,7 @@ impl TopLevelComposer {
if !is_generic { if !is_generic {
is_generic = true; is_generic = true;
} else { } else {
return Err(format!( errors.insert(format!(
"only single Generic[...] is allowed (at {})", "only single Generic[...] is allowed (at {})",
b.location b.location
)); ));
@ -421,7 +440,7 @@ impl TopLevelComposer {
} }
// parse the type vars // parse the type vars
let type_vars = type_var_list let type_vars = match type_var_list
.into_iter() .into_iter()
.map(|e| { .map(|e| {
class_resolver.parse_type_annotation( class_resolver.parse_type_annotation(
@ -431,7 +450,13 @@ impl TopLevelComposer {
e, e,
) )
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>() {
Ok(v) => v,
Err(e) => {
errors.insert(e);
continue
}
};
// check if all are unique type vars // check if all are unique type vars
let all_unique_type_var = { let all_unique_type_var = {
@ -445,11 +470,13 @@ impl TopLevelComposer {
} }
}) })
}; };
if !all_unique_type_var { if !all_unique_type_var {
return Err(format!( errors.insert(format!(
"duplicate type variable occurs (at {})", "duplicate type variable occurs (at {})",
slice.location slice.location
)); ));
continue
} }
// add to TopLevelDef // add to TopLevelDef
@ -460,27 +487,35 @@ impl TopLevelComposer {
_ => continue, _ => continue,
} }
} }
Ok(())
if !errors.is_empty() {
Err(errors)
} else {
Ok(())
}
}; };
let mut errors = HashSet::new(); let mut errors = HashSet::new();
for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) { for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
if class_ast.is_none() { if class_ast.is_none() {
continue; continue;
} }
if let Err(e) = analyze(class_def, class_ast) { if let Err(errs) = analyze(class_def, class_ast) {
errors.insert(e); errors.extend(errs);
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); Err(errors)
} else {
Ok(())
} }
Ok(())
} }
/// step 2, base classes. /// step 2, base classes.
/// now that the type vars of all classes are done, handle base classes and /// now that the type vars of all classes are done, handle base classes and
/// put Self class into the ancestors list. We only allow single inheritance /// put Self class into the ancestors list. We only allow single inheritance
fn analyze_top_level_class_bases(&mut self) -> Result<(), String> { fn analyze_top_level_class_bases(&mut self) -> Result<(), HashSet<String>> {
if self.unifier.top_level.is_none() { if self.unifier.top_level.is_none() {
let ctx = Arc::new(self.make_top_level_context()); let ctx = Arc::new(self.make_top_level_context());
self.unifier.top_level = Some(ctx); self.unifier.top_level = Some(ctx);
@ -492,6 +527,7 @@ impl TopLevelComposer {
let mut get_direct_parents = let mut get_direct_parents =
|class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| { |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
let mut errors = HashSet::new();
let mut class_def = class_def.write(); let mut class_def = class_def.write();
let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = { let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = {
if let TopLevelDef::Class { if let TopLevelDef::Class {
@ -529,35 +565,47 @@ impl TopLevelComposer {
} }
if has_base { if has_base {
return Err(format!( errors.insert(format!(
"a class definition can only have at most one base class \ "a class definition can only have at most one base class \
declaration and one generic declaration (at {})", declaration and one generic declaration (at {})",
b.location b.location
)); ));
continue
} }
has_base = true; has_base = true;
// the function parse_ast_to make sure that no type var occurred in // the function parse_ast_to make sure that no type var occurred in
// bast_ty if it is a CustomClassKind // bast_ty if it is a CustomClassKind
let base_ty = parse_ast_to_type_annotation_kinds( let base_ty = match parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
&temp_def_list, &temp_def_list,
unifier, unifier,
&primitive_types, &primitive_types,
b, b,
vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(), vec![(*class_def_id, class_type_vars.clone())].into_iter().collect(),
)?; ) {
Ok(v) => v,
Err(e) => {
errors.insert(e);
continue
}
};
if let TypeAnnotation::CustomClass { .. } = &base_ty { if let TypeAnnotation::CustomClass { .. } = &base_ty {
class_ancestors.push(base_ty); class_ancestors.push(base_ty);
} else { } else {
return Err(format!( errors.insert(format!(
"class base declaration can only be custom class (at {})", "class base declaration can only be custom class (at {})",
b.location, b.location,
)); ));
} }
} }
Ok(())
if !errors.is_empty() {
Err(errors)
} else {
Ok(())
}
}; };
// first, only push direct parent into the list // first, only push direct parent into the list
@ -566,12 +614,13 @@ impl TopLevelComposer {
if class_ast.is_none() { if class_ast.is_none() {
continue; continue;
} }
if let Err(e) = get_direct_parents(class_def, class_ast) { if let Err(errs) = get_direct_parents(class_def, class_ast) {
errors.insert(e); errors.extend(errs);
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); return Err(errors);
} }
// second, get all ancestors // second, get all ancestors
@ -585,6 +634,7 @@ impl TopLevelComposer {
return Ok(()); return Ok(());
} }
}; };
ancestors_store.insert( ancestors_store.insert(
class_id, class_id,
// if class has direct parents, get all ancestors of its parents. Else just empty // if class has direct parents, get all ancestors of its parents. Else just empty
@ -594,18 +644,22 @@ impl TopLevelComposer {
Self::get_all_ancestors_helper(&class_ancestors[0], temp_def_list.as_slice())? Self::get_all_ancestors_helper(&class_ancestors[0], temp_def_list.as_slice())?
}, },
); );
Ok(()) Ok(())
}; };
for (class_def, ast) in self.definition_ast_list.iter().skip(self.builtin_num) { for (class_def, ast) in self.definition_ast_list.iter().skip(self.builtin_num) {
if ast.is_none() { if ast.is_none() {
continue; continue;
} }
if let Err(e) = get_all_ancestors(class_def) { if let Err(e) = get_all_ancestors(class_def) {
errors.insert(e); errors.insert(e);
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); return Err(errors);
} }
// insert the ancestors to the def list // insert the ancestors to the def list
@ -643,7 +697,9 @@ impl TopLevelComposer {
stmt.node, stmt.node,
ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. } ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. }
) { ) {
return Err("Classes inherited from exception should have no custom fields/methods".into()); return Err(
HashSet::from(["Classes inherited from exception should have no custom fields/methods".into()])
);
} }
} }
} else { } else {
@ -666,7 +722,7 @@ impl TopLevelComposer {
} }
/// step 3, class fields and methods /// step 3, class fields and methods
fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), String> { fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), HashSet<String>> {
let temp_def_list = self.extract_def_list(); let temp_def_list = self.extract_def_list();
let primitives = &self.primitives_ty; let primitives = &self.primitives_ty;
let def_ast_list = &self.definition_ast_list; let def_ast_list = &self.definition_ast_list;
@ -689,12 +745,12 @@ impl TopLevelComposer {
&mut type_var_to_concrete_def, &mut type_var_to_concrete_def,
(&self.keyword_list, &self.core_config), (&self.keyword_list, &self.core_config),
) { ) {
errors.insert(e); errors.extend(e);
} }
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); return Err(errors);
} }
// handle the inherited methods and fields // handle the inherited methods and fields
@ -709,19 +765,26 @@ impl TopLevelComposer {
if class_ast.is_none() { if class_ast.is_none() {
continue; continue;
} }
let mut class_def = class_def.write(); let mut class_def = class_def.write();
if let TopLevelDef::Class { ancestors, .. } = class_def.deref() { if let TopLevelDef::Class { ancestors, .. } = class_def.deref() {
// if the length of the ancestor is equal to the current depth // if the length of the ancestor is equal to the current depth
// it means that all the ancestors of the class is handled // it means that all the ancestors of the class is handled
if ancestors.len() == current_ancestor_depth { if ancestors.len() == current_ancestor_depth {
finished = false; finished = false;
Self::analyze_single_class_ancestors( match Self::analyze_single_class_ancestors(
class_def.deref_mut(), class_def.deref_mut(),
&temp_def_list, &temp_def_list,
unifier, unifier,
primitives, primitives,
&mut type_var_to_concrete_def, &mut type_var_to_concrete_def,
)?; ) {
Ok(()) => (),
Err(errs) => {
errors.extend(errs);
return Err(errors)
}
}
} }
} }
} }
@ -743,28 +806,33 @@ impl TopLevelComposer {
let target_ty = let target_ty =
get_type_from_type_annotation_kinds(&temp_def_list, unifier, primitives, &def, &mut subst_list)?; get_type_from_type_annotation_kinds(&temp_def_list, unifier, primitives, &def, &mut subst_list)?;
unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?; unifier.unify(ty, target_ty).map_err(|e| e.to_display(unifier).to_string())?;
Ok(()) as Result<(), String> Ok(())
}; };
for (ty, def) in type_var_to_concrete_def { for (ty, def) in type_var_to_concrete_def {
if let Err(e) = unification_helper(ty, def) { if let Err(e) = unification_helper(ty, def) {
errors.insert(e); errors.insert(e);
} }
} }
for ty in subst_list.unwrap().into_iter() { for ty in subst_list.unwrap().into_iter() {
if let TypeEnum::TObj { obj_id, params, fields } = &*unifier.get_ty(ty) { if let TypeEnum::TObj { obj_id, params, fields } = &*unifier.get_ty(ty) {
let mut new_fields = HashMap::new(); let mut new_fields = HashMap::new();
let mut need_subst = false; let mut need_subst = false;
for (name, (ty, mutable)) in fields.iter() { for (name, (ty, mutable)) in fields.iter() {
let substituted = unifier.subst(*ty, params); let substituted = unifier.subst(*ty, params);
need_subst |= substituted.is_some(); need_subst |= substituted.is_some();
new_fields.insert(*name, (substituted.unwrap_or(*ty), *mutable)); new_fields.insert(*name, (substituted.unwrap_or(*ty), *mutable));
} }
if need_subst { if need_subst {
let new_ty = unifier.add_ty(TypeEnum::TObj { let new_ty = unifier.add_ty(TypeEnum::TObj {
obj_id: *obj_id, obj_id: *obj_id,
params: params.clone(), params: params.clone(),
fields: new_fields, fields: new_fields,
}); });
if let Err(e) = unifier.unify(ty, new_ty) { if let Err(e) = unifier.unify(ty, new_ty) {
errors.insert(e.to_display(unifier).to_string()); errors.insert(e.to_display(unifier).to_string());
} }
@ -773,8 +841,9 @@ impl TopLevelComposer {
unreachable!() unreachable!()
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); return Err(errors);
} }
for (def, _) in def_ast_list.iter().skip(self.builtin_num) { for (def, _) in def_ast_list.iter().skip(self.builtin_num) {
@ -793,7 +862,7 @@ impl TopLevelComposer {
} }
/// step 4, after class methods are done, top level functions have nothing unknown /// step 4, after class methods are done, top level functions have nothing unknown
fn analyze_top_level_function(&mut self) -> Result<(), String> { fn analyze_top_level_function(&mut self) -> Result<(), HashSet<String>> {
let def_list = &self.definition_ast_list; let def_list = &self.definition_ast_list;
let keyword_list = &self.keyword_list; let keyword_list = &self.keyword_list;
let temp_def_list = self.extract_def_list(); let temp_def_list = self.extract_def_list();
@ -802,6 +871,8 @@ impl TopLevelComposer {
let mut errors = HashSet::new(); let mut errors = HashSet::new();
let mut analyze = |function_def: &Arc<RwLock<TopLevelDef>>, function_ast: &Option<Stmt>| { let mut analyze = |function_def: &Arc<RwLock<TopLevelDef>>, function_ast: &Option<Stmt>| {
let mut errors = HashSet::new();
let mut function_def = function_def.write(); let mut function_def = function_def.write();
let function_def = function_def.deref_mut(); let function_def = function_def.deref_mut();
let function_ast = if let Some(x) = function_ast.as_ref() { let function_ast = if let Some(x) = function_ast.as_ref() {
@ -818,6 +889,7 @@ impl TopLevelComposer {
// already have a function type, is class method, skip // already have a function type, is class method, skip
return Ok(()); return Ok(());
} }
if let ast::StmtKind::FunctionDef { args, returns, .. } = &function_ast.node { if let ast::StmtKind::FunctionDef { args, returns, .. } = &function_ast.node {
let resolver = resolver.as_ref(); let resolver = resolver.as_ref();
let resolver = resolver.unwrap(); let resolver = resolver.unwrap();
@ -825,13 +897,15 @@ impl TopLevelComposer {
let mut function_var_map: HashMap<u32, Type> = HashMap::new(); let mut function_var_map: HashMap<u32, Type> = HashMap::new();
let arg_types = { let arg_types = {
let mut errors = HashSet::new();
// make sure no duplicate parameter // make sure no duplicate parameter
let mut defined_parameter_name: HashSet<_> = HashSet::new(); let mut defined_parameter_name: HashSet<_> = HashSet::new();
for x in args.args.iter() { for x in args.args.iter() {
if !defined_parameter_name.insert(x.node.arg) if !defined_parameter_name.insert(x.node.arg)
|| keyword_list.contains(&x.node.arg) || keyword_list.contains(&x.node.arg)
{ {
return Err(format!( errors.insert(format!(
"top level function must have unique parameter names \ "top level function must have unique parameter names \
and names should not be the same as the keywords (at {})", and names should not be the same as the keywords (at {})",
x.location x.location
@ -839,6 +913,10 @@ impl TopLevelComposer {
} }
} }
if !errors.is_empty() {
return Err(errors)
}
let arg_with_default: Vec<( let arg_with_default: Vec<(
&ast::Located<ast::ArgData<()>>, &ast::Located<ast::ArgData<()>>,
Option<&ast::Expr>, Option<&ast::Expr>,
@ -855,7 +933,7 @@ impl TopLevelComposer {
) )
.collect_vec(); .collect_vec();
arg_with_default match arg_with_default
.iter() .iter()
.rev() .rev()
.map(|(x, default)| -> Result<FuncArg, String> { .map(|(x, default)| -> Result<FuncArg, String> {
@ -923,22 +1001,29 @@ impl TopLevelComposer {
primitives_store, primitives_store,
unifier, unifier,
) )
.map_err( .map_err(
|err| format!("{} (at {})", err, x.location), |err| format!("{} (at {})", err, x.location),
)?; )?;
v v
}), }),
}, },
}) })
}) })
.collect::<Result<Vec<_>, _>>()? .collect::<Result<Vec<_>, _>>() {
}; Ok(v) => Ok(v),
Err(e) => {
errors.insert(e);
Err(errors)
},
}
}?;
let return_ty = { let return_ty = {
if let Some(returns) = returns { if let Some(returns) = returns {
let return_ty_annotation = { let return_ty_annotation = {
let return_annotation = returns.as_ref(); let return_annotation = returns.as_ref();
parse_ast_to_type_annotation_kinds( match parse_ast_to_type_annotation_kinds(
resolver, resolver,
&temp_def_list, &temp_def_list,
unifier, unifier,
@ -947,11 +1032,13 @@ impl TopLevelComposer {
// NOTE: since only class need this, for function // NOTE: since only class need this, for function
// it should be fine to be empty map // it should be fine to be empty map
HashMap::new(), HashMap::new(),
)? ) {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
}
}; };
let type_vars_within = let type_vars_within = match get_type_var_contained_in_type_annotation(&return_ty_annotation)
get_type_var_contained_in_type_annotation(&return_ty_annotation)
.into_iter() .into_iter()
.map(|x| -> Result<(u32, Type), String> { .map(|x| -> Result<(u32, Type), String> {
if let TypeAnnotation::TypeVar(ty) = x { if let TypeAnnotation::TypeVar(ty) = x {
@ -960,7 +1047,11 @@ impl TopLevelComposer {
unreachable!("must be type var here") unreachable!("must be type var here")
} }
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>() {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
};
for (id, ty) in type_vars_within { for (id, ty) in type_vars_within {
if let Some(prev_ty) = function_var_map.insert(id, ty) { if let Some(prev_ty) = function_var_map.insert(id, ty) {
// if already have the type inserted, make sure they are the same thing // if already have the type inserted, make sure they are the same thing
@ -968,17 +1059,21 @@ impl TopLevelComposer {
} }
} }
get_type_from_type_annotation_kinds( match get_type_from_type_annotation_kinds(
&temp_def_list, &temp_def_list,
unifier, unifier,
primitives_store, primitives_store,
&return_ty_annotation, &return_ty_annotation,
&mut None &mut None
)? ) {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
}
} else { } else {
primitives_store.none primitives_store.none
} }
}; };
var_id.extend_from_slice(function_var_map var_id.extend_from_slice(function_var_map
.iter() .iter()
.filter_map(|(id, ty)| { .filter_map(|(id, ty)| {
@ -991,35 +1086,46 @@ impl TopLevelComposer {
.collect_vec() .collect_vec()
.as_slice() .as_slice()
); );
let function_ty = unifier.add_ty(TypeEnum::TFunc(FunSignature { let function_ty = unifier.add_ty(TypeEnum::TFunc(FunSignature {
args: arg_types, args: arg_types,
ret: return_ty, ret: return_ty,
vars: function_var_map, vars: function_var_map,
})); }));
unifier.unify(*dummy_ty, function_ty).map_err(|e| {
match unifier.unify(*dummy_ty, function_ty).map_err(|e| {
e.at(Some(function_ast.location)).to_display(unifier).to_string() e.at(Some(function_ast.location)).to_display(unifier).to_string()
})?; }) {
Err(e) => {
errors.insert(e);
Err(errors)
}
_ => Ok(())
}
} else { } else {
unreachable!("must be both function"); unreachable!("must be both function");
} }
} else { } else {
// not top level function def, skip // not top level function def, skip
return Ok(()); Ok(())
} }
Ok(())
}; };
for (function_def, function_ast) in def_list.iter().skip(self.builtin_num) { for (function_def, function_ast) in def_list.iter().skip(self.builtin_num) {
if function_ast.is_none() { if function_ast.is_none() {
continue; continue;
} }
if let Err(e) = analyze(function_def, function_ast) { if let Err(e) = analyze(function_def, function_ast) {
errors.insert(e); errors.extend(e);
} }
} }
if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); return if !errors.is_empty() {
Err(errors)
} else {
Ok(())
} }
Ok(())
} }
fn analyze_single_class_methods_fields( fn analyze_single_class_methods_fields(
@ -1030,7 +1136,8 @@ impl TopLevelComposer {
primitives: &PrimitiveStore, primitives: &PrimitiveStore,
type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>, type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>,
core_info: (&HashSet<StrRef>, &ComposerConfig), core_info: (&HashSet<StrRef>, &ComposerConfig),
) -> Result<(), String> { ) -> Result<(), HashSet<String>> {
let mut errors = HashSet::new();
let (keyword_list, core_config) = core_info; let (keyword_list, core_config) = core_info;
let mut class_def = class_def.write(); let mut class_def = class_def.write();
let ( let (
@ -1061,6 +1168,7 @@ impl TopLevelComposer {
} else { } else {
unreachable!("here must be toplevel class def"); unreachable!("here must be toplevel class def");
}; };
let class_resolver = class_resolver.as_ref().unwrap(); let class_resolver = class_resolver.as_ref().unwrap();
let class_resolver = class_resolver.as_ref(); let class_resolver = class_resolver.as_ref();
@ -1068,8 +1176,10 @@ impl TopLevelComposer {
for b in class_body_ast { for b in class_body_ast {
match &b.node { match &b.node {
ast::StmtKind::FunctionDef { args, returns, name, .. } => { ast::StmtKind::FunctionDef { args, returns, name, .. } => {
let (method_dummy_ty, method_id) = let (method_dummy_ty, method_id) = match Self::get_class_method_def_info(class_methods_def, *name) {
Self::get_class_method_def_info(class_methods_def, *name)?; Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
};
let mut method_var_map: HashMap<u32, Type> = HashMap::new(); let mut method_var_map: HashMap<u32, Type> = HashMap::new();
@ -1081,7 +1191,7 @@ impl TopLevelComposer {
if !defined_parameter_name.insert(x.node.arg) if !defined_parameter_name.insert(x.node.arg)
|| (keyword_list.contains(&x.node.arg) && x.node.arg != zelf) || (keyword_list.contains(&x.node.arg) && x.node.arg != zelf)
{ {
return Err(format!( errors.insert(format!(
"top level function must have unique parameter names \ "top level function must have unique parameter names \
and names should not be the same as the keywords (at {})", and names should not be the same as the keywords (at {})",
x.location x.location
@ -1090,13 +1200,13 @@ impl TopLevelComposer {
} }
if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) { if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) {
return Err(format!( errors.insert(format!(
"__init__ method must have a `self` parameter (at {})", "__init__ method must have a `self` parameter (at {})",
b.location b.location
)); ));
} }
if !defined_parameter_name.contains(&zelf) { if !defined_parameter_name.contains(&zelf) {
return Err(format!( errors.insert(format!(
"class method must have a `self` parameter (at {})", "class method must have a `self` parameter (at {})",
b.location b.location
)); ));
@ -1124,18 +1234,19 @@ impl TopLevelComposer {
let name = x.node.arg; let name = x.node.arg;
if name != zelf { if name != zelf {
let type_ann = { let type_ann = {
let annotation_expr = x let annotation_expr = match x.node.annotation.as_ref() {
.node Some(v) => v.as_ref(),
.annotation None => {
.as_ref() errors.insert(format!(
.ok_or_else(|| {
format!(
"type annotation needed for `{}` at {}", "type annotation needed for `{}` at {}",
x.node.arg, x.location x.node.arg, x.location
) ));
})?
.as_ref(); continue
parse_ast_to_type_annotation_kinds( },
};
match parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
temp_def_list, temp_def_list,
unifier, unifier,
@ -1144,15 +1255,23 @@ impl TopLevelComposer {
vec![(class_id, class_type_vars_def.clone())] vec![(class_id, class_type_vars_def.clone())]
.into_iter() .into_iter()
.collect(), .collect(),
)? ) {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
}
}; };
// find type vars within this method parameter type annotation // find type vars within this method parameter type annotation
let type_vars_within = let type_vars_within =
get_type_var_contained_in_type_annotation(&type_ann); get_type_var_contained_in_type_annotation(&type_ann);
// handle the class type var and the method type var // handle the class type var and the method type var
for type_var_within in type_vars_within { for type_var_within in type_vars_within {
if let TypeAnnotation::TypeVar(ty) = type_var_within { if let TypeAnnotation::TypeVar(ty) = type_var_within {
let id = Self::get_var_id(ty, unifier)?; let id = match Self::get_var_id(ty, unifier) {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
};
if let Some(prev_ty) = method_var_map.insert(id, ty) { if let Some(prev_ty) = method_var_map.insert(id, ty) {
// if already in the list, make sure they are the same? // if already in the list, make sure they are the same?
assert_eq!(prev_ty, ty); assert_eq!(prev_ty, ty);
@ -1169,24 +1288,39 @@ impl TopLevelComposer {
None => None, None => None,
Some(default) => { Some(default) => {
if name == "self".into() { if name == "self".into() {
return Err(format!("`self` parameter cannot take default value (at {})", x.location)); errors.insert(format!(
"`self` parameter cannot take default value (at {})", x.location
));
continue
}
let v = match Self::parse_parameter_default_value(
default,
class_resolver,
) {
Ok(v) => v,
Err(e) => {
errors.insert(e);
continue
}
};
match Self::check_default_param_type(
&v, &type_ann, primitives, unifier,
) {
Ok(_) => Some(v),
Err(e) => {
errors.insert(
format!("{} (at {})", e, x.location)
);
continue
}
} }
Some({
let v = Self::parse_parameter_default_value(
default,
class_resolver,
)?;
Self::check_default_param_type(
&v, &type_ann, primitives, unifier,
)
.map_err(|err| {
format!("{} (at {})", err, x.location)
})?;
v
})
} }
}, },
}; };
// push the dummy type and the type annotation // push the dummy type and the type annotation
// into the list for later unification // into the list for later unification
type_var_to_concrete_def type_var_to_concrete_def
@ -1194,27 +1328,40 @@ impl TopLevelComposer {
result.push(dummy_func_arg) result.push(dummy_func_arg)
} }
} }
result result
}; };
let ret_type = { let ret_type = {
if let Some(result) = returns { if let Some(result) = returns {
let result = result.as_ref(); let result = result.as_ref();
let annotation = parse_ast_to_type_annotation_kinds( let annotation = match parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
temp_def_list, temp_def_list,
unifier, unifier,
primitives, primitives,
result, result,
vec![(class_id, class_type_vars_def.clone())].into_iter().collect(), vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
)?; ) {
Ok(v) => v,
Err(e) => {
errors.insert(e);
continue
}
};
// find type vars within this return type annotation // find type vars within this return type annotation
let type_vars_within = let type_vars_within =
get_type_var_contained_in_type_annotation(&annotation); get_type_var_contained_in_type_annotation(&annotation);
// handle the class type var and the method type var // handle the class type var and the method type var
for type_var_within in type_vars_within { for type_var_within in type_vars_within {
if let TypeAnnotation::TypeVar(ty) = type_var_within { if let TypeAnnotation::TypeVar(ty) = type_var_within {
let id = Self::get_var_id(ty, unifier)?; let id = match Self::get_var_id(ty, unifier) {
Ok(v) => v,
Err(e) => return Err(HashSet::from([e])),
};
if let Some(prev_ty) = method_var_map.insert(id, ty) { if let Some(prev_ty) = method_var_map.insert(id, ty) {
// if already in the list, make sure they are the same? // if already in the list, make sure they are the same?
assert_eq!(prev_ty, ty); assert_eq!(prev_ty, ty);
@ -1264,10 +1411,15 @@ impl TopLevelComposer {
// unify now since function type is not in type annotation define // unify now since function type is not in type annotation define
// which should be fine since type within method_type will be subst later // which should be fine since type within method_type will be subst later
unifier match unifier.unify(method_dummy_ty, method_type) {
.unify(method_dummy_ty, method_type) Ok(()) => (),
.map_err(|e| e.to_display(unifier).to_string())?; Err(e) => {
errors.insert(e.to_display(unifier).to_string());
continue
}
}
} }
ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => { ast::StmtKind::AnnAssign { target, annotation, value: None, .. } => {
if let ast::ExprKind::Name { id: attr, .. } = &target.node { if let ast::ExprKind::Name { id: attr, .. } = &target.node {
if defined_fields.insert(attr.to_string()) { if defined_fields.insert(attr.to_string()) {
@ -1296,26 +1448,35 @@ impl TopLevelComposer {
}; };
class_fields_def.push((*attr, dummy_field_type, mutable)); class_fields_def.push((*attr, dummy_field_type, mutable));
let parsed_annotation = parse_ast_to_type_annotation_kinds( let parsed_annotation = match parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
temp_def_list, temp_def_list,
unifier, unifier,
primitives, primitives,
annotation.as_ref(), annotation.as_ref(),
vec![(class_id, class_type_vars_def.clone())].into_iter().collect(), vec![(class_id, class_type_vars_def.clone())].into_iter().collect(),
)?; ) {
Ok(v) => v,
Err(e) => {
errors.insert(e);
continue
}
};
// find type vars within this return type annotation // find type vars within this return type annotation
let type_vars_within = let type_vars_within =
get_type_var_contained_in_type_annotation(&parsed_annotation); get_type_var_contained_in_type_annotation(&parsed_annotation);
// handle the class type var and the method type var // handle the class type var and the method type var
for type_var_within in type_vars_within { for type_var_within in type_vars_within {
if let TypeAnnotation::TypeVar(t) = type_var_within { if let TypeAnnotation::TypeVar(t) = type_var_within {
if !class_type_vars_def.contains(&t) { if !class_type_vars_def.contains(&t) {
return Err(format!( errors.insert(format!(
"class fields can only use type \ "class fields can only use type \
vars over which the class is generic (at {})", vars over which the class is generic (at {})",
annotation.location annotation.location
)); ));
continue
} }
} else { } else {
unreachable!("must be type var annotation"); unreachable!("must be type var annotation");
@ -1323,30 +1484,37 @@ impl TopLevelComposer {
} }
type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation); type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation);
} else { } else {
return Err(format!( errors.insert(format!(
"same class fields `{}` defined twice (at {})", "same class fields `{}` defined twice (at {})",
attr, target.location attr, target.location
)); ));
} }
} else { } else {
return Err(format!( errors.insert(format!(
"unsupported statement type in class definition body (at {})", "unsupported statement type in class definition body (at {})",
target.location target.location
)); ));
} }
} }
ast::StmtKind::Assign { .. } => {}, // we don't class attributes ast::StmtKind::Assign { .. } => {}, // we don't class attributes
ast::StmtKind::Pass { .. } => {} ast::StmtKind::Pass { .. } => {}
ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior ast::StmtKind::Expr { value: _, .. } => {} // typically a docstring; ignoring all expressions matches CPython behavior
_ => { _ => {
return Err(format!( errors.insert(format!(
"unsupported statement in class definition body (at {})", "unsupported statement in class definition body (at {})",
b.location b.location
)) ));
} }
} }
} }
Ok(())
if !errors.is_empty() {
Err(errors)
} else {
Ok(())
}
} }
fn analyze_single_class_ancestors( fn analyze_single_class_ancestors(
@ -1355,7 +1523,7 @@ impl TopLevelComposer {
unifier: &mut Unifier, unifier: &mut Unifier,
_primitives: &PrimitiveStore, _primitives: &PrimitiveStore,
type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>, type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>,
) -> Result<(), String> { ) -> Result<(), HashSet<String>> {
let ( let (
_class_id, _class_id,
class_ancestor_def, class_ancestor_def,
@ -1406,10 +1574,12 @@ impl TopLevelComposer {
type_var_to_concrete_def, type_var_to_concrete_def,
); );
if !ok { if !ok {
return Err(format!( return Err(HashSet::from([
"method {} has same name as ancestors' method, but incompatible type", format!(
class_method_name "method {} has same name as ancestors' method, but incompatible type",
)); class_method_name
)
]));
} }
// mark it as added // mark it as added
is_override.insert(*class_method_name); is_override.insert(*class_method_name);
@ -1444,10 +1614,12 @@ impl TopLevelComposer {
// find if there is a fields with the same name in the child class // find if there is a fields with the same name in the child class
for (class_field_name, ..) in class_fields_def.iter() { for (class_field_name, ..) in class_fields_def.iter() {
if class_field_name == anc_field_name { if class_field_name == anc_field_name {
return Err(format!( return Err(HashSet::from([
"field `{}` has already declared in the ancestor classes", format!(
class_field_name "field `{}` has already declared in the ancestor classes",
)); class_field_name
)
]));
} }
} }
new_child_fields.push(to_be_added); new_child_fields.push(to_be_added);
@ -1470,7 +1642,7 @@ impl TopLevelComposer {
} }
/// step 5, analyze and call type inferencer to fill the `instance_to_stmt` of topleveldef::function /// step 5, analyze and call type inferencer to fill the `instance_to_stmt` of topleveldef::function
fn analyze_function_instance(&mut self) -> Result<(), String> { fn analyze_function_instance(&mut self) -> Result<(), HashSet<String>> {
// first get the class constructor type correct for the following type check in function body // first get the class constructor type correct for the following type check in function body
// also do class field instantiation check // also do class field instantiation check
let init_str_id = "__init__".into(); let init_str_id = "__init__".into();
@ -1521,8 +1693,7 @@ impl TopLevelComposer {
object_id, object_id,
resolver: _, resolver: _,
.. ..
} = &*class_def } = &*class_def {
{
let self_type = get_type_from_type_annotation_kinds( let self_type = get_type_from_type_annotation_kinds(
&def_list, &def_list,
unifier, unifier,
@ -1642,7 +1813,7 @@ impl TopLevelComposer {
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n---------\n")); return Err(errors);
} }
for (i, signature, id) in constructors.into_iter() { for (i, signature, id) in constructors.into_iter() {
@ -1847,8 +2018,11 @@ impl TopLevelComposer {
.map(|b| inferencer.fold_stmt(b)) .map(|b| inferencer.fold_stmt(b))
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let returned = let (returned, errs) = inferencer.check_block(fun_body.as_slice(), &mut identifiers);
inferencer.check_block(fun_body.as_slice(), &mut identifiers)?; if !errs.is_empty() {
return Err(errs.into_iter().sorted().join("\n----------\n"))
}
{ {
// check virtuals // check virtuals
let defs = ctx.definitions.read(); let defs = ctx.definitions.read();
@ -1934,6 +2108,7 @@ impl TopLevelComposer {
} }
Ok(()) Ok(())
}; };
for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num) { for (id, (def, ast)) in self.definition_ast_list.iter().enumerate().skip(self.builtin_num) {
if ast.is_none() { if ast.is_none() {
continue; continue;
@ -1942,9 +2117,11 @@ impl TopLevelComposer {
errors.insert(e); errors.insert(e);
} }
} }
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors.into_iter().sorted().join("\n----------\n")); Err(errors)
} else {
Ok(())
} }
Ok(())
} }
} }

View File

@ -551,9 +551,9 @@ fn test_analyze(source: Vec<&str>, res: Vec<&str>) {
if let Err(msg) = composer.start_analysis(false) { if let Err(msg) = composer.start_analysis(false) {
if print { if print {
println!("{}", msg); println!("{}", msg.into_iter().sorted().join("\n----------\n"));
} else { } else {
assert_eq!(res[0], msg); assert_eq!(res[0], msg.into_iter().sorted().next().unwrap());
} }
} else { } else {
// skip 5 to skip primitives // skip 5 to skip primitives
@ -735,9 +735,9 @@ fn test_inference(source: Vec<&str>, res: Vec<&str>) {
if let Err(msg) = composer.start_analysis(true) { if let Err(msg) = composer.start_analysis(true) {
if print { if print {
println!("{}", msg); println!("{}", msg.into_iter().sorted().join("\n----------\n"));
} else { } else {
assert_eq!(res[0], msg); assert_eq!(res[0], msg.into_iter().sorted().next().unwrap());
} }
} else { } else {
// skip 5 to skip primitives // skip 5 to skip primitives

View File

@ -18,40 +18,68 @@ impl<'a> Inferencer<'a> {
&mut self, &mut self,
pattern: &Expr<Option<Type>>, pattern: &Expr<Option<Type>>,
defined_identifiers: &mut HashSet<StrRef>, defined_identifiers: &mut HashSet<StrRef>,
) -> Result<(), String> { ) -> Result<(), HashSet<String>> {
let mut errors = HashSet::new();
match &pattern.node { match &pattern.node {
ast::ExprKind::Name { id, .. } if id == &"none".into() => ExprKind::Name { id, .. } if id == &"none".into() => {
Err(format!("cannot assign to a `none` (at {})", pattern.location)), errors.insert(format!("cannot assign to a `none` (at {})", pattern.location));
}
ExprKind::Name { id, .. } => { ExprKind::Name { id, .. } => {
if !defined_identifiers.contains(id) { if !defined_identifiers.contains(id) {
defined_identifiers.insert(*id); defined_identifiers.insert(*id);
} }
self.should_have_value(pattern)?;
Ok(()) if let Err(e) = self.should_have_value(pattern) {
errors.insert(e);
}
} }
ExprKind::Tuple { elts, .. } => { ExprKind::Tuple { elts, .. } => {
for elt in elts.iter() { for elt in elts.iter() {
self.check_pattern(elt, defined_identifiers)?; if let Err(e) = self.check_pattern(elt, defined_identifiers) {
self.should_have_value(elt)?; errors.extend(e);
}
if let Err(e) = self.should_have_value(elt) {
errors.insert(e);
}
} }
Ok(())
} }
ExprKind::Subscript { value, slice, .. } => { ExprKind::Subscript { value, slice, .. } => {
self.check_expr(value, defined_identifiers)?; if let Err(e) = self.check_expr(value, defined_identifiers) {
self.should_have_value(value)?; errors.extend(e);
self.check_expr(slice, defined_identifiers)?; }
if let Err(e) = self.should_have_value(value) {
errors.insert(e);
}
if let Err(e) = self.check_expr(slice, defined_identifiers) {
errors.extend(e);
}
if let TypeEnum::TTuple { .. } = &*self.unifier.get_ty(value.custom.unwrap()) { if let TypeEnum::TTuple { .. } = &*self.unifier.get_ty(value.custom.unwrap()) {
return Err(format!( errors.insert(format!(
"Error at {}: cannot assign to tuple element", "Error at {}: cannot assign to tuple element",
value.location value.location
)); ));
} }
Ok(())
} }
ExprKind::Constant { .. } => { ExprKind::Constant { .. } => {
Err(format!("cannot assign to a constant (at {})", pattern.location)) errors.insert(format!("cannot assign to a constant (at {})", pattern.location));
} }
_ => self.check_expr(pattern, defined_identifiers),
_ => {
if let Err(e) = self.check_expr(pattern, defined_identifiers) {
errors.extend(e);
}
}
}
return if !errors.is_empty() {
Err(errors)
} else {
Ok(())
} }
} }
@ -59,59 +87,84 @@ impl<'a> Inferencer<'a> {
&mut self, &mut self,
expr: &Expr<Option<Type>>, expr: &Expr<Option<Type>>,
defined_identifiers: &mut HashSet<StrRef>, defined_identifiers: &mut HashSet<StrRef>,
) -> Result<(), String> { ) -> Result<(), HashSet<String>> {
let mut errors = HashSet::new();
// there are some cases where the custom field is None // there are some cases where the custom field is None
if let Some(ty) = &expr.custom { if let Some(ty) = &expr.custom {
if !self.unifier.is_concrete(*ty, &self.function_data.bound_variables) { if !self.unifier.is_concrete(*ty, &self.function_data.bound_variables) {
return Err(format!( errors.insert(format!(
"expected concrete type at {} but got {}", "expected concrete type at {} but got {}",
expr.location, expr.location,
self.unifier.get_ty(*ty).get_type_name() self.unifier.get_ty(*ty).get_type_name()
)); ));
} }
} }
match &expr.node { match &expr.node {
ExprKind::Name { id, .. } => { ExprKind::Name { id, .. } => {
if id == &"none".into() { if id != &"none".into() {
return Ok(()); if let Err(e) = self.should_have_value(expr) {
} errors.insert(e);
self.should_have_value(expr)?; }
if !defined_identifiers.contains(id) {
match self.function_data.resolver.get_symbol_type( if !defined_identifiers.contains(id) {
self.unifier, match self.function_data.resolver.get_symbol_type(
&self.top_level.definitions.read(), self.unifier,
self.primitives, &self.top_level.definitions.read(),
*id, self.primitives,
) { *id,
Ok(_) => { ) {
self.defined_identifiers.insert(*id); Ok(_) => {
} self.defined_identifiers.insert(*id);
Err(e) => { }
return Err(format!(
"type error at identifier `{}` ({}) at {}", Err(e) => {
id, e, expr.location errors.insert(format!(
)); "type error at identifier `{}` ({}) at {}",
id, e, expr.location
));
}
} }
} }
} }
} }
ExprKind::List { elts, .. } ExprKind::List { elts, .. }
| ExprKind::Tuple { elts, .. } | ExprKind::Tuple { elts, .. }
| ExprKind::BoolOp { values: elts, .. } => { | ExprKind::BoolOp { values: elts, .. } => {
for elt in elts.iter() { for elt in elts.iter() {
self.check_expr(elt, defined_identifiers)?; if let Err(errs) = self.check_expr(elt, defined_identifiers) {
self.should_have_value(elt)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(elt) {
errors.insert(e);
}
} }
} }
ExprKind::Attribute { value, .. } => { ExprKind::Attribute { value, .. } => {
self.check_expr(value, defined_identifiers)?; if let Err(errs) = self.check_expr(value, defined_identifiers) {
self.should_have_value(value)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(value) {
errors.insert(e);
}
} }
ExprKind::BinOp { left, op, right } => {
self.check_expr(left, defined_identifiers)?; ExprKind::BinOp { left, op, right, .. } => {
self.check_expr(right, defined_identifiers)?; if let Err(errs) = self.check_expr(left, defined_identifiers) {
self.should_have_value(left)?; errors.extend(errs);
self.should_have_value(right)?; }
if let Err(errs) = self.check_expr(right, defined_identifiers) {
errors.extend(errs);
}
if let Err(e) = self.should_have_value(left) {
errors.insert(e);
}
if let Err(e) = self.should_have_value(right) {
errors.insert(e);
}
// Check whether a bitwise shift has a negative RHS constant value // Check whether a bitwise shift has a negative RHS constant value
if *op == LShift || *op == RShift { if *op == LShift || *op == RShift {
@ -122,7 +175,7 @@ impl<'a> Inferencer<'a> {
}; };
if *rhs_val < 0 { if *rhs_val < 0 {
return Err(format!( errors.insert(format!(
"shift count is negative at {}", "shift count is negative at {}",
right.location right.location
)); ));
@ -130,32 +183,62 @@ impl<'a> Inferencer<'a> {
} }
} }
} }
ExprKind::UnaryOp { operand, .. } => { ExprKind::UnaryOp { operand, .. } => {
self.check_expr(operand, defined_identifiers)?; if let Err(errs) = self.check_expr(operand, defined_identifiers) {
self.should_have_value(operand)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(operand) {
errors.insert(e);
}
} }
ExprKind::Compare { left, comparators, .. } => { ExprKind::Compare { left, comparators, .. } => {
for elt in once(left.as_ref()).chain(comparators.iter()) { for elt in once(left.as_ref()).chain(comparators.iter()) {
self.check_expr(elt, defined_identifiers)?; if let Err(errs) = self.check_expr(elt, defined_identifiers) {
self.should_have_value(elt)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(elt) {
errors.insert(e);
}
} }
} }
ExprKind::Subscript { value, slice, .. } => { ExprKind::Subscript { value, slice, .. } => {
self.should_have_value(value)?; if let Err(e) = self.should_have_value(value) {
self.check_expr(value, defined_identifiers)?; errors.insert(e);
self.check_expr(slice, defined_identifiers)?; }
if let Err(errs) = self.check_expr(value, defined_identifiers) {
errors.extend(errs);
}
if let Err(errs) = self.check_expr(slice, defined_identifiers) {
errors.extend(errs);
}
} }
ExprKind::IfExp { test, body, orelse } => { ExprKind::IfExp { test, body, orelse } => {
self.check_expr(test, defined_identifiers)?; if let Err(errs) = self.check_expr(test, defined_identifiers) {
self.check_expr(body, defined_identifiers)?; errors.extend(errs);
self.check_expr(orelse, defined_identifiers)?; }
if let Err(errs) = self.check_expr(body, defined_identifiers) {
errors.extend(errs);
}
if let Err(errs) = self.check_expr(orelse, defined_identifiers) {
errors.extend(errs);
}
} }
ExprKind::Slice { lower, upper, step } => { ExprKind::Slice { lower, upper, step } => {
for elt in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() { for elt in [lower.as_ref(), upper.as_ref(), step.as_ref()].iter().flatten() {
self.should_have_value(elt)?; if let Err(e) = self.should_have_value(elt) {
self.check_expr(elt, defined_identifiers)?; errors.insert(e);
}
if let Err(errs) = self.check_expr(elt, defined_identifiers) {
errors.extend(errs);
}
} }
} }
ExprKind::Lambda { args, body } => { ExprKind::Lambda { args, body } => {
let mut defined_identifiers = defined_identifiers.clone(); let mut defined_identifiers = defined_identifiers.clone();
for arg in args.args.iter() { for arg in args.args.iter() {
@ -164,160 +247,286 @@ impl<'a> Inferencer<'a> {
defined_identifiers.insert(arg.node.arg); defined_identifiers.insert(arg.node.arg);
} }
} }
self.check_expr(body, &mut defined_identifiers)?;
if let Err(errs) = self.check_expr(body, &mut defined_identifiers) {
errors.extend(errs);
}
} }
ExprKind::ListComp { elt, generators, .. } => { ExprKind::ListComp { elt, generators, .. } => {
// in our type inference stage, we already make sure that there is only 1 generator // in our type inference stage, we already make sure that there is only 1 generator
let ast::Comprehension { target, iter, ifs, .. } = &generators[0]; let ast::Comprehension { target, iter, ifs, .. } = &generators[0];
self.check_expr(iter, defined_identifiers)?; if let Err(errs) = self.check_expr(iter, defined_identifiers) {
self.should_have_value(iter)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(iter) {
errors.insert(e);
}
let mut defined_identifiers = defined_identifiers.clone(); let mut defined_identifiers = defined_identifiers.clone();
self.check_pattern(target, &mut defined_identifiers)?; if let Err(errs) = self.check_pattern(target, &mut defined_identifiers) {
self.should_have_value(target)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(target) {
errors.insert(e);
}
for term in once(elt.as_ref()).chain(ifs.iter()) { for term in once(elt.as_ref()).chain(ifs.iter()) {
self.check_expr(term, &mut defined_identifiers)?; if let Err(errs) = self.check_expr(term, &mut defined_identifiers) {
self.should_have_value(term)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(term) {
errors.insert(e);
}
} }
} }
ExprKind::Call { func, args, keywords } => { ExprKind::Call { func, args, keywords } => {
for expr in once(func.as_ref()) for expr in once(func.as_ref())
.chain(args.iter()) .chain(args.iter())
.chain(keywords.iter().map(|v| v.node.value.as_ref())) .chain(keywords.iter().map(|v| v.node.value.as_ref()))
{ {
self.check_expr(expr, defined_identifiers)?; if let Err(errs) = self.check_expr(expr, defined_identifiers) {
self.should_have_value(expr)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(expr) {
errors.insert(e);
}
} }
} }
ExprKind::Constant { .. } => {} ExprKind::Constant { .. } => {}
_ => { _ => {
unimplemented!() unimplemented!()
} }
} }
Ok(())
return if !errors.is_empty() {
Err(errors)
} else {
Ok(())
}
} }
// check statements for proper identifier def-use and return on all paths /// Check statements for proper identifier def-use and return on all paths.
///
/// Returns a tuple containing a `bool` representing whether the statement returns on all paths,
/// and a [HashSet] of all collected errors.
fn check_stmt( fn check_stmt(
&mut self, &mut self,
stmt: &Stmt<Option<Type>>, stmt: &Stmt<Option<Type>>,
defined_identifiers: &mut HashSet<StrRef>, defined_identifiers: &mut HashSet<StrRef>,
) -> Result<bool, String> { ) -> (bool, HashSet<String>) {
match &stmt.node { let mut errors = HashSet::new();
StmtKind::For { target, iter, body, orelse, .. } => { let stmt_returns = match &stmt.node {
self.check_expr(iter, defined_identifiers)?; StmtKind::For {
self.should_have_value(iter)?; target,
iter,
body,
orelse,
..
} => {
if let Err(errs) = self.check_expr(iter, defined_identifiers) {
errors.extend(errs);
}
if let Err(e) = self.should_have_value(iter) {
errors.insert(e);
}
let mut local_defined_identifiers = defined_identifiers.clone(); let mut local_defined_identifiers = defined_identifiers.clone();
for stmt in orelse.iter() { for stmt in orelse.iter() {
self.check_stmt(stmt, &mut local_defined_identifiers)?; let (_, errs) = self.check_stmt(stmt, &mut local_defined_identifiers);
errors.extend(errs);
} }
let mut local_defined_identifiers = defined_identifiers.clone(); let mut local_defined_identifiers = defined_identifiers.clone();
self.check_pattern(target, &mut local_defined_identifiers)?; if let Err(errs) = self.check_pattern(target, &mut local_defined_identifiers) {
self.should_have_value(target)?; errors.extend(errs);
for stmt in body.iter() {
self.check_stmt(stmt, &mut local_defined_identifiers)?;
} }
Ok(false) if let Err(e) = self.should_have_value(target) {
errors.insert(e);
}
for stmt in body.iter() {
let (_, errs) = self.check_stmt(stmt, &mut local_defined_identifiers);
errors.extend(errs);
}
false
} }
StmtKind::If { test, body, orelse, .. } => { StmtKind::If { test, body, orelse, .. } => {
self.check_expr(test, defined_identifiers)?; if let Err(errs) = self.check_expr(test, defined_identifiers) {
self.should_have_value(test)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(test) {
errors.insert(e);
}
let mut body_identifiers = defined_identifiers.clone(); let mut body_identifiers = defined_identifiers.clone();
let mut orelse_identifiers = defined_identifiers.clone(); let mut orelse_identifiers = defined_identifiers.clone();
let body_returned = self.check_block(body, &mut body_identifiers)?; let (body_returned, errs) = self.check_block(body, &mut body_identifiers);
let orelse_returned = self.check_block(orelse, &mut orelse_identifiers)?; errors.extend(errs);
let (orelse_returned, errs) = self.check_block(orelse, &mut orelse_identifiers);
errors.extend(errs);
for ident in body_identifiers.iter() { for ident in body_identifiers.iter() {
if !defined_identifiers.contains(ident) && orelse_identifiers.contains(ident) { if !defined_identifiers.contains(ident) && orelse_identifiers.contains(ident) {
defined_identifiers.insert(*ident); defined_identifiers.insert(*ident);
} }
} }
Ok(body_returned && orelse_returned)
body_returned && orelse_returned
} }
StmtKind::While { test, body, orelse, .. } => { StmtKind::While { test, body, orelse, .. } => {
self.check_expr(test, defined_identifiers)?; if let Err(errs) = self.check_expr(test, defined_identifiers) {
self.should_have_value(test)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(test) {
errors.insert(e);
}
let mut defined_identifiers = defined_identifiers.clone(); let mut defined_identifiers = defined_identifiers.clone();
self.check_block(body, &mut defined_identifiers)?; let (_, errs) = self.check_block(body, &mut defined_identifiers);
self.check_block(orelse, &mut defined_identifiers)?; errors.extend(errs);
Ok(false) let (_, errs) = self.check_block(orelse, &mut defined_identifiers);
errors.extend(errs);
false
} }
StmtKind::With { items, body, .. } => { StmtKind::With { items, body, .. } => {
let mut new_defined_identifiers = defined_identifiers.clone(); let mut new_defined_identifiers = defined_identifiers.clone();
for item in items.iter() { for item in items.iter() {
self.check_expr(&item.context_expr, defined_identifiers)?; if let Err(errs) = self.check_expr(&item.context_expr, defined_identifiers) {
errors.extend(errs);
}
if let Some(var) = item.optional_vars.as_ref() { if let Some(var) = item.optional_vars.as_ref() {
self.check_pattern(var, &mut new_defined_identifiers)?; if let Err(errs) = self.check_pattern(var, &mut new_defined_identifiers) {
errors.extend(errs);
}
} }
} }
self.check_block(body, &mut new_defined_identifiers)?;
Ok(false) let (_, errs) = self.check_block(body, &mut new_defined_identifiers);
errors.extend(errs);
false
} }
StmtKind::Try { body, handlers, orelse, finalbody, .. } => { StmtKind::Try { body, handlers, orelse, finalbody, .. } => {
self.check_block(body, &mut defined_identifiers.clone())?; let (_, errs) = self.check_block(body, &mut defined_identifiers.clone());
self.check_block(orelse, &mut defined_identifiers.clone())?; errors.extend(errs);
let (_, errs) = self.check_block(orelse, &mut defined_identifiers.clone());
errors.extend(errs);
for handler in handlers.iter() { for handler in handlers.iter() {
let mut defined_identifiers = defined_identifiers.clone(); let mut defined_identifiers = defined_identifiers.clone();
let ast::ExcepthandlerKind::ExceptHandler { name, body, .. } = &handler.node; let ast::ExcepthandlerKind::ExceptHandler { name, body, .. } = &handler.node;
if let Some(name) = name { if let Some(name) = name {
defined_identifiers.insert(*name); defined_identifiers.insert(*name);
} }
self.check_block(body, &mut defined_identifiers)?;
let (_, errs) = self.check_block(body, &mut defined_identifiers);
errors.extend(errs);
} }
self.check_block(finalbody, defined_identifiers)?;
Ok(false) let (_, errs) = self.check_block(finalbody, defined_identifiers);
errors.extend(errs);
false
} }
StmtKind::Expr { value, .. } => { StmtKind::Expr { value, .. } => {
self.check_expr(value, defined_identifiers)?; if let Err(e) = self.check_expr(value, defined_identifiers) {
Ok(false) errors.extend(e);
}
StmtKind::Assign { targets, value, .. } => {
self.check_expr(value, defined_identifiers)?;
self.should_have_value(value)?;
for target in targets {
self.check_pattern(target, defined_identifiers)?;
} }
Ok(false)
false
} }
StmtKind::Assign { targets, value, .. } => {
if let Err(errs) = self.check_expr(value, defined_identifiers) {
errors.extend(errs);
}
if let Err(e) = self.should_have_value(value) {
errors.insert(e);
}
for target in targets {
if let Err(errs) = self.check_pattern(target, defined_identifiers) {
errors.extend(errs);
}
}
false
}
StmtKind::AnnAssign { target, value, .. } => { StmtKind::AnnAssign { target, value, .. } => {
if let Some(value) = value { if let Some(value) = value {
self.check_expr(value, defined_identifiers)?; if let Err(errs) = self.check_expr(value, defined_identifiers) {
self.should_have_value(value)?; errors.extend(errs);
self.check_pattern(target, defined_identifiers)?; }
if let Err(e) = self.should_have_value(value) {
errors.insert(e);
}
if let Err(errs) = self.check_pattern(target, defined_identifiers) {
errors.extend(errs);
}
} }
Ok(false)
false
} }
StmtKind::Return { value, .. } => { StmtKind::Return { value, .. } => {
if let Some(value) = value { if let Some(value) = value {
self.check_expr(value, defined_identifiers)?; if let Err(errs) = self.check_expr(value, defined_identifiers) {
self.should_have_value(value)?; errors.extend(errs);
}
if let Err(e) = self.should_have_value(value) {
errors.insert(e);
}
} }
Ok(true)
true
} }
StmtKind::Raise { exc, .. } => { StmtKind::Raise { exc, .. } => {
if let Some(value) = exc { if let Some(value) = exc {
self.check_expr(value, defined_identifiers)?; if let Err(errs) = self.check_expr(value, defined_identifiers) {
errors.extend(errs);
}
} }
Ok(true)
true
} }
// break, raise, etc. // break, raise, etc.
_ => Ok(false), _ => false,
} };
(stmt_returns, errors)
} }
pub fn check_block( pub fn check_block(
&mut self, &mut self,
block: &[Stmt<Option<Type>>], block: &[Stmt<Option<Type>>],
defined_identifiers: &mut HashSet<StrRef>, defined_identifiers: &mut HashSet<StrRef>,
) -> Result<bool, String> { ) -> (bool, HashSet<String>) {
let mut errors = HashSet::new();
let mut ret = false; let mut ret = false;
for stmt in block { for stmt in block {
if ret { if ret {
println!("warning: dead code at {:?}\n", stmt.location) println!("warning: dead code at {:?}\n", stmt.location)
} }
if self.check_stmt(stmt, defined_identifiers)? {
ret = true; let (stmt_rets, stmt_errs) = self.check_stmt(stmt, defined_identifiers);
} ret |= stmt_rets;
errors.extend(stmt_errs);
} }
Ok(ret)
(ret, errors)
} }
} }
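For context, a minimal sketch (not part of this commit) of how a caller might consume the new (bool, HashSet<String>) return of check_block, turning the collected messages into one deterministic report. The helper name report_check_result and the "----------" separator are illustrative assumptions, not code from the repository:

use std::collections::HashSet;

// Sketch only: `check_result` stands in for the value returned by the reworked
// `check_block`, which now yields (bool, HashSet<String>) instead of
// Result<bool, String>.
fn report_check_result(check_result: (bool, HashSet<String>)) -> Result<bool, String> {
    let (block_returns, errors) = check_result;
    if errors.is_empty() {
        return Ok(block_returns);
    }
    // Sort the messages so the aggregated report is deterministic, since
    // HashSet iteration order is unspecified.
    let mut messages: Vec<String> = errors.into_iter().collect();
    messages.sort();
    Err(messages.join("\n----------\n"))
}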
View File
@@ -543,7 +543,8 @@ fn test_basic(source: &str, mapping: HashMap<&str, &str>, virtuals: &[(&str, &st
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.unwrap(); .unwrap();
inferencer.check_block(&statements, &mut defined_identifiers).unwrap(); let (_, errs) = inferencer.check_block(&statements, &mut defined_identifiers);
assert_eq!(errs.is_empty(), true);
for (k, v) in inferencer.variable_mapping.iter() { for (k, v) in inferencer.variable_mapping.iter() {
let name = inferencer.unifier.internal_stringify( let name = inferencer.unifier.internal_stringify(
@@ -689,7 +690,8 @@ fn test_primitive_magic_methods(source: &str, mapping: HashMap<&str, &str>) {
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.unwrap(); .unwrap();
inferencer.check_block(&statements, &mut defined_identifiers).unwrap(); let (_, errs) = inferencer.check_block(&statements, &mut defined_identifiers);
assert_eq!(errs.is_empty(), true);
for (k, v) in inferencer.variable_mapping.iter() { for (k, v) in inferencer.variable_mapping.iter() {
let name = inferencer.unifier.internal_stringify( let name = inferencer.unifier.internal_stringify(
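One practical consequence of collecting the messages in a HashSet<String> rather than a Vec<String> is deduplication: the same message produced along different code paths is stored only once. A tiny self-contained illustration (not taken from the commit; the messages are hypothetical):

use std::collections::HashSet;

fn main() {
    let mut errors: HashSet<String> = HashSet::new();
    // The same message reported from two different statements...
    errors.extend(["undefined identifier `a`".to_string()]);
    errors.extend([
        "undefined identifier `a`".to_string(),
        "undefined identifier `b`".to_string(),
    ]);
    // ...is kept only once, so the final report has two distinct entries.
    assert_eq!(errors.len(), 2);
}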