diff --git a/flake.nix b/flake.nix index 07cea776..7987b259 100644 --- a/flake.nix +++ b/flake.nix @@ -180,7 +180,9 @@ clippy pre-commit rustfmt + rust-analyzer ]; + RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}"; shellHook = '' export DEMO_LINALG_STUB=${packages.x86_64-linux.demo-linalg-stub}/lib/liblinalg.a diff --git a/nac3artiq/demo/dataset_db.mdb b/nac3artiq/demo/dataset_db.mdb new file mode 100644 index 00000000..5511ad47 Binary files /dev/null and b/nac3artiq/demo/dataset_db.mdb differ diff --git a/nac3artiq/demo/dataset_db.mdb-lock b/nac3artiq/demo/dataset_db.mdb-lock new file mode 100644 index 00000000..82238114 Binary files /dev/null and b/nac3artiq/demo/dataset_db.mdb-lock differ diff --git a/nac3artiq/demo/demo.py b/nac3artiq/demo/demo.py index aa135757..8e4251b6 100644 --- a/nac3artiq/demo/demo.py +++ b/nac3artiq/demo/demo.py @@ -1,26 +1,87 @@ from min_artiq import * +from numpy import int32 +# @nac3 +# class A: +# a: int32 +# core: KernelInvariant[Core] + +# def __init__(self, a: int32): +# self.core = Core() +# self.a = a + +# @kernel +# def output_all_fields(self): +# #print(self.a) +# pass + +# @kernel +# def set_a(self, a: int32): +# self.a = a + +# @nac3 +# class B(A): +# b: int32 + +# def __init__(self, b: int32): +# # A.__init__(self, b + 1) +# self.core = Core() +# self.a = b +# self.b = b +# self.set_b(b) + +# @kernel +# def output_parent_fields(self): +# # A.output_all_fields(self) +# pass + +# @kernel +# def output_all_fields(self): +# # A.output_all_fields(self) +# pass +# #print(self.b) + +# @kernel +# def set_b(self, b: int32): +# self.b = b + @nac3 -class Demo: +class C: + c: Kernel[int32] + a: Kernel[int32] + b: Kernel[int32] core: KernelInvariant[Core] - led0: KernelInvariant[TTLOut] - led1: KernelInvariant[TTLOut] - def __init__(self): + def __init__(self, c: int32): + # B.__init__(self, c + 1) self.core = Core() - self.led0 = TTLOut(self.core, 18) - self.led1 = TTLOut(self.core, 19) + self.a = c + self.b = c + self.c = c + + @kernel + def output_parent_fields(self): + # B.output_all_fields(self) + pass @kernel - def run(self): - self.core.reset() - while True: - with parallel: - self.led0.pulse(100.*ms) - self.led1.pulse(100.*ms) - self.core.delay(100.*ms) + def output_all_fields(self): + # B.output_all_fields(self) + #print(self.c) + pass + @kernel + def set_c(self, c: int32): + self.c = c + + @kernel + def run(self): + self.output_all_fields() + # self.set_a(1) + # self.set_b(2) + self.set_c(3) + self.output_all_fields() if __name__ == "__main__": - Demo().run() + C(10).run() diff --git a/nac3artiq/demo/module.elf b/nac3artiq/demo/module.elf new file mode 100644 index 00000000..41377d06 Binary files /dev/null and b/nac3artiq/demo/module.elf differ diff --git a/nac3core/src/symbol_resolver.rs b/nac3core/src/symbol_resolver.rs index 9d7084b9..a6095b63 100644 --- a/nac3core/src/symbol_resolver.rs +++ b/nac3core/src/symbol_resolver.rs @@ -439,6 +439,9 @@ pub fn parse_type_annotation( } else { let obj_id = resolver.get_identifier_def(*id); if let Ok(obj_id) = obj_id { + // let Some(top_level_def) = top_level_defs.get(obj_id.0) else { + // return Err( HashSet::from([format!("Name Error undefined name {id} (at {})", expr.location),])); + // }; let def = top_level_defs[obj_id.0].read(); if let TopLevelDef::Class { fields, methods, type_vars, .. 
} = &*def { if !type_vars.is_empty() { diff --git a/nac3core/src/toplevel/composer.rs b/nac3core/src/toplevel/composer.rs index 603a508e..c04805ac 100644 --- a/nac3core/src/toplevel/composer.rs +++ b/nac3core/src/toplevel/composer.rs @@ -1,3 +1,4 @@ +use indexmap::IndexMap; use nac3parser::ast::fold::Fold; use std::rc::Rc; @@ -5,7 +6,7 @@ use crate::{ codegen::{expr::get_subst_key, stmt::exn_constructor}, symbol_resolver::SymbolValue, typecheck::{ - type_inferencer::{FunctionData, Inferencer}, + type_inferencer::{report_error, FunctionData, Inferencer}, typedef::{TypeVar, VarMap}, }, }; @@ -388,10 +389,30 @@ impl TopLevelComposer { } } + /// Analyze the AST and generate relevant `TopLevelDef` + /// + /// Mainly divided into two categories: + /// 1. Class Definition + /// i. Parse class bases + /// ii. Parse ancestors + /// iii. Parse methods and dunction pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet> { - self.analyze_top_level_class_type_var()?; - self.analyze_top_level_class_bases()?; - self.analyze_top_level_class_fields_methods()?; + let unifier = self.unifier.borrow_mut(); + let primitives_store = &self.primitives_ty; + let def_list = &self.definition_ast_list; + + // Step 1. Analyze class definitions + Self::analyze_top_level_class_definition( + def_list, + unifier, + primitives_store, + self.builtin_num, + (&self.keyword_list, &self.core_config), + )?; + + // self.analyze_top_level_class_type_var()?; + // self.analyze_top_level_class_bases()?; + // self.analyze_top_level_class_fields_methods()?; self.analyze_top_level_function()?; if inference { self.analyze_function_instance()?; @@ -399,393 +420,403 @@ impl TopLevelComposer { Ok(()) } - /// step 1, analyze the type vars associated with top level class - fn analyze_top_level_class_type_var(&mut self) -> Result<(), HashSet> { - let def_list = &self.definition_ast_list; - let temp_def_list = self.extract_def_list(); - let unifier = self.unifier.borrow_mut(); - let primitives_store = &self.primitives_ty; - - let mut analyze = |class_def: &Arc>, class_ast: &Option| { - // only deal with class def here - let mut class_def = class_def.write(); - let (class_bases_ast, class_def_type_vars, class_resolver) = { - if let TopLevelDef::Class { type_vars, resolver, .. } = &mut *class_def { - let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) = - class_ast - else { - unreachable!() - }; - - (bases, type_vars, resolver) - } else { - return Ok(()); - } + fn analyze_bases( + class_def: &Arc>, + class_ast: &Option, + temp_def_list:&[Arc>], + unifier: &mut Unifier, + primitives_store: &PrimitiveStore, + ) -> Result<(), HashSet> { + let mut class_def = class_def.write(); + let (class_def_id, class_ancestors, class_bases_ast, class_type_vars, class_resolver) = { + let TopLevelDef::Class { object_id, ancestors, type_vars, resolver, .. } = + &mut *class_def + else { + unreachable!() }; - let class_resolver = class_resolver.as_ref().unwrap(); - let class_resolver = &**class_resolver; - - let mut is_generic = false; - for b in class_bases_ast { - match &b.node { - // analyze typevars bounded to the class, - // only support things like `class A(Generic[T, V])`, - // things like `class A(Generic[T, V, ImportedModule.T])` is not supported - // i.e. only simple names are allowed in the subscript - // should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params - ast::ExprKind::Subscript { value, slice, .. } - if { - matches!( - &value.node, - ast::ExprKind::Name { id, .. 
} if id == &"Generic".into() - ) - } => - { - if is_generic { - return Err(HashSet::from([format!( - "only single Generic[...] is allowed (at {})", - b.location - )])); - } - is_generic = true; - - let type_var_list: Vec<&ast::Expr<()>>; - // if `class A(Generic[T, V, G])` - if let ast::ExprKind::Tuple { elts, .. } = &slice.node { - type_var_list = elts.iter().collect_vec(); - // `class A(Generic[T])` - } else { - type_var_list = vec![&**slice]; - } - - // parse the type vars - let type_vars = type_var_list - .into_iter() - .map(|e| { - class_resolver.parse_type_annotation( - &temp_def_list, - unifier, - primitives_store, - e, - ) - }) - .collect::, _>>()?; - - // check if all are unique type vars - let all_unique_type_var = { - let mut occurred_type_var_id: HashSet = HashSet::new(); - type_vars.iter().all(|x| { - let ty = unifier.get_ty(*x); - if let TypeEnum::TVar { id, .. } = ty.as_ref() { - occurred_type_var_id.insert(*id) - } else { - false - } - }) - }; - if !all_unique_type_var { - return Err(HashSet::from([format!( - "duplicate type variable occurs (at {})", - slice.location - )])); - } - - // add to TopLevelDef - class_def_type_vars.extend(type_vars); - } - - // if others, do nothing in this function - _ => continue, - } - } - Ok(()) + let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) = class_ast + else { + unreachable!() + }; + (object_id, ancestors, bases, type_vars, resolver.as_ref().unwrap().as_ref()) }; - let mut errors = HashSet::new(); - for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) { - if class_ast.is_none() { - continue; - } - if let Err(e) = analyze(class_def, class_ast) { - errors.extend(e); - } - } - if !errors.is_empty() { - return Err(errors); - } - Ok(()) - } - /// step 2, base classes. - /// now that the type vars of all classes are done, handle base classes and - /// put Self class into the ancestors list. We only allow single inheritance - fn analyze_top_level_class_bases(&mut self) -> Result<(), HashSet> { - if self.unifier.top_level.is_none() { - let ctx = Arc::new(self.make_top_level_context()); - self.unifier.top_level = Some(ctx); - } + let mut is_generic = false; + let mut has_base = false; + // Check class bases for typevars + for b in class_bases_ast { + match &b.node { + // analyze typevars bounded to the class, + // only support things like `class A(Generic[T, V])`, + // things like `class A(Generic[T, V, ImportedModule.T])` is not supported + // i.e. only simple names are allowed in the subscript + // should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params + ast::ExprKind::Subscript { value, slice, .. } if matches!(&value.node, ast::ExprKind::Name { id, .. } if id == &"Generic".into()) => + { + if is_generic { + return report_error("only single Generic[...] is allowed", b.location); + } + is_generic = true; - let temp_def_list = self.extract_def_list(); - let unifier = self.unifier.borrow_mut(); - let primitive_types = self.primitives_ty; - - let mut get_direct_parents = - |class_def: &Arc>, class_ast: &Option| { - let mut class_def = class_def.write(); - let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = { - if let TopLevelDef::Class { - ancestors, resolver, object_id, type_vars, .. - } = &mut *class_def - { - let Some(ast::Located { - node: ast::StmtKind::ClassDef { bases, .. }, .. 
- }) = class_ast - else { - unreachable!() - }; - - (object_id, bases, ancestors, resolver, type_vars) + let type_var_list: Vec<&ast::Expr<()>>; + // if `class A(Generic[T, V, G])` + if let ast::ExprKind::Tuple { elts, .. } = &slice.node { + type_var_list = elts.iter().collect_vec(); + // `class A(Generic[T])` } else { - return Ok(()); + type_var_list = vec![&**slice]; } - }; - let class_resolver = class_resolver.as_ref().unwrap(); - let class_resolver = &**class_resolver; - let mut has_base = false; - for b in class_bases { - // type vars have already been handled, so skip on `Generic[...]` - if matches!( - &b.node, - ast::ExprKind::Subscript { value, .. } - if matches!( - &value.node, - ast::ExprKind::Name { id, .. } if id == &"Generic".into() + let type_vars = type_var_list + .into_iter() + .map(|e| { + class_resolver.parse_type_annotation( + &temp_def_list, + unifier, + primitives_store, + e, ) - ) { - continue; - } + }) + .collect::, _>>()?; + class_type_vars.extend(type_vars); + } + ast::ExprKind::Name { .. } | ast::ExprKind::Subscript { .. } => { if has_base { - return Err(HashSet::from([format!( - "a class definition can only have at most one base class \ - declaration and one generic declaration (at {})", - b.location - )])); + return report_error("a class definition can only have at most one base class declaration and one generic declaration", b.location); } has_base = true; - // the function parse_ast_to make sure that no type var occurred in // bast_ty if it is a CustomClassKind let base_ty = parse_ast_to_type_annotation_kinds( class_resolver, - &temp_def_list, + temp_def_list, unifier, - &primitive_types, + primitives_store, b, vec![(*class_def_id, class_type_vars.clone())] .into_iter() .collect::>(), )?; - if let TypeAnnotation::CustomClass { .. } = &base_ty { class_ancestors.push(base_ty); } else { - return Err(HashSet::from([format!( - "class base declaration can only be custom class (at {})", + return report_error( + "class base declaration can only be custom class", b.location, - )])); + ); } } - Ok(()) - }; - - // first, only push direct parent into the list - let mut errors = HashSet::new(); - for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) { - if class_ast.is_none() { - continue; - } - if let Err(e) = get_direct_parents(class_def, class_ast) { - errors.extend(e); - } - } - if !errors.is_empty() { - return Err(errors); - } - - // second, get all ancestors - let mut ancestors_store: HashMap> = HashMap::default(); - let mut get_all_ancestors = - |class_def: &Arc>| -> Result<(), HashSet> { - let class_def = class_def.read(); - let (class_ancestors, class_id) = { - if let TopLevelDef::Class { ancestors, object_id, .. } = &*class_def { - (ancestors, *object_id) - } else { - return Ok(()); - } - }; - ancestors_store.insert( - class_id, - // if class has direct parents, get all ancestors of its parents. Else just empty - if class_ancestors.is_empty() { - vec![] - } else { - Self::get_all_ancestors_helper( - &class_ancestors[0], - temp_def_list.as_slice(), - )? 
- }, - ); - Ok(()) - }; - for (class_def, ast) in self.definition_ast_list.iter().skip(self.builtin_num) { - if ast.is_none() { - continue; - } - if let Err(e) = get_all_ancestors(class_def) { - errors.extend(e); - } - } - if !errors.is_empty() { - return Err(errors); - } - - // insert the ancestors to the def list - for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) { - if class_ast.is_none() { - continue; - } - let mut class_def = class_def.write(); - let (class_ancestors, class_id, class_type_vars) = { - if let TopLevelDef::Class { ancestors, object_id, type_vars, .. } = &mut *class_def - { - (ancestors, *object_id, type_vars) - } else { - continue; - } - }; - - let ans = ancestors_store.get_mut(&class_id).unwrap(); - class_ancestors.append(ans); - - // insert self type annotation to the front of the vector to maintain the order - class_ancestors - .insert(0, make_self_type_annotation(class_type_vars.as_slice(), class_id)); - - // special case classes that inherit from Exception - if class_ancestors - .iter() - .any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) - { - // if inherited from Exception, the body should be a pass - let ast::StmtKind::ClassDef { body, .. } = &class_ast.as_ref().unwrap().node else { - unreachable!() - }; - - for stmt in body { - if matches!( - stmt.node, - ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. } - ) { - return Err(HashSet::from([ - "Classes inherited from exception should have no custom fields/methods" - .into(), - ])); - } + // TODO: Report Error here + _ => { + println!("Type was => {}", b.node.name()); } } } - // deal with ancestor of Exception object - let TopLevelDef::Class { name, ancestors, object_id, .. } = - &mut *self.definition_ast_list[7].0.write() - else { - unreachable!() - }; - - assert_eq!(*name, "Exception".into()); - ancestors.push(make_self_type_annotation(&[], *object_id)); - Ok(()) } - /// step 3, class fields and methods - fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), HashSet> { - let temp_def_list = self.extract_def_list(); - let primitives = &self.primitives_ty; - let def_ast_list = &self.definition_ast_list; - let unifier = self.unifier.borrow_mut(); + fn analyze_ancestors( + class_def: &Arc>, + temp_def_list: &[Arc>], + ) { + // Check if class has a direct parent + let mut class_def = class_def.write(); + let TopLevelDef::Class { ancestors, type_vars, object_id, .. } = &mut *class_def else { + unreachable!() + }; + let mut anc_set = HashMap::new(); - let mut type_var_to_concrete_def: HashMap = HashMap::new(); - - let mut errors = HashSet::new(); - for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) { - if class_ast.is_none() { - continue; + if let Some(ancestor) = ancestors.first() { + let TypeAnnotation::CustomClass { id, .. } = ancestor else { unreachable!() }; + let TopLevelDef::Class { ancestors: parent_ancestors, .. } = + &*temp_def_list[id.0].read() + else { + unreachable!() + }; + for anc in parent_ancestors.iter().skip(1) { + let TypeAnnotation::CustomClass { id, .. } = anc else { unreachable!() }; + anc_set.insert(id, anc.clone()); } - if matches!(&*class_def.read(), TopLevelDef::Class { .. 
}) { + ancestors.extend(anc_set.into_iter().map(|f| f.1).collect::>()); + } + + ancestors.insert(0, make_self_type_annotation(type_vars.as_slice(), *object_id)); + } + + /// step 1, analyze the top level class definitions + /// TODO: Add description + fn analyze_top_level_class_definition( + def_list: &[DefAst], + unifier: &mut Unifier, + primitives_store: &PrimitiveStore, + builtin_num: usize, + core_info: (&HashSet, &ComposerConfig), + ) -> Result<(), HashSet> { + let mut errors = HashSet::new(); + let mut type_var_to_concrete_def: HashMap = HashMap::new(); + // let top_level_def = def_list.iter().map(|f| f.0.clone()).collect_vec(); + let mut temp_def_list: Vec>> = def_list.iter().take(builtin_num).map(|f| f.0.clone()).collect_vec(); + + // Analyze class_bases + for (class_def, class_ast) in def_list.iter().skip(builtin_num) { + if class_ast.is_some() && matches!(&*class_def.read(), TopLevelDef::Class { .. }) { + println!("Class Bases Analysis start"); + if let Err(e) = Self::analyze_bases( + class_def, + class_ast, + &temp_def_list, + unifier, + primitives_store, + ) { + errors.extend(e); + } + println!("Class Bases Analyzed"); + println!("Final Phase"); + Self::analyze_ancestors(class_def, &temp_def_list); + println!("ClassFonet"); + // special case classes that inherit from Exception + println!("Chekcing for exception"); + let TopLevelDef::Class { name, ancestors: class_ancestors, loc, .. } = &*class_def.read() + else { + unreachable!() + }; + println!("Class had name: {name}"); + + if class_ancestors + .iter() + .any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) + { + // if inherited from Exception, the body should be a pass + let ast::StmtKind::ClassDef { body, .. } = &class_ast.as_ref().unwrap().node + else { + unreachable!() + }; + for stmt in body { + if matches!( + stmt.node, + ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. } + ) { + errors.extend(report_error("Classes inherited from exception should have no custom fields/methods", loc.unwrap())); + } + } + } + + } + temp_def_list.push(class_def.clone()); + } + + println!("Phase I executed .... Final Length {}", temp_def_list.len()); + + if !errors.is_empty() { + return Err(errors) + } + + // Analyze class methods + for (class_def, class_ast) in def_list.iter().skip(builtin_num) { + if class_ast.is_some() && matches!(&*class_def.read(), TopLevelDef::Class { .. }) { if let Err(e) = Self::analyze_single_class_methods_fields( class_def, &class_ast.as_ref().unwrap().node, &temp_def_list, unifier, - primitives, + primitives_store, &mut type_var_to_concrete_def, - (&self.keyword_list, &self.core_config), + core_info, ) { errors.extend(e); } - } - } - if !errors.is_empty() { - return Err(errors); - } - // handle the inherited methods and fields - // Note: we cannot defer error handling til the end of the loop, because there is loop - // carried dependency, ignoring the error (temporarily) will cause all assumptions to break - // and produce weird error messages - let mut current_ancestor_depth: usize = 2; - loop { - let mut finished = true; - - for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) { - if class_ast.is_none() { - continue; + if !errors.is_empty() { + return Err(errors) } - let mut class_def = class_def.write(); - if let TopLevelDef::Class { ancestors, .. 
} = &*class_def { - // if the length of the ancestor is equal to the current depth - // it means that all the ancestors of the class is handled - if ancestors.len() == current_ancestor_depth { - finished = false; - Self::analyze_single_class_ancestors( + + { + let mut class_def = class_def.write(); + let TopLevelDef::Class { ancestors, .. } = &*class_def else { unreachable!() }; + // Methods and fields are passed only if class inherits from another class + if ancestors.len() > 1 { + if let Err(e) = Self::analyze_single_class_ancestors( &mut class_def, &temp_def_list, unifier, - primitives, + primitives_store, &mut type_var_to_concrete_def, - )?; + ){ + errors.extend(e); + }; } } - } - if finished { - break; - } - - current_ancestor_depth += 1; - if current_ancestor_depth > def_ast_list.len() + 1 { - unreachable!("cannot be longer than the whole top level def list") + if let Err(e) = Self::subst_class_fields_methods( + &temp_def_list, + primitives_store, + unifier, + &type_var_to_concrete_def, + ){ + errors.extend(e); + }; } } + if !errors.is_empty() { + return Err(errors) + } + + // // Decide whether or not we can skip the functions + // // let mut idx = builtin_num; + // // while idx < def_list.len() { + // // let (class_def, class_ast) = &def_list[idx]; + // // if class_ast.is_some() && matches!(&*class_def.read(), TopLevelDef::Class { .. }) { + // // let TopLevelDef::Class { methods, .. } = &*class_def.read() else {unreachable!()}; + // // // Add the methods to the `temp_def_list` as they can be accessed within the class + // // for (_, _, def_id) in methods { + // // temp_def_list.insert(def_id.0, def_list[def_id.0].0.clone()); + // // } + // // } + // // } + + // // Defintions are parsed inorder of their occurance in python program + // for (class_def, class_ast) in def_list.iter().skip(builtin_num) { + // if class_ast.is_some() && matches!(&*class_def.read(), TopLevelDef::Class { .. }) { + // // let TopLevelDef::Class { methods, .. } = &*class_def.read() else {unreachable!()}; + + // // Add the methods to the `temp_def_list` as they can be accessed within the class + // // Hacky Fix + + // // for (_, _, def_id) in methods { + // // temp_def_list.insert(def_id.0, def_list[def_id.0].0.clone()); + // // } + + // // Add type vars and direct parents + // if let Err(e) = Self::analyze_bases( + // class_def, + // class_ast, + // &temp_def_list, + // unifier, + // primitives_store, + // ) { + // errors.extend(e); + // } + + // if !errors.is_empty() { + // return Err(errors); + // } + + // Self::analyze_ancestors(class_def, &temp_def_list); + + // if let Err(e) = Self::analyze_single_class_methods_fields( + // class_def, + // &class_ast.as_ref().unwrap().node, + // &top_level_def, + // &top_level_def, + // unifier, + // primitives_store, + // &mut type_var_to_concrete_def, + // core_info, + // ) { + // errors.extend(e); + // } + // if !errors.is_empty() { + // return Err(errors); + // } + + + // { + // let mut class_def = class_def.write(); + // let TopLevelDef::Class { ancestors, .. 
} = &*class_def else { unreachable!() }; + // // Methods and fields are passed only if class inherits from another class + // if ancestors.len() > 1 { + // Self::analyze_single_class_ancestors( + // &mut class_def, + // &temp_def_list, + // unifier, + // primitives_store, + // &mut type_var_to_concrete_def, + // )?; + // } + // } + + // // + // Self::subst_class_fields_methods( + // &temp_def_list, + // primitives_store, + // unifier, + // &type_var_to_concrete_def, + // )?; + + // // special case classes that inherit from Exception + // let TopLevelDef::Class { ancestors: class_ancestors, loc, .. } = &*class_def.read() + // else { + // unreachable!() + // }; + // if class_ancestors + // .iter() + // .any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) + // { + // // if inherited from Exception, the body should be a pass + // let ast::StmtKind::ClassDef { body, .. } = &class_ast.as_ref().unwrap().node + // else { + // unreachable!() + // }; + // for stmt in body { + // if matches!( + // stmt.node, + // ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. } + // ) { + // errors.extend(report_error("Classes inherited from exception should have no custom fields/methods", loc.unwrap())); + // } + // } + // } + // } + // temp_def_list.push(class_def.clone()); + // } + + // Add .skip(self.builtin_num) + for (def, _) in def_list.iter().skip(builtin_num) { + match &*def.read() { + TopLevelDef::Class { resolver: Some(resolver), .. } + | TopLevelDef::Function { resolver: Some(resolver), .. } => { + if let Err(e) = + resolver.handle_deferred_eval(unifier, &temp_def_list, primitives_store) + { + errors.insert(e); + } + } + _ => {} + } + } + + // deal with ancestor of Exception object + let TopLevelDef::Class { name, ancestors, object_id, .. } = &mut *def_list[7].0.write() + else { + unreachable!() + }; + assert_eq!(*name, "Exception".into()); + ancestors.push(make_self_type_annotation(&[], *object_id)); + + if !errors.is_empty() { + return Err(errors); + } + Ok(()) + } + + /// make substitution + /// TODO: Imporve name + comment + fn subst_class_fields_methods( + temp_def_list: &[Arc>], + primitives: &PrimitiveStore, + unifier: &mut Unifier, + type_var_to_concrete_def: &HashMap, + ) -> Result<(), HashSet> { + let mut errors = HashSet::new(); + let mut subst_list = Some(Vec::new()); // unification of previously assigned typevar let mut unification_helper = |ty, def| -> Result<(), HashSet> { let target_ty = get_type_from_type_annotation_kinds( - &temp_def_list, + temp_def_list, unifier, primitives, &def, @@ -797,7 +828,7 @@ impl TopLevelComposer { Ok(()) }; for (ty, def) in type_var_to_concrete_def { - if let Err(e) = unification_helper(ty, def) { + if let Err(e) = unification_helper(*ty, def.clone()) { errors.extend(e); } } @@ -828,20 +859,6 @@ impl TopLevelComposer { return Err(errors); } - for (def, _) in def_ast_list.iter().skip(self.builtin_num) { - match &*def.read() { - TopLevelDef::Class { resolver: Some(resolver), .. } - | TopLevelDef::Function { resolver: Some(resolver), .. } => { - if let Err(e) = - resolver.handle_deferred_eval(unifier, &temp_def_list, primitives) - { - errors.insert(e); - } - } - _ => {} - } - } - Ok(()) } @@ -1186,11 +1203,18 @@ impl TopLevelComposer { resolver, ); + // println!("\n+++++++++++++++++++++"); + // for e in class_methods_def.iter() { + // println!("1. 
Class Name: {_class_name} with function {} which has type {}", e.0, unifier.stringify(e.1)); + // } + // println!("+++++++++++++++++++++\n"); + let class_resolver = class_resolver.as_ref().unwrap(); let class_resolver = class_resolver.as_ref(); let mut defined_fields: HashSet<_> = HashSet::new(); for b in class_body_ast { + println!("Node type: {:?}\n", b.node); match &b.node { ast::StmtKind::FunctionDef { args, returns, name, .. } => { let (method_dummy_ty, method_id) = @@ -1199,126 +1223,81 @@ impl TopLevelComposer { let mut method_var_map = VarMap::new(); let arg_types: Vec = { - // check method parameters cannot have same name + // Function arguments must have: + // 1) `self` as first argument + // 2) unique names + // 3) names different than keywords + match args.args.first() { + Some(id) if id.node.arg == "self".into() => {}, + _ => return report_error("class method must have a `self` parameter", b.location), + } let mut defined_parameter_name: HashSet<_> = HashSet::new(); - let zelf: StrRef = "self".into(); - for x in &args.args { - if !defined_parameter_name.insert(x.node.arg) - || (keyword_list.contains(&x.node.arg) && x.node.arg != zelf) - { - return Err(HashSet::from([ - format!("top level function must have unique parameter names \ - and names should not be the same as the keywords (at {})", - x.location), - ])) + for arg in args.args.iter().skip(1) { + if !defined_parameter_name.insert(arg.node.arg) { + return report_error("class method must have a unique parameter names", b.location) + } + if keyword_list.contains(&arg.node.arg) { + return report_error("parameter names should not be the same as the keywords", b.location) } } - if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) { - return Err(HashSet::from([ - format!("__init__ method must have a `self` parameter (at {})", b.location), - ])) + // `self` must not be provided type annotation or default value + if args.args.len() == args.defaults.len() { + return report_error("`self` cannot have a default value", b.location) } - if !defined_parameter_name.contains(&zelf) { - return Err(HashSet::from([ - format!("class method must have a `self` parameter (at {})", b.location), - ])) + if args.args[0].node.annotation.is_some() { + return report_error("`self` cannot have a type annotation", b.location) } - let mut result = Vec::new(); - - let arg_with_default: Vec<( - &ast::Located>, - Option<&ast::Expr>, - )> = args - .args - .iter() - .rev() - .zip( - args.defaults - .iter() - .rev() - .map(|x| -> Option<&ast::Expr> { Some(x) }) - .chain(std::iter::repeat(None)), - ) - .collect_vec(); - - for (x, default) in arg_with_default.into_iter().rev() { - let name = x.node.arg; - if name != zelf { - let type_ann = { - let annotation_expr = x - .node - .annotation - .as_ref() - .ok_or_else(|| HashSet::from([ - format!( - "type annotation needed for `{}` at {}", - x.node.arg, x.location - ), - ]))? - .as_ref(); - parse_ast_to_type_annotation_kinds( - class_resolver, - temp_def_list, - unifier, - primitives, - annotation_expr, - vec![(class_id, class_type_vars_def.clone())] - .into_iter() - .collect::>(), - )? 
+ let no_defaults = args.args.len() - args.defaults.len() - 1; + for (idx, x) in itertools::enumerate(args.args.iter().skip(1)) { + let type_ann = { + let Some(annotation_expr) = x.node.annotation.as_ref() else {return report_error(format!("type annotation needed for `{}`", x.node.arg).as_str(), x.location)}; + parse_ast_to_type_annotation_kinds( + class_resolver, + temp_def_list, + unifier, + primitives, + annotation_expr, + vec![(class_id, class_type_vars_def.clone())] + .into_iter() + .collect::>(), + )? + }; + // find type vars within this method parameter type annotation + let type_vars_within = get_type_var_contained_in_type_annotation(&type_ann); + // handle the class type var and the method type var + for type_var_within in type_vars_within { + let TypeAnnotation::TypeVar(ty) = type_var_within else { + unreachable!("must be type var annotation") }; - // find type vars within this method parameter type annotation - let type_vars_within = - get_type_var_contained_in_type_annotation(&type_ann); - // handle the class type var and the method type var - for type_var_within in type_vars_within { - let TypeAnnotation::TypeVar(ty) = type_var_within else { - unreachable!("must be type var annotation") - }; - let id = Self::get_var_id(ty, unifier)?; - if let Some(prev_ty) = method_var_map.insert(id, ty) { - // if already in the list, make sure they are the same? - assert_eq!(prev_ty, ty); - } + let id = Self::get_var_id(ty, unifier)?; + if let Some(prev_ty) = method_var_map.insert(id, ty) { + // if already in the list, make sure they are the same? + assert_eq!(prev_ty, ty); } - // finish handling type vars - let dummy_func_arg = FuncArg { - name, - ty: unifier.get_dummy_var().ty, - default_value: match default { - None => None, - Some(default) => { - if name == "self".into() { - return Err(HashSet::from([ - format!("`self` parameter cannot take default value (at {})", x.location), - ])); - } - Some({ - let v = Self::parse_parameter_default_value( - default, - class_resolver, - )?; - Self::check_default_param_type( - &v, &type_ann, primitives, unifier, - ) - .map_err(|err| HashSet::from([ - format!("{} (at {})", err, x.location), - ]))?; - v - }) - } - }, - is_vararg: false, - }; - // push the dummy type and the type annotation - // into the list for later unification - type_var_to_concrete_def - .insert(dummy_func_arg.ty, type_ann.clone()); - result.push(dummy_func_arg); } + // finish handling type vars + let dummy_func_arg = FuncArg { + name: x.node.arg, + ty: unifier.get_dummy_var().ty, + default_value: if idx < no_defaults { None } else { + let default_idx = idx - no_defaults; + + Some({ + let v = Self::parse_parameter_default_value(&args.defaults[default_idx], class_resolver)?; + Self::check_default_param_type(&v, &type_ann, primitives, unifier).map_err(|err| report_error::<()>(err.as_str(), x.location).unwrap_err())?; + v + }) + }, + is_vararg: false, + }; + // push the dummy type and the type annotation + // into the list for later unification + type_var_to_concrete_def + .insert(dummy_func_arg.ty, type_ann.clone()); + result.push(dummy_func_arg); } result }; @@ -1390,6 +1369,7 @@ impl TopLevelComposer { // unify now since function type is not in type annotation define // which should be fine since type within method_type will be subst later + println!("Method Dummy Type: {} and method type: {}", unifier.stringify(method_dummy_ty), unifier.stringify(method_type)); unifier .unify(method_dummy_ty, method_type) .map_err(|e| HashSet::from([e.to_display(unifier).to_string()]))?; @@ -1440,23 
+1420,13 @@ impl TopLevelComposer { match v { ast::Constant::Bool(_) | ast::Constant::Str(_) | ast::Constant::Int(_) | ast::Constant::Float(_) => {} _ => { - return Err(HashSet::from([ - format!( - "unsupported statement in class definition body (at {})", - b.location - ), - ])) + return report_error("unsupported statement in class definition body", b.location) } } class_attributes_def.push((*attr, dummy_field_type, v.clone())); } _ => { - return Err(HashSet::from([ - format!( - "unsupported statement in class definition body (at {})", - b.location - ), - ])) + return report_error("unsupported statement in class definition body", b.location) } } annotation @@ -1482,49 +1452,30 @@ impl TopLevelComposer { }; if !class_type_vars_def.contains(&t) { - return Err(HashSet::from([ - format!( - "class fields can only use type \ - vars over which the class is generic (at {})", - annotation.location - ), - ])) + return report_error("class fields can only use type vars over which the class is generic", b.location) } } type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation); } else { - return Err(HashSet::from([ - format!( - "same class fields `{}` defined twice (at {})", - attr, target.location - ), - ])) + return report_error(format!("same class fields `{attr}` defined twice").as_str(), target.location) } } else { - return Err(HashSet::from([ - format!( - "unsupported statement type in class definition body (at {})", - target.location - ), - ])) + return report_error("unsupported statement in class definition body", target.location) } } ast::StmtKind::Assign { .. } // we don't class attributes | ast::StmtKind::Expr { value: _, .. } // typically a docstring; ignoring all expressions matches CPython behavior | ast::StmtKind::Pass { .. } => {} _ => { - return Err(HashSet::from([ - format!( - "unsupported statement in class definition body (at {})", - b.location - ), - ])) + return report_error("unsupported statement in class definition body", b.location) } } } + println!("+++++++++++++++++++++\n"); Ok(()) + } - + // type_var_to_concrete_def: &mut HashMap, fn analyze_single_class_ancestors( class_def: &mut TopLevelDef, temp_def_list: &[Arc>], @@ -1561,7 +1512,6 @@ impl TopLevelComposer { let TypeAnnotation::CustomClass { id, params: _ } = base else { unreachable!("must be class type annotation") }; - let base = temp_def_list.get(id.0).unwrap(); let base = base.read(); let TopLevelDef::Class { methods, fields, attributes, .. 
} = &*base else { @@ -1570,89 +1520,77 @@ impl TopLevelComposer { // handle methods override // since we need to maintain the order, create a new list - let mut new_child_methods: Vec<(StrRef, Type, DefinitionId)> = Vec::new(); - let mut is_override: HashSet = HashSet::new(); - for (anc_method_name, anc_method_ty, anc_method_def_id) in methods { - // find if there is a method with same name in the child class - let mut to_be_added = (*anc_method_name, *anc_method_ty, *anc_method_def_id); - for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def { - if class_method_name == anc_method_name { - // ignore and handle self - // if is __init__ method, no need to check return type - let ok = class_method_name == &"__init__".into() - || Self::check_overload_function_type( - *class_method_ty, - *anc_method_ty, - unifier, - type_var_to_concrete_def, - ); - if !ok { - return Err(HashSet::from([format!( - "method {class_method_name} has same name as ancestors' method, but incompatible type"), - ])); - } - // mark it as added - is_override.insert(*class_method_name); - to_be_added = (*class_method_name, *class_method_ty, *class_method_defid); - break; + let mut new_child_methods: IndexMap = + methods.iter().map(|m| (m.0, (m.1, m.2))).collect(); + for e in new_child_methods.iter() { + println!("Method Name: {} with type: {}", e.0, unifier.stringify(e.1.0)); + } + /* + + class A: + def fun1 + def fun2 + + method: TFunc(fun1, ...) + TVar + */ + // let mut new_child_methods: Vec<(StrRef, Type, DefinitionId)> = methods.clone(); + for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def { + if let Some((ty, _)) = new_child_methods + .insert(*class_method_name, (*class_method_ty, *class_method_defid)) + { + println!("Class Method Name: {class_method_name}"); + println!("Types are {} and {}", unifier.stringify(ty), unifier.stringify(*class_method_ty)); + let ok = class_method_name == &"__init__".into() + || Self::check_overload_function_type( + *class_method_ty, + ty, + unifier, + type_var_to_concrete_def, + ); + if !ok { + return Err(HashSet::from([format!( + "method {class_method_name} has same name as ancestors' method, but incompatible type"), + ])); } } - new_child_methods.push(to_be_added); } - // add those that are not overriding method to the new_child_methods - for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def { - if !is_override.contains(class_method_name) { - new_child_methods.push((*class_method_name, *class_method_ty, *class_method_defid)); - } - } - // use the new_child_methods to replace all the elements in `class_methods_def` class_methods_def.clear(); - class_methods_def.extend(new_child_methods); + class_methods_def + .extend(new_child_methods.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec()); // handle class fields - let mut new_child_fields: Vec<(StrRef, Type, bool)> = Vec::new(); - // let mut is_override: HashSet<_> = HashSet::new(); - for (anc_field_name, anc_field_ty, mutable) in fields { - let to_be_added = (*anc_field_name, *anc_field_ty, *mutable); - // find if there is a fields with the same name in the child class - for (class_field_name, ..) 
in &*class_fields_def { - if class_field_name == anc_field_name - || attributes.iter().any(|f| f.0 == *class_field_name) - { - return Err(HashSet::from([format!( - "field `{class_field_name}` has already declared in the ancestor classes" - )])); - } + let mut new_child_fields: IndexMap = + fields.iter().map(|f| (f.0, (f.1, f.2))).collect(); + let mut new_child_attributes: IndexMap = + attributes.iter().map(|f| (f.0, (f.1, f.2.clone()))).collect(); + // Overriding class fields and attributes is currently not supported + for (name, ty, mutable) in &*class_fields_def { + if new_child_fields.insert(*name, (*ty, *mutable)).is_some() + || new_child_attributes.contains_key(name) + { + return Err(HashSet::from([format!( + "field `{name}` has already declared in the ancestor classes" + )])); + } + } + for (name, ty, val) in &*class_attribute_def { + if new_child_attributes.insert(*name, (*ty, val.clone())).is_some() + || new_child_fields.contains_key(name) + { + return Err(HashSet::from([format!( + "attribute `{name}` has already declared in the ancestor classes" + )])); } - new_child_fields.push(to_be_added); } - // handle class attributes - let mut new_child_attributes: Vec<(StrRef, Type, ast::Constant)> = Vec::new(); - for (anc_attr_name, anc_attr_ty, attr_value) in attributes { - let to_be_added = (*anc_attr_name, *anc_attr_ty, attr_value.clone()); - // find if there is a attribute with the same name in the child class - for (class_attr_name, ..) in &*class_attribute_def { - if class_attr_name == anc_attr_name - || fields.iter().any(|f| f.0 == *class_attr_name) - { - return Err(HashSet::from([format!( - "attribute `{class_attr_name}` has already declared in the ancestor classes" - )])); - } - } - new_child_attributes.push(to_be_added); - } - - for (class_field_name, class_field_ty, mutable) in &*class_fields_def { - if !is_override.contains(class_field_name) { - new_child_fields.push((*class_field_name, *class_field_ty, *mutable)); - } - } class_fields_def.clear(); - class_fields_def.extend(new_child_fields); + class_fields_def + .extend(new_child_fields.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec()); class_attribute_def.clear(); - class_attribute_def.extend(new_child_attributes); + class_attribute_def.extend( + new_child_attributes.iter().map(|f| (*f.0, f.1 .0, f.1 .1.clone())).collect_vec(), + ); Ok(()) } diff --git a/nac3core/src/toplevel/helper.rs b/nac3core/src/toplevel/helper.rs index 29a662c5..0becc055 100644 --- a/nac3core/src/toplevel/helper.rs +++ b/nac3core/src/toplevel/helper.rs @@ -685,6 +685,16 @@ impl TopLevelComposer { let this = this.as_ref(); let other = unifier.get_ty(other); let other = other.as_ref(); + println!("Type of this was: {}", this.get_type_name()); + println!("Type of other was: {}", other.get_type_name()); + if let TypeEnum::TVar { name, .. } = other { + if name.is_some(){ + println!("Name of other was {}", name.unwrap()); + }else { + println!("Name of other was None"); + + } + } let ( TypeEnum::TFunc(FunSignature { args: this_args, ret: this_ret, .. }), TypeEnum::TFunc(FunSignature { args: other_args, ret: other_ret, .. 
}), diff --git a/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__cyclic2.snap.new b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__cyclic2.snap.new new file mode 100644 index 00000000..d7a0372c --- /dev/null +++ b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__cyclic2.snap.new @@ -0,0 +1,14 @@ +--- +source: nac3core/src/toplevel/test.rs +assertion_line: 576 +expression: res_vec + +--- +[ + "Class {\nname: \"A\",\nancestors: [\"class_def_104\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: []\n}\n", + "Function {\nname: \"A.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", + "Class {\nname: \"B\",\nancestors: [\"class_def_106[typevar230, typevar229]\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: [\"typevar230\", \"typevar229\"]\n}\n", + "Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", + "Class {\nname: \"C\",\nancestors: [\"class_def_108[typevar229]\", \"class_def_104\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: [\"typevar229\"]\n}\n", + "Function {\nname: \"C.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", +] diff --git a/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__inheritance_override.snap.new b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__inheritance_override.snap.new new file mode 100644 index 00000000..9fc21a14 --- /dev/null +++ b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__inheritance_override.snap.new @@ -0,0 +1,17 @@ +--- +source: nac3core/src/toplevel/test.rs +assertion_line: 576 +expression: res_vec + +--- +[ + "Class {\nname: \"A\",\nancestors: [\"class_def_104[T]\"],\nfields: [\"a\", \"b\", \"c\"],\nmethods: [(\"__init__\", \"fn[[t:T], 4]\"), (\"fun\", \"fn[[a:0, b:T], 11[virtual[108[3]]]]\"), (\"foo\", \"fn[[c:111], 4]\")],\ntype_vars: [\"T\"]\n}\n", + "Function {\nname: \"A.__init__\",\nsig: \"fn[[t:T], 4]\",\nvar_id: []\n}\n", + "Function {\nname: \"A.fun\",\nsig: \"fn[[a:0, b:T], 11[virtual[108[3]]]]\",\nvar_id: []\n}\n", + "Function {\nname: \"A.foo\",\nsig: \"fn[[c:111], 4]\",\nvar_id: []\n}\n", + "Class {\nname: \"B\",\nancestors: [\"class_def_108[typevar230]\", \"class_def_104[2]\"],\nfields: [\"a\", \"b\", \"c\", \"d\"],\nmethods: [(\"__init__\", \"fn[[], 4]\"), (\"fun\", \"fn[[a:0, b:T], 11[virtual[108[3]]]]\"), (\"foo\", \"fn[[c:111], 4]\")],\ntype_vars: [\"typevar230\"]\n}\n", + "Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", + "Function {\nname: \"B.fun\",\nsig: \"fn[[a:0, b:T], 11[virtual[108[3]]]]\",\nvar_id: []\n}\n", + "Class {\nname: \"C\",\nancestors: [\"class_def_111\", \"class_def_108[3]\", \"class_def_104[2]\"],\nfields: [\"a\", \"b\", \"c\", \"d\", \"e\"],\nmethods: [(\"__init__\", \"fn[[], 4]\"), (\"fun\", \"fn[[a:0, b:T], 11[virtual[108[3]]]]\"), (\"foo\", \"fn[[c:111], 4]\")],\ntype_vars: []\n}\n", + "Function {\nname: \"C.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", +] diff --git a/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__list_tuple_generic.snap.new b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__list_tuple_generic.snap.new new file mode 100644 index 00000000..60cea96a --- /dev/null +++ b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__list_tuple_generic.snap.new @@ -0,0 +1,15 @@ +--- +source: nac3core/src/toplevel/test.rs +assertion_line: 576 +expression: res_vec + +--- +[ + 
"Function {\nname: \"foo\",\nsig: \"fn[[a:11[0], b:tuple[T, 2]], 105[109, 3]]\",\nvar_id: []\n}\n", + "Class {\nname: \"A\",\nancestors: [\"class_def_105[T, V]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v:V], 4]\"), (\"fun\", \"fn[[a:T], V]\")],\ntype_vars: [\"T\", \"V\"]\n}\n", + "Function {\nname: \"A.__init__\",\nsig: \"fn[[v:V], 4]\",\nvar_id: [TypeVarId(243)]\n}\n", + "Function {\nname: \"A.fun\",\nsig: \"fn[[a:T], V]\",\nvar_id: [TypeVarId(248)]\n}\n", + "Function {\nname: \"gfun\",\nsig: \"fn[[a:105[11[2], 0]], 4]\",\nvar_id: []\n}\n", + "Class {\nname: \"B\",\nancestors: [\"class_def_109\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: []\n}\n", + "Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", +] diff --git a/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__self1.snap.new b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__self1.snap.new new file mode 100644 index 00000000..0da9920f --- /dev/null +++ b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__self1.snap.new @@ -0,0 +1,15 @@ +--- +source: nac3core/src/toplevel/test.rs +assertion_line: 576 +expression: res_vec + +--- +[ + "Class {\nname: \"A\",\nancestors: [\"class_def_104[typevar229, typevar230]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a:104[2, 3], b:107], 4]\"), (\"fun\", \"fn[[a:104[2, 3]], 104[3, 0]]\")],\ntype_vars: [\"typevar229\", \"typevar230\"]\n}\n", + "Function {\nname: \"A.__init__\",\nsig: \"fn[[a:104[2, 3], b:107], 4]\",\nvar_id: []\n}\n", + "Function {\nname: \"A.fun\",\nsig: \"fn[[a:104[2, 3]], 104[3, 0]]\",\nvar_id: []\n}\n", + "Class {\nname: \"B\",\nancestors: [\"class_def_107\", \"class_def_104[1, 3]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[], 4]\"), (\"fun\", \"fn[[a:104[2, 3]], 104[3, 0]]\"), (\"foo\", \"fn[[b:107], 107]\"), (\"bar\", \"fn[[a:104[11[107], 0]], tuple[104[virtual[104[107, 0]], 3], 107]]\")],\ntype_vars: []\n}\n", + "Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n", + "Function {\nname: \"B.foo\",\nsig: \"fn[[b:107], 107]\",\nvar_id: []\n}\n", + "Function {\nname: \"B.bar\",\nsig: \"fn[[a:104[11[107], 0]], tuple[104[virtual[104[107, 0]], 3], 107]]\",\nvar_id: []\n}\n", +] diff --git a/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__simple_pass_in_class.snap.new b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__simple_pass_in_class.snap.new new file mode 100644 index 00000000..4071300c --- /dev/null +++ b/nac3core/src/toplevel/snapshots/nac3core__toplevel__test__test_analyze__simple_pass_in_class.snap.new @@ -0,0 +1,9 @@ +--- +source: nac3core/src/toplevel/test.rs +assertion_line: 576 +expression: res_vec + +--- +[ + "Class {\nname: \"A\",\nancestors: [\"class_def_104\"],\nfields: [],\nmethods: [],\ntype_vars: []\n}\n", +] diff --git a/nac3core/src/toplevel/test.rs b/nac3core/src/toplevel/test.rs index 522cb23d..3244ba42 100644 --- a/nac3core/src/toplevel/test.rs +++ b/nac3core/src/toplevel/test.rs @@ -521,7 +521,7 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) { "class same name" )] fn test_analyze(source: &[&str], res: &[&str]) { - let print = false; + let print = true; let mut composer = TopLevelComposer::new(Vec::new(), Vec::new(), ComposerConfig::default(), 64).0; diff --git a/nac3core/src/toplevel/type_annotation.rs b/nac3core/src/toplevel/type_annotation.rs index d997f176..12238ebe 100644 --- 
a/nac3core/src/toplevel/type_annotation.rs +++ b/nac3core/src/toplevel/type_annotation.rs @@ -1,6 +1,7 @@ use super::*; use crate::symbol_resolver::SymbolValue; use crate::toplevel::helper::{PrimDef, PrimDefDetails}; +use crate::typecheck::type_inferencer::report_error; use crate::typecheck::typedef::VarMap; use nac3parser::ast::Constant; use strum::IntoEnumIterator; @@ -97,7 +98,13 @@ pub fn parse_ast_to_type_annotation_kinds( Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() }) } else if let Ok(obj_id) = resolver.get_identifier_def(*id) { let type_vars = { - let def_read = top_level_defs[obj_id.0].try_read(); + let Some(top_level_def) = top_level_defs.get(obj_id.0) else { + return report_error( + format!("Name Error undefined name {id}").as_str(), + expr.location, + ); + }; + let def_read = top_level_def.try_read(); if let Some(def_read) = def_read { if let TopLevelDef::Class { type_vars, .. } = &*def_read { type_vars.clone() @@ -152,12 +159,19 @@ pub fn parse_ast_to_type_annotation_kinds( } let obj_id = resolver.get_identifier_def(*id)?; let type_vars = { - let def_read = top_level_defs[obj_id.0].try_read(); + println!("Executing Type Var"); + let Some(top_level_def) = top_level_defs.get(obj_id.0) else { + return report_error( + format!("Name Error undefined name {id}").as_str(), + expr.location, + ); + }; + let def_read = top_level_def.try_read(); if let Some(def_read) = def_read { - let TopLevelDef::Class { type_vars, .. } = &*def_read else { + let TopLevelDef::Class { type_vars, name, .. } = &*def_read else { unreachable!("must be class here") }; - + println!("class has name: {name}"); type_vars.clone() } else { locked.get(&obj_id).unwrap().clone() diff --git a/nac3core/src/typecheck/type_inferencer/mod.rs b/nac3core/src/typecheck/type_inferencer/mod.rs index a5b8cd49..fa1352b5 100644 --- a/nac3core/src/typecheck/type_inferencer/mod.rs +++ b/nac3core/src/typecheck/type_inferencer/mod.rs @@ -114,7 +114,7 @@ impl Fold<()> for NaiveFolder { } } -fn report_error(msg: &str, location: Location) -> Result { +pub fn report_error(msg: &str, location: Location) -> Result { Err(HashSet::from([format!("{msg} at {location}")])) } diff --git a/nac3standalone/demo/interpreted.log b/nac3standalone/demo/interpreted.log new file mode 100644 index 00000000..190a1803 --- /dev/null +++ b/nac3standalone/demo/interpreted.log @@ -0,0 +1 @@ +123 diff --git a/nac3standalone/demo/src/_tests.py b/nac3standalone/demo/src/_tests.py new file mode 100644 index 00000000..f8318c7c --- /dev/null +++ b/nac3standalone/demo/src/_tests.py @@ -0,0 +1,34 @@ +from __future__ import annotations + + +T = TypeVar("T") +V = TypeVar("V") + +class A(Generic[T]): + a: int32 + b: T + c: A[int64] + def __init__(self, t: T): + self.a = 3 + self.b = T + def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: + pass + def foo(self, c: C): + pass + +class B(Generic[V], A[float]): + d: C + def __init__(self): + pass + def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: + # override + pass + +class C(B[bool]): + e: int64 + def __init__(self): + pass + +def run() -> int32: + + return 0 diff --git a/nac3standalone/demo/src/recursive_type.py b/nac3standalone/demo/src/recursive_type.py index 1444f5ac..dae155f5 100644 --- a/nac3standalone/demo/src/recursive_type.py +++ b/nac3standalone/demo/src/recursive_type.py @@ -4,7 +4,7 @@ from __future__ import annotations def output_int32(a: int32): ... 
-class A: +class A(B): d: int32 a: list[B] def __init__(self, b: list[B]): diff --git a/pyo3/nac3artiq.so b/pyo3/nac3artiq.so new file mode 100755 index 00000000..e73ca32b Binary files /dev/null and b/pyo3/nac3artiq.so differ
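
Note on the composer.rs change above: the ancestor merge in analyze_single_class_ancestors drops the hand-rolled is_override bookkeeping and instead folds the child's methods, fields and attributes into indexmap::IndexMaps seeded from the parent, so declaration order is preserved; a same-named child method replaces the parent entry in place (after the existing check_overload_function_type compatibility check, skipped for __init__), while a same-named child field or attribute is rejected as a redeclaration. Below is a minimal standalone sketch of that order-preserving override merge, with hypothetical names and plain String/u32 stand-ins for StrRef, Type and DefinitionId — it is not the NAC3 code, and it omits the type-compatibility check.

use indexmap::IndexMap;

// Merge child methods over parent methods: parent entries keep their declaration
// order, same-named child entries override in place, new methods append at the end.
fn merge_methods(parent: &[(String, u32)], child: &[(String, u32)]) -> Vec<(String, u32)> {
    // Seed with the ancestor's methods in declaration order.
    let mut merged: IndexMap<String, u32> = parent.iter().cloned().collect();
    for (name, ty) in child {
        // IndexMap::insert keeps an existing key's position, so an override does not
        // reorder the method list; genuinely new methods are appended.
        merged.insert(name.clone(), *ty);
    }
    merged.into_iter().collect()
}

fn main() {
    let parent = vec![("__init__".to_string(), 1), ("fun".to_string(), 2)];
    let child = vec![("fun".to_string(), 5), ("foo".to_string(), 6)];
    // Prints [("__init__", 1), ("fun", 5), ("foo", 6)]
    println!("{:?}", merge_methods(&parent, &child));
}

The same seed-then-insert pattern is what lets the composer rebuild class_methods_def and class_fields_def with a single extend over the IndexMap instead of the previous two-pass loop.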