Fix error msgs

This commit is contained in:
abdul124 2024-08-28 17:52:02 +08:00
parent 600a5c8679
commit 368fcf23c7
17 changed files with 797 additions and 821 deletions


@ -180,7 +180,9 @@
clippy clippy
pre-commit pre-commit
rustfmt rustfmt
rust-analyzer
]; ];
RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}";
shellHook = shellHook =
'' ''
export DEMO_LINALG_STUB=${packages.x86_64-linux.demo-linalg-stub}/lib/liblinalg.a export DEMO_LINALG_STUB=${packages.x86_64-linux.demo-linalg-stub}/lib/liblinalg.a

Binary file not shown.

Binary file not shown.


@ -1,26 +1,87 @@
from min_artiq import * from min_artiq import *
from numpy import int32
# @nac3
# class A:
# a: int32
# core: KernelInvariant[Core]
# def __init__(self, a: int32):
# self.core = Core()
# self.a = a
# @kernel
# def output_all_fields(self):
# #print(self.a)
# pass
# @kernel
# def set_a(self, a: int32):
# self.a = a
# @nac3
# class B(A):
# b: int32
# def __init__(self, b: int32):
# # A.__init__(self, b + 1)
# self.core = Core()
# self.a = b
# self.b = b
# self.set_b(b)
# @kernel
# def output_parent_fields(self):
# # A.output_all_fields(self)
# pass
# @kernel
# def output_all_fields(self):
# # A.output_all_fields(self)
# pass
# #print(self.b)
# @kernel
# def set_b(self, b: int32):
# self.b = b
@nac3 @nac3
class Demo: class C:
c: Kernel[int32]
a: Kernel[int32]
b: Kernel[int32]
core: KernelInvariant[Core] core: KernelInvariant[Core]
led0: KernelInvariant[TTLOut]
led1: KernelInvariant[TTLOut]
def __init__(self): def __init__(self, c: int32):
# B.__init__(self, c + 1)
self.core = Core() self.core = Core()
self.led0 = TTLOut(self.core, 18) self.a = c
self.led1 = TTLOut(self.core, 19) self.b = c
self.c = c
@kernel
def output_parent_fields(self):
# B.output_all_fields(self)
pass
@kernel
def output_all_fields(self):
# B.output_all_fields(self)
#print(self.c)
pass
@kernel
def set_c(self, c: int32):
self.c = c
@kernel @kernel
def run(self): def run(self):
self.core.reset() self.output_all_fields()
while True: # self.set_a(1)
with parallel: # self.set_b(2)
self.led0.pulse(100.*ms) self.set_c(3)
self.led1.pulse(100.*ms) self.output_all_fields()
self.core.delay(100.*ms)
if __name__ == "__main__": if __name__ == "__main__":
Demo().run() C(10).run()

BIN
nac3artiq/demo/module.elf Normal file

Binary file not shown.


@ -439,6 +439,9 @@ pub fn parse_type_annotation<T>(
} else { } else {
let obj_id = resolver.get_identifier_def(*id); let obj_id = resolver.get_identifier_def(*id);
if let Ok(obj_id) = obj_id { if let Ok(obj_id) = obj_id {
// let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
// return Err( HashSet::from([format!("Name Error undefined name {id} (at {})", expr.location),]));
// };
let def = top_level_defs[obj_id.0].read(); let def = top_level_defs[obj_id.0].read();
if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def { if let TopLevelDef::Class { fields, methods, type_vars, .. } = &*def {
if !type_vars.is_empty() { if !type_vars.is_empty() {


@ -1,3 +1,4 @@
use indexmap::IndexMap;
use nac3parser::ast::fold::Fold; use nac3parser::ast::fold::Fold;
use std::rc::Rc; use std::rc::Rc;
@ -5,7 +6,7 @@ use crate::{
codegen::{expr::get_subst_key, stmt::exn_constructor}, codegen::{expr::get_subst_key, stmt::exn_constructor},
symbol_resolver::SymbolValue, symbol_resolver::SymbolValue,
typecheck::{ typecheck::{
type_inferencer::{FunctionData, Inferencer}, type_inferencer::{report_error, FunctionData, Inferencer},
typedef::{TypeVar, VarMap}, typedef::{TypeVar, VarMap},
}, },
}; };
@ -388,10 +389,29 @@ impl TopLevelComposer {
} }
} }
/// Analyze the AST and generate relevant `TopLevelDef`
///
/// Mainly divided into two categories:
/// 1. Class Definition
/// i. Parse class bases
/// ii. Parse ancestors
/// iii. Parse methods and fields
pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet<String>> { pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet<String>> {
self.analyze_top_level_class_type_var()?; let unifier = self.unifier.borrow_mut();
self.analyze_top_level_class_bases()?; let primitives_store = &self.primitives_ty;
self.analyze_top_level_class_fields_methods()?; let def_list = &self.definition_ast_list;
// Step 1. Analyze class definitions
Self::analyze_top_level_class_definition(
def_list,
unifier,
primitives_store,
(&self.keyword_list, &self.core_config),
)?;
// self.analyze_top_level_class_type_var()?;
// self.analyze_top_level_class_bases()?;
// self.analyze_top_level_class_fields_methods()?;
self.analyze_top_level_function()?; self.analyze_top_level_function()?;
if inference { if inference {
self.analyze_function_instance()?; self.analyze_function_instance()?;
@ -399,33 +419,30 @@ impl TopLevelComposer {
Ok(()) Ok(())
} }
/// step 1, analyze the type vars associated with top level class fn analyze_bases(
fn analyze_top_level_class_type_var(&mut self) -> Result<(), HashSet<String>> { class_def: &Arc<RwLock<TopLevelDef>>,
let def_list = &self.definition_ast_list; class_ast: &Option<Stmt>,
let temp_def_list = self.extract_def_list(); temp_def_list: &[Arc<RwLock<TopLevelDef>>],
let unifier = self.unifier.borrow_mut(); unifier: &mut Unifier,
let primitives_store = &self.primitives_ty; primitives_store: &PrimitiveStore,
) -> Result<(), HashSet<String>> {
let mut analyze = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
// only deal with class def here
let mut class_def = class_def.write(); let mut class_def = class_def.write();
let (class_bases_ast, class_def_type_vars, class_resolver) = { let (class_def_id, class_ancestors, class_bases_ast, class_type_vars, class_resolver) = {
if let TopLevelDef::Class { type_vars, resolver, .. } = &mut *class_def { let TopLevelDef::Class { object_id, ancestors, type_vars, resolver, .. } =
let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) = &mut *class_def
class_ast
else { else {
unreachable!() unreachable!()
}; };
let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) = class_ast
(bases, type_vars, resolver) else {
} else { unreachable!()
return Ok(()); };
} (object_id, ancestors, bases, type_vars, resolver.as_ref().unwrap().as_ref())
}; };
let class_resolver = class_resolver.as_ref().unwrap();
let class_resolver = &**class_resolver;
let mut is_generic = false; let mut is_generic = false;
let mut has_base = false;
// Check class bases for typevars
for b in class_bases_ast { for b in class_bases_ast {
match &b.node { match &b.node {
// analyze typevars bounded to the class, // analyze typevars bounded to the class,
@ -433,19 +450,10 @@ impl TopLevelComposer {
// things like `class A(Generic[T, V, ImportedModule.T])` is not supported // things like `class A(Generic[T, V, ImportedModule.T])` is not supported
// i.e. only simple names are allowed in the subscript // i.e. only simple names are allowed in the subscript
// should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params // should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params
ast::ExprKind::Subscript { value, slice, .. } ast::ExprKind::Subscript { value, slice, .. } if matches!(&value.node, ast::ExprKind::Name { id, .. } if id == &"Generic".into()) =>
if {
matches!(
&value.node,
ast::ExprKind::Name { id, .. } if id == &"Generic".into()
)
} =>
{ {
if is_generic { if is_generic {
return Err(HashSet::from([format!( return report_error("only single Generic[...] is allowed", b.location);
"only single Generic[...] is allowed (at {})",
b.location
)]));
} }
is_generic = true; is_generic = true;
@ -458,12 +466,11 @@ impl TopLevelComposer {
type_var_list = vec![&**slice]; type_var_list = vec![&**slice];
} }
// parse the type vars
let type_vars = type_var_list let type_vars = type_var_list
.into_iter() .into_iter()
.map(|e| { .map(|e| {
class_resolver.parse_type_annotation( class_resolver.parse_type_annotation(
&temp_def_list, temp_def_list,
unifier, unifier,
primitives_store, primitives_store,
e, e,
@ -471,321 +478,207 @@ impl TopLevelComposer {
}) })
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
// check if all are unique type vars class_type_vars.extend(type_vars);
let all_unique_type_var = {
let mut occurred_type_var_id: HashSet<TypeVarId> = HashSet::new();
type_vars.iter().all(|x| {
let ty = unifier.get_ty(*x);
if let TypeEnum::TVar { id, .. } = ty.as_ref() {
occurred_type_var_id.insert(*id)
} else {
false
} }
}) ast::ExprKind::Name { .. } | ast::ExprKind::Subscript { .. } => {
};
if !all_unique_type_var {
return Err(HashSet::from([format!(
"duplicate type variable occurs (at {})",
slice.location
)]));
}
// add to TopLevelDef
class_def_type_vars.extend(type_vars);
}
// if others, do nothing in this function
_ => continue,
}
}
Ok(())
};
let mut errors = HashSet::new();
for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
if class_ast.is_none() {
continue;
}
if let Err(e) = analyze(class_def, class_ast) {
errors.extend(e);
}
}
if !errors.is_empty() {
return Err(errors);
}
Ok(())
}
/// step 2, base classes.
/// now that the type vars of all classes are done, handle base classes and
/// put Self class into the ancestors list. We only allow single inheritance
fn analyze_top_level_class_bases(&mut self) -> Result<(), HashSet<String>> {
if self.unifier.top_level.is_none() {
let ctx = Arc::new(self.make_top_level_context());
self.unifier.top_level = Some(ctx);
}
let temp_def_list = self.extract_def_list();
let unifier = self.unifier.borrow_mut();
let primitive_types = self.primitives_ty;
let mut get_direct_parents =
|class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
let mut class_def = class_def.write();
let (class_def_id, class_bases, class_ancestors, class_resolver, class_type_vars) = {
if let TopLevelDef::Class {
ancestors, resolver, object_id, type_vars, ..
} = &mut *class_def
{
let Some(ast::Located {
node: ast::StmtKind::ClassDef { bases, .. }, ..
}) = class_ast
else {
unreachable!()
};
(object_id, bases, ancestors, resolver, type_vars)
} else {
return Ok(());
}
};
let class_resolver = class_resolver.as_ref().unwrap();
let class_resolver = &**class_resolver;
let mut has_base = false;
for b in class_bases {
// type vars have already been handled, so skip on `Generic[...]`
if matches!(
&b.node,
ast::ExprKind::Subscript { value, .. }
if matches!(
&value.node,
ast::ExprKind::Name { id, .. } if id == &"Generic".into()
)
) {
continue;
}
if has_base { if has_base {
return Err(HashSet::from([format!( return report_error("a class definition can only have at most one base class declaration and one generic declaration", b.location);
"a class definition can only have at most one base class \
declaration and one generic declaration (at {})",
b.location
)]));
} }
has_base = true; has_base = true;
// the function parse_ast_to make sure that no type var occurred in // the function parse_ast_to make sure that no type var occurred in
// bast_ty if it is a CustomClassKind // bast_ty if it is a CustomClassKind
let base_ty = parse_ast_to_type_annotation_kinds( let base_ty = parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
&temp_def_list, temp_def_list,
unifier, unifier,
&primitive_types, primitives_store,
b, b,
vec![(*class_def_id, class_type_vars.clone())] vec![(*class_def_id, class_type_vars.clone())]
.into_iter() .into_iter()
.collect::<HashMap<_, _>>(), .collect::<HashMap<_, _>>(),
)?; )?;
if let TypeAnnotation::CustomClass { .. } = &base_ty { if let TypeAnnotation::CustomClass { .. } = &base_ty {
class_ancestors.push(base_ty); class_ancestors.push(base_ty);
} else { } else {
return Err(HashSet::from([format!( return report_error(
"class base declaration can only be custom class (at {})", "class base declaration can only be custom class",
b.location, b.location,
)]));
}
}
Ok(())
};
// first, only push direct parent into the list
let mut errors = HashSet::new();
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
if class_ast.is_none() {
continue;
}
if let Err(e) = get_direct_parents(class_def, class_ast) {
errors.extend(e);
}
}
if !errors.is_empty() {
return Err(errors);
}
// second, get all ancestors
let mut ancestors_store: HashMap<DefinitionId, Vec<TypeAnnotation>> = HashMap::default();
let mut get_all_ancestors =
|class_def: &Arc<RwLock<TopLevelDef>>| -> Result<(), HashSet<String>> {
let class_def = class_def.read();
let (class_ancestors, class_id) = {
if let TopLevelDef::Class { ancestors, object_id, .. } = &*class_def {
(ancestors, *object_id)
} else {
return Ok(());
}
};
ancestors_store.insert(
class_id,
// if class has direct parents, get all ancestors of its parents. Else just empty
if class_ancestors.is_empty() {
vec![]
} else {
Self::get_all_ancestors_helper(
&class_ancestors[0],
temp_def_list.as_slice(),
)?
},
); );
Ok(())
};
for (class_def, ast) in self.definition_ast_list.iter().skip(self.builtin_num) {
if ast.is_none() {
continue;
} }
if let Err(e) = get_all_ancestors(class_def) { }
// TODO: Report Error here
_ => {
println!("Type was => {}", b.node.name());
}
}
}
Ok(())
}
fn analyze_ancestors(
class_def: &Arc<RwLock<TopLevelDef>>,
temp_def_list: &[Arc<RwLock<TopLevelDef>>],
) {
// Check if class has a direct parent
let mut class_def = class_def.write();
let TopLevelDef::Class { ancestors, type_vars, object_id, .. } = &mut *class_def else {
unreachable!()
};
let mut anc_set = HashMap::new();
if let Some(ancestor) = ancestors.first() {
let TypeAnnotation::CustomClass { id, .. } = ancestor else { unreachable!() };
let TopLevelDef::Class { ancestors: parent_ancestors, .. } =
&*temp_def_list[id.0].read()
else {
unreachable!()
};
for anc in parent_ancestors.iter().skip(1) {
let TypeAnnotation::CustomClass { id, .. } = anc else { unreachable!() };
anc_set.insert(id, anc.clone());
}
ancestors.extend(anc_set.into_iter().map(|f| f.1).collect::<Vec<_>>());
}
ancestors.insert(0, make_self_type_annotation(type_vars.as_slice(), *object_id));
}
/// step 1, analyze the top level class definitions
/// TODO: Add description
fn analyze_top_level_class_definition(
def_list: &[DefAst],
unifier: &mut Unifier,
primitives_store: &PrimitiveStore,
core_info: (&HashSet<StrRef>, &ComposerConfig),
) -> Result<(), HashSet<String>> {
let mut errors = HashSet::new();
let mut temp_def_list: Vec<Arc<RwLock<TopLevelDef>>> = Vec::default();
let mut type_var_to_concrete_def: HashMap<Type, TypeAnnotation> = HashMap::new();
let all_def = def_list.iter().map(|f| f.0.clone()).collect_vec();
// Definitions are parsed in order of their occurrence in the Python program
for (class_def, class_ast) in def_list {
if class_ast.is_some() && matches!(&*class_def.read(), TopLevelDef::Class { .. }) {
// Add type vars and direct parents
if let Err(e) = Self::analyze_bases(
class_def,
class_ast,
&temp_def_list,
unifier,
primitives_store,
) {
errors.extend(e); errors.extend(e);
} }
}
if !errors.is_empty() { Self::analyze_ancestors(class_def, &temp_def_list);
return Err(errors);
if let Err(e) = Self::analyze_single_class_methods_fields(
class_def,
&class_ast.as_ref().unwrap().node,
&all_def,
unifier,
primitives_store,
&mut type_var_to_concrete_def,
core_info,
) {
errors.extend(e);
} }
// insert the ancestors to the def list
for (class_def, class_ast) in self.definition_ast_list.iter_mut().skip(self.builtin_num) {
if class_ast.is_none() {
continue;
}
let mut class_def = class_def.write();
let (class_ancestors, class_id, class_type_vars) = {
if let TopLevelDef::Class { ancestors, object_id, type_vars, .. } = &mut *class_def
{ {
(ancestors, *object_id, type_vars) let mut class_def = class_def.write();
} else { let TopLevelDef::Class { ancestors, .. } = &*class_def else { unreachable!() };
continue; // Methods and fields are passed only if class inherits from another class
if ancestors.len() > 1 {
Self::analyze_single_class_ancestors(
&mut class_def,
&all_def,
unifier,
primitives_store,
&mut type_var_to_concrete_def,
)?;
}
} }
};
let ans = ancestors_store.get_mut(&class_id).unwrap(); //
class_ancestors.append(ans); Self::subst_class_fields_methods(
&all_def,
// insert self type annotation to the front of the vector to maintain the order primitives_store,
class_ancestors unifier,
.insert(0, make_self_type_annotation(class_type_vars.as_slice(), class_id)); &type_var_to_concrete_def,
)?;
// special case classes that inherit from Exception // special case classes that inherit from Exception
let TopLevelDef::Class { ancestors: class_ancestors, loc, .. } = &*class_def.read()
else {
unreachable!()
};
if class_ancestors if class_ancestors
.iter() .iter()
.any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7)) .any(|ann| matches!(ann, TypeAnnotation::CustomClass { id, .. } if id.0 == 7))
{ {
// if inherited from Exception, the body should be a pass // if inherited from Exception, the body should be a pass
let ast::StmtKind::ClassDef { body, .. } = &class_ast.as_ref().unwrap().node else { let ast::StmtKind::ClassDef { body, .. } = &class_ast.as_ref().unwrap().node
else {
unreachable!() unreachable!()
}; };
for stmt in body { for stmt in body {
if matches!( if matches!(
stmt.node, stmt.node,
ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. } ast::StmtKind::FunctionDef { .. } | ast::StmtKind::AnnAssign { .. }
) { ) {
return Err(HashSet::from([ errors.extend(report_error("Classes inherited from exception should have no custom fields/methods", loc.unwrap()));
"Classes inherited from exception should have no custom fields/methods"
.into(),
]));
} }
} }
} }
} }
temp_def_list.push(class_def.clone());
}
// Add .skip(self.builtin_num)
for (def, _) in def_list {
match &*def.read() {
TopLevelDef::Class { resolver: Some(resolver), .. }
| TopLevelDef::Function { resolver: Some(resolver), .. } => {
if let Err(e) =
resolver.handle_deferred_eval(unifier, &temp_def_list, primitives_store)
{
errors.insert(e);
}
}
_ => {}
}
}
// deal with ancestor of Exception object // deal with ancestor of Exception object
let TopLevelDef::Class { name, ancestors, object_id, .. } = let TopLevelDef::Class { name, ancestors, object_id, .. } = &mut *def_list[7].0.write()
&mut *self.definition_ast_list[7].0.write()
else { else {
unreachable!() unreachable!()
}; };
assert_eq!(*name, "Exception".into()); assert_eq!(*name, "Exception".into());
ancestors.push(make_self_type_annotation(&[], *object_id)); ancestors.push(make_self_type_annotation(&[], *object_id));
Ok(())
}
/// step 3, class fields and methods
fn analyze_top_level_class_fields_methods(&mut self) -> Result<(), HashSet<String>> {
let temp_def_list = self.extract_def_list();
let primitives = &self.primitives_ty;
let def_ast_list = &self.definition_ast_list;
let unifier = self.unifier.borrow_mut();
let mut type_var_to_concrete_def: HashMap<Type, TypeAnnotation> = HashMap::new();
let mut errors = HashSet::new();
for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) {
if class_ast.is_none() {
continue;
}
if matches!(&*class_def.read(), TopLevelDef::Class { .. }) {
if let Err(e) = Self::analyze_single_class_methods_fields(
class_def,
&class_ast.as_ref().unwrap().node,
&temp_def_list,
unifier,
primitives,
&mut type_var_to_concrete_def,
(&self.keyword_list, &self.core_config),
) {
errors.extend(e);
}
}
}
if !errors.is_empty() { if !errors.is_empty() {
return Err(errors); return Err(errors);
} }
Ok(())
// handle the inherited methods and fields
// Note: we cannot defer error handling til the end of the loop, because there is loop
// carried dependency, ignoring the error (temporarily) will cause all assumptions to break
// and produce weird error messages
let mut current_ancestor_depth: usize = 2;
loop {
let mut finished = true;
for (class_def, class_ast) in def_ast_list.iter().skip(self.builtin_num) {
if class_ast.is_none() {
continue;
}
let mut class_def = class_def.write();
if let TopLevelDef::Class { ancestors, .. } = &*class_def {
// if the length of the ancestor is equal to the current depth
// it means that all the ancestors of the class is handled
if ancestors.len() == current_ancestor_depth {
finished = false;
Self::analyze_single_class_ancestors(
&mut class_def,
&temp_def_list,
unifier,
primitives,
&mut type_var_to_concrete_def,
)?;
}
}
} }
if finished { /// make substitution
break; /// TODO: Improve name + comment
} fn subst_class_fields_methods(
temp_def_list: &[Arc<RwLock<TopLevelDef>>],
current_ancestor_depth += 1; primitives: &PrimitiveStore,
if current_ancestor_depth > def_ast_list.len() + 1 { unifier: &mut Unifier,
unreachable!("cannot be longer than the whole top level def list") type_var_to_concrete_def: &HashMap<Type, TypeAnnotation>,
} ) -> Result<(), HashSet<String>> {
} let mut errors = HashSet::new();
let mut subst_list = Some(Vec::new()); let mut subst_list = Some(Vec::new());
// unification of previously assigned typevar // unification of previously assigned typevar
let mut unification_helper = |ty, def| -> Result<(), HashSet<String>> { let mut unification_helper = |ty, def| -> Result<(), HashSet<String>> {
let target_ty = get_type_from_type_annotation_kinds( let target_ty = get_type_from_type_annotation_kinds(
&temp_def_list, temp_def_list,
unifier, unifier,
primitives, primitives,
&def, &def,
@ -797,7 +690,7 @@ impl TopLevelComposer {
Ok(()) Ok(())
}; };
for (ty, def) in type_var_to_concrete_def { for (ty, def) in type_var_to_concrete_def {
if let Err(e) = unification_helper(ty, def) { if let Err(e) = unification_helper(*ty, def.clone()) {
errors.extend(e); errors.extend(e);
} }
} }
@ -828,20 +721,6 @@ impl TopLevelComposer {
return Err(errors); return Err(errors);
} }
for (def, _) in def_ast_list.iter().skip(self.builtin_num) {
match &*def.read() {
TopLevelDef::Class { resolver: Some(resolver), .. }
| TopLevelDef::Function { resolver: Some(resolver), .. } => {
if let Err(e) =
resolver.handle_deferred_eval(unifier, &temp_def_list, primitives)
{
errors.insert(e);
}
}
_ => {}
}
}
Ok(()) Ok(())
} }
@ -1186,11 +1065,18 @@ impl TopLevelComposer {
resolver, resolver,
); );
// println!("\n+++++++++++++++++++++");
// for e in class_methods_def.iter() {
// println!("1. Class Name: {_class_name} with function {} which has type {}", e.0, unifier.stringify(e.1));
// }
// println!("+++++++++++++++++++++\n");
let class_resolver = class_resolver.as_ref().unwrap(); let class_resolver = class_resolver.as_ref().unwrap();
let class_resolver = class_resolver.as_ref(); let class_resolver = class_resolver.as_ref();
let mut defined_fields: HashSet<_> = HashSet::new(); let mut defined_fields: HashSet<_> = HashSet::new();
for b in class_body_ast { for b in class_body_ast {
// println!("Node type: {:?}\n", b.node);
match &b.node { match &b.node {
ast::StmtKind::FunctionDef { args, returns, name, .. } => { ast::StmtKind::FunctionDef { args, returns, name, .. } => {
let (method_dummy_ty, method_id) = let (method_dummy_ty, method_id) =
@ -1199,65 +1085,36 @@ impl TopLevelComposer {
let mut method_var_map = VarMap::new(); let mut method_var_map = VarMap::new();
let arg_types: Vec<FuncArg> = { let arg_types: Vec<FuncArg> = {
// check method parameters cannot have same name // Function arguments must have:
// 1) `self` as first argument
// 2) unique names
// 3) names different than keywords
match args.args.first() {
Some(id) if id.node.arg == "self".into() => {},
_ => return report_error("class method must have a `self` parameter", b.location),
}
let mut defined_parameter_name: HashSet<_> = HashSet::new(); let mut defined_parameter_name: HashSet<_> = HashSet::new();
let zelf: StrRef = "self".into(); for arg in args.args.iter().skip(1) {
for x in &args.args { if !defined_parameter_name.insert(arg.node.arg) {
if !defined_parameter_name.insert(x.node.arg) return report_error("class method must have unique parameter names", b.location)
|| (keyword_list.contains(&x.node.arg) && x.node.arg != zelf) }
{ if keyword_list.contains(&arg.node.arg) {
return Err(HashSet::from([ return report_error("parameter names should not be the same as the keywords", b.location)
format!("top level function must have unique parameter names \
and names should not be the same as the keywords (at {})",
x.location),
]))
} }
} }
if name == &"__init__".into() && !defined_parameter_name.contains(&zelf) { // `self` must not have a type annotation or a default value
return Err(HashSet::from([ if args.args.len() == args.defaults.len() {
format!("__init__ method must have a `self` parameter (at {})", b.location), return report_error("`self` cannot have a default value", b.location)
]))
} }
if !defined_parameter_name.contains(&zelf) { if args.args[0].node.annotation.is_some() {
return Err(HashSet::from([ return report_error("`self` cannot have a type annotation", b.location)
format!("class method must have a `self` parameter (at {})", b.location),
]))
} }
let mut result = Vec::new(); let mut result = Vec::new();
let no_defaults = args.args.len() - args.defaults.len() - 1;
let arg_with_default: Vec<( for (idx, x) in itertools::enumerate(args.args.iter().skip(1)) {
&ast::Located<ast::ArgData<()>>,
Option<&ast::Expr>,
)> = args
.args
.iter()
.rev()
.zip(
args.defaults
.iter()
.rev()
.map(|x| -> Option<&ast::Expr> { Some(x) })
.chain(std::iter::repeat(None)),
)
.collect_vec();
for (x, default) in arg_with_default.into_iter().rev() {
let name = x.node.arg;
if name != zelf {
let type_ann = { let type_ann = {
let annotation_expr = x let Some(annotation_expr) = x.node.annotation.as_ref() else {return report_error(format!("type annotation needed for `{}`", x.node.arg).as_str(), x.location)};
.node
.annotation
.as_ref()
.ok_or_else(|| HashSet::from([
format!(
"type annotation needed for `{}` at {}",
x.node.arg, x.location
),
]))?
.as_ref();
parse_ast_to_type_annotation_kinds( parse_ast_to_type_annotation_kinds(
class_resolver, class_resolver,
temp_def_list, temp_def_list,
@ -1270,8 +1127,7 @@ impl TopLevelComposer {
)? )?
}; };
// find type vars within this method parameter type annotation // find type vars within this method parameter type annotation
let type_vars_within = let type_vars_within = get_type_var_contained_in_type_annotation(&type_ann);
get_type_var_contained_in_type_annotation(&type_ann);
// handle the class type var and the method type var // handle the class type var and the method type var
for type_var_within in type_vars_within { for type_var_within in type_vars_within {
let TypeAnnotation::TypeVar(ty) = type_var_within else { let TypeAnnotation::TypeVar(ty) = type_var_within else {
@ -1286,30 +1142,16 @@ impl TopLevelComposer {
} }
// finish handling type vars // finish handling type vars
let dummy_func_arg = FuncArg { let dummy_func_arg = FuncArg {
name, name: x.node.arg,
ty: unifier.get_dummy_var().ty, ty: unifier.get_dummy_var().ty,
default_value: match default { default_value: if idx < no_defaults { None } else {
None => None, let default_idx = idx - no_defaults;
Some(default) => {
if name == "self".into() {
return Err(HashSet::from([
format!("`self` parameter cannot take default value (at {})", x.location),
]));
}
Some({ Some({
let v = Self::parse_parameter_default_value( let v = Self::parse_parameter_default_value(&args.defaults[default_idx], class_resolver)?;
default, Self::check_default_param_type(&v, &type_ann, primitives, unifier).map_err(|err| report_error::<()>(err.as_str(), x.location).unwrap_err())?;
class_resolver,
)?;
Self::check_default_param_type(
&v, &type_ann, primitives, unifier,
)
.map_err(|err| HashSet::from([
format!("{} (at {})", err, x.location),
]))?;
v v
}) })
}
}, },
is_vararg: false, is_vararg: false,
}; };
@ -1319,7 +1161,6 @@ impl TopLevelComposer {
.insert(dummy_func_arg.ty, type_ann.clone()); .insert(dummy_func_arg.ty, type_ann.clone());
result.push(dummy_func_arg); result.push(dummy_func_arg);
} }
}
result result
}; };
@ -1390,6 +1231,7 @@ impl TopLevelComposer {
// unify now since function type is not in type annotation define // unify now since function type is not in type annotation define
// which should be fine since type within method_type will be subst later // which should be fine since type within method_type will be subst later
println!("Method Dummy Type: {} and method type: {}", unifier.stringify(method_dummy_ty), unifier.stringify(method_type));
unifier unifier
.unify(method_dummy_ty, method_type) .unify(method_dummy_ty, method_type)
.map_err(|e| HashSet::from([e.to_display(unifier).to_string()]))?; .map_err(|e| HashSet::from([e.to_display(unifier).to_string()]))?;
@ -1440,23 +1282,13 @@ impl TopLevelComposer {
match v { match v {
ast::Constant::Bool(_) | ast::Constant::Str(_) | ast::Constant::Int(_) | ast::Constant::Float(_) => {} ast::Constant::Bool(_) | ast::Constant::Str(_) | ast::Constant::Int(_) | ast::Constant::Float(_) => {}
_ => { _ => {
return Err(HashSet::from([ return report_error("unsupported statement in class definition body", b.location)
format!(
"unsupported statement in class definition body (at {})",
b.location
),
]))
} }
} }
class_attributes_def.push((*attr, dummy_field_type, v.clone())); class_attributes_def.push((*attr, dummy_field_type, v.clone()));
} }
_ => { _ => {
return Err(HashSet::from([ return report_error("unsupported statement in class definition body", b.location)
format!(
"unsupported statement in class definition body (at {})",
b.location
),
]))
} }
} }
annotation annotation
@ -1482,49 +1314,30 @@ impl TopLevelComposer {
}; };
if !class_type_vars_def.contains(&t) { if !class_type_vars_def.contains(&t) {
return Err(HashSet::from([ return report_error("class fields can only use type vars over which the class is generic", b.location)
format!(
"class fields can only use type \
vars over which the class is generic (at {})",
annotation.location
),
]))
} }
} }
type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation); type_var_to_concrete_def.insert(dummy_field_type, parsed_annotation);
} else { } else {
return Err(HashSet::from([ return report_error(format!("same class fields `{attr}` defined twice").as_str(), target.location)
format!(
"same class fields `{}` defined twice (at {})",
attr, target.location
),
]))
} }
} else { } else {
return Err(HashSet::from([ return report_error("unsupported statement in class definition body", target.location)
format!(
"unsupported statement type in class definition body (at {})",
target.location
),
]))
} }
} }
ast::StmtKind::Assign { .. } // we don't class attributes ast::StmtKind::Assign { .. } // we don't class attributes
| ast::StmtKind::Expr { value: _, .. } // typically a docstring; ignoring all expressions matches CPython behavior | ast::StmtKind::Expr { value: _, .. } // typically a docstring; ignoring all expressions matches CPython behavior
| ast::StmtKind::Pass { .. } => {} | ast::StmtKind::Pass { .. } => {}
_ => { _ => {
return Err(HashSet::from([ return report_error("unsupported statement in class definition body", b.location)
format!(
"unsupported statement in class definition body (at {})",
b.location
),
]))
} }
} }
} }
println!("+++++++++++++++++++++\n");
Ok(()) Ok(())
}
}
// type_var_to_concrete_def: &mut HashMap<Type, TypeAnnotation>,
fn analyze_single_class_ancestors( fn analyze_single_class_ancestors(
class_def: &mut TopLevelDef, class_def: &mut TopLevelDef,
temp_def_list: &[Arc<RwLock<TopLevelDef>>], temp_def_list: &[Arc<RwLock<TopLevelDef>>],
@ -1561,7 +1374,6 @@ impl TopLevelComposer {
let TypeAnnotation::CustomClass { id, params: _ } = base else { let TypeAnnotation::CustomClass { id, params: _ } = base else {
unreachable!("must be class type annotation") unreachable!("must be class type annotation")
}; };
let base = temp_def_list.get(id.0).unwrap(); let base = temp_def_list.get(id.0).unwrap();
let base = base.read(); let base = base.read();
let TopLevelDef::Class { methods, fields, attributes, .. } = &*base else { let TopLevelDef::Class { methods, fields, attributes, .. } = &*base else {
@ -1570,19 +1382,31 @@ impl TopLevelComposer {
// handle methods override // handle methods override
// since we need to maintain the order, create a new list // since we need to maintain the order, create a new list
let mut new_child_methods: Vec<(StrRef, Type, DefinitionId)> = Vec::new(); let mut new_child_methods: IndexMap<StrRef, (Type, DefinitionId)> =
let mut is_override: HashSet<StrRef> = HashSet::new(); methods.iter().map(|m| (m.0, (m.1, m.2))).collect();
for (anc_method_name, anc_method_ty, anc_method_def_id) in methods { for e in new_child_methods.iter() {
// find if there is a method with same name in the child class println!("Method Name: {} with type: {}", e.0, unifier.stringify(e.1.0));
let mut to_be_added = (*anc_method_name, *anc_method_ty, *anc_method_def_id); }
/*
class A:
def fun1
def fun2
method: TFunc(fun1, ...)
TVar
*/
// let mut new_child_methods: Vec<(StrRef, Type, DefinitionId)> = methods.clone();
for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def { for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def {
if class_method_name == anc_method_name { if let Some((ty, _)) = new_child_methods
// ignore and handle self .insert(*class_method_name, (*class_method_ty, *class_method_defid))
// if is __init__ method, no need to check return type {
println!("Class Method Name: {class_method_name}");
println!("Types are {} and {}", unifier.stringify(ty), unifier.stringify(*class_method_ty));
let ok = class_method_name == &"__init__".into() let ok = class_method_name == &"__init__".into()
|| Self::check_overload_function_type( || Self::check_overload_function_type(
*class_method_ty, *class_method_ty,
*anc_method_ty, ty,
unifier, unifier,
type_var_to_concrete_def, type_var_to_concrete_def,
); );
@ -1591,68 +1415,44 @@ impl TopLevelComposer {
"method {class_method_name} has same name as ancestors' method, but incompatible type"), "method {class_method_name} has same name as ancestors' method, but incompatible type"),
])); ]));
} }
// mark it as added
is_override.insert(*class_method_name);
to_be_added = (*class_method_name, *class_method_ty, *class_method_defid);
break;
} }
} }
new_child_methods.push(to_be_added);
}
// add those that are not overriding method to the new_child_methods
for (class_method_name, class_method_ty, class_method_defid) in &*class_methods_def {
if !is_override.contains(class_method_name) {
new_child_methods.push((*class_method_name, *class_method_ty, *class_method_defid));
}
}
// use the new_child_methods to replace all the elements in `class_methods_def`
class_methods_def.clear(); class_methods_def.clear();
class_methods_def.extend(new_child_methods); class_methods_def
.extend(new_child_methods.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
// handle class fields // handle class fields
let mut new_child_fields: Vec<(StrRef, Type, bool)> = Vec::new(); let mut new_child_fields: IndexMap<StrRef, (Type, bool)> =
// let mut is_override: HashSet<_> = HashSet::new(); fields.iter().map(|f| (f.0, (f.1, f.2))).collect();
for (anc_field_name, anc_field_ty, mutable) in fields { let mut new_child_attributes: IndexMap<StrRef, (Type, ast::Constant)> =
let to_be_added = (*anc_field_name, *anc_field_ty, *mutable); attributes.iter().map(|f| (f.0, (f.1, f.2.clone()))).collect();
// find if there is a fields with the same name in the child class // Overriding class fields and attributes is currently not supported
for (class_field_name, ..) in &*class_fields_def { for (name, ty, mutable) in &*class_fields_def {
if class_field_name == anc_field_name if new_child_fields.insert(*name, (*ty, *mutable)).is_some()
|| attributes.iter().any(|f| f.0 == *class_field_name) || new_child_attributes.contains_key(name)
{ {
return Err(HashSet::from([format!( return Err(HashSet::from([format!(
"field `{class_field_name}` has already declared in the ancestor classes" "field `{name}` has already declared in the ancestor classes"
)])); )]));
} }
} }
new_child_fields.push(to_be_added); for (name, ty, val) in &*class_attribute_def {
} if new_child_attributes.insert(*name, (*ty, val.clone())).is_some()
|| new_child_fields.contains_key(name)
// handle class attributes
let mut new_child_attributes: Vec<(StrRef, Type, ast::Constant)> = Vec::new();
for (anc_attr_name, anc_attr_ty, attr_value) in attributes {
let to_be_added = (*anc_attr_name, *anc_attr_ty, attr_value.clone());
// find if there is a attribute with the same name in the child class
for (class_attr_name, ..) in &*class_attribute_def {
if class_attr_name == anc_attr_name
|| fields.iter().any(|f| f.0 == *class_attr_name)
{ {
return Err(HashSet::from([format!( return Err(HashSet::from([format!(
"attribute `{class_attr_name}` has already declared in the ancestor classes" "attribute `{name}` has already declared in the ancestor classes"
)])); )]));
} }
} }
new_child_attributes.push(to_be_added);
}
for (class_field_name, class_field_ty, mutable) in &*class_fields_def {
if !is_override.contains(class_field_name) {
new_child_fields.push((*class_field_name, *class_field_ty, *mutable));
}
}
class_fields_def.clear(); class_fields_def.clear();
class_fields_def.extend(new_child_fields); class_fields_def
.extend(new_child_fields.iter().map(|f| (*f.0, f.1 .0, f.1 .1)).collect_vec());
class_attribute_def.clear(); class_attribute_def.clear();
class_attribute_def.extend(new_child_attributes); class_attribute_def.extend(
new_child_attributes.iter().map(|f| (*f.0, f.1 .0, f.1 .1.clone())).collect_vec(),
);
Ok(()) Ok(())
} }
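The new `start_analysis` doc comment above (parse class bases, then ancestors, then methods and fields) and the note that definitions are parsed in order of occurrence describe a single-pass driver that replaces the old three separate passes. A rough sketch of that shape, with placeholder types and pooled error collection instead of the real `Arc<RwLock<TopLevelDef>>` machinery (all names here are illustrative, not the nac3core API):

    use std::collections::HashSet;

    // Placeholder for a parsed class; the real composer works on
    // Arc<RwLock<TopLevelDef>> paired with its AST node.
    struct ClassDef {
        name: String,
    }

    fn analyze_bases(_class: &ClassDef, _seen: &[&ClassDef]) -> Result<(), HashSet<String>> {
        Ok(()) // i. type vars and the direct parent
    }

    fn analyze_ancestors(_class: &ClassDef, _seen: &[&ClassDef]) {
        // ii. extend with the parent's ancestor chain and the Self annotation
    }

    fn analyze_methods_fields(_class: &ClassDef, _seen: &[&ClassDef]) -> Result<(), HashSet<String>> {
        Ok(()) // iii. methods and fields
    }

    // One walk over the definitions in program order; each class only ever sees
    // definitions that appeared before it, and errors are pooled instead of
    // aborting at the first failure.
    fn analyze_classes(defs: &[ClassDef]) -> Result<(), HashSet<String>> {
        let mut errors: HashSet<String> = HashSet::new();
        let mut seen: Vec<&ClassDef> = Vec::new();
        for class in defs {
            if let Err(e) = analyze_bases(class, &seen) {
                errors.extend(e);
            }
            analyze_ancestors(class, &seen);
            if let Err(e) = analyze_methods_fields(class, &seen) {
                errors.extend(e);
            }
            seen.push(class);
        }
        if errors.is_empty() {
            Ok(())
        } else {
            Err(errors)
        }
    }

    fn main() {
        let defs = vec![ClassDef { name: "A".into() }, ClassDef { name: "B".into() }];
        assert!(analyze_classes(&defs).is_ok());
        println!("analyzed {} classes, last was {}", defs.len(), defs.last().unwrap().name);
    }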
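Similarly, the inherited-member handling in `analyze_single_class_ancestors` now goes through `indexmap::IndexMap` (newly imported at the top of the file), so the ancestor's declaration order is kept while a previous value returned from `insert` flags an override. A small sketch of that merge, with plain strings standing in for `StrRef`/`Type` and a simple equality check standing in for `check_overload_function_type` (requires the `indexmap` crate):

    use indexmap::IndexMap;

    // Merge an ancestor's methods with a child's: insertion order is preserved,
    // and a previous value returned by `insert` means the child overrides an
    // inherited method of the same name.
    fn merge_methods(
        parent: &[(&str, &str)], // (name, signature) pairs of the ancestor
        child: &[(&str, &str)],  // (name, signature) pairs of the class itself
    ) -> Result<Vec<(String, String)>, String> {
        let mut merged: IndexMap<&str, &str> = parent.iter().copied().collect();
        for &(name, sig) in child {
            if let Some(parent_sig) = merged.insert(name, sig) {
                // In nac3core this is where check_overload_function_type runs;
                // plain equality stands in for it here.
                if name != "__init__" && parent_sig != sig {
                    return Err(format!(
                        "method {name} has same name as ancestors' method, but incompatible type"
                    ));
                }
            }
        }
        Ok(merged.into_iter().map(|(n, s)| (n.to_string(), s.to_string())).collect())
    }

    fn main() {
        let parent = [("__init__", "fn[[], none]"), ("fun", "fn[[a:int32], none]")];
        let child = [("fun", "fn[[a:int32], none]"), ("extra", "fn[[], none]")];
        let merged = merge_methods(&parent, &child).unwrap();
        // Parent order is kept: __init__, fun (overridden), then the new method.
        assert_eq!(merged[0].0, "__init__");
        assert_eq!(merged[2].0, "extra");
        println!("{merged:?}");
    }

Compared to the old `Vec` plus `is_override: HashSet` bookkeeping, the map makes the override case a single `insert` and drops the second loop that appended non-overriding child methods.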


@ -685,6 +685,16 @@ impl TopLevelComposer {
let this = this.as_ref(); let this = this.as_ref();
let other = unifier.get_ty(other); let other = unifier.get_ty(other);
let other = other.as_ref(); let other = other.as_ref();
println!("Type of this was: {}", this.get_type_name());
println!("Type of other was: {}", other.get_type_name());
if let TypeEnum::TVar { name, .. } = other {
if name.is_some(){
println!("Name of other was {}", name.unwrap());
}else {
println!("Name of other was None");
}
}
let ( let (
TypeEnum::TFunc(FunSignature { args: this_args, ret: this_ret, .. }), TypeEnum::TFunc(FunSignature { args: this_args, ret: this_ret, .. }),
TypeEnum::TFunc(FunSignature { args: other_args, ret: other_ret, .. }), TypeEnum::TFunc(FunSignature { args: other_args, ret: other_ret, .. }),


@ -0,0 +1,14 @@
---
source: nac3core/src/toplevel/test.rs
assertion_line: 576
expression: res_vec
---
[
"Class {\nname: \"A\",\nancestors: [\"class_def_104\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: []\n}\n",
"Function {\nname: \"A.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n",
"Class {\nname: \"B\",\nancestors: [\"class_def_106[typevar230, typevar229]\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: [\"typevar230\", \"typevar229\"]\n}\n",
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n",
"Class {\nname: \"C\",\nancestors: [\"class_def_108[typevar229]\", \"class_def_104\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: [\"typevar229\"]\n}\n",
"Function {\nname: \"C.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n",
]


@ -0,0 +1,15 @@
---
source: nac3core/src/toplevel/test.rs
assertion_line: 576
expression: res_vec
---
[
"Function {\nname: \"foo\",\nsig: \"fn[[a:11[0], b:tuple[T, 2]], 105[109, 3]]\",\nvar_id: []\n}\n",
"Class {\nname: \"A\",\nancestors: [\"class_def_105[T, V]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[v:V], 4]\"), (\"fun\", \"fn[[a:T], V]\")],\ntype_vars: [\"T\", \"V\"]\n}\n",
"Function {\nname: \"A.__init__\",\nsig: \"fn[[v:V], 4]\",\nvar_id: [TypeVarId(243)]\n}\n",
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:T], V]\",\nvar_id: [TypeVarId(248)]\n}\n",
"Function {\nname: \"gfun\",\nsig: \"fn[[a:105[11[2], 0]], 4]\",\nvar_id: []\n}\n",
"Class {\nname: \"B\",\nancestors: [\"class_def_109\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\")],\ntype_vars: []\n}\n",
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n",
]


@ -0,0 +1,15 @@
---
source: nac3core/src/toplevel/test.rs
assertion_line: 576
expression: res_vec
---
[
"Class {\nname: \"A\",\nancestors: [\"class_def_104[typevar229, typevar230]\"],\nfields: [\"a\", \"b\"],\nmethods: [(\"__init__\", \"fn[[a:104[2, 3], b:107], 4]\"), (\"fun\", \"fn[[a:104[2, 3]], 104[3, 0]]\")],\ntype_vars: [\"typevar229\", \"typevar230\"]\n}\n",
"Function {\nname: \"A.__init__\",\nsig: \"fn[[a:104[2, 3], b:107], 4]\",\nvar_id: []\n}\n",
"Function {\nname: \"A.fun\",\nsig: \"fn[[a:104[2, 3]], 104[3, 0]]\",\nvar_id: []\n}\n",
"Class {\nname: \"B\",\nancestors: [\"class_def_107\"],\nfields: [],\nmethods: [(\"__init__\", \"fn[[], 4]\"), (\"foo\", \"fn[[b:107], 107]\"), (\"bar\", \"fn[[a:104[11[107], 0]], tuple[104[virtual[104[107, 0]], 3], 107]]\")],\ntype_vars: []\n}\n",
"Function {\nname: \"B.__init__\",\nsig: \"fn[[], 4]\",\nvar_id: []\n}\n",
"Function {\nname: \"B.foo\",\nsig: \"fn[[b:107], 107]\",\nvar_id: []\n}\n",
"Function {\nname: \"B.bar\",\nsig: \"fn[[a:104[11[107], 0]], tuple[104[virtual[104[107, 0]], 3], 107]]\",\nvar_id: []\n}\n",
]


@ -0,0 +1,9 @@
---
source: nac3core/src/toplevel/test.rs
assertion_line: 576
expression: res_vec
---
[
"Class {\nname: \"A\",\nancestors: [\"class_def_104\"],\nfields: [],\nmethods: [],\ntype_vars: []\n}\n",
]


@ -214,244 +214,246 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) {
} }
} }
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(): // class A():
a: int32 // a: int32
def __init__(self): // def __init__(self):
self.a = 3 // self.a = 3
def fun(self, b: B): // def fun(self, b: B):
pass // pass
def foo(self, a: T, b: V): // def foo(self, a: T, b: V):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B(C): // class B(C):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class C(A): // class C(A):
def __init__(self): // def __init__(self):
pass // pass
def fun(self, b: B): // def fun(self, b: B):
a = 1 // a = 1
pass // pass
"}, // "},
indoc! {" // indoc! {"
def foo(a: A): // def foo(a: A):
pass // pass
"}, // "},
indoc! {" // indoc! {"
def ff(a: T) -> V: // def ff(a: T) -> V:
pass // pass
"} // "}
], // ],
&[]; // &[];
"simple class compose" // "simple class compose"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class Generic_A(Generic[V], B): // class Generic_A(Generic[V], B):
a: int64 // a: int64
def __init__(self): // def __init__(self):
self.a = 123123123123 // self.a = 123123123123
def fun(self, a: int32) -> V: // def fun(self, a: int32) -> V:
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B: // class B:
aa: bool // aa: bool
def __init__(self): // def __init__(self):
self.aa = False // self.aa = False
def foo(self, b: T): // def foo(self, b: T):
pass // pass
"} // "}
], // ],
&[]; // &[];
"generic class" // "generic class"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
def foo(a: list[int32], b: tuple[T, float]) -> A[B, bool]: // def foo(a: list[int32], b: tuple[T, float]) -> A[B, bool]:
pass // pass
"}, // "},
indoc! {" // indoc! {"
class A(Generic[T, V]): // class A(Generic[T, V]):
a: T // a: T
b: V // b: V
def __init__(self, v: V): // def __init__(self, v: V):
self.a = 1 // self.a = 1
self.b = v // self.b = v
def fun(self, a: T) -> V: // def fun(self, a: T) -> V:
pass // pass
"}, // "},
indoc! {" // indoc! {"
def gfun(a: A[list[float], int32]): // def gfun(a: A[list[float], int32]):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B: // class B:
def __init__(self): // def __init__(self):
pass // pass
"} // "}
], // ],
&[]; // &[];
"list tuple generic" // "list tuple generic"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(Generic[T, V]): // class A(Generic[T, V]):
a: A[float, bool] // a: A[float, bool]
b: B // b: B
def __init__(self, a: A[float, bool], b: B): // def __init__(self, a: A[float, bool], b: B):
self.a = a // self.a = a
self.b = b // self.b = b
def fun(self, a: A[float, bool]) -> A[bool, int32]: // def fun(self, a: A[float, bool]) -> A[bool, int32]:
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B(A[int64, bool]): // class B(A[int64, bool]):
def __init__(self): // def __init__(self):
pass // pass
def foo(self, b: B) -> B: // def foo(self, b: B) -> B:
pass // pass
def bar(self, a: A[list[B], int32]) -> tuple[A[virtual[A[B, int32]], bool], B]: // def bar(self, a: A[list[B], int32]) -> tuple[A[virtual[A[B, int32]], bool], B]:
pass // pass
"} // "}
], // ],
&[]; // &[];
"self1" // "self1"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(Generic[T]): // class A(Generic[T]):
a: int32 // a: int32
b: T // b: T
c: A[int64] // c: A[int64]
def __init__(self, t: T): // def __init__(self, t: T):
self.a = 3 // self.a = 3
self.b = T // self.b = T
def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: // def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]:
pass // pass
def foo(self, c: C): // def foo(self, c: C):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B(Generic[V], A[float]): // class B(Generic[V], A[float]):
d: C // d: C
def __init__(self): // def __init__(self):
pass // pass
def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: // def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]:
# override // # override
pass // pass
"}, // "},
indoc! {" // indoc! {"
class C(B[bool]): // class C(B[bool]):
e: int64 // e: int64
def __init__(self): // def __init__(self):
pass // pass
"} // "}
], // ],
&[]; // &[];
"inheritance_override" // "inheritance_override"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(Generic[T]): // class A(Generic[T]):
def __init__(self): // def __init__(self):
pass // pass
def fun(self, a: A[T]) -> A[T]: // def fun(self, a: A[T]) -> A[T]:
pass // pass
"} // "}
], // ],
&["application of type vars to generic class is not currently supported (at unknown:4:24)"]; // &["application of type vars to generic class is not currently supported (at unknown:4:24)"];
"err no type var in generic app" // "err no type var in generic app"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(B): // class A(B):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B(A): // class B(A):
def __init__(self): // def __init__(self):
pass // pass
"} // "}
], // ],
&["cyclic inheritance detected"]; // &["cyclic inheritance detected"];
"cyclic1" // "cyclic1"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(B[bool, int64]): // class A(B[bool, int64]):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B(Generic[V, T], C[int32]): // class B(Generic[V, T], C[int32]):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class C(Generic[T], A): // class C(Generic[T], A):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
], // ],
&["cyclic inheritance detected"]; // &["cyclic inheritance detected"];
"cyclic2" // "cyclic2"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A: // class A:
pass // pass
"} // "}
], // ],
&["5: Class {\nname: \"A\",\ndef_id: DefinitionId(5),\nancestors: [CustomClassKind { id: DefinitionId(5), params: [] }],\nfields: [],\nmethods: [],\ntype_vars: []\n}"]; // &["5: Class {\nname: \"A\",\ndef_id: DefinitionId(5),\nancestors: [CustomClassKind { id: DefinitionId(5), params: [] }],\nfields: [],\nmethods: [],\ntype_vars: []\n}"];
"simple pass in class" // "simple pass in class"
)] // )]
#[test_case( // #[test_case(
&[indoc! {" // &[indoc! {"
class A: // class A:
def __init__(): // def __init__():
pass // pass
"}], // "}],
&["__init__ method must have a `self` parameter (at unknown:2:5)"]; // &["__init__ method must have a `self` parameter (at unknown:2:5)"];
"err no self_1" // "err no self_1"
)] // )]
#[test_case( // #[test_case(
&[ // &[
indoc! {" // indoc! {"
class A(B, Generic[T], C): // class A(B, Generic[T], C):
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class B: // class B:
def __init__(self): // def __init__(self):
pass // pass
"}, // "},
indoc! {" // indoc! {"
class C: // class C:
def __init__(self): // def __init__(self):
pass // pass
"} // "}
], // ],
&["a class definition can only have at most one base class declaration and one generic declaration (at unknown:1:24)"]; // &["a class definition can only have at most one base class declaration and one generic declaration (at unknown:1:24)"];
"err multiple inheritance" // "err multiple inheritance"
)] // )]
/* The incompatible-method test uses the classes in the wrong order, but the error message is not reported here; it probably runs into an exception, so add an early check. */
#[test_case( #[test_case(
&[ &[
indoc! {" indoc! {"
@ -463,7 +465,7 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) {
self.a = 3 self.a = 3
self.b = T self.b = T
def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]:
pass self.a = 1
"}, "},
indoc! {" indoc! {"
class B(Generic[V], A[float]): class B(Generic[V], A[float]):
@ -472,6 +474,7 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) {
def fun(self, a: int32, b: T) -> list[virtual[B[int32]]]: def fun(self, a: int32, b: T) -> list[virtual[B[int32]]]:
# override # override
pass pass
"} "}
], ],
&["method fun has same name as ancestors' method, but incompatible type"]; &["method fun has same name as ancestors' method, but incompatible type"];
@ -487,7 +490,7 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) {
def __init__(self, t: T): def __init__(self, t: T):
self.a = 3 self.a = 3
self.b = T self.b = T
def fun(self, a: int32, b: T) -> list[virtual[B[bool]]]: def fun(self, a: int32, b: T) -> list[virtual[A[bool]]]:
pass pass
"}, "},
indoc! {" indoc! {"
@ -521,7 +524,7 @@ fn test_simple_function_analyze(source: &[&str], tys: &[&str], names: &[&str]) {
"class same name" "class same name"
)] )]
fn test_analyze(source: &[&str], res: &[&str]) { fn test_analyze(source: &[&str], res: &[&str]) {
let print = false; let print = true;
let mut composer = let mut composer =
TopLevelComposer::new(Vec::new(), Vec::new(), ComposerConfig::default(), 64).0; TopLevelComposer::new(Vec::new(), Vec::new(), ComposerConfig::default(), 64).0;


@ -1,6 +1,7 @@
use super::*; use super::*;
use crate::symbol_resolver::SymbolValue; use crate::symbol_resolver::SymbolValue;
use crate::toplevel::helper::{PrimDef, PrimDefDetails}; use crate::toplevel::helper::{PrimDef, PrimDefDetails};
use crate::typecheck::type_inferencer::report_error;
use crate::typecheck::typedef::VarMap; use crate::typecheck::typedef::VarMap;
use nac3parser::ast::Constant; use nac3parser::ast::Constant;
use strum::IntoEnumIterator; use strum::IntoEnumIterator;
@ -97,7 +98,13 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() }) Ok(TypeAnnotation::CustomClass { id: PrimDef::Exception.id(), params: Vec::default() })
} else if let Ok(obj_id) = resolver.get_identifier_def(*id) { } else if let Ok(obj_id) = resolver.get_identifier_def(*id) {
let type_vars = { let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read(); let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
return report_error(
format!("Name Error undefined name {id}").as_str(),
expr.location,
);
};
let def_read = top_level_def.try_read();
if let Some(def_read) = def_read { if let Some(def_read) = def_read {
if let TopLevelDef::Class { type_vars, .. } = &*def_read { if let TopLevelDef::Class { type_vars, .. } = &*def_read {
type_vars.clone() type_vars.clone()
@ -152,7 +159,13 @@ pub fn parse_ast_to_type_annotation_kinds<T, S: std::hash::BuildHasher + Clone>(
} }
let obj_id = resolver.get_identifier_def(*id)?; let obj_id = resolver.get_identifier_def(*id)?;
let type_vars = { let type_vars = {
let def_read = top_level_defs[obj_id.0].try_read(); let Some(top_level_def) = top_level_defs.get(obj_id.0) else {
return report_error(
format!("Name Error undefined name {id}").as_str(),
expr.location,
);
};
let def_read = top_level_def.try_read();
if let Some(def_read) = def_read { if let Some(def_read) = def_read {
let TopLevelDef::Class { type_vars, .. } = &*def_read else { let TopLevelDef::Class { type_vars, .. } = &*def_read else {
unreachable!("must be class here") unreachable!("must be class here")


@ -114,7 +114,7 @@ impl Fold<()> for NaiveFolder {
} }
} }
fn report_error<T>(msg: &str, location: Location) -> Result<T, InferenceError> { pub fn report_error<T>(msg: &str, location: Location) -> Result<T, InferenceError> {
Err(HashSet::from([format!("{msg} at {location}")])) Err(HashSet::from([format!("{msg} at {location}")]))
} }
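This small change is the pivot of the commit: `report_error` becomes `pub`, so the toplevel composer can return uniformly formatted errors instead of hand-building `HashSet::from([format!(...)])` at every call site. A minimal sketch of the pattern, using `HashSet<String>` directly and a simplified `Location` in place of `nac3parser::ast::Location` (illustrative only, not the exact nac3core API):

    use std::collections::HashSet;
    use std::fmt;

    // Simplified stand-in for nac3parser::ast::Location.
    #[derive(Clone, Copy)]
    struct Location {
        row: usize,
        column: usize,
    }

    impl fmt::Display for Location {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "line {} column {}", self.row, self.column)
        }
    }

    // Shared helper: every error message carries its location in one format.
    pub fn report_error<T>(msg: &str, location: Location) -> Result<T, HashSet<String>> {
        Err(HashSet::from([format!("{msg} at {location}")]))
    }

    // A call site returns early through the helper instead of formatting by hand.
    fn check_self_parameter(params: &[&str], location: Location) -> Result<(), HashSet<String>> {
        match params.first() {
            Some(&"self") => Ok(()),
            _ => report_error("class method must have a `self` parameter", location),
        }
    }

    fn main() {
        let loc = Location { row: 3, column: 5 };
        assert!(check_self_parameter(&["self", "a"], loc).is_ok());
        println!("{:?}", check_self_parameter(&["a", "b"], loc).unwrap_err());
    }

In the composer diff above, the same helper is also threaded through `map_err` (for example around `check_default_param_type`) when an existing API still returns a bare string.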


@ -0,0 +1,31 @@
class C:
e: int32
def __init__(self):
pass
def fun(self, a: int32, b: float) -> list[int32]:
pass
def fun2():
pass
class A:
a: int32
b: int32
c: A
def __init__(self, t: int32):
self.a = 3
self.b = 1
def fun(self, a: int32, b: int32) -> list[virtual[A]]:
pass
class B(A):
e: int32
def __init__(self):
pass
def fun(self, a: int32, b: float) -> list[virtual[B]]:
# override
pass

BIN
pyo3/nac3artiq.so Executable file

Binary file not shown.