forked from M-Labs/nac3
Refactor Toplevel composer
This commit is contained in:
parent c5ae0e7c36
commit 28e34e9444
@@ -180,7 +180,9 @@
             clippy
             pre-commit
             rustfmt
+            rust-analyzer
           ];
+          RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}";
           shellHook =
             ''
               export DEMO_LINALG_STUB=${packages.x86_64-linux.demo-linalg-stub}/lib/liblinalg.a
BIN  nac3artiq/demo/dataset_db.mdb (new file, binary file not shown)
BIN  nac3artiq/demo/dataset_db.mdb-lock (new file, binary file not shown)
@@ -1,26 +1,87 @@
 from min_artiq import *
+from numpy import int32
+
+
+# @nac3
+# class A:
+#     a: int32
+#     core: KernelInvariant[Core]
+
+#     def __init__(self, a: int32):
+#         self.core = Core()
+#         self.a = a
+
+#     @kernel
+#     def output_all_fields(self):
+#         #print(self.a)
+#         pass
+
+#     @kernel
+#     def set_a(self, a: int32):
+#         self.a = a
+
+
+# @nac3
+# class B(A):
+#     b: int32
+
+#     def __init__(self, b: int32):
+#         # A.__init__(self, b + 1)
+#         self.core = Core()
+#         self.a = b
+#         self.b = b
+#         self.set_b(b)
+
+#     @kernel
+#     def output_parent_fields(self):
+#         # A.output_all_fields(self)
+#         pass
+
+#     @kernel
+#     def output_all_fields(self):
+#         # A.output_all_fields(self)
+#         pass
+#         #print(self.b)
+
+#     @kernel
+#     def set_b(self, b: int32):
+#         self.b = b
+

 @nac3
-class Demo:
+class C:
+    c: Kernel[int32]
+    a: Kernel[int32]
+    b: Kernel[int32]
     core: KernelInvariant[Core]
-    led0: KernelInvariant[TTLOut]
-    led1: KernelInvariant[TTLOut]

-    def __init__(self):
+    def __init__(self, c: int32):
+        # B.__init__(self, c + 1)
         self.core = Core()
-        self.led0 = TTLOut(self.core, 18)
-        self.led1 = TTLOut(self.core, 19)
+        self.a = c
+        self.b = c
+        self.c = c
+
+    @kernel
+    def output_parent_fields(self):
+        # B.output_all_fields(self)
+        pass
+
+    @kernel
+    def output_all_fields(self):
+        # B.output_all_fields(self)
+        #print(self.c)
+        pass
+
+    @kernel
+    def set_c(self, c: int32):
+        self.c = c

     @kernel
     def run(self):
-        self.core.reset()
-        while True:
-            with parallel:
-                self.led0.pulse(100.*ms)
-                self.led1.pulse(100.*ms)
-            self.core.delay(100.*ms)
+        self.output_all_fields()
+        # self.set_a(1)
+        # self.set_b(2)
+        self.set_c(3)
+        self.output_all_fields()


 if __name__ == "__main__":
-    Demo().run()
+    C(10).run()
BIN  nac3artiq/demo/module.elf (new file, binary file not shown)
@@ -5,7 +5,7 @@ use crate::{
     codegen::{expr::get_subst_key, stmt::exn_constructor},
     symbol_resolver::SymbolValue,
     typecheck::{
-        type_inferencer::{FunctionData, Inferencer},
+        type_inferencer::{report_error, FunctionData, Inferencer},
         typedef::{TypeVar, VarMap},
     },
 };
@@ -389,7 +389,26 @@ impl TopLevelComposer {
     }

     pub fn start_analysis(&mut self, inference: bool) -> Result<(), HashSet<String>> {
-        self.analyze_top_level_class_type_var()?;
+        let unifier = self.unifier.borrow_mut();
+        let primitives_store = &self.primitives_ty;
+        let temp_def_list = self.extract_def_list();
+
+        // Separate class definitions
+        let def_list = &self.definition_ast_list;
+        let class_def_list = def_list
+            .iter()
+            .skip(self.builtin_num)
+            .filter(|def| def.1.is_some() && matches!(&*def.0.read(), TopLevelDef::Class { .. }))
+            .collect::<Vec<_>>();
+
+        // Step 1. Analyze type variables within class definitions
+        Self::analyze_top_level_class_type_var(
+            class_def_list,
+            temp_def_list.clone(),
+            unifier,
+            primitives_store,
+        )?;
+
         self.analyze_top_level_class_bases()?;
         self.analyze_top_level_class_fields_methods()?;
         self.analyze_top_level_function()?;
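Aside: the hunk above replaces a `&mut self` helper call with a shape where `start_analysis` borrows the unifier, the primitive store, and the filtered class list once, then hands them to an associated function. The sketch below is a minimal, stand-alone illustration of that borrow-splitting pattern; `Composer` and `Entry` are hypothetical stand-ins, not the real NAC3 types.

use std::collections::HashSet;

struct Entry {
    name: String,
}

struct Composer {
    entries: Vec<Entry>,
    errors: HashSet<String>,
}

impl Composer {
    fn analyze_all(&mut self) {
        // Split the borrows up front, then call an associated function that
        // only sees the pieces it needs (mirroring how the diff passes the
        // unifier, the primitive store, and the class list explicitly).
        let entries = &self.entries;
        let errors = &mut self.errors;
        Self::analyze_entries(entries, errors);
    }

    // An associated function with explicit parameters can be exercised
    // without constructing (or exclusively borrowing) a whole `Composer`.
    fn analyze_entries(entries: &[Entry], errors: &mut HashSet<String>) {
        for e in entries {
            if e.name.is_empty() {
                errors.insert("empty name".to_string());
            }
        }
    }
}

fn main() {
    let mut c = Composer {
        entries: vec![Entry { name: String::new() }],
        errors: HashSet::new(),
    };
    c.analyze_all();
    assert_eq!(c.errors.len(), 1);
}

Splitting the borrows this way keeps each helper's inputs explicit, so one field can be mutated while others stay independently borrowed.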
@@ -399,33 +418,30 @@
         Ok(())
     }

-    /// step 1, analyze the type vars associated with top level class
-    fn analyze_top_level_class_type_var(&mut self) -> Result<(), HashSet<String>> {
-        let def_list = &self.definition_ast_list;
-        let temp_def_list = self.extract_def_list();
-        let unifier = self.unifier.borrow_mut();
-        let primitives_store = &self.primitives_ty;
-
-        let mut analyze = |class_def: &Arc<RwLock<TopLevelDef>>, class_ast: &Option<Stmt>| {
-            // only deal with class def here
-            let mut class_def = class_def.write();
-            let (class_bases_ast, class_def_type_vars, class_resolver) = {
-                if let TopLevelDef::Class { type_vars, resolver, .. } = &mut *class_def {
-                    let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) =
-                        class_ast
-                    else {
-                        unreachable!()
-                    };
-
-                    (bases, type_vars, resolver)
-                } else {
-                    return Ok(());
-                }
-            };
-            let class_resolver = class_resolver.as_ref().unwrap();
-            let class_resolver = &**class_resolver;
-
-            let mut is_generic = false;
-            for b in class_bases_ast {
-                match &b.node {
-                    // analyze typevars bounded to the class,
+    fn analyze_bases(
+        class_def: &Arc<RwLock<TopLevelDef>>,
+        class_ast: &Option<Stmt>,
+        temp_def_list: &Vec<Arc<RwLock<TopLevelDef>>>,
+        unifier: &mut Unifier,
+        primitives_store: &PrimitiveStore,
+    ) -> Result<(), HashSet<String>> {
+        let mut class_def = class_def.write();
+        let (class_def_id, class_ancestors, class_bases_ast, class_type_vars, class_resolver) = {
+            let TopLevelDef::Class { object_id, ancestors, type_vars, resolver, .. } =
+                &mut *class_def
+            else {
+                unreachable!()
+            };
+            let Some(ast::Located { node: ast::StmtKind::ClassDef { bases, .. }, .. }) = class_ast
+            else {
+                unreachable!()
+            };
+            (object_id, ancestors, bases, type_vars, resolver.as_ref().unwrap().as_ref())
+        };
+
+        let mut is_generic = false;
+        let mut has_base = false;
+        // Check class bases for typevars
+        for b in class_bases_ast {
+            match &b.node {
+                // analyze typevars bounded to the class,
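Aside: the rewritten `analyze_bases` above destructures both the `TopLevelDef::Class` variant and the `ClassDef` AST node with `let ... else`, diverging via `unreachable!()` when the variant does not match. Below is a small illustrative example of that pattern with a toy enum, not the real NAC3 types.

#[allow(dead_code)]
enum TopDef {
    Class { name: String, type_vars: Vec<String> },
    Function { name: String },
}

fn class_parts(def: &TopDef) -> (&str, &[String]) {
    // `let ... else` binds the expected variant and diverges otherwise,
    // keeping the happy path unindented compared to nested `if let`.
    let TopDef::Class { name, type_vars } = def else {
        unreachable!("caller guarantees a class definition")
    };
    (name.as_str(), type_vars.as_slice())
}

fn main() {
    let def = TopDef::Class { name: "C".into(), type_vars: vec!["T".into()] };
    let (name, tvs) = class_parts(&def);
    println!("{name} has {} type var(s)", tvs.len());
}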
@@ -433,19 +449,10 @@
                 // things like `class A(Generic[T, V, ImportedModule.T])` is not supported
                 // i.e. only simple names are allowed in the subscript
                 // should update the TopLevelDef::Class.typevars and the TypeEnum::TObj.params
-                ast::ExprKind::Subscript { value, slice, .. }
-                    if {
-                        matches!(
-                            &value.node,
-                            ast::ExprKind::Name { id, .. } if id == &"Generic".into()
-                        )
-                    } =>
+                ast::ExprKind::Subscript { value, slice, .. } if matches!(&value.node, ast::ExprKind::Name { id, .. } if id == &"Generic".into()) =>
                 {
                     if is_generic {
-                        return Err(HashSet::from([format!(
-                            "only single Generic[...] is allowed (at {})",
-                            b.location
-                        )]));
+                        return report_error("only single Generic[...] is allowed", b.location);
                     }
                     is_generic = true;

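Aside: the hunk above collapses the multi-line block guard into an inline `if matches!(...)` guard and routes the error through `report_error`. A tiny stand-alone example of the inline `matches!` guard shape, using a hypothetical `Expr` type:

enum Expr {
    Name { id: String },
    Subscript { value: Box<Expr> },
}

fn describe(e: &Expr) -> &'static str {
    match e {
        // Guard the arm with `matches!` on a sub-expression, the same shape the
        // diff uses to recognise `Generic[...]` bases without a block guard.
        Expr::Subscript { value } if matches!(&**value, Expr::Name { id } if id == "Generic") => {
            "generic parameter list"
        }
        Expr::Subscript { .. } => "other subscript",
        Expr::Name { .. } => "plain name",
    }
}

fn main() {
    let e = Expr::Subscript { value: Box::new(Expr::Name { id: "Generic".into() }) };
    println!("{}", describe(&e));
}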
@@ -458,7 +465,6 @@
                     type_var_list = vec![&**slice];
                 }

-                // parse the type vars
                 let type_vars = type_var_list
                     .into_iter()
                     .map(|e| {
@@ -471,44 +477,58 @@
                         })
                         .collect::<Result<Vec<_>, _>>()?;

-                        // check if all are unique type vars
-                        let all_unique_type_var = {
-                            let mut occurred_type_var_id: HashSet<TypeVarId> = HashSet::new();
-                            type_vars.iter().all(|x| {
-                                let ty = unifier.get_ty(*x);
-                                if let TypeEnum::TVar { id, .. } = ty.as_ref() {
-                                    occurred_type_var_id.insert(*id)
-                                } else {
-                                    false
-                                }
-                            })
-                        };
-                        if !all_unique_type_var {
-                            return Err(HashSet::from([format!(
-                                "duplicate type variable occurs (at {})",
-                                slice.location
-                            )]));
-                        }
-
-                        // add to TopLevelDef
-                        class_def_type_vars.extend(type_vars);
-                    }
-
-                    // if others, do nothing in this function
-                    _ => continue,
-                }
-            }
-            Ok(())
-        };
-        let mut errors = HashSet::new();
-        for (class_def, class_ast) in def_list.iter().skip(self.builtin_num) {
-            if class_ast.is_none() {
-                continue;
-            }
-            if let Err(e) = analyze(class_def, class_ast) {
+                    class_type_vars.extend(type_vars);
+                }
+                ast::ExprKind::Subscript { .. } => {
+                    if has_base {
+                        return report_error("a class definition can only have at most one base class declaration and one generic declaration", b.location);
+                    }
+                    has_base = true;
+
+                    // the function parse_ast_to make sure that no type var occurred in
+                    // bast_ty if it is a CustomClassKind
+                    let base_ty = parse_ast_to_type_annotation_kinds(
+                        class_resolver,
+                        &temp_def_list,
+                        unifier,
+                        &primitives_store,
+                        b,
+                        vec![(*class_def_id, class_type_vars.clone())]
+                            .into_iter()
+                            .collect::<HashMap<_, _>>(),
+                    )?;
+
+                    if let TypeAnnotation::CustomClass { .. } = &base_ty {
+                        class_ancestors.push(base_ty);
+                    } else {
+                        return report_error(
+                            "class base declaration can only be custom class",
+                            b.location,
+                        );
+                    }
+                }
+                // TODO: Report Error here
+                _ => unreachable!(),
+            }
+        }
+        Ok(())
+    }
+
+    /// step 1, analyze the type vars associated with top level class
+    fn analyze_top_level_class_type_var(
+        def_list: Vec<&DefAst>,
+        temp_def_list: Vec<Arc<RwLock<TopLevelDef>>>,
+        unifier: &mut Unifier,
+        primitives_store: &PrimitiveStore,
+    ) -> Result<(), HashSet<String>> {
+        let mut errors = HashSet::new();
+        for (class_def, class_ast) in def_list.iter() {
+            if let Err(e) = Self::analyze_bases(class_def, class_ast, &temp_def_list, unifier, primitives_store) {
                 errors.extend(e);
             }
         }

         if !errors.is_empty() {
             return Err(errors);
         }
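Aside: the new `analyze_top_level_class_type_var` wrapper above keeps iterating after a per-class failure and merges every error set before returning. A short sketch of that accumulate-then-fail pattern, detached from the real definitions:

use std::collections::HashSet;

fn check_one(name: &str) -> Result<(), HashSet<String>> {
    if name.chars().all(|c| c.is_ascii_alphanumeric()) {
        Ok(())
    } else {
        Err(HashSet::from([format!("invalid class name `{name}`")]))
    }
}

fn check_all(names: &[&str]) -> Result<(), HashSet<String>> {
    // Collect errors from every item instead of stopping at the first one,
    // so a single run reports all offending definitions at once.
    let mut errors = HashSet::new();
    for name in names {
        if let Err(e) = check_one(name) {
            errors.extend(e);
        }
    }
    if errors.is_empty() {
        Ok(())
    } else {
        Err(errors)
    }
}

fn main() {
    let report = check_all(&["Demo", "bad name", "C?"]).unwrap_err();
    for msg in &report {
        eprintln!("error: {msg}");
    }
}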
@@ -112,7 +112,7 @@ impl Fold<()> for NaiveFolder {
     }
 }

-fn report_error<T>(msg: &str, location: Location) -> Result<T, InferenceError> {
+pub fn report_error<T>(msg: &str, location: Location) -> Result<T, InferenceError> {
     Err(HashSet::from([format!("{msg} at {location}")]))
 }

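Aside: with `report_error` now public, the composer can produce location-tagged errors in the same format as the type inferencer. The sketch below shows equivalent usage under stated assumptions: `Location` here is a stand-in for the parser's type, and the error type is spelled as `HashSet<String>`, which is what the function body implies.

use std::collections::HashSet;
use std::fmt;

// Stand-in for the parser's `Location`; only `Display` matters for the message.
struct Location {
    row: usize,
    column: usize,
}

impl fmt::Display for Location {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "line {} column {}", self.row, self.column)
    }
}

// Same shape as the helper made `pub` in the hunk above: the message is
// suffixed with the offending location and wrapped in a one-element error set.
fn report_error<T>(msg: &str, location: Location) -> Result<T, HashSet<String>> {
    Err(HashSet::from([format!("{msg} at {location}")]))
}

fn main() {
    let err: Result<(), _> = report_error(
        "only single Generic[...] is allowed",
        Location { row: 12, column: 7 },
    );
    println!("{:?}", err.unwrap_err());
}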
BIN  pyo3/nac3artiq.so (new executable file, binary file not shown)